turn off all repos in container

Louis Abel 2022-07-06 23:23:07 -07:00
parent 73f08780d9
commit d482019ca5
Signed by untrusted user: label
GPG Key ID: B37E62D143879B36
6 changed files with 127 additions and 103 deletions

View File

@@ -44,64 +44,69 @@
   has_modules:
     - 'AppStream'
     - 'PowerTools'
-  iso_map:
-    hosts:
-      x86_64: ''
-      aarch64: ''
-      ppc64le: ''
-      s390x: ''
-    images:
-      - dvd1
-      - minimal
-      - boot
-    repos:
-      - 'BaseOS'
-      - 'AppStream'
-    variant: 'BaseOS'
-    lorax_removes:
-      - 'libreport-rhel-anaconda-bugzilla'
-    required_packages:
-      - 'lorax'
-      - 'genisoimage'
-      - 'isomd5sum'
-      - 'lorax-templates-rhel'
-      - 'lorax-templates-generic'
   structure:
     packages: 'os/Packages'
     repodata: 'os/repodata'
   iso_map:
     xorrisofs: False
     iso_level: False
-    hosts:
-      x86_64: ''
-      aarch64: ''
     images:
       dvd:
+        disc: True
+        variant: 'AppStream'
         repos:
           - 'BaseOS'
           - 'AppStream'
-    lorax_variants:
-      - dvd
-      - minimal
-      - BaseOS
-    repos:
-      - 'BaseOS'
-      - 'AppStream'
-    variant: 'BaseOS'
-    lorax_removes:
-      - 'libreport-rhel-anaconda-bugzilla'
-    required_pkgs:
-      - 'lorax'
-      - 'genisoimage'
-      - 'isomd5sum'
-      - 'lorax-templates-rhel'
-      - 'lorax-templates-generic'
+      minimal:
+        disc: True
+        isoskip: True
+        repos:
+          - 'minimal'
+        variant: 'minimal'
+      BaseOS:
+        disc: False
+        isoskip: True
+        variant: 'BaseOS'
+        repos:
+          - 'BaseOS'
+          - 'AppStream'
+    lorax:
+      repos:
+        - 'BaseOS'
+        - 'AppStream'
+      variant: 'BaseOS'
+      lorax_removes:
+        - 'libreport-rhel-anaconda-bugzilla'
+      required_pkgs:
+        - 'lorax'
+        - 'genisoimage'
+        - 'isomd5sum'
+        - 'lorax-templates-rhel'
+        - 'lorax-templates-generic'
+        - 'xorriso'
+  cloudimages:
+    images:
+      - EC2
+      - GenericCloud
+    formats:
+      - qcow2
+      - raw
+  livemap:
+    git_repo: 'https://git.resf.org/sig_core/kickstarts.git'
+    branch: 'r9'
+    ksentry:
+      Workstation: rocky-live-workstation.ks
+      Workstation-Lite: rocky-live-workstation-lite.ks
+      XFCE: rocky-live-xfce.ks
+      KDE: rocky-live-kde.ks
+    allowed_arches:
+      - x86_64
   repoclosure_map:
     arches:
-      x86_64: '--arch=x86_64 --arch=athlon --arch=i686 --arch=i586 --arch=i486 --arch=i386 --arch=noarch'
-      aarch64: '--arch=aarch64 --arch=noarch'
-      ppc64le: '--arch=ppc64le --arch=noarch'
-      s390x: '--arch=s390x --arch=noarch'
+      x86_64: '--forcearch=x86_64 --arch=x86_64 --arch=athlon --arch=i686 --arch=i586 --arch=i486 --arch=i386 --arch=noarch'
+      aarch64: '--forcearch=aarch64 --arch=aarch64 --arch=noarch'
+      ppc64le: '--forcearch=ppc64le --arch=ppc64le --arch=noarch'
+      s390x: '--forcearch=s390x --arch=s390x --arch=noarch'
    repos:
      BaseOS: []
      AppStream:
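
The hunk above replaces the flat iso_map/lorax_variants layout with per-image mappings plus a dedicated lorax section. Below is a minimal sketch of how a consumer might read the new layout, assuming the YAML is loaded with PyYAML into a plain dict; the file name, the top-level release key, and the print statements are illustrative only and not part of the toolkit.

import yaml

# Hypothetical file name and release key, for illustration only.
with open('example-config.yaml') as f:
    config = yaml.safe_load(f)['9']

iso_map = config['iso_map']

# Each ISO image now carries its own flags and repo list.
for name, image in iso_map['images'].items():
    print(name,
          'disc:', image.get('disc'),
          'isoskip:', image.get('isoskip', False),
          'repos:', image.get('repos'))

# Lorax settings moved under their own key instead of sitting at the top of iso_map.
lorax = iso_map['lorax']
print('lorax variant:', lorax['variant'], 'required_pkgs:', lorax['required_pkgs'])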

View File

@@ -73,8 +73,12 @@
         - 'lorax-templates-generic'
         - 'xorriso'
   cloudimages:
-    - EC2
-    - GenericCloud
+    images:
+      - EC2
+      - GenericCloud
+    formats:
+      - qcow2
+      - raw
   livemap:
     git_repo: 'https://git.resf.org/sig_core/kickstarts.git'
     branch: 'r9-beta'
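
The same cloudimages reshaping repeats in the remaining configs below. As a sketch of the shape change, with the mapping hand-written rather than loaded from the files, this is the image-by-format iteration the downloader in the last file relies on:

# Before this commit: a flat list of image names.
old_cloudimages = ['EC2', 'GenericCloud']

# After: image names and disk formats are separate lists,
# so the download code can walk every image/format pair.
new_cloudimages = {
    'images': ['EC2', 'GenericCloud'],
    'formats': ['qcow2', 'raw'],
}

for imagename in new_cloudimages['images']:
    for formattype in new_cloudimages['formats']:
        print(imagename, formattype)  # EC2 qcow2, EC2 raw, GenericCloud qcow2, ...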

View File

@@ -2,7 +2,7 @@
 '9':
   fullname: 'Rocky Linux 9.0'
   revision: '9.0'
-  rclvl: 'RC2'
+  rclvl: 'RC3'
   major: '9'
   minor: '0'
   profile: '9'
@@ -73,8 +73,12 @@
         - 'lorax-templates-generic'
         - 'xorriso'
   cloudimages:
-    - EC2
-    - GenericCloud
+    images:
+      - EC2
+      - GenericCloud
+    formats:
+      - qcow2
+      - raw
   livemap:
     git_repo: 'https://git.resf.org/sig_core/kickstarts.git'
     branch: 'r9'

View File

@@ -73,8 +73,12 @@
         - 'lorax-templates-generic'
         - 'xorriso'
   cloudimages:
-    - EC2
-    - GenericCloud
+    images:
+      - EC2
+      - GenericCloud
+    formats:
+      - qcow2
+      - raw
   livemap:
     git_repo: 'https://git.resf.org/sig_core/kickstarts.git'
     branch: 'r9lh'

View File

@@ -1,5 +1,6 @@
 #!/bin/bash
 set -o pipefail
+sed -i 's/enabled=1/enabled=0/g' /etc/yum.repos.d/*.repo
 {{ import_gpg_cmd }} | tee -a {{ sync_log }}
 {{ arch_force_cp }} | tee -a {{ sync_log }}
 {{ dnf_plugin_cmd }} | tee -a {{ sync_log }}
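
The added sed line disables every repository already configured in the container image before the sync commands run, presumably so that only repos the tooling configures itself are consulted. A Python sketch of the same substitution on a made-up repo stanza; the stanza content is illustrative, not a real Rocky repo file.

import re

# Made-up stanza standing in for a file under /etc/yum.repos.d/.
repo_before = """[baseos]
name=Example BaseOS repo
baseurl=https://example.invalid/baseos/
enabled=1
gpgcheck=1
"""

# Equivalent of: sed -i 's/enabled=1/enabled=0/g' /etc/yum.repos.d/*.repo
repo_after = re.sub('enabled=1', 'enabled=0', repo_before)
print(repo_after)  # the repo is now enabled=0, i.e. turned off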

View File

@@ -1543,68 +1543,74 @@ class IsoBuild:
             unpack_single_arch = True
             arches_to_unpack = [self.arch]

-        for imagename in self.cloudimages:
+        for imagename in self.cloudimages['images']:
             self.log.info(Color.INFO + 'Determining the latest images for ' + imagename + ' ...')

-            if self.s3:
-                latest_artifacts = Shared.s3_determine_latest(
-                        self.s3_bucket,
-                        self.release,
-                        self.arches,
-                        'qcow2',
-                        imagename,
-                        self.log
-                )
-            else:
-                latest_artifacts = Shared.reqs_determine_latest(
-                        self.s3_bucket_url,
-                        self.release,
-                        self.arches,
-                        'qcow2',
-                        imagename,
-                        self.log
-                )
-
-            if not len(latest_artifacts) > 0:
-                self.log.warn(Color.WARN + 'No images found.')
-                continue
-
-            self.log.info(Color.INFO + 'Downloading requested artifacts')
-            for arch in arches_to_unpack:
-                image_arch_dir = os.path.join(
-                        self.image_work_dir,
-                        arch
-                )
-
-                source_path = latest_artifacts[arch]
-                drop_name = source_path.split('/')[-1]
-                full_drop = '{}/{}'.format(
-                        image_arch_dir,
-                        drop_name
-                )
-
-                if not os.path.exists(image_arch_dir):
-                    os.makedirs(image_arch_dir, exist_ok=True)
-
-                self.log.info('Downloading artifact for ' + Color.BOLD + arch + Color.END)
-
-                if self.s3:
-                    Shared.s3_download_artifacts(
-                            self.force_download,
-                            self.s3_bucket,
-                            source_path,
-                            full_drop,
-                            self.log
-                    )
-                else:
-                    Shared.reqs_download_artifacts(
-                            self.force_download,
-                            self.s3_bucket_url,
-                            source_path,
-                            full_drop,
-                            self.log
-                    )
+            for formattype in self.cloudimages['formats']:
+                if self.s3:
+                    latest_artifacts = Shared.s3_determine_latest(
+                            self.s3_bucket,
+                            self.release,
+                            self.arches,
+                            formattype,
+                            imagename,
+                            self.log
+                    )
+                else:
+                    latest_artifacts = Shared.reqs_determine_latest(
+                            self.s3_bucket_url,
+                            self.release,
+                            self.arches,
+                            formattype,
+                            imagename,
+                            self.log
+                    )
+
+                if not len(latest_artifacts) > 0:
+                    self.log.warn(Color.WARN + 'No images found.')
+                    continue
+
+                self.log.info(Color.INFO + 'Attempting to download requested artifacts')
+                for arch in arches_to_unpack:
+                    image_arch_dir = os.path.join(
+                            self.image_work_dir,
+                            arch
+                    )
+
+                    if arch not in latest_artifacts.keys():
+                        self.log.warn(Color.WARN + 'Artifact for ' + imagename +
+                                ' ' + arch + ' (' + formattype + ') does not exist.')
+                        continue
+
+                    source_path = latest_artifacts[arch]
+                    drop_name = source_path.split('/')[-1]
+                    full_drop = '{}/{}'.format(
+                            image_arch_dir,
+                            drop_name
+                    )
+
+                    if not os.path.exists(image_arch_dir):
+                        os.makedirs(image_arch_dir, exist_ok=True)
+
+                    self.log.info('Downloading artifact for ' + Color.BOLD + arch + Color.END)
+
+                    if self.s3:
+                        Shared.s3_download_artifacts(
+                                self.force_download,
+                                self.s3_bucket,
+                                source_path,
+                                full_drop,
+                                self.log
+                        )
+                    else:
+                        Shared.reqs_download_artifacts(
+                                self.force_download,
+                                self.s3_bucket_url,
+                                source_path,
+                                full_drop,
+                                self.log
+                        )

         self.log.info(Color.INFO + 'Image download phase completed')
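
Two behavioural changes in the rewritten loop: artifacts are now fetched once per image format instead of hard-coding 'qcow2', and a missing architecture no longer raises. A sketch of the second point, assuming the Shared.*_determine_latest helpers return a mapping of arch to artifact path as the indexing in the diff suggests; the sample paths below are invented.

# Invented example data: only x86_64 has an artifact for this format.
latest_artifacts = {
    'x86_64': 'Rocky-9-GenericCloud-x86_64.qcow2',
}
arches_to_unpack = ['x86_64', 'aarch64']

for arch in arches_to_unpack:
    # The old code indexed latest_artifacts[arch] directly and would raise
    # KeyError for aarch64; the new guard logs a warning and skips instead.
    if arch not in latest_artifacts:
        print('skipping', arch, '- artifact does not exist for this format')
        continue
    print('would download', latest_artifacts[arch])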