Merge pull request 'feature/variant-images-download' (#3) from feature/variant-images-download into devel
Reviewed-on: #3
Commit: 67d3ed3046
@@ -103,10 +103,10 @@ for conf in glob.iglob(f"{_rootdir}/sig/*.yaml"):
 ALLOWED_TYPE_VARIANTS = {
-    "Azure": None,
+    "Azure": ["Base", "LVM"],
     "Container": ["Base", "Minimal", "UBI"],
-    "EC2": None,
-    "GenericCloud": None,
+    "EC2": ["Base", "LVM"],
+    "GenericCloud": ["Base", "LVM"],
     "Vagrant": ["Libvirt", "Vbox"],
     "OCP": None
 }
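Each image type now maps to the list of variants it may carry, with None still meaning the type has no variants. A minimal sketch (not part of the diff) of how such a mapping can be used to validate a requested type/variant pair; the helper name check_type_variant is hypothetical:

    # Hypothetical helper, not from this PR: validates a type/variant pair
    # against the ALLOWED_TYPE_VARIANTS mapping defined above.
    def check_type_variant(image_type, variant=None):
        if image_type not in ALLOWED_TYPE_VARIANTS:
            raise ValueError(f"unknown image type: {image_type}")
        allowed = ALLOWED_TYPE_VARIANTS[image_type]
        if allowed is None:
            # Types such as OCP carry no variants at all.
            return variant is None
        return variant in allowed

    # After this change Azure accepts Base and LVM, while OCP still has none.
    assert check_type_variant("Azure", "LVM")
    assert not check_type_variant("OCP", "Base")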
@@ -74,10 +74,23 @@
    - 'xorriso'
  cloudimages:
    images:
      Azure:
        format: vhd
        variants: [Base, LVM]
        primary_variant: 'Base'
      EC2:
        format: raw
        variants: [Base, LVM]
        primary_variant: 'Base'
      GenericCloud:
        format: qcow2
        variants: [Base, LVM]
        primary_variant: 'Base'
      Container:
        format: tar.xz
        variants: [Base, Minimal, UBI]
      OCP:
        format: qcow2
  livemap:
    git_repo: 'https://git.resf.org/sig_core/kickstarts.git'
    branch: 'r8'
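The same cloudimages block is added to every release configuration that follows; only the livemap branch differs. A minimal sketch (not part of the diff) of how the new per-image keys are consumed, mirroring the lookups in the IsoBuild change further down; the file name and the top-level layout of the loaded document are illustrative assumptions:

    import yaml

    # Illustrative load; empanadas resolves its config files elsewhere.
    with open("r8.yaml") as f:
        cloudimages = yaml.safe_load(f)["cloudimages"]

    for name, extra in cloudimages["images"].items():
        formattype = extra["format"]
        variants = extra.get("variants", [None])   # [None]: loop once, no variant suffix
        primary = extra.get("primary_variant")     # absent for Container and OCP
        print(name, formattype, variants, primary)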
@@ -74,10 +74,23 @@
    - 'xorriso'
  cloudimages:
    images:
      Azure:
        format: vhd
        variants: [Base, LVM]
        primary_variant: 'Base'
      EC2:
        format: raw
        variants: [Base, LVM]
        primary_variant: 'Base'
      GenericCloud:
        format: qcow2
        variants: [Base, LVM]
        primary_variant: 'Base'
      Container:
        format: tar.xz
        variants: [Base, Minimal, UBI]
      OCP:
        format: qcow2
  livemap:
    git_repo: 'https://git.resf.org/sig_core/kickstarts.git'
    branch: 'r8'
@@ -74,10 +74,23 @@
    - 'xorriso'
  cloudimages:
    images:
      Azure:
        format: vhd
        variants: [Base, LVM]
        primary_variant: 'Base'
      EC2:
        format: raw
        variants: [Base, LVM]
        primary_variant: 'Base'
      GenericCloud:
        format: qcow2
        variants: [Base, LVM]
        primary_variant: 'Base'
      Container:
        format: tar.xz
        variants: [Base, Minimal, UBI]
      OCP:
        format: qcow2
  livemap:
    git_repo: 'https://git.resf.org/sig_core/kickstarts.git'
    branch: 'r8'
@@ -77,10 +77,23 @@
    - 'xorriso'
  cloudimages:
    images:
      Azure:
        format: vhd
        variants: [Base, LVM]
        primary_variant: 'Base'
      EC2:
        format: raw
        variants: [Base, LVM]
        primary_variant: 'Base'
      GenericCloud:
        format: qcow2
        variants: [Base, LVM]
        primary_variant: 'Base'
      Container:
        format: tar.xz
        variants: [Base, Minimal, UBI]
      OCP:
        format: qcow2
  livemap:
    git_repo: 'https://git.resf.org/sig_core/kickstarts.git'
    branch: 'r9-beta'
@@ -78,10 +78,23 @@
    - 'xorriso'
  cloudimages:
    images:
      Azure:
        format: vhd
        variants: [Base, LVM]
        primary_variant: 'Base'
      EC2:
        format: raw
        variants: [Base, LVM]
        primary_variant: 'Base'
      GenericCloud:
        format: qcow2
        variants: [Base, LVM]
        primary_variant: 'Base'
      Container:
        format: tar.xz
        variants: [Base, Minimal, UBI]
      OCP:
        format: qcow2
  livemap:
    git_repo: 'https://git.resf.org/sig_core/kickstarts.git'
    branch: 'r9'
@@ -77,10 +77,23 @@
    - 'xorriso'
  cloudimages:
    images:
      Azure:
        format: vhd
        variants: [Base, LVM]
        primary_variant: 'Base'
      EC2:
        format: raw
        variants: [Base, LVM]
        primary_variant: 'Base'
      GenericCloud:
        format: qcow2
        variants: [Base, LVM]
        primary_variant: 'Base'
      Container:
        format: tar.xz
        variants: [Base, Minimal, UBI]
      OCP:
        format: qcow2
  livemap:
    git_repo: 'https://git.resf.org/sig_core/kickstarts.git'
    branch: 'r9lh'
@@ -74,10 +74,23 @@
    - 'xorriso'
  cloudimages:
    images:
      Azure:
        format: vhd
        variants: [Base, LVM]
        primary_variant: 'Base'
      EC2:
        format: raw
        variants: [Base, LVM]
        primary_variant: 'Base'
      GenericCloud:
        format: qcow2
        variants: [Base, LVM]
        primary_variant: 'Base'
      Container:
        format: tar.xz
        variants: [Base, Minimal, UBI]
      OCP:
        format: qcow2
  livemap:
    git_repo: 'https://git.resf.org/sig_core/kickstarts.git'
    branch: 'rln'
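The IsoBuild hunk below replaces the flat, per-image artifact lookup with a nested structure keyed by image name, then variant name, then architecture, and records each image's primary variant alongside it. A sketch of the shape it builds; the arch-keyed values are placeholders for the paths returned by Shared.s3_determine_latest / Shared.reqs_determine_latest, not real locations:

    # Shape only; path strings are placeholders, not real artifact locations.
    latest_artifacts = {
        "EC2": {
            "primary": "Base",
            "EC2-Base": {"x86_64": "<latest EC2-Base x86_64 path>", "aarch64": "<...>"},
            "EC2-LVM":  {"x86_64": "<latest EC2-LVM x86_64 path>",  "aarch64": "<...>"},
        },
        "OCP": {
            "primary": None,
            "OCP": {"x86_64": "<latest OCP x86_64 path>"},
        },
    }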
@@ -1357,128 +1357,194 @@ class IsoBuild:
         """
         unpack_single_arch = False
         arches_to_unpack = self.arches
+        latest_artifacts = {}
         if self.arch:
             unpack_single_arch = True
             arches_to_unpack = [self.arch]
 
-        for imagename in self.cloudimages['images']:
-            self.log.info(Color.INFO + 'Determining the latest images for ' + imagename + ' ...')
-            formattype = self.cloudimages['images'][imagename]['format']
-
-            if self.s3:
-                latest_artifacts = Shared.s3_determine_latest(
-                    self.s3_bucket,
-                    self.release,
-                    arches_to_unpack,
-                    formattype,
-                    imagename,
-                    self.log
-                )
-            else:
-                latest_artifacts = Shared.reqs_determine_latest(
-                    self.s3_bucket_url,
-                    self.release,
-                    arches_to_unpack,
-                    formattype,
-                    imagename,
-                    self.log
-                )
-
-            if not len(latest_artifacts) > 0:
-                self.log.warn(Color.WARN + 'No images found.')
-                continue
-
-            self.log.info(Color.INFO + 'Attempting to download requested artifacts')
-            for arch in arches_to_unpack:
-                image_arch_dir = os.path.join(
-                    self.image_work_dir,
-                    arch
-                )
-
-                if arch not in latest_artifacts.keys():
-                    self.log.warn(Color.WARN + 'Artifact for ' + imagename +
-                                  ' ' + arch + ' (' + formattype + ') does not exist.')
-                    continue
-
-                source_path = latest_artifacts[arch]
-                drop_name = source_path.split('/')[-1]
-                checksum_name = drop_name + '.CHECKSUM'
-                full_drop = '{}/{}'.format(
-                    image_arch_dir,
-                    drop_name
-                )
-
-                checksum_drop = '{}/{}.CHECKSUM'.format(
-                    image_arch_dir,
-                    drop_name
-                )
-
-                if not os.path.exists(image_arch_dir):
-                    os.makedirs(image_arch_dir, exist_ok=True)
-
-                self.log.info('Downloading artifact for ' + Color.BOLD + arch + Color.END)
-                if self.s3:
-                    Shared.s3_download_artifacts(
-                        self.force_download,
-                        self.s3_bucket,
-                        source_path,
-                        full_drop,
-                        self.log
-                    )
-                else:
-                    Shared.reqs_download_artifacts(
-                        self.force_download,
-                        self.s3_bucket_url,
-                        source_path,
-                        full_drop,
-                        self.log
-                    )
-
-                self.log.info('Creating checksum ...')
-                checksum = Shared.get_checksum(full_drop, self.checksum, self.log)
-                if not checksum:
-                    self.log.error(Color.FAIL + full_drop + ' not found! Are you sure we copied it?')
-                    continue
-                with open(checksum_drop, 'w+') as c:
-                    c.write(checksum)
-                    c.close()
-
-                self.log.info('Creating a symlink to latest image...')
-                latest_name = '{}/{}-{}-{}.latest.{}.{}'.format(
-                    image_arch_dir,
-                    self.shortname,
-                    self.major_version,
-                    imagename,
-                    arch,
-                    formattype
-                )
-                latest_path = latest_name.split('/')[-1]
-                latest_checksum = '{}/{}-{}-{}.latest.{}.{}.CHECKSUM'.format(
-                    image_arch_dir,
-                    self.shortname,
-                    self.major_version,
-                    imagename,
-                    arch,
-                    formattype
-                )
-                # For some reason python doesn't have a "yeah just change this
-                # link" part of the function
-                if os.path.exists(latest_name):
-                    os.remove(latest_name)
-
-                os.symlink(drop_name, latest_name)
-
-                self.log.info('Creating checksum for latest symlinked image...')
-                shutil.copy2(checksum_drop, latest_checksum)
-                with open(latest_checksum, 'r') as link:
-                    checkdata = link.read()
-
-                checkdata = checkdata.replace(drop_name, latest_path)
-
-                with open(latest_checksum, 'w+') as link:
-                    link.write(checkdata)
-                    link.close()
+        for name, extra in self.cloudimages['images'].items():
+            self.log.info(Color.INFO + 'Determining the latest images for ' + name + ' ...')
+            formattype = extra['format']
+            latest_artifacts[name] = {}
+            primary_variant = extra['primary_variant'] if 'primary_variant' in extra else None
+            latest_artifacts[name]['primary'] = primary_variant
+
+            variants = extra['variants'] if 'variants' in extra.keys() else [None] # need to loop once
+            imagename = name
+            variantname = name
+
+            for variant in variants:
+                if variant:
+                    variantname = f"{name}-{variant}"
+                    self.log.info(Color.INFO + 'Getting latest for variant ' + variant + ' ...')
+                if self.s3:
+                    latest_artifacts[name][variantname] = Shared.s3_determine_latest(
+                        self.s3_bucket,
+                        self.release,
+                        arches_to_unpack,
+                        formattype,
+                        variantname,
+                        self.log
+                    )
+                else:
+                    latest_artifacts[name][variantname] = Shared.reqs_determine_latest(
+                        self.s3_bucket_url,
+                        self.release,
+                        arches_to_unpack,
+                        formattype,
+                        variantname,
+                        self.log
+                    )
+
+                # latest_artifacts should have at least 1 result if has_variants, else == 1
+                if not len(latest_artifacts[name][variantname]) > 0:
+                    self.log.warn(Color.WARN + 'No images found for ' + variantname +
+                                  '. This means it will be skipped.')
+
+        del imagename
+        del variantname
+        del variants
+
+        #print(latest_artifacts)
+        for keyname in latest_artifacts.keys():
+            primary = latest_artifacts[keyname]['primary']
+            for imgname in latest_artifacts[keyname]:
+                keysect = latest_artifacts[keyname][imgname]
+                if imgname == 'primary':
+                    continue
+
+                if not keysect:
+                    continue
+
+                self.log.info(Color.INFO + 'Attempting to download requested ' +
+                              'artifacts (' + keyname + ')')
+
+                for arch in arches_to_unpack:
+                    image_arch_dir = os.path.join(
+                        self.image_work_dir,
+                        arch
+                    )
+
+                    source_path = keysect[arch]
+                    drop_name = source_path.split('/')[-1]
+
+                    # Docker containers get a "layer" name, this hack gets
+                    # around it. I didn't feel like adding another config opt.
+                    if 'layer' in drop_name:
+                        fsuffix = drop_name.replace('layer', '')
+                        drop_name = source_path.split('/')[-3] + fsuffix
+
+                    checksum_name = drop_name + '.CHECKSUM'
+                    full_drop = '{}/{}'.format(
+                        image_arch_dir,
+                        drop_name
+                    )
+
+                    checksum_drop = '{}/{}.CHECKSUM'.format(
+                        image_arch_dir,
+                        drop_name
+                    )
+
+                    if not os.path.exists(image_arch_dir):
+                        os.makedirs(image_arch_dir, exist_ok=True)
+
+                    self.log.info('Downloading artifact for ' + Color.BOLD + arch + Color.END)
+                    if self.s3:
+                        Shared.s3_download_artifacts(
+                            self.force_download,
+                            self.s3_bucket,
+                            source_path,
+                            full_drop,
+                            self.log
+                        )
+                    else:
+                        Shared.reqs_download_artifacts(
+                            self.force_download,
+                            self.s3_bucket_url,
+                            source_path,
+                            full_drop,
+                            self.log
+                        )
+
+                    self.log.info('Creating checksum ...')
+                    checksum = Shared.get_checksum(full_drop, self.checksum, self.log)
+                    if not checksum:
+                        self.log.error(Color.FAIL + full_drop + ' not found! Are you sure we copied it?')
+                        continue
+                    with open(checksum_drop, 'w+') as c:
+                        c.write(checksum)
+                        c.close()
+
+                    self.log.info('Creating a symlink to latest image...')
+                    latest_name = '{}/{}-{}-{}.latest.{}.{}'.format(
+                        image_arch_dir,
+                        self.shortname,
+                        self.major_version,
+                        imgname,
+                        arch,
+                        formattype
+                    )
+                    latest_path = latest_name.split('/')[-1]
+                    latest_checksum = '{}/{}-{}-{}.latest.{}.{}.CHECKSUM'.format(
+                        image_arch_dir,
+                        self.shortname,
+                        self.major_version,
+                        imgname,
+                        arch,
+                        formattype
+                    )
+                    # For some reason python doesn't have a "yeah just change this
+                    # link" part of the function
+                    if os.path.exists(latest_name):
+                        os.remove(latest_name)
+
+                    os.symlink(drop_name, latest_name)
+
+                    self.log.info('Creating checksum for latest symlinked image...')
+                    shutil.copy2(checksum_drop, latest_checksum)
+                    with open(latest_checksum, 'r') as link:
+                        checkdata = link.read()
+
+                    checkdata = checkdata.replace(drop_name, latest_path)
+
+                    with open(latest_checksum, 'w+') as link:
+                        link.write(checkdata)
+                        link.close()
+
+                    # If this is the primary image, set the appropriate symlink
+                    # and checksum
+                    if primary and primary in drop_name:
+                        # If an image is the primary, we set this.
+                        latest_primary_name = '{}/{}-{}-{}.latest.{}.{}'.format(
+                            image_arch_dir,
+                            self.shortname,
+                            self.major_version,
+                            keyname,
+                            arch,
+                            formattype
+                        )
+                        latest_primary_checksum = '{}/{}-{}-{}.latest.{}.{}.CHECKSUM'.format(
+                            image_arch_dir,
+                            self.shortname,
+                            self.major_version,
+                            keyname,
+                            arch,
+                            formattype
+                        )
+                        latest_primary_path = latest_name.split('/')[-1]
+
+                        self.log.info('This is the primary image, setting link and checksum')
+                        if os.path.exists(latest_primary_name):
+                            os.remove(latest_primary_name)
+                        os.symlink(drop_name, latest_primary_name)
+                        shutil.copy2(checksum_drop, latest_primary_checksum)
+                        with open(latest_primary_checksum) as link:
+                            checkpdata = link.read()
+                        checkpdata = checkpdata.replace(drop_name, latest_primary_path)
+                        with open(latest_primary_checksum, 'w+') as link:
+                            link.write(checkpdata)
+                            link.close()
 
         self.log.info(Color.INFO + 'Image download phase completed')
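Each variant now gets its own "latest" symlink and rewritten CHECKSUM file, and the variant named by primary_variant additionally claims the unsuffixed image name. A small worked example of the naming; the shortname, version, arch, and artifact file name are illustrative assumptions, not values from the diff:

    # Hypothetical values to show the link names the loop above produces.
    shortname, major_version = 'Rocky', '9'
    keyname, imgname, arch, formattype = 'EC2', 'EC2-Base', 'x86_64', 'raw'
    drop_name = 'Rocky-9-EC2-Base.x86_64.raw'   # assumed artifact file name

    latest_name = '{}-{}-{}.latest.{}.{}'.format(shortname, major_version, imgname, arch, formattype)
    latest_primary_name = '{}-{}-{}.latest.{}.{}'.format(shortname, major_version, keyname, arch, formattype)

    # Per-variant link:  Rocky-9-EC2-Base.latest.x86_64.raw -> Rocky-9-EC2-Base.x86_64.raw
    # Primary link:      Rocky-9-EC2.latest.x86_64.raw      -> Rocky-9-EC2-Base.x86_64.raw
    # (set because the primary variant name 'Base' appears in drop_name)
    print(latest_name, latest_primary_name)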