forked from sig_core/toolkit
Merge pull request 'feature/variant-images-download' (#3) from feature/variant-images-download into devel
Reviewed-on: sig_core/toolkit#3
Commit 67d3ed3046
@@ -103,10 +103,10 @@ for conf in glob.iglob(f"{_rootdir}/sig/*.yaml"):
 ALLOWED_TYPE_VARIANTS = {
-        "Azure": None,
+        "Azure": ["Base", "LVM"],
         "Container": ["Base", "Minimal", "UBI"],
-        "EC2": None,
-        "GenericCloud": None,
+        "EC2": ["Base", "LVM"],
+        "GenericCloud": ["Base", "LVM"],
         "Vagrant": ["Libvirt", "Vbox"],
         "OCP": None
 }
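For context: ALLOWED_TYPE_VARIANTS is the whitelist of variant names an image type may declare in a build config, and this hunk lets Azure, EC2 and GenericCloud carry Base/LVM variants. The validation code itself is not shown in this diff; the sketch below is only a hypothetical illustration of how a config could be checked against the table, and check_image_variants is an invented helper name, not part of the commit.

# Hypothetical sketch, not code from this commit: reject a cloudimages entry
# that declares a variant its image type does not allow.
ALLOWED_TYPE_VARIANTS = {
        "Azure": ["Base", "LVM"],
        "Container": ["Base", "Minimal", "UBI"],
        "EC2": ["Base", "LVM"],
        "GenericCloud": ["Base", "LVM"],
        "Vagrant": ["Libvirt", "Vbox"],
        "OCP": None
}

def check_image_variants(images: dict) -> None:
    """Raise if any image declares a variant outside ALLOWED_TYPE_VARIANTS."""
    for name, extra in images.items():
        allowed = ALLOWED_TYPE_VARIANTS.get(name)
        for variant in extra.get('variants', []):
            if not allowed or variant not in allowed:
                raise ValueError(f"{name}: variant {variant!r} is not allowed")

# EC2 with Base/LVM passes; OCP with any declared variant would raise.
check_image_variants({'EC2': {'format': 'raw', 'variants': ['Base', 'LVM']}})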
@@ -74,10 +74,23 @@
     - 'xorriso'
   cloudimages:
     images:
+      Azure:
+        format: vhd
+        variants: [Base, LVM]
+        primary_variant: 'Base'
       EC2:
         format: raw
+        variants: [Base, LVM]
+        primary_variant: 'Base'
       GenericCloud:
         format: qcow2
+        variants: [Base, LVM]
+        primary_variant: 'Base'
+      Container:
+        format: tar.xz
+        variants: [Base, Minimal, UBI]
+      OCP:
+        format: qcow2
   livemap:
     git_repo: 'https://git.resf.org/sig_core/kickstarts.git'
     branch: 'r8'
@@ -74,10 +74,23 @@
     - 'xorriso'
   cloudimages:
     images:
+      Azure:
+        format: vhd
+        variants: [Base, LVM]
+        primary_variant: 'Base'
       EC2:
         format: raw
+        variants: [Base, LVM]
+        primary_variant: 'Base'
       GenericCloud:
         format: qcow2
+        variants: [Base, LVM]
+        primary_variant: 'Base'
+      Container:
+        format: tar.xz
+        variants: [Base, Minimal, UBI]
+      OCP:
+        format: qcow2
   livemap:
     git_repo: 'https://git.resf.org/sig_core/kickstarts.git'
     branch: 'r8'
@@ -74,10 +74,23 @@
     - 'xorriso'
   cloudimages:
     images:
+      Azure:
+        format: vhd
+        variants: [Base, LVM]
+        primary_variant: 'Base'
       EC2:
         format: raw
+        variants: [Base, LVM]
+        primary_variant: 'Base'
       GenericCloud:
         format: qcow2
+        variants: [Base, LVM]
+        primary_variant: 'Base'
+      Container:
+        format: tar.xz
+        variants: [Base, Minimal, UBI]
+      OCP:
+        format: qcow2
   livemap:
     git_repo: 'https://git.resf.org/sig_core/kickstarts.git'
     branch: 'r8'
@@ -77,10 +77,23 @@
     - 'xorriso'
   cloudimages:
     images:
+      Azure:
+        format: vhd
+        variants: [Base, LVM]
+        primary_variant: 'Base'
       EC2:
         format: raw
+        variants: [Base, LVM]
+        primary_variant: 'Base'
       GenericCloud:
         format: qcow2
+        variants: [Base, LVM]
+        primary_variant: 'Base'
+      Container:
+        format: tar.xz
+        variants: [Base, Minimal, UBI]
+      OCP:
+        format: qcow2
   livemap:
     git_repo: 'https://git.resf.org/sig_core/kickstarts.git'
     branch: 'r9-beta'
@@ -78,10 +78,23 @@
     - 'xorriso'
   cloudimages:
     images:
+      Azure:
+        format: vhd
+        variants: [Base, LVM]
+        primary_variant: 'Base'
       EC2:
         format: raw
+        variants: [Base, LVM]
+        primary_variant: 'Base'
       GenericCloud:
         format: qcow2
+        variants: [Base, LVM]
+        primary_variant: 'Base'
+      Container:
+        format: tar.xz
+        variants: [Base, Minimal, UBI]
+      OCP:
+        format: qcow2
   livemap:
     git_repo: 'https://git.resf.org/sig_core/kickstarts.git'
     branch: 'r9'
@@ -77,10 +77,23 @@
     - 'xorriso'
   cloudimages:
     images:
+      Azure:
+        format: vhd
+        variants: [Base, LVM]
+        primary_variant: 'Base'
       EC2:
         format: raw
+        variants: [Base, LVM]
+        primary_variant: 'Base'
       GenericCloud:
         format: qcow2
+        variants: [Base, LVM]
+        primary_variant: 'Base'
+      Container:
+        format: tar.xz
+        variants: [Base, Minimal, UBI]
+      OCP:
+        format: qcow2
   livemap:
     git_repo: 'https://git.resf.org/sig_core/kickstarts.git'
     branch: 'r9lh'
@@ -74,10 +74,23 @@
     - 'xorriso'
   cloudimages:
     images:
+      Azure:
+        format: vhd
+        variants: [Base, LVM]
+        primary_variant: 'Base'
       EC2:
         format: raw
+        variants: [Base, LVM]
+        primary_variant: 'Base'
       GenericCloud:
         format: qcow2
+        variants: [Base, LVM]
+        primary_variant: 'Base'
+      Container:
+        format: tar.xz
+        variants: [Base, Minimal, UBI]
+      OCP:
+        format: qcow2
   livemap:
     git_repo: 'https://git.resf.org/sig_core/kickstarts.git'
     branch: 'rln'
@@ -1357,128 +1357,194 @@ class IsoBuild:
         """
         unpack_single_arch = False
         arches_to_unpack = self.arches
+        latest_artifacts = {}
         if self.arch:
             unpack_single_arch = True
             arches_to_unpack = [self.arch]
 
-        for imagename in self.cloudimages['images']:
-            self.log.info(Color.INFO + 'Determining the latest images for ' + imagename + ' ...')
-            formattype = self.cloudimages['images'][imagename]['format']
-
-            if self.s3:
-                latest_artifacts = Shared.s3_determine_latest(
-                        self.s3_bucket,
-                        self.release,
-                        arches_to_unpack,
-                        formattype,
-                        imagename,
-                        self.log
-                )
-
-            else:
-                latest_artifacts = Shared.reqs_determine_latest(
-                        self.s3_bucket_url,
-                        self.release,
-                        arches_to_unpack,
-                        formattype,
-                        imagename,
-                        self.log
-                )
-
-            if not len(latest_artifacts) > 0:
-                self.log.warn(Color.WARN + 'No images found.')
-                continue
-
-            self.log.info(Color.INFO + 'Attempting to download requested artifacts')
-            for arch in arches_to_unpack:
-                image_arch_dir = os.path.join(
-                        self.image_work_dir,
-                        arch
-                )
-
-                if arch not in latest_artifacts.keys():
-                    self.log.warn(Color.WARN + 'Artifact for ' + imagename +
-                            ' ' + arch + ' (' + formattype + ') does not exist.')
-                    continue
-
-                source_path = latest_artifacts[arch]
-                drop_name = source_path.split('/')[-1]
-                checksum_name = drop_name + '.CHECKSUM'
-                full_drop = '{}/{}'.format(
-                        image_arch_dir,
-                        drop_name
-                )
-
-                checksum_drop = '{}/{}.CHECKSUM'.format(
-                        image_arch_dir,
-                        drop_name
-                )
-
-                if not os.path.exists(image_arch_dir):
-                    os.makedirs(image_arch_dir, exist_ok=True)
-
-                self.log.info('Downloading artifact for ' + Color.BOLD + arch + Color.END)
-                if self.s3:
-                    Shared.s3_download_artifacts(
-                            self.force_download,
-                            self.s3_bucket,
-                            source_path,
-                            full_drop,
-                            self.log
-                    )
-                else:
-                    Shared.reqs_download_artifacts(
-                            self.force_download,
-                            self.s3_bucket_url,
-                            source_path,
-                            full_drop,
-                            self.log
-                    )
-
-                self.log.info('Creating checksum ...')
-                checksum = Shared.get_checksum(full_drop, self.checksum, self.log)
-                if not checksum:
-                    self.log.error(Color.FAIL + full_drop + ' not found! Are you sure we copied it?')
-                    continue
-                with open(checksum_drop, 'w+') as c:
-                    c.write(checksum)
-                    c.close()
-
-                self.log.info('Creating a symlink to latest image...')
-                latest_name = '{}/{}-{}-{}.latest.{}.{}'.format(
-                        image_arch_dir,
-                        self.shortname,
-                        self.major_version,
-                        imagename,
-                        arch,
-                        formattype
-                )
-                latest_path = latest_name.split('/')[-1]
-                latest_checksum = '{}/{}-{}-{}.latest.{}.{}.CHECKSUM'.format(
-                        image_arch_dir,
-                        self.shortname,
-                        self.major_version,
-                        imagename,
-                        arch,
-                        formattype
-                )
-                # For some reason python doesn't have a "yeah just change this
-                # link" part of the function
-                if os.path.exists(latest_name):
-                    os.remove(latest_name)
-
-                os.symlink(drop_name, latest_name)
-
-                self.log.info('Creating checksum for latest symlinked image...')
-                shutil.copy2(checksum_drop, latest_checksum)
-                with open(latest_checksum, 'r') as link:
-                    checkdata = link.read()
-
-                checkdata = checkdata.replace(drop_name, latest_path)
-
-                with open(latest_checksum, 'w+') as link:
-                    link.write(checkdata)
-                    link.close()
+        for name, extra in self.cloudimages['images'].items():
+            self.log.info(Color.INFO + 'Determining the latest images for ' + name + ' ...')
+            formattype = extra['format']
+            latest_artifacts[name] = {}
+            primary_variant = extra['primary_variant'] if 'primary_variant' in extra else None
+            latest_artifacts[name]['primary'] = primary_variant
+
+            variants = extra['variants'] if 'variants' in extra.keys() else [None] # need to loop once
+            imagename = name
+            variantname = name
+
+            for variant in variants:
+                if variant:
+                    variantname = f"{name}-{variant}"
+                    self.log.info(Color.INFO + 'Getting latest for variant ' + variant + ' ...')
+
+                if self.s3:
+                    latest_artifacts[name][variantname] = Shared.s3_determine_latest(
+                            self.s3_bucket,
+                            self.release,
+                            arches_to_unpack,
+                            formattype,
+                            variantname,
+                            self.log
+                    )
+
+                else:
+                    latest_artifacts[name][variantname] = Shared.reqs_determine_latest(
+                            self.s3_bucket_url,
+                            self.release,
+                            arches_to_unpack,
+                            formattype,
+                            variantname,
+                            self.log
+                    )
+
+                # latest_artifacts should have at least 1 result if has_variants, else == 1
+                if not len(latest_artifacts[name][variantname]) > 0:
+                    self.log.warn(Color.WARN + 'No images found for ' + variantname +
+                            '. This means it will be skipped.')
+
+        del imagename
+        del variantname
+        del variants
+
+        #print(latest_artifacts)
+        for keyname in latest_artifacts.keys():
+            primary = latest_artifacts[keyname]['primary']
+            for imgname in latest_artifacts[keyname]:
+                keysect = latest_artifacts[keyname][imgname]
+                if imgname == 'primary':
+                    continue
+
+                if not keysect:
+                    continue
+
+                self.log.info(Color.INFO + 'Attempting to download requested ' +
+                        'artifacts (' + keyname + ')')
+
+                for arch in arches_to_unpack:
+                    image_arch_dir = os.path.join(
+                            self.image_work_dir,
+                            arch
+                    )
+
+                    source_path = keysect[arch]
+                    drop_name = source_path.split('/')[-1]
+
+                    # Docker containers get a "layer" name, this hack gets
+                    # around it. I didn't feel like adding another config opt.
+                    if 'layer' in drop_name:
+                        fsuffix = drop_name.replace('layer', '')
+                        drop_name = source_path.split('/')[-3] + fsuffix
+
+                    checksum_name = drop_name + '.CHECKSUM'
+                    full_drop = '{}/{}'.format(
+                            image_arch_dir,
+                            drop_name
+                    )
+
+                    checksum_drop = '{}/{}.CHECKSUM'.format(
+                            image_arch_dir,
+                            drop_name
+                    )
+
+                    if not os.path.exists(image_arch_dir):
+                        os.makedirs(image_arch_dir, exist_ok=True)
+
+                    self.log.info('Downloading artifact for ' + Color.BOLD + arch + Color.END)
+                    if self.s3:
+                        Shared.s3_download_artifacts(
+                                self.force_download,
+                                self.s3_bucket,
+                                source_path,
+                                full_drop,
+                                self.log
+                        )
+                    else:
+                        Shared.reqs_download_artifacts(
+                                self.force_download,
+                                self.s3_bucket_url,
+                                source_path,
+                                full_drop,
+                                self.log
+                        )
+
+                    self.log.info('Creating checksum ...')
+                    checksum = Shared.get_checksum(full_drop, self.checksum, self.log)
+                    if not checksum:
+                        self.log.error(Color.FAIL + full_drop + ' not found! Are you sure we copied it?')
+                        continue
+                    with open(checksum_drop, 'w+') as c:
+                        c.write(checksum)
+                        c.close()
+
+                    self.log.info('Creating a symlink to latest image...')
+                    latest_name = '{}/{}-{}-{}.latest.{}.{}'.format(
+                            image_arch_dir,
+                            self.shortname,
+                            self.major_version,
+                            imgname,
+                            arch,
+                            formattype
+                    )
+                    latest_path = latest_name.split('/')[-1]
+                    latest_checksum = '{}/{}-{}-{}.latest.{}.{}.CHECKSUM'.format(
+                            image_arch_dir,
+                            self.shortname,
+                            self.major_version,
+                            imgname,
+                            arch,
+                            formattype
+                    )
+                    # For some reason python doesn't have a "yeah just change this
+                    # link" part of the function
+                    if os.path.exists(latest_name):
+                        os.remove(latest_name)
+
+                    os.symlink(drop_name, latest_name)
+
+                    self.log.info('Creating checksum for latest symlinked image...')
+                    shutil.copy2(checksum_drop, latest_checksum)
+                    with open(latest_checksum, 'r') as link:
+                        checkdata = link.read()
+
+                    checkdata = checkdata.replace(drop_name, latest_path)
+
+                    with open(latest_checksum, 'w+') as link:
+                        link.write(checkdata)
+                        link.close()
+
+                    # If this is the primary image, set the appropriate symlink
+                    # and checksum
+                    if primary and primary in drop_name:
+                        # If an image is the primary, we set this.
+                        latest_primary_name = '{}/{}-{}-{}.latest.{}.{}'.format(
+                                image_arch_dir,
+                                self.shortname,
+                                self.major_version,
+                                keyname,
+                                arch,
+                                formattype
+                        )
+                        latest_primary_checksum = '{}/{}-{}-{}.latest.{}.{}.CHECKSUM'.format(
+                                image_arch_dir,
+                                self.shortname,
+                                self.major_version,
+                                keyname,
+                                arch,
+                                formattype
+                        )
+                        latest_primary_path = latest_name.split('/')[-1]
+
+                        self.log.info('This is the primary image, setting link and checksum')
+                        if os.path.exists(latest_primary_name):
+                            os.remove(latest_primary_name)
+                        os.symlink(drop_name, latest_primary_name)
+                        shutil.copy2(checksum_drop, latest_primary_checksum)
+                        with open(latest_primary_checksum) as link:
+                            checkpdata = link.read()
+                        checkpdata = checkpdata.replace(drop_name, latest_primary_path)
+                        with open(latest_primary_checksum, 'w+') as link:
+                            link.write(checkpdata)
+                            link.close()
 
         self.log.info(Color.INFO + 'Image download phase completed')
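To make the new data flow easier to follow: the first loop now builds a nested latest_artifacts mapping (image name, then 'primary' plus one entry per variant, each keyed by arch), and the second loop turns each entry into a download, a checksum, a per-variant "latest" symlink and, for the primary variant, an unsuffixed link as well. The literal below is a hypothetical illustration of that shape; the paths and arches are made up, and Shared.s3_determine_latest / reqs_determine_latest are simply assumed to return an arch-to-path mapping, as the keysect[arch] lookup in the hunk implies.

# Hypothetical example (not from this commit) of latest_artifacts after the
# first loop, for a config with EC2 variants and an OCP image without any.
latest_artifacts = {
    'EC2': {
        'primary': 'Base',  # from primary_variant in the config
        'EC2-Base': {       # one mapping per variant, keyed by arch
            'x86_64': 'buildimage-x86_64/EC2-Base/Rocky-EC2-Base.x86_64.raw',
        },
        'EC2-LVM': {
            'x86_64': 'buildimage-x86_64/EC2-LVM/Rocky-EC2-LVM.x86_64.raw',
        },
    },
    'OCP': {
        'primary': None,    # no variants: looped once, keyed by the bare image name
        'OCP': {
            'x86_64': 'buildimage-x86_64/OCP/Rocky-OCP.x86_64.qcow2',
        },
    },
}

# Per the format strings in the hunk (assuming shortname 'Rocky' and major
# version 8), the download loop would then leave symlinks along the lines of
#   Rocky-8-EC2-Base.latest.x86_64.raw   (per-variant link, built from imgname)
#   Rocky-8-EC2.latest.x86_64.raw        (primary link, built from keyname)
# with matching .CHECKSUM files rewritten to reference the "latest" names.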