From 6946b737fc5f887de453faee662c9f2f62fc8c24 Mon Sep 17 00:00:00 2001 From: Louis Abel Date: Tue, 5 Jul 2022 14:47:38 -0700 Subject: [PATCH] Simplify colors and start fleshing out cloud image pulls --- iso/empanadas/empanadas/common.py | 4 + iso/empanadas/empanadas/configs/el9.yaml | 3 + iso/empanadas/empanadas/util/dnf_utils.py | 233 +++++++--------------- iso/empanadas/empanadas/util/iso_utils.py | 233 ++++++++++------------ iso/empanadas/empanadas/util/shared.py | 8 +- 5 files changed, 183 insertions(+), 298 deletions(-) diff --git a/iso/empanadas/empanadas/common.py b/iso/empanadas/empanadas/common.py index 01e90b8..7073606 100644 --- a/iso/empanadas/empanadas/common.py +++ b/iso/empanadas/empanadas/common.py @@ -38,6 +38,10 @@ class Color: UNDERLINE = '\033[4m' BOLD = '\033[1m' END = '\033[0m' + INFO = '[' + BOLD + GREEN + 'INFO' + END + '] ' + WARN = '[' + BOLD + YELLOW + 'WARN' + END + '] ' + FAIL = '[' + BOLD + RED + 'FAIL' + END + '] ' + STAT = '[' + BOLD + CYAN + 'STAT' + END + '] ' # vars and additional checks rldict = AttributeDict() diff --git a/iso/empanadas/empanadas/configs/el9.yaml b/iso/empanadas/empanadas/configs/el9.yaml index 8280d50..e53f851 100644 --- a/iso/empanadas/empanadas/configs/el9.yaml +++ b/iso/empanadas/empanadas/configs/el9.yaml @@ -72,6 +72,9 @@ - 'lorax-templates-rhel' - 'lorax-templates-generic' - 'xorriso' + cloudimages: + - EC2 + - GenericCloud repoclosure_map: arches: x86_64: '--forcearch=x86_64 --arch=x86_64 --arch=athlon --arch=i686 --arch=i586 --arch=i486 --arch=i386 --arch=noarch' diff --git a/iso/empanadas/empanadas/util/dnf_utils.py b/iso/empanadas/empanadas/util/dnf_utils.py index d708b2d..191fa16 100644 --- a/iso/empanadas/empanadas/util/dnf_utils.py +++ b/iso/empanadas/empanadas/util/dnf_utils.py @@ -235,10 +235,7 @@ class RepoSync: raise SystemExit() if self.fullrun and self.refresh_extra_files: - self.log.warn( - '[' + Color.BOLD + Color.YELLOW + 'WARN' + Color.END + '] ' + - 'A full run implies extra files are also deployed.' - ) + self.log.warn(Color.WARN + 'A full run implies extra files are also deployed.') self.sync(self.repo, sync_root, work_root, log_root, global_work_root, self.arch) @@ -575,10 +572,7 @@ class RepoSync: join_all_pods = ' '.join(entry_name_list) time.sleep(3) - self.log.info( - '[' + Color.BOLD + Color.GREEN + 'INFO' + Color.END + '] ' + - 'Syncing ' + r + ' ...' - ) + self.log.info(Color.INFO + 'Syncing ' + r + ' ...') pod_watcher = '{} wait {}'.format( cmd, join_all_pods @@ -608,9 +602,7 @@ class RepoSync: output, errors = podcheck.communicate() if 'Exited (0)' not in output.decode(): - self.log.error( - '[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' + pod - ) + self.log.error(Color.FAIL + pod) bad_exit_list.append(pod) rmcmd = '{} rm {}'.format( @@ -626,10 +618,7 @@ class RepoSync: ) entry_name_list.clear() - self.log.info( - '[' + Color.BOLD + Color.GREEN + 'INFO' + Color.END + '] ' + - 'Syncing ' + r + ' completed' - ) + self.log.info(Color.INFO + 'Syncing ' + r + ' completed') if len(bad_exit_list) > 0: self.log.error( @@ -849,9 +838,7 @@ class RepoSync: output, errors = podcheck.communicate() if 'Exited (0)' not in output.decode(): - self.log.error( - '[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' + pod - ) + self.log.error(Color.FAIL + pod) bad_exit_list.append(pod) rmcmd = '{} rm {}'.format( @@ -885,10 +872,7 @@ class RepoSync: might also deploy COMPOSE_ID and maybe in the future a metadata dir with a bunch of compose-esque stuff. 
""" - self.log.info( - '[' + Color.BOLD + Color.GREEN + 'INFO' + Color.END + '] ' + - 'Deploying treeinfo, discinfo, and media.repo' - ) + self.log.info(Color.INFO + 'Deploying treeinfo, discinfo, and media.repo') cmd = Shared.git_cmd(self.log) tmpclone = '/tmp/clone' @@ -919,10 +903,7 @@ class RepoSync: stderr=subprocess.DEVNULL ) - self.log.info( - '[' + Color.BOLD + Color.GREEN + 'INFO' + Color.END + '] ' + - 'Deploying extra files to work and metadata directories ...' - ) + self.log.info(Color.INFO + 'Deploying extra files to work and metadata directories ...') # Copy files to work root for extra in self.extra_files['list']: @@ -934,18 +915,13 @@ class RepoSync: shutil.copy2(src, extra_files_dir) shutil.copy2(src, metadata_dir) except: - self.log.warn( - '[' + Color.BOLD + Color.YELLOW + 'WARN' + Color.END + '] ' + - 'Extra file not copied: ' + src - ) + self.log.warn(Color.WARN + 'Extra file not copied: ' + src) try: shutil.rmtree(tmpclone) except OSError as e: - self.log.error( - '[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' + - 'Directory ' + tmpclone + ' could not be removed: ' + - e.strerror + self.log.error(Color.FAIL + 'Directory ' + tmpclone + + ' could not be removed: ' + e.strerror ) def deploy_metadata(self, sync_root): @@ -954,10 +930,7 @@ class RepoSync: will be close to how pungi produces it, but it won't be exact nor a perfect replica. """ - self.log.info( - '[' + Color.BOLD + Color.GREEN + 'INFO' + Color.END + '] ' + - 'Deploying metadata for this compose' - ) + self.log.info(Color.INFO + 'Deploying metadata for this compose') # Create metadata here # Create COMPOSE_ID here (this doesn't necessarily match anything, it's # just an indicator) @@ -984,10 +957,7 @@ class RepoSync: metadata_dir + '/metadata' ) - self.log.info( - '[' + Color.BOLD + Color.GREEN + 'INFO' + Color.END + '] ' + - 'Metadata files phase completed.' - ) + self.log.info(Color.INFO + 'Metadata files phase completed.') # Deploy README to metadata directory readme_template = self.tmplenv.get_template('README.tmpl') @@ -1007,10 +977,7 @@ class RepoSync: overwritten by our ISO process, which is fine. If there is a treeinfo found, it will be skipped. 
""" - self.log.info( - '[' + Color.BOLD + Color.GREEN + 'INFO' + Color.END + '] ' + - 'Deploying treeinfo, discinfo, and media.repo' - ) + self.log.info(Color.INFO + 'Deploying treeinfo, discinfo, and media.repo') arches_to_tree = self.arches if arch: @@ -1093,16 +1060,12 @@ class RepoSync: repo_name ) except Exception as e: - self.log.error( - '[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' + - repo_name + ' ' + a + ' os .treeinfo could not be written' + self.log.error(Color.FAIL + repo_name + ' ' + + a + ' os .treeinfo could not be written' ) self.log.error(e) else: - self.log.warn( - '[' + Color.BOLD + Color.YELLOW + 'WARN' + Color.END + '] ' + - repo_name + ' ' + a + ' os .treeinfo already exists' - ) + self.log.warn(Color.WARN + repo_name + ' ' + a + ' os .treeinfo already exists') if not os.path.exists(os_disc_path): try: @@ -1113,15 +1076,13 @@ class RepoSync: os_disc_path ) except Exception as e: - self.log.error( - '[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' + - repo_name + ' ' + a + ' os .discinfo could not be written' + self.log.error(Color.FAIL + repo_name + ' ' + + a + ' os .discinfo could not be written' ) self.log.error(e) else: - self.log.warn( - '[' + Color.BOLD + Color.YELLOW + 'WARN' + Color.END + '] ' + - repo_name + ' ' + a + ' os .discinfo already exists' + self.log.warn(Color.WARN + repo_name + ' ' + a + + ' os .discinfo already exists' ) if not os.path.exists(os_media_path): @@ -1132,15 +1093,13 @@ class RepoSync: os_media_path ) except Exception as e: - self.log.error( - '[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' + - repo_name + ' ' + a + ' os media.repo could not be written' + self.log.error(Color.FAIL + repo_name + ' ' + a + + ' os media.repo could not be written' ) self.log.error(e) else: - self.log.warn( - '[' + Color.BOLD + Color.YELLOW + 'WARN' + Color.END + '] ' + - repo_name + ' ' + a + ' os media.repo already exists' + self.log.warn(Color.WARN + repo_name + ' ' + a + + ' os media.repo already exists' ) # Kickstart part of the repos @@ -1156,15 +1115,13 @@ class RepoSync: repo_name ) except Exception as e: - self.log.error( - '[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' + - repo_name + ' ' + a + ' kickstart .treeinfo could not be written' + self.log.error(Color.FAIL + repo_name + ' ' + a + + ' kickstart .treeinfo could not be written' ) self.log.error(e) else: - self.log.warn( - '[' + Color.BOLD + Color.YELLOW + 'WARN' + Color.END + '] ' + - repo_name + ' ' + a + ' kickstart .treeinfo already exists' + self.log.warn(Color.WARN + repo_name + ' ' + a + + ' kickstart .treeinfo already exists' ) if not os.path.exists(ks_disc_path): @@ -1176,15 +1133,13 @@ class RepoSync: ks_disc_path ) except Exception as e: - self.log.error( - '[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' + - repo_name + ' ' + a + ' kickstart .discinfo could not be written' + self.log.error(Color.FAIL + repo_name + ' ' + a + + ' kickstart .discinfo could not be written' ) self.log.error(e) else: - self.log.warn( - '[' + Color.BOLD + Color.YELLOW + 'WARN' + Color.END + '] ' + - repo_name + ' ' + a + ' kickstart .discinfo already exists' + self.log.warn(Color.FAIL + repo_name + ' ' + a + + ' kickstart .discinfo already exists' ) if not os.path.exists(ks_media_path): @@ -1195,15 +1150,13 @@ class RepoSync: ks_media_path ) except Exception as e: - self.log.error( - '[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' + - repo_name + ' ' + a + ' kickstart media.repo could not be written' + self.log.error(Color.FAIL + repo_name + ' ' + a 
+ + ' kickstart media.repo could not be written' ) self.log.error(e) else: - self.log.warn( - '[' + Color.BOLD + Color.YELLOW + 'WARN' + Color.END + '] ' + - repo_name + ' ' + a + ' kickstart media.repo already exists' + self.log.warn(Color.WARN + repo_name + ' ' + a + + ' kickstart media.repo already exists' ) if not self.ignore_debug and not a == 'source': @@ -1240,15 +1193,13 @@ class RepoSync: repo_name ) except Exception as e: - self.log.error( - '[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' + - repo_name + ' ' + a + ' debug .treeinfo could not be written' + self.log.error(Color.FAIL + repo_name + ' ' + a + + ' debug .treeinfo could not be written' ) self.log.error(e) else: - self.log.warn( - '[' + Color.BOLD + Color.YELLOW + 'WARN' + Color.END + '] ' + - r + ' ' + a + ' debug .treeinfo already exists' + self.log.warn(Color.WARN + r + ' ' + a + + ' debug .treeinfo already exists' ) if not os.path.exists(debug_disc_path): @@ -1260,15 +1211,13 @@ class RepoSync: debug_disc_path ) except Exception as e: - self.log.error( - '[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' + - repo_name + ' ' + a + ' debug .discinfo could not be written' + self.log.error(Color.FAIL + repo_name + ' ' + a + + ' debug .discinfo could not be written' ) self.log.error(e) else: - self.log.warn( - '[' + Color.BOLD + Color.YELLOW + 'WARN' + Color.END + '] ' + - r + ' ' + a + ' debug .discinfo already exists' + self.log.warn(Color.WARN + r + ' ' + a + + ' debug .discinfo already exists' ) if not os.path.exists(debug_media_path): @@ -1279,15 +1228,13 @@ class RepoSync: debug_media_path ) except Exception as e: - self.log.error( - '[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' + - repo_name + ' ' + a + ' debug media.repo could not be written' + self.log.error(Color.FAIL + repo_name + ' ' + a + + ' debug media.repo could not be written' ) self.log.error(e) else: - self.log.warn( - '[' + Color.BOLD + Color.YELLOW + 'WARN' + Color.END + '] ' + - repo_name + ' ' + a + ' debug media.repo already exists' + self.log.warn(Color.WARN + repo_name + ' ' + a + + ' debug media.repo already exists' ) @@ -1322,16 +1269,10 @@ class RepoSync: repo_name ) except Exception as e: - self.log.error( - '[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' + - repo_name + ' source os .treeinfo could not be written' - ) + self.log.error(Color.FAIL + repo_name + ' source os .treeinfo could not be written') self.log.error(e) else: - self.log.warn( - '[' + Color.BOLD + Color.YELLOW + 'WARN' + Color.END + '] ' + - repo_name + ' source os .treeinfo already exists' - ) + self.log.warn(Color.WARN + repo_name + ' source os .treeinfo already exists') if not os.path.exists(source_disc_path): try: @@ -1342,16 +1283,10 @@ class RepoSync: source_disc_path ) except Exception as e: - self.log.error( - '[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' + - repo_name + ' source os .discinfo could not be written' - ) + self.log.error(Color.FAIL + repo_name + ' source os .discinfo could not be written') self.log.error(e) else: - self.log.warn( - '[' + Color.BOLD + Color.YELLOW + 'WARN' + Color.END + '] ' + - repo_name + ' source .discinfo already exists' - ) + self.log.warn(Color.WARN + repo_name + ' source .discinfo already exists') if not os.path.exists(source_media_path): try: @@ -1361,16 +1296,10 @@ class RepoSync: source_media_path ) except Exception as e: - self.log.error( - '[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' + - repo_name + ' source os media.repo could not be written' - ) + 
self.log.error(Color.FAIL + repo_name + ' source os media.repo could not be written') self.log.error(e) else: - self.log.warn( - '[' + Color.BOLD + Color.YELLOW + 'WARN' + Color.END + '] ' + - repo_name + ' source media.repo already exists' - ) + self.log.warn(Color.WARN + repo_name + ' source media.repo already exists') def tweak_treeinfo(self, repo, sync_root, arch): """ @@ -1396,18 +1325,12 @@ class RepoSync: variants_to_tweak.append(r) if not len(variants_to_tweak) > 0: - self.log.info( - '[' + Color.BOLD + Color.GREEN + 'INFO' + Color.END + '] ' + - 'No treeinfo to tweak.' - ) + self.log.info(Color.INFO + 'No treeinfo to tweak.') return for a in arches_to_tree: for v in variants_to_tweak: - self.log.info( - '[' + Color.BOLD + Color.GREEN + 'INFO' + Color.END + '] ' + - 'Tweaking treeinfo for ' + a + ' ' + v - ) + self.log.info(Color.INFO + 'Tweaking treeinfo for ' + a + ' ' + v) image = os.path.join(sync_root, v, a, 'os') imagemap = self.iso_map['images'][v] data = { @@ -1425,10 +1348,7 @@ class RepoSync: try: Shared.treeinfo_modify_write(data, imagemap, self.log) except Exception as e: - self.log.error( - '[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' + - 'There was an error writing os treeinfo.' - ) + self.log.error(Color.FAIL + 'There was an error writing os treeinfo.') self.log.error(e) if self.fullrun: @@ -1448,10 +1368,7 @@ class RepoSync: try: Shared.treeinfo_modify_write(ksdata, imagemap, self.log) except Exception as e: - self.log.error( - '[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' + - 'There was an error writing kickstart treeinfo.' - ) + self.log.error(Color.FAIL + 'There was an error writing kickstart treeinfo.') self.log.error(e) def run_compose_closeout(self): @@ -1513,18 +1430,17 @@ class RepoSync: "global", ) - self.log.info( - '[' + Color.BOLD + Color.GREEN + 'INFO' + Color.END + '] ' + - 'Starting to sync ISOs to compose' - ) + self.log.info(Color.INFO + 'Starting to sync ISOs to compose') if os.path.exists('/usr/bin/fpsync'): + self.log.info(Color.INFO + 'Starting up fpsync') message, ret = Shared.fpsync_method(iso_root, sync_iso_root, tmp_dir) elif os.path.exists('/usr/bin/parallel') and os.path.exists('/usr/bin/rsync'): + self.log.info(Color.INFO + 'Starting up parallel | rsync') message, ret = Shared.rsync_method(iso_root, sync_iso_root) else: self.log.error( - '[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' + + Color.FAIL + 'fpsync nor parallel + rsync were found on this system. ' + 'There is also no built-in parallel rsync method at this ' + 'time.' 
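The closeout sync above now logs which transport it picked before handing off to the existing Shared helpers. As a rough illustration of that fallback order (fpsync first, then parallel + rsync, otherwise bail out), here is a minimal sketch; the pick_sync_method wrapper and the import paths are assumptions for illustration only, while Color.INFO/Color.FAIL, Shared.fpsync_method and Shared.rsync_method come from the patch itself.

import os

from empanadas.common import Color            # Color.INFO / Color.FAIL added by this patch
from empanadas.util.shared import Shared      # assumed import path for the Shared helpers

def pick_sync_method(src, dest, tmp_dir, log):
    """Hypothetical wrapper mirroring the fallback used in run_compose_closeout()."""
    if os.path.exists('/usr/bin/fpsync'):
        log.info(Color.INFO + 'Starting up fpsync')
        return Shared.fpsync_method(src, dest, tmp_dir)
    if os.path.exists('/usr/bin/parallel') and os.path.exists('/usr/bin/rsync'):
        log.info(Color.INFO + 'Starting up parallel | rsync')
        return Shared.rsync_method(src, dest)
    log.error(Color.FAIL + 'fpsync nor parallel + rsync were found on this system.')
    raise SystemExit()

Both helpers return a (message, returncode) tuple, which is why the calling code above logs message with Color.INFO or Color.FAIL depending on the return value.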
@@ -1532,21 +1448,12 @@ class RepoSync: raise SystemExit() if ret != 0: - self.log.error( - '[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' + - message - ) + self.log.error(Color.FAIL + message) else: - self.log.info( - '[' + Color.BOLD + Color.GREEN + 'INFO' + Color.END + '] ' + - message - ) + self.log.info(Color.INFO + message) if os.path.exists(live_root): - self.log.info( - '[' + Color.BOLD + Color.GREEN + 'INFO' + Color.END + '] ' + - 'Starting to sync live images to compose' - ) + self.log.info(Color.INFO + 'Starting to sync live images to compose') if os.path.exists('/usr/bin/fpsync'): message, ret = Shared.fpsync_method(live_root, sync_live_root, tmp_dir) @@ -1554,15 +1461,9 @@ class RepoSync: message, ret = Shared.rsync_method(live_root, sync_live_root) if ret != 0: - self.log.error( - '[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' + - message - ) + self.log.error(Color.FAIL + message) else: - self.log.info( - '[' + Color.BOLD + Color.GREEN + 'INFO' + Color.END + '] ' + - message - ) + self.log.info(Color.INFO + message) # Combine all checksums here for arch in self.arches: diff --git a/iso/empanadas/empanadas/util/iso_utils.py b/iso/empanadas/empanadas/util/iso_utils.py index a9a80fa..6c911b9 100644 --- a/iso/empanadas/empanadas/util/iso_utils.py +++ b/iso/empanadas/empanadas/util/iso_utils.py @@ -83,6 +83,7 @@ class IsoBuild: self.lorax_result_root = config['mock_work_root'] + "/" + "lorax" self.mock_isolation = isolation self.iso_map = rlvars['iso_map'] + self.cloudimages = rlvars['cloudimages'] self.release_candidate = rc self.s3 = s3 self.force_unpack = force_unpack @@ -156,6 +157,16 @@ class IsoBuild: "work/isos" ) + self.live_work_dir = os.path.join( + self.compose_latest_dir, + "work/live" + ) + + self.image_work_dir = os.path.join( + self.compose_latest_dir, + "work/images" + ) + self.lorax_work_dir = os.path.join( self.compose_latest_dir, "work/lorax" @@ -347,16 +358,14 @@ class IsoBuild: unpack_single_arch = True arches_to_unpack = [self.arch] - self.log.info( - '[' + Color.BOLD + Color.GREEN + 'INFO' + Color.END + '] ' + - 'Determining the latest pulls...' - ) + self.log.info(Color.INFO + 'Determining the latest pulls...') if self.s3: latest_artifacts = Shared.s3_determine_latest( self.s3_bucket, self.release, self.arches, 'tar.gz', + 'lorax', self.log ) else: @@ -365,13 +374,11 @@ class IsoBuild: self.release, self.arches, 'tar.gz', + 'lorax', self.log ) - self.log.info( - '[' + Color.BOLD + Color.GREEN + 'INFO' + Color.END + '] ' + - 'Downloading requested artifact(s)' - ) + self.log.info(Color.INFO + 'Downloading requested artifact(s)') for arch in arches_to_unpack: lorax_arch_dir = os.path.join( self.lorax_work_dir, @@ -408,14 +415,8 @@ class IsoBuild: full_drop, self.log ) - self.log.info( - '[' + Color.BOLD + Color.GREEN + 'INFO' + Color.END + '] ' + - 'Download phase completed' - ) - self.log.info( - '[' + Color.BOLD + Color.GREEN + 'INFO' + Color.END + '] ' + - 'Beginning unpack phase...' 
- ) + self.log.info(Color.INFO + 'Download phase completed') + self.log.info(Color.INFO + 'Beginning unpack phase...') for arch in arches_to_unpack: tarname = 'lorax-{}-{}.tar.gz'.format( @@ -430,22 +431,13 @@ class IsoBuild: ) if not os.path.exists(tarball): - self.log.error( - '[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' + - 'Artifact does not exist: ' + tarball - ) + self.log.error(Color.FAIL + 'Artifact does not exist: ' + tarball) continue self._unpack_artifacts(self.force_unpack, arch, tarball) - self.log.info( - '[' + Color.BOLD + Color.GREEN + 'INFO' + Color.END + '] ' + - 'Unpack phase completed' - ) - self.log.info( - '[' + Color.BOLD + Color.GREEN + 'INFO' + Color.END + '] ' + - 'Beginning image variant phase' - ) + self.log.info(Color.INFO + 'Unpack phase completed') + self.log.info(Color.INFO + 'Beginning image variant phase') for arch in arches_to_unpack: self.log.info( @@ -456,15 +448,9 @@ class IsoBuild: self._copy_boot_to_work(self.force_unpack, arch) - self.log.info( - '[' + Color.BOLD + Color.GREEN + 'INFO' + Color.END + '] ' + - 'Image variant phase completed' - ) + self.log.info(Color.INFO + 'Image variant phase completed') - self.log.info( - '[' + Color.BOLD + Color.GREEN + 'INFO' + Color.END + '] ' + - 'Beginning treeinfo phase' - ) + self.log.info(Color.INFO + 'Beginning treeinfo phase') for arch in arches_to_unpack: for variant in self.iso_map['images']: @@ -488,10 +474,7 @@ class IsoBuild: if not force_unpack: file_check = os.path.join(unpack_dir, 'lorax/.treeinfo') if os.path.exists(file_check): - self.log.warn( - '[' + Color.BOLD + Color.YELLOW + 'WARN' + Color.END + '] ' + - 'Artifact (' + arch + ') already unpacked' - ) + self.log.warn(Color.WARN + 'Artifact (' + arch + ') already unpacked') return self.log.info('Unpacking %s' % tarball) @@ -515,10 +498,7 @@ class IsoBuild: ) if not os.path.exists(os.path.join(src_to_image, '.treeinfo')): - self.log.error( - '[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' + - 'Lorax base image does not exist' - ) + self.log.error(Color.FAIL + 'Lorax base image does not exist') return path_to_image = os.path.join( @@ -530,10 +510,7 @@ class IsoBuild: if not force_unpack: file_check = os.path.join(path_to_image, '.treeinfo') if os.path.exists(file_check): - self.log.warn( - '[' + Color.BOLD + Color.YELLOW + 'WARN' + Color.END + '] ' + - 'Lorax image for ' + image + ' already exists' - ) + self.log.warn(Color.WARN + 'Lorax image for ' + image + ' already exists') return self.log.info('Copying base lorax to %s directory...' % image) @@ -595,10 +572,7 @@ class IsoBuild: if not force_unpack: file_check = isobootpath if os.path.exists(file_check): - self.log.warn( - '[' + Color.BOLD + Color.YELLOW + 'WARN' + Color.END + '] ' + - 'Boot image (' + discname + ') already exists' - ) + self.log.warn(Color.WARN + 'Boot image (' + discname + ') already exists') return self.log.info('Copying %s boot iso to work directory...' % arch) @@ -610,10 +584,7 @@ class IsoBuild: self.log.info('Creating checksum for %s boot iso...' % arch) checksum = Shared.get_checksum(isobootpath, self.checksum, self.log) if not checksum: - self.log.error( - '[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' + - isobootpath + ' not found! Are you sure we copied it?' - ) + self.log.error(Color.FAIL + isobootpath + ' not found! 
Are you sure we copied it?') return with open(isobootpath + '.CHECKSUM', "w+") as c: c.write(checksum) @@ -638,8 +609,7 @@ class IsoBuild: ) if not os.path.exists(pathway): - self.log.error( - '[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' + + self.log.error(Color.FAIL + 'Repo and Image variant either does NOT match or does ' + 'NOT exist. Are you sure you have synced the repository?' ) @@ -658,16 +628,10 @@ class IsoBuild: found_files.append('/images/boot.iso') if len(found_files) > 0: - self.log.warn( - '[' + Color.BOLD + Color.YELLOW + 'WARN' + Color.END + '] ' + - 'Images and data for ' + repo + ' and ' + arch + ' already exists.' - ) + self.log.warn(Color.WARN + 'Images and data for ' + repo + ' and ' + arch + ' already exists.') return - self.log.info( - '[' + Color.BOLD + Color.GREEN + 'INFO' + Color.END + '] ' + - 'Copying images and data for ' + repo + ' ' + arch - ) + self.log.info(Color.INFO + 'Copying images and data for ' + repo + ' ' + arch) try: shutil.copytree(src_to_image, pathway, copy_function=shutil.copy2, dirs_exist_ok=True) @@ -727,10 +691,7 @@ class IsoBuild: try: Shared.treeinfo_modify_write(data, imagemap, self.log) except Exception as e: - self.log.error( - '[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' + - 'There was an error writing treeinfo.' - ) + self.log.error(Color.FAIL + 'There was an error writing treeinfo.') self.log.error(e) # Next set of functions are loosely borrowed (in concept) from pungi. Some @@ -743,26 +704,17 @@ class IsoBuild: """ sync_root = self.compose_latest_sync - self.log.info( - '[' + Color.BOLD + Color.GREEN + 'INFO' + Color.END + '] ' + - 'Starting Extra ISOs phase' - ) + self.log.info(Color.INFO + 'Starting Extra ISOs phase') if not os.path.exists(self.compose_base): - self.log.info( - '[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' + - 'The compose directory MUST be here. Cannot continue.' - ) + self.log.info(Color.FAIL + 'The compose directory MUST be here. Cannot continue.') raise SystemExit() self._extra_iso_build_wrap() self.log.info('Compose repo directory: %s' % sync_root) self.log.info('ISO result directory: %s/$arch' % self.lorax_work_dir) - self.log.info( - '[' + Color.BOLD + Color.GREEN + 'INFO' + Color.END + '] ' + - 'Extra ISO phase completed.' - ) + self.log.info(Color.INFO + 'Extra ISO phase completed.') def _extra_iso_build_wrap(self): """ @@ -784,26 +736,17 @@ class IsoBuild: for y in images_to_build: if 'isoskip' in self.iso_map['images'][y] and self.iso_map['images'][y]['isoskip']: - self.log.info( - '[' + Color.BOLD + Color.YELLOW + 'WARN' + Color.END + '] ' + - 'Skipping ' + y + ' image' - ) + self.log.info(Color.WARN + 'Skipping ' + y + ' image') continue for a in arches_to_build: lorax_path = os.path.join(self.lorax_work_dir, a, 'lorax', '.treeinfo') image_path = os.path.join(self.lorax_work_dir, a, y, '.treeinfo') if not os.path.exists(image_path): - self.log.error( - '[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' + - 'Lorax data not found for ' + y + '. Skipping.' - ) + self.log.error(Color.FAIL + 'Lorax data not found for ' + y + '. Skipping.') if not os.path.exists(lorax_path): - self.log.error( - '[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' + - 'Lorax not found at all. This is considered fatal.' - ) + self.log.error(Color.FAIL + 'Lorax not found at all. 
This is considered fatal.') raise SystemExit() @@ -819,10 +762,7 @@ class IsoBuild: elif self.extra_iso_mode == 'podman': continue else: - self.log.info( - '[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' + - 'Mode specified is not valid.' - ) + self.log.info(Color.FAIL + 'Mode specified is not valid.') raise SystemExit() if self.extra_iso_mode == 'podman': @@ -1053,10 +993,7 @@ class IsoBuild: join_all_pods = ' '.join(entry_name_list) time.sleep(3) - self.log.info( - '[' + Color.BOLD + Color.GREEN + 'INFO' + Color.END + '] ' + - 'Building ' + i + ' ...' - ) + self.log.info(Color.INFO + 'Building ' + i + ' ...') pod_watcher = '{} wait {}'.format( cmd, join_all_pods @@ -1085,9 +1022,7 @@ class IsoBuild: output, errors = podcheck.communicate() if 'Exited (0)' not in output.decode(): - self.log.error( - '[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' + pod - ) + self.log.error(Color.FAIL + pod) bad_exit_list.append(pod) rmcmd = '{} rm {}'.format( @@ -1106,34 +1041,21 @@ class IsoBuild: for p in checksum_list: path = os.path.join(isos_dir, p) if os.path.exists(path): - self.log.info( - '[' + Color.BOLD + Color.GREEN + 'INFO' + Color.END + '] ' + - 'Performing checksum for ' + p - ) + self.log.info(Color.INFO + 'Performing checksum for ' + p) checksum = Shared.get_checksum(path, self.checksum, self.log) if not checksum: - self.log.error( - '[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' + - path + ' not found! Are you sure it was built?' - ) + self.log.error(Color.FAIL + path + ' not found! Are you sure it was built?') with open(path + '.CHECKSUM', "w+") as c: c.write(checksum) c.close() - self.log.info( - '[' + Color.BOLD + Color.GREEN + 'INFO' + Color.END + '] ' + - 'Building ' + i + ' completed' - ) + self.log.info(Color.INFO + 'Building ' + i + ' completed') if len(bad_exit_list) == 0: - self.log.info( - '[' + Color.BOLD + Color.GREEN + 'INFO' + Color.END + '] ' + - 'Copying ISOs over to compose directory...' - ) - print() + self.log.info(Color.INFO + 'Copying ISOs over to compose directory...') else: self.log.error( - '[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' + + Color.FAIL + 'There were issues with the work done. As a result, ' + 'the ISOs will not be copied.' ) @@ -1152,17 +1074,14 @@ class IsoBuild: lorax_base_dir = os.path.join(self.lorax_work_dir, arch) global_work_dir = os.path.join(self.compose_latest_dir, "work/global") - self.log.info( - '[' + Color.BOLD + Color.GREEN + 'INFO' + Color.END + '] ' + - 'Generating graft points for extra iso: (' + arch + ') ' + iso - ) + self.log.info(Color.INFO + 'Generating graft points for extra iso: (' + arch + ') ' + iso) files = {} # This is the data we need to actually boot lorax_for_var = os.path.join(lorax_base_dir, iso) if not os.path.exists(lorax_for_var + '/.treeinfo'): self.log.info( - '[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' + + Color.FAIL + '!! .treeinfo is missing, does this variant actually exist? !!' ) return @@ -1612,6 +1531,64 @@ class IsoBuild: returned_cmd = ' '.join(cmd) return returned_cmd + def run_pull_generic_images(self): + """ + Pulls generic images built in peridot and places them where they need + to be. This relies on a list called "cloudimages" in the version + configuration. 
+ """ + unpack_single_arch = False + arches_to_unpack = self.arches + if self.arch: + unpack_single_arch = True + arches_to_unpack = [self.arch] + + for imagename in self.cloudimages: + self.log.info(Color.INFO + 'Determining the latest images for ' + imagename + ' ...') + + if self.s3: + latest_artifacts = Shared.s3_determine_latest( + self.s3_bucket, + self.release, + self.arches, + 'qcow2', + imagename, + self.log + ) + + else: + latest_artifacts = Shared.reqs_determine_latest( + self.s3_bucket_url, + self.release, + self.arches, + 'qcow2', + imagename, + self.log + ) + + + def run_build_live_iso(self): + """ + Builds DVD images based on the data created from the initial lorax on + each arch. This should NOT be called during the usual run() section. + """ + sync_root = self.compose_latest_sync + + self.log.info(Color.INFO + 'Starting Live ISOs phase') + + self._live_iso_build_wrap() + + self.log.info('Compose repo directory: %s' % sync_root) + self.log.info('ISO result directory: %s/$arch' % self.lorax_work_dir) + self.log.info(Color.INFO + 'Extra ISO phase completed.') + + def _live_iso_build_wrap(self): + """ + Prepare and actually build the live images. Based on arguments in self, + we'll either do it on mock in a loop or in podman, just like with the + extra iso phase. + """ + class LiveBuild: """ This helps us build the live images for Rocky Linux. diff --git a/iso/empanadas/empanadas/util/shared.py b/iso/empanadas/empanadas/util/shared.py index 77e6346..453e33b 100644 --- a/iso/empanadas/empanadas/util/shared.py +++ b/iso/empanadas/empanadas/util/shared.py @@ -513,7 +513,7 @@ class Shared: return 'Not available', 1 @staticmethod - def s3_determine_latest(s3_bucket, release, arches, filetype, logger): + def s3_determine_latest(s3_bucket, release, arches, filetype, name, logger): """ Using native s3, determine the latest artifacts and return a dict """ @@ -531,7 +531,7 @@ class Shared: raise SystemExit() for y in s3.list_objects(Bucket=s3_bucket)['Contents']: - if filetype in y['Key'] and release in y['Key']: + if filetype in y['Key'] and release in y['Key'] and name in y['Key']: temp.append(y['Key']) for arch in arches: @@ -569,7 +569,7 @@ class Shared: logger.error('There was an issue downloading from %s' % s3_bucket) @staticmethod - def reqs_determine_latest(s3_bucket_url, release, arches, filetype, logger): + def reqs_determine_latest(s3_bucket_url, release, arches, filetype, name, logger): """ Using requests, determine the latest artifacts and return a list """ @@ -585,7 +585,7 @@ class Shared: resp = xmltodict.parse(bucket_data.content) for y in resp['ListBucketResult']['Contents']: - if filetype in y['Key'] and release in y['Key']: + if filetype in y['Key'] and release in y['Key'] and name in y['Key']: temp.append(y['Key']) for arch in arches: