From c081f6f202eeeab5c7ab4440a72d7eba697f78c5 Mon Sep 17 00:00:00 2001 From: Louis Abel Date: Sun, 3 Jul 2022 00:19:13 -0700 Subject: [PATCH] test out parts of finalize --- iso/empanadas/empanadas/sig/cloud.yaml | 11 +- iso/empanadas/empanadas/util/dnf_utils.py | 147 +++++++-------- iso/empanadas/empanadas/util/iso_utils.py | 20 +- iso/empanadas/empanadas/util/shared.py | 215 ++++++++++++++++++++++ 4 files changed, 294 insertions(+), 99 deletions(-) diff --git a/iso/empanadas/empanadas/sig/cloud.yaml b/iso/empanadas/empanadas/sig/cloud.yaml index e7305c7..e0ad17a 100644 --- a/iso/empanadas/empanadas/sig/cloud.yaml +++ b/iso/empanadas/empanadas/sig/cloud.yaml @@ -1,6 +1,7 @@ --- cloud: '8': + profile: 'cloud' cloud-kernel: project_id: 'f91da90d-5bdb-4cf2-80ea-e07f8dae5a5c' allowed_arches: @@ -10,7 +11,15 @@ cloud: allowed_arches: - aarch64 - x86_64 - project_id: '' + project_id: 'f91da90d-5bdb-4cf2-80ea-e07f8dae5a5c' + extra_files: + git_repo: 'https://git.rockylinux.org/staging/src/rocky-release-cloud.git' + git_raw_path: 'https://git.rockylinux.org/staging/src/rocky-release-cloud/-/raw/r8/' + branch: 'r8' + gpg: + stable: 'SOURCES/RPM-GPG-KEY-Rocky-SIG-Cloud' + list: + - 'SOURCES/RPM-GPG-KEY-Rocky-SIG-Cloud' '9': cloud-kernel: project_id: '' diff --git a/iso/empanadas/empanadas/util/dnf_utils.py b/iso/empanadas/empanadas/util/dnf_utils.py index e2a6b0a..b275490 100644 --- a/iso/empanadas/empanadas/util/dnf_utils.py +++ b/iso/empanadas/empanadas/util/dnf_utils.py @@ -186,7 +186,13 @@ class RepoSync: # This should create the initial compose dir and set the path. # Otherwise, just use the latest link. 
if self.fullrun: - generated_dir = self.generate_compose_dirs() + generated_dir = Shared.generate_compose_dirs( + self.compose_base, + self.shortname, + self.fullversion, + self.date_stamp, + self.log + ) work_root = os.path.join( generated_dir, 'work' @@ -292,7 +298,7 @@ class RepoSync: Each container runs their own script wait till all is finished """ - cmd = self.podman_cmd() + cmd = Shared.podman_cmd(self.log) contrunlist = [] bad_exit_list = [] self.log.info('Generating container entries') @@ -582,24 +588,6 @@ class RepoSync: 'No issues detected.' ) - def generate_compose_dirs(self) -> str: - """ - Generate compose dirs for full runs - """ - compose_base_dir = os.path.join( - self.compose_base, - "{}-{}-{}".format( - self.shortname, - self.fullversion, - self.date_stamp - ) - ) - self.log.info('Creating compose directory %s' % compose_base_dir) - if not os.path.exists(compose_base_dir): - os.makedirs(compose_base_dir) - - return compose_base_dir - def symlink_to_latest(self, generated_dir): """ Emulates pungi and symlinks latest-Rocky-X @@ -628,7 +616,7 @@ class RepoSync: """ fname = os.path.join( dest_path, - "{}-config.repo".format(self.major_version) + "{}-{}-config.repo".format(self.shortname, self.major_version) ) self.log.info('Generating the repo configuration: %s' % fname) @@ -676,60 +664,6 @@ class RepoSync: config_file.close() return fname - def reposync_cmd(self) -> str: - """ - This generates the reposync command. We don't support reposync by - itself and will raise an error. - - :return: The path to the reposync command. If dnf exists, we'll use - that. Otherwise, fail immediately. - """ - cmd = None - if os.path.exists("/usr/bin/dnf"): - cmd = "/usr/bin/dnf reposync" - else: - self.log.error('/usr/bin/dnf was not found. Good bye.') - raise SystemExit("/usr/bin/dnf was not found. 
\n\n/usr/bin/reposync " - "is not sufficient and you are likely running on an el7 " - "system or a grossly modified EL8+ system, " + Color.BOLD + - "which tells us that you probably made changes to these tools " - "expecting them to work and got to this point." + Color.END) - return cmd - - def podman_cmd(self) -> str: - """ - This generates the podman run command. This is in the case that we want - to do reposyncs in parallel as we cannot reasonably run multiple - instances of dnf reposync on a single system. - """ - cmd = None - if os.path.exists("/usr/bin/podman"): - cmd = "/usr/bin/podman" - else: - self.log.error('/usr/bin/podman was not found. Good bye.') - raise SystemExit("\n\n/usr/bin/podman was not found.\n\nPlease " - " ensure that you have installed the necessary packages on " - " this system. " + Color.BOLD + "Note that docker is not " - "supported." + Color.END - ) - return cmd - - def git_cmd(self) -> str: - """ - This generates the git command. This is when we need to pull down extra - files or do work from a git repository. - """ - cmd = None - if os.path.exists("/usr/bin/git"): - cmd = "/usr/bin/git" - else: - self.log.error('/usr/bin/git was not found. Good bye.') - raise SystemExit("\n\n/usr/bin/git was not found.\n\nPlease " - " ensure that you have installed the necessary packages on " - " this system. " - ) - return cmd - def repoclosure_work(self, sync_root, work_root, log_root): """ This is where we run repoclosures, based on the configuration of each @@ -740,7 +674,7 @@ class RepoSync: against itself. (This means BaseOS should be able to survive by itself.) 
""" - cmd = self.podman_cmd() + cmd = Shared.podman_cmd(self.log) entries_dir = os.path.join(work_root, "entries") bad_exit_list = [] @@ -901,7 +835,7 @@ class RepoSync: 'Deploying treeinfo, discinfo, and media.repo' ) - cmd = self.git_cmd() + cmd = Shared.git_cmd(self.log) tmpclone = '/tmp/clone' extra_files_dir = os.path.join( global_work_root, @@ -1231,7 +1165,57 @@ class RepoSync: work/isos to compose/isos, and combines all checksum files per arch into a final CHECKSUM file. """ + # latest-X-Y should exist at all times for this to work. + work_root = os.path.join( + self.compose_latest_dir, + 'work' + ) + sync_root = self.compose_latest_sync + sync_iso_root = os.path.join( + sync_root, + 'isos' + ) + + tmp_dir = os.path.join( + self.compose_root, + 'partitions' + ) + + # Verify if the link even exists + if not os.path.exists(self.compose_latest_dir): + self.log.error('!! Latest compose link is broken does not exist: %s' % self.compose_latest_dir) + self.log.error('!! Please perform a full run if you have not done so.') + raise SystemExit() + + log_root = os.path.join( + work_root, + "logs", + self.date_stamp + ) + + iso_root = os.path.join( + work_root, + "isos" + ) + + global_work_root = os.path.join( + work_root, + "global", + ) + + self.log.info( + '[' + Color.BOLD + Color.GREEN + 'INFO' + Color.END + '] ' + + 'Starting to sync ISOs to compose' + ) + + iso_result = Shared.fpsync_method(iso_root, sync_iso_root, self.log, tmp_dir) + + if not iso_result: + self.log.error( + '[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' + + 'Sync failed' + ) class SigRepoSync: """ @@ -1278,6 +1262,7 @@ class SigRepoSync: self.compose_root = config['compose_root'] self.compose_base = config['compose_root'] + "/" + major self.profile = rlvars['profile'] + self.sigprofile = sigvars['profile'] self.iso_map = rlvars['iso_map'] self.distname = config['distname'] self.fullname = rlvars['fullname'] @@ -1312,7 +1297,11 @@ class SigRepoSync: self.compose_latest_dir = 
os.path.join( config['compose_root'], major, - "latest-{}-{}-SIG".format(self.shortname, major) + "latest-{}-{}-SIG-{}".format( + self.shortname, + major, + self.sigprofile + ) ) self.compose_latest_sync = os.path.join( @@ -1345,7 +1334,7 @@ class SigRepoSync: self.log.info('sig reposync init') self.log.info(major) - #self.dnf_config = self.generate_conf() + #self.dnf_config = Shared.generate_conf() def run(self): """ diff --git a/iso/empanadas/empanadas/util/iso_utils.py b/iso/empanadas/empanadas/util/iso_utils.py index d26a39a..e402c4a 100644 --- a/iso/empanadas/empanadas/util/iso_utils.py +++ b/iso/empanadas/empanadas/util/iso_utils.py @@ -1156,7 +1156,7 @@ class IsoBuild: to the compose directories. It's the same as if you were doing a reposync of the repositories. """ - cmd = self.podman_cmd() + cmd = Shared.podman_cmd(self.log) entries_dir = os.path.join(work_root, "entries") isos_dir = os.path.join(work_root, "isos") bad_exit_list = [] @@ -1766,24 +1766,6 @@ class IsoBuild: returned_cmd = ' '.join(cmd) return returned_cmd - def podman_cmd(self) -> str: - """ - This generates the podman run command. This is in the case that we want - to do reposyncs in parallel as we cannot reasonably run multiple - instances of dnf reposync on a single system. - """ - cmd = None - if os.path.exists("/usr/bin/podman"): - cmd = "/usr/bin/podman" - else: - self.log.error('/usr/bin/podman was not found. Good bye.') - raise SystemExit("\n\n/usr/bin/podman was not found.\n\nPlease " - " ensure that you have installed the necessary packages on " - " this system. " + Color.BOLD + "Note that docker is not " - "supported." + Color.END - ) - return cmd - class LiveBuild: """ This helps us build the live images for Rocky Linux. 
class Shared:
    """
    Stateless helpers shared by the compose tooling (RepoSync, SigRepoSync,
    IsoBuild).  Every method is a @staticmethod that receives its logger and
    configuration explicitly, so the per-class copies of these utilities can
    be removed from the callers.
    """

    @staticmethod
    def generate_compose_dirs(
            compose_base,
            shortname,
            version,
            date_stamp,
            logger
    ) -> str:
        """
        Generate compose dirs for full runs.

        :param compose_base: root directory composes are created under
        :param shortname: distribution short name (e.g. "Rocky")
        :param version: full version being composed
        :param date_stamp: timestamp suffix for this compose run
        :param logger: logger used for status output
        :return: the compose directory path (created if missing)
        """
        compose_base_dir = os.path.join(
            compose_base,
            "{}-{}-{}".format(
                shortname,
                version,
                date_stamp
            )
        )
        logger.info('Creating compose directory %s' % compose_base_dir)
        # exist_ok avoids the check-then-create race of the original
        # `if not os.path.exists(...)` guard.
        os.makedirs(compose_base_dir, exist_ok=True)

        return compose_base_dir

    @staticmethod
    def podman_cmd(logger) -> str:
        """
        This generates the podman run command. This is in the case that we want
        to do reposyncs in parallel as we cannot reasonably run multiple
        instances of dnf reposync on a single system.

        :param logger: logger used for error output
        :return: path to the podman binary
        :raises SystemExit: if /usr/bin/podman does not exist
        """
        if os.path.exists("/usr/bin/podman"):
            return "/usr/bin/podman"

        logger.error('/usr/bin/podman was not found. Good bye.')
        raise SystemExit("\n\n/usr/bin/podman was not found.\n\nPlease "
                " ensure that you have installed the necessary packages on "
                " this system. " + Color.BOLD + "Note that docker is not "
                "supported." + Color.END
        )

    @staticmethod
    def reposync_cmd(logger) -> str:
        """
        This generates the reposync command. We don't support reposync by
        itself and will raise an error.

        :param logger: logger used for error output
        :return: The path to the reposync command. If dnf exists, we'll use
                 that. Otherwise, fail immediately.
        :raises SystemExit: if /usr/bin/dnf does not exist
        """
        if os.path.exists("/usr/bin/dnf"):
            return "/usr/bin/dnf reposync"

        # Bug fix: the original called `logger(...)` directly; Logger objects
        # are not callable, which would raise TypeError instead of logging.
        logger.error('/usr/bin/dnf was not found. Good bye.')
        raise SystemExit("/usr/bin/dnf was not found. \n\n/usr/bin/reposync "
                "is not sufficient and you are likely running on an el7 "
                "system or a grossly modified EL8+ system, " + Color.BOLD +
                "which tells us that you probably made changes to these tools "
                "expecting them to work and got to this point." + Color.END)

    @staticmethod
    def git_cmd(logger) -> str:
        """
        This generates the git command. This is when we need to pull down extra
        files or do work from a git repository.

        :param logger: logger used for error output
        :return: path to the git binary
        :raises SystemExit: if /usr/bin/git does not exist
        """
        if os.path.exists("/usr/bin/git"):
            return "/usr/bin/git"

        logger.error('/usr/bin/git was not found. Good bye.')
        raise SystemExit("\n\n/usr/bin/git was not found.\n\nPlease "
                " ensure that you have installed the necessary packages on "
                " this system. "
        )

    @staticmethod
    def generate_conf(data, logger, dest_path='/var/tmp') -> str:
        """
        Generates the necessary repo conf file for the operation. This repo
        file should be temporary in nature. This will generate a repo file
        with all repos by default. If a repo is chosen for sync, that will be
        the only one synced.

        :param data: the (Sig)RepoSync-like object carrying shortname,
                     major_version, repo_base_url, project_id, hashed, repos,
                     extra_files, gpgkey and tmplenv — assumed, confirm
                     against callers
        :param logger: logger used for status/error output
        :param dest_path: The destination where the temporary conf goes
        :return: path of the generated repo configuration file
        :raises SystemExit: if repo_base_url is a local path
        """
        fname = os.path.join(
            dest_path,
            "{}-{}-config.repo".format(data.shortname, data.major_version)
        )
        # Consistency fix: use the logger that was passed in rather than
        # reaching back into data.log like the original did.
        logger.info('Generating the repo configuration: %s' % fname)

        if data.repo_base_url.startswith("/"):
            logger.error("Local file syncs are not supported.")
            raise SystemExit(Color.BOLD + "Local file syncs are not "
                    "supported." + Color.END)

        prehashed = "hashed-" if data.hashed else ''

        # create dest_path
        os.makedirs(dest_path, exist_ok=True)

        repolist = []
        for repo in data.repos:
            constructed_url = '{}/{}/repo/{}{}/$basearch'.format(
                data.repo_base_url,
                data.project_id,
                prehashed,
                repo,
            )

            constructed_url_src = '{}/{}/repo/{}{}/src'.format(
                data.repo_base_url,
                data.project_id,
                prehashed,
                repo,
            )

            repolist.append({
                'name': repo,
                'baseurl': constructed_url,
                'srcbaseurl': constructed_url_src,
                'gpgkey': data.extra_files['git_raw_path'] + data.extra_files['gpg'][data.gpgkey]
            })

        template = data.tmplenv.get_template('repoconfig.tmpl')
        output = template.render(repos=repolist)
        # Context manager guarantees the handle is closed even if the write
        # fails (the original opened the file early and leaked it on error).
        with open(fname, "w+") as config_file:
            config_file.write(output)

        return fname

    @staticmethod
    def quick_sync(src, dest, logger, tmp_dir):
        """
        Does a quick sync from one place to another. This determines the method
        in which will be used. We will look for fpsync and fall back to
        parallel | rsync if that is also available. It will fail if parallel is
        not available.

        Return true or false on completion?
        """
        # TODO: not implemented yet; should delegate to fpsync_method and
        # fall back to rsync_method once the fallback logic is decided.

    @staticmethod
    def simple_sync(src, dest):
        """
        This is for simple syncs only, using rsync or copytree.
        """
        # TODO: not implemented yet.

    @staticmethod
    def fpsync_method(src, dest, logger, tmp_dir):
        """
        Syncs src to dest using fpsync.

        (Docstring fix: the original said "Returns a list for the fpsync
        command", but the method actually runs fpsync and returns a bool.)

        :param src: source path
        :param dest: destination path
        :param logger: logger used for status/error output
        :param tmp_dir: scratch directory handed to fpsync via -t
        :return: True when the sync succeeded and dest exists, False otherwise
        """
        cmd = '/usr/bin/fpsync'
        rsync_switches = '-av --numeric-ids --no-compress --chown=10004:10005'
        if not os.path.exists(cmd):
            # logger.warn is deprecated; warning() is the supported spelling.
            logger.warning(
                '[' + Color.BOLD + Color.YELLOW + 'WARN' + Color.END + '] ' +
                'fpsync not found'
            )
            return False

        os.makedirs(tmp_dir, exist_ok=True)

        fpsync_cmd = '{} -o "{}" -n 18 -t {} {} {}'.format(
            cmd,
            rsync_switches,
            tmp_dir,
            src,
            dest
        )

        process = subprocess.call(
            shlex.split(fpsync_cmd),
            stdout=subprocess.DEVNULL,
            stderr=subprocess.DEVNULL,
        )
        if process != 0:
            logger.error(
                '[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' +
                'fpsync failed'
            )
            return False

        if not os.path.exists(dest):
            logger.error(
                '[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' +
                'Path synced does not seem to exist for some reason.'
            )
            return False

        return True

    @staticmethod
    def rsync_method(src, dest, logger, tmp_dir):
        """
        Returns a string for the rsync command plus parallel. Yes, this is a
        hack.
        """
        # TODO: not implemented yet.