From 4bf6fb66185dc42cc9ca15b905efe25637fb5416 Mon Sep 17 00:00:00 2001 From: Neil Hanlon Date: Mon, 20 Jun 2022 20:12:20 -0400 Subject: [PATCH 01/96] Implement a feature to assist in generating various images * use a flag to determine if we want an RC or not * Convert rldict and sigdict to an AttributeDict to allow access via __getattr__ * add fedora_release variable to configs for controlling icicle templates * build_image.py script to generate per-architecture XML files used by imagefactory * refactor time to call utcnow() once * add jinja types to development dependencies until we move past jinja 2.x * Generate TDL templates per architecture for each image variant on demand * Generate imagefactory and copy commands to execute image build * Refactor Kubernetes job template to be generic for all current jobs --- iso/empanadas/empanadas/common.py | 45 +++++- iso/empanadas/empanadas/configs/el8.yaml | 1 + iso/empanadas/empanadas/configs/el9-beta.yaml | 1 + iso/empanadas/empanadas/configs/el9.yaml | 1 + iso/empanadas/empanadas/configs/el9lh.yaml | 1 + .../empanadas/scripts/build_image.py | 145 ++++++++++++++++++ .../empanadas/scripts/launch_builds.py | 20 ++- .../empanadas/templates/icicle/tdl.xml.tmpl | 21 +++ .../empanadas/templates/kube/Job.tmpl | 15 +- iso/empanadas/pyproject.toml | 2 + 10 files changed, 239 insertions(+), 13 deletions(-) create mode 100644 iso/empanadas/empanadas/scripts/build_image.py create mode 100644 iso/empanadas/empanadas/templates/icicle/tdl.xml.tmpl diff --git a/iso/empanadas/empanadas/common.py b/iso/empanadas/empanadas/common.py index c3619ce..edb2533 100644 --- a/iso/empanadas/empanadas/common.py +++ b/iso/empanadas/empanadas/common.py @@ -8,6 +8,24 @@ import yaml import logging import hashlib + +from collections import defaultdict +from typing import Tuple + +# An implementation from the Fabric python library +class AttributeDict(defaultdict): + def __init__(self): + super(AttributeDict, self).__init__(AttributeDict) + + def __getattr__(self, key): + try: + return self[key] + except KeyError: + raise AttributeError(key) + + def __setattr__(self, key, value): + self[key] = value + # These are a bunch of colors we may use in terminal output class Color: RED = '\033[91m' @@ -22,8 +40,8 @@ class Color: END = '\033[0m' # vars and additional checks -rldict = {} -sigdict = {} +rldict = AttributeDict() +sigdict = AttributeDict() config = { "rlmacro": rpm.expandMacro('%rhel'), "dist": 'el' + rpm.expandMacro('%rhel'), @@ -77,3 +95,26 @@ for conf in glob.iglob(f"{_rootdir}/sig/*.yaml"): #rlvars = rldict[rlver] #rlvars = rldict[rlmacro] #COMPOSE_ISO_WORKDIR = COMPOSE_ROOT + "work/" + arch + "/" + date_stamp + + +def valid_type_variant(_type: str, variant: str="") -> Tuple[bool, str]: + ALLOWED_TYPE_VARIANTS = { + "Container": ["Base", "Minimal"], + "GenericCloud": [], + } + + if _type not in ALLOWED_TYPE_VARIANTS: + return False, f"Type is invalid: ({_type}, {variant})" + elif variant not in ALLOWED_TYPE_VARIANTS[_type]: + if variant.capitalize() in ALLOWED_TYPE_VARIANTS[_type]: + return False, f"Capitalization mismatch. Found: ({_type}, {variant}). 
Expected: ({_type}, {variant.capitalize()})" + return False, f"Type/Variant Combination is not allowed: ({_type}, {variant})" + return True, "" + +class Architecture(str): + @staticmethod + def New(architecture: str, version: int): + if architecture not in rldict[version]["allowed_arches"]: + print("Invalid architecture/version combo, skipping") + exit() + return Architecture(architecture) diff --git a/iso/empanadas/empanadas/configs/el8.yaml b/iso/empanadas/empanadas/configs/el8.yaml index eb80aff..8ce71c7 100644 --- a/iso/empanadas/empanadas/configs/el8.yaml +++ b/iso/empanadas/empanadas/configs/el8.yaml @@ -7,6 +7,7 @@ minor: '6' profile: '8' bugurl: 'https://bugs.rockylinux.org' + fedora_release: 28 allowed_arches: - x86_64 - aarch64 diff --git a/iso/empanadas/empanadas/configs/el9-beta.yaml b/iso/empanadas/empanadas/configs/el9-beta.yaml index 19d6cd5..116cc45 100644 --- a/iso/empanadas/empanadas/configs/el9-beta.yaml +++ b/iso/empanadas/empanadas/configs/el9-beta.yaml @@ -8,6 +8,7 @@ profile: '9-beta' bugurl: 'https://bugs.rockylinux.org' checksum: 'sha256' + fedora_release: 34 allowed_arches: - x86_64 - aarch64 diff --git a/iso/empanadas/empanadas/configs/el9.yaml b/iso/empanadas/empanadas/configs/el9.yaml index 88a978b..786c46a 100644 --- a/iso/empanadas/empanadas/configs/el9.yaml +++ b/iso/empanadas/empanadas/configs/el9.yaml @@ -6,6 +6,7 @@ major: '9' minor: '0' profile: '9' + fedora_release: 34 bugurl: 'https://bugs.rockylinux.org' checksum: 'sha256' allowed_arches: diff --git a/iso/empanadas/empanadas/configs/el9lh.yaml b/iso/empanadas/empanadas/configs/el9lh.yaml index 4176f66..ea833f7 100644 --- a/iso/empanadas/empanadas/configs/el9lh.yaml +++ b/iso/empanadas/empanadas/configs/el9lh.yaml @@ -8,6 +8,7 @@ profile: '9-lookahead' bugurl: 'https://bugs.rockylinux.org' checksum: 'sha256' + fedora_release: 34 allowed_arches: - x86_64 - aarch64 diff --git a/iso/empanadas/empanadas/scripts/build_image.py b/iso/empanadas/empanadas/scripts/build_image.py new file mode 100644 index 0000000..c9f7782 --- /dev/null +++ b/iso/empanadas/empanadas/scripts/build_image.py @@ -0,0 +1,145 @@ +# Builds an image given a version, type, variant, and architecture +# Defaults to the running host's architecture + +import argparse +import datetime +import os +import tempfile +import pathlib + +from jinja2 import Environment, FileSystemLoader, Template +from typing import List, Tuple + +from empanadas.common import Architecture, rldict, valid_type_variant +from empanadas.common import _rootdir + +parser = argparse.ArgumentParser(description="ISO Compose") + +parser.add_argument('--version', type=str, help="Release Version (8.6, 9.1)", required=True) +parser.add_argument('--rc', action='store_true', help="Release Candidate") +parser.add_argument('--kickstartdir', action='store_true', help="Use the kickstart dir instead of the os dir for repositories") +parser.add_argument('--debug', action='store_true', help="debug?") +parser.add_argument('--type', type=str, help="Image type (container, genclo, azure, aws, vagrant)", required=True) +parser.add_argument('--variant', type=str, help="", required=False) +parser.add_argument('--release', type=str, help="Image release for subsequent builds with the same date stamp (rarely needed)", required=False) + +results = parser.parse_args() +rlvars = rldict[results.version] +major = rlvars["major"] + +STORAGE_DIR = pathlib.Path("/var/lib/imagefactory/storage") +KICKSTART_PATH = pathlib.Path(os.environ.get("KICKSTART_PATH", "/kickstarts")) +BUILDTIME = 
datetime.datetime.utcnow()
+
+
+def render_icicle_template(template: Template, architecture: Architecture) -> str:
+    handle, output = tempfile.mkstemp()
+    if not handle:
+        exit(3)
+    with os.fdopen(handle, "wb") as tmp:
+        _template = template.render(
+            architecture=architecture,
+            fedora_version=rlvars["fedora_release"],
+            iso8601date=BUILDTIME.strftime("%Y%m%d"),
+            installdir="kickstart" if results.kickstartdir else "os",
+            major=major,
+            release=results.release if results.release else 0,
+            size="10G",
+            type=results.type.capitalize(),
+            utcnow=BUILDTIME,
+            version_variant=rlvars["revision"] if not results.variant else f"{rlvars['revision']}-{results.variant.capitalize()}",
+        )
+        tmp.write(_template.encode())
+    return output
+
+
+def generate_kickstart_imagefactory_args(debug: bool = False) -> str:
+    type_variant = results.type if not results.variant else f"{results.type}-{results.variant}" # todo -cleanup
+    kickstart_path = pathlib.Path(f"{KICKSTART_PATH}/Rocky-{major}-{type_variant}.ks")
+
+    if not kickstart_path.is_file():
+        print(f"Kickstart file is not available: {kickstart_path}")
+        if not debug:
+            exit(2)
+
+    return f"--file-parameter install_script {kickstart_path}"
+
+def get_image_format(_type: str) -> str:
+    mapping = {
+        "Container": "docker"
+    }
+    return mapping[_type] if _type in mapping.keys() else ''
+
+def generate_imagefactory_commands(tdl_template: Template, architecture: Architecture) -> List[List[str]]:
+    template_path = render_icicle_template(tdl_template, architecture)
+    if not template_path:
+        exit(2)
+
+    args_mapping = {
+        "debug": "--debug"
+    }
+
+    # only supports boolean flags right now?
+    args = [param for name, param in args_mapping.items() if getattr(results,name)]
+    package_args = []
+
+    kickstart_arg = generate_kickstart_imagefactory_args(True) # REMOVE DEBUG ARG
+
+    if results.type == "Container":
+        args += ["--parameter", "offline_icicle", "true"]
+        package_args += ["--parameter", "compress", "xz"]
+        tar_command = ["tar", "-Oxf", f"{STORAGE_DIR}/*.body", "./layer.tar"]
+
+    type_variant = results.type if not results.variant else f"{results.type}-{results.variant}" # todo -cleanup
+    outname = f"Rocky-{rlvars['major']}-{type_variant}.{BUILDTIME.strftime('%Y%m%d')}.{results.release if results.release else 0}.{architecture}"
+
+    outdir = pathlib.Path(f"/tmp/{outname}")
+
+    build_command = (f"imagefactory base_image {kickstart_arg} {' '.join(args)} {template_path}"
+                     f" | tee -a {outdir}/logs/base_image-{outname}.out"
+                     f" | tail -n4 > {outdir}/base.meta || exit 2"
+                     )
+
+
+    out_type = get_image_format(results.type)
+    package_command = ["imagefactory", "target_image", *args, template_path,
+            "--id", "$(awk '$1==\"UUID\":{print $NF}'"+f" /tmp/{outname}/base.meta)",
+            *package_args,
+            "--parameter", "repository", outname, out_type,
+            "|", "tee", "-a", f"{outdir}/base_image-{outname}.out",
+            "|", "tail", "-n4", ">", f"{outdir}/target.meta", "||", "exit", "3"
+    ]
+
+    copy_command = (f"aws s3 cp --recursive {outdir}/ s3://resf-empanadas/buildimage-{ outname }/{ BUILDTIME.strftime('%s') }/"
+    )
+    commands = [build_command, package_command, copy_command]
+    return commands
+
+def run():
+    result, error = valid_type_variant(results.type, results.variant)
+    if not result:
+        print(error)
+        exit(2)
+
+    file_loader = FileSystemLoader(f"{_rootdir}/templates")
+    tmplenv = Environment(loader=file_loader)
+    tdl_template = tmplenv.get_template('icicle/tdl.xml.tmpl')
+    job_template = tmplenv.get_template('kube/Job.tmpl')
+
+    for architecture in rlvars["allowed_arches"]:
+        
architecture = Architecture.New(architecture, major) + + commands = generate_imagefactory_commands(tdl_template, architecture) + + print(job_template.render( + architecture=architecture, + backoffLimit=4, + buildTime=datetime.datetime.utcnow().strftime("%s"), + command=commands, + imageName="ghcr.io/neilhanlon/sig-core-toolkit:latest", + jobname="buildimage", + namespace="empanadas", + major=major, + restartPolicy="Never", + )) + diff --git a/iso/empanadas/empanadas/scripts/launch_builds.py b/iso/empanadas/empanadas/scripts/launch_builds.py index f0f82f7..abd01a4 100755 --- a/iso/empanadas/empanadas/scripts/launch_builds.py +++ b/iso/empanadas/empanadas/scripts/launch_builds.py @@ -12,6 +12,7 @@ parser = argparse.ArgumentParser(description="ISO Compose") parser.add_argument('--release', type=str, help="Major Release Version", required=True) parser.add_argument('--env', type=str, help="environment", required=True) +parser.add_argument('--rc', action='store_true', help="Release Candidate") results = parser.parse_args() rlvars = rldict[results.release] major = rlvars['major'] @@ -30,16 +31,25 @@ def run(): elif results.env == "all": arches = EKSARCH+EXTARCH - command = ["build-iso", "--release", f"{results.release}", "--rc", "--isolation", "simple"] + command = ["build-iso", "--release", f"{results.release}", "--isolation", "simple"] + if results.rc: + command += ["--rc"] + + buildstamp = datetime.datetime.utcnow() out = "" - for arch in arches: + for architecture in arches: + copy_command = (f"aws s3 cp --recursive --exclude=* --include=lorax* " + f"/var/lib/mock/rocky-{ major }-$(uname -m)/root/builddir/ " + f"s3://resf-empanadas/buildiso-{ major }-{ architecture }/{ buildstamp.strftime('%s') }/" + ) out += job_template.render( - architecture=arch, + architecture=architecture, backoffLimit=4, - buildTime=datetime.datetime.utcnow().strftime("%s"), - command=command, + buildTime=buildstamp.strftime("%s"), + command=[command, copy_command], imageName="ghcr.io/neilhanlon/sig-core-toolkit:latest", + jobname="buildiso", namespace="empanadas", major=major, restartPolicy="Never", diff --git a/iso/empanadas/empanadas/templates/icicle/tdl.xml.tmpl b/iso/empanadas/empanadas/templates/icicle/tdl.xml.tmpl new file mode 100644 index 0000000..14e8dd8 --- /dev/null +++ b/iso/empanadas/empanadas/templates/icicle/tdl.xml.tmpl @@ -0,0 +1,21 @@ + + + diff --git a/iso/empanadas/empanadas/templates/kube/Job.tmpl b/iso/empanadas/empanadas/templates/kube/Job.tmpl index bfcc20a..1ddf1f2 100644 --- a/iso/empanadas/empanadas/templates/kube/Job.tmpl +++ b/iso/empanadas/empanadas/templates/kube/Job.tmpl @@ -2,7 +2,7 @@ apiVersion: batch/v1 kind: Job metadata: - name: build-iso-{{ major }}-{{ architecture }} + name: {{ jobname }}-{{ major }}-{{ architecture }} namespace: {{ namespace }} spec: template: @@ -11,15 +11,18 @@ spec: peridot.rockylinux.org/workflow-tolerates-arch: {{ architecture }} spec: containers: - - name: buildiso-{{ major }}-{{ architecture }} + - name: {{ jobname }}-{{ major }}-{{ architecture }} image: {{ imageName }} command: ["/bin/bash", "-c"] args: - | - {{ command | join(' ') }} - aws s3 cp --recursive --exclude=* --include=lorax* \ - /var/lib/mock/rocky-{{ major }}-$(uname -m)/root/builddir/ \ - "s3://resf-empanadas/buildiso-{{ major }}-{{ architecture }}/{{ buildTime }}/" +{%- for c in command -%} +{%- if c is string %} + {{ c }} +{%- else %} + {{ ' '.join(c) }} +{%- endif %} +{%- endfor %} securityContext: runAsUser: 0 runAsGroup: 0 diff --git a/iso/empanadas/pyproject.toml 
b/iso/empanadas/pyproject.toml index a43a91d..35460be 100644 --- a/iso/empanadas/pyproject.toml +++ b/iso/empanadas/pyproject.toml @@ -19,6 +19,7 @@ kobo = "^0.24.1" [tool.poetry.dev-dependencies] pytest = "~5" +types-Jinja2 = "^2.11.9" # Remove when upgrading past Jinja 2.x as type annotations are in-tree [tool.poetry.scripts] sync_from_peridot = "empanadas.scripts.sync_from_peridot:run" @@ -28,6 +29,7 @@ build-iso = "empanadas.scripts.build_iso:run" build-iso-extra = "empanadas.scripts.build_iso_extra:run" pull-unpack-tree = "empanadas.scripts.pull_unpack_tree:run" launch-builds = "empanadas.scripts.launch_builds:run" +build-image = "empanadas.scripts.build_image:run" [build-system] requires = ["poetry-core>=1.0.0"] From e60f6524a22e03ee6799b2828d36656b1a7809be Mon Sep 17 00:00:00 2001 From: Neil Hanlon Date: Mon, 20 Jun 2022 20:12:20 -0400 Subject: [PATCH 02/96] Implement a feature to assist in generating various images * use a flag to determine if we want an RC or not * Convert rldict and sigdict to an AttributeDict to allow access via __getattr__ * add fedora_release variable to configs for controlling icicle templates * build_image.py script to generate per-architecture XML files used by imagefactory * refactor time to call utcnow() once * add jinja types to development dependencies until we move past jinja 2.x * Generate TDL templates per architecture for each image variant on demand * Generate imagefactory and copy commands to execute image build * Refactor Kubernetes job template to be generic for all current jobs --- iso/empanadas/empanadas/scripts/build_image.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/iso/empanadas/empanadas/scripts/build_image.py b/iso/empanadas/empanadas/scripts/build_image.py index c9f7782..6222253 100644 --- a/iso/empanadas/empanadas/scripts/build_image.py +++ b/iso/empanadas/empanadas/scripts/build_image.py @@ -103,15 +103,14 @@ def generate_imagefactory_commands(tdl_template: Template, architecture: Archite out_type = get_image_format(results.type) package_command = ["imagefactory", "target_image", *args, template_path, - "--id", "$(awk '$1==\"UUID\":{print $NF}'"+f" /tmp/{outname}/base.meta)", + "--id", "$(awk '$1==\"UUID:\"{print $NF}'"+f" /tmp/{outname}/base.meta)", *package_args, "--parameter", "repository", outname, out_type, "|", "tee", "-a", f"{outdir}/base_image-{outname}.out", "|", "tail", "-n4", ">", f"{outdir}/target.meta", "||", "exit", "3" ] - copy_command = (f"aws s3 cp --recursive {outdir}/ s3://resf-empanadas/buildimage-{ outname }/{ BUILDTIME.strftime('%s') }/" - ) + copy_command = (f"aws s3 cp --recursive {outdir}/* s3://resf-empanadas/buildimage-{ outname }/{ BUILDTIME.strftime('%s') }/") commands = [build_command, package_command, copy_command] return commands From 79425e848e2232186ba4296cbea3031358c10096 Mon Sep 17 00:00:00 2001 From: Neil Hanlon Date: Tue, 28 Jun 2022 09:29:49 -0400 Subject: [PATCH 03/96] Use devel branch for container, doc fixes --- iso/empanadas/Containerfile | 2 +- iso/empanadas/empanadas/scripts/launch_builds.py | 8 ++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/iso/empanadas/Containerfile b/iso/empanadas/Containerfile index c3c4ed9..3eb45c2 100644 --- a/iso/empanadas/Containerfile +++ b/iso/empanadas/Containerfile @@ -56,7 +56,7 @@ RUN rm -rf /etc/yum.repos.d/*.repo RUN useradd -o -d /var/peridot -u 1002 peridotbuilder && usermod -a -G mock peridotbuilder RUN chown peridotbuilder:mock /etc/yum.conf && chown -R peridotbuilder:mock /etc/dnf && chown -R 
peridotbuilder:mock /etc/rpm && chown -R peridotbuilder:mock /etc/yum.repos.d -RUN pip install 'git+https://git.rockylinux.org/release-engineering/public/toolkit.git@feature/iso-kube#egg=empanadas&subdirectory=iso/empanadas' +RUN pip install 'git+https://git.rockylinux.org/release-engineering/public/toolkit.git@devel#egg=empanadas&subdirectory=iso/empanadas' RUN pip install awscli diff --git a/iso/empanadas/empanadas/scripts/launch_builds.py b/iso/empanadas/empanadas/scripts/launch_builds.py index abd01a4..86ae1e8 100755 --- a/iso/empanadas/empanadas/scripts/launch_builds.py +++ b/iso/empanadas/empanadas/scripts/launch_builds.py @@ -8,11 +8,11 @@ from empanadas.common import _rootdir from jinja2 import Environment, FileSystemLoader -parser = argparse.ArgumentParser(description="ISO Compose") +parser = argparse.ArgumentParser(description="Generate Kubernetes Jobs to run lorax in mock and upload the result. Pipe into kubectl for the appropriate cluster") -parser.add_argument('--release', type=str, help="Major Release Version", required=True) -parser.add_argument('--env', type=str, help="environment", required=True) -parser.add_argument('--rc', action='store_true', help="Release Candidate") +parser.add_argument('--release', type=str, help="Major Release Version: (8|9)", required=True) +parser.add_argument('--env', type=str, help="environment: one of (eks|ext|all). presently jobs are scheduled on different kubernetes clusters", required=True) +parser.add_argument('--rc', action='store_true', help="Release Candidate, Beta, RLN") results = parser.parse_args() rlvars = rldict[results.release] major = rlvars['major'] From 0deaae0c65ee8dd1b162464ef3dc92e540b6a3a8 Mon Sep 17 00:00:00 2001 From: Neil Hanlon Date: Tue, 28 Jun 2022 09:39:01 -0400 Subject: [PATCH 04/96] update github workflow to build image in the right namespace --- iso/empanadas/empanadas/scripts/build_image.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/iso/empanadas/empanadas/scripts/build_image.py b/iso/empanadas/empanadas/scripts/build_image.py index 6222253..8a7f373 100644 --- a/iso/empanadas/empanadas/scripts/build_image.py +++ b/iso/empanadas/empanadas/scripts/build_image.py @@ -135,7 +135,7 @@ def run(): backoffLimit=4, buildTime=datetime.datetime.utcnow().strftime("%s"), command=commands, - imageName="ghcr.io/neilhanlon/sig-core-toolkit:latest", + imageName="ghcr.io/rockylinux/sig-core-toolkit:latest", jobname="buildimage", namespace="empanadas", major=major, From 710cb081349c793d7d88e962e628c677153fa3b1 Mon Sep 17 00:00:00 2001 From: Neil Hanlon Date: Tue, 28 Jun 2022 10:04:23 -0400 Subject: [PATCH 05/96] update github workflow to build image in the right namespace --- iso/empanadas/empanadas/scripts/launch_builds.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/iso/empanadas/empanadas/scripts/launch_builds.py b/iso/empanadas/empanadas/scripts/launch_builds.py index 86ae1e8..e693d7b 100755 --- a/iso/empanadas/empanadas/scripts/launch_builds.py +++ b/iso/empanadas/empanadas/scripts/launch_builds.py @@ -48,7 +48,7 @@ def run(): backoffLimit=4, buildTime=buildstamp.strftime("%s"), command=[command, copy_command], - imageName="ghcr.io/neilhanlon/sig-core-toolkit:latest", + imageName="ghcr.io/rocky-linux/sig-core-toolkit:latest", jobname="buildiso", namespace="empanadas", major=major, From 760967211baa57bfdbcbbe320dde5e71e207559b Mon Sep 17 00:00:00 2001 From: Neil Hanlon Date: Tue, 28 Jun 2022 10:10:30 -0400 Subject: [PATCH 06/96] Update the image tag --- 
.github/workflows/mix-empanadas.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/mix-empanadas.yml b/.github/workflows/mix-empanadas.yml index 2f408ca..ebae78d 100644 --- a/.github/workflows/mix-empanadas.yml +++ b/.github/workflows/mix-empanadas.yml @@ -3,7 +3,7 @@ name: Build empanada container images on: push: - branches: [ $default-branch ] + branches: [ $default-branch, "devel" ] pull_request: branches: [ $default-branch ] workflow_dispatch: @@ -42,6 +42,6 @@ jobs: context: ./iso/empanadas file: ./iso/empanadas/Containerfile push: ${{ github.event_name != 'pull_request' }} - tags: ghcr.io/neilhanlon/sig-core-toolkit:latest + tags: ghcr.io/rocky-linux/sig-core-toolkit:latest cache-from: type=gha cache-to: type=gha,mode=max From 7d7163a1567f06a40034075c361be0b1eaed2d29 Mon Sep 17 00:00:00 2001 From: nazunalika Date: Tue, 28 Jun 2022 07:49:23 -0700 Subject: [PATCH 07/96] prepping treeinfo outside --- iso/empanadas/empanadas/util/dnf_utils.py | 4 ++- iso/empanadas/empanadas/util/iso_utils.py | 15 ++++++--- iso/empanadas/empanadas/util/shared.py | 41 +++++++++++++++++++++++ 3 files changed, 55 insertions(+), 5 deletions(-) diff --git a/iso/empanadas/empanadas/util/dnf_utils.py b/iso/empanadas/empanadas/util/dnf_utils.py index a54b4ee..8291a8f 100644 --- a/iso/empanadas/empanadas/util/dnf_utils.py +++ b/iso/empanadas/empanadas/util/dnf_utils.py @@ -78,6 +78,7 @@ class RepoSync: self.compose_root = config['compose_root'] self.compose_base = config['compose_root'] + "/" + major self.profile = rlvars['profile'] + self.iso_map = rlvars['iso_map'] # Relevant major version items self.shortname = config['shortname'] @@ -935,7 +936,8 @@ class RepoSync: def deploy_treeinfo(self, repo, sync_root, arch): """ Deploys initial treeinfo files. These have the potential of being - overwritten by our ISO process, which is fine. + overwritten by our ISO process, which is fine. If there is a treeinfo + found, it will be skipped. """ arches_to_tree = self.arches if arch: diff --git a/iso/empanadas/empanadas/util/iso_utils.py b/iso/empanadas/empanadas/util/iso_utils.py index 5921df9..de93dac 100644 --- a/iso/empanadas/empanadas/util/iso_utils.py +++ b/iso/empanadas/empanadas/util/iso_utils.py @@ -450,7 +450,7 @@ class IsoBuild: for arch in arches_to_unpack: for variant in self.iso_map['images']: self.log.info( - 'Configuring treeinfo for %s%s %s%s' % (Color.BOLD, arch, variant, Color.END) + 'Configuring treeinfo and discinfo for %s%s %s%s' % (Color.BOLD, arch, variant, Color.END) ) self._treeinfo_wrapper(arch, variant) @@ -741,12 +741,15 @@ class IsoBuild: def _treeinfo_wrapper(self, arch, variant): """ - Ensure treeinfo is written correctly based on the variant passed. Each - .treeinfo file should be configured similarly but also differently from - the next. + Ensure treeinfo and discinfo is written correctly based on the variant + passed. Each file should be configured similarly but also differently + from the next. The Shared module does have a .treeinfo writer, but it + is for basic use. Eventually it'll be expanded to handle this scenario. 
""" image = os.path.join(self.lorax_work_dir, arch, variant) treeinfo = os.path.join(image, '.treeinfo') + discinfo = os.path.join(image, '.discinfo') + mediarepo = os.path.join(image, 'media.repo') imagemap = self.iso_map['images'][variant] primary = imagemap['variant'] repos = imagemap['repos'] @@ -827,6 +830,10 @@ class IsoBuild: # Set default variant ti.dump(treeinfo, main_variant=primary) + # Set discinfo + Shared.discinfo_write(self.timestamp, self.fullname, arch, discinfo) + # Set media.repo + Shared.media_repo_write(self.timestamp, self.fullname, mediarepo) # Next set of functions are loosely borrowed (in concept) from pungi. Some # stuff may be combined/mixed together, other things may be simplified or diff --git a/iso/empanadas/empanadas/util/shared.py b/iso/empanadas/empanadas/util/shared.py index d9bb357..e3b08c5 100644 --- a/iso/empanadas/empanadas/util/shared.py +++ b/iso/empanadas/empanadas/util/shared.py @@ -2,6 +2,7 @@ import os import hashlib +import productmd.treeinfo class Shared: """ @@ -44,6 +45,46 @@ class Shared: checksum.hexdigest() ) + @staticmethod + def treeinfo_new_write( + file_path, + distname, + shortname, + release, + arch, + time, + repo + ): + """ + Writes really basic treeinfo, this is for single repository treeinfo + data. This is usually called in the case of a fresh run and each repo + needs one. + """ + ti = productmd.treeinfo.TreeInfo() + ti.release.name = distname + ti.release.short = shortname + ti.release.version = release + ti.tree.arch = arch + ti.tree.build_timestamp = time + # Variants (aka repos) + variant = productmd.treeinfo.Variant(ti) + variant.id = repo + variant.uid = repo + variant.name = repo + variant.type = "variant" + variant.repository = "." + variant.packages = "Packages" + ti.variants.add(variant) + ti.dump(file_path) + + @staticmethod + def treeinfo_modify_write(): + """ + Modifies a specific treeinfo with already available data. This is in + the case of modifying treeinfo for primary repos or images. + """ + + @staticmethod def discinfo_write(timestamp, fullname, arch, file_path): """ From f65a331826077d26bc31bf93a2c486f988070ff3 Mon Sep 17 00:00:00 2001 From: Neil Hanlon Date: Tue, 28 Jun 2022 11:19:54 -0400 Subject: [PATCH 08/96] remove jinja types --- iso/empanadas/pyproject.toml | 1 - 1 file changed, 1 deletion(-) diff --git a/iso/empanadas/pyproject.toml b/iso/empanadas/pyproject.toml index 35460be..fb25ee5 100644 --- a/iso/empanadas/pyproject.toml +++ b/iso/empanadas/pyproject.toml @@ -19,7 +19,6 @@ kobo = "^0.24.1" [tool.poetry.dev-dependencies] pytest = "~5" -types-Jinja2 = "^2.11.9" # Remove when upgrading past Jinja 2.x as type annotations are in-tree [tool.poetry.scripts] sync_from_peridot = "empanadas.scripts.sync_from_peridot:run" From f342046f2516fb682c263a90018efe9cb48ceeb1 Mon Sep 17 00:00:00 2001 From: nazunalika Date: Tue, 28 Jun 2022 10:08:17 -0700 Subject: [PATCH 09/96] test should use && --- iso/empanadas/empanadas/templates/buildExtraImage.tmpl.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/iso/empanadas/empanadas/templates/buildExtraImage.tmpl.sh b/iso/empanadas/empanadas/templates/buildExtraImage.tmpl.sh index ae9b5cf..fbac286 100644 --- a/iso/empanadas/empanadas/templates/buildExtraImage.tmpl.sh +++ b/iso/empanadas/empanadas/templates/buildExtraImage.tmpl.sh @@ -5,7 +5,7 @@ set -ex {{ lorax_pkg_cmd }} mkdir -p {{ compose_work_iso_dir }}/{{ arch }} cd {{ compose_work_iso_dir }}/{{ arch }} -test -f {{ isoname }} || { echo "!! 
ISO ALREDY EXISTS !!"; exit 1; }
+test -f {{ isoname }} && { echo "ERROR: ISO ALREADY EXISTS!"; exit 1; }
 {% else %}
 
 cd /builddir
 
From 1a0439985117af086b109b42d57e1ec33b9ab5a6 Mon Sep 17 00:00:00 2001
From: Louis Abel 
Date: Tue, 28 Jun 2022 12:41:12 -0700
Subject: [PATCH 10/96] fix up readme, disable gitlab tests for now

---
 .gitlab-ci.yml => .disable.gitlab-ci.yml |  0
 README.md                                |  8 +++----
 iso/empanadas/README.md                  | 27 +++++++++++++++---------
 3 files changed, 20 insertions(+), 15 deletions(-)
 rename .gitlab-ci.yml => .disable.gitlab-ci.yml (100%)

diff --git a/.gitlab-ci.yml b/.disable.gitlab-ci.yml
similarity index 100%
rename from .gitlab-ci.yml
rename to .disable.gitlab-ci.yml
diff --git a/README.md b/README.md
index 9495108..1a88e0d 100644
--- a/README.md
+++ b/README.md
@@ -3,10 +3,8 @@ sig-core-toolkit
 Release Engineering toolkit for repeatable operations or functionality
 testing.
 
-Currently mirrored at our [github](https://github.com/rocky-linux),
-[Rocky Linux Git Service](https://git.rockylinux.org), and the
-[RESF Git Service](https://git.resf.org). Changes either occur at the Rocky
-Linux Git Service or RESF Git Service.
+Currently mirrored at our [github](https://github.com/rocky-linux), and the
+[RESF Git Service](https://git.resf.org). Changes will typically occur at GitHub.
 
 What does this have?
 --------------------
@@ -14,7 +12,7 @@ What does this have?
 * analyze -> Analysis utilities (such as download stats)
 * chat -> mattermost related utilities
 * func -> (mostly defunct) testing scripts and tools to test base functionality
-* iso -> ISO related utilities
+* iso -> ISO and Compose related utilities, primarily for Rocky Linux 9+
 * live -> Live image related utilities
 * mangle -> Manglers and other misc stuff
 * sync -> Sync tools, primarily for Rocky Linux 8
diff --git a/iso/empanadas/README.md b/iso/empanadas/README.md
index e414f5e..0c90442 100644
--- a/iso/empanadas/README.md
+++ b/iso/empanadas/README.md
@@ -22,18 +22,24 @@ There should be additional logging regardless, not just to stdout, but also to a
 
 ## scripts
 
-* sync-variant-pungi
-* sync-variant-peridot
-* sync-from-pungi
-* sync-from-peridot
-* sync-sig
-* build-all-iso
-* sign-repos-only
+```
+* sync_from_peridot -> Syncs repositories from Peridot
+* sync_sig -> Syncs SIG repositories from Peridot
+* build-iso -> Builds initial ISO's using Lorax
+* build-iso-extra -> Builds DVD's and other images based on Lorax data
+* launch-builds -> Creates a kube config to run build-iso
+* build-image -> Runs build-iso
+```
 
 ## wrappers
 
-* lorax-generators
-* sync-generators
+```
+* common -> The starting point
+* iso_utils -> Does work for ISO building and generation
+* dnf_utils -> Does work for repo building and generation
+* check -> Checks if the architecture/release combination are valid
+* shared -> Shared utilities between all wrappers
+```
 
 ## rules
 
 When making a script, you *must* import common. 
This is insanely bad practice, but we would prefer if we started out this way: ``` -from common import * import argparse +from empanadas.common import * +from empanadas.util import Checks ``` Whatever is imported in common will effectively be imported in your scripts as From 69317f388137ccd24ef9880fe48558fb039c7149 Mon Sep 17 00:00:00 2001 From: Louis Abel Date: Wed, 29 Jun 2022 20:36:59 -0700 Subject: [PATCH 11/96] make IPA tests runnable, lamp disabled for now --- func/ipa.sh | 59 +++++++++++++++++++++++ func/stacks/ipa/00-ipa-pregame.sh | 0 func/stacks/ipa/10-install-ipa.sh | 0 func/stacks/ipa/11-configure-ipa.sh | 0 func/stacks/ipa/12-verify-ipa.sh | 0 func/stacks/ipa/20-ipa-user.sh | 0 func/stacks/ipa/21-ipa-service.sh | 0 func/stacks/ipa/22-ipa-dns.sh | 0 func/stacks/ipa/23-ipa-sudo.sh | 0 func/stacks/ipa/50-cleanup-ipa.sh | 0 func/stacks/lamp/00-install-lamp.sh | 0 func/stacks/lamp/01-verification.sh | 0 func/stacks/lamp/10-test-lamp.sh | 0 iso/empanadas/empanadas/util/dnf_utils.py | 10 ++++ 14 files changed, 69 insertions(+) create mode 100644 func/ipa.sh mode change 100644 => 100755 func/stacks/ipa/00-ipa-pregame.sh mode change 100644 => 100755 func/stacks/ipa/10-install-ipa.sh mode change 100644 => 100755 func/stacks/ipa/11-configure-ipa.sh mode change 100644 => 100755 func/stacks/ipa/12-verify-ipa.sh mode change 100644 => 100755 func/stacks/ipa/20-ipa-user.sh mode change 100644 => 100755 func/stacks/ipa/21-ipa-service.sh mode change 100644 => 100755 func/stacks/ipa/22-ipa-dns.sh mode change 100644 => 100755 func/stacks/ipa/23-ipa-sudo.sh mode change 100644 => 100755 func/stacks/ipa/50-cleanup-ipa.sh mode change 100755 => 100644 func/stacks/lamp/00-install-lamp.sh mode change 100755 => 100644 func/stacks/lamp/01-verification.sh mode change 100755 => 100644 func/stacks/lamp/10-test-lamp.sh diff --git a/func/ipa.sh b/func/ipa.sh new file mode 100644 index 0000000..19086c1 --- /dev/null +++ b/func/ipa.sh @@ -0,0 +1,59 @@ +#!/bin/bash +# Release Engineering Core Functionality Testing +# Louis Abel @nazunalika + +################################################################################ +# Settings and variables + +# Exits on any non-zero exit status - Disabled for now. +#set -e +# Undefined variables will cause an exit +set -u + +COMMON_EXPORTS='./common/exports.sh' +COMMON_IMPORTS='./common/imports.sh' +SELINUX=$(getenforce) + +# End +################################################################################ + +# shellcheck source=/dev/null disable=SC2015 +[ -f $COMMON_EXPORTS ] && source $COMMON_EXPORTS || { echo -e "\n[-] $(date): Variables cannot be sourced."; exit 1; } +# shellcheck source=/dev/null disable=SC2015 +[ -f $COMMON_IMPORTS ] && source $COMMON_IMPORTS || { echo -e "\n[-] $(date): Functions cannot be sourced."; exit 1; } +# Init log +# shellcheck disable=SC2015 +[ -e "$LOGFILE" ] && m_recycleLog || touch "$LOGFILE" +# SELinux check +if [ "$SELINUX" != "Enforcing" ]; then + echo -e "\n[-] $(date): SELinux is not enforcing." 
+ exit 1 +fi + +r_log "internal" "Starting Release Engineering Core Tests" + +################################################################################ +# Script Work + +# Skip tests in a list - some tests are already -x, so it won't be an issue +if [ -e skip.list ]; then + r_log "internal" "Disabling tests" + # shellcheck disable=SC2162 + grep -E "^${RL_VER}" skip.list | while read line; do + # shellcheck disable=SC2086 + testFile="$(echo $line | cut -d '|' -f 2)" + r_log "internal" "SKIP ${testFile}" + chmod -x "${testFile}" + done + r_log "internal" "WARNING: Tests above were disabled." +fi + +# TODO: should we let $1 judge what directory is ran? +# TODO: get some stacks and lib in there + +#r_processor <(/usr/bin/find ./core -type f | sort -t'/') +#r_processor <(/usr/bin/find ./lib -type f | sort -t'/') +r_processor <(/usr/bin/find ./stacks/ipa -type f | sort -t'/') + +r_log "internal" "Core Tests completed" +exit 0 diff --git a/func/stacks/ipa/00-ipa-pregame.sh b/func/stacks/ipa/00-ipa-pregame.sh old mode 100644 new mode 100755 diff --git a/func/stacks/ipa/10-install-ipa.sh b/func/stacks/ipa/10-install-ipa.sh old mode 100644 new mode 100755 diff --git a/func/stacks/ipa/11-configure-ipa.sh b/func/stacks/ipa/11-configure-ipa.sh old mode 100644 new mode 100755 diff --git a/func/stacks/ipa/12-verify-ipa.sh b/func/stacks/ipa/12-verify-ipa.sh old mode 100644 new mode 100755 diff --git a/func/stacks/ipa/20-ipa-user.sh b/func/stacks/ipa/20-ipa-user.sh old mode 100644 new mode 100755 diff --git a/func/stacks/ipa/21-ipa-service.sh b/func/stacks/ipa/21-ipa-service.sh old mode 100644 new mode 100755 diff --git a/func/stacks/ipa/22-ipa-dns.sh b/func/stacks/ipa/22-ipa-dns.sh old mode 100644 new mode 100755 diff --git a/func/stacks/ipa/23-ipa-sudo.sh b/func/stacks/ipa/23-ipa-sudo.sh old mode 100644 new mode 100755 diff --git a/func/stacks/ipa/50-cleanup-ipa.sh b/func/stacks/ipa/50-cleanup-ipa.sh old mode 100644 new mode 100755 diff --git a/func/stacks/lamp/00-install-lamp.sh b/func/stacks/lamp/00-install-lamp.sh old mode 100755 new mode 100644 diff --git a/func/stacks/lamp/01-verification.sh b/func/stacks/lamp/01-verification.sh old mode 100755 new mode 100644 diff --git a/func/stacks/lamp/10-test-lamp.sh b/func/stacks/lamp/10-test-lamp.sh old mode 100755 new mode 100644 diff --git a/iso/empanadas/empanadas/util/dnf_utils.py b/iso/empanadas/empanadas/util/dnf_utils.py index 8291a8f..1c17c7b 100644 --- a/iso/empanadas/empanadas/util/dnf_utils.py +++ b/iso/empanadas/empanadas/util/dnf_utils.py @@ -947,6 +947,16 @@ class RepoSync: if repo and not self.fullrun: repos_to_tree = [repo] + # If a treeinfo or discinfo file exists, it should be skipped. + + def run_compose_closeout(self): + """ + Closes out a compose as file. This ensures kickstart repositories are + made, the treeinfo is modifed for the primary repository, syncs + work/isos to compose/isos, and combines all checksum files per arch + into a final CHECKSUM file. 
+ """ + class SigRepoSync: """ From 2e384002556aee504f502e0fed4c646952b6a7cc Mon Sep 17 00:00:00 2001 From: Louis Abel Date: Wed, 29 Jun 2022 20:39:13 -0700 Subject: [PATCH 12/96] expect is missing from install --- func/stacks/ipa/10-install-ipa.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/func/stacks/ipa/10-install-ipa.sh b/func/stacks/ipa/10-install-ipa.sh index 0e5b029..901055e 100755 --- a/func/stacks/ipa/10-install-ipa.sh +++ b/func/stacks/ipa/10-install-ipa.sh @@ -11,4 +11,4 @@ if [ "$RL_VER" -eq 8 ]; then p_enableModule idm:DL1/{client,common,dns,server} fi -p_installPackageNormal ipa-server ipa-server-dns +p_installPackageNormal ipa-server ipa-server-dns expect From 1acca22141b22ff9d3063b931ee8fb52155c1cc3 Mon Sep 17 00:00:00 2001 From: Louis Abel Date: Wed, 29 Jun 2022 20:45:58 -0700 Subject: [PATCH 13/96] make kinit as admin simpler --- func/stacks/ipa/20-ipa-user.sh | 10 +--------- func/stacks/ipa/21-ipa-service.sh | 10 +--------- func/stacks/ipa/22-ipa-dns.sh | 10 +--------- func/stacks/ipa/23-ipa-sudo.sh | 10 +--------- 4 files changed, 4 insertions(+), 36 deletions(-) diff --git a/func/stacks/ipa/20-ipa-user.sh b/func/stacks/ipa/20-ipa-user.sh index 9965a56..71be555 100755 --- a/func/stacks/ipa/20-ipa-user.sh +++ b/func/stacks/ipa/20-ipa-user.sh @@ -13,15 +13,7 @@ kdestroy &> /dev/null klist 2>&1 | grep -E "(No credentials|Credentials cache .* not found)" &> /dev/null r_checkExitStatus $? -expect -f - < /dev/null r_checkExitStatus $? diff --git a/func/stacks/ipa/21-ipa-service.sh b/func/stacks/ipa/21-ipa-service.sh index db50dd1..408709b 100755 --- a/func/stacks/ipa/21-ipa-service.sh +++ b/func/stacks/ipa/21-ipa-service.sh @@ -13,15 +13,7 @@ kdestroy &> /dev/null klist 2>&1 | grep -E "(No credentials|Credentials cache .* not found)" &> /dev/null r_checkExitStatus $? -expect -f - < /dev/null r_checkExitStatus $? diff --git a/func/stacks/ipa/22-ipa-dns.sh b/func/stacks/ipa/22-ipa-dns.sh index 4d74174..d9aa8e9 100755 --- a/func/stacks/ipa/22-ipa-dns.sh +++ b/func/stacks/ipa/22-ipa-dns.sh @@ -13,15 +13,7 @@ kdestroy &> /dev/null klist 2>&1 | grep -qE "(No credentials|Credentials cache .* not found)" &> /dev/null r_checkExitStatus $? -expect -f - < /dev/null r_checkExitStatus $? diff --git a/func/stacks/ipa/23-ipa-sudo.sh b/func/stacks/ipa/23-ipa-sudo.sh index 68e50ac..983bd41 100755 --- a/func/stacks/ipa/23-ipa-sudo.sh +++ b/func/stacks/ipa/23-ipa-sudo.sh @@ -13,15 +13,7 @@ kdestroy &> /dev/null klist 2>&1 | grep -E "(No credentials|Credentials cache .* not found)" &> /dev/null r_checkExitStatus $? -expect -f - < /dev/null r_checkExitStatus $? From 1d710a6d42d3700cfb9981d0d9ca323b9c80f130 Mon Sep 17 00:00:00 2001 From: Louis Abel Date: Wed, 29 Jun 2022 21:27:50 -0700 Subject: [PATCH 14/96] fix up IPA portion --- func/stacks/ipa/20-ipa-user.sh | 25 +++++++++++++++++-------- func/stacks/ipa/21-ipa-service.sh | 8 ++++---- func/stacks/ipa/22-ipa-dns.sh | 6 +++--- 3 files changed, 24 insertions(+), 15 deletions(-) diff --git a/func/stacks/ipa/20-ipa-user.sh b/func/stacks/ipa/20-ipa-user.sh index 71be555..c0f5313 100755 --- a/func/stacks/ipa/20-ipa-user.sh +++ b/func/stacks/ipa/20-ipa-user.sh @@ -19,28 +19,37 @@ klist | grep "admin@RLIPA.LOCAL" &> /dev/null r_checkExitStatus $? r_log "ipa" "Test adding a user" -userDetails="$(ipa user-add --first=test --last=user --random ipatestuser)" -echo "$userDetails" | grep -q 'Added user "ipatestuser"' -r_checkExitStatus $? 
+ipa user-add --first=test --last=user --random ipatestuser > /tmp/ipatestuser +grep -q 'Added user "ipatestuser"' /tmp/ipatestuser -echo "$userDetails" | grep -q 'First name: test' +ret_val=$? +if [ "$ret_val" -ne 0 ]; then + r_log "ipa" "User was not created, this is considered fatal" + r_checkExitStatus 1 + exit 1 +fi + +sed -i 's|^ ||g' /tmp/ipatestuser +grep -q 'First name: test' /tmp/ipatestuser r_checkExitStatus $? -echo "$userDetails" | grep -q 'Last name: user' +grep -q 'Last name: user' /tmp/ipatestuser r_checkExitStatus $? -echo "$userDetails" | grep -q 'Full name: test user' +grep -q 'Full name: test user' /tmp/ipatestuser r_checkExitStatus $? -echo "$userDetails" | grep -q 'Home directory: /home/ipatestuser' +grep -q 'Home directory: /home/ipatestuser' /tmp/ipatestuser r_checkExitStatus $? r_log "ipa" "Changing password of the user" kdestroy &> /dev/null +userPassword="$(awk '/Random password/ { print $3 }' /tmp/ipatestuser)" +/bin/rm /tmp/ipatestuser expect -f - < /dev/null r_checkExitStatus $? r_log "ipa" "Adding test service" -ipa service-add testservice/rltest.rlipa.local &> /dev/null +ipa service-add testservice/onyxtest.rlipa.local &> /dev/null r_checkExitStatus $? r_log "ipa" "Getting keytab for service" -ipa-getkeytab -s rltest.rlipa.local -p testservice/rltest.rlipa.local -k /tmp/testservice.keytab &> /dev/null +ipa-getkeytab -s onyxtest.rlipa.local -p testservice/onyxtest.rlipa.local -k /tmp/testservice.keytab &> /dev/null r_checkExitStatus $? r_log "ipa" "Getting a certificate for service" -ipa-getcert request -K testservice/rltest.rlipa.local -D rltest.rlipa.local -f /etc/pki/tls/certs/testservice.crt -k /etc/pki/tls/private/testservice.key &> /dev/null +ipa-getcert request -K testservice/onyxtest.rlipa.local -D onyxtest.rlipa.local -f /etc/pki/tls/certs/testservice.crt -k /etc/pki/tls/private/testservice.key &> /dev/null r_checkExitStatus $? while true; do @@ -49,7 +49,7 @@ while ! stat /etc/pki/tls/certs/testservice.crt &> /dev/null; do done r_log "ipa" "Verifying keytab" -klist -k /tmp/testservice.keytab | grep "testservice/rltest.rlipa.local" &> /dev/null +klist -k /tmp/testservice.keytab | grep "testservice/onyxtest.rlipa.local" &> /dev/null r_checkExitStatus $? r_log "ipa" "Verifying key matches the certificate" diff --git a/func/stacks/ipa/22-ipa-dns.sh b/func/stacks/ipa/22-ipa-dns.sh index d9aa8e9..e0b507f 100755 --- a/func/stacks/ipa/22-ipa-dns.sh +++ b/func/stacks/ipa/22-ipa-dns.sh @@ -19,7 +19,7 @@ klist | grep "admin@RLIPA.LOCAL" &> /dev/null r_checkExitStatus $? r_log "ipa" "Adding testzone subdomain" -ipa dnszone-add --name-server=rltest.rlipa.local. --admin-email=hostmaster.testzone.rlipa.local. testzone.rlipa.local &> /dev/null +ipa dnszone-add --name-server=onyxtest.rlipa.local. --admin-email=hostmaster.testzone.rlipa.local. testzone.rlipa.local &> /dev/null r_checkExitStatus $? sleep 5 @@ -28,7 +28,7 @@ dig @localhost SOA testzone.rlipa.local | grep -q "status: NOERROR" &> /dev/null r_checkExitStatus $? r_log "ipa" "Adding a CNAME record to the primary domain" -ipa dnsrecord-add rlipa.local testrecord --cname-hostname=rltest &> /dev/null +ipa dnsrecord-add rlipa.local testrecord --cname-hostname=onyxtest &> /dev/null r_checkExitStatus $? sleep 5 @@ -37,7 +37,7 @@ dig @localhost CNAME testrecord.rlipa.local | grep -q "status: NOERROR" &> /dev/ r_checkExitStatus $? r_log "ipa" "Adding a CNAME to subdomain" -ipa dnsrecord-add testzone.rlipa.local testrecord --cname-hostname=rltest.rlipa.local. 
&> /dev/null +ipa dnsrecord-add testzone.rlipa.local testrecord --cname-hostname=onyxtest.rlipa.local. &> /dev/null r_checkExitStatus $? sleep 5 From 4922e283d6962a781c9b78df2e32c7a591381fef Mon Sep 17 00:00:00 2001 From: Louis Abel Date: Wed, 29 Jun 2022 21:49:12 -0700 Subject: [PATCH 15/96] add a sudo test --- func/stacks/ipa/23-ipa-sudo.sh | 46 +++++++++++++++++++++++++++++++--- 1 file changed, 43 insertions(+), 3 deletions(-) diff --git a/func/stacks/ipa/23-ipa-sudo.sh b/func/stacks/ipa/23-ipa-sudo.sh index 983bd41..a52c4ef 100755 --- a/func/stacks/ipa/23-ipa-sudo.sh +++ b/func/stacks/ipa/23-ipa-sudo.sh @@ -9,11 +9,51 @@ if [ "$IPAINSTALLED" -eq 1 ]; then r_checkExitStatus 1 fi -kdestroy &> /dev/null -klist 2>&1 | grep -E "(No credentials|Credentials cache .* not found)" &> /dev/null +kdestroy -A +klist 2>&1 | grep -E "(No credentials|Credentials cache .* not found)" r_checkExitStatus $? echo "b1U3OnyX!" | kinit admin@RLIPA.LOCAL -klist | grep "admin@RLIPA.LOCAL" &> /dev/null +klist | grep -q "admin@RLIPA.LOCAL" +r_checkExitStatus $? + +r_log "ipa" "Creating a test sudo rule" +ipa sudorule-add testrule --desc="Test rule in IPA" --hostcat=all --cmdcat=all --runasusercat=all --runasgroupcat=all &> /dev/null +r_checkExitStatus $? + +r_log "ipa" "Adding user to test sudo rule" +ipa sudorule-add-user testrule --users="ipatestuser" &> /dev/null +r_checkExitStatus $? + +r_log "ipa" "Verifying rule..." +ipa sudorule-show testrule > /tmp/testrule +grep -q 'Rule name: testrule' /tmp/testrule +r_checkExitStatus $? +grep -q 'Description: Test rule in IPA' /tmp/testrule +r_checkExitStatus $? +grep -q 'Enabled: TRUE' /tmp/testrule +r_checkExitStatus $? +grep -q 'Host category: all' /tmp/testrule +r_checkExitStatus $? +grep -q 'Command category: all' /tmp/testrule +r_checkExitStatus $? +grep -q 'RunAs User category: all' /tmp/testrule +r_checkExitStatus $? +grep -q 'RunAs Group category: all' /tmp/testrule +r_checkExitStatus $? +grep -q 'Users: ipatestuser' /tmp/testrule +r_checkExitStatus $? + +m_serviceCycler sssd stop +rm -rf /var/lib/sss/db/* +m_serviceCycler sssd start + +sleep 5 + +r_log "ipa" "Verifying sudo abilities" +sudo -l -U ipatestuser > /tmp/sudooutput +grep -q 'ipatestuser may run the following commands' /tmp/sudooutput +r_checkExitStatus $? +grep -q 'ALL) ALL' /tmp/sudooutput r_checkExitStatus $? From 5a02fe5a25497ce12e2ace27af554e1692aaed24 Mon Sep 17 00:00:00 2001 From: Louis Abel Date: Thu, 30 Jun 2022 01:43:14 -0700 Subject: [PATCH 16/96] Resolve RLBT#0000132 discinfo and treeinfo --- iso/empanadas/empanadas/util/dnf_utils.py | 208 ++++++++++++++++++++++ 1 file changed, 208 insertions(+) diff --git a/iso/empanadas/empanadas/util/dnf_utils.py b/iso/empanadas/empanadas/util/dnf_utils.py index 1c17c7b..8af3bac 100644 --- a/iso/empanadas/empanadas/util/dnf_utils.py +++ b/iso/empanadas/empanadas/util/dnf_utils.py @@ -74,11 +74,15 @@ class RepoSync: # Relevant config items self.major_version = major self.date_stamp = config['date_stamp'] + self.timestamp = time.time() self.repo_base_url = config['repo_base_url'] self.compose_root = config['compose_root'] self.compose_base = config['compose_root'] + "/" + major self.profile = rlvars['profile'] self.iso_map = rlvars['iso_map'] + self.distname = config['distname'] + self.fullname = rlvars['fullname'] + self.shortname = config['shortname'] # Relevant major version items self.shortname = config['shortname'] @@ -939,6 +943,11 @@ class RepoSync: overwritten by our ISO process, which is fine. If there is a treeinfo found, it will be skipped. 
""" + self.log.info( + '[' + Color.BOLD + Color.GREEN + 'INFO' + Color.END + '] ' + + 'Deploying treeinfo, discinfo, and media.repo' + ) + arches_to_tree = self.arches if arch: arches_to_tree = [arch] @@ -948,6 +957,205 @@ class RepoSync: repos_to_tree = [repo] # If a treeinfo or discinfo file exists, it should be skipped. + for r in repos_to_tree: + entry_name_list = [] + repo_name = r + arch_tree = arches_to_tree.copy() + + if r in self.repo_renames: + repo_name = self.repo_renames[r] + + # I feel it's necessary to make sure even i686 has .treeinfo and + # .discinfo, just for consistency. + if 'all' in r and 'x86_64' in arches_to_tree and self.multilib: + arch_tree.append('i686') + + for a in arch_tree: + os_tree_path = os.path.join( + sync_root, + repo_name, + a, + 'os/.treeinfo' + ) + + os_disc_path = os.path.join( + sync_root, + repo_name, + a, + 'os/.discinfo' + ) + + os_media_path = os.path.join( + sync_root, + repo_name, + a, + 'os/media.repo' + ) + + if not os.path.exists(os_tree_path): + Shared.treeinfo_new_write( + os_tree_path, + self.distname, + self.shortname, + self.fullversion, + a, + self.timestamp, + repo_name + ) + else: + self.log.warn( + '[' + Color.BOLD + Color.YELLOW + 'WARN' + Color.END + '] ' + + repo_name + ' ' + a + 'os .treeinfo already exists' + ) + + if not os.path.exists(os_disc_path): + Shared.discinfo_write( + self.timestamp, + self.fullname, + a, + os_disc_path + ) + else: + self.log.warn( + '[' + Color.BOLD + Color.YELLOW + 'WARN' + Color.END + '] ' + + repo_name + ' ' + a + 'os .discinfo already exists' + ) + + if not os.path.exists(os_media_path): + Shared.media_repo_write( + self.timestamp, + self.fullname, + os_media_path + ) + else: + self.log.warn( + '[' + Color.BOLD + Color.YELLOW + 'WARN' + Color.END + '] ' + + repo_name + ' ' + a + 'os media.repo already exists' + ) + + if not self.ignore_debug: + debug_tree_path = os.path.join( + sync_root, + repo_name, + a, + 'debug/tree/.treeinfo' + ) + + debug_disc_path = os.path.join( + sync_root, + repo_name, + a, + 'debug/tree/.discinfo' + ) + + debug_media_path = os.path.join( + sync_root, + repo_name, + a, + 'debug/tree/media.repo' + ) + + if not os.path.exists(debug_tree_path): + Shared.treeinfo_new_write( + debug_tree_path, + self.distname, + self.shortname, + self.fullversion, + a, + self.timestamp, + repo_name + ) + else: + self.log.warn( + '[' + Color.BOLD + Color.YELLOW + 'WARN' + Color.END + '] ' + + r + ' ' + a + 'debug .treeinfo already exists' + ) + + if not os.path.exists(debug_disc_path): + Shared.discinfo_write( + self.timestamp, + self.fullname, + a, + debug_disc_path + ) + else: + self.log.warn( + '[' + Color.BOLD + Color.YELLOW + 'WARN' + Color.END + '] ' + + r + ' ' + a + 'debug .discinfo already exists' + ) + + if not os.path.exists(debug_media_path): + Shared.media_repo_write( + self.timestamp, + self.fullname, + debug_media_path + ) + else: + self.log.warn( + '[' + Color.BOLD + Color.YELLOW + 'WARN' + Color.END + '] ' + + repo_name + ' ' + a + 'debug media.repo already exists' + ) + + + if not self.ignore_source: + source_tree_path = os.path.join( + sync_root, + repo_name, + 'source/tree/.treeinfo' + ) + + source_disc_path = os.path.join( + sync_root, + repo_name, + 'source/tree/.discinfo' + ) + + source_media_path = os.path.join( + sync_root, + repo_name, + 'source/tree/media.repo' + ) + + if not os.path.exists(source_tree_path): + Shared.treeinfo_new_write( + source_tree_path, + self.distname, + self.shortname, + self.fullversion, + 'src', + self.timestamp, + repo_name + ) + else: + 
self.log.warn( + '[' + Color.BOLD + Color.YELLOW + 'WARN' + Color.END + '] ' + + repo_name + ' source os .treeinfo already exists' + ) + + if not os.path.exists(source_disc_path): + Shared.discinfo_write( + self.timestamp, + self.fullname, + 'src', + source_disc_path + ) + else: + self.log.warn( + '[' + Color.BOLD + Color.YELLOW + 'WARN' + Color.END + '] ' + + repo_name + ' source .discinfo already exists' + ) + + if not os.path.exists(source_media_path): + Shared.media_repo_write( + self.timestamp, + self.fullname, + source_media_path + ) + else: + self.log.warn( + '[' + Color.BOLD + Color.YELLOW + 'WARN' + Color.END + '] ' + + repo_name + ' source media.repo already exists' + ) def run_compose_closeout(self): """ From 49b001e31da636c015568fcb1ac3d5a33a118b36 Mon Sep 17 00:00:00 2001 From: Louis Abel Date: Thu, 30 Jun 2022 01:45:08 -0700 Subject: [PATCH 17/96] Resolve RLBT#0000132 discinfo and treeinfo --- iso/empanadas/empanadas/scripts/sync_from_peridot.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/iso/empanadas/empanadas/scripts/sync_from_peridot.py b/iso/empanadas/empanadas/scripts/sync_from_peridot.py index 1e40ed8..d025f65 100755 --- a/iso/empanadas/empanadas/scripts/sync_from_peridot.py +++ b/iso/empanadas/empanadas/scripts/sync_from_peridot.py @@ -22,6 +22,7 @@ parser.add_argument('--dry-run', action='store_true') parser.add_argument('--full-run', action='store_true') parser.add_argument('--no-fail', action='store_true') parser.add_argument('--refresh-extra-files', action='store_true') +parser.add_argument('--refresh-treeinfo', action='store_true') # I am aware this is confusing, I want podman to be the default option parser.add_argument('--simple', action='store_false') parser.add_argument('--logger', type=str) @@ -52,6 +53,7 @@ a = RepoSync( nofail=results.no_fail, logger=results.logger, refresh_extra_files=results.refresh_extra_files, + refresh_treeinfo=results.refresh_treeinfo, ) def run(): From 2c3409de5158554ba4910674c8187c262f777b64 Mon Sep 17 00:00:00 2001 From: Louis Abel Date: Thu, 30 Jun 2022 03:06:29 -0700 Subject: [PATCH 18/96] treeinfo is not writing to paths --- iso/empanadas/empanadas/util/dnf_utils.py | 12 ++++++------ iso/empanadas/empanadas/util/shared.py | 4 ++-- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/iso/empanadas/empanadas/util/dnf_utils.py b/iso/empanadas/empanadas/util/dnf_utils.py index 8af3bac..58480db 100644 --- a/iso/empanadas/empanadas/util/dnf_utils.py +++ b/iso/empanadas/empanadas/util/dnf_utils.py @@ -1005,7 +1005,7 @@ class RepoSync: else: self.log.warn( '[' + Color.BOLD + Color.YELLOW + 'WARN' + Color.END + '] ' + - repo_name + ' ' + a + 'os .treeinfo already exists' + repo_name + ' ' + a + ' os .treeinfo already exists' ) if not os.path.exists(os_disc_path): @@ -1018,7 +1018,7 @@ class RepoSync: else: self.log.warn( '[' + Color.BOLD + Color.YELLOW + 'WARN' + Color.END + '] ' + - repo_name + ' ' + a + 'os .discinfo already exists' + repo_name + ' ' + a + ' os .discinfo already exists' ) if not os.path.exists(os_media_path): @@ -1030,7 +1030,7 @@ class RepoSync: else: self.log.warn( '[' + Color.BOLD + Color.YELLOW + 'WARN' + Color.END + '] ' + - repo_name + ' ' + a + 'os media.repo already exists' + repo_name + ' ' + a + ' os media.repo already exists' ) if not self.ignore_debug: @@ -1068,7 +1068,7 @@ class RepoSync: else: self.log.warn( '[' + Color.BOLD + Color.YELLOW + 'WARN' + Color.END + '] ' + - r + ' ' + a + 'debug .treeinfo already exists' + r + ' ' + a + ' debug .treeinfo already exists' ) if not 
os.path.exists(debug_disc_path): @@ -1081,7 +1081,7 @@ class RepoSync: else: self.log.warn( '[' + Color.BOLD + Color.YELLOW + 'WARN' + Color.END + '] ' + - r + ' ' + a + 'debug .discinfo already exists' + r + ' ' + a + ' debug .discinfo already exists' ) if not os.path.exists(debug_media_path): @@ -1093,7 +1093,7 @@ class RepoSync: else: self.log.warn( '[' + Color.BOLD + Color.YELLOW + 'WARN' + Color.END + '] ' + - repo_name + ' ' + a + 'debug media.repo already exists' + repo_name + ' ' + a + ' debug media.repo already exists' ) diff --git a/iso/empanadas/empanadas/util/shared.py b/iso/empanadas/empanadas/util/shared.py index e3b08c5..2661356 100644 --- a/iso/empanadas/empanadas/util/shared.py +++ b/iso/empanadas/empanadas/util/shared.py @@ -72,8 +72,8 @@ class Shared: variant.uid = repo variant.name = repo variant.type = "variant" - variant.repository = "." - variant.packages = "Packages" + variant.paths.repository = "." + variant.paths.packages = "Packages" ti.variants.add(variant) ti.dump(file_path) From 7f6f7babf5e66211885e353e68659b39663278c4 Mon Sep 17 00:00:00 2001 From: Louis Abel Date: Thu, 30 Jun 2022 13:14:27 -0700 Subject: [PATCH 19/96] Attempt to resolve RLBT#0000133 --- iso/empanadas/empanadas/util/__init__.py | 1 + iso/empanadas/empanadas/util/iso_utils.py | 44 ++++++++++++++++++++++- iso/empanadas/empanadas/util/shared.py | 23 ++++++++++++ 3 files changed, 67 insertions(+), 1 deletion(-) diff --git a/iso/empanadas/empanadas/util/__init__.py b/iso/empanadas/empanadas/util/__init__.py index f107a54..828e595 100644 --- a/iso/empanadas/empanadas/util/__init__.py +++ b/iso/empanadas/empanadas/util/__init__.py @@ -8,6 +8,7 @@ from empanadas.util.check import ( from empanadas.util.shared import ( Shared, + ArchCheck, ) from empanadas.util.dnf_utils import ( diff --git a/iso/empanadas/empanadas/util/iso_utils.py b/iso/empanadas/empanadas/util/iso_utils.py index de93dac..ef1e283 100644 --- a/iso/empanadas/empanadas/util/iso_utils.py +++ b/iso/empanadas/empanadas/util/iso_utils.py @@ -35,7 +35,7 @@ import productmd.treeinfo from jinja2 import Environment, FileSystemLoader from empanadas.common import Color, _rootdir -from empanadas.util import Shared +from empanadas.util import Shared, ArchCheck class IsoBuild: """ @@ -710,6 +710,48 @@ class IsoBuild: Syncs data from a non-disc set of images to the appropriate repo. Repo and image MUST match names for this to work. """ + pathway = os.path.join( + self.compose_latest_sync, + repo, + arch, + 'os' + ) + + src_to_image = os.path.join( + self.lorax_work_dir, + arch, + repo + ) + + if not force_unpack: + found_files = [] + for y in ArchCheck.archfile[arch]: + imgpath = os.path.join( + pathway, + y + ) + if os.path.exists(imgpath): + found_files.append(y) + + if os.path.exists(pathway + '/images/boot.iso'): + found_files.append('/images/boot.iso') + + if len(found_files) > 0: + self.log.warn( + '[' + Color.BOLD + Color.YELLOW + 'WARN' + Color.END + '] ' + + 'Images and data for ' + repo + ' and ' + arch + ' already exists.' + ) + return + + self.log.info( + '[' + Color.BOLD + Color.GREEN + 'INFO' + Color.END + '] ' + + 'Copying images and data for ' + repo + ' ' + arch + ) + try: + shutil.copytree(src_to_image, pathway, copy_function=shutil.copy2, dirs_exist_ok=True) + except: + self.log.error('%s already exists??' 
% repo)
+
     def run_boot_sync(self):
         """
diff --git a/iso/empanadas/empanadas/util/shared.py b/iso/empanadas/empanadas/util/shared.py
index 2661356..7524c2f 100644
--- a/iso/empanadas/empanadas/util/shared.py
+++ b/iso/empanadas/empanadas/util/shared.py
@@ -4,6 +4,29 @@ import os
 import hashlib
 import productmd.treeinfo
 
+class ArchCheck:
+    """
+    Arches and their files
+    """
+    archfile = {
+        'x86_64': [
+            'isolinux/vmlinuz',
+            'images/grub.conf',
+            'EFI/BOOT/BOOTX64.EFI'
+        ],
+        'aarch64': [
+            'EFI/BOOT/BOOTAA64.EFI'
+        ],
+        'ppc64le': [
+            'ppc/bootinfo.txt',
+            'ppc/ppc64/vmlinuz'
+        ],
+        's390x': [
+            'generic.ins',
+            'images/generic.prm'
+        ]
+    }
+
 class Shared:
     """
     Quick utilities that may be commonly used
From 4bc377cd443cb5fbf52b9c533f5f078ead80258c Mon Sep 17 00:00:00 2001
From: Louis Abel
Date: Thu, 30 Jun 2022 15:38:50 -0700
Subject: [PATCH 20/96] Attempt to resolve RLBT#0000133

---
 iso/empanadas/empanadas/scripts/sync_from_peridot_test.py | 2 ++
 iso/empanadas/empanadas/templates/README.tmpl | 6 ++++++
 iso/empanadas/empanadas/util/dnf_utils.py | 6 ++++--
 iso/empanadas/empanadas/util/iso_utils.py | 7 +++++++
 iso/empanadas/empanadas/util/shared.py | 3 ++-
 iso/empanadas/pyproject.toml | 4 ++--
 6 files changed, 23 insertions(+), 5 deletions(-)
 create mode 100644 iso/empanadas/empanadas/templates/README.tmpl

diff --git a/iso/empanadas/empanadas/scripts/sync_from_peridot_test.py b/iso/empanadas/empanadas/scripts/sync_from_peridot_test.py
index 5057753..7678f33 100755
--- a/iso/empanadas/empanadas/scripts/sync_from_peridot_test.py
+++ b/iso/empanadas/empanadas/scripts/sync_from_peridot_test.py
@@ -2,6 +2,7 @@
 
 import argparse
 
+import empanadas
 from empanadas.common import *
 from empanadas.util import Checks
 from empanadas.util import RepoSync
@@ -16,3 +17,4 @@ a = RepoSync(rlvars, config, major="9", repo="BaseOS", parallel=True, ignore_deb
 def run():
     print(rlvars.keys())
     print(rlvars)
+    print(empanadas.__version__)
diff --git a/iso/empanadas/empanadas/templates/README.tmpl b/iso/empanadas/empanadas/templates/README.tmpl
new file mode 100644
index 0000000..726ab3a
--- /dev/null
+++ b/iso/empanadas/empanadas/templates/README.tmpl
@@ -0,0 +1,6 @@
+This set of repositories (or "compose") is for {{ fullname }} and was generated
+using Empanadas {{ version }} from the SIG/Core Toolkit.
+
+As this is not a traditional compose, there will be things that you might be
+expecting and do not see, or not expecting and do see. While we attempted to
+recreate a lot of those elements, it's not perfect.
diff --git a/iso/empanadas/empanadas/util/dnf_utils.py b/iso/empanadas/empanadas/util/dnf_utils.py
index 58480db..ba38234 100644
--- a/iso/empanadas/empanadas/util/dnf_utils.py
+++ b/iso/empanadas/empanadas/util/dnf_utils.py
@@ -930,7 +930,9 @@ class RepoSync:
                         e.strerror
                     )
 
-        # Create metadata here?
+ # Create metadata here + # Create COMPOSE_ID here (this doesn't necessarily match anything, it's + # just an indicator) self.log.info( '[' + Color.BOLD + Color.GREEN + 'INFO' + Color.END + '] ' + @@ -999,7 +1001,7 @@ class RepoSync: self.shortname, self.fullversion, a, - self.timestamp, + int(self.timestamp), repo_name ) else: diff --git a/iso/empanadas/empanadas/util/iso_utils.py b/iso/empanadas/empanadas/util/iso_utils.py index ef1e283..86399cc 100644 --- a/iso/empanadas/empanadas/util/iso_utils.py +++ b/iso/empanadas/empanadas/util/iso_utils.py @@ -723,6 +723,12 @@ class IsoBuild: repo ) + if not os.path.exists(pathway): + self.log.error( + '[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' + + 'Repo and Image variant do NOT match' + ) + if not force_unpack: found_files = [] for y in ArchCheck.archfile[arch]: @@ -747,6 +753,7 @@ class IsoBuild: '[' + Color.BOLD + Color.GREEN + 'INFO' + Color.END + '] ' + 'Copying images and data for ' + repo + ' ' + arch ) + try: shutil.copytree(src_to_image, pathway, copy_function=shutil.copy2, dirs_exist_ok=True) except: diff --git a/iso/empanadas/empanadas/util/shared.py b/iso/empanadas/empanadas/util/shared.py index 7524c2f..085fabb 100644 --- a/iso/empanadas/empanadas/util/shared.py +++ b/iso/empanadas/empanadas/util/shared.py @@ -117,7 +117,8 @@ class Shared: "%s" % timestamp, "%s" % fullname, "%s" % arch, - "ALL" + "ALL", + "" ] with open(file_path, "w+") as f: diff --git a/iso/empanadas/pyproject.toml b/iso/empanadas/pyproject.toml index fb25ee5..82ea835 100644 --- a/iso/empanadas/pyproject.toml +++ b/iso/empanadas/pyproject.toml @@ -1,8 +1,8 @@ [tool.poetry] name = "empanadas" -version = "0.1.0" +version = "0.2.0" description = "hand crafted ISOs with love and spice" -authors = ["Louis Abel ", "Neil Hanlon "] +authors = ["Louis Abel ", "Neil Hanlon "] [tool.poetry.dependencies] python = ">=3.7,<4" From c1f1be93536a9ffd8c8e47b943adb52bc89dd8d4 Mon Sep 17 00:00:00 2001 From: Louis Abel Date: Fri, 1 Jul 2022 13:09:52 -0700 Subject: [PATCH 21/96] Remove hardcodes, add in metadata * Remove hardcodes of "Rocky" to use "shortname" instead * Add in metadata --- README.md | 4 +- iso/empanadas/empanadas/sig/cloud.yaml | 7 ++ iso/empanadas/empanadas/util/dnf_utils.py | 95 ++++++++++++++++++++--- iso/empanadas/empanadas/util/iso_utils.py | 5 +- iso/empanadas/empanadas/util/shared.py | 32 ++++++++ sync/sync-to-prod.sh | 10 +++ sync/sync-to-staging.sh | 10 +++ 7 files changed, 149 insertions(+), 14 deletions(-) diff --git a/README.md b/README.md index 1a88e0d..21bc9ec 100644 --- a/README.md +++ b/README.md @@ -15,7 +15,7 @@ What does this have? * iso -> ISO and Compose related utilities, primarily for Rocky Linux 9+ * live -> Live image related utilities * mangle -> Manglers and other misc stuff -* sync -> Sync tools, primarily for Rocky Linux 8 +* sync -> Sync tools, primarily for Rocky Linux 8 and will eventually be deprecated How can I help? --------------- @@ -26,7 +26,7 @@ when you make changes: * Have pre-commit installed * Have shellcheck installed * Shell Scripts: These must pass a shellcheck test! -* Python scripts: Try your best to follow PEP8 guidelines +* Python scripts: Try your best to follow PEP8 guidelines (even the best linters get things wrong) Your PR should be against the devel branch at all times. PR's against the main branch will be closed. 
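The hunks that follow replace the hardcoded "Rocky" strings with config['shortname'] and introduce a compose ID. A minimal sketch of the naming scheme this produces, with illustrative values only (the real ones come from the yaml configs and rlvars at runtime):

    # Hypothetical values for illustration; not read from a real config.
    shortname = "Rocky"        # config['shortname']
    profile = "9"              # rlvars['profile']
    revision = "9.0"           # rlvars['revision']
    date_stamp = "20220701.1"  # config['date_stamp'] (assumed format)

    # COMPOSE_ID, written into the new metadata directory
    compose_id = "{}-{}-{}".format(shortname, revision, date_stamp)
    print(compose_id)   # Rocky-9.0-20220701.1

    # the "latest" symlink kept beside the dated compose directories
    latest_dir = "latest-{}-{}".format(shortname, profile)
    print(latest_dir)   # latest-Rocky-9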
diff --git a/iso/empanadas/empanadas/sig/cloud.yaml b/iso/empanadas/empanadas/sig/cloud.yaml index f30f94a..e7305c7 100644 --- a/iso/empanadas/empanadas/sig/cloud.yaml +++ b/iso/empanadas/empanadas/sig/cloud.yaml @@ -12,6 +12,13 @@ cloud: - x86_64 project_id: '' '9': + cloud-kernel: + project_id: '' + allowed_arches: + - aarch64 + - x86_64 + - ppc64le + - s390x cloud-common: project_id: '' allowed_arches: diff --git a/iso/empanadas/empanadas/util/dnf_utils.py b/iso/empanadas/empanadas/util/dnf_utils.py index ba38234..681e128 100644 --- a/iso/empanadas/empanadas/util/dnf_utils.py +++ b/iso/empanadas/empanadas/util/dnf_utils.py @@ -97,6 +97,12 @@ class RepoSync: self.extra_files = rlvars['extra_files'] self.gpgkey = gpgkey + self.compose_id = '{}-{}-{}'.format( + config['shortname'], + rlvars['revision'], + config['date_stamp'] + ) + # Templates file_loader = FileSystemLoader(f"{_rootdir}/templates") self.tmplenv = Environment(loader=file_loader) @@ -119,7 +125,10 @@ class RepoSync: self.compose_latest_dir = os.path.join( config['compose_root'], major, - "latest-Rocky-{}".format(self.profile) + "latest-{}-{}".format( + self.shortname, + self.profile + ) ) self.compose_latest_sync = os.path.join( @@ -224,7 +233,7 @@ class RepoSync: self.sync(self.repo, sync_root, work_root, log_root, global_work_root, self.arch) if self.fullrun: - self.deploy_extra_files(global_work_root) + self.deploy_extra_files(sync_root, global_work_root) self.deploy_treeinfo(self.repo, sync_root, self.arch) self.symlink_to_latest(generated_dir) @@ -232,11 +241,13 @@ class RepoSync: self.repoclosure_work(sync_root, work_root, log_root) if self.refresh_extra_files and not self.fullrun: - self.deploy_extra_files(global_work_root) + self.deploy_extra_files(sync_root, global_work_root) if self.refresh_treeinfo and not self.fullrun: self.deploy_treeinfo(self.repo, sync_root, self.arch) + self.deploy_metadata(sync_root) + self.log.info('Compose repo directory: %s' % sync_root) self.log.info('Compose logs: %s' % log_root) self.log.info('Compose completed.') @@ -576,7 +587,11 @@ class RepoSync: """ compose_base_dir = os.path.join( self.compose_base, - "Rocky-{}-{}".format(self.fullversion, self.date_stamp) + "{}-{}-{}".format( + self.shortname, + self.fullversion, + self.date_stamp + ) ) self.log.info('Creating compose directory %s' % compose_base_dir) if not os.path.exists(compose_base_dir): @@ -872,7 +887,7 @@ class RepoSync: for issue in bad_exit_list: self.log.error(issue) - def deploy_extra_files(self, global_work_root): + def deploy_extra_files(self, sync_root, global_work_root): """ deploys extra files based on info of rlvars including a extra_files.json @@ -880,20 +895,27 @@ class RepoSync: might also deploy COMPOSE_ID and maybe in the future a metadata dir with a bunch of compose-esque stuff. """ + self.log.info( + '[' + Color.BOLD + Color.GREEN + 'INFO' + Color.END + '] ' + + 'Deploying treeinfo, discinfo, and media.repo' + ) + cmd = self.git_cmd() tmpclone = '/tmp/clone' extra_files_dir = os.path.join( global_work_root, 'extra-files' ) - self.log.info( - '[' + Color.BOLD + Color.GREEN + 'INFO' + Color.END + '] ' + - 'Deploying extra files to work directory ...' 
+ metadata_dir = os.path.join( + sync_root, + "metadata" ) - if not os.path.exists(extra_files_dir): os.makedirs(extra_files_dir, exist_ok=True) + if not os.path.exists(metadata_dir): + os.makedirs(metadata_dir, exist_ok=True) + clonecmd = '{} clone {} -b {} -q {}'.format( cmd, self.extra_files['git_repo'], @@ -907,6 +929,11 @@ class RepoSync: stderr=subprocess.DEVNULL ) + self.log.info( + '[' + Color.BOLD + Color.GREEN + 'INFO' + Color.END + '] ' + + 'Deploying extra files to work and metadata directories ...' + ) + # Copy files to work root for extra in self.extra_files['list']: src = '/tmp/clone/' + extra @@ -915,6 +942,7 @@ class RepoSync: # exist on our mirrors. try: shutil.copy2(src, extra_files_dir) + shutil.copy2(src, metadata_dir) except: self.log.warn( '[' + Color.BOLD + Color.YELLOW + 'WARN' + Color.END + '] ' + @@ -930,15 +958,48 @@ class RepoSync: e.strerror ) + def deploy_metadata(self, sync_root): + """ + Deploys metadata that defines information about the compose. Some data + will be close to how pungi produces it, but it won't be exact nor a + perfect replica. + """ + self.log.info( + '[' + Color.BOLD + Color.GREEN + 'INFO' + Color.END + '] ' + + 'Deploying metadata for this compose' + ) # Create metadata here # Create COMPOSE_ID here (this doesn't necessarily match anything, it's # just an indicator) + metadata_dir = os.path.join( + sync_root, + "metadata" + ) + + # It should already exist from a full run or refresh. This is just in + # case and it doesn't hurt. + if not os.path.exists(metadata_dir): + os.makedirs(metadata_dir, exist_ok=True) + + with open(metadata_dir + '/COMPOSE_ID') as f: + f.write(self.compose_id) + f.close() + + Shared.write_metadata( + self.timestamp, + self.date_stamp, + self.fullname, + self.fullversion, + self.compose_id, + metadata_dir + 'metadata.json' + ) self.log.info( '[' + Color.BOLD + Color.GREEN + 'INFO' + Color.END + '] ' + - 'Extra files phase completed.' + 'Metadata files phase completed.' ) + def deploy_treeinfo(self, repo, sync_root, arch): """ Deploys initial treeinfo files. These have the potential of being @@ -1181,6 +1242,7 @@ class SigRepoSync: major, repo=None, arch=None, + ignore_debug: bool = False, ignore_source: bool = False, repoclosure: bool = False, refresh_extra_files: bool = False, @@ -1196,6 +1258,7 @@ class SigRepoSync: self.dryrun = dryrun self.fullrun = fullrun self.arch = arch + self.ignore_debug = ignore_debug self.ignore_source = ignore_source self.skip_all = skip_all self.hashed = hashed @@ -1206,9 +1269,15 @@ class SigRepoSync: # Relevant config items self.major_version = major self.date_stamp = config['date_stamp'] + self.timestamp = time.time() self.repo_base_url = config['repo_base_url'] self.compose_root = config['compose_root'] self.compose_base = config['compose_root'] + "/" + major + self.profile = rlvars['profile'] + self.iso_map = rlvars['iso_map'] + self.distname = config['distname'] + self.fullname = rlvars['fullname'] + self.shortname = config['shortname'] # Relevant major version items self.sigvars = sigvars @@ -1217,6 +1286,10 @@ class SigRepoSync: #self.project_id = sigvars['project_id'] self.sigrepo = repo + # Templates + file_loader = FileSystemLoader(f"{_rootdir}/templates") + self.tmplenv = Environment(loader=file_loader) + # each el can have its own designated container to run stuff in, # otherwise we'll just default to the default config. 
self.container = config['container'] @@ -1235,7 +1308,7 @@ class SigRepoSync: self.compose_latest_dir = os.path.join( config['compose_root'], major, - "latest-Rocky-{}-SIG".format(major) + "latest-{}-{}-SIG".format(self.shortname, major) ) self.compose_latest_sync = os.path.join( diff --git a/iso/empanadas/empanadas/util/iso_utils.py b/iso/empanadas/empanadas/util/iso_utils.py index 86399cc..d26a39a 100644 --- a/iso/empanadas/empanadas/util/iso_utils.py +++ b/iso/empanadas/empanadas/util/iso_utils.py @@ -135,7 +135,10 @@ class IsoBuild: self.compose_latest_dir = os.path.join( config['compose_root'], major, - "latest-Rocky-{}".format(self.profile) + "latest-{}-{}".format( + self.shortname, + self.profile + ) ) self.compose_latest_sync = os.path.join( diff --git a/iso/empanadas/empanadas/util/shared.py b/iso/empanadas/empanadas/util/shared.py index 085fabb..407be7a 100644 --- a/iso/empanadas/empanadas/util/shared.py +++ b/iso/empanadas/empanadas/util/shared.py @@ -1,6 +1,7 @@ # These are shared utilities used import os +import json import hashlib import productmd.treeinfo @@ -107,6 +108,37 @@ class Shared: the case of modifying treeinfo for primary repos or images. """ + @staticmethod + def write_metadata( + timestamp, + datestamp, + fullname, + release, + compose_id, + file_path + ): + + metadata = { + "header": { + "name": "empanadas", + "version": "0.2.0", + "type": "toolkit", + "maintainer": "SIG/Core" + }, + "payload": { + "compose": { + "date": datestamp, + "id": compose_id, + "fullname": fullname, + "release": release, + "timestamp": timestamp + } + } + } + + with open(file_path, "w+") as f: + json.dump(metadata, f) + f.close() @staticmethod def discinfo_write(timestamp, fullname, arch, file_path): diff --git a/sync/sync-to-prod.sh b/sync/sync-to-prod.sh index 76045c7..b48b74c 100644 --- a/sync/sync-to-prod.sh +++ b/sync/sync-to-prod.sh @@ -22,6 +22,16 @@ if [ $ret_val -eq "0" ]; then sudo -l && find ** -maxdepth 0 -type l | parallel --will-cite -j 18 sudo rsync -av --chown=10004:10005 --progress --relative --human-readable \ {} "${TARGET}" + # Temporary until empanadas has this support + if [ -f "COMPOSE_ID" ]; then + cp COMPOSE_ID "${TARGET}" + chown 10004:10005 "${TARGET}/COMPOSE_ID" + fi + + if [ -d "metadata" ]; then + rsync -av --chown=10004:10005 --progress --relative --human-readable metadata "${TARGET}" + fi + # Full file list update cd "${PRODUCTION_ROOT}/${CATEGORY_STUB}/" || { echo "Failed to change directory"; exit 1; } # Hardlink everything except xml files diff --git a/sync/sync-to-staging.sh b/sync/sync-to-staging.sh index 1e764c8..d256fd5 100644 --- a/sync/sync-to-staging.sh +++ b/sync/sync-to-staging.sh @@ -25,4 +25,14 @@ if [ $ret_val -eq "0" ]; then # shellcheck disable=SC2035 sudo -l && find **/* -maxdepth 0 -type d | parallel --will-cite -j 18 sudo rsync -av --chown=10004:10005 --progress --relative --human-readable \ {} "${TARGET}" + + # This is temporary until we implement rsync into empanadas + if [ -f "COMPOSE_ID" ]; then + cp COMPOSE_ID "${TARGET}" + chown 10004:10005 "${TARGET}/COMPOSE_ID" + fi + + if [ -d "metadata" ]; then + rsync -av --chown=10004:10005 --progress --relative --human-readable metadata "${TARGET}" + fi fi From e47ca962afadbfd858b604e7b6c6e2fa230d306f Mon Sep 17 00:00:00 2001 From: Louis Abel Date: Fri, 1 Jul 2022 13:13:00 -0700 Subject: [PATCH 22/96] missing write function for compose_id --- iso/empanadas/empanadas/util/dnf_utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/iso/empanadas/empanadas/util/dnf_utils.py 
b/iso/empanadas/empanadas/util/dnf_utils.py index 681e128..c2243f1 100644 --- a/iso/empanadas/empanadas/util/dnf_utils.py +++ b/iso/empanadas/empanadas/util/dnf_utils.py @@ -981,7 +981,7 @@ class RepoSync: if not os.path.exists(metadata_dir): os.makedirs(metadata_dir, exist_ok=True) - with open(metadata_dir + '/COMPOSE_ID') as f: + with open(metadata_dir + '/COMPOSE_ID', "w+") as f: f.write(self.compose_id) f.close() From 412a7ab089f65ef97533c6b9c5cd7d9924fd8ee7 Mon Sep 17 00:00:00 2001 From: Louis Abel Date: Fri, 1 Jul 2022 13:28:24 -0700 Subject: [PATCH 23/96] add json and yaml components --- iso/empanadas/empanadas/util/dnf_utils.py | 4 ++-- iso/empanadas/empanadas/util/shared.py | 11 ++++++++--- 2 files changed, 10 insertions(+), 5 deletions(-) diff --git a/iso/empanadas/empanadas/util/dnf_utils.py b/iso/empanadas/empanadas/util/dnf_utils.py index c2243f1..62bd540 100644 --- a/iso/empanadas/empanadas/util/dnf_utils.py +++ b/iso/empanadas/empanadas/util/dnf_utils.py @@ -988,10 +988,10 @@ class RepoSync: Shared.write_metadata( self.timestamp, self.date_stamp, - self.fullname, + self.distname, self.fullversion, self.compose_id, - metadata_dir + 'metadata.json' + metadata_dir + '/metadata' ) self.log.info( diff --git a/iso/empanadas/empanadas/util/shared.py b/iso/empanadas/empanadas/util/shared.py index 407be7a..be3d33f 100644 --- a/iso/empanadas/empanadas/util/shared.py +++ b/iso/empanadas/empanadas/util/shared.py @@ -3,6 +3,7 @@ import os import json import hashlib +import yaml import productmd.treeinfo class ArchCheck: @@ -136,9 +137,13 @@ class Shared: } } - with open(file_path, "w+") as f: - json.dump(metadata, f) - f.close() + with open(file_path + ".json", "w+") as fp: + json.dump(metadata, fp, indent=4) + fp.close() + + with open(file_path + ".yaml", "w+") as yp: + yaml.dump(metadata, yp) + yp.close() @staticmethod def discinfo_write(timestamp, fullname, arch, file_path): From 2884bb0eaa74c1b037cbc045ce54ca31995fdc16 Mon Sep 17 00:00:00 2001 From: Louis Abel Date: Fri, 1 Jul 2022 14:16:14 -0700 Subject: [PATCH 24/96] Fix source and debuginfo portions --- iso/empanadas/empanadas/util/dnf_utils.py | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/iso/empanadas/empanadas/util/dnf_utils.py b/iso/empanadas/empanadas/util/dnf_utils.py index 62bd540..e2a6b0a 100644 --- a/iso/empanadas/empanadas/util/dnf_utils.py +++ b/iso/empanadas/empanadas/util/dnf_utils.py @@ -340,7 +340,7 @@ class RepoSync: entry_name_list.append(entry_name) - if not self.ignore_debug: + if not self.ignore_debug and not a == 'source': entry_name_list.append(debug_entry_name) entry_point_sh = os.path.join( @@ -444,7 +444,8 @@ class RepoSync: os.chmod(debug_entry_point_sh, 0o755) # We ignoring sources? 
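The hunks in this patch rework the debug and source guards so the 'source' pseudo-architecture is handled explicitly. A rough distillation of the gating that patch 24 ends up with (a hypothetical standalone helper for illustration, not part of the toolkit):

    def should_sync(kind, arch, ignore_debug=False, ignore_source=False):
        # debug repos are never synced for the 'source' pseudo-arch
        if kind == 'debug':
            return not ignore_debug and arch != 'source'
        # sources sync on a full run (no arch given) or an explicit source run
        if kind == 'source':
            return not ignore_source and (not arch or arch == 'source')
        return True

    assert should_sync('debug', 'source') is False
    assert should_sync('source', 'x86_64') is False
    assert should_sync('source', None) is True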
- if not self.ignore_source: + if (not self.ignore_source and not arch) or ( + not self.ignore_source and arch == 'source'): source_entry_name = '{}-source'.format(r) entry_name_list.append(source_entry_name) @@ -1034,6 +1035,9 @@ class RepoSync: arch_tree.append('i686') for a in arch_tree: + if a == 'source': + continue + os_tree_path = os.path.join( sync_root, repo_name, @@ -1096,7 +1100,7 @@ class RepoSync: repo_name + ' ' + a + ' os media.repo already exists' ) - if not self.ignore_debug: + if not self.ignore_debug and not a == 'source': debug_tree_path = os.path.join( sync_root, repo_name, @@ -1160,7 +1164,7 @@ class RepoSync: ) - if not self.ignore_source: + if not self.ignore_source and not arch: source_tree_path = os.path.join( sync_root, repo_name, From c081f6f202eeeab5c7ab4440a72d7eba697f78c5 Mon Sep 17 00:00:00 2001 From: Louis Abel Date: Sun, 3 Jul 2022 00:19:13 -0700 Subject: [PATCH 25/96] test out parts of finalize --- iso/empanadas/empanadas/sig/cloud.yaml | 11 +- iso/empanadas/empanadas/util/dnf_utils.py | 147 +++++++-------- iso/empanadas/empanadas/util/iso_utils.py | 20 +- iso/empanadas/empanadas/util/shared.py | 215 ++++++++++++++++++++++ 4 files changed, 294 insertions(+), 99 deletions(-) diff --git a/iso/empanadas/empanadas/sig/cloud.yaml b/iso/empanadas/empanadas/sig/cloud.yaml index e7305c7..e0ad17a 100644 --- a/iso/empanadas/empanadas/sig/cloud.yaml +++ b/iso/empanadas/empanadas/sig/cloud.yaml @@ -1,6 +1,7 @@ --- cloud: '8': + profile: 'cloud' cloud-kernel: project_id: 'f91da90d-5bdb-4cf2-80ea-e07f8dae5a5c' allowed_arches: @@ -10,7 +11,15 @@ cloud: allowed_arches: - aarch64 - x86_64 - project_id: '' + project_id: 'f91da90d-5bdb-4cf2-80ea-e07f8dae5a5c' + extra_files: + git_repo: 'https://git.rockylinux.org/staging/src/rocky-release-cloud.git' + git_raw_path: 'https://git.rockylinux.org/staging/src/rocky-release-cloud/-/raw/r8/' + branch: 'r8' + gpg: + stable: 'SOURCES/RPM-GPG-KEY-Rocky-SIG-Cloud' + list: + - 'SOURCES/RPM-GPG-KEY-Rocky-SIG-Cloud' '9': cloud-kernel: project_id: '' diff --git a/iso/empanadas/empanadas/util/dnf_utils.py b/iso/empanadas/empanadas/util/dnf_utils.py index e2a6b0a..b275490 100644 --- a/iso/empanadas/empanadas/util/dnf_utils.py +++ b/iso/empanadas/empanadas/util/dnf_utils.py @@ -186,7 +186,13 @@ class RepoSync: # This should create the initial compose dir and set the path. # Otherwise, just use the latest link. if self.fullrun: - generated_dir = self.generate_compose_dirs() + generated_dir = Shared.generate_compose_dirs( + self.compose_base, + self.shortname, + self.fullversion, + self.date_stamp, + self.log + ) work_root = os.path.join( generated_dir, 'work' @@ -292,7 +298,7 @@ class RepoSync: Each container runs their own script wait till all is finished """ - cmd = self.podman_cmd() + cmd = Shared.podman_cmd(self.log) contrunlist = [] bad_exit_list = [] self.log.info('Generating container entries') @@ -582,24 +588,6 @@ class RepoSync: 'No issues detected.' 
) - def generate_compose_dirs(self) -> str: - """ - Generate compose dirs for full runs - """ - compose_base_dir = os.path.join( - self.compose_base, - "{}-{}-{}".format( - self.shortname, - self.fullversion, - self.date_stamp - ) - ) - self.log.info('Creating compose directory %s' % compose_base_dir) - if not os.path.exists(compose_base_dir): - os.makedirs(compose_base_dir) - - return compose_base_dir - def symlink_to_latest(self, generated_dir): """ Emulates pungi and symlinks latest-Rocky-X @@ -628,7 +616,7 @@ class RepoSync: """ fname = os.path.join( dest_path, - "{}-config.repo".format(self.major_version) + "{}-{}-config.repo".format(self.shortname, self.major_version) ) self.log.info('Generating the repo configuration: %s' % fname) @@ -676,60 +664,6 @@ class RepoSync: config_file.close() return fname - def reposync_cmd(self) -> str: - """ - This generates the reposync command. We don't support reposync by - itself and will raise an error. - - :return: The path to the reposync command. If dnf exists, we'll use - that. Otherwise, fail immediately. - """ - cmd = None - if os.path.exists("/usr/bin/dnf"): - cmd = "/usr/bin/dnf reposync" - else: - self.log.error('/usr/bin/dnf was not found. Good bye.') - raise SystemExit("/usr/bin/dnf was not found. \n\n/usr/bin/reposync " - "is not sufficient and you are likely running on an el7 " - "system or a grossly modified EL8+ system, " + Color.BOLD + - "which tells us that you probably made changes to these tools " - "expecting them to work and got to this point." + Color.END) - return cmd - - def podman_cmd(self) -> str: - """ - This generates the podman run command. This is in the case that we want - to do reposyncs in parallel as we cannot reasonably run multiple - instances of dnf reposync on a single system. - """ - cmd = None - if os.path.exists("/usr/bin/podman"): - cmd = "/usr/bin/podman" - else: - self.log.error('/usr/bin/podman was not found. Good bye.') - raise SystemExit("\n\n/usr/bin/podman was not found.\n\nPlease " - " ensure that you have installed the necessary packages on " - " this system. " + Color.BOLD + "Note that docker is not " - "supported." + Color.END - ) - return cmd - - def git_cmd(self) -> str: - """ - This generates the git command. This is when we need to pull down extra - files or do work from a git repository. - """ - cmd = None - if os.path.exists("/usr/bin/git"): - cmd = "/usr/bin/git" - else: - self.log.error('/usr/bin/git was not found. Good bye.') - raise SystemExit("\n\n/usr/bin/git was not found.\n\nPlease " - " ensure that you have installed the necessary packages on " - " this system. " - ) - return cmd - def repoclosure_work(self, sync_root, work_root, log_root): """ This is where we run repoclosures, based on the configuration of each @@ -740,7 +674,7 @@ class RepoSync: against itself. (This means BaseOS should be able to survive by itself.) """ - cmd = self.podman_cmd() + cmd = Shared.podman_cmd(self.log) entries_dir = os.path.join(work_root, "entries") bad_exit_list = [] @@ -901,7 +835,7 @@ class RepoSync: 'Deploying treeinfo, discinfo, and media.repo' ) - cmd = self.git_cmd() + cmd = Shared.git_cmd(self.log) tmpclone = '/tmp/clone' extra_files_dir = os.path.join( global_work_root, @@ -1231,7 +1165,57 @@ class RepoSync: work/isos to compose/isos, and combines all checksum files per arch into a final CHECKSUM file. """ + # latest-X-Y should exist at all times for this to work. 
+ work_root = os.path.join( + self.compose_latest_dir, + 'work' + ) + sync_root = self.compose_latest_sync + sync_iso_root = os.path.join( + sync_root, + 'isos' + ) + + tmp_dir = os.path.join( + self.compose_root, + 'partitions' + ) + + # Verify if the link even exists + if not os.path.exists(self.compose_latest_dir): + self.log.error('!! Latest compose link is broken does not exist: %s' % self.compose_latest_dir) + self.log.error('!! Please perform a full run if you have not done so.') + raise SystemExit() + + log_root = os.path.join( + work_root, + "logs", + self.date_stamp + ) + + iso_root = os.path.join( + work_root, + "isos" + ) + + global_work_root = os.path.join( + work_root, + "global", + ) + + self.log.info( + '[' + Color.BOLD + Color.GREEN + 'INFO' + Color.END + '] ' + + 'Starting to sync ISOs to compose' + ) + + iso_result = Shared.fpsync_method(iso_root, sync_iso_root, self.log, tmp_dir) + + if not iso_result: + self.log.error( + '[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' + + 'Sync failed' + ) class SigRepoSync: """ @@ -1278,6 +1262,7 @@ class SigRepoSync: self.compose_root = config['compose_root'] self.compose_base = config['compose_root'] + "/" + major self.profile = rlvars['profile'] + self.sigprofile = sigvars['profile'] self.iso_map = rlvars['iso_map'] self.distname = config['distname'] self.fullname = rlvars['fullname'] @@ -1312,7 +1297,11 @@ class SigRepoSync: self.compose_latest_dir = os.path.join( config['compose_root'], major, - "latest-{}-{}-SIG".format(self.shortname, major) + "latest-{}-{}-SIG-{}".format( + self.shortname, + major, + self.sigprofile + ) ) self.compose_latest_sync = os.path.join( @@ -1345,7 +1334,7 @@ class SigRepoSync: self.log.info('sig reposync init') self.log.info(major) - #self.dnf_config = self.generate_conf() + #self.dnf_config = Shared.generate_conf() def run(self): """ diff --git a/iso/empanadas/empanadas/util/iso_utils.py b/iso/empanadas/empanadas/util/iso_utils.py index d26a39a..e402c4a 100644 --- a/iso/empanadas/empanadas/util/iso_utils.py +++ b/iso/empanadas/empanadas/util/iso_utils.py @@ -1156,7 +1156,7 @@ class IsoBuild: to the compose directories. It's the same as if you were doing a reposync of the repositories. """ - cmd = self.podman_cmd() + cmd = Shared.podman_cmd(self.log) entries_dir = os.path.join(work_root, "entries") isos_dir = os.path.join(work_root, "isos") bad_exit_list = [] @@ -1766,24 +1766,6 @@ class IsoBuild: returned_cmd = ' '.join(cmd) return returned_cmd - def podman_cmd(self) -> str: - """ - This generates the podman run command. This is in the case that we want - to do reposyncs in parallel as we cannot reasonably run multiple - instances of dnf reposync on a single system. - """ - cmd = None - if os.path.exists("/usr/bin/podman"): - cmd = "/usr/bin/podman" - else: - self.log.error('/usr/bin/podman was not found. Good bye.') - raise SystemExit("\n\n/usr/bin/podman was not found.\n\nPlease " - " ensure that you have installed the necessary packages on " - " this system. " + Color.BOLD + "Note that docker is not " - "supported." + Color.END - ) - return cmd - class LiveBuild: """ This helps us build the live images for Rocky Linux. 
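The shared.py hunk below centralizes these helpers; its fpsync_method is what run_compose_closeout above calls to move ISOs into the compose. A quick sketch of the command it assembles, with illustrative paths (the real ones derive from the compose directories):

    import shlex

    rsync_switches = '-av --numeric-ids --no-compress --chown=10004:10005'
    tmp_dir = '/mnt/compose/partitions'               # assumed location
    src = '/mnt/compose/9/latest-Rocky-9/work/isos'   # assumed location
    dest = '/mnt/compose/9/latest-Rocky-9/compose/isos'

    fpsync_cmd = '/usr/bin/fpsync -o "{}" -n 18 -t {} {} {}'.format(
        rsync_switches, tmp_dir, src, dest
    )
    # shlex.split keeps the double-quoted rsync switches together as a
    # single argument for subprocess.call:
    print(shlex.split(fpsync_cmd)[:3])
    # ['/usr/bin/fpsync', '-o', '-av --numeric-ids --no-compress --chown=10004:10005']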
diff --git a/iso/empanadas/empanadas/util/shared.py b/iso/empanadas/empanadas/util/shared.py
index be3d33f..c6b8eb8 100644
--- a/iso/empanadas/empanadas/util/shared.py
+++ b/iso/empanadas/empanadas/util/shared.py
@@ -3,8 +3,11 @@
 import os
 import json
 import hashlib
+import shlex
+import subprocess
 import yaml
 import productmd.treeinfo
+from empanadas.common import Color
 
 class ArchCheck:
     """
@@ -179,3 +182,215 @@ class Shared:
 
         with open(file_path, "w") as f:
             f.write("\n".join(data))
+
+    @staticmethod
+    def generate_compose_dirs(
+            compose_base,
+            shortname,
+            version,
+            date_stamp,
+            logger
+    ) -> str:
+        """
+        Generate compose dirs for full runs
+        """
+        compose_base_dir = os.path.join(
+                compose_base,
+                "{}-{}-{}".format(
+                    shortname,
+                    version,
+                    date_stamp
+                )
+        )
+        logger.info('Creating compose directory %s' % compose_base_dir)
+        if not os.path.exists(compose_base_dir):
+            os.makedirs(compose_base_dir)
+
+        return compose_base_dir
+
+    @staticmethod
+    def podman_cmd(logger) -> str:
+        """
+        This generates the podman run command. This is in the case that we want
+        to do reposyncs in parallel as we cannot reasonably run multiple
+        instances of dnf reposync on a single system.
+        """
+        cmd = None
+        if os.path.exists("/usr/bin/podman"):
+            cmd = "/usr/bin/podman"
+        else:
+            logger.error('/usr/bin/podman was not found. Good bye.')
+            raise SystemExit("\n\n/usr/bin/podman was not found.\n\nPlease "
+                    " ensure that you have installed the necessary packages on "
+                    " this system. " + Color.BOLD + "Note that docker is not "
+                    "supported." + Color.END
+            )
+        return cmd
+
+    @staticmethod
+    def reposync_cmd(logger) -> str:
+        """
+        This generates the reposync command. We don't support reposync by
+        itself and will raise an error.
+
+        :return: The path to the reposync command. If dnf exists, we'll use
+                 that. Otherwise, fail immediately.
+        """
+        cmd = None
+        if os.path.exists("/usr/bin/dnf"):
+            cmd = "/usr/bin/dnf reposync"
+        else:
+            logger.error('/usr/bin/dnf was not found. Good bye.')
+            raise SystemExit("/usr/bin/dnf was not found. \n\n/usr/bin/reposync "
+                    "is not sufficient and you are likely running on an el7 "
+                    "system or a grossly modified EL8+ system, " + Color.BOLD +
+                    "which tells us that you probably made changes to these tools "
+                    "expecting them to work and got to this point." + Color.END)
+        return cmd
+
+    @staticmethod
+    def git_cmd(logger) -> str:
+        """
+        This generates the git command. This is when we need to pull down extra
+        files or do work from a git repository.
+        """
+        cmd = None
+        if os.path.exists("/usr/bin/git"):
+            cmd = "/usr/bin/git"
+        else:
+            logger.error('/usr/bin/git was not found. Good bye.')
+            raise SystemExit("\n\n/usr/bin/git was not found.\n\nPlease "
+                    " ensure that you have installed the necessary packages on "
+                    " this system. "
+            )
+        return cmd
+
+    @staticmethod
+    def generate_conf(data, logger, dest_path='/var/tmp') -> str:
+        """
+        Generates the necessary repo conf file for the operation. This repo
+        file should be temporary in nature. This will generate a repo file
+        with all repos by default. If a repo is chosen for sync, that will be
+        the only one synced.
+ + :param dest_path: The destination where the temporary conf goes + :param repo: The repo object to create a file for + """ + fname = os.path.join( + dest_path, + "{}-{}-config.repo".format(data.shortname, data.major_version) + ) + data.log.info('Generating the repo configuration: %s' % fname) + + if data.repo_base_url.startswith("/"): + logger.error("Local file syncs are not supported.") + raise SystemExit(Color.BOLD + "Local file syncs are not " + "supported." + Color.END) + + prehashed = '' + if data.hashed: + prehashed = "hashed-" + # create dest_path + if not os.path.exists(dest_path): + os.makedirs(dest_path, exist_ok=True) + config_file = open(fname, "w+") + repolist = [] + for repo in data.repos: + + constructed_url = '{}/{}/repo/{}{}/$basearch'.format( + data.repo_base_url, + data.project_id, + prehashed, + repo, + ) + + constructed_url_src = '{}/{}/repo/{}{}/src'.format( + data.repo_base_url, + data.project_id, + prehashed, + repo, + ) + + repodata = { + 'name': repo, + 'baseurl': constructed_url, + 'srcbaseurl': constructed_url_src, + 'gpgkey': data.extra_files['git_raw_path'] + data.extra_files['gpg'][data.gpgkey] + } + repolist.append(repodata) + + template = data.tmplenv.get_template('repoconfig.tmpl') + output = template.render(repos=repolist) + config_file.write(output) + + config_file.close() + return fname + + @staticmethod + def quick_sync(src, dest, logger, tmp_dir): + """ + Does a quick sync from one place to another. This determines the method + in which will be used. We will look for fpsync and fall back to + parallel | rsync if that is also available. It will fail if parallel is + not available. + + Return true or false on completion? + """ + + @staticmethod + def simple_sync(src, dest): + """ + This is for simple syncs only, using rsync or copytree. + """ + + @staticmethod + def fpsync_method(src, dest, logger, tmp_dir): + """ + Returns a list for the fpsync command + """ + cmd = '/usr/bin/fpsync' + rsync_switches = '-av --numeric-ids --no-compress --chown=10004:10005' + if not os.path.exists(cmd): + logger.warn( + '[' + Color.BOLD + Color.YELLOW + 'WARN' + Color.END + '] ' + + 'fpsync not found' + ) + return False + + os.makedirs(tmp_dir, exist_ok=True) + + fpsync_cmd = '{} -o "{}" -n 18 -t {} {} {}'.format( + cmd, + rsync_switches, + tmp_dir, + src, + dest + ) + + process = subprocess.call( + shlex.split(fpsync_cmd), + stdout=subprocess.DEVNULL, + stderr=subprocess.DEVNULL, + ) + if process != 0: + logger.error( + '[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' + + 'fpsync failed' + ) + return False + + if os.path.exists(dest): + return True + else: + logger.error( + '[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' + + 'Path synced does not seem to exist for some reason.' + ) + return False + + @staticmethod + def rsync_method(src, dest, logger, tmp_dir): + """ + Returns a string for the rsync command plus parallel. Yes, this is a + hack. 
+ """ From 462ea264b3c8fde4ed028b7df6ed000a958dff96 Mon Sep 17 00:00:00 2001 From: Louis Abel Date: Sun, 3 Jul 2022 00:22:00 -0700 Subject: [PATCH 26/96] add finalize script to mix --- .../empanadas/scripts/finalize_compose.py | 35 +++++++++++++++++++ iso/empanadas/pyproject.toml | 1 + 2 files changed, 36 insertions(+) create mode 100755 iso/empanadas/empanadas/scripts/finalize_compose.py diff --git a/iso/empanadas/empanadas/scripts/finalize_compose.py b/iso/empanadas/empanadas/scripts/finalize_compose.py new file mode 100755 index 0000000..9cc8139 --- /dev/null +++ b/iso/empanadas/empanadas/scripts/finalize_compose.py @@ -0,0 +1,35 @@ +# This script can be called to do single syncs or full on syncs. + +import argparse + +from empanadas.common import * +from empanadas.util import Checks +from empanadas.util import RepoSync + +# Start up the parser baby +parser = argparse.ArgumentParser(description="Peridot Sync and Compose") + +# All of our options +parser.add_argument('--release', type=str, help="Major Release Version or major-type (eg 9-beta)", required=True) +parser.add_argument('--arch', type=str, help="Architecture") +parser.add_argument('--logger', type=str) + +# Parse them +results = parser.parse_args() +rlvars = rldict[results.release] +major = rlvars['major'] + +r = Checks(rlvars, config['arch']) +r.check_valid_arch() + +# Send them and do whatever I guess +a = RepoSync( + rlvars, + config, + major=major, + arch=results.arch, + logger=results.logger, +) + +def run(): + a.run_compose_closeout() diff --git a/iso/empanadas/pyproject.toml b/iso/empanadas/pyproject.toml index 82ea835..3f25a19 100644 --- a/iso/empanadas/pyproject.toml +++ b/iso/empanadas/pyproject.toml @@ -29,6 +29,7 @@ build-iso-extra = "empanadas.scripts.build_iso_extra:run" pull-unpack-tree = "empanadas.scripts.pull_unpack_tree:run" launch-builds = "empanadas.scripts.launch_builds:run" build-image = "empanadas.scripts.build_image:run" +finalize_compose = "empanadas.scripts.finalize_compose:run" [build-system] requires = ["poetry-core>=1.0.0"] From 957bf5ef3f80c86399cca7570f232262eb9fb4eb Mon Sep 17 00:00:00 2001 From: Louis Abel Date: Sun, 3 Jul 2022 14:08:59 -0700 Subject: [PATCH 27/96] change comments briefly before merging changes --- iso/empanadas/empanadas/util/dnf_utils.py | 3 ++- iso/empanadas/empanadas/util/shared.py | 3 ++- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/iso/empanadas/empanadas/util/dnf_utils.py b/iso/empanadas/empanadas/util/dnf_utils.py index b275490..119933c 100644 --- a/iso/empanadas/empanadas/util/dnf_utils.py +++ b/iso/empanadas/empanadas/util/dnf_utils.py @@ -173,7 +173,8 @@ class RepoSync: * Dry runs only create initial directories and structure * Full runs sync everything from the top and setup structure, - including creating a symlink to latest-Rocky-X + including creating a symlink to latest-Rocky-X and creating the + kickstart directories * self.repo is ignored during full runs (noted in stdout) * self.arch being set will force only that arch to sync """ diff --git a/iso/empanadas/empanadas/util/shared.py b/iso/empanadas/empanadas/util/shared.py index c6b8eb8..eca879d 100644 --- a/iso/empanadas/empanadas/util/shared.py +++ b/iso/empanadas/empanadas/util/shared.py @@ -86,7 +86,8 @@ class Shared: """ Writes really basic treeinfo, this is for single repository treeinfo data. This is usually called in the case of a fresh run and each repo - needs one. + needs one. This basic info may be overwritten later either by lorax + data or a full run. 
""" ti = productmd.treeinfo.TreeInfo() ti.release.name = distname From 361c155481aa3b76b958cb57238fb0d28500325d Mon Sep 17 00:00:00 2001 From: Neil Hanlon Date: Sun, 3 Jul 2022 22:27:08 -0400 Subject: [PATCH 28/96] Containerize and refactor image building * Add new fedora container to run imagefactory * Make architecture class from py attrs and make it raise exceptions * Change build-image script to primarily invoke imagefactory directly. A second wrapper will be added to support running in kubernetes if needed. --- iso/empanadas/Containerfile.imagefactory | 68 +++ iso/empanadas/empanadas/common.py | 41 +- .../empanadas/scripts/build_image.py | 413 ++++++++++++++---- .../empanadas/templates/icicle/tdl.xml.tmpl | 2 +- iso/empanadas/empanadas/util/imagebuild.py | 0 iso/empanadas/poetry.lock | 54 +-- iso/empanadas/pyproject.toml | 1 + 7 files changed, 441 insertions(+), 138 deletions(-) create mode 100644 iso/empanadas/Containerfile.imagefactory create mode 100644 iso/empanadas/empanadas/util/imagebuild.py diff --git a/iso/empanadas/Containerfile.imagefactory b/iso/empanadas/Containerfile.imagefactory new file mode 100644 index 0000000..c370ebd --- /dev/null +++ b/iso/empanadas/Containerfile.imagefactory @@ -0,0 +1,68 @@ +FROM docker.io/fedora:36 + +ADD images/get_arch /get_arch + +ENV TINI_VERSION v0.19.0 +RUN curl -o /tini -L "https://github.com/krallin/tini/releases/download/${TINI_VERSION}/tini-$(/get_arch)" +RUN chmod +x /tini + +RUN dnf install -y \ + bash \ + bzip2 \ + cpio \ + diffutils \ + findutils \ + gawk \ + gcc \ + gcc-c++ \ + git \ + grep \ + gzip \ + info \ + make \ + patch \ + python3 \ + redhat-rpm-config \ + rpm-build \ + scl-utils-build \ + sed \ + shadow-utils \ + tar \ + unzip \ + util-linux \ + which \ + xz \ + dnf-plugins-core \ + createrepo_c \ + rpm-sign \ + sudo \ + mock \ + python-pip \ + imagefactory \ + imagefactory-plugins* + +RUN sed -i -e 's/# memory = 1024/memory = 2048/' /etc/oz/oz.cfg + +RUN ssh-keygen -t rsa -q -f "$HOME/.ssh/id_rsa" -N "" +RUN dnf clean all +RUN rm -rf /etc/yum.repos.d/*.repo /get_arch +# RUN useradd -o -d /var/peridot -u 1002 peridotbuilder && usermod -a -G mock peridotbuilder +# RUN chown -R peridotbuilder:mock /etc/dnf && chown -R peridotbuilder:mock /etc/rpm && chown -R peridotbuilder:mock /etc/yum.repos.d && chown -R peridotbuilder:mock /var/lib/imagefactory/storage + +RUN pip install awscli + +ENV BRANCH r9 +RUN git clone https://git.rockylinux.org/rocky/kickstarts.git --branch $BRANCH /kickstarts +RUN cp /kickstarts/Rocky-9-Container.ks /kickstarts/Rocky-9-Container-Base.ks +RUN sed -i "s/\$basearch/$(uname -m)/" /kickstarts/Rocky-9-Container-Base.ks + +# devel only +COPY . 
/empanadas +RUN pip install -e /empanadas + +# prod +#RUN pip install 'git+https://git.rockylinux.org/release-engineering/public/toolkit.git@devel#egg=empanadas&subdirectory=iso/empanadas' + +ENV LIBGUESTFS_BACKEND direct + +ENTRYPOINT ["/tini", "--"] diff --git a/iso/empanadas/empanadas/common.py b/iso/empanadas/empanadas/common.py index edb2533..796f472 100644 --- a/iso/empanadas/empanadas/common.py +++ b/iso/empanadas/empanadas/common.py @@ -97,24 +97,35 @@ for conf in glob.iglob(f"{_rootdir}/sig/*.yaml"): #COMPOSE_ISO_WORKDIR = COMPOSE_ROOT + "work/" + arch + "/" + date_stamp -def valid_type_variant(_type: str, variant: str="") -> Tuple[bool, str]: - ALLOWED_TYPE_VARIANTS = { - "Container": ["Base", "Minimal"], - "GenericCloud": [], - } - +ALLOWED_TYPE_VARIANTS = { + "Azure": None, + "Container": ["Base", "Minimal"], + "EC2": None, + "GenericCloud": None, + "Vagrant": ["Libvirt", "VBox"] +} +def valid_type_variant(_type: str, variant: str="") -> bool: if _type not in ALLOWED_TYPE_VARIANTS: - return False, f"Type is invalid: ({_type}, {variant})" - elif variant not in ALLOWED_TYPE_VARIANTS[_type]: + raise Exception(f"Type is invalid: ({_type}, {variant})") + if ALLOWED_TYPE_VARIANTS[_type] == None: + if variant is not None: + raise Exception(f"{_type} Type expects no variant type.") + return True + if variant not in ALLOWED_TYPE_VARIANTS[_type]: if variant.capitalize() in ALLOWED_TYPE_VARIANTS[_type]: - return False, f"Capitalization mismatch. Found: ({_type}, {variant}). Expected: ({_type}, {variant.capitalize()})" - return False, f"Type/Variant Combination is not allowed: ({_type}, {variant})" - return True, "" + raise Exception(f"Capitalization mismatch. Found: ({_type}, {variant}). Expected: ({_type}, {variant.capitalize()})") + raise Exception(f"Type/Variant Combination is not allowed: ({_type}, {variant})") + return True -class Architecture(str): - @staticmethod - def New(architecture: str, version: int): +from attrs import define, field +@define +class Architecture: + name: str = field() + version: str = field() + + @classmethod + def New(cls, architecture: str, version: int): if architecture not in rldict[version]["allowed_arches"]: print("Invalid architecture/version combo, skipping") exit() - return Architecture(architecture) + return cls(architecture, version) diff --git a/iso/empanadas/empanadas/scripts/build_image.py b/iso/empanadas/empanadas/scripts/build_image.py index 8a7f373..49e3b6e 100644 --- a/iso/empanadas/empanadas/scripts/build_image.py +++ b/iso/empanadas/empanadas/scripts/build_image.py @@ -3,12 +3,21 @@ import argparse import datetime +import json +import logging +import subprocess +import sys +import time import os import tempfile import pathlib +import platform + +from botocore import args +from attrs import define, Factory, field, asdict from jinja2 import Environment, FileSystemLoader, Template -from typing import List, Tuple +from typing import Callable, List, NoReturn, Optional, Tuple, IO, Union from empanadas.common import Architecture, rldict, valid_type_variant from empanadas.common import _rootdir @@ -22,123 +31,337 @@ parser.add_argument('--debug', action='store_true', help="debug?") parser.add_argument('--type', type=str, help="Image type (container, genclo, azure, aws, vagrant)", required=True) parser.add_argument('--variant', type=str, help="", required=False) parser.add_argument('--release', type=str, help="Image release for subsequent builds with the same date stamp (rarely needed)", required=False) +parser.add_argument('--kube', 
action='store_true', help="output as a K8s job(s)", required=False) + results = parser.parse_args() rlvars = rldict[results.version] major = rlvars["major"] + +debug = results.debug + +log = logging.getLogger(__name__) +log.setLevel(logging.INFO if not debug else logging.DEBUG) +handler = logging.StreamHandler(sys.stdout) +handler.setLevel(logging.INFO if not debug else logging.DEBUG) +formatter = logging.Formatter( + '%(asctime)s :: %(name)s :: %(message)s', + '%Y-%m-%d %H:%M:%S' +) +handler.setFormatter(formatter) +log.addHandler(handler) + STORAGE_DIR = pathlib.Path("/var/lib/imagefactory/storage") KICKSTART_PATH = pathlib.Path(os.environ.get("KICKSTART_PATH", "/kickstarts")) BUILDTIME = datetime.datetime.utcnow() +@define(kw_only=True) +class ImageBuild: + architecture: Architecture = field() + base_uuid: Optional[str] = field(default="") + command_args: List[str] = field(factory=list) + common_args: List[str] = field(factory=list) + debug: bool = field(default=False) + image_type: str = field() + job_template: Optional[Template] = field(init=False) + kickstart_arg: List[str] = field(factory=list) + out_type: str = field(init=False) + outdir: pathlib.Path = field(init=False) + outname: str = field(init=False) + package_args: List[str] = field(factory=list) + target_uuid: Optional[str] = field(default="") + tdl_path: pathlib.Path = field(init=False) + template: Template = field() + type_variant: str = field(init=False) + stage_commands: Optional[List[List[Union[str,Callable]]]] = field(init=False) + variant: Optional[str] = field() + revision: Optional[int] = field() + metadata: pathlib.Path = field(init=False) + fedora_release: int = field() -def render_icicle_template(template: Template, architecture: Architecture) -> str: - handle, output = tempfile.mkstemp() - if not handle: - exit(3) - with os.fdopen(handle, "wb") as tmp: - _template = template.render( - architecture=architecture, - fedora_version=rlvars["fedora_release"], - iso8601date=BUILDTIME.strftime("%Y%m%d"), - installdir="kickstart" if results.kickstartdir else "os", - major=major, - release=results.release if results.release else 0, - size="10G", - type=results.type.capitalize(), - utcnow=BUILDTIME, - version_variant=rlvars["revision"] if not results.variant else f"{rlvars['revision']}-{results.variant.capitalize()}", - ) - tmp.write(_template.encode()) - return output - - -def generate_kickstart_imagefactory_args(debug: bool = False) -> str: - type_variant = results.type if not results.variant else f"{results.type}-{results.variant}" # todo -cleanup - kickstart_path = pathlib.Path(f"{KICKSTART_PATH}/Rocky-{major}-{type_variant}.ks") - - if not kickstart_path.is_file(): - print(f"Kickstart file is not available: {kickstart_path}") - if not debug: + def __attrs_post_init__(self): + self.tdl_path = self.render_icicle_template() + if not self.tdl_path: exit(2) + self.type_variant = self.type_variant_name() + self.outname = self.output_name() + self.outdir = pathlib.Path(f"/tmp/{self.outname}") + self.out_type = self.image_format() + self.command_args = self._command_args() + self.package_args = self._package_args() + self.common_args = self._common_args() + self.kickstart_arg = self.kickstart_imagefactory_args() - return f"--file-parameter install_script {kickstart_path}" + self.metadata = pathlib.Path(self.outdir, "metadata.json") -def get_image_format(_type: str) -> str: - mapping = { - "Container": "docker" - } - return mapping[_type] if _type in mapping.keys() else '' - -def generate_imagefactory_commands(tdl_template: 
Template, architecture: Architecture) -> List[List[str]]: - template_path = render_icicle_template(tdl_template, architecture) - if not template_path: - exit(2) - - args_mapping = { - "debug": "--debug" - } - - # only supports boolean flags right now? - args = [param for name, param in args_mapping.items() if getattr(results,name)] - package_args = [] - - kickstart_arg = generate_kickstart_imagefactory_args(True) # REMOVE DEBUG ARG - - if results.type == "Container": - args += ["--parameter", "offline_icicle", "true"] - package_args += ["--parameter", "compress", "xz"] - tar_command = ["tar", "-Oxf", f"{STORAGE_DIR}/*.body" "./layer.tar"] - - type_variant = results.type if not results.variant else f"{results.type}-{results.variant}" # todo -cleanup - outname = f"Rocky-{rlvars['major']}-{type_variant}.{BUILDTIME.strftime('%Y%m%d')}.{results.release if results.release else 0}.{architecture}" - - outdir = pathlib.Path(f"/tmp/{outname}") - - build_command = (f"imagefactory base_image {kickstart_arg} {' '.join(args)} {template_path}" - f" | tee -a {outdir}/logs/base_image-{outname}.out" - f" | tail -n4 > {outdir}/base.meta || exit 2" - ) - - - out_type = get_image_format(results.type) - package_command = ["imagefactory", "target_image", *args, template_path, - "--id", "$(awk '$1==\"UUID:\"{print $NF}'"+f" /tmp/{outname}/base.meta)", - *package_args, - "--parameter", "repository", outname, out_type, - "|", "tee", "-a", f"{outdir}/base_image-{outname}.out", - "|", "tail", "-n4", ">", f"{outdir}/target.meta", "||", "exit", "3" + if self.image_type == "Container": + self.stage_commands = [ + ["tar", "-C", f"{self.outdir}", "--strip-components=1", "-x", "-f", lambda: f"{STORAGE_DIR}/{self.target_uuid}.body", "*/layer.tar"] + ] + if self.image_type == "GenericCloud": + self.stage_commands = [ + ["qemu-img", "convert", "-f", "raw", "-O", "qcow2", lambda: f"{STORAGE_DIR}/{self.target_uuid}.body", f"{self.outdir}/{self.outname}.qcow2"] ] - copy_command = (f"aws s3 cp --recursive {outdir}/* s3://resf-empanadas/buildimage-{ outname }/{ BUILDTIME.strftime('%s') }/") - commands = [build_command, package_command, copy_command] - return commands + try: + os.mkdir(self.outdir) + except FileExistsError as e: + log.info("Directory already exists for this release. 
If possible, previously executed steps may be skipped") + except Exception as e: + log.exception("Some other exception occured while creating the output directory", e) + return 0 + + if os.path.exists(self.metadata): + with open(self.metadata, "r") as f: + try: + o = json.load(f) + self.base_uuid = o['base_uuid'] + self.target_uuid = o['target_uuid'] + except json.decoder.JSONDecodeError as e: + log.exception("Couldn't decode metadata file", e) + + def output_name(self): + return f"Rocky-{self.architecture.version}-{self.type_variant}.{BUILDTIME.strftime('%Y%m%d')}.{results.release if results.release else 0}.{self.architecture.name}" + def type_variant_name(self): + return self.image_type if not self.variant else f"{self.image_type}-{self.variant.capitalize()}" + + def _command_args(self): + args_mapping = { + "debug": "--debug" + } + return [param for name, param in args_mapping.items() if getattr(results,name)] + + def _package_args(self) -> List[str]: + if results.type == "Container": + return ["--parameter", "compress", "xz"] + return [""] + + def _common_args(self) -> List[str]: + args = [] + if self.image_type == "Container": + args = ["--parameter", "offline_icicle", "true"] + if self.image_type == "GenericCloud": + args = ["--parameter", "generate_icicle", "false"] + return args + + def image_format(self) -> str: + mapping = { + "Container": "docker" + } + return mapping[self.image_type] if self.image_type in mapping.keys() else '' + + def kickstart_imagefactory_args(self) -> List[str]: + kickstart_path = pathlib.Path(f"{KICKSTART_PATH}/Rocky-{self.architecture.version}-{self.type_variant}.ks") + + if not kickstart_path.is_file(): + log.warn(f"Kickstart file is not available: {kickstart_path}") + if not debug: + log.warn("Exiting because debug mode is not enabled.") + exit(2) + + return ["--file-parameter", "install_script", str(kickstart_path)] + + def render_icicle_template(self) -> pathlib.Path: + handle, output = tempfile.mkstemp() + if not handle: + exit(3) + with os.fdopen(handle, "wb") as tmp: + _template = self.template.render( + architecture=self.architecture.name, + fedora_version=self.fedora_release, + iso8601date=BUILDTIME.strftime("%Y%m%d"), + installdir="kickstart" if results.kickstartdir else "os", + major=self.architecture.version, + release=results.release if results.release else 0, + size="10G", + type=self.image_type, + utcnow=BUILDTIME, + version_variant=self.revision if not self.variant else f"{self.revision}-{self.variant}", + ) + tmp.write(_template.encode()) + tmp.flush() + return pathlib.Path(output) + + def build_command(self) -> List[str]: + build_command = ["imagefactory", *self.command_args, "base_image", *self.common_args, *self.kickstart_arg, self.tdl_path + # "|", "tee", "-a", f"{outdir}/logs/base_image-{outname}.out", + # "|", "tail", "-n4", ">", f"{outdir}/base.meta", "||", "exit", "2" + ] + return build_command + def package_command(self) -> List[str]: + package_command = ["imagefactory", *self.command_args, "target_image", self.out_type, *self.common_args, + "--id", f"{self.base_uuid}", + *self.package_args, + "--parameter", "repository", self.outname, + # "|", "tee", "-a", f"{outdir}/base_image-{outname}.out", + # "|", "tail", "-n4", ">", f"{outdir}/target.meta", "||", "exit", "3" + ] + return package_command + + def copy_command(self) -> List[str]: + + copy_command = ["aws", "s3", "cp", "--recursive", f"{self.outdir}/", f"s3://resf-empanadas/buildimage-{ self.outname }/{ BUILDTIME.strftime('%s') }/"] + + return copy_command + + def build(self) 
-> int:
+        if self.base_uuid:
+            return 0
+
+        ret, out, err, uuid = self.runCmd(self.build_command())
+        if ret > 0:
+            #error in build command
+            log.error("Problem during build.")
+        if not uuid:
+            log.error("Build UUID not found in stdout. Dumping stdout and stderr")
+            self.log_subprocess(ret, out, err)
+            return ret
+        self.base_uuid = uuid.rstrip()
+        self.save()
+        return ret
+
+    def package(self) -> int:
+        # Some build types don't need to be packaged by imagefactory
+        if self.image_type == "GenericCloud":
+            self.target_uuid = self.base_uuid if hasattr(self, 'base_uuid') else ""
+
+        if self.target_uuid:
+            return 0
+
+        ret, out, err, uuid = self.runCmd(self.package_command())
+        if ret > 0:
+            log.error("Problem during packaging")
+        if not uuid:
+            log.error("Target Image UUID not found in stdout. Dumping stdout and stderr")
+            self.log_subprocess(ret, out, err)
+            return ret
+        self.target_uuid = uuid.rstrip()
+        self.save()
+        return ret
+
+    def stage(self) -> int:
+        """ Stage the artifacts from wherever they are (unpacking and converting if needed)"""
+        if not self.stage_commands:
+            return 0
+
+        returns = []
+        for command in self.stage_commands:
+            ret, out, err, _ = self.runCmd(command, search=False)
+            if ret > 0:
+                log.error("Problem during unpack.")
+                self.log_subprocess(ret, out, err)
+            returns.append(ret)
+
+        return all(ret > 0 for ret in returns)
+
+    def copy(self) -> int:
+        # move or unpack if necessary
+        if (stage := self.stage()) > 0:
+            raise Exception(stage)
+
+        ret, out, err, _ = self.runCmd(self.copy_command(), search=False)
+        if ret > 0:
+            #error in copy command
+            log.error("Problem during copy.")
+        return ret
+
+    def runCmd(self, command: List[Union[str, Callable]], search: bool = True) -> Tuple[int, Union[IO[bytes],None], Union[IO[bytes],None], Union[str,None]]:
+        prepared, _ = self.prepare_command(command)
+        log.info(f"Running command: {' '.join(prepared)}")
+
+        kwargs = {
+            "stderr": subprocess.PIPE,
+            "stdout": subprocess.PIPE
+        }
+        if debug: del kwargs["stderr"]
+
+        with subprocess.Popen(prepared, **kwargs) as p:
+            uuid = None
+            if search:
+                for _, line in enumerate(p.stdout):  # type: ignore
+                    ln = line.decode()
+                    if ln.startswith("UUID: "):
+                        uuid = ln.split(" ")[-1]
+                        log.debug(f"found uuid: {uuid}")
+            return p.wait(), p.stdout, p.stderr, uuid
+
+    def prepare_command(self, command_list: List[Union[str, Callable]]) -> Tuple[List[str],List[None]]:
+        """
+        Commands may be a callable, which should be a lambda to be evaluated at
+        preparation time with available locals. This can be used to, among
+        other things, perform lazy evaluations of f-strings which have values
+        not available at assignment time. e.g., filling in a second command
+        with a value extracted from the previous step or command.
+        """
+
+        r = []
+        return r, [r.append(c()) if (callable(c) and c.__name__ == '<lambda>') else r.append(str(c)) for c in command_list]
+
+    def log_subprocess(self, return_code: int, stdout: Union[IO[bytes], None], stderr: Union[IO[bytes], None]):
+        def log_lines(title, lines):
+            log.info(f"====={title}=====")
+            for _, line in lines:
+                log.info(line.decode())
+        log.info(f"Command return code: {return_code}")
+        log_lines("Command STDOUT", enumerate(stdout))  # type: ignore
+        log_lines("Command STDERR", enumerate(stderr))  # type: ignore
+
+    def render_kubernetes_job(self):
+        commands = [self.build_command(), self.package_command(), self.copy_command()]
+        if not self.job_template:
+            return None
+        template = self.job_template.render(
+            architecture=self.architecture.name,
+            backoffLimit=4,
+            buildTime=BUILDTIME.strftime("%s"),
+            command=commands,
+            imageName="ghcr.io/rockylinux/sig-core-toolkit:latest",
+            jobname="buildimage",
+            namespace="empanadas",
+            major=major,
+            restartPolicy="Never",
+        )
+        return template
+
+    def save(self):
+        with open(pathlib.Path(self.outdir, "metadata.json"), "w") as f:
+            o = { name: getattr(self, name) for name in ["base_uuid", "target_uuid"] }
+            log.debug(o)
+            json.dump(o, f)
+
 def run():
-    result, error = valid_type_variant(results.type, results.variant)
-    if not result:
-        print(error)
+    try:
+        valid_type_variant(results.type, results.variant)
+    except Exception as e:
+        log.exception(e)
         exit(2)
 
     file_loader = FileSystemLoader(f"{_rootdir}/templates")
     tmplenv = Environment(loader=file_loader)
     tdl_template = tmplenv.get_template('icicle/tdl.xml.tmpl')
-    job_template = tmplenv.get_template('kube/Job.tmpl')
 
-    for architecture in rlvars["allowed_arches"]:
-        architecture = Architecture.New(architecture, major)
+    arches = rlvars['allowed_arches'] if results.kube else [platform.uname().machine]
 
-        commands = generate_imagefactory_commands(tdl_template, architecture)
+    for architecture in arches:
+        IB = ImageBuild(
+            image_type=results.type,
+            variant=results.variant,
+            architecture=Architecture.New(architecture, major),
+            template=tdl_template,
+            revision=rlvars['revision'],
+            fedora_release=rlvars['fedora_release'],
+            debug=True
+        )
+        if results.kube:
+            IB.job_template = tmplenv.get_template('kube/Job.tmpl')
+            #commands = IB.kube_commands()
+            print(IB.render_kubernetes_job())
+        else:
+            ret = IB.build()
+            ret = IB.package()
+            ret = IB.copy()
 
-        print(job_template.render(
-            architecture=architecture,
-            backoffLimit=4,
-            buildTime=datetime.datetime.utcnow().strftime("%s"),
-            command=commands,
-            imageName="ghcr.io/rockylinux/sig-core-toolkit:latest",
-            jobname="buildimage",
-            namespace="empanadas",
-            major=major,
-            restartPolicy="Never",
-        ))
diff --git a/iso/empanadas/empanadas/templates/icicle/tdl.xml.tmpl b/iso/empanadas/empanadas/templates/icicle/tdl.xml.tmpl
index 14e8dd8..5f0aa8b 100644
--- a/iso/empanadas/empanadas/templates/icicle/tdl.xml.tmpl
+++ b/iso/empanadas/empanadas/templates/icicle/tdl.xml.tmpl
@@ -5,7 +5,7 @@
   <version>{{fedora_version}}</version>
   <arch>{{architecture}}</arch>
   <install type='url'>
-    <url>https://dl.rockylinux.org/stg/rocky/{{major}}/BaseOS/{{architecture}}/{{installdir}}/</url>
+    <url>https://dl.rockylinux.org/stg/rocky/{{major}}/BaseOS/{{architecture}}/{{installdir}}</url>
   </install>
   <icicle>
     <extra_command>rpm -qa --qf '%{NAME},%{VERSION},%{RELEASE},%{ARCH},%{EPOCH},%{SIZE},%{SIGMD5},%{BUILDTIME}'</extra_command>
 
diff --git a/iso/empanadas/empanadas/util/imagebuild.py b/iso/empanadas/empanadas/util/imagebuild.py
new file mode 100644
index 0000000..e69de29
diff --git a/iso/empanadas/poetry.lock b/iso/empanadas/poetry.lock
index 35ab49d..7a23d84 100644
---
a/iso/empanadas/poetry.lock +++ b/iso/empanadas/poetry.lock @@ -10,7 +10,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" name = "attrs" version = "21.4.0" description = "Classes Without Boilerplate" -category = "dev" +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" @@ -22,14 +22,14 @@ tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (> [[package]] name = "boto3" -version = "1.24.14" +version = "1.24.22" description = "The AWS SDK for Python" category = "main" optional = false python-versions = ">= 3.7" [package.dependencies] -botocore = ">=1.27.14,<1.28.0" +botocore = ">=1.27.22,<1.28.0" jmespath = ">=0.7.1,<2.0.0" s3transfer = ">=0.6.0,<0.7.0" @@ -38,7 +38,7 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.27.14" +version = "1.27.22" description = "Low-level, data-driven core of boto 3." category = "main" optional = false @@ -62,11 +62,11 @@ python-versions = ">=3.6" [[package]] name = "charset-normalizer" -version = "2.0.12" +version = "2.1.0" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." category = "main" optional = false -python-versions = ">=3.5.0" +python-versions = ">=3.6.0" [package.extras] unicode_backport = ["unicodedata2"] @@ -89,7 +89,7 @@ python-versions = ">=3.5" [[package]] name = "importlib-metadata" -version = "4.11.4" +version = "4.12.0" description = "Read metadata from Python packages" category = "dev" optional = false @@ -102,7 +102,7 @@ zipp = ">=0.5" [package.extras] docs = ["sphinx", "jaraco.packaging (>=9)", "rst.linker (>=1.9)"] perf = ["ipython"] -testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "packaging", "pyfakefs", "flufl.flake8", "pytest-perf (>=0.9.2)", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)", "importlib-resources (>=1.3)"] +testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.3)", "packaging", "pyfakefs", "flufl.flake8", "pytest-perf (>=0.9.2)", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)", "importlib-resources (>=1.3)"] [[package]] name = "importlib-resources" @@ -143,7 +143,7 @@ python-versions = ">=3.7" [[package]] name = "kobo" -version = "0.24.1" +version = "0.24.2" description = "A pile of python modules used by Red Hat release engineering to build their tools" category = "main" optional = false @@ -267,7 +267,7 @@ python-versions = ">=3.6" [[package]] name = "requests" -version = "2.28.0" +version = "2.28.1" description = "Python HTTP for Humans." 
category = "main" optional = false @@ -275,13 +275,13 @@ python-versions = ">=3.7, <4" [package.dependencies] certifi = ">=2017.4.17" -charset-normalizer = ">=2.0.0,<2.1.0" +charset-normalizer = ">=2,<3" idna = ">=2.5,<4" urllib3 = ">=1.21.1,<1.27" [package.extras] socks = ["PySocks (>=1.5.6,!=1.5.7)"] -use_chardet_on_py3 = ["chardet (>=3.0.2,<5)"] +use_chardet_on_py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "rpm-py-installer" @@ -315,7 +315,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" [[package]] name = "typing-extensions" -version = "4.2.0" +version = "4.3.0" description = "Backported and Experimental Type Hints for Python 3.7+" category = "dev" optional = false @@ -365,7 +365,7 @@ testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest- [metadata] lock-version = "1.1" python-versions = ">=3.7,<4" -content-hash = "ccd47ad1b0819968dbad34b68c3f9afd98bd657ee639f9037731fd2a0746bd16" +content-hash = "42676fd0ceb350c8cd90246dc688cfcd404e14d22229052d0527fe342c135b95" [metadata.files] atomicwrites = [ @@ -377,20 +377,20 @@ attrs = [ {file = "attrs-21.4.0.tar.gz", hash = "sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd"}, ] boto3 = [ - {file = "boto3-1.24.14-py3-none-any.whl", hash = "sha256:490f5e88f5551b33ae3019a37412158b76426d63d1fb910968ade9b6a024e5fe"}, - {file = "boto3-1.24.14.tar.gz", hash = "sha256:e284705da36faa668c715ae1f74ebbff4320dbfbe3a733df3a8ab076d1ed1226"}, + {file = "boto3-1.24.22-py3-none-any.whl", hash = "sha256:c9a9f893561f64f5b81de197714ac4951251a328672a8dba28ad4c4a589c3adf"}, + {file = "boto3-1.24.22.tar.gz", hash = "sha256:67d404c643091d4aa37fc485193289ad859f1f65f94d0fa544e13bdd1d4187c1"}, ] botocore = [ - {file = "botocore-1.27.14-py3-none-any.whl", hash = "sha256:df1e9b208ff93daac7c645b0b04fb6dccd7f20262eae24d87941727025cbeece"}, - {file = "botocore-1.27.14.tar.gz", hash = "sha256:bb56fa77b8fa1ec367c2e16dee62d60000451aac5140dcce3ebddc167fd5c593"}, + {file = "botocore-1.27.22-py3-none-any.whl", hash = "sha256:7145d9b7cae87999a9f074de700d02a1b3222ee7d1863aa631ff56c5fc868035"}, + {file = "botocore-1.27.22.tar.gz", hash = "sha256:f57cb33446deef92e552b0be0e430d475c73cf64bc9e46cdb4783cdfe39cb6bb"}, ] certifi = [ {file = "certifi-2022.6.15-py3-none-any.whl", hash = "sha256:fe86415d55e84719d75f8b69414f6438ac3547d2078ab91b67e779ef69378412"}, {file = "certifi-2022.6.15.tar.gz", hash = "sha256:84c85a9078b11105f04f3036a9482ae10e4621616db313fe045dd24743a0820d"}, ] charset-normalizer = [ - {file = "charset-normalizer-2.0.12.tar.gz", hash = "sha256:2857e29ff0d34db842cd7ca3230549d1a697f96ee6d3fb071cfa6c7393832597"}, - {file = "charset_normalizer-2.0.12-py3-none-any.whl", hash = "sha256:6881edbebdb17b39b4eaaa821b438bf6eddffb4468cf344f09f89def34a8b1df"}, + {file = "charset-normalizer-2.1.0.tar.gz", hash = "sha256:575e708016ff3a5e3681541cb9d79312c416835686d054a23accb873b254f413"}, + {file = "charset_normalizer-2.1.0-py3-none-any.whl", hash = "sha256:5189b6f22b01957427f35b6a08d9a0bc45b46d3788ef5a92e978433c7a35f8a5"}, ] colorama = [ {file = "colorama-0.4.5-py2.py3-none-any.whl", hash = "sha256:854bf444933e37f5824ae7bfc1e98d5bce2ebe4160d46b5edf346a89358e99da"}, @@ -401,8 +401,8 @@ idna = [ {file = "idna-3.3.tar.gz", hash = "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"}, ] importlib-metadata = [ - {file = "importlib_metadata-4.11.4-py3-none-any.whl", hash = "sha256:c58c8eb8a762858f49e18436ff552e83914778e50e9d2f1660535ffb364552ec"}, - {file = "importlib_metadata-4.11.4.tar.gz", hash = 
"sha256:5d26852efe48c0a32b0509ffbc583fda1a2266545a78d104a6f4aff3db17d700"}, + {file = "importlib_metadata-4.12.0-py3-none-any.whl", hash = "sha256:7401a975809ea1fdc658c3aa4f78cc2195a0e019c5cbc4c06122884e9ae80c23"}, + {file = "importlib_metadata-4.12.0.tar.gz", hash = "sha256:637245b8bab2b6502fcbc752cc4b7a6f6243bb02b31c5c26156ad103d3d45670"}, ] importlib-resources = [ {file = "importlib_resources-5.8.0-py3-none-any.whl", hash = "sha256:7952325ffd516c05a8ad0858c74dff2c3343f136fe66a6002b2623dd1d43f223"}, @@ -417,7 +417,7 @@ jmespath = [ {file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"}, ] kobo = [ - {file = "kobo-0.24.1.tar.gz", hash = "sha256:d5a30cc20c323f3e9d9b4b2e511650c4b98929b88859bd8cf57463876686e407"}, + {file = "kobo-0.24.2.tar.gz", hash = "sha256:1b3c17260a93d933d2238884373fbf3485ecd417d930acf984285dc012410e2b"}, ] markupsafe = [ {file = "MarkupSafe-2.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d8446c54dc28c01e5a2dbac5a25f071f6653e6e40f3a8818e8b45d790fe6ef53"}, @@ -558,8 +558,8 @@ pyyaml = [ {file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"}, ] requests = [ - {file = "requests-2.28.0-py3-none-any.whl", hash = "sha256:bc7861137fbce630f17b03d3ad02ad0bf978c844f3536d0edda6499dafce2b6f"}, - {file = "requests-2.28.0.tar.gz", hash = "sha256:d568723a7ebd25875d8d1eaf5dfa068cd2fc8194b2e483d7b1f7c81918dbec6b"}, + {file = "requests-2.28.1-py3-none-any.whl", hash = "sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349"}, + {file = "requests-2.28.1.tar.gz", hash = "sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983"}, ] rpm-py-installer = [ {file = "rpm-py-installer-1.1.0.tar.gz", hash = "sha256:66e5f4f9247752ed386345642683103afaee50fb16928878a204bc12504b9bbe"}, @@ -573,8 +573,8 @@ six = [ {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, ] typing-extensions = [ - {file = "typing_extensions-4.2.0-py3-none-any.whl", hash = "sha256:6657594ee297170d19f67d55c05852a874e7eb634f4f753dbd667855e07c1708"}, - {file = "typing_extensions-4.2.0.tar.gz", hash = "sha256:f1c24655a0da0d1b67f07e17a5e6b2a105894e6824b92096378bb3668ef02376"}, + {file = "typing_extensions-4.3.0-py3-none-any.whl", hash = "sha256:25642c956049920a5aa49edcdd6ab1e06d7e5d467fc00e0506c44ac86fbfca02"}, + {file = "typing_extensions-4.3.0.tar.gz", hash = "sha256:e6d2677a32f47fc7eb2795db1dd15c1f34eff616bcaf2cfb5e997f854fa1c4a6"}, ] urllib3 = [ {file = "urllib3-1.26.9-py2.py3-none-any.whl", hash = "sha256:44ece4d53fb1706f667c9bd1c648f5469a2ec925fcf3a776667042d645472c14"}, diff --git a/iso/empanadas/pyproject.toml b/iso/empanadas/pyproject.toml index 3f25a19..b4fa53a 100644 --- a/iso/empanadas/pyproject.toml +++ b/iso/empanadas/pyproject.toml @@ -16,6 +16,7 @@ boto3 = "^1.24.12" xmltodict = "^0.13.0" requests = "^2.28.0" kobo = "^0.24.1" +attrs = "^21.4.0" [tool.poetry.dev-dependencies] pytest = "~5" From 3cf47dd85cf594985bf086be3ce98232034e707f Mon Sep 17 00:00:00 2001 From: Neil Hanlon Date: Sun, 3 Jul 2022 22:48:12 -0400 Subject: [PATCH 29/96] remove final dependencies on results variable * put logging code into runCmd method to reduce repeated code --- .../empanadas/scripts/build_image.py | 64 ++++++++----------- 1 file changed, 28 insertions(+), 36 deletions(-) diff --git a/iso/empanadas/empanadas/scripts/build_image.py b/iso/empanadas/empanadas/scripts/build_image.py index 49e3b6e..f03ec42 100644 --- 
a/iso/empanadas/empanadas/scripts/build_image.py
+++ b/iso/empanadas/empanadas/scripts/build_image.py
@@ -79,6 +79,7 @@ class ImageBuild:
     revision: Optional[int] = field()
     metadata: pathlib.Path = field(init=False)
     fedora_release: int = field()
+    release: int = field(default=0)
 
     def __attrs_post_init__(self):
         self.tdl_path = self.render_icicle_template()
@@ -122,7 +123,7 @@ class ImageBuild:
             log.exception("Couldn't decode metadata file", e)
 
     def output_name(self):
-        return f"Rocky-{self.architecture.version}-{self.type_variant}.{BUILDTIME.strftime('%Y%m%d')}.{results.release if results.release else 0}.{self.architecture.name}"
+        return f"Rocky-{self.architecture.version}-{self.type_variant}.{BUILDTIME.strftime('%Y%m%d')}.{self.release}.{self.architecture.name}"
 
     def type_variant_name(self):
         return self.image_type if not self.variant else f"{self.image_type}-{self.variant.capitalize()}"
@@ -174,7 +175,7 @@ class ImageBuild:
             iso8601date=BUILDTIME.strftime("%Y%m%d"),
             installdir="kickstart" if results.kickstartdir else "os",
             major=self.architecture.version,
-            release=results.release if results.release else 0,
+            release=self.release,
             size="10G",
             type=self.image_type,
             utcnow=BUILDTIME,
@@ -211,15 +212,9 @@ class ImageBuild:
             return 0
 
         ret, out, err, uuid = self.runCmd(self.build_command())
-        if ret > 0:
-            #error in build command
-            log.error("Problem during build.")
-        if not uuid:
-            log.error("Build UUID not found in stdout. Dumping stdout and stderr")
-            self.log_subprocess(ret, out, err)
-            return ret
-        self.base_uuid = uuid.rstrip()
-        self.save()
+        if uuid:
+            self.base_uuid = uuid.rstrip()
+            self.save()
         return ret
 
     def package(self) -> int:
@@ -231,14 +226,9 @@ class ImageBuild:
             return 0
 
         ret, out, err, uuid = self.runCmd(self.package_command())
-        if ret > 0:
-            log.error("Problem during packaging")
-        if not uuid:
-            log.error("Target Image UUID not found in stdout. Dumping stdout and stderr")
-            self.log_subprocess(ret, out, err)
-            return ret
-        self.target_uuid = uuid.rstrip()
-        self.save()
+        if uuid:
+            self.target_uuid = uuid.rstrip()
+            self.save()
         return ret
 
     def stage(self) -> int:
@@ -249,9 +239,6 @@ class ImageBuild:
         returns = []
         for command in self.stage_commands:
             ret, out, err, _ = self.runCmd(command, search=False)
-            if ret > 0:
-                log.error("Problem during unpack.")
-                self.log_subprocess(ret, out, err)
             returns.append(ret)
 
         return any(ret > 0 for ret in returns)
@@ -262,9 +249,6 @@ class ImageBuild:
             raise Exception(stage)
 
         ret, out, err, _ = self.runCmd(self.copy_command(), search=False)
-        if ret > 0:
-            # error during copy command
-            log.error("Problem during copy.")
         return ret
 
     def runCmd(self, command: List[Union[str, Callable]], search: bool = True) -> Tuple[int, Union[IO[bytes],None], Union[IO[bytes],None], Union[str,None]]:
@@ -285,7 +269,15 @@ class ImageBuild:
                         if ln.startswith("UUID: "):
                             uuid = ln.split(" ")[-1]
                             log.debug(f"found uuid: {uuid}")
-            return p.wait(), p.stdout, p.stdin, uuid
+
+            res = p.wait(), p.stdout, p.stdin, uuid
+            if res[0] > 0:
+                log.error(f"Problem while executing command: '{prepared}'")
+                if search and not res[3]:
+                    log.error("UUID not found in stdout. Dumping stdout and stderr")
+                self.log_subprocess(res)
+
+            return res
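The consolidation above (run once, gather a (code, stdout, stderr, uuid) tuple, then log in one place) can be sketched standalone roughly like this; the shell command is an illustrative stand-in for an imagefactory call, and communicate() is used here purely to keep the sketch self-contained:

    import subprocess

    # Minimal illustration of the pattern: run the command, collect results
    # into one tuple, and emit errors from a single spot afterwards.
    def run(cmd, search=True):
        p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        out, err = p.communicate()
        uuid = None
        if search:
            for ln in out.decode().splitlines():
                if ln.startswith("UUID: "):
                    uuid = ln.split(" ")[-1]
        res = (p.returncode, out, err, uuid)
        if res[0] > 0:
            print(f"Problem while executing command: '{cmd}'")
        if search and not res[3]:
            print("UUID not found in stdout. Dumping stdout and stderr")
        return res

    print(run(["/bin/sh", "-c", "echo 'UUID: 1234-abcd'"]))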
 
     def prepare_command(self, command_list: List[Union[str, Callable]]) -> Tuple[List[str],List[None]]:
         """
@@ -300,14 +292,14 @@ class ImageBuild:
         r = []
         return r, [r.append(c()) if (callable(c) and c.__name__ == '<lambda>') else r.append(str(c)) for c in command_list]
 
-    def log_subprocess(self, return_code: int, stdout: Union[IO[bytes], None], stderr: Union[IO[bytes], None]):
+    def log_subprocess(self, result: Tuple[int, Union[IO[bytes], None], Union[IO[bytes], None], Union[str, None]]):
         def log_lines(title, lines):
             log.info(f"====={title}=====")
             for _, line in lines:
                 log.info(line.decode())
-        log.info(f"Command return code: {return_code}")
-        log_lines("Command STDOUT", enumerate(stdout)) # type: ignore
-        log_lines("Command STDERR", enumerate(stderr)) # type: ignore
+        log.info(f"Command return code: {result[0]}")
+        log_lines("Command STDOUT", enumerate(result[1])) # type: ignore
+        log_lines("Command STDERR", enumerate(result[2])) # type: ignore
 
     def render_kubernetes_job(self):
         commands = [self.build_command(), self.package_command(), self.copy_command()]
@@ -347,13 +339,14 @@ def run():
 
     for architecture in arches:
         IB = ImageBuild(
-            image_type=results.type,
-            variant=results.variant,
             architecture=Architecture.New(architecture, major),
-            template=tdl_template,
-            revision=rlvars['revision'],
+            debug=results.debug,
             fedora_release=rlvars['fedora_release'],
-            debug=True
+            image_type=results.type,
+            release=results.release if results.release else 0,
+            revision=rlvars['revision'],
+            template=tdl_template,
+            variant=results.variant,
         )
         if results.kube:
             IB.job_template = tmplenv.get_template('kube/Job.tmpl')
@@ -364,4 +357,3 @@ def run():
             ret = IB.package()
             ret = IB.copy()
 
-
From 77178e96578d50ef48a61f1258b83b303cb75428 Mon Sep 17 00:00:00 2001
From: Louis Abel
Date: Sun, 3 Jul 2022 21:00:57 -0700
Subject: [PATCH 30/96] move treeinfo writing to Shared

---
 iso/empanadas/empanadas/util/dnf_utils.py |  90 ++++++++++++--
 iso/empanadas/empanadas/util/iso_utils.py | 134 +++++++++++---------
 iso/empanadas/empanadas/util/shared.py    | 145 +++++++++++++++++++---
 sync/.sync-to-prod.sh.swp                 | Bin 0 -> 12288 bytes
 4 files changed, 287 insertions(+), 82 deletions(-)
 create mode 100644 sync/.sync-to-prod.sh.swp

diff --git a/iso/empanadas/empanadas/util/dnf_utils.py b/iso/empanadas/empanadas/util/dnf_utils.py
index 119933c..c7e9a79 100644
--- a/iso/empanadas/empanadas/util/dnf_utils.py
+++ b/iso/empanadas/empanadas/util/dnf_utils.py
@@ -242,6 +242,7 @@ class RepoSync:
         if self.fullrun:
             self.deploy_extra_files(sync_root, global_work_root)
             self.deploy_treeinfo(self.repo, sync_root, self.arch)
+            self.tweak_treeinfo(self.repo, sync_root, self.arch)
             self.symlink_to_latest(generated_dir)
 
         if self.repoclosure:
@@ -250,8 +251,11 @@ class RepoSync:
         if self.refresh_extra_files and not self.fullrun:
             self.deploy_extra_files(sync_root, global_work_root)
 
+        # This does NOT overwrite treeinfo files. This just ensures they exist
+        # and are configured correctly.
         if self.refresh_treeinfo and not self.fullrun:
             self.deploy_treeinfo(self.repo, sync_root, self.arch)
+            self.tweak_treeinfo(self.repo, sync_root, self.arch)
 
         self.deploy_metadata(sync_root)
 
@@ -450,6 +454,53 @@ class RepoSync:
             os.chmod(entry_point_sh, 0o755)
             os.chmod(debug_entry_point_sh, 0o755)
 
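Rendered out, each kickstart entry script added below boils down to a single dnf reposync invocation. An illustrative expansion with made-up values (the config path, repo id, and sync path are examples, not taken from any config):

    # Hypothetical BaseOS/x86_64 expansion of ks_sync_cmd below.
    ks_sync_cmd = ("/usr/bin/dnf reposync -c {}.{} --download-metadata "
                   "--repoid={} -p {} --forcearch {} --norepopath "
                   "--gpgcheck --assumeyes 2>&1").format(
            "/var/tmp/reposync.conf", "x86_64", "BaseOS",
            "/mnt/compose/work/syncing/BaseOS/x86_64/kickstart", "x86_64",
    )
    print(ks_sync_cmd)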
+            # During fullruns, a kickstart directory is made. Kickstart
+            # should not be updated nor touched during regular runs under
+            # any circumstances.
+            if self.fullrun:
+                ks_entry_name = '{}-ks-{}'.format(r, a)
+                entry_name_list.append(ks_entry_name)
+                ks_point_sh = os.path.join(
+                        entries_dir,
+                        ks_entry_name
+                )
+
+                ks_sync_path = os.path.join(
+                        sync_root,
+                        repo_name,
+                        a,
+                        'kickstart'
+                )
+
+                ks_sync_cmd = ("/usr/bin/dnf reposync -c {}.{} --download-metadata "
+                        "--repoid={} -p {} --forcearch {} --norepopath "
+                        "--gpgcheck --assumeyes 2>&1").format(
+                        self.dnf_config,
+                        a,
+                        r,
+                        ks_sync_path,
+                        a
+                )
+
+                ks_sync_log = ("{}/{}-{}-ks.log").format(
+                        log_root,
+                        repo_name,
+                        a
+                )
+
+                ks_sync_template = self.tmplenv.get_template('reposync.tmpl')
+                ks_sync_output = ks_sync_template.render(
+                        import_gpg_cmd=import_gpg_cmd,
+                        arch_force_cp=arch_force_cp,
+                        dnf_plugin_cmd=dnf_plugin_cmd,
+                        sync_cmd=ks_sync_cmd,
+                        sync_log=ks_sync_log
+                )
+                ks_entry_point_open = open(ks_point_sh, "w+")
+                ks_entry_point_open.write(ks_sync_output)
+                ks_entry_point_open.close()
+                os.chmod(ks_point_sh, 0o755)
+
             # Are we ignoring sources?
             if (not self.ignore_source and not arch) or (
                     not self.ignore_source and arch == 'source'):
@@ -635,7 +686,6 @@ class RepoSync:
         config_file = open(fname, "w+")
         repolist = []
         for repo in self.repos:
-
             constructed_url = '{}/{}/repo/{}{}/$basearch'.format(
                     self.repo_base_url,
                     self.project_id,
@@ -1159,6 +1209,12 @@ class RepoSync:
                     repo_name + ' source media.repo already exists'
             )
 
+    def tweak_treeinfo(self, repo, sync_root, arch):
+        """
+        This modifies treeinfo for the primary repository. If the repository is
+        listed in the iso_map as a non-disc, it will be considered for modification.
+        """
+
     def run_compose_closeout(self):
         """
         Closes out a compose as file. This ensures kickstart repositories are
@@ -1185,8 +1241,12 @@ class RepoSync:
 
         # Verify if the link even exists
         if not os.path.exists(self.compose_latest_dir):
-            self.log.error('!! Latest compose link is broken does not exist: %s' % self.compose_latest_dir)
-            self.log.error('!! Please perform a full run if you have not done so.')
+            self.log.error(
+                '!! Latest compose link is broken or does not exist: %s' % self.compose_latest_dir
+            )
+            self.log.error(
+                '!! Please perform a full run if you have not done so.'
+            )
             raise SystemExit()
 
         log_root = os.path.join(
@@ -1210,12 +1270,28 @@ class RepoSync:
             'Starting to sync ISOs to compose'
         )
 
-        iso_result = Shared.fpsync_method(iso_root, sync_iso_root, self.log, tmp_dir)
-
-        if not iso_result:
+        if os.path.exists('/usr/bin/fpsync'):
+            message, ret = Shared.fpsync_method(iso_root, sync_iso_root, tmp_dir)
+        elif os.path.exists('/usr/bin/parallel') and os.path.exists('/usr/bin/rsync'):
+            message, ret = Shared.rsync_method(iso_root, sync_iso_root)
+        else:
             self.log.error(
                 '[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' +
-                'Sync failed'
+                'Neither fpsync nor parallel + rsync was found on this system. ' +
+                'There is also no built-in parallel rsync method at this ' +
+                'time.'
+            )
+            raise SystemExit()
+
+        if ret != 0:
+            self.log.error(
+                '[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' +
+                message
+            )
+        else:
+            self.log.info(
+                '[' + Color.BOLD + Color.GREEN + 'INFO' + Color.END + '] ' +
+                message
             )
 
 class SigRepoSync:
diff --git a/iso/empanadas/empanadas/util/iso_utils.py b/iso/empanadas/empanadas/util/iso_utils.py
index e402c4a..ee682c9 100644
--- a/iso/empanadas/empanadas/util/iso_utils.py
+++ b/iso/empanadas/empanadas/util/iso_utils.py
@@ -799,54 +799,76 @@ class IsoBuild:
         is for basic use. Eventually it'll be expanded to handle this
         scenario.
""" image = os.path.join(self.lorax_work_dir, arch, variant) - treeinfo = os.path.join(image, '.treeinfo') - discinfo = os.path.join(image, '.discinfo') - mediarepo = os.path.join(image, 'media.repo') imagemap = self.iso_map['images'][variant] - primary = imagemap['variant'] - repos = imagemap['repos'] - is_disc = False + data = { + 'arch': arch, + 'variant': variant, + 'variant_path': image, + 'checksum': self.checksum, + 'distname': self.distname, + 'fullname': self.fullname, + 'shortname': self.shortname, + 'release': self.release, + 'timestamp': self.timestamp, + } - if imagemap['disc']: - is_disc = True - discnum = 1 + try: + Shared.treeinfo_modify_write(data, imagemap) + except Exception as e: + self.log.error( + '[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' + + 'There was an error writing treeinfo.' + ) + self.log.error(e) + + #treeinfo = os.path.join(image, '.treeinfo') + #discinfo = os.path.join(image, '.discinfo') + #mediarepo = os.path.join(image, 'media.repo') + #imagemap = self.iso_map['images'][variant] + #primary = imagemap['variant'] + #repos = imagemap['repos'] + #is_disc = False + + #if imagemap['disc']: + # is_disc = True + # discnum = 1 # load up productmd - ti = productmd.treeinfo.TreeInfo() - ti.load(treeinfo) + #ti = productmd.treeinfo.TreeInfo() + #ti.load(treeinfo) # Set the name - ti.release.name = self.distname - ti.release.short = self.shortname + #ti.release.name = self.distname + #ti.release.short = self.shortname # Set the version (the initial lorax run does this, but we are setting # it just in case) - ti.release.version = self.release + #ti.release.version = self.release # Assign the present images into a var as a copy. For each platform, # clear out the present dictionary. For each item and path in the # assigned var, assign it back to the platform dictionary. If the path # is empty, continue. Do checksums afterwards. - plats = ti.images.images.copy() - for platform in ti.images.images: - ti.images.images[platform] = {} - for i, p in plats[platform].items(): - if not p: - continue - if 'boot.iso' in i and is_disc: - continue - ti.images.images[platform][i] = p - ti.checksums.add(p, self.checksum, root_dir=image) + #plats = ti.images.images.copy() + #for platform in ti.images.images: + # ti.images.images[platform] = {} + # for i, p in plats[platform].items(): + # if not p: + # continue + # if 'boot.iso' in i and is_disc: + # continue + # ti.images.images[platform][i] = p + # ti.checksums.add(p, self.checksum, root_dir=image) # stage2 checksums - if ti.stage2.mainimage: - ti.checksums.add(ti.stage2.mainimage, self.checksum, root_dir=image) + #if ti.stage2.mainimage: + # ti.checksums.add(ti.stage2.mainimage, self.checksum, root_dir=image) - if ti.stage2.instimage: - ti.checksums.add(ti.stage2.instimage, self.checksum, root_dir=image) + #if ti.stage2.instimage: + # ti.checksums.add(ti.stage2.instimage, self.checksum, root_dir=image) # If we are a disc, set the media section appropriately. - if is_disc: - ti.media.discnum = discnum - ti.media.totaldiscs = discnum + #if is_disc: + # ti.media.discnum = discnum + # ti.media.totaldiscs = discnum # Create variants # Note to self: There's a lot of legacy stuff running around for @@ -854,38 +876,38 @@ class IsoBuild: # apparently. But there could be a chance it'll change. We may need to # put in a configuration to deal with it at some point. 
#ti.variants.variants.clear() - for y in repos: - if y in ti.variants.variants.keys(): - vari = ti.variants.variants[y] - else: - vari = productmd.treeinfo.Variant(ti) + #for y in repos: + # if y in ti.variants.variants.keys(): + # vari = ti.variants.variants[y] + # else: + # vari = productmd.treeinfo.Variant(ti) - vari.id = y - vari.uid = y - vari.name = y - vari.type = "variant" - if is_disc: - vari.paths.repository = y - vari.paths.packages = y + "/Packages" - else: - if y == primary: - vari.paths.repository = "." - vari.paths.packages = "Packages" - else: - vari.paths.repository = "../../../" + y + "/" + arch + "/os" - vari.paths.packages = "../../../" + y + "/" + arch + "/os/Packages" + # vari.id = y + # vari.uid = y + # vari.name = y + # vari.type = "variant" + # if is_disc: + # vari.paths.repository = y + # vari.paths.packages = y + "/Packages" + # else: + # if y == primary: + # vari.paths.repository = "." + # vari.paths.packages = "Packages" + # else: + # vari.paths.repository = "../../../" + y + "/" + arch + "/os" + # vari.paths.packages = "../../../" + y + "/" + arch + "/os/Packages" - if y not in ti.variants.variants.keys(): - ti.variants.add(vari) + # if y not in ti.variants.variants.keys(): + # ti.variants.add(vari) - del vari + # del vari # Set default variant - ti.dump(treeinfo, main_variant=primary) + #ti.dump(treeinfo, main_variant=primary) # Set discinfo - Shared.discinfo_write(self.timestamp, self.fullname, arch, discinfo) + #Shared.discinfo_write(self.timestamp, self.fullname, arch, discinfo) # Set media.repo - Shared.media_repo_write(self.timestamp, self.fullname, mediarepo) + #Shared.media_repo_write(self.timestamp, self.fullname, mediarepo) # Next set of functions are loosely borrowed (in concept) from pungi. Some # stuff may be combined/mixed together, other things may be simplified or diff --git a/iso/empanadas/empanadas/util/shared.py b/iso/empanadas/empanadas/util/shared.py index eca879d..305316b 100644 --- a/iso/empanadas/empanadas/util/shared.py +++ b/iso/empanadas/empanadas/util/shared.py @@ -107,11 +107,115 @@ class Shared: ti.dump(file_path) @staticmethod - def treeinfo_modify_write(): + def treeinfo_modify_write(data, imagemap): """ Modifies a specific treeinfo with already available data. This is in the case of modifying treeinfo for primary repos or images. """ + arch = data['arch'] + variant = data['variant'] + variant_path = data['variant_path'] + checksum = data['checksum'] + distname = data['distname'] + fullname = data['fullname'] + shortname = data['shortname'] + release = data['release'] + timestamp = data['timestamp'] + + os_or_ks = '' + if '/os/' in variant_path: + os_or_ks = 'os' + if '/kickstart/' in variant_path: + os_or_ks = 'kickstart' + + image = os.path.join(variant_path) + treeinfo = os.path.join(image, '.treeinfo') + discinfo = os.path.join(image, '.discinfo') + mediarepo = os.path.join(image, 'media.repo') + #imagemap = self.iso_map['images'][variant] + primary = imagemap['variant'] + repos = imagemap['repos'] + is_disc = False + + if imagemap['disc']: + is_disc = True + discnum = 1 + + # load up productmd + ti = productmd.treeinfo.TreeInfo() + ti.load(treeinfo) + + # Set the name + ti.release.name = distname + ti.release.short = shortname + # Set the version (the initial lorax run does this, but we are setting + # it just in case) + ti.release.version = release + # Assign the present images into a var as a copy. For each platform, + # clear out the present dictionary. 
For each item and path in the + # assigned var, assign it back to the platform dictionary. If the path + # is empty, continue. Do checksums afterwards. + plats = ti.images.images.copy() + for platform in ti.images.images: + ti.images.images[platform] = {} + for i, p in plats[platform].items(): + if not p: + continue + if 'boot.iso' in i and is_disc: + continue + ti.images.images[platform][i] = p + ti.checksums.add(p, checksum, root_dir=image) + + # stage2 checksums + if ti.stage2.mainimage: + ti.checksums.add(ti.stage2.mainimage, checksum, root_dir=image) + + if ti.stage2.instimage: + ti.checksums.add(ti.stage2.instimage, checksum, root_dir=image) + + # If we are a disc, set the media section appropriately. + if is_disc: + ti.media.discnum = discnum + ti.media.totaldiscs = discnum + + # Create variants + # Note to self: There's a lot of legacy stuff running around for + # Fedora, ELN, and RHEL in general. This is the general structure, + # apparently. But there could be a chance it'll change. We may need to + # put in a configuration to deal with it at some point. + #ti.variants.variants.clear() + for y in repos: + if y in ti.variants.variants.keys(): + vari = ti.variants.variants[y] + else: + vari = productmd.treeinfo.Variant(ti) + + vari.id = y + vari.uid = y + vari.name = y + vari.type = "variant" + if is_disc: + vari.paths.repository = y + vari.paths.packages = y + "/Packages" + else: + if y == primary: + vari.paths.repository = "." + vari.paths.packages = "Packages" + else: + vari.paths.repository = "../../../" + y + "/" + arch + "/" + os_or_ks + vari.paths.packages = "../../../" + y + "/" + arch + "/" + os_or_ks + "/Packages" + + if y not in ti.variants.variants.keys(): + ti.variants.add(vari) + + del vari + + # Set default variant + ti.dump(treeinfo, main_variant=primary) + # Set discinfo + Shared.discinfo_write(timestamp, fullname, arch, discinfo) + # Set media.repo + Shared.media_repo_write(timestamp, fullname, mediarepo) @staticmethod def write_metadata( @@ -345,18 +449,16 @@ class Shared: """ @staticmethod - def fpsync_method(src, dest, logger, tmp_dir): + def fpsync_method(src, dest, tmp_dir): """ Returns a list for the fpsync command """ cmd = '/usr/bin/fpsync' rsync_switches = '-av --numeric-ids --no-compress --chown=10004:10005' if not os.path.exists(cmd): - logger.warn( - '[' + Color.BOLD + Color.YELLOW + 'WARN' + Color.END + '] ' + - 'fpsync not found' - ) - return False + message = 'fpsync not found' + retval = 1 + return message, retval os.makedirs(tmp_dir, exist_ok=True) @@ -373,25 +475,30 @@ class Shared: stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL, ) + if process != 0: - logger.error( - '[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' + - 'fpsync failed' - ) - return False + message = 'Syncing (fpsync) failed' + retval = process + return message, retval if os.path.exists(dest): - return True + message = 'Syncing (fpsync) succeeded' + retval = process else: - logger.error( - '[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' + - 'Path synced does not seem to exist for some reason.' - ) - return False + message = 'Path synced does not seem to exist for some reason.' + retval = 1 + + return message, retval @staticmethod - def rsync_method(src, dest, logger, tmp_dir): + def rsync_method(src, dest): """ Returns a string for the rsync command plus parallel. Yes, this is a hack. 
""" + find_cmd = '/usr/bin/find' + parallel_cmd = '/usr/bin/parallel' + rsync_cmd = '/usr/bin/rsync' + switches = '-av --chown=10004:10005 --progress --relative --human-readable' + + os.makedirs(dest, exist_ok=True) diff --git a/sync/.sync-to-prod.sh.swp b/sync/.sync-to-prod.sh.swp new file mode 100644 index 0000000000000000000000000000000000000000..75f3529bd37db6f81cb1f6cd48c5ac3accac90cb GIT binary patch literal 12288 zcmeI2L2nyH6vroAXbLSAd;nhUsDY@vYbPx#i9!{eG%-@*D0W(*rj5qCV|(lIt~;}v z#7Yb&4jj2~Lwo=Zh%*uwRBq4{2e=_|feRNt0RLGhb(#<*QmI6xS?O=@&dz%?^PBfx z5@qUz+vR1t;Z8DKml&I_epeq|{FLqFFEPpMLYnqdF3qFR<8!(^cD08 zB%lDAhf2^d7a98r`VsmXN}>DEMd-H+jC~LN{wiaSp>Lo!q4Urv^y@2(eG7d7b)h!& z09uCrI?vdj&>zra=pnQO5p)jvd6cm)p_|YM^aykR9Qq7ef)*gNo)Tm)OTZGa1S|nd zz!G>)1R5bD@>}tCM8!g(aBbRrUZ+MXWh0bAhB|rjkc6GA!BIFT*gMt?N0~SH>mGY~ zvJH`!YUSbVA(vh~j67cno(Sj3@b8`A4CBEzvpDm3gK0Ri3^Hp!pxoH*>RM%fy;Lh# zRyNlvm0I5$+bzx278WaOA8uA_>+koy9PRDVE(w65+$|0;0VOfGnWK{wgi82Hta`Z_ z5<6i+#Tn{n{_}K4j4MH0VogDhT0)7dR8JH7Ak|42QHg6o+dN8)!&aza6*lPZU2+?W+T`rexI3}o?sTL(F>cnpws5;##ftkm^vl!I zK~J$IR1Y(Wnplc2x(UGs14v=!_I!Ee=@!B+8;r8ROS=Ig0lz&S1eP*qP08U~UBV59+T7aWLM?+}xRZl`IC!6< z1H!`z9M6=Y6R%WRUaeFYHp}w{YL*7Wrhgr+LAha%;do|8YNFGP6<0k7sM;>9` z8oR2qSRtKu(FG@U&0s{k{Uc-OG>FNObmht*LMA52>F}KZyV{}xIZ3Z8NYWm4x#AMZ z4p!Ttl#U-J;P!xuH!uc#{3qBqm|2!?2`T-S@Y_fmZT4ffTAD0OUB_ERE7KHn3K>;~pNBDy!@AgmczA>|>k$&UayYIT5*Bt-<5Xloq_0TawGE2!&(}Wf( zj8iRp6SS>j3nEwnM}N zBC92|OXY^CDn0n?v{Bf|=^lxyLCVaYscvRVGt<5c#R~Any$(if^*$eZ5f7Iq-jbcYzWX05V0Uta4o zra{gZK=2k|M7(2e3%R#(0>0CUqZ~UiqOZMkwODw!Xg&()RUP;ln9&wjp^1Hs)4Wtg lP1AI!@HSN3F}u?=n}Z)S=1hI+Ct)11{N?NiULBjo{sC&2w+sLP literal 0 HcmV?d00001 From b9037585c7598439e61c4050a038b9cb8307f24c Mon Sep 17 00:00:00 2001 From: Louis Abel Date: Sun, 3 Jul 2022 21:01:13 -0700 Subject: [PATCH 31/96] move treeinfo writing to Shared --- sync/.sync-to-prod.sh.swp | Bin 12288 -> 0 bytes 1 file changed, 0 insertions(+), 0 deletions(-) delete mode 100644 sync/.sync-to-prod.sh.swp diff --git a/sync/.sync-to-prod.sh.swp b/sync/.sync-to-prod.sh.swp deleted file mode 100644 index 75f3529bd37db6f81cb1f6cd48c5ac3accac90cb..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 12288 zcmeI2L2nyH6vroAXbLSAd;nhUsDY@vYbPx#i9!{eG%-@*D0W(*rj5qCV|(lIt~;}v z#7Yb&4jj2~Lwo=Zh%*uwRBq4{2e=_|feRNt0RLGhb(#<*QmI6xS?O=@&dz%?^PBfx z5@qUz+vR1t;Z8DKml&I_epeq|{FLqFFEPpMLYnqdF3qFR<8!(^cD08 zB%lDAhf2^d7a98r`VsmXN}>DEMd-H+jC~LN{wiaSp>Lo!q4Urv^y@2(eG7d7b)h!& z09uCrI?vdj&>zra=pnQO5p)jvd6cm)p_|YM^aykR9Qq7ef)*gNo)Tm)OTZGa1S|nd zz!G>)1R5bD@>}tCM8!g(aBbRrUZ+MXWh0bAhB|rjkc6GA!BIFT*gMt?N0~SH>mGY~ zvJH`!YUSbVA(vh~j67cno(Sj3@b8`A4CBEzvpDm3gK0Ri3^Hp!pxoH*>RM%fy;Lh# zRyNlvm0I5$+bzx278WaOA8uA_>+koy9PRDVE(w65+$|0;0VOfGnWK{wgi82Hta`Z_ z5<6i+#Tn{n{_}K4j4MH0VogDhT0)7dR8JH7Ak|42QHg6o+dN8)!&aza6*lPZU2+?W+T`rexI3}o?sTL(F>cnpws5;##ftkm^vl!I zK~J$IR1Y(Wnplc2x(UGs14v=!_I!Ee=@!B+8;r8ROS=Ig0lz&S1eP*qP08U~UBV59+T7aWLM?+}xRZl`IC!6< z1H!`z9M6=Y6R%WRUaeFYHp}w{YL*7Wrhgr+LAha%;do|8YNFGP6<0k7sM;>9` z8oR2qSRtKu(FG@U&0s{k{Uc-OG>FNObmht*LMA52>F}KZyV{}xIZ3Z8NYWm4x#AMZ z4p!Ttl#U-J;P!xuH!uc#{3qBqm|2!?2`T-S@Y_fmZT4ffTAD0OUB_ERE7KHn3K>;~pNBDy!@AgmczA>|>k$&UayYIT5*Bt-<5Xloq_0TawGE2!&(}Wf( zj8iRp6SS>j3nEwnM}N zBC92|OXY^CDn0n?v{Bf|=^lxyLCVaYscvRVGt<5c#R~Any$(if^*$eZ5f7Iq-jbcYzWX05V0Uta4o zra{gZK=2k|M7(2e3%R#(0>0CUqZ~UiqOZMkwODw!Xg&()RUP;ln9&wjp^1Hs)4Wtg lP1AI!@HSN3F}u?=n}Z)S=1hI+Ct)11{N?NiULBjo{sC&2w+sLP From 79682d0e98604411870f44eb29e007a6d71efec9 Mon Sep 17 00:00:00 2001 From: Louis Abel Date: Sun, 3 Jul 2022 21:14:33 -0700 Subject: [PATCH 32/96] os is missing from initial lorax variant 
--- iso/empanadas/empanadas/util/shared.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/iso/empanadas/empanadas/util/shared.py b/iso/empanadas/empanadas/util/shared.py index 305316b..93aa986 100644 --- a/iso/empanadas/empanadas/util/shared.py +++ b/iso/empanadas/empanadas/util/shared.py @@ -123,7 +123,7 @@ class Shared: timestamp = data['timestamp'] os_or_ks = '' - if '/os/' in variant_path: + if '/os/' in variant_path or not imagemap['disc']: os_or_ks = 'os' if '/kickstart/' in variant_path: os_or_ks = 'kickstart' From f9166541f409cfab4255400d7cde575bb7f734d7 Mon Sep 17 00:00:00 2001 From: Louis Abel Date: Sun, 3 Jul 2022 23:27:49 -0700 Subject: [PATCH 33/96] add tweak treeinfo for dnf_utils --- iso/empanadas/empanadas/util/dnf_utils.py | 75 ++++++++++++++++++- iso/empanadas/empanadas/util/iso_utils.py | 88 ----------------------- 2 files changed, 73 insertions(+), 90 deletions(-) diff --git a/iso/empanadas/empanadas/util/dnf_utils.py b/iso/empanadas/empanadas/util/dnf_utils.py index c7e9a79..4332986 100644 --- a/iso/empanadas/empanadas/util/dnf_utils.py +++ b/iso/empanadas/empanadas/util/dnf_utils.py @@ -96,6 +96,7 @@ class RepoSync: self.repo = repo self.extra_files = rlvars['extra_files'] self.gpgkey = gpgkey + self.checksum = rlvars['checksum'] self.compose_id = '{}-{}-{}'.format( config['shortname'], @@ -251,8 +252,9 @@ class RepoSync: if self.refresh_extra_files and not self.fullrun: self.deploy_extra_files(sync_root, global_work_root) - # This does NOT overwrite treeinfo files. This just ensures they exist - # and are configured correctly. + # deploy_treeinfo does NOT overwrite any treeinfo files. However, + # tweak_treeinfo calls out to a method that does. This should not + # cause issues as the method is fairly static in nature. if self.refresh_treeinfo and not self.fullrun: self.deploy_treeinfo(self.repo, sync_root, self.arch) self.tweak_treeinfo(self.repo, sync_root, self.arch) @@ -1214,6 +1216,75 @@ class RepoSync: This modifies treeinfo for the primary repository. If the repository is listed in the iso_map as a non-disc, it will be considered for modification. """ + variants_to_tweak = [] + + arches_to_tree = self.arches + if arch: + arches_to_tree = [arch] + + repos_to_tree = self.repos + if repo and not self.fullrun: + repos_to_tree = [repo] + + for r in repos_to_tree: + entry_name_list = [] + repo_name = r + arch_tree = arches_to_tree.copy() + + if r in self.iso_map['images']: + variants_to_tweak.append(r) + + for v in variants_to_tweak: + for a in arches_to_tree: + image = os.path.join(sync_root, v, a, 'os') + imagemap = self.iso_map['images'][v] + data = { + 'arch': a, + 'variant': v, + 'variant_path': image, + 'checksum': self.checksum, + 'distname': self.distname, + 'fullname': self.fullname, + 'shortname': self.shortname, + 'release': self.fullversion, + 'timestamp': self.timestamp, + } + + try: + Shared.treeinfo_modify_write(data, imagemap) + except Exception as e: + self.log.error( + '[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' + + 'There was an error writing treeinfo.' 
+                    )
+                    self.log.error(e)
+
+                if self.fullrun:
+                    ksimage = os.path.join(sync_root, v, a, 'kickstart')
+                    ksdata = {
+                            'arch': a,
+                            'variant': v,
+                            'variant_path': ksimage,
+                            'checksum': self.checksum,
+                            'distname': self.distname,
+                            'fullname': self.fullname,
+                            'shortname': self.shortname,
+                            'release': self.fullversion,
+                            'timestamp': self.timestamp,
+                    }
+                    ksdata.clear()
+
+                    try:
+                        Shared.treeinfo_modify_write(ksdata, imagemap)
+                    except Exception as e:
+                        self.log.error(
+                                '[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' +
+                                'There was an error writing treeinfo.'
+                        )
+                        self.log.error(e)
+
+                data.clear()
+                imagemap.clear()
 
     def run_compose_closeout(self):
         """
diff --git a/iso/empanadas/empanadas/util/iso_utils.py b/iso/empanadas/empanadas/util/iso_utils.py
index ee682c9..c61e0fe 100644
--- a/iso/empanadas/empanadas/util/iso_utils.py
+++ b/iso/empanadas/empanadas/util/iso_utils.py
@@ -821,94 +821,6 @@ class IsoBuild:
             )
             self.log.error(e)
 
-        #treeinfo = os.path.join(image, '.treeinfo')
-        #discinfo = os.path.join(image, '.discinfo')
-        #mediarepo = os.path.join(image, 'media.repo')
-        #imagemap = self.iso_map['images'][variant]
-        #primary = imagemap['variant']
-        #repos = imagemap['repos']
-        #is_disc = False
-
-        #if imagemap['disc']:
-        #    is_disc = True
-        #    discnum = 1
-
-        # load up productmd
-        #ti = productmd.treeinfo.TreeInfo()
-        #ti.load(treeinfo)
-
-        # Set the name
-        #ti.release.name = self.distname
-        #ti.release.short = self.shortname
-
-        # Set the version (the initial lorax run does this, but we are setting
-        # it just in case)
-        #ti.release.version = self.release
-
-        # Assign the present images into a var as a copy. For each platform,
-        # clear out the present dictionary. For each item and path in the
-        # assigned var, assign it back to the platform dictionary. If the path
-        # is empty, continue. Do checksums afterwards.
-        #plats = ti.images.images.copy()
-        #for platform in ti.images.images:
-        #    ti.images.images[platform] = {}
-        #    for i, p in plats[platform].items():
-        #        if not p:
-        #            continue
-        #        if 'boot.iso' in i and is_disc:
-        #            continue
-        #        ti.images.images[platform][i] = p
-        #        ti.checksums.add(p, self.checksum, root_dir=image)
-
-        # stage2 checksums
-        #if ti.stage2.mainimage:
-        #    ti.checksums.add(ti.stage2.mainimage, self.checksum, root_dir=image)
-
-        #if ti.stage2.instimage:
-        #    ti.checksums.add(ti.stage2.instimage, self.checksum, root_dir=image)
-
-        # If we are a disc, set the media section appropriately.
-        #if is_disc:
-        #    ti.media.discnum = discnum
-        #    ti.media.totaldiscs = discnum
-
-        # Create variants
-        # Note to self: There's a lot of legacy stuff running around for
-        # Fedora, ELN, and RHEL in general. This is the general structure,
-        # apparently. But there could be a chance it'll change. We may need to
-        # put in a configuration to deal with it at some point.
-        #ti.variants.variants.clear()
-        #for y in repos:
-        #    if y in ti.variants.variants.keys():
-        #        vari = ti.variants.variants[y]
-        #    else:
-        #        vari = productmd.treeinfo.Variant(ti)
-
-        #    vari.id = y
-        #    vari.uid = y
-        #    vari.name = y
-        #    vari.type = "variant"
-        #    if is_disc:
-        #        vari.paths.repository = y
-        #        vari.paths.packages = y + "/Packages"
-        #    else:
-        #        if y == primary:
-        #            vari.paths.repository = "."
- # vari.paths.packages = "Packages" - # else: - # vari.paths.repository = "../../../" + y + "/" + arch + "/os" - # vari.paths.packages = "../../../" + y + "/" + arch + "/os/Packages" - - # if y not in ti.variants.variants.keys(): - # ti.variants.add(vari) - - # del vari - - # Set default variant - #ti.dump(treeinfo, main_variant=primary) - # Set discinfo - #Shared.discinfo_write(self.timestamp, self.fullname, arch, discinfo) - # Set media.repo - #Shared.media_repo_write(self.timestamp, self.fullname, mediarepo) - # Next set of functions are loosely borrowed (in concept) from pungi. Some # stuff may be combined/mixed together, other things may be simplified or # reduced in nature. From 93d6bae08c457c4d6c8c12be65f51782ba9f52d3 Mon Sep 17 00:00:00 2001 From: Louis Abel Date: Sun, 3 Jul 2022 23:35:03 -0700 Subject: [PATCH 34/96] missing logger --- iso/empanadas/empanadas/util/dnf_utils.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/iso/empanadas/empanadas/util/dnf_utils.py b/iso/empanadas/empanadas/util/dnf_utils.py index 4332986..3bd6af4 100644 --- a/iso/empanadas/empanadas/util/dnf_utils.py +++ b/iso/empanadas/empanadas/util/dnf_utils.py @@ -1236,6 +1236,10 @@ class RepoSync: for v in variants_to_tweak: for a in arches_to_tree: + self.log.info( + '[' + Color.BOLD + Color.GREEN + 'INFO' + Color.END + '] ' + + 'Tweaking treeinfo for ' + a + ' ' + v + ) image = os.path.join(sync_root, v, a, 'os') imagemap = self.iso_map['images'][v] data = { From f308e87b15f306ab6f17f64b1e7e8295667f61da Mon Sep 17 00:00:00 2001 From: Louis Abel Date: Sun, 3 Jul 2022 23:42:35 -0700 Subject: [PATCH 35/96] add more logging --- iso/empanadas/empanadas/util/dnf_utils.py | 4 ++-- iso/empanadas/empanadas/util/iso_utils.py | 2 +- iso/empanadas/empanadas/util/shared.py | 5 ++++- 3 files changed, 7 insertions(+), 4 deletions(-) diff --git a/iso/empanadas/empanadas/util/dnf_utils.py b/iso/empanadas/empanadas/util/dnf_utils.py index 3bd6af4..ab46555 100644 --- a/iso/empanadas/empanadas/util/dnf_utils.py +++ b/iso/empanadas/empanadas/util/dnf_utils.py @@ -1255,7 +1255,7 @@ class RepoSync: } try: - Shared.treeinfo_modify_write(data, imagemap) + Shared.treeinfo_modify_write(data, imagemap, self.log) except Exception as e: self.log.error( '[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' + @@ -1279,7 +1279,7 @@ class RepoSync: ksdata.clear() try: - Shared.treeinfo_modify_write(ksdata, imagemap) + Shared.treeinfo_modify_write(ksdata, imagemap, self.log) except Exception as e: self.log.error( '[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' + diff --git a/iso/empanadas/empanadas/util/iso_utils.py b/iso/empanadas/empanadas/util/iso_utils.py index c61e0fe..f571ba6 100644 --- a/iso/empanadas/empanadas/util/iso_utils.py +++ b/iso/empanadas/empanadas/util/iso_utils.py @@ -813,7 +813,7 @@ class IsoBuild: } try: - Shared.treeinfo_modify_write(data, imagemap) + Shared.treeinfo_modify_write(data, imagemap, self.log) except Exception as e: self.log.error( '[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' + diff --git a/iso/empanadas/empanadas/util/shared.py b/iso/empanadas/empanadas/util/shared.py index 93aa986..8993f1d 100644 --- a/iso/empanadas/empanadas/util/shared.py +++ b/iso/empanadas/empanadas/util/shared.py @@ -107,7 +107,7 @@ class Shared: ti.dump(file_path) @staticmethod - def treeinfo_modify_write(data, imagemap): + def treeinfo_modify_write(data, imagemap, logger): """ Modifies a specific treeinfo with already available data. 
This is in the case of modifying treeinfo for primary repos or images. @@ -211,10 +211,13 @@ class Shared: del vari # Set default variant + logger.info('Writing treeinfo') ti.dump(treeinfo, main_variant=primary) # Set discinfo + logger.info('Writing discinfo') Shared.discinfo_write(timestamp, fullname, arch, discinfo) # Set media.repo + logger.info('Writing media.repo') Shared.media_repo_write(timestamp, fullname, mediarepo) @staticmethod From b78c4a774da7d0fbf0ccf06adeefdce6578406f0 Mon Sep 17 00:00:00 2001 From: Louis Abel Date: Sun, 3 Jul 2022 23:50:08 -0700 Subject: [PATCH 36/96] reverse for loops --- iso/empanadas/empanadas/util/dnf_utils.py | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/iso/empanadas/empanadas/util/dnf_utils.py b/iso/empanadas/empanadas/util/dnf_utils.py index ab46555..a40045d 100644 --- a/iso/empanadas/empanadas/util/dnf_utils.py +++ b/iso/empanadas/empanadas/util/dnf_utils.py @@ -1234,8 +1234,8 @@ class RepoSync: if r in self.iso_map['images']: variants_to_tweak.append(r) - for v in variants_to_tweak: - for a in arches_to_tree: + for a in arches_to_tree: + for v in variants_to_tweak: self.log.info( '[' + Color.BOLD + Color.GREEN + 'INFO' + Color.END + '] ' + 'Tweaking treeinfo for ' + a + ' ' + v @@ -1287,9 +1287,6 @@ class RepoSync: ) self.log.error(e) - data.clear() - imagemap.clear() - def run_compose_closeout(self): """ Closes out a compose as file. This ensures kickstart repositories are From d84a686102a36cd8e6be06ae8058e5a08145b9a7 Mon Sep 17 00:00:00 2001 From: Louis Abel Date: Mon, 4 Jul 2022 00:53:39 -0700 Subject: [PATCH 37/96] add live root logic before sign off --- iso/empanadas/empanadas/util/dnf_utils.py | 45 +++++++++++++++++++++-- iso/empanadas/empanadas/util/shared.py | 2 + 2 files changed, 43 insertions(+), 4 deletions(-) diff --git a/iso/empanadas/empanadas/util/dnf_utils.py b/iso/empanadas/empanadas/util/dnf_utils.py index a40045d..8f74a8b 100644 --- a/iso/empanadas/empanadas/util/dnf_utils.py +++ b/iso/empanadas/empanadas/util/dnf_utils.py @@ -1234,6 +1234,13 @@ class RepoSync: if r in self.iso_map['images']: variants_to_tweak.append(r) + if not len(variants_to_tweak) > 0: + self.log.info( + '[' + Color.BOLD + Color.GREEN + 'INFO' + Color.END + '] ' + + 'No treeinfo to tweak.' + ) + return + for a in arches_to_tree: for v in variants_to_tweak: self.log.info( @@ -1289,10 +1296,9 @@ class RepoSync: def run_compose_closeout(self): """ - Closes out a compose as file. This ensures kickstart repositories are - made, the treeinfo is modifed for the primary repository, syncs - work/isos to compose/isos, and combines all checksum files per arch - into a final CHECKSUM file. + Closes out a compose. This ensures the ISO's are synced from work/isos + to compose/isos, checks for live media and syncs as well from work/live + to compose/live, deploys final metadata. """ # latest-X-Y should exist at all times for this to work. 
        work_root = os.path.join(
@@ -1332,6 +1338,11 @@ class RepoSync:
                 "isos"
         )
 
+        live_root = os.path.join(
+                work_root,
+                "live"
+        )
+
         global_work_root = os.path.join(
                 work_root,
                 "global",
@@ -1366,6 +1377,32 @@ class RepoSync:
                 message
             )
 
+        if os.path.exists(live_root):
+            # Assumption: live images land beside the ISOs in the compose,
+            # so the destination mirrors sync_iso_root one level up.
+            sync_live_root = os.path.join(
+                    os.path.dirname(sync_iso_root),
+                    "live"
+            )
+
+            self.log.info(
+                '[' + Color.BOLD + Color.GREEN + 'INFO' + Color.END + '] ' +
+                'Starting to sync live images to compose'
+            )
+
+            if os.path.exists('/usr/bin/fpsync'):
+                message, ret = Shared.fpsync_method(live_root, sync_live_root, tmp_dir)
+            elif os.path.exists('/usr/bin/parallel') and os.path.exists('/usr/bin/rsync'):
+                message, ret = Shared.rsync_method(live_root, sync_live_root)
+
+            if ret != 0:
+                self.log.error(
+                    '[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' +
+                    message
+                )
+            else:
+                self.log.info(
+                    '[' + Color.BOLD + Color.GREEN + 'INFO' + Color.END + '] ' +
+                    message
+                )
+
+        # Combine all checksums here
+        # Deploy final metadata for a close out
+        self.deploy_metadata(sync_root)
+
 class SigRepoSync:
     """
     This helps us do reposync operations for SIG's. Do not use this for the
diff --git a/iso/empanadas/empanadas/util/shared.py b/iso/empanadas/empanadas/util/shared.py
index 8993f1d..bd9fe53 100644
--- a/iso/empanadas/empanadas/util/shared.py
+++ b/iso/empanadas/empanadas/util/shared.py
@@ -505,3 +505,5 @@ class Shared:
         switches = '-av --chown=10004:10005 --progress --relative --human-readable'
 
         os.makedirs(dest, exist_ok=True)
+
+        return 'Not available', 1
From 7365ca6b06bea78b4d1c5fed79ab80399ac352ea Mon Sep 17 00:00:00 2001
From: Neil Hanlon
Date: Mon, 4 Jul 2022 12:06:31 -0400
Subject: [PATCH 38/96] various cleanup of image build script

* reorder imports
* pass cli arguments directly into class for simplification and segregation
* reorder class variables
* Some error checking to ensure we're writing what we want to
* Allow copy(upload?)
step to be skipped * properly consume stdout and stderr from popen to avoid exception when FD is closed but trying to be read by the subprocess_log function --- .../empanadas/scripts/build_image.py | 87 ++++++++++++------- 1 file changed, 57 insertions(+), 30 deletions(-) diff --git a/iso/empanadas/empanadas/scripts/build_image.py b/iso/empanadas/empanadas/scripts/build_image.py index f03ec42..cc696b5 100644 --- a/iso/empanadas/empanadas/scripts/build_image.py +++ b/iso/empanadas/empanadas/scripts/build_image.py @@ -5,17 +5,16 @@ import argparse import datetime import json import logging -import subprocess -import sys -import time import os -import tempfile import pathlib import platform +import subprocess +import sys +import tempfile +import time -from botocore import args from attrs import define, Factory, field, asdict - +from botocore import args from jinja2 import Environment, FileSystemLoader, Template from typing import Callable, List, NoReturn, Optional, Tuple, IO, Union @@ -60,26 +59,27 @@ BUILDTIME = datetime.datetime.utcnow() class ImageBuild: architecture: Architecture = field() base_uuid: Optional[str] = field(default="") + cli_args: argparse.Namespace = field() command_args: List[str] = field(factory=list) common_args: List[str] = field(factory=list) debug: bool = field(default=False) + fedora_release: int = field() image_type: str = field() job_template: Optional[Template] = field(init=False) kickstart_arg: List[str] = field(factory=list) + metadata: pathlib.Path = field(init=False) out_type: str = field(init=False) outdir: pathlib.Path = field(init=False) outname: str = field(init=False) package_args: List[str] = field(factory=list) + release: int = field(default=0) + revision: Optional[int] = field() + stage_commands: Optional[List[List[Union[str,Callable]]]] = field(init=False) target_uuid: Optional[str] = field(default="") tdl_path: pathlib.Path = field(init=False) template: Template = field() type_variant: str = field(init=False) - stage_commands: Optional[List[List[Union[str,Callable]]]] = field(init=False) variant: Optional[str] = field() - revision: Optional[int] = field() - metadata: pathlib.Path = field(init=False) - fedora_release: int = field() - release: int = field(default=0) def __attrs_post_init__(self): self.tdl_path = self.render_icicle_template() @@ -121,6 +121,8 @@ class ImageBuild: self.target_uuid = o['target_uuid'] except json.decoder.JSONDecodeError as e: log.exception("Couldn't decode metadata file", e) + finally: + f.flush() def output_name(self): return f"Rocky-{self.architecture.version}-{self.type_variant}.{BUILDTIME.strftime('%Y%m%d')}.{self.release}.{self.architecture.name}" @@ -132,10 +134,10 @@ class ImageBuild: args_mapping = { "debug": "--debug" } - return [param for name, param in args_mapping.items() if getattr(results,name)] + return [param for name, param in args_mapping.items() if getattr(self.cli_args, name)] def _package_args(self) -> List[str]: - if results.type == "Container": + if self.image_type == "Container": return ["--parameter", "compress", "xz"] return [""] @@ -143,7 +145,7 @@ class ImageBuild: args = [] if self.image_type == "Container": args = ["--parameter", "offline_icicle", "true"] - if self.image_type == "GenericCloud": + if self.image_type in ["GenericCloud", "EC2"]: args = ["--parameter", "generate_icicle", "false"] return args @@ -173,7 +175,7 @@ class ImageBuild: architecture=self.architecture.name, fedora_version=self.fedora_release, iso8601date=BUILDTIME.strftime("%Y%m%d"), - installdir="kickstart" if 
results.kickstartdir else "os", + installdir="kickstart" if self.cli_args.kickstartdir else "os", major=self.architecture.version, release=self.release, size="10G", @@ -183,7 +185,11 @@ class ImageBuild: ) tmp.write(_template.encode()) tmp.flush() - return pathlib.Path(output) + output = pathlib.Path(output) + if not output.exists(): + log.error("Failed to write TDL template") + raise Exception("Failed to write TDL template") + return output def build_command(self) -> List[str]: build_command = ["imagefactory", *self.command_args, "base_image", *self.common_args, *self.kickstart_arg, self.tdl_path @@ -219,6 +225,7 @@ class ImageBuild: def package(self) -> int: # Some build types don't need to be packaged by imagefactory + # @TODO remove business logic if possible if self.image_type == "GenericCloud": self.target_uuid = self.base_uuid if hasattr(self, 'base_uuid') else "" @@ -243,15 +250,21 @@ class ImageBuild: return all(ret > 0 for ret in returns) - def copy(self) -> int: + def copy(self, skip=False) -> int: # move or unpack if necessary + log.info("Executing staging commands") if (stage := self.stage() > 0): raise Exception(stage) - ret, out, err, _ = self.runCmd(self.copy_command(), search=False) - return ret + if not skip: + log.info("Copying files to output directory") + ret, out, err, _ = self.runCmd(self.copy_command(), search=False) + return ret - def runCmd(self, command: List[Union[str, Callable]], search: bool = True) -> Tuple[int, Union[IO[bytes],None], Union[IO[bytes],None], Union[str,None]]: + log.info(f"Build complete! Output available in {self.outdir}/") + return 0 + + def runCmd(self, command: List[Union[str, Callable]], search: bool = True) -> Tuple[int, Union[bytes,None], Union[bytes,None], Union[str,None]]: prepared, _ = self.prepare_command(command) log.info(f"Running command: {' '.join(prepared)}") @@ -263,6 +276,7 @@ class ImageBuild: with subprocess.Popen(prepared, **kwargs) as p: uuid = None + # @TODO implement this as a callback? if search: for _, line in enumerate(p.stdout): # type: ignore ln = line.decode() @@ -270,12 +284,14 @@ class ImageBuild: uuid = ln.split(" ")[-1] log.debug(f"found uuid: {uuid}") - res = p.wait(), p.stdout, p.stdin, uuid + out, err = p.communicate() + res = p.wait(), out, err, uuid + if res[0] > 0: log.error(f"Problem while executing command: '{prepared}'") if search and not res[3]: log.error("UUID not found in stdout. 
Dumping stdout and stderr") - self.log_subprocess(res) + self.log_subprocess(res) return res @@ -292,14 +308,17 @@ class ImageBuild: r = [] return r, [r.append(c()) if (callable(c) and c.__name__ == '') else r.append(str(c)) for c in command_list] - def log_subprocess(self, result: Tuple[int, Union[IO[bytes], None], Union[IO[bytes], None], Union[str, None]]): + def log_subprocess(self, result: Tuple[int, Union[bytes, None], Union[bytes, None], Union[str, None]]): def log_lines(title, lines): log.info(f"====={title}=====") - for _, line in lines: - log.info(line.decode()) + log.info(lines.decode()) log.info(f"Command return code: {result[0]}") - log_lines("Command STDOUT", enumerate(result[1])) # type: ignore - log_lines("Command STDERR", enumerate(result[2])) # type: ignore + stdout = result[1] + stderr = result[2] + if stdout: + log_lines("Command STDOUT", stdout) + if stderr: + log_lines("Command STDERR", stderr) def render_kubernetes_job(self): commands = [self.build_command(), self.package_command(), self.copy_command()] @@ -320,9 +339,16 @@ class ImageBuild: def save(self): with open(pathlib.Path(self.outdir, "metadata.json"), "w") as f: - o = { name: getattr(self, name) for name in ["base_uuid", "target_uuid"] } - log.debug(o) - json.dump(o, f) + try: + o = { name: getattr(self, name) for name in ["base_uuid", "target_uuid"] } + log.debug(o) + json.dump(o, f) + except AttributeError as e: + log.error("Couldn't find attribute in object. Something is probably wrong", e) + except Exception as e: + log.exception(e) + finally: + f.flush() def run(): try: @@ -340,6 +366,7 @@ def run(): for architecture in arches: IB = ImageBuild( architecture=Architecture.New(architecture, major), + cli_args=results, debug=results.debug, fedora_release=rlvars['fedora_release'], image_type=results.type, From 04e7e1d1648ef7e2db8d3d8a2d12489289803844 Mon Sep 17 00:00:00 2001 From: Louis Abel Date: Mon, 4 Jul 2022 11:31:24 -0700 Subject: [PATCH 39/96] Bump to 0.2.0 * Add metadata with README information * Bump to 0.2.0 * Move more functions into shared --- iso/empanadas/empanadas/__init__.py | 2 +- iso/empanadas/empanadas/templates/README.tmpl | 25 ++- .../empanadas/templates/extraisobuild.tmpl.sh | 2 + .../empanadas/templates/isobuild.tmpl.sh | 2 + iso/empanadas/empanadas/util/dnf_utils.py | 12 ++ iso/empanadas/empanadas/util/iso_utils.py | 148 ++++-------------- iso/empanadas/empanadas/util/shared.py | 117 +++++++++++++- iso/empanadas/tests/test_empanadas.py | 2 +- 8 files changed, 187 insertions(+), 123 deletions(-) diff --git a/iso/empanadas/empanadas/__init__.py b/iso/empanadas/empanadas/__init__.py index b794fd4..7fd229a 100644 --- a/iso/empanadas/empanadas/__init__.py +++ b/iso/empanadas/empanadas/__init__.py @@ -1 +1 @@ -__version__ = '0.1.0' +__version__ = '0.2.0' diff --git a/iso/empanadas/empanadas/templates/README.tmpl b/iso/empanadas/empanadas/templates/README.tmpl index 726ab3a..4e4828b 100644 --- a/iso/empanadas/empanadas/templates/README.tmpl +++ b/iso/empanadas/empanadas/templates/README.tmpl @@ -2,5 +2,26 @@ These set of repositories (or "compose") is for {{ fullname }} and was generated using Empanadas {{ version }} from the SIG/Core Toolkit. As this is not a traditional compose, there will be things that you might be -expecting and do not see, or not expecting and do see.. While we attempted to -recreate a lot of those elements, it's not perfect. +expecting and do not see, or not expecting and do see. While we attempted to +recreate a lot of those elements, it's not perfect. 
+having more metadata and providing client libraries that can ingest this type
+of metadata that we produce for easy consumption.
+
+# Notes #
+
+## Checksums ##
+
+CHECKSUM Validation: https://github.com/rocky-linux/checksums
+
+Traditionally, we would to "sign" the checksum files with the current GPG key
+of a major release. However, due to how the new build system operates and for
+ensuring strong security within the build system as it pertains the signing
+keys, this is no longer possible. It was determined by SIG/Core or Release
+Engineering to instead provide verified signed commits using our keys with
+RESF/Rocky Linux email domain names to a proper git repository.
+
+With that being said, if you are looking for "verification" of the ISO
+checksums, it is highly recommended to visit the link above.
+
+These are *always* updated with new releases or new images. This includes
+live images as we release them.
diff --git a/iso/empanadas/empanadas/templates/extraisobuild.tmpl.sh b/iso/empanadas/empanadas/templates/extraisobuild.tmpl.sh
index 4d42901..df51333 100644
--- a/iso/empanadas/empanadas/templates/extraisobuild.tmpl.sh
+++ b/iso/empanadas/empanadas/templates/extraisobuild.tmpl.sh
@@ -3,6 +3,8 @@
 # under extreme circumstances should you be filling this out and running
 # manually.

+set -o pipefail
+
 # Vars
 MOCK_CFG="/var/tmp/lorax-{{ major }}.cfg"
 MOCK_ROOT="/var/lib/mock/{{ shortname|lower }}-{{ major }}-{{ arch }}"
diff --git a/iso/empanadas/empanadas/templates/isobuild.tmpl.sh b/iso/empanadas/empanadas/templates/isobuild.tmpl.sh
index 95184b6..28398e3 100644
--- a/iso/empanadas/empanadas/templates/isobuild.tmpl.sh
+++ b/iso/empanadas/empanadas/templates/isobuild.tmpl.sh
@@ -2,6 +2,8 @@
 # This is a template that is used to build ISO's for Rocky Linux. Only under
 # extreme circumstances should you be filling this out and running manually.

+set -o pipefail
+
 # Vars
 MOCK_CFG="/var/tmp/lorax-{{ major }}.cfg"
 MOCK_ROOT="/var/lib/mock/{{ shortname|lower }}-{{ major }}-{{ arch }}"
diff --git a/iso/empanadas/empanadas/util/dnf_utils.py b/iso/empanadas/empanadas/util/dnf_utils.py
index 8f74a8b..1f6ab3b 100644
--- a/iso/empanadas/empanadas/util/dnf_utils.py
+++ b/iso/empanadas/empanadas/util/dnf_utils.py
@@ -18,6 +18,7 @@ import json

 from jinja2 import Environment, FileSystemLoader

+import empanadas
 from empanadas.common import Color, _rootdir
 from empanadas.util import Shared

@@ -987,6 +988,17 @@ class RepoSync:
                 'Metadata files phase completed.'
             )

+        # Deploy README to metadata directory
+        readme_template = self.tmplenv.get_template('README.tmpl')
+        readme_output = readme_template.render(
+            fullname=self.fullname,
+            version=empanadas.__version__
+        )
+
+        with open(metadata_dir + '/README') as readme_file:
+            readme_file.write(readme_output)
+            readme_file.close()
+
     def deploy_treeinfo(self, repo, sync_root, arch):
         """
diff --git a/iso/empanadas/empanadas/util/iso_utils.py b/iso/empanadas/empanadas/util/iso_utils.py
index f571ba6..a9a80fa 100644
--- a/iso/empanadas/empanadas/util/iso_utils.py
+++ b/iso/empanadas/empanadas/util/iso_utils.py
@@ -15,11 +15,11 @@ import tarfile
 import shutil

 # lazy person's s3 parser
-import requests
-import json
-import xmltodict
+#import requests
+#import json
+#import xmltodict
 # if we can access s3
-import boto3
+#import boto3
 # relative_path, compute_file_checksums
 import kobo.shortcuts
 from fnmatch import fnmatch
@@ -122,8 +122,8 @@ class IsoBuild:
         self.s3_bucket = config['bucket']
         self.s3_bucket_url = config['bucket_url']

-        if s3:
-            self.s3 = boto3.client('s3')
+        #if s3:
+        #    self.s3 = boto3.client('s3')

         # arch specific
         self.hfs_compat = hfs_compat
@@ -352,9 +352,21 @@ class IsoBuild:
             'Determining the latest pulls...'
         )
         if self.s3:
-            latest_artifacts = self._s3_determine_latest()
+            latest_artifacts = Shared.s3_determine_latest(
+                self.s3_bucket,
+                self.release,
+                self.arches,
+                'tar.gz',
+                self.log
+            )
         else:
-            latest_artifacts = self._reqs_determine_latest()
+            latest_artifacts = Shared.reqs_determine_latest(
+                self.s3_bucket_url,
+                self.release,
+                self.arches,
+                'tar.gz',
+                self.log
+            )

         self.log.info(
             '[' + Color.BOLD + Color.GREEN + 'INFO' + Color.END + '] ' +
@@ -381,16 +393,20 @@ class IsoBuild:
                 'Downloading artifact for ' + Color.BOLD + arch + Color.END
             )
             if self.s3:
-                self._s3_download_artifacts(
+                Shared.s3_download_artifacts(
                     self.force_download,
+                    self.s3_bucket,
                     source_path,
-                    full_drop
+                    full_drop,
+                    self.log
                 )
             else:
-                self._reqs_download_artifacts(
+                Shared.reqs_download_artifacts(
                     self.force_download,
+                    self.s3_bucket_url,
                     source_path,
-                    full_drop
+                    full_drop,
+                    self.log
                 )
         self.log.info(
             '[' + Color.BOLD + Color.GREEN + 'INFO' + Color.END + '] ' +
@@ -464,111 +480,6 @@ class IsoBuild:
                 )
                 self._copy_nondisc_to_repo(self.force_unpack, arch, variant)

-
-    def _s3_determine_latest(self):
-        """
-        Using native s3, determine the latest artifacts and return a dict
-        """
-        temp = []
-        data = {}
-        try:
-            self.s3.list_objects(Bucket=self.s3_bucket)['Contents']
-        except:
-            self.log.error(
-                '[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' +
-                'Cannot access s3 bucket.'
-            )
-            raise SystemExit()
-
-        for y in self.s3.list_objects(Bucket=self.s3_bucket)['Contents']:
-            if 'tar.gz' in y['Key'] and self.release in y['Key']:
-                temp.append(y['Key'])
-
-        for arch in self.arches:
-            temps = []
-            for y in temp:
-                if arch in y:
-                    temps.append(y)
-            temps.sort(reverse=True)
-            data[arch] = temps[0]
-
-        return data
-
-    def _s3_download_artifacts(self, force_download, source, dest):
-        """
-        Download the requested artifact(s) via s3
-        """
-        if os.path.exists(dest):
-            if not force_download:
-                self.log.warn(
-                    '[' + Color.BOLD + Color.YELLOW + 'WARN' + Color.END + '] ' +
-                    'Artifact at ' + dest + ' already exists'
-                )
-                return
-
-        self.log.info('Downloading ({}) to: {}'.format(source, dest))
-        try:
-            self.s3.download_file(
-                Bucket=self.s3_bucket,
-                Key=source,
-                Filename=dest
-            )
-        except:
-            self.log.error('There was an issue downloading from %s' % self.s3_bucket)
-
-    def _reqs_determine_latest(self):
-        """
-        Using requests, determine the latest artifacts and return a list
-        """
-        temp = []
-        data = {}
-
-        try:
-            bucket_data = requests.get(self.s3_bucket_url)
-        except requests.exceptions.RequestException as e:
-            self.log.error('The s3 bucket http endpoint is inaccessible')
-            raise SystemExit(e)
-
-        resp = xmltodict.parse(bucket_data.content)
-
-        for y in resp['ListBucketResult']['Contents']:
-            if 'tar.gz' in y['Key'] and self.release in y['Key']:
-                temp.append(y['Key'])
-
-        for arch in self.arches:
-            temps = []
-            for y in temp:
-                if arch in y:
-                    temps.append(y)
-            temps.sort(reverse=True)
-            data[arch] = temps[0]
-
-        return data
-
-    def _reqs_download_artifacts(self, force_download, source, dest):
-        """
-        Download the requested artifact(s) via requests only
-        """
-        if os.path.exists(dest):
-            if not force_download:
-                self.log.warn(
-                    '[' + Color.BOLD + Color.YELLOW + 'WARN' + Color.END + '] ' +
-                    'Artifact at ' + dest + ' already exists'
-                )
-                return
-        unurl = self.s3_bucket_url + '/' + source
-
-        self.log.info('Downloading ({}) to: {}'.format(source, dest))
-        try:
-            with requests.get(unurl, allow_redirects=True) as r:
-                with open(dest, 'wb') as f:
-                    f.write(r.content)
-                    f.close()
-                r.close()
-        except requests.exceptions.RequestException as e:
-            self.log.error('There was a problem downloading the artifact')
-            raise SystemExit(e)
-
     def _unpack_artifacts(self, force_unpack, arch, tarball):
         """
         Unpack the requested artifacts(s)
@@ -729,7 +640,8 @@ class IsoBuild:
         if not os.path.exists(pathway):
             self.log.error(
                 '[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' +
-                'Repo and Image variant do NOT match'
+                'Repo and Image variant either does NOT match or does ' +
+                'NOT exist. Are you sure you have synced the repository?'
             )

         if not force_unpack:
diff --git a/iso/empanadas/empanadas/util/shared.py b/iso/empanadas/empanadas/util/shared.py
index bd9fe53..58525f8 100644
--- a/iso/empanadas/empanadas/util/shared.py
+++ b/iso/empanadas/empanadas/util/shared.py
@@ -6,7 +6,11 @@ import hashlib
 import shlex
 import subprocess
 import yaml
+import requests
+import boto3
+import xmltodict
 import productmd.treeinfo
+import empanadas

 from empanadas.common import Color

 class ArchCheck:
@@ -233,7 +237,7 @@ class Shared:
         metadata = {
             "header": {
                 "name": "empanadas",
-                "version": "0.2.0",
+                "version": empanadas.__version__,
                 "type": "toolkit",
                 "maintainer": "SIG/Core"
             },
@@ -507,3 +511,114 @@ class Shared:
             os.makedirs(dest, exist_ok=True)

         return 'Not available', 1
+
+    @staticmethod
+    def s3_determine_latest(s3_bucket, release, arches, filetype, logger):
+        """
+        Using native s3, determine the latest artifacts and return a dict
+        """
+        temp = []
+        data = {}
+        s3 = boto3.client('s3')
+
+        try:
+            s3.list_objects(Bucket=s3_bucket)['Contents']
+        except:
+            logger.error(
+                '[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' +
+                'Cannot access s3 bucket.'
+            )
+            raise SystemExit()
+
+        for y in s3.list_objects(Bucket=s3_bucket)['Contents']:
+            if filetype in y['Key'] and release in y['Key']:
+                temp.append(y['Key'])
+
+        for arch in arches:
+            temps = []
+            for y in temp:
+                if arch in y:
+                    temps.append(y)
+            temps.sort(reverse=True)
+            data[arch] = temps[0]
+
+        return data
+
+    @staticmethod
+    def s3_download_artifacts(force_download, s3_bucket, source, dest, logger):
+        """
+        Download the requested artifact(s) via s3
+        """
+        s3 = boto3.client('s3')
+        if os.path.exists(dest):
+            if not force_download:
+                logger.warn(
+                    '[' + Color.BOLD + Color.YELLOW + 'WARN' + Color.END + '] ' +
+                    'Artifact at ' + dest + ' already exists'
+                )
+                return
+
+        logger.info('Downloading ({}) to: {}'.format(source, dest))
+        try:
+            s3.download_file(
+                Bucket=s3_bucket,
+                Key=source,
+                Filename=dest
+            )
+        except:
+            logger.error('There was an issue downloading from %s' % s3_bucket)
+
+    @staticmethod
+    def reqs_determine_latest(s3_bucket_url, release, arches, filetype, logger):
+        """
+        Using requests, determine the latest artifacts and return a list
+        """
+        temp = []
+        data = {}
+
+        try:
+            bucket_data = requests.get(s3_bucket_url)
+        except requests.exceptions.RequestException as e:
+            logger.error('The s3 bucket http endpoint is inaccessible')
+            raise SystemExit(e)
+
+        resp = xmltodict.parse(bucket_data.content)
+
+        for y in resp['ListBucketResult']['Contents']:
+            if filetype in y['Key'] and release in y['Key']:
+                temp.append(y['Key'])
+
+        for arch in arches:
+            temps = []
+            for y in temp:
+                if arch in y:
+                    temps.append(y)
+            temps.sort(reverse=True)
+            data[arch] = temps[0]
+
+        return data
+
+    @staticmethod
+    def reqs_download_artifacts(force_download, s3_bucket_url, source, dest, logger):
+        """
+        Download the requested artifact(s) via requests only
+        """
+        if os.path.exists(dest):
+            if not force_download:
+                logger.warn(
+                    '[' + Color.BOLD + Color.YELLOW + 'WARN' + Color.END + '] ' +
+                    'Artifact at ' + dest + ' already exists'
+                )
+                return
+        unurl = s3_bucket_url + '/' + source
+
+        logger.info('Downloading ({}) to: {}'.format(source, dest))
+        try:
+            with requests.get(unurl, allow_redirects=True) as r:
+                with open(dest, 'wb') as f:
+                    f.write(r.content)
+                    f.close()
+                r.close()
+        except requests.exceptions.RequestException as e:
+            logger.error('There was a problem downloading the artifact')
+            raise SystemExit(e)
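# Illustrative only, not part of the patch: how a caller can drive the
# relocated static helpers directly. The bucket URL, release, and arch list
# are placeholders; the signatures match the new Shared methods above.
import logging

from empanadas.util import Shared

log = logging.getLogger(__name__)
latest = Shared.reqs_determine_latest(
    'https://bucket.example.org',  # s3_bucket_url (placeholder)
    '9.0',                         # release
    ['x86_64', 'aarch64'],         # arches
    'tar.gz',                      # filetype
    log,
)
for arch, key in latest.items():
    Shared.reqs_download_artifacts(False, 'https://bucket.example.org', key, f'/tmp/{arch}.tar.gz', log)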
diff --git a/iso/empanadas/tests/test_empanadas.py b/iso/empanadas/tests/test_empanadas.py
index 4561768..195fc4b 100644
--- a/iso/empanadas/tests/test_empanadas.py
+++ b/iso/empanadas/tests/test_empanadas.py
@@ -2,4 +2,4 @@
 from empanadas import __version__

 def test_version():
-    assert __version__ == '0.1.0'
+    assert __version__ == '0.2.0'

From 55abe763ef17c99c4a055484803d8992da1c0bab Mon Sep 17 00:00:00 2001
From: Neil Hanlon
Date: Mon, 4 Jul 2022 14:41:39 -0400
Subject: [PATCH 40/96] Refactor Architecture class and fix names for output files

---
 iso/empanadas/empanadas/common.py              | 11 +++++++----
 iso/empanadas/empanadas/scripts/build_image.py | 16 +++++++---------
 2 files changed, 14 insertions(+), 13 deletions(-)

diff --git a/iso/empanadas/empanadas/common.py b/iso/empanadas/empanadas/common.py
index 796f472..01e90b8 100644
--- a/iso/empanadas/empanadas/common.py
+++ b/iso/empanadas/empanadas/common.py
@@ -118,14 +118,17 @@ def valid_type_variant(_type: str, variant: str="") -> bool:
     return True

 from attrs import define, field

-@define
+@define(kw_only=True)
 class Architecture:
     name: str = field()
     version: str = field()
+    major: int = field(converter=int)
+    minor: int = field(converter=int)

     @classmethod
-    def New(cls, architecture: str, version: int):
-        if architecture not in rldict[version]["allowed_arches"]:
+    def from_version(cls, architecture: str, version: str):
+        major, minor = str.split(version, ".")
+        if architecture not in rldict[major]["allowed_arches"]:
             print("Invalid architecture/version combo, skipping")
             exit()
-        return cls(architecture, version)
+        return cls(name=architecture, version=version, major=major, minor=minor)
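# Illustrative only, not part of the patch: what the refactored constructor
# yields for a config whose revision is "9.0" (the values are examples).
# The attrs converter=int turns the split string halves into integers.
arch = Architecture.from_version("x86_64", "9.0")
assert arch.name == "x86_64"
assert arch.version == "9.0"
assert (arch.major, arch.minor) == (9, 0)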
diff --git a/iso/empanadas/empanadas/scripts/build_image.py b/iso/empanadas/empanadas/scripts/build_image.py
index cc696b5..f13afe3 100644
--- a/iso/empanadas/empanadas/scripts/build_image.py
+++ b/iso/empanadas/empanadas/scripts/build_image.py
@@ -73,7 +73,6 @@ class ImageBuild:
     outname: str = field(init=False)
     package_args: List[str] = field(factory=list)
     release: int = field(default=0)
-    revision: Optional[int] = field()
     stage_commands: Optional[List[List[Union[str,Callable]]]] = field(init=False)
     target_uuid: Optional[str] = field(default="")
     tdl_path: pathlib.Path = field(init=False)
@@ -100,7 +99,7 @@ class ImageBuild:
             self.stage_commands = [
                 ["tar", "-C", f"{self.outdir}", "--strip-components=1", "-x", "-f", lambda: f"{STORAGE_DIR}/{self.target_uuid}.body", "*/layer.tar"]
             ]
-        if self.image_type == "GenericCloud":
+        if self.image_type in ["GenericCloud", "EC2"]:
             self.stage_commands = [
                 ["qemu-img", "convert", "-f", "raw", "-O", "qcow2", lambda: f"{STORAGE_DIR}/{self.target_uuid}.body", f"{self.outdir}/{self.outname}.qcow2"]
             ]
@@ -125,7 +124,7 @@ class ImageBuild:
             f.flush()

     def output_name(self):
-        return f"Rocky-{self.architecture.version}-{self.type_variant}.{BUILDTIME.strftime('%Y%m%d')}.{self.release}.{self.architecture.name}"
+        return f"Rocky-{self.architecture.major}-{self.type_variant}-{self.architecture.version}-{BUILDTIME.strftime('%Y%m%d')}.{self.release}.{self.architecture.name}"

     def type_variant_name(self):
         return self.image_type if not self.variant else f"{self.image_type}-{self.variant.capitalize()}"
@@ -156,7 +155,7 @@ class ImageBuild:
         return mapping[self.image_type] if self.image_type in mapping.keys() else ''

     def kickstart_imagefactory_args(self) -> List[str]:
-        kickstart_path = pathlib.Path(f"{KICKSTART_PATH}/Rocky-{self.architecture.version}-{self.type_variant}.ks")
+        kickstart_path = pathlib.Path(f"{KICKSTART_PATH}/Rocky-{self.architecture.major}-{self.type_variant}.ks")

         if not kickstart_path.is_file():
             log.warn(f"Kickstart file is not available: {kickstart_path}")
@@ -176,12 +175,12 @@ class ImageBuild:
             fedora_version=self.fedora_release,
             iso8601date=BUILDTIME.strftime("%Y%m%d"),
             installdir="kickstart" if self.cli_args.kickstartdir else "os",
-            major=self.architecture.version,
+            major=self.architecture.major,
             release=self.release,
             size="10G",
             type=self.image_type,
             utcnow=BUILDTIME,
-            version_variant=self.revision if not self.variant else f"{self.revision}-{self.variant}",
+            version_variant=self.architecture.version if not self.variant else f"{self.architecture.version}-{self.variant}",
         )
         tmp.write(_template.encode())
         tmp.flush()
@@ -226,7 +225,7 @@ class ImageBuild:
     def package(self) -> int:
         # Some build types don't need to be packaged by imagefactory
         # @TODO remove business logic if possible
-        if self.image_type == "GenericCloud":
+        if self.image_type in ["GenericCloud", "EC2"]:
             self.target_uuid = self.base_uuid if hasattr(self, 'base_uuid') else ""

         if self.target_uuid:
@@ -365,13 +364,12 @@ def run():

     for architecture in arches:
         IB = ImageBuild(
-            architecture=Architecture.New(architecture, major),
+            architecture=Architecture.from_version(architecture, rlvars['revision']),
             cli_args=results,
             debug=results.debug,
             fedora_release=rlvars['fedora_release'],
             image_type=results.type,
             release=results.release if results.release else 0,
-            revision=rlvars['revision'],
             template=tdl_template,
             variant=results.variant,
         )

From 9536ab07438e4c69740838f27c5dffa748d06600 Mon Sep 17 00:00:00 2001
From: Neil Hanlon
Date: Mon, 4 Jul 2022 14:44:17 -0400
Subject: [PATCH 41/96] Update CI for both image types for now

---
 .github/workflows/imagefactory-image.yml | 47 ++++++++++++++++++++++++
 .github/workflows/mix-empanadas.yml      |  4 +-
 2 files changed, 49 insertions(+), 2 deletions(-)
 create mode 100644 .github/workflows/imagefactory-image.yml

diff --git a/.github/workflows/imagefactory-image.yml b/.github/workflows/imagefactory-image.yml
new file mode 100644
index 0000000..8934bb9
--- /dev/null
+++ b/.github/workflows/imagefactory-image.yml
@@ -0,0 +1,47 @@
+---
+name: Build empanada container images
+
+on:
+  push:
+    branches: [ $default-branch, "devel" ]
+  pull_request:
+    branches: [ $default-branch ]
+  workflow_dispatch:
+
+jobs:
+  buildx:
+    runs-on:
+      - ubuntu-latest
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v2
+
+      - name: Set up QEMU
+        uses: docker/setup-qemu-action@v1
+      # https://github.com/docker/setup-buildx-action
+      - name: Set up Docker Buildx
+        id: buildx
+        uses: docker/setup-buildx-action@v1
+        with:
+          install: true
+
+      - name: Login to ghcr
+        if: github.event_name != 'pull_request'
+        uses: docker/login-action@v1
+        with:
+          registry: ghcr.io
+          username: ${{ github.repository_owner }}
+          password: ${{ secrets.GITHUB_TOKEN }}
+
+      - name: Build and push
+        id: docker_build
+        uses: docker/build-push-action@v2
+        with:
+          builder: ${{ steps.buildx.outputs.name }}
+          platforms: linux/amd64,linux/arm64,linux/s390x,linux/ppc64le
+          context: ./iso/empanadas
+          file: ./iso/empanadas/Containerfile.imagebuild
+          push: ${{ github.event_name != 'pull_request' }}
+          tags: ghcr.io/rocky-linux/empanadas-imagebuild:latest
+          cache-from: type=gha
+          cache-to: type=gha,mode=max
diff --git a/.github/workflows/mix-empanadas.yml b/.github/workflows/mix-empanadas.yml
index ebae78d..7ee3791 100644
--- a/.github/workflows/mix-empanadas.yml
+++ b/.github/workflows/mix-empanadas.yml
@@ -40,8 +40,8 @@ jobs:
           builder: ${{ steps.buildx.outputs.name }}
           platforms: linux/amd64,linux/arm64,linux/s390x,linux/ppc64le
           context: ./iso/empanadas
-          file: ./iso/empanadas/Containerfile
+          file: ./iso/empanadas/Containerfile.imagefactory
           push: ${{ github.event_name != 'pull_request' }}
-          tags: ghcr.io/rocky-linux/sig-core-toolkit:latest
+          tags: ghcr.io/rocky-linux/empanadas-imagefactory:latest
           cache-from: type=gha
           cache-to: type=gha,mode=max

From 843f412923fb567287a0e0951a2741db8b9458e0 Mon Sep 17 00:00:00 2001
From: Neil Hanlon
Date: Mon, 4 Jul 2022 14:47:26 -0400
Subject: [PATCH 42/96] Fix names for builds

---
 .github/workflows/imagefactory-image.yml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/.github/workflows/imagefactory-image.yml b/.github/workflows/imagefactory-image.yml
index 8934bb9..7ee3791 100644
--- a/.github/workflows/imagefactory-image.yml
+++ b/.github/workflows/imagefactory-image.yml
@@ -40,8 +40,8 @@ jobs:
           builder: ${{ steps.buildx.outputs.name }}
           platforms: linux/amd64,linux/arm64,linux/s390x,linux/ppc64le
           context: ./iso/empanadas
-          file: ./iso/empanadas/Containerfile.imagebuild
+          file: ./iso/empanadas/Containerfile.imagefactory
           push: ${{ github.event_name != 'pull_request' }}
-          tags: ghcr.io/rocky-linux/empanadas-imagebuild:latest
+          tags: ghcr.io/rocky-linux/empanadas-imagefactory:latest
           cache-from: type=gha
           cache-to: type=gha,mode=max

From 340a6a337715e4b70adf413df97967bfed88a5a3 Mon Sep 17 00:00:00 2001
From: Louis Abel
Date: Mon, 4 Jul 2022 12:23:04 -0700
Subject: [PATCH 43/96] add checksum combine for close out

---
 iso/empanadas/empanadas/templates/README.tmpl | 11 ++++++-----
 iso/empanadas/empanadas/util/dnf_utils.py     | 11 +++++++++++
 2 files changed, 17 insertions(+), 5 deletions(-)

diff --git a/iso/empanadas/empanadas/templates/README.tmpl b/iso/empanadas/empanadas/templates/README.tmpl
index 4e4828b..498653c 100644
--- a/iso/empanadas/empanadas/templates/README.tmpl
+++ b/iso/empanadas/empanadas/templates/README.tmpl
@@ -13,15 +13,16 @@

 CHECKSUM Validation: https://github.com/rocky-linux/checksums

-Traditionally, we would to "sign" the checksum files with the current GPG key
-of a major release. However, due to how the new build system operates and for
+Traditionally, we would "sign" the checksum files with the current GPG key of a
+major release. However, due to how the new build system operates and for
 ensuring strong security within the build system as it pertains the signing
 keys, this is no longer possible. It was determined by SIG/Core or Release
 Engineering to instead provide verified signed commits using our keys with
-RESF/Rocky Linux email domain names to a proper git repository.
+RESF/Rocky Linux email domain names to a proper git repository. Our signing keys
+are attached to our GitHub and RESF Git Service profiles.

-With that being said, if you are looking for "verification" of the ISO
-checksums, it is highly recommended to visit the link above.
+If you are looking for "verification" of the ISO checksums and were expecting a
+`CHECKSUM.sig`, it is highly recommended to visit the link above instead.

 These are *always* updated with new releases or new images. This includes
 live images as we release them.
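# Illustrative only, not part of the patch: a standalone rendering of the
# combine step the dnf_utils.py hunk below adds. Every per-image *.CHECKSUM
# file in an arch's iso directory is concatenated into one CHECKSUM file;
# the path used here is a placeholder.
import glob
import os

iso_arch_root = '/mnt/compose/9/latest-Rocky-9/compose/isos/x86_64'  # placeholder
with open(os.path.join(iso_arch_root, 'CHECKSUM'), 'w+', encoding='utf-8') as fp:
    for check in glob.iglob(iso_arch_root + '/*.CHECKSUM'):
        with open(check, 'r', encoding='utf-8') as single:
            fp.write(single.read())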
diff --git a/iso/empanadas/empanadas/util/dnf_utils.py b/iso/empanadas/empanadas/util/dnf_utils.py
index 1f6ab3b..7e9c9df 100644
--- a/iso/empanadas/empanadas/util/dnf_utils.py
+++ b/iso/empanadas/empanadas/util/dnf_utils.py
@@ -14,6 +14,7 @@ import shutil
 import time
 import re
 import json
+import glob
 #import pipes

 from jinja2 import Environment, FileSystemLoader
@@ -1412,6 +1413,16 @@ class RepoSync:
             )

         # Combine all checksums here
+        for arch in self.arches:
+            iso_arch_root = os.path.join(sync_iso_root, arch)
+            iso_arch_checksum = os.path.join(iso_arch_root, 'CHECKSUM')
+            with open(iso_arch_checksum, 'w+', encoding='utf-8') as fp:
+                for check in glob.iglob(iso_arch_root + '/*.CHECKSUM'):
+                    with open(check, 'r', encoding='utf-8') as sum:
+                        for line in sum:
+                            fp.write(line)
+            fp.close()
+
         # Deploy final metadata for a close out
         self.deploy_metadata(sync_root)

From b89ebe777a057cdd7220d2300a45fc176de6ff19 Mon Sep 17 00:00:00 2001
From: Louis Abel
Date: Mon, 4 Jul 2022 12:36:15 -0700
Subject: [PATCH 44/96] Finish and resolve RLBT#0000134

---
 iso/empanadas/empanadas/util/dnf_utils.py | 21 +++++++++++++++++++--
 1 file changed, 19 insertions(+), 2 deletions(-)

diff --git a/iso/empanadas/empanadas/util/dnf_utils.py b/iso/empanadas/empanadas/util/dnf_utils.py
index 7e9c9df..c7ced10 100644
--- a/iso/empanadas/empanadas/util/dnf_utils.py
+++ b/iso/empanadas/empanadas/util/dnf_utils.py
@@ -1356,6 +1356,11 @@ class RepoSync:
             "live"
         )

+        sync_live_root = os.path.join(
+            sync_root,
+            'live'
+        )
+
         global_work_root = os.path.join(
             work_root,
             "global",
@@ -1397,9 +1402,9 @@ class RepoSync:
         )

         if os.path.exists('/usr/bin/fpsync'):
-            message, ret = Shared.fpsync_method(iso_root, sync_iso_root, tmp_dir)
+            message, ret = Shared.fpsync_method(live_root, sync_live_root, tmp_dir)
         elif os.path.exists('/usr/bin/parallel') and os.path.exists('/usr/bin/rsync'):
-            message, ret = Shared.rsync_method(iso_root, sync_iso_root)
+            message, ret = Shared.rsync_method(live_root, sync_live_root)

         if ret != 0:
             self.log.error(
@@ -1421,8 +1426,20 @@ class RepoSync:
                 with open(check, 'r', encoding='utf-8') as sum:
                     for line in sum:
                         fp.write(line)
+                    sum.close()
             fp.close()

+            if arch == 'x86_64' and os.path.exists(sync_live_root):
+                live_arch_root = os.path.join(sync_live_root, arch)
+                live_arch_checksum = os.path.join(live_arch_root, 'CHECKSUM')
+                with open(live_arch_checksum, 'w+', encoding='utf-8') as lp:
+                    for lcheck in glob.iglob(live_arch_root + '/*.CHECKSUM'):
+                        with open(lcheck, 'r', encoding='utf-8') as sum:
+                            for line in sum:
+                                lp.write(line)
+                            sum.close()
+                    lp.close()
+
         # Deploy final metadata for a close out
         self.deploy_metadata(sync_root)

From 1c90edaa70df0f0d157ced136ac83180b6269dd5 Mon Sep 17 00:00:00 2001
From: Louis Abel
Date: Mon, 4 Jul 2022 13:10:48 -0700
Subject: [PATCH 45/96] fix readme and bump to RC2

---
 iso/empanadas/empanadas/configs/el9.yaml  |  2 +-
 iso/empanadas/empanadas/util/dnf_utils.py |  2 +-
 sync/common_9                             |  2 +-
 sync/sync-to-staging-9.sh                 | 40 +++++++++++++++++++++++
 4 files changed, 43 insertions(+), 3 deletions(-)
 create mode 100644 sync/sync-to-staging-9.sh

diff --git a/iso/empanadas/empanadas/configs/el9.yaml b/iso/empanadas/empanadas/configs/el9.yaml
index 786c46a..e63cd6e 100644
--- a/iso/empanadas/empanadas/configs/el9.yaml
+++ b/iso/empanadas/empanadas/configs/el9.yaml
@@ -2,7 +2,7 @@
 '9':
   fullname: 'Rocky Linux 9.0'
   revision: '9.0'
-  rclvl: 'RC1'
+  rclvl: 'RC2'
   major: '9'
   minor: '0'
   profile: '9'
diff --git a/iso/empanadas/empanadas/util/dnf_utils.py b/iso/empanadas/empanadas/util/dnf_utils.py
index c7ced10..336e782 100644
--- a/iso/empanadas/empanadas/util/dnf_utils.py
+++ b/iso/empanadas/empanadas/util/dnf_utils.py
@@ -996,7 +996,7 @@ class RepoSync:
             version=empanadas.__version__
         )

-        with open(metadata_dir + '/README') as readme_file:
+        with open(metadata_dir + '/README', 'w+', encoding='utf-8') as readme_file:
             readme_file.write(readme_output)
             readme_file.close()
diff --git a/sync/common_9 b/sync/common_9
index 8b1cbb8..777ae46 100644
--- a/sync/common_9
+++ b/sync/common_9
@@ -9,7 +9,7 @@ MAJOR="${REVISION:0:1}"
 MINOR="${REVISION:2:1}"

 # comment or blank if needed
-APPEND_TO_DIR="-RC1"
+APPEND_TO_DIR="-RC2"

 STAGING_ROOT="/mnt/repos-staging"
 PRODUCTION_ROOT="/mnt/repos-production"
diff --git a/sync/sync-to-staging-9.sh b/sync/sync-to-staging-9.sh
new file mode 100644
index 0000000..ffcc6f1
--- /dev/null
+++ b/sync/sync-to-staging-9.sh
@@ -0,0 +1,40 @@
+#!/bin/bash
+
+# Source common variables
+# shellcheck disable=SC2046,1091,1090
+source "$(dirname "$0")/common"
+
+if [[ $# -eq 0 ]]; then
+  echo "You must specify a short name."
+  exit 1
+fi
+
+# Major Version (eg, 8)
+MAJ=${RLVER}
+# Short name (eg, NFV, extras, Rocky, gluster9)
+SHORT=${1}
+PROFILE=${2}
+
+cd "/mnt/compose/${MAJ}/latest-${SHORT}-${MAJ}${PROFILE}/compose" || { echo "Failed to change directory"; ret_val=1; exit 1; }
+ret_val=$?
+
+if [ $ret_val -eq "0" ]; then
+  TARGET="${STAGING_ROOT}/${CATEGORY_STUB}/${REV}"
+  mkdir -p "${TARGET}"
+  # disabling because none of our files should be starting with dashes. If they
+  # are something is *seriously* wrong here.
+  # shellcheck disable=SC2035
+  #sudo -l && find **/* -maxdepth 0 -type d | parallel --will-cite -j 18 sudo rsync -av --chown=10004:10005 --progress --relative --human-readable \
+  #    {} "${TARGET}"
+  sudo -l && fpsync -o '-av --numeric-ids --no-compress --chown=10004:10005' -n 18 -t /mnt/compose/partitions "/mnt/compose/${MAJ}/latest-${SHORT}-${MAJ}${PROFILE}/compose/" "${TARGET}/"
+
+  # This is temporary until we implement rsync into empanadas
+  #if [ -f "COMPOSE_ID" ]; then
+  #  cp COMPOSE_ID "${TARGET}"
+  #  chown 10004:10005 "${TARGET}/COMPOSE_ID"
+  #fi
+
+  #if [ -d "metadata" ]; then
+  #  rsync -av --chown=10004:10005 --progress --relative --human-readable metadata "${TARGET}"
+  #fi
+fi

From fe4daffb253804604a7fd902b57c579dc9778e29 Mon Sep 17 00:00:00 2001
From: Neil Hanlon
Date: Mon, 4 Jul 2022 16:22:12 -0400
Subject: [PATCH 46/96] Changes for oz/imagefactory config naming

---
 iso/empanadas/empanadas/scripts/build_image.py        | 1 +
 iso/empanadas/empanadas/templates/icicle/tdl.xml.tmpl | 4 ++--
 2 files changed, 3 insertions(+), 2 deletions(-)

diff --git a/iso/empanadas/empanadas/scripts/build_image.py b/iso/empanadas/empanadas/scripts/build_image.py
index f13afe3..e8b031c 100644
--- a/iso/empanadas/empanadas/scripts/build_image.py
+++ b/iso/empanadas/empanadas/scripts/build_image.py
@@ -176,6 +176,7 @@ class ImageBuild:
             iso8601date=BUILDTIME.strftime("%Y%m%d"),
             installdir="kickstart" if self.cli_args.kickstartdir else "os",
             major=self.architecture.major,
+            minor=self.architecture.minor,
             release=self.release,
             size="10G",
             type=self.image_type,
diff --git a/iso/empanadas/empanadas/templates/icicle/tdl.xml.tmpl b/iso/empanadas/empanadas/templates/icicle/tdl.xml.tmpl
index 5f0aa8b..73f99f7 100644
--- a/iso/empanadas/empanadas/templates/icicle/tdl.xml.tmpl
+++ b/iso/empanadas/empanadas/templates/icicle/tdl.xml.tmpl
@@ -1,8 +1,8 @@