From 541ee349aa71b13f2f1da1c9dabc8fca256f5cba Mon Sep 17 00:00:00 2001 From: nazunalika Date: Fri, 10 Jun 2022 16:14:22 -0700 Subject: [PATCH 01/64] all repo isn't hashed --- iso/py/util/dnf_utils.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/iso/py/util/dnf_utils.py b/iso/py/util/dnf_utils.py index 4354e45..b1fece3 100644 --- a/iso/py/util/dnf_utils.py +++ b/iso/py/util/dnf_utils.py @@ -521,6 +521,12 @@ class RepoSync: os.makedirs(dest_path, exist_ok=True) config_file = open(fname, "w+") for repo in self.repos: + + # The all repo doesn't get hashed. Likely due to how long it takes + # to hash in the first place? + if 'all' in repo: + prehashed = '' + constructed_url = '{}/{}/repo/{}{}/$basearch'.format( self.repo_base_url, self.project_id, From 0b70e6157a6ed2f0d9da82f89006d2415b8005d4 Mon Sep 17 00:00:00 2001 From: nazunalika Date: Fri, 10 Jun 2022 16:22:30 -0700 Subject: [PATCH 02/64] all was missed, it can be hashed later --- iso/py/util/dnf_utils.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/iso/py/util/dnf_utils.py b/iso/py/util/dnf_utils.py index b1fece3..26a8efe 100644 --- a/iso/py/util/dnf_utils.py +++ b/iso/py/util/dnf_utils.py @@ -522,10 +522,8 @@ class RepoSync: config_file = open(fname, "w+") for repo in self.repos: - # The all repo doesn't get hashed. Likely due to how long it takes - # to hash in the first place? - if 'all' in repo: - prehashed = '' + #if 'all' in repo: + # prehashed = '' constructed_url = '{}/{}/repo/{}{}/$basearch'.format( self.repo_base_url, From 910901dacb150fa9bc3414990e90ba403200e5f3 Mon Sep 17 00:00:00 2001 From: nazunalika Date: Fri, 10 Jun 2022 19:59:43 -0700 Subject: [PATCH 03/64] Add dnf_plugin_cmd to logs --- iso/py/util/dnf_utils.py | 31 ++++++++++++++++++++++++++++--- 1 file changed, 28 insertions(+), 3 deletions(-) diff --git a/iso/py/util/dnf_utils.py b/iso/py/util/dnf_utils.py index 26a8efe..062ad6b 100644 --- a/iso/py/util/dnf_utils.py +++ b/iso/py/util/dnf_utils.py @@ -328,19 +328,36 @@ class RepoSync: self.date_stamp ) + dnf_plugin_cmd = ("/usr/bin/dnf install dnf-plugins-core " + "-y | tee -a {}/{}-{}-{}.log").format( + log_root, + repo_name, + a, + self.date_stamp + ) + + + debug_dnf_plugin_cmd = ("/usr/bin/dnf install dnf-plugins-core " + "-y | tee -a {}/{}-{}-debug-{}.log").format( + log_root, + repo_name, + a, + self.date_stamp + ) + entry_point_open = open(entry_point_sh, "w+") debug_entry_point_open = open(debug_entry_point_sh, "w+") entry_point_open.write('#!/bin/bash\n') entry_point_open.write('set -o pipefail\n') entry_point_open.write(arch_force_cp + '\n') - entry_point_open.write('/usr/bin/dnf install dnf-plugins-core -y\n') + entry_point_open.write(dnf_plugin_cmd + '\n') entry_point_open.write(sync_cmd + '\n') debug_entry_point_open.write('#!/bin/bash\n') debug_entry_point_open.write('set -o pipefail\n') debug_entry_point_open.write(arch_force_cp + '\n') - debug_entry_point_open.write('/usr/bin/dnf install dnf-plugins-core -y\n') + debug_entry_point_open.write(debug_dnf_plugin_cmd + '\n') debug_entry_point_open.write(debug_sync_cmd + '\n') entry_point_open.close() @@ -375,10 +392,18 @@ class RepoSync: repo_name, self.date_stamp ) + + source_dnf_plugin_cmd = ("/usr/bin/dnf install dnf-plugins-core " + "-y | tee -a {}/{}-source-{}.log").format( + log_root, + repo_name, + self.date_stamp + ) + source_entry_point_open = open(source_entry_point_sh, "w+") source_entry_point_open.write('#!/bin/bash\n') source_entry_point_open.write('set -o pipefail\n') - 
source_entry_point_open.write('/usr/bin/dnf install dnf-plugins-core -y\n') + source_entry_point_open.write(source_dnf_plugin_cmd + '\n') source_entry_point_open.write(source_sync_cmd + '\n') source_entry_point_open.close() os.chmod(source_entry_point_sh, 0o755) From 6c25a812415aa406df93dd0fc761bac4f236c61a Mon Sep 17 00:00:00 2001 From: nazunalika Date: Sat, 11 Jun 2022 08:03:03 -0700 Subject: [PATCH 04/64] remove nplb and rename all to devel --- iso/py/configs/.el9.yaml.swp | Bin 12288 -> 0 bytes iso/py/configs/el9.yaml | 7 ++----- 2 files changed, 2 insertions(+), 5 deletions(-) delete mode 100644 iso/py/configs/.el9.yaml.swp diff --git a/iso/py/configs/.el9.yaml.swp b/iso/py/configs/.el9.yaml.swp deleted file mode 100644 index d235db335daa90092f58bdac2b59b97e409f08e0..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 12288 zcmeI2%ZnUE9LFn4P}js2Bd7#rRB%M>?w#4)*_j0ovy(BqB>P}yHW*(`O?PciyWL%F zch6+^5=9V<2cs83gNL9OgEtR>5b&S@{{R#5rcn_*i3A@I&~HEIkzJBS4@RotL)Y(D zRllnGR{d&Nw%Zd&78Y}5xk&KbO~?cK>GrL^zD~x!dxBWB&8*OWoyVt+DO1N_me1ET zi)-u0xVJ7lEEw*1JPN27W}x_{p=jJ=O2E0b4u|P;#odI@Su+h^VOB-nr1p)BK0Jo7 zgaSf=?G)HcCX0oMu=deM9?tE%XLY-V!Y>pM3J3*+0zv_yfKWgvAQTV^{0|if++E}q ztm~F!ZFchBHS$hO@e&FM1%v`Z0il3UKqw#-5DEwdgaSeVp@2}}CRBjxgq+(=$eXue z^7#LM`uG3UaYB9vKY~l(3vd>E3{HWQpa-hpN$?OD1Ap8_$WP!aZ~>eHXTS&G9q<}( zfesFVBFKaNU>y9lmyln;WpELE20jHJfp@_P&;tfo1q)yT+z0jm0

KTks_~3qAzzfj2+^Oz;di04m^da4-1tc0zsz-+&9?3^)ZEpbBQdK5!NLbs3xo zZ-H?CP5|);1%v`Z0inRnrhqKVBo{qWF5g^UZPe;brLnRo9b7pm9jYId8u+koQ>sWU z<_D%@Z)|#N}qkhy4^pPF4Z?tySocU0MIYBev9yQpGI*-b_qt3O)++h15%`;Wf7IE!t z?ztC|U2$Biotfdm$@bfytW0d+7)jC^_cTU5t@Bjm%SqC;hQmW&dW2n9D@|E!q<7*? z`;VBz5oj2$G&K?-@um?*%;5+d<&t_$fY_P#AGT=FvAC1gnWb_mD^CxUrv}QC{qo@8 z;oRfI2^Gb6xqjDUs!iQ=8*tBLTa#)AU%Wf9V`wj=T$}SC*)l0-n~rHyE3@^UUWg2M zjef?aJD85p-<8zUS2^`$s>O~D_k-v};49(&0x(say12#YQaaREZLW8-n*$A!)XzG= z&dSKktlVhzizE4zRgC6JR zCt3A3ZT$SK_vOZcvh``dYiBAO%z-mH!kN#)Wx5fM_;T0un6A21TTc(U>+zSE7O19< z@usKSg_&ZxAhosWDQT)wtVm_5QHhpmp`e$F<(bKH7zv$vrjB&%S^=(X+J*TPeq={lYAvVOC(7me*>3% BngIX+ diff --git a/iso/py/configs/el9.yaml b/iso/py/configs/el9.yaml index d6553a8..3f8be02 100644 --- a/iso/py/configs/el9.yaml +++ b/iso/py/configs/el9.yaml @@ -14,10 +14,9 @@ - 'genisoimage' - 'isomd5sum' repo_symlinks: - devel: 'Devel' NFV: 'nfv' renames: - all: 'nplb' + all: 'devel' all_repos: - 'all' - 'BaseOS' @@ -30,12 +29,10 @@ - 'SAP' - 'SAPHANA' - 'extras' - - 'devel' - 'plus' no_comps_or_groups: - 'all' - 'extras' - - 'devel' - 'plus' comps_or_groups: - 'BaseOS' @@ -70,7 +67,7 @@ ppc64le: '--forcearch=ppc64le --arch=ppc64le --arch=noarch' s390x: '--forcearch=s390x --arch=s390x --arch=noarch' repos: - nplb: [] + devel: [] BaseOS: [] AppStream: - BaseOS From b376c168bb5e5609fb486407ac38b6741a007a5a Mon Sep 17 00:00:00 2001 From: nazunalika Date: Sat, 11 Jun 2022 10:49:37 -0700 Subject: [PATCH 05/64] start using templates for reposync --- iso/py/sync-from-peridot-test | 4 +- iso/py/templates/repoconfig.tmpl | 20 ++++++ iso/py/templates/reposync-src.tmpl | 4 ++ iso/py/templates/reposync.tmpl | 5 ++ iso/py/util/dnf_utils.py | 97 +++++++++++++++--------------- 5 files changed, 78 insertions(+), 52 deletions(-) create mode 100644 iso/py/templates/repoconfig.tmpl create mode 100644 iso/py/templates/reposync-src.tmpl create mode 100644 iso/py/templates/reposync.tmpl diff --git a/iso/py/sync-from-peridot-test b/iso/py/sync-from-peridot-test index 34cd4b6..b8db670 100755 --- a/iso/py/sync-from-peridot-test +++ b/iso/py/sync-from-peridot-test @@ -12,5 +12,5 @@ r = Checks(rlvars, config['arch']) r.check_valid_arch() #a = RepoSync(rlvars, config, major="9", repo="ResilientStorage", parallel=True, ignore_debug=False, ignore_source=False) -a = RepoSync(rlvars, config, major="9", repo="ResilientStorage", parallel=True, ignore_debug=False, ignore_source=False) -a.run() +a = RepoSync(rlvars, config, major="9", repo="ResilientStorage", parallel=True, ignore_debug=False, ignore_source=False, hashed=True) +#a.run() diff --git a/iso/py/templates/repoconfig.tmpl b/iso/py/templates/repoconfig.tmpl new file mode 100644 index 0000000..04da1d8 --- /dev/null +++ b/iso/py/templates/repoconfig.tmpl @@ -0,0 +1,20 @@ +{%- for repo in repos -%} +[{{ repo.name }}] +name={{repo.name}} +baseurl={{ repo.baseurl }} +enabled=1 +gpgcheck=0 + +[{{ repo.name }}-debug] +name={{repo.name}} +baseurl={{ repo.baseurl }}-debug +enabled=1 +gpgcheck=0 + +[{{ repo.name }}-source] +name={{repo.name}} +baseurl={{ repo.srcbaseurl }} +enabled=1 +gpgcheck=0 + +{% endfor %} diff --git a/iso/py/templates/reposync-src.tmpl b/iso/py/templates/reposync-src.tmpl new file mode 100644 index 0000000..a0b634d --- /dev/null +++ b/iso/py/templates/reposync-src.tmpl @@ -0,0 +1,4 @@ +#!/bin/bash +set -o pipefail +{{ dnf_plugin_cmd }} +{{ sync_cmd }} diff --git a/iso/py/templates/reposync.tmpl b/iso/py/templates/reposync.tmpl new file mode 100644 index 0000000..4b5306f --- /dev/null +++ b/iso/py/templates/reposync.tmpl @@ 
-0,0 +1,5 @@ +#!/bin/bash +set -o pipefail +{{ arch_force_cp }} +{{ dnf_plugin_cmd }} +{{ sync_cmd }} diff --git a/iso/py/util/dnf_utils.py b/iso/py/util/dnf_utils.py index 062ad6b..c4fce49 100644 --- a/iso/py/util/dnf_utils.py +++ b/iso/py/util/dnf_utils.py @@ -14,6 +14,7 @@ import time import re #import pipes from common import Color +from jinja2 import Environment, FileSystemLoader #HAS_LIBREPO = True #try: @@ -73,6 +74,10 @@ class RepoSync: self.repo = repo self.extra_files = rlvars['extra_files'] + # Templates + file_loader = FileSystemLoader('templates') + self.tmplenv = Environment(loader=file_loader) + # each el can have its own designated container to run stuff in, # otherwise we'll just default to the default config. self.container = config['container'] @@ -300,18 +305,22 @@ class RepoSync: a )) + sync_log = ("{}/{}-{}-{}.log").format( + log_root, + repo_name, + a, + self.date_stamp + ) + sync_cmd = ("/usr/bin/dnf reposync -c {}.{} --download-metadata " "--repoid={} -p {} --forcearch {} --norepopath 2>&1 " - "| tee -a {}/{}-{}-{}.log").format( + "| tee -a {}").format( self.dnf_config, a, r, os_sync_path, a, - log_root, - repo_name, - a, - self.date_stamp + sync_log, ) debug_sync_cmd = ("/usr/bin/dnf reposync -c {}.{} " @@ -345,20 +354,25 @@ class RepoSync: self.date_stamp ) + sync_template = self.tmplenv.get_template('reposync.tmpl') + sync_output = sync_template.render( + arch_force_cp=arch_force_cp, + dnf_plugin_cmd=dnf_plugin_cmd, + sync_cmd=sync_cmd + ) + + debug_sync_template = self.tmplenv.get_template('reposync.tmpl') + debug_sync_output = debug_sync_template.render( + arch_force_cp=arch_force_cp, + dnf_plugin_cmd=debug_dnf_plugin_cmd, + sync_cmd=debug_sync_cmd + ) + entry_point_open = open(entry_point_sh, "w+") debug_entry_point_open = open(debug_entry_point_sh, "w+") - entry_point_open.write('#!/bin/bash\n') - entry_point_open.write('set -o pipefail\n') - entry_point_open.write(arch_force_cp + '\n') - entry_point_open.write(dnf_plugin_cmd + '\n') - entry_point_open.write(sync_cmd + '\n') - - debug_entry_point_open.write('#!/bin/bash\n') - debug_entry_point_open.write('set -o pipefail\n') - debug_entry_point_open.write(arch_force_cp + '\n') - debug_entry_point_open.write(debug_dnf_plugin_cmd + '\n') - debug_entry_point_open.write(debug_sync_cmd + '\n') + entry_point_open.write(sync_output) + debug_entry_point_open.write(debug_sync_output) entry_point_open.close() debug_entry_point_open.close() @@ -400,11 +414,14 @@ class RepoSync: self.date_stamp ) + source_sync_template = self.tmplenv.get_template('reposync-src.tmpl') + source_sync_output = source_sync_template.render( + dnf_plugin_cmd=source_dnf_plugin_cmd, + sync_cmd=source_sync_cmd + ) + source_entry_point_open = open(source_entry_point_sh, "w+") - source_entry_point_open.write('#!/bin/bash\n') - source_entry_point_open.write('set -o pipefail\n') - source_entry_point_open.write(source_dnf_plugin_cmd + '\n') - source_entry_point_open.write(source_sync_cmd + '\n') + source_entry_point_open.write(source_sync_output) source_entry_point_open.close() os.chmod(source_entry_point_sh, 0o755) @@ -545,11 +562,9 @@ class RepoSync: if not os.path.exists(dest_path): os.makedirs(dest_path, exist_ok=True) config_file = open(fname, "w+") + repolist = [] for repo in self.repos: - #if 'all' in repo: - # prehashed = '' - constructed_url = '{}/{}/repo/{}{}/$basearch'.format( self.repo_base_url, self.project_id, @@ -557,13 +572,6 @@ class RepoSync: repo, ) - constructed_url_debug = '{}/{}/repo/{}{}/$basearch-debug'.format( - 
self.repo_base_url, - self.project_id, - prehashed, - repo, - ) - constructed_url_src = '{}/{}/repo/{}{}/src'.format( self.repo_base_url, self.project_id, @@ -571,27 +579,16 @@ class RepoSync: repo, ) - # normal - config_file.write('[%s]\n' % repo) - config_file.write('name=%s\n' % repo) - config_file.write('baseurl=%s\n' % constructed_url) - config_file.write("enabled=1\n") - config_file.write("gpgcheck=0\n\n") - - # debug - config_file.write('[%s-debug]\n' % repo) - config_file.write('name=%s debug\n' % repo) - config_file.write('baseurl=%s\n' % constructed_url_debug) - config_file.write("enabled=1\n") - config_file.write("gpgcheck=0\n\n") - - # src - config_file.write('[%s-source]\n' % repo) - config_file.write('name=%s source\n' % repo) - config_file.write('baseurl=%s\n' % constructed_url_src) - config_file.write("enabled=1\n") - config_file.write("gpgcheck=0\n\n") + repodata = { + 'name': repo, + 'baseurl': constructed_url, + 'srcbaseurl': constructed_url_src + } + repolist.append(repodata) + template = self.tmplenv.get_template('repoconfig.tmpl') + output = template.render(repos=repolist) + config_file.write(output) config_file.close() return fname From 08249632af94efc0156a1d5906d115dade690556 Mon Sep 17 00:00:00 2001 From: nazunalika Date: Sun, 12 Jun 2022 02:08:48 -0700 Subject: [PATCH 06/64] try to simplify templates, add gpg check --- iso/py/configs/el9.yaml | 4 ++ iso/py/templates/reposync-src.tmpl | 13 +++- iso/py/templates/reposync.tmpl | 15 ++++- iso/py/util/dnf_utils.py | 96 ++++++++++++++---------------- 4 files changed, 73 insertions(+), 55 deletions(-) diff --git a/iso/py/configs/el9.yaml b/iso/py/configs/el9.yaml index 3f8be02..ff11dcb 100644 --- a/iso/py/configs/el9.yaml +++ b/iso/py/configs/el9.yaml @@ -96,7 +96,11 @@ - HighAvailability extra_files: git_repo: 'https://git.rockylinux.org/staging/src/rocky-release.git' + git_raw_path: 'https://git.rockylinux.org/staging/src/rocky-release/-/raw/' branch: 'r9' + gpg: + stable: 'SOURCES/RPM-GPG-KEY-Rocky-9' + testing: 'SOURCES/RPM-GPG-KEY-Rocky-9-Testing' list: - 'SOURCES/COMMUNITY-CHARTER' - 'SOURCES/EULA' diff --git a/iso/py/templates/reposync-src.tmpl b/iso/py/templates/reposync-src.tmpl index a0b634d..dc10fab 100644 --- a/iso/py/templates/reposync-src.tmpl +++ b/iso/py/templates/reposync-src.tmpl @@ -1,4 +1,13 @@ #!/bin/bash set -o pipefail -{{ dnf_plugin_cmd }} -{{ sync_cmd }} +{{ import_gpg_cmd }} | tee -a {{ sync_log }} +{{ dnf_plugin_cmd }} | tee -a {{ sync_log }} +{{ sync_cmd }} | tee -a {{ sync_log }} +{{ check_cmd }} | tee -a {{ sync_log }} + +ret_val=$? +if [ "$ret_val" -eq 1 ]; then + exit 0 +else + exit 1 +fi diff --git a/iso/py/templates/reposync.tmpl b/iso/py/templates/reposync.tmpl index 4b5306f..282340f 100644 --- a/iso/py/templates/reposync.tmpl +++ b/iso/py/templates/reposync.tmpl @@ -1,5 +1,14 @@ #!/bin/bash set -o pipefail -{{ arch_force_cp }} -{{ dnf_plugin_cmd }} -{{ sync_cmd }} +{{ import_gpg_cmd }} | tee -a {{ sync_log }} +{{ arch_force_cp }} | tee -a {{ sync_log }} +{{ dnf_plugin_cmd }} | tee -a {{ sync_log }} +{{ sync_cmd }} | tee -a {{ sync_log }} +{{ check_cmd }} | tee -a {{ sync_log }} + +ket_val=$? 
+if [ "$ret_val" -eq 1 ]; then + exit 0 +else + exit 1 +fi diff --git a/iso/py/util/dnf_utils.py b/iso/py/util/dnf_utils.py index c4fce49..a6dc2b1 100644 --- a/iso/py/util/dnf_utils.py +++ b/iso/py/util/dnf_utils.py @@ -44,6 +44,7 @@ class RepoSync: dryrun: bool = False, fullrun: bool = False, nofail: bool = False, + gpgkey: str = 'stable', logger=None ): self.nofail = nofail @@ -73,6 +74,7 @@ class RepoSync: self.multilib = rlvars['provide_multilib'] self.repo = repo self.extra_files = rlvars['extra_files'] + self.gpgkey = gpgkey # Templates file_loader = FileSystemLoader('templates') @@ -298,6 +300,12 @@ class RepoSync: 'debug/tree' ) + import_gpg_cmd = ("/usr/bin/rpm --import " + "| /usr/bin/curl {}{}").format( + self.extra_files['git_raw_path'], + self.extra_files['gpg'][self.gpgkey] + ) + arch_force_cp = ("/usr/bin/sed 's|$basearch|{}|g' {} > {}.{}".format( a, self.dnf_config, @@ -312,60 +320,53 @@ class RepoSync: self.date_stamp ) + debug_sync_log = ("{}/{}-{}-debug-{}.log").format( + log_root, + repo_name, + a, + self.date_stamp + ) + sync_cmd = ("/usr/bin/dnf reposync -c {}.{} --download-metadata " - "--repoid={} -p {} --forcearch {} --norepopath 2>&1 " - "| tee -a {}").format( + "--repoid={} -p {} --forcearch {} --norepopath 2>&1").format( self.dnf_config, a, r, os_sync_path, - a, - sync_log, + a ) debug_sync_cmd = ("/usr/bin/dnf reposync -c {}.{} " "--download-metadata --repoid={}-debug -p {} --forcearch {} " - "--norepopath 2>&1 | tee -a {}/{}-{}-debug-{}.log").format( + "--norepopath 2>&1").format( self.dnf_config, a, r, debug_sync_path, - a, - log_root, - repo_name, - a, - self.date_stamp + a ) - dnf_plugin_cmd = ("/usr/bin/dnf install dnf-plugins-core " - "-y | tee -a {}/{}-{}-{}.log").format( - log_root, - repo_name, - a, - self.date_stamp - ) - - - debug_dnf_plugin_cmd = ("/usr/bin/dnf install dnf-plugins-core " - "-y | tee -a {}/{}-{}-debug-{}.log").format( - log_root, - repo_name, - a, - self.date_stamp - ) + dnf_plugin_cmd = "/usr/bin/dnf install dnf-plugins-core -y" + check_cmd = ("/usr/bin/rpm -K $(find . 
-name '*.rpm') | grep -v 'signatures OK'") sync_template = self.tmplenv.get_template('reposync.tmpl') sync_output = sync_template.render( + import_gpg_cmd=import_gpg_cmd, arch_force_cp=arch_force_cp, dnf_plugin_cmd=dnf_plugin_cmd, - sync_cmd=sync_cmd + sync_cmd=sync_cmd, + check_cmd=check_cmd, + sync_log=sync_log ) debug_sync_template = self.tmplenv.get_template('reposync.tmpl') debug_sync_output = debug_sync_template.render( + import_gpg_cmd=import_gpg_cmd, arch_force_cp=arch_force_cp, - dnf_plugin_cmd=debug_dnf_plugin_cmd, - sync_cmd=debug_sync_cmd + dnf_plugin_cmd=dnf_plugin_cmd, + sync_cmd=debug_sync_cmd, + check_cmd=check_cmd, + sync_log=debug_sync_log ) entry_point_open = open(entry_point_sh, "w+") @@ -396,28 +397,27 @@ class RepoSync: 'source/tree' ) - source_sync_cmd = ("/usr/bin/dnf reposync -c {} " - "--download-metadata --repoid={}-source -p {} " - "--norepopath | tee -a {}/{}-source-{}.log").format( - self.dnf_config, - r, - source_sync_path, + source_sync_log = ("{}/{}-source-{}.log").format( log_root, repo_name, self.date_stamp ) - source_dnf_plugin_cmd = ("/usr/bin/dnf install dnf-plugins-core " - "-y | tee -a {}/{}-source-{}.log").format( - log_root, - repo_name, - self.date_stamp + source_sync_cmd = ("/usr/bin/dnf reposync -c {} " + "--download-metadata --repoid={}-source -p {} " + "--norepopath 2>&1").format( + self.dnf_config, + r, + source_sync_path ) source_sync_template = self.tmplenv.get_template('reposync-src.tmpl') source_sync_output = source_sync_template.render( - dnf_plugin_cmd=source_dnf_plugin_cmd, - sync_cmd=source_sync_cmd + import_gpg_cmd=import_gpg_cmd, + dnf_plugin_cmd=dnf_plugin_cmd, + sync_cmd=source_sync_cmd, + check_cmd=check_cmd, + sync_log=source_sync_log ) source_entry_point_open = open(source_entry_point_sh, "w+") @@ -481,10 +481,8 @@ class RepoSync: ) output, errors = podcheck.communicate() - if 'Exited (0)' in output.decode(): - self.log.info('%s seems ok' % pod) - else: - self.log.error('%s had issues syncing' % pod) + if 'Exited (0)' not in output.decode(): + self.log.error('[%s%sFAIL%s] %s' % Color.BOLD, Color.RED, pod, Color.END) bad_exit_list.append(pod) rmcmd = '{} rm {}'.format( @@ -761,10 +759,8 @@ class RepoSync: ) output, errors = podcheck.communicate() - if 'Exited (0)' in output.decode(): - self.log.info('%s seems ok' % pod) - else: - self.log.error('%s had issues closing' % pod) + if 'Exited (0)' not in output.decode(): + self.log.error('[%s%sFAIL%s] %s' % Color.BOLD, Color.RED, pod, Color.END) bad_exit_list.append(pod) rmcmd = '{} rm {}'.format( From 9eb649e86b5a790f88fea88e215bfc05344b6331 Mon Sep 17 00:00:00 2001 From: nazunalika Date: Sun, 12 Jun 2022 02:15:52 -0700 Subject: [PATCH 07/64] ensure separate directories are checked --- iso/py/util/dnf_utils.py | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/iso/py/util/dnf_utils.py b/iso/py/util/dnf_utils.py index a6dc2b1..c4e2d49 100644 --- a/iso/py/util/dnf_utils.py +++ b/iso/py/util/dnf_utils.py @@ -347,7 +347,10 @@ class RepoSync: ) dnf_plugin_cmd = "/usr/bin/dnf install dnf-plugins-core -y" - check_cmd = ("/usr/bin/rpm -K $(find . 
-name '*.rpm') | grep -v 'signatures OK'") + check_cmd = ("/usr/bin/rpm -K $(find {} -name '*.rpm') " + "| grep -v 'signatures OK'").format(os_sync_path) + debug_check_cmd = ("/usr/bin/rpm -K $(find {} -name '*.rpm') " + "| grep -v 'signatures OK'").format(debug_sync_path) sync_template = self.tmplenv.get_template('reposync.tmpl') sync_output = sync_template.render( @@ -365,7 +368,7 @@ class RepoSync: arch_force_cp=arch_force_cp, dnf_plugin_cmd=dnf_plugin_cmd, sync_cmd=debug_sync_cmd, - check_cmd=check_cmd, + check_cmd=debug_check_cmd, sync_log=debug_sync_log ) @@ -411,12 +414,15 @@ class RepoSync: source_sync_path ) + source_check_cmd = ("/usr/bin/rpm -K $(find {} -name '*.rpm') " + "| grep -v 'signatures OK'").format(source_sync_path) + source_sync_template = self.tmplenv.get_template('reposync-src.tmpl') source_sync_output = source_sync_template.render( import_gpg_cmd=import_gpg_cmd, dnf_plugin_cmd=dnf_plugin_cmd, sync_cmd=source_sync_cmd, - check_cmd=check_cmd, + check_cmd=source_check_cmd, sync_log=source_sync_log ) From d6adcf3f8ff69dbce1192c85374daf3ba4f0e312 Mon Sep 17 00:00:00 2001 From: nazunalika Date: Sun, 12 Jun 2022 02:19:20 -0700 Subject: [PATCH 08/64] fix raw path --- iso/py/configs/el9.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/iso/py/configs/el9.yaml b/iso/py/configs/el9.yaml index ff11dcb..22b6cd1 100644 --- a/iso/py/configs/el9.yaml +++ b/iso/py/configs/el9.yaml @@ -96,7 +96,7 @@ - HighAvailability extra_files: git_repo: 'https://git.rockylinux.org/staging/src/rocky-release.git' - git_raw_path: 'https://git.rockylinux.org/staging/src/rocky-release/-/raw/' + git_raw_path: 'https://git.rockylinux.org/staging/src/rocky-release/-/raw/r9/' branch: 'r9' gpg: stable: 'SOURCES/RPM-GPG-KEY-Rocky-9' From 41f77b1d4cc8e5e6aa7754f25370f95473cb7341 Mon Sep 17 00:00:00 2001 From: nazunalika Date: Sun, 12 Jun 2022 02:22:01 -0700 Subject: [PATCH 09/64] remove sleepy curl --- iso/py/util/dnf_utils.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/iso/py/util/dnf_utils.py b/iso/py/util/dnf_utils.py index c4e2d49..5969be0 100644 --- a/iso/py/util/dnf_utils.py +++ b/iso/py/util/dnf_utils.py @@ -300,8 +300,7 @@ class RepoSync: 'debug/tree' ) - import_gpg_cmd = ("/usr/bin/rpm --import " - "| /usr/bin/curl {}{}").format( + import_gpg_cmd = ("/usr/bin/rpm --import {}{}").format( self.extra_files['git_raw_path'], self.extra_files['gpg'][self.gpgkey] ) From 45848bf530651e0273978408099c43c5f4e5a419 Mon Sep 17 00:00:00 2001 From: nazunalika Date: Sun, 12 Jun 2022 02:55:32 -0700 Subject: [PATCH 10/64] simply gpg checking --- iso/py/sync-from-peridot-test | 4 ++-- iso/py/templates/repoconfig.tmpl | 6 +++--- iso/py/templates/reposync-src.tmpl | 8 +------- iso/py/templates/reposync.tmpl | 8 +------- iso/py/util/dnf_utils.py | 25 ++++++++++--------------- 5 files changed, 17 insertions(+), 34 deletions(-) diff --git a/iso/py/sync-from-peridot-test b/iso/py/sync-from-peridot-test index b8db670..1166376 100755 --- a/iso/py/sync-from-peridot-test +++ b/iso/py/sync-from-peridot-test @@ -12,5 +12,5 @@ r = Checks(rlvars, config['arch']) r.check_valid_arch() #a = RepoSync(rlvars, config, major="9", repo="ResilientStorage", parallel=True, ignore_debug=False, ignore_source=False) -a = RepoSync(rlvars, config, major="9", repo="ResilientStorage", parallel=True, ignore_debug=False, ignore_source=False, hashed=True) -#a.run() +a = RepoSync(rlvars, config, major="9", repo="BaseOS", parallel=True, ignore_debug=False, ignore_source=False, hashed=True) +a.run() diff --git 
a/iso/py/templates/repoconfig.tmpl b/iso/py/templates/repoconfig.tmpl index 04da1d8..ce57623 100644 --- a/iso/py/templates/repoconfig.tmpl +++ b/iso/py/templates/repoconfig.tmpl @@ -3,18 +3,18 @@ name={{repo.name}} baseurl={{ repo.baseurl }} enabled=1 -gpgcheck=0 +gpgcheck=1 [{{ repo.name }}-debug] name={{repo.name}} baseurl={{ repo.baseurl }}-debug enabled=1 -gpgcheck=0 +gpgcheck=1 [{{ repo.name }}-source] name={{repo.name}} baseurl={{ repo.srcbaseurl }} enabled=1 -gpgcheck=0 +gpgcheck=1 {% endfor %} diff --git a/iso/py/templates/reposync-src.tmpl b/iso/py/templates/reposync-src.tmpl index dc10fab..88836a3 100644 --- a/iso/py/templates/reposync-src.tmpl +++ b/iso/py/templates/reposync-src.tmpl @@ -3,11 +3,5 @@ set -o pipefail {{ import_gpg_cmd }} | tee -a {{ sync_log }} {{ dnf_plugin_cmd }} | tee -a {{ sync_log }} {{ sync_cmd }} | tee -a {{ sync_log }} -{{ check_cmd }} | tee -a {{ sync_log }} -ret_val=$? -if [ "$ret_val" -eq 1 ]; then - exit 0 -else - exit 1 -fi +# {{ check_cmd }} | tee -a {{ sync_log }} diff --git a/iso/py/templates/reposync.tmpl b/iso/py/templates/reposync.tmpl index 282340f..22f71b4 100644 --- a/iso/py/templates/reposync.tmpl +++ b/iso/py/templates/reposync.tmpl @@ -4,11 +4,5 @@ set -o pipefail {{ arch_force_cp }} | tee -a {{ sync_log }} {{ dnf_plugin_cmd }} | tee -a {{ sync_log }} {{ sync_cmd }} | tee -a {{ sync_log }} -{{ check_cmd }} | tee -a {{ sync_log }} -ket_val=$? -if [ "$ret_val" -eq 1 ]; then - exit 0 -else - exit 1 -fi +# {{ check_cmd }} | tee -a {{ sync_log }} diff --git a/iso/py/util/dnf_utils.py b/iso/py/util/dnf_utils.py index 5969be0..7221e72 100644 --- a/iso/py/util/dnf_utils.py +++ b/iso/py/util/dnf_utils.py @@ -327,7 +327,8 @@ class RepoSync: ) sync_cmd = ("/usr/bin/dnf reposync -c {}.{} --download-metadata " - "--repoid={} -p {} --forcearch {} --norepopath 2>&1").format( + "--repoid={} -p {} --forcearch {} --norepopath " + "--gpgcheck 2>&1").format( self.dnf_config, a, r, @@ -337,7 +338,7 @@ class RepoSync: debug_sync_cmd = ("/usr/bin/dnf reposync -c {}.{} " "--download-metadata --repoid={}-debug -p {} --forcearch {} " - "--norepopath 2>&1").format( + "--gpgcheck --norepopath 2>&1").format( self.dnf_config, a, r, @@ -346,10 +347,6 @@ class RepoSync: ) dnf_plugin_cmd = "/usr/bin/dnf install dnf-plugins-core -y" - check_cmd = ("/usr/bin/rpm -K $(find {} -name '*.rpm') " - "| grep -v 'signatures OK'").format(os_sync_path) - debug_check_cmd = ("/usr/bin/rpm -K $(find {} -name '*.rpm') " - "| grep -v 'signatures OK'").format(debug_sync_path) sync_template = self.tmplenv.get_template('reposync.tmpl') sync_output = sync_template.render( @@ -357,7 +354,6 @@ class RepoSync: arch_force_cp=arch_force_cp, dnf_plugin_cmd=dnf_plugin_cmd, sync_cmd=sync_cmd, - check_cmd=check_cmd, sync_log=sync_log ) @@ -367,7 +363,6 @@ class RepoSync: arch_force_cp=arch_force_cp, dnf_plugin_cmd=dnf_plugin_cmd, sync_cmd=debug_sync_cmd, - check_cmd=debug_check_cmd, sync_log=debug_sync_log ) @@ -407,21 +402,17 @@ class RepoSync: source_sync_cmd = ("/usr/bin/dnf reposync -c {} " "--download-metadata --repoid={}-source -p {} " - "--norepopath 2>&1").format( + "--gpgcheck --norepopath 2>&1").format( self.dnf_config, r, source_sync_path ) - source_check_cmd = ("/usr/bin/rpm -K $(find {} -name '*.rpm') " - "| grep -v 'signatures OK'").format(source_sync_path) - source_sync_template = self.tmplenv.get_template('reposync-src.tmpl') source_sync_output = source_sync_template.render( import_gpg_cmd=import_gpg_cmd, dnf_plugin_cmd=dnf_plugin_cmd, sync_cmd=source_sync_cmd, - 
check_cmd=source_check_cmd, sync_log=source_sync_log ) @@ -487,7 +478,9 @@ class RepoSync: output, errors = podcheck.communicate() if 'Exited (0)' not in output.decode(): - self.log.error('[%s%sFAIL%s] %s' % Color.BOLD, Color.RED, pod, Color.END) + self.log.error( + '[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' + pod + ) bad_exit_list.append(pod) rmcmd = '{} rm {}'.format( @@ -765,7 +758,9 @@ class RepoSync: output, errors = podcheck.communicate() if 'Exited (0)' not in output.decode(): - self.log.error('[%s%sFAIL%s] %s' % Color.BOLD, Color.RED, pod, Color.END) + self.log.error( + '[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' + pod + ) bad_exit_list.append(pod) rmcmd = '{} rm {}'.format( From 602553e878ea54fc99529758ec8b12877f2af2eb Mon Sep 17 00:00:00 2001 From: nazunalika Date: Mon, 13 Jun 2022 07:37:50 -0700 Subject: [PATCH 11/64] add more color for next sync --- iso/py/common.py | 1 + iso/py/configs/el8.yaml | 1 + iso/py/configs/el9.yaml | 2 ++ iso/py/templates/isobuild.tmpl.sh | 15 +++++++++ iso/py/templates/isolorax.tmpl.sh | 2 ++ iso/py/templates/isomock.tmpl | 47 ++++++++++++++++++++++++++++ iso/py/util/dnf_utils.py | 5 +++ iso/py/util/iso_utils.py | 51 ++++++++++++++++++++++++++++++- 8 files changed, 123 insertions(+), 1 deletion(-) create mode 100644 iso/py/templates/isobuild.tmpl.sh create mode 100644 iso/py/templates/isolorax.tmpl.sh create mode 100644 iso/py/templates/isomock.tmpl diff --git a/iso/py/common.py b/iso/py/common.py index 509e89e..8032efd 100644 --- a/iso/py/common.py +++ b/iso/py/common.py @@ -25,6 +25,7 @@ rldict = {} sigdict = {} config = { "rlmacro": rpm.expandMacro('%rhel'), + "dist": 'el' + rpm.expandMacro('%rhel'), "arch": platform.machine(), "date_stamp": time.strftime("%Y%m%d.%H%M%S", time.localtime()), "compose_root": "/mnt/compose", diff --git a/iso/py/configs/el8.yaml b/iso/py/configs/el8.yaml index c232bb0..f7f64d2 100644 --- a/iso/py/configs/el8.yaml +++ b/iso/py/configs/el8.yaml @@ -1,5 +1,6 @@ --- '8': + fullname: 'Rocky Linux 8' revision: '8.6' rclvl: 'RC2' allowed_arches: diff --git a/iso/py/configs/el9.yaml b/iso/py/configs/el9.yaml index 22b6cd1..cf5c49e 100644 --- a/iso/py/configs/el9.yaml +++ b/iso/py/configs/el9.yaml @@ -1,5 +1,6 @@ --- '9': + fullname: 'Rocky Linux 9' revision: '9.0' rclvl: 'RC1' allowed_arches: @@ -60,6 +61,7 @@ repos: - 'BaseOS' - 'AppStream' + variant: 'BaseOS' repoclosure_map: arches: x86_64: '--forcearch=x86_64 --arch=x86_64 --arch=athlon --arch=i686 --arch=i586 --arch=i486 --arch=i386 --arch=noarch' diff --git a/iso/py/templates/isobuild.tmpl.sh b/iso/py/templates/isobuild.tmpl.sh new file mode 100644 index 0000000..880aa16 --- /dev/null +++ b/iso/py/templates/isobuild.tmpl.sh @@ -0,0 +1,15 @@ +#!/bin/bash +# This is a template that is used to build ISO's for Rocky Linux. Only under +# extreme circumstances should you be filling this out and running manually. + +# Init the container +mock \ + -r /mnt/compose/9/latest-Rocky-9/work/entries/lorax-{{ major }}-{{ arch }}.cfg \ + --init + +mock \ + -r /mnt/compose/9/latest-Rocky-9/work/entries/lorax-{{ major }}-{{ arch }}.cfg \ + --shell \ + --enable-network -- /bin/bash /mnt/compose/9/latest-Rocky-9/work/entries/runLorax-{{ arch }}.sh + +# Clean up? diff --git a/iso/py/templates/isolorax.tmpl.sh b/iso/py/templates/isolorax.tmpl.sh new file mode 100644 index 0000000..5dba11d --- /dev/null +++ b/iso/py/templates/isolorax.tmpl.sh @@ -0,0 +1,2 @@ +#!/bin/bash +# You should not be running this manually. 
diff --git a/iso/py/templates/isomock.tmpl b/iso/py/templates/isomock.tmpl new file mode 100644 index 0000000..a07589c --- /dev/null +++ b/iso/py/templates/isomock.tmpl @@ -0,0 +1,47 @@ +config_opts['root'] = 'rocky-{{ major }}-{{ arch }}' +config_opts['description'] = '{{ fullname }}' +config_opts['target_arch'] = '{{ arch }}' +config_opts['legal_host_arches'] = ('{{ arch }}',) + +config_opts['chroot_setup_cmd'] = 'install bash bzip2 coreutils cpio diffutils redhat-release findutils gawk glibc-minimal-langpack grep gzip info patch redhat-rpm-config rpm-build sed shadow-utils tar unzip util-linux which xz genisoimage isomd5sum lorax lorax-templates-rhel lorax-templates-generic' +config_opts['dist'] = '{{ dist }}' # only useful for --resultdir variable subst +config_opts['releasever'] = '{{ major }}' +config_opts['package_manager'] = 'dnf' +config_opts['extra_chroot_dirs'] = [ '/run/lock', ] +# config_opts['bootstrap_image'] = 'quay.io/rockylinux/rockylinux:{{ major }}' + +config_opts['plugin_conf']['bind_mount_enable'] = True +config_opts['plugin_conf']['bind_mount_opts']['dirs'].append(('/mnt/compose', '/mnt/compose')) + + +config_opts['dnf.conf'] = """ +[main] +keepcache=1 +debuglevel=2 +reposdir=/dev/null +logfile=/var/log/yum.log +retries=20 +obsoletes=1 +gpgcheck=0 +assumeyes=1 +syslog_ident=mock +syslog_device= +metadata_expire=0 +mdpolicy=group:primary +best=1 +install_weak_deps=0 +protected_packages= +module_platform_id=platform:{{ dist }} +user_agent={{ user_agent }} + +{% for repo in repos %} +[{{ repo }}] +name={{ repo }} +baseurl=file:///mnt/compose/{{ major }}/latest-Rocky-{{ major }}/compose/{{ repo }}/{{ arch }}/os +enabled=1 +gpgcheck=0 + +{% endfor %} + +""" + diff --git a/iso/py/util/dnf_utils.py b/iso/py/util/dnf_utils.py index 7221e72..ecc75b9 100644 --- a/iso/py/util/dnf_utils.py +++ b/iso/py/util/dnf_utils.py @@ -505,6 +505,11 @@ class RepoSync: ) for issue in bad_exit_list: self.log.error(issue) + else: + self.log.info( + '[' + Color.BOLD + Color.GREEN + ' OK ' + Color.END + '] ' + 'No issues detected.' 
+ ) def generate_compose_dirs(self) -> str: """ diff --git a/iso/py/util/iso_utils.py b/iso/py/util/iso_utils.py index c19ebc6..6bc2c19 100644 --- a/iso/py/util/iso_utils.py +++ b/iso/py/util/iso_utils.py @@ -12,6 +12,7 @@ import subprocess import shlex import time import re +from productmd.common import SortedConfigParser from common import Color class IsoBuild: @@ -34,12 +35,15 @@ class IsoBuild: self.arch = arch self.image = image self.host = host + self.fullname = rlvars['fullname'] # Relevant config items self.major_version = major + self.disttag = config['dist'] self.date_stamp = config['date_stamp'] + self.timestamp = time.time() self.compose_root = config['compose_root'] self.compose_base = config['compose_root'] + "/" + major - self.iso_base = config['compose_root'] + "/" + major + "/isos" + self.iso_drop = config['compose_root'] + "/" + major + "/isos" self.current_arch = config['arch'] self.extra_files = rlvars['extra_files'] @@ -145,6 +149,51 @@ class IsoBuild: """ print() + def generate_iso_scripts(self): + """ + Generates the scripts needed to be ran in the mock roots + """ + print() + + def treeinfo_write(self): + """ + Ensure treeinfo is written correctly + """ + print() + + def discinfo_write(self): + """ + Ensure discinfo is written correctly + """ + #with open(file_path, "w") as f: + # f.write("%s\n" % self.timestamp) + # f.write("%s\n" % self.fullname) + # f.write("%s\n" % self.arch) + # if disc_numbers: + # f.write("%s\n" % ",".join([str(i) for i in disc_numbers])) + print() + + def write_media_repo(self): + """ + Ensure media.repo exists + """ + data = [ + "[InstallMedia]", + "name=%s" % self.fullname, + "mediaid=%s" % self.timestamp, + "metadata_expire=-1", + "gpgcheck=0", + "cost=500", + "", + ] + + + def generate_graft_points(self): + """ + Get a list of packages for an ISO + """ + print() + class LiveBuild: """ From 3d161df451fa798fdfd70b0a6ea3d29fef476a6b Mon Sep 17 00:00:00 2001 From: nazunalika Date: Wed, 15 Jun 2022 13:53:12 -0700 Subject: [PATCH 12/64] update iso utils and prep --- iso/py/common.py | 5 +- iso/py/configs/el8.yaml | 16 +- iso/py/configs/el9.yaml | 13 +- iso/py/templates/buildImage.tmpl.sh | 19 ++ iso/py/templates/isobuild.tmpl.sh | 23 ++- iso/py/templates/isolorax.tmpl.sh | 6 + .../{isomock.tmpl => isomock.tmpl.cfg} | 16 +- iso/py/util/dnf_utils.py | 7 +- iso/py/util/iso_utils.py | 176 +++++++++++++----- sync/common_9 | 6 +- 10 files changed, 219 insertions(+), 68 deletions(-) create mode 100644 iso/py/templates/buildImage.tmpl.sh rename iso/py/templates/{isomock.tmpl => isomock.tmpl.cfg} (73%) diff --git a/iso/py/common.py b/iso/py/common.py index 8032efd..509a622 100644 --- a/iso/py/common.py +++ b/iso/py/common.py @@ -34,7 +34,10 @@ config = { "category_stub": "mirror/pub/rocky", "sig_category_stub": "mirror/pub/sig", "repo_base_url": "https://yumrepofs.build.resf.org/v1/projects", - "container": "centos:stream9" + "mock_work_root": "/builddir", + "container": "centos:stream9", + "distname": "Rocky Linux", + "shortname": "Rocky" } # Importing the config from yaml diff --git a/iso/py/configs/el8.yaml b/iso/py/configs/el8.yaml index f7f64d2..93bcdf3 100644 --- a/iso/py/configs/el8.yaml +++ b/iso/py/configs/el8.yaml @@ -8,10 +8,6 @@ - aarch64 provide_multilib: False project_id: '' - required_packages: - - 'lorax' - - 'genisoimage' - - 'isomd5sum' repo_symlinks: devel: 'Devel' NFV: 'nfv' @@ -54,6 +50,18 @@ - dvd1 - minimal - boot + repos: + - 'BaseOS' + - 'AppStream' + variant: 'BaseOS' + lorax_removes: + - 'libreport-rhel-anaconda-bugzilla' + 
required_packages: + - 'lorax' + - 'genisoimage' + - 'isomd5sum' + - 'lorax-templates-rhel' + - 'lorax-templates-generic' repoclosure_map: arches: x86_64: '--arch=x86_64 --arch=athlon --arch=i686 --arch=i586 --arch=i486 --arch=i386 --arch=noarch' diff --git a/iso/py/configs/el9.yaml b/iso/py/configs/el9.yaml index cf5c49e..b0a371b 100644 --- a/iso/py/configs/el9.yaml +++ b/iso/py/configs/el9.yaml @@ -3,6 +3,7 @@ fullname: 'Rocky Linux 9' revision: '9.0' rclvl: 'RC1' + bugurl: 'https://bugs.rockylinux.org' allowed_arches: - x86_64 - aarch64 @@ -10,10 +11,6 @@ - s390x provide_multilib: True project_id: '55b17281-bc54-4929-8aca-a8a11d628738' - required_packages: - - 'lorax' - - 'genisoimage' - - 'isomd5sum' repo_symlinks: NFV: 'nfv' renames: @@ -62,6 +59,14 @@ - 'BaseOS' - 'AppStream' variant: 'BaseOS' + lorax_removes: + - 'libreport-rhel-anaconda-bugzilla' + required_pkgs: + - 'lorax' + - 'genisoimage' + - 'isomd5sum' + - 'lorax-templates-rhel' + - 'lorax-templates-generic' repoclosure_map: arches: x86_64: '--forcearch=x86_64 --arch=x86_64 --arch=athlon --arch=i686 --arch=i586 --arch=i486 --arch=i386 --arch=noarch' diff --git a/iso/py/templates/buildImage.tmpl.sh b/iso/py/templates/buildImage.tmpl.sh new file mode 100644 index 0000000..efe097d --- /dev/null +++ b/iso/py/templates/buildImage.tmpl.sh @@ -0,0 +1,19 @@ +#!/bin/bash + +{% for pkg in lorax %} +sed -i '/{{ pkg }}/ s/^/#/' /usr/share/lorax/templates.d/80-rhel/runtime-install.tmpl +{% endfor %} + +lorax --product='{{ distname }}' \ + --version='{{ revision }}' \ + --release='{{ revision }}' \ +{%- for repo in repos %} + --source={{ repo.url }} \ +{%- endfor %} + --variant={{ variant }} \ + --nomacboot \ + --buildarch={{ arch }} \ + --volid={{ shortname }}-{{ major }}-{{ minor }}-{{ arch }}-dvd1 \ + --logfile={{ mock_work_root }}/lorax.log \ + --rootfs-size=3 \ + {{ lorax_work_root }} diff --git a/iso/py/templates/isobuild.tmpl.sh b/iso/py/templates/isobuild.tmpl.sh index 880aa16..21b03f1 100644 --- a/iso/py/templates/isobuild.tmpl.sh +++ b/iso/py/templates/isobuild.tmpl.sh @@ -4,12 +4,29 @@ # Init the container mock \ - -r /mnt/compose/9/latest-Rocky-9/work/entries/lorax-{{ major }}-{{ arch }}.cfg \ + -r /var/tmp/lorax-{{ major }}.cfg \ + --isolation={{ isolation }} \ + --enable-network \ --init +cp /var/tmp/buildImage.sh \ + /var/lib/mock/{{ shortname|lower }}-{{ major }}-{{ arch }}/root/var/tmp + mock \ - -r /mnt/compose/9/latest-Rocky-9/work/entries/lorax-{{ major }}-{{ arch }}.cfg \ + -r /var/tmp/lorax-{{ major }}.cfg \ --shell \ - --enable-network -- /bin/bash /mnt/compose/9/latest-Rocky-9/work/entries/runLorax-{{ arch }}.sh + --isolation={{ isolation }} \ + --enable-network -- /bin/bash /var/tmp/buildImage.sh + +ret_val=$? +if [ $ret_val -eq 0 ]; then + # Copy resulting data to /var/lib/mock/{{ shortname|lower }}-{{ major }}-{{ arch }}/result + mkdir /var/lib/mock/{{ shortname|lower }}-{{ major }}-{{ arch }}/result + cp /var/lib/mock/{{ shortname|lower }}-{{ major }}-{{ arch }}/root/{{ builddir }}/lorax-{{ major }}-{{ arch }}.tar.gz \ + /var/lib/mock/{{ shortname|lower }}-{{ major }}-{{ arch }}/result +else + echo "!! LORAX RUN FAILED !!" + exit 1 +fi # Clean up? diff --git a/iso/py/templates/isolorax.tmpl.sh b/iso/py/templates/isolorax.tmpl.sh index 5dba11d..f08b123 100644 --- a/iso/py/templates/isolorax.tmpl.sh +++ b/iso/py/templates/isolorax.tmpl.sh @@ -1,2 +1,8 @@ #!/bin/bash # You should not be running this manually. 
+ +# everything should be logged like | tee -a {{ log_path }}/lorax-{{ arch }}-{{ date_stamp }}.log +# for the dvd, we need to rely on pulling from {{ entries_root }}/dvd-{{ arch }}-list + +# Run the base lorax steps into a work dir specific to its arch +# copy everything into BaseOS/arch/os diff --git a/iso/py/templates/isomock.tmpl b/iso/py/templates/isomock.tmpl.cfg similarity index 73% rename from iso/py/templates/isomock.tmpl rename to iso/py/templates/isomock.tmpl.cfg index a07589c..69f84e3 100644 --- a/iso/py/templates/isomock.tmpl +++ b/iso/py/templates/isomock.tmpl.cfg @@ -3,16 +3,18 @@ config_opts['description'] = '{{ fullname }}' config_opts['target_arch'] = '{{ arch }}' config_opts['legal_host_arches'] = ('{{ arch }}',) -config_opts['chroot_setup_cmd'] = 'install bash bzip2 coreutils cpio diffutils redhat-release findutils gawk glibc-minimal-langpack grep gzip info patch redhat-rpm-config rpm-build sed shadow-utils tar unzip util-linux which xz genisoimage isomd5sum lorax lorax-templates-rhel lorax-templates-generic' +config_opts['chroot_setup_cmd'] = 'install bash bzip2 coreutils cpio diffutils redhat-release findutils gawk glibc-minimal-langpack grep gzip info patch redhat-rpm-config rpm-build sed shadow-utils tar unzip util-linux which xz {{ required_pkgs|join(' ') }}' config_opts['dist'] = '{{ dist }}' # only useful for --resultdir variable subst config_opts['releasever'] = '{{ major }}' -config_opts['package_manager'] = 'dnf' +config_opts['package_manager'] = '{{ pkgmanager|default("dnf") }}' config_opts['extra_chroot_dirs'] = [ '/run/lock', ] # config_opts['bootstrap_image'] = 'quay.io/rockylinux/rockylinux:{{ major }}' +# If compose is local, the bind mounts will be here +{% if compose_dir_is_here %} config_opts['plugin_conf']['bind_mount_enable'] = True -config_opts['plugin_conf']['bind_mount_opts']['dirs'].append(('/mnt/compose', '/mnt/compose')) - +config_opts['plugin_conf']['bind_mount_opts']['dirs'].append(('{{ compose_dir }}', '{{ compose_dir }}')) +{% endif %} config_opts['dnf.conf'] = """ [main] @@ -35,9 +37,9 @@ module_platform_id=platform:{{ dist }} user_agent={{ user_agent }} {% for repo in repos %} -[{{ repo }}] -name={{ repo }} -baseurl=file:///mnt/compose/{{ major }}/latest-Rocky-{{ major }}/compose/{{ repo }}/{{ arch }}/os +[{{ repo.name }}] +name={{ repo.name }} +baseurl={{ repo.url }} enabled=1 gpgcheck=0 diff --git a/iso/py/util/dnf_utils.py b/iso/py/util/dnf_utils.py index ecc75b9..6f6ec5e 100644 --- a/iso/py/util/dnf_utils.py +++ b/iso/py/util/dnf_utils.py @@ -12,6 +12,7 @@ import subprocess import shlex import time import re +import json #import pipes from common import Color from jinja2 import Environment, FileSystemLoader @@ -793,7 +794,11 @@ class RepoSync: def deploy_extra_files(self): """ - deploys extra files based on info of rlvars + deploys extra files based on info of rlvars including a + extra_files.json + + also deploys COMPOSE_ID and maybe in the future a metadata dir with a + bunch of compose-esque stuff. """ pass diff --git a/iso/py/util/iso_utils.py b/iso/py/util/iso_utils.py index 6bc2c19..1461019 100644 --- a/iso/py/util/iso_utils.py +++ b/iso/py/util/iso_utils.py @@ -14,30 +14,34 @@ import time import re from productmd.common import SortedConfigParser from common import Color +from jinja2 import Environment, FileSystemLoader class IsoBuild: """ This helps us build the generic ISO's for a Rocky Linux release. In - particular, this is for the boot and dvd images. + particular, this is for the boot images. 
- Live images are built in another class. + While there is a function for building the DVD and live images, this not + the main design of this class. The other functions can be called on their + own to facilitate those particular builds. """ def __init__( self, rlvars, config, major, - host=None, + isolation: str = 'auto', + compose_dir_is_here: bool = False, image=None, - arch=None, logger=None ): - self.arch = arch self.image = image - self.host = host self.fullname = rlvars['fullname'] + self.distname = config['distname'] + self.shortname = config['shortname'] # Relevant config items self.major_version = major + self.compose_dir_is_here = compose_dir_is_here self.disttag = config['dist'] self.date_stamp = config['date_stamp'] self.timestamp = time.time() @@ -45,11 +49,21 @@ class IsoBuild: self.compose_base = config['compose_root'] + "/" + major self.iso_drop = config['compose_root'] + "/" + major + "/isos" self.current_arch = config['arch'] - self.extra_files = rlvars['extra_files'] + self.required_pkgs = rlvars['iso_map']['required_pkgs'] + self.mock_work_root = config['mock_work_root'] + self.lorax_result_root = config['mock_work_root'] + "/" + "lorax" + self.mock_isolation = isolation + self.iso_map = rlvars['iso_map'] # Relevant major version items + self.release = rlvars['revision'] + self.minor_version = rlvars['revision'].split('.')[1] self.revision = rlvars['revision'] + "-" + rlvars['rclvl'] - self.arches = rlvars['allowed_arches'] + self.repos = rlvars['iso_map']['repos'] + self.repo_base_url = config['repo_base_url'] + self.project_id = rlvars['project_id'] + + self.extra_files = rlvars['extra_files'] self.staging_dir = os.path.join( config['staging_root'], @@ -57,6 +71,10 @@ class IsoBuild: self.revision ) + # Templates + file_loader = FileSystemLoader('templates') + self.tmplenv = Environment(loader=file_loader) + self.compose_latest_dir = os.path.join( config['compose_root'], major, @@ -73,6 +91,12 @@ class IsoBuild: "work/logs" ) + self.iso_work_dir = os.path.join( + self.compose_latest_dir, + "work/iso", + config['arch'] + ) + # This is temporary for now. if logger is None: self.log = logging.getLogger("iso") @@ -87,6 +111,7 @@ class IsoBuild: self.log.addHandler(handler) self.log.info('iso build init') + self.repolist = self.build_repo_list() self.log.info(self.revision) def run(self): @@ -101,66 +126,121 @@ class IsoBuild: "logs" ) - self.iso_build( - sync_root, - work_root, - log_root, - self.arch, - self.host - ) + self.iso_build() self.log.info('Compose repo directory: %s' % sync_root) self.log.info('ISO Build Logs: %s' % log_root) self.log.info('ISO Build completed.') - def iso_build(self, sync_root, work_root, log_root, arch, host): + def build_repo_list(self): """ - Calls out the ISO builds to the individual hosts listed in the map. - Each architecture is expected to build their own ISOs, similar to - runroot operations of koji and pungi. + Builds the repo dictionary + """ + repolist = [] + for name in self.repos: + if not self.compose_dir_is_here: + constructed_url = '{}/{}/repo/hashed-{}/{}'.format( + self.repo_base_url, + self.project_id, + name, + self.current_arch + ) + else: + constructed_url = 'file://{}/{}/{}/os'.format( + self.compose_latest_sync, + name, + self.current_arch + ) - It IS possible to run locally, but that would mean this only builds - ISOs for the architecture of the running machine. Please keep this in - mind when stating host=local. 
+ + repodata = { + 'name': name, + 'url': constructed_url + } + + repolist.append(repodata) + + return repolist + + def iso_build(self): + """ + This does the general ISO building for the current running + architecture. This generates the mock config and the general script + needed to get this part running. """ # Check for local build, build accordingly # Check for arch specific build, build accordingly # local AND arch cannot be used together, local supersedes. print # warning. - local_only = False - if 'local' in self.host: - local_only = True - - arch = self.arch.copy() - if local_only and self.arch: - self.log.warn('You cannot set local build AND an architecture.') - self.log.warn('The architecture %s will be set' % self.current_arch) - arch = self.current_arch - - def iso_build_local(self, sync_root, work_root, log_root): - """ - Local iso builds only. Architecture is locked. - """ - print() - - def iso_build_remote(self, sync_root, work_root, log_root, arch): - """ - Remote ISO builds. Architecture is all or single. - """ + self.log.info('Generating ISO configuration and scripts') + self.generate_iso_scripts() print() def generate_iso_scripts(self): """ Generates the scripts needed to be ran in the mock roots """ + mock_iso_template = self.tmplenv.get_template('isomock.tmpl.cfg') + mock_sh_template = self.tmplenv.get_template('isobuild.tmpl.sh') + iso_template = self.tmplenv.get_template('buildImage.tmpl.sh') + + mock_iso_path = '/var/tmp/lorax-' + self.major_version + '.cfg' + mock_sh_path = '/var/tmp/isobuild.sh' + iso_template_path = '/var/tmp/buildImage.sh' + + mock_iso_template_output = mock_iso_template.render( + arch=self.current_arch, + major=self.major_version, + fullname=self.fullname, + required_pkgs=self.required_pkgs, + dist=self.disttag, + repos=self.repolist, + user_agent='{{ user_agent }}', + ) + + mock_sh_template_output = mock_sh_template.render( + arch=self.current_arch, + major=self.major_version, + isolation=self.mock_isolation, + builddir=self.mock_work_root, + shortname=self.shortname, + ) + + iso_template_output = iso_template.render( + arch=self.current_arch, + major=self.major_version, + minor=self.minor_version, + shortname=self.shortname, + repos=self.repolist, + variant=self.iso_map['variant'], + lorax=self.iso_map['lorax_removes'], + distname=self.distname, + revision=self.release, + ) + + mock_iso_entry = open(mock_iso_path, "w+") + mock_iso_entry.write(mock_iso_template_output) + mock_iso_entry.close() + + mock_sh_entry = open(mock_sh_path, "w+") + mock_sh_entry.write(mock_sh_template_output) + mock_sh_entry.close() + + iso_template_entry = open(iso_template_path, "w+") + iso_template_entry.write(iso_template_output) + iso_template_entry.close() print() + # !!! Send help, we would prefer to do this using the productmd python + # !!! library. If you are reading this and you can help us, please do so! def treeinfo_write(self): """ Ensure treeinfo is written correctly """ print() + # !!! Send help, we would prefer to do this using the productmd python + # !!! library. If you are reading this and you can help us, please do so! def discinfo_write(self): """ Ensure discinfo is written correctly @@ -187,13 +267,19 @@ class IsoBuild: "", ] - - def generate_graft_points(self): + def build_extra_iso(self): """ - Get a list of packages for an ISO + Builds DVD images based on the data created from the initial lorax on + each arch. This should NOT be called during the usual run() section. 
""" print() + def generate_graft_points(self): + """ + Get a list of packages for an extras ISO. This should NOT be called + during the usual run() section. + """ + print() class LiveBuild: """ diff --git a/sync/common_9 b/sync/common_9 index a0e2262..8b1cbb8 100644 --- a/sync/common_9 +++ b/sync/common_9 @@ -169,6 +169,6 @@ EOF /bin/cp "${TREEINFO_VAR}" "${PRISTINE_TREE}" } -export -f treeinfoFixer -export -f treeinfoModder -export -f treeinfoModderKickstart +#export -f treeinfoFixer +#export -f treeinfoModder +#export -f treeinfoModderKickstart From 2ffd96ad1ef4e488b67722e957c160c0e47847df Mon Sep 17 00:00:00 2001 From: nazunalika Date: Thu, 16 Jun 2022 12:24:19 -0700 Subject: [PATCH 13/64] sort logs by date --- iso/py/templates/buildImage.tmpl.sh | 2 +- iso/py/util/dnf_utils.py | 23 ++++++++++------------- iso/py/util/iso_utils.py | 11 +++++++++-- 3 files changed, 20 insertions(+), 16 deletions(-) diff --git a/iso/py/templates/buildImage.tmpl.sh b/iso/py/templates/buildImage.tmpl.sh index efe097d..3cc055e 100644 --- a/iso/py/templates/buildImage.tmpl.sh +++ b/iso/py/templates/buildImage.tmpl.sh @@ -13,7 +13,7 @@ lorax --product='{{ distname }}' \ --variant={{ variant }} \ --nomacboot \ --buildarch={{ arch }} \ - --volid={{ shortname }}-{{ major }}-{{ minor }}-{{ arch }}-dvd1 \ + --volid={{ shortname }}-{{ major }}-{{ minor }}{{ rc }}-{{ arch }}-boot1 \ --logfile={{ mock_work_root }}/lorax.log \ --rootfs-size=3 \ {{ lorax_work_root }} diff --git a/iso/py/util/dnf_utils.py b/iso/py/util/dnf_utils.py index 6f6ec5e..c5ca97d 100644 --- a/iso/py/util/dnf_utils.py +++ b/iso/py/util/dnf_utils.py @@ -177,7 +177,8 @@ class RepoSync: log_root = os.path.join( work_root, - "logs" + "logs", + self.date_stamp ) if self.dryrun: @@ -313,18 +314,16 @@ class RepoSync: a )) - sync_log = ("{}/{}-{}-{}.log").format( + sync_log = ("{}/{}-{}.log").format( log_root, repo_name, - a, - self.date_stamp + a ) - debug_sync_log = ("{}/{}-{}-debug-{}.log").format( + debug_sync_log = ("{}/{}-{}-debug.log").format( log_root, repo_name, - a, - self.date_stamp + a ) sync_cmd = ("/usr/bin/dnf reposync -c {}.{} --download-metadata " @@ -395,10 +394,9 @@ class RepoSync: 'source/tree' ) - source_sync_log = ("{}/{}-source-{}.log").format( + source_sync_log = ("{}/{}-source.log").format( log_root, - repo_name, - self.date_stamp + repo_name ) source_sync_cmd = ("/usr/bin/dnf reposync -c {} " @@ -690,7 +688,7 @@ class RepoSync: ) repoclosure_cmd = ('/usr/bin/dnf repoclosure {} ' '--repofrompath={},file://{}/{}/{}/os --repo={} --check={} {} ' - '| tee -a {}/{}-repoclosure-{}-{}.log').format( + '| tee -a {}/{}-repoclosure-{}.log').format( repoclosure_arch_list, repo, sync_root, @@ -701,8 +699,7 @@ class RepoSync: join_repo_comb, log_root, repo, - arch, - self.date_stamp + arch ) repoclosure_entry_point_open = open(repoclosure_entry_point_sh, "w+") repoclosure_entry_point_open.write('#!/bin/bash\n') diff --git a/iso/py/util/iso_utils.py b/iso/py/util/iso_utils.py index 1461019..88de9fd 100644 --- a/iso/py/util/iso_utils.py +++ b/iso/py/util/iso_utils.py @@ -30,6 +30,7 @@ class IsoBuild: rlvars, config, major, + rc: bool = False, isolation: str = 'auto', compose_dir_is_here: bool = False, image=None, @@ -54,11 +55,13 @@ class IsoBuild: self.lorax_result_root = config['mock_work_root'] + "/" + "lorax" self.mock_isolation = isolation self.iso_map = rlvars['iso_map'] + self.release_candidate = rc # Relevant major version items self.release = rlvars['revision'] self.minor_version = rlvars['revision'].split('.')[1] self.revision = 
rlvars['revision'] + "-" + rlvars['rclvl'] + self.rclvl = rlvars['rclvl'] self.repos = rlvars['iso_map']['repos'] self.repo_base_url = config['repo_base_url'] self.project_id = rlvars['project_id'] @@ -172,7 +175,6 @@ class IsoBuild: # Check for arch specific build, build accordingly # local AND arch cannot be used together, local supersedes. print # warning. - self.log.info('Generating ISO configuration and scripts') self.generate_iso_scripts() print() @@ -180,6 +182,7 @@ class IsoBuild: """ Generates the scripts needed to be ran in the mock roots """ + self.log.info('Generating ISO configuration and scripts') mock_iso_template = self.tmplenv.get_template('isomock.tmpl.cfg') mock_sh_template = self.tmplenv.get_template('isobuild.tmpl.sh') iso_template = self.tmplenv.get_template('buildImage.tmpl.sh') @@ -188,6 +191,10 @@ class IsoBuild: mock_sh_path = '/var/tmp/isobuild.sh' iso_template_path = '/var/tmp/buildImage.sh' + rclevel = '' + if self.release_candidate: + rclevel = '-' + self.rclvl + mock_iso_template_output = mock_iso_template.render( arch=self.current_arch, major=self.major_version, @@ -216,6 +223,7 @@ class IsoBuild: lorax=self.iso_map['lorax_removes'], distname=self.distname, revision=self.release, + rc=rclevel, ) mock_iso_entry = open(mock_iso_path, "w+") @@ -229,7 +237,6 @@ class IsoBuild: iso_template_entry = open(iso_template_path, "w+") iso_template_entry.write(iso_template_output) iso_template_entry.close() - print() # !!! Send help, we would prefer to do this using the productmd python # !!! library. If you are reading this and you can help us, please do so! From 613687c3c88a9093d2c5d801f12e7ea50b31195b Mon Sep 17 00:00:00 2001 From: nazunalika Date: Thu, 16 Jun 2022 12:56:57 -0700 Subject: [PATCH 14/64] fullversion --- iso/py/util/dnf_utils.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/iso/py/util/dnf_utils.py b/iso/py/util/dnf_utils.py index c5ca97d..bb71427 100644 --- a/iso/py/util/dnf_utils.py +++ b/iso/py/util/dnf_utils.py @@ -68,6 +68,7 @@ class RepoSync: # Relevant major version items self.revision = rlvars['revision'] + "-" + rlvars['rclvl'] + self.fullversion = rlvars['revision'] self.arches = rlvars['allowed_arches'] self.project_id = rlvars['project_id'] self.repo_renames = rlvars['renames'] @@ -516,7 +517,7 @@ class RepoSync: """ compose_base_dir = os.path.join( self.compose_base, - "Rocky-{}-{}".format(self.major_version, self.date_stamp) + "Rocky-{}-{}".format(self.fullversion, self.date_stamp) ) self.log.info('Creating compose directory %s' % compose_base_dir) if not os.path.exists(compose_base_dir): From 84ac6089674343953f0485294461e64502874d85 Mon Sep 17 00:00:00 2001 From: nazunalika Date: Thu, 16 Jun 2022 13:18:18 -0700 Subject: [PATCH 15/64] symlink functions --- iso/py/.common.py.swp | Bin 0 -> 12288 bytes iso/py/util/dnf_utils.py | 15 +++++++++++---- 2 files changed, 11 insertions(+), 4 deletions(-) create mode 100644 iso/py/.common.py.swp diff --git a/iso/py/.common.py.swp b/iso/py/.common.py.swp new file mode 100644 index 0000000000000000000000000000000000000000..255489cc50696fddc31841ca2f0e621f71046459 GIT binary patch literal 12288 zcmeHNJ!~9B6rKRO`Micf0#Cc=v zMCRv?&sskdmFMZRhu0p+r5o%Ra11yG90QI4$ADwNG2j?*3_M&6m}DP$10g?BgnhdB z-nZ+!cywQm0mp!2z%k$$a11yG90QI4$ADwNG2j?*3_OGkuz-+v9wp?H#}GXJ|6l(7 z|I^ci`~choz63r6J_J4hZUOHC7lE_D9B>5K5B%~JA>RR?19yP8ff3LJ7_bDK0FD6^ z`0WrOUjui6K5!a%4*2m&LcRcQ11(@0H~{>7kdSYId%!oqN5Gpv3zz^Zz#mT#@;z_| z*aS9!8Q>uB*#Sa^z!Bi9#|e2KxDC7qFyJU~82BB0SWZ5+ytvCT;23ZWI0hU8|1$#< 
zXIjfEt<`#`vD)ffZne)ZHQMwfEv4;N>mofyEA{|ht%5tIdP6J79s*`~M;jI;WI}}_ zrZk4CFc3X+RfQ4jDS3S_&4??=+x1J0)kf<=HpULV>M1@FTFBVDjyb#;51+YTsCwZj z^e~KG9CFR+h6qF2PUxZwP;0b!)(KY6c8v*mU&K7XtY%0VoQEl={I!&Y)JQuy#hPs7bSzVl1Yb-IxG^Fg z__Tel-t=~PjHRKk@IWd?W2xwBx?1e zW+aOb);9RRI`eJ5@&WY=OJqXyX(fxkVl&i}abFCO$hx+3V%Sd9*11wwGr0(IH zHVo~t*v<@tg1M47{Prs@90axH=ckD+&gz&&p|4q=cS6a6sx64AsmwUbq)d2RwSi4j zPkGan$2}QAa`8J z6@kz@<+Lf{bh9#@tKrZvY#e3B^>}QgUerc$7A=$}5xjM5d+8`CjkT+FQxOJ%P^F3D ziR^Sy+MQH|)?#Q(q8DqmaT-}&U;Eh*AKSn0OEsvCj@J?;Uqvg>r4!_-(?dxNq#C1V zrCsYN5=u!`OVX}AL0p>tlbOmEUn~!E1S#5$jJJc2VpFq0>6vY9=@_kY0Iv5kE>xu@ z&Mm_Q-)UJ0OPlTPi=`M5aUaz7?M<~Zd1W%14E)J+lgpEP8}{mN4T};xNpFy1 ztD^nixJmNOkl#<(=B=|f%}@uHU-a4DkoVTn%_%Ol7npk2X6NUx&PHA)%X(BPJYERb zE-cmC&BlfLmiY^X=1RTUY+c?GFU*&2nrqv}3#IkasrLCZS5Db?RX*$dONIRo=|U-8 zS!=H}@ALCwVSTn;uWvh9m@CBX`u2JjZ0x;|X`T1dqD`VL+t&)C-89B4SoX17(V8{g z;CMNVX$k}T9^@(#mM)o^B*lSwk@ure4s7X_k69-wR7h4Swk@r^w}n#Pm-SITON~x< Q3N0>ofm|W9Ay=II3D8I1+5i9m literal 0 HcmV?d00001 diff --git a/iso/py/util/dnf_utils.py b/iso/py/util/dnf_utils.py index bb71427..bfdb88a 100644 --- a/iso/py/util/dnf_utils.py +++ b/iso/py/util/dnf_utils.py @@ -67,6 +67,7 @@ class RepoSync: self.compose_base = config['compose_root'] + "/" + major # Relevant major version items + self.shortname = config['shortname'] self.revision = rlvars['revision'] + "-" + rlvars['rclvl'] self.fullversion = rlvars['revision'] self.arches = rlvars['allowed_arches'] @@ -190,7 +191,7 @@ class RepoSync: if self.fullrun: self.deploy_extra_files() - self.symlink_to_latest() + self.symlink_to_latest(generated_dir) if self.repoclosure: self.repoclosure_work(sync_root, work_root, log_root) @@ -525,7 +526,7 @@ class RepoSync: return compose_base_dir - def symlink_to_latest(self): + def symlink_to_latest(self, generated_dir): """ Emulates pungi and symlinks latest-Rocky-X @@ -533,7 +534,13 @@ class RepoSync: 'latest' directory is what is rsynced on to staging after completion. This link should not change often. """ - pass + try: + os.remove(self.compose_latest_dir) + except: + pass + + self.log.info('Symlinking to latest-{}-{}...'.format(self.shortname, self.major_version)) + os.symlink(generated_dir, self.compose_latest_dir) def generate_conf(self, dest_path='/var/tmp') -> str: """ @@ -798,7 +805,7 @@ class RepoSync: also deploys COMPOSE_ID and maybe in the future a metadata dir with a bunch of compose-esque stuff. 
""" - pass + self.log.info('Deploying extra files...') class SigRepoSync: """ From c17e7216333b83158adff1ac975c2900e0bb38f3 Mon Sep 17 00:00:00 2001 From: nazunalika Date: Thu, 16 Jun 2022 13:18:27 -0700 Subject: [PATCH 16/64] symlink functions --- iso/py/.common.py.swp | Bin 12288 -> 0 bytes 1 file changed, 0 insertions(+), 0 deletions(-) delete mode 100644 iso/py/.common.py.swp diff --git a/iso/py/.common.py.swp b/iso/py/.common.py.swp deleted file mode 100644 index 255489cc50696fddc31841ca2f0e621f71046459..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 12288 zcmeHNJ!~9B6rKRO`Micf0#Cc=v zMCRv?&sskdmFMZRhu0p+r5o%Ra11yG90QI4$ADwNG2j?*3_M&6m}DP$10g?BgnhdB z-nZ+!cywQm0mp!2z%k$$a11yG90QI4$ADwNG2j?*3_OGkuz-+v9wp?H#}GXJ|6l(7 z|I^ci`~choz63r6J_J4hZUOHC7lE_D9B>5K5B%~JA>RR?19yP8ff3LJ7_bDK0FD6^ z`0WrOUjui6K5!a%4*2m&LcRcQ11(@0H~{>7kdSYId%!oqN5Gpv3zz^Zz#mT#@;z_| z*aS9!8Q>uB*#Sa^z!Bi9#|e2KxDC7qFyJU~82BB0SWZ5+ytvCT;23ZWI0hU8|1$#< zXIjfEt<`#`vD)ffZne)ZHQMwfEv4;N>mofyEA{|ht%5tIdP6J79s*`~M;jI;WI}}_ zrZk4CFc3X+RfQ4jDS3S_&4??=+x1J0)kf<=HpULV>M1@FTFBVDjyb#;51+YTsCwZj z^e~KG9CFR+h6qF2PUxZwP;0b!)(KY6c8v*mU&K7XtY%0VoQEl={I!&Y)JQuy#hPs7bSzVl1Yb-IxG^Fg z__Tel-t=~PjHRKk@IWd?W2xwBx?1e zW+aOb);9RRI`eJ5@&WY=OJqXyX(fxkVl&i}abFCO$hx+3V%Sd9*11wwGr0(IH zHVo~t*v<@tg1M47{Prs@90axH=ckD+&gz&&p|4q=cS6a6sx64AsmwUbq)d2RwSi4j zPkGan$2}QAa`8J z6@kz@<+Lf{bh9#@tKrZvY#e3B^>}QgUerc$7A=$}5xjM5d+8`CjkT+FQxOJ%P^F3D ziR^Sy+MQH|)?#Q(q8DqmaT-}&U;Eh*AKSn0OEsvCj@J?;Uqvg>r4!_-(?dxNq#C1V zrCsYN5=u!`OVX}AL0p>tlbOmEUn~!E1S#5$jJJc2VpFq0>6vY9=@_kY0Iv5kE>xu@ z&Mm_Q-)UJ0OPlTPi=`M5aUaz7?M<~Zd1W%14E)J+lgpEP8}{mN4T};xNpFy1 ztD^nixJmNOkl#<(=B=|f%}@uHU-a4DkoVTn%_%Ol7npk2X6NUx&PHA)%X(BPJYERb zE-cmC&BlfLmiY^X=1RTUY+c?GFU*&2nrqv}3#IkasrLCZS5Db?RX*$dONIRo=|U-8 zS!=H}@ALCwVSTn;uWvh9m@CBX`u2JjZ0x;|X`T1dqD`VL+t&)C-89B4SoX17(V8{g z;CMNVX$k}T9^@(#mM)o^B*lSwk@ure4s7X_k69-wR7h4Swk@r^w}n#Pm-SITON~x< Q3N0>ofm|W9Ay=II3D8I1+5i9m From 37cd4e1b2ebe472b49c2b066021bfebb58eac295 Mon Sep 17 00:00:00 2001 From: nazunalika Date: Thu, 16 Jun 2022 22:55:07 -0700 Subject: [PATCH 17/64] commit iso build portions --- iso/py/build-iso | 0 iso/py/configs/el9-beta.yaml | 118 ++++++++++++++++++++++++++++ iso/py/configs/el9.yaml | 1 + iso/py/configs/el9lh.yaml | 118 ++++++++++++++++++++++++++++ iso/py/sync-from-peridot-test | 4 +- iso/py/templates/buildImage.tmpl.sh | 44 +++++++++-- iso/py/templates/isobuild.tmpl.sh | 40 +++++++--- iso/py/templates/isomock.tmpl.cfg | 4 +- iso/py/util/dnf_utils.py | 1 + iso/py/util/iso_utils.py | 27 ++++++- 10 files changed, 331 insertions(+), 26 deletions(-) mode change 100644 => 100755 iso/py/build-iso create mode 100644 iso/py/configs/el9-beta.yaml create mode 100644 iso/py/configs/el9lh.yaml diff --git a/iso/py/build-iso b/iso/py/build-iso old mode 100644 new mode 100755 diff --git a/iso/py/configs/el9-beta.yaml b/iso/py/configs/el9-beta.yaml new file mode 100644 index 0000000..13eca3e --- /dev/null +++ b/iso/py/configs/el9-beta.yaml @@ -0,0 +1,118 @@ +--- +'9-beta': + fullname: 'Rocky Linux 9' + revision: '9.1' + rclvl: 'RC1' + bugurl: 'https://bugs.rockylinux.org' + allowed_arches: + - x86_64 + - aarch64 + - ppc64le + - s390x + provide_multilib: True + project_id: '' + repo_symlinks: + NFV: 'nfv' + renames: + all: 'devel' + all_repos: + - 'all' + - 'BaseOS' + - 'AppStream' + - 'CRB' + - 'HighAvailability' + - 'ResilientStorage' + - 'RT' + - 'NFV' + - 'SAP' + - 'SAPHANA' + - 'extras' + - 'plus' + no_comps_or_groups: + - 'all' + - 'extras' + - 'plus' + comps_or_groups: + - 'BaseOS' + - 'AppStream' + - 'CRB' + 
- 'HighAvailability' + - 'ResilientStorage' + - 'RT' + - 'NFV' + - 'SAP' + - 'SAPHANA' + has_modules: + - 'AppStream' + - 'CRB' + iso_map: + hosts: + x86_64: '' + aarch64: '' + ppc64le: '' + s390x: '' + images: + - dvd1 + - minimal + - boot + repos: + - 'BaseOS' + - 'AppStream' + variant: 'BaseOS' + lorax_removes: + - 'libreport-rhel-anaconda-bugzilla' + required_pkgs: + - 'lorax' + - 'genisoimage' + - 'isomd5sum' + - 'lorax-templates-rhel' + - 'lorax-templates-generic' + repoclosure_map: + arches: + x86_64: '--forcearch=x86_64 --arch=x86_64 --arch=athlon --arch=i686 --arch=i586 --arch=i486 --arch=i386 --arch=noarch' + aarch64: '--forcearch=aarch64 --arch=aarch64 --arch=noarch' + ppc64le: '--forcearch=ppc64le --arch=ppc64le --arch=noarch' + s390x: '--forcearch=s390x --arch=s390x --arch=noarch' + repos: + devel: [] + BaseOS: [] + AppStream: + - BaseOS + CRB: + - BaseOS + - AppStream + HighAvailability: + - BaseOS + - AppStream + ResilientStorage: + - BaseOS + - AppStream + RT: + - BaseOS + - AppStream + NFV: + - BaseOS + - AppStream + SAP: + - BaseOS + - AppStream + - HighAvailability + SAPHANA: + - BaseOS + - AppStream + - HighAvailability + extra_files: + git_repo: 'https://git.rockylinux.org/staging/src/rocky-release.git' + git_raw_path: 'https://git.rockylinux.org/staging/src/rocky-release/-/raw/r9/' + branch: 'r9' + gpg: + stable: 'SOURCES/RPM-GPG-KEY-Rocky-9' + testing: 'SOURCES/RPM-GPG-KEY-Rocky-9-Testing' + list: + - 'SOURCES/Contributors' + - 'SOURCES/COMMUNITY-CHARTER' + - 'SOURCES/EULA' + - 'SOURCES/LICENSE' + - 'SOURCES/RPM-GPG-KEY-Rocky-9' + - 'SOURCES/RPM-GPG-KEY-Rocky-9-Testing' +... diff --git a/iso/py/configs/el9.yaml b/iso/py/configs/el9.yaml index b0a371b..faacbdb 100644 --- a/iso/py/configs/el9.yaml +++ b/iso/py/configs/el9.yaml @@ -109,6 +109,7 @@ stable: 'SOURCES/RPM-GPG-KEY-Rocky-9' testing: 'SOURCES/RPM-GPG-KEY-Rocky-9-Testing' list: + - 'SOURCES/Contributors' - 'SOURCES/COMMUNITY-CHARTER' - 'SOURCES/EULA' - 'SOURCES/LICENSE' diff --git a/iso/py/configs/el9lh.yaml b/iso/py/configs/el9lh.yaml new file mode 100644 index 0000000..1aa3f54 --- /dev/null +++ b/iso/py/configs/el9lh.yaml @@ -0,0 +1,118 @@ +--- +'9-lookahead': + fullname: 'Rocky Linux 9' + revision: '9.1' + rclvl: 'RC1' + bugurl: 'https://bugs.rockylinux.org' + allowed_arches: + - x86_64 + - aarch64 + - ppc64le + - s390x + provide_multilib: True + project_id: '' + repo_symlinks: + NFV: 'nfv' + renames: + all: 'devel' + all_repos: + - 'all' + - 'BaseOS' + - 'AppStream' + - 'CRB' + - 'HighAvailability' + - 'ResilientStorage' + - 'RT' + - 'NFV' + - 'SAP' + - 'SAPHANA' + - 'extras' + - 'plus' + no_comps_or_groups: + - 'all' + - 'extras' + - 'plus' + comps_or_groups: + - 'BaseOS' + - 'AppStream' + - 'CRB' + - 'HighAvailability' + - 'ResilientStorage' + - 'RT' + - 'NFV' + - 'SAP' + - 'SAPHANA' + has_modules: + - 'AppStream' + - 'CRB' + iso_map: + hosts: + x86_64: '' + aarch64: '' + ppc64le: '' + s390x: '' + images: + - dvd1 + - minimal + - boot + repos: + - 'BaseOS' + - 'AppStream' + variant: 'BaseOS' + lorax_removes: + - 'libreport-rhel-anaconda-bugzilla' + required_pkgs: + - 'lorax' + - 'genisoimage' + - 'isomd5sum' + - 'lorax-templates-rhel' + - 'lorax-templates-generic' + repoclosure_map: + arches: + x86_64: '--forcearch=x86_64 --arch=x86_64 --arch=athlon --arch=i686 --arch=i586 --arch=i486 --arch=i386 --arch=noarch' + aarch64: '--forcearch=aarch64 --arch=aarch64 --arch=noarch' + ppc64le: '--forcearch=ppc64le --arch=ppc64le --arch=noarch' + s390x: '--forcearch=s390x --arch=s390x --arch=noarch' + repos: + devel: 
[] + BaseOS: [] + AppStream: + - BaseOS + CRB: + - BaseOS + - AppStream + HighAvailability: + - BaseOS + - AppStream + ResilientStorage: + - BaseOS + - AppStream + RT: + - BaseOS + - AppStream + NFV: + - BaseOS + - AppStream + SAP: + - BaseOS + - AppStream + - HighAvailability + SAPHANA: + - BaseOS + - AppStream + - HighAvailability + extra_files: + git_repo: 'https://git.rockylinux.org/staging/src/rocky-release.git' + git_raw_path: 'https://git.rockylinux.org/staging/src/rocky-release/-/raw/r9/' + branch: 'r9lh' + gpg: + stable: 'SOURCES/RPM-GPG-KEY-Rocky-9' + testing: 'SOURCES/RPM-GPG-KEY-Rocky-9-Testing' + list: + - 'SOURCES/Contributors' + - 'SOURCES/COMMUNITY-CHARTER' + - 'SOURCES/EULA' + - 'SOURCES/LICENSE' + - 'SOURCES/RPM-GPG-KEY-Rocky-9' + - 'SOURCES/RPM-GPG-KEY-Rocky-9-Testing' +... diff --git a/iso/py/sync-from-peridot-test b/iso/py/sync-from-peridot-test index 1166376..6d286e9 100755 --- a/iso/py/sync-from-peridot-test +++ b/iso/py/sync-from-peridot-test @@ -7,10 +7,10 @@ import argparse from util import Checks from util import RepoSync -rlvars = rldict['9'] +rlvars = rldict['9-lookahead'] r = Checks(rlvars, config['arch']) r.check_valid_arch() #a = RepoSync(rlvars, config, major="9", repo="ResilientStorage", parallel=True, ignore_debug=False, ignore_source=False) a = RepoSync(rlvars, config, major="9", repo="BaseOS", parallel=True, ignore_debug=False, ignore_source=False, hashed=True) -a.run() +#a.run() diff --git a/iso/py/templates/buildImage.tmpl.sh b/iso/py/templates/buildImage.tmpl.sh index 3cc055e..a0f0231 100644 --- a/iso/py/templates/buildImage.tmpl.sh +++ b/iso/py/templates/buildImage.tmpl.sh @@ -1,19 +1,47 @@ #!/bin/bash +VOLID="{{ shortname }}-{{ major }}-{{ minor }}{{ rc }}-{{ arch }}-boot1" +LOGFILE="{{ builddir }}/lorax-{{ arch }}.log" +VARIANT="{{ variant }}" +ARCH="{{ arch }}" +VERSION="{{ revision }}" +PRODUCT="{{ distname }}" +MOCKBLD="{{ builddir }}" +LORAXRES="{{ lorax_work_root }}" +LORAX_TAR="lorax-{{ major }}-{{ arch }}.tar.gz" + {% for pkg in lorax %} sed -i '/{{ pkg }}/ s/^/#/' /usr/share/lorax/templates.d/80-rhel/runtime-install.tmpl {% endfor %} -lorax --product='{{ distname }}' \ - --version='{{ revision }}' \ - --release='{{ revision }}' \ +lorax --product="${PRODUCT}" \ + --version="${VERSION}" \ + --release="${VERSION}" \ {%- for repo in repos %} --source={{ repo.url }} \ {%- endfor %} - --variant={{ variant }} \ + --variant="${VARIANT}" \ --nomacboot \ - --buildarch={{ arch }} \ - --volid={{ shortname }}-{{ major }}-{{ minor }}{{ rc }}-{{ arch }}-boot1 \ - --logfile={{ mock_work_root }}/lorax.log \ + --buildarch="${ARCH}" \ + --volid="${VOLID}" \ + --logfile="${LOGFILE}" \ --rootfs-size=3 \ - {{ lorax_work_root }} + "${LORAXRES}" + +ret_val=$? +if [ $ret_val -ne 0 ]; then + echo "!! LORAX FAILED !!" + exit 1 +fi + +# If we didn't fail, let's pack up everything! +cd "${MOCKBLD}" +tar czf "${LORAX_TAR}" lorax "${LOGFILE}" + +tar_ret_val=$? +if [ $ret_val -ne 0 ]; then + echo "!! PROBLEM CREATING ARCHIVE !!" + exit 1 +fi + +exit 0 diff --git a/iso/py/templates/isobuild.tmpl.sh b/iso/py/templates/isobuild.tmpl.sh index 21b03f1..bb4d410 100644 --- a/iso/py/templates/isobuild.tmpl.sh +++ b/iso/py/templates/isobuild.tmpl.sh @@ -2,28 +2,44 @@ # This is a template that is used to build ISO's for Rocky Linux. Only under # extreme circumstances should you be filling this out and running manually. 
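# Flow of this script, as implemented below: initialise a mock chroot from the
# generated lorax-{{ major }}.cfg, copy the buildImage.sh lorax wrapper into the
# chroot, run it through `mock --shell` with networking enabled (teeing output to
# mock-output.log), then copy the resulting lorax-{{ major }}-{{ arch }}.tar.gz
# out of the chroot into the mock result directory. The init and shell steps
# check their exit codes and abort on failure.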
+# Vars +MOCK_CFG="/var/tmp/lorax-{{ major }}.cfg" +MOCK_ROOT="/var/lib/mock/{{ shortname|lower }}-{{ major }}-{{ arch }}" +MOCK_RESL="${MOCK_ROOT}/result" +MOCK_CHRO="${MOCK_ROOT}/root" +MOCK_LOG="${MOCK_RESL}/mock-output.log" +LORAX_SCR="/var/tmp/buildImage.sh" +LORAX_TAR="lorax-{{ major }}-{{ arch }}.tar.gz" +ISOLATION="{{ isolation }}" +BUILDDIR="{{ builddir }}" + # Init the container mock \ - -r /var/tmp/lorax-{{ major }}.cfg \ - --isolation={{ isolation }} \ + -r "${MOCK_CFG}" \ + --isolation="${ISOLATION}" \ --enable-network \ --init -cp /var/tmp/buildImage.sh \ - /var/lib/mock/{{ shortname|lower }}-{{ major }}-{{ arch }}/root/var/tmp +init_ret_val=$? +if [ $init_ret_val -ne 0 ]; then + echo "!! MOCK INIT FAILED !!" + exit 1 +fi + +mkdir -p "${MOCK_RESL}" +cp "${LORAX_SCR}" "${MOCK_CHRO}${LORAX_SCR}" mock \ - -r /var/tmp/lorax-{{ major }}.cfg \ + -r "${MOCK_CFG}" \ --shell \ - --isolation={{ isolation }} \ - --enable-network -- /bin/bash /var/tmp/buildImage.sh + --isolation="${ISOLATION}" \ + --enable-network -- /bin/bash /var/tmp/buildImage.sh | tee -a "${MOCK_LOG}" -ret_val=$? -if [ $ret_val -eq 0 ]; then +mock_ret_val=$? +if [ $mock_ret_val -eq 0 ]; then # Copy resulting data to /var/lib/mock/{{ shortname|lower }}-{{ major }}-{{ arch }}/result - mkdir /var/lib/mock/{{ shortname|lower }}-{{ major }}-{{ arch }}/result - cp /var/lib/mock/{{ shortname|lower }}-{{ major }}-{{ arch }}/root/{{ builddir }}/lorax-{{ major }}-{{ arch }}.tar.gz \ - /var/lib/mock/{{ shortname|lower }}-{{ major }}-{{ arch }}/result + mkdir -p "${MOCK_RESL}" + cp "${MOCK_CHRO}${BUILDDIR}/${LORAX_TAR}" "${MOCK_RESL}" else echo "!! LORAX RUN FAILED !!" exit 1 diff --git a/iso/py/templates/isomock.tmpl.cfg b/iso/py/templates/isomock.tmpl.cfg index 69f84e3..53cd821 100644 --- a/iso/py/templates/isomock.tmpl.cfg +++ b/iso/py/templates/isomock.tmpl.cfg @@ -1,4 +1,4 @@ -config_opts['root'] = 'rocky-{{ major }}-{{ arch }}' +config_opts['root'] = '{{ shortname|lower }}-{{ major }}-{{ arch }}' config_opts['description'] = '{{ fullname }}' config_opts['target_arch'] = '{{ arch }}' config_opts['legal_host_arches'] = ('{{ arch }}',) @@ -8,7 +8,7 @@ config_opts['dist'] = '{{ dist }}' # only useful for --resultdir variable subst config_opts['releasever'] = '{{ major }}' config_opts['package_manager'] = '{{ pkgmanager|default("dnf") }}' config_opts['extra_chroot_dirs'] = [ '/run/lock', ] -# config_opts['bootstrap_image'] = 'quay.io/rockylinux/rockylinux:{{ major }}' +# config_opts['bootstrap_image'] = 'quay.io/{{ shortname|lower }}/{{ shortname|lower }}:{{ major }}' # If compose is local, the bind mounts will be here {% if compose_dir_is_here %} diff --git a/iso/py/util/dnf_utils.py b/iso/py/util/dnf_utils.py index bfdb88a..a965d59 100644 --- a/iso/py/util/dnf_utils.py +++ b/iso/py/util/dnf_utils.py @@ -46,6 +46,7 @@ class RepoSync: fullrun: bool = False, nofail: bool = False, gpgkey: str = 'stable', + rlmode: str = 'stable', logger=None ): self.nofail = nofail diff --git a/iso/py/util/iso_utils.py b/iso/py/util/iso_utils.py index 88de9fd..286a6d9 100644 --- a/iso/py/util/iso_utils.py +++ b/iso/py/util/iso_utils.py @@ -176,11 +176,12 @@ class IsoBuild: # local AND arch cannot be used together, local supersedes. print # warning. self.generate_iso_scripts() - print() + self.run_lorax() def generate_iso_scripts(self): """ - Generates the scripts needed to be ran in the mock roots + Generates the scripts needed to be ran to run lorax in mock as well as + package up the results. 
""" self.log.info('Generating ISO configuration and scripts') mock_iso_template = self.tmplenv.get_template('isomock.tmpl.cfg') @@ -199,6 +200,7 @@ class IsoBuild: arch=self.current_arch, major=self.major_version, fullname=self.fullname, + shortname=self.shortname, required_pkgs=self.required_pkgs, dist=self.disttag, repos=self.repolist, @@ -224,6 +226,8 @@ class IsoBuild: distname=self.distname, revision=self.release, rc=rclevel, + builddir=self.mock_work_root, + lorax_work_root=self.lorax_result_root, ) mock_iso_entry = open(mock_iso_path, "w+") @@ -238,6 +242,25 @@ class IsoBuild: iso_template_entry.write(iso_template_output) iso_template_entry.close() + os.chmod(mock_sh_path, 0o755) + os.chmod(iso_template_path, 0o755) + + def run_lorax(self): + """ + This actually runs lorax on this system. It will call the right scripts + to do so. + """ + lorax_cmd = '/bin/bash /var/tmp/isobuild.sh' + self.log.info('Starting lorax...') + + try: + subprocess.call(shlex.split(lorax_cmd)) + except: + self.log.error('An error occured during execution.') + self.log.error('See the logs for more information.') + raise SystemExit() + + # !!! Send help, we would prefer to do this using the productmd python # !!! library. If you are reading this and you can help us, please do so! def treeinfo_write(self): From 1ae3c67ba6726ffe3518215b45cc61fc51c9aba0 Mon Sep 17 00:00:00 2001 From: nazunalika Date: Thu, 16 Jun 2022 23:25:17 -0700 Subject: [PATCH 18/64] have a working script --- iso/py/.sync-from-peridot.swp | Bin 0 -> 12288 bytes iso/py/build-iso | 23 +++++++++++++++++++++++ iso/py/configs/el8.yaml | 2 ++ iso/py/configs/el9-beta.yaml | 2 ++ iso/py/configs/el9.yaml | 2 ++ iso/py/configs/el9lh.yaml | 2 ++ iso/py/util/.dnf_utils.py.swp | Bin 0 -> 16384 bytes iso/py/util/iso_utils.py | 2 +- 8 files changed, 32 insertions(+), 1 deletion(-) create mode 100644 iso/py/.sync-from-peridot.swp create mode 100644 iso/py/util/.dnf_utils.py.swp diff --git a/iso/py/.sync-from-peridot.swp b/iso/py/.sync-from-peridot.swp new file mode 100644 index 0000000000000000000000000000000000000000..ffca78d9b593c362f3c08739b5b7b98ec0195717 GIT binary patch literal 12288 zcmeHNO^X~w7_N9Rends%LA_LFgqhX(h>2o?%wY*Adx%*~+{+S%n(msOa;mF3Rn@!9 zB_e`H!HY!jq@d`*gW_E;9`zvj13W20KsS-i(G~d-`K2Y54 zcf4K%945jItw@HEHhk!`mUB!9tB2WQXcsE#PmNCd88;%<2O{i`7e;}Bz`!aDoFHe< zJa^iAv|Fd>V~@VRigU0H3VE9N zy&8sFU?4CM7zhjm1_A?tfxtjuATSUZ2n+-U0t0s;11u)wqk9SY{5Up`|Nk$4|Nr_h zA-@2h0-pfy18)HWxCFckoC8h*e?3IVE#PP11K%#fxtjuATSUZ_&+kh1}0bCHA2=1tuCpPgqxA! 
zHkZy0%3-~#^ppu%uPQzE<6gZ!&80-h+Q5&;m}o3wv)F5Z;r{iE873ufa#)dOk~aZn z%+7dRXDElwvYm@;j0m;Ca-uOYJkxlk(&mQOR(s|eLy;)FPdTgmz>!>yhK_}MbelXxx0x!3DMvFMr%X|&>7>{hjro*88&qdL>QTv+?t5t*%I-|=lOJ(7ua`dbv7#GP zbO&CgbXH8|<&G>_Ji zkI|nN`IZ&BboMf|0!6r>ilzVPUx@oQAujqR06iD$bc-8{8f^8b;n#9u&=;?uzI<)3 zOG)*VqIFe;3SE~)2k}8q#pl6#C9KUtjop~r$cUn@N3G5BZE@-wN6{-T=oI_++eR00 z%s857UaPc~Uh&ydP_d30=)Uh$$YPfR0d90HI0mk;`*_MOr90m-RVvzQnl!l_|y S&2S>rkgFY`LUw-tgVBrE9wN`ZaEOR-#DtOmi$jMg zABO3K2`$HwkkQ`0RC~je;pTJqWm#>)Cw0$hlaL({W%5 zdYH}I9{iPOz%%f=7%1}bsg0xLwlnWLZR+=Xx6orZfBbd1@|8RTo&nE*XTUSy8So5v z20R0vf&Whi^6W7AJW_w7O~0_`H+0Ug*yBDcpLO*Au=>wf`L8?rzgzvMt^7_Kz{Ppl z>Y3F)+R?vc^^w&dwh6oXf3x~+t4})mzjosPw4;B~>Qh_)y>F@t&cD4R?)=q`-d6J` z&wyvZGvFEU40r}S1D*lTfM>un;2H1?{7*6v#)P~FnH@3D!}$Ndd;Wj<9fUjwJPSMq zJPLdRcnEj^P(TEH4EPYR0Stj2@J`^Rw-fS9;1|GCzz>0M1CIc6-~gBc_W?HpKRZgu zHQ>9zBfu8`0o)6m1BSqH;Q1qjJPlk0?gu^rydO9Q{N+aE3w$5=GLQlha2_}T90p!E zOvulH?*I<~0k8r5=?2Jw?*R`2_W-v5e?COW(B@Feg> z;FG`|z%k$k;1KW|^xYG{*MYA94+Eb9ZUc@3M*-8nHv%gUYL64dmE5^!zUWvNa-w_J z=`<7LVI-4GY8HqvXF3mJu1?m5Z38(mTDnCgzV8V0>2O!_bPa6SV@Yu9{67v|<)-Ip z^qRs~Z8~)cAGXX--T&F%a4c04=4(l~E7kSsmf}V0UArQgj``&Bz%iwy%m-H%c5Rkw z8edDMCRFK0u^HD~ro;trp6f&qVlVEAqPPwvQjaQpoCh?V! zE))xrFhc{Cn1xDT9w^q*46YED3Ln$!w3Mf|CJob=3YM+& zl$inPYS*$(0t@+y&lJ4IyfshspL_=PbEc+N}*x%*tss zSk&p$9VhL?pJbCgHix6CP&czojveGG3`{`X=);Dz!D>DJj3owJ+fxE&x!|LQn@q_W zk1_0p zuIi{|TGLQZ;b;pBk}0&6dXHxTvWD6f+Xh&*@WE)HWuYR|;jQbXrZF28lZ9n@*)}xu z0|w^QR+uSbR>Uo8OKo+m=9RW8w3VqGhupNID%8pon{|=mz-DTK<@2L5Wi7c9iHzAM z7GrZy8erg{w+4%Sf~i}rgR5<-=$mweFKprHd?++)d$CPhWm2_om4zE%t8A+NUS;9B ze3eaOT&*%|jf;Q7upAbv?9AvWyQ$5dNoR?8$?mV3sGZv7mZvQgu*1bIe%WPI6kApu`ns?~v|Hxu*ETYM&L#?9zwy zZo$m5Spa4qvCSu!(41{71!c6d%9M#r!-U4z9Y9;|n-@z!nS`laF2K6Hy$=^zVNx*~ z*1127$2+FFg>%E{|CAyoz31QHNqE8HQ4RHSdOGM+I_?8UCecG{K|bETXo zby-gZJ8%kJjBpc=n64ez#~o)NYXzutLbE1dEv_@rQ}Z1>F`a)=7U;vX_#&Tvp!|-W zp538{us0VxEe?iKO>QT|^q{8mBB^?;?4yJ$B^BJ3+vMu5hpS=f);#}T#q;_zfO-DM z<79dK4%ff3@~4f?AD#ivfM>un;2H1?cm_NJo&nE*XTUSy8So7Jw;3=mQ_PX!?GPzT b_;K%y%ys!j2k$I(`MwAL0xI7f4aq+Nu3h%S literal 0 HcmV?d00001 diff --git a/iso/py/util/iso_utils.py b/iso/py/util/iso_utils.py index 286a6d9..d168fff 100644 --- a/iso/py/util/iso_utils.py +++ b/iso/py/util/iso_utils.py @@ -59,7 +59,7 @@ class IsoBuild: # Relevant major version items self.release = rlvars['revision'] - self.minor_version = rlvars['revision'].split('.')[1] + self.minor_version = rlvars['minor'] self.revision = rlvars['revision'] + "-" + rlvars['rclvl'] self.rclvl = rlvars['rclvl'] self.repos = rlvars['iso_map']['repos'] From adc0cd3b812c15c273de4693b6e81417a5a1f88e Mon Sep 17 00:00:00 2001 From: nazunalika Date: Fri, 17 Jun 2022 00:16:21 -0700 Subject: [PATCH 19/64] add rc if statement --- iso/py/.sync-from-peridot.swp | Bin 12288 -> 0 bytes iso/py/templates/buildImage.tmpl.sh | 3 +++ iso/py/util/.dnf_utils.py.swp | Bin 16384 -> 0 bytes 3 files changed, 3 insertions(+) delete mode 100644 iso/py/.sync-from-peridot.swp delete mode 100644 iso/py/util/.dnf_utils.py.swp diff --git a/iso/py/.sync-from-peridot.swp b/iso/py/.sync-from-peridot.swp deleted file mode 100644 index ffca78d9b593c362f3c08739b5b7b98ec0195717..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 12288 zcmeHNO^X~w7_N9Rends%LA_LFgqhX(h>2o?%wY*Adx%*~+{+S%n(msOa;mF3Rn@!9 zB_e`H!HY!jq@d`*gW_E;9`zvj13W20KsS-i(G~d-`K2Y54 zcf4K%945jItw@HEHhk!`mUB!9tB2WQXcsE#PmNCd88;%<2O{i`7e;}Bz`!aDoFHe< zJa^iAv|Fd>V~@VRigU0H3VE9N zy&8sFU?4CM7zhjm1_A?tfxtjuATSUZ2n+-U0t0s;11u)wqk9SY{5Up`|Nk$4|Nr_h 
zA-@2h0-pfy18)HWxCFckoC8h*e?3IVE#PP11K%#fxtjuATSUZ_&+kh1}0bCHA2=1tuCpPgqxA! zHkZy0%3-~#^ppu%uPQzE<6gZ!&80-h+Q5&;m}o3wv)F5Z;r{iE873ufa#)dOk~aZn z%+7dRXDElwvYm@;j0m;Ca-uOYJkxlk(&mQOR(s|eLy;)FPdTgmz>!>yhK_}MbelXxx0x!3DMvFMr%X|&>7>{hjro*88&qdL>QTv+?t5t*%I-|=lOJ(7ua`dbv7#GP zbO&CgbXH8|<&G>_Ji zkI|nN`IZ&BboMf|0!6r>ilzVPUx@oQAujqR06iD$bc-8{8f^8b;n#9u&=;?uzI<)3 zOG)*VqIFe;3SE~)2k}8q#pl6#C9KUtjop~r$cUn@N3G5BZE@-wN6{-T=oI_++eR00 z%s857UaPc~Uh&ydP_d30=)Uh$$YPfR0d90HI0mk;`*_MOr90m-RVvzQnl!l_|y S&2S>rkgFY`LUw-tgVBrE9wN`ZaEOR-#DtOmi$jMg zABO3K2`$HwkkQ`0RC~je;pTJqWm#>)Cw0$hlaL({W%5 zdYH}I9{iPOz%%f=7%1}bsg0xLwlnWLZR+=Xx6orZfBbd1@|8RTo&nE*XTUSy8So5v z20R0vf&Whi^6W7AJW_w7O~0_`H+0Ug*yBDcpLO*Au=>wf`L8?rzgzvMt^7_Kz{Ppl z>Y3F)+R?vc^^w&dwh6oXf3x~+t4})mzjosPw4;B~>Qh_)y>F@t&cD4R?)=q`-d6J` z&wyvZGvFEU40r}S1D*lTfM>un;2H1?{7*6v#)P~FnH@3D!}$Ndd;Wj<9fUjwJPSMq zJPLdRcnEj^P(TEH4EPYR0Stj2@J`^Rw-fS9;1|GCzz>0M1CIc6-~gBc_W?HpKRZgu zHQ>9zBfu8`0o)6m1BSqH;Q1qjJPlk0?gu^rydO9Q{N+aE3w$5=GLQlha2_}T90p!E zOvulH?*I<~0k8r5=?2Jw?*R`2_W-v5e?COW(B@Feg> z;FG`|z%k$k;1KW|^xYG{*MYA94+Eb9ZUc@3M*-8nHv%gUYL64dmE5^!zUWvNa-w_J z=`<7LVI-4GY8HqvXF3mJu1?m5Z38(mTDnCgzV8V0>2O!_bPa6SV@Yu9{67v|<)-Ip z^qRs~Z8~)cAGXX--T&F%a4c04=4(l~E7kSsmf}V0UArQgj``&Bz%iwy%m-H%c5Rkw z8edDMCRFK0u^HD~ro;trp6f&qVlVEAqPPwvQjaQpoCh?V! zE))xrFhc{Cn1xDT9w^q*46YED3Ln$!w3Mf|CJob=3YM+& zl$inPYS*$(0t@+y&lJ4IyfshspL_=PbEc+N}*x%*tss zSk&p$9VhL?pJbCgHix6CP&czojveGG3`{`X=);Dz!D>DJj3owJ+fxE&x!|LQn@q_W zk1_0p zuIi{|TGLQZ;b;pBk}0&6dXHxTvWD6f+Xh&*@WE)HWuYR|;jQbXrZF28lZ9n@*)}xu z0|w^QR+uSbR>Uo8OKo+m=9RW8w3VqGhupNID%8pon{|=mz-DTK<@2L5Wi7c9iHzAM z7GrZy8erg{w+4%Sf~i}rgR5<-=$mweFKprHd?++)d$CPhWm2_om4zE%t8A+NUS;9B ze3eaOT&*%|jf;Q7upAbv?9AvWyQ$5dNoR?8$?mV3sGZv7mZvQgu*1bIe%WPI6kApu`ns?~v|Hxu*ETYM&L#?9zwy zZo$m5Spa4qvCSu!(41{71!c6d%9M#r!-U4z9Y9;|n-@z!nS`laF2K6Hy$=^zVNx*~ z*1127$2+FFg>%E{|CAyoz31QHNqE8HQ4RHSdOGM+I_?8UCecG{K|bETXo zby-gZJ8%kJjBpc=n64ez#~o)NYXzutLbE1dEv_@rQ}Z1>F`a)=7U;vX_#&Tvp!|-W zp538{us0VxEe?iKO>QT|^q{8mBB^?;?4yJ$B^BJ3+vMu5hpS=f);#}T#q;_zfO-DM z<79dK4%ff3@~4f?AD#ivfM>un;2H1?cm_NJo&nE*XTUSy8So7Jw;3=mQ_PX!?GPzT b_;K%y%ys!j2k$I(`MwAL0xI7f4aq+Nu3h%S From 7ca3cc889d7db33a80deddf5583f4b40691d3597 Mon Sep 17 00:00:00 2001 From: nazunalika Date: Fri, 17 Jun 2022 11:49:22 -0700 Subject: [PATCH 20/64] comments to explain how to do things --- iso/py/templates/buildImage.tmpl.sh | 4 +-- iso/py/util/iso_utils.py | 38 ++++++++++++++++++++++++++--- 2 files changed, 36 insertions(+), 6 deletions(-) diff --git a/iso/py/templates/buildImage.tmpl.sh b/iso/py/templates/buildImage.tmpl.sh index bc74ccd..d7b7708 100644 --- a/iso/py/templates/buildImage.tmpl.sh +++ b/iso/py/templates/buildImage.tmpl.sh @@ -1,7 +1,6 @@ #!/bin/bash VOLID="{{ shortname }}-{{ major }}-{{ minor }}{{ rc }}-{{ arch }}-boot1" -LOGFILE="{{ builddir }}/lorax-{{ arch }}.log" VARIANT="{{ variant }}" ARCH="{{ arch }}" VERSION="{{ revision }}" @@ -9,6 +8,7 @@ PRODUCT="{{ distname }}" MOCKBLD="{{ builddir }}" LORAXRES="{{ lorax_work_root }}" LORAX_TAR="lorax-{{ major }}-{{ arch }}.tar.gz" +LOGFILE="lorax-{{ arch }}.log" {% for pkg in lorax %} sed -i '/{{ pkg }}/ s/^/#/' /usr/share/lorax/templates.d/80-rhel/runtime-install.tmpl @@ -27,7 +27,7 @@ lorax --product="${PRODUCT}" \ --nomacboot \ --buildarch="${ARCH}" \ --volid="${VOLID}" \ - --logfile="${LOGFILE}" \ + --logfile="${MOCKBLD}/${LOGFILE}" \ --rootfs-size=3 \ "${LORAXRES}" diff --git a/iso/py/util/iso_utils.py b/iso/py/util/iso_utils.py index d168fff..f06d37d 100644 --- a/iso/py/util/iso_utils.py +++ b/iso/py/util/iso_utils.py @@ -12,7 +12,13 @@ import subprocess import shlex 
import time import re +# This is for treeinfo +from configparser import ConfigParser from productmd.common import SortedConfigParser +from productmd.images import Image +from productmd.extra_files import ExtraFiles +import productmd.treeinfo +# End treeinfo from common import Color from jinja2 import Environment, FileSystemLoader @@ -31,6 +37,7 @@ class IsoBuild: config, major, rc: bool = False, + force_unpack: bool = False, isolation: str = 'auto', compose_dir_is_here: bool = False, image=None, @@ -56,6 +63,7 @@ class IsoBuild: self.mock_isolation = isolation self.iso_map = rlvars['iso_map'] self.release_candidate = rc + self.force_unpack = force_unpack # Relevant major version items self.release = rlvars['revision'] @@ -260,17 +268,39 @@ class IsoBuild: self.log.error('See the logs for more information.') raise SystemExit() + def run_image_build(self, arch): + """ + Builds the other images + """ + print() + + def run_boot_sync(self, arch, force_sync): + """ + This unpacks into BaseOS/$arch/os, assuming there's no data actually + there. There should be checks. + + 1. Sync from work/$arch/lorax to work/$arch/dvd + 2. Sync from work/$arch/lorax to work/$arch/minimal + 3. Sync from work/$arch/lorax to BaseOS/$arch/os + 4. Modify (3) .treeinfo + 5. Modify (1) .treeinfo, keep out boot.iso checksum + 6. Create a .treeinfo for AppStream + """ + self.sync_boot(arch, force_sync) + + def sync_boot(self, arch, force_sync): + """ + Syncs whatever is in work/$arch/lorax to BaseOS/$arch/os + """ + self.log.info('Syncing lorax to dvd directory...') + self.log.info('Syncing lorax to %s directory...' % self.iso_map['variant']) - # !!! Send help, we would prefer to do this using the productmd python - # !!! library. If you are reading this and you can help us, please do so! def treeinfo_write(self): """ Ensure treeinfo is written correctly """ print() - # !!! Send help, we would prefer to do this using the productmd python - # !!! library. If you are reading this and you can help us, please do so! 
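# The productmd imports added above ("This is for treeinfo") are what the removed
# "Send help" comments were asking for. A minimal sketch of what a productmd-based
# .treeinfo writer could look like, assuming a hypothetical helper name and
# placeholder values that are not part of this patch set:

def write_treeinfo_sketch(outdir, arch, variant_name, release_name, release_version, timestamp):
    """Write a minimal .treeinfo via productmd instead of a hand-rolled configparser."""
    import os
    import productmd.treeinfo

    ti = productmd.treeinfo.TreeInfo()
    ti.release.name = release_name              # e.g. "Rocky Linux"
    ti.release.short = release_name.split()[0]  # e.g. "Rocky"
    ti.release.version = release_version        # e.g. "9.0"

    ti.tree.arch = arch
    ti.tree.build_timestamp = int(timestamp)
    ti.tree.platforms.add(arch)

    variant = productmd.treeinfo.Variant(ti)
    variant.id = variant_name                   # e.g. "BaseOS"
    variant.uid = variant_name
    variant.name = variant_name
    variant.type = "variant"
    variant.paths.repository = "."
    variant.paths.packages = "Packages"
    ti.variants.add(variant)

    # Boot image and checksum entries would be registered here as well.
    ti.dump(os.path.join(outdir, ".treeinfo"))

# Example with placeholder values:
#   write_treeinfo_sketch("/tmp/os-root", "x86_64", "BaseOS", "Rocky Linux", "9.0", time.time())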
def discinfo_write(self): """ Ensure discinfo is written correctly From fa74e96c84650f02e3c03728dfeee966cf460c05 Mon Sep 17 00:00:00 2001 From: Neil Hanlon Date: Thu, 16 Jun 2022 23:31:33 -0400 Subject: [PATCH 21/64] Lets write some poetry --- iso/{py => empanadas}/.gitignore | 0 iso/{py => empanadas}/README.md | 19 +- iso/empanadas/empanadas/__init__.py | 1 + iso/{py => empanadas/empanadas}/common.py | 0 .../empanadas}/configs/el8.yaml | 0 .../empanadas}/configs/el9-beta.yaml | 0 .../empanadas}/configs/el9.yaml | 0 .../empanadas}/configs/el9lh.yaml | 0 .../empanadas/scripts/build-iso.py} | 11 +- .../scripts/sync-from-peridot-test.py} | 15 +- .../empanadas/scripts/sync-from-peridot.py} | 12 +- .../empanadas/scripts/sync-sig.py} | 12 +- .../empanadas}/sig/altarch.yaml | 0 .../empanadas}/sig/cloud.yaml | 0 iso/{py => empanadas/empanadas}/sig/core.yaml | 0 .../empanadas}/templates/buildImage.tmpl.sh | 0 .../empanadas}/templates/isobuild.tmpl.sh | 0 .../empanadas}/templates/isolorax.tmpl.sh | 0 .../empanadas}/templates/isomock.tmpl.cfg | 0 .../empanadas}/templates/repoconfig.tmpl | 0 .../empanadas}/templates/reposync-src.tmpl | 0 .../empanadas}/templates/reposync.tmpl | 0 .../empanadas}/util/__init__.py | 6 +- iso/{py => empanadas/empanadas}/util/check.py | 3 +- .../empanadas}/util/dnf_utils.py | 4 +- .../empanadas}/util/iso_utils.py | 5 +- iso/empanadas/poetry.lock | 304 ++++++++++++++++++ iso/empanadas/pyproject.toml | 25 ++ iso/empanadas/tests/__init__.py | 0 iso/empanadas/tests/test_empanadas.py | 5 + 30 files changed, 392 insertions(+), 30 deletions(-) rename iso/{py => empanadas}/.gitignore (100%) rename iso/{py => empanadas}/README.md (61%) create mode 100644 iso/empanadas/empanadas/__init__.py rename iso/{py => empanadas/empanadas}/common.py (100%) rename iso/{py => empanadas/empanadas}/configs/el8.yaml (100%) rename iso/{py => empanadas/empanadas}/configs/el9-beta.yaml (100%) rename iso/{py => empanadas/empanadas}/configs/el9.yaml (100%) rename iso/{py => empanadas/empanadas}/configs/el9lh.yaml (100%) rename iso/{py/build-iso => empanadas/empanadas/scripts/build-iso.py} (85%) rename iso/{py/sync-from-peridot-test => empanadas/empanadas/scripts/sync-from-peridot-test.py} (61%) rename iso/{py/sync-from-peridot => empanadas/empanadas/scripts/sync-from-peridot.py} (93%) rename iso/{py/sync-sig => empanadas/empanadas/scripts/sync-sig.py} (93%) rename iso/{py => empanadas/empanadas}/sig/altarch.yaml (100%) rename iso/{py => empanadas/empanadas}/sig/cloud.yaml (100%) rename iso/{py => empanadas/empanadas}/sig/core.yaml (100%) rename iso/{py => empanadas/empanadas}/templates/buildImage.tmpl.sh (100%) rename iso/{py => empanadas/empanadas}/templates/isobuild.tmpl.sh (100%) rename iso/{py => empanadas/empanadas}/templates/isolorax.tmpl.sh (100%) rename iso/{py => empanadas/empanadas}/templates/isomock.tmpl.cfg (100%) rename iso/{py => empanadas/empanadas}/templates/repoconfig.tmpl (100%) rename iso/{py => empanadas/empanadas}/templates/reposync-src.tmpl (100%) rename iso/{py => empanadas/empanadas}/templates/reposync.tmpl (100%) rename iso/{py => empanadas/empanadas}/util/__init__.py (64%) rename iso/{py => empanadas/empanadas}/util/check.py (94%) rename iso/{py => empanadas/empanadas}/util/dnf_utils.py (99%) rename iso/{py => empanadas/empanadas}/util/iso_utils.py (99%) create mode 100644 iso/empanadas/poetry.lock create mode 100644 iso/empanadas/pyproject.toml create mode 100644 iso/empanadas/tests/__init__.py create mode 100644 iso/empanadas/tests/test_empanadas.py diff --git 
a/iso/py/.gitignore b/iso/empanadas/.gitignore similarity index 100% rename from iso/py/.gitignore rename to iso/empanadas/.gitignore diff --git a/iso/py/README.md b/iso/empanadas/README.md similarity index 61% rename from iso/py/README.md rename to iso/empanadas/README.md index 41f3a41..e414f5e 100644 --- a/iso/py/README.md +++ b/iso/empanadas/README.md @@ -1,5 +1,19 @@ # iso + +## Setup / Install + +1. Install [Poetry](https://python-poetry.org/docs/) +2. Setup: `poetry install` +3. Have fun + + +## Updating dependencies + +Dependencies can be manipulated via the pyproject.toml file or with the poetry add/remove commands. + +Changes to the poetry.lock should be commited if dependencies are added or updated. + ## TODO Verbose mode should exist to output everything that's being called or ran. @@ -52,5 +66,6 @@ r.check_valid_arch() ### script names and permissions -* Callable scripts should *not* end in `.py` -* They should have at least `775` or `+x` permissions +* Callable scripts should always end in `.py` and live in the empanadas/scripts folder +* Poetry will handle the installation of these executables with setuptools for distribution, and they can be invoked by name using `poetry run script-name`, too. + * Configure the script and function to be executed in pyproject.toml (TODO: dynamically load scripts from this directory as well as standardize on the script input/outputs) diff --git a/iso/empanadas/empanadas/__init__.py b/iso/empanadas/empanadas/__init__.py new file mode 100644 index 0000000..b794fd4 --- /dev/null +++ b/iso/empanadas/empanadas/__init__.py @@ -0,0 +1 @@ +__version__ = '0.1.0' diff --git a/iso/py/common.py b/iso/empanadas/empanadas/common.py similarity index 100% rename from iso/py/common.py rename to iso/empanadas/empanadas/common.py diff --git a/iso/py/configs/el8.yaml b/iso/empanadas/empanadas/configs/el8.yaml similarity index 100% rename from iso/py/configs/el8.yaml rename to iso/empanadas/empanadas/configs/el8.yaml diff --git a/iso/py/configs/el9-beta.yaml b/iso/empanadas/empanadas/configs/el9-beta.yaml similarity index 100% rename from iso/py/configs/el9-beta.yaml rename to iso/empanadas/empanadas/configs/el9-beta.yaml diff --git a/iso/py/configs/el9.yaml b/iso/empanadas/empanadas/configs/el9.yaml similarity index 100% rename from iso/py/configs/el9.yaml rename to iso/empanadas/empanadas/configs/el9.yaml diff --git a/iso/py/configs/el9lh.yaml b/iso/empanadas/empanadas/configs/el9lh.yaml similarity index 100% rename from iso/py/configs/el9lh.yaml rename to iso/empanadas/empanadas/configs/el9lh.yaml diff --git a/iso/py/build-iso b/iso/empanadas/empanadas/scripts/build-iso.py similarity index 85% rename from iso/py/build-iso rename to iso/empanadas/empanadas/scripts/build-iso.py index 428ce45..c5907f1 100755 --- a/iso/py/build-iso +++ b/iso/empanadas/empanadas/scripts/build-iso.py @@ -1,10 +1,10 @@ -#!/usr/bin/env python3 # builds ISO's import argparse -from common import * -from util import Checks -from util import IsoBuild + +from empanadas.common import * +from empanadas.util import Checks +from empanadas.util import IsoBuild parser = argparse.ArgumentParser(description="ISO Compose") @@ -27,4 +27,5 @@ a = IsoBuild( logger=results.logger, ) -a.run() +def run(): + a.run() diff --git a/iso/py/sync-from-peridot-test b/iso/empanadas/empanadas/scripts/sync-from-peridot-test.py similarity index 61% rename from iso/py/sync-from-peridot-test rename to iso/empanadas/empanadas/scripts/sync-from-peridot-test.py index 6d286e9..0362718 100755 --- 
a/iso/py/sync-from-peridot-test +++ b/iso/empanadas/empanadas/scripts/sync-from-peridot-test.py @@ -1,11 +1,10 @@ -#!/usr/bin/env python3 - # This is a testing script to ensure the RepoSync class is working as intended. -from common import * import argparse -from util import Checks -from util import RepoSync + +from empanadas.common import * +from empanadas.util import Checks +from empanadas.util import RepoSync rlvars = rldict['9-lookahead'] r = Checks(rlvars, config['arch']) @@ -13,4 +12,10 @@ r.check_valid_arch() #a = RepoSync(rlvars, config, major="9", repo="ResilientStorage", parallel=True, ignore_debug=False, ignore_source=False) a = RepoSync(rlvars, config, major="9", repo="BaseOS", parallel=True, ignore_debug=False, ignore_source=False, hashed=True) +<<<<<<< HEAD:iso/py/sync-from-peridot-test #a.run() +======= + +def run(): + a.run() +>>>>>>> 8d29760 (Lets write some poetry):iso/empanadas/empanadas/scripts/sync-from-peridot-test.py diff --git a/iso/py/sync-from-peridot b/iso/empanadas/empanadas/scripts/sync-from-peridot.py similarity index 93% rename from iso/py/sync-from-peridot rename to iso/empanadas/empanadas/scripts/sync-from-peridot.py index d114a59..a998a28 100755 --- a/iso/py/sync-from-peridot +++ b/iso/empanadas/empanadas/scripts/sync-from-peridot.py @@ -1,11 +1,10 @@ -#!/usr/bin/env python3 - # This script can be called to do single syncs or full on syncs. import argparse -from common import * -from util import Checks -from util import RepoSync + +from empanadas.common import * +from empanadas.util import Checks +from empanadas.util import RepoSync #rlvars = rldict['9'] #r = Checks(rlvars, config['arch']) @@ -56,4 +55,5 @@ a = RepoSync( logger=results.logger ) -a.run() +def run() + a.run() diff --git a/iso/py/sync-sig b/iso/empanadas/empanadas/scripts/sync-sig.py similarity index 93% rename from iso/py/sync-sig rename to iso/empanadas/empanadas/scripts/sync-sig.py index ff9c8e1..3de8479 100755 --- a/iso/py/sync-sig +++ b/iso/empanadas/empanadas/scripts/sync-sig.py @@ -1,11 +1,9 @@ -#!/usr/bin/env python3 - # This script can be called to do single syncs or full on syncs. 
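# Under the poetry layout introduced here, this module is no longer executed
# directly (note the removed shebang); it is invoked through the console script
# declared in pyproject.toml (sync-sig = "empanadas.scripts.sync-sig:run"), e.g.:
#
#   poetry run sync-sig
#
# The same applies to the other entry points under empanadas/scripts.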
import argparse -from common import * -from util import Checks -from util import SigRepoSync +from empanadas.common import * +from empanadas.util import Checks +from empanadas.util import SigRepoSync #rlvars = rldict['9'] #r = Checks(rlvars, config['arch']) @@ -58,4 +56,6 @@ a = SigRepoSync( logger=results.logger ) -a.run() + +def run(): + a.run() diff --git a/iso/py/sig/altarch.yaml b/iso/empanadas/empanadas/sig/altarch.yaml similarity index 100% rename from iso/py/sig/altarch.yaml rename to iso/empanadas/empanadas/sig/altarch.yaml diff --git a/iso/py/sig/cloud.yaml b/iso/empanadas/empanadas/sig/cloud.yaml similarity index 100% rename from iso/py/sig/cloud.yaml rename to iso/empanadas/empanadas/sig/cloud.yaml diff --git a/iso/py/sig/core.yaml b/iso/empanadas/empanadas/sig/core.yaml similarity index 100% rename from iso/py/sig/core.yaml rename to iso/empanadas/empanadas/sig/core.yaml diff --git a/iso/py/templates/buildImage.tmpl.sh b/iso/empanadas/empanadas/templates/buildImage.tmpl.sh similarity index 100% rename from iso/py/templates/buildImage.tmpl.sh rename to iso/empanadas/empanadas/templates/buildImage.tmpl.sh diff --git a/iso/py/templates/isobuild.tmpl.sh b/iso/empanadas/empanadas/templates/isobuild.tmpl.sh similarity index 100% rename from iso/py/templates/isobuild.tmpl.sh rename to iso/empanadas/empanadas/templates/isobuild.tmpl.sh diff --git a/iso/py/templates/isolorax.tmpl.sh b/iso/empanadas/empanadas/templates/isolorax.tmpl.sh similarity index 100% rename from iso/py/templates/isolorax.tmpl.sh rename to iso/empanadas/empanadas/templates/isolorax.tmpl.sh diff --git a/iso/py/templates/isomock.tmpl.cfg b/iso/empanadas/empanadas/templates/isomock.tmpl.cfg similarity index 100% rename from iso/py/templates/isomock.tmpl.cfg rename to iso/empanadas/empanadas/templates/isomock.tmpl.cfg diff --git a/iso/py/templates/repoconfig.tmpl b/iso/empanadas/empanadas/templates/repoconfig.tmpl similarity index 100% rename from iso/py/templates/repoconfig.tmpl rename to iso/empanadas/empanadas/templates/repoconfig.tmpl diff --git a/iso/py/templates/reposync-src.tmpl b/iso/empanadas/empanadas/templates/reposync-src.tmpl similarity index 100% rename from iso/py/templates/reposync-src.tmpl rename to iso/empanadas/empanadas/templates/reposync-src.tmpl diff --git a/iso/py/templates/reposync.tmpl b/iso/empanadas/empanadas/templates/reposync.tmpl similarity index 100% rename from iso/py/templates/reposync.tmpl rename to iso/empanadas/empanadas/templates/reposync.tmpl diff --git a/iso/py/util/__init__.py b/iso/empanadas/empanadas/util/__init__.py similarity index 64% rename from iso/py/util/__init__.py rename to iso/empanadas/empanadas/util/__init__.py index 1c96258..495236c 100644 --- a/iso/py/util/__init__.py +++ b/iso/empanadas/empanadas/util/__init__.py @@ -2,16 +2,16 @@ Imports all of our classes for this local module """ -from .check import ( +from empanadas.util.check import ( Checks, ) -from .dnf_utils import ( +from empanadas.util.dnf_utils import ( RepoSync, SigRepoSync ) -from .iso_utils import ( +from empanadas.util.iso_utils import ( IsoBuild, LiveBuild ) diff --git a/iso/py/util/check.py b/iso/empanadas/empanadas/util/check.py similarity index 94% rename from iso/py/util/check.py rename to iso/empanadas/empanadas/util/check.py index a87d86f..b491a61 100644 --- a/iso/py/util/check.py +++ b/iso/empanadas/empanadas/util/check.py @@ -1,6 +1,7 @@ # Is our arch allowed for this particular release? 
Some previous releases do # not support ppc or s390x -from common import Color +from empanadas.common import Color + class Checks: """This class helps check some things""" def __init__(self, rlvars, arch): diff --git a/iso/py/util/dnf_utils.py b/iso/empanadas/empanadas/util/dnf_utils.py similarity index 99% rename from iso/py/util/dnf_utils.py rename to iso/empanadas/empanadas/util/dnf_utils.py index a965d59..6fe3da4 100644 --- a/iso/py/util/dnf_utils.py +++ b/iso/empanadas/empanadas/util/dnf_utils.py @@ -14,9 +14,11 @@ import time import re import json #import pipes -from common import Color + from jinja2 import Environment, FileSystemLoader +from empanadas.common import Color + #HAS_LIBREPO = True #try: # import librepo diff --git a/iso/py/util/iso_utils.py b/iso/empanadas/empanadas/util/iso_utils.py similarity index 99% rename from iso/py/util/iso_utils.py rename to iso/empanadas/empanadas/util/iso_utils.py index f06d37d..500a980 100644 --- a/iso/py/util/iso_utils.py +++ b/iso/empanadas/empanadas/util/iso_utils.py @@ -12,6 +12,7 @@ import subprocess import shlex import time import re + # This is for treeinfo from configparser import ConfigParser from productmd.common import SortedConfigParser @@ -19,9 +20,11 @@ from productmd.images import Image from productmd.extra_files import ExtraFiles import productmd.treeinfo # End treeinfo -from common import Color + from jinja2 import Environment, FileSystemLoader +from empanadas.common import Color + class IsoBuild: """ This helps us build the generic ISO's for a Rocky Linux release. In diff --git a/iso/empanadas/poetry.lock b/iso/empanadas/poetry.lock new file mode 100644 index 0000000..52acbff --- /dev/null +++ b/iso/empanadas/poetry.lock @@ -0,0 +1,304 @@ +[[package]] +name = "atomicwrites" +version = "1.4.0" +description = "Atomic file writes." +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[[package]] +name = "attrs" +version = "21.4.0" +description = "Classes Without Boilerplate" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[package.extras] +dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit", "cloudpickle"] +docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"] +tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "cloudpickle"] +tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "cloudpickle"] + +[[package]] +name = "colorama" +version = "0.4.5" +description = "Cross-platform colored terminal text." +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[[package]] +name = "jinja2" +version = "3.1.2" +description = "A very fast and expressive template engine." +category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "markupsafe" +version = "2.1.1" +description = "Safely add untrusted strings to HTML/XML markup." 
+category = "main" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "more-itertools" +version = "8.13.0" +description = "More routines for operating on iterables, beyond itertools" +category = "dev" +optional = false +python-versions = ">=3.5" + +[[package]] +name = "packaging" +version = "21.3" +description = "Core utilities for Python packages" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +pyparsing = ">=2.0.2,<3.0.5 || >3.0.5" + +[[package]] +name = "pluggy" +version = "0.13.1" +description = "plugin and hook calling mechanisms for python" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[package.extras] +dev = ["pre-commit", "tox"] + +[[package]] +name = "productmd" +version = "1.33" +description = "Product, compose and installation media metadata library" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +six = "*" + +[[package]] +name = "py" +version = "1.11.0" +description = "library with cross-python path, ini-parsing, io, code, log facilities" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[[package]] +name = "pyparsing" +version = "3.0.9" +description = "pyparsing module - Classes and methods to define and execute parsing grammars" +category = "dev" +optional = false +python-versions = ">=3.6.8" + +[package.extras] +diagrams = ["railroad-diagrams", "jinja2"] + +[[package]] +name = "pytest" +version = "5.4.3" +description = "pytest: simple powerful testing with Python" +category = "dev" +optional = false +python-versions = ">=3.5" + +[package.dependencies] +atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} +attrs = ">=17.4.0" +colorama = {version = "*", markers = "sys_platform == \"win32\""} +more-itertools = ">=4.0.0" +packaging = "*" +pluggy = ">=0.12,<1.0" +py = ">=1.5.0" +wcwidth = "*" + +[package.extras] +checkqa-mypy = ["mypy (==v0.761)"] +testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"] + +[[package]] +name = "pyyaml" +version = "6.0" +description = "YAML parser and emitter for Python" +category = "main" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "rpm-py-installer" +version = "1.1.0" +description = "RPM Python binding Installer" +category = "main" +optional = false +python-versions = "*" + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" + +[[package]] +name = "wcwidth" +version = "0.2.5" +description = "Measures the displayed width of unicode strings in a terminal" +category = "dev" +optional = false +python-versions = "*" + +[metadata] +lock-version = "1.1" +python-versions = "^3.10" +content-hash = "60b7aec35daf6724e2676f4db88df4a12ad46210c054102893617b336e74bb40" + +[metadata.files] +atomicwrites = [ + {file = "atomicwrites-1.4.0-py2.py3-none-any.whl", hash = "sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197"}, + {file = "atomicwrites-1.4.0.tar.gz", hash = "sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"}, +] +attrs = [ + {file = "attrs-21.4.0-py2.py3-none-any.whl", hash = "sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4"}, + {file = "attrs-21.4.0.tar.gz", hash = "sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd"}, +] +colorama = [ + 
{file = "colorama-0.4.5-py2.py3-none-any.whl", hash = "sha256:854bf444933e37f5824ae7bfc1e98d5bce2ebe4160d46b5edf346a89358e99da"}, + {file = "colorama-0.4.5.tar.gz", hash = "sha256:e6c6b4334fc50988a639d9b98aa429a0b57da6e17b9a44f0451f930b6967b7a4"}, +] +jinja2 = [ + {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"}, + {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"}, +] +markupsafe = [ + {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:86b1f75c4e7c2ac2ccdaec2b9022845dbb81880ca318bb7a0a01fbf7813e3812"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a49907dd8420c5685cfa064a1335b6754b74541bbb3706c259c02ed65b644b3e"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10c1bfff05d95783da83491be968e8fe789263689c02724e0c691933c52994f5"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b7bd98b796e2b6553da7225aeb61f447f80a1ca64f41d83612e6139ca5213aa4"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b09bf97215625a311f669476f44b8b318b075847b49316d3e28c08e41a7a573f"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:694deca8d702d5db21ec83983ce0bb4b26a578e71fbdbd4fdcd387daa90e4d5e"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:efc1913fd2ca4f334418481c7e595c00aad186563bbc1ec76067848c7ca0a933"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-win32.whl", hash = "sha256:4a33dea2b688b3190ee12bd7cfa29d39c9ed176bda40bfa11099a3ce5d3a7ac6"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:dda30ba7e87fbbb7eab1ec9f58678558fd9a6b8b853530e176eabd064da81417"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:671cd1187ed5e62818414afe79ed29da836dde67166a9fac6d435873c44fdd02"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3799351e2336dc91ea70b034983ee71cf2f9533cdff7c14c90ea126bfd95d65a"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e72591e9ecd94d7feb70c1cbd7be7b3ebea3f548870aa91e2732960fa4d57a37"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6fbf47b5d3728c6aea2abb0589b5d30459e369baa772e0f37a0320185e87c980"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d5ee4f386140395a2c818d149221149c54849dfcfcb9f1debfe07a8b8bd63f9a"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bcb3ed405ed3222f9904899563d6fc492ff75cce56cba05e32eff40e6acbeaa3"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e1c0b87e09fa55a220f058d1d49d3fb8df88fbfab58558f1198e08c1e1de842a"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-win32.whl", hash = "sha256:8dc1c72a69aa7e082593c4a203dcf94ddb74bb5c8a731e4e1eb68d031e8498ff"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:97a68e6ada378df82bc9f16b800ab77cbf4b2fada0081794318520138c088e4a"}, + {file = 
"MarkupSafe-2.1.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e8c843bbcda3a2f1e3c2ab25913c80a3c5376cd00c6e8c4a86a89a28c8dc5452"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0212a68688482dc52b2d45013df70d169f542b7394fc744c02a57374a4207003"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e576a51ad59e4bfaac456023a78f6b5e6e7651dcd383bcc3e18d06f9b55d6d1"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b9fe39a2ccc108a4accc2676e77da025ce383c108593d65cc909add5c3bd601"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:96e37a3dc86e80bf81758c152fe66dbf60ed5eca3d26305edf01892257049925"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6d0072fea50feec76a4c418096652f2c3238eaa014b2f94aeb1d56a66b41403f"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:089cf3dbf0cd6c100f02945abeb18484bd1ee57a079aefd52cffd17fba910b88"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6a074d34ee7a5ce3effbc526b7083ec9731bb3cbf921bbe1d3005d4d2bdb3a63"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-win32.whl", hash = "sha256:421be9fbf0ffe9ffd7a378aafebbf6f4602d564d34be190fc19a193232fd12b1"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e04e26803c9c3851c931eac40c695602c6295b8d432cbe78609649ad9bd2da8a"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b87db4360013327109564f0e591bd2a3b318547bcef31b468a92ee504d07ae4f"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99a2a507ed3ac881b975a2976d59f38c19386d128e7a9a18b7df6fff1fd4c1d6"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56442863ed2b06d19c37f94d999035e15ee982988920e12a5b4ba29b62ad1f77"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3ce11ee3f23f79dbd06fb3d63e2f6af7b12db1d46932fe7bd8afa259a5996603"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:33b74d289bd2f5e527beadcaa3f401e0df0a89927c1559c8566c066fa4248ab7"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:43093fb83d8343aac0b1baa75516da6092f58f41200907ef92448ecab8825135"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8e3dcf21f367459434c18e71b2a9532d96547aef8a871872a5bd69a715c15f96"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-win32.whl", hash = "sha256:d4306c36ca495956b6d568d276ac11fdd9c30a36f1b6eb928070dc5360b22e1c"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:46d00d6cfecdde84d40e572d63735ef81423ad31184100411e6e3388d405e247"}, + {file = "MarkupSafe-2.1.1.tar.gz", hash = "sha256:7f91197cc9e48f989d12e4e6fbc46495c446636dfc81b9ccf50bb0ec74b91d4b"}, +] +more-itertools = [ + {file = "more-itertools-8.13.0.tar.gz", hash = "sha256:a42901a0a5b169d925f6f217cd5a190e32ef54360905b9c39ee7db5313bfec0f"}, + {file = "more_itertools-8.13.0-py3-none-any.whl", hash = "sha256:c5122bffc5f104d37c1626b8615b511f3427aa5389b94d61e5ef8236bfbc3ddb"}, +] +packaging = [ + {file = 
"packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, + {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, +] +pluggy = [ + {file = "pluggy-0.13.1-py2.py3-none-any.whl", hash = "sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d"}, + {file = "pluggy-0.13.1.tar.gz", hash = "sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0"}, +] +productmd = [ + {file = "productmd-1.33-py3-none-any.whl", hash = "sha256:467dfeb84e74834b6a65508536ccd8ec2d81c24a0ecee5e77d2c358e97eae164"}, + {file = "productmd-1.33.tar.gz", hash = "sha256:aaf49bdd2a5cb97f7c6b5011f669dbed153efc7bc61e6935fa796a1b94d16b7e"}, +] +py = [ + {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, + {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, +] +pyparsing = [ + {file = "pyparsing-3.0.9-py3-none-any.whl", hash = "sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc"}, + {file = "pyparsing-3.0.9.tar.gz", hash = "sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb"}, +] +pytest = [ + {file = "pytest-5.4.3-py3-none-any.whl", hash = "sha256:5c0db86b698e8f170ba4582a492248919255fcd4c79b1ee64ace34301fb589a1"}, + {file = "pytest-5.4.3.tar.gz", hash = "sha256:7979331bfcba207414f5e1263b5a0f8f521d0f457318836a7355531ed1a4c7d8"}, +] +pyyaml = [ + {file = "PyYAML-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53"}, + {file = "PyYAML-6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c"}, + {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc"}, + {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b"}, + {file = "PyYAML-6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5"}, + {file = "PyYAML-6.0-cp310-cp310-win32.whl", hash = "sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513"}, + {file = "PyYAML-6.0-cp310-cp310-win_amd64.whl", hash = "sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a"}, + {file = "PyYAML-6.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86"}, + {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f"}, + {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92"}, + {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4"}, + {file = "PyYAML-6.0-cp36-cp36m-win32.whl", hash = "sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293"}, + {file = "PyYAML-6.0-cp36-cp36m-win_amd64.whl", hash = 
"sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57"}, + {file = "PyYAML-6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c"}, + {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0"}, + {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4"}, + {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9"}, + {file = "PyYAML-6.0-cp37-cp37m-win32.whl", hash = "sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737"}, + {file = "PyYAML-6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d"}, + {file = "PyYAML-6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b"}, + {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba"}, + {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34"}, + {file = "PyYAML-6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287"}, + {file = "PyYAML-6.0-cp38-cp38-win32.whl", hash = "sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78"}, + {file = "PyYAML-6.0-cp38-cp38-win_amd64.whl", hash = "sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07"}, + {file = "PyYAML-6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b"}, + {file = "PyYAML-6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174"}, + {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803"}, + {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3"}, + {file = "PyYAML-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0"}, + {file = "PyYAML-6.0-cp39-cp39-win32.whl", hash = "sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb"}, + {file = "PyYAML-6.0-cp39-cp39-win_amd64.whl", hash = "sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c"}, + {file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"}, +] +rpm-py-installer = [ + {file = "rpm-py-installer-1.1.0.tar.gz", hash = "sha256:66e5f4f9247752ed386345642683103afaee50fb16928878a204bc12504b9bbe"}, +] +six = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] +wcwidth = [ + {file = 
"wcwidth-0.2.5-py2.py3-none-any.whl", hash = "sha256:beb4802a9cebb9144e99086eff703a642a13d6a0052920003a230f3294bbe784"}, + {file = "wcwidth-0.2.5.tar.gz", hash = "sha256:c4d647b99872929fdb7bdcaa4fbe7f01413ed3d98077df798530e5b04f116c83"}, +] diff --git a/iso/empanadas/pyproject.toml b/iso/empanadas/pyproject.toml new file mode 100644 index 0000000..d24a836 --- /dev/null +++ b/iso/empanadas/pyproject.toml @@ -0,0 +1,25 @@ +[tool.poetry] +name = "empanadas" +version = "0.1.0" +description = "hand crafted ISOs with love and spice" +authors = ["Louis Abel ", "Neil Hanlon "] + +[tool.poetry.dependencies] +python = "^3.10" +rpm-py-installer = "^1.1.0" +PyYAML = "^6.0" +Jinja2 = "^3.1.2" +productmd = "^1.33" + +[tool.poetry.dev-dependencies] +pytest = "^5.2" + +[tool.poetry.scripts] +sync-from-peridot = "empanadas.scripts.sync-from-peridot:run" +sync-from-peridot-test = "empanadas.scripts.sync-from-peridot-test:run" +sync-sig = "empanadas.scripts.sync-sig:run" +build-iso = "empanadas.scripts.build-iso:run" + +[build-system] +requires = ["poetry-core>=1.0.0"] +build-backend = "poetry.core.masonry.api" diff --git a/iso/empanadas/tests/__init__.py b/iso/empanadas/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/iso/empanadas/tests/test_empanadas.py b/iso/empanadas/tests/test_empanadas.py new file mode 100644 index 0000000..4561768 --- /dev/null +++ b/iso/empanadas/tests/test_empanadas.py @@ -0,0 +1,5 @@ +from empanadas import __version__ + + +def test_version(): + assert __version__ == '0.1.0' From e3526b1588379e8bc51e48ecea537e3bb14b355f Mon Sep 17 00:00:00 2001 From: Neil Hanlon Date: Fri, 17 Jun 2022 15:05:36 -0400 Subject: [PATCH 22/64] Fix requirements specifications for EL machines that this will run on :) --- iso/empanadas/poetry.lock | 194 +++++++++++++++++++++++++---------- iso/empanadas/pyproject.toml | 12 +-- 2 files changed, 144 insertions(+), 62 deletions(-) diff --git a/iso/empanadas/poetry.lock b/iso/empanadas/poetry.lock index 52acbff..3e597d0 100644 --- a/iso/empanadas/poetry.lock +++ b/iso/empanadas/poetry.lock @@ -28,27 +28,44 @@ category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +[[package]] +name = "importlib-metadata" +version = "4.8.3" +description = "Read metadata from Python packages" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +typing-extensions = {version = ">=3.6.4", markers = "python_version < \"3.8\""} +zipp = ">=0.5" + +[package.extras] +docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] +perf = ["ipython"] +testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "packaging", "pep517", "pyfakefs", "flufl.flake8", "pytest-perf (>=0.9.2)", "pytest-black (>=0.3.7)", "pytest-mypy", "importlib-resources (>=1.3)"] + [[package]] name = "jinja2" -version = "3.1.2" +version = "2.11.3" description = "A very fast and expressive template engine." category = "main" optional = false -python-versions = ">=3.7" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [package.dependencies] -MarkupSafe = ">=2.0" +MarkupSafe = ">=0.23" [package.extras] -i18n = ["Babel (>=2.7)"] +i18n = ["Babel (>=0.8)"] [[package]] name = "markupsafe" -version = "2.1.1" +version = "2.0.1" description = "Safely add untrusted strings to HTML/XML markup." 
category = "main" optional = false -python-versions = ">=3.7" +python-versions = ">=3.6" [[package]] name = "more-itertools" @@ -77,6 +94,9 @@ category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +[package.dependencies] +importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} + [package.extras] dev = ["pre-commit", "tox"] @@ -101,14 +121,14 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [[package]] name = "pyparsing" -version = "3.0.9" -description = "pyparsing module - Classes and methods to define and execute parsing grammars" +version = "3.0.7" +description = "Python parsing module" category = "dev" optional = false -python-versions = ">=3.6.8" +python-versions = ">=3.6" [package.extras] -diagrams = ["railroad-diagrams", "jinja2"] +diagrams = ["jinja2", "railroad-diagrams"] [[package]] name = "pytest" @@ -122,6 +142,7 @@ python-versions = ">=3.5" atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} attrs = ">=17.4.0" colorama = {version = "*", markers = "sys_platform == \"win32\""} +importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} more-itertools = ">=4.0.0" packaging = "*" pluggy = ">=0.12,<1.0" @@ -156,6 +177,14 @@ category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +[[package]] +name = "typing-extensions" +version = "4.1.1" +description = "Backported and Experimental Type Hints for Python 3.6+" +category = "dev" +optional = false +python-versions = ">=3.6" + [[package]] name = "wcwidth" version = "0.2.5" @@ -164,10 +193,22 @@ category = "dev" optional = false python-versions = "*" +[[package]] +name = "zipp" +version = "3.6.0" +description = "Backport of pathlib-compatible object wrapper for zip files" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.extras] +docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] +testing = ["pytest (>=4.6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy"] + [metadata] lock-version = "1.1" -python-versions = "^3.10" -content-hash = "60b7aec35daf6724e2676f4db88df4a12ad46210c054102893617b336e74bb40" +python-versions = ">=3.6" +content-hash = "24b4f1aa7304910b04db920cbf95e7c5f483008effe499a9303d02d561a421d4" [metadata.files] atomicwrites = [ @@ -182,51 +223,84 @@ colorama = [ {file = "colorama-0.4.5-py2.py3-none-any.whl", hash = "sha256:854bf444933e37f5824ae7bfc1e98d5bce2ebe4160d46b5edf346a89358e99da"}, {file = "colorama-0.4.5.tar.gz", hash = "sha256:e6c6b4334fc50988a639d9b98aa429a0b57da6e17b9a44f0451f930b6967b7a4"}, ] +importlib-metadata = [ + {file = "importlib_metadata-4.8.3-py3-none-any.whl", hash = "sha256:65a9576a5b2d58ca44d133c42a241905cc45e34d2c06fd5ba2bafa221e5d7b5e"}, + {file = "importlib_metadata-4.8.3.tar.gz", hash = "sha256:766abffff765960fcc18003801f7044eb6755ffae4521c8e8ce8e83b9c9b0668"}, +] jinja2 = [ - {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"}, - {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"}, + {file = "Jinja2-2.11.3-py2.py3-none-any.whl", hash = "sha256:03e47ad063331dd6a3f04a43eddca8a966a26ba0c5b7207a9a9e4e08f1b29419"}, + {file = "Jinja2-2.11.3.tar.gz", hash = "sha256:a6d58433de0ae800347cab1fa3043cebbabe8baa9d29e668f1c768cb87a333c6"}, ] markupsafe = [ - {file = 
"MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:86b1f75c4e7c2ac2ccdaec2b9022845dbb81880ca318bb7a0a01fbf7813e3812"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a49907dd8420c5685cfa064a1335b6754b74541bbb3706c259c02ed65b644b3e"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10c1bfff05d95783da83491be968e8fe789263689c02724e0c691933c52994f5"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b7bd98b796e2b6553da7225aeb61f447f80a1ca64f41d83612e6139ca5213aa4"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b09bf97215625a311f669476f44b8b318b075847b49316d3e28c08e41a7a573f"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:694deca8d702d5db21ec83983ce0bb4b26a578e71fbdbd4fdcd387daa90e4d5e"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:efc1913fd2ca4f334418481c7e595c00aad186563bbc1ec76067848c7ca0a933"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-win32.whl", hash = "sha256:4a33dea2b688b3190ee12bd7cfa29d39c9ed176bda40bfa11099a3ce5d3a7ac6"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:dda30ba7e87fbbb7eab1ec9f58678558fd9a6b8b853530e176eabd064da81417"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:671cd1187ed5e62818414afe79ed29da836dde67166a9fac6d435873c44fdd02"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3799351e2336dc91ea70b034983ee71cf2f9533cdff7c14c90ea126bfd95d65a"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e72591e9ecd94d7feb70c1cbd7be7b3ebea3f548870aa91e2732960fa4d57a37"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6fbf47b5d3728c6aea2abb0589b5d30459e369baa772e0f37a0320185e87c980"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d5ee4f386140395a2c818d149221149c54849dfcfcb9f1debfe07a8b8bd63f9a"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bcb3ed405ed3222f9904899563d6fc492ff75cce56cba05e32eff40e6acbeaa3"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e1c0b87e09fa55a220f058d1d49d3fb8df88fbfab58558f1198e08c1e1de842a"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-win32.whl", hash = "sha256:8dc1c72a69aa7e082593c4a203dcf94ddb74bb5c8a731e4e1eb68d031e8498ff"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:97a68e6ada378df82bc9f16b800ab77cbf4b2fada0081794318520138c088e4a"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e8c843bbcda3a2f1e3c2ab25913c80a3c5376cd00c6e8c4a86a89a28c8dc5452"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0212a68688482dc52b2d45013df70d169f542b7394fc744c02a57374a4207003"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e576a51ad59e4bfaac456023a78f6b5e6e7651dcd383bcc3e18d06f9b55d6d1"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:4b9fe39a2ccc108a4accc2676e77da025ce383c108593d65cc909add5c3bd601"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:96e37a3dc86e80bf81758c152fe66dbf60ed5eca3d26305edf01892257049925"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6d0072fea50feec76a4c418096652f2c3238eaa014b2f94aeb1d56a66b41403f"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:089cf3dbf0cd6c100f02945abeb18484bd1ee57a079aefd52cffd17fba910b88"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6a074d34ee7a5ce3effbc526b7083ec9731bb3cbf921bbe1d3005d4d2bdb3a63"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-win32.whl", hash = "sha256:421be9fbf0ffe9ffd7a378aafebbf6f4602d564d34be190fc19a193232fd12b1"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e04e26803c9c3851c931eac40c695602c6295b8d432cbe78609649ad9bd2da8a"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b87db4360013327109564f0e591bd2a3b318547bcef31b468a92ee504d07ae4f"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99a2a507ed3ac881b975a2976d59f38c19386d128e7a9a18b7df6fff1fd4c1d6"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56442863ed2b06d19c37f94d999035e15ee982988920e12a5b4ba29b62ad1f77"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3ce11ee3f23f79dbd06fb3d63e2f6af7b12db1d46932fe7bd8afa259a5996603"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:33b74d289bd2f5e527beadcaa3f401e0df0a89927c1559c8566c066fa4248ab7"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:43093fb83d8343aac0b1baa75516da6092f58f41200907ef92448ecab8825135"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8e3dcf21f367459434c18e71b2a9532d96547aef8a871872a5bd69a715c15f96"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-win32.whl", hash = "sha256:d4306c36ca495956b6d568d276ac11fdd9c30a36f1b6eb928070dc5360b22e1c"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:46d00d6cfecdde84d40e572d63735ef81423ad31184100411e6e3388d405e247"}, - {file = "MarkupSafe-2.1.1.tar.gz", hash = "sha256:7f91197cc9e48f989d12e4e6fbc46495c446636dfc81b9ccf50bb0ec74b91d4b"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d8446c54dc28c01e5a2dbac5a25f071f6653e6e40f3a8818e8b45d790fe6ef53"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:36bc903cbb393720fad60fc28c10de6acf10dc6cc883f3e24ee4012371399a38"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d7d807855b419fc2ed3e631034685db6079889a1f01d5d9dac950f764da3dad"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:add36cb2dbb8b736611303cd3bfcee00afd96471b09cda130da3581cbdc56a6d"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:168cd0a3642de83558a5153c8bd34f175a9a6e7f6dc6384b9655d2697312a646"}, + 
{file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4dc8f9fb58f7364b63fd9f85013b780ef83c11857ae79f2feda41e270468dd9b"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:20dca64a3ef2d6e4d5d615a3fd418ad3bde77a47ec8a23d984a12b5b4c74491a"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:cdfba22ea2f0029c9261a4bd07e830a8da012291fbe44dc794e488b6c9bb353a"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-win32.whl", hash = "sha256:99df47edb6bda1249d3e80fdabb1dab8c08ef3975f69aed437cb69d0a5de1e28"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:e0f138900af21926a02425cf736db95be9f4af72ba1bb21453432a07f6082134"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f9081981fe268bd86831e5c75f7de206ef275defcb82bc70740ae6dc507aee51"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:0955295dd5eec6cb6cc2fe1698f4c6d84af2e92de33fbcac4111913cd100a6ff"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:0446679737af14f45767963a1a9ef7620189912317d095f2d9ffa183a4d25d2b"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:f826e31d18b516f653fe296d967d700fddad5901ae07c622bb3705955e1faa94"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:fa130dd50c57d53368c9d59395cb5526eda596d3ffe36666cd81a44d56e48872"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:905fec760bd2fa1388bb5b489ee8ee5f7291d692638ea5f67982d968366bef9f"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf5d821ffabf0ef3533c39c518f3357b171a1651c1ff6827325e4489b0e46c3c"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0d4b31cc67ab36e3392bbf3862cfbadac3db12bdd8b02a2731f509ed5b829724"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:baa1a4e8f868845af802979fcdbf0bb11f94f1cb7ced4c4b8a351bb60d108145"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:deb993cacb280823246a026e3b2d81c493c53de6acfd5e6bfe31ab3402bb37dd"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:63f3268ba69ace99cab4e3e3b5840b03340efed0948ab8f78d2fd87ee5442a4f"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:8d206346619592c6200148b01a2142798c989edcb9c896f9ac9722a99d4e77e6"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-win32.whl", hash = "sha256:6c4ca60fa24e85fe25b912b01e62cb969d69a23a5d5867682dd3e80b5b02581d"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b2f4bf27480f5e5e8ce285a8c8fd176c0b03e93dcc6646477d4630e83440c6a9"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0717a7390a68be14b8c793ba258e075c6f4ca819f15edfc2a3a027c823718567"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:6557b31b5e2c9ddf0de32a691f2312a32f77cd7681d8af66c2692efdbef84c18"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:49e3ceeabbfb9d66c3aef5af3a60cc43b85c33df25ce03d0031a608b0a8b2e3f"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:d7f9850398e85aba693bb640262d3611788b1f29a79f0c93c565694658f4071f"}, + {file = 
"MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:6a7fae0dd14cf60ad5ff42baa2e95727c3d81ded453457771d02b7d2b3f9c0c2"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:b7f2d075102dc8c794cbde1947378051c4e5180d52d276987b8d28a3bd58c17d"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e9936f0b261d4df76ad22f8fee3ae83b60d7c3e871292cd42f40b81b70afae85"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:2a7d351cbd8cfeb19ca00de495e224dea7e7d919659c2841bbb7f420ad03e2d6"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:60bf42e36abfaf9aff1f50f52644b336d4f0a3fd6d8a60ca0d054ac9f713a864"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d6c7ebd4e944c85e2c3421e612a7057a2f48d478d79e61800d81468a8d842207"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f0567c4dc99f264f49fe27da5f735f414c4e7e7dd850cfd8e69f0862d7c74ea9"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:89c687013cb1cd489a0f0ac24febe8c7a666e6e221b783e53ac50ebf68e45d86"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-win32.whl", hash = "sha256:a30e67a65b53ea0a5e62fe23682cfe22712e01f453b95233b25502f7c61cb415"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:611d1ad9a4288cf3e3c16014564df047fe08410e628f89805e475368bd304914"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5bb28c636d87e840583ee3adeb78172efc47c8b26127267f54a9c0ec251d41a9"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:be98f628055368795d818ebf93da628541e10b75b41c559fdf36d104c5787066"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:1d609f577dc6e1aa17d746f8bd3c31aa4d258f4070d61b2aa5c4166c1539de35"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7d91275b0245b1da4d4cfa07e0faedd5b0812efc15b702576d103293e252af1b"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:01a9b8ea66f1658938f65b93a85ebe8bc016e6769611be228d797c9d998dd298"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:47ab1e7b91c098ab893b828deafa1203de86d0bc6ab587b160f78fe6c4011f75"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:97383d78eb34da7e1fa37dd273c20ad4320929af65d156e35a5e2d89566d9dfb"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6fcf051089389abe060c9cd7caa212c707e58153afa2c649f00346ce6d260f1b"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5855f8438a7d1d458206a2466bf82b0f104a3724bf96a1c781ab731e4201731a"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:3dd007d54ee88b46be476e293f48c85048603f5f516008bee124ddd891398ed6"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:aca6377c0cb8a8253e493c6b451565ac77e98c2951c45f913e0b52facdcff83f"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:04635854b943835a6ea959e948d19dcd311762c5c0c6e1f0e16ee57022669194"}, + {file = 
"MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6300b8454aa6930a24b9618fbb54b5a68135092bc666f7b06901f897fa5c2fee"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-win32.whl", hash = "sha256:023cb26ec21ece8dc3907c0e8320058b2e0cb3c55cf9564da612bc325bed5e64"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:984d76483eb32f1bcb536dc27e4ad56bba4baa70be32fa87152832cdd9db0833"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2ef54abee730b502252bcdf31b10dacb0a416229b72c18b19e24a4509f273d26"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3c112550557578c26af18a1ccc9e090bfe03832ae994343cfdacd287db6a6ae7"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux1_i686.whl", hash = "sha256:53edb4da6925ad13c07b6d26c2a852bd81e364f95301c66e930ab2aef5b5ddd8"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:f5653a225f31e113b152e56f154ccbe59eeb1c7487b39b9d9f9cdb58e6c79dc5"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:4efca8f86c54b22348a5467704e3fec767b2db12fc39c6d963168ab1d3fc9135"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:ab3ef638ace319fa26553db0624c4699e31a28bb2a835c5faca8f8acf6a5a902"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:f8ba0e8349a38d3001fae7eadded3f6606f0da5d748ee53cc1dab1d6527b9509"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c47adbc92fc1bb2b3274c4b3a43ae0e4573d9fbff4f54cd484555edbf030baf1"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:37205cac2a79194e3750b0af2a5720d95f786a55ce7df90c3af697bfa100eaac"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1f2ade76b9903f39aa442b4aadd2177decb66525062db244b35d71d0ee8599b6"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4296f2b1ce8c86a6aea78613c34bb1a672ea0e3de9c6ba08a960efe0b0a09047"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f02365d4e99430a12647f09b6cc8bab61a6564363f313126f775eb4f6ef798e"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5b6d930f030f8ed98e3e6c98ffa0652bdb82601e7a016ec2ab5d7ff23baa78d1"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-win32.whl", hash = "sha256:10f82115e21dc0dfec9ab5c0223652f7197feb168c940f3ef61563fc2d6beb74"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:693ce3f9e70a6cf7d2fb9e6c9d8b204b6b39897a2c4a1aa65728d5ac97dcc1d8"}, + {file = "MarkupSafe-2.0.1.tar.gz", hash = "sha256:594c67807fb16238b30c44bdf74f36c02cdf22d1c8cda91ef8a0ed8dabf5620a"}, ] more-itertools = [ {file = "more-itertools-8.13.0.tar.gz", hash = "sha256:a42901a0a5b169d925f6f217cd5a190e32ef54360905b9c39ee7db5313bfec0f"}, @@ -249,8 +323,8 @@ py = [ {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, ] pyparsing = [ - {file = "pyparsing-3.0.9-py3-none-any.whl", hash = "sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc"}, - {file = "pyparsing-3.0.9.tar.gz", hash = "sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb"}, + {file = "pyparsing-3.0.7-py3-none-any.whl", hash = "sha256:a6c06a88f252e6c322f65faf8f418b16213b51bdfaece0524c1c1bc30c63c484"}, + {file = 
"pyparsing-3.0.7.tar.gz", hash = "sha256:18ee9022775d270c55187733956460083db60b37d0d0fb357445f3094eed3eea"}, ] pytest = [ {file = "pytest-5.4.3-py3-none-any.whl", hash = "sha256:5c0db86b698e8f170ba4582a492248919255fcd4c79b1ee64ace34301fb589a1"}, @@ -298,7 +372,15 @@ six = [ {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, ] +typing-extensions = [ + {file = "typing_extensions-4.1.1-py3-none-any.whl", hash = "sha256:21c85e0fe4b9a155d0799430b0ad741cdce7e359660ccbd8b530613e8df88ce2"}, + {file = "typing_extensions-4.1.1.tar.gz", hash = "sha256:1a9462dcc3347a79b1f1c0271fbe79e844580bb598bafa1ed208b94da3cdcd42"}, +] wcwidth = [ {file = "wcwidth-0.2.5-py2.py3-none-any.whl", hash = "sha256:beb4802a9cebb9144e99086eff703a642a13d6a0052920003a230f3294bbe784"}, {file = "wcwidth-0.2.5.tar.gz", hash = "sha256:c4d647b99872929fdb7bdcaa4fbe7f01413ed3d98077df798530e5b04f116c83"}, ] +zipp = [ + {file = "zipp-3.6.0-py3-none-any.whl", hash = "sha256:9fe5ea21568a0a70e50f273397638d39b03353731e6cbbb3fd8502a33fec40bc"}, + {file = "zipp-3.6.0.tar.gz", hash = "sha256:71c644c5369f4a6e07636f0aa966270449561fcea2e3d6747b8d23efaa9d7832"}, +] diff --git a/iso/empanadas/pyproject.toml b/iso/empanadas/pyproject.toml index d24a836..db5bb83 100644 --- a/iso/empanadas/pyproject.toml +++ b/iso/empanadas/pyproject.toml @@ -5,14 +5,14 @@ description = "hand crafted ISOs with love and spice" authors = ["Louis Abel ", "Neil Hanlon "] [tool.poetry.dependencies] -python = "^3.10" -rpm-py-installer = "^1.1.0" -PyYAML = "^6.0" -Jinja2 = "^3.1.2" -productmd = "^1.33" +python = ">=3.6" +rpm-py-installer = "~1.1.0" +PyYAML = "~6.0" +Jinja2 = "~2" +productmd = "~1.33" [tool.poetry.dev-dependencies] -pytest = "^5.2" +pytest = "~5" [tool.poetry.scripts] sync-from-peridot = "empanadas.scripts.sync-from-peridot:run" From 3749866deee2d9d142426f0809b53be5fc4c208f Mon Sep 17 00:00:00 2001 From: Neil Hanlon Date: Fri, 17 Jun 2022 15:08:38 -0400 Subject: [PATCH 23/64] Cleanup bad rebase --- iso/empanadas/empanadas/scripts/sync-from-peridot-test.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/iso/empanadas/empanadas/scripts/sync-from-peridot-test.py b/iso/empanadas/empanadas/scripts/sync-from-peridot-test.py index 0362718..023ba42 100755 --- a/iso/empanadas/empanadas/scripts/sync-from-peridot-test.py +++ b/iso/empanadas/empanadas/scripts/sync-from-peridot-test.py @@ -12,10 +12,6 @@ r.check_valid_arch() #a = RepoSync(rlvars, config, major="9", repo="ResilientStorage", parallel=True, ignore_debug=False, ignore_source=False) a = RepoSync(rlvars, config, major="9", repo="BaseOS", parallel=True, ignore_debug=False, ignore_source=False, hashed=True) -<<<<<<< HEAD:iso/py/sync-from-peridot-test -#a.run() -======= def run(): a.run() ->>>>>>> 8d29760 (Lets write some poetry):iso/empanadas/empanadas/scripts/sync-from-peridot-test.py From 0abdea5c86fbe7b54cfa7f34cdf7b261171752d4 Mon Sep 17 00:00:00 2001 From: Neil Hanlon Date: Fri, 17 Jun 2022 15:54:11 -0400 Subject: [PATCH 24/64] Some changes to make scripts work in the new setup" --- iso/empanadas/empanadas/common.py | 7 ++++-- .../scripts/{build-iso.py => build_iso.py} | 0 ...c-from-peridot.py => sync_from_peridot.py} | 0 ...idot-test.py => sync_from_peridot_test.py} | 0 .../scripts/{sync-sig.py => sync_sig.py} | 0 iso/empanadas/empanadas/util/dnf_utils.py | 4 +-- iso/empanadas/empanadas/util/iso_utils.py | 4 +-- 
iso/empanadas/poetry.lock | 25 ++++++++++++++++--- iso/empanadas/pyproject.toml | 11 ++++---- 9 files changed, 37 insertions(+), 14 deletions(-) rename iso/empanadas/empanadas/scripts/{build-iso.py => build_iso.py} (100%) rename iso/empanadas/empanadas/scripts/{sync-from-peridot.py => sync_from_peridot.py} (100%) rename iso/empanadas/empanadas/scripts/{sync-from-peridot-test.py => sync_from_peridot_test.py} (100%) rename iso/empanadas/empanadas/scripts/{sync-sig.py => sync_sig.py} (100%) diff --git a/iso/empanadas/empanadas/common.py b/iso/empanadas/empanadas/common.py index 509a622..29ee4c5 100644 --- a/iso/empanadas/empanadas/common.py +++ b/iso/empanadas/empanadas/common.py @@ -41,12 +41,15 @@ config = { } # Importing the config from yaml -for conf in glob.iglob('configs/*.yaml'): +import importlib_resources +_rootdir = importlib_resources.files("empanadas") + +for conf in glob.iglob(f"{_rootdir}/configs/*.yaml"): with open(conf, 'r', encoding="utf-8") as file: rldict.update(yaml.safe_load(file)) # Import all SIG configs from yaml -for conf in glob.iglob('sig/*.yaml'): +for conf in glob.iglob(f"{_rootdir}/sig/*.yaml"): with open(conf, 'r', encoding="utf-8") as file: sigdict.update(yaml.safe_load(file)) diff --git a/iso/empanadas/empanadas/scripts/build-iso.py b/iso/empanadas/empanadas/scripts/build_iso.py similarity index 100% rename from iso/empanadas/empanadas/scripts/build-iso.py rename to iso/empanadas/empanadas/scripts/build_iso.py diff --git a/iso/empanadas/empanadas/scripts/sync-from-peridot.py b/iso/empanadas/empanadas/scripts/sync_from_peridot.py similarity index 100% rename from iso/empanadas/empanadas/scripts/sync-from-peridot.py rename to iso/empanadas/empanadas/scripts/sync_from_peridot.py diff --git a/iso/empanadas/empanadas/scripts/sync-from-peridot-test.py b/iso/empanadas/empanadas/scripts/sync_from_peridot_test.py similarity index 100% rename from iso/empanadas/empanadas/scripts/sync-from-peridot-test.py rename to iso/empanadas/empanadas/scripts/sync_from_peridot_test.py diff --git a/iso/empanadas/empanadas/scripts/sync-sig.py b/iso/empanadas/empanadas/scripts/sync_sig.py similarity index 100% rename from iso/empanadas/empanadas/scripts/sync-sig.py rename to iso/empanadas/empanadas/scripts/sync_sig.py diff --git a/iso/empanadas/empanadas/util/dnf_utils.py b/iso/empanadas/empanadas/util/dnf_utils.py index 6fe3da4..6cb8ba4 100644 --- a/iso/empanadas/empanadas/util/dnf_utils.py +++ b/iso/empanadas/empanadas/util/dnf_utils.py @@ -17,7 +17,7 @@ import json from jinja2 import Environment, FileSystemLoader -from empanadas.common import Color +from empanadas.common import Color, _rootdir #HAS_LIBREPO = True #try: @@ -83,7 +83,7 @@ class RepoSync: self.gpgkey = gpgkey # Templates - file_loader = FileSystemLoader('templates') + file_loader = FileSystemLoader(f"{_rootdir}/templates") self.tmplenv = Environment(loader=file_loader) # each el can have its own designated container to run stuff in, diff --git a/iso/empanadas/empanadas/util/iso_utils.py b/iso/empanadas/empanadas/util/iso_utils.py index 500a980..6243dd1 100644 --- a/iso/empanadas/empanadas/util/iso_utils.py +++ b/iso/empanadas/empanadas/util/iso_utils.py @@ -23,7 +23,7 @@ import productmd.treeinfo from jinja2 import Environment, FileSystemLoader -from empanadas.common import Color +from empanadas.common import Color, _rootdir class IsoBuild: """ @@ -86,7 +86,7 @@ class IsoBuild: ) # Templates - file_loader = FileSystemLoader('templates') + file_loader = FileSystemLoader(f"{_rootdir}/templates") self.tmplenv = 
Environment(loader=file_loader) self.compose_latest_dir = os.path.join( diff --git a/iso/empanadas/poetry.lock b/iso/empanadas/poetry.lock index 3e597d0..5b679ac 100644 --- a/iso/empanadas/poetry.lock +++ b/iso/empanadas/poetry.lock @@ -45,6 +45,21 @@ docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] perf = ["ipython"] testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "packaging", "pep517", "pyfakefs", "flufl.flake8", "pytest-perf (>=0.9.2)", "pytest-black (>=0.3.7)", "pytest-mypy", "importlib-resources (>=1.3)"] +[[package]] +name = "importlib-resources" +version = "5.8.0" +description = "Read resources from Python packages" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} + +[package.extras] +docs = ["sphinx", "jaraco.packaging (>=9)", "rst.linker (>=1.9)"] +testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)"] + [[package]] name = "jinja2" version = "2.11.3" @@ -197,7 +212,7 @@ python-versions = "*" name = "zipp" version = "3.6.0" description = "Backport of pathlib-compatible object wrapper for zip files" -category = "dev" +category = "main" optional = false python-versions = ">=3.6" @@ -207,8 +222,8 @@ testing = ["pytest (>=4.6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytes [metadata] lock-version = "1.1" -python-versions = ">=3.6" -content-hash = "24b4f1aa7304910b04db920cbf95e7c5f483008effe499a9303d02d561a421d4" +python-versions = ">=3.7" +content-hash = "17bc9d78b4ea2e474754a1d321fec745361870a5315784e2a2f5b54a564bc5d8" [metadata.files] atomicwrites = [ @@ -227,6 +242,10 @@ importlib-metadata = [ {file = "importlib_metadata-4.8.3-py3-none-any.whl", hash = "sha256:65a9576a5b2d58ca44d133c42a241905cc45e34d2c06fd5ba2bafa221e5d7b5e"}, {file = "importlib_metadata-4.8.3.tar.gz", hash = "sha256:766abffff765960fcc18003801f7044eb6755ffae4521c8e8ce8e83b9c9b0668"}, ] +importlib-resources = [ + {file = "importlib_resources-5.8.0-py3-none-any.whl", hash = "sha256:7952325ffd516c05a8ad0858c74dff2c3343f136fe66a6002b2623dd1d43f223"}, + {file = "importlib_resources-5.8.0.tar.gz", hash = "sha256:568c9f16cb204f9decc8d6d24a572eeea27dacbb4cee9e6b03a8025736769751"}, +] jinja2 = [ {file = "Jinja2-2.11.3-py2.py3-none-any.whl", hash = "sha256:03e47ad063331dd6a3f04a43eddca8a966a26ba0c5b7207a9a9e4e08f1b29419"}, {file = "Jinja2-2.11.3.tar.gz", hash = "sha256:a6d58433de0ae800347cab1fa3043cebbabe8baa9d29e668f1c768cb87a333c6"}, diff --git a/iso/empanadas/pyproject.toml b/iso/empanadas/pyproject.toml index db5bb83..22dfe42 100644 --- a/iso/empanadas/pyproject.toml +++ b/iso/empanadas/pyproject.toml @@ -5,20 +5,21 @@ description = "hand crafted ISOs with love and spice" authors = ["Louis Abel ", "Neil Hanlon "] [tool.poetry.dependencies] -python = ">=3.6" +python = ">=3.7" rpm-py-installer = "~1.1.0" PyYAML = "~6.0" Jinja2 = "~2" productmd = "~1.33" +importlib-resources = "^5.8.0" [tool.poetry.dev-dependencies] pytest = "~5" [tool.poetry.scripts] -sync-from-peridot = "empanadas.scripts.sync-from-peridot:run" -sync-from-peridot-test = "empanadas.scripts.sync-from-peridot-test:run" -sync-sig = "empanadas.scripts.sync-sig:run" -build-iso = "empanadas.scripts.build-iso:run" +sync_from_peridot = "empanadas.scripts.sync_from_peridot:run" +sync_from_peridot_test = "empanadas.scripts.sync_from_peridot_test:run" +sync_sig 
= "empanadas.scripts.sync_sig:run" +build-iso = "empanadas.scripts.build_iso:run" [build-system] requires = ["poetry-core>=1.0.0"] From f48caa4b547a9915688ad99abf5871f3a597d172 Mon Sep 17 00:00:00 2001 From: nazunalika Date: Fri, 17 Jun 2022 13:27:40 -0700 Subject: [PATCH 25/64] actually catch the error from subprocess --- iso/empanadas/empanadas/util/iso_utils.py | 9 +++++---- iso/py/__pycache__/common.cpython-310.pyc | Bin 0 -> 1444 bytes .../util/__pycache__/__init__.cpython-310.pyc | Bin 0 -> 411 bytes iso/py/util/__pycache__/check.cpython-310.pyc | Bin 0 -> 907 bytes .../util/__pycache__/dnf_utils.cpython-310.pyc | Bin 0 -> 18251 bytes .../util/__pycache__/iso_utils.cpython-310.pyc | Bin 0 -> 7475 bytes 6 files changed, 5 insertions(+), 4 deletions(-) create mode 100644 iso/py/__pycache__/common.cpython-310.pyc create mode 100644 iso/py/util/__pycache__/__init__.cpython-310.pyc create mode 100644 iso/py/util/__pycache__/check.cpython-310.pyc create mode 100644 iso/py/util/__pycache__/dnf_utils.cpython-310.pyc create mode 100644 iso/py/util/__pycache__/iso_utils.cpython-310.pyc diff --git a/iso/empanadas/empanadas/util/iso_utils.py b/iso/empanadas/empanadas/util/iso_utils.py index 6243dd1..89ac824 100644 --- a/iso/empanadas/empanadas/util/iso_utils.py +++ b/iso/empanadas/empanadas/util/iso_utils.py @@ -143,7 +143,9 @@ class IsoBuild: self.iso_build() self.log.info('Compose repo directory: %s' % sync_root) - self.log.info('ISO Build Logs: %s' % log_root) + self.log.info('ISO Build Logs: /var/lib/mock/{}-{}-{}/result'.format( + self.shortname, self.major_version, self.current_arch) + ) self.log.info('ISO Build completed.') def build_repo_list(self): @@ -264,9 +266,8 @@ class IsoBuild: lorax_cmd = '/bin/bash /var/tmp/isobuild.sh' self.log.info('Starting lorax...') - try: - subprocess.call(shlex.split(lorax_cmd)) - except: + p = subprocess.call(shlex.split(lorax_cmd)) + if p != 0: self.log.error('An error occured during execution.') self.log.error('See the logs for more information.') raise SystemExit() diff --git a/iso/py/__pycache__/common.cpython-310.pyc b/iso/py/__pycache__/common.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..da4d90c29b544303aec2a7f8ebd4014f33849c1d GIT binary patch literal 1444 zcmah|&5zqe6rZujj`QWsZoBQF9N>_{Ry6L?g4hZn+9qld*>sg^Rqa&Bay*m7?AYUu z9oqPWRIUjA01n9!{v=;HaYf?7rM$6Qb&EjEXx^7+etz#aZ)CUILNNZk{_E_-LFhN_ zTpT;#d;u%Fi4kImVS#9^F~yZ{P@~c&H7iXhskB9{O54cDDp zvS~50#mstRT}2#k+FQhpO^4YayIZt1Sc5sN$y%(;61&Xye=vXS(LL^LE^%vfnZu4P#^?!TPY^*bO^g!f5M|MGi0XL;>)COI zcI7|On*W3L(>rK6!;jIy)!#LjlTgcx1GubNy$uFOnn_?01A_*zX99DC+RUO2W>beX zXp=d##hSFuTC~I3fZQpq`%*|%Sl6B&9mbXVs8%1>>XS;{s@2i$I(REC$hD{7fP_;Z zZ^n~@Zt3`b62#p1%a-rQlC6ZcJHG#H6^Qx^HK(H?wH{1IqY1T7PNyg15heG}?oOyP zygU7>vdF#h>4@5Aqw)CB6WTnT3`f)PmlN1{@6mWj&Cz7|3sN>zU5URAy`_w~CxRIl z-XhAqELwP>RNTv@6z33#GU=sjh{dr?2I*QEaJUUCy8^;N1pjk7xY|=#eYNC*8eA0J z{#ifnvqAr1|G59S=y-9GYsge)yx6Ne#lg9e<-sCK7RCOBSgL@JFpp$X^x{YG?OHkhpz=lLYG8YPD!A`C3HrRb01QVQ`&i5 zm9M1CY3Fr4wYT%Ok~+{0;ow55H5^{e=q?oCzZ>f68vI$1aet)*bz?}%e=gN|4bTi_ zk_S=36|^ZNq&tN+v!zs(2}Q|d9xXEO`d}TzqQuazt9*Xr(*s;OJP9S!WViIC*Edun%YXRD9mXCfGVTSU!+08jx|+f z0~lLEj->gKg|$<6oxzhr8Fqd-%Oq#&<4xW06;X1IAB_ AfdBvi literal 0 HcmV?d00001 diff --git a/iso/py/util/__pycache__/__init__.cpython-310.pyc b/iso/py/util/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..5b6786532206a259ac045dd8e127b3bd32769341 GIT binary patch literal 411 zcmYjNJx{|h5Ov&qHBi)z)oT_kF)>sK!Bhz`(9MgL#%^lW?nQQj6#fnqe}*5Bm5IN= 
zfUpBdoaCo_cjxDKhQ*>lK8ClK?RkXIdm#SJe0+l zaW0_dd`#+~HZ{Gu8(r%iwu^(o0<>0rN1clT?xbA&({)%k)vk5-0=pKxuy4p%j z8`V*#8d`&uv;)uwU+_}29cXxha;}c5#(ed&<8@fkoq34SX}pY=2~Kg8e|-ZM CO>9{J literal 0 HcmV?d00001 diff --git a/iso/py/util/__pycache__/check.cpython-310.pyc b/iso/py/util/__pycache__/check.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..27ff79b33cd0d78b1e17168b698b88fa6547f597 GIT binary patch literal 907 zcmZ8fO^?$s5ViBw614@bkSZ=Hf(yAcCvFI>*j;f!<*=M0AuEl&ZFZeH*iNBUB_wu# z2{-;yuAKM_T$UMU7ZpbGcbCD zQu3B?kNY=-``2FX@!*`K;V(EtQrd+lN-ACA>d*kc$4Vd&B&VFfO`7{XQXcSthc_e- zc?4*KE;?C>^2+R_yvJ$d^7!!$6wMgu$S}#qBb>17s_|^#w^BCNoa{7eq z2_t(CJiH6|-aMZaQmW5_vqA%+Nxd%Wqb|TX^}E0bd67q2ZVGL52v|Re9>DbbWwwIn zGg&Nz%$AkSOts8PrA20yl2@=%8I?8LthJTQT*0$u+YK43)|F)}!r}sOF^%c&VNZ2- zpDih5?hRw)Cj=9>-^EgE_k@EI5(;b7QMo*bvF0tt1D%lMAI0<}ZZXfL1_jRNW{e zl18gi7&A*FC!0p9U3|VZR@7%-Dyy+WUGV1pyVK4;o1dmsqa;q;-VO1Pq+fO@wcjc$Y6-Skeu|Ig3U2t{8PwP37^8LJg(ST;q+)P2Bmo`LQQ&sx@LeKcU#qp_<~9$mJ2b~>_^PkrH8$4$fsuDa F{{V5}+oAvf literal 0 HcmV?d00001 diff --git a/iso/py/util/__pycache__/dnf_utils.cpython-310.pyc b/iso/py/util/__pycache__/dnf_utils.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b12b1c382b2e1f6ea314b1231aa860aaa286157b GIT binary patch literal 18251 zcmb_^TaX*qnI6z++%Xu;op&}bLlPthLtQOXwpJICvb^+2;mDQ^uY!Q+o*94`V0gMg zYM@5sNJ_HawH(=%q$=4>3S%d!Qu30dDwj>AQmN#1A5)dB&Re#UDnBII&BnRVy5#%* z(`aCZl)SD4!~Xl6)93o1bN=(+4!cuRxfp)0|HprDJM+C*><<~4{3Q^19*_H*0Cubz zv*WhWj#uOIHmZia6V-&g&G4P9CMBM#rsSQjrsbWfX5^i%X7Nt6bKCi9KF&JLc42#} zI<;M_7UQv(V|LO`y&tnv?-|u;#L{*Ku?%7*+gOj4v;Eg|>${zXYwh;7E#>TVU2CJO ztnH>!UDfPtTJ?@?HM-mQfLJA$yV>nEUF(J0PTTrQyUy1OuUo3yxU<`Cc6x7Bx@z;e z+|Mw^*jhOrBwy;byQ*vi`Rko~P1WseI~^}5-DtL*^q z9{@NpU@1lnRTEhQe~E|w%&eI;?2C5NG3^wdG@cBeES?;mJe~rcDLh3dX-_-JjYYd; z&%B=mW@qiW_hZ29C41gpcs~x@F4{{-$=ZkP!+7WHBgk*qN9|+po7KF1+&+QSf_*tm zx9n4>XDW=Hw$C6}F^rwHml2z`&)Mfuret5R%XrTS_E&<;Eufqe(Eq)hh2QI2K<<{) z-f^v-YuQ~rCHQW2cN|ssn%$0`AaBdDZr5GMas`bQYyFj%5#~(Rm1EVVHoLj8;V59- zYP9RFYa!HBPJ6ea+d)&^j$UvF-XIwzIsD!4eJrYiDb?M9Gt zz54C86BL@8ovw0fwsX6;8BB#Sx7$+g{%r+N!%-jvZun z>I!iODYn{Gy-twc=(XDnq&nS=dJ`WucQ)@hyFp5|x4X8}n8o54{T2{N<9P{>d(Mc( z{GuPL#ry(b+@At8{3Kw)PXU^K8ZhZ+0aJbsFzsgmGkzX0>n8wnK33e}7(#i33<(tw zN=Rr*hir2x<|pvPYDJ_Z<=eC$w^N6)+mIf&GlygHy`)o-a|WTT84|_-K>G$G;S?{PnYnMKVBkdS^F(Y}7+i2%rO#Rq9 zfpSQDFK*A?G1ZTC40(^+XcakrDmmt&96v=4#E|2pXSGfZPovIx#0`6asjbo;@pC5n zMvNrv#o<{$u@{$b%i*_JYuR4fGm!h-M9LvaIq%Qdv0hAl=3N+;?ZbQQ{YBJ%ad^qQ z(7L=AkH_$3#jCVd_u_*&e{Ns;75j*Nbk7*fBmFUdp1Gy$Y-h=pRkaf4Z%Inm(v4oX# zujyh(K8UYWld93a*A9wU!QK0gU8}3cmg5Gw9o20)4X@U;gEV#`%DL4nGS%3QTP@-H zy{7HdwtH=_*>2tr^3GeHs@FC^A(0ala;J^nxuB(~w$RET+vs+@db8sw6~|%^rYH7Q zZM(h`6fl%cP;2RQFx5c$&92(T2z$5t(^8$ca*HH;1&(19#%SF7CZZ2!mcO&IyuGq)TgzX&y8Ml+%jA^1L3NTLqw5CdPTkuI%vQJA3GyS{+f5auKrOfHUNAdaMH=@tew#axMnnbEQDS#f za-}gHoXQ}(>3BC0b`%=#Zi0Ry?%V^lG+$Y}@oF%&?kT6f{WTodZRWwTK&otadtQ*e zA>HthUO=&NELXSfQ3*XM5o*^c&yg9sTo=d+LuJ!a*IH0UXCC2bkOsygR zgD;{=yjZ3mt zvrv)yENmgf3xviVo^dmoKZ3-kYGMMifgT zmgt-f1OBY-v{g0P#7Ue=i|cK(YP~*acx_Ec}E8+wQrZb=$ETsnyHxCDhY;$97T zsQb-!+X9b*bk*%`Ze6w-*oU4IRYz7z@06bA-z=_gtL}oeyW0ahW?vQ_+f%!21}tA) z_&BS(VPStGl{6`;uD-L=ZZ^nlS>VK2IX_$VhSx(^cP)3vX*4&Qj$N@{*|4}_E0O`c z-deZgT(+8CRG0J}g9K~X+r}tSf#x@bh2$Wa2sX6Y@GvCXx{c1r=7$_WRLSq6b&Z|u zfidlN&;vbwq4~{v8-*3lKgbdeI8(?HuBUqFtQup;x@g@1z1CH*RXuKRi{i(I>S~;O z-5p)t@p?PsS}u<}Vxi4OyJxeQbb3U!i-2PK26bclhom1Cc z=4NSbSi9YxwN=08NI4rz({j6&s)<9u?O)=Gy2TYmS!Ul*tQL3XuH!-C=_<8*s(<7q zkxs*nFj{WA(u>mQ8>^L+*-;9I(sqcPweo^6B>D(@7DEn_;H^+Qd6fY2F$LA@Z#BJO 
zVd8kCNDN7*sa*9K@~OuOo*;Od;7b5ON)X}---6>4Gbcx6Fl#$I?e1=Eg4YXX!i8G% zx;4E}g%1i!+`pmNRFhe`hN{a+^)g$C4s2Wa1>xZ|-~|w6P{2hlP49HgoIL}H( zDPD^A3%5pR8X9V((8%b=aCDqqK?I~8d?84A+>e9wCqVp@B>i@Lodn+qQez%_16X;GkC9!A%p_?<_|>Rx zrd-tdxpSKy$k{&f_2}=G$iR=PMT^5@3JN+s5`8V~DbPod>HCf*9Xy^7b8T-@rq%BDq?4YJ;o8y z!AXTSFj1qga`3qjrN%?dz7=?y+;yP4#`ck0~Q<!|6bUg}FHDvz}cmz`gaBrze&TWMMeM|i-$WGBmuoXb0z;cd( z_eyQX25CIxiQ+>;W6w*p%wZDJv;ht*E3uRh{>VLzaN5t>3ETX@05`{cDc{7GRrz9$ zzSzkD^ck(pAnzyDvvvZp=`7>|n}|tFI%JbH9$S1wW3jQ~uPo*oW~q|MQ@T z9!>kjLCMb#^XTOa^(d_Zv;=j&edr$sC2)I1q};XBex7f9nF5>{7VV6zfthQuH~;lu z*3J%5mp_ZK%-OjpJnv6~%Nxn*CHIFC7a3m0MZL@%$}B{&dBhgOnBkY8?a1$$gQa~b z1*9CZr_f5QHKX+;cu$MQV)k$hH5|4}_&SU4bMnlh#1VT2X(sCgUwYJ@oroQ?=cKkd zlqvd4$dMczx92C)4oTVxdtq=AHO>zg>_zA*EZ=G^`iK1^{z?D1f7D+3AUQZS@%^-a zI{JRXKPKPLcuW3*eds-7a2BCO2`&3)hll)Ag3)D!&PeF&@UU>f%lhplhDT8H@Sdy+ zgco)Au8S6rei(P(wU403MfB^O;Q1VDi7!3F!6#sa>T?c?Eii~a?C zqlSj#Jc0Ty`IlrIC;f|l**^J!Ik=3zpO?O`_$%nKXmJpd9!pQp>)xRju6fHZhFtkH zR?OK!#Xg1BC`(X+;iG3Nixp9kC)PT?mmjQpCtA==4Nv*d4IzA*kA3=s#Nf(@@w-2; z&tMNtqlb@qXNG6(vw}C>;_|3P`<%4&7{>T`RNLbRYJ0*yKX}q--=0O^E=Y+AN<4)U z%Rb>bKG=I@jQ(l=>EZdk*l!qvFAbit&--5*Uf7G>i+^aSc^PlnzUWwOHUc>jS72=kp%Wi)XOhRn}m9_y3MPY(~6dg1f!MF`0n2HR{acgeyev( z^x)gN2W&rV{fhJhtqXQC7-M$utj^^>EA1P1Z`x1r_?4Q+eMd?Au~BLJaa{v*mY?kZ z{v#=UNQ{Nbk59grL{t)*F6_ywXrOQ&Q(B1Xc{x-`SZ+f#cRVnKU^X4c7EYWZQ&UBG zuj^wy)Q@Sd9ehAjS>5~K+NFXh^Zk0$v!D}bi{!&I30eW_qN5!BV`1ZDDI&ES_2}$j zkPZW{e-sZg!s^w!cWwj;96#06ThBaQd-}<$*?jt$r~Apb2hg?k^LPngfQWF7{hUNs zp}cYWM^<~TTD{%utg4-DYh|Sg?nHUk+k>|U)dDmeHC4Y~qpqo%fFitqG0N@&73W?5 zEY)D-QT(?D{^ofUw4MW~py9&XgH`E51*~9yb`;Z{k+hwtHXg@hfjC*#N<-Ix?$|&f z)iNvFj#r1uw2n09YuaB$ODj8UZ)HWK9T9OPN^`(Ymz~Bp^cq^x#vSYN=gvLWzxV&v zqVD_WwHr0HvNF*HY4^guc1P?2M!mAr?m^k+u273)t?Z69m=$XO%E_Rpr=*73YmIF? znBqWdGUQrgCn)m#*OkcU3{f}{0`4Ce_b>hIzd(J{&tg6_E7;HLw_XJ$Jvll_rpDRV z;r&zd%jP9)B0|#_;ALi1^NOn)dc>x(!MLamf=z-gfd1KaudY09iEvv(iQRxE7(yVq zx@Fg@R4NbFF#qIUS8Qtq`)uW&b^7hW)wc(yKQFjK&9YuZ+*i9;S>VhC8-O1f>UlIS zN=-;-58~F($b$BZ>%cMl5;mJdv$C<@9z4iVXv5yPaHag<6w+Z;TLG&HWk%2aD)uYC zA%X2;34fH37SuMINrDcv+YjR34a^%aymIq_@uu}4!x3$xFAwG?8jS==(Z=^zUf+Vr z4g5Pm6N(ieg{y!)1R;WLO|ab58j-7g(|YySt^UHB(9Vpq+mK2dS}=VK1_a^!xrj8= zO5yLI!1+3RoY}75aYP)w$Sju$w4S}u-Pu)_7++z=6m&>V$5u}-zPz@>jlC3sy2d*G6Zo|otDL2d4P8g19Qmsv-6m?j3NzL7=&QIouQVU&I>eJ`Ow9X^rAWhcW+u@{f zXTxYMt;lx}E7|k=61faX4XRU;XS6=5d5J;*-|2bPxrth$95WM%VZPGj2srQ6MV;xP z^chVJCx^eVFDV*tDNK%rSe>3s!JzghZ~@Fu=7>5xJDD2xcqvRf*ufxyZ5ouudsX)| zNRM{F6pcePch``|4JftO?$d#`X{t%D*Z*b&fvL*}<+5`bJM4}isVYFZxAVN6_sMB&ih2i8&-x`M{u|%@_JD;g3^kzIE74XJFDWKQ)n$wE=!?hTqLk%v6}~ z6I6=UM&*oO7@r!sgDGaNFqtQw`b8l#Q3ml`28J2*HUsq&jVT%QnrbNNOW0TRAZ*wC z)JR5cd}4e$SzGGkTpIOYyro1YA^n#APqv5g{M;;NBt7>@E|r6#OMl0BQ#w8MiJ4(Y z_rQb-O=BVTaUopz;7&;N@W_8q%@?Z-`cEk9r2$gwn%| z4JDhF#w0JSLeMUp@?yhWXl;Qi4!WGDBKA1q%teugvyI1(@Bop{Jj&b8k1Wn)gpUjB z&qHydsMra1agdku2T!;Q7=S*o$_$AeSadjY9P<2AZv&JKA^6o&SgSVVqgMy_#X2&Z zK#^NQAZ41eT1!ypU=JAm6A_j-$L7$nzPCZ#j(!ZM@Q8MYz3FFwF=A9SU~j}aP+0Qs zJCvgmLlf8+%oFFpK4tOPFaz}_OF)Jf&!9vdp|MTsIxN)Gz{6Rmv)R;k5N+0Q4;nQh zgo`Ro6!jxJ&wW}SAm;RT=A8Yc)J%ckvuWgHlzX40{Cd?!#HIcP{d|i)I_@~H% zqeYntRQFj~DdbP62Gpwe@xl-mNBjcLV`hSMsA3P7iTVx_53YWP1+5&*!y5{k7WhU%BbK)F52apz@a*CPS~tv4*0 z9fwjxJv4bz-rms0Qc4xf5c$A`iefTO(RvrNYPg_a)Z{tdRjdq)xdV=0!NpBho@|HF z#2n^BfvE#ol>ZZ^9UT9U9H_E=ZJ zkk!3S?;~!@&@2lh9L>+bXw<)c5!;CsHaEpq++Pl-C6rk-f^>$^hD}qNqQ_nT%*`&C zdtE7U@CcJ#C5ZLm$3ex)%Im6*QmVtKqS&%^ydzKf<#(Q1SXm0YMN>e zIA>LJNDagNX;u>sLWK3WNI>Ny2Tu|q38V%S;uJlKf*c5!@?6gS*fO9<-ql|r_z{7) zCq>7N`X19n(xP{f8W2!=Qh%P{9fH3=@Gd~PpwFgkG}S?JLk5)HQI5us$Rd~rasSd@ zSQJCVFj0kb5KJ4ip2NS4^S^UgiE3INEKk6fH6??k32Uu4NTfeFV%u)rJVvDOtmta4wlpXKZ#bf 
zZ1YjWnznFxaDDPX0^bDp2-pN+c0M1O$@OMumfO2gO-+w36msCowpR5f*qnOD5*~bV z8*3I!Z#_|WVb5{TKHMs0Cyl$gLNkuBwt0=ARqq)l#onNiyHheUi zIHTde8)aSV7bcZM{e{U`)OLS)U-Eq_P+_g#==O++m%ClMb~u&(kG?2dE*E``#snIu zt@9fj%|;Vakzk6ZeH!lD%{%BUl|UVm5|A}fRb$_c#cL^wC#3foNEE|AibsJVa6odmgC^5MQOtl~#Q$!yyI6S`vcNo!B z0h#b4=~~nUSAcz_l_A@wpAt@^73`!`om7-5Kd zn>V@34k4GBq2m|e13&|^PQv)aG*ch<7jBJq%VfDivSHuYuE*VkBL^BJ<5{7-fV@Al4m1Jo>Ot?}ecf@qGPW3qYV})-xu+QB9 zcPAG4BlG5Rp_Lgn0Q#|O6hyu(p)wh~Ft`4gpV_H5?!Zr0{O$1xXdJoi_fN0Er$|?0 z!zEYq-+EJxym!<;=z zl3`kD6oME0OlX6f)&7lM9R7_Z8&-4V%?)R1-MjldMnrtg`RPzfpvCSC?CSIA6Xkhr zDTQ?w;d$J3NcwZ~fI~;`=Do$%l6Po$n7;A|AK_y!z+$>^_ut9rVY!Em{pj!*ZP+r4 zY;S4Qo_$E#Sp-&=qPmt2)OE;bubxG(4ohxY@ed>Sai8!6ANuXXR(ixgGCV1^(xZc8 z_A&qH&=Ol|TzyAInzE03r&^~$wobLq;Iuy>p|c2`gspTg#8?V5D1HR6m7cV%sE5a; zO+r0R-R23bA;D&d%TuGV-93v{l!C1kYv;qb`W@W^wjZ{x`+?R4GbxNQFBX670{%qMAu$WRy1)dGjnx7)=*B))o5YXKN)cJSe$#@W zOLzE+CFV}?zaFJop#&tZb#=(=IQ~FnAqJry@{(kS>SJmxN1q&wa?T2gCGpxNQ$jJS zu3!k@5n8cc!0Elc?(0)_LwzKN>}aM{zGl=gKExhlFuLZYIs zAv{pf$7jYKonO|u;Xk-lhw0|AE0?Vofu~p3SvSjwPbJ%wF^z^!w_REp(dVdR`+HHb zF36m0J9YRNhINibM{yRyF$MyEuL%#uhxozD*zr^SInd7j^9KeF?!%(@5)sSErsM}H z60Zslj(yHZWPHGS9evS(=~4Kk$+~tR7dRqeKog?p0qUE{#Us)utjRR`^u>f1t}3Y6 zXl2GijP)zpQcGuA@}@9$U8Wd#brphF_z!Zf&V-Mu!$SS&935GN!Dco`{3tbV67uetMtTx(PNrCwSZVSb4sk?*c@BAeu)mXD{#ba?m!rCwsZaRz7B?G zFL~>OhTL3#XV)V4s4l^r&*OklQU4kCLJ%alLZxQSQJjz%|9J79`Z*W*_#T^+7*UHOs;vW=g zsb)Ih(WXQ%${Q-$sZFF_5l^o$?Rx}~WUAg{^v4AA1k@Ib5l_9(&|f6@O9X$Jfc~QD z`vkv5@Y@7GAozgbhXmkALZSIXhJFN4wluf=T_%j35rujF>rDG=1Qmel(FxQ}SbW4- zCfaowuOmmj{8ywT$B={YI-zE541b>@gNU%rn6M~--{2hY7OYrvy)*|pK zL*E8?kL+FsyE>Djg&+laJq0P9?zWK3p{FcD{-z9Hj4ztA$U*5`|Czriq-n=tjE=ne zZ9o(mi0UYv?)2ddw_y2u#U}y(zN|3mXC+aQe0Y`^4 zuS)Se)t4b45{<4MjD3hFJi99`h&f}Yr+V9kge3mouJzKZ-+1lS_3O1)u8B~(UEjs& z6^d9JJt{4&x)u4`z>op$B6X8dKX1dYdW(GtEgmb5d$$K)dat*!F&2vYhptJ3BrFpn zf}6$~bm5V{{G+(uqz9JWw}7luK%L8dUCQ*2MhJh{jlEo4Te~s(DH=uu;4V1+slShS zd6xID#Qa?ga;-kl<_A<(fIGiwDHGA$@n8_xpZr9O|w!g@z@)PXpM_lyg6>o}Vyy(p<-buOQosuiwa9Ku5h9VO# zdK=*tZ(jD+SG;-ITVL@GFMG=sZx~&We+t4LFM212(|e4US^y@l4|a0zwd;y55Likot&`cU+R z{lkB30AbGD(-*Ffgx_Ki>L9qHo}&{f1xY#PPs@etf9o9|o}hS&Ty$GLIkddvty2_Z z@a433rgavgV977-OJC+Hf;b1!ba>96VQwjV-q)fm>$zZ0gxNQkGkM8+X6?Y$T`?YL z9)omQ?$Rm?AzXuj`g{MCZ6 zbmaHM3wH|%r=Nv;Bf*gTp_hmT>OC?qUF1PWOV?{$W?1;@xxdUtBeih2UQie4T)7mA>~OWBuhDPW?Jd+$8u0!GQyTbodGn1k%ma?B`#c zO~Dfb&w)#_v=;p8mt=9BXnuYCe^Na9mMj+&AEL!I0eF1~SB1s`n@~E)WLkp6e1TNK zogz9=*Ysfj3CbQEHcy`4;;?xRnr44`vOB1x_j5&Uz4-y`@J1jL^{ z$Hs^qpGyA{UoWv4PWtDcO6DJZ@mZAn<3d%juJVj-S{9eAAuh*ix{JHe^Z|PxG0oq9 zAECfdPSqsK!W-V(+qiYl;Z{?vW~WtuTyDebd+|KQq=5-MMWYBZU(pBqbDR#B+s2G# U;2yL8kBn+cbp}8H1VM6tN!pcUOGaF;E!x_pY{zb+#*Q6Xa+FclZr9T^Y*GZvizTrF z3Hn}ewF{AxPUNRfJ(H(A^&jz-ym6oj#>ANt`Cr?;McWrL44lD0gx1 zx##}GIp?19-HW8zbT#~be)X4o|Ne@m{VNsD|0*cFhe!Pb5}|c8p$j9{J38k^$Kbru zsc>%2a;sx;+3wh!S36bCosNUN64wT9$JJ>pGp-LBod(lZ+#D=*mIlk6WnKGF6Sk;+ zritnkqqBmNBWftsP+AqnuGVt1yY9EgQ7n{qfA^zTl((Nq?=O??!(;D3G#npoyY7Qz z94YVpy)gE^83&ZT|A{B5Nz5%0*?ZUhH@uj()6(;`-9)CLxRVU`qu$4XRG|ddx|76- zY#Dj|?(i^@$#4)3(|qM#6o}^DGivmh zHB(#{R%i-4v_v&D_icVf3nFtx{WA?BYltRtM=XhDPWpM@hlDI0KL%uAY7uS%l2ovIZA$RVpgasDd)iura@cX0>`Sinhq`a~6_DGsu z+7G>6I1FXfJtx@Uyl6cc#$lksZSVe&#~B4Ojk@DFkT=mEW8$CGL1XPDN$N#|pcg8r zgl7GIAHt(L8B)W*+aC|RX_O4P53eE`_PFy0fAxVE422g*hiJ=#%w`xSDGx?d41#Fr ziBLtop_l9zE9l07QrmdL&`W5lauVfrgQ2$t^mM<^xUe$b8LC=F6t zW}-sx>;(%c;Z^8O?&W4MPLrM7klnl%sU!~QJ@O0PWH3TMUqsT6luxhPt-%oHzXk}h zkUMzPpBb7qsZKOsn_ydA(Lm`FLl{gMpejsNCKc3}6C0@ouE|^*l*Lpvbxxd9>RkiR zW}aJ6qPn3?DtI)%zQ_>{a~t5+xJMIfaIa{h&ZQ;r8%!-16pbXB+-GHCVZ7C%J*c&b ziSz=>OWflksAZ-u6+E$me!5t_qH+KAf<~VWP&DTnx4w*-E--Zk{EJLo1$Bw3=hEj7 
zuAN**n{|{8u|eE}^;43~3#lhAKhdYP^lOuvxbnFU-iu{>l1N2dJ^7PKZeK*H3A-SFx5?PhLxJ9NawB zbqzIJ>GnbURG+#NcY%LPydb<&V_FCQ^+}ytbKf^6Zb`o>zE;pDUuV6(#Vy|C_eFc5 zVPC(vlRNw4I35OrFn2_xQl@IE4{ye#NzB|8ux4MS!C;g(=H@A}H^0=|nbn5vDj2iu z_VX%ee-MoFrVKwFM=})t=wVOgO9OaL|4||z`qVGC;1`3V+>zm7q{!;6K{QO{m-<$2 z$!>fYBMY$$Xq-{v@4;dCV;Se}NG1niH}xZt*TbV!2L3)Ag323MT@O3I7{1xXf_sTP z#-hi2c@3_1gzXh7IAuF#r<*M^Bi&-Rzx8;US7&dQ)o7jVIO(a(VG=LZsUXQ--n5_%S%NM7J}Gk@*F^NENIA+OF@SSu;5 zTUh}Am&=Apk6LvaBm}lO>QjY6$wKm4FH9eR4rN{eFLxBm;b9o( z=KY;}ALY&6REELeJ9yui+Nfid+bR{wIL&MKcu^_%4Rkvzxq%S#9%eM$Px56BMi4OU zH>O<#V;?0%wHmpp!gxQoWqb&%klS6x4cG*vd+Crc$m6s2CZN7&WT>r*O4wUMO-`;x8$LPL9K1w)a~Cp)du%9bO$Bd zHg!X{|6p4SIci+AThbf&qZ&gu|KOS^88lnAdVdB%w&&~X0*aJBG9G$3D*g~esvYPj z@XH5)m++PFum|SUm~jC_!GqsFu}(Gip7u#qR3^r{HuIoVHYb&ZvPE_5IqjX?B+JT8 z=tX5$?-Vf`;AWI~ur|0Tcpo?jm8}6`dW@YsSe;_t#WNlmZ5#qSZnqqH3+tAzQ}PBS zZ&UK8NEjdZY*6ygiTMGNmMK5vIyP~3t2NpP#b$P}Fs9fZisWNzUXkPB5eP*)mGnb$ zwoTh8B<@%%#)`h8XN^+c=&&Ga+N>o2`D+w<8jDx^K>N{4QxdtZ*2PTtB=!mMtW$GZ z8PUIj`p5Lzq$XiUwCeo~#7B;-%6iH-neW;YotdHAsi6(hTulo|I0^0;i0 zZ9JZ~@lW;bexUlHfMW+ITcd4ryG@nFf1&`(O^$ID1Qy7ieHOV}`4%N--s@E=vKH)M zGzy2JWwGGpJ*xNt5^_C5mCA7!dgM68dmYX)cuM;6E}H*69`!O3)1-Z4@*lLI+y7-& zSs%Ljd&8kU(y&Y;yIgAGDVMUK71|5{gT)3QKM&*93gbS}DHkMBBm0+9T#VpF5*9*b z+#bY)+lx3wI2wM1B|Zxx9!y~femMjMl_-U^k0lU24_J(YSj!zL?t6uMV=zvTdp?uu z%4k#|aZ>Ow3@p) ze8^jXXqfV}B7k%cOdwx-Lz|kSI<7MjI9sWz`H=F2kw7StbE>> zy6L5fDOR5lx~!k9PaJ;F`pE{F5b7+hYfKubt8rbUth2eUIccJ)z39~1tyrSJbsL` zb|`&JFigGa6mDw`74o~3Fpwk+S)e>JT6St3_pA?G*I5UG7`ekUA!HTsvx?P5!7%MK z=lF1t90E5t&h@)YBlAS;GrLq=r-V@~qdrl^Ks-i~a-qXs31cESAZy zqix>6#`ot~U2f8lTa-|w0$76U4W(pv{M5UWGcDZlO`-_k$~GkgVT)yL6ZIOB+)Uua zbL)|W7W6D?QM9suW7dlpMydTf(>W_SKzvl9=!UxgbeCOl zTp+r6+5pN1)I0%%IAt&WeN9-q z@Lu!>U#FeeWaqSFgBC8vyykQ5MGpYO@2^Q6kCQ zRpULJth+eC<|e&HZmE789w8gyE-SZq#@sD#E$<%T{Av`tq*iS2?m#?kNj`M*8$61R zrzR}PFxG)~$e!%;VYdv~3u-1bLl3F>S1yuNdjiG`!@OMz_3ltX2p&#sA-x{m3DU++ z!y$Y*4!+$qKEsDQU*Jx51Zk!6#~AXd73WLyQGE6dxMbE8nVV~78>N9Rw)vxLmLqfo z+WN{$ag^`=A66>h?K*9Xbqxm~ zV0YxEu;>Q8SDos@L5QwpuRbmDK`5eNn~`{UghTCF(9M?q=04Cctmu2x=0{nTVD>vW z3c1`V5=5^J!ZaX+?1x7q9GKqOdOdS{qh7Zkb|1cTD|5PuO5b_w)-4%ejevYbe7k|d z%=U8g82~$9Vcq#W6Cb{>lYqzu4U4=HopR{U`E7QkRO-`x76gX=r76zU`cI`ezFmAq zpv&gsj@hFNnEBPPM@#g&xKF^zz}uI}fE@&#uOnb>-aNQPE=LQ4Zr>wNJp`%ZcI!4IbwvUh-FL>(*C}8)c*a zRVveR>JNe71Khl;lrGh0FsyJzfNwk^e|q^JUb7tQ|KW8nm0_hPgZ-!_-~SZ4nAKE$UnE_yO?<+L zafg&j`4<>L(s7v&fqWE^6mBJXN~tJGDLJI%5hcfzoNa%m=nO9@01~uOw~#m*;%jqm z*XFMc%lI8GU%oV|?r)tRI&02#r|H~5?l=u+sZr(L1lksUWaaPS;jME1n-hO9tK_u@ z_>98Fp1sSe;qds3??2~0w|w=p8ZbAGZ>G#JlYC6cgpw&G&wA0nL&a?@tvIh~jxLGQ zS}S^d1GM}WB_xMVHNpKAfR{WVR!qq&NbrFr?5rTX37pRdVjDLu1Kg~VwbRWT7b;vh zh=vEj8+;F0+(?p~dB$fgE_c3JX#ZW3o$`>Fnq!lbIs2n Date: Fri, 17 Jun 2022 16:27:39 -0700 Subject: [PATCH 26/64] syntax error for syncing --- iso/empanadas/empanadas/scripts/sync_from_peridot.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/iso/empanadas/empanadas/scripts/sync_from_peridot.py b/iso/empanadas/empanadas/scripts/sync_from_peridot.py index a998a28..e0fe0b4 100755 --- a/iso/empanadas/empanadas/scripts/sync_from_peridot.py +++ b/iso/empanadas/empanadas/scripts/sync_from_peridot.py @@ -55,5 +55,5 @@ a = RepoSync( logger=results.logger ) -def run() +def run(): a.run() From d38fa4f349d8eb2d3747039e5ed7eabe77e3c966 Mon Sep 17 00:00:00 2001 From: nazunalika Date: Sun, 19 Jun 2022 07:29:01 -0700 Subject: [PATCH 27/64] push stuff up for now --- iso/empanadas/empanadas/common.py | 11 +++- iso/empanadas/empanadas/util/iso_utils.py | 72 ++++++++++++++++++++--- 2 files changed, 73 insertions(+), 10 deletions(-) diff 
--git a/iso/empanadas/empanadas/common.py b/iso/empanadas/empanadas/common.py index 29ee4c5..bf081ab 100644 --- a/iso/empanadas/empanadas/common.py +++ b/iso/empanadas/empanadas/common.py @@ -37,7 +37,16 @@ config = { "mock_work_root": "/builddir", "container": "centos:stream9", "distname": "Rocky Linux", - "shortname": "Rocky" + "shortname": "Rocky", + "translators": { + "x86_64": "amd64", + "aarch64": "arm64", + "ppc64le": "ppc64le", + "s390x": "s390x" + }, + "aws_region": "us-east-2", + "bucket": "resf-empanadas", + "bucket_url": "https://resf-empanadas.s3.us-east-2.amazonaws.com" } # Importing the config from yaml diff --git a/iso/empanadas/empanadas/util/iso_utils.py b/iso/empanadas/empanadas/util/iso_utils.py index 89ac824..2caf1d5 100644 --- a/iso/empanadas/empanadas/util/iso_utils.py +++ b/iso/empanadas/empanadas/util/iso_utils.py @@ -21,6 +21,11 @@ from productmd.extra_files import ExtraFiles import productmd.treeinfo # End treeinfo +# lazy person's s3 parser +import xmltodict +import json +import urllib + from jinja2 import Environment, FileSystemLoader from empanadas.common import Color, _rootdir @@ -39,7 +44,9 @@ class IsoBuild: rlvars, config, major, + arch=None, rc: bool = False, + s3: bool = False, force_unpack: bool = False, isolation: str = 'auto', compose_dir_is_here: bool = False, @@ -66,9 +73,12 @@ class IsoBuild: self.mock_isolation = isolation self.iso_map = rlvars['iso_map'] self.release_candidate = rc + self.s3 = s3 self.force_unpack = force_unpack # Relevant major version items + self.arch = arch + self.arches = rlvars['allowed_arches'] self.release = rlvars['revision'] self.minor_version = rlvars['minor'] self.revision = rlvars['revision'] + "-" + rlvars['rclvl'] @@ -85,6 +95,11 @@ class IsoBuild: self.revision ) + # all bucket related info + self.s3_region = config['aws_region'] + self.s3_bucket = config['bucket'] + self.s3_bucket_url = config['bucket_url'] + # Templates file_loader = FileSystemLoader(f"{_rootdir}/templates") self.tmplenv = Environment(loader=file_loader) @@ -111,6 +126,11 @@ class IsoBuild: config['arch'] ) + self.lorax_work_dir = os.path.join( + self.compose_latest_dir, + "work/lorax" + ) + # This is temporary for now. if logger is None: self.log = logging.getLogger("iso") @@ -278,32 +298,66 @@ class IsoBuild: """ print() - def run_boot_sync(self, arch, force_sync): + def run_pull_lorax_artifacts(self): + """ + Pulls the required artifacts and unacps it to work/lorax/$arch + """ + self.log.info('Determining the latest pull...') + print() + + def _download_artifacts(self, force_unpack, arch=None): + """ + Download the requested artifact(s) + """ + print() + + def _unpack_artifacts(self, force_unpack, arch=None): + """ + Unpack the requested artifacts(s) + """ + print() + + def run_boot_sync(self): """ This unpacks into BaseOS/$arch/os, assuming there's no data actually there. There should be checks. - 1. Sync from work/$arch/lorax to work/$arch/dvd - 2. Sync from work/$arch/lorax to work/$arch/minimal - 3. Sync from work/$arch/lorax to BaseOS/$arch/os + 1. Sync from work/lorax/$arch to work/lorax/$arch/dvd + 2. Sync from work/lorax/$arch to work/lorax/$arch/minimal + 3. Sync from work/lorax/$arch to BaseOS/$arch/os 4. Modify (3) .treeinfo 5. Modify (1) .treeinfo, keep out boot.iso checksum 6. 
Create a .treeinfo for AppStream """ - self.sync_boot(arch, force_sync) + unpack_single_arch = False + arches_to_unpack = self.arches + if self.arch: + unpack_single_arch = True + arches_to_unpack = [self.arch] - def sync_boot(self, arch, force_sync): + self.sync_boot(force_unpack=self.force_unpack, arch=self.arch) + self.treeinfo_write(arch=self.arch) + + def sync_boot(self, force_unpack, arch): """ - Syncs whatever is in work/$arch/lorax to BaseOS/$arch/os + Syncs whatever """ self.log.info('Syncing lorax to dvd directory...') + # checks here, report that it already exists self.log.info('Syncing lorax to %s directory...' % self.iso_map['variant']) + # checks here, report that it already exists - def treeinfo_write(self): + def treeinfo_write(self, arch): """ Ensure treeinfo is written correctly """ - print() + self.log.info('Starting treeinfo work...') + + def _treeinfo_from_lorax(self, arch, force_unpack): + """ + Fixes lorax treeinfo + """ + self.log.info('Fixing up lorax treeinfo...') def discinfo_write(self): """ From db55677a881e2b01d73e954f21f54824688d77fc Mon Sep 17 00:00:00 2001 From: nazunalika Date: Sun, 19 Jun 2022 10:57:05 -0700 Subject: [PATCH 28/64] update lock and project --- iso/empanadas/empanadas/util/iso_utils.py | 6 +- iso/empanadas/poetry.lock | 118 +++++++++++++++++++++- iso/empanadas/pyproject.toml | 5 +- 3 files changed, 124 insertions(+), 5 deletions(-) diff --git a/iso/empanadas/empanadas/util/iso_utils.py b/iso/empanadas/empanadas/util/iso_utils.py index 2caf1d5..f38e7a0 100644 --- a/iso/empanadas/empanadas/util/iso_utils.py +++ b/iso/empanadas/empanadas/util/iso_utils.py @@ -22,9 +22,11 @@ import productmd.treeinfo # End treeinfo # lazy person's s3 parser -import xmltodict -import json import urllib +import json +import xmltodict +# if we can access s3 +import boto3 from jinja2 import Environment, FileSystemLoader diff --git a/iso/empanadas/poetry.lock b/iso/empanadas/poetry.lock index 5b679ac..4be7a6b 100644 --- a/iso/empanadas/poetry.lock +++ b/iso/empanadas/poetry.lock @@ -20,6 +20,38 @@ docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"] tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "cloudpickle"] tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "cloudpickle"] +[[package]] +name = "boto3" +version = "1.24.12" +description = "The AWS SDK for Python" +category = "main" +optional = false +python-versions = ">= 3.7" + +[package.dependencies] +botocore = ">=1.27.12,<1.28.0" +jmespath = ">=0.7.1,<2.0.0" +s3transfer = ">=0.6.0,<0.7.0" + +[package.extras] +crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] + +[[package]] +name = "botocore" +version = "1.27.12" +description = "Low-level, data-driven core of boto 3." 
+category = "main" +optional = false +python-versions = ">= 3.7" + +[package.dependencies] +jmespath = ">=0.7.1,<2.0.0" +python-dateutil = ">=2.1,<3.0.0" +urllib3 = ">=1.25.4,<1.27" + +[package.extras] +crt = ["awscrt (==0.13.8)"] + [[package]] name = "colorama" version = "0.4.5" @@ -74,6 +106,14 @@ MarkupSafe = ">=0.23" [package.extras] i18n = ["Babel (>=0.8)"] +[[package]] +name = "jmespath" +version = "1.0.1" +description = "JSON Matching Expressions" +category = "main" +optional = false +python-versions = ">=3.7" + [[package]] name = "markupsafe" version = "2.0.1" @@ -168,6 +208,17 @@ wcwidth = "*" checkqa-mypy = ["mypy (==v0.761)"] testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"] +[[package]] +name = "python-dateutil" +version = "2.8.2" +description = "Extensions to the standard Python datetime module" +category = "main" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" + +[package.dependencies] +six = ">=1.5" + [[package]] name = "pyyaml" version = "6.0" @@ -184,6 +235,20 @@ category = "main" optional = false python-versions = "*" +[[package]] +name = "s3transfer" +version = "0.6.0" +description = "An Amazon S3 Transfer Manager" +category = "main" +optional = false +python-versions = ">= 3.7" + +[package.dependencies] +botocore = ">=1.12.36,<2.0a.0" + +[package.extras] +crt = ["botocore[crt] (>=1.20.29,<2.0a.0)"] + [[package]] name = "six" version = "1.16.0" @@ -200,6 +265,19 @@ category = "dev" optional = false python-versions = ">=3.6" +[[package]] +name = "urllib3" +version = "1.26.9" +description = "HTTP library with thread-safe connection pooling, file post, and more." +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" + +[package.extras] +brotli = ["brotlicffi (>=0.8.0)", "brotli (>=1.0.9)", "brotlipy (>=0.6.0)"] +secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "ipaddress"] +socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] + [[package]] name = "wcwidth" version = "0.2.5" @@ -208,6 +286,14 @@ category = "dev" optional = false python-versions = "*" +[[package]] +name = "xmltodict" +version = "0.13.0" +description = "Makes working with XML feel like you are working with JSON" +category = "main" +optional = false +python-versions = ">=3.4" + [[package]] name = "zipp" version = "3.6.0" @@ -222,8 +308,8 @@ testing = ["pytest (>=4.6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytes [metadata] lock-version = "1.1" -python-versions = ">=3.7" -content-hash = "17bc9d78b4ea2e474754a1d321fec745361870a5315784e2a2f5b54a564bc5d8" +python-versions = ">=3.7,<4" +content-hash = "93600aadcd1d588e33fc16d0fd7f505ee10484722c85bdadb612f57b10e9439b" [metadata.files] atomicwrites = [ @@ -234,6 +320,14 @@ attrs = [ {file = "attrs-21.4.0-py2.py3-none-any.whl", hash = "sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4"}, {file = "attrs-21.4.0.tar.gz", hash = "sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd"}, ] +boto3 = [ + {file = "boto3-1.24.12-py3-none-any.whl", hash = "sha256:0b9757575b8003928defc5fb6e816936fa1bdb1384d0edec6622bb9fb104e96c"}, + {file = "boto3-1.24.12.tar.gz", hash = "sha256:f39b91a4c3614db8e44912ee82426fb4b16d5df2cd66883f3aff6f76d7f5d310"}, +] +botocore = [ + {file = "botocore-1.27.12-py3-none-any.whl", hash = "sha256:b8ac156e55267da6e728ea0b806bfcd97adf882801cffe7849c4b88ce4780326"}, + {file = "botocore-1.27.12.tar.gz", hash = 
"sha256:17d3ec9f684d21e06b64d9cb224934557bcd95031e2ecb551bf16271e8722fec"}, +] colorama = [ {file = "colorama-0.4.5-py2.py3-none-any.whl", hash = "sha256:854bf444933e37f5824ae7bfc1e98d5bce2ebe4160d46b5edf346a89358e99da"}, {file = "colorama-0.4.5.tar.gz", hash = "sha256:e6c6b4334fc50988a639d9b98aa429a0b57da6e17b9a44f0451f930b6967b7a4"}, @@ -250,6 +344,10 @@ jinja2 = [ {file = "Jinja2-2.11.3-py2.py3-none-any.whl", hash = "sha256:03e47ad063331dd6a3f04a43eddca8a966a26ba0c5b7207a9a9e4e08f1b29419"}, {file = "Jinja2-2.11.3.tar.gz", hash = "sha256:a6d58433de0ae800347cab1fa3043cebbabe8baa9d29e668f1c768cb87a333c6"}, ] +jmespath = [ + {file = "jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980"}, + {file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"}, +] markupsafe = [ {file = "MarkupSafe-2.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d8446c54dc28c01e5a2dbac5a25f071f6653e6e40f3a8818e8b45d790fe6ef53"}, {file = "MarkupSafe-2.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:36bc903cbb393720fad60fc28c10de6acf10dc6cc883f3e24ee4012371399a38"}, @@ -349,6 +447,10 @@ pytest = [ {file = "pytest-5.4.3-py3-none-any.whl", hash = "sha256:5c0db86b698e8f170ba4582a492248919255fcd4c79b1ee64ace34301fb589a1"}, {file = "pytest-5.4.3.tar.gz", hash = "sha256:7979331bfcba207414f5e1263b5a0f8f521d0f457318836a7355531ed1a4c7d8"}, ] +python-dateutil = [ + {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, + {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, +] pyyaml = [ {file = "PyYAML-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53"}, {file = "PyYAML-6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c"}, @@ -387,6 +489,10 @@ pyyaml = [ rpm-py-installer = [ {file = "rpm-py-installer-1.1.0.tar.gz", hash = "sha256:66e5f4f9247752ed386345642683103afaee50fb16928878a204bc12504b9bbe"}, ] +s3transfer = [ + {file = "s3transfer-0.6.0-py3-none-any.whl", hash = "sha256:06176b74f3a15f61f1b4f25a1fc29a4429040b7647133a463da8fa5bd28d5ecd"}, + {file = "s3transfer-0.6.0.tar.gz", hash = "sha256:2ed07d3866f523cc561bf4a00fc5535827981b117dd7876f036b0c1aca42c947"}, +] six = [ {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, @@ -395,10 +501,18 @@ typing-extensions = [ {file = "typing_extensions-4.1.1-py3-none-any.whl", hash = "sha256:21c85e0fe4b9a155d0799430b0ad741cdce7e359660ccbd8b530613e8df88ce2"}, {file = "typing_extensions-4.1.1.tar.gz", hash = "sha256:1a9462dcc3347a79b1f1c0271fbe79e844580bb598bafa1ed208b94da3cdcd42"}, ] +urllib3 = [ + {file = "urllib3-1.26.9-py2.py3-none-any.whl", hash = "sha256:44ece4d53fb1706f667c9bd1c648f5469a2ec925fcf3a776667042d645472c14"}, + {file = "urllib3-1.26.9.tar.gz", hash = "sha256:aabaf16477806a5e1dd19aa41f8c2b7950dd3c746362d7e3223dbe6de6ac448e"}, +] wcwidth = [ {file = "wcwidth-0.2.5-py2.py3-none-any.whl", hash = "sha256:beb4802a9cebb9144e99086eff703a642a13d6a0052920003a230f3294bbe784"}, {file = "wcwidth-0.2.5.tar.gz", hash = 
"sha256:c4d647b99872929fdb7bdcaa4fbe7f01413ed3d98077df798530e5b04f116c83"}, ] +xmltodict = [ + {file = "xmltodict-0.13.0-py2.py3-none-any.whl", hash = "sha256:aa89e8fd76320154a40d19a0df04a4695fb9dc5ba977cbb68ab3e4eb225e7852"}, + {file = "xmltodict-0.13.0.tar.gz", hash = "sha256:341595a488e3e01a85a9d8911d8912fd922ede5fecc4dce437eb4b6c8d037e56"}, +] zipp = [ {file = "zipp-3.6.0-py3-none-any.whl", hash = "sha256:9fe5ea21568a0a70e50f273397638d39b03353731e6cbbb3fd8502a33fec40bc"}, {file = "zipp-3.6.0.tar.gz", hash = "sha256:71c644c5369f4a6e07636f0aa966270449561fcea2e3d6747b8d23efaa9d7832"}, diff --git a/iso/empanadas/pyproject.toml b/iso/empanadas/pyproject.toml index 22dfe42..43f77f5 100644 --- a/iso/empanadas/pyproject.toml +++ b/iso/empanadas/pyproject.toml @@ -5,12 +5,15 @@ description = "hand crafted ISOs with love and spice" authors = ["Louis Abel ", "Neil Hanlon "] [tool.poetry.dependencies] -python = ">=3.7" +python = ">=3.7,<4" rpm-py-installer = "~1.1.0" +MarkupSafe = "<=2.0.1" PyYAML = "~6.0" Jinja2 = "~2" productmd = "~1.33" importlib-resources = "^5.8.0" +boto3 = "^1.24.12" +xmltodict = "^0.13.0" [tool.poetry.dev-dependencies] pytest = "~5" From 72f98dcdb6137ad8168d55efbc201ae3d528b227 Mon Sep 17 00:00:00 2001 From: nazunalika Date: Sun, 19 Jun 2022 22:52:20 -0700 Subject: [PATCH 29/64] Tarball pull and extract --- .../empanadas/scripts/pull_unpack_artifact.py | 35 +++ iso/empanadas/empanadas/util/dnf_utils.py | 10 +- iso/empanadas/empanadas/util/iso_utils.py | 241 ++++++++++++++++-- iso/empanadas/poetry.lock | 63 ++++- iso/empanadas/pyproject.toml | 2 + 5 files changed, 331 insertions(+), 20 deletions(-) create mode 100755 iso/empanadas/empanadas/scripts/pull_unpack_artifact.py diff --git a/iso/empanadas/empanadas/scripts/pull_unpack_artifact.py b/iso/empanadas/empanadas/scripts/pull_unpack_artifact.py new file mode 100755 index 0000000..003f604 --- /dev/null +++ b/iso/empanadas/empanadas/scripts/pull_unpack_artifact.py @@ -0,0 +1,35 @@ +# builds ISO's + +import argparse + +from empanadas.common import * +from empanadas.util import Checks +from empanadas.util import IsoBuild + +parser = argparse.ArgumentParser(description="ISO Artifact Builder") + +parser.add_argument('--release', type=str, help="Major Release Version", required=True) +parser.add_argument('--s3', action='store_true', help="Release Candidate") +parser.add_argument('--arch', type=str, help="Architecture") +parser.add_argument('--local-compose', action='store_true', help="Compose Directory is Here") +parser.add_argument('--force-unpack', action='store_true', help="Force an unpack") +parser.add_argument('--force-download', action='store_true', help="Force a download") +parser.add_argument('--logger', type=str) +results = parser.parse_args() +rlvars = rldict[results.release] +major = rlvars['major'] + +a = IsoBuild( + rlvars, + config, + major=major, + s3=results.s3, + arch=results.arch, + force_unpack=results.force_unpack, + force_download=results.force_download, + compose_dir_is_here=results.local_compose, + logger=results.logger, +) + +def run(): + a.run_pull_lorax_artifacts() diff --git a/iso/empanadas/empanadas/util/dnf_utils.py b/iso/empanadas/empanadas/util/dnf_utils.py index 6cb8ba4..079b45f 100644 --- a/iso/empanadas/empanadas/util/dnf_utils.py +++ b/iso/empanadas/empanadas/util/dnf_utils.py @@ -452,7 +452,10 @@ class RepoSync: join_all_pods = ' '.join(entry_name_list) time.sleep(3) - self.log.info('Syncing %s ...' 
% r) + self.log.info( + '[' + Color.BOLD + Color.GREEN + 'INFO' + Color.END + '] ' + + 'Syncing ' + r + ' ...' + ) pod_watcher = '{} wait {}'.format( cmd, join_all_pods @@ -500,7 +503,10 @@ class RepoSync: ) entry_name_list.clear() - self.log.info('Syncing %s completed' % r) + self.log.info( + '[' + Color.BOLD + Color.GREEN + 'INFO' + Color.END + '] ' + + 'Syncing ' + r + ' completed' + ) if len(bad_exit_list) > 0: self.log.error( diff --git a/iso/empanadas/empanadas/util/iso_utils.py b/iso/empanadas/empanadas/util/iso_utils.py index f38e7a0..b389fc2 100644 --- a/iso/empanadas/empanadas/util/iso_utils.py +++ b/iso/empanadas/empanadas/util/iso_utils.py @@ -11,7 +11,14 @@ import os.path import subprocess import shlex import time -import re +import tarfile + +# lazy person's s3 parser +import requests +import json +import xmltodict +# if we can access s3 +import boto3 # This is for treeinfo from configparser import ConfigParser @@ -21,13 +28,6 @@ from productmd.extra_files import ExtraFiles import productmd.treeinfo # End treeinfo -# lazy person's s3 parser -import urllib -import json -import xmltodict -# if we can access s3 -import boto3 - from jinja2 import Environment, FileSystemLoader from empanadas.common import Color, _rootdir @@ -49,6 +49,7 @@ class IsoBuild: arch=None, rc: bool = False, s3: bool = False, + force_download: bool = False, force_unpack: bool = False, isolation: str = 'auto', compose_dir_is_here: bool = False, @@ -77,6 +78,7 @@ class IsoBuild: self.release_candidate = rc self.s3 = s3 self.force_unpack = force_unpack + self.force_download = force_download # Relevant major version items self.arch = arch @@ -102,6 +104,9 @@ class IsoBuild: self.s3_bucket = config['bucket'] self.s3_bucket_url = config['bucket_url'] + if s3: + self.s3 = boto3.client('s3') + # Templates file_loader = FileSystemLoader(f"{_rootdir}/templates") self.tmplenv = Environment(loader=file_loader) @@ -302,21 +307,223 @@ class IsoBuild: def run_pull_lorax_artifacts(self): """ - Pulls the required artifacts and unacps it to work/lorax/$arch + Pulls the required artifacts and unpacks it to work/lorax/$arch """ - self.log.info('Determining the latest pull...') - print() + # Determine if we're only managing one architecture out of all of them. + # It does not hurt to do everything at once. But the option is there. + unpack_single_arch = False + arches_to_unpack = self.arches + if self.arch: + unpack_single_arch = True + arches_to_unpack = [self.arch] - def _download_artifacts(self, force_unpack, arch=None): - """ - Download the requested artifact(s) - """ - print() + self.log.info( + '[' + Color.BOLD + Color.GREEN + 'INFO' + Color.END + '] ' + + 'Determining the latest pulls...' 
+ ) + if self.s3: + latest_artifacts = self._s3_determine_latest() + else: + latest_artifacts = self._reqs_determine_latest() - def _unpack_artifacts(self, force_unpack, arch=None): + self.log.info( + '[' + Color.BOLD + Color.GREEN + 'INFO' + Color.END + '] ' + + 'Downloading requested artifact(s)' + ) + for arch in arches_to_unpack: + lorax_arch_dir = os.path.join( + self.lorax_work_dir, + arch + ) + + source_path = latest_artifacts[arch] + + full_drop = '{}/lorax-{}-{}.tar.gz'.format( + lorax_arch_dir, + self.major_version, + arch + ) + + if not os.path.exists(lorax_arch_dir): + os.makedirs(lorax_arch_dir, exist_ok=True) + + self.log.info( + 'Downloading artifact for ' + Color.BOLD + arch + Color.END + ) + if self.s3: + self._s3_download_artifacts( + self.force_download, + source_path, + full_drop + ) + else: + self._reqs_download_artifacts( + self.force_download, + source_path, + full_drop + ) + self.log.info( + '[' + Color.BOLD + Color.GREEN + 'INFO' + Color.END + '] ' + + 'Download phase completed' + ) + self.log.info( + '[' + Color.BOLD + Color.GREEN + 'INFO' + Color.END + '] ' + + 'Beginning unpack phase...' + ) + + for arch in arches_to_unpack: + tarname = 'lorax-{}-{}.tar.gz'.format( + self.major_version, + arch + ) + + tarball = os.path.join( + self.lorax_work_dir, + arch, + tarname + ) + + if not os.path.exists(tarball): + self.log.error( + '[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' + + 'Artifact does not exist: ' + tarball + ) + continue + + self._unpack_artifacts(self.force_unpack, arch, tarball) + + self.log.info( + '[' + Color.BOLD + Color.GREEN + 'INFO' + Color.END + '] ' + + 'Unpack phase completed' + ) + + def _s3_determine_latest(self): + """ + Using native s3, determine the latest artifacts and return a list + """ + temp = [] + data = {} + try: + self.s3.list_objects(Bucket=self.s3_bucket)['Contents'] + except: + self.log.error( + '[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' + + 'Cannot access s3 bucket.' 
+ ) + raise SystemExit() + + for y in self.s3.list_objects(Bucket=self.s3_bucket)['Contents']: + if 'tar.gz' in y['Key']: + temp.append(y['Key']) + + for arch in self.arches: + temps = [] + for y in temp: + if arch in y: + temps.append(y) + temps.sort(reverse=True) + data[arch] = temps[0] + + return data + + def _s3_download_artifacts(self, force_download, source, dest): + """ + Download the requested artifact(s) via s3 + """ + if os.path.exists(dest): + if not force_download: + self.log.warn( + '[' + Color.BOLD + Color.YELLOW + 'WARN' + Color.END + '] ' + + 'Artifact at ' + dest + ' already exists' + ) + return + + self.log.info('Downloading to: %s' % dest) + try: + self.s3.download_file( + Bucket=self.s3_bucket, + Key=source, + Filename=dest + ) + except: + self.log.error('There was an issue downloading from %s' % self.s3_bucket) + + def _reqs_determine_latest(self): + """ + Using requests, determine the latest artifacts and return a list + """ + temp = [] + data = {} + + try: + bucket_data = requests.get(self.s3_bucket_url) + except requests.exceptions.RequestException as e: + self.log.error('The s3 bucket http endpoint is inaccessible') + raise SystemExit(e) + + resp = xmltodict.parse(bucket_data.content) + + for y in resp['ListBucketResult']['Contents']: + if 'tar.gz' in y['Key']: + temp.append(y['Key']) + + for arch in self.arches: + temps = [] + for y in temp: + if arch in y: + temps.append(y) + temps.sort(reverse=True) + data[arch] = temps[0] + + return data + + def _reqs_download_artifacts(self, force_download, source, dest): + """ + Download the requested artifact(s) via requests only + """ + if os.path.exists(dest): + if not force_download: + self.log.warn( + '[' + Color.BOLD + Color.YELLOW + 'WARN' + Color.END + '] ' + + 'Artifact at ' + dest + ' already exists' + ) + return + unurl = self.s3_bucket_url + '/' + source + + self.log.info('Downloading to: %s' % dest) + try: + with requests.get(unurl, allow_redirects=True) as r: + with open(dest, 'wb') as f: + f.write(r.content) + f.close() + r.close() + except requests.exceptions.RequestException as e: + self.log.error('There was a problem downloading the artifact') + raise SystemExit(e) + + def _unpack_artifacts(self, force_unpack, arch, tarball): """ Unpack the requested artifacts(s) """ + unpack_dir = os.path.join(self.lorax_work_dir, arch) + if not force_unpack: + file_check = os.path.join(unpack_dir, 'lorax/.treeinfo') + if os.path.exists(file_check): + self.log.warn( + '[' + Color.BOLD + Color.YELLOW + 'WARN' + Color.END + '] ' + + 'Artifact (' + arch + ') already unpacked' + ) + return + + self.log.info('Unpacking %s' % tarball) + with tarfile.open(tarball) as t: + t.extractall(unpack_dir) + t.close() + + def _copy_lorax_to_variant(self, force_unpack, arch): + """ + Copy to variants for easy access of mkiso and copying to compose dirs + """ print() def run_boot_sync(self): diff --git a/iso/empanadas/poetry.lock b/iso/empanadas/poetry.lock index 4be7a6b..71eaf95 100644 --- a/iso/empanadas/poetry.lock +++ b/iso/empanadas/poetry.lock @@ -52,6 +52,25 @@ urllib3 = ">=1.25.4,<1.27" [package.extras] crt = ["awscrt (==0.13.8)"] +[[package]] +name = "certifi" +version = "2022.6.15" +description = "Python package for providing Mozilla's CA Bundle." +category = "main" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "charset-normalizer" +version = "2.0.12" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
+category = "main" +optional = false +python-versions = ">=3.5.0" + +[package.extras] +unicode_backport = ["unicodedata2"] + [[package]] name = "colorama" version = "0.4.5" @@ -60,6 +79,14 @@ category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +[[package]] +name = "idna" +version = "3.3" +description = "Internationalized Domain Names in Applications (IDNA)" +category = "main" +optional = false +python-versions = ">=3.5" + [[package]] name = "importlib-metadata" version = "4.8.3" @@ -227,6 +254,24 @@ category = "main" optional = false python-versions = ">=3.6" +[[package]] +name = "requests" +version = "2.28.0" +description = "Python HTTP for Humans." +category = "main" +optional = false +python-versions = ">=3.7, <4" + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2.0.0,<2.1.0" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<1.27" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use_chardet_on_py3 = ["chardet (>=3.0.2,<5)"] + [[package]] name = "rpm-py-installer" version = "1.1.0" @@ -309,7 +354,7 @@ testing = ["pytest (>=4.6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytes [metadata] lock-version = "1.1" python-versions = ">=3.7,<4" -content-hash = "93600aadcd1d588e33fc16d0fd7f505ee10484722c85bdadb612f57b10e9439b" +content-hash = "d011f4622c248f6aa107fd679616eaa19a897147398c6f52dd0dea0ab1d74486" [metadata.files] atomicwrites = [ @@ -328,10 +373,22 @@ botocore = [ {file = "botocore-1.27.12-py3-none-any.whl", hash = "sha256:b8ac156e55267da6e728ea0b806bfcd97adf882801cffe7849c4b88ce4780326"}, {file = "botocore-1.27.12.tar.gz", hash = "sha256:17d3ec9f684d21e06b64d9cb224934557bcd95031e2ecb551bf16271e8722fec"}, ] +certifi = [ + {file = "certifi-2022.6.15-py3-none-any.whl", hash = "sha256:fe86415d55e84719d75f8b69414f6438ac3547d2078ab91b67e779ef69378412"}, + {file = "certifi-2022.6.15.tar.gz", hash = "sha256:84c85a9078b11105f04f3036a9482ae10e4621616db313fe045dd24743a0820d"}, +] +charset-normalizer = [ + {file = "charset-normalizer-2.0.12.tar.gz", hash = "sha256:2857e29ff0d34db842cd7ca3230549d1a697f96ee6d3fb071cfa6c7393832597"}, + {file = "charset_normalizer-2.0.12-py3-none-any.whl", hash = "sha256:6881edbebdb17b39b4eaaa821b438bf6eddffb4468cf344f09f89def34a8b1df"}, +] colorama = [ {file = "colorama-0.4.5-py2.py3-none-any.whl", hash = "sha256:854bf444933e37f5824ae7bfc1e98d5bce2ebe4160d46b5edf346a89358e99da"}, {file = "colorama-0.4.5.tar.gz", hash = "sha256:e6c6b4334fc50988a639d9b98aa429a0b57da6e17b9a44f0451f930b6967b7a4"}, ] +idna = [ + {file = "idna-3.3-py3-none-any.whl", hash = "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff"}, + {file = "idna-3.3.tar.gz", hash = "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"}, +] importlib-metadata = [ {file = "importlib_metadata-4.8.3-py3-none-any.whl", hash = "sha256:65a9576a5b2d58ca44d133c42a241905cc45e34d2c06fd5ba2bafa221e5d7b5e"}, {file = "importlib_metadata-4.8.3.tar.gz", hash = "sha256:766abffff765960fcc18003801f7044eb6755ffae4521c8e8ce8e83b9c9b0668"}, @@ -486,6 +543,10 @@ pyyaml = [ {file = "PyYAML-6.0-cp39-cp39-win_amd64.whl", hash = "sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c"}, {file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"}, ] +requests = [ + {file = "requests-2.28.0-py3-none-any.whl", hash = "sha256:bc7861137fbce630f17b03d3ad02ad0bf978c844f3536d0edda6499dafce2b6f"}, + {file = "requests-2.28.0.tar.gz", hash = 
"sha256:d568723a7ebd25875d8d1eaf5dfa068cd2fc8194b2e483d7b1f7c81918dbec6b"}, +] rpm-py-installer = [ {file = "rpm-py-installer-1.1.0.tar.gz", hash = "sha256:66e5f4f9247752ed386345642683103afaee50fb16928878a204bc12504b9bbe"}, ] diff --git a/iso/empanadas/pyproject.toml b/iso/empanadas/pyproject.toml index 43f77f5..5f8649e 100644 --- a/iso/empanadas/pyproject.toml +++ b/iso/empanadas/pyproject.toml @@ -14,6 +14,7 @@ productmd = "~1.33" importlib-resources = "^5.8.0" boto3 = "^1.24.12" xmltodict = "^0.13.0" +requests = "^2.28.0" [tool.poetry.dev-dependencies] pytest = "~5" @@ -23,6 +24,7 @@ sync_from_peridot = "empanadas.scripts.sync_from_peridot:run" sync_from_peridot_test = "empanadas.scripts.sync_from_peridot_test:run" sync_sig = "empanadas.scripts.sync_sig:run" build-iso = "empanadas.scripts.build_iso:run" +pull-unpack-artifact = "empanadas.scripts.pull_unpack_artifact:run" [build-system] requires = ["poetry-core>=1.0.0"] From 22f920042eec9a18fda061e7a0d4e6251b2dc012 Mon Sep 17 00:00:00 2001 From: nazunalika Date: Mon, 20 Jun 2022 01:34:10 -0700 Subject: [PATCH 30/64] add next phase before sign off --- iso/empanadas/empanadas/configs/el9-beta.yaml | 3 +- iso/empanadas/empanadas/configs/el9.yaml | 3 +- iso/empanadas/empanadas/configs/el9lh.yaml | 3 +- ...unpack_artifact.py => pull_unpack_tree.py} | 2 + iso/empanadas/empanadas/util/iso_utils.py | 64 +++++++++++++++++-- iso/empanadas/pyproject.toml | 2 +- 6 files changed, 63 insertions(+), 14 deletions(-) rename iso/empanadas/empanadas/scripts/{pull_unpack_artifact.py => pull_unpack_tree.py} (92%) diff --git a/iso/empanadas/empanadas/configs/el9-beta.yaml b/iso/empanadas/empanadas/configs/el9-beta.yaml index 0303f67..12fe232 100644 --- a/iso/empanadas/empanadas/configs/el9-beta.yaml +++ b/iso/empanadas/empanadas/configs/el9-beta.yaml @@ -54,9 +54,8 @@ ppc64le: '' s390x: '' images: - - dvd1 + - dvd - minimal - - boot repos: - 'BaseOS' - 'AppStream' diff --git a/iso/empanadas/empanadas/configs/el9.yaml b/iso/empanadas/empanadas/configs/el9.yaml index 281d6f7..34eef95 100644 --- a/iso/empanadas/empanadas/configs/el9.yaml +++ b/iso/empanadas/empanadas/configs/el9.yaml @@ -54,9 +54,8 @@ ppc64le: '' s390x: '' images: - - dvd1 + - dvd - minimal - - boot repos: - 'BaseOS' - 'AppStream' diff --git a/iso/empanadas/empanadas/configs/el9lh.yaml b/iso/empanadas/empanadas/configs/el9lh.yaml index 853d96b..fa102ca 100644 --- a/iso/empanadas/empanadas/configs/el9lh.yaml +++ b/iso/empanadas/empanadas/configs/el9lh.yaml @@ -54,9 +54,8 @@ ppc64le: '' s390x: '' images: - - dvd1 + - dvd - minimal - - boot repos: - 'BaseOS' - 'AppStream' diff --git a/iso/empanadas/empanadas/scripts/pull_unpack_artifact.py b/iso/empanadas/empanadas/scripts/pull_unpack_tree.py similarity index 92% rename from iso/empanadas/empanadas/scripts/pull_unpack_artifact.py rename to iso/empanadas/empanadas/scripts/pull_unpack_tree.py index 003f604..77f8100 100755 --- a/iso/empanadas/empanadas/scripts/pull_unpack_artifact.py +++ b/iso/empanadas/empanadas/scripts/pull_unpack_tree.py @@ -10,6 +10,7 @@ parser = argparse.ArgumentParser(description="ISO Artifact Builder") parser.add_argument('--release', type=str, help="Major Release Version", required=True) parser.add_argument('--s3', action='store_true', help="Release Candidate") +parser.add_argument('--rc', action='store_true', help="Release Candidate") parser.add_argument('--arch', type=str, help="Architecture") parser.add_argument('--local-compose', action='store_true', help="Compose Directory is Here") parser.add_argument('--force-unpack', 
action='store_true', help="Force an unpack") @@ -23,6 +24,7 @@ a = IsoBuild( rlvars, config, major=major, + rc=results.rc, s3=results.s3, arch=results.arch, force_unpack=results.force_unpack, diff --git a/iso/empanadas/empanadas/util/iso_utils.py b/iso/empanadas/empanadas/util/iso_utils.py index b389fc2..3d70299 100644 --- a/iso/empanadas/empanadas/util/iso_utils.py +++ b/iso/empanadas/empanadas/util/iso_utils.py @@ -12,6 +12,7 @@ import subprocess import shlex import time import tarfile +import shutil # lazy person's s3 parser import requests @@ -396,10 +397,32 @@ class IsoBuild: '[' + Color.BOLD + Color.GREEN + 'INFO' + Color.END + '] ' + 'Unpack phase completed' ) + self.log.info( + '[' + Color.BOLD + Color.GREEN + 'INFO' + Color.END + '] ' + + 'Beginning image variant phase' + ) + + for arch in arches_to_unpack: + self.log.info( + 'Copying base lorax for ' + Color.BOLD + arch + Color.END + ) + for variant in self.iso_map['images']: + self._copy_lorax_to_variant(self.force_unpack, arch, variant) + + self.log.info( + '[' + Color.BOLD + Color.GREEN + 'INFO' + Color.END + '] ' + + 'Image variant phase completed' + ) + + self.log.info( + '[' + Color.BOLD + Color.GREEN + 'INFO' + Color.END + '] ' + + 'Beginning treeinfo phase' + ) + def _s3_determine_latest(self): """ - Using native s3, determine the latest artifacts and return a list + Using native s3, determine the latest artifacts and return a dict """ temp = [] data = {} @@ -520,11 +543,40 @@ class IsoBuild: t.extractall(unpack_dir) t.close() - def _copy_lorax_to_variant(self, force_unpack, arch): + def _copy_lorax_to_variant(self, force_unpack, arch, image): """ Copy to variants for easy access of mkiso and copying to compose dirs """ - print() + src_to_image = os.path.join( + self.lorax_work_dir, + arch, + 'lorax' + ) + + if not os.path.exists(os.path.join(src_to_image, '.treeinfo')): + self.log.error( + '[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' + + 'Lorax base image does not exist' + ) + return + + path_to_image = os.path.join( + self.lorax_work_dir, + arch, + image + ) + + if not force_unpack: + file_check = os.path.join(path_to_image, '.treeinfo') + if os.path.exists(file_check): + self.log.warn( + '[' + Color.BOLD + Color.YELLOW + 'WARN' + Color.END + '] ' + + 'Lorax image for ' + image + ' already exists' + ) + return + + self.log.info('Copying base lorax to %s directory...' % image) + shutil.copytree(src_to_image, path_to_image) def run_boot_sync(self): """ @@ -547,13 +599,11 @@ class IsoBuild: self.sync_boot(force_unpack=self.force_unpack, arch=self.arch) self.treeinfo_write(arch=self.arch) - def sync_boot(self, force_unpack, arch): + def _sync_boot(self, force_unpack, arch, variant): """ Syncs whatever """ - self.log.info('Syncing lorax to dvd directory...') - # checks here, report that it already exists - self.log.info('Syncing lorax to %s directory...' % self.iso_map['variant']) + self.log.info('Copying lorax to %s directory...' 
% variant) # checks here, report that it already exists def treeinfo_write(self, arch): diff --git a/iso/empanadas/pyproject.toml b/iso/empanadas/pyproject.toml index 5f8649e..afe2115 100644 --- a/iso/empanadas/pyproject.toml +++ b/iso/empanadas/pyproject.toml @@ -24,7 +24,7 @@ sync_from_peridot = "empanadas.scripts.sync_from_peridot:run" sync_from_peridot_test = "empanadas.scripts.sync_from_peridot_test:run" sync_sig = "empanadas.scripts.sync_sig:run" build-iso = "empanadas.scripts.build_iso:run" -pull-unpack-artifact = "empanadas.scripts.pull_unpack_artifact:run" +pull-unpack-tree = "empanadas.scripts.pull_unpack_tree:run" [build-system] requires = ["poetry-core>=1.0.0"] From 76dba70458797c3ca4c2ea4b11574f824006cde8 Mon Sep 17 00:00:00 2001 From: Neil Hanlon Date: Fri, 17 Jun 2022 16:01:13 -0400 Subject: [PATCH 31/64] Build a container to run the builds in * Build this into a container to deploy * Add lorax packages in * Build container from public source * Switch to different c9s mirror due to errors * add script for building --- iso/empanadas/Containerfile | 66 ++++++++++++++++++++++++++++++++ iso/empanadas/build-container.sh | 14 +++++++ iso/empanadas/images/epelkey.gpg | 29 ++++++++++++++ iso/empanadas/images/get_arch | 12 ++++++ iso/empanadas/images/rhel.repo | 37 ++++++++++++++++++ iso/empanadas/images/yum-sudo | 2 + 6 files changed, 160 insertions(+) create mode 100644 iso/empanadas/Containerfile create mode 100644 iso/empanadas/build-container.sh create mode 100644 iso/empanadas/images/epelkey.gpg create mode 100755 iso/empanadas/images/get_arch create mode 100644 iso/empanadas/images/rhel.repo create mode 100644 iso/empanadas/images/yum-sudo diff --git a/iso/empanadas/Containerfile b/iso/empanadas/Containerfile new file mode 100644 index 0000000..ff9be57 --- /dev/null +++ b/iso/empanadas/Containerfile @@ -0,0 +1,66 @@ +FROM quay.io/centos/centos:stream9 + +ADD images/get_arch /get_arch + +ENV TINI_VERSION v0.19.0 +RUN curl -o /tini -L "https://github.com/krallin/tini/releases/download/${TINI_VERSION}/tini-$(/get_arch)" +RUN chmod +x /tini + +RUN rm -rf /etc/yum.repos.d/*.repo +ADD images/epelkey.gpg /etc/pki/rpm-gpg/RPM-GPG-KEY-EPEL-9 +ADD images/rhel.repo /etc/yum.repos.d/rhel.repo + +RUN dnf update -y && dnf install -y \ + bash \ + bzip2 \ + cpio \ + diffutils \ + findutils \ + gawk \ + gcc \ + gcc-c++ \ + git \ + grep \ + gzip \ + info \ + make \ + patch \ + python3 \ + redhat-rpm-config \ + rpm-build \ + scl-utils-build \ + sed \ + shadow-utils \ + tar \ + unzip \ + util-linux \ + which \ + xz \ + dnf-plugins-core \ + createrepo_c \ + rpm-sign \ + sudo \ + mock \ + python-pip \ + genisoimage \ + isomd5sum \ + lorax \ + lorax-templates-rhel \ + lorax-templates-generic + +RUN sed -i '/libreport-rhel-anaconda-bugzilla/ s/^/#/' /usr/share/lorax/templates.d/80-rhel/runtime-install.tmpl + +RUN ssh-keygen -t rsa -q -f "$HOME/.ssh/id_rsa" -N "" +RUN dnf clean all +RUN rm -rf /etc/yum.repos.d/*.repo +RUN useradd -o -d /var/peridot -u 1002 peridotbuilder && usermod -a -G mock peridotbuilder +RUN chown peridotbuilder:mock /etc/yum.conf && chown -R peridotbuilder:mock /etc/dnf && chown -R peridotbuilder:mock /etc/rpm && chown -R peridotbuilder:mock /etc/yum.repos.d + +RUN pip install 'git+https://git.rockylinux.org/release-engineering/public/toolkit.git@feature/iso-kube#egg=empanadas&subdirectory=iso/empanadas' +# COPY . 
/app/ +# RUN pip install /app/ + +ENV USER=1002 +USER 1002 + +ENTRYPOINT ["/tini", "--"] diff --git a/iso/empanadas/build-container.sh b/iso/empanadas/build-container.sh new file mode 100644 index 0000000..5b0a53b --- /dev/null +++ b/iso/empanadas/build-container.sh @@ -0,0 +1,14 @@ +#!/bin/bash + +MANIFEST_NAME="peridotempanadas" +BUILD_PATH="." +REGISTRY="docker.io" +USER="neilresf" +IMAGE_TAG="v0.1.0" +IMAGE_NAME="peridotempanadas" + +podman buildx build \ + --platform linux/amd64,linux/arm64,linux/s390x,linux/ppc64le \ + --tag "${REGISTRY}/${USER}/${IMAGE_NAME}:${IMAGE_TAG}" \ + $PWD + diff --git a/iso/empanadas/images/epelkey.gpg b/iso/empanadas/images/epelkey.gpg new file mode 100644 index 0000000..0cc05ec --- /dev/null +++ b/iso/empanadas/images/epelkey.gpg @@ -0,0 +1,29 @@ +-----BEGIN PGP PUBLIC KEY BLOCK----- + +mQINBGE3mOsBEACsU+XwJWDJVkItBaugXhXIIkb9oe+7aadELuVo0kBmc3HXt/Yp +CJW9hHEiGZ6z2jwgPqyJjZhCvcAWvgzKcvqE+9i0NItV1rzfxrBe2BtUtZmVcuE6 +2b+SPfxQ2Hr8llaawRjt8BCFX/ZzM4/1Qk+EzlfTcEcpkMf6wdO7kD6ulBk/tbsW +DHX2lNcxszTf+XP9HXHWJlA2xBfP+Dk4gl4DnO2Y1xR0OSywE/QtvEbN5cY94ieu +n7CBy29AleMhmbnx9pw3NyxcFIAsEZHJoU4ZW9ulAJ/ogttSyAWeacW7eJGW31/Z +39cS+I4KXJgeGRI20RmpqfH0tuT+X5Da59YpjYxkbhSK3HYBVnNPhoJFUc2j5iKy +XLgkapu1xRnEJhw05kr4LCbud0NTvfecqSqa+59kuVc+zWmfTnGTYc0PXZ6Oa3rK +44UOmE6eAT5zd/ToleDO0VesN+EO7CXfRsm7HWGpABF5wNK3vIEF2uRr2VJMvgqS +9eNwhJyOzoca4xFSwCkc6dACGGkV+CqhufdFBhmcAsUotSxe3zmrBjqA0B/nxIvH +DVgOAMnVCe+Lmv8T0mFgqZSJdIUdKjnOLu/GRFhjDKIak4jeMBMTYpVnU+HhMHLq +uDiZkNEvEEGhBQmZuI8J55F/a6UURnxUwT3piyi3Pmr2IFD7ahBxPzOBCQARAQAB +tCdGZWRvcmEgKGVwZWw5KSA8ZXBlbEBmZWRvcmFwcm9qZWN0Lm9yZz6JAk4EEwEI +ADgWIQT/itE0RZcQbs6BO5GKOHK/MihGfAUCYTeY6wIbDwULCQgHAgYVCgkICwIE +FgIDAQIeAQIXgAAKCRCKOHK/MihGfFX/EACBPWv20+ttYu1A5WvtHJPzwbj0U4yF +3zTQpBglQ2UfkRpYdipTlT3Ih6j5h2VmgRPtINCc/ZE28adrWpBoeFIS2YAKOCLC +nZYtHl2nCoLq1U7FSttUGsZ/t8uGCBgnugTfnIYcmlP1jKKA6RJAclK89evDQX5n +R9ZD+Cq3CBMlttvSTCht0qQVlwycedH8iWyYgP/mF0W35BIn7NuuZwWhgR00n/VG +4nbKPOzTWbsP45awcmivdrS74P6mL84WfkghipdmcoyVb1B8ZP4Y/Ke0RXOnLhNe +CfrXXvuW+Pvg2RTfwRDtehGQPAgXbmLmz2ZkV69RGIr54HJv84NDbqZovRTMr7gL +9k3ciCzXCiYQgM8yAyGHV0KEhFSQ1HV7gMnt9UmxbxBE2pGU7vu3CwjYga5DpwU7 +w5wu1TmM5KgZtZvuWOTDnqDLf0cKoIbW8FeeCOn24elcj32bnQDuF9DPey1mqcvT +/yEo/Ushyz6CVYxN8DGgcy2M9JOsnmjDx02h6qgWGWDuKgb9jZrvRedpAQCeemEd +fhEs6ihqVxRFl16HxC4EVijybhAL76SsM2nbtIqW1apBQJQpXWtQwwdvgTVpdEtE +r4ArVJYX5LrswnWEQMOelugUG6S3ZjMfcyOa/O0364iY73vyVgaYK+2XtT2usMux +VL469Kj5m13T6w== +=Mjs/ +-----END PGP PUBLIC KEY BLOCK----- \ No newline at end of file diff --git a/iso/empanadas/images/get_arch b/iso/empanadas/images/get_arch new file mode 100755 index 0000000..96abf42 --- /dev/null +++ b/iso/empanadas/images/get_arch @@ -0,0 +1,12 @@ +#!/usr/bin/env bash +case "$(uname -m)" in + x86_64 | amd64) + echo -n "amd64" + ;; + arm64 | aarch64) + echo -n "arm64" + ;; + *) + echo -n "$(uname -m)" + ;; +esac diff --git a/iso/empanadas/images/rhel.repo b/iso/empanadas/images/rhel.repo new file mode 100644 index 0000000..28709f3 --- /dev/null +++ b/iso/empanadas/images/rhel.repo @@ -0,0 +1,37 @@ +[baseos] +name=CentOS Stream $releasever - BaseOS +baseurl=https://ord.mirror.rackspace.com/centos-stream/9-stream/BaseOS/$arch/os +gpgkey=file:///etc/pki/rpm-gpg/RPM-GPG-KEY-centosofficial +gpgcheck=1 +repo_gpgcheck=0 +metadata_expire=6h +countme=1 +enabled=1 + +[appstream] +name=CentOS Stream $releasever - AppStream +baseurl=https://ord.mirror.rackspace.com/centos-stream/9-stream/AppStream/$arch/os +gpgkey=file:///etc/pki/rpm-gpg/RPM-GPG-KEY-centosofficial +gpgcheck=1 +repo_gpgcheck=0 
+metadata_expire=6h +countme=1 +enabled=1 + +[extras-common] +name=CentOS Stream $releasever - Extras packages +baseurl=http://mirror.stream.centos.org/SIGs/9-stream/extras/$arch/extras-common +gpgkey=file:///etc/pki/rpm-gpg/RPM-GPG-KEY-CentOS-SIG-Extras-SHA512 +gpgcheck=1 +repo_gpgcheck=0 +metadata_expire=6h +countme=1 +enabled=1 + +[epel] +name=Extra Packages for Enterprise Linux $releasever - $basearch +baseurl=https://download-ib01.fedoraproject.org/pub/epel/9/Everything/$arch +enabled=1 +gpgcheck=1 +countme=1 +gpgkey=file:///etc/pki/rpm-gpg/RPM-GPG-KEY-EPEL-9 diff --git a/iso/empanadas/images/yum-sudo b/iso/empanadas/images/yum-sudo new file mode 100644 index 0000000..a807ac7 --- /dev/null +++ b/iso/empanadas/images/yum-sudo @@ -0,0 +1,2 @@ +#!/bin/sh +sudo yum $@ \ No newline at end of file From 4587287e1e09e817b9cfa37e67129335ed387b6a Mon Sep 17 00:00:00 2001 From: Neil Hanlon Date: Fri, 17 Jun 2022 17:56:08 -0400 Subject: [PATCH 32/64] Github actions work for automatic building * Add in really simple job template creation to prove the generation will work --- .github/workflows/mix-empanadas.yml | 47 ++++++++++++++++++ iso/empanadas/Containerfile | 17 +++++-- .../empanadas/scripts/launch_builds.py | 45 +++++++++++++++++ .../empanadas/templates/kube/Job.tmpl | 48 +++++++++++++++++++ iso/empanadas/poetry.lock | 46 +++++++++--------- iso/empanadas/pyproject.toml | 1 + 6 files changed, 178 insertions(+), 26 deletions(-) create mode 100644 .github/workflows/mix-empanadas.yml create mode 100755 iso/empanadas/empanadas/scripts/launch_builds.py create mode 100644 iso/empanadas/empanadas/templates/kube/Job.tmpl diff --git a/.github/workflows/mix-empanadas.yml b/.github/workflows/mix-empanadas.yml new file mode 100644 index 0000000..2f408ca --- /dev/null +++ b/.github/workflows/mix-empanadas.yml @@ -0,0 +1,47 @@ +--- +name: Build empanada container images + +on: + push: + branches: [ $default-branch ] + pull_request: + branches: [ $default-branch ] + workflow_dispatch: + +jobs: + buildx: + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v2 + + - name: Set up QEMU + uses: docker/setup-qemu-action@v1 + # https://github.com/docker/setup-buildx-action + - name: Set up Docker Buildx + id: buildx + uses: docker/setup-buildx-action@v1 + with: + install: true + + - name: Login to ghcr + if: github.event_name != 'pull_request' + uses: docker/login-action@v1 + with: + registry: ghcr.io + username: ${{ github.repository_owner }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Build and push + id: docker_build + uses: docker/build-push-action@v2 + with: + builder: ${{ steps.buildx.outputs.name }} + platforms: linux/amd64,linux/arm64,linux/s390x,linux/ppc64le + context: ./iso/empanadas + file: ./iso/empanadas/Containerfile + push: ${{ github.event_name != 'pull_request' }} + tags: ghcr.io/neilhanlon/sig-core-toolkit:latest + cache-from: type=gha + cache-to: type=gha,mode=max diff --git a/iso/empanadas/Containerfile b/iso/empanadas/Containerfile index ff9be57..8bb82ae 100644 --- a/iso/empanadas/Containerfile +++ b/iso/empanadas/Containerfile @@ -1,6 +1,17 @@ -FROM quay.io/centos/centos:stream9 +FROM golang:1.18 as skbn ADD images/get_arch /get_arch +RUN git clone https://github.com/rubroboletus/skbn.git /usr/src/app/skbn.git + +WORKDIR /usr/src/app/skbn.git +RUN CGO_ENABLED=0 GOOS=linux GOARCH=$(/get_arch) go build \ + -ldflags "-X main.GitTag=$(git describe --tags --always) -X main.GitCommit=$(git rev-parse --short HEAD)" \ + -o skbn cmd/skbn.go + +FROM 
quay.io/centos/centos:stream9 + +COPY --from=skbn /usr/src/app/skbn.git/skbn /usr/local/bin/skbn +COPY --from=skbn /get_arch /get_arch ENV TINI_VERSION v0.19.0 RUN curl -o /tini -L "https://github.com/krallin/tini/releases/download/${TINI_VERSION}/tini-$(/get_arch)" @@ -57,8 +68,8 @@ RUN useradd -o -d /var/peridot -u 1002 peridotbuilder && usermod -a -G mock peri RUN chown peridotbuilder:mock /etc/yum.conf && chown -R peridotbuilder:mock /etc/dnf && chown -R peridotbuilder:mock /etc/rpm && chown -R peridotbuilder:mock /etc/yum.repos.d RUN pip install 'git+https://git.rockylinux.org/release-engineering/public/toolkit.git@feature/iso-kube#egg=empanadas&subdirectory=iso/empanadas' -# COPY . /app/ -# RUN pip install /app/ + +RUN echo "nameserver 1.1.1.1 > /etc/resolv.conf" ENV USER=1002 USER 1002 diff --git a/iso/empanadas/empanadas/scripts/launch_builds.py b/iso/empanadas/empanadas/scripts/launch_builds.py new file mode 100755 index 0000000..cccdee0 --- /dev/null +++ b/iso/empanadas/empanadas/scripts/launch_builds.py @@ -0,0 +1,45 @@ +# Launches the builds of ISOs + +import argparse + +from empanadas.common import * +from empanadas.common import _rootdir + +from jinja2 import Environment, FileSystemLoader + +parser = argparse.ArgumentParser(description="ISO Compose") + +parser.add_argument('--release', type=str, help="Major Release Version", required=True) +parser.add_argument('--env', type=str, help="environment", required=True) +results = parser.parse_args() +rlvars = rldict[results.release] +major = rlvars['major'] + +EXTARCH=["s390x", "ppc64le"] +EKSARCH=["amd64", "arm64"] + +def run(): + file_loader = FileSystemLoader(f"{_rootdir}/templates") + tmplenv = Environment(loader=file_loader) + job_template = tmplenv.get_template('kube/Job.tmpl') + + arches = EKSARCH + if results.env == "ext" and results.env != "all": + arches = EXTARCH + elif results.env == "all": + arches = EKSARCH+EXTARCH + + out = "" + for arch in arches: + out += job_template.render( + architecture=arch, + backoffLimit=4, + command=["build-iso", "--release", "9", "--rc", "--isolation", "simple"], + containerName="buildiso", + imageName="ghcr.io/neilhanlon/sig-core-toolkit:latest", + jobName=f"build-iso-{arch}", + namespace="empanadas", + restartPolicy="Never", + ) + + print(out) diff --git a/iso/empanadas/empanadas/templates/kube/Job.tmpl b/iso/empanadas/empanadas/templates/kube/Job.tmpl new file mode 100644 index 0000000..1685421 --- /dev/null +++ b/iso/empanadas/empanadas/templates/kube/Job.tmpl @@ -0,0 +1,48 @@ +--- +apiVersion: batch/v1 +kind: Job +metadata: + name: {{ jobName }} + namespace: {{ namespace }} +spec: + template: + spec: + containers: + - name: {{ containerName }} + image: {{ imageName }} + command: {{ command }} + securityContext: + runAsUser: 1002 + privileged: true + lifecycle: + preStop: + exec: + command: [ + "skbn", + "cp", + "--src", + "/mnt/compose/9/latest-Rocky-9/", + "--dst", + "s3://resf-empanadas/{{ containerName }}/" + ] + env: + - name: AWS_REGION + value: us-east-2 + - name: AWS_ACCESS_KEY_ID + valueFrom: + secretKeyRef: + name: empanadas-s3 + key: ID + - name: AWS_SECRET_ACCESS_KEY + valueFrom: + secretKeyRef: + name: empanadas-s3 + key: SECRET + restartPolicy: {{ restartPolicy }} + tolerations: + - effect: NoSchedule + key: peridot.rockylinux.org/workflow-tolerates-arch + operator: Equal + value: {{ architecture }} + backoffLimit: {{ backoffLimit }} + diff --git a/iso/empanadas/poetry.lock b/iso/empanadas/poetry.lock index 71eaf95..716b15a 100644 --- a/iso/empanadas/poetry.lock +++ 
b/iso/empanadas/poetry.lock @@ -89,20 +89,20 @@ python-versions = ">=3.5" [[package]] name = "importlib-metadata" -version = "4.8.3" +version = "4.11.4" description = "Read metadata from Python packages" category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [package.dependencies] typing-extensions = {version = ">=3.6.4", markers = "python_version < \"3.8\""} zipp = ">=0.5" [package.extras] -docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] +docs = ["sphinx", "jaraco.packaging (>=9)", "rst.linker (>=1.9)"] perf = ["ipython"] -testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "packaging", "pep517", "pyfakefs", "flufl.flake8", "pytest-perf (>=0.9.2)", "pytest-black (>=0.3.7)", "pytest-mypy", "importlib-resources (>=1.3)"] +testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "packaging", "pyfakefs", "flufl.flake8", "pytest-perf (>=0.9.2)", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)", "importlib-resources (>=1.3)"] [[package]] name = "importlib-resources" @@ -203,14 +203,14 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [[package]] name = "pyparsing" -version = "3.0.7" -description = "Python parsing module" +version = "3.0.9" +description = "pyparsing module - Classes and methods to define and execute parsing grammars" category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.6.8" [package.extras] -diagrams = ["jinja2", "railroad-diagrams"] +diagrams = ["railroad-diagrams", "jinja2"] [[package]] name = "pytest" @@ -304,11 +304,11 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" [[package]] name = "typing-extensions" -version = "4.1.1" -description = "Backported and Experimental Type Hints for Python 3.6+" +version = "4.2.0" +description = "Backported and Experimental Type Hints for Python 3.7+" category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [[package]] name = "urllib3" @@ -341,15 +341,15 @@ python-versions = ">=3.4" [[package]] name = "zipp" -version = "3.6.0" +version = "3.8.0" description = "Backport of pathlib-compatible object wrapper for zip files" category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [package.extras] -docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] -testing = ["pytest (>=4.6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy"] +docs = ["sphinx", "jaraco.packaging (>=9)", "rst.linker (>=1.9)"] +testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)"] [metadata] lock-version = "1.1" @@ -390,8 +390,8 @@ idna = [ {file = "idna-3.3.tar.gz", hash = "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"}, ] importlib-metadata = [ - {file = "importlib_metadata-4.8.3-py3-none-any.whl", hash = "sha256:65a9576a5b2d58ca44d133c42a241905cc45e34d2c06fd5ba2bafa221e5d7b5e"}, - {file = "importlib_metadata-4.8.3.tar.gz", hash = "sha256:766abffff765960fcc18003801f7044eb6755ffae4521c8e8ce8e83b9c9b0668"}, + {file = "importlib_metadata-4.11.4-py3-none-any.whl", hash = "sha256:c58c8eb8a762858f49e18436ff552e83914778e50e9d2f1660535ffb364552ec"}, + {file = "importlib_metadata-4.11.4.tar.gz", 
hash = "sha256:5d26852efe48c0a32b0509ffbc583fda1a2266545a78d104a6f4aff3db17d700"}, ] importlib-resources = [ {file = "importlib_resources-5.8.0-py3-none-any.whl", hash = "sha256:7952325ffd516c05a8ad0858c74dff2c3343f136fe66a6002b2623dd1d43f223"}, @@ -497,8 +497,8 @@ py = [ {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, ] pyparsing = [ - {file = "pyparsing-3.0.7-py3-none-any.whl", hash = "sha256:a6c06a88f252e6c322f65faf8f418b16213b51bdfaece0524c1c1bc30c63c484"}, - {file = "pyparsing-3.0.7.tar.gz", hash = "sha256:18ee9022775d270c55187733956460083db60b37d0d0fb357445f3094eed3eea"}, + {file = "pyparsing-3.0.9-py3-none-any.whl", hash = "sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc"}, + {file = "pyparsing-3.0.9.tar.gz", hash = "sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb"}, ] pytest = [ {file = "pytest-5.4.3-py3-none-any.whl", hash = "sha256:5c0db86b698e8f170ba4582a492248919255fcd4c79b1ee64ace34301fb589a1"}, @@ -559,8 +559,8 @@ six = [ {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, ] typing-extensions = [ - {file = "typing_extensions-4.1.1-py3-none-any.whl", hash = "sha256:21c85e0fe4b9a155d0799430b0ad741cdce7e359660ccbd8b530613e8df88ce2"}, - {file = "typing_extensions-4.1.1.tar.gz", hash = "sha256:1a9462dcc3347a79b1f1c0271fbe79e844580bb598bafa1ed208b94da3cdcd42"}, + {file = "typing_extensions-4.2.0-py3-none-any.whl", hash = "sha256:6657594ee297170d19f67d55c05852a874e7eb634f4f753dbd667855e07c1708"}, + {file = "typing_extensions-4.2.0.tar.gz", hash = "sha256:f1c24655a0da0d1b67f07e17a5e6b2a105894e6824b92096378bb3668ef02376"}, ] urllib3 = [ {file = "urllib3-1.26.9-py2.py3-none-any.whl", hash = "sha256:44ece4d53fb1706f667c9bd1c648f5469a2ec925fcf3a776667042d645472c14"}, @@ -575,6 +575,6 @@ xmltodict = [ {file = "xmltodict-0.13.0.tar.gz", hash = "sha256:341595a488e3e01a85a9d8911d8912fd922ede5fecc4dce437eb4b6c8d037e56"}, ] zipp = [ - {file = "zipp-3.6.0-py3-none-any.whl", hash = "sha256:9fe5ea21568a0a70e50f273397638d39b03353731e6cbbb3fd8502a33fec40bc"}, - {file = "zipp-3.6.0.tar.gz", hash = "sha256:71c644c5369f4a6e07636f0aa966270449561fcea2e3d6747b8d23efaa9d7832"}, + {file = "zipp-3.8.0-py3-none-any.whl", hash = "sha256:c4f6e5bbf48e74f7a38e7cc5b0480ff42b0ae5178957d564d18932525d5cf099"}, + {file = "zipp-3.8.0.tar.gz", hash = "sha256:56bf8aadb83c24db6c4b577e13de374ccfb67da2078beba1d037c17980bf43ad"}, ] diff --git a/iso/empanadas/pyproject.toml b/iso/empanadas/pyproject.toml index afe2115..fb44274 100644 --- a/iso/empanadas/pyproject.toml +++ b/iso/empanadas/pyproject.toml @@ -25,6 +25,7 @@ sync_from_peridot_test = "empanadas.scripts.sync_from_peridot_test:run" sync_sig = "empanadas.scripts.sync_sig:run" build-iso = "empanadas.scripts.build_iso:run" pull-unpack-tree = "empanadas.scripts.pull_unpack_tree:run" +launch-builds = "empanadas.scripts.launch_builds:run" [build-system] requires = ["poetry-core>=1.0.0"] From 0603620773a3b2f62fc82083d99af9e747889030 Mon Sep 17 00:00:00 2001 From: Neil Hanlon Date: Sat, 18 Jun 2022 15:03:53 -0400 Subject: [PATCH 33/64] Fixes for current RC images * Make jobs uploads unique and match security context of peridot * change image build, fix extraction of build artifacts --- iso/empanadas/Containerfile | 15 ++++----------- .../empanadas/scripts/launch_builds.py | 3 ++- iso/empanadas/empanadas/templates/kube/Job.tmpl | 17 +++++++++++------ 3 files changed, 17 insertions(+), 18 deletions(-) diff 
--git a/iso/empanadas/Containerfile b/iso/empanadas/Containerfile index 8bb82ae..ce0ee1d 100644 --- a/iso/empanadas/Containerfile +++ b/iso/empanadas/Containerfile @@ -1,17 +1,10 @@ -FROM golang:1.18 as skbn - -ADD images/get_arch /get_arch -RUN git clone https://github.com/rubroboletus/skbn.git /usr/src/app/skbn.git - -WORKDIR /usr/src/app/skbn.git -RUN CGO_ENABLED=0 GOOS=linux GOARCH=$(/get_arch) go build \ - -ldflags "-X main.GitTag=$(git describe --tags --always) -X main.GitCommit=$(git rev-parse --short HEAD)" \ - -o skbn cmd/skbn.go +FROM ghcr.io/neilhanlon/skbn:latest as skbn FROM quay.io/centos/centos:stream9 +ADD images/get_arch /get_arch + COPY --from=skbn /usr/src/app/skbn.git/skbn /usr/local/bin/skbn -COPY --from=skbn /get_arch /get_arch ENV TINI_VERSION v0.19.0 RUN curl -o /tini -L "https://github.com/krallin/tini/releases/download/${TINI_VERSION}/tini-$(/get_arch)" @@ -69,7 +62,7 @@ RUN chown peridotbuilder:mock /etc/yum.conf && chown -R peridotbuilder:mock /etc RUN pip install 'git+https://git.rockylinux.org/release-engineering/public/toolkit.git@feature/iso-kube#egg=empanadas&subdirectory=iso/empanadas' -RUN echo "nameserver 1.1.1.1 > /etc/resolv.conf" +RUN echo "nameserver 1.1.1.1" > /etc/resolv.conf ENV USER=1002 USER 1002 diff --git a/iso/empanadas/empanadas/scripts/launch_builds.py b/iso/empanadas/empanadas/scripts/launch_builds.py index cccdee0..903a743 100755 --- a/iso/empanadas/empanadas/scripts/launch_builds.py +++ b/iso/empanadas/empanadas/scripts/launch_builds.py @@ -35,10 +35,11 @@ def run(): architecture=arch, backoffLimit=4, command=["build-iso", "--release", "9", "--rc", "--isolation", "simple"], - containerName="buildiso", + containerName=f"buildiso-{major}-{arch}", imageName="ghcr.io/neilhanlon/sig-core-toolkit:latest", jobName=f"build-iso-{arch}", namespace="empanadas", + major=major, restartPolicy="Never", ) diff --git a/iso/empanadas/empanadas/templates/kube/Job.tmpl b/iso/empanadas/empanadas/templates/kube/Job.tmpl index 1685421..c3b0a92 100644 --- a/iso/empanadas/empanadas/templates/kube/Job.tmpl +++ b/iso/empanadas/empanadas/templates/kube/Job.tmpl @@ -11,9 +11,6 @@ spec: - name: {{ containerName }} image: {{ imageName }} command: {{ command }} - securityContext: - runAsUser: 1002 - privileged: true lifecycle: preStop: exec: @@ -21,10 +18,18 @@ spec: "skbn", "cp", "--src", - "/mnt/compose/9/latest-Rocky-9/", + "/var/lib/mock/rocky-{{ major }}-{{ architecture }}/root/builddir/lorax-*`", "--dst", - "s3://resf-empanadas/{{ containerName }}/" + "s3://resf-empanadas/{{ containerName }}/$(date +%s)/", + "--parallel", + "2" ] + securityContext: + runAsUser: 0 + runAsGroup: 0 + privileged: true + runAsNonRoot: false + allowPrivilegeEscalation: true env: - name: AWS_REGION value: us-east-2 @@ -38,11 +43,11 @@ spec: secretKeyRef: name: empanadas-s3 key: SECRET - restartPolicy: {{ restartPolicy }} tolerations: - effect: NoSchedule key: peridot.rockylinux.org/workflow-tolerates-arch operator: Equal value: {{ architecture }} + restartPolicy: {{ restartPolicy }} backoffLimit: {{ backoffLimit }} From 7a097fb302a87496363b2113d611846dbc6d6a18 Mon Sep 17 00:00:00 2001 From: Neil Hanlon Date: Sun, 19 Jun 2022 00:00:02 -0400 Subject: [PATCH 34/64] Ensure builds work properly - architectures must match * Actually use release version input to run build-iso command * Buildstamp should be an epoch * template out the job/pod names instead of doing them in the rendering * label pods with toleration --- iso/empanadas/Containerfile | 6 +-- .../empanadas/scripts/launch_builds.py | 
8 ++-- .../empanadas/templates/kube/Job.tmpl | 42 +++++++++++-------- 3 files changed, 31 insertions(+), 25 deletions(-) diff --git a/iso/empanadas/Containerfile b/iso/empanadas/Containerfile index ce0ee1d..c3c4ed9 100644 --- a/iso/empanadas/Containerfile +++ b/iso/empanadas/Containerfile @@ -1,11 +1,7 @@ -FROM ghcr.io/neilhanlon/skbn:latest as skbn - FROM quay.io/centos/centos:stream9 ADD images/get_arch /get_arch -COPY --from=skbn /usr/src/app/skbn.git/skbn /usr/local/bin/skbn - ENV TINI_VERSION v0.19.0 RUN curl -o /tini -L "https://github.com/krallin/tini/releases/download/${TINI_VERSION}/tini-$(/get_arch)" RUN chmod +x /tini @@ -62,7 +58,7 @@ RUN chown peridotbuilder:mock /etc/yum.conf && chown -R peridotbuilder:mock /etc RUN pip install 'git+https://git.rockylinux.org/release-engineering/public/toolkit.git@feature/iso-kube#egg=empanadas&subdirectory=iso/empanadas' -RUN echo "nameserver 1.1.1.1" > /etc/resolv.conf +RUN pip install awscli ENV USER=1002 USER 1002 diff --git a/iso/empanadas/empanadas/scripts/launch_builds.py b/iso/empanadas/empanadas/scripts/launch_builds.py index 903a743..f0f82f7 100755 --- a/iso/empanadas/empanadas/scripts/launch_builds.py +++ b/iso/empanadas/empanadas/scripts/launch_builds.py @@ -1,6 +1,7 @@ # Launches the builds of ISOs import argparse +import datetime from empanadas.common import * from empanadas.common import _rootdir @@ -29,15 +30,16 @@ def run(): elif results.env == "all": arches = EKSARCH+EXTARCH + command = ["build-iso", "--release", f"{results.release}", "--rc", "--isolation", "simple"] + out = "" for arch in arches: out += job_template.render( architecture=arch, backoffLimit=4, - command=["build-iso", "--release", "9", "--rc", "--isolation", "simple"], - containerName=f"buildiso-{major}-{arch}", + buildTime=datetime.datetime.utcnow().strftime("%s"), + command=command, imageName="ghcr.io/neilhanlon/sig-core-toolkit:latest", - jobName=f"build-iso-{arch}", namespace="empanadas", major=major, restartPolicy="Never", diff --git a/iso/empanadas/empanadas/templates/kube/Job.tmpl b/iso/empanadas/empanadas/templates/kube/Job.tmpl index c3b0a92..bfcc20a 100644 --- a/iso/empanadas/empanadas/templates/kube/Job.tmpl +++ b/iso/empanadas/empanadas/templates/kube/Job.tmpl @@ -2,34 +2,35 @@ apiVersion: batch/v1 kind: Job metadata: - name: {{ jobName }} + name: build-iso-{{ major }}-{{ architecture }} namespace: {{ namespace }} spec: template: + metadata: + labels: + peridot.rockylinux.org/workflow-tolerates-arch: {{ architecture }} spec: containers: - - name: {{ containerName }} - image: {{ imageName }} - command: {{ command }} - lifecycle: - preStop: - exec: - command: [ - "skbn", - "cp", - "--src", - "/var/lib/mock/rocky-{{ major }}-{{ architecture }}/root/builddir/lorax-*`", - "--dst", - "s3://resf-empanadas/{{ containerName }}/$(date +%s)/", - "--parallel", - "2" - ] + - name: buildiso-{{ major }}-{{ architecture }} + image: {{ imageName }} + command: ["/bin/bash", "-c"] + args: + - | + {{ command | join(' ') }} + aws s3 cp --recursive --exclude=* --include=lorax* \ + /var/lib/mock/rocky-{{ major }}-$(uname -m)/root/builddir/ \ + "s3://resf-empanadas/buildiso-{{ major }}-{{ architecture }}/{{ buildTime }}/" securityContext: runAsUser: 0 runAsGroup: 0 privileged: true runAsNonRoot: false allowPrivilegeEscalation: true + volumeMounts: + - mountPath: /etc/resolv.conf + name: resolv-conf + - mountPath: /var/lib/mock/ + name: mock env: - name: AWS_REGION value: us-east-2 @@ -49,5 +50,12 @@ spec: operator: Equal value: {{ architecture }} restartPolicy: {{ 
restartPolicy }} + volumes: + - name: resolv-conf + hostPath: + path: /etc/resolv.conf + type: File + - name: mock + emptyDir: {} backoffLimit: {{ backoffLimit }} From 6914d651c499eabe284150d8eeb14b3bca62ccf7 Mon Sep 17 00:00:00 2001 From: nazunalika Date: Mon, 20 Jun 2022 16:49:14 -0700 Subject: [PATCH 35/64] update some functional tests --- func/README.md | 54 +++++++++----- .../10-check-firefox-start-page.sh | 2 +- func/core/pkg_mdadm/00-install-mdadm.sh | 3 + func/core/pkg_mdadm/01-test-mdadm.sh | 13 ++++ .../pkg_postgresql/00-install-postgresql.sh | 3 + .../pkg_postgresql/01-configure-postgresql.sh | 5 ++ func/core/pkg_postgresql/10-create-db.sh | 4 ++ func/core/pkg_postgresql/11-create-user.sh | 4 ++ func/core/pkg_postgresql/20-drop-db.sh | 4 ++ func/core/pkg_postgresql/21-drop-user.sh | 4 ++ func/core/pkg_release/30-os-release.sh | 4 +- func/core/pkg_tftp-server/00-install-tftp.sh | 3 + .../core/pkg_tftp-server/01-configure-tftp.sh | 20 ++++++ func/core/pkg_tftp-server/10-get-test.sh | 10 +++ func/core/pkg_tftp-server/11-put-test.sh | 14 ++++ func/stacks.sh | 59 +++++++++++++++ func/stacks/ipa/10-install-ipa.sh | 3 +- func/stacks/ipa/20-ipa-user.sh | 71 ++++++++++++++++++- func/stacks/ipa/21-ipa-service.sh | 60 +++++++++++++++- func/stacks/ipa/22-ipa-dns.sh | 43 +++++++++++ func/stacks/ipa/23-ipa-sudo.sh | 16 +++++ func/stacks/ipa/50-cleanup-ipa.sh | 2 +- iso/empanadas/empanadas/configs/el9-beta.yaml | 4 ++ iso/empanadas/empanadas/configs/el9.yaml | 4 ++ iso/empanadas/empanadas/configs/el9lh.yaml | 4 ++ iso/empanadas/empanadas/util/iso_utils.py | 17 +++-- 26 files changed, 398 insertions(+), 32 deletions(-) create mode 100755 func/core/pkg_mdadm/00-install-mdadm.sh create mode 100755 func/core/pkg_mdadm/01-test-mdadm.sh create mode 100755 func/core/pkg_postgresql/00-install-postgresql.sh create mode 100755 func/core/pkg_postgresql/01-configure-postgresql.sh create mode 100755 func/core/pkg_postgresql/10-create-db.sh create mode 100755 func/core/pkg_postgresql/11-create-user.sh create mode 100755 func/core/pkg_postgresql/20-drop-db.sh create mode 100755 func/core/pkg_postgresql/21-drop-user.sh create mode 100755 func/core/pkg_tftp-server/00-install-tftp.sh create mode 100755 func/core/pkg_tftp-server/01-configure-tftp.sh create mode 100755 func/core/pkg_tftp-server/10-get-test.sh create mode 100755 func/core/pkg_tftp-server/11-put-test.sh create mode 100644 func/stacks.sh diff --git a/func/README.md b/func/README.md index 1d007a9..52b0062 100644 --- a/func/README.md +++ b/func/README.md @@ -28,9 +28,11 @@ How to Run There are two ways to run through the tests: * By running `/bin/bash runtests.sh` - * Runs all tests + * Runs all core tests +* By running `/bin/bash stacktests.sh` + * Runs all stack tests (eg, lamp, ipa) * By running `/bin/bash monotests.sh` - * Runs all tests one by one to help identify failures as they happen + * Supposed to runs all tests one by one to help identify failures as they happen (not functional) Adding Tests ------------ @@ -316,6 +318,9 @@ Current Tree │   │   ├── 00-install-lsof.sh │   │   ├── 10-test-lsof.sh │   │   └── README.md +│   ├── pkg_mdadm +│   │   ├── 00-install-mdadm.sh +│   │   └── 01-test-mdadm.sh │   ├── pkg_network │   │   ├── 00-install-packages.sh │   │   ├── 10-tracepath.sh @@ -348,6 +353,13 @@ Current Tree │   │   ├── 30-postfix-sasl.sh │   │   ├── 40-postfix-tls.sh │   │   └── README.md +│   ├── pkg_postgresql +│   │   ├── 00-install-postgresql.sh +│   │   ├── 01-configure-postgresql.sh +│   │   ├── 10-create-db.sh +│   │   ├── 
11-create-user.sh +│   │   ├── 20-drop-db.sh +│   │   └── 21-drop-user.sh │   ├── pkg_python │   │   ├── 00-install-python.sh │   │   ├── 10-test-python3.sh @@ -423,6 +435,11 @@ Current Tree │   ├── pkg_telnet │   │   ├── 00-install-telnet.sh │   │   └── 10-test-telnet.sh +│   ├── pkg_tftp-server +│   │   ├── 00-install-tftp.sh +│   │   ├── 01-configure-tftp.sh +│   │   ├── 10-get-test.sh +│   │   └── 11-put-test.sh │   ├── pkg_vsftpd │   │   ├── 00-install-vsftpd.sh │   │   ├── 10-anonymous-vsftpd.sh @@ -447,20 +464,21 @@ Current Tree ├── README.md ├── runtests.sh ├── skip.list -└── stacks - ├── ipa - │   ├── 00-ipa-pregame.sh - │   ├── 10-install-ipa.sh - │   ├── 11-configure-ipa.sh - │   ├── 12-verify-ipa.sh - │   ├── 20-ipa-user.sh - │   ├── 21-ipa-service.sh - │   ├── 22-ipa-dns.sh - │   ├── 23-ipa-sudo.sh - │   ├── 50-cleanup-ipa.sh - │   └── README.md - └── lamp - ├── 00-install-lamp.sh - ├── 01-verification.sh - └── 10-test-lamp.sh +├── stacks +│   ├── ipa +│   │   ├── 00-ipa-pregame.sh +│   │   ├── 10-install-ipa.sh +│   │   ├── 11-configure-ipa.sh +│   │   ├── 12-verify-ipa.sh +│   │   ├── 20-ipa-user.sh +│   │   ├── 21-ipa-service.sh +│   │   ├── 22-ipa-dns.sh +│   │   ├── 23-ipa-sudo.sh +│   │   ├── 50-cleanup-ipa.sh +│   │   └── README.md +│   └── lamp +│   ├── 00-install-lamp.sh +│   ├── 01-verification.sh +│   └── 10-test-lamp.sh +└── stacks.sh ``` diff --git a/func/core/pkg_firefox/10-check-firefox-start-page.sh b/func/core/pkg_firefox/10-check-firefox-start-page.sh index 33c59eb..3963fa1 100755 --- a/func/core/pkg_firefox/10-check-firefox-start-page.sh +++ b/func/core/pkg_firefox/10-check-firefox-start-page.sh @@ -9,7 +9,7 @@ fi COUNTS="$(grep -c rockylinux.org $FIREPATH)" -if [ "$COUNTS" -eq 2 ]; then +if [ "$COUNTS" -ge 2 ]; then r_checkExitStatus 0 else r_checkExitStatus 1 diff --git a/func/core/pkg_mdadm/00-install-mdadm.sh b/func/core/pkg_mdadm/00-install-mdadm.sh new file mode 100755 index 0000000..eba00b5 --- /dev/null +++ b/func/core/pkg_mdadm/00-install-mdadm.sh @@ -0,0 +1,3 @@ +#!/bin/bash +r_log "mdadm" "Install mdadm" +p_installPackageNormal mdadm diff --git a/func/core/pkg_mdadm/01-test-mdadm.sh b/func/core/pkg_mdadm/01-test-mdadm.sh new file mode 100755 index 0000000..c8e0be3 --- /dev/null +++ b/func/core/pkg_mdadm/01-test-mdadm.sh @@ -0,0 +1,13 @@ +#!/bin/bash +r_log "mdadm" "Check that mdadm will operate and return the right exit codes" +[ ${EUID} -eq 0 ] || { r_log "mdadm" "Not running as root. Skipping." ; exit "$PASS"; } +MDADM=$(which mdadm) + +[ -z "${MDADM}" ] && { r_log "mdadm" "which reported the binary but it doesn't exist, why?"; exit "$FAIL"; } + +${MDADM} --detail --scan &> /dev/null +ret_val=$? + +[ "$ret_val" -eq 0 ] || { r_log "mdadm" "There was a non-zero exit. 
This is likely fatal."; exit "$FAIL"; } + +r_checkExitStatus $ret_val diff --git a/func/core/pkg_postgresql/00-install-postgresql.sh b/func/core/pkg_postgresql/00-install-postgresql.sh new file mode 100755 index 0000000..cc39dd3 --- /dev/null +++ b/func/core/pkg_postgresql/00-install-postgresql.sh @@ -0,0 +1,3 @@ +#!/bin/bash +r_log "postgresql" "Installing postgresql" +p_installPackageNormal postgresql-server postgresql diff --git a/func/core/pkg_postgresql/01-configure-postgresql.sh b/func/core/pkg_postgresql/01-configure-postgresql.sh new file mode 100755 index 0000000..e84e765 --- /dev/null +++ b/func/core/pkg_postgresql/01-configure-postgresql.sh @@ -0,0 +1,5 @@ +#!/bin/bash +r_log "postgresql" "Initialize postgresql" +postgresql-setup --initdb +m_serviceCycler postgresql-server cycle +sleep 15 diff --git a/func/core/pkg_postgresql/10-create-db.sh b/func/core/pkg_postgresql/10-create-db.sh new file mode 100755 index 0000000..c796d2c --- /dev/null +++ b/func/core/pkg_postgresql/10-create-db.sh @@ -0,0 +1,4 @@ +#!/bin/bash +r_log "postgresql" "Creating db" +su - postgres -c 'createdb pg_test' +r_checkExitStatus $? diff --git a/func/core/pkg_postgresql/11-create-user.sh b/func/core/pkg_postgresql/11-create-user.sh new file mode 100755 index 0000000..271eacf --- /dev/null +++ b/func/core/pkg_postgresql/11-create-user.sh @@ -0,0 +1,4 @@ +#!/bin/bash +r_log "postgresql" "Creating user" +su - postgres -c 'createuser -S -R -D testuser' > /dev/null 2>&1 +r_checkExitStatus $? diff --git a/func/core/pkg_postgresql/20-drop-db.sh b/func/core/pkg_postgresql/20-drop-db.sh new file mode 100755 index 0000000..a887545 --- /dev/null +++ b/func/core/pkg_postgresql/20-drop-db.sh @@ -0,0 +1,4 @@ +#!/bin/bash +r_log "postgresql" "Dropping database" +su - postgres -c 'dropdb pg_test' > /dev/null 2>&1 +r_checkExitStatus $? diff --git a/func/core/pkg_postgresql/21-drop-user.sh b/func/core/pkg_postgresql/21-drop-user.sh new file mode 100755 index 0000000..0679dc1 --- /dev/null +++ b/func/core/pkg_postgresql/21-drop-user.sh @@ -0,0 +1,4 @@ +#!/bin/bash +r_log "postgresql" "Dropping user" +su - postgres -c 'dropuser testuser' > /dev/null 2>&1 +r_checkExitStatus $? diff --git a/func/core/pkg_release/30-os-release.sh b/func/core/pkg_release/30-os-release.sh index a35bfca..7b6b61d 100755 --- a/func/core/pkg_release/30-os-release.sh +++ b/func/core/pkg_release/30-os-release.sh @@ -4,10 +4,10 @@ r_log "rocky" "Check /etc/os-release stuff" r_log "rocky" "Verify support directives" for s in NAME=\"Rocky\ Linux\" \ ID=\"rocky\" \ - ROCKY_SUPPORT_PRODUCT=\"Rocky\ Linux\" \ + ROCKY_SUPPORT_PRODUCT=\"Rocky-Linux-$RL_VER\" \ ROCKY_SUPPORT_PRODUCT_VERSION=\"$RL_VER\"; do if ! 
grep -q "$s" /etc/os-release; then - r_log "rocky" "Missing string in /etc/os-release" + r_log "rocky" "Missing string ($s) in /etc/os-release" r_checkExitStatus 1 fi done diff --git a/func/core/pkg_tftp-server/00-install-tftp.sh b/func/core/pkg_tftp-server/00-install-tftp.sh new file mode 100755 index 0000000..34c47be --- /dev/null +++ b/func/core/pkg_tftp-server/00-install-tftp.sh @@ -0,0 +1,3 @@ +#!/bin/bash +r_log "tftp" "Installing packages" +p_installPackageNormal tftp-server tftp diff --git a/func/core/pkg_tftp-server/01-configure-tftp.sh b/func/core/pkg_tftp-server/01-configure-tftp.sh new file mode 100755 index 0000000..7766de2 --- /dev/null +++ b/func/core/pkg_tftp-server/01-configure-tftp.sh @@ -0,0 +1,20 @@ +#!/bin/bash +r_log "tftp" "Configure tftp" + +cat < /etc/xinetd.d/tftp +service tftp +{ + socket_type = dgram + protocol = udp + wait = yes + user = root + server = /usr/sbin/in.tftpd + server_args = -s /var/lib/tftpboot + disable = no + per_source = 11 + cps = 100 2 + flags = IPv4 +} +EOF + +m_serviceCycler tftp.socket start diff --git a/func/core/pkg_tftp-server/10-get-test.sh b/func/core/pkg_tftp-server/10-get-test.sh new file mode 100755 index 0000000..3113972 --- /dev/null +++ b/func/core/pkg_tftp-server/10-get-test.sh @@ -0,0 +1,10 @@ +#!/bin/bash +r_log "tftp" "Getting a file from tftp" + +chmod 777 /var/lib/tftpboot +echo "rocky func" > /var/lib/tftpboot/tftptest +tftp 127.0.0.1 -c get tftptest + +grep -q "rocky func" tftptest +r_checkExitStatus +/bin/rm tftptest diff --git a/func/core/pkg_tftp-server/11-put-test.sh b/func/core/pkg_tftp-server/11-put-test.sh new file mode 100755 index 0000000..afda5f1 --- /dev/null +++ b/func/core/pkg_tftp-server/11-put-test.sh @@ -0,0 +1,14 @@ +#!/bin/bash +r_log "tftp" "Testing anon write" +TFTPDIR=/var/lib/tftpboot +setsebool tftp_anon_write 1 +chmod 777 $TFTPDIR +echo "rocky func" > puttest +touch $TFTPDIR > $TFTPDIR/puttest +chmod 666 $TFTPDIR/puttest +tftp 127.0.0.1 -c put puttest +sleep 2 +grep -q 'rocky func' $TFTPDIR/puttest +r_checkExitStatus $? +/bin/rm puttest +/bin/rm $TFTPDIR/puttest diff --git a/func/stacks.sh b/func/stacks.sh new file mode 100644 index 0000000..cca6550 --- /dev/null +++ b/func/stacks.sh @@ -0,0 +1,59 @@ +#!/bin/bash +# Release Engineering Core Functionality Testing +# Louis Abel @nazunalika + +################################################################################ +# Settings and variables + +# Exits on any non-zero exit status - Disabled for now. +#set -e +# Undefined variables will cause an exit +set -u + +COMMON_EXPORTS='./common/exports.sh' +COMMON_IMPORTS='./common/imports.sh' +SELINUX=$(getenforce) + +# End +################################################################################ + +# shellcheck source=/dev/null disable=SC2015 +[ -f $COMMON_EXPORTS ] && source $COMMON_EXPORTS || { echo -e "\n[-] $(date): Variables cannot be sourced."; exit 1; } +# shellcheck source=/dev/null disable=SC2015 +[ -f $COMMON_IMPORTS ] && source $COMMON_IMPORTS || { echo -e "\n[-] $(date): Functions cannot be sourced."; exit 1; } +# Init log +# shellcheck disable=SC2015 +[ -e "$LOGFILE" ] && m_recycleLog || touch "$LOGFILE" +# SELinux check +if [ "$SELINUX" != "Enforcing" ]; then + echo -e "\n[-] $(date): SELinux is not enforcing." 
+ exit 1 +fi + +r_log "internal" "Starting Release Engineering Core Tests" + +################################################################################ +# Script Work + +# Skip tests in a list - some tests are already -x, so it won't be an issue +if [ -e skip.list ]; then + r_log "internal" "Disabling tests" + # shellcheck disable=SC2162 + grep -E "^${RL_VER}" skip.list | while read line; do + # shellcheck disable=SC2086 + testFile="$(echo $line | cut -d '|' -f 2)" + r_log "internal" "SKIP ${testFile}" + chmod -x "${testFile}" + done + r_log "internal" "WARNING: Tests above were disabled." +fi + +# TODO: should we let $1 judge what directory is ran? +# TODO: get some stacks and lib in there + +#r_processor <(/usr/bin/find ./core -type f | sort -t'/') +#r_processor <(/usr/bin/find ./lib -type f | sort -t'/') +r_processor <(/usr/bin/find ./stacks -type f | sort -t'/') + +r_log "internal" "Core Tests completed" +exit 0 diff --git a/func/stacks/ipa/10-install-ipa.sh b/func/stacks/ipa/10-install-ipa.sh index 40aa006..0e5b029 100644 --- a/func/stacks/ipa/10-install-ipa.sh +++ b/func/stacks/ipa/10-install-ipa.sh @@ -9,5 +9,6 @@ fi # going to be the same thing or not so this check is there just in case. if [ "$RL_VER" -eq 8 ]; then p_enableModule idm:DL1/{client,common,dns,server} - p_installPackageNormal ipa-server ipa-server-dns fi + +p_installPackageNormal ipa-server ipa-server-dns diff --git a/func/stacks/ipa/20-ipa-user.sh b/func/stacks/ipa/20-ipa-user.sh index 884f0df..9965a56 100644 --- a/func/stacks/ipa/20-ipa-user.sh +++ b/func/stacks/ipa/20-ipa-user.sh @@ -4,8 +4,77 @@ if m_getArch aarch64 | grep -qE 'aarch64'; then exit 0 fi -if [ "$IPAINSTALLED" -eq 1 ]; then +if [ "$IPAINSTALLED" -eq 1 ]; then r_log "ipa" "IPA was not successfully installed. Aborting." r_checkExitStatus 1 fi +kdestroy &> /dev/null +klist 2>&1 | grep -E "(No credentials|Credentials cache .* not found)" &> /dev/null +r_checkExitStatus $? + +expect -f - < /dev/null +r_checkExitStatus $? + +r_log "ipa" "Test adding a user" +userDetails="$(ipa user-add --first=test --last=user --random ipatestuser)" +echo "$userDetails" | grep -q 'Added user "ipatestuser"' +r_checkExitStatus $? + +echo "$userDetails" | grep -q 'First name: test' +r_checkExitStatus $? +echo "$userDetails" | grep -q 'Last name: user' +r_checkExitStatus $? +echo "$userDetails" | grep -q 'Full name: test user' +r_checkExitStatus $? +echo "$userDetails" | grep -q 'Home directory: /home/ipatestuser' +r_checkExitStatus $? + +r_log "ipa" "Changing password of the user" +kdestroy &> /dev/null + +expect -f - < /dev/null +r_checkExitStatus $? + +kdestroy &> /dev/null + +r_log "ipa" "Testing for user in getent" +getent passwd ipatestuser &> /dev/null +r_checkExitStatus $? diff --git a/func/stacks/ipa/21-ipa-service.sh b/func/stacks/ipa/21-ipa-service.sh index 9f053a1..db50dd1 100644 --- a/func/stacks/ipa/21-ipa-service.sh +++ b/func/stacks/ipa/21-ipa-service.sh @@ -1,6 +1,6 @@ #!/bin/bash if m_getArch aarch64 | grep -qE 'aarch64'; then - r_log "ipa -bash" "Skipping for aarch64" + r_log "ipa" "Skipping for aarch64" exit 0 fi @@ -9,3 +9,61 @@ if [ "$IPAINSTALLED" -eq 1 ]; then r_checkExitStatus 1 fi +kdestroy &> /dev/null +klist 2>&1 | grep -E "(No credentials|Credentials cache .* not found)" &> /dev/null +r_checkExitStatus $? + +expect -f - < /dev/null +r_checkExitStatus $? + +r_log "ipa" "Adding test service" +ipa service-add testservice/rltest.rlipa.local &> /dev/null +r_checkExitStatus $? 
+ +r_log "ipa" "Getting keytab for service" +ipa-getkeytab -s rltest.rlipa.local -p testservice/rltest.rlipa.local -k /tmp/testservice.keytab &> /dev/null +r_checkExitStatus $? + +r_log "ipa" "Getting a certificate for service" +ipa-getcert request -K testservice/rltest.rlipa.local -D rltest.rlipa.local -f /etc/pki/tls/certs/testservice.crt -k /etc/pki/tls/private/testservice.key &> /dev/null +r_checkExitStatus $? + +while true; do + entry="$(ipa-getcert list -r | sed -n '/Request ID/,/auto-renew: yes/p')" + if [[ $entry =~ "status:" ]] && [[ $entry =~ "CA_REJECTED" ]]; then + r_checkExitStatus 1 + break + fi + if [[ $entry =~ "" ]]; then + r_checkExitStatus 0 + break + fi + sleep 1 +done + +while ! stat /etc/pki/tls/certs/testservice.crt &> /dev/null; do + sync + sleep 1 +done + +r_log "ipa" "Verifying keytab" +klist -k /tmp/testservice.keytab | grep "testservice/rltest.rlipa.local" &> /dev/null +r_checkExitStatus $? + +r_log "ipa" "Verifying key matches the certificate" +diff <(openssl x509 -in /etc/pki/tls/certs/testservice.crt -noout -modulus 2>&1 ) <(openssl rsa -in /etc/pki/tls/private/testservice.key -noout -modulus 2>&1 ) +r_checkExitStatus $? + +r_log "ipa" "Verifying the certificate against our CA" +openssl verify -CAfile /etc/ipa/ca.crt /etc/pki/tls/certs/testservice.crt | grep "/etc/pki/tls/certs/testservice.crt: OK" &> /dev/null +r_checkExitStatus $? diff --git a/func/stacks/ipa/22-ipa-dns.sh b/func/stacks/ipa/22-ipa-dns.sh index 9f053a1..4d74174 100644 --- a/func/stacks/ipa/22-ipa-dns.sh +++ b/func/stacks/ipa/22-ipa-dns.sh @@ -9,3 +9,46 @@ if [ "$IPAINSTALLED" -eq 1 ]; then r_checkExitStatus 1 fi +kdestroy &> /dev/null +klist 2>&1 | grep -qE "(No credentials|Credentials cache .* not found)" &> /dev/null +r_checkExitStatus $? + +expect -f - < /dev/null +r_checkExitStatus $? + +r_log "ipa" "Adding testzone subdomain" +ipa dnszone-add --name-server=rltest.rlipa.local. --admin-email=hostmaster.testzone.rlipa.local. testzone.rlipa.local &> /dev/null +r_checkExitStatus $? +sleep 5 + +r_log "ipa" "Get SOA from testzone subdomain" +dig @localhost SOA testzone.rlipa.local | grep -q "status: NOERROR" &> /dev/null +r_checkExitStatus $? + +r_log "ipa" "Adding a CNAME record to the primary domain" +ipa dnsrecord-add rlipa.local testrecord --cname-hostname=rltest &> /dev/null +r_checkExitStatus $? +sleep 5 + +r_log "ipa" "Retrieving CNAME record" +dig @localhost CNAME testrecord.rlipa.local | grep -q "status: NOERROR" &> /dev/null +r_checkExitStatus $? + +r_log "ipa" "Adding a CNAME to subdomain" +ipa dnsrecord-add testzone.rlipa.local testrecord --cname-hostname=rltest.rlipa.local. &> /dev/null +r_checkExitStatus $? +sleep 5 + +r_log "ipa" "Testing can retrieve record from subdomain" +dig @localhost CNAME testrecord.testzone.rlipa.local | grep -q "status: NOERROR" &> /dev/null +r_checkExitStatus $? diff --git a/func/stacks/ipa/23-ipa-sudo.sh b/func/stacks/ipa/23-ipa-sudo.sh index 9f053a1..68e50ac 100644 --- a/func/stacks/ipa/23-ipa-sudo.sh +++ b/func/stacks/ipa/23-ipa-sudo.sh @@ -9,3 +9,19 @@ if [ "$IPAINSTALLED" -eq 1 ]; then r_checkExitStatus 1 fi +kdestroy &> /dev/null +klist 2>&1 | grep -E "(No credentials|Credentials cache .* not found)" &> /dev/null +r_checkExitStatus $? + +expect -f - < /dev/null +r_checkExitStatus $? 
diff --git a/func/stacks/ipa/50-cleanup-ipa.sh b/func/stacks/ipa/50-cleanup-ipa.sh index 9f053a1..9f17e3f 100644 --- a/func/stacks/ipa/50-cleanup-ipa.sh +++ b/func/stacks/ipa/50-cleanup-ipa.sh @@ -1,6 +1,6 @@ #!/bin/bash if m_getArch aarch64 | grep -qE 'aarch64'; then - r_log "ipa -bash" "Skipping for aarch64" + r_log "ipa" "Skipping for aarch64" exit 0 fi diff --git a/iso/empanadas/empanadas/configs/el9-beta.yaml b/iso/empanadas/empanadas/configs/el9-beta.yaml index 12fe232..e8b76df 100644 --- a/iso/empanadas/empanadas/configs/el9-beta.yaml +++ b/iso/empanadas/empanadas/configs/el9-beta.yaml @@ -56,6 +56,10 @@ images: - dvd - minimal + lorax_variants: + - dvd + - minimal + - BaseOS repos: - 'BaseOS' - 'AppStream' diff --git a/iso/empanadas/empanadas/configs/el9.yaml b/iso/empanadas/empanadas/configs/el9.yaml index 34eef95..59d0ca8 100644 --- a/iso/empanadas/empanadas/configs/el9.yaml +++ b/iso/empanadas/empanadas/configs/el9.yaml @@ -56,6 +56,10 @@ images: - dvd - minimal + lorax_variants: + - dvd + - minimal + - BaseOS repos: - 'BaseOS' - 'AppStream' diff --git a/iso/empanadas/empanadas/configs/el9lh.yaml b/iso/empanadas/empanadas/configs/el9lh.yaml index fa102ca..71b8772 100644 --- a/iso/empanadas/empanadas/configs/el9lh.yaml +++ b/iso/empanadas/empanadas/configs/el9lh.yaml @@ -56,6 +56,10 @@ images: - dvd - minimal + lorax_variants: + - dvd + - minimal + - BaseOS repos: - 'BaseOS' - 'AppStream' diff --git a/iso/empanadas/empanadas/util/iso_utils.py b/iso/empanadas/empanadas/util/iso_utils.py index 3d70299..b344e80 100644 --- a/iso/empanadas/empanadas/util/iso_utils.py +++ b/iso/empanadas/empanadas/util/iso_utils.py @@ -406,7 +406,7 @@ class IsoBuild: self.log.info( 'Copying base lorax for ' + Color.BOLD + arch + Color.END ) - for variant in self.iso_map['images']: + for variant in self.iso_map['lorax_variants']: self._copy_lorax_to_variant(self.force_unpack, arch, variant) self.log.info( @@ -576,7 +576,10 @@ class IsoBuild: return self.log.info('Copying base lorax to %s directory...' % image) - shutil.copytree(src_to_image, path_to_image) + try: + shutil.copytree(src_to_image, path_to_image, copy_function=shutil.copy2) + except: + self.log.error('%s already exists??' % image) def run_boot_sync(self): """ @@ -596,14 +599,14 @@ class IsoBuild: unpack_single_arch = True arches_to_unpack = [self.arch] - self.sync_boot(force_unpack=self.force_unpack, arch=self.arch) + self._sync_boot(force_unpack=self.force_unpack, arch=self.arch) self.treeinfo_write(arch=self.arch) - def _sync_boot(self, force_unpack, arch, variant): + def _sync_boot(self, force_unpack, arch, image): """ Syncs whatever """ - self.log.info('Copying lorax to %s directory...' % variant) + self.log.info('Copying lorax to %s directory...' % image) # checks here, report that it already exists def treeinfo_write(self, arch): @@ -616,7 +619,7 @@ class IsoBuild: """ Fixes lorax treeinfo """ - self.log.info('Fixing up lorax treeinfo...') + self.log.info('Fixing up lorax treeinfo for %s ...' % ) def discinfo_write(self): """ @@ -651,7 +654,7 @@ class IsoBuild: """ print() - def generate_graft_points(self): + def _generate_graft_points(self): """ Get a list of packages for an extras ISO. This should NOT be called during the usual run() section. 
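The _generate_graft_points stub above only carries a docstring at this point; the series fills it in later (patch 45 notes "Add graft point process", and the mkisofs command builder added in patch 37 passes -graft-points and -path-list). As a rough, purely illustrative sketch of the idea, assuming nothing more than a walk over an already-staged tree, and using helper names that do not exist anywhere in empanadas, graft-point generation amounts to mapping ISO-relative paths to on-disk paths and writing them out as target=source lines:

import os

# Hypothetical helpers, not part of empanadas. They only illustrate the
# "graft point" concept: map the path a file should have inside the ISO to
# the path it currently has on disk, then write that mapping out as
# "iso/path=/real/path" lines for genisoimage/xorrisofs.

def sketch_generate_graft_points(tree_root):
    """Walk a staged tree and map ISO-relative paths to absolute paths."""
    grafts = {}
    for dirpath, _dirnames, filenames in os.walk(tree_root):
        for name in filenames:
            full_path = os.path.join(dirpath, name)
            iso_path = os.path.relpath(full_path, tree_root)
            grafts[iso_path] = full_path
    return grafts

def sketch_write_graft_list(grafts, dest):
    """Write the mapping in the target=source form a path list file uses."""
    with open(dest, "w") as handle:
        for iso_path, real_path in sorted(grafts.items()):
            handle.write("{}={}\n".format(iso_path, real_path))

A file written this way is the kind of input -path-list expects when -graft-points is enabled; package sorting and image handling are deliberately left out of the sketch.
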
From a28bed653a060b8692733f1bf0ad6082d9c90e11 Mon Sep 17 00:00:00 2001 From: nazunalika Date: Mon, 20 Jun 2022 20:27:30 -0700 Subject: [PATCH 36/64] remove minimal as an image for now --- iso/empanadas/empanadas/configs/el9-beta.yaml | 1 - iso/empanadas/empanadas/configs/el9.yaml | 1 - iso/empanadas/empanadas/configs/el9lh.yaml | 1 - iso/empanadas/empanadas/util/iso_utils.py | 17 +++++++++++++---- 4 files changed, 13 insertions(+), 7 deletions(-) diff --git a/iso/empanadas/empanadas/configs/el9-beta.yaml b/iso/empanadas/empanadas/configs/el9-beta.yaml index e8b76df..e1656c2 100644 --- a/iso/empanadas/empanadas/configs/el9-beta.yaml +++ b/iso/empanadas/empanadas/configs/el9-beta.yaml @@ -55,7 +55,6 @@ s390x: '' images: - dvd - - minimal lorax_variants: - dvd - minimal diff --git a/iso/empanadas/empanadas/configs/el9.yaml b/iso/empanadas/empanadas/configs/el9.yaml index 59d0ca8..3aeddb9 100644 --- a/iso/empanadas/empanadas/configs/el9.yaml +++ b/iso/empanadas/empanadas/configs/el9.yaml @@ -55,7 +55,6 @@ s390x: '' images: - dvd - - minimal lorax_variants: - dvd - minimal diff --git a/iso/empanadas/empanadas/configs/el9lh.yaml b/iso/empanadas/empanadas/configs/el9lh.yaml index 71b8772..7029020 100644 --- a/iso/empanadas/empanadas/configs/el9lh.yaml +++ b/iso/empanadas/empanadas/configs/el9lh.yaml @@ -55,7 +55,6 @@ s390x: '' images: - dvd - - minimal lorax_variants: - dvd - minimal diff --git a/iso/empanadas/empanadas/util/iso_utils.py b/iso/empanadas/empanadas/util/iso_utils.py index b344e80..ed0f7e5 100644 --- a/iso/empanadas/empanadas/util/iso_utils.py +++ b/iso/empanadas/empanadas/util/iso_utils.py @@ -461,7 +461,7 @@ class IsoBuild: ) return - self.log.info('Downloading to: %s' % dest) + self.log.info('Downloading ({}) to: {}'.format(source, dest)) try: self.s3.download_file( Bucket=self.s3_bucket, @@ -513,7 +513,7 @@ class IsoBuild: return unurl = self.s3_bucket_url + '/' + source - self.log.info('Downloading to: %s' % dest) + self.log.info('Downloading ({}) to: {}'.format(source, dest)) try: with requests.get(unurl, allow_redirects=True) as r: with open(dest, 'wb') as f: @@ -581,6 +581,15 @@ class IsoBuild: except: self.log.error('%s already exists??' % image) + self.log.info('Removing boot.iso from copy') + try: + os.remove(path_to_image + '/images/boot.iso') + except: + self.log.error( + '[' + Color.BOLD + Color.YELLOW + 'FAIL' + Color.END + '] ' + + 'Cannot remove boot.iso' + ) + def run_boot_sync(self): """ This unpacks into BaseOS/$arch/os, assuming there's no data actually @@ -615,11 +624,11 @@ class IsoBuild: """ self.log.info('Starting treeinfo work...') - def _treeinfo_from_lorax(self, arch, force_unpack): + def _treeinfo_from_lorax(self, arch, force_unpack, variant): """ Fixes lorax treeinfo """ - self.log.info('Fixing up lorax treeinfo for %s ...' % ) + self.log.info('Fixing up lorax treeinfo for %s ...' 
% variant) def discinfo_write(self): """ From 00f330226d4f3f982a87229bd409773c72980bc8 Mon Sep 17 00:00:00 2001 From: nazunalika Date: Tue, 21 Jun 2022 17:21:37 -0700 Subject: [PATCH 37/64] update poetry and iso utilities --- .../empanadas/templates/mkiso.tmpl.sh | 16 ++ iso/empanadas/empanadas/util/iso_utils.py | 259 +++++++++++++++++- iso/empanadas/poetry.lock | 30 +- iso/empanadas/pyproject.toml | 1 + 4 files changed, 295 insertions(+), 11 deletions(-) create mode 100644 iso/empanadas/empanadas/templates/mkiso.tmpl.sh diff --git a/iso/empanadas/empanadas/templates/mkiso.tmpl.sh b/iso/empanadas/empanadas/templates/mkiso.tmpl.sh new file mode 100644 index 0000000..7f78fc2 --- /dev/null +++ b/iso/empanadas/empanadas/templates/mkiso.tmpl.sh @@ -0,0 +1,16 @@ +#!/bin/bash +set -ex + +cd /builddir + +if ! TEMPLATE="$($(head -n1 $(which lorax) | cut -c3-) -c 'import pylorax; print(pylorax.find_templates())')"; then + TEMPLATE="/usr/share/lorax" +fi + +{{ make_image }} + +{{ isohybrid }} + +{{ implantmd5 }} + +{{ make_manifest }} diff --git a/iso/empanadas/empanadas/util/iso_utils.py b/iso/empanadas/empanadas/util/iso_utils.py index ed0f7e5..c0524b6 100644 --- a/iso/empanadas/empanadas/util/iso_utils.py +++ b/iso/empanadas/empanadas/util/iso_utils.py @@ -13,6 +13,7 @@ import shlex import time import tarfile import shutil +import hashlib # lazy person's s3 parser import requests @@ -20,6 +21,7 @@ import json import xmltodict # if we can access s3 import boto3 +import kobo.shortcuts # This is for treeinfo from configparser import ConfigParser @@ -48,6 +50,7 @@ class IsoBuild: config, major, arch=None, + hfs_compat: bool = False, rc: bool = False, s3: bool = False, force_download: bool = False, @@ -108,6 +111,9 @@ class IsoBuild: if s3: self.s3 = boto3.client('s3') + # arch specific + self.hfs_compat = hfs_compat + # Templates file_loader = FileSystemLoader(f"{_rootdir}/templates") self.tmplenv = Environment(loader=file_loader) @@ -608,7 +614,7 @@ class IsoBuild: unpack_single_arch = True arches_to_unpack = [self.arch] - self._sync_boot(force_unpack=self.force_unpack, arch=self.arch) + self._sync_boot(force_unpack=self.force_unpack, arch=self.arch, image=None) self.treeinfo_write(arch=self.arch) def _sync_boot(self, force_unpack, arch, image): @@ -656,19 +662,266 @@ class IsoBuild: "", ] + # Next set of functions are loosely borrowed (in concept) from pungi. Some + # stuff may be combined/mixed together, other things may be simplified or + # reduced in nature. def build_extra_iso(self): """ Builds DVD images based on the data created from the initial lorax on each arch. This should NOT be called during the usual run() section. """ - print() def _generate_graft_points(self): """ Get a list of packages for an extras ISO. This should NOT be called during the usual run() section. """ - print() + + def _get_grafts(self): + """ + Actually get some grafts (get_iso_contents), called by generate grafts + """ + + def _write_grafts(self): + """ + Write out the graft points, called by get_grafts + """ + + def _scanning(self): + """ + Scan tree + """ + + def _merging(self): + """ + Merge tree + """ + + def _sorting(self): + """ + Sorting using the is_rpm and is_image funcs + """ + + def _is_rpm(self): + """ + Is this an RPM? :o + """ + + def _is_image(self): + """ + Is this an image? 
:o + """ + + def _get_vol_id(self): + """ + Gets a volume ID + """ + + def _get_boot_options(self, arch, createfrom, efi=True, hfs_compat=False): + """ + Gets boot options based on architecture, the iso commands are not + universal. + """ + if arch in ("armhfp",): + result = [] + return result + + if arch in ("aarch64",): + result = [ + "-eltorito-alt-boot", + "-e", + "images/efiboot.img", + "-no-emul-boot", + ] + return result + + if arch in ("i386", "i686", "x86_64"): + result = [ + "-b", + "isolinux/isolinux.bin", + "-c", + "isolinux/boot.cat", + "-no-emul-boot", + "-boot-load-size", + "4", + "-boot-info-table", + ] + + # EFI args + if arch == "x86_64": + result.extend( + [ + "-eltorito-alt-boot", + "-e", + "images/efiboot.img", + "-no-emul-boot" + ] + ) + return result + + # need to go double check if this is needed with stream 9 + if arch == "ppc64le" and hfs_compat: + result = [ + "-part", + "-hfs", + "-r", + "-l", + "-sysid", + "PPC", + "-no-desktop", + "-allow-multidot", + "-chrp-boot", + "-map", + os.path.join(createfrom, "mapping"), + "-hfs-bless", + "/ppc/mac" + ] + return result + + if arch == "ppc64le" and not hfs_compat: + result = [ + "-r", + "-l", + "-sysid", + "PPC", + "-chrp-boot", + ] + return result + + if arch in ("s390x",): + result = [ + "-eltorito-boot", + "images/cdboot.img", + "-no-emul-boot", + ] + return result + + raise ValueError("Architecture %s%s%s is NOT known" % (Color.BOLD, arch, Color.END)) + + # ALL COMMANDS # + def _get_mkisofs_cmd( + self, + iso, + paths, + appid=None, + volid=None, + volset=None, + exclude=None, + boot_args=None, + input_charset="utf-8", + grafts=None, + use_xorrisofs=False, + iso_level=None + ): + # I should hardcode this I think + #untranslated_filenames = True + #translation_table = True + #joliet = True + #joliet_long = True + #rock = True + + cmd = ["/usr/bin/xorrisofs" if use_xorrisofs else "/usr/bin/genisoimage"] + + if iso_level: + cmd.extend(["-iso-level", str(iso_level)]) + + if appid: + cmd.extend(["-appid", appid]) + + #if untranslated_filenames: + cmd.append("-untranslated-filenames") + + if volid: + cmd.extend(["-volid", volid]) + + #if joliet: + cmd.append("-J") + + #if joliet_long: + cmd.append("-joliet-long") + + if volset: + cmd.extend(["-volset", volset]) + + #if rock: + cmd.append("-rational-rock") + + #if not use_xorrisofs and translation_table: + if not use_xorrisofs: + cmd.append("-translation-table") + + if input_charset: + cmd.extend(["-input-charset", input_charset]) + + if exclude: + for i in kobo.shortcuts.force_list(exclude): + cmd.extend(["-x", i]) + + if boot_args: + cmd.extend(boot_args) + + cmd.extend(["-o", iso]) + + if grafts: + cmd.append("-graft-points") + cmd.extend(["-path-list", grafts]) + + return cmd + + def _get_implantisomd5_cmd(self, opts): + """ + Implants md5 into iso + """ + cmd = ["/usr/bin/implantisomd5", "--supported-iso", opts['iso_path']] + return cmd + + def _get_manifest_cmd(self, opts): + """ + Gets an ISO manifest + """ + return "/usr/bin/isoinfo -R -f -i %s | grep -v '/TRANS.TBL$' | sort >> %s.manifest" % ( + shlex.quote(opts['iso_name']), + shlex.quote(opts['iso_name']), + ) + + def _get_isohybrid_cmd(self, opts): + cmd = [] + if opts['arch'] == "x86_64": + cmd = ["/usr/bin/isohybrid"] + cmd.append("--uefi") + cmd.append(opts['iso_path']) + return cmd + + def _get_make_image_cmd(self, opts): + """ + Generates the command to actually make the image in the first place + """ + isokwargs = {} + isokwargs["boot_args"] = self._get_boot_options( + opts['arch'], + 
os.path.join("$TEMPLATE", "config_files/ppc"), + hfs_compat=self.hfs_compat, + ) + + if opts['arch'] in ("ppc64", "ppc64le"): + isokwargs["input_charset"] = None + + cmd = self._get_mkisofs_cmd( + opts['iso_name'], + volid=opts['volid'], + exclude=["./lost+found"], + grafts=opts['graft_points'], + use_xorrisofs=opts['use_xorrisofs'], + iso_level=opts['iso_level'], + **isokwargs + ) + return cmd + + + def _write_script(self, opts): + """ + Writes out the script to make the DVD + """ class LiveBuild: """ diff --git a/iso/empanadas/poetry.lock b/iso/empanadas/poetry.lock index 716b15a..35ab49d 100644 --- a/iso/empanadas/poetry.lock +++ b/iso/empanadas/poetry.lock @@ -22,14 +22,14 @@ tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (> [[package]] name = "boto3" -version = "1.24.12" +version = "1.24.14" description = "The AWS SDK for Python" category = "main" optional = false python-versions = ">= 3.7" [package.dependencies] -botocore = ">=1.27.12,<1.28.0" +botocore = ">=1.27.14,<1.28.0" jmespath = ">=0.7.1,<2.0.0" s3transfer = ">=0.6.0,<0.7.0" @@ -38,7 +38,7 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.27.12" +version = "1.27.14" description = "Low-level, data-driven core of boto 3." category = "main" optional = false @@ -141,6 +141,17 @@ category = "main" optional = false python-versions = ">=3.7" +[[package]] +name = "kobo" +version = "0.24.1" +description = "A pile of python modules used by Red Hat release engineering to build their tools" +category = "main" +optional = false +python-versions = ">2.6" + +[package.dependencies] +six = "*" + [[package]] name = "markupsafe" version = "2.0.1" @@ -354,7 +365,7 @@ testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest- [metadata] lock-version = "1.1" python-versions = ">=3.7,<4" -content-hash = "d011f4622c248f6aa107fd679616eaa19a897147398c6f52dd0dea0ab1d74486" +content-hash = "ccd47ad1b0819968dbad34b68c3f9afd98bd657ee639f9037731fd2a0746bd16" [metadata.files] atomicwrites = [ @@ -366,12 +377,12 @@ attrs = [ {file = "attrs-21.4.0.tar.gz", hash = "sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd"}, ] boto3 = [ - {file = "boto3-1.24.12-py3-none-any.whl", hash = "sha256:0b9757575b8003928defc5fb6e816936fa1bdb1384d0edec6622bb9fb104e96c"}, - {file = "boto3-1.24.12.tar.gz", hash = "sha256:f39b91a4c3614db8e44912ee82426fb4b16d5df2cd66883f3aff6f76d7f5d310"}, + {file = "boto3-1.24.14-py3-none-any.whl", hash = "sha256:490f5e88f5551b33ae3019a37412158b76426d63d1fb910968ade9b6a024e5fe"}, + {file = "boto3-1.24.14.tar.gz", hash = "sha256:e284705da36faa668c715ae1f74ebbff4320dbfbe3a733df3a8ab076d1ed1226"}, ] botocore = [ - {file = "botocore-1.27.12-py3-none-any.whl", hash = "sha256:b8ac156e55267da6e728ea0b806bfcd97adf882801cffe7849c4b88ce4780326"}, - {file = "botocore-1.27.12.tar.gz", hash = "sha256:17d3ec9f684d21e06b64d9cb224934557bcd95031e2ecb551bf16271e8722fec"}, + {file = "botocore-1.27.14-py3-none-any.whl", hash = "sha256:df1e9b208ff93daac7c645b0b04fb6dccd7f20262eae24d87941727025cbeece"}, + {file = "botocore-1.27.14.tar.gz", hash = "sha256:bb56fa77b8fa1ec367c2e16dee62d60000451aac5140dcce3ebddc167fd5c593"}, ] certifi = [ {file = "certifi-2022.6.15-py3-none-any.whl", hash = "sha256:fe86415d55e84719d75f8b69414f6438ac3547d2078ab91b67e779ef69378412"}, @@ -405,6 +416,9 @@ jmespath = [ {file = "jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980"}, {file = "jmespath-1.0.1.tar.gz", hash = 
"sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"}, ] +kobo = [ + {file = "kobo-0.24.1.tar.gz", hash = "sha256:d5a30cc20c323f3e9d9b4b2e511650c4b98929b88859bd8cf57463876686e407"}, +] markupsafe = [ {file = "MarkupSafe-2.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d8446c54dc28c01e5a2dbac5a25f071f6653e6e40f3a8818e8b45d790fe6ef53"}, {file = "MarkupSafe-2.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:36bc903cbb393720fad60fc28c10de6acf10dc6cc883f3e24ee4012371399a38"}, diff --git a/iso/empanadas/pyproject.toml b/iso/empanadas/pyproject.toml index fb44274..8755e18 100644 --- a/iso/empanadas/pyproject.toml +++ b/iso/empanadas/pyproject.toml @@ -15,6 +15,7 @@ importlib-resources = "^5.8.0" boto3 = "^1.24.12" xmltodict = "^0.13.0" requests = "^2.28.0" +kobo = "^0.24.1" [tool.poetry.dev-dependencies] pytest = "~5" From 12bb69379b17995afa640b236fa7de6034ec2f1c Mon Sep 17 00:00:00 2001 From: nazunalika Date: Tue, 21 Jun 2022 18:03:43 -0700 Subject: [PATCH 38/64] attempted func corrections --- func/common/exports.sh | 2 +- func/core/pkg_archive/00-install-formats.sh | 2 +- func/core/pkg_archive/21-gzip-test.sh | 8 ++++++-- func/core/pkg_lsb/00-install-lsb.sh | 5 +++++ func/core/pkg_lsb/10-test-branding.sh | 5 +++++ func/core/pkg_network/00-install-packages.sh | 6 +++++- func/skip.list | 3 +++ 7 files changed, 26 insertions(+), 5 deletions(-) diff --git a/func/common/exports.sh b/func/common/exports.sh index 4ed75bc..2bb991f 100644 --- a/func/common/exports.sh +++ b/func/common/exports.sh @@ -11,5 +11,5 @@ export readonly RELEASE_NAME=rocky # A 0 means it was successful. It can be changed to 1 on failure. export IPAINSTALLED=0 -LOGFILE="./log/$(date +'%m-%d-%Y')-tests.log" +LOGFILE="$(pwd)/log/$(date +'%m-%d-%Y')-tests.log" export LOGFILE diff --git a/func/core/pkg_archive/00-install-formats.sh b/func/core/pkg_archive/00-install-formats.sh index 6525314..f7c8d77 100755 --- a/func/core/pkg_archive/00-install-formats.sh +++ b/func/core/pkg_archive/00-install-formats.sh @@ -2,4 +2,4 @@ r_log "archive" "Installing appropriate archive formats" # We might need expect for zmore - does anyone actually use zmore? 
-p_installPackageNormal bzip2 diffutils gzip less ncompress tar unzip util-linux-ng zip lzop +p_installPackageNormal bzip2 diffutils gzip less tar unzip util-linux-ng zip lzop diff --git a/func/core/pkg_archive/21-gzip-test.sh b/func/core/pkg_archive/21-gzip-test.sh index ce4a440..a19c96d 100755 --- a/func/core/pkg_archive/21-gzip-test.sh +++ b/func/core/pkg_archive/21-gzip-test.sh @@ -89,8 +89,12 @@ gzip $FILE $FILE.1 || r_checkExitStatus 1 r_log "archive" "Verify that .Z files can be handled" gunzip $FILE.gz ls -l /var/tmp >> $FILE -compress $FILE || r_checkExitStatus 1 -gunzip $FILE.Z || r_checkExitStatus 1 +if [ "$RL_VER" -eq 8 ]; then + compress $FILE || r_checkExitStatus 1 + gunzip $FILE.Z || r_checkExitStatus 1 +else + r_log "archive" "Skipping for 9" +fi # handle some zip files r_log "archive" "Verify that .zip files can be handled" diff --git a/func/core/pkg_lsb/00-install-lsb.sh b/func/core/pkg_lsb/00-install-lsb.sh index 452dd70..3733c79 100755 --- a/func/core/pkg_lsb/00-install-lsb.sh +++ b/func/core/pkg_lsb/00-install-lsb.sh @@ -1,3 +1,8 @@ #!/bin/bash r_log "lsb" "Install LSB package" +if [ "$RL_VER" -ge 8 ]; then + r_log "lsb" "redhat-lsb is not in EL9" + exit $PASS +fi + p_installPackageNormal redhat-lsb diff --git a/func/core/pkg_lsb/10-test-branding.sh b/func/core/pkg_lsb/10-test-branding.sh index 9be22cb..229fbcb 100755 --- a/func/core/pkg_lsb/10-test-branding.sh +++ b/func/core/pkg_lsb/10-test-branding.sh @@ -1,5 +1,10 @@ #!/bin/bash r_log "lsb" "Test LSB branding" +if [ "$RL_VER" -ge 8 ]; then + r_log "lsb" "redhat-lsb is not in EL9" + exit $PASS +fi + lsb_release -i | grep -q "Rocky" r_checkExitStatus $? lsb_release -d | grep -q "Rocky" diff --git a/func/core/pkg_network/00-install-packages.sh b/func/core/pkg_network/00-install-packages.sh index 964c071..df109be 100755 --- a/func/core/pkg_network/00-install-packages.sh +++ b/func/core/pkg_network/00-install-packages.sh @@ -1,3 +1,7 @@ #!/bin/bash r_log "network" "Install necessary network packages and utilities" -p_installPackageNormal traceroute iputils iproute mtr arpwatch psmisc net-tools which iptraf +pkgs=(traceroute iputils iproute mtr psmisc net-tools which iptraf) +if [ "$RL_VER" -eq 8 ]; then + pkgs+=( arpwatch ) +fi +p_installPackageNormal "${pkgs[@]}" diff --git a/func/skip.list b/func/skip.list index ccddc50..2ebe6ab 100644 --- a/func/skip.list +++ b/func/skip.list @@ -10,9 +10,12 @@ # -> Must be a URL to bugs.rl.o, a github issue number, or a code, such as: # * NEEDINFO # * NOTREADY +# * NOPKG 8|./core/pkg_archive/26-zmore.sh|nazunalika|NEEDINFO 8|./core/pkg_nfs/12-prepare-autofs.sh|nazunalika|NEEDINFO 8|./core/pkg_diffutils/00-install-diff.sh|nazunalika|NOTREADY 8|./core/pkg_snmp/12-test-snmp-3.sh|nazunalika|NOTWORKING 8|./core/pkg_samba/00-install-samba.sh|nazunalika|NOTWORKING 8|./core/pkg_samba/10-test-samba.sh|nazunalika|NOTWORKING +9|./core/pkg_archive/27-znew.sh|nazunalika|NOPKG +9|./core/pkg_network/30-test-arpwatch.sh|nazunalika|NOPKG From ba9126be5f0ddc2b892244eced95144e4d79dffb Mon Sep 17 00:00:00 2001 From: nazunalika Date: Tue, 21 Jun 2022 18:21:01 -0700 Subject: [PATCH 39/64] try to fix a few more tests --- func/core/pkg_httpd/10-httpd-branding.sh | 2 +- func/core/pkg_postgresql/01-configure-postgresql.sh | 2 +- func/core/pkg_release/20-check-gpg-keys.sh | 4 ++-- func/core/pkg_shadow-utils/30-group-tests.sh | 2 +- func/core/pkg_tftp-server/01-configure-tftp.sh | 5 ++++- 5 files changed, 9 insertions(+), 6 deletions(-) diff --git a/func/core/pkg_httpd/10-httpd-branding.sh 
b/func/core/pkg_httpd/10-httpd-branding.sh index 7508411..cbfe6ca 100755 --- a/func/core/pkg_httpd/10-httpd-branding.sh +++ b/func/core/pkg_httpd/10-httpd-branding.sh @@ -2,7 +2,7 @@ r_log "httpd" "Verify httpd branding" r_log "httpd" "Token" -curl -sI http://localhost/ | grep -i "Server:\ Apache.*\ (Rocky)" > /dev/null 2>&1 +curl -sI http://localhost/ | grep -i "Server:\ Apache.*\ (Rocky Linux)" > /dev/null 2>&1 r_checkExitStatus $? r_log "httpd" "index" diff --git a/func/core/pkg_postgresql/01-configure-postgresql.sh b/func/core/pkg_postgresql/01-configure-postgresql.sh index e84e765..0f1cb43 100755 --- a/func/core/pkg_postgresql/01-configure-postgresql.sh +++ b/func/core/pkg_postgresql/01-configure-postgresql.sh @@ -1,5 +1,5 @@ #!/bin/bash r_log "postgresql" "Initialize postgresql" postgresql-setup --initdb -m_serviceCycler postgresql-server cycle +m_serviceCycler postgresql cycle sleep 15 diff --git a/func/core/pkg_release/20-check-gpg-keys.sh b/func/core/pkg_release/20-check-gpg-keys.sh index d99b178..9a30354 100755 --- a/func/core/pkg_release/20-check-gpg-keys.sh +++ b/func/core/pkg_release/20-check-gpg-keys.sh @@ -4,8 +4,8 @@ if [ "$RL_VER" -eq 8 ]; then file /etc/pki/rpm-gpg/RPM-GPG-KEY-rockyofficial > /dev/null 2>&1 && \ file /etc/pki/rpm-gpg/RPM-GPG-KEY-rockytesting > /dev/null 2>&1 else - file "/etc/pki/rpm-gpg/RPM-GPG-KEY-Rocky-${RL_VER}" > /ev/null 2>&1 && \ - file "/etc/pki/rpm-gpg/RPM-GPG-KEY-Rocky-${RL_VER}-Testing" > /ev/null 2>&1 + file "/etc/pki/rpm-gpg/RPM-GPG-KEY-Rocky-${RL_VER}" > /dev/null 2>&1 && \ + file "/etc/pki/rpm-gpg/RPM-GPG-KEY-Rocky-${RL_VER}-Testing" > /dev/null 2>&1 fi r_checkExitStatus $? diff --git a/func/core/pkg_shadow-utils/30-group-tests.sh b/func/core/pkg_shadow-utils/30-group-tests.sh index 4c30bba..b2e502d 100755 --- a/func/core/pkg_shadow-utils/30-group-tests.sh +++ b/func/core/pkg_shadow-utils/30-group-tests.sh @@ -110,6 +110,6 @@ r_log "shadow" "Test sg" sg onyxuser "touch /var/tmp/onyxsg" r_checkExitStatus $? r_log "shadow" "Verify sg worked" -stat --format="%U" /var/tmp/onyxsg | grep -q onyxuser +stat --format="%G" /var/tmp/onyxsg | grep -q onyxuser r_checkExitStatus $? rm /var/tmp/onyxsg diff --git a/func/core/pkg_tftp-server/01-configure-tftp.sh b/func/core/pkg_tftp-server/01-configure-tftp.sh index 7766de2..117d8ad 100755 --- a/func/core/pkg_tftp-server/01-configure-tftp.sh +++ b/func/core/pkg_tftp-server/01-configure-tftp.sh @@ -1,7 +1,8 @@ #!/bin/bash r_log "tftp" "Configure tftp" -cat < /etc/xinetd.d/tftp +if [ "$RL_VER" -eq 8 ]; then + cat < /etc/xinetd.d/tftp service tftp { socket_type = dgram @@ -17,4 +18,6 @@ service tftp } EOF +fi + m_serviceCycler tftp.socket start From 5667a75fa22d63df26e9be1a5e183a48437c925c Mon Sep 17 00:00:00 2001 From: nazunalika Date: Tue, 21 Jun 2022 20:29:07 -0700 Subject: [PATCH 40/64] fix os release detection --- func/core/pkg_release/30-os-release.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/func/core/pkg_release/30-os-release.sh b/func/core/pkg_release/30-os-release.sh index 7b6b61d..65b9ca7 100755 --- a/func/core/pkg_release/30-os-release.sh +++ b/func/core/pkg_release/30-os-release.sh @@ -5,7 +5,7 @@ r_log "rocky" "Verify support directives" for s in NAME=\"Rocky\ Linux\" \ ID=\"rocky\" \ ROCKY_SUPPORT_PRODUCT=\"Rocky-Linux-$RL_VER\" \ - ROCKY_SUPPORT_PRODUCT_VERSION=\"$RL_VER\"; do + ROCKY_SUPPORT_PRODUCT_VERSION=\"$RL_VER\..*\"; do if ! 
grep -q "$s" /etc/os-release; then r_log "rocky" "Missing string ($s) in /etc/os-release" r_checkExitStatus 1 From ab7c36e02c99543760ec7c33ead10ce0fc87fdae Mon Sep 17 00:00:00 2001 From: nazunalika Date: Tue, 21 Jun 2022 23:03:08 -0700 Subject: [PATCH 41/64] update extra files --- iso/empanadas/empanadas/util/dnf_utils.py | 102 +++++++++++++++++++++- 1 file changed, 99 insertions(+), 3 deletions(-) diff --git a/iso/empanadas/empanadas/util/dnf_utils.py b/iso/empanadas/empanadas/util/dnf_utils.py index 079b45f..5471abe 100644 --- a/iso/empanadas/empanadas/util/dnf_utils.py +++ b/iso/empanadas/empanadas/util/dnf_utils.py @@ -10,6 +10,7 @@ import os import os.path import subprocess import shlex +import shutil import time import re import json @@ -41,6 +42,7 @@ class RepoSync: ignore_debug: bool = False, ignore_source: bool = False, repoclosure: bool = False, + refresh_extra_files: bool = False, skip_all: bool = False, hashed: bool = False, parallel: bool = False, @@ -60,6 +62,7 @@ class RepoSync: self.skip_all = skip_all self.hashed = hashed self.repoclosure = repoclosure + self.refresh_extra_files = refresh_extra_files # Enables podman syncing, which should effectively speed up operations self.parallel = parallel # Relevant config items @@ -117,6 +120,11 @@ class RepoSync: "work/logs" ) + self.compose_global_work_dir = os.path.join( + self.compose_latest_dir, + "work/global" + ) + # This is temporary for now. if logger is None: self.log = logging.getLogger("reposync") @@ -186,6 +194,11 @@ class RepoSync: self.date_stamp ) + global_work_root = os.path.join( + work_root, + "global", + ) + if self.dryrun: self.log.error('Dry Runs are not supported just yet. Sorry!') raise SystemExit() @@ -193,12 +206,15 @@ class RepoSync: self.sync(self.repo, sync_root, work_root, log_root, self.arch) if self.fullrun: - self.deploy_extra_files() + self.deploy_extra_files(global_work_root) self.symlink_to_latest(generated_dir) if self.repoclosure: self.repoclosure_work(sync_root, work_root, log_root) + if self.refresh_extra_files: + self.deploy_extra_files(global_work_root) + self.log.info('Compose repo directory: %s' % sync_root) self.log.info('Compose logs: %s' % log_root) self.log.info('Compose completed.') @@ -240,10 +256,14 @@ class RepoSync: bad_exit_list = [] self.log.info('Generating container entries') entries_dir = os.path.join(work_root, "entries") + global_work_root = os.path.join(work_root, "global") if not os.path.exists(entries_dir): os.makedirs(entries_dir, exist_ok=True) # yeah, I know. + if not os.path.exists(global_work_root): + os.makedirs(global_work_root, exist_ok=True) + if not os.path.exists(log_root): os.makedirs(log_root, exist_ok=True) @@ -648,6 +668,22 @@ class RepoSync: ) return cmd + def git_cmd(self) -> str: + """ + This generates the git command. This is when we need to pull down extra + files or do work from a git repository. + """ + cmd = None + if os.path.exists("/usr/bin/git"): + cmd = "/usr/bin/git" + else: + self.log.error('/usr/bin/git was not found. Good bye.') + raise SystemExit("\n\n/usr/bin/git was not found.\n\nPlease " + " ensure that you have installed the necessary packages on " + " this system. 
" + ) + return cmd + def repoclosure_work(self, sync_root, work_root, log_root): """ This is where we run repoclosures, based on the configuration of each @@ -806,7 +842,7 @@ class RepoSync: for issue in bad_exit_list: self.log.error(issue) - def deploy_extra_files(self): + def deploy_extra_files(self, global_work_dir): """ deploys extra files based on info of rlvars including a extra_files.json @@ -814,7 +850,60 @@ class RepoSync: also deploys COMPOSE_ID and maybe in the future a metadata dir with a bunch of compose-esque stuff. """ - self.log.info('Deploying extra files...') + cmd = self.git_cmd() + tmpclone = '/tmp/clone' + extra_files_dir = os.path.join( + global_work_dir, + 'extra-files' + ) + self.log.info( + '[' + Color.BOLD + Color.GREEN + 'INFO' + Color.END + '] ' + + 'Deploying extra files to work directory ...' + ) + + if not os.path.exists(global_work_dir): + os.makedirs(global_work_dir, exist_ok=True) + + clonecmd = '{} clone {} -b {} -q {}'.format( + cmd, + self.extra_files['git_repo'], + self.extra_files['branch'], + tmpclone + ) + + git_clone = subprocess.Popen( + clonecmd, + stdout=subprocess.DEVNULL, + stderr=subprocess.DEVNULL, + shell=True + ) + + # Copy files + for extra in self.extra_files['list']: + src = '/tmp/clone/' + extra + try: + shutil.copy2(src, extra_files_dir) + except: + self.log.warn( + '[' + Color.BOLD + Color.YELLOW + 'WARN' + Color.END + '] ' + + 'Extra file not copied: ' + src + ) + + try: + shutil.rmtree(tmpclone) + except OSError as e: + self.log.error( + '[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' + + 'Directory ' + tmpclone + ' could not be removed: ' + + e.strerror + ) + + # Create metadata here? + + self.log.info( + '[' + Color.BOLD + Color.GREEN + 'INFO' + Color.END + '] ' + + 'Extra files phase completed.' + ) class SigRepoSync: """ @@ -831,6 +920,7 @@ class SigRepoSync: arch=None, ignore_source: bool = False, repoclosure: bool = False, + refresh_extra_files: bool = False, skip_all: bool = False, hashed: bool = False, parallel: bool = False, @@ -847,6 +937,7 @@ class SigRepoSync: self.skip_all = skip_all self.hashed = hashed self.repoclosure = repoclosure + self.refresh_extra_files = refresh_extra_files # Enables podman syncing, which should effectively speed up operations self.parallel = parallel # Relevant config items @@ -894,6 +985,11 @@ class SigRepoSync: "work/logs" ) + self.compose_global_work_dir = os.path.join( + self.compose_latest_dir, + "work/global" + ) + # This is temporary for now. 
if logger is None: self.log = logging.getLogger("sigreposync") From fdbfd840a0c6b00406cbd216a301c6f812a5a670 Mon Sep 17 00:00:00 2001 From: nazunalika Date: Tue, 21 Jun 2022 23:10:25 -0700 Subject: [PATCH 42/64] add extra file refresh to primary script --- iso/empanadas/empanadas/scripts/sync_from_peridot.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/iso/empanadas/empanadas/scripts/sync_from_peridot.py b/iso/empanadas/empanadas/scripts/sync_from_peridot.py index e0fe0b4..5ffdc40 100755 --- a/iso/empanadas/empanadas/scripts/sync_from_peridot.py +++ b/iso/empanadas/empanadas/scripts/sync_from_peridot.py @@ -25,6 +25,7 @@ parser.add_argument('--hashed', action='store_true') parser.add_argument('--dry-run', action='store_true') parser.add_argument('--full-run', action='store_true') parser.add_argument('--no-fail', action='store_true') +parser.add_argument('--refresh-extra-files', action='store_true') # I am aware this is confusing, I want podman to be the default option parser.add_argument('--simple', action='store_false') parser.add_argument('--logger', type=str) @@ -52,7 +53,8 @@ a = RepoSync( dryrun=results.dry_run, fullrun=results.full_run, nofail=results.no_fail, - logger=results.logger + logger=results.logger, + refresh_extra_files=results.refresh_extra_files, ) def run(): From 08f7a44cc1fe8e853153e740238c343dde229113 Mon Sep 17 00:00:00 2001 From: nazunalika Date: Tue, 21 Jun 2022 23:23:36 -0700 Subject: [PATCH 43/64] fix subprocess --- iso/empanadas/empanadas/util/dnf_utils.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/iso/empanadas/empanadas/util/dnf_utils.py b/iso/empanadas/empanadas/util/dnf_utils.py index 5471abe..2d28436 100644 --- a/iso/empanadas/empanadas/util/dnf_utils.py +++ b/iso/empanadas/empanadas/util/dnf_utils.py @@ -871,11 +871,10 @@ class RepoSync: tmpclone ) - git_clone = subprocess.Popen( - clonecmd, + git_clone = subprocess.call( + shlex.split(clonecmd), stdout=subprocess.DEVNULL, - stderr=subprocess.DEVNULL, - shell=True + stderr=subprocess.DEVNULL ) # Copy files From 9463f96381f72c5b8efa817fb924655c2d5da69f Mon Sep 17 00:00:00 2001 From: nazunalika Date: Tue, 21 Jun 2022 23:51:00 -0700 Subject: [PATCH 44/64] var had incorrect name for extra files --- iso/empanadas/empanadas/util/dnf_utils.py | 29 ++++++++++++++--------- 1 file changed, 18 insertions(+), 11 deletions(-) diff --git a/iso/empanadas/empanadas/util/dnf_utils.py b/iso/empanadas/empanadas/util/dnf_utils.py index 2d28436..2d1e429 100644 --- a/iso/empanadas/empanadas/util/dnf_utils.py +++ b/iso/empanadas/empanadas/util/dnf_utils.py @@ -120,7 +120,7 @@ class RepoSync: "work/logs" ) - self.compose_global_work_dir = os.path.join( + self.compose_global_work_root = os.path.join( self.compose_latest_dir, "work/global" ) @@ -203,7 +203,7 @@ class RepoSync: self.log.error('Dry Runs are not supported just yet. Sorry!') raise SystemExit() - self.sync(self.repo, sync_root, work_root, log_root, self.arch) + self.sync(self.repo, sync_root, work_root, log_root, global_work_root, self.arch) if self.fullrun: self.deploy_extra_files(global_work_root) @@ -219,7 +219,7 @@ class RepoSync: self.log.info('Compose logs: %s' % log_root) self.log.info('Compose completed.') - def sync(self, repo, sync_root, work_root, log_root, arch=None): + def sync(self, repo, sync_root, work_root, log_root, global_work_root, arch=None): """ Calls out syncing of the repos. 
We generally sync each component of a repo: @@ -230,7 +230,7 @@ class RepoSync: If parallel is true, we will run in podman. """ if self.parallel: - self.podman_sync(repo, sync_root, work_root, log_root, arch) + self.podman_sync(repo, sync_root, work_root, log_root, global_work_root, arch) else: self.dnf_sync(repo, sync_root, work_root, arch) @@ -242,7 +242,15 @@ class RepoSync: self.log.error('Please install podman and enable parallel') raise SystemExit() - def podman_sync(self, repo, sync_root, work_root, log_root, arch): + def podman_sync( + self, + repo, + sync_root, + work_root, + log_root, + global_work_root, + arch + ): """ This is for podman syncs @@ -256,7 +264,6 @@ class RepoSync: bad_exit_list = [] self.log.info('Generating container entries') entries_dir = os.path.join(work_root, "entries") - global_work_root = os.path.join(work_root, "global") if not os.path.exists(entries_dir): os.makedirs(entries_dir, exist_ok=True) @@ -842,7 +849,7 @@ class RepoSync: for issue in bad_exit_list: self.log.error(issue) - def deploy_extra_files(self, global_work_dir): + def deploy_extra_files(self, global_work_root): """ deploys extra files based on info of rlvars including a extra_files.json @@ -853,7 +860,7 @@ class RepoSync: cmd = self.git_cmd() tmpclone = '/tmp/clone' extra_files_dir = os.path.join( - global_work_dir, + global_work_root, 'extra-files' ) self.log.info( @@ -861,8 +868,8 @@ class RepoSync: 'Deploying extra files to work directory ...' ) - if not os.path.exists(global_work_dir): - os.makedirs(global_work_dir, exist_ok=True) + if not os.path.exists(extra_files_dir): + os.makedirs(extra_files_dir, exist_ok=True) clonecmd = '{} clone {} -b {} -q {}'.format( cmd, @@ -984,7 +991,7 @@ class SigRepoSync: "work/logs" ) - self.compose_global_work_dir = os.path.join( + self.compose_global_work_root = os.path.join( self.compose_latest_dir, "work/global" ) From 2e5c4778f47cf247ac91584a3d45803616428789 Mon Sep 17 00:00:00 2001 From: nazunalika Date: Thu, 23 Jun 2022 13:12:53 -0700 Subject: [PATCH 45/64] Update Configs + Extra ISO Build Progress * Configs updated to deal with repo and structure * Add graft point process * Start local and podman process for extra ISO builds --- iso/empanadas/empanadas/configs/el8.yaml | 34 ++ iso/empanadas/empanadas/configs/el9-beta.yaml | 8 +- iso/empanadas/empanadas/configs/el9.yaml | 8 +- iso/empanadas/empanadas/configs/el9lh.yaml | 8 +- .../empanadas/scripts/build_iso_extra.py | 37 ++ ...{mkiso.tmpl.sh => buildExtraImage.tmpl.sh} | 10 + .../empanadas/templates/extraisobuild.tmpl.sh | 49 +++ .../empanadas/templates/isobuild.tmpl.sh | 2 +- .../empanadas/templates/repoconfig.tmpl | 6 + iso/empanadas/empanadas/util/dnf_utils.py | 9 +- iso/empanadas/empanadas/util/iso_utils.py | 329 ++++++++++++++++-- iso/empanadas/pyproject.toml | 1 + 12 files changed, 466 insertions(+), 35 deletions(-) create mode 100755 iso/empanadas/empanadas/scripts/build_iso_extra.py rename iso/empanadas/empanadas/templates/{mkiso.tmpl.sh => buildExtraImage.tmpl.sh} (50%) create mode 100644 iso/empanadas/empanadas/templates/extraisobuild.tmpl.sh diff --git a/iso/empanadas/empanadas/configs/el8.yaml b/iso/empanadas/empanadas/configs/el8.yaml index 032a9de..5cf0ed1 100644 --- a/iso/empanadas/empanadas/configs/el8.yaml +++ b/iso/empanadas/empanadas/configs/el8.yaml @@ -5,6 +5,7 @@ rclvl: 'RC2' major: '8' minor: '6' + bugurl: 'https://bugs.rockylinux.org' allowed_arches: - x86_64 - aarch64 @@ -64,6 +65,34 @@ - 'isomd5sum' - 'lorax-templates-rhel' - 'lorax-templates-generic' + structure: + 
packages: 'os/Packages' + repodata: 'os/repodata' + iso_map: + hosts: + x86_64: '' + aarch64: '' + images: + dvd: + repos: + - 'BaseOS' + - 'AppStream' + lorax_variants: + - dvd + - minimal + - BaseOS + repos: + - 'BaseOS' + - 'AppStream' + variant: 'BaseOS' + lorax_removes: + - 'libreport-rhel-anaconda-bugzilla' + required_pkgs: + - 'lorax' + - 'genisoimage' + - 'isomd5sum' + - 'lorax-templates-rhel' + - 'lorax-templates-generic' repoclosure_map: arches: x86_64: '--arch=x86_64 --arch=athlon --arch=i686 --arch=i586 --arch=i486 --arch=i386 --arch=noarch' @@ -91,10 +120,15 @@ - AppStream extra_files: git_repo: 'https://git.rockylinux.org/staging/src/rocky-release.git' + git_raw_path: 'https://git.rockylinux.org/staging/src/rocky-release/-/raw/r8/' branch: 'r8' + gpg: + stable: 'SOURCES/RPM-GPG-KEY-rockyofficial' + testing: 'SOURCES/RPM-GPG-KEY-rockytesting' list: - 'SOURCES/COMMUNITY-CHARTER' - 'SOURCES/EULA' - 'SOURCES/LICENSE' - 'SOURCES/RPM-GPG-KEY-rockyofficial' + - 'SOURCES/RPM-GPG-KEY-rockytesting' ... diff --git a/iso/empanadas/empanadas/configs/el9-beta.yaml b/iso/empanadas/empanadas/configs/el9-beta.yaml index e1656c2..f8bd8a6 100644 --- a/iso/empanadas/empanadas/configs/el9-beta.yaml +++ b/iso/empanadas/empanadas/configs/el9-beta.yaml @@ -47,6 +47,9 @@ has_modules: - 'AppStream' - 'CRB' + structure: + packages: 'os/Packages' + repodata: 'os/repodata' iso_map: hosts: x86_64: '' @@ -54,7 +57,10 @@ ppc64le: '' s390x: '' images: - - dvd + dvd: + repos: + - 'BaseOS' + - 'AppStream' lorax_variants: - dvd - minimal diff --git a/iso/empanadas/empanadas/configs/el9.yaml b/iso/empanadas/empanadas/configs/el9.yaml index 3aeddb9..a13c859 100644 --- a/iso/empanadas/empanadas/configs/el9.yaml +++ b/iso/empanadas/empanadas/configs/el9.yaml @@ -47,6 +47,9 @@ has_modules: - 'AppStream' - 'CRB' + structure: + packages: 'os/Packages' + repodata: 'os/repodata' iso_map: hosts: x86_64: '' @@ -54,7 +57,10 @@ ppc64le: '' s390x: '' images: - - dvd + dvd: + repos: + - 'BaseOS' + - 'AppStream' lorax_variants: - dvd - minimal diff --git a/iso/empanadas/empanadas/configs/el9lh.yaml b/iso/empanadas/empanadas/configs/el9lh.yaml index 7029020..680a6c1 100644 --- a/iso/empanadas/empanadas/configs/el9lh.yaml +++ b/iso/empanadas/empanadas/configs/el9lh.yaml @@ -47,6 +47,9 @@ has_modules: - 'AppStream' - 'CRB' + structure: + packages: 'os/Packages' + repodata: 'os/repodata' iso_map: hosts: x86_64: '' @@ -54,7 +57,10 @@ ppc64le: '' s390x: '' images: - - dvd + dvd: + repos: + - 'BaseOS' + - 'AppStream' lorax_variants: - dvd - minimal diff --git a/iso/empanadas/empanadas/scripts/build_iso_extra.py b/iso/empanadas/empanadas/scripts/build_iso_extra.py new file mode 100755 index 0000000..b33b202 --- /dev/null +++ b/iso/empanadas/empanadas/scripts/build_iso_extra.py @@ -0,0 +1,37 @@ +# builds ISO's + +import argparse + +from empanadas.common import * +from empanadas.util import Checks +from empanadas.util import IsoBuild + +parser = argparse.ArgumentParser(description="ISO Compose") + +parser.add_argument('--release', type=str, help="Major Release Version", required=True) +parser.add_argument('--rc', action='store_true', help="Release Candidate") +parser.add_argument('--arch', type=str, help="Architecture") +parser.add_argument('--isolation', type=str, help="Mock Isolation") +parser.add_argument('--local-compose', action='store_true', help="Compose Directory is Here") +parser.add_argument('--logger', type=str) +parser.add_argument('--extra-iso', type=str, help="Granular choice in which iso is built") 
+parser.add_argument('--extra-iso-mode', type=str) +results = parser.parse_args() +rlvars = rldict[results.release] +major = rlvars['major'] + +a = IsoBuild( + rlvars, + config, + major=major, + rc=results.rc, + arch=results.arch, + isolation=results.isolation, + extra_iso=results.extra_iso, + extra_iso_mode=results.extra_iso_mode, + compose_dir_is_here=results.local_compose, + logger=results.logger, +) + +def run(): + a.run_build_extra_iso() diff --git a/iso/empanadas/empanadas/templates/mkiso.tmpl.sh b/iso/empanadas/empanadas/templates/buildExtraImage.tmpl.sh similarity index 50% rename from iso/empanadas/empanadas/templates/mkiso.tmpl.sh rename to iso/empanadas/empanadas/templates/buildExtraImage.tmpl.sh index 7f78fc2..623da54 100644 --- a/iso/empanadas/empanadas/templates/mkiso.tmpl.sh +++ b/iso/empanadas/empanadas/templates/buildExtraImage.tmpl.sh @@ -1,6 +1,10 @@ #!/bin/bash set -ex +{% if inside_podman %} +mkdir /builddor +{% endif %} + cd /builddir if ! TEMPLATE="$($(head -n1 $(which lorax) | cut -c3-) -c 'import pylorax; print(pylorax.find_templates())')"; then @@ -14,3 +18,9 @@ fi {{ implantmd5 }} {{ make_manifest }} + +{% if inside_podman %} +mkdir -p {{ compose_work_iso_dir }}/{{ arch }} +cp /builddir/*.iso {{ compose_work_iso_dir }}/{{ arch }} +cp /builddir/*.iso.manifest {{ compose_work_iso_dir }}/{{ arch }} +{% endif %} diff --git a/iso/empanadas/empanadas/templates/extraisobuild.tmpl.sh b/iso/empanadas/empanadas/templates/extraisobuild.tmpl.sh new file mode 100644 index 0000000..29ddfa0 --- /dev/null +++ b/iso/empanadas/empanadas/templates/extraisobuild.tmpl.sh @@ -0,0 +1,49 @@ +#!/bin/bash +# This is a template that is used to build extra ISO's for Rocky Linux. Only +# under extreme circumstances should you be filling this out and running +# manually. + +# Vars +MOCK_CFG="/var/tmp/lorax-{{ major }}.cfg" +MOCK_ROOT="/var/lib/mock/{{ shortname|lower }}-{{ major }}-{{ arch }}" +MOCK_RESL="${MOCK_ROOT}/result" +MOCK_CHRO="${MOCK_ROOT}/root" +MOCK_LOG="${MOCK_RESL}/mock-output.log" +IMAGE_SCR="/var/tmp/buildExtraImage.sh" +IMAGE_ISO="{{ shortname }}-{{ major }}.{{ minor }}{{ rc }}-{{ arch }}-dvd{{ discnum|default('1') }}.iso" +ISOLATION="{{ isolation }}" +BUILDDIR="{{ builddir }}" + +# Init the container +mock \ + -r "${MOCK_CFG}" \ + --isolation="${ISOLATION}" \ + --enable-network \ + --init + +init_ret_val=$? +if [ $init_ret_val -ne 0 ]; then + echo "!! MOCK INIT FAILED !!" + exit 1 +fi + +mkdir -p "${MOCK_RESL}" +cp "${IMAGE_SCR}" "${MOCK_CHRO}${IMAGE_SCR}" + +mock \ + -r "${MOCK_CFG}" \ + --shell \ + --isolation="${ISOLATION}" \ + --enable-network -- /bin/bash "${IMAGE_SCR}" | tee -a "${MOCK_LOG}" + +mock_ret_val=$? +if [ $mock_ret_val -eq 0 ]; then + # Copy resulting data to /var/lib/mock/{{ shortname|lower }}-{{ major }}-{{ arch }}/result + mkdir -p "${MOCK_RESL}" + cp "${MOCK_CHRO}${BUILDDIR}/${IMAGE_ISO}" "${MOCK_RESL}" +else + echo "!! EXTRA ISO RUN FAILED !!" + exit 1 +fi + +# Clean up? diff --git a/iso/empanadas/empanadas/templates/isobuild.tmpl.sh b/iso/empanadas/empanadas/templates/isobuild.tmpl.sh index bb4d410..d11a2f1 100644 --- a/iso/empanadas/empanadas/templates/isobuild.tmpl.sh +++ b/iso/empanadas/empanadas/templates/isobuild.tmpl.sh @@ -33,7 +33,7 @@ mock \ -r "${MOCK_CFG}" \ --shell \ --isolation="${ISOLATION}" \ - --enable-network -- /bin/bash /var/tmp/buildImage.sh | tee -a "${MOCK_LOG}" + --enable-network -- /bin/bash "${LORAX_SCR}" | tee -a "${MOCK_LOG}" mock_ret_val=$? 
if [ $mock_ret_val -eq 0 ]; then diff --git a/iso/empanadas/empanadas/templates/repoconfig.tmpl b/iso/empanadas/empanadas/templates/repoconfig.tmpl index ce57623..d322929 100644 --- a/iso/empanadas/empanadas/templates/repoconfig.tmpl +++ b/iso/empanadas/empanadas/templates/repoconfig.tmpl @@ -4,17 +4,23 @@ name={{repo.name}} baseurl={{ repo.baseurl }} enabled=1 gpgcheck=1 +repo_gpgcheck=1 +gpgkey={{ repo.gpgkey }} [{{ repo.name }}-debug] name={{repo.name}} baseurl={{ repo.baseurl }}-debug enabled=1 gpgcheck=1 +repo_gpgcheck=1 +gpgkey={{ repo.gpgkey }} [{{ repo.name }}-source] name={{repo.name}} baseurl={{ repo.srcbaseurl }} enabled=1 gpgcheck=1 +repo_gpgcheck=1 +gpgkey={{ repo.gpgkey }} {% endfor %} diff --git a/iso/empanadas/empanadas/util/dnf_utils.py b/iso/empanadas/empanadas/util/dnf_utils.py index 2d1e429..b3eea20 100644 --- a/iso/empanadas/empanadas/util/dnf_utils.py +++ b/iso/empanadas/empanadas/util/dnf_utils.py @@ -360,7 +360,7 @@ class RepoSync: sync_cmd = ("/usr/bin/dnf reposync -c {}.{} --download-metadata " "--repoid={} -p {} --forcearch {} --norepopath " - "--gpgcheck 2>&1").format( + "--gpgcheck --assumeyes 2>&1").format( self.dnf_config, a, r, @@ -370,7 +370,7 @@ class RepoSync: debug_sync_cmd = ("/usr/bin/dnf reposync -c {}.{} " "--download-metadata --repoid={}-debug -p {} --forcearch {} " - "--gpgcheck --norepopath 2>&1").format( + "--gpgcheck --norepopath --assumeyes 2>&1").format( self.dnf_config, a, r, @@ -433,7 +433,7 @@ class RepoSync: source_sync_cmd = ("/usr/bin/dnf reposync -c {} " "--download-metadata --repoid={}-source -p {} " - "--gpgcheck --norepopath 2>&1").format( + "--gpgcheck --norepopath --assumeyes 2>&1").format( self.dnf_config, r, source_sync_path @@ -626,7 +626,8 @@ class RepoSync: repodata = { 'name': repo, 'baseurl': constructed_url, - 'srcbaseurl': constructed_url_src + 'srcbaseurl': constructed_url_src, + 'gpgkey': self.extra_files['git_raw_path'] + self.extra_files['gpg'][self.gpgkey] } repolist.append(repodata) diff --git a/iso/empanadas/empanadas/util/iso_utils.py b/iso/empanadas/empanadas/util/iso_utils.py index c0524b6..200adfc 100644 --- a/iso/empanadas/empanadas/util/iso_utils.py +++ b/iso/empanadas/empanadas/util/iso_utils.py @@ -22,6 +22,7 @@ import xmltodict # if we can access s3 import boto3 import kobo.shortcuts +from fnmatch import fnmatch # This is for treeinfo from configparser import ConfigParser @@ -56,6 +57,8 @@ class IsoBuild: force_download: bool = False, force_unpack: bool = False, isolation: str = 'auto', + extra_iso=None, + extra_iso_mode: str = 'local', compose_dir_is_here: bool = False, image=None, logger=None @@ -83,6 +86,8 @@ class IsoBuild: self.s3 = s3 self.force_unpack = force_unpack self.force_download = force_download + self.extra_iso = extra_iso + self.extra_iso_mode = extra_iso_mode # Relevant major version items self.arch = arch @@ -94,6 +99,7 @@ class IsoBuild: self.repos = rlvars['iso_map']['repos'] self.repo_base_url = config['repo_base_url'] self.project_id = rlvars['project_id'] + self.structure = rlvars['structure'] self.extra_files = rlvars['extra_files'] @@ -134,11 +140,11 @@ class IsoBuild: "work/logs" ) - self.iso_work_dir = os.path.join( - self.compose_latest_dir, - "work/iso", - config['arch'] - ) + #self.iso_work_dir = os.path.join( + # self.compose_latest_dir, + # "work/iso", + # config['arch'] + #) self.lorax_work_dir = os.path.join( self.compose_latest_dir, @@ -306,12 +312,6 @@ class IsoBuild: self.log.error('See the logs for more information.') raise SystemExit() - def run_image_build(self, 
arch): - """ - Builds the other images - """ - print() - def run_pull_lorax_artifacts(self): """ Pulls the required artifacts and unpacks it to work/lorax/$arch @@ -665,52 +665,322 @@ class IsoBuild: # Next set of functions are loosely borrowed (in concept) from pungi. Some # stuff may be combined/mixed together, other things may be simplified or # reduced in nature. - def build_extra_iso(self): + def run_build_extra_iso(self): """ Builds DVD images based on the data created from the initial lorax on each arch. This should NOT be called during the usual run() section. """ + sync_root = self.compose_latest_sync + self.log.info( + '[' + Color.BOLD + Color.GREEN + 'INFO' + Color.END + '] ' + + 'Starting Extra ISOs phase' + ) - def _generate_graft_points(self): + self._extra_iso_build_wrap() + + self.log.info('Compose repo directory: %s' % sync_root) + self.log.info('ISO result directory: %s/$arch' % self.lorax_work_dir) + self.log.info( + '[' + Color.BOLD + Color.GREEN + 'INFO' + Color.END + '] ' + + 'Extra ISO phase completed.' + ) + + def _extra_iso_build_wrap(self): + """ + Try to figure out where the build is going, we only support mock for + now. + """ + arches_to_build = self.arches + if self.arch: + arches_to_build = [self.arch] + + images_to_build = self.iso_map['images'] + if self.extra_iso: + images_to_build = [self.extra_iso] + + for y in images_to_build: + for a in arches_to_build: + grafts = self._generate_graft_points( + a, + y, + self.iso_map['images'][y]['repos'], + ) + + if self.extra_iso_mode == 'local': + self._extra_iso_local_config(a, y, grafts) + self._extra_iso_local_run() + elif self.extra_iso_mode == 'podman': + continue + else: + self.log.info( + '[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' + + 'Mode specified is not valid.' + ) + raise SystemExit() + + if self.extra_iso_mode == 'podman': + print() + + def _extra_iso_local_config(self, arch, image, grafts): + """ + Local ISO build mode - this should build in mock + """ + self.log.info('Generating Extra ISO configuration and script') + mock_iso_template = self.tmplenv.get_template('isomock.tmpl.cfg') + mock_sh_template = self.tmplenv.get_template('extraisobuild.tmpl.sh') + iso_template = self.tmplenv.get_template('buildExtraImage.tmpl.sh') + + mock_iso_path = '/var/tmp/lorax-' + self.major_version + '.cfg' + mock_sh_path = '/var/tmp/extraisobuild.sh' + iso_template_path = '/var/tmp/buildExtraImage.sh' + + rclevel = '' + if self.release_candidate: + rclevel = '-' + self.rclvl + + mock_iso_template_output = mock_iso_template.render( + arch=self.current_arch, + major=self.major_version, + fullname=self.fullname, + shortname=self.shortname, + required_pkgs=self.required_pkgs, + dist=self.disttag, + repos=self.repolist, + user_agent='{{ user_agent }}', + ) + + mock_sh_template_output = mock_sh_template.render( + arch=self.current_arch, + major=self.major_version, + isolation=self.mock_isolation, + builddir=self.mock_work_root, + shortname=self.shortname, + ) + + + def _extra_iso_local_run(self): + """ + Runs the actual local process + """ + + def _generate_graft_points( + self, + arch, + iso, + variants, + ): """ Get a list of packages for an extras ISO. This should NOT be called during the usual run() section. 
""" + lorax_base_dir = os.path.join(self.lorax_work_dir, arch) + global_work_dir = os.path.join(self.compose_latest_dir, "work/global") - def _get_grafts(self): + self.log.info( + '[' + Color.BOLD + Color.GREEN + 'INFO' + Color.END + '] ' + + 'Generating graft points for extra iso: (' + arch + ') ' + iso + ) + files = {} + # This is the data we need to actually boot + lorax_for_var = os.path.join(lorax_base_dir, iso) + + if not os.path.exists(lorax_for_var + '/.treeinfo'): + self.log.info( + '[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' + + '!! .treeinfo is missing, does this variant actually exist? !!' + ) + return + + # extra files + extra_files_for_var = os.path.join( + global_work_dir, + "extra-files" + ) + + # actually get the boot data + files = self._get_grafts([lorax_for_var, extra_files_for_var]) + + # This is to get all the packages for each repo + for repo in variants: + pkg_for_var = os.path.join( + self.compose_latest_sync, + repo, + arch, + self.structure['packages'] + ) + rd_for_var = os.path.join( + self.compose_latest_sync, + repo, + arch, + self.structure['repodata'] + ) + + for k, v in self._get_grafts([pkg_for_var]).items(): + files[os.path.join(repo, "Packages", k)] = v + + for k, v in self._get_grafts([rd_for_var]).items(): + files[os.path.join(repo, "repodata", k)] = v + + grafts = '{}/{}-{}-grafts'.format( + lorax_base_dir, + iso, + arch + ) + self._write_grafts( + grafts, + files, + exclude=["*/lost+found", "*/boot.iso"] + ) + return grafts + + def _get_grafts(self, paths, exclusive_paths=None, exclude=None): """ Actually get some grafts (get_iso_contents), called by generate grafts """ + result = {} + exclude = exclude or [] + exclusive_paths = exclusive_paths or [] - def _write_grafts(self): - """ - Write out the graft points, called by get_grafts - """ + for p in paths: + if isinstance(p, dict): + tree = p + else: + tree = self._scanning(p) + result = self._merging(result, tree) - def _scanning(self): + for p in exclusive_paths: + tree = self._scanning(p) + result = self._merging(result, tree, exclusive=True) + + # Resolves possible symlinks + for key in result.keys(): + path = result[key] + if os.path.islink(path): + real_path = os.readlink(path) + abspath = os.path.normpath(os.path.join(os.path.dirname(path), real_path)) + if not abspath.startswith(self.compose_base): + result[key] = abspath + + return result + + def _write_grafts(self, filepath, u, exclude=None): + """ + Write out the graft points + """ + seen = set() + exclude = exclude or [] + result = {} + for zl in sorted(u, reverse=True): + dirn = os.path.dirname(zl) + + if not zl.endswith("/"): + result[zl] = u[zl] + seen.add(dirn) + continue + + found = False + for j in seen: + if j.startswith(dirn): + found = True + break + if not found: + result[zl] = u[zl] + seen.add(dirn) + + fh = open(filepath, "w") + for zl in sorted(result, key=self._sorting): + found = False + for excl in exclude: + if fnmatch(zl, excl): + found = True + break + if found: + continue + fh.write("%s=%s\n" % (zl, u[zl])) + fh.close() + + def _scanning(self, p): """ Scan tree """ + path = os.path.abspath(p) + result = {} + for root, dirs, files in os.walk(path): + for file in files: + abspath = os.path.join(root, file) + relpath = kobo.shortcuts.relative_path(abspath, path.rstrip("/") + "/") + result[relpath] = abspath - def _merging(self): + # Include empty directories too + if root != path: + abspath = os.path.join(root, "") + relpath = kobo.shortcuts.relative_path(abspath, path.rstrip("/") + "/") + result[relpath] = 
abspath + + return result + + + def _merging(self, tree_a, tree_b, exclusive=False): """ Merge tree """ + result = tree_b.copy() + all_dirs = set( + [os.path.dirname(dirn).rstrip("/") for dirn in result if os.path.dirname(dirn) != ""] + ) - def _sorting(self): - """ - Sorting using the is_rpm and is_image funcs - """ + for dirn in tree_a: + dn = os.path.dirname(dirn) + if exclusive: + match = False + for x in all_dirs: + if dn == x or dn.startswith("%s/" % x): + match = True + break + if match: + continue - def _is_rpm(self): + if dirn in result: + continue + + result[dirn] = tree_a[dirn] + return result + + def _sorting(self, k): + """ + Sorting using the is_rpm and is_image funcs. Images are first, extras + next, rpm's last. + """ + rolling = (0 if self._is_image(k) else 2 if self._is_rpm(k) else 1, k) + return rolling + + def _is_rpm(self, k): """ Is this an RPM? :o """ + result = k.endswith(".rpm") + return result - def _is_image(self): + def _is_image(self, k): """ Is this an image? :o """ + if ( + k.startswith("images/") or + k.startswith("isolinux/") or + k.startswith("EFI/") or + k.startswith("etc/") or + k.startswith("ppc/") + ): + return True + + if ( + k.endswith(".img") or + k.endswith(".ins") + ): + return True + + return False def _get_vol_id(self): """ @@ -819,8 +1089,13 @@ class IsoBuild: #joliet = True #joliet_long = True #rock = True - cmd = ["/usr/bin/xorrisofs" if use_xorrisofs else "/usr/bin/genisoimage"] + if not os.path.exists(cmd[0]): + self.log.error('%s was not found. Good bye.' % cmd[0]) + raise SystemExit("\n\n" + cmd[0] + " was not found.\n\nPlease " + " ensure that you have installed the necessary packages on " + " this system. " + ) if iso_level: cmd.extend(["-iso-level", str(iso_level)]) diff --git a/iso/empanadas/pyproject.toml b/iso/empanadas/pyproject.toml index 8755e18..a43a91d 100644 --- a/iso/empanadas/pyproject.toml +++ b/iso/empanadas/pyproject.toml @@ -25,6 +25,7 @@ sync_from_peridot = "empanadas.scripts.sync_from_peridot:run" sync_from_peridot_test = "empanadas.scripts.sync_from_peridot_test:run" sync_sig = "empanadas.scripts.sync_sig:run" build-iso = "empanadas.scripts.build_iso:run" +build-iso-extra = "empanadas.scripts.build_iso_extra:run" pull-unpack-tree = "empanadas.scripts.pull_unpack_tree:run" launch-builds = "empanadas.scripts.launch_builds:run" From b75fe6289980248a46c816cddf11e29b66d480c5 Mon Sep 17 00:00:00 2001 From: nazunalika Date: Thu, 23 Jun 2022 15:29:22 -0700 Subject: [PATCH 46/64] scripts now generate --- iso/empanadas/empanadas/configs/el8.yaml | 2 + iso/empanadas/empanadas/configs/el9-beta.yaml | 3 + iso/empanadas/empanadas/configs/el9.yaml | 3 + iso/empanadas/empanadas/configs/el9lh.yaml | 3 + .../empanadas/scripts/build_iso_extra.py | 2 +- .../templates/buildExtraImage.tmpl.sh | 1 + .../empanadas/templates/extraisobuild.tmpl.sh | 2 +- iso/empanadas/empanadas/util/iso_utils.py | 91 +++++++++++++++---- 8 files changed, 88 insertions(+), 19 deletions(-) diff --git a/iso/empanadas/empanadas/configs/el8.yaml b/iso/empanadas/empanadas/configs/el8.yaml index 5cf0ed1..f6c1a67 100644 --- a/iso/empanadas/empanadas/configs/el8.yaml +++ b/iso/empanadas/empanadas/configs/el8.yaml @@ -69,6 +69,8 @@ packages: 'os/Packages' repodata: 'os/repodata' iso_map: + xorrisofs: False + iso_level: False hosts: x86_64: '' aarch64: '' diff --git a/iso/empanadas/empanadas/configs/el9-beta.yaml b/iso/empanadas/empanadas/configs/el9-beta.yaml index f8bd8a6..ea6a94a 100644 --- a/iso/empanadas/empanadas/configs/el9-beta.yaml +++ 
b/iso/empanadas/empanadas/configs/el9-beta.yaml @@ -51,6 +51,8 @@ packages: 'os/Packages' repodata: 'os/repodata' iso_map: + xorrisofs: False + iso_level: False hosts: x86_64: '' aarch64: '' @@ -58,6 +60,7 @@ s390x: '' images: dvd: + discnum: '1' repos: - 'BaseOS' - 'AppStream' diff --git a/iso/empanadas/empanadas/configs/el9.yaml b/iso/empanadas/empanadas/configs/el9.yaml index a13c859..e50fbdf 100644 --- a/iso/empanadas/empanadas/configs/el9.yaml +++ b/iso/empanadas/empanadas/configs/el9.yaml @@ -51,6 +51,8 @@ packages: 'os/Packages' repodata: 'os/repodata' iso_map: + xorrisofs: False + iso_level: False hosts: x86_64: '' aarch64: '' @@ -58,6 +60,7 @@ s390x: '' images: dvd: + discnum: '1' repos: - 'BaseOS' - 'AppStream' diff --git a/iso/empanadas/empanadas/configs/el9lh.yaml b/iso/empanadas/empanadas/configs/el9lh.yaml index 680a6c1..69c37e1 100644 --- a/iso/empanadas/empanadas/configs/el9lh.yaml +++ b/iso/empanadas/empanadas/configs/el9lh.yaml @@ -51,6 +51,8 @@ packages: 'os/Packages' repodata: 'os/repodata' iso_map: + xorrisofs: False + iso_level: False hosts: x86_64: '' aarch64: '' @@ -58,6 +60,7 @@ s390x: '' images: dvd: + discnum: '1' repos: - 'BaseOS' - 'AppStream' diff --git a/iso/empanadas/empanadas/scripts/build_iso_extra.py b/iso/empanadas/empanadas/scripts/build_iso_extra.py index b33b202..074dba4 100755 --- a/iso/empanadas/empanadas/scripts/build_iso_extra.py +++ b/iso/empanadas/empanadas/scripts/build_iso_extra.py @@ -15,7 +15,7 @@ parser.add_argument('--isolation', type=str, help="Mock Isolation") parser.add_argument('--local-compose', action='store_true', help="Compose Directory is Here") parser.add_argument('--logger', type=str) parser.add_argument('--extra-iso', type=str, help="Granular choice in which iso is built") -parser.add_argument('--extra-iso-mode', type=str) +parser.add_argument('--extra-iso-mode', type=str, default='local') results = parser.parse_args() rlvars = rldict[results.release] major = rlvars['major'] diff --git a/iso/empanadas/empanadas/templates/buildExtraImage.tmpl.sh b/iso/empanadas/empanadas/templates/buildExtraImage.tmpl.sh index 623da54..8f5d54a 100644 --- a/iso/empanadas/empanadas/templates/buildExtraImage.tmpl.sh +++ b/iso/empanadas/empanadas/templates/buildExtraImage.tmpl.sh @@ -23,4 +23,5 @@ fi mkdir -p {{ compose_work_iso_dir }}/{{ arch }} cp /builddir/*.iso {{ compose_work_iso_dir }}/{{ arch }} cp /builddir/*.iso.manifest {{ compose_work_iso_dir }}/{{ arch }} +cp /builddir/*.log {{ compose_work_iso_dir }}/{{ arch }} {% endif %} diff --git a/iso/empanadas/empanadas/templates/extraisobuild.tmpl.sh b/iso/empanadas/empanadas/templates/extraisobuild.tmpl.sh index 29ddfa0..e8f555b 100644 --- a/iso/empanadas/empanadas/templates/extraisobuild.tmpl.sh +++ b/iso/empanadas/empanadas/templates/extraisobuild.tmpl.sh @@ -10,7 +10,7 @@ MOCK_RESL="${MOCK_ROOT}/result" MOCK_CHRO="${MOCK_ROOT}/root" MOCK_LOG="${MOCK_RESL}/mock-output.log" IMAGE_SCR="/var/tmp/buildExtraImage.sh" -IMAGE_ISO="{{ shortname }}-{{ major }}.{{ minor }}{{ rc }}-{{ arch }}-dvd{{ discnum|default('1') }}.iso" +IMAGE_ISO="{{ isoname }}" ISOLATION="{{ isolation }}" BUILDDIR="{{ builddir }}" diff --git a/iso/empanadas/empanadas/util/iso_utils.py b/iso/empanadas/empanadas/util/iso_utils.py index 200adfc..d3b1d1c 100644 --- a/iso/empanadas/empanadas/util/iso_utils.py +++ b/iso/empanadas/empanadas/util/iso_utils.py @@ -140,11 +140,10 @@ class IsoBuild: "work/logs" ) - #self.iso_work_dir = os.path.join( - # self.compose_latest_dir, - # "work/iso", - # config['arch'] - #) + self.iso_work_dir = 
os.path.join( + self.compose_latest_dir, + "work/iso" + ) self.lorax_work_dir = os.path.join( self.compose_latest_dir, @@ -708,7 +707,7 @@ class IsoBuild: if self.extra_iso_mode == 'local': self._extra_iso_local_config(a, y, grafts) - self._extra_iso_local_run() + #self._extra_iso_local_run() elif self.extra_iso_mode == 'podman': continue else: @@ -738,6 +737,27 @@ class IsoBuild: if self.release_candidate: rclevel = '-' + self.rclvl + discnum = '' + if self.iso_map['images'][image]['discnum']: + discnum = self.iso_map['images'][image]['discnum'] + + volid = '{}-{}-{}-{}-{}'.format( + self.shortname, + self.major_version, + self.minor_version, + arch, + image + ) + + isoname = '{}-{}.{}-{}-{}{}.iso'.format( + self.shortname, + self.major_version, + self.minor_version, + arch, + image, + discnum + ) + mock_iso_template_output = mock_iso_template.render( arch=self.current_arch, major=self.major_version, @@ -755,8 +775,48 @@ class IsoBuild: isolation=self.mock_isolation, builddir=self.mock_work_root, shortname=self.shortname, + isoname=isoname, ) + opts = { + 'arch': arch, + 'iso_name': isoname, + 'volid': volid, + 'graft_points': grafts, + 'use_xorrisofs': self.iso_map['xorrisofs'], + 'iso_level': self.iso_map['iso_level'], + } + + make_image = self._get_make_image_cmd(opts) + isohybrid = self._get_isohybrid_cmd(opts) + implantmd5 = self._get_implantisomd5_cmd(opts) + make_manifest = self._get_manifest_cmd(opts) + + iso_template_output = iso_template.render( + inside_podman=False, + arch=arch, + compose_work_iso_dir=self.iso_work_dir, + make_image=make_image, + isohybrid=isohybrid, + implantmd5=implantmd5, + make_manifest=make_manifest, + ) + + mock_iso_entry = open(mock_iso_path, "w+") + mock_iso_entry.write(mock_iso_template_output) + mock_iso_entry.close() + + mock_sh_entry = open(mock_sh_path, "w+") + mock_sh_entry.write(mock_sh_template_output) + mock_sh_entry.close() + + iso_template_entry = open(iso_template_path, "w+") + iso_template_entry.write(iso_template_output) + iso_template_entry.close() + + os.chmod(mock_sh_path, 0o755) + os.chmod(iso_template_path, 0o755) + def _extra_iso_local_run(self): """ @@ -1072,7 +1132,6 @@ class IsoBuild: def _get_mkisofs_cmd( self, iso, - paths, appid=None, volid=None, volset=None, @@ -1147,8 +1206,9 @@ class IsoBuild: """ Implants md5 into iso """ - cmd = ["/usr/bin/implantisomd5", "--supported-iso", opts['iso_path']] - return cmd + cmd = ["/usr/bin/implantisomd5", "--supported-iso", opts['iso_name']] + returned_cmd = ' '.join(cmd) + return returned_cmd def _get_manifest_cmd(self, opts): """ @@ -1164,8 +1224,9 @@ class IsoBuild: if opts['arch'] == "x86_64": cmd = ["/usr/bin/isohybrid"] cmd.append("--uefi") - cmd.append(opts['iso_path']) - return cmd + cmd.append(opts['iso_name']) + returned_cmd = ' '.join(cmd) + return returned_cmd def _get_make_image_cmd(self, opts): """ @@ -1190,14 +1251,10 @@ class IsoBuild: iso_level=opts['iso_level'], **isokwargs ) - return cmd + returned_cmd = ' '.join(cmd) + return returned_cmd - def _write_script(self, opts): - """ - Writes out the script to make the DVD - """ - class LiveBuild: """ This helps us build the live images for Rocky Linux. 
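With the build-iso-extra console script registered in the pyproject.toml hunk of PATCH 45 above, the extra ISO flow these patches introduce can be driven from the command line. A minimal invocation sketch, assuming the entry points are installed (for example via poetry or pip) and that the release key and image name used here ("9", "dvd") match what the YAML configs define:

    # Build the dvd extra image for one architecture using the default local (mock) mode
    build-iso-extra --release 9 --arch x86_64 --extra-iso dvd --extra-iso-mode local

    # Same, but treat this host as the compose root (sets compose_dir_is_here=True)
    build-iso-extra --release 9 --arch x86_64 --extra-iso dvd --local-compose
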
From 6073066c8438be59079f19aae2eaaf0ceb0019eb Mon Sep 17 00:00:00 2001 From: nazunalika Date: Fri, 24 Jun 2022 15:37:32 -0700 Subject: [PATCH 47/64] restructure iso map configs --- iso/empanadas/empanadas/configs/el9-beta.yaml | 61 +++++++------------ iso/empanadas/empanadas/configs/el9.yaml | 61 +++++++------------ iso/empanadas/empanadas/configs/el9lh.yaml | 61 +++++++------------ iso/empanadas/empanadas/util/dnf_utils.py | 4 +- iso/empanadas/empanadas/util/iso_utils.py | 44 ++++++++----- 5 files changed, 101 insertions(+), 130 deletions(-) diff --git a/iso/empanadas/empanadas/configs/el9-beta.yaml b/iso/empanadas/empanadas/configs/el9-beta.yaml index ea6a94a..75a6163 100644 --- a/iso/empanadas/empanadas/configs/el9-beta.yaml +++ b/iso/empanadas/empanadas/configs/el9-beta.yaml @@ -30,56 +30,41 @@ - 'SAPHANA' - 'extras' - 'plus' - no_comps_or_groups: - - 'all' - - 'extras' - - 'plus' - comps_or_groups: - - 'BaseOS' - - 'AppStream' - - 'CRB' - - 'HighAvailability' - - 'ResilientStorage' - - 'RT' - - 'NFV' - - 'SAP' - - 'SAPHANA' - has_modules: - - 'AppStream' - - 'CRB' structure: packages: 'os/Packages' repodata: 'os/repodata' iso_map: xorrisofs: False iso_level: False - hosts: - x86_64: '' - aarch64: '' - ppc64le: '' - s390x: '' images: dvd: discnum: '1' + variant: 'AppStream' repos: - 'BaseOS' - 'AppStream' - lorax_variants: - - dvd - - minimal - - BaseOS - repos: - - 'BaseOS' - - 'AppStream' - variant: 'BaseOS' - lorax_removes: - - 'libreport-rhel-anaconda-bugzilla' - required_pkgs: - - 'lorax' - - 'genisoimage' - - 'isomd5sum' - - 'lorax-templates-rhel' - - 'lorax-templates-generic' + minimal: + isoskip: True + repos: [] + BaseOS: + isoskip: True + variant: 'BaseOS' + repos: + - 'BaseOS' + - 'AppStream' + lorax: + repos: + - 'BaseOS' + - 'AppStream' + variant: 'BaseOS' + lorax_removes: + - 'libreport-rhel-anaconda-bugzilla' + required_pkgs: + - 'lorax' + - 'genisoimage' + - 'isomd5sum' + - 'lorax-templates-rhel' + - 'lorax-templates-generic' repoclosure_map: arches: x86_64: '--forcearch=x86_64 --arch=x86_64 --arch=athlon --arch=i686 --arch=i586 --arch=i486 --arch=i386 --arch=noarch' diff --git a/iso/empanadas/empanadas/configs/el9.yaml b/iso/empanadas/empanadas/configs/el9.yaml index e50fbdf..783b86c 100644 --- a/iso/empanadas/empanadas/configs/el9.yaml +++ b/iso/empanadas/empanadas/configs/el9.yaml @@ -30,56 +30,41 @@ - 'SAPHANA' - 'extras' - 'plus' - no_comps_or_groups: - - 'all' - - 'extras' - - 'plus' - comps_or_groups: - - 'BaseOS' - - 'AppStream' - - 'CRB' - - 'HighAvailability' - - 'ResilientStorage' - - 'RT' - - 'NFV' - - 'SAP' - - 'SAPHANA' - has_modules: - - 'AppStream' - - 'CRB' structure: packages: 'os/Packages' repodata: 'os/repodata' iso_map: xorrisofs: False iso_level: False - hosts: - x86_64: '' - aarch64: '' - ppc64le: '' - s390x: '' images: dvd: discnum: '1' + variant: 'AppStream' repos: - 'BaseOS' - 'AppStream' - lorax_variants: - - dvd - - minimal - - BaseOS - repos: - - 'BaseOS' - - 'AppStream' - variant: 'BaseOS' - lorax_removes: - - 'libreport-rhel-anaconda-bugzilla' - required_pkgs: - - 'lorax' - - 'genisoimage' - - 'isomd5sum' - - 'lorax-templates-rhel' - - 'lorax-templates-generic' + minimal: + isoskip: True + repos: [] + BaseOS: + isoskip: True + variant: 'BaseOS' + repos: + - 'BaseOS' + - 'AppStream' + lorax: + repos: + - 'BaseOS' + - 'AppStream' + variant: 'BaseOS' + lorax_removes: + - 'libreport-rhel-anaconda-bugzilla' + required_pkgs: + - 'lorax' + - 'genisoimage' + - 'isomd5sum' + - 'lorax-templates-rhel' + - 'lorax-templates-generic' repoclosure_map: 
arches: x86_64: '--forcearch=x86_64 --arch=x86_64 --arch=athlon --arch=i686 --arch=i586 --arch=i486 --arch=i386 --arch=noarch' diff --git a/iso/empanadas/empanadas/configs/el9lh.yaml b/iso/empanadas/empanadas/configs/el9lh.yaml index 69c37e1..400b8fa 100644 --- a/iso/empanadas/empanadas/configs/el9lh.yaml +++ b/iso/empanadas/empanadas/configs/el9lh.yaml @@ -30,56 +30,41 @@ - 'SAPHANA' - 'extras' - 'plus' - no_comps_or_groups: - - 'all' - - 'extras' - - 'plus' - comps_or_groups: - - 'BaseOS' - - 'AppStream' - - 'CRB' - - 'HighAvailability' - - 'ResilientStorage' - - 'RT' - - 'NFV' - - 'SAP' - - 'SAPHANA' - has_modules: - - 'AppStream' - - 'CRB' structure: packages: 'os/Packages' repodata: 'os/repodata' iso_map: xorrisofs: False iso_level: False - hosts: - x86_64: '' - aarch64: '' - ppc64le: '' - s390x: '' images: dvd: discnum: '1' + variant: 'AppStream' repos: - 'BaseOS' - 'AppStream' - lorax_variants: - - dvd - - minimal - - BaseOS - repos: - - 'BaseOS' - - 'AppStream' - variant: 'BaseOS' - lorax_removes: - - 'libreport-rhel-anaconda-bugzilla' - required_pkgs: - - 'lorax' - - 'genisoimage' - - 'isomd5sum' - - 'lorax-templates-rhel' - - 'lorax-templates-generic' + minimal: + isoskip: True + repos: [] + BaseOS: + isoskip: True + variant: 'BaseOS' + repos: + - 'BaseOS' + - 'AppStream' + lorax: + repos: + - 'BaseOS' + - 'AppStream' + variant: 'BaseOS' + lorax_removes: + - 'libreport-rhel-anaconda-bugzilla' + required_pkgs: + - 'lorax' + - 'genisoimage' + - 'isomd5sum' + - 'lorax-templates-rhel' + - 'lorax-templates-generic' repoclosure_map: arches: x86_64: '--forcearch=x86_64 --arch=x86_64 --arch=athlon --arch=i686 --arch=i586 --arch=i486 --arch=i386 --arch=noarch' diff --git a/iso/empanadas/empanadas/util/dnf_utils.py b/iso/empanadas/empanadas/util/dnf_utils.py index b3eea20..ff0de02 100644 --- a/iso/empanadas/empanadas/util/dnf_utils.py +++ b/iso/empanadas/empanadas/util/dnf_utils.py @@ -855,8 +855,8 @@ class RepoSync: deploys extra files based on info of rlvars including a extra_files.json - also deploys COMPOSE_ID and maybe in the future a metadata dir with a - bunch of compose-esque stuff. + might also deploy COMPOSE_ID and maybe in the future a metadata dir with + a bunch of compose-esque stuff. 
""" cmd = self.git_cmd() tmpclone = '/tmp/clone' diff --git a/iso/empanadas/empanadas/util/iso_utils.py b/iso/empanadas/empanadas/util/iso_utils.py index d3b1d1c..40a0c0a 100644 --- a/iso/empanadas/empanadas/util/iso_utils.py +++ b/iso/empanadas/empanadas/util/iso_utils.py @@ -77,7 +77,7 @@ class IsoBuild: self.compose_base = config['compose_root'] + "/" + major self.iso_drop = config['compose_root'] + "/" + major + "/isos" self.current_arch = config['arch'] - self.required_pkgs = rlvars['iso_map']['required_pkgs'] + self.required_pkgs = rlvars['iso_map']['lorax']['required_pkgs'] self.mock_work_root = config['mock_work_root'] self.lorax_result_root = config['mock_work_root'] + "/" + "lorax" self.mock_isolation = isolation @@ -96,7 +96,7 @@ class IsoBuild: self.minor_version = rlvars['minor'] self.revision = rlvars['revision'] + "-" + rlvars['rclvl'] self.rclvl = rlvars['rclvl'] - self.repos = rlvars['iso_map']['repos'] + self.repos = rlvars['iso_map']['lorax']['repos'] self.repo_base_url = config['repo_base_url'] self.project_id = rlvars['project_id'] self.structure = rlvars['structure'] @@ -273,8 +273,8 @@ class IsoBuild: minor=self.minor_version, shortname=self.shortname, repos=self.repolist, - variant=self.iso_map['variant'], - lorax=self.iso_map['lorax_removes'], + variant=self.iso_map['lorax']['variant'], + lorax=self.iso_map['lorax']['lorax_removes'], distname=self.distname, revision=self.release, rc=rclevel, @@ -411,7 +411,7 @@ class IsoBuild: self.log.info( 'Copying base lorax for ' + Color.BOLD + arch + Color.END ) - for variant in self.iso_map['lorax_variants']: + for variant in self.iso_map['images']: self._copy_lorax_to_variant(self.force_unpack, arch, variant) self.log.info( @@ -424,6 +424,14 @@ class IsoBuild: 'Beginning treeinfo phase' ) + for arch in arches_to_unpack: + for variant in self.iso_map['images']: + self.log.info( + 'Configuring treeinfo for %s%s %s%s' % (Color.BOLD, arch, variant, Color.END) + ) + + self._treeinfo_wrapper(arch, variant) + def _s3_determine_latest(self): """ @@ -614,7 +622,7 @@ class IsoBuild: arches_to_unpack = [self.arch] self._sync_boot(force_unpack=self.force_unpack, arch=self.arch, image=None) - self.treeinfo_write(arch=self.arch) + #self._treeinfo_write(arch=self.arch) def _sync_boot(self, force_unpack, arch, image): """ @@ -623,17 +631,18 @@ class IsoBuild: self.log.info('Copying lorax to %s directory...' % image) # checks here, report that it already exists - def treeinfo_write(self, arch): + def _treeinfo_wrapper(self, arch, variant): """ - Ensure treeinfo is written correctly + Ensure treeinfo is written correctly based on the variant passed. Each + .treeinfo file should be configured similarly but also differently from + the next. """ - self.log.info('Starting treeinfo work...') + image = os.path.join(self.lorax_work_dir, arch, variant) + treeinfo = os.path.join(image, '.treeinfo') + repos = self.iso_map['images'][variant]['repos'] - def _treeinfo_from_lorax(self, arch, force_unpack, variant): - """ - Fixes lorax treeinfo - """ - self.log.info('Fixing up lorax treeinfo for %s ...' 
% variant) + #ti = productmd.treeinfo.TreeInfo() + #ti.load(treeinfo) def discinfo_write(self): """ @@ -698,6 +707,13 @@ class IsoBuild: images_to_build = [self.extra_iso] for y in images_to_build: + if 'isoskip' in self.iso_map['images'][y] and self.iso_map['images'][y]['isoskip']: + self.log.info( + '[' + Color.BOLD + Color.YELLOW + 'WARN' + Color.END + '] ' + + 'Skipping ' + y + ' image' + ) + continue + for a in arches_to_build: grafts = self._generate_graft_points( a, From 43470e336bccc43c4947758a098d9c63f67b4c79 Mon Sep 17 00:00:00 2001 From: nazunalika Date: Sun, 26 Jun 2022 12:24:56 -0700 Subject: [PATCH 48/64] RLBS - Finalize Treeinfo and Add xorrisofs In preparation to build ISO's with mock or parallel podman containers, treeinfo support was fully added as well as the addition of xorrisofs to prepare for the future. xorrisofs should also work for both current and previous releases. We have also add a currently-pending RLN experiment. --- iso/empanadas/empanadas/configs/el9-beta.yaml | 9 +- iso/empanadas/empanadas/configs/el9.yaml | 9 +- iso/empanadas/empanadas/configs/el9lh.yaml | 9 +- iso/empanadas/empanadas/configs/rln.yaml | 121 +++++++ .../templates/buildExtraImage.tmpl.sh | 9 +- .../empanadas/templates/extraisobuild.tmpl.sh | 2 +- .../empanadas/templates/xorriso.tmpl.txt | 5 + iso/empanadas/empanadas/util/dnf_utils.py | 36 +- iso/empanadas/empanadas/util/iso_utils.py | 335 ++++++++++++++---- 9 files changed, 455 insertions(+), 80 deletions(-) create mode 100644 iso/empanadas/empanadas/configs/rln.yaml create mode 100644 iso/empanadas/empanadas/templates/xorriso.tmpl.txt diff --git a/iso/empanadas/empanadas/configs/el9-beta.yaml b/iso/empanadas/empanadas/configs/el9-beta.yaml index 75a6163..6de4bbe 100644 --- a/iso/empanadas/empanadas/configs/el9-beta.yaml +++ b/iso/empanadas/empanadas/configs/el9-beta.yaml @@ -6,6 +6,7 @@ major: '9' minor: '1' bugurl: 'https://bugs.rockylinux.org' + checksum: 'sha256' allowed_arches: - x86_64 - aarch64 @@ -38,15 +39,19 @@ iso_level: False images: dvd: - discnum: '1' + disc: True variant: 'AppStream' repos: - 'BaseOS' - 'AppStream' minimal: + disc: True isoskip: True - repos: [] + repos: + - 'minimal' + variant: 'minimal' BaseOS: + disc: False isoskip: True variant: 'BaseOS' repos: diff --git a/iso/empanadas/empanadas/configs/el9.yaml b/iso/empanadas/empanadas/configs/el9.yaml index 783b86c..b71a1ef 100644 --- a/iso/empanadas/empanadas/configs/el9.yaml +++ b/iso/empanadas/empanadas/configs/el9.yaml @@ -6,6 +6,7 @@ major: '9' minor: '0' bugurl: 'https://bugs.rockylinux.org' + checksum: 'sha256' allowed_arches: - x86_64 - aarch64 @@ -38,15 +39,19 @@ iso_level: False images: dvd: - discnum: '1' + disc: True variant: 'AppStream' repos: - 'BaseOS' - 'AppStream' minimal: + disc: True isoskip: True - repos: [] + repos: + - 'minimal' + variant: 'minimal' BaseOS: + disc: False isoskip: True variant: 'BaseOS' repos: diff --git a/iso/empanadas/empanadas/configs/el9lh.yaml b/iso/empanadas/empanadas/configs/el9lh.yaml index 400b8fa..621e758 100644 --- a/iso/empanadas/empanadas/configs/el9lh.yaml +++ b/iso/empanadas/empanadas/configs/el9lh.yaml @@ -6,6 +6,7 @@ major: '9' minor: '1' bugurl: 'https://bugs.rockylinux.org' + checksum: 'sha256' allowed_arches: - x86_64 - aarch64 @@ -38,15 +39,19 @@ iso_level: False images: dvd: - discnum: '1' + disc: True variant: 'AppStream' repos: - 'BaseOS' - 'AppStream' minimal: + disc: True isoskip: True - repos: [] + repos: + - 'minimal' + variant: 'minimal' BaseOS: + disc: False isoskip: True variant: 'BaseOS' repos: 
diff --git a/iso/empanadas/empanadas/configs/rln.yaml b/iso/empanadas/empanadas/configs/rln.yaml new file mode 100644 index 0000000..1bd52d6 --- /dev/null +++ b/iso/empanadas/empanadas/configs/rln.yaml @@ -0,0 +1,121 @@ +--- +'rln': + fullname: 'Rocky Linux New' + revision: '10' + rclvl: 'RC1' + major: '10' + minor: '0' + bugurl: 'https://bugs.rockylinux.org' + checksum: 'sha256' + allowed_arches: + - x86_64 + - aarch64 + - ppc64le + - s390x + provide_multilib: True + project_id: '' + repo_symlinks: + NFV: 'nfv' + renames: + all: 'devel' + all_repos: + - 'all' + - 'BaseOS' + - 'AppStream' + - 'CRB' + - 'HighAvailability' + - 'ResilientStorage' + - 'RT' + - 'NFV' + - 'SAP' + - 'SAPHANA' + - 'extras' + - 'plus' + structure: + packages: 'os/Packages' + repodata: 'os/repodata' + iso_map: + xorrisofs: True + iso_level: False + images: + dvd: + discnum: '1' + variant: 'AppStream' + repos: + - 'BaseOS' + - 'AppStream' + minimal: + discnum: '1' + isoskip: True + repos: + - 'minimal' + variant: 'minimal' + BaseOS: + isoskip: True + variant: 'BaseOS' + repos: + - 'BaseOS' + - 'AppStream' + lorax: + repos: + - 'BaseOS' + - 'AppStream' + variant: 'BaseOS' + lorax_removes: + - 'libreport-rhel-anaconda-bugzilla' + required_pkgs: + - 'lorax' + - 'genisoimage' + - 'isomd5sum' + - 'lorax-templates-rhel' + - 'lorax-templates-generic' + - 'xorriso' + repoclosure_map: + arches: + x86_64: '--forcearch=x86_64 --arch=x86_64 --arch=athlon --arch=i686 --arch=i586 --arch=i486 --arch=i386 --arch=noarch' + aarch64: '--forcearch=aarch64 --arch=aarch64 --arch=noarch' + ppc64le: '--forcearch=ppc64le --arch=ppc64le --arch=noarch' + s390x: '--forcearch=s390x --arch=s390x --arch=noarch' + repos: + devel: [] + BaseOS: [] + AppStream: + - BaseOS + CRB: + - BaseOS + - AppStream + HighAvailability: + - BaseOS + - AppStream + ResilientStorage: + - BaseOS + - AppStream + RT: + - BaseOS + - AppStream + NFV: + - BaseOS + - AppStream + SAP: + - BaseOS + - AppStream + - HighAvailability + SAPHANA: + - BaseOS + - AppStream + - HighAvailability + extra_files: + git_repo: 'https://git.rockylinux.org/staging/src/rocky-release.git' + git_raw_path: 'https://git.rockylinux.org/staging/src/rocky-release/-/raw/rln/' + branch: 'rln' + gpg: + stable: 'SOURCES/RPM-GPG-KEY-Rocky-RLN' + testing: 'SOURCES/RPM-GPG-KEY-Rocky-RLN-Testing' + list: + - 'SOURCES/Contributors' + - 'SOURCES/COMMUNITY-CHARTER' + - 'SOURCES/EULA' + - 'SOURCES/LICENSE' + - 'SOURCES/RPM-GPG-KEY-Rocky-RLN' + - 'SOURCES/RPM-GPG-KEY-Rocky-RLN' +... 
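The new rln.yaml profile above is, so far, the only config that sets iso_map.xorrisofs: True, which routes extra ISO builds through the xorriso command file introduced later in this patch (the rendered xorriso.tmpl.txt plus the -map lines that _write_grafts now emits) instead of a genisoimage graft list. A rough illustration of the two graft formats, using purely hypothetical package and compose paths:

    # genisoimage-style graft list entry: <path inside ISO>=<path on disk>
    AppStream/Packages/f/foo-1.0-1.el9.noarch.rpm=/mnt/compose/9/latest-Rocky-9/compose/AppStream/x86_64/os/Packages/f/foo-1.0-1.el9.noarch.rpm

    # xorriso command file entry written by _write_grafts when xorrisofs is enabled: -map <path on disk> <path inside ISO>
    -map /mnt/compose/9/latest-Rocky-9/compose/AppStream/x86_64/os/Packages/f/foo-1.0-1.el9.noarch.rpm AppStream/Packages/f/foo-1.0-1.el9.noarch.rpm

The rendered xorriso.tmpl.txt (-indev boot.iso, -outdev, -boot_image any replay, -volid) is then fed to xorrisofs with -dialog on, as set up in the iso_utils.py changes later in this patch.
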
diff --git a/iso/empanadas/empanadas/templates/buildExtraImage.tmpl.sh b/iso/empanadas/empanadas/templates/buildExtraImage.tmpl.sh index 8f5d54a..ab99d2e 100644 --- a/iso/empanadas/empanadas/templates/buildExtraImage.tmpl.sh +++ b/iso/empanadas/empanadas/templates/buildExtraImage.tmpl.sh @@ -1,8 +1,9 @@ #!/bin/bash set -ex -{% if inside_podman %} -mkdir /builddor +{% if extra_iso_mode == "podman" %} +{{ lorax_pkg_cmd }} +mkdir /builddir {% endif %} cd /builddir @@ -19,9 +20,9 @@ fi {{ make_manifest }} -{% if inside_podman %} +{% if extra_iso_mode == "podman" %} mkdir -p {{ compose_work_iso_dir }}/{{ arch }} cp /builddir/*.iso {{ compose_work_iso_dir }}/{{ arch }} cp /builddir/*.iso.manifest {{ compose_work_iso_dir }}/{{ arch }} -cp /builddir/*.log {{ compose_work_iso_dir }}/{{ arch }} +#cp /builddir/*.log {{ compose_work_iso_dir }}/{{ arch }} {% endif %} diff --git a/iso/empanadas/empanadas/templates/extraisobuild.tmpl.sh b/iso/empanadas/empanadas/templates/extraisobuild.tmpl.sh index e8f555b..7dc6584 100644 --- a/iso/empanadas/empanadas/templates/extraisobuild.tmpl.sh +++ b/iso/empanadas/empanadas/templates/extraisobuild.tmpl.sh @@ -9,7 +9,7 @@ MOCK_ROOT="/var/lib/mock/{{ shortname|lower }}-{{ major }}-{{ arch }}" MOCK_RESL="${MOCK_ROOT}/result" MOCK_CHRO="${MOCK_ROOT}/root" MOCK_LOG="${MOCK_RESL}/mock-output.log" -IMAGE_SCR="/var/tmp/buildExtraImage.sh" +IMAGE_SCR="{{ entries_dir }}/buildExtraImage.sh" IMAGE_ISO="{{ isoname }}" ISOLATION="{{ isolation }}" BUILDDIR="{{ builddir }}" diff --git a/iso/empanadas/empanadas/templates/xorriso.tmpl.txt b/iso/empanadas/empanadas/templates/xorriso.tmpl.txt new file mode 100644 index 0000000..ca2b68e --- /dev/null +++ b/iso/empanadas/empanadas/templates/xorriso.tmpl.txt @@ -0,0 +1,5 @@ +-indev {{ boot_iso }} +-outdev {{ isoname }} +-boot_image any replay +-volid {{ volid }} +{{ grafts }} diff --git a/iso/empanadas/empanadas/util/dnf_utils.py b/iso/empanadas/empanadas/util/dnf_utils.py index ff0de02..ee5ca07 100644 --- a/iso/empanadas/empanadas/util/dnf_utils.py +++ b/iso/empanadas/empanadas/util/dnf_utils.py @@ -20,6 +20,9 @@ from jinja2 import Environment, FileSystemLoader from empanadas.common import Color, _rootdir +# initial treeinfo data is made here +import productmd.treeinfo + #HAS_LIBREPO = True #try: # import librepo @@ -43,6 +46,7 @@ class RepoSync: ignore_source: bool = False, repoclosure: bool = False, refresh_extra_files: bool = False, + refresh_treeinfo: bool = False, skip_all: bool = False, hashed: bool = False, parallel: bool = False, @@ -63,6 +67,7 @@ class RepoSync: self.hashed = hashed self.repoclosure = repoclosure self.refresh_extra_files = refresh_extra_files + self.refresh_treeinfo = refresh_treeinfo # Enables podman syncing, which should effectively speed up operations self.parallel = parallel # Relevant config items @@ -203,18 +208,28 @@ class RepoSync: self.log.error('Dry Runs are not supported just yet. Sorry!') raise SystemExit() + if self.fullrun and self.refresh_extra_files: + self.log.warn( + '[' + Color.BOLD + Color.YELLOW + 'WARN' + Color.END + '] ' + + 'A full run implies extra files are also deployed.' 
+ ) + self.sync(self.repo, sync_root, work_root, log_root, global_work_root, self.arch) if self.fullrun: self.deploy_extra_files(global_work_root) + self.deploy_treeinfo(self.repo, sync_root, self.arch) self.symlink_to_latest(generated_dir) if self.repoclosure: self.repoclosure_work(sync_root, work_root, log_root) - if self.refresh_extra_files: + if self.refresh_extra_files and not self.fullrun: self.deploy_extra_files(global_work_root) + if self.refresh_treeinfo and not self.fullrun: + self.deploy_treeinfo(self.repo, sync_root, self.arch) + self.log.info('Compose repo directory: %s' % sync_root) self.log.info('Compose logs: %s' % log_root) self.log.info('Compose completed.') @@ -885,9 +900,12 @@ class RepoSync: stderr=subprocess.DEVNULL ) - # Copy files + # Copy files to work root for extra in self.extra_files['list']: src = '/tmp/clone/' + extra + # Copy extra files to root of compose here also - The extra files + # are meant to be picked up by our ISO creation process and also + # exist on our mirrors. try: shutil.copy2(src, extra_files_dir) except: @@ -912,6 +930,20 @@ class RepoSync: 'Extra files phase completed.' ) + def deploy_treeinfo(self, repo, sync_root, arch): + """ + Deploys initial treeinfo files. These have the potential of being + overwritten by our ISO process, which is fine. + """ + arches_to_tree = self.arches + if arch: + arches_to_tree = [arch] + + repos_to_tree = self.repos + if repo and not self.fullrun: + repos_to_tree = [repo] + + class SigRepoSync: """ This helps us do reposync operations for SIG's. Do not use this for the diff --git a/iso/empanadas/empanadas/util/iso_utils.py b/iso/empanadas/empanadas/util/iso_utils.py index 40a0c0a..2784e77 100644 --- a/iso/empanadas/empanadas/util/iso_utils.py +++ b/iso/empanadas/empanadas/util/iso_utils.py @@ -21,6 +21,7 @@ import json import xmltodict # if we can access s3 import boto3 +# relative_path, compute_file_checksums import kobo.shortcuts from fnmatch import fnmatch @@ -88,6 +89,7 @@ class IsoBuild: self.force_download = force_download self.extra_iso = extra_iso self.extra_iso_mode = extra_iso_mode + self.checksum = rlvars['checksum'] # Relevant major version items self.arch = arch @@ -142,7 +144,7 @@ class IsoBuild: self.iso_work_dir = os.path.join( self.compose_latest_dir, - "work/iso" + "work/isos" ) self.lorax_work_dir = os.path.join( @@ -566,6 +568,11 @@ class IsoBuild: 'lorax' ) + iso_to_go = os.path.join( + self.iso_work_dir, + arch + ) + if not os.path.exists(os.path.join(src_to_image, '.treeinfo')): self.log.error( '[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' + @@ -588,20 +595,40 @@ class IsoBuild: ) return + self.log.info('Copying %s boot iso to work directory...' % arch) + os.makedirs(iso_to_go, exist_ok=True) + + rclevel = '' + if self.release_candidate: + rclevel = '-' + self.rclvl + + isobootpath = '{}/{}-{}.{}{}-{}-{}.iso'.format( + iso_to_go, + self.shortname, + self.major_version, + self.minor_version, + rclevel, + arch, + image + ) + + shutil.copy2(src_to_image + '/images/boot.iso', isobootpath) + self.log.info('Copying base lorax to %s directory...' % image) try: - shutil.copytree(src_to_image, path_to_image, copy_function=shutil.copy2) + shutil.copytree(src_to_image, path_to_image, copy_function=shutil.copy2, dirs_exist_ok=True) except: self.log.error('%s already exists??' 
% image) - self.log.info('Removing boot.iso from copy') - try: - os.remove(path_to_image + '/images/boot.iso') - except: - self.log.error( - '[' + Color.BOLD + Color.YELLOW + 'FAIL' + Color.END + '] ' + - 'Cannot remove boot.iso' - ) + if self.iso_map['images'][image]['disc']: + self.log.info('Removing boot.iso from %s' % image) + try: + os.remove(path_to_image + '/images/boot.iso') + except: + self.log.error( + '[' + Color.BOLD + Color.YELLOW + 'FAIL' + Color.END + '] ' + + 'Cannot remove boot.iso' + ) def run_boot_sync(self): """ @@ -639,10 +666,86 @@ class IsoBuild: """ image = os.path.join(self.lorax_work_dir, arch, variant) treeinfo = os.path.join(image, '.treeinfo') - repos = self.iso_map['images'][variant]['repos'] + imagemap = self.iso_map['images'][variant] + primary = imagemap['variant'] + repos = imagemap['repos'] + is_disc = False - #ti = productmd.treeinfo.TreeInfo() - #ti.load(treeinfo) + if imagemap['disc']: + is_disc = True + discnum = 1 + + # load up productmd + ti = productmd.treeinfo.TreeInfo() + ti.load(treeinfo) + + # Set the name + ti.release.name = self.distname + ti.release.short = self.shortname + # Set the version (the initial lorax run does this, but we are setting + # it just in case) + ti.release.version = self.release + # Assign the present images into a var as a copy. For each platform, + # clear out the present dictionary. For each item and path in the + # assigned var, assign it back to the platform dictionary. If the path + # is empty, continue. Do checksums afterwards. + plats = ti.images.images.copy() + for platform in ti.images.images: + ti.images.images[platform] = {} + for i, p in plats[platform].items(): + if not p: + continue + if 'boot.iso' in i and is_disc: + continue + ti.images.images[platform][i] = p + ti.checksums.add(p, self.checksum, root_dir=image) + + # stage2 checksums + if ti.stage2.mainimage: + ti.checksums.add(ti.stage2.mainimage, self.checksum, root_dir=image) + + if ti.stage2.instimage: + ti.checksums.add(ti.stage2.instimage, self.checksum, root_dir=image) + + # If we are a disc, set the media section appropriately. + if is_disc: + ti.media.discnum = discnum + ti.media.totaldiscs = discnum + + # Create variants + # Note to self: There's a lot of legacy stuff running around for + # Fedora, ELN, and RHEL in general. This is the general structure, + # apparently. But there could be a chance it'll change. We may need to + # put in a configuration to deal with it at some point. + #ti.variants.variants.clear() + for y in repos: + if y in ti.variants.variants.keys(): + vari = ti.variants.variants[y] + else: + vari = productmd.treeinfo.Variant(ti) + + vari.id = y + vari.uid = y + vari.name = y + vari.type = "variant" + if is_disc: + vari.paths.repository = y + vari.paths.packages = y + "/Packages" + else: + if y == primary: + vari.paths.repository = "." + vari.paths.packages = "Packages" + else: + vari.paths.repository = "../../../" + y + "/" + arch + "/os" + vari.paths.packages = "../../../" + y + "/" + arch + "/os/Packages" + + if y not in ti.variants.variants.keys(): + ti.variants.add(vari) + + del vari + + # Set default variant + ti.dump(treeinfo, main_variant=primary) def discinfo_write(self): """ @@ -679,11 +782,19 @@ class IsoBuild: each arch. This should NOT be called during the usual run() section. 
""" sync_root = self.compose_latest_sync + self.log.info( '[' + Color.BOLD + Color.GREEN + 'INFO' + Color.END + '] ' + 'Starting Extra ISOs phase' ) + if not os.path.exists(self.compose_base): + self.log.info( + '[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' + + 'The compose directory MUST be here. Cannot continue.' + ) + raise SystemExit() + self._extra_iso_build_wrap() self.log.info('Compose repo directory: %s' % sync_root) @@ -698,6 +809,11 @@ class IsoBuild: Try to figure out where the build is going, we only support mock for now. """ + work_root = os.path.join( + self.compose_latest_dir, + 'work' + ) + arches_to_build = self.arches if self.arch: arches_to_build = [self.arch] @@ -715,15 +831,32 @@ class IsoBuild: continue for a in arches_to_build: + lorax_path = os.path.join(self.lorax_work_dir, a, 'lorax', '.treeinfo') + image_path = os.path.join(self.lorax_work_dir, a, y, '.treeinfo') + if not os.path.exists(image_path): + self.log.error( + '[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' + + 'Lorax data not found for ' + y + '. Skipping.' + ) + + if not os.path.exists(lorax_path): + self.log.error( + '[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' + + 'Lorax not found at all. This is considered fatal.' + ) + + raise SystemExit() + grafts = self._generate_graft_points( a, y, self.iso_map['images'][y]['repos'], ) + self._extra_iso_local_config(a, y, grafts, work_root) if self.extra_iso_mode == 'local': - self._extra_iso_local_config(a, y, grafts) - #self._extra_iso_local_run() + self._extra_iso_local_run(a, y, work_root) + print() elif self.extra_iso_mode == 'podman': continue else: @@ -733,45 +866,48 @@ class IsoBuild: ) raise SystemExit() - if self.extra_iso_mode == 'podman': - print() - - def _extra_iso_local_config(self, arch, image, grafts): + def _extra_iso_local_config(self, arch, image, grafts, work_root): """ Local ISO build mode - this should build in mock """ self.log.info('Generating Extra ISO configuration and script') + + entries_dir = os.path.join(work_root, "entries") + boot_iso = os.path.join(work_root, "lorax", arch, "lorax/images/boot.iso") mock_iso_template = self.tmplenv.get_template('isomock.tmpl.cfg') mock_sh_template = self.tmplenv.get_template('extraisobuild.tmpl.sh') iso_template = self.tmplenv.get_template('buildExtraImage.tmpl.sh') + xorriso_template = self.tmplenv.get_template('xorriso.tmpl.txt') - mock_iso_path = '/var/tmp/lorax-' + self.major_version + '.cfg' - mock_sh_path = '/var/tmp/extraisobuild.sh' - iso_template_path = '/var/tmp/buildExtraImage.sh' + mock_iso_path = '/var/tmp/lorax-{}.cfg'.format(self.major_version) + mock_sh_path = '{}/extraisobuild-{}-{}.sh'.format(entries_dir, arch, image) + iso_template_path = '{}/buildExtraImage-{}-{}.sh'.format(entries_dir, arch, image) + xorriso_template_path = '{}/xorriso-{}-{}.txt'.format(entries_dir, arch, image) rclevel = '' if self.release_candidate: rclevel = '-' + self.rclvl - discnum = '' - if self.iso_map['images'][image]['discnum']: - discnum = self.iso_map['images'][image]['discnum'] - - volid = '{}-{}-{}-{}-{}'.format( + volid = '{}-{}-{}{}-{}-{}'.format( self.shortname, self.major_version, self.minor_version, + rclevel, arch, image ) - isoname = '{}-{}.{}-{}-{}{}.iso'.format( + isoname = '{}-{}.{}{}-{}-{}.iso'.format( self.shortname, self.major_version, self.minor_version, + rclevel, arch, - image, - discnum + image + ) + + lorax_pkg_cmd = '/usr/bin/dnf install {} -y'.format( + ' '.join(self.iso_map['lorax']['required_pkgs']) ) mock_iso_template_output = 
mock_iso_template.render( @@ -783,6 +919,8 @@ class IsoBuild: dist=self.disttag, repos=self.repolist, user_agent='{{ user_agent }}', + compose_dir_is_here=True, + compose_dir=self.compose_root, ) mock_sh_template_output = mock_sh_template.render( @@ -792,6 +930,7 @@ class IsoBuild: builddir=self.mock_work_root, shortname=self.shortname, isoname=isoname, + entries_dir=entries_dir, ) opts = { @@ -809,15 +948,35 @@ class IsoBuild: make_manifest = self._get_manifest_cmd(opts) iso_template_output = iso_template.render( - inside_podman=False, + extra_iso_mode=self.extra_iso_mode, arch=arch, compose_work_iso_dir=self.iso_work_dir, make_image=make_image, isohybrid=isohybrid, implantmd5=implantmd5, make_manifest=make_manifest, + lorax_pkg_cmd=lorax_pkg_cmd, ) + if opts['use_xorrisofs']: + # Here we generate another template instead for xorrisofs. We'll do + # manual writes for now instead of a template. I'm too tired, it's + # 1am, and I can't rationally think of how to do this in jinja (I + # know it's easy, it's just too late) + xp = open(grafts) + xorpoint = xp.read() + xp.close() + xorriso_template_output = xorriso_template.render( + boot_iso=boot_iso, + isoname=isoname, + volid=volid, + grafts=xorpoint, + ) + xorriso_template_entry = open(xorriso_template_path, "w+") + xorriso_template_entry.write(xorriso_template_output) + xorriso_template_entry.close() + + mock_iso_entry = open(mock_iso_path, "w+") mock_iso_entry.write(mock_iso_template_output) mock_iso_entry.close() @@ -834,9 +993,9 @@ class IsoBuild: os.chmod(iso_template_path, 0o755) - def _extra_iso_local_run(self): + def _extra_iso_local_run(self, arch, image, work_root): """ - Runs the actual local process + Runs the actual local process using mock """ def _generate_graft_points( @@ -902,12 +1061,25 @@ class IsoBuild: iso, arch ) + + xorrs = '{}/xorriso-{}.txt'.format( + lorax_base_dir, + arch + ) + self._write_grafts( grafts, + xorrs, files, exclude=["*/lost+found", "*/boot.iso"] ) - return grafts + + if self.iso_map['xorrisofs']: + grafters = xorrs + else: + grafters = grafts + + return grafters def _get_grafts(self, paths, exclusive_paths=None, exclude=None): """ @@ -939,7 +1111,7 @@ class IsoBuild: return result - def _write_grafts(self, filepath, u, exclude=None): + def _write_grafts(self, filepath, xorrspath, u, exclude=None): """ Write out the graft points """ @@ -963,17 +1135,30 @@ class IsoBuild: result[zl] = u[zl] seen.add(dirn) - fh = open(filepath, "w") - for zl in sorted(result, key=self._sorting): - found = False - for excl in exclude: - if fnmatch(zl, excl): - found = True - break - if found: - continue - fh.write("%s=%s\n" % (zl, u[zl])) - fh.close() + if self.iso_map['xorrisofs']: + fx = open(xorrspath, "w") + for zm in sorted(result, key=self._sorting): + found = False + for excl in exclude: + if fnmatch(zm, excl): + found = True + break + if found: + continue + fx.write("-map %s %s\n" % (u[zm], zm)) + fx.close() + else: + fh = open(filepath, "w") + for zl in sorted(result, key=self._sorting): + found = False + for excl in exclude: + if fnmatch(zl, excl): + found = True + break + if found: + continue + fh.write("%s=%s\n" % (zl, u[zl])) + fh.close() def _scanning(self, p): """ @@ -1160,7 +1345,7 @@ class IsoBuild: ): # I should hardcode this I think #untranslated_filenames = True - #translation_table = True + translation_table = True #joliet = True #joliet_long = True #rock = True @@ -1196,8 +1381,7 @@ class IsoBuild: #if rock: cmd.append("-rational-rock") - #if not use_xorrisofs and translation_table: - if not 
use_xorrisofs: + if not use_xorrisofs and translation_table: cmd.append("-translation-table") if input_charset: @@ -1230,18 +1414,31 @@ class IsoBuild: """ Gets an ISO manifest """ - return "/usr/bin/isoinfo -R -f -i %s | grep -v '/TRANS.TBL$' | sort >> %s.manifest" % ( - shlex.quote(opts['iso_name']), - shlex.quote(opts['iso_name']), - ) + if opts['use_xorrisofs']: + return """/usr/bin/xorriso -dev %s --find | + tail -n+2 | + tr -d "'" | + cut -c2- sort >> %s.manifest""" % ( + shlex.quote(opts['iso_name']), + shlex.quote(opts['iso_name']), + ) + else: + return "/usr/bin/isoinfo -R -f -i %s | grep -v '/TRANS.TBL$' | sort >> %s.manifest" % ( + shlex.quote(opts['iso_name']), + shlex.quote(opts['iso_name']), + ) def _get_isohybrid_cmd(self, opts): cmd = [] - if opts['arch'] == "x86_64": - cmd = ["/usr/bin/isohybrid"] - cmd.append("--uefi") - cmd.append(opts['iso_name']) - returned_cmd = ' '.join(cmd) + if not opts['use_xorrisofs']: + if opts['arch'] == "x86_64": + cmd = ["/usr/bin/isohybrid"] + cmd.append("--uefi") + cmd.append(opts['iso_name']) + returned_cmd = ' '.join(cmd) + else: + returned_cmd = '' + return returned_cmd def _get_make_image_cmd(self, opts): @@ -1258,15 +1455,19 @@ class IsoBuild: if opts['arch'] in ("ppc64", "ppc64le"): isokwargs["input_charset"] = None - cmd = self._get_mkisofs_cmd( - opts['iso_name'], - volid=opts['volid'], - exclude=["./lost+found"], - grafts=opts['graft_points'], - use_xorrisofs=opts['use_xorrisofs'], - iso_level=opts['iso_level'], - **isokwargs - ) + if opts['use_xorrisofs']: + cmd = ['/usr/bin/xorrisofs', '-dialog', 'on', '<', opts['graft_points']] + else: + cmd = self._get_mkisofs_cmd( + opts['iso_name'], + volid=opts['volid'], + exclude=["./lost+found"], + grafts=opts['graft_points'], + use_xorrisofs=False, + iso_level=opts['iso_level'], + **isokwargs + ) + returned_cmd = ' '.join(cmd) return returned_cmd From 37c0be3fd06a0d9e177c427d0c36fb8a6ded7008 Mon Sep 17 00:00:00 2001 From: nazunalika Date: Sun, 26 Jun 2022 21:29:55 -0700 Subject: [PATCH 49/64] attempt to build ISO in both mock and podman --- iso/empanadas/empanadas/common.py | 42 ++++ iso/empanadas/empanadas/configs/el9-beta.yaml | 1 + iso/empanadas/empanadas/configs/el9.yaml | 1 + iso/empanadas/empanadas/configs/el9lh.yaml | 1 + iso/empanadas/empanadas/configs/rln.yaml | 1 - .../empanadas/templates/buildImage.tmpl.sh | 12 + .../empanadas/templates/extraisobuild.tmpl.sh | 2 +- iso/empanadas/empanadas/util/iso_utils.py | 225 ++++++++++++++++-- 8 files changed, 262 insertions(+), 23 deletions(-) diff --git a/iso/empanadas/empanadas/common.py b/iso/empanadas/empanadas/common.py index bf081ab..e445caf 100644 --- a/iso/empanadas/empanadas/common.py +++ b/iso/empanadas/empanadas/common.py @@ -6,6 +6,7 @@ import glob import rpm import yaml import logging +import hashlib # These are a bunch of colors we may use in terminal output class Color: @@ -20,6 +21,47 @@ class Color: BOLD = '\033[1m' END = '\033[0m' +class Utils: + """ + Quick utilities that may be commonly used + """ + @staticmethod + def get_checksum(path, hashtype, logger): + """ + Generates a checksum from the provided path by doing things in chunks. + This way we don't do it in memory. 
+ """ + try: + checksum = hashlib.new(hashtype) + except ValueError: + logger.error("Invalid hash type: %s" % hashtype) + return False + + try: + input_file = open(path, "rb") + except IOError as e: + logger.error("Could not open file %s: %s" % (path, e)) + return False + + while True: + chunk = input_file.read(8192) + if not chunk: + break + checksum.update(chunk) + + input_file.close() + stat = os.stat(path) + base = os.path.basename(path) + # This emulates our current syncing scripts that runs stat and + # sha256sum and what not with a very specific output. + return "%s: %s bytes\n%s (%s) = %s" % ( + base, + stat.st_size, + hashtype.upper(), + base, + checksum.hexdigest() + ) + # vars and additional checks rldict = {} sigdict = {} diff --git a/iso/empanadas/empanadas/configs/el9-beta.yaml b/iso/empanadas/empanadas/configs/el9-beta.yaml index 6de4bbe..0ca78a8 100644 --- a/iso/empanadas/empanadas/configs/el9-beta.yaml +++ b/iso/empanadas/empanadas/configs/el9-beta.yaml @@ -70,6 +70,7 @@ - 'isomd5sum' - 'lorax-templates-rhel' - 'lorax-templates-generic' + - 'xorriso' repoclosure_map: arches: x86_64: '--forcearch=x86_64 --arch=x86_64 --arch=athlon --arch=i686 --arch=i586 --arch=i486 --arch=i386 --arch=noarch' diff --git a/iso/empanadas/empanadas/configs/el9.yaml b/iso/empanadas/empanadas/configs/el9.yaml index b71a1ef..1918b68 100644 --- a/iso/empanadas/empanadas/configs/el9.yaml +++ b/iso/empanadas/empanadas/configs/el9.yaml @@ -70,6 +70,7 @@ - 'isomd5sum' - 'lorax-templates-rhel' - 'lorax-templates-generic' + - 'xorriso' repoclosure_map: arches: x86_64: '--forcearch=x86_64 --arch=x86_64 --arch=athlon --arch=i686 --arch=i586 --arch=i486 --arch=i386 --arch=noarch' diff --git a/iso/empanadas/empanadas/configs/el9lh.yaml b/iso/empanadas/empanadas/configs/el9lh.yaml index 621e758..78de775 100644 --- a/iso/empanadas/empanadas/configs/el9lh.yaml +++ b/iso/empanadas/empanadas/configs/el9lh.yaml @@ -70,6 +70,7 @@ - 'isomd5sum' - 'lorax-templates-rhel' - 'lorax-templates-generic' + - 'xorriso' repoclosure_map: arches: x86_64: '--forcearch=x86_64 --arch=x86_64 --arch=athlon --arch=i686 --arch=i586 --arch=i486 --arch=i386 --arch=noarch' diff --git a/iso/empanadas/empanadas/configs/rln.yaml b/iso/empanadas/empanadas/configs/rln.yaml index 1bd52d6..c36fcb1 100644 --- a/iso/empanadas/empanadas/configs/rln.yaml +++ b/iso/empanadas/empanadas/configs/rln.yaml @@ -65,7 +65,6 @@ - 'libreport-rhel-anaconda-bugzilla' required_pkgs: - 'lorax' - - 'genisoimage' - 'isomd5sum' - 'lorax-templates-rhel' - 'lorax-templates-generic' diff --git a/iso/empanadas/empanadas/templates/buildImage.tmpl.sh b/iso/empanadas/empanadas/templates/buildImage.tmpl.sh index d7b7708..a17c853 100644 --- a/iso/empanadas/empanadas/templates/buildImage.tmpl.sh +++ b/iso/empanadas/empanadas/templates/buildImage.tmpl.sh @@ -39,6 +39,18 @@ fi # If we didn't fail, let's pack up everything! cd "${MOCKBLD}" + +# Get ISO manifest +if [ -f "/usr/bin/xorriso" ]; then + /usr/bin/xorriso -dev lorax/images/boot.iso --find | + tail -n+2 | + tr -d "'" | + cut -c2- sort >> lorax/images/boot.iso.manifest +elif [ -f "/usr/bin/isoinfo" ]; then + /usr/bin/isoinfo -R -f -i lorax/images/boot.iso | + grep -v '/TRANS.TBL$' | sort >> lorax/images/boot.iso.manifest +fi + tar czf "${LORAX_TAR}" lorax "${LOGFILE}" tar_ret_val=$? 
diff --git a/iso/empanadas/empanadas/templates/extraisobuild.tmpl.sh b/iso/empanadas/empanadas/templates/extraisobuild.tmpl.sh index 7dc6584..892c5c9 100644 --- a/iso/empanadas/empanadas/templates/extraisobuild.tmpl.sh +++ b/iso/empanadas/empanadas/templates/extraisobuild.tmpl.sh @@ -40,7 +40,7 @@ mock_ret_val=$? if [ $mock_ret_val -eq 0 ]; then # Copy resulting data to /var/lib/mock/{{ shortname|lower }}-{{ major }}-{{ arch }}/result mkdir -p "${MOCK_RESL}" - cp "${MOCK_CHRO}${BUILDDIR}/${IMAGE_ISO}" "${MOCK_RESL}" + cp "${MOCK_CHRO}${BUILDDIR}/${IMAGE_ISO}*" "${MOCK_RESL}" else echo "!! EXTRA ISO RUN FAILED !!" exit 1 diff --git a/iso/empanadas/empanadas/util/iso_utils.py b/iso/empanadas/empanadas/util/iso_utils.py index 2784e77..23c06ad 100644 --- a/iso/empanadas/empanadas/util/iso_utils.py +++ b/iso/empanadas/empanadas/util/iso_utils.py @@ -35,7 +35,7 @@ import productmd.treeinfo from jinja2 import Environment, FileSystemLoader -from empanadas.common import Color, _rootdir +from empanadas.common import Color, _rootdir, Utils class IsoBuild: """ @@ -105,6 +105,10 @@ class IsoBuild: self.extra_files = rlvars['extra_files'] + self.container = config['container'] + if 'container' in rlvars and len(rlvars['container']) > 0: + self.container = rlvars['container'] + self.staging_dir = os.path.join( config['staging_root'], config['category_stub'], @@ -416,6 +420,8 @@ class IsoBuild: for variant in self.iso_map['images']: self._copy_lorax_to_variant(self.force_unpack, arch, variant) + self._copy_boot_to_work(self.force_unpack, arch) + self.log.info( '[' + Color.BOLD + Color.GREEN + 'INFO' + Color.END + '] ' + 'Image variant phase completed' @@ -595,25 +601,6 @@ class IsoBuild: ) return - self.log.info('Copying %s boot iso to work directory...' % arch) - os.makedirs(iso_to_go, exist_ok=True) - - rclevel = '' - if self.release_candidate: - rclevel = '-' + self.rclvl - - isobootpath = '{}/{}-{}.{}{}-{}-{}.iso'.format( - iso_to_go, - self.shortname, - self.major_version, - self.minor_version, - rclevel, - arch, - image - ) - - shutil.copy2(src_to_image + '/images/boot.iso', isobootpath) - self.log.info('Copying base lorax to %s directory...' % image) try: shutil.copytree(src_to_image, path_to_image, copy_function=shutil.copy2, dirs_exist_ok=True) @@ -630,6 +617,73 @@ class IsoBuild: 'Cannot remove boot.iso' ) + def _copy_boot_to_work(self, force_unpack, arch): + src_to_image = os.path.join( + self.lorax_work_dir, + arch, + 'lorax' + ) + + iso_to_go = os.path.join( + self.iso_work_dir, + arch + ) + + path_to_src_image = '{}/{}'.format( + src_to_image, + '/images/boot.iso' + ) + + rclevel = '' + if self.release_candidate: + rclevel = '-' + self.rclvl + + discname = '{}-{}.{}{}-{}-{}.iso'.format( + self.shortname, + self.major_version, + self.minor_version, + rclevel, + arch, + 'boot' + ) + + isobootpath = '{}/{}'.format( + iso_to_go, + discname + ) + + manifest = '{}.{}'.format( + isobootpath, + 'manifest' + ) + + if not force_unpack: + file_check = isobootpath + if os.path.exists(file_check): + self.log.warn( + '[' + Color.BOLD + Color.YELLOW + 'WARN' + Color.END + '] ' + + 'Boot image (' + discname + ') already exists' + ) + return + + self.log.info('Copying %s boot iso to work directory...' % arch) + os.makedirs(iso_to_go, exist_ok=True) + shutil.copy2(path_to_src_image, isobootpath) + if os.path.exists(path_to_src_image + '.manifest'): + shutil.copy2(path_to_src_image + '.manifest', manifest) + + self.log.info('Creating checksum for %s boot iso...' 
% arch) + checksum = Utils.get_checksum(isobootpath, self.checksum, self.log) + if not checksum: + self.log.error( + '[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' + + isobootpath + ' not found! Are you sure we copied it?' + ) + return + with open(isobootpath + '.CHECKSUM', "w+") as c: + c.write(checksum) + c.close() + def run_boot_sync(self): """ This unpacks into BaseOS/$arch/os, assuming there's no data actually @@ -866,9 +920,13 @@ class IsoBuild: ) raise SystemExit() + if self.extra_iso_mode == 'podman': + self._extra_iso_podman_run(arches_to_build, images_to_build, work_root) + def _extra_iso_local_config(self, arch, image, grafts, work_root): """ - Local ISO build mode - this should build in mock + Local ISO build configuration - This generates the configuration for + both mock and podman entries """ self.log.info('Generating Extra ISO configuration and script') @@ -997,6 +1055,114 @@ class IsoBuild: """ Runs the actual local process using mock """ + entries_dir = os.path.join(work_root, "entries") + extra_iso_cmd = '/bin/bash {}/extraisobuild-{}-{}.sh'.format(entries_dir, arch, image) + self.log.info('Starting mock build...') + p = subprocess.call(shlex.split(extra_iso_cmd)) + if p != 0: + self.log.error('An error occured during execution.') + self.log.error('See the logs for more information.') + raise SystemExit() + # Copy it if the compose dir is here? + + def _extra_iso_podman_run(self, arches, images, work_root): + """ + Does all the image building in podman containers to parallelize the + builds. This is a case where you can call this instead of looping mock, + or not run it in peridot. This gives the Release Engineer a little more + flexibility if they care enough. + + This honestly assumes you are running this on a machine that has access + to the compose directories. It's the same as if you were doing a + reposync of the repositories. + """ + cmd = self.podman_cmd() + entries_dir = os.path.join(work_root, "entries") + for i in images: + entry_name_list = [] + image_name = i + arch_sync = arches.copy() + + for a in arch_sync: + entry_name = 'buildExtraImage-{}-{}.sh'.format(a, i) + entry_name_list.append(entry_name) + + for pod in entry_name_list: + podman_cmd_entry = '{} run -d -it -v "{}:{}" -v "{}:{}" --name {} --entrypoint {}/{} {}'.format( + cmd, + self.compose_root, + self.compose_root, + entries_dir, + entries_dir, + pod, + entries_dir, + pod, + self.container + ) + + process = subprocess.call( + shlex.split(podman_cmd_entry), + stdout=subprocess.DEVNULL, + stderr=subprocess.DEVNULL + ) + + join_all_pods = ' '.join(entry_name_list) + time.sleep(3) + self.log.info( + '[' + Color.BOLD + Color.GREEN + 'INFO' + Color.END + '] ' + + 'Building ' + i + ' ...' + ) + pod_watcher = '{} wait {}'.format( + cmd, + join_all_pods + ) + + watch_man = subprocess.call( + shlex.split(pod_watcher), + stdout=subprocess.DEVNULL, + stderr=subprocess.DEVNULL + ) + + # After the above is done, we'll check each pod process for an exit + # code. 
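# Illustrative aside: a condensed skeleton of the container orchestration in
# _extra_iso_podman_run, shown with placeholder names (entries_dir, container,
# and the pod list are stand-ins; the real code also bind-mounts the compose
# root and the entries directory and reports through self.log). It is not the
# function itself, only the run/wait/inspect/remove pattern it follows.
import shlex
import subprocess

entries_dir = "/tmp/work/entries"
container = "localhost/empanadas:latest"
pods = ["buildExtraImage-x86_64-dvd.sh", "buildExtraImage-aarch64-dvd.sh"]

# Fan out one detached container per entry script.
for pod in pods:
    subprocess.call(shlex.split(
        '/usr/bin/podman run -d -it --name {0} --entrypoint {1}/{0} {2}'.format(
            pod, entries_dir, container)
    ))

# Block until every container has finished.
subprocess.call(shlex.split('/usr/bin/podman wait ' + ' '.join(pods)))

# Anything that did not exit cleanly is treated as a failed image build.
for pod in pods:
    out, _ = subprocess.Popen('/usr/bin/podman ps -f status=exited -f name=' + pod,
                              shell=True, stdout=subprocess.PIPE).communicate()
    if 'Exited (0)' not in out.decode():
        print('extra ISO build failed in ' + pod)

# Remove the finished containers.
subprocess.call(shlex.split('/usr/bin/podman rm ' + ' '.join(pods)))
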
+ pattern = "Exited (0)" + for pod in entry_name_list: + checkcmd = '{} ps -f status=exited -f name={}'.format( + cmd, + pod + ) + podcheck = subprocess.Popen( + checkcmd, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + shell=True + ) + + output, errors = podcheck.communicate() + if 'Exited (0)' not in output.decode(): + self.log.error( + '[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' + pod + ) + bad_exit_list.append(pod) + + rmcmd = '{} rm {}'.format( + cmd, + join_all_pods + ) + + rmpod = subprocess.Popen( + rmcmd, + stdout=subprocess.DEVNULL, + stderr=subprocess.DEVNULL, + shell=True + ) + + entry_name_list.clear() + self.log.info( + '[' + Color.BOLD + Color.GREEN + 'INFO' + Color.END + '] ' + + 'Building ' + i + ' completed' + ) + def _generate_graft_points( self, @@ -1471,6 +1637,23 @@ class IsoBuild: returned_cmd = ' '.join(cmd) return returned_cmd + def podman_cmd(self) -> str: + """ + This generates the podman run command. This is in the case that we want + to do reposyncs in parallel as we cannot reasonably run multiple + instances of dnf reposync on a single system. + """ + cmd = None + if os.path.exists("/usr/bin/podman"): + cmd = "/usr/bin/podman" + else: + self.log.error('/usr/bin/podman was not found. Good bye.') + raise SystemExit("\n\n/usr/bin/podman was not found.\n\nPlease " + " ensure that you have installed the necessary packages on " + " this system. " + Color.BOLD + "Note that docker is not " + "supported." + Color.END + ) + return cmd class LiveBuild: """ From 28facef12cab4492b5f6d5c7f00b8c0af828cc80 Mon Sep 17 00:00:00 2001 From: nazunalika Date: Sun, 26 Jun 2022 22:05:24 -0700 Subject: [PATCH 50/64] fix template for iso build --- .../empanadas/scripts/sync_from_peridot.py | 8 ++------ .../empanadas/templates/extraisobuild.tmpl.sh | 2 +- iso/empanadas/empanadas/util/iso_utils.py | 13 +++++++++++++ 3 files changed, 16 insertions(+), 7 deletions(-) diff --git a/iso/empanadas/empanadas/scripts/sync_from_peridot.py b/iso/empanadas/empanadas/scripts/sync_from_peridot.py index 5ffdc40..e2481d7 100755 --- a/iso/empanadas/empanadas/scripts/sync_from_peridot.py +++ b/iso/empanadas/empanadas/scripts/sync_from_peridot.py @@ -6,15 +6,11 @@ from empanadas.common import * from empanadas.util import Checks from empanadas.util import RepoSync -#rlvars = rldict['9'] -#r = Checks(rlvars, config['arch']) -#r.check_valid_arch() - # Start up the parser baby parser = argparse.ArgumentParser(description="Peridot Sync and Compose") # All of our options -parser.add_argument('--release', type=str, help="Major Release Version", required=True) +parser.add_argument('--release', type=str, help="Major Release Version or major-type (eg 9-beta)", required=True) parser.add_argument('--repo', type=str, help="Repository name") parser.add_argument('--arch', type=str, help="Architecture") parser.add_argument('--ignore-debug', action='store_true') @@ -41,7 +37,7 @@ r.check_valid_arch() a = RepoSync( rlvars, config, - major=results.release, + major=rlvars['major'], repo=results.repo, arch=results.arch, ignore_debug=results.ignore_debug, diff --git a/iso/empanadas/empanadas/templates/extraisobuild.tmpl.sh b/iso/empanadas/empanadas/templates/extraisobuild.tmpl.sh index 892c5c9..15a73e4 100644 --- a/iso/empanadas/empanadas/templates/extraisobuild.tmpl.sh +++ b/iso/empanadas/empanadas/templates/extraisobuild.tmpl.sh @@ -9,7 +9,7 @@ MOCK_ROOT="/var/lib/mock/{{ shortname|lower }}-{{ major }}-{{ arch }}" MOCK_RESL="${MOCK_ROOT}/result" MOCK_CHRO="${MOCK_ROOT}/root" 
MOCK_LOG="${MOCK_RESL}/mock-output.log" -IMAGE_SCR="{{ entries_dir }}/buildExtraImage.sh" +IMAGE_SCR="{{ entries_dir }}/buildExtraImage-{{ arch }}-{{ image }}.sh" IMAGE_ISO="{{ isoname }}" ISOLATION="{{ isolation }}" BUILDDIR="{{ builddir }}" diff --git a/iso/empanadas/empanadas/util/iso_utils.py b/iso/empanadas/empanadas/util/iso_utils.py index 23c06ad..656a155 100644 --- a/iso/empanadas/empanadas/util/iso_utils.py +++ b/iso/empanadas/empanadas/util/iso_utils.py @@ -439,6 +439,12 @@ class IsoBuild: ) self._treeinfo_wrapper(arch, variant) + # Do a dirsync for non-disc data + if not self.iso_map['images'][variant]['disc']: + self.log.info( + 'Syncing repo data and images for %s%s%s' % (Color.BOLD, variant, Color.END) + ) + self._copy_nondisc_to_repo(self.force_unpack, arch, variant) def _s3_determine_latest(self): @@ -684,6 +690,12 @@ class IsoBuild: c.write(checksum) c.close() + def _copy_nondisc_to_repo(self, force_unpack, arch, repo): + """ + Syncs data from a non-disc set of images to the appropriate repo. Repo + and image MUST match names for this to work. + """ + def run_boot_sync(self): """ This unpacks into BaseOS/$arch/os, assuming there's no data actually @@ -989,6 +1001,7 @@ class IsoBuild: shortname=self.shortname, isoname=isoname, entries_dir=entries_dir, + image=image, ) opts = { From fce45f3a727fb9a1e93a5e6cd3774b14f2dfb6e3 Mon Sep 17 00:00:00 2001 From: nazunalika Date: Sun, 26 Jun 2022 22:45:31 -0700 Subject: [PATCH 51/64] put in a hack for xorrisofs --- iso/empanadas/empanadas/configs/el9-beta.yaml | 1 - iso/empanadas/empanadas/configs/el9.yaml | 1 - iso/empanadas/empanadas/configs/el9lh.yaml | 1 - .../empanadas/templates/extraisobuild.tmpl.sh | 3 +- iso/empanadas/empanadas/util/iso_utils.py | 28 +++++++++++++++++-- 5 files changed, 27 insertions(+), 7 deletions(-) diff --git a/iso/empanadas/empanadas/configs/el9-beta.yaml b/iso/empanadas/empanadas/configs/el9-beta.yaml index 0ca78a8..6de4bbe 100644 --- a/iso/empanadas/empanadas/configs/el9-beta.yaml +++ b/iso/empanadas/empanadas/configs/el9-beta.yaml @@ -70,7 +70,6 @@ - 'isomd5sum' - 'lorax-templates-rhel' - 'lorax-templates-generic' - - 'xorriso' repoclosure_map: arches: x86_64: '--forcearch=x86_64 --arch=x86_64 --arch=athlon --arch=i686 --arch=i586 --arch=i486 --arch=i386 --arch=noarch' diff --git a/iso/empanadas/empanadas/configs/el9.yaml b/iso/empanadas/empanadas/configs/el9.yaml index 1918b68..b71a1ef 100644 --- a/iso/empanadas/empanadas/configs/el9.yaml +++ b/iso/empanadas/empanadas/configs/el9.yaml @@ -70,7 +70,6 @@ - 'isomd5sum' - 'lorax-templates-rhel' - 'lorax-templates-generic' - - 'xorriso' repoclosure_map: arches: x86_64: '--forcearch=x86_64 --arch=x86_64 --arch=athlon --arch=i686 --arch=i586 --arch=i486 --arch=i386 --arch=noarch' diff --git a/iso/empanadas/empanadas/configs/el9lh.yaml b/iso/empanadas/empanadas/configs/el9lh.yaml index 78de775..621e758 100644 --- a/iso/empanadas/empanadas/configs/el9lh.yaml +++ b/iso/empanadas/empanadas/configs/el9lh.yaml @@ -70,7 +70,6 @@ - 'isomd5sum' - 'lorax-templates-rhel' - 'lorax-templates-generic' - - 'xorriso' repoclosure_map: arches: x86_64: '--forcearch=x86_64 --arch=x86_64 --arch=athlon --arch=i686 --arch=i586 --arch=i486 --arch=i386 --arch=noarch' diff --git a/iso/empanadas/empanadas/templates/extraisobuild.tmpl.sh b/iso/empanadas/empanadas/templates/extraisobuild.tmpl.sh index 15a73e4..4d42901 100644 --- a/iso/empanadas/empanadas/templates/extraisobuild.tmpl.sh +++ b/iso/empanadas/empanadas/templates/extraisobuild.tmpl.sh @@ -40,7 +40,8 @@ mock_ret_val=$? 
if [ $mock_ret_val -eq 0 ]; then # Copy resulting data to /var/lib/mock/{{ shortname|lower }}-{{ major }}-{{ arch }}/result mkdir -p "${MOCK_RESL}" - cp "${MOCK_CHRO}${BUILDDIR}/${IMAGE_ISO}*" "${MOCK_RESL}" + cp "${MOCK_CHRO}${BUILDDIR}/${IMAGE_ISO}" "${MOCK_RESL}" + cp "${MOCK_CHRO}${BUILDDIR}/${IMAGE_ISO}.manifest" "${MOCK_RESL}" else echo "!! EXTRA ISO RUN FAILED !!" exit 1 diff --git a/iso/empanadas/empanadas/util/iso_utils.py b/iso/empanadas/empanadas/util/iso_utils.py index 656a155..814c0f1 100644 --- a/iso/empanadas/empanadas/util/iso_utils.py +++ b/iso/empanadas/empanadas/util/iso_utils.py @@ -254,12 +254,23 @@ class IsoBuild: if self.release_candidate: rclevel = '-' + self.rclvl + # This is kind of a hack. Installing xorrisofs sets the alternatives to + # it, so backwards compatibility is sort of guaranteed. But we want to + # emulate as close as possible to what pungi does, so unless we + # explicitly ask for xorr (in el8 and 9), we should NOT be using it. + # For RLN and el10, we'll use xorr all the way through. When 8 is no + # longer getting ISO's, we'll remove this section. + required_pkgs = self.required_pkgs.copy() + if self.iso_map['xorrisofs']: + if 'genisoimage' in required_pkgs and 'xorriso' not in required_pkgs: + required_pkgs.append('xorriso') + mock_iso_template_output = mock_iso_template.render( arch=self.current_arch, major=self.major_version, fullname=self.fullname, shortname=self.shortname, - required_pkgs=self.required_pkgs, + required_pkgs=required_pkgs, dist=self.disttag, repos=self.repolist, user_agent='{{ user_agent }}', @@ -954,6 +965,17 @@ class IsoBuild: iso_template_path = '{}/buildExtraImage-{}-{}.sh'.format(entries_dir, arch, image) xorriso_template_path = '{}/xorriso-{}-{}.txt'.format(entries_dir, arch, image) + # This is kind of a hack. Installing xorrisofs sets the alternatives to + # it, so backwards compatibility is sort of guaranteed. But we want to + # emulate as close as possible to what pungi does, so unless we + # explicitly ask for xorr (in el8 and 9), we should NOT be using it. + # For RLN and el10, we'll use xorr all the way through. When 8 is no + # longer getting ISO's, we'll remove this section. 
+ required_pkgs = self.required_pkgs.copy() + if self.iso_map['xorrisofs']: + if 'genisoimage' in required_pkgs and 'xorriso' not in required_pkgs: + required_pkgs.append('xorriso') + rclevel = '' if self.release_candidate: rclevel = '-' + self.rclvl @@ -977,7 +999,7 @@ class IsoBuild: ) lorax_pkg_cmd = '/usr/bin/dnf install {} -y'.format( - ' '.join(self.iso_map['lorax']['required_pkgs']) + ' '.join(required_pkgs) ) mock_iso_template_output = mock_iso_template.render( @@ -985,7 +1007,7 @@ class IsoBuild: major=self.major_version, fullname=self.fullname, shortname=self.shortname, - required_pkgs=self.required_pkgs, + required_pkgs=required_pkgs, dist=self.disttag, repos=self.repolist, user_agent='{{ user_agent }}', From b262e6f0c52f55686ca186c252c0944a8ed1e736 Mon Sep 17 00:00:00 2001 From: nazunalika Date: Sun, 26 Jun 2022 23:51:08 -0700 Subject: [PATCH 52/64] fix list and fix packages --- iso/empanadas/empanadas/util/iso_utils.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/iso/empanadas/empanadas/util/iso_utils.py b/iso/empanadas/empanadas/util/iso_utils.py index 814c0f1..f70070b 100644 --- a/iso/empanadas/empanadas/util/iso_utils.py +++ b/iso/empanadas/empanadas/util/iso_utils.py @@ -976,6 +976,9 @@ class IsoBuild: if 'genisoimage' in required_pkgs and 'xorriso' not in required_pkgs: required_pkgs.append('xorriso') + if self.extra_iso_mode == 'podman': + required_pkgs.append('which') + rclevel = '' if self.release_candidate: rclevel = '-' + self.rclvl @@ -1113,6 +1116,7 @@ class IsoBuild: """ cmd = self.podman_cmd() entries_dir = os.path.join(work_root, "entries") + bad_exit_list = [] for i in images: entry_name_list = [] image_name = i From 436caefcbd030c3015d218b0b95bc433d49addf4 Mon Sep 17 00:00:00 2001 From: nazunalika Date: Mon, 27 Jun 2022 00:07:39 -0700 Subject: [PATCH 53/64] turn on xorrisofs for 9 --- iso/empanadas/empanadas/configs/el9-beta.yaml | 3 ++- iso/empanadas/empanadas/configs/el9.yaml | 3 ++- iso/empanadas/empanadas/configs/el9lh.yaml | 3 ++- 3 files changed, 6 insertions(+), 3 deletions(-) diff --git a/iso/empanadas/empanadas/configs/el9-beta.yaml b/iso/empanadas/empanadas/configs/el9-beta.yaml index 6de4bbe..7d4a9bb 100644 --- a/iso/empanadas/empanadas/configs/el9-beta.yaml +++ b/iso/empanadas/empanadas/configs/el9-beta.yaml @@ -35,7 +35,7 @@ packages: 'os/Packages' repodata: 'os/repodata' iso_map: - xorrisofs: False + xorrisofs: True iso_level: False images: dvd: @@ -70,6 +70,7 @@ - 'isomd5sum' - 'lorax-templates-rhel' - 'lorax-templates-generic' + - 'xorriso' repoclosure_map: arches: x86_64: '--forcearch=x86_64 --arch=x86_64 --arch=athlon --arch=i686 --arch=i586 --arch=i486 --arch=i386 --arch=noarch' diff --git a/iso/empanadas/empanadas/configs/el9.yaml b/iso/empanadas/empanadas/configs/el9.yaml index b71a1ef..fe393a8 100644 --- a/iso/empanadas/empanadas/configs/el9.yaml +++ b/iso/empanadas/empanadas/configs/el9.yaml @@ -35,7 +35,7 @@ packages: 'os/Packages' repodata: 'os/repodata' iso_map: - xorrisofs: False + xorrisofs: True iso_level: False images: dvd: @@ -70,6 +70,7 @@ - 'isomd5sum' - 'lorax-templates-rhel' - 'lorax-templates-generic' + - 'xorriso' repoclosure_map: arches: x86_64: '--forcearch=x86_64 --arch=x86_64 --arch=athlon --arch=i686 --arch=i586 --arch=i486 --arch=i386 --arch=noarch' diff --git a/iso/empanadas/empanadas/configs/el9lh.yaml b/iso/empanadas/empanadas/configs/el9lh.yaml index 621e758..91cb818 100644 --- a/iso/empanadas/empanadas/configs/el9lh.yaml +++ b/iso/empanadas/empanadas/configs/el9lh.yaml @@ -35,7 +35,7 @@ packages: 
'os/Packages' repodata: 'os/repodata' iso_map: - xorrisofs: False + xorrisofs: True iso_level: False images: dvd: @@ -70,6 +70,7 @@ - 'isomd5sum' - 'lorax-templates-rhel' - 'lorax-templates-generic' + - 'xorriso' repoclosure_map: arches: x86_64: '--forcearch=x86_64 --arch=x86_64 --arch=athlon --arch=i686 --arch=i586 --arch=i486 --arch=i386 --arch=noarch' From 8302604f7d1745411e2519a45edd53861159a7f8 Mon Sep 17 00:00:00 2001 From: nazunalika Date: Mon, 27 Jun 2022 00:26:42 -0700 Subject: [PATCH 54/64] enable logs in general --- .../templates/buildExtraImage.tmpl.sh | 22 ++++++++----------- iso/empanadas/empanadas/util/iso_utils.py | 13 ++++++++++- 2 files changed, 21 insertions(+), 14 deletions(-) diff --git a/iso/empanadas/empanadas/templates/buildExtraImage.tmpl.sh b/iso/empanadas/empanadas/templates/buildExtraImage.tmpl.sh index ab99d2e..bef34c7 100644 --- a/iso/empanadas/empanadas/templates/buildExtraImage.tmpl.sh +++ b/iso/empanadas/empanadas/templates/buildExtraImage.tmpl.sh @@ -2,27 +2,23 @@ set -ex {% if extra_iso_mode == "podman" %} -{{ lorax_pkg_cmd }} -mkdir /builddir +{{ lorax_pkg_cmd }} | tee -a {{ log_path }} +mkdir -p {{ compose_work_iso_dir }}/{{ arch }} +cd {{ compose_work_iso_dir }}/{{ arch }} +{% else %} +cd /builddir {% endif %} -cd /builddir if ! TEMPLATE="$($(head -n1 $(which lorax) | cut -c3-) -c 'import pylorax; print(pylorax.find_templates())')"; then TEMPLATE="/usr/share/lorax" fi -{{ make_image }} +{{ make_image }} | tee -a {{ log_path }} -{{ isohybrid }} +{{ isohybrid }} | tee -a {{ log_path }} -{{ implantmd5 }} +{{ implantmd5 }} | tee -a {{ log_path }} -{{ make_manifest }} +{{ make_manifest }} | tee -a {{ log_path }} -{% if extra_iso_mode == "podman" %} -mkdir -p {{ compose_work_iso_dir }}/{{ arch }} -cp /builddir/*.iso {{ compose_work_iso_dir }}/{{ arch }} -cp /builddir/*.iso.manifest {{ compose_work_iso_dir }}/{{ arch }} -#cp /builddir/*.log {{ compose_work_iso_dir }}/{{ arch }} -{% endif %} diff --git a/iso/empanadas/empanadas/util/iso_utils.py b/iso/empanadas/empanadas/util/iso_utils.py index f70070b..2c8acb1 100644 --- a/iso/empanadas/empanadas/util/iso_utils.py +++ b/iso/empanadas/empanadas/util/iso_utils.py @@ -933,7 +933,6 @@ class IsoBuild: if self.extra_iso_mode == 'local': self._extra_iso_local_run(a, y, work_root) - print() elif self.extra_iso_mode == 'podman': continue else: @@ -965,6 +964,17 @@ class IsoBuild: iso_template_path = '{}/buildExtraImage-{}-{}.sh'.format(entries_dir, arch, image) xorriso_template_path = '{}/xorriso-{}-{}.txt'.format(entries_dir, arch, image) + log_root = os.path.join( + work_root, + "logs", + self.date_stamp + ) + + if not os.path.exists(log_root): + os.makedirs(log_root, exist_ok=True) + + log_path = '{}/{}-{}.log'.format(log_root, arch, image) + # This is kind of a hack. Installing xorrisofs sets the alternatives to # it, so backwards compatibility is sort of guaranteed. 
But we want to # emulate as close as possible to what pungi does, so unless we @@ -1052,6 +1062,7 @@ class IsoBuild: implantmd5=implantmd5, make_manifest=make_manifest, lorax_pkg_cmd=lorax_pkg_cmd, + log_path=log_path, ) if opts['use_xorrisofs']: From 6d239b48e978fa1ff4f486e6b04069bd29c80270 Mon Sep 17 00:00:00 2001 From: nazunalika Date: Mon, 27 Jun 2022 00:29:45 -0700 Subject: [PATCH 55/64] remove templates for xorr --- iso/empanadas/empanadas/templates/buildExtraImage.tmpl.sh | 4 ++-- iso/empanadas/empanadas/util/iso_utils.py | 3 --- 2 files changed, 2 insertions(+), 5 deletions(-) diff --git a/iso/empanadas/empanadas/templates/buildExtraImage.tmpl.sh b/iso/empanadas/empanadas/templates/buildExtraImage.tmpl.sh index bef34c7..91631b2 100644 --- a/iso/empanadas/empanadas/templates/buildExtraImage.tmpl.sh +++ b/iso/empanadas/empanadas/templates/buildExtraImage.tmpl.sh @@ -7,12 +7,12 @@ mkdir -p {{ compose_work_iso_dir }}/{{ arch }} cd {{ compose_work_iso_dir }}/{{ arch }} {% else %} cd /builddir -{% endif %} - if ! TEMPLATE="$($(head -n1 $(which lorax) | cut -c3-) -c 'import pylorax; print(pylorax.find_templates())')"; then TEMPLATE="/usr/share/lorax" fi +{% endif %} + {{ make_image }} | tee -a {{ log_path }} diff --git a/iso/empanadas/empanadas/util/iso_utils.py b/iso/empanadas/empanadas/util/iso_utils.py index 2c8acb1..48255fd 100644 --- a/iso/empanadas/empanadas/util/iso_utils.py +++ b/iso/empanadas/empanadas/util/iso_utils.py @@ -986,9 +986,6 @@ class IsoBuild: if 'genisoimage' in required_pkgs and 'xorriso' not in required_pkgs: required_pkgs.append('xorriso') - if self.extra_iso_mode == 'podman': - required_pkgs.append('which') - rclevel = '' if self.release_candidate: rclevel = '-' + self.rclvl From 0ac1b0ba9610b36882fb7c7281c0614acab1f9d5 Mon Sep 17 00:00:00 2001 From: nazunalika Date: Mon, 27 Jun 2022 00:40:28 -0700 Subject: [PATCH 56/64] try logging some stuff --- .../empanadas/templates/buildExtraImage.tmpl.sh | 10 +++++----- iso/empanadas/empanadas/util/iso_utils.py | 10 +++++----- 2 files changed, 10 insertions(+), 10 deletions(-) diff --git a/iso/empanadas/empanadas/templates/buildExtraImage.tmpl.sh b/iso/empanadas/empanadas/templates/buildExtraImage.tmpl.sh index 91631b2..0fedfca 100644 --- a/iso/empanadas/empanadas/templates/buildExtraImage.tmpl.sh +++ b/iso/empanadas/empanadas/templates/buildExtraImage.tmpl.sh @@ -2,7 +2,7 @@ set -ex {% if extra_iso_mode == "podman" %} -{{ lorax_pkg_cmd }} | tee -a {{ log_path }} +{{ lorax_pkg_cmd }} mkdir -p {{ compose_work_iso_dir }}/{{ arch }} cd {{ compose_work_iso_dir }}/{{ arch }} {% else %} @@ -14,11 +14,11 @@ fi {% endif %} -{{ make_image }} | tee -a {{ log_path }} +{{ make_image }} -{{ isohybrid }} | tee -a {{ log_path }} +{{ isohybrid }} -{{ implantmd5 }} | tee -a {{ log_path }} +{{ implantmd5 }} -{{ make_manifest }} | tee -a {{ log_path }} +{{ make_manifest }} diff --git a/iso/empanadas/empanadas/util/iso_utils.py b/iso/empanadas/empanadas/util/iso_utils.py index 48255fd..29d5dfd 100644 --- a/iso/empanadas/empanadas/util/iso_utils.py +++ b/iso/empanadas/empanadas/util/iso_utils.py @@ -973,7 +973,7 @@ class IsoBuild: if not os.path.exists(log_root): os.makedirs(log_root, exist_ok=True) - log_path = '{}/{}-{}.log'.format(log_root, arch, image) + log_path_command = '| tee -a {}/{}-{}.log'.format(log_root, arch, image) # This is kind of a hack. Installing xorrisofs sets the alternatives to # it, so backwards compatibility is sort of guaranteed. 
But we want to @@ -1008,8 +1008,9 @@ class IsoBuild: image ) - lorax_pkg_cmd = '/usr/bin/dnf install {} -y'.format( - ' '.join(required_pkgs) + lorax_pkg_cmd = '/usr/bin/dnf install {} -y {}'.format( + ' '.join(required_pkgs), + log_path_command ) mock_iso_template_output = mock_iso_template.render( @@ -1045,7 +1046,7 @@ class IsoBuild: 'iso_level': self.iso_map['iso_level'], } - make_image = self._get_make_image_cmd(opts) + make_image = '{} {}'.format(self._get_make_image_cmd(opts), log_path_command) isohybrid = self._get_isohybrid_cmd(opts) implantmd5 = self._get_implantisomd5_cmd(opts) make_manifest = self._get_manifest_cmd(opts) @@ -1059,7 +1060,6 @@ class IsoBuild: implantmd5=implantmd5, make_manifest=make_manifest, lorax_pkg_cmd=lorax_pkg_cmd, - log_path=log_path, ) if opts['use_xorrisofs']: From cd16cf78f45dba71d0788f245eb2fce1cb7bf76e Mon Sep 17 00:00:00 2001 From: nazunalika Date: Mon, 27 Jun 2022 00:50:41 -0700 Subject: [PATCH 57/64] fix dialog error --- iso/empanadas/empanadas/util/iso_utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/iso/empanadas/empanadas/util/iso_utils.py b/iso/empanadas/empanadas/util/iso_utils.py index 29d5dfd..3bdcb72 100644 --- a/iso/empanadas/empanadas/util/iso_utils.py +++ b/iso/empanadas/empanadas/util/iso_utils.py @@ -1669,7 +1669,7 @@ class IsoBuild: isokwargs["input_charset"] = None if opts['use_xorrisofs']: - cmd = ['/usr/bin/xorrisofs', '-dialog', 'on', '<', opts['graft_points']] + cmd = ['/usr/bin/xorriso', '-dialog', 'on', '<', opts['graft_points']] else: cmd = self._get_mkisofs_cmd( opts['iso_name'], From e7386c2f6fbfe7069bbabd9c24c859479fa6c025 Mon Sep 17 00:00:00 2001 From: nazunalika Date: Mon, 27 Jun 2022 01:01:36 -0700 Subject: [PATCH 58/64] xorr conf blank? --- iso/empanadas/empanadas/templates/xorriso.tmpl.txt | 2 +- iso/empanadas/empanadas/util/iso_utils.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/iso/empanadas/empanadas/templates/xorriso.tmpl.txt b/iso/empanadas/empanadas/templates/xorriso.tmpl.txt index ca2b68e..a15d32d 100644 --- a/iso/empanadas/empanadas/templates/xorriso.tmpl.txt +++ b/iso/empanadas/empanadas/templates/xorriso.tmpl.txt @@ -2,4 +2,4 @@ -outdev {{ isoname }} -boot_image any replay -volid {{ volid }} -{{ grafts }} +{{ graft }} diff --git a/iso/empanadas/empanadas/util/iso_utils.py b/iso/empanadas/empanadas/util/iso_utils.py index 3bdcb72..e986493 100644 --- a/iso/empanadas/empanadas/util/iso_utils.py +++ b/iso/empanadas/empanadas/util/iso_utils.py @@ -1074,7 +1074,7 @@ class IsoBuild: boot_iso=boot_iso, isoname=isoname, volid=volid, - grafts=xorpoint, + graft=xorpoint, ) xorriso_template_entry = open(xorriso_template_path, "w+") xorriso_template_entry.write(xorriso_template_output) From 5ca80da56d44d1da46c5ee561677aefe916dbb00 Mon Sep 17 00:00:00 2001 From: nazunalika Date: Mon, 27 Jun 2022 01:13:12 -0700 Subject: [PATCH 59/64] wrong path was being used for grafts --- iso/empanadas/empanadas/util/iso_utils.py | 35 +++++++++++------------ 1 file changed, 16 insertions(+), 19 deletions(-) diff --git a/iso/empanadas/empanadas/util/iso_utils.py b/iso/empanadas/empanadas/util/iso_utils.py index e986493..3ca46c1 100644 --- a/iso/empanadas/empanadas/util/iso_utils.py +++ b/iso/empanadas/empanadas/util/iso_utils.py @@ -1046,6 +1046,22 @@ class IsoBuild: 'iso_level': self.iso_map['iso_level'], } + if opts['use_xorrisofs']: + # Generate a xorriso compatible dialog + xp = open(grafts) + xorpoint = xp.read() + xp.close() + xorriso_template_output = xorriso_template.render( + 
boot_iso=boot_iso, + isoname=isoname, + volid=volid, + graft=xorpoint, + ) + xorriso_template_entry = open(xorriso_template_path, "w+") + xorriso_template_entry.write(xorriso_template_output) + xorriso_template_entry.close() + opts['graft_points'] = xorriso_template_path + make_image = '{} {}'.format(self._get_make_image_cmd(opts), log_path_command) isohybrid = self._get_isohybrid_cmd(opts) implantmd5 = self._get_implantisomd5_cmd(opts) @@ -1062,25 +1078,6 @@ class IsoBuild: lorax_pkg_cmd=lorax_pkg_cmd, ) - if opts['use_xorrisofs']: - # Here we generate another template instead for xorrisofs. We'll do - # manual writes for now instead of a template. I'm too tired, it's - # 1am, and I can't rationally think of how to do this in jinja (I - # know it's easy, it's just too late) - xp = open(grafts) - xorpoint = xp.read() - xp.close() - xorriso_template_output = xorriso_template.render( - boot_iso=boot_iso, - isoname=isoname, - volid=volid, - graft=xorpoint, - ) - xorriso_template_entry = open(xorriso_template_path, "w+") - xorriso_template_entry.write(xorriso_template_output) - xorriso_template_entry.close() - - mock_iso_entry = open(mock_iso_path, "w+") mock_iso_entry.write(mock_iso_template_output) mock_iso_entry.close() From 7d9f222fe959cd3e053e87be6e38714bae663e92 Mon Sep 17 00:00:00 2001 From: nazunalika Date: Mon, 27 Jun 2022 01:18:57 -0700 Subject: [PATCH 60/64] end argument missing --- iso/empanadas/empanadas/templates/xorriso.tmpl.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/iso/empanadas/empanadas/templates/xorriso.tmpl.txt b/iso/empanadas/empanadas/templates/xorriso.tmpl.txt index a15d32d..133aa11 100644 --- a/iso/empanadas/empanadas/templates/xorriso.tmpl.txt +++ b/iso/empanadas/empanadas/templates/xorriso.tmpl.txt @@ -3,3 +3,4 @@ -boot_image any replay -volid {{ volid }} {{ graft }} +-end From 1e043ee714d1729f818aa02f1d15899158617f53 Mon Sep 17 00:00:00 2001 From: nazunalika Date: Mon, 27 Jun 2022 02:06:01 -0700 Subject: [PATCH 61/64] fix manifest and add checksum phase to podman --- iso/empanadas/empanadas/util/iso_utils.py | 43 ++++++++++++++++++++++- 1 file changed, 42 insertions(+), 1 deletion(-) diff --git a/iso/empanadas/empanadas/util/iso_utils.py b/iso/empanadas/empanadas/util/iso_utils.py index 3ca46c1..0b49bd1 100644 --- a/iso/empanadas/empanadas/util/iso_utils.py +++ b/iso/empanadas/empanadas/util/iso_utils.py @@ -945,6 +945,12 @@ class IsoBuild: if self.extra_iso_mode == 'podman': self._extra_iso_podman_run(arches_to_build, images_to_build, work_root) + def _extra_iso_podman_checksum(self, arch, image, work_root): + """ + Generate checksum on the fly post-podman run + """ + + def _extra_iso_local_config(self, arch, image, grafts, work_root): """ Local ISO build configuration - This generates the configuration for @@ -1121,7 +1127,9 @@ class IsoBuild: """ cmd = self.podman_cmd() entries_dir = os.path.join(work_root, "entries") + isos_dir = os.path.join(work_root, "isos") bad_exit_list = [] + checksum_list = [] for i in images: entry_name_list = [] image_name = i @@ -1131,6 +1139,22 @@ class IsoBuild: entry_name = 'buildExtraImage-{}-{}.sh'.format(a, i) entry_name_list.append(entry_name) + rclevel = '' + if self.release_candidate: + rclevel = '-' + self.rclvl + + isoname = '{}/{}-{}.{}{}-{}-{}.iso'.format( + a, + self.shortname, + self.major_version, + self.minor_version, + rclevel, + a, + i + ) + + checksum_list.append(isoname) + for pod in entry_name_list: podman_cmd_entry = '{} run -d -it -v "{}:{}" -v "{}:{}" --name {} --entrypoint {}/{} {}'.format( cmd, @@ 
-1202,6 +1226,23 @@ class IsoBuild: ) entry_name_list.clear() + for p in checksum_list: + path = os.path.join(isos_dir, p) + if os.path.exists(path): + self.log.info( + '[' + Color.BOLD + Color.GREEN + 'INFO' + Color.END + '] ' + + 'Performing checksum for ' + p + ) + checksum = Utils.get_checksum(path, self.checksum, self.log) + if not checksum: + self.log.error( + '[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' + + path + ' not found! Are you sure it was built?' + ) + with open(path + '.CHECKSUM', "w+") as c: + c.write(checksum) + c.close() + self.log.info( '[' + Color.BOLD + Color.GREEN + 'INFO' + Color.END + '] ' + 'Building ' + i + ' completed' @@ -1628,7 +1669,7 @@ class IsoBuild: return """/usr/bin/xorriso -dev %s --find | tail -n+2 | tr -d "'" | - cut -c2- sort >> %s.manifest""" % ( + cut -c2- | sort >> %s.manifest""" % ( shlex.quote(opts['iso_name']), shlex.quote(opts['iso_name']), ) From 57d51dadc7c375f857019f40b138b9d7a7d5e79c Mon Sep 17 00:00:00 2001 From: nazunalika Date: Mon, 27 Jun 2022 03:39:11 -0700 Subject: [PATCH 62/64] initial boot images should have dvd in the volid --- iso/empanadas/empanadas/common.py | 2 +- iso/empanadas/empanadas/configs/el9-beta.yaml | 2 +- iso/empanadas/empanadas/configs/el9lh.yaml | 2 +- .../empanadas/templates/buildImage.tmpl.sh | 2 +- .../empanadas/templates/isolorax.tmpl.sh | 8 ----- iso/empanadas/empanadas/util/iso_utils.py | 35 +++++++++++-------- 6 files changed, 24 insertions(+), 27 deletions(-) delete mode 100644 iso/empanadas/empanadas/templates/isolorax.tmpl.sh diff --git a/iso/empanadas/empanadas/common.py b/iso/empanadas/empanadas/common.py index e445caf..b1b4d25 100644 --- a/iso/empanadas/empanadas/common.py +++ b/iso/empanadas/empanadas/common.py @@ -54,7 +54,7 @@ class Utils: base = os.path.basename(path) # This emulates our current syncing scripts that runs stat and # sha256sum and what not with a very specific output. 
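        # (Illustrative note: for a hypothetical file named
        # Rocky-9.0-x86_64-boot.iso hashed with sha256, the string built below
        # renders as
        #
        #     Rocky-9.0-x86_64-boot.iso: <size> bytes
        #     SHA256 (Rocky-9.0-x86_64-boot.iso) = <hex digest>
        #
        # mirroring the stat + sha256sum output of the existing sync scripts;
        # the change below only appends the missing trailing newline.)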
- return "%s: %s bytes\n%s (%s) = %s" % ( + return "%s: %s bytes\n%s (%s) = %s\n" % ( base, stat.st_size, hashtype.upper(), diff --git a/iso/empanadas/empanadas/configs/el9-beta.yaml b/iso/empanadas/empanadas/configs/el9-beta.yaml index 7d4a9bb..f5f3a07 100644 --- a/iso/empanadas/empanadas/configs/el9-beta.yaml +++ b/iso/empanadas/empanadas/configs/el9-beta.yaml @@ -2,7 +2,7 @@ '9-beta': fullname: 'Rocky Linux 9' revision: '9.1' - rclvl: 'RC1' + rclvl: 'BETA1' major: '9' minor: '1' bugurl: 'https://bugs.rockylinux.org' diff --git a/iso/empanadas/empanadas/configs/el9lh.yaml b/iso/empanadas/empanadas/configs/el9lh.yaml index 91cb818..9583886 100644 --- a/iso/empanadas/empanadas/configs/el9lh.yaml +++ b/iso/empanadas/empanadas/configs/el9lh.yaml @@ -2,7 +2,7 @@ '9-lookahead': fullname: 'Rocky Linux 9' revision: '9.1' - rclvl: 'RC1' + rclvl: 'LH1' major: '9' minor: '1' bugurl: 'https://bugs.rockylinux.org' diff --git a/iso/empanadas/empanadas/templates/buildImage.tmpl.sh b/iso/empanadas/empanadas/templates/buildImage.tmpl.sh index a17c853..bdf062b 100644 --- a/iso/empanadas/empanadas/templates/buildImage.tmpl.sh +++ b/iso/empanadas/empanadas/templates/buildImage.tmpl.sh @@ -1,6 +1,6 @@ #!/bin/bash -VOLID="{{ shortname }}-{{ major }}-{{ minor }}{{ rc }}-{{ arch }}-boot1" +VOLID="{{ shortname }}-{{ major }}-{{ minor }}{{ rc }}-{{ arch }}-dvd" VARIANT="{{ variant }}" ARCH="{{ arch }}" VERSION="{{ revision }}" diff --git a/iso/empanadas/empanadas/templates/isolorax.tmpl.sh b/iso/empanadas/empanadas/templates/isolorax.tmpl.sh deleted file mode 100644 index f08b123..0000000 --- a/iso/empanadas/empanadas/templates/isolorax.tmpl.sh +++ /dev/null @@ -1,8 +0,0 @@ -#!/bin/bash -# You should not be running this manually. - -# everything should be logged like | tee -a {{ log_path }}/lorax-{{ arch }}-{{ date_stamp }}.log -# for the dvd, we need to rely on pulling from {{ entries_root }}/dvd-{{ arch }}-list - -# Run the base lorax steps into a work dir specific to its arch -# copy everything into BaseOS/arch/os diff --git a/iso/empanadas/empanadas/util/iso_utils.py b/iso/empanadas/empanadas/util/iso_utils.py index 0b49bd1..70244aa 100644 --- a/iso/empanadas/empanadas/util/iso_utils.py +++ b/iso/empanadas/empanadas/util/iso_utils.py @@ -824,17 +824,16 @@ class IsoBuild: # Set default variant ti.dump(treeinfo, main_variant=primary) - def discinfo_write(self): + def discinfo_write(self, file_path, arch): """ Ensure discinfo is written correctly """ - #with open(file_path, "w") as f: - # f.write("%s\n" % self.timestamp) - # f.write("%s\n" % self.fullname) - # f.write("%s\n" % self.arch) - # if disc_numbers: - # f.write("%s\n" % ",".join([str(i) for i in disc_numbers])) - print() + with open(file_path, "w+") as f: + f.write("%s\n" % self.timestamp) + f.write("%s\n" % self.fullname) + f.write("%s\n" % arch) + f.write("ALL\n") + f.close() def write_media_repo(self): """ @@ -945,12 +944,6 @@ class IsoBuild: if self.extra_iso_mode == 'podman': self._extra_iso_podman_run(arches_to_build, images_to_build, work_root) - def _extra_iso_podman_checksum(self, arch, image, work_root): - """ - Generate checksum on the fly post-podman run - """ - - def _extra_iso_local_config(self, arch, image, grafts, work_root): """ Local ISO build configuration - This generates the configuration for @@ -1099,7 +1092,6 @@ class IsoBuild: os.chmod(mock_sh_path, 0o755) os.chmod(iso_template_path, 0o755) - def _extra_iso_local_run(self, arch, image, work_root): """ Runs the actual local process using mock @@ -1248,6 +1240,19 @@ class IsoBuild: 
'Building ' + i + ' completed' ) + if len(bad_exit_list) == 0: + self.log.info( + '[' + Color.BOLD + Color.GREEN + 'INFO' + Color.END + '] ' + + 'Copying ISOs over to compose directory...' + ) + print() + else: + self.log.error( + '[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' + + 'There were issues with the work done. As a result, ' + + 'the ISOs will not be copied.' + ) + def _generate_graft_points( self, From f4f29d97ccfe6ac5027c342e9575985ce3f46c73 Mon Sep 17 00:00:00 2001 From: nazunalika Date: Mon, 27 Jun 2022 17:59:21 -0700 Subject: [PATCH 63/64] Expand and Generalize ISO Functions * Utils is now empanadas.util.Shared * Each config now has a profile name to determine latest-Rocky-{} link (Results RLBT#0000131) * Check added to see if an ISO was built at some point, and forcefully exit if so * Lorax tar ball should be in the format of of lorax-X.Y-ARCH to ensure there's no collisions between stable, beta, and lh builds --- iso/empanadas/empanadas/common.py | 41 ---------- iso/empanadas/empanadas/configs/el8.yaml | 1 + iso/empanadas/empanadas/configs/el9-beta.yaml | 3 +- iso/empanadas/empanadas/configs/el9.yaml | 3 +- iso/empanadas/empanadas/configs/el9lh.yaml | 3 +- iso/empanadas/empanadas/configs/rln.yaml | 3 +- iso/empanadas/empanadas/scripts/build_iso.py | 2 +- .../empanadas/scripts/build_iso_extra.py | 4 +- .../empanadas/scripts/sync_from_peridot.py | 5 +- .../scripts/sync_from_peridot_test.py | 3 +- .../templates/buildExtraImage.tmpl.sh | 1 + .../empanadas/templates/buildImage.tmpl.sh | 6 +- .../empanadas/templates/isobuild.tmpl.sh | 2 +- iso/empanadas/empanadas/util/__init__.py | 7 +- iso/empanadas/empanadas/util/dnf_utils.py | 4 +- iso/empanadas/empanadas/util/iso_utils.py | 50 ++++-------- iso/empanadas/empanadas/util/shared.py | 79 +++++++++++++++++++ 17 files changed, 126 insertions(+), 91 deletions(-) create mode 100644 iso/empanadas/empanadas/util/shared.py diff --git a/iso/empanadas/empanadas/common.py b/iso/empanadas/empanadas/common.py index b1b4d25..c3619ce 100644 --- a/iso/empanadas/empanadas/common.py +++ b/iso/empanadas/empanadas/common.py @@ -21,47 +21,6 @@ class Color: BOLD = '\033[1m' END = '\033[0m' -class Utils: - """ - Quick utilities that may be commonly used - """ - @staticmethod - def get_checksum(path, hashtype, logger): - """ - Generates a checksum from the provided path by doing things in chunks. - This way we don't do it in memory. - """ - try: - checksum = hashlib.new(hashtype) - except ValueError: - logger.error("Invalid hash type: %s" % hashtype) - return False - - try: - input_file = open(path, "rb") - except IOError as e: - logger.error("Could not open file %s: %s" % (path, e)) - return False - - while True: - chunk = input_file.read(8192) - if not chunk: - break - checksum.update(chunk) - - input_file.close() - stat = os.stat(path) - base = os.path.basename(path) - # This emulates our current syncing scripts that runs stat and - # sha256sum and what not with a very specific output. 
- return "%s: %s bytes\n%s (%s) = %s\n" % ( - base, - stat.st_size, - hashtype.upper(), - base, - checksum.hexdigest() - ) - # vars and additional checks rldict = {} sigdict = {} diff --git a/iso/empanadas/empanadas/configs/el8.yaml b/iso/empanadas/empanadas/configs/el8.yaml index f6c1a67..eb80aff 100644 --- a/iso/empanadas/empanadas/configs/el8.yaml +++ b/iso/empanadas/empanadas/configs/el8.yaml @@ -5,6 +5,7 @@ rclvl: 'RC2' major: '8' minor: '6' + profile: '8' bugurl: 'https://bugs.rockylinux.org' allowed_arches: - x86_64 diff --git a/iso/empanadas/empanadas/configs/el9-beta.yaml b/iso/empanadas/empanadas/configs/el9-beta.yaml index f5f3a07..19d6cd5 100644 --- a/iso/empanadas/empanadas/configs/el9-beta.yaml +++ b/iso/empanadas/empanadas/configs/el9-beta.yaml @@ -1,10 +1,11 @@ --- '9-beta': - fullname: 'Rocky Linux 9' + fullname: 'Rocky Linux 9.1' revision: '9.1' rclvl: 'BETA1' major: '9' minor: '1' + profile: '9-beta' bugurl: 'https://bugs.rockylinux.org' checksum: 'sha256' allowed_arches: diff --git a/iso/empanadas/empanadas/configs/el9.yaml b/iso/empanadas/empanadas/configs/el9.yaml index fe393a8..88a978b 100644 --- a/iso/empanadas/empanadas/configs/el9.yaml +++ b/iso/empanadas/empanadas/configs/el9.yaml @@ -1,10 +1,11 @@ --- '9': - fullname: 'Rocky Linux 9' + fullname: 'Rocky Linux 9.0' revision: '9.0' rclvl: 'RC1' major: '9' minor: '0' + profile: '9' bugurl: 'https://bugs.rockylinux.org' checksum: 'sha256' allowed_arches: diff --git a/iso/empanadas/empanadas/configs/el9lh.yaml b/iso/empanadas/empanadas/configs/el9lh.yaml index 9583886..4176f66 100644 --- a/iso/empanadas/empanadas/configs/el9lh.yaml +++ b/iso/empanadas/empanadas/configs/el9lh.yaml @@ -1,10 +1,11 @@ --- '9-lookahead': - fullname: 'Rocky Linux 9' + fullname: 'Rocky Linux 9.1' revision: '9.1' rclvl: 'LH1' major: '9' minor: '1' + profile: '9-lookahead' bugurl: 'https://bugs.rockylinux.org' checksum: 'sha256' allowed_arches: diff --git a/iso/empanadas/empanadas/configs/rln.yaml b/iso/empanadas/empanadas/configs/rln.yaml index c36fcb1..7544ce4 100644 --- a/iso/empanadas/empanadas/configs/rln.yaml +++ b/iso/empanadas/empanadas/configs/rln.yaml @@ -2,9 +2,10 @@ 'rln': fullname: 'Rocky Linux New' revision: '10' - rclvl: 'RC1' + rclvl: 'RLN120' major: '10' minor: '0' + profile: 'rln' bugurl: 'https://bugs.rockylinux.org' checksum: 'sha256' allowed_arches: diff --git a/iso/empanadas/empanadas/scripts/build_iso.py b/iso/empanadas/empanadas/scripts/build_iso.py index c5907f1..b35cdf2 100755 --- a/iso/empanadas/empanadas/scripts/build_iso.py +++ b/iso/empanadas/empanadas/scripts/build_iso.py @@ -8,7 +8,7 @@ from empanadas.util import IsoBuild parser = argparse.ArgumentParser(description="ISO Compose") -parser.add_argument('--release', type=str, help="Major Release Version", required=True) +parser.add_argument('--release', type=str, help="Major Release Version or major-type (eg 9-beta)", required=True) parser.add_argument('--isolation', type=str, help="mock isolation mode") parser.add_argument('--rc', action='store_true', help="Release Candidate") parser.add_argument('--local-compose', action='store_true', help="Compose Directory is Here") diff --git a/iso/empanadas/empanadas/scripts/build_iso_extra.py b/iso/empanadas/empanadas/scripts/build_iso_extra.py index 074dba4..a646d12 100755 --- a/iso/empanadas/empanadas/scripts/build_iso_extra.py +++ b/iso/empanadas/empanadas/scripts/build_iso_extra.py @@ -8,7 +8,7 @@ from empanadas.util import IsoBuild parser = argparse.ArgumentParser(description="ISO Compose") 
-parser.add_argument('--release', type=str, help="Major Release Version", required=True) +parser.add_argument('--release', type=str, help="Major Release Version or major-type (eg 9-beta)", required=True) parser.add_argument('--rc', action='store_true', help="Release Candidate") parser.add_argument('--arch', type=str, help="Architecture") parser.add_argument('--isolation', type=str, help="Mock Isolation") @@ -30,7 +30,7 @@ a = IsoBuild( extra_iso=results.extra_iso, extra_iso_mode=results.extra_iso_mode, compose_dir_is_here=results.local_compose, - logger=results.logger, + logger=results.logger ) def run(): diff --git a/iso/empanadas/empanadas/scripts/sync_from_peridot.py b/iso/empanadas/empanadas/scripts/sync_from_peridot.py index e2481d7..1e40ed8 100755 --- a/iso/empanadas/empanadas/scripts/sync_from_peridot.py +++ b/iso/empanadas/empanadas/scripts/sync_from_peridot.py @@ -28,8 +28,9 @@ parser.add_argument('--logger', type=str) # Parse them results = parser.parse_args() - rlvars = rldict[results.release] +major = rlvars['major'] + r = Checks(rlvars, config['arch']) r.check_valid_arch() @@ -37,7 +38,7 @@ r.check_valid_arch() a = RepoSync( rlvars, config, - major=rlvars['major'], + major=major, repo=results.repo, arch=results.arch, ignore_debug=results.ignore_debug, diff --git a/iso/empanadas/empanadas/scripts/sync_from_peridot_test.py b/iso/empanadas/empanadas/scripts/sync_from_peridot_test.py index 023ba42..5057753 100755 --- a/iso/empanadas/empanadas/scripts/sync_from_peridot_test.py +++ b/iso/empanadas/empanadas/scripts/sync_from_peridot_test.py @@ -14,4 +14,5 @@ r.check_valid_arch() a = RepoSync(rlvars, config, major="9", repo="BaseOS", parallel=True, ignore_debug=False, ignore_source=False, hashed=True) def run(): - a.run() + print(rlvars.keys()) + print(rlvars) diff --git a/iso/empanadas/empanadas/templates/buildExtraImage.tmpl.sh b/iso/empanadas/empanadas/templates/buildExtraImage.tmpl.sh index 0fedfca..8f145de 100644 --- a/iso/empanadas/empanadas/templates/buildExtraImage.tmpl.sh +++ b/iso/empanadas/empanadas/templates/buildExtraImage.tmpl.sh @@ -5,6 +5,7 @@ set -ex {{ lorax_pkg_cmd }} mkdir -p {{ compose_work_iso_dir }}/{{ arch }} cd {{ compose_work_iso_dir }}/{{ arch }} +test -f {{ isoname }} || { echo "!! 
ISO ALREDY EXISTS !!"; exit 1; } {% else %} cd /builddir diff --git a/iso/empanadas/empanadas/templates/buildImage.tmpl.sh b/iso/empanadas/empanadas/templates/buildImage.tmpl.sh index bdf062b..817d73b 100644 --- a/iso/empanadas/empanadas/templates/buildImage.tmpl.sh +++ b/iso/empanadas/empanadas/templates/buildImage.tmpl.sh @@ -7,8 +7,9 @@ VERSION="{{ revision }}" PRODUCT="{{ distname }}" MOCKBLD="{{ builddir }}" LORAXRES="{{ lorax_work_root }}" -LORAX_TAR="lorax-{{ major }}-{{ arch }}.tar.gz" +LORAX_TAR="lorax-{{ revision }}-{{ arch }}.tar.gz" LOGFILE="lorax-{{ arch }}.log" +BUGURL="{{ bugurl }}" {% for pkg in lorax %} sed -i '/{{ pkg }}/ s/^/#/' /usr/share/lorax/templates.d/80-rhel/runtime-install.tmpl @@ -23,6 +24,7 @@ lorax --product="${PRODUCT}" \ {%- for repo in repos %} --source={{ repo.url }} \ {%- endfor %} + --bugurl="${BUGURL}" \ --variant="${VARIANT}" \ --nomacboot \ --buildarch="${ARCH}" \ @@ -45,7 +47,7 @@ if [ -f "/usr/bin/xorriso" ]; then /usr/bin/xorriso -dev lorax/images/boot.iso --find | tail -n+2 | tr -d "'" | - cut -c2- sort >> lorax/images/boot.iso.manifest + cut -c2- | sort >> lorax/images/boot.iso.manifest elif [ -f "/usr/bin/isoinfo" ]; then /usr/bin/isoinfo -R -f -i lorax/images/boot.iso | grep -v '/TRANS.TBL$' | sort >> lorax/images/boot.iso.manifest diff --git a/iso/empanadas/empanadas/templates/isobuild.tmpl.sh b/iso/empanadas/empanadas/templates/isobuild.tmpl.sh index d11a2f1..95184b6 100644 --- a/iso/empanadas/empanadas/templates/isobuild.tmpl.sh +++ b/iso/empanadas/empanadas/templates/isobuild.tmpl.sh @@ -9,7 +9,7 @@ MOCK_RESL="${MOCK_ROOT}/result" MOCK_CHRO="${MOCK_ROOT}/root" MOCK_LOG="${MOCK_RESL}/mock-output.log" LORAX_SCR="/var/tmp/buildImage.sh" -LORAX_TAR="lorax-{{ major }}-{{ arch }}.tar.gz" +LORAX_TAR="lorax-{{ revision }}-{{ arch }}.tar.gz" ISOLATION="{{ isolation }}" BUILDDIR="{{ builddir }}" diff --git a/iso/empanadas/empanadas/util/__init__.py b/iso/empanadas/empanadas/util/__init__.py index 495236c..f107a54 100644 --- a/iso/empanadas/empanadas/util/__init__.py +++ b/iso/empanadas/empanadas/util/__init__.py @@ -6,6 +6,10 @@ from empanadas.util.check import ( Checks, ) +from empanadas.util.shared import ( + Shared, +) + from empanadas.util.dnf_utils import ( RepoSync, SigRepoSync @@ -18,5 +22,6 @@ from empanadas.util.iso_utils import ( __all__ = [ 'Checks', - 'RepoSync' + 'RepoSync', + 'Shared' ] diff --git a/iso/empanadas/empanadas/util/dnf_utils.py b/iso/empanadas/empanadas/util/dnf_utils.py index ee5ca07..a54b4ee 100644 --- a/iso/empanadas/empanadas/util/dnf_utils.py +++ b/iso/empanadas/empanadas/util/dnf_utils.py @@ -19,6 +19,7 @@ import json from jinja2 import Environment, FileSystemLoader from empanadas.common import Color, _rootdir +from empanadas.util import Shared # initial treeinfo data is made here import productmd.treeinfo @@ -76,6 +77,7 @@ class RepoSync: self.repo_base_url = config['repo_base_url'] self.compose_root = config['compose_root'] self.compose_base = config['compose_root'] + "/" + major + self.profile = rlvars['profile'] # Relevant major version items self.shortname = config['shortname'] @@ -112,7 +114,7 @@ class RepoSync: self.compose_latest_dir = os.path.join( config['compose_root'], major, - "latest-Rocky-{}".format(major) + "latest-Rocky-{}".format(self.profile) ) self.compose_latest_sync = os.path.join( diff --git a/iso/empanadas/empanadas/util/iso_utils.py b/iso/empanadas/empanadas/util/iso_utils.py index 70244aa..5921df9 100644 --- a/iso/empanadas/empanadas/util/iso_utils.py +++ 
b/iso/empanadas/empanadas/util/iso_utils.py @@ -13,7 +13,6 @@ import shlex import time import tarfile import shutil -import hashlib # lazy person's s3 parser import requests @@ -35,7 +34,8 @@ import productmd.treeinfo from jinja2 import Environment, FileSystemLoader -from empanadas.common import Color, _rootdir, Utils +from empanadas.common import Color, _rootdir +from empanadas.util import Shared class IsoBuild: """ @@ -90,6 +90,7 @@ class IsoBuild: self.extra_iso = extra_iso self.extra_iso_mode = extra_iso_mode self.checksum = rlvars['checksum'] + self.profile = rlvars['profile'] # Relevant major version items self.arch = arch @@ -102,6 +103,7 @@ class IsoBuild: self.repo_base_url = config['repo_base_url'] self.project_id = rlvars['project_id'] self.structure = rlvars['structure'] + self.bugurl = rlvars['bugurl'] self.extra_files = rlvars['extra_files'] @@ -133,7 +135,7 @@ class IsoBuild: self.compose_latest_dir = os.path.join( config['compose_root'], major, - "latest-Rocky-{}".format(major) + "latest-Rocky-{}".format(self.profile) ) self.compose_latest_sync = os.path.join( @@ -189,7 +191,7 @@ class IsoBuild: self.log.info('Compose repo directory: %s' % sync_root) self.log.info('ISO Build Logs: /var/lib/mock/{}-{}-{}/result'.format( - self.shortname, self.major_version, self.current_arch) + self.shortname.lower(), self.major_version, self.current_arch) ) self.log.info('ISO Build completed.') @@ -282,6 +284,7 @@ class IsoBuild: isolation=self.mock_isolation, builddir=self.mock_work_root, shortname=self.shortname, + revision=self.release, ) iso_template_output = iso_template.render( @@ -297,6 +300,7 @@ class IsoBuild: rc=rclevel, builddir=self.mock_work_root, lorax_work_root=self.lorax_result_root, + bugurl=self.bugurl, ) mock_iso_entry = open(mock_iso_path, "w+") @@ -363,7 +367,7 @@ class IsoBuild: full_drop = '{}/lorax-{}-{}.tar.gz'.format( lorax_arch_dir, - self.major_version, + self.release, arch ) @@ -396,7 +400,7 @@ class IsoBuild: for arch in arches_to_unpack: tarname = 'lorax-{}-{}.tar.gz'.format( - self.major_version, + self.release, arch ) @@ -474,7 +478,7 @@ class IsoBuild: raise SystemExit() for y in self.s3.list_objects(Bucket=self.s3_bucket)['Contents']: - if 'tar.gz' in y['Key']: + if 'tar.gz' in y['Key'] and self.release in y['Key']: temp.append(y['Key']) for arch in self.arches: @@ -525,7 +529,7 @@ class IsoBuild: resp = xmltodict.parse(bucket_data.content) for y in resp['ListBucketResult']['Contents']: - if 'tar.gz' in y['Key']: + if 'tar.gz' in y['Key'] and self.release in y['Key']: temp.append(y['Key']) for arch in self.arches: @@ -690,7 +694,7 @@ class IsoBuild: shutil.copy2(path_to_src_image + '.manifest', manifest) self.log.info('Creating checksum for %s boot iso...' 
% arch) - checksum = Utils.get_checksum(isobootpath, self.checksum, self.log) + checksum = Shared.get_checksum(isobootpath, self.checksum, self.log) if not checksum: self.log.error( '[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' + @@ -824,31 +828,6 @@ class IsoBuild: # Set default variant ti.dump(treeinfo, main_variant=primary) - def discinfo_write(self, file_path, arch): - """ - Ensure discinfo is written correctly - """ - with open(file_path, "w+") as f: - f.write("%s\n" % self.timestamp) - f.write("%s\n" % self.fullname) - f.write("%s\n" % arch) - f.write("ALL\n") - f.close() - - def write_media_repo(self): - """ - Ensure media.repo exists - """ - data = [ - "[InstallMedia]", - "name=%s" % self.fullname, - "mediaid=%s" % self.timestamp, - "metadata_expire=-1", - "gpgcheck=0", - "cost=500", - "", - ] - # Next set of functions are loosely borrowed (in concept) from pungi. Some # stuff may be combined/mixed together, other things may be simplified or # reduced in nature. @@ -1075,6 +1054,7 @@ class IsoBuild: implantmd5=implantmd5, make_manifest=make_manifest, lorax_pkg_cmd=lorax_pkg_cmd, + isoname=isoname, ) mock_iso_entry = open(mock_iso_path, "w+") @@ -1225,7 +1205,7 @@ class IsoBuild: '[' + Color.BOLD + Color.GREEN + 'INFO' + Color.END + '] ' + 'Performing checksum for ' + p ) - checksum = Utils.get_checksum(path, self.checksum, self.log) + checksum = Shared.get_checksum(path, self.checksum, self.log) if not checksum: self.log.error( '[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' + diff --git a/iso/empanadas/empanadas/util/shared.py b/iso/empanadas/empanadas/util/shared.py new file mode 100644 index 0000000..d9bb357 --- /dev/null +++ b/iso/empanadas/empanadas/util/shared.py @@ -0,0 +1,79 @@ +# These are shared utilities used + +import os +import hashlib + +class Shared: + """ + Quick utilities that may be commonly used + """ + @staticmethod + def get_checksum(path, hashtype, logger): + """ + Generates a checksum from the provided path by doing things in chunks. + This way we don't do it in memory. + """ + try: + checksum = hashlib.new(hashtype) + except ValueError: + logger.error("Invalid hash type: %s" % hashtype) + return False + + try: + input_file = open(path, "rb") + except IOError as e: + logger.error("Could not open file %s: %s" % (path, e)) + return False + + while True: + chunk = input_file.read(8192) + if not chunk: + break + checksum.update(chunk) + + input_file.close() + stat = os.stat(path) + base = os.path.basename(path) + # This emulates our current syncing scripts that runs stat and + # sha256sum and what not with a very specific output. 
+ return "%s: %s bytes\n%s (%s) = %s\n" % ( + base, + stat.st_size, + hashtype.upper(), + base, + checksum.hexdigest() + ) + + @staticmethod + def discinfo_write(timestamp, fullname, arch, file_path): + """ + Ensure discinfo is written correctly + """ + data = [ + "%s" % timestamp, + "%s" % fullname, + "%s" % arch, + "ALL" + ] + + with open(file_path, "w+") as f: + f.write("\n".join(data)) + f.close() + + @staticmethod + def media_repo_write(timestamp, fullname, file_path): + """ + Ensure media.repo exists + """ + data = [ + "[InstallMedia]", + "name=%s" % fullname, + "mediaid=%s" % timestamp, + "metadata_expire=-1", + "gpgcheck=0", + "cost=500", + "", + ] + + with open(file_path, "w") as f: + f.write("\n".join(data)) From 6fc01c8590545e111193f266a382c6fc93062f83 Mon Sep 17 00:00:00 2001 From: nazunalika Date: Mon, 27 Jun 2022 18:35:11 -0700 Subject: [PATCH 64/64] change RC notes --- iso/empanadas/empanadas/scripts/build_iso.py | 2 +- iso/empanadas/empanadas/scripts/build_iso_extra.py | 2 +- iso/empanadas/empanadas/templates/buildExtraImage.tmpl.sh | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/iso/empanadas/empanadas/scripts/build_iso.py b/iso/empanadas/empanadas/scripts/build_iso.py index b35cdf2..fbf1d06 100755 --- a/iso/empanadas/empanadas/scripts/build_iso.py +++ b/iso/empanadas/empanadas/scripts/build_iso.py @@ -10,7 +10,7 @@ parser = argparse.ArgumentParser(description="ISO Compose") parser.add_argument('--release', type=str, help="Major Release Version or major-type (eg 9-beta)", required=True) parser.add_argument('--isolation', type=str, help="mock isolation mode") -parser.add_argument('--rc', action='store_true', help="Release Candidate") +parser.add_argument('--rc', action='store_true', help="Release Candidate, Beta, RLN") parser.add_argument('--local-compose', action='store_true', help="Compose Directory is Here") parser.add_argument('--logger', type=str) results = parser.parse_args() diff --git a/iso/empanadas/empanadas/scripts/build_iso_extra.py b/iso/empanadas/empanadas/scripts/build_iso_extra.py index a646d12..9fe9c05 100755 --- a/iso/empanadas/empanadas/scripts/build_iso_extra.py +++ b/iso/empanadas/empanadas/scripts/build_iso_extra.py @@ -9,7 +9,7 @@ from empanadas.util import IsoBuild parser = argparse.ArgumentParser(description="ISO Compose") parser.add_argument('--release', type=str, help="Major Release Version or major-type (eg 9-beta)", required=True) -parser.add_argument('--rc', action='store_true', help="Release Candidate") +parser.add_argument('--rc', action='store_true', help="Release Candidate, Beta, RLN") parser.add_argument('--arch', type=str, help="Architecture") parser.add_argument('--isolation', type=str, help="Mock Isolation") parser.add_argument('--local-compose', action='store_true', help="Compose Directory is Here") diff --git a/iso/empanadas/empanadas/templates/buildExtraImage.tmpl.sh b/iso/empanadas/empanadas/templates/buildExtraImage.tmpl.sh index 8f145de..ae9b5cf 100644 --- a/iso/empanadas/empanadas/templates/buildExtraImage.tmpl.sh +++ b/iso/empanadas/empanadas/templates/buildExtraImage.tmpl.sh @@ -4,7 +4,7 @@ set -ex {% if extra_iso_mode == "podman" %} {{ lorax_pkg_cmd }} mkdir -p {{ compose_work_iso_dir }}/{{ arch }} -cd {{ compose_work_iso_dir }}/{{ arch }} +cd {{ compose_work_iso_dir }}/{{ arch }} test -f {{ isoname }} || { echo "!! ISO ALREDY EXISTS !!"; exit 1; } {% else %} cd /builddir