From 77178e96578d50ef48a61f1258b83b303cb75428 Mon Sep 17 00:00:00 2001 From: Louis Abel Date: Sun, 3 Jul 2022 21:00:57 -0700 Subject: [PATCH] move treeinfo writing to Shared --- iso/empanadas/empanadas/util/dnf_utils.py | 90 ++++++++++++-- iso/empanadas/empanadas/util/iso_utils.py | 134 +++++++++++--------- iso/empanadas/empanadas/util/shared.py | 145 +++++++++++++++++++--- sync/.sync-to-prod.sh.swp | Bin 0 -> 12288 bytes 4 files changed, 287 insertions(+), 82 deletions(-) create mode 100644 sync/.sync-to-prod.sh.swp diff --git a/iso/empanadas/empanadas/util/dnf_utils.py b/iso/empanadas/empanadas/util/dnf_utils.py index 119933c..c7e9a79 100644 --- a/iso/empanadas/empanadas/util/dnf_utils.py +++ b/iso/empanadas/empanadas/util/dnf_utils.py @@ -242,6 +242,7 @@ class RepoSync: if self.fullrun: self.deploy_extra_files(sync_root, global_work_root) self.deploy_treeinfo(self.repo, sync_root, self.arch) + self.tweak_treeinfo(self.repo, sync_root, self.arch) self.symlink_to_latest(generated_dir) if self.repoclosure: @@ -250,8 +251,11 @@ class RepoSync: if self.refresh_extra_files and not self.fullrun: self.deploy_extra_files(sync_root, global_work_root) + # This does NOT overwrite treeinfo files. This just ensures they exist + # and are configured correctly. if self.refresh_treeinfo and not self.fullrun: self.deploy_treeinfo(self.repo, sync_root, self.arch) + self.tweak_treeinfo(self.repo, sync_root, self.arch) self.deploy_metadata(sync_root) @@ -450,6 +454,53 @@ class RepoSync: os.chmod(entry_point_sh, 0o755) os.chmod(debug_entry_point_sh, 0o755) + # During fullruns, a kickstart directory is made. Kickstart + # should not be updated nor touched during regular runs under + # any circumstances. + if self.fullrun: + ks_entry_name = '{}-ks-{}'.format(r, a) + entry_name_list.append(ks_entry_name) + ks_point_sh = os.path.join( + entries_dir, + ks_entry_name + ) + + ks_sync_path = os.path.join( + sync_root, + repo_name, + a, + 'kickstart' + ) + + ks_sync_cmd = ("/usr/bin/dnf reposync -c {}.{} --download-metadata " + "--repoid={} -p {} --forcearch {} --norepopath " + "--gpgcheck --assumeyes 2>&1").format( + self.dnf_config, + a, + r, + ks_sync_path, + a + ) + + ks_sync_log = ("{}/{}-{}-ks.log").format( + log_root, + repo_name, + a + ) + + ks_sync_template = self.tmplenv.get_template('reposync.tmpl') + ks_sync_output = ks_sync_template.render( + import_gpg_cmd=import_gpg_cmd, + arch_force_cp=arch_force_cp, + dnf_plugin_cmd=dnf_plugin_cmd, + sync_cmd=ks_sync_cmd, + sync_log=ks_sync_log + ) + ks_entry_point_open = open(ks_point_sh, "w+") + ks_entry_point_open.write(ks_sync_output) + ks_entry_point_open.close() + os.chmod(ks_point_sh, 0o755) + # We ignoring sources? if (not self.ignore_source and not arch) or ( not self.ignore_source and arch == 'source'): @@ -635,7 +686,6 @@ class RepoSync: config_file = open(fname, "w+") repolist = [] for repo in self.repos: - constructed_url = '{}/{}/repo/{}{}/$basearch'.format( self.repo_base_url, self.project_id, @@ -1159,6 +1209,12 @@ class RepoSync: repo_name + ' source media.repo already exists' ) + def tweak_treeinfo(self, repo, sync_root, arch): + """ + This modifies treeinfo for the primary repository. If the repository is + listed in the iso_map as a non-disc, it will be considered for modification. + """ + def run_compose_closeout(self): """ Closes out a compose as file. This ensures kickstart repositories are @@ -1185,8 +1241,12 @@ class RepoSync: # Verify if the link even exists if not os.path.exists(self.compose_latest_dir): - self.log.error('!! 
Latest compose link is broken does not exist: %s' % self.compose_latest_dir) - self.log.error('!! Please perform a full run if you have not done so.') + self.log.error( + '!! Latest compose link is broken does not exist: %s' % self.compose_latest_dir + ) + self.log.error( + '!! Please perform a full run if you have not done so.' + ) raise SystemExit() log_root = os.path.join( @@ -1210,12 +1270,28 @@ class RepoSync: 'Starting to sync ISOs to compose' ) - iso_result = Shared.fpsync_method(iso_root, sync_iso_root, self.log, tmp_dir) - - if not iso_result: + if os.path.exists('/usr/bin/fpsync'): + message, ret = Shared.fpsync_method(iso_root, sync_iso_root, tmp_dir) + elif os.path.exists('/usr/bin/parallel') and os.path.exists('/usr/bin/rsync'): + message, ret = Shared.rsync_method(iso_root, sync_iso_root) + else: self.log.error( '[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' + - 'Sync failed' + 'fpsync nor parallel + rsync were found on this system. ' + + 'There is also no built-in parallel rsync method at this ' + + 'time.' + ) + raise SystemExit() + + if ret != 0: + self.log.error( + '[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' + + message + ) + else: + self.log.info( + '[' + Color.BOLD + Color.GREEN + 'INFO' + Color.END + '] ' + + message ) class SigRepoSync: diff --git a/iso/empanadas/empanadas/util/iso_utils.py b/iso/empanadas/empanadas/util/iso_utils.py index e402c4a..ee682c9 100644 --- a/iso/empanadas/empanadas/util/iso_utils.py +++ b/iso/empanadas/empanadas/util/iso_utils.py @@ -799,54 +799,76 @@ class IsoBuild: is for basic use. Eventually it'll be expanded to handle this scenario. """ image = os.path.join(self.lorax_work_dir, arch, variant) - treeinfo = os.path.join(image, '.treeinfo') - discinfo = os.path.join(image, '.discinfo') - mediarepo = os.path.join(image, 'media.repo') imagemap = self.iso_map['images'][variant] - primary = imagemap['variant'] - repos = imagemap['repos'] - is_disc = False + data = { + 'arch': arch, + 'variant': variant, + 'variant_path': image, + 'checksum': self.checksum, + 'distname': self.distname, + 'fullname': self.fullname, + 'shortname': self.shortname, + 'release': self.release, + 'timestamp': self.timestamp, + } - if imagemap['disc']: - is_disc = True - discnum = 1 + try: + Shared.treeinfo_modify_write(data, imagemap) + except Exception as e: + self.log.error( + '[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' + + 'There was an error writing treeinfo.' + ) + self.log.error(e) + + #treeinfo = os.path.join(image, '.treeinfo') + #discinfo = os.path.join(image, '.discinfo') + #mediarepo = os.path.join(image, 'media.repo') + #imagemap = self.iso_map['images'][variant] + #primary = imagemap['variant'] + #repos = imagemap['repos'] + #is_disc = False + + #if imagemap['disc']: + # is_disc = True + # discnum = 1 # load up productmd - ti = productmd.treeinfo.TreeInfo() - ti.load(treeinfo) + #ti = productmd.treeinfo.TreeInfo() + #ti.load(treeinfo) # Set the name - ti.release.name = self.distname - ti.release.short = self.shortname + #ti.release.name = self.distname + #ti.release.short = self.shortname # Set the version (the initial lorax run does this, but we are setting # it just in case) - ti.release.version = self.release + #ti.release.version = self.release # Assign the present images into a var as a copy. For each platform, # clear out the present dictionary. For each item and path in the # assigned var, assign it back to the platform dictionary. If the path # is empty, continue. Do checksums afterwards. 
- plats = ti.images.images.copy() - for platform in ti.images.images: - ti.images.images[platform] = {} - for i, p in plats[platform].items(): - if not p: - continue - if 'boot.iso' in i and is_disc: - continue - ti.images.images[platform][i] = p - ti.checksums.add(p, self.checksum, root_dir=image) + #plats = ti.images.images.copy() + #for platform in ti.images.images: + # ti.images.images[platform] = {} + # for i, p in plats[platform].items(): + # if not p: + # continue + # if 'boot.iso' in i and is_disc: + # continue + # ti.images.images[platform][i] = p + # ti.checksums.add(p, self.checksum, root_dir=image) # stage2 checksums - if ti.stage2.mainimage: - ti.checksums.add(ti.stage2.mainimage, self.checksum, root_dir=image) + #if ti.stage2.mainimage: + # ti.checksums.add(ti.stage2.mainimage, self.checksum, root_dir=image) - if ti.stage2.instimage: - ti.checksums.add(ti.stage2.instimage, self.checksum, root_dir=image) + #if ti.stage2.instimage: + # ti.checksums.add(ti.stage2.instimage, self.checksum, root_dir=image) # If we are a disc, set the media section appropriately. - if is_disc: - ti.media.discnum = discnum - ti.media.totaldiscs = discnum + #if is_disc: + # ti.media.discnum = discnum + # ti.media.totaldiscs = discnum # Create variants # Note to self: There's a lot of legacy stuff running around for @@ -854,38 +876,38 @@ class IsoBuild: # apparently. But there could be a chance it'll change. We may need to # put in a configuration to deal with it at some point. #ti.variants.variants.clear() - for y in repos: - if y in ti.variants.variants.keys(): - vari = ti.variants.variants[y] - else: - vari = productmd.treeinfo.Variant(ti) + #for y in repos: + # if y in ti.variants.variants.keys(): + # vari = ti.variants.variants[y] + # else: + # vari = productmd.treeinfo.Variant(ti) - vari.id = y - vari.uid = y - vari.name = y - vari.type = "variant" - if is_disc: - vari.paths.repository = y - vari.paths.packages = y + "/Packages" - else: - if y == primary: - vari.paths.repository = "." - vari.paths.packages = "Packages" - else: - vari.paths.repository = "../../../" + y + "/" + arch + "/os" - vari.paths.packages = "../../../" + y + "/" + arch + "/os/Packages" + # vari.id = y + # vari.uid = y + # vari.name = y + # vari.type = "variant" + # if is_disc: + # vari.paths.repository = y + # vari.paths.packages = y + "/Packages" + # else: + # if y == primary: + # vari.paths.repository = "." + # vari.paths.packages = "Packages" + # else: + # vari.paths.repository = "../../../" + y + "/" + arch + "/os" + # vari.paths.packages = "../../../" + y + "/" + arch + "/os/Packages" - if y not in ti.variants.variants.keys(): - ti.variants.add(vari) + # if y not in ti.variants.variants.keys(): + # ti.variants.add(vari) - del vari + # del vari # Set default variant - ti.dump(treeinfo, main_variant=primary) + #ti.dump(treeinfo, main_variant=primary) # Set discinfo - Shared.discinfo_write(self.timestamp, self.fullname, arch, discinfo) + #Shared.discinfo_write(self.timestamp, self.fullname, arch, discinfo) # Set media.repo - Shared.media_repo_write(self.timestamp, self.fullname, mediarepo) + #Shared.media_repo_write(self.timestamp, self.fullname, mediarepo) # Next set of functions are loosely borrowed (in concept) from pungi. 
Some # stuff may be combined/mixed together, other things may be simplified or diff --git a/iso/empanadas/empanadas/util/shared.py b/iso/empanadas/empanadas/util/shared.py index eca879d..305316b 100644 --- a/iso/empanadas/empanadas/util/shared.py +++ b/iso/empanadas/empanadas/util/shared.py @@ -107,11 +107,115 @@ class Shared: ti.dump(file_path) @staticmethod - def treeinfo_modify_write(): + def treeinfo_modify_write(data, imagemap): """ Modifies a specific treeinfo with already available data. This is in the case of modifying treeinfo for primary repos or images. """ + arch = data['arch'] + variant = data['variant'] + variant_path = data['variant_path'] + checksum = data['checksum'] + distname = data['distname'] + fullname = data['fullname'] + shortname = data['shortname'] + release = data['release'] + timestamp = data['timestamp'] + + os_or_ks = '' + if '/os/' in variant_path: + os_or_ks = 'os' + if '/kickstart/' in variant_path: + os_or_ks = 'kickstart' + + image = os.path.join(variant_path) + treeinfo = os.path.join(image, '.treeinfo') + discinfo = os.path.join(image, '.discinfo') + mediarepo = os.path.join(image, 'media.repo') + #imagemap = self.iso_map['images'][variant] + primary = imagemap['variant'] + repos = imagemap['repos'] + is_disc = False + + if imagemap['disc']: + is_disc = True + discnum = 1 + + # load up productmd + ti = productmd.treeinfo.TreeInfo() + ti.load(treeinfo) + + # Set the name + ti.release.name = distname + ti.release.short = shortname + # Set the version (the initial lorax run does this, but we are setting + # it just in case) + ti.release.version = release + # Assign the present images into a var as a copy. For each platform, + # clear out the present dictionary. For each item and path in the + # assigned var, assign it back to the platform dictionary. If the path + # is empty, continue. Do checksums afterwards. + plats = ti.images.images.copy() + for platform in ti.images.images: + ti.images.images[platform] = {} + for i, p in plats[platform].items(): + if not p: + continue + if 'boot.iso' in i and is_disc: + continue + ti.images.images[platform][i] = p + ti.checksums.add(p, checksum, root_dir=image) + + # stage2 checksums + if ti.stage2.mainimage: + ti.checksums.add(ti.stage2.mainimage, checksum, root_dir=image) + + if ti.stage2.instimage: + ti.checksums.add(ti.stage2.instimage, checksum, root_dir=image) + + # If we are a disc, set the media section appropriately. + if is_disc: + ti.media.discnum = discnum + ti.media.totaldiscs = discnum + + # Create variants + # Note to self: There's a lot of legacy stuff running around for + # Fedora, ELN, and RHEL in general. This is the general structure, + # apparently. But there could be a chance it'll change. We may need to + # put in a configuration to deal with it at some point. + #ti.variants.variants.clear() + for y in repos: + if y in ti.variants.variants.keys(): + vari = ti.variants.variants[y] + else: + vari = productmd.treeinfo.Variant(ti) + + vari.id = y + vari.uid = y + vari.name = y + vari.type = "variant" + if is_disc: + vari.paths.repository = y + vari.paths.packages = y + "/Packages" + else: + if y == primary: + vari.paths.repository = "." 
+ vari.paths.packages = "Packages" + else: + vari.paths.repository = "../../../" + y + "/" + arch + "/" + os_or_ks + vari.paths.packages = "../../../" + y + "/" + arch + "/" + os_or_ks + "/Packages" + + if y not in ti.variants.variants.keys(): + ti.variants.add(vari) + + del vari + + # Set default variant + ti.dump(treeinfo, main_variant=primary) + # Set discinfo + Shared.discinfo_write(timestamp, fullname, arch, discinfo) + # Set media.repo + Shared.media_repo_write(timestamp, fullname, mediarepo) @staticmethod def write_metadata( @@ -345,18 +449,16 @@ class Shared: """ @staticmethod - def fpsync_method(src, dest, logger, tmp_dir): + def fpsync_method(src, dest, tmp_dir): """ Returns a list for the fpsync command """ cmd = '/usr/bin/fpsync' rsync_switches = '-av --numeric-ids --no-compress --chown=10004:10005' if not os.path.exists(cmd): - logger.warn( - '[' + Color.BOLD + Color.YELLOW + 'WARN' + Color.END + '] ' + - 'fpsync not found' - ) - return False + message = 'fpsync not found' + retval = 1 + return message, retval os.makedirs(tmp_dir, exist_ok=True) @@ -373,25 +475,30 @@ class Shared: stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL, ) + if process != 0: - logger.error( - '[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' + - 'fpsync failed' - ) - return False + message = 'Syncing (fpsync) failed' + retval = process + return message, retval if os.path.exists(dest): - return True + message = 'Syncing (fpsync) succeeded' + retval = process else: - logger.error( - '[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' + - 'Path synced does not seem to exist for some reason.' - ) - return False + message = 'Path synced does not seem to exist for some reason.' + retval = 1 + + return message, retval @staticmethod - def rsync_method(src, dest, logger, tmp_dir): + def rsync_method(src, dest): """ Returns a string for the rsync command plus parallel. Yes, this is a hack. """ + find_cmd = '/usr/bin/find' + parallel_cmd = '/usr/bin/parallel' + rsync_cmd = '/usr/bin/rsync' + switches = '-av --chown=10004:10005 --progress --relative --human-readable' + + os.makedirs(dest, exist_ok=True) diff --git a/sync/.sync-to-prod.sh.swp b/sync/.sync-to-prod.sh.swp new file mode 100644 index 0000000000000000000000000000000000000000..75f3529bd37db6f81cb1f6cd48c5ac3accac90cb GIT binary patch literal 12288 zcmeI2L2nyH6vroAXbLSAd;nhUsDY@vYbPx#i9!{eG%-@*D0W(*rj5qCV|(lIt~;}v z#7Yb&4jj2~Lwo=Zh%*uwRBq4{2e=_|feRNt0RLGhb(#<*QmI6xS?O=@&dz%?^PBfx z5@qUz+vR1t;Z8DKml&I_epeq|{FLqFFEPpMLYnqdF3qFR<8!(^cD08 zB%lDAhf2^d7a98r`VsmXN}>DEMd-H+jC~LN{wiaSp>Lo!q4Urv^y@2(eG7d7b)h!& z09uCrI?vdj&>zra=pnQO5p)jvd6cm)p_|YM^aykR9Qq7ef)*gNo)Tm)OTZGa1S|nd zz!G>)1R5bD@>}tCM8!g(aBbRrUZ+MXWh0bAhB|rjkc6GA!BIFT*gMt?N0~SH>mGY~ zvJH`!YUSbVA(vh~j67cno(Sj3@b8`A4CBEzvpDm3gK0Ri3^Hp!pxoH*>RM%fy;Lh# zRyNlvm0I5$+bzx278WaOA8uA_>+koy9PRDVE(w65+$|0;0VOfGnWK{wgi82Hta`Z_ z5<6i+#Tn{n{_}K4j4MH0VogDhT0)7dR8JH7Ak|42QHg6o+dN8)!&aza6*lPZU2+?W+T`rexI3}o?sTL(F>cnpws5;##ftkm^vl!I zK~J$IR1Y(Wnplc2x(UGs14v=!_I!Ee=@!B+8;r8ROS=Ig0lz&S1eP*qP08U~UBV59+T7aWLM?+}xRZl`IC!6< z1H!`z9M6=Y6R%WRUaeFYHp}w{YL*7Wrhgr+LAha%;do|8YNFGP6<0k7sM;>9` z8oR2qSRtKu(FG@U&0s{k{Uc-OG>FNObmht*LMA52>F}KZyV{}xIZ3Z8NYWm4x#AMZ z4p!Ttl#U-J;P!xuH!uc#{3qBqm|2!?2`T-S@Y_fmZT4ffTAD0OUB_ERE7KHn3K>;~pNBDy!@AgmczA>|>k$&UayYIT5*Bt-<5Xloq_0TawGE2!&(}Wf( zj8iRp6SS>j3nEwnM}N zBC92|OXY^CDn0n?v{Bf|=^lxyLCVaYscvRVGt<5c#R~Any$(if^*$eZ5f7Iq-jbcYzWX05V0Uta4o zra{gZK=2k|M7(2e3%R#(0>0CUqZ~UiqOZMkwODw!Xg&()RUP;ln9&wjp^1Hs)4Wtg lP1AI!@HSN3F}u?=n}Z)S=1hI+Ct)11{N?NiULBjo{sC&2w+sLP literal 0 HcmV?d00001
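
The core of this change is the new Shared.treeinfo_modify_write(data, imagemap) helper: IsoBuild.tweak_treeinfo now only assembles a dictionary of release metadata and delegates to it, and RepoSync gains a tweak_treeinfo stub whose docstring says it will do the same for the primary repositories. The sketch below is one way that stub could call the shared helper; it is an illustration under stated assumptions, not part of this patch. The import path, the RepoSync attribute names (iso_map, checksum, distname, fullname, shortname, release, timestamp, log), and the sync_root/<variant>/<arch>/os layout are assumptions drawn from how IsoBuild builds the data dict above.

import os

from empanadas.util import Shared  # assumed import path


def tweak_treeinfo(self, repo, sync_root, arch):
    """
    Hypothetical body for the RepoSync.tweak_treeinfo stub added in this patch.
    Walks the variants known to iso_map and rewrites their .treeinfo, .discinfo
    and media.repo via the shared helper. Intended to live on RepoSync; the
    self attributes used here are assumptions.
    """
    for variant, imagemap in self.iso_map['images'].items():
        # Regular runs only touch the os/ tree; kickstart/ only exists on fullruns.
        variant_path = os.path.join(sync_root, variant, arch, 'os')

        data = {
            'arch': arch,
            'variant': variant,
            'variant_path': variant_path,
            'checksum': self.checksum,   # attribute names assumed to mirror IsoBuild
            'distname': self.distname,
            'fullname': self.fullname,
            'shortname': self.shortname,
            'release': self.release,
            'timestamp': self.timestamp,
        }

        try:
            Shared.treeinfo_modify_write(data, imagemap)
        except Exception as exc:
            self.log.error('Could not tweak treeinfo for %s %s: %s', variant, arch, exc)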
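
Elsewhere in this patch, fpsync_method stops taking a logger and instead returns a (message, returncode) pair, and run_compose_closeout now chooses between fpsync and a parallel+rsync fallback before logging the outcome itself. The rsync_method hunk shown above only sets up its command paths and switches; the snippet below is a guess at how such a find | parallel | rsync pipeline is commonly assembled, not the project's actual implementation. The pipeline shape, the -j 8 job count, and the return messages are assumptions; only the switches string and the (message, returncode) convention come from the patch.

import os
import subprocess


def rsync_method_sketch(src, dest):
    """
    Hypothetical parallel rsync fallback following the (message, returncode)
    convention this patch introduces for fpsync_method.
    """
    find_cmd = '/usr/bin/find'
    parallel_cmd = '/usr/bin/parallel'
    rsync_cmd = '/usr/bin/rsync'
    switches = '-av --chown=10004:10005 --progress --relative --human-readable'

    os.makedirs(dest, exist_ok=True)

    # List the top level of src as relative paths and hand each entry to parallel,
    # which runs one rsync --relative copy per entry into dest.
    pipeline = (
        'cd {0} && {1} . -mindepth 1 -maxdepth 1 | '
        '{2} -j 8 {3} {4} {{}} {5}/'
    ).format(src, find_cmd, parallel_cmd, rsync_cmd, switches, dest)

    process = subprocess.call(
        pipeline,
        shell=True,
        stdout=subprocess.DEVNULL,
        stderr=subprocess.DEVNULL,
    )

    if process != 0:
        return 'Syncing (rsync) failed', process

    if os.path.exists(dest):
        return 'Syncing (rsync) succeeded', process

    return 'Path synced does not seem to exist for some reason.', 1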