move treeinfo writing to Shared

Louis Abel 2022-07-03 21:00:57 -07:00
parent 3cf47dd85c
commit 77178e9657
Signed by untrusted user: label
GPG Key ID: B37E62D143879B36
4 changed files with 287 additions and 82 deletions

View File

@@ -242,6 +242,7 @@ class RepoSync:
         if self.fullrun:
             self.deploy_extra_files(sync_root, global_work_root)
             self.deploy_treeinfo(self.repo, sync_root, self.arch)
+            self.tweak_treeinfo(self.repo, sync_root, self.arch)
             self.symlink_to_latest(generated_dir)

         if self.repoclosure:
@@ -250,8 +251,11 @@ class RepoSync:
         if self.refresh_extra_files and not self.fullrun:
             self.deploy_extra_files(sync_root, global_work_root)

+        # This does NOT overwrite treeinfo files. This just ensures they exist
+        # and are configured correctly.
         if self.refresh_treeinfo and not self.fullrun:
             self.deploy_treeinfo(self.repo, sync_root, self.arch)
+            self.tweak_treeinfo(self.repo, sync_root, self.arch)

         self.deploy_metadata(sync_root)
@@ -450,6 +454,53 @@ class RepoSync:
             os.chmod(entry_point_sh, 0o755)
             os.chmod(debug_entry_point_sh, 0o755)

+            # During fullruns, a kickstart directory is made. Kickstart
+            # should not be updated nor touched during regular runs under
+            # any circumstances.
+            if self.fullrun:
+                ks_entry_name = '{}-ks-{}'.format(r, a)
+                entry_name_list.append(ks_entry_name)
+
+                ks_point_sh = os.path.join(
+                        entries_dir,
+                        ks_entry_name
+                )
+
+                ks_sync_path = os.path.join(
+                        sync_root,
+                        repo_name,
+                        a,
+                        'kickstart'
+                )
+
+                ks_sync_cmd = ("/usr/bin/dnf reposync -c {}.{} --download-metadata "
+                        "--repoid={} -p {} --forcearch {} --norepopath "
+                        "--gpgcheck --assumeyes 2>&1").format(
+                        self.dnf_config,
+                        a,
+                        r,
+                        ks_sync_path,
+                        a
+                )
+
+                ks_sync_log = ("{}/{}-{}-ks.log").format(
+                        log_root,
+                        repo_name,
+                        a
+                )
+
+                ks_sync_template = self.tmplenv.get_template('reposync.tmpl')
+                ks_sync_output = ks_sync_template.render(
+                        import_gpg_cmd=import_gpg_cmd,
+                        arch_force_cp=arch_force_cp,
+                        dnf_plugin_cmd=dnf_plugin_cmd,
+                        sync_cmd=ks_sync_cmd,
+                        sync_log=ks_sync_log
+                )
+
+                ks_entry_point_open = open(ks_point_sh, "w+")
+                ks_entry_point_open.write(ks_sync_output)
+                ks_entry_point_open.close()
+                os.chmod(ks_point_sh, 0o755)
+
             # We ignoring sources?
             if (not self.ignore_source and not arch) or (
                     not self.ignore_source and arch == 'source'):
@@ -635,7 +686,6 @@ class RepoSync:
         config_file = open(fname, "w+")
         repolist = []
         for repo in self.repos:
-
             constructed_url = '{}/{}/repo/{}{}/$basearch'.format(
                     self.repo_base_url,
                     self.project_id,
@@ -1159,6 +1209,12 @@ class RepoSync:
                     repo_name + ' source media.repo already exists'
             )

+    def tweak_treeinfo(self, repo, sync_root, arch):
+        """
+        This modifies treeinfo for the primary repository. If the repository is
+        listed in the iso_map as a non-disc, it will be considered for modification.
+        """
+
     def run_compose_closeout(self):
         """
         Closes out a compose as file. This ensures kickstart repositories are
@@ -1185,8 +1241,12 @@ class RepoSync:
         # Verify if the link even exists
         if not os.path.exists(self.compose_latest_dir):
-            self.log.error('!! Latest compose link is broken does not exist: %s' % self.compose_latest_dir)
-            self.log.error('!! Please perform a full run if you have not done so.')
+            self.log.error(
+                    '!! Latest compose link is broken does not exist: %s' % self.compose_latest_dir
+            )
+            self.log.error(
+                    '!! Please perform a full run if you have not done so.'
+            )
             raise SystemExit()

         log_root = os.path.join(
@@ -1210,12 +1270,28 @@ class RepoSync:
             'Starting to sync ISOs to compose'
         )

-        iso_result = Shared.fpsync_method(iso_root, sync_iso_root, self.log, tmp_dir)
-
-        if not iso_result:
+        if os.path.exists('/usr/bin/fpsync'):
+            message, ret = Shared.fpsync_method(iso_root, sync_iso_root, tmp_dir)
+        elif os.path.exists('/usr/bin/parallel') and os.path.exists('/usr/bin/rsync'):
+            message, ret = Shared.rsync_method(iso_root, sync_iso_root)
+        else:
             self.log.error(
                 '[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' +
-                'Sync failed'
+                'fpsync nor parallel + rsync were found on this system. ' +
+                'There is also no built-in parallel rsync method at this ' +
+                'time.'
+            )
+            raise SystemExit()
+
+        if ret != 0:
+            self.log.error(
+                '[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' +
+                message
+            )
+        else:
+            self.log.info(
+                '[' + Color.BOLD + Color.GREEN + 'INFO' + Color.END + '] ' +
+                message
             )

 class SigRepoSync:
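Note: tweak_treeinfo is added above only as a stub with a docstring; this commit does not give it a body. Purely as a hypothetical illustration, a body could delegate to the Shared.treeinfo_modify_write helper introduced later in this commit. Everything below (the attribute names on RepoSync, the iso_map lookup, and the path layout) is an assumption, not part of the change:

# Hypothetical sketch only; not part of this commit.
def tweak_treeinfo(self, repo, sync_root, arch):
    """
    Modifies treeinfo for the primary repository via the shared helper.
    """
    # Assumption: the repo name keys into an iso_map-style dict like IsoBuild uses.
    imagemap = self.iso_map['images'].get(repo)
    if not imagemap or imagemap.get('disc'):
        return
    data = {
        'arch': arch,
        'variant': repo,
        # Assumption: the primary repo tree lives under <sync_root>/<repo>/<arch>/os
        'variant_path': os.path.join(sync_root, repo, arch, 'os'),
        'checksum': self.checksum,      # assumed attribute names on RepoSync
        'distname': self.distname,
        'fullname': self.fullname,
        'shortname': self.shortname,
        'release': self.fullversion,
        'timestamp': self.timestamp,
    }
    try:
        Shared.treeinfo_modify_write(data, imagemap)
    except Exception as exc:
        self.log.error('Failed to modify treeinfo for %s: %s', repo, exc)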

View File

@@ -799,54 +799,76 @@ class IsoBuild:
         is for basic use. Eventually it'll be expanded to handle this scenario.
         """
         image = os.path.join(self.lorax_work_dir, arch, variant)
-        treeinfo = os.path.join(image, '.treeinfo')
-        discinfo = os.path.join(image, '.discinfo')
-        mediarepo = os.path.join(image, 'media.repo')
         imagemap = self.iso_map['images'][variant]
-        primary = imagemap['variant']
-        repos = imagemap['repos']
-        is_disc = False
-
-        if imagemap['disc']:
-            is_disc = True
-            discnum = 1
+        data = {
+            'arch': arch,
+            'variant': variant,
+            'variant_path': image,
+            'checksum': self.checksum,
+            'distname': self.distname,
+            'fullname': self.fullname,
+            'shortname': self.shortname,
+            'release': self.release,
+            'timestamp': self.timestamp,
+        }
+
+        try:
+            Shared.treeinfo_modify_write(data, imagemap)
+        except Exception as e:
+            self.log.error(
+                '[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' +
+                'There was an error writing treeinfo.'
+            )
+            self.log.error(e)
+
+        #treeinfo = os.path.join(image, '.treeinfo')
+        #discinfo = os.path.join(image, '.discinfo')
+        #mediarepo = os.path.join(image, 'media.repo')
+        #imagemap = self.iso_map['images'][variant]
+        #primary = imagemap['variant']
+        #repos = imagemap['repos']
+        #is_disc = False
+
+        #if imagemap['disc']:
+        #    is_disc = True
+        #    discnum = 1

         # load up productmd
-        ti = productmd.treeinfo.TreeInfo()
-        ti.load(treeinfo)
+        #ti = productmd.treeinfo.TreeInfo()
+        #ti.load(treeinfo)

         # Set the name
-        ti.release.name = self.distname
-        ti.release.short = self.shortname
+        #ti.release.name = self.distname
+        #ti.release.short = self.shortname

         # Set the version (the initial lorax run does this, but we are setting
         # it just in case)
-        ti.release.version = self.release
+        #ti.release.version = self.release

         # Assign the present images into a var as a copy. For each platform,
         # clear out the present dictionary. For each item and path in the
         # assigned var, assign it back to the platform dictionary. If the path
         # is empty, continue. Do checksums afterwards.
-        plats = ti.images.images.copy()
-        for platform in ti.images.images:
-            ti.images.images[platform] = {}
-            for i, p in plats[platform].items():
-                if not p:
-                    continue
-                if 'boot.iso' in i and is_disc:
-                    continue
-                ti.images.images[platform][i] = p
-                ti.checksums.add(p, self.checksum, root_dir=image)
+        #plats = ti.images.images.copy()
+        #for platform in ti.images.images:
+        #    ti.images.images[platform] = {}
+        #    for i, p in plats[platform].items():
+        #        if not p:
+        #            continue
+        #        if 'boot.iso' in i and is_disc:
+        #            continue
+        #        ti.images.images[platform][i] = p
+        #        ti.checksums.add(p, self.checksum, root_dir=image)

         # stage2 checksums
-        if ti.stage2.mainimage:
-            ti.checksums.add(ti.stage2.mainimage, self.checksum, root_dir=image)
-        if ti.stage2.instimage:
-            ti.checksums.add(ti.stage2.instimage, self.checksum, root_dir=image)
+        #if ti.stage2.mainimage:
+        #    ti.checksums.add(ti.stage2.mainimage, self.checksum, root_dir=image)
+        #if ti.stage2.instimage:
+        #    ti.checksums.add(ti.stage2.instimage, self.checksum, root_dir=image)

         # If we are a disc, set the media section appropriately.
-        if is_disc:
-            ti.media.discnum = discnum
-            ti.media.totaldiscs = discnum
+        #if is_disc:
+        #    ti.media.discnum = discnum
+        #    ti.media.totaldiscs = discnum

         # Create variants
         # Note to self: There's a lot of legacy stuff running around for
@@ -854,38 +876,38 @@ class IsoBuild:
         # apparently. But there could be a chance it'll change. We may need to
         # put in a configuration to deal with it at some point.
         #ti.variants.variants.clear()
-        for y in repos:
-            if y in ti.variants.variants.keys():
-                vari = ti.variants.variants[y]
-            else:
-                vari = productmd.treeinfo.Variant(ti)
-            vari.id = y
-            vari.uid = y
-            vari.name = y
-            vari.type = "variant"
-            if is_disc:
-                vari.paths.repository = y
-                vari.paths.packages = y + "/Packages"
-            else:
-                if y == primary:
-                    vari.paths.repository = "."
-                    vari.paths.packages = "Packages"
-                else:
-                    vari.paths.repository = "../../../" + y + "/" + arch + "/os"
-                    vari.paths.packages = "../../../" + y + "/" + arch + "/os/Packages"
-            if y not in ti.variants.variants.keys():
-                ti.variants.add(vari)
-            del vari
+        #for y in repos:
+        #    if y in ti.variants.variants.keys():
+        #        vari = ti.variants.variants[y]
+        #    else:
+        #        vari = productmd.treeinfo.Variant(ti)
+        #    vari.id = y
+        #    vari.uid = y
+        #    vari.name = y
+        #    vari.type = "variant"
+        #    if is_disc:
+        #        vari.paths.repository = y
+        #        vari.paths.packages = y + "/Packages"
+        #    else:
+        #        if y == primary:
+        #            vari.paths.repository = "."
+        #            vari.paths.packages = "Packages"
+        #        else:
+        #            vari.paths.repository = "../../../" + y + "/" + arch + "/os"
+        #            vari.paths.packages = "../../../" + y + "/" + arch + "/os/Packages"
+        #    if y not in ti.variants.variants.keys():
+        #        ti.variants.add(vari)
+        #    del vari

         # Set default variant
-        ti.dump(treeinfo, main_variant=primary)
+        #ti.dump(treeinfo, main_variant=primary)

         # Set discinfo
-        Shared.discinfo_write(self.timestamp, self.fullname, arch, discinfo)
+        #Shared.discinfo_write(self.timestamp, self.fullname, arch, discinfo)

         # Set media.repo
-        Shared.media_repo_write(self.timestamp, self.fullname, mediarepo)
+        #Shared.media_repo_write(self.timestamp, self.fullname, mediarepo)

     # Next set of functions are loosely borrowed (in concept) from pungi. Some
     # stuff may be combined/mixed together, other things may be simplified or

View File

@@ -107,11 +107,115 @@ class Shared:
         ti.dump(file_path)

     @staticmethod
-    def treeinfo_modify_write():
+    def treeinfo_modify_write(data, imagemap):
         """
         Modifies a specific treeinfo with already available data. This is in
         the case of modifying treeinfo for primary repos or images.
         """
+        arch = data['arch']
+        variant = data['variant']
+        variant_path = data['variant_path']
+        checksum = data['checksum']
+        distname = data['distname']
+        fullname = data['fullname']
+        shortname = data['shortname']
+        release = data['release']
+        timestamp = data['timestamp']
+
+        os_or_ks = ''
+        if '/os/' in variant_path:
+            os_or_ks = 'os'
+        if '/kickstart/' in variant_path:
+            os_or_ks = 'kickstart'
+
+        image = os.path.join(variant_path)
+        treeinfo = os.path.join(image, '.treeinfo')
+        discinfo = os.path.join(image, '.discinfo')
+        mediarepo = os.path.join(image, 'media.repo')
+        #imagemap = self.iso_map['images'][variant]
+        primary = imagemap['variant']
+        repos = imagemap['repos']
+        is_disc = False
+
+        if imagemap['disc']:
+            is_disc = True
+            discnum = 1
+
+        # load up productmd
+        ti = productmd.treeinfo.TreeInfo()
+        ti.load(treeinfo)
+
+        # Set the name
+        ti.release.name = distname
+        ti.release.short = shortname
+
+        # Set the version (the initial lorax run does this, but we are setting
+        # it just in case)
+        ti.release.version = release
+
+        # Assign the present images into a var as a copy. For each platform,
+        # clear out the present dictionary. For each item and path in the
+        # assigned var, assign it back to the platform dictionary. If the path
+        # is empty, continue. Do checksums afterwards.
+        plats = ti.images.images.copy()
+        for platform in ti.images.images:
+            ti.images.images[platform] = {}
+            for i, p in plats[platform].items():
+                if not p:
+                    continue
+                if 'boot.iso' in i and is_disc:
+                    continue
+                ti.images.images[platform][i] = p
+                ti.checksums.add(p, checksum, root_dir=image)
+
+        # stage2 checksums
+        if ti.stage2.mainimage:
+            ti.checksums.add(ti.stage2.mainimage, checksum, root_dir=image)
+        if ti.stage2.instimage:
+            ti.checksums.add(ti.stage2.instimage, checksum, root_dir=image)
+
+        # If we are a disc, set the media section appropriately.
+        if is_disc:
+            ti.media.discnum = discnum
+            ti.media.totaldiscs = discnum
+
+        # Create variants
+        # Note to self: There's a lot of legacy stuff running around for
+        # Fedora, ELN, and RHEL in general. This is the general structure,
+        # apparently. But there could be a chance it'll change. We may need to
+        # put in a configuration to deal with it at some point.
+        #ti.variants.variants.clear()
+        for y in repos:
+            if y in ti.variants.variants.keys():
+                vari = ti.variants.variants[y]
+            else:
+                vari = productmd.treeinfo.Variant(ti)
+
+            vari.id = y
+            vari.uid = y
+            vari.name = y
+            vari.type = "variant"
+            if is_disc:
+                vari.paths.repository = y
+                vari.paths.packages = y + "/Packages"
+            else:
+                if y == primary:
+                    vari.paths.repository = "."
+                    vari.paths.packages = "Packages"
+                else:
+                    vari.paths.repository = "../../../" + y + "/" + arch + "/" + os_or_ks
+                    vari.paths.packages = "../../../" + y + "/" + arch + "/" + os_or_ks + "/Packages"
+
+            if y not in ti.variants.variants.keys():
+                ti.variants.add(vari)
+
+            del vari
+
+        # Set default variant
+        ti.dump(treeinfo, main_variant=primary)
+
+        # Set discinfo
+        Shared.discinfo_write(timestamp, fullname, arch, discinfo)
+
+        # Set media.repo
+        Shared.media_repo_write(timestamp, fullname, mediarepo)

     @staticmethod
     def write_metadata(
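For orientation, a minimal sketch of how a caller is expected to invoke the relocated helper, mirroring the data and imagemap shapes used by the IsoBuild change above. All concrete values (variant names, paths, release, timestamp) are placeholders, not part of this commit:

# Sketch only: placeholder values, assuming a .treeinfo already exists under
# variant_path (e.g. written by lorax or by treeinfo_write earlier).
imagemap = {
    'variant': 'BaseOS',
    'repos': ['BaseOS', 'AppStream'],
    'disc': False,
}
data = {
    'arch': 'x86_64',
    'variant': 'BaseOS',
    'variant_path': '/mnt/compose/8/latest/compose/BaseOS/x86_64/os',
    'checksum': 'sha256',
    'distname': 'Rocky Linux',
    'fullname': 'Rocky Linux 8',
    'shortname': 'Rocky',
    'release': '8.6',
    'timestamp': '1656892800',
}

try:
    Shared.treeinfo_modify_write(data, imagemap)
except Exception as exc:
    # Callers are expected to handle and log failures themselves.
    print('treeinfo modify failed: {}'.format(exc))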
@@ -345,18 +449,16 @@ class Shared:
         """

     @staticmethod
-    def fpsync_method(src, dest, logger, tmp_dir):
+    def fpsync_method(src, dest, tmp_dir):
         """
         Returns a list for the fpsync command
         """
         cmd = '/usr/bin/fpsync'
         rsync_switches = '-av --numeric-ids --no-compress --chown=10004:10005'
         if not os.path.exists(cmd):
-            logger.warn(
-                '[' + Color.BOLD + Color.YELLOW + 'WARN' + Color.END + '] ' +
-                'fpsync not found'
-            )
-            return False
+            message = 'fpsync not found'
+            retval = 1
+            return message, retval

         os.makedirs(tmp_dir, exist_ok=True)
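With the logger dropped from the signature, these helpers now report status through a (message, retval) tuple and leave logging to the caller, as the RepoSync change above shows. A minimal usage sketch; the paths are placeholders and Shared refers to the class defined in this file:

import logging
import os

log = logging.getLogger(__name__)

# Placeholder paths for illustration; real callers pass compose paths.
iso_root = '/mnt/compose/8/latest/isos'
sync_iso_root = '/mnt/compose/8/latest/compose/isos'
tmp_dir = '/var/tmp/fpsync'

if os.path.exists('/usr/bin/fpsync'):
    message, ret = Shared.fpsync_method(iso_root, sync_iso_root, tmp_dir)
elif os.path.exists('/usr/bin/parallel') and os.path.exists('/usr/bin/rsync'):
    message, ret = Shared.rsync_method(iso_root, sync_iso_root)
else:
    raise SystemExit('neither fpsync nor parallel + rsync is available')

# The caller decides how a non-zero return value is reported.
if ret != 0:
    log.error(message)
else:
    log.info(message)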
@@ -373,25 +475,30 @@ class Shared:
             stdout=subprocess.DEVNULL,
             stderr=subprocess.DEVNULL,
         )

         if process != 0:
-            logger.error(
-                '[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' +
-                'fpsync failed'
-            )
-            return False
+            message = 'Syncing (fpsync) failed'
+            retval = process
+            return message, retval

         if os.path.exists(dest):
-            return True
+            message = 'Syncing (fpsync) succeeded'
+            retval = process
         else:
-            logger.error(
-                '[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' +
-                'Path synced does not seem to exist for some reason.'
-            )
-            return False
+            message = 'Path synced does not seem to exist for some reason.'
+            retval = 1
+
+        return message, retval

     @staticmethod
-    def rsync_method(src, dest, logger, tmp_dir):
+    def rsync_method(src, dest):
         """
         Returns a string for the rsync command plus parallel. Yes, this is a
         hack.
         """
+        find_cmd = '/usr/bin/find'
+        parallel_cmd = '/usr/bin/parallel'
+        rsync_cmd = '/usr/bin/rsync'
+        switches = '-av --chown=10004:10005 --progress --relative --human-readable'
+
+        os.makedirs(dest, exist_ok=True)

BIN  sync/.sync-to-prod.sh.swp (new file; binary content not shown)