move treeinfo writing to Shared

This commit is contained in:
Louis Abel 2022-07-03 21:00:57 -07:00
parent 3cf47dd85c
commit 77178e9657
Signed by: label
GPG Key ID: B37E62D143879B36
4 changed files with 287 additions and 82 deletions

View File

@ -242,6 +242,7 @@ class RepoSync:
if self.fullrun:
self.deploy_extra_files(sync_root, global_work_root)
self.deploy_treeinfo(self.repo, sync_root, self.arch)
self.tweak_treeinfo(self.repo, sync_root, self.arch)
self.symlink_to_latest(generated_dir)
if self.repoclosure:
@ -250,8 +251,11 @@ class RepoSync:
if self.refresh_extra_files and not self.fullrun:
self.deploy_extra_files(sync_root, global_work_root)
# This does NOT overwrite treeinfo files. This just ensures they exist
# and are configured correctly.
if self.refresh_treeinfo and not self.fullrun:
self.deploy_treeinfo(self.repo, sync_root, self.arch)
self.tweak_treeinfo(self.repo, sync_root, self.arch)
self.deploy_metadata(sync_root)
@ -450,6 +454,53 @@ class RepoSync:
os.chmod(entry_point_sh, 0o755)
os.chmod(debug_entry_point_sh, 0o755)
# During fullruns, a kickstart directory is made. Kickstart
# should not be updated nor touched during regular runs under
# any circumstances.
if self.fullrun:
ks_entry_name = '{}-ks-{}'.format(r, a)
entry_name_list.append(ks_entry_name)
ks_point_sh = os.path.join(
entries_dir,
ks_entry_name
)
ks_sync_path = os.path.join(
sync_root,
repo_name,
a,
'kickstart'
)
ks_sync_cmd = ("/usr/bin/dnf reposync -c {}.{} --download-metadata "
"--repoid={} -p {} --forcearch {} --norepopath "
"--gpgcheck --assumeyes 2>&1").format(
self.dnf_config,
a,
r,
ks_sync_path,
a
)
ks_sync_log = ("{}/{}-{}-ks.log").format(
log_root,
repo_name,
a
)
ks_sync_template = self.tmplenv.get_template('reposync.tmpl')
ks_sync_output = ks_sync_template.render(
import_gpg_cmd=import_gpg_cmd,
arch_force_cp=arch_force_cp,
dnf_plugin_cmd=dnf_plugin_cmd,
sync_cmd=ks_sync_cmd,
sync_log=ks_sync_log
)
ks_entry_point_open = open(ks_point_sh, "w+")
ks_entry_point_open.write(ks_sync_output)
ks_entry_point_open.close()
os.chmod(ks_point_sh, 0o755)
# Are we ignoring sources?
if (not self.ignore_source and not arch) or (
not self.ignore_source and arch == 'source'):
@ -635,7 +686,6 @@ class RepoSync:
config_file = open(fname, "w+")
repolist = []
for repo in self.repos:
constructed_url = '{}/{}/repo/{}{}/$basearch'.format(
self.repo_base_url,
self.project_id,
@ -1159,6 +1209,12 @@ class RepoSync:
repo_name + ' source media.repo already exists'
)
def tweak_treeinfo(self, repo, sync_root, arch):
    """
    This modifies treeinfo for the primary repository. If the repository is
    listed in the iso_map as a non-disc, it will be considered for modification.

    NOTE(review): currently a stub — no implementation is visible here yet.
    The repo/sync_root/arch parameters mirror those passed to
    deploy_treeinfo by the callers; presumably the same tree layout is
    expected here — confirm once the body is filled in.
    """
def run_compose_closeout(self):
"""
Closes out a compose as file. This ensures kickstart repositories are
@ -1185,8 +1241,12 @@ class RepoSync:
# Verify if the link even exists
if not os.path.exists(self.compose_latest_dir):
self.log.error('!! Latest compose link is broken does not exist: %s' % self.compose_latest_dir)
self.log.error('!! Please perform a full run if you have not done so.')
self.log.error(
'!! Latest compose link is broken does not exist: %s' % self.compose_latest_dir
)
self.log.error(
'!! Please perform a full run if you have not done so.'
)
raise SystemExit()
log_root = os.path.join(
@ -1210,12 +1270,28 @@ class RepoSync:
'Starting to sync ISOs to compose'
)
iso_result = Shared.fpsync_method(iso_root, sync_iso_root, self.log, tmp_dir)
if not iso_result:
if os.path.exists('/usr/bin/fpsync'):
message, ret = Shared.fpsync_method(iso_root, sync_iso_root, tmp_dir)
elif os.path.exists('/usr/bin/parallel') and os.path.exists('/usr/bin/rsync'):
message, ret = Shared.rsync_method(iso_root, sync_iso_root)
else:
self.log.error(
'[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' +
'Sync failed'
'fpsync nor parallel + rsync were found on this system. ' +
'There is also no built-in parallel rsync method at this ' +
'time.'
)
raise SystemExit()
if ret != 0:
self.log.error(
'[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' +
message
)
else:
self.log.info(
'[' + Color.BOLD + Color.GREEN + 'INFO' + Color.END + '] ' +
message
)
class SigRepoSync:

View File

@ -799,54 +799,76 @@ class IsoBuild:
is for basic use. Eventually it'll be expanded to handle this scenario.
"""
image = os.path.join(self.lorax_work_dir, arch, variant)
treeinfo = os.path.join(image, '.treeinfo')
discinfo = os.path.join(image, '.discinfo')
mediarepo = os.path.join(image, 'media.repo')
imagemap = self.iso_map['images'][variant]
primary = imagemap['variant']
repos = imagemap['repos']
is_disc = False
data = {
'arch': arch,
'variant': variant,
'variant_path': image,
'checksum': self.checksum,
'distname': self.distname,
'fullname': self.fullname,
'shortname': self.shortname,
'release': self.release,
'timestamp': self.timestamp,
}
if imagemap['disc']:
is_disc = True
discnum = 1
try:
Shared.treeinfo_modify_write(data, imagemap)
except Exception as e:
self.log.error(
'[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' +
'There was an error writing treeinfo.'
)
self.log.error(e)
#treeinfo = os.path.join(image, '.treeinfo')
#discinfo = os.path.join(image, '.discinfo')
#mediarepo = os.path.join(image, 'media.repo')
#imagemap = self.iso_map['images'][variant]
#primary = imagemap['variant']
#repos = imagemap['repos']
#is_disc = False
#if imagemap['disc']:
# is_disc = True
# discnum = 1
# load up productmd
ti = productmd.treeinfo.TreeInfo()
ti.load(treeinfo)
#ti = productmd.treeinfo.TreeInfo()
#ti.load(treeinfo)
# Set the name
ti.release.name = self.distname
ti.release.short = self.shortname
#ti.release.name = self.distname
#ti.release.short = self.shortname
# Set the version (the initial lorax run does this, but we are setting
# it just in case)
ti.release.version = self.release
#ti.release.version = self.release
# Assign the present images into a var as a copy. For each platform,
# clear out the present dictionary. For each item and path in the
# assigned var, assign it back to the platform dictionary. If the path
# is empty, continue. Do checksums afterwards.
plats = ti.images.images.copy()
for platform in ti.images.images:
ti.images.images[platform] = {}
for i, p in plats[platform].items():
if not p:
continue
if 'boot.iso' in i and is_disc:
continue
ti.images.images[platform][i] = p
ti.checksums.add(p, self.checksum, root_dir=image)
#plats = ti.images.images.copy()
#for platform in ti.images.images:
# ti.images.images[platform] = {}
# for i, p in plats[platform].items():
# if not p:
# continue
# if 'boot.iso' in i and is_disc:
# continue
# ti.images.images[platform][i] = p
# ti.checksums.add(p, self.checksum, root_dir=image)
# stage2 checksums
if ti.stage2.mainimage:
ti.checksums.add(ti.stage2.mainimage, self.checksum, root_dir=image)
#if ti.stage2.mainimage:
# ti.checksums.add(ti.stage2.mainimage, self.checksum, root_dir=image)
if ti.stage2.instimage:
ti.checksums.add(ti.stage2.instimage, self.checksum, root_dir=image)
#if ti.stage2.instimage:
# ti.checksums.add(ti.stage2.instimage, self.checksum, root_dir=image)
# If we are a disc, set the media section appropriately.
if is_disc:
ti.media.discnum = discnum
ti.media.totaldiscs = discnum
#if is_disc:
# ti.media.discnum = discnum
# ti.media.totaldiscs = discnum
# Create variants
# Note to self: There's a lot of legacy stuff running around for
@ -854,38 +876,38 @@ class IsoBuild:
# apparently. But there could be a chance it'll change. We may need to
# put in a configuration to deal with it at some point.
#ti.variants.variants.clear()
for y in repos:
if y in ti.variants.variants.keys():
vari = ti.variants.variants[y]
else:
vari = productmd.treeinfo.Variant(ti)
#for y in repos:
# if y in ti.variants.variants.keys():
# vari = ti.variants.variants[y]
# else:
# vari = productmd.treeinfo.Variant(ti)
vari.id = y
vari.uid = y
vari.name = y
vari.type = "variant"
if is_disc:
vari.paths.repository = y
vari.paths.packages = y + "/Packages"
else:
if y == primary:
vari.paths.repository = "."
vari.paths.packages = "Packages"
else:
vari.paths.repository = "../../../" + y + "/" + arch + "/os"
vari.paths.packages = "../../../" + y + "/" + arch + "/os/Packages"
# vari.id = y
# vari.uid = y
# vari.name = y
# vari.type = "variant"
# if is_disc:
# vari.paths.repository = y
# vari.paths.packages = y + "/Packages"
# else:
# if y == primary:
# vari.paths.repository = "."
# vari.paths.packages = "Packages"
# else:
# vari.paths.repository = "../../../" + y + "/" + arch + "/os"
# vari.paths.packages = "../../../" + y + "/" + arch + "/os/Packages"
if y not in ti.variants.variants.keys():
ti.variants.add(vari)
# if y not in ti.variants.variants.keys():
# ti.variants.add(vari)
del vari
# del vari
# Set default variant
ti.dump(treeinfo, main_variant=primary)
#ti.dump(treeinfo, main_variant=primary)
# Set discinfo
Shared.discinfo_write(self.timestamp, self.fullname, arch, discinfo)
#Shared.discinfo_write(self.timestamp, self.fullname, arch, discinfo)
# Set media.repo
Shared.media_repo_write(self.timestamp, self.fullname, mediarepo)
#Shared.media_repo_write(self.timestamp, self.fullname, mediarepo)
# Next set of functions are loosely borrowed (in concept) from pungi. Some
# stuff may be combined/mixed together, other things may be simplified or

View File

@ -107,11 +107,115 @@ class Shared:
ti.dump(file_path)
@staticmethod
def treeinfo_modify_write(data, imagemap):
    """
    Modify an existing .treeinfo file using already-collected compose data,
    then (re)write the matching .discinfo and media.repo files next to it.
    This is in the case of modifying treeinfo for primary repos or images.

    Parameters:
        data: dict carrying the compose context with keys
            'arch', 'variant_path', 'checksum', 'distname', 'fullname',
            'shortname', 'release', 'timestamp'
            ('variant' is also supplied by callers but is not used here).
        imagemap: one entry of the iso_map 'images' table, with keys
            'variant' (name of the primary repo), 'repos' (list of repo
            names to turn into treeinfo variants), and 'disc' (truthy when
            this tree represents a disc image).
    """
    arch = data['arch']
    variant_path = data['variant_path']
    checksum = data['checksum']
    distname = data['distname']
    fullname = data['fullname']
    shortname = data['shortname']
    release = data['release']
    timestamp = data['timestamp']

    # Relative repo paths differ depending on whether the tree lives
    # under an .../os/ or a .../kickstart/ directory.
    # NOTE(review): if variant_path contains neither segment, os_or_ks
    # stays '' and the generated relative paths contain a double slash —
    # callers appear to always pass one of the two layouts; confirm.
    os_or_ks = ''
    if '/os/' in variant_path:
        os_or_ks = 'os'
    if '/kickstart/' in variant_path:
        os_or_ks = 'kickstart'

    # variant_path is already a complete path; no join needed.
    image = variant_path
    treeinfo = os.path.join(image, '.treeinfo')
    discinfo = os.path.join(image, '.discinfo')
    mediarepo = os.path.join(image, 'media.repo')
    primary = imagemap['variant']
    repos = imagemap['repos']
    is_disc = False
    if imagemap['disc']:
        is_disc = True
        discnum = 1

    # load up productmd on the treeinfo produced by the initial run
    ti = productmd.treeinfo.TreeInfo()
    ti.load(treeinfo)

    # Set the name
    ti.release.name = distname
    ti.release.short = shortname
    # Set the version (the initial lorax run does this, but we are setting
    # it just in case)
    ti.release.version = release

    # Assign the present images into a var as a copy. For each platform,
    # clear out the present dictionary. For each item and path in the
    # assigned var, assign it back to the platform dictionary. If the path
    # is empty, continue. Do checksums afterwards.
    plats = ti.images.images.copy()
    for platform in ti.images.images:
        ti.images.images[platform] = {}
        for i, p in plats[platform].items():
            if not p:
                continue
            # Disc trees drop boot.iso from their image list.
            if 'boot.iso' in i and is_disc:
                continue
            ti.images.images[platform][i] = p
            ti.checksums.add(p, checksum, root_dir=image)

    # stage2 checksums
    if ti.stage2.mainimage:
        ti.checksums.add(ti.stage2.mainimage, checksum, root_dir=image)
    if ti.stage2.instimage:
        ti.checksums.add(ti.stage2.instimage, checksum, root_dir=image)

    # If we are a disc, set the media section appropriately.
    if is_disc:
        ti.media.discnum = discnum
        ti.media.totaldiscs = discnum

    # Create variants
    # Note to self: There's a lot of legacy stuff running around for
    # Fedora, ELN, and RHEL in general. This is the general structure,
    # apparently. But there could be a chance it'll change. We may need to
    # put in a configuration to deal with it at some point.
    #ti.variants.variants.clear()
    for y in repos:
        # Reuse an existing variant entry when present; otherwise make one.
        if y in ti.variants.variants.keys():
            vari = ti.variants.variants[y]
        else:
            vari = productmd.treeinfo.Variant(ti)

        vari.id = y
        vari.uid = y
        vari.name = y
        vari.type = "variant"
        if is_disc:
            # On disc, every repo sits beside the treeinfo.
            vari.paths.repository = y
            vari.paths.packages = y + "/Packages"
        else:
            if y == primary:
                # The primary repo is the tree we are standing in.
                vari.paths.repository = "."
                vari.paths.packages = "Packages"
            else:
                # Sibling repos live three levels up, under their own
                # arch/os (or arch/kickstart) directory.
                vari.paths.repository = "../../../" + y + "/" + arch + "/" + os_or_ks
                vari.paths.packages = "../../../" + y + "/" + arch + "/" + os_or_ks + "/Packages"

        if y not in ti.variants.variants.keys():
            ti.variants.add(vari)

        del vari

    # Set default variant
    ti.dump(treeinfo, main_variant=primary)
    # Set discinfo
    Shared.discinfo_write(timestamp, fullname, arch, discinfo)
    # Set media.repo
    Shared.media_repo_write(timestamp, fullname, mediarepo)
@staticmethod
def write_metadata(
@ -345,18 +449,16 @@ class Shared:
"""
@staticmethod
def fpsync_method(src, dest, logger, tmp_dir):
def fpsync_method(src, dest, tmp_dir):
"""
Returns a list for the fpsync command
"""
cmd = '/usr/bin/fpsync'
rsync_switches = '-av --numeric-ids --no-compress --chown=10004:10005'
if not os.path.exists(cmd):
logger.warn(
'[' + Color.BOLD + Color.YELLOW + 'WARN' + Color.END + '] ' +
'fpsync not found'
)
return False
message = 'fpsync not found'
retval = 1
return message, retval
os.makedirs(tmp_dir, exist_ok=True)
@ -373,25 +475,30 @@ class Shared:
stdout=subprocess.DEVNULL,
stderr=subprocess.DEVNULL,
)
if process != 0:
logger.error(
'[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' +
'fpsync failed'
)
return False
message = 'Syncing (fpsync) failed'
retval = process
return message, retval
if os.path.exists(dest):
return True
message = 'Syncing (fpsync) succeeded'
retval = process
else:
logger.error(
'[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' +
'Path synced does not seem to exist for some reason.'
)
return False
message = 'Path synced does not seem to exist for some reason.'
retval = 1
return message, retval
@staticmethod
def rsync_method(src, dest, logger, tmp_dir):
def rsync_method(src, dest):
"""
Returns a string for the rsync command plus parallel. Yes, this is a
hack.
"""
find_cmd = '/usr/bin/find'
parallel_cmd = '/usr/bin/parallel'
rsync_cmd = '/usr/bin/rsync'
switches = '-av --chown=10004:10005 --progress --relative --human-readable'
os.makedirs(dest, exist_ok=True)

BIN
sync/.sync-to-prod.sh.swp Normal file

Binary file not shown.