test out parts of finalize

This commit is contained in:
Louis Abel 2022-07-03 00:19:13 -07:00
parent 2884bb0eaa
commit c081f6f202
Signed by untrusted user: label
GPG Key ID: B37E62D143879B36
4 changed files with 294 additions and 99 deletions

View File

@ -1,6 +1,7 @@
--- ---
cloud: cloud:
'8': '8':
profile: 'cloud'
cloud-kernel: cloud-kernel:
project_id: 'f91da90d-5bdb-4cf2-80ea-e07f8dae5a5c' project_id: 'f91da90d-5bdb-4cf2-80ea-e07f8dae5a5c'
allowed_arches: allowed_arches:
@ -10,7 +11,15 @@ cloud:
allowed_arches: allowed_arches:
- aarch64 - aarch64
- x86_64 - x86_64
project_id: '' project_id: 'f91da90d-5bdb-4cf2-80ea-e07f8dae5a5c'
extra_files:
git_repo: 'https://git.rockylinux.org/staging/src/rocky-release-cloud.git'
git_raw_path: 'https://git.rockylinux.org/staging/src/rocky-release-cloud/-/raw/r8/'
branch: 'r8'
gpg:
stable: 'SOURCES/RPM-GPG-KEY-Rocky-SIG-Cloud'
list:
- 'SOURCES/RPM-GPG-KEY-Rocky-SIG-Cloud'
'9': '9':
cloud-kernel: cloud-kernel:
project_id: '' project_id: ''

View File

@ -186,7 +186,13 @@ class RepoSync:
# This should create the initial compose dir and set the path. # This should create the initial compose dir and set the path.
# Otherwise, just use the latest link. # Otherwise, just use the latest link.
if self.fullrun: if self.fullrun:
generated_dir = self.generate_compose_dirs() generated_dir = Shared.generate_compose_dirs(
self.compose_base,
self.shortname,
self.fullversion,
self.date_stamp,
self.log
)
work_root = os.path.join( work_root = os.path.join(
generated_dir, generated_dir,
'work' 'work'
@ -292,7 +298,7 @@ class RepoSync:
Each container runs their own script Each container runs their own script
wait till all is finished wait till all is finished
""" """
cmd = self.podman_cmd() cmd = Shared.podman_cmd(self.log)
contrunlist = [] contrunlist = []
bad_exit_list = [] bad_exit_list = []
self.log.info('Generating container entries') self.log.info('Generating container entries')
@ -582,24 +588,6 @@ class RepoSync:
'No issues detected.' 'No issues detected.'
) )
def generate_compose_dirs(self) -> str:
"""
Generate compose dirs for full runs
"""
compose_base_dir = os.path.join(
self.compose_base,
"{}-{}-{}".format(
self.shortname,
self.fullversion,
self.date_stamp
)
)
self.log.info('Creating compose directory %s' % compose_base_dir)
if not os.path.exists(compose_base_dir):
os.makedirs(compose_base_dir)
return compose_base_dir
def symlink_to_latest(self, generated_dir): def symlink_to_latest(self, generated_dir):
""" """
Emulates pungi and symlinks latest-Rocky-X Emulates pungi and symlinks latest-Rocky-X
@ -628,7 +616,7 @@ class RepoSync:
""" """
fname = os.path.join( fname = os.path.join(
dest_path, dest_path,
"{}-config.repo".format(self.major_version) "{}-{}-config.repo".format(self.shortname, self.major_version)
) )
self.log.info('Generating the repo configuration: %s' % fname) self.log.info('Generating the repo configuration: %s' % fname)
@ -676,60 +664,6 @@ class RepoSync:
config_file.close() config_file.close()
return fname return fname
def reposync_cmd(self) -> str:
"""
This generates the reposync command. We don't support reposync by
itself and will raise an error.
:return: The path to the reposync command. If dnf exists, we'll use
that. Otherwise, fail immediately.
"""
cmd = None
if os.path.exists("/usr/bin/dnf"):
cmd = "/usr/bin/dnf reposync"
else:
self.log.error('/usr/bin/dnf was not found. Good bye.')
raise SystemExit("/usr/bin/dnf was not found. \n\n/usr/bin/reposync "
"is not sufficient and you are likely running on an el7 "
"system or a grossly modified EL8+ system, " + Color.BOLD +
"which tells us that you probably made changes to these tools "
"expecting them to work and got to this point." + Color.END)
return cmd
def podman_cmd(self) -> str:
"""
This generates the podman run command. This is in the case that we want
to do reposyncs in parallel as we cannot reasonably run multiple
instances of dnf reposync on a single system.
"""
cmd = None
if os.path.exists("/usr/bin/podman"):
cmd = "/usr/bin/podman"
else:
self.log.error('/usr/bin/podman was not found. Good bye.')
raise SystemExit("\n\n/usr/bin/podman was not found.\n\nPlease "
" ensure that you have installed the necessary packages on "
" this system. " + Color.BOLD + "Note that docker is not "
"supported." + Color.END
)
return cmd
def git_cmd(self) -> str:
"""
This generates the git command. This is when we need to pull down extra
files or do work from a git repository.
"""
cmd = None
if os.path.exists("/usr/bin/git"):
cmd = "/usr/bin/git"
else:
self.log.error('/usr/bin/git was not found. Good bye.')
raise SystemExit("\n\n/usr/bin/git was not found.\n\nPlease "
" ensure that you have installed the necessary packages on "
" this system. "
)
return cmd
def repoclosure_work(self, sync_root, work_root, log_root): def repoclosure_work(self, sync_root, work_root, log_root):
""" """
This is where we run repoclosures, based on the configuration of each This is where we run repoclosures, based on the configuration of each
@ -740,7 +674,7 @@ class RepoSync:
against itself. (This means BaseOS should be able to survive by against itself. (This means BaseOS should be able to survive by
itself.) itself.)
""" """
cmd = self.podman_cmd() cmd = Shared.podman_cmd(self.log)
entries_dir = os.path.join(work_root, "entries") entries_dir = os.path.join(work_root, "entries")
bad_exit_list = [] bad_exit_list = []
@ -901,7 +835,7 @@ class RepoSync:
'Deploying treeinfo, discinfo, and media.repo' 'Deploying treeinfo, discinfo, and media.repo'
) )
cmd = self.git_cmd() cmd = Shared.git_cmd(self.log)
tmpclone = '/tmp/clone' tmpclone = '/tmp/clone'
extra_files_dir = os.path.join( extra_files_dir = os.path.join(
global_work_root, global_work_root,
@ -1231,7 +1165,57 @@ class RepoSync:
work/isos to compose/isos, and combines all checksum files per arch work/isos to compose/isos, and combines all checksum files per arch
into a final CHECKSUM file. into a final CHECKSUM file.
""" """
# latest-X-Y should exist at all times for this to work.
work_root = os.path.join(
self.compose_latest_dir,
'work'
)
sync_root = self.compose_latest_sync
sync_iso_root = os.path.join(
sync_root,
'isos'
)
tmp_dir = os.path.join(
self.compose_root,
'partitions'
)
# Verify if the link even exists
if not os.path.exists(self.compose_latest_dir):
self.log.error('!! Latest compose link is broken does not exist: %s' % self.compose_latest_dir)
self.log.error('!! Please perform a full run if you have not done so.')
raise SystemExit()
log_root = os.path.join(
work_root,
"logs",
self.date_stamp
)
iso_root = os.path.join(
work_root,
"isos"
)
global_work_root = os.path.join(
work_root,
"global",
)
self.log.info(
'[' + Color.BOLD + Color.GREEN + 'INFO' + Color.END + '] ' +
'Starting to sync ISOs to compose'
)
iso_result = Shared.fpsync_method(iso_root, sync_iso_root, self.log, tmp_dir)
if not iso_result:
self.log.error(
'[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' +
'Sync failed'
)
class SigRepoSync: class SigRepoSync:
""" """
@ -1278,6 +1262,7 @@ class SigRepoSync:
self.compose_root = config['compose_root'] self.compose_root = config['compose_root']
self.compose_base = config['compose_root'] + "/" + major self.compose_base = config['compose_root'] + "/" + major
self.profile = rlvars['profile'] self.profile = rlvars['profile']
self.sigprofile = sigvars['profile']
self.iso_map = rlvars['iso_map'] self.iso_map = rlvars['iso_map']
self.distname = config['distname'] self.distname = config['distname']
self.fullname = rlvars['fullname'] self.fullname = rlvars['fullname']
@ -1312,7 +1297,11 @@ class SigRepoSync:
self.compose_latest_dir = os.path.join( self.compose_latest_dir = os.path.join(
config['compose_root'], config['compose_root'],
major, major,
"latest-{}-{}-SIG".format(self.shortname, major) "latest-{}-{}-SIG-{}".format(
self.shortname,
major,
self.sigprofile
)
) )
self.compose_latest_sync = os.path.join( self.compose_latest_sync = os.path.join(
@ -1345,7 +1334,7 @@ class SigRepoSync:
self.log.info('sig reposync init') self.log.info('sig reposync init')
self.log.info(major) self.log.info(major)
#self.dnf_config = self.generate_conf() #self.dnf_config = Shared.generate_conf()
def run(self): def run(self):
""" """

View File

@ -1156,7 +1156,7 @@ class IsoBuild:
to the compose directories. It's the same as if you were doing a to the compose directories. It's the same as if you were doing a
reposync of the repositories. reposync of the repositories.
""" """
cmd = self.podman_cmd() cmd = Shared.podman_cmd(self.log)
entries_dir = os.path.join(work_root, "entries") entries_dir = os.path.join(work_root, "entries")
isos_dir = os.path.join(work_root, "isos") isos_dir = os.path.join(work_root, "isos")
bad_exit_list = [] bad_exit_list = []
@ -1766,24 +1766,6 @@ class IsoBuild:
returned_cmd = ' '.join(cmd) returned_cmd = ' '.join(cmd)
return returned_cmd return returned_cmd
def podman_cmd(self) -> str:
"""
This generates the podman run command. This is in the case that we want
to do reposyncs in parallel as we cannot reasonably run multiple
instances of dnf reposync on a single system.
"""
cmd = None
if os.path.exists("/usr/bin/podman"):
cmd = "/usr/bin/podman"
else:
self.log.error('/usr/bin/podman was not found. Good bye.')
raise SystemExit("\n\n/usr/bin/podman was not found.\n\nPlease "
" ensure that you have installed the necessary packages on "
" this system. " + Color.BOLD + "Note that docker is not "
"supported." + Color.END
)
return cmd
class LiveBuild: class LiveBuild:
""" """
This helps us build the live images for Rocky Linux. This helps us build the live images for Rocky Linux.

View File

@ -3,8 +3,11 @@
import os import os
import json import json
import hashlib import hashlib
import shlex
import subprocess
import yaml import yaml
import productmd.treeinfo import productmd.treeinfo
from empanadas.common import Color
class ArchCheck: class ArchCheck:
""" """
@ -179,3 +182,215 @@ class Shared:
with open(file_path, "w") as f: with open(file_path, "w") as f:
f.write("\n".join(data)) f.write("\n".join(data))
@staticmethod
def generate_compose_dirs(
compose_base,
shortname,
version,
date_stamp,
logger
) -> str:
"""
Generate compose dirs for full runs
"""
compose_base_dir = os.path.join(
compose_base,
"{}-{}-{}".format(
shortname,
version,
date_stamp
)
)
logger.info('Creating compose directory %s' % compose_base_dir)
if not os.path.exists(compose_base_dir):
os.makedirs(compose_base_dir)
return compose_base_dir
@staticmethod
def podman_cmd(logger) -> str:
    """
    Locate the podman binary used to run reposyncs in parallel, since
    multiple instances of dnf reposync cannot reasonably run on a
    single system.

    :param logger: Logger used to report a missing binary.
    :return: Path to the podman binary.
    :raises SystemExit: When podman is not installed.
    """
    podman_path = "/usr/bin/podman"
    if os.path.exists(podman_path):
        return podman_path

    logger.error('/usr/bin/podman was not found. Good bye.')
    raise SystemExit("\n\n/usr/bin/podman was not found.\n\nPlease "
            " ensure that you have installed the necessary packages on "
            " this system. " + Color.BOLD + "Note that docker is not "
            "supported." + Color.END
    )
@staticmethod
def reposync_cmd(logger) -> str:
    """
    This generates the reposync command. We don't support reposync by
    itself and will raise an error.

    :param logger: Logger used to report a missing binary.
    :return: The path to the reposync command. If dnf exists, we'll use
    that. Otherwise, fail immediately.
    :raises SystemExit: When /usr/bin/dnf is not installed.
    """
    cmd = None
    if os.path.exists("/usr/bin/dnf"):
        cmd = "/usr/bin/dnf reposync"
    else:
        # Bug fix: the logger object was being called directly
        # (logger(...)); use .error() like the sibling podman_cmd and
        # git_cmd helpers.
        logger.error('/usr/bin/dnf was not found. Good bye.')
        raise SystemExit("/usr/bin/dnf was not found. \n\n/usr/bin/reposync "
            "is not sufficient and you are likely running on an el7 "
            "system or a grossly modified EL8+ system, " + Color.BOLD +
            "which tells us that you probably made changes to these tools "
            "expecting them to work and got to this point." + Color.END)
    return cmd
@staticmethod
def git_cmd(logger) -> str:
"""
This generates the git command. This is when we need to pull down extra
files or do work from a git repository.
"""
cmd = None
if os.path.exists("/usr/bin/git"):
cmd = "/usr/bin/git"
else:
logger.error('/usr/bin/git was not found. Good bye.')
raise SystemExit("\n\n/usr/bin/git was not found.\n\nPlease "
" ensure that you have installed the necessary packages on "
" this system. "
)
return cmd
@staticmethod
def generate_conf(data, logger, dest_path='/var/tmp') -> str:
"""
Generates the necessary repo conf file for the operation. This repo
file should be temporary in nature. This will generate a repo file
with all repos by default. If a repo is chosen for sync, that will be
the only one synced.
:param dest_path: The destination where the temporary conf goes
:param repo: The repo object to create a file for
"""
fname = os.path.join(
dest_path,
"{}-{}-config.repo".format(data.shortname, data.major_version)
)
data.log.info('Generating the repo configuration: %s' % fname)
if data.repo_base_url.startswith("/"):
logger.error("Local file syncs are not supported.")
raise SystemExit(Color.BOLD + "Local file syncs are not "
"supported." + Color.END)
prehashed = ''
if data.hashed:
prehashed = "hashed-"
# create dest_path
if not os.path.exists(dest_path):
os.makedirs(dest_path, exist_ok=True)
config_file = open(fname, "w+")
repolist = []
for repo in data.repos:
constructed_url = '{}/{}/repo/{}{}/$basearch'.format(
data.repo_base_url,
data.project_id,
prehashed,
repo,
)
constructed_url_src = '{}/{}/repo/{}{}/src'.format(
data.repo_base_url,
data.project_id,
prehashed,
repo,
)
repodata = {
'name': repo,
'baseurl': constructed_url,
'srcbaseurl': constructed_url_src,
'gpgkey': data.extra_files['git_raw_path'] + data.extra_files['gpg'][data.gpgkey]
}
repolist.append(repodata)
template = data.tmplenv.get_template('repoconfig.tmpl')
output = template.render(repos=repolist)
config_file.write(output)
config_file.close()
return fname
@staticmethod
def quick_sync(src, dest, logger, tmp_dir):
"""
Does a quick sync from one place to another. This determines the method
in which will be used. We will look for fpsync and fall back to
parallel | rsync if that is also available. It will fail if parallel is
not available.
Return true or false on completion?
"""
@staticmethod
def simple_sync(src, dest):
"""
This is for simple syncs only, using rsync or copytree.
"""
@staticmethod
def fpsync_method(src, dest, logger, tmp_dir):
    """
    Syncs src to dest using fpsync with a fixed set of rsync switches.

    :param src: Source path handed to fpsync.
    :param dest: Destination path; checked for existence afterwards.
    :param logger: Logger used for warnings and failures.
    :param tmp_dir: Scratch directory for fpsync (created if missing).
    :return: True when fpsync succeeded and dest exists, False otherwise
             (fpsync missing, non-zero exit, or dest absent).
    """
    cmd = '/usr/bin/fpsync'
    rsync_switches = '-av --numeric-ids --no-compress --chown=10004:10005'
    if not os.path.exists(cmd):
        # Fix: Logger.warn is a deprecated alias; use warning()
        logger.warning(
                '[' + Color.BOLD + Color.YELLOW + 'WARN' + Color.END + '] ' +
                'fpsync not found'
        )
        return False

    os.makedirs(tmp_dir, exist_ok=True)

    fpsync_cmd = '{} -o "{}" -n 18 -t {} {} {}'.format(
            cmd,
            rsync_switches,
            tmp_dir,
            src,
            dest
    )

    process = subprocess.call(
            shlex.split(fpsync_cmd),
            stdout=subprocess.DEVNULL,
            stderr=subprocess.DEVNULL,
    )

    if process != 0:
        logger.error(
                '[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' +
                'fpsync failed'
        )
        return False

    if os.path.exists(dest):
        return True
    else:
        logger.error(
                '[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' +
                'Path synced does not seem to exist for some reason.'
        )
        return False
@staticmethod
def rsync_method(src, dest, logger, tmp_dir):
"""
Returns a string for the rsync command plus parallel. Yes, this is a
hack.
"""