toolkit/iso/empanadas/empanadas/util/dnf_utils.py

"""
Syncs yum repos for mirroring and composing.
Louis Abel <label AT rockylinux.org>
"""
import logging
import sys
import os
import os.path
import subprocess
import shlex
import shutil
import time
import re
import json
#import pipes
from jinja2 import Environment, FileSystemLoader
from empanadas.common import Color, _rootdir
#HAS_LIBREPO = True
#try:
# import librepo
#except:
# HAS_LIBREPO = False
class RepoSync:
"""
This helps us do reposync operations for the base system. SIG syncs are a
different class entirely. This is on purpose. Please use the SigRepoSync
class for SIG syncs.
"""
def __init__(
self,
rlvars,
config,
major,
repo=None,
arch=None,
ignore_debug: bool = False,
ignore_source: bool = False,
repoclosure: bool = False,
refresh_extra_files: bool = False,
skip_all: bool = False,
hashed: bool = False,
parallel: bool = False,
dryrun: bool = False,
fullrun: bool = False,
nofail: bool = False,
gpgkey: str = 'stable',
rlmode: str = 'stable',
logger=None
):
self.nofail = nofail
self.dryrun = dryrun
self.fullrun = fullrun
self.arch = arch
self.ignore_debug = ignore_debug
self.ignore_source = ignore_source
self.skip_all = skip_all
self.hashed = hashed
self.repoclosure = repoclosure
self.refresh_extra_files = refresh_extra_files
# Enables podman syncing, which should effectively speed up operations
self.parallel = parallel
# Relevant config items
self.major_version = major
self.date_stamp = config['date_stamp']
self.repo_base_url = config['repo_base_url']
self.compose_root = config['compose_root']
self.compose_base = config['compose_root'] + "/" + major
# Relevant major version items
self.shortname = config['shortname']
self.revision = rlvars['revision'] + "-" + rlvars['rclvl']
self.fullversion = rlvars['revision']
self.arches = rlvars['allowed_arches']
self.project_id = rlvars['project_id']
self.repo_renames = rlvars['renames']
self.repos = rlvars['all_repos']
self.multilib = rlvars['provide_multilib']
self.repo = repo
self.extra_files = rlvars['extra_files']
self.gpgkey = gpgkey
# Templates
file_loader = FileSystemLoader(f"{_rootdir}/templates")
self.tmplenv = Environment(loader=file_loader)
# each el can have its own designated container to run stuff in,
# otherwise we'll just default to the default config.
self.container = config['container']
if 'container' in rlvars and len(rlvars['container']) > 0:
self.container = rlvars['container']
if 'repoclosure_map' in rlvars and len(rlvars['repoclosure_map']) > 0:
self.repoclosure_map = rlvars['repoclosure_map']
self.staging_dir = os.path.join(
config['staging_root'],
config['category_stub'],
self.revision
)
self.compose_latest_dir = os.path.join(
config['compose_root'],
major,
"latest-Rocky-{}".format(major)
)
self.compose_latest_sync = os.path.join(
self.compose_latest_dir,
"compose"
)
self.compose_log_dir = os.path.join(
self.compose_latest_dir,
"work/logs"
)
self.compose_global_work_root = os.path.join(
self.compose_latest_dir,
"work/global"
)
# This is temporary for now.
if logger is None:
self.log = logging.getLogger("reposync")
self.log.setLevel(logging.INFO)
handler = logging.StreamHandler(sys.stdout)
handler.setLevel(logging.INFO)
formatter = logging.Formatter(
'%(asctime)s :: %(name)s :: %(message)s',
'%Y-%m-%d %H:%M:%S'
)
handler.setFormatter(formatter)
self.log.addHandler(handler)
self.log.info('reposync init')
self.log.info(self.revision)
self.dnf_config = self.generate_conf()
def run(self):
"""
This must be called to perform the sync. This will run through, create
the configuration file as required, and try to do a sync of every repo
applicable or the repo actually specified. If self.repo is None, it
will be assumed all repos are synced as dictated by rlvars.
* Dry runs only create initial directories and structure (not yet supported)
* Full runs sync everything from the top and setup structure,
including creating a symlink to latest-Rocky-X
* self.repo is ignored during full runs (noted in stdout)
* self.arch being set will force only that arch to sync
"""
if self.fullrun and self.repo:
self.log.error('WARNING: repo ignored when doing a full sync')
if self.fullrun and self.dryrun:
self.log.error('A full and dry run is currently not supported.')
raise SystemExit('\nA full and dry run is currently not supported.')
# This should create the initial compose dir and set the path.
# Otherwise, just use the latest link.
if self.fullrun:
generated_dir = self.generate_compose_dirs()
work_root = os.path.join(
generated_dir,
'work'
)
sync_root = os.path.join(
generated_dir,
'compose'
)
else:
# Put in a verification here.
work_root = os.path.join(
self.compose_latest_dir,
'work'
)
sync_root = self.compose_latest_sync
# Verify if the link even exists
if not os.path.exists(self.compose_latest_dir):
self.log.error('!! Latest compose link is broken or does not exist: %s' % self.compose_latest_dir)
self.log.error('!! Please perform a full run if you have not done so.')
raise SystemExit()
log_root = os.path.join(
work_root,
"logs",
self.date_stamp
)
global_work_root = os.path.join(
work_root,
"global",
)
if self.dryrun:
self.log.error('Dry Runs are not supported just yet. Sorry!')
raise SystemExit()
self.sync(self.repo, sync_root, work_root, log_root, global_work_root, self.arch)
if self.fullrun:
self.deploy_extra_files(global_work_root)
self.symlink_to_latest(generated_dir)
if self.repoclosure:
self.repoclosure_work(sync_root, work_root, log_root)
if self.refresh_extra_files:
self.deploy_extra_files(global_work_root)
self.log.info('Compose repo directory: %s' % sync_root)
self.log.info('Compose logs: %s' % log_root)
self.log.info('Compose completed.')
def sync(self, repo, sync_root, work_root, log_root, global_work_root, arch=None):
"""
Calls out syncing of the repos. We generally sync each component of a
repo:
* each architecture
* each architecture debug
* each source
If parallel is true, we will run in podman.
"""
if self.parallel:
self.podman_sync(repo, sync_root, work_root, log_root, global_work_root, arch)
else:
self.dnf_sync(repo, sync_root, work_root, arch)
def dnf_sync(self, repo, sync_root, work_root, arch):
"""
This is for normal dnf syncs. This is very slow.
"""
self.log.error('DNF syncing has been removed.')
self.log.error('Please install podman and enable parallel')
raise SystemExit()
def podman_sync(
self,
repo,
sync_root,
work_root,
log_root,
global_work_root,
arch
):
"""
This is for podman syncs
Create sync_root/work/entries
Generate scripts as needed into dir
Each container runs their own script
wait till all is finished
"""
cmd = self.podman_cmd()
bad_exit_list = []
self.log.info('Generating container entries')
entries_dir = os.path.join(work_root, "entries")
if not os.path.exists(entries_dir):
os.makedirs(entries_dir, exist_ok=True)
# yeah, I know.
if not os.path.exists(global_work_root):
os.makedirs(global_work_root, exist_ok=True)
if not os.path.exists(log_root):
os.makedirs(log_root, exist_ok=True)
sync_single_arch = False
arches_to_sync = self.arches
if arch:
sync_single_arch = True
arches_to_sync = [arch]
sync_single_repo = False
repos_to_sync = self.repos
if repo and not self.fullrun:
sync_single_repo = True
repos_to_sync = [repo]
for r in repos_to_sync:
entry_name_list = []
repo_name = r
arch_sync = arches_to_sync.copy()
if r in self.repo_renames:
repo_name = self.repo_renames[r]
# If we're syncing an "all" repo for x86_64 and multilib is enabled,
# i686 gets synced too.
if 'all' in r and 'x86_64' in arches_to_sync and self.multilib:
arch_sync.append('i686')
for a in arch_sync:
entry_name = '{}-{}'.format(r, a)
debug_entry_name = '{}-debug-{}'.format(r, a)
entry_name_list.append(entry_name)
if not self.ignore_debug:
entry_name_list.append(debug_entry_name)
entry_point_sh = os.path.join(
entries_dir,
entry_name
)
debug_entry_point_sh = os.path.join(
entries_dir,
debug_entry_name
)
os_sync_path = os.path.join(
sync_root,
repo_name,
a,
'os'
)
debug_sync_path = os.path.join(
sync_root,
repo_name,
a,
'debug/tree'
)
import_gpg_cmd = ("/usr/bin/rpm --import {}{}").format(
self.extra_files['git_raw_path'],
self.extra_files['gpg'][self.gpgkey]
)
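# The sed call below renders a per-arch copy of the generated dnf config
# (e.g. <config>.x86_64), replacing every $basearch literal so each
# container syncs exactly one architecture.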
arch_force_cp = ("/usr/bin/sed 's|$basearch|{}|g' {} > {}.{}".format(
a,
self.dnf_config,
self.dnf_config,
a
))
sync_log = ("{}/{}-{}.log").format(
log_root,
repo_name,
a
)
debug_sync_log = ("{}/{}-{}-debug.log").format(
log_root,
repo_name,
a
)
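# A note on the reposync flags used below: --download-metadata pulls the
# repodata alongside the packages, --norepopath keeps the repo id out of
# the destination path, --forcearch overrides the host architecture, and
# --gpgcheck verifies package signatures against the imported key.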
sync_cmd = ("/usr/bin/dnf reposync -c {}.{} --download-metadata "
"--repoid={} -p {} --forcearch {} --norepopath "
"--gpgcheck --assumeyes 2>&1").format(
self.dnf_config,
a,
r,
os_sync_path,
a
)
debug_sync_cmd = ("/usr/bin/dnf reposync -c {}.{} "
"--download-metadata --repoid={}-debug -p {} --forcearch {} "
"--gpgcheck --norepopath --assumeyes 2>&1").format(
self.dnf_config,
a,
r,
debug_sync_path,
a
)
dnf_plugin_cmd = "/usr/bin/dnf install dnf-plugins-core -y"
sync_template = self.tmplenv.get_template('reposync.tmpl')
sync_output = sync_template.render(
import_gpg_cmd=import_gpg_cmd,
arch_force_cp=arch_force_cp,
dnf_plugin_cmd=dnf_plugin_cmd,
sync_cmd=sync_cmd,
sync_log=sync_log
)
debug_sync_template = self.tmplenv.get_template('reposync.tmpl')
debug_sync_output = debug_sync_template.render(
import_gpg_cmd=import_gpg_cmd,
arch_force_cp=arch_force_cp,
dnf_plugin_cmd=dnf_plugin_cmd,
sync_cmd=debug_sync_cmd,
sync_log=debug_sync_log
)
entry_point_open = open(entry_point_sh, "w+")
debug_entry_point_open = open(debug_entry_point_sh, "w+")
entry_point_open.write(sync_output)
debug_entry_point_open.write(debug_sync_output)
entry_point_open.close()
debug_entry_point_open.close()
os.chmod(entry_point_sh, 0o755)
os.chmod(debug_entry_point_sh, 0o755)
# Are we ignoring sources?
if not self.ignore_source:
source_entry_name = '{}-source'.format(r)
entry_name_list.append(source_entry_name)
source_entry_point_sh = os.path.join(
entries_dir,
source_entry_name
)
source_sync_path = os.path.join(
sync_root,
repo_name,
'source/tree'
)
source_sync_log = ("{}/{}-source.log").format(
log_root,
repo_name
)
source_sync_cmd = ("/usr/bin/dnf reposync -c {} "
"--download-metadata --repoid={}-source -p {} "
"--gpgcheck --norepopath --assumeyes 2>&1").format(
self.dnf_config,
r,
source_sync_path
)
source_sync_template = self.tmplenv.get_template('reposync-src.tmpl')
source_sync_output = source_sync_template.render(
import_gpg_cmd=import_gpg_cmd,
dnf_plugin_cmd=dnf_plugin_cmd,
sync_cmd=source_sync_cmd,
sync_log=source_sync_log
)
source_entry_point_open = open(source_entry_point_sh, "w+")
source_entry_point_open.write(source_sync_output)
source_entry_point_open.close()
os.chmod(source_entry_point_sh, 0o755)
# Spawn up all podman processes for repo
self.log.info('Starting podman processes for %s ...' % r)
#print(entry_name_list)
for pod in entry_name_list:
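# Each container bind-mounts the compose root, the dnf config, and the
# entries dir at identical paths, then runs its generated entry script
# as the entrypoint.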
podman_cmd_entry = '{} run -d -it -v "{}:{}" -v "{}:{}" -v "{}:{}" --name {} --entrypoint {}/{} {}'.format(
cmd,
self.compose_root,
self.compose_root,
self.dnf_config,
self.dnf_config,
entries_dir,
entries_dir,
pod,
entries_dir,
pod,
self.container
)
#print(podman_cmd_entry)
process = subprocess.call(
shlex.split(podman_cmd_entry),
stdout=subprocess.DEVNULL,
stderr=subprocess.DEVNULL
)
join_all_pods = ' '.join(entry_name_list)
time.sleep(3)
self.log.info(
'[' + Color.BOLD + Color.GREEN + 'INFO' + Color.END + '] ' +
'Syncing ' + r + ' ...'
)
pod_watcher = '{} wait {}'.format(
cmd,
join_all_pods
)
#print(pod_watcher)
watch_man = subprocess.call(
shlex.split(pod_watcher),
stdout=subprocess.DEVNULL,
stderr=subprocess.DEVNULL
)
# After the above is done, we'll check each pod process for an exit
# code.
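# 'podman ps -f status=exited' prints a STATUS column along the lines of
# "Exited (0) 2 minutes ago", so a substring match on "Exited (0)" is
# enough to tell a clean exit from a failure.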
pattern = "Exited (0)"
for pod in entry_name_list:
checkcmd = '{} ps -f status=exited -f name={}'.format(
cmd,
pod
)
podcheck = subprocess.Popen(
checkcmd,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
shell=True
)
output, errors = podcheck.communicate()
if pattern not in output.decode():
self.log.error(
'[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' + pod
)
bad_exit_list.append(pod)
rmcmd = '{} rm {}'.format(
cmd,
join_all_pods
)
rmpod = subprocess.Popen(
rmcmd,
stdout=subprocess.DEVNULL,
stderr=subprocess.DEVNULL,
shell=True
)
entry_name_list.clear()
self.log.info(
'[' + Color.BOLD + Color.GREEN + 'INFO' + Color.END + '] ' +
'Syncing ' + r + ' completed'
)
if len(bad_exit_list) > 0:
self.log.error(
Color.BOLD + Color.RED + 'There were issues syncing these '
'repositories:' + Color.END
)
for issue in bad_exit_list:
self.log.error(issue)
else:
self.log.info(
'[' + Color.BOLD + Color.GREEN + ' OK ' + Color.END + '] '
'No issues detected.'
)
def generate_compose_dirs(self) -> str:
"""
Generate compose dirs for full runs
"""
compose_base_dir = os.path.join(
self.compose_base,
"Rocky-{}-{}".format(self.fullversion, self.date_stamp)
)
self.log.info('Creating compose directory %s' % compose_base_dir)
if not os.path.exists(compose_base_dir):
os.makedirs(compose_base_dir)
return compose_base_dir
def symlink_to_latest(self, generated_dir):
"""
Emulates pungi and symlinks latest-Rocky-X
This link will be what is updated in full runs. Whatever is in this
'latest' directory is what is rsynced on to staging after completion.
This link should not change often.
"""
try:
os.remove(self.compose_latest_dir)
except OSError:
pass
self.log.info('Symlinking to latest-{}-{}...'.format(self.shortname, self.major_version))
os.symlink(generated_dir, self.compose_latest_dir)
def generate_conf(self, dest_path='/var/tmp') -> str:
"""
Generates the necessary repo conf file for the operation. This repo
file should be temporary in nature. This will generate a repo file
with all repos by default. If a repo is chosen for sync, that will be
the only one synced.
:param dest_path: The destination where the temporary conf goes
"""
fname = os.path.join(
dest_path,
"{}-config.repo".format(self.major_version)
)
self.log.info('Generating the repo configuration: %s' % fname)
if self.repo_base_url.startswith("/"):
self.log.error("Local file syncs are not supported.")
raise SystemExit(Color.BOLD + "Local file syncs are not "
"supported." + Color.END)
prehashed = ''
if self.hashed:
prehashed = "hashed-"
# create dest_path
if not os.path.exists(dest_path):
os.makedirs(dest_path, exist_ok=True)
config_file = open(fname, "w+")
repolist = []
for repo in self.repos:
constructed_url = '{}/{}/repo/{}{}/$basearch'.format(
self.repo_base_url,
self.project_id,
prehashed,
repo,
)
constructed_url_src = '{}/{}/repo/{}{}/src'.format(
self.repo_base_url,
self.project_id,
prehashed,
repo,
)
repodata = {
'name': repo,
'baseurl': constructed_url,
'srcbaseurl': constructed_url_src,
'gpgkey': self.extra_files['git_raw_path'] + self.extra_files['gpg'][self.gpgkey]
}
repolist.append(repodata)
template = self.tmplenv.get_template('repoconfig.tmpl')
output = template.render(repos=repolist)
config_file.write(output)
config_file.close()
return fname
def reposync_cmd(self) -> str:
"""
This generates the reposync command. We don't support reposync by
itself and will raise an error.
:return: The path to the reposync command. If dnf exists, we'll use
that. Otherwise, fail immediately.
"""
cmd = None
if os.path.exists("/usr/bin/dnf"):
cmd = "/usr/bin/dnf reposync"
else:
self.log.error('/usr/bin/dnf was not found. Good bye.')
raise SystemExit("/usr/bin/dnf was not found. \n\n/usr/bin/reposync "
"is not sufficient and you are likely running on an el7 "
"system or a grossly modified EL8+ system, " + Color.BOLD +
"which tells us that you probably made changes to these tools "
"expecting them to work and got to this point." + Color.END)
return cmd
def podman_cmd(self) -> str:
"""
This generates the podman run command. This is in the case that we want
to do reposyncs in parallel as we cannot reasonably run multiple
instances of dnf reposync on a single system.
"""
cmd = None
if os.path.exists("/usr/bin/podman"):
cmd = "/usr/bin/podman"
else:
self.log.error('/usr/bin/podman was not found. Good bye.')
raise SystemExit("\n\n/usr/bin/podman was not found.\n\nPlease "
" ensure that you have installed the necessary packages on "
" this system. " + Color.BOLD + "Note that docker is not "
"supported." + Color.END
)
return cmd
def git_cmd(self) -> str:
"""
This generates the git command. This is when we need to pull down extra
files or do work from a git repository.
"""
cmd = None
if os.path.exists("/usr/bin/git"):
cmd = "/usr/bin/git"
else:
self.log.error('/usr/bin/git was not found. Good bye.')
raise SystemExit("\n\n/usr/bin/git was not found.\n\nPlease "
" ensure that you have installed the necessary packages on "
" this system. "
)
return cmd
def repoclosure_work(self, sync_root, work_root, log_root):
"""
This is where we run repoclosures, based on the configuration of each
EL version. Each major version should have a dictionary of lists that
point to which repos they'll be targeting. An empty list means the
repoclosure is run against that repo by itself, and nothing else. In
the case of 8, 9, and perhaps 10, BaseOS is the only repo that should
be checking against itself. (This means BaseOS should be able to
survive by itself.)
"""
cmd = self.podman_cmd()
entries_dir = os.path.join(work_root, "entries")
bad_exit_list = []
if not self.parallel:
self.log.error('Repoclosure is too slow to run one by one. Enable parallel mode.')
raise SystemExit()
self.log.info('Beginning repoclosure phase')
for repo in self.repoclosure_map['repos']:
if self.repo and repo not in self.repo:
continue
repoclosure_entry_name_list = []
self.log.info('Setting up repoclosure for {}'.format(repo))
for arch in self.repoclosure_map['arches']:
repo_combination = []
repoclosure_entry_name = 'repoclosure-{}-{}'.format(repo, arch)
repoclosure_entry_name_list.append(repoclosure_entry_name)
repoclosure_arch_list = self.repoclosure_map['arches'][arch]
# Some repos will have additional repos to close against - this
# helps append
if len(self.repoclosure_map['repos'][repo]) > 0:
for l in self.repoclosure_map['repos'][repo]:
stretch = '--repofrompath={},file://{}/{}/{}/os --repo={}'.format(
l,
sync_root,
l,
arch,
l
)
repo_combination.append(stretch)
join_repo_comb = ' '.join(repo_combination)
repoclosure_entry_point_sh = os.path.join(
entries_dir,
repoclosure_entry_name
)
repoclosure_cmd = ('/usr/bin/dnf repoclosure {} '
'--repofrompath={},file://{}/{}/{}/os --repo={} --check={} {} '
'| tee -a {}/{}-repoclosure-{}.log').format(
repoclosure_arch_list,
repo,
sync_root,
repo,
arch,
repo,
repo,
join_repo_comb,
log_root,
repo,
arch
)
repoclosure_entry_point_open = open(repoclosure_entry_point_sh, "w+")
repoclosure_entry_point_open.write('#!/bin/bash\n')
repoclosure_entry_point_open.write('set -o pipefail\n')
repoclosure_entry_point_open.write('/usr/bin/dnf install dnf-plugins-core -y\n')
repoclosure_entry_point_open.write('/usr/bin/dnf clean all\n')
repoclosure_entry_point_open.write(repoclosure_cmd + '\n')
repoclosure_entry_point_open.close()
os.chmod(repoclosure_entry_point_sh, 0o755)
repo_combination.clear()
self.log.info('Spawning pods for %s' % repo)
for pod in repoclosure_entry_name_list:
podman_cmd_entry = '{} run -d -it -v "{}:{}" -v "{}:{}" -v "{}:{}" --name {} --entrypoint {}/{} {}'.format(
cmd,
self.compose_root,
self.compose_root,
self.dnf_config,
self.dnf_config,
entries_dir,
entries_dir,
pod,
entries_dir,
pod,
self.container
)
#print(podman_cmd_entry)
process = subprocess.call(
shlex.split(podman_cmd_entry),
stdout=subprocess.DEVNULL,
stderr=subprocess.DEVNULL
)
join_all_pods = ' '.join(repoclosure_entry_name_list)
time.sleep(3)
self.log.info('Performing repoclosure on %s ... ' % repo)
pod_watcher = '{} wait {}'.format(
cmd,
join_all_pods
)
watch_man = subprocess.call(
shlex.split(pod_watcher),
stdout=subprocess.DEVNULL,
stderr=subprocess.DEVNULL
)
for pod in repoclosure_entry_name_list:
checkcmd = '{} ps -f status=exited -f name={}'.format(
cmd,
pod
)
podcheck = subprocess.Popen(
checkcmd,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
shell=True
)
output, errors = podcheck.communicate()
if 'Exited (0)' not in output.decode():
self.log.error(
'[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' + pod
)
bad_exit_list.append(pod)
rmcmd = '{} rm {}'.format(
cmd,
join_all_pods
)
rmpod = subprocess.Popen(
rmcmd,
stdout=subprocess.DEVNULL,
stderr=subprocess.DEVNULL,
shell=True
)
repoclosure_entry_name_list.clear()
self.log.info('Repoclosure on %s completed' % repo)
if len(bad_exit_list) > 0:
self.log.error(
Color.BOLD + Color.RED + 'There were issues closing these '
'repositories:' + Color.END
)
for issue in bad_exit_list:
self.log.error(issue)
def deploy_extra_files(self, global_work_root):
"""
Deploys extra files based on info from rlvars, including an
extra_files.json.
Might also deploy COMPOSE_ID and, in the future, a metadata dir with
a bunch of compose-esque stuff.
"""
cmd = self.git_cmd()
tmpclone = '/tmp/clone'
extra_files_dir = os.path.join(
global_work_root,
'extra-files'
)
self.log.info(
'[' + Color.BOLD + Color.GREEN + 'INFO' + Color.END + '] ' +
'Deploying extra files to work directory ...'
)
if not os.path.exists(extra_files_dir):
os.makedirs(extra_files_dir, exist_ok=True)
clonecmd = '{} clone {} -b {} -q {}'.format(
cmd,
self.extra_files['git_repo'],
self.extra_files['branch'],
tmpclone
)
git_clone = subprocess.call(
shlex.split(clonecmd),
stdout=subprocess.DEVNULL,
stderr=subprocess.DEVNULL
)
# Copy files
for extra in self.extra_files['list']:
src = '/tmp/clone/' + extra
try:
shutil.copy2(src, extra_files_dir)
except OSError:
self.log.warning(
'[' + Color.BOLD + Color.YELLOW + 'WARN' + Color.END + '] ' +
'Extra file not copied: ' + src
)
try:
shutil.rmtree(tmpclone)
except OSError as e:
self.log.error(
'[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' +
'Directory ' + tmpclone + ' could not be removed: ' +
e.strerror
)
# Create metadata here?
self.log.info(
'[' + Color.BOLD + Color.GREEN + 'INFO' + Color.END + '] ' +
'Extra files phase completed.'
)
class SigRepoSync:
"""
This helps us do reposync operations for SIGs. Do not use this for the
base system. Use RepoSync for that.
"""
def __init__(
self,
rlvars,
config,
sigvars,
major,
repo=None,
arch=None,
ignore_source: bool = False,
repoclosure: bool = False,
refresh_extra_files: bool = False,
skip_all: bool = False,
hashed: bool = False,
parallel: bool = False,
dryrun: bool = False,
fullrun: bool = False,
nofail: bool = False,
logger=None
):
self.nofail = nofail
self.dryrun = dryrun
self.fullrun = fullrun
self.arch = arch
self.ignore_source = ignore_source
self.skip_all = skip_all
self.hashed = hashed
self.repoclosure = repoclosure
self.refresh_extra_files = refresh_extra_files
# Enables podman syncing, which should effectively speed up operations
self.parallel = parallel
# Relevant config items
self.major_version = major
self.date_stamp = config['date_stamp']
self.repo_base_url = config['repo_base_url']
self.compose_root = config['compose_root']
self.compose_base = config['compose_root'] + "/" + major
# Relevant major version items
self.sigvars = sigvars
self.sigrepos = sigvars.keys()
#self.arches = sigvars['allowed_arches']
#self.project_id = sigvars['project_id']
self.sigrepo = repo
# each el can have its own designated container to run stuff in,
# otherwise we'll just default to the default config.
self.container = config['container']
if 'container' in rlvars and len(rlvars['container']) > 0:
self.container = rlvars['container']
if 'repoclosure_map' in rlvars and len(rlvars['repoclosure_map']) > 0:
self.repoclosure_map = rlvars['repoclosure_map']
self.staging_dir = os.path.join(
config['staging_root'],
config['sig_category_stub'],
major
)
self.compose_latest_dir = os.path.join(
config['compose_root'],
major,
"latest-Rocky-{}-SIG".format(major)
)
self.compose_latest_sync = os.path.join(
self.compose_latest_dir,
"compose"
)
self.compose_log_dir = os.path.join(
self.compose_latest_dir,
"work/logs"
)
self.compose_global_work_root = os.path.join(
self.compose_latest_dir,
"work/global"
)
# This is temporary for now.
if logger is None:
self.log = logging.getLogger("sigreposync")
self.log.setLevel(logging.INFO)
handler = logging.StreamHandler(sys.stdout)
handler.setLevel(logging.INFO)
formatter = logging.Formatter(
'%(asctime)s :: %(name)s :: %(message)s',
'%Y-%m-%d %H:%M:%S'
)
handler.setFormatter(formatter)
self.log.addHandler(handler)
self.log.info('sig reposync init')
self.log.info(major)
#self.dnf_config = self.generate_conf()
def run(self):
"""
This runs the sig sync.
"""
pass