2022-05-22 06:20:15 +00:00
|
|
|
"""
|
|
|
|
Syncs yum repos for mirroring and composing.
|
|
|
|
|
|
|
|
Louis Abel <label AT rockylinux.org>
|
|
|
|
"""
|
|
|
|
#import shutil
|
|
|
|
import logging
|
|
|
|
import sys
|
2022-05-21 07:10:37 +00:00
|
|
|
import os
|
|
|
|
import os.path
|
2022-05-23 07:23:53 +00:00
|
|
|
import subprocess
|
|
|
|
import shlex
|
2022-06-22 06:03:08 +00:00
|
|
|
import shutil
|
2022-05-25 03:35:58 +00:00
|
|
|
import time
|
2022-05-25 07:08:08 +00:00
|
|
|
import re
|
2022-06-15 20:53:12 +00:00
|
|
|
import json
|
2022-07-04 19:23:04 +00:00
|
|
|
import glob
|
2022-05-22 06:20:15 +00:00
|
|
|
#import pipes
|
2022-06-17 03:31:33 +00:00
|
|
|
|
2022-06-11 17:49:37 +00:00
|
|
|
from jinja2 import Environment, FileSystemLoader
|
2022-05-21 07:10:37 +00:00
|
|
|
|
2022-07-04 18:31:24 +00:00
|
|
|
import empanadas
|
2022-06-17 19:54:11 +00:00
|
|
|
from empanadas.common import Color, _rootdir
|
2022-06-28 00:59:21 +00:00
|
|
|
from empanadas.util import Shared
|
2022-06-17 03:31:33 +00:00
|
|
|
|
2022-06-26 19:24:56 +00:00
|
|
|
# initial treeinfo data is made here
|
|
|
|
import productmd.treeinfo
|
|
|
|
|
2022-05-22 06:20:15 +00:00
|
|
|
#HAS_LIBREPO = True
|
|
|
|
#try:
|
|
|
|
# import librepo
|
|
|
|
#except:
|
|
|
|
# HAS_LIBREPO = False
|
2022-05-21 07:10:37 +00:00
|
|
|
|
|
|
|
class RepoSync:
|
|
|
|
"""
|
|
|
|
This helps us do reposync operations for the base system. SIG syncs are a
|
|
|
|
different class entirely. This is on purpose. Please use the SigRepoSync
|
|
|
|
class for SIG syncs.
|
|
|
|
"""
|
2022-05-22 06:20:15 +00:00
|
|
|
    def __init__(
            self,
            rlvars,
            config,
            major,
            repo=None,
            arch=None,
            ignore_debug: bool = False,
            ignore_source: bool = False,
            repoclosure: bool = False,
            refresh_extra_files: bool = False,
            refresh_treeinfo: bool = False,
            skip_all: bool = False,
            hashed: bool = False,
            parallel: bool = False,
            dryrun: bool = False,
            fullrun: bool = False,
            nofail: bool = False,
            gpgkey: str = 'stable',
            rlmode: str = 'stable',
            logger=None
    ):
        """
        Set up a RepoSync operation.

        :param rlvars: per-release configuration mapping (revision, arches,
               repos, extra files, ...) — presumably loaded from the
               distribution config; keys used below document the expected
               schema.
        :param config: global configuration mapping (compose roots, URLs,
               date stamp, ...).
        :param major: major version string; used to build compose paths.
        :param repo: optional single repository to sync; validated against
               rlvars['all_repos'] at the end of __init__.
        :param arch: optional single architecture to sync.
        :param gpgkey: key name looked up in rlvars['extra_files']['gpg'].
        :param rlmode: accepted but not stored — TODO confirm whether this
               is still needed.
        :param logger: optional pre-configured logger; if None a stdout
               logger named "reposync" is created.
        :raises SystemExit: if *repo* is given but not a known repository.
        """
        # Behavior toggles, stored verbatim from the keyword arguments.
        self.nofail = nofail
        self.dryrun = dryrun
        self.fullrun = fullrun
        self.arch = arch
        self.ignore_debug = ignore_debug
        self.ignore_source = ignore_source
        self.skip_all = skip_all
        self.hashed = hashed
        self.repoclosure = repoclosure
        self.refresh_extra_files = refresh_extra_files
        self.refresh_treeinfo = refresh_treeinfo
        # Enables podman syncing, which should effectively speed up operations
        self.parallel = parallel

        # Relevant config items
        self.major_version = major
        self.date_stamp = config['date_stamp']
        self.timestamp = time.time()
        self.repo_base_url = config['repo_base_url']
        self.compose_root = config['compose_root']
        self.compose_base = config['compose_root'] + "/" + major
        self.profile = rlvars['profile']
        self.iso_map = rlvars['iso_map']
        self.distname = config['distname']
        self.fullname = rlvars['fullname']
        self.shortname = config['shortname']

        # Relevant major version items
        # NOTE(review): duplicate of the assignment just above — harmless
        # but redundant.
        self.shortname = config['shortname']
        self.revision = rlvars['revision'] + "-" + rlvars['rclvl']
        self.fullversion = rlvars['revision']
        self.arches = rlvars['allowed_arches']
        self.project_id = rlvars['project_id']
        self.repo_renames = rlvars['renames']
        self.repos = rlvars['all_repos']
        self.multilib = rlvars['provide_multilib']
        self.repo = repo
        self.extra_files = rlvars['extra_files']
        self.gpgkey = gpgkey
        self.checksum = rlvars['checksum']

        # Identifier of this compose, e.g. "<shortname>-<revision>-<date>".
        self.compose_id = '{}-{}-{}'.format(
                config['shortname'],
                rlvars['revision'],
                config['date_stamp']
        )

        # Templates
        file_loader = FileSystemLoader(f"{_rootdir}/templates")
        self.tmplenv = Environment(loader=file_loader)

        # each el can have its own designated container to run stuff in,
        # otherwise we'll just default to the default config.
        self.container = config['container']
        if 'container' in rlvars and len(rlvars['container']) > 0:
            self.container = rlvars['container']

        # Only set when the release declares a repoclosure map; code that
        # reads self.repoclosure_map assumes it exists — TODO confirm every
        # release config defines it when repoclosure is requested.
        if 'repoclosure_map' in rlvars and len(rlvars['repoclosure_map']) > 0:
            self.repoclosure_map = rlvars['repoclosure_map']

        self.staging_dir = os.path.join(
                    config['staging_root'],
                    config['category_stub'],
                    self.revision
        )

        # "latest-<shortname>-<profile>" symlink target managed by
        # symlink_to_latest() during full runs.
        self.compose_latest_dir = os.path.join(
                config['compose_root'],
                major,
                "latest-{}-{}".format(
                    self.shortname,
                    self.profile
                )
        )

        self.compose_latest_sync = os.path.join(
                self.compose_latest_dir,
                "compose"
        )

        self.compose_log_dir = os.path.join(
                self.compose_latest_dir,
                "work/logs"
        )

        self.compose_global_work_root = os.path.join(
                self.compose_latest_dir,
                "work/global"
        )

        # This is temporary for now.
        if logger is None:
            self.log = logging.getLogger("reposync")
            self.log.setLevel(logging.INFO)
            handler = logging.StreamHandler(sys.stdout)
            handler.setLevel(logging.INFO)
            formatter = logging.Formatter(
                    '%(asctime)s :: %(name)s :: %(message)s',
                    '%Y-%m-%d %H:%M:%S'
            )
            handler.setFormatter(formatter)
            self.log.addHandler(handler)

        self.log.info('reposync init')
        self.log.info(self.revision)

        # The repo name should be valid
        if self.repo is not None:
            if self.repo not in self.repos:
                self.log.error(Color.FAIL + 'Invalid repository: ' + self.repo)
                raise SystemExit()
|
2022-05-22 06:20:15 +00:00
|
|
|
|
2022-05-21 07:10:37 +00:00
|
|
|
    def run(self):
        """
        This must be called to perform the sync. This will run through, create
        the configuration file as required, and try to do a sync of every repo
        applicable or the repo actually specified. If self.repo is None, it
        will be assumed all repos are synced as dictated by rlvars.

        * Dry runs only create initial directories and structure
        * Full runs sync everything from the top and setup structure,
          including creating a symlink to latest-Rocky-X and creating the
          kickstart directories
        * self.repo is ignored during full runs (noted in stdout)
        * self.arch being set will force only that arch to sync

        :raises SystemExit: on full+dry run combination, broken latest link,
                or a dry run (currently unsupported).
        """
        # A full run syncs every repo; warn-and-continue if a single repo
        # was also requested, but a full+dry combination is a hard error.
        if self.fullrun and self.repo:
            self.log.error('WARNING: repo ignored when doing a full sync')
        if self.fullrun and self.dryrun:
            self.log.error('A full and dry run is currently not supported.')
            raise SystemExit('\nA full and dry run is currently not supported.')

        # This should create the initial compose dir and set the path.
        # Otherwise, just use the latest link.
        if self.fullrun:
            generated_dir = Shared.generate_compose_dirs(
                    self.compose_base,
                    self.shortname,
                    self.fullversion,
                    self.date_stamp,
                    self.log
            )
            work_root = os.path.join(
                    generated_dir,
                    'work'
            )
            sync_root = os.path.join(
                    generated_dir,
                    'compose'
            )
        else:
            # Put in a verification here.
            work_root = os.path.join(
                    self.compose_latest_dir,
                    'work'
            )
            sync_root = self.compose_latest_sync

            # Verify if the link even exists
            if not os.path.exists(self.compose_latest_dir):
                self.log.error('!! Latest compose link is broken does not exist: %s' % self.compose_latest_dir)
                self.log.error('!! Please perform a full run if you have not done so.')
                raise SystemExit()

        # Per-run log directory, keyed by the compose date stamp.
        log_root = os.path.join(
                work_root,
                "logs",
                self.date_stamp
        )

        global_work_root = os.path.join(
                work_root,
                "global",
        )

        #self.dnf_config = self.generate_conf(dest_path=global_work_root)
        self.dnf_config = self.generate_conf()

        if self.dryrun:
            self.log.error('Dry Runs are not supported just yet. Sorry!')
            raise SystemExit()

        # NOTE(review): log.warn is a deprecated alias for log.warning.
        if self.fullrun and self.refresh_extra_files:
            self.log.warn(Color.WARN + 'A full run implies extra files are also deployed.')

        self.sync(self.repo, sync_root, work_root, log_root, global_work_root, self.arch)

        # Full runs deploy everything and finally flip the "latest" symlink.
        if self.fullrun:
            self.deploy_extra_files(sync_root, global_work_root)
            self.deploy_treeinfo(self.repo, sync_root, self.arch)
            self.tweak_treeinfo(self.repo, sync_root, self.arch)
            self.symlink_to_latest(generated_dir)

        if self.repoclosure:
            self.repoclosure_work(sync_root, work_root, log_root)

        if self.refresh_extra_files and not self.fullrun:
            self.deploy_extra_files(sync_root, global_work_root)

        # deploy_treeinfo does NOT overwrite any treeinfo files. However,
        # tweak_treeinfo calls out to a method that does. This should not
        # cause issues as the method is fairly static in nature.
        if self.refresh_treeinfo and not self.fullrun:
            self.deploy_treeinfo(self.repo, sync_root, self.arch)
            self.tweak_treeinfo(self.repo, sync_root, self.arch)

        self.deploy_metadata(sync_root)

        self.log.info('Compose repo directory: %s' % sync_root)
        self.log.info('Compose logs: %s' % log_root)
        self.log.info('Compose completed.')
|
|
|
|
|
2022-06-22 06:51:00 +00:00
|
|
|
def sync(self, repo, sync_root, work_root, log_root, global_work_root, arch=None):
|
2022-05-22 06:20:15 +00:00
|
|
|
"""
|
2022-05-23 07:23:53 +00:00
|
|
|
Calls out syncing of the repos. We generally sync each component of a
|
|
|
|
repo:
|
2022-05-22 06:20:15 +00:00
|
|
|
* each architecture
|
|
|
|
* each architecture debug
|
|
|
|
* each source
|
2022-05-23 07:23:53 +00:00
|
|
|
|
|
|
|
If parallel is true, we will run in podman.
|
|
|
|
"""
|
|
|
|
if self.parallel:
|
2022-06-22 06:51:00 +00:00
|
|
|
self.podman_sync(repo, sync_root, work_root, log_root, global_work_root, arch)
|
2022-05-23 07:23:53 +00:00
|
|
|
else:
|
2022-05-24 07:16:02 +00:00
|
|
|
self.dnf_sync(repo, sync_root, work_root, arch)
|
2022-05-23 07:23:53 +00:00
|
|
|
|
2022-05-24 07:16:02 +00:00
|
|
|
def dnf_sync(self, repo, sync_root, work_root, arch):
|
2022-05-23 07:23:53 +00:00
|
|
|
"""
|
2022-05-24 07:16:02 +00:00
|
|
|
This is for normal dnf syncs. This is very slow.
|
2022-05-22 06:20:15 +00:00
|
|
|
"""
|
2022-06-10 23:05:44 +00:00
|
|
|
self.log.error('DNF syncing has been removed.')
|
|
|
|
self.log.error('Please install podman and enable parallel')
|
|
|
|
raise SystemExit()
|
2022-05-23 07:23:53 +00:00
|
|
|
|
2022-06-22 06:51:00 +00:00
|
|
|
    def podman_sync(
            self,
            repo,
            sync_root,
            work_root,
            log_root,
            global_work_root,
            arch
    ):
        """
        This is for podman syncs

        Create sync_root/work/entries
        Generate scripts as needed into dir
        Each container runs their own script
        wait till all is finished

        :param repo: single repo to sync, or falsy to sync all known repos.
        :param sync_root: root of the compose tree packages land in.
        :param work_root: work directory; entry scripts go in work/entries.
        :param log_root: directory receiving the per-repo/arch dnf logs.
        :param global_work_root: shared work dir, created here if missing.
        :param arch: single architecture to sync, or falsy for all arches.
        """
        cmd = Shared.podman_cmd(self.log)
        # NOTE(review): contrunlist is never appended to in this method.
        contrunlist = []
        # Names of containers that did not exit cleanly, reported at the end.
        bad_exit_list = []
        self.log.info('Generating container entries')
        entries_dir = os.path.join(work_root, "entries")
        if not os.path.exists(entries_dir):
            os.makedirs(entries_dir, exist_ok=True)

        # yeah, I know.
        if not os.path.exists(global_work_root):
            os.makedirs(global_work_root, exist_ok=True)

        if not os.path.exists(log_root):
            os.makedirs(log_root, exist_ok=True)

        # Narrow the arch/repo lists when a single one was requested.
        # (self.fullrun always syncs every repo regardless of `repo`.)
        sync_single_arch = False
        arches_to_sync = self.arches
        if arch:
            sync_single_arch = True
            arches_to_sync = [arch]

        sync_single_repo = False
        repos_to_sync = self.repos
        if repo and not self.fullrun:
            sync_single_repo = True
            repos_to_sync = [repo]

        for r in repos_to_sync:
            # Container names spawned for this repo; cleared after each repo.
            entry_name_list = []
            repo_name = r
            # Copy so the i686 multilib append below can't leak across repos.
            arch_sync = arches_to_sync.copy()

            if r in self.repo_renames:
                repo_name = self.repo_renames[r]

            if 'all' in r and 'x86_64' in arches_to_sync and self.multilib:
                arch_sync.append('i686')

            # There should be a check here that if it's "all" and multilib
            # is on, i686 should get synced too.

            for a in arch_sync:
                entry_name = '{}-{}'.format(r, a)
                debug_entry_name = '{}-debug-{}'.format(r, a)

                entry_name_list.append(entry_name)

                # Debug repos are skipped when ignored or for source "arch".
                if not self.ignore_debug and not a == 'source':
                    entry_name_list.append(debug_entry_name)

                # Entry-point shell scripts, one per container.
                entry_point_sh = os.path.join(
                        entries_dir,
                        entry_name
                )

                debug_entry_point_sh = os.path.join(
                        entries_dir,
                        debug_entry_name
                )

                os_sync_path = os.path.join(
                        sync_root,
                        repo_name,
                        a,
                        'os'
                )

                debug_sync_path = os.path.join(
                        sync_root,
                        repo_name,
                        a,
                        'debug/tree'
                )

                # Import the release GPG key inside the container so
                # --gpgcheck can verify packages.
                import_gpg_cmd = ("/usr/bin/rpm --import {}{}").format(
                        self.extra_files['git_raw_path'],
                        self.extra_files['gpg'][self.gpgkey]
                )

                # Produce a per-arch copy of the dnf config with $basearch
                # substituted (dnf reposync is pointed at "<config>.<arch>").
                arch_force_cp = ("/usr/bin/sed 's|$basearch|{}|g' {} > {}.{}".format(
                        a,
                        self.dnf_config,
                        self.dnf_config,
                        a
                ))

                sync_log = ("{}/{}-{}.log").format(
                        log_root,
                        repo_name,
                        a
                )

                debug_sync_log = ("{}/{}-{}-debug.log").format(
                        log_root,
                        repo_name,
                        a
                )

                sync_cmd = ("/usr/bin/dnf reposync -c {}.{} --download-metadata "
                        "--repoid={} -p {} --forcearch {} --norepopath "
                        "--gpgcheck --assumeyes 2>&1").format(
                        self.dnf_config,
                        a,
                        r,
                        os_sync_path,
                        a
                )

                debug_sync_cmd = ("/usr/bin/dnf reposync -c {}.{} "
                        "--download-metadata --repoid={}-debug -p {} --forcearch {} "
                        "--gpgcheck --norepopath --assumeyes 2>&1").format(
                        self.dnf_config,
                        a,
                        r,
                        debug_sync_path,
                        a
                )

                dnf_plugin_cmd = "/usr/bin/dnf install dnf-plugins-core -y"

                # Render the entry-point scripts from the shared template.
                sync_template = self.tmplenv.get_template('reposync.tmpl')
                sync_output = sync_template.render(
                        import_gpg_cmd=import_gpg_cmd,
                        arch_force_cp=arch_force_cp,
                        dnf_plugin_cmd=dnf_plugin_cmd,
                        sync_cmd=sync_cmd,
                        sync_log=sync_log,
                        download_path=os_sync_path
                )

                debug_sync_template = self.tmplenv.get_template('reposync.tmpl')
                debug_sync_output = debug_sync_template.render(
                        import_gpg_cmd=import_gpg_cmd,
                        arch_force_cp=arch_force_cp,
                        dnf_plugin_cmd=dnf_plugin_cmd,
                        sync_cmd=debug_sync_cmd,
                        sync_log=debug_sync_log,
                        download_path=debug_sync_path
                )

                entry_point_open = open(entry_point_sh, "w+")
                debug_entry_point_open = open(debug_entry_point_sh, "w+")

                entry_point_open.write(sync_output)
                debug_entry_point_open.write(debug_sync_output)

                entry_point_open.close()
                debug_entry_point_open.close()

                # Scripts must be executable: they are container entrypoints.
                os.chmod(entry_point_sh, 0o755)
                os.chmod(debug_entry_point_sh, 0o755)

                # During fullruns, a kickstart directory is made. Kickstart
                # should not be updated nor touched during regular runs under
                # any circumstances.
                if self.fullrun:
                    ks_entry_name = '{}-ks-{}'.format(r, a)
                    entry_name_list.append(ks_entry_name)
                    ks_point_sh = os.path.join(
                            entries_dir,
                            ks_entry_name
                    )

                    ks_sync_path = os.path.join(
                            sync_root,
                            repo_name,
                            a,
                            'kickstart'
                    )

                    ks_sync_cmd = ("/usr/bin/dnf reposync -c {}.{} --download-metadata "
                            "--repoid={} -p {} --forcearch {} --norepopath "
                            "--gpgcheck --assumeyes 2>&1").format(
                            self.dnf_config,
                            a,
                            r,
                            ks_sync_path,
                            a
                    )

                    ks_sync_log = ("{}/{}-{}-ks.log").format(
                            log_root,
                            repo_name,
                            a
                    )

                    ks_sync_template = self.tmplenv.get_template('reposync.tmpl')
                    ks_sync_output = ks_sync_template.render(
                            import_gpg_cmd=import_gpg_cmd,
                            arch_force_cp=arch_force_cp,
                            dnf_plugin_cmd=dnf_plugin_cmd,
                            sync_cmd=ks_sync_cmd,
                            sync_log=ks_sync_log
                    )
                    ks_entry_point_open = open(ks_point_sh, "w+")
                    ks_entry_point_open.write(ks_sync_output)
                    ks_entry_point_open.close()
                    os.chmod(ks_point_sh, 0o755)

            # We ignoring sources?
            # Sources sync once per repo (not per arch): either no arch was
            # pinned, or the pinned "arch" is literally 'source'.
            if (not self.ignore_source and not arch) or (
                    not self.ignore_source and arch == 'source'):
                source_entry_name = '{}-source'.format(r)
                entry_name_list.append(source_entry_name)

                source_entry_point_sh = os.path.join(
                        entries_dir,
                        source_entry_name
                )

                source_sync_path = os.path.join(
                        sync_root,
                        repo_name,
                        'source/tree'
                )

                source_sync_log = ("{}/{}-source.log").format(
                        log_root,
                        repo_name
                )

                # No per-arch config here: sources use the base dnf config.
                source_sync_cmd = ("/usr/bin/dnf reposync -c {} "
                        "--download-metadata --repoid={}-source -p {} "
                        "--gpgcheck --norepopath --assumeyes 2>&1").format(
                        self.dnf_config,
                        r,
                        source_sync_path
                )

                source_sync_template = self.tmplenv.get_template('reposync-src.tmpl')
                source_sync_output = source_sync_template.render(
                        import_gpg_cmd=import_gpg_cmd,
                        dnf_plugin_cmd=dnf_plugin_cmd,
                        sync_cmd=source_sync_cmd,
                        sync_log=source_sync_log
                )

                source_entry_point_open = open(source_entry_point_sh, "w+")
                source_entry_point_open.write(source_sync_output)
                source_entry_point_open.close()
                os.chmod(source_entry_point_sh, 0o755)

            # Spawn up all podman processes for repo
            self.log.info('Starting podman processes for %s ...' % r)

            #print(entry_name_list)
            for pod in entry_name_list:
                # Detached container per entry script; compose root, dnf
                # config, and entries dir are bind-mounted at the same paths.
                podman_cmd_entry = '{} run -d -it -v "{}:{}" -v "{}:{}:z" -v "{}:{}" --name {} --entrypoint {}/{} {}'.format(
                        cmd,
                        self.compose_root,
                        self.compose_root,
                        self.dnf_config,
                        self.dnf_config,
                        entries_dir,
                        entries_dir,
                        pod,
                        entries_dir,
                        pod,
                        self.container
                )
                #print(podman_cmd_entry)
                process = subprocess.call(
                        shlex.split(podman_cmd_entry),
                        stdout=subprocess.DEVNULL,
                        stderr=subprocess.DEVNULL
                )

            join_all_pods = ' '.join(entry_name_list)
            time.sleep(3)
            self.log.info(Color.INFO + 'Syncing ' + r + ' ...')
            # "podman wait" blocks until every named container exits.
            pod_watcher = '{} wait {}'.format(
                    cmd,
                    join_all_pods
            )

            #print(pod_watcher)
            watch_man = subprocess.call(
                    shlex.split(pod_watcher),
                    stdout=subprocess.DEVNULL,
                    stderr=subprocess.DEVNULL
            )

            # After the above is done, we'll check each pod process for an exit
            # code.
            # NOTE(review): `pattern` is unused; the literal below is used
            # directly in the membership test.
            pattern = "Exited (0)"
            for pod in entry_name_list:
                checkcmd = '{} ps -f status=exited -f name={}'.format(
                        cmd,
                        pod
                )
                podcheck = subprocess.Popen(
                        checkcmd,
                        stdout=subprocess.PIPE,
                        stderr=subprocess.PIPE,
                        shell=True
                )

                output, errors = podcheck.communicate()
                if 'Exited (0)' not in output.decode():
                    self.log.error(Color.FAIL + pod)
                    bad_exit_list.append(pod)

            # Remove the (now exited) containers for this repo.
            rmcmd = '{} rm {}'.format(
                    cmd,
                    join_all_pods
            )

            rmpod = subprocess.Popen(
                    rmcmd,
                    stdout=subprocess.DEVNULL,
                    stderr=subprocess.DEVNULL,
                    shell=True
            )

            entry_name_list.clear()
            self.log.info(Color.INFO + 'Syncing ' + r + ' completed')

        # Summarize: any container that did not exit 0 is reported here.
        if len(bad_exit_list) > 0:
            self.log.error(
                    Color.BOLD + Color.RED + 'There were issues syncing these '
                    'repositories:' + Color.END
            )
            for issue in bad_exit_list:
                self.log.error(issue)
        else:
            self.log.info(
                    '[' + Color.BOLD + Color.GREEN + ' OK ' + Color.END + '] '
                    'No issues detected.'
            )
|
2022-05-25 07:08:08 +00:00
|
|
|
|
2022-06-16 20:18:18 +00:00
|
|
|
def symlink_to_latest(self, generated_dir):
|
2022-05-22 06:20:15 +00:00
|
|
|
"""
|
|
|
|
Emulates pungi and symlinks latest-Rocky-X
|
|
|
|
|
|
|
|
This link will be what is updated in full runs. Whatever is in this
|
|
|
|
'latest' directory is what is rsynced on to staging after completion.
|
|
|
|
This link should not change often.
|
|
|
|
"""
|
2022-06-16 20:18:18 +00:00
|
|
|
try:
|
|
|
|
os.remove(self.compose_latest_dir)
|
|
|
|
except:
|
|
|
|
pass
|
|
|
|
|
|
|
|
self.log.info('Symlinking to latest-{}-{}...'.format(self.shortname, self.major_version))
|
|
|
|
os.symlink(generated_dir, self.compose_latest_dir)
|
2022-05-22 06:20:15 +00:00
|
|
|
|
2022-05-23 07:23:53 +00:00
|
|
|
def generate_conf(self, dest_path='/var/tmp') -> str:
|
2022-05-21 07:10:37 +00:00
|
|
|
"""
|
|
|
|
Generates the necessary repo conf file for the operation. This repo
|
|
|
|
file should be temporary in nature. This will generate a repo file
|
|
|
|
with all repos by default. If a repo is chosen for sync, that will be
|
|
|
|
the only one synced.
|
|
|
|
|
|
|
|
:param dest_path: The destination where the temporary conf goes
|
|
|
|
:param repo: The repo object to create a file for
|
|
|
|
"""
|
2022-05-22 06:20:15 +00:00
|
|
|
fname = os.path.join(
|
|
|
|
dest_path,
|
2022-07-07 06:56:19 +00:00
|
|
|
"{}-{}-config.repo".format(self.shortname, self.major_version)
|
2022-05-22 06:20:15 +00:00
|
|
|
)
|
2022-07-13 01:30:59 +00:00
|
|
|
pname = os.path.join(
|
|
|
|
'/var/tmp',
|
|
|
|
"{}-{}-config.repo".format(self.shortname, self.major_version)
|
|
|
|
)
|
2022-05-22 06:20:15 +00:00
|
|
|
self.log.info('Generating the repo configuration: %s' % fname)
|
|
|
|
|
|
|
|
if self.repo_base_url.startswith("/"):
|
|
|
|
self.log.error("Local file syncs are not supported.")
|
|
|
|
raise SystemExit(Color.BOLD + "Local file syncs are not "
|
|
|
|
"supported." + Color.END)
|
|
|
|
|
2022-06-10 23:05:44 +00:00
|
|
|
prehashed = ''
|
|
|
|
if self.hashed:
|
|
|
|
prehashed = "hashed-"
|
2022-05-22 06:20:15 +00:00
|
|
|
# create dest_path
|
|
|
|
if not os.path.exists(dest_path):
|
|
|
|
os.makedirs(dest_path, exist_ok=True)
|
|
|
|
config_file = open(fname, "w+")
|
2022-06-11 17:49:37 +00:00
|
|
|
repolist = []
|
2022-05-22 06:20:15 +00:00
|
|
|
for repo in self.repos:
|
2022-06-10 23:05:44 +00:00
|
|
|
constructed_url = '{}/{}/repo/{}{}/$basearch'.format(
|
2022-05-22 06:20:15 +00:00
|
|
|
self.repo_base_url,
|
|
|
|
self.project_id,
|
2022-06-10 23:05:44 +00:00
|
|
|
prehashed,
|
2022-05-22 06:20:15 +00:00
|
|
|
repo,
|
|
|
|
)
|
|
|
|
|
2022-06-10 23:05:44 +00:00
|
|
|
constructed_url_src = '{}/{}/repo/{}{}/src'.format(
|
2022-05-22 06:20:15 +00:00
|
|
|
self.repo_base_url,
|
|
|
|
self.project_id,
|
2022-06-10 23:05:44 +00:00
|
|
|
prehashed,
|
2022-05-22 06:20:15 +00:00
|
|
|
repo,
|
|
|
|
)
|
|
|
|
|
2022-06-11 17:49:37 +00:00
|
|
|
repodata = {
|
|
|
|
'name': repo,
|
|
|
|
'baseurl': constructed_url,
|
2022-06-23 20:12:53 +00:00
|
|
|
'srcbaseurl': constructed_url_src,
|
|
|
|
'gpgkey': self.extra_files['git_raw_path'] + self.extra_files['gpg'][self.gpgkey]
|
2022-06-11 17:49:37 +00:00
|
|
|
}
|
|
|
|
repolist.append(repodata)
|
2022-05-22 06:20:15 +00:00
|
|
|
|
2022-06-11 17:49:37 +00:00
|
|
|
template = self.tmplenv.get_template('repoconfig.tmpl')
|
|
|
|
output = template.render(repos=repolist)
|
|
|
|
config_file.write(output)
|
2022-05-24 01:12:11 +00:00
|
|
|
|
|
|
|
config_file.close()
|
2022-07-13 01:30:59 +00:00
|
|
|
#return (fname, pname)
|
2022-05-23 07:23:53 +00:00
|
|
|
return fname
|
2022-05-21 07:10:37 +00:00
|
|
|
|
2022-05-26 04:39:26 +00:00
|
|
|
def repoclosure_work(self, sync_root, work_root, log_root):
    """
    Run repoclosure for every configured repo/arch combination.

    Each repo in ``self.repoclosure_map['repos']`` maps to a (possibly
    empty) list of additional repos to close against; an empty list means
    the repo is checked against itself only. In the case of 8, 9, and
    perhaps 10, BaseOS is the only repo that checks against itself (it
    must be able to survive on its own).

    One podman container is spawned per repo/arch pair; after all pods
    for a repo exit, their statuses are inspected and failures collected.

    Parameters:
        sync_root -- root directory of the synced repositories
        work_root -- work directory that holds the generated entry scripts
        log_root  -- directory receiving the repoclosure log files

    Raises:
        SystemExit -- when parallel mode is disabled (serial repoclosure
                      is prohibitively slow).
    """
    cmd = Shared.podman_cmd(self.log)
    entries_dir = os.path.join(work_root, "entries")
    bad_exit_list = []

    if not self.parallel:
        self.log.error('repoclosure is too slow to run one by one. enable parallel mode.')
        raise SystemExit()

    self.log.info('Beginning repoclosure phase')
    for repo in self.repoclosure_map['repos']:
        # Honor a user-requested repo subset, if any.
        if self.repo and repo not in self.repo:
            continue

        repoclosure_entry_name_list = []
        self.log.info('Setting up repoclosure for {}'.format(repo))

        for arch in self.repoclosure_map['arches']:
            repo_combination = []
            repoclosure_entry_name = 'repoclosure-{}-{}'.format(repo, arch)
            repoclosure_entry_name_list.append(repoclosure_entry_name)
            repoclosure_arch_list = self.repoclosure_map['arches'][arch]

            # Some repos will have additional repos to close against - this
            # helps append
            if len(self.repoclosure_map['repos'][repo]) > 0:
                for l in self.repoclosure_map['repos'][repo]:
                    stretch = '--repofrompath={},file://{}/{}/{}/os --repo={}'.format(
                            l,
                            sync_root,
                            l,
                            arch,
                            l
                    )
                    repo_combination.append(stretch)

            join_repo_comb = ' '.join(repo_combination)

            # Bugfix: this path was previously computed twice in a row;
            # the duplicate assignment has been removed.
            repoclosure_entry_point_sh = os.path.join(
                    entries_dir,
                    repoclosure_entry_name
            )

            repoclosure_cmd = ('/usr/bin/dnf repoclosure {} '
                    '--repofrompath={},file://{}/{}/{}/os --repo={} --check={} {} '
                    '| tee -a {}/{}-repoclosure-{}.log').format(
                    repoclosure_arch_list,
                    repo,
                    sync_root,
                    repo,
                    arch,
                    repo,
                    repo,
                    join_repo_comb,
                    log_root,
                    repo,
                    arch
            )

            # Entry script executed inside the container. pipefail makes
            # the tee pipeline propagate dnf's exit status so the pod's
            # exit code reflects the repoclosure result.
            with open(repoclosure_entry_point_sh, "w+") as entry:
                entry.write('#!/bin/bash\n')
                entry.write('set -o pipefail\n')
                entry.write('/usr/bin/dnf install dnf-plugins-core -y\n')
                entry.write('/usr/bin/dnf clean all\n')
                entry.write(repoclosure_cmd + '\n')
            os.chmod(repoclosure_entry_point_sh, 0o755)
            repo_combination.clear()

        self.log.info('Spawning pods for %s' % repo)
        for pod in repoclosure_entry_name_list:
            podman_cmd_entry = '{} run -d -it -v "{}:{}" -v "{}:{}:z" -v "{}:{}" --name {} --entrypoint {}/{} {}'.format(
                    cmd,
                    self.compose_root,
                    self.compose_root,
                    self.dnf_config,
                    self.dnf_config,
                    entries_dir,
                    entries_dir,
                    pod,
                    entries_dir,
                    pod,
                    self.container
            )
            subprocess.call(
                    shlex.split(podman_cmd_entry),
                    stdout=subprocess.DEVNULL,
                    stderr=subprocess.DEVNULL
            )

        join_all_pods = ' '.join(repoclosure_entry_name_list)
        time.sleep(3)
        self.log.info('Performing repoclosure on %s ... ' % repo)
        pod_watcher = '{} wait {}'.format(
                cmd,
                join_all_pods
        )

        # Blocks until every pod for this repo has exited.
        subprocess.call(
                shlex.split(pod_watcher),
                stdout=subprocess.DEVNULL,
                stderr=subprocess.DEVNULL
        )

        for pod in repoclosure_entry_name_list:
            checkcmd = '{} ps -f status=exited -f name={}'.format(
                    cmd,
                    pod
            )
            podcheck = subprocess.Popen(
                    checkcmd,
                    stdout=subprocess.PIPE,
                    stderr=subprocess.PIPE,
                    shell=True
            )
            output, errors = podcheck.communicate()
            # A pod that did not exit 0 indicates a failed closure.
            if 'Exited (0)' not in output.decode():
                self.log.error(Color.FAIL + pod)
                bad_exit_list.append(pod)

        rmcmd = '{} rm {}'.format(
                cmd,
                join_all_pods
        )
        subprocess.Popen(
                rmcmd,
                stdout=subprocess.DEVNULL,
                stderr=subprocess.DEVNULL,
                shell=True
        )

        repoclosure_entry_name_list.clear()
        self.log.info('Syncing %s completed' % repo)

    if len(bad_exit_list) > 0:
        self.log.error(
                Color.BOLD + Color.RED + 'There were issues closing these '
                'repositories:' + Color.END
        )
        for issue in bad_exit_list:
            self.log.error(issue)
2022-07-01 20:09:52 +00:00
|
|
|
def deploy_extra_files(self, sync_root, global_work_root):
    """
    Deploys extra files based on info of rlvars, including an
    extra_files.json.

    The files listed in ``self.extra_files['list']`` are cloned from the
    configured git repo and copied to both the global work root (picked
    up later by the ISO creation process) and the compose metadata
    directory (so they land on the mirrors).

    Parameters:
        sync_root        -- root of the compose sync tree
        global_work_root -- the compose's global work directory
    """
    # Bugfix: this message previously said "Deploying treeinfo, discinfo,
    # and media.repo" - a copy-paste from deploy_treeinfo.
    self.log.info(Color.INFO + 'Deploying extra files')

    cmd = Shared.git_cmd(self.log)
    tmpclone = '/tmp/clone'
    extra_files_dir = os.path.join(
            global_work_root,
            'extra-files'
    )
    metadata_dir = os.path.join(
            sync_root,
            "metadata"
    )
    if not os.path.exists(extra_files_dir):
        os.makedirs(extra_files_dir, exist_ok=True)

    if not os.path.exists(metadata_dir):
        os.makedirs(metadata_dir, exist_ok=True)

    clonecmd = '{} clone {} -b {} -q {}'.format(
            cmd,
            self.extra_files['git_repo'],
            self.extra_files['branch'],
            tmpclone
    )

    subprocess.call(
            shlex.split(clonecmd),
            stdout=subprocess.DEVNULL,
            stderr=subprocess.DEVNULL
    )

    self.log.info(Color.INFO + 'Deploying extra files to work and metadata directories ...')

    # Copy files to work root. Copies are best-effort: a missing file is
    # logged, not fatal. (Previously a bare "except:" - narrowed so
    # KeyboardInterrupt/SystemExit are no longer swallowed.)
    for extra in self.extra_files['list']:
        src = '/tmp/clone/' + extra
        # Copy extra files to root of compose here also - The extra files
        # are meant to be picked up by our ISO creation process and also
        # exist on our mirrors.
        try:
            shutil.copy2(src, extra_files_dir)
            shutil.copy2(src, metadata_dir)
        except OSError:
            self.log.warn(Color.WARN + 'Extra file not copied: ' + src)

    try:
        shutil.rmtree(tmpclone)
    except OSError as e:
        self.log.error(Color.FAIL + 'Directory ' + tmpclone +
                ' could not be removed: ' + e.strerror
        )
|
|
|
def deploy_metadata(self, sync_root):
    """
    Deploys metadata that defines information about the compose. Some data
    will be close to how pungi produces it, but it won't be exact nor a
    perfect replica.

    Writes COMPOSE_ID, a pungi-style metadata file, a composeinfo file,
    and a rendered README into ``<sync_root>/metadata``.

    Parameters:
        sync_root -- root of the compose sync tree
    """
    self.log.info(Color.INFO + 'Deploying metadata for this compose')
    # Create metadata here
    # Create COMPOSE_ID here (this doesn't necessarily match anything, it's
    # just an indicator)
    metadata_dir = os.path.join(
            sync_root,
            "metadata"
    )

    # It should already exist from a full run or refresh. This is just in
    # case and it doesn't hurt.
    if not os.path.exists(metadata_dir):
        os.makedirs(metadata_dir, exist_ok=True)

    # "with" closes the file; the explicit close() calls that used to
    # follow these blocks were redundant and have been removed.
    with open(metadata_dir + '/COMPOSE_ID', "w+", encoding='utf-8') as f:
        f.write(self.compose_id)

    Shared.write_metadata(
            self.timestamp,
            self.date_stamp,
            self.distname,
            self.fullversion,
            self.compose_id,
            metadata_dir + '/metadata'
    )

    # TODO: Add in each repo and their corresponding arch.
    productmd_date = self.date_stamp.split('.')[0]
    Shared.composeinfo_write(
            metadata_dir + '/composeinfo',
            self.distname,
            self.shortname,
            self.fullversion,
            'updates',
            productmd_date
    )

    self.log.info(Color.INFO + 'Metadata files phase completed.')

    # Deploy README to metadata directory
    readme_template = self.tmplenv.get_template('README.tmpl')
    readme_output = readme_template.render(
            fullname=self.fullname,
            version=empanadas.__version__
    )

    with open(metadata_dir + '/README', 'w+', encoding='utf-8') as readme_file:
        readme_file.write(readme_output)
2022-06-26 19:24:56 +00:00
|
|
|
def deploy_treeinfo(self, repo, sync_root, arch):
    """
    Deploys initial treeinfo files. These have the potential of being
    overwritten by our ISO process, which is fine. If there is a treeinfo
    (or discinfo / media.repo) found, it will be skipped.

    For every selected repo/arch pair this writes the metadata triplet
    into the os/ and kickstart/ trees, plus the debug tree (unless debug
    is ignored) and the source tree (unless source is ignored or a single
    arch was requested).

    Parameters:
        repo      -- optional single repo to restrict to (ignored on fullrun)
        sync_root -- root of the compose sync tree
        arch      -- optional single arch to restrict to
    """
    self.log.info(Color.INFO + 'Deploying treeinfo, discinfo, and media.repo')

    arches_to_tree = self.arches
    if arch:
        arches_to_tree = [arch]

    repos_to_tree = self.repos
    if repo and not self.fullrun:
        repos_to_tree = [repo]

    # If a treeinfo or discinfo file exists, it should be skipped.
    for r in repos_to_tree:
        repo_name = r
        arch_tree = arches_to_tree.copy()

        if r in self.repo_renames:
            repo_name = self.repo_renames[r]

        # I feel it's necessary to make sure even i686 has .treeinfo and
        # .discinfo, just for consistency.
        if 'all' in r and 'x86_64' in arches_to_tree and self.multilib:
            arch_tree.append('i686')

        for a in arch_tree:
            if a == 'source':
                continue

            # os/ and kickstart/ trees use the truncated (int) timestamp
            # for .treeinfo; debug and source trees use the raw float,
            # matching the original per-section behavior.
            os_root = os.path.join(sync_root, repo_name, a, 'os')
            self._write_tree_metadata(
                    repo_name,
                    a + ' os',
                    os.path.join(os_root, '.treeinfo'),
                    os.path.join(os_root, '.discinfo'),
                    os.path.join(os_root, 'media.repo'),
                    a,
                    int(self.timestamp)
            )

            # Kickstart part of the repos
            ks_root = os.path.join(sync_root, repo_name, a, 'kickstart')
            self._write_tree_metadata(
                    repo_name,
                    a + ' kickstart',
                    os.path.join(ks_root, '.treeinfo'),
                    os.path.join(ks_root, '.discinfo'),
                    os.path.join(ks_root, 'media.repo'),
                    a,
                    int(self.timestamp)
            )

            # NOTE: the original also tested "not a == 'source'" here,
            # which is dead code - source arches are skipped by the
            # continue at the top of this loop.
            if not self.ignore_debug:
                debug_root = os.path.join(sync_root, repo_name, a, 'debug/tree')
                self._write_tree_metadata(
                        repo_name,
                        a + ' debug',
                        os.path.join(debug_root, '.treeinfo'),
                        os.path.join(debug_root, '.discinfo'),
                        os.path.join(debug_root, 'media.repo'),
                        a,
                        self.timestamp
                )

            # Source trees are arch-independent ('src'); only written when
            # no single arch was requested.
            if not self.ignore_source and not arch:
                source_root = os.path.join(sync_root, repo_name, 'source/tree')
                self._write_tree_metadata(
                        repo_name,
                        'source os',
                        os.path.join(source_root, '.treeinfo'),
                        os.path.join(source_root, '.discinfo'),
                        os.path.join(source_root, 'media.repo'),
                        'src',
                        self.timestamp
                )

def _write_tree_metadata(self, repo_name, label, tree_path, disc_path,
        media_path, tree_arch, tree_timestamp):
    """
    Write .treeinfo, .discinfo, and media.repo at the given paths, each
    only if it does not already exist. ``label`` only affects log
    messages (e.g. 'x86_64 os', 'x86_64 debug', 'source os').

    Bugfix: the kickstart .discinfo "already exists" warning previously
    used Color.FAIL; all "already exists" messages now use Color.WARN.
    """
    if not os.path.exists(tree_path):
        try:
            Shared.treeinfo_new_write(
                    tree_path,
                    self.distname,
                    self.shortname,
                    self.fullversion,
                    tree_arch,
                    tree_timestamp,
                    repo_name
            )
        except Exception as e:
            self.log.error(Color.FAIL + repo_name + ' ' + label +
                    ' .treeinfo could not be written'
            )
            self.log.error(e)
    else:
        self.log.warn(Color.WARN + repo_name + ' ' + label +
                ' .treeinfo already exists'
        )

    if not os.path.exists(disc_path):
        try:
            Shared.discinfo_write(
                    self.timestamp,
                    self.fullname,
                    tree_arch,
                    disc_path
            )
        except Exception as e:
            self.log.error(Color.FAIL + repo_name + ' ' + label +
                    ' .discinfo could not be written'
            )
            self.log.error(e)
    else:
        self.log.warn(Color.WARN + repo_name + ' ' + label +
                ' .discinfo already exists'
        )

    if not os.path.exists(media_path):
        try:
            Shared.media_repo_write(
                    self.timestamp,
                    self.fullname,
                    media_path
            )
        except Exception as e:
            self.log.error(Color.FAIL + repo_name + ' ' + label +
                    ' media.repo could not be written'
            )
            self.log.error(e)
    else:
        self.log.warn(Color.WARN + repo_name + ' ' + label +
                ' media.repo already exists'
        )
2022-07-04 04:00:57 +00:00
|
|
|
def tweak_treeinfo(self, repo, sync_root, arch):
    """
    This modifies treeinfo for the primary repository. If the repository is
    listed in the iso_map as a non-disc, it will be considered for
    modification.

    On a fullrun the kickstart tree's treeinfo is tweaked as well.

    Parameters:
        repo      -- optional single repo to restrict to (ignored on fullrun)
        sync_root -- root of the compose sync tree
        arch      -- optional single arch to restrict to
    """
    arches_to_tree = self.arches
    if arch:
        arches_to_tree = [arch]

    repos_to_tree = self.repos
    if repo and not self.fullrun:
        repos_to_tree = [repo]

    # Cleanup: the original loop also built unused entry_name_list,
    # repo_name, and arch_tree locals per repo; only the iso_map
    # membership check mattered.
    variants_to_tweak = [r for r in repos_to_tree if r in self.iso_map['images']]

    if not len(variants_to_tweak) > 0:
        self.log.info(Color.INFO + 'No treeinfo to tweak.')
        return

    for a in arches_to_tree:
        for v in variants_to_tweak:
            self.log.info(Color.INFO + 'Tweaking treeinfo for ' + a + ' ' + v)
            imagemap = self.iso_map['images'][v]

            data = self._treeinfo_tweak_data(
                    a, v, os.path.join(sync_root, v, a, 'os')
            )
            try:
                Shared.treeinfo_modify_write(data, imagemap, self.log)
            except Exception as e:
                self.log.error(Color.FAIL + 'There was an error writing os treeinfo.')
                self.log.error(e)

            if self.fullrun:
                ksdata = self._treeinfo_tweak_data(
                        a, v, os.path.join(sync_root, v, a, 'kickstart')
                )
                try:
                    Shared.treeinfo_modify_write(ksdata, imagemap, self.log)
                except Exception as e:
                    self.log.error(Color.FAIL + 'There was an error writing kickstart treeinfo.')
                    self.log.error(e)

def _treeinfo_tweak_data(self, a, v, variant_path):
    """Build the treeinfo-modification payload for one arch/variant tree."""
    return {
            'arch': a,
            'variant': v,
            'variant_path': variant_path,
            'checksum': self.checksum,
            'distname': self.distname,
            'fullname': self.fullname,
            'shortname': self.shortname,
            'release': self.fullversion,
            'timestamp': self.timestamp,
    }
|
|
|
def run_compose_closeout(self):
    """
    Closes out a compose. This ensures the ISO's are synced from work/isos
    to compose/isos, checks for live media and cloud images and syncs as
    well from work/live (work/images) to compose/live (compose/images),
    combines per-image CHECKSUM files, and deploys final metadata.

    Raises:
        SystemExit -- when the latest-compose symlink is broken, or when
                      no parallel copy tool is available for the ISO sync.
    """
    # latest-X-Y should exist at all times for this to work.
    work_root = os.path.join(
            self.compose_latest_dir,
            'work'
    )
    sync_root = self.compose_latest_sync

    sync_iso_root = os.path.join(sync_root, 'isos')
    tmp_dir = os.path.join(self.compose_root, 'partitions')

    # Verify if the link even exists
    if not os.path.exists(self.compose_latest_dir):
        self.log.error(
                '!! Latest compose link is broken does not exist: %s' % self.compose_latest_dir
        )
        self.log.error(
                '!! Please perform a full run if you have not done so.'
        )
        raise SystemExit()

    log_root = os.path.join(
            work_root,
            "logs",
            self.date_stamp
    )

    iso_root = os.path.join(work_root, "isos")
    live_root = os.path.join(work_root, "live")
    sync_live_root = os.path.join(sync_root, 'live')
    images_root = os.path.join(work_root, 'images')
    sync_images_root = os.path.join(sync_root, 'images')

    global_work_root = os.path.join(
            work_root,
            "global",
    )

    # Standard ISOs
    self.log.info(Color.INFO + 'Starting to sync ISOs to compose')

    result = self._parallel_copy(iso_root, sync_iso_root, tmp_dir, announce=True)
    if result is None:
        self.log.error(
                Color.FAIL +
                'fpsync nor parallel + rsync were found on this system. ' +
                'There is also no built-in parallel rsync method at this ' +
                'time.'
        )
        raise SystemExit()
    self._report_copy(result)

    # Live images. Bugfix: these optional syncs previously referenced
    # message/ret unconditionally, which would raise NameError if no
    # copy tool existed (only reachable if the ISO sync above changed).
    if os.path.exists(live_root):
        self.log.info(Color.INFO + 'Starting to sync live images to compose')
        result = self._parallel_copy(live_root, sync_live_root, tmp_dir)
        if result is not None:
            self._report_copy(result)

    # Cloud images
    if os.path.exists(images_root):
        self.log.info(Color.INFO + 'Starting to sync cloud images to compose')
        result = self._parallel_copy(images_root, sync_images_root, tmp_dir)
        if result is not None:
            self._report_copy(result)

    # Combine all checksums here
    for arch in self.arches:
        iso_arch_root = os.path.join(sync_iso_root, arch)
        self._combine_checksums(iso_arch_root, os.path.join(iso_arch_root, 'CHECKSUM'))

        live_arch_root = os.path.join(sync_live_root, arch)
        self._combine_checksums(live_arch_root, os.path.join(live_arch_root, 'CHECKSUM'))

        # NOTE: cloud images deliberately write one combined CHECKSUM at
        # the top of the images tree, not per-arch.
        images_arch_root = os.path.join(sync_images_root, arch)
        self._combine_checksums(images_arch_root, os.path.join(sync_images_root, 'CHECKSUM'))

    # Deploy final metadata for a close out
    self.deploy_metadata(sync_root)

def _parallel_copy(self, src, dest, tmp_dir, announce=False):
    """
    Copy a tree with fpsync (preferred) or parallel+rsync.

    Returns the (message, returncode) tuple from the copy helper, or
    None when neither tool is installed.
    """
    if os.path.exists('/usr/bin/fpsync'):
        if announce:
            self.log.info(Color.INFO + 'Starting up fpsync')
        return Shared.fpsync_method(src, dest, tmp_dir)
    if os.path.exists('/usr/bin/parallel') and os.path.exists('/usr/bin/rsync'):
        if announce:
            self.log.info(Color.INFO + 'Starting up parallel | rsync')
        return Shared.rsync_method(src, dest)
    return None

def _report_copy(self, result):
    """Log a copy helper's (message, returncode) result at the right level."""
    message, ret = result
    if ret != 0:
        self.log.error(Color.FAIL + message)
    else:
        self.log.info(Color.INFO + message)

def _combine_checksums(self, arch_root, combined_path):
    """
    Concatenate every *.CHECKSUM under arch_root into combined_path.

    No-op when arch_root does not exist. (Cleanup: the per-section loops
    previously shadowed the builtin 'sum' as a file-handle name and
    called close() inside the 'with' blocks.)
    """
    if not os.path.exists(arch_root):
        return
    with open(combined_path, 'w+', encoding='utf-8') as combined:
        for check in glob.iglob(arch_root + '/*.CHECKSUM'):
            with open(check, 'r', encoding='utf-8') as piece:
                for line in piece:
                    combined.write(line)
2022-05-22 06:20:15 +00:00
|
|
|
class SigRepoSync:
    """
    This helps us do reposync operations for SIG's. Do not use this for the
    base system. Use RepoSync for that.
    """
    def __init__(
            self,
            rlvars,
            config,
            sigvars,
            major,
            repo=None,
            arch=None,
            ignore_debug: bool = False,
            ignore_source: bool = False,
            repoclosure: bool = False,
            refresh_extra_files: bool = False,
            skip_all: bool = False,
            hashed: bool = False,
            parallel: bool = False,
            dryrun: bool = False,
            fullrun: bool = False,
            nofail: bool = False,
            logger=None
    ):
        """
        Initialize a SIG repo sync run.

        Parameters:
            rlvars: Per-major-version variables for the distribution
                    (profile, iso_map, fullname, optional container and
                    repoclosure_map overrides).
            config: Global empanadas configuration dict (compose_root,
                    repo_base_url, staging_root, container, etc.).
            sigvars: SIG-specific variables; its keys identify the SIG
                     repositories to sync and it carries the SIG profile.
            major: Major version being synced (string, used in paths).
            repo: Optional single SIG repo to limit the sync to.
            arch: Optional single architecture to limit the sync to.
            ignore_debug/ignore_source: Skip debug/source repo syncing.
            repoclosure: Run repoclosure checks after syncing.
            refresh_extra_files: Re-deploy extra files into the compose.
            skip_all: Skip all syncing operations.
            hashed: Use hashed repo layout.
            parallel: Enable podman-based parallel syncing.
            dryrun: Do not actually perform the sync.
            fullrun: Perform a full (dated) compose run.
            nofail: Continue past per-repo failures.
            logger: Optional pre-configured logger; a stdout logger is
                    created when None.
        """
        # Run behavior flags
        self.nofail = nofail
        self.dryrun = dryrun
        self.fullrun = fullrun
        self.arch = arch
        self.ignore_debug = ignore_debug
        self.ignore_source = ignore_source
        self.skip_all = skip_all
        self.hashed = hashed
        self.repoclosure = repoclosure
        self.refresh_extra_files = refresh_extra_files
        # Enables podman syncing, which should effectively speed up operations
        self.parallel = parallel

        # Relevant config items
        self.major_version = major
        self.date_stamp = config['date_stamp']
        self.timestamp = time.time()
        self.repo_base_url = config['repo_base_url']
        self.compose_root = config['compose_root']
        # os.path.join instead of manual "+ '/' +" concatenation, consistent
        # with every other path built in this constructor.
        self.compose_base = os.path.join(config['compose_root'], major)
        self.profile = rlvars['profile']
        self.sigprofile = sigvars['profile']
        self.iso_map = rlvars['iso_map']
        self.distname = config['distname']
        self.fullname = rlvars['fullname']
        self.shortname = config['shortname']

        # Relevant major version items
        self.sigvars = sigvars
        # NOTE: this is a keys view over sigvars and therefore also includes
        # non-repo keys such as 'profile'.
        self.sigrepos = sigvars.keys()
        #self.arches = sigvars['allowed_arches']
        #self.project_id = sigvars['project_id']
        self.sigrepo = repo

        # Templates
        file_loader = FileSystemLoader(f"{_rootdir}/templates")
        self.tmplenv = Environment(loader=file_loader)

        # each el can have its own designated container to run stuff in,
        # otherwise we'll just default to the default config.
        self.container = config['container']
        if 'container' in rlvars and len(rlvars['container']) > 0:
            self.container = rlvars['container']

        # Always define the attribute so later access cannot raise
        # AttributeError when rlvars has no repoclosure_map; the override
        # below applies when the map is present and non-empty.
        self.repoclosure_map = {}
        if 'repoclosure_map' in rlvars and len(rlvars['repoclosure_map']) > 0:
            self.repoclosure_map = rlvars['repoclosure_map']

        # Where staged SIG content lives before composing
        self.staging_dir = os.path.join(
                config['staging_root'],
                config['sig_category_stub'],
                major
        )

        # "latest" symlink-style directory for this SIG's compose
        self.compose_latest_dir = os.path.join(
                config['compose_root'],
                major,
                f"latest-{self.shortname}-{major}-SIG-{self.sigprofile}"
        )

        self.compose_latest_sync = os.path.join(
                self.compose_latest_dir,
                "compose"
        )

        self.compose_log_dir = os.path.join(
                self.compose_latest_dir,
                "work/logs"
        )

        self.compose_global_work_root = os.path.join(
                self.compose_latest_dir,
                "work/global"
        )

        # This is temporary for now.
        if logger is None:
            self.log = logging.getLogger("sigreposync")
            self.log.setLevel(logging.INFO)
            handler = logging.StreamHandler(sys.stdout)
            handler.setLevel(logging.INFO)
            formatter = logging.Formatter(
                    '%(asctime)s :: %(name)s :: %(message)s',
                    '%Y-%m-%d %H:%M:%S'
            )
            handler.setFormatter(formatter)
            self.log.addHandler(handler)

        self.log.info('sig reposync init')
        self.log.info(major)
        #self.dnf_config = Shared.generate_conf()

    def run(self):
        """
        This runs the sig sync.

        Currently a stub; SIG syncing is not implemented yet.
        """
        pass
|