finish wrapper, start repoclosure

Louis Abel 2022-05-25 21:39:26 -07:00
parent 550526f535
commit fc227c947e
Signed by: label
GPG Key ID: B37E62D143879B36
5 changed files with 121 additions and 16 deletions

View File

@@ -50,4 +50,31 @@
has_modules:
- 'AppStream'
- 'CRB'
repoclosure_map:
BaseOS: []
AppStream:
- BaseOS
CRB:
- BaseOS
- AppStream
HighAvailability:
- BaseOS
- AppStream
ResilientStorage:
- BaseOS
- AppStream
RT:
- BaseOS
- AppStream
NFV:
- BaseOS
- AppStream
SAP:
- BaseOS
- AppStream
- HighAvailability
SAPHANA:
- BaseOS
- AppStream
- HighAvailability
...
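The new repoclosure_map ties each repo to the repos it is allowed to pull dependencies from: BaseOS has to close on its own, while layered repos such as CRB get BaseOS and AppStream as their dependency pool. As a quick illustration of how the block reads once loaded (PyYAML and the variable names here are assumptions for the example, not code from this commit):

import yaml

snippet = """
repoclosure_map:
  BaseOS: []
  AppStream:
    - BaseOS
  CRB:
    - BaseOS
    - AppStream
"""

repoclosure_map = yaml.safe_load(snippet)['repoclosure_map']
print(repoclosure_map['BaseOS'])  # [] -> BaseOS is checked against itself only
print(repoclosure_map['CRB'])     # ['BaseOS', 'AppStream'] -> extra repos enabled during the check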

View File

@@ -1,18 +1,57 @@
#!/usr/bin/env python3
# This is intended for doing "full" syncs, not periodic update syncs.
# This script can be called to do single syncs or full on syncs.
from common import *
import argparse
from common import *
from util import Checks
from util import RepoSync
rlvars = rldict['9']
#rlvars = rldict['9']
#r = Checks(rlvars, config['arch'])
#r.check_valid_arch()
# Start up the parser baby
parser = argparse.ArgumentParser(description="Peridot Sync and Compose")
# All of our options
parser.add_argument('--release', type=str, help="Major Release Version", required=True)
parser.add_argument('--repo', type=str, help="Repository name")
parser.add_argument('--arch', type=str, help="Architecture")
parser.add_argument('--ignore-debug', action='store_true')
parser.add_argument('--ignore-source', action='store_true')
parser.add_argument('--repoclosure', action='store_true')
parser.add_argument('--skip-all', action='store_true')
parser.add_argument('--dry-run', action='store_true')
parser.add_argument('--full-run', action='store_true')
parser.add_argument('--no-fail', action='store_true')
# I am aware this is confusing, I want podman to be the default option
parser.add_argument('--simple', action='store_false')
parser.add_argument('--logger', type=str)
# Parse them
results = parser.parse_args()
rlvars = rldict[results.release]
r = Checks(rlvars, config['arch'])
r.check_valid_arch()
a = RepoSync(rlvars, config, major="9", repo="ResilientStorage", parallel=True, ignore_debug=False, ignore_source=False)
# Send them and do whatever I guess
a = RepoSync(
rlvars,
config,
major=results.release,
repo=results.repo,
arch=results.arch,
ignore_debug=results.ignore_debug,
ignore_source=results.ignore_source,
repoclosure=results.repoclosure,
skip_all=results.skip_all,
parallel=results.simple,
dryrun=results.dry_run,
fullrun=results.full_run,
nofail=results.no_fail,
logger=results.logger
)
a.run()
#a.generate_conf()
#somedir = a.generate_compose_dirs()
#print(a.sync())
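The wrapper now takes everything the old hard-coded call supplied as command-line flags (the file name is cut off in this view; judging by the test copy below it is the sync-from-peridot script). A dry run of a single repo would look something like ./sync-from-peridot --release 9 --repo BaseOS --arch x86_64 --dry-run, while --full-run plus --repoclosure would stage a complete compose and then kick off the (for now stubbed) closure check. Note the deliberately inverted --simple flag: it uses store_false, so passing it sets parallel=False and disables the podman-based parallel sync that is otherwise the default.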

iso/py/sync-from-peridot-test Executable file
View File

@@ -0,0 +1,16 @@
#!/usr/bin/env python3
# This is a testing script to ensure the RepoSync class is working as intended.
from common import *
import argparse
from util import Checks
from util import RepoSync
rlvars = rldict['9']
r = Checks(rlvars, config['arch'])
r.check_valid_arch()
a = RepoSync(rlvars, config, major="9", repo="ResilientStorage", parallel=True, ignore_debug=False, ignore_source=False)
#a = RepoSync(rlvars, config, major="9", repo="ResilientStorage", parallel=True, ignore_debug=False, ignore_source=False, fullrun=True)
a.run()

View File

@@ -34,10 +34,11 @@ class RepoSync:
major,
repo=None,
arch=None,
ignore_debug=False,
ignore_source=False,
skip_all=False,
parallel=False,
ignore_debug: bool = False,
ignore_source: bool = False,
repoclosure: bool = False,
skip_all: bool = False,
parallel: bool = False,
dryrun: bool = False,
fullrun: bool = False,
nofail: bool = False,
@@ -50,6 +51,7 @@ class RepoSync:
self.ignore_debug = ignore_debug
self.ignore_source = ignore_source
self.skip_all = skip_all
self.repoclosure = repoclosure
# Enables podman syncing, which should effectively speed up operations
self.parallel = parallel
# Relevant config items
@@ -74,6 +76,9 @@ class RepoSync:
if 'container' in rlvars and len(rlvars['container']) > 0:
self.container = rlvars['container']
if 'repoclosure_map' in rlvars and len(rlvars['repoclosure_map']) > 0:
self.repoclosure_map = rlvars['repoclosure_map']
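Worth noting as an observation on the pattern rather than part of the diff: like self.container above, self.repoclosure_map is only bound when the release config carries a non-empty repoclosure_map, so unless a default is assigned elsewhere in __init__ (not visible in this hunk), repoclosure_work() will need to guard for the attribute, or the assignment could fall back to something like rlvars.get('repoclosure_map', {}).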
self.staging_dir = os.path.join(
config['staging_root'],
config['category_stub'],
@@ -153,6 +158,12 @@ class RepoSync:
)
sync_root = self.compose_latest_sync
# Verify if the link even exists
if not os.path.exists(self.compose_latest_dir):
self.log.error('!! Latest compose link is broken or does not exist: %s' % self.compose_latest_dir)
self.log.error('!! Please perform a full run if you have not done so.')
raise SystemExit()
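One detail in this new guard: raise SystemExit() with no argument exits with status 0, so a caller or CI job would see the aborted sync as a success; raise SystemExit(1) would make the broken-link case visible as a failure.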
log_root = os.path.join(
work_root,
"logs"
@@ -167,6 +178,9 @@ class RepoSync:
if self.fullrun:
self.symlink_to_latest()
if self.repoclosure:
self.repoclosure_work(sync_root, work_root, log_root)
def sync(self, repo, sync_root, work_root, log_root, arch=None):
"""
Calls out syncing of the repos. We generally sync each component of a
@@ -369,13 +383,19 @@
for r in repos_to_sync:
entry_name_list = []
repo_name = r
arch_sync = arches_to_sync
if r in self.repo_renames:
repo_name = self.repo_renames[r]
for a in arches_to_sync:
# There should be a check here that if it's "all" and multilib
# is on, i686 should get synced too.
if 'all' in r and 'x86_64' in arches_to_sync and self.multilib:
arch_sync.append('i686')
# There should be a check here that if it's "all" and multilib
# is on, i686 should get synced too.
for a in arch_sync:
entry_name = '{}-{}'.format(r, a)
debug_entry_name = '{}-debug-{}'.format(r, a)
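A Python subtlety in the multilib handling above: arch_sync = arches_to_sync binds a second name to the same list object, so arch_sync.append('i686') also mutates arches_to_sync, and the appended i686 persists for every later repo in the loop (and could be appended again if more than one repo name contains 'all'). If a fresh per-repo list is the intent, a copy such as arch_sync = list(arches_to_sync) avoids the shared state.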
@@ -471,7 +491,7 @@
"--download-metadata --repoid={}-source -p {} "
"--norepopath | tee -a {}/{}-source-{}.log").format(
self.dnf_config,
repo_name,
r,
source_sync_path,
log_root,
repo_name,
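The repo_name to r swap in the source reposync command reads as intentional: repo_name may have been rewritten through self.repo_renames, while the --repoid handed to dnf presumably has to match the original repo key used when the dnf config was generated; the log file name further down keeps using repo_name.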
@@ -704,6 +724,9 @@
)
return cmd
def repoclosure_work(self, sync_root, work_root, log_root):
pass
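repoclosure_work() only lands as a stub in this commit (hence "start repoclosure" in the title). Purely as a sketch of where the hook could go, not the project's implementation, one way to consume self.repoclosure_map would be a dnf repoclosure run per mapped repo against the freshly synced trees, logged under log_root; the directory layout, repo ids, and arch attribute below are all assumptions:

def repoclosure_work(self, sync_root, work_root, log_root):
    # Hypothetical sketch only; imports would normally live at module level.
    import os
    import subprocess

    arches = self.arches if hasattr(self, 'arches') else ['x86_64']  # assumed attribute
    for arch in arches:
        for repo, lookaside in self.repoclosure_map.items():
            cmd = ['dnf', 'repoclosure', '--arch', arch, '--check', repo]
            for dep in [repo] + lookaside:
                # Assumed layout: <sync_root>/<repo>/<arch>/os holds the synced tree.
                path = os.path.join(sync_root, dep, arch, 'os')
                cmd += ['--repofrompath', '{},{}'.format(dep, path), '--repo', dep]
            log_path = os.path.join(log_root, 'repoclosure-{}-{}.log'.format(repo, arch))
            with open(log_path, 'w') as log_file:
                subprocess.run(cmd, stdout=log_file, stderr=subprocess.STDOUT, check=False)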
class SigRepoSync:
"""
This helps us do reposync operations for SIG's. Do not use this for the

View File

@@ -44,7 +44,7 @@ for x in "${ARCHES[@]}"; do
test -d "${STAGING_ROOT}/${RELEASE_DIR}/${y}/${x}/${z}"
ret_val=$?
if [ "$ret_val" -eq 0 ]; then
createrepo --update "${STAGING_ROOT}/${RELEASE_DIR}/${y}/${x}/${z}" \
createrepo "${STAGING_ROOT}/${RELEASE_DIR}/${y}/${x}/${z}" \
"--distro=cpe:/o:rocky:rocky:${REVISION:0:1},Rocky Linux ${REVISION:0:1}" \ "--distro=cpe:/o:rocky:rocky:${REVISION:0:1},Rocky Linux ${REVISION:0:1}" \
--workers 8 --workers 8
sed -i '/<open-size><\/open-size>/d' \ sed -i '/<open-size><\/open-size>/d' \
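For context on the createrepo change above: the two invocations differ only in --update, which tells createrepo to reuse existing repodata for packages whose size and mtime have not changed, making repeated staging runs much cheaper; without it the metadata is rebuilt from scratch on every invocation.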