forked from sig_core/toolkit
add hash flag
parent 58248b1e70
commit 7917876676
@@ -22,6 +22,7 @@ class Color:
 
 # vars and additional checks
 rldict = {}
+sigdict = {}
 config = {
     "rlmacro": rpm.expandMacro('%rhel'),
     "arch": platform.machine(),
@@ -40,6 +41,11 @@ for conf in glob.iglob('configs/*.yaml'):
     with open(conf, 'r', encoding="utf-8") as file:
         rldict.update(yaml.safe_load(file))
 
+# Import all SIG configs from yaml
+for conf in glob.iglob('sig/*.yaml'):
+    with open(conf, 'r', encoding="utf-8") as file:
+        sigdict.update(yaml.safe_load(file))
+
 # The system needs to be a RHEL-like system. It cannot be Fedora or SuSE.
 #if "%rhel" in config['rlmacro']:
 # raise SystemExit(Color.BOLD + 'This is not a RHEL-like system.' + Color.END
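Note: the loop above merges every file under sig/ into a single sigdict. A rough sketch of the merged structure, assuming the SIG YAML layouts added later in this commit (values abbreviated, not authoritative):

    # hypothetical merged result of sig/*.yaml
    sigdict = {
        'cloud': {
            '8': {
                'cloud-kernel': {
                    'project_id': 'f91da90d-5bdb-4cf2-80ea-e07f8dae5a5c',
                    'allowed_arches': ['aarch64', 'x86_64'],
                },
            },
        },
    }

    # a SIG sync then picks its slice by SIG name and major release,
    # which is what the new sync-sig script does with --sig and --release
    sigvars = sigdict['cloud']['8']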
@@ -1,11 +1,16 @@
 ---
-'8':
+altarch:
+  '8':
     rockyrpi:
+      allowed_arches:
+        - aarch64
       project_id: ''
       additional_dirs:
         - 'images'
   '9':
     rockyrpi:
+      allowed_arches:
+        - aarch64
       project_id: ''
       additional_dirs:
         - 'images'
@@ -1,10 +1,22 @@
 ---
-'8':
+cloud:
+  '8':
     cloud-kernel:
       project_id: 'f91da90d-5bdb-4cf2-80ea-e07f8dae5a5c'
+      allowed_arches:
+        - aarch64
+        - x86_64
+    cloud-common:
+      allowed_arches:
+        - aarch64
+        - x86_64
+      project_id: ''
+  '9':
     cloud-common:
       project_id: ''
-'9':
-  cloud-common:
-    project_id: ''
+      allowed_arches:
+        - aarch64
+        - x86_64
+        - ppc64le
+        - s390x
 ...
@@ -1,10 +1,11 @@
 ---
-'8':
+core:
+  '8':
     core-common:
       project_id: ''
     core-infra:
       project_id: ''
   '9':
     core-common:
       project_id: ''
     core-infra:
@@ -22,6 +22,7 @@ parser.add_argument('--ignore-debug', action='store_true')
 parser.add_argument('--ignore-source', action='store_true')
 parser.add_argument('--repoclosure', action='store_true')
 parser.add_argument('--skip-all', action='store_true')
+parser.add_argument('--hashed', action='store_true')
 parser.add_argument('--dry-run', action='store_true')
 parser.add_argument('--full-run', action='store_true')
 parser.add_argument('--no-fail', action='store_true')
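Note: --hashed is a store_true flag, so it defaults to False and only flips to True when passed on the command line. A minimal, standalone sketch of that behaviour:

    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument('--hashed', action='store_true')

    print(parser.parse_args([]).hashed)            # False
    print(parser.parse_args(['--hashed']).hashed)  # True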
@@ -47,6 +48,7 @@ a = RepoSync(
     ignore_source=results.ignore_source,
     repoclosure=results.repoclosure,
     skip_all=results.skip_all,
+    hashed=results.hashed,
     parallel=results.simple,
     dryrun=results.dry_run,
     fullrun=results.full_run,
iso/py/sync-sig (new executable file, 61 lines)
@@ -0,0 +1,61 @@
+#!/usr/bin/env python3
+
+# This script can be called to do single syncs or full on syncs.
+
+import argparse
+from common import *
+from util import Checks
+from util import SigRepoSync
+
+#rlvars = rldict['9']
+#r = Checks(rlvars, config['arch'])
+#r.check_valid_arch()
+
+# Start up the parser baby
+parser = argparse.ArgumentParser(description="Peridot Sync and Compose")
+
+# All of our options
+parser.add_argument('--release', type=str, help="Major Release Version", required=True)
+parser.add_argument('--repo', type=str, help="Repository name")
+parser.add_argument('--sig', type=str, help="SIG name")
+parser.add_argument('--arch', type=str, help="Architecture")
+parser.add_argument('--ignore-debug', action='store_true')
+parser.add_argument('--ignore-source', action='store_true')
+parser.add_argument('--repoclosure', action='store_true')
+parser.add_argument('--skip-all', action='store_true')
+parser.add_argument('--hashed', action='store_true')
+parser.add_argument('--dry-run', action='store_true')
+parser.add_argument('--full-run', action='store_true')
+parser.add_argument('--no-fail', action='store_true')
+# I am aware this is confusing, I want podman to be the default option
+parser.add_argument('--simple', action='store_false')
+parser.add_argument('--logger', type=str)
+
+# Parse them
+results = parser.parse_args()
+
+rlvars = rldict[results.release]
+sigvars = sigdict[results.sig][results.release]
+r = Checks(rlvars, config['arch'])
+r.check_valid_arch()
+
+# Send them and do whatever I guess
+a = SigRepoSync(
+    rlvars,
+    config,
+    sigvars,
+    major=results.release,
+    repo=results.repo,
+    arch=results.arch,
+    ignore_source=results.ignore_source,
+    repoclosure=results.repoclosure,
+    skip_all=results.skip_all,
+    hashed=results.hashed,
+    parallel=results.simple,
+    dryrun=results.dry_run,
+    fullrun=results.full_run,
+    nofail=results.no_fail,
+    logger=results.logger
+)
+
+a.run()
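Note: a typical invocation of the new script, using only flags defined above and hypothetical values for the release, SIG, repo, and arch:

    ./iso/py/sync-sig --release 8 --sig cloud --repo cloud-kernel --arch x86_64 --hashed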
@@ -8,6 +8,7 @@ from .check import (
 
 from .dnf_utils import (
     RepoSync,
+    SigRepoSync
 )
 
 from .iso_utils import (
@@ -38,6 +38,7 @@ class RepoSync:
             ignore_source: bool = False,
             repoclosure: bool = False,
             skip_all: bool = False,
+            hashed: bool = False,
             parallel: bool = False,
             dryrun: bool = False,
             fullrun: bool = False,
@@ -51,6 +52,7 @@ class RepoSync:
         self.ignore_debug = ignore_debug
         self.ignore_source = ignore_source
         self.skip_all = skip_all
+        self.hashed = hashed
         self.repoclosure = repoclosure
         # Enables podman syncing, which should effectively speed up operations
         self.parallel = parallel
@@ -206,152 +208,9 @@ class RepoSync:
         """
         This is for normal dnf syncs. This is very slow.
         """
-        cmd = self.reposync_cmd()
-        sync_single_arch = False
-        arches_to_sync = self.arches
-        if arch:
-            sync_single_arch = True
-            arches_to_sync = [arch]
-
-        sync_single_repo = False
-        repos_to_sync = self.repos
-        if repo and not self.fullrun:
-            sync_single_repo = True
-            repos_to_sync = [repo]
-
-
-        # dnf reposync --download-metadata \
-        # --repoid fedora -p /tmp/test \
-        # --forcearch aarch64 --norepopath
-
-        self.log.info(
-            Color.BOLD + '!! WARNING !! ' + Color.END + 'You are performing a '
-            'local reposync, which may incur delays in your compose.'
-        )
-
-        self.log.info(
-            Color.BOLD + '!! WARNING !! ' + Color.END + 'Standard dnf reposync '
-            'is not really a supported method. Only use this for general testing.'
-        )
-
-        if self.fullrun:
-            self.log.info(
-                Color.BOLD + '!! WARNING !! ' + Color.END + 'This is a full '
-                'run! This will take a LONG TIME.'
-            )
-
-        for r in repos_to_sync:
-            for a in arches_to_sync:
-                repo_name = r
-                if r in self.repo_renames:
-                    repo_name = self.repo_renames[r]
-
-                os_sync_path = os.path.join(
-                    sync_root,
-                    repo_name,
-                    a,
-                    'os'
-                )
-
-                debug_sync_path = os.path.join(
-                    sync_root,
-                    repo_name,
-                    a,
-                    'debug/tree'
-                )
-
-                sync_cmd = "{} -c {} --download-metadata --repoid={} -p {} --forcearch {} --norepopath".format(
-                    cmd,
-                    self.dnf_config,
-                    r,
-                    os_sync_path,
-                    a
-                )
-
-                debug_sync_cmd = "{} -c {} --download-metadata --repoid={}-debug -p {} --forcearch {} --norepopath".format(
-                    cmd,
-                    self.dnf_config,
-                    r,
-                    debug_sync_path,
-                    a
-                )
-
-                self.log.info('Syncing {} {}'.format(r, a))
-                #self.log.info(sync_cmd)
-                # Try to figure out where to send the actual output of this...
-                # Also consider on running a try/except here? Basically if
-                # something happens (like a repo doesn't exist for some arch,
-                # eg RT for aarch64), make a note of it somehow (but don't
-                # break the entire sync). As it stands with this
-                # implementation, if something fails, it just continues on.
-                process = subprocess.call(
-                    shlex.split(sync_cmd),
-                    stdout=subprocess.DEVNULL,
-                    stderr=subprocess.DEVNULL
-                )
-
-                if not self.ignore_debug:
-                    self.log.info('Syncing {} {} (debug)'.format(r, a))
-                    process_debug = subprocess.call(
-                        shlex.split(debug_sync_cmd),
-                        stdout=subprocess.DEVNULL,
-                        stderr=subprocess.DEVNULL
-                    )
-
-                # This is an ugly hack. We don't want to list i686 as an
-                # available arch for an el because that would imply each repo
-                # gets an i686 repo. However, being able to set "arch" to i686
-                # should be possible, thus avoiding this block altogether.
-                # "available_arches" in the configuration isn't meant to be a
-                # restriction here, but mainly a restriction in the lorax
-                # process (which isn't done here)
-                if 'x86_64' in a and 'all' in r and self.multilib:
-                    i686_os_sync_path = os.path.join(
-                        sync_root,
-                        repo_name,
-                        a,
-                        'os'
-                    )
-
-                    i686_sync_cmd = "{} -c {} --download-metadata --repoid={} -p {} --forcearch {} --norepopath".format(
-                        cmd,
-                        self.dnf_config,
-                        r,
-                        i686_os_sync_path,
-                        'i686'
-                    )
-
-                    self.log.info('Syncing {} {}'.format(r, 'i686'))
-                    process_i686 = subprocess.call(
-                        shlex.split(i686_sync_cmd),
-                        stdout=subprocess.DEVNULL,
-                        stderr=subprocess.DEVNULL
-                    )
-
-                if not self.ignore_source:
-                    source_sync_path = os.path.join(
-                        sync_root,
-                        repo_name,
-                        'source/tree'
-                    )
-
-                    source_sync_cmd = "{} -c {} --download-metadata --repoid={}-source -p {} --norepopath".format(
-                        cmd,
-                        self.dnf_config,
-                        r,
-                        source_sync_path
-                    )
-
-
-                    self.log.info('Syncing {} source'.format(r))
-                    process_source = subprocess.call(
-                        shlex.split(source_sync_cmd),
-                        stdout=subprocess.DEVNULL,
-                        stderr=subprocess.DEVNULL
-                    )
-
-        self.log.info('Syncing complete')
-
+        self.log.error('DNF syncing has been removed.')
+        self.log.error('Please install podman and enable parallel')
+        raise SystemExit()
 
     def podman_sync(self, repo, sync_root, work_root, log_root, arch):
         """
@@ -654,26 +513,32 @@ class RepoSync:
             raise SystemExit(Color.BOLD + "Local file syncs are not "
                 "supported." + Color.END)
 
+        prehashed = ''
+        if self.hashed:
+            prehashed = "hashed-"
         # create dest_path
         if not os.path.exists(dest_path):
             os.makedirs(dest_path, exist_ok=True)
         config_file = open(fname, "w+")
         for repo in self.repos:
-            constructed_url = '{}/{}/repo/{}/$basearch'.format(
+            constructed_url = '{}/{}/repo/{}{}/$basearch'.format(
                 self.repo_base_url,
                 self.project_id,
+                prehashed,
                 repo,
             )
 
-            constructed_url_debug = '{}/{}/repo/{}/$basearch-debug'.format(
+            constructed_url_debug = '{}/{}/repo/{}{}/$basearch-debug'.format(
                 self.repo_base_url,
                 self.project_id,
+                prehashed,
                 repo,
            )
 
-            constructed_url_src = '{}/{}/repo/{}/src'.format(
+            constructed_url_src = '{}/{}/repo/{}{}/src'.format(
                 self.repo_base_url,
                 self.project_id,
+                prehashed,
                 repo,
            )
 
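Note: a quick sketch of what the new prehashed segment does to the constructed baseurl. The repo_base_url and project_id values below are made-up placeholders, not real endpoints:

    repo_base_url = 'https://example.invalid/peridot'
    project_id = 'f91da90d-5bdb-4cf2-80ea-e07f8dae5a5c'
    repo = 'cloud-kernel'

    for prehashed in ('', 'hashed-'):
        constructed_url = '{}/{}/repo/{}{}/$basearch'.format(
            repo_base_url, project_id, prehashed, repo
        )
        print(constructed_url)

    # without --hashed: .../repo/cloud-kernel/$basearch
    # with --hashed:    .../repo/hashed-cloud-kernel/$basearch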
@@ -910,3 +775,98 @@ class SigRepoSync:
     This helps us do reposync operations for SIG's. Do not use this for the
     base system. Use RepoSync for that.
     """
+    def __init__(
+            self,
+            rlvars,
+            config,
+            sigvars,
+            major,
+            repo=None,
+            arch=None,
+            ignore_source: bool = False,
+            repoclosure: bool = False,
+            skip_all: bool = False,
+            hashed: bool = False,
+            parallel: bool = False,
+            dryrun: bool = False,
+            fullrun: bool = False,
+            nofail: bool = False,
+            logger=None
+    ):
+        self.nofail = nofail
+        self.dryrun = dryrun
+        self.fullrun = fullrun
+        self.arch = arch
+        self.ignore_source = ignore_source
+        self.skip_all = skip_all
+        self.hashed = hashed
+        self.repoclosure = repoclosure
+        # Enables podman syncing, which should effectively speed up operations
+        self.parallel = parallel
+        # Relevant config items
+        self.major_version = major
+        self.date_stamp = config['date_stamp']
+        self.repo_base_url = config['repo_base_url']
+        self.compose_root = config['compose_root']
+        self.compose_base = config['compose_root'] + "/" + major
+
+        # Relevant major version items
+        self.sigvars = sigvars
+        self.sigrepos = sigvars.keys()
+        #self.arches = sigvars['allowed_arches']
+        #self.project_id = sigvars['project_id']
+        self.sigrepo = repo
+
+        # each el can have its own designated container to run stuff in,
+        # otherwise we'll just default to the default config.
+        self.container = config['container']
+        if 'container' in rlvars and len(rlvars['container']) > 0:
+            self.container = rlvars['container']
+
+        if 'repoclosure_map' in rlvars and len(rlvars['repoclosure_map']) > 0:
+            self.repoclosure_map = rlvars['repoclosure_map']
+
+        self.staging_dir = os.path.join(
+                config['staging_root'],
+                config['sig_category_stub'],
+                major
+        )
+
+        self.compose_latest_dir = os.path.join(
+                config['compose_root'],
+                major,
+                "latest-Rocky-{}-SIG".format(major)
+        )
+
+        self.compose_latest_sync = os.path.join(
+                self.compose_latest_dir,
+                "compose"
+        )
+
+        self.compose_log_dir = os.path.join(
+                self.compose_latest_dir,
+                "work/logs"
+        )
+
+        # This is temporary for now.
+        if logger is None:
+            self.log = logging.getLogger("sigreposync")
+            self.log.setLevel(logging.INFO)
+            handler = logging.StreamHandler(sys.stdout)
+            handler.setLevel(logging.INFO)
+            formatter = logging.Formatter(
+                    '%(asctime)s :: %(name)s :: %(message)s',
+                    '%Y-%m-%d %H:%M:%S'
+            )
+            handler.setFormatter(formatter)
+            self.log.addHandler(handler)
+
+        self.log.info('sig reposync init')
+        self.log.info(major)
+        #self.dnf_config = self.generate_conf()
+
+    def run(self):
+        """
+        This runs the sig sync.
+        """
+        pass