forked from sig_core/toolkit
move generate confs and add testing script
parent 7e04dd9580
commit 04f2c3ab77
iso/empanadas/empanadas/configs/el9alt.yaml (new file, 57 lines)
@@ -0,0 +1,57 @@
# This is specifically for secondary/tertiary architectures
---
'9altarch':
  fullname: 'Rocky Linux 9.0'
  revision: '9.0'
  rclvl: 'RC2'
  major: '9'
  minor: '0'
  profile: '9'
  bugurl: 'https://bugs.rockylinux.org'
  checksum: 'sha256'
  allowed_arches:
    - armhfp
    - riscv64
  provide_multilib: False
  project_id: ''
  renames:
    all: 'devel'
  all_repos:
    - 'all'
    - 'BaseOS'
    - 'AppStream'
    - 'CRB'
    - 'extras'
    - 'plus'
  structure:
    packages: 'os/Packages'
    repodata: 'os/repodata'
  iso_map: {}
  livemap: {}
  repoclosure_map:
    arches:
      armhfp: '--forcearch=armhfp --arch=noarch'
      riscv64: '--forcearch=riscv64 --arch=noarch'
    repos:
      devel: []
      BaseOS: []
      AppStream:
        - BaseOS
      CRB:
        - BaseOS
        - AppStream
  extra_files:
    git_repo: 'https://git.rockylinux.org/staging/src/rocky-release.git'
    git_raw_path: 'https://git.rockylinux.org/staging/src/rocky-release/-/raw/r9/'
    branch: 'r9'
    gpg:
      stable: 'SOURCES/RPM-GPG-KEY-Rocky-9'
      testing: 'SOURCES/RPM-GPG-KEY-Rocky-9-Testing'
    list:
      - 'SOURCES/Contributors'
      - 'SOURCES/COMMUNITY-CHARTER'
      - 'SOURCES/EULA'
      - 'SOURCES/LICENSE'
      - 'SOURCES/RPM-GPG-KEY-Rocky-9'
      - 'SOURCES/RPM-GPG-KEY-Rocky-9-Testing'
...
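For orientation, a minimal sketch of reading the new config; PyYAML, the relative path, and the variable names are assumptions for illustration, not part of this commit.

# Illustrative only: load the altarch profile added above and inspect it.
import yaml

with open('iso/empanadas/empanadas/configs/el9alt.yaml') as f:
    config = yaml.safe_load(f)

rlvars = config['9altarch']
print(rlvars['fullname'])        # Rocky Linux 9.0
print(rlvars['allowed_arches'])  # ['armhfp', 'riscv64']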
@@ -2,8 +2,8 @@
cloud:
  '8':
    profile: 'cloud'
    cloud-kernel:
      project_id: 'f91da90d-5bdb-4cf2-80ea-e07f8dae5a5c'
    cloud-kernel:
      allowed_arches:
        - aarch64
        - x86_64
@@ -11,7 +11,6 @@ cloud:
      allowed_arches:
        - aarch64
        - x86_64
      project_id: 'f91da90d-5bdb-4cf2-80ea-e07f8dae5a5c'
    extra_files:
      git_repo: 'https://git.rockylinux.org/staging/src/rocky-release-cloud.git'
      git_raw_path: 'https://git.rockylinux.org/staging/src/rocky-release-cloud/-/raw/r8/'
@@ -21,15 +20,15 @@ cloud:
      list:
        - 'SOURCES/RPM-GPG-KEY-Rocky-SIG-Cloud'
  '9':
    cloud-kernel:
      profile: 'cloud'
      project_id: ''
    cloud-kernel:
      allowed_arches:
        - aarch64
        - x86_64
        - ppc64le
        - s390x
    cloud-common:
      project_id: ''
      allowed_arches:
        - aarch64
        - x86_64
@@ -58,6 +58,7 @@ class RepoSync:
            nofail: bool = False,
            gpgkey: str = 'stable',
            rlmode: str = 'stable',
            just_pull_everything: bool = False,
            logger=None
    ):
        self.nofail = nofail
@@ -73,6 +74,9 @@ class RepoSync:
        self.refresh_treeinfo = refresh_treeinfo
        # Enables podman syncing, which should effectively speed up operations
        self.parallel = parallel
        # This makes it so every repo is synced at the same time.
        # This is EXTREMELY dangerous.
        self.just_pull_everything = just_pull_everything
        # Relevant config items
        self.major_version = major
        self.date_stamp = config['date_stamp']
@@ -234,8 +238,18 @@ class RepoSync:
                "global",
        )

        #self.dnf_config = self.generate_conf(dest_path=global_work_root)
        self.dnf_config = self.generate_conf()
        self.dnf_config = Shared.generate_conf(
                self.shortname,
                self.major_version,
                self.repos,
                self.repo_base_url,
                self.project_id,
                self.hashed,
                self.extra_files,
                self.gpgkey,
                self.tmplenv,
                self.log
        )

        if self.dryrun:
            self.log.error('Dry Runs are not supported just yet. Sorry!')
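In the hunk above, the commented-out dest_path call and the bare self.generate_conf() call are the lines being retired; the Shared.generate_conf(...) block replaces them, handing the relevant RepoSync attributes (short name, major version, repo list, base URL, project ID, hashing flag, extra files, GPG key selection, Jinja2 environment, and logger) to the shared static helper, which still defaults dest_path to /var/tmp.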
@@ -346,13 +360,10 @@ class RepoSync:
            if r in self.repo_renames:
                repo_name = self.repo_renames[r]

            # Sync all if arch is x86_64 and multilib is true
            if 'all' in r and 'x86_64' in arches_to_sync and self.multilib:
                arch_sync.append('i686')

            # There should be a check here that if it's "all" and multilib
            # is on, i686 should get synced too.

            for a in arch_sync:
                entry_name = '{}-{}'.format(r, a)
                debug_entry_name = '{}-debug-{}'.format(r, a)
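The following hunk drops RepoSync.generate_conf() entirely; the same logic reappears further down as the parameterized static method Shared.generate_conf().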
@@ -658,70 +669,6 @@ class RepoSync:
        self.log.info('Symlinking to latest-{}-{}...'.format(self.shortname, self.major_version))
        os.symlink(generated_dir, self.compose_latest_dir)

    def generate_conf(self, dest_path='/var/tmp') -> str:
        """
        Generates the necessary repo conf file for the operation. This repo
        file should be temporary in nature. This will generate a repo file
        with all repos by default. If a repo is chosen for sync, that will be
        the only one synced.

        :param dest_path: The destination where the temporary conf goes
        :param repo: The repo object to create a file for
        """
        fname = os.path.join(
                dest_path,
                "{}-{}-config.repo".format(self.shortname, self.major_version)
        )
        pname = os.path.join(
                '/var/tmp',
                "{}-{}-config.repo".format(self.shortname, self.major_version)
        )
        self.log.info('Generating the repo configuration: %s' % fname)

        if self.repo_base_url.startswith("/"):
            self.log.error("Local file syncs are not supported.")
            raise SystemExit(Color.BOLD + "Local file syncs are not "
                    "supported." + Color.END)

        prehashed = ''
        if self.hashed:
            prehashed = "hashed-"
        # create dest_path
        if not os.path.exists(dest_path):
            os.makedirs(dest_path, exist_ok=True)
        config_file = open(fname, "w+")
        repolist = []
        for repo in self.repos:
            constructed_url = '{}/{}/repo/{}{}/$basearch'.format(
                    self.repo_base_url,
                    self.project_id,
                    prehashed,
                    repo,
            )

            constructed_url_src = '{}/{}/repo/{}{}/src'.format(
                    self.repo_base_url,
                    self.project_id,
                    prehashed,
                    repo,
            )

            repodata = {
                'name': repo,
                'baseurl': constructed_url,
                'srcbaseurl': constructed_url_src,
                'gpgkey': self.extra_files['git_raw_path'] + self.extra_files['gpg'][self.gpgkey]
            }
            repolist.append(repodata)

        template = self.tmplenv.get_template('repoconfig.tmpl')
        output = template.render(repos=repolist)
        config_file.write(output)

        config_file.close()
        #return (fname, pname)
        return fname

    def repoclosure_work(self, sync_root, work_root, log_root):
        """
        This is where we run repoclosures, based on the configuration of each
@@ -401,7 +401,19 @@ class Shared:
        return cmd

    @staticmethod
    def generate_conf(data, logger, dest_path='/var/tmp') -> str:
    def generate_conf(
            shortname,
            major_version,
            repos,
            repo_base_url,
            project_id,
            hashed,
            extra_files,
            gpgkey,
            templates,
            logger,
            dest_path='/var/tmp'
    ) -> str:
        """
        Generates the necessary repo conf file for the operation. This repo
        file should be temporary in nature. This will generate a repo file
@@ -413,35 +425,35 @@ class Shared:
        """
        fname = os.path.join(
                dest_path,
                "{}-{}-config.repo".format(data.shortname, data.major_version)
                "{}-{}-config.repo".format(shortname, major_version)
        )
        data.log.info('Generating the repo configuration: %s' % fname)
        logger.info('Generating the repo configuration: %s' % fname)

        if data.repo_base_url.startswith("/"):
        if repo_base_url.startswith("/"):
            logger.error("Local file syncs are not supported.")
            raise SystemExit(Color.BOLD + "Local file syncs are not "
                    "supported." + Color.END)

        prehashed = ''
        if data.hashed:
        if hashed:
            prehashed = "hashed-"
        # create dest_path
        if not os.path.exists(dest_path):
            os.makedirs(dest_path, exist_ok=True)
        config_file = open(fname, "w+")
        repolist = []
        for repo in data.repos:
        for repo in repos:

            constructed_url = '{}/{}/repo/{}{}/$basearch'.format(
                    data.repo_base_url,
                    data.project_id,
                    repo_base_url,
                    project_id,
                    prehashed,
                    repo,
            )

            constructed_url_src = '{}/{}/repo/{}{}/src'.format(
                    data.repo_base_url,
                    data.project_id,
                    repo_base_url,
                    project_id,
                    prehashed,
                    repo,
            )
@@ -450,11 +462,11 @@ class Shared:
                'name': repo,
                'baseurl': constructed_url,
                'srcbaseurl': constructed_url_src,
                'gpgkey': data.extra_files['git_raw_path'] + data.extra_files['gpg'][data.gpgkey]
                'gpgkey': extra_files['git_raw_path'] + extra_files['gpg'][gpgkey]
            }
            repolist.append(repodata)

        template = data.tmplenv.get_template('repoconfig.tmpl')
        template = templates.get_template('repoconfig.tmpl')
        output = template.render(repos=repolist)
        config_file.write(output)

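A rough usage sketch of the relocated helper follows. The import path, template directory, and every argument value are assumptions for illustration; only the parameter order and the shape of extra_files (mirroring el9alt.yaml above) come from this commit.

# Illustrative only: call the static helper directly, e.g. from a scratch test.
import logging
from jinja2 import Environment, FileSystemLoader
from empanadas.util.shared import Shared  # assumed module path

log = logging.getLogger(__name__)
# assumed location of repoconfig.tmpl
tmplenv = Environment(loader=FileSystemLoader('iso/empanadas/empanadas/templates'))

extra_files = {
    'git_raw_path': 'https://git.rockylinux.org/staging/src/rocky-release/-/raw/r9/',
    'gpg': {'stable': 'SOURCES/RPM-GPG-KEY-Rocky-9'},
}

conf_path = Shared.generate_conf(
    'Rocky',                        # shortname
    '9',                            # major_version
    ['BaseOS', 'AppStream'],        # repos
    'https://example.com/peridot',  # repo_base_url (placeholder)
    'example-project-id',           # project_id (placeholder)
    False,                          # hashed -> no 'hashed-' prefix in the URLs
    extra_files,                    # provides git_raw_path and gpg key paths
    'stable',                       # gpgkey selector into extra_files['gpg']
    tmplenv,                        # 'templates': a jinja2 Environment
    log,
)
print(conf_path)                    # /var/tmp/Rocky-9-config.repo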
@@ -14,7 +14,7 @@ if [ $ret_val -eq "0" ]; then
    TARGET="${PRODUCTION_ROOT}/${CATEGORY_STUB}/${REV:0:3}"
    mkdir -p "${TARGET}"
    echo "Syncing ${REVISION}"
    sudo -l && time fpsync -n 24 -o '-av --numeric-ids --no-compress --chown=10004:10005' -t /mnt/compose/partitions "${STAGING_ROOT}/${CATEGORY_STUB}/${REV}/" "${TARGET}/"
    sudo -l && time fpsync -n 24 -o '-a --numeric-ids --no-compress --chown=10004:10005' -t /mnt/compose/partitions "${STAGING_ROOT}/${CATEGORY_STUB}/${REV}/" "${TARGET}/"

    # Full file list update for production root
    cd "${PRODUCTION_ROOT}/" || { echo "Failed to change directory"; exit 1; }

@@ -26,7 +26,7 @@ if [ $ret_val -eq "0" ]; then
    # shellcheck disable=SC2035
    #sudo -l && find **/* -maxdepth 0 -type d | parallel --will-cite -j 18 sudo rsync -av --chown=10004:10005 --progress --relative --human-readable \
    #  {} "${TARGET}"
    sudo -l && time fpsync -n 24 -o '-av --numeric-ids --no-compress --chown=10004:10005' -t /mnt/compose/partitions "/mnt/compose/${MAJ}/latest-${SHORT}-${MAJ}${PROFILE}/compose/" "${TARGET}/"
    sudo -l && time fpsync -n 24 -o '-a --numeric-ids --no-compress --chown=10004:10005' -t /mnt/compose/partitions "/mnt/compose/${MAJ}/latest-${SHORT}-${MAJ}${PROFILE}/compose/" "${TARGET}/"

    # This is temporary until we implement rsync into empanadas
    #if [ -f "COMPOSE_ID" ]; then
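The only change in both hunks above is the option string handed to fpsync for rsync: '-av' becomes '-a', dropping per-file verbose output while keeping archive mode alongside --numeric-ids, --no-compress, and the forced ownership.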
sync/sync-to-staging-fpsync-testing.sh (new file, 40 lines)
@@ -0,0 +1,40 @@
#!/bin/bash

# Source common variables
# shellcheck disable=SC2046,1091,1090
source "$(dirname "$0")/common"

if [[ $# -eq 0 ]]; then
  echo "You must specify a short name."
  exit 1
fi

# Major Version (eg, 8)
MAJ=${RLVER}
# Short name (eg, NFV, extras, Rocky, gluster9)
SHORT=${1}
PROFILE=${2}

cd "/mnt/compose/${MAJ}/latest-${SHORT}-${MAJ}${PROFILE}/compose" || { echo "Failed to change directory"; ret_val=1; exit 1; }
ret_val=$?

if [ $ret_val -eq "0" ]; then
  TARGET="${STAGING_ROOT}/${CATEGORY_STUB}/${REV}"
  mkdir -p "${TARGET}"
  # disabling because none of our files should be starting with dashes. If they
  # are something is *seriously* wrong here.
  # shellcheck disable=SC2035
  #sudo -l && find **/* -maxdepth 0 -type d | parallel --will-cite -j 18 sudo rsync -av --chown=10004:10005 --progress --relative --human-readable \
  #  {} "${TARGET}"
  sudo -l && time fpsync -n 18 -o '-a --numeric-ids --no-compress --chown=10004:10005' -t /mnt/compose/partitions "/mnt/compose/${MAJ}/latest-${SHORT}-${MAJ}${PROFILE}/compose/" "${TARGET}/"

  # This is temporary until we implement rsync into empanadas
  #if [ -f "COMPOSE_ID" ]; then
  #  cp COMPOSE_ID "${TARGET}"
  #  chown 10004:10005 "${TARGET}/COMPOSE_ID"
  #fi

  #if [ -d "metadata" ]; then
  #  rsync -av --chown=10004:10005 --progress --relative --human-readable metadata "${TARGET}"
  #fi
fi
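Usage is not documented in the commit, but from the variables above the script is presumably invoked with RLVER exported and the SIG short name (plus an optional profile) as arguments, for example: RLVER=9 ./sync/sync-to-staging-fpsync-testing.sh Rocky. STAGING_ROOT, CATEGORY_STUB, and REV are expected to come from the sourced common file.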