Start work on 0.4.0

* Start SIG deployment testing (a directory-layout sketch follows below)
* Add additional SIGs: desktop, virt
* Rename altarch repos to match the repo names in the release package
Louis Abel 2022-07-20 16:17:19 -07:00
parent 65a4ec93c3
commit 8d77a63a6a
Signed by untrusted user: label
GPG Key ID: B37E62D143879B36
14 changed files with 277 additions and 90 deletions
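
For orientation before the per-file diffs: the following is a minimal standalone sketch (not empanadas code) of the on-disk layout a SIG full run now produces, condensed from the SigRepoSync.run() additions and the new Shared.symlink_to_latest helper further down. The generated directory's exact name, the timestamp format, and the example paths are assumptions; the 'SIG-' prefix, the work/ and compose/ subdirectories, and the latest-SIG-<profile>-<major> symlink come from the diff itself.

import os
from datetime import datetime

def sketch_sig_compose_layout(compose_base, sigprofile, major, fullversion):
    shortname = 'SIG-' + sigprofile                      # e.g. SIG-virt
    date_stamp = datetime.utcnow().strftime('%Y%m%d.%H%M%S')
    # Assumed naming; Shared.generate_compose_dirs builds the real path
    generated_dir = os.path.join(
        compose_base, '{}-{}-{}'.format(shortname, fullversion, date_stamp)
    )
    work_root = os.path.join(generated_dir, 'work')      # logs/ and global/ live here
    sync_root = os.path.join(generated_dir, 'compose')   # per-repo trees land here
    for path in (work_root, sync_root):
        os.makedirs(path, exist_ok=True)
    # Full runs repoint the "latest" symlink, mirroring Shared.symlink_to_latest
    latest_link = os.path.join(
        compose_base, 'latest-SIG-{}-{}'.format(sigprofile, major)
    )
    if os.path.islink(latest_link):
        os.remove(latest_link)
    os.symlink(generated_dir, latest_link)
    return generated_dir

# Hypothetical usage: sketch_sig_compose_layout('/mnt/compose/8', 'virt', '8', '8.6')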

View File

@@ -1 +1 @@
__version__ = '0.2.0'
__version__ = '0.4.0'

View File

@@ -7,6 +7,7 @@
minor: '6'
profile: '8'
bugurl: 'https://bugs.rockylinux.org'
checksum: 'sha256'
allowed_arches:
- x86_64
- aarch64

View File

@@ -14,7 +14,8 @@ parser = argparse.ArgumentParser(description="Peridot Sync and Compose")
# All of our options
parser.add_argument('--release', type=str, help="Major Release Version or major-type (eg 9-beta)", required=True)
parser.add_argument('--symlink', action='store_true', help="symlink")
parser.add_argument('--sig', type=str, help="SIG Name if applicable")
parser.add_argument('--symlink', action='store_true', help="symlink to latest")
parser.add_argument('--logger', type=str)
# Parse them
@@ -48,6 +49,9 @@ def run():
profile = rlvars['profile']
logger = log
if results.sig is not None:
shortname = 'SIG-' + results.sig
generated_dir = Shared.generate_compose_dirs(
compose_base,
shortname,

View File

@@ -9,7 +9,7 @@ from empanadas.util import IsoBuild
parser = argparse.ArgumentParser(description="ISO Artifact Builder")
parser.add_argument('--release', type=str, help="Major Release Version", required=True)
parser.add_argument('--s3', action='store_true', help="Release Candidate")
parser.add_argument('--s3', action='store_true', help="S3")
parser.add_argument('--arch', type=str, help="Architecture")
parser.add_argument('--local-compose', action='store_true', help="Compose Directory is Here")
parser.add_argument('--force-download', action='store_true', help="Force a download")

View File

@@ -9,7 +9,7 @@ from empanadas.util import IsoBuild
parser = argparse.ArgumentParser(description="ISO Artifact Builder")
parser.add_argument('--release', type=str, help="Major Release Version", required=True)
parser.add_argument('--s3', action='store_true', help="Release Candidate")
parser.add_argument('--s3', action='store_true', help="S3")
parser.add_argument('--rc', action='store_true', help="Release Candidate")
parser.add_argument('--arch', type=str, help="Architecture")
parser.add_argument('--local-compose', action='store_true', help="Compose Directory is Here")

View File

@@ -15,7 +15,7 @@ parser = argparse.ArgumentParser(description="Peridot Sync and Compose")
# All of our options
parser.add_argument('--release', type=str, help="Major Release Version", required=True)
parser.add_argument('--repo', type=str, help="Repository name")
parser.add_argument('--sig', type=str, help="SIG name")
parser.add_argument('--sig', type=str, help="SIG name", required=True)
parser.add_argument('--arch', type=str, help="Architecture")
parser.add_argument('--ignore-debug', action='store_true')
parser.add_argument('--ignore-source', action='store_true')
@@ -25,6 +25,7 @@ parser.add_argument('--hashed', action='store_true')
parser.add_argument('--dry-run', action='store_true')
parser.add_argument('--full-run', action='store_true')
parser.add_argument('--no-fail', action='store_true')
parser.add_argument('--refresh-extra-files', action='store_true')
# I am aware this is confusing, I want podman to be the default option
parser.add_argument('--simple', action='store_false')
parser.add_argument('--logger', type=str)
@@ -46,6 +47,7 @@ a = SigRepoSync(
repo=results.repo,
arch=results.arch,
ignore_source=results.ignore_source,
ignore_debug=results.ignore_debug,
repoclosure=results.repoclosure,
skip_all=results.skip_all,
hashed=results.hashed,

View File

@@ -1,12 +1,13 @@
---
altarch:
'8':
profile: 'altarch'
project_id: ''
repo:
common:
altarch-common:
allowed_arches:
- aarch64
rockyrpi:
altarch-rockyrpi:
allowed_arches:
- aarch64
additional_dirs:
@@ -20,12 +21,13 @@ altarch:
list:
- 'SOURCES/RPM-GPG-KEY-Rocky-SIG-AltArch'
'9':
profile: 'altarch'
project_id: ''
repo:
common:
altarch-common:
allowed_arches:
- aarch64
rockyrpi:
altarch-rockyrpi:
allowed_arches:
- aarch64
additional_dirs:

View File

@@ -0,0 +1,37 @@
---
desktop:
'8':
profile: 'desktop'
project_id: '8b3c9b53-0633-47bd-98a3-1ca3ec141278'
additional_dirs: []
repo:
desktop-common:
allowed_arches:
- x86_64
- aarch64
extra_files:
git_repo: 'https://git.rockylinux.org/staging/src/rocky-release-desktop.git'
git_raw_path: 'https://git.rockylinux.org/staging/src/rocky-release-desktop/-/raw/r8/'
branch: 'r8'
gpg:
stable: 'SOURCES/RPM-GPG-KEY-Rocky-SIG-Desktop'
list:
- 'SOURCES/RPM-GPG-KEY-Rocky-SIG-Desktop'
'9':
profile: 'desktop'
project_id: 'b0460c25-22cf-496c-a3a3-067b9a2af14a'
additional_dirs: []
repo:
desktop-common:
allowed_arches:
- x86_64
- aarch64
extra_files:
git_repo: 'https://git.rockylinux.org/staging/src/rocky-release-desktop.git'
git_raw_path: 'https://git.rockylinux.org/staging/src/rocky-release-desktop/-/raw/r9/'
branch: 'r9'
gpg:
stable: 'SOURCES/RPM-GPG-KEY-Rocky-SIG-Desktop'
list:
- 'SOURCES/RPM-GPG-KEY-Rocky-SIG-Desktop'
...

View File

@@ -0,0 +1,38 @@
---
virt:
'8':
profile: 'virt'
project_id: 'd911867a-658e-4f41-8343-5ceac6c41f67'
additional_dirs: []
repo:
virt-common:
allowed_arches:
- x86_64
- aarch64
extra_files:
git_repo: 'https://git.rockylinux.org/staging/src/rocky-release-virt.git'
git_raw_path: 'https://git.rockylinux.org/staging/src/rocky-release-virt/-/raw/r8/'
branch: 'r8'
gpg:
stable: 'SOURCES/RPM-GPG-KEY-Rocky-SIG-Virt'
list:
- 'SOURCES/RPM-GPG-KEY-Rocky-SIG-Virt'
'9':
profile: 'virt'
project_id: '925ceece-47ce-4f51-90f7-ff8689e4fe5e'
additional_dirs: []
repo:
virt-common:
allowed_arches:
- x86_64
- aarch64
- ppc64le
extra_files:
git_repo: 'https://git.rockylinux.org/staging/src/rocky-release-virt.git'
git_raw_path: 'https://git.rockylinux.org/staging/src/rocky-release-virt/-/raw/r9/'
branch: 'r9'
gpg:
stable: 'SOURCES/RPM-GPG-KEY-Rocky-SIG-Virt'
list:
- 'SOURCES/RPM-GPG-KEY-Rocky-SIG-Virt'
...
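
For reference, a short standalone sketch of how a SIG definition like the one above can be read, mirroring the keys SigRepoSync consumes later in this diff (repo, extra_files, project_id). The on-disk path sig/virt.yml and the loading code are assumptions, not empanadas code.

import yaml

# Assumed location of the config added above
with open('sig/virt.yml', 'r', encoding='utf-8') as f:
    sig_config = yaml.safe_load(f)

sigvars = sig_config['virt']['9']          # per-SIG, per-major-version block
sig_repos = list(sigvars['repo'].keys())   # e.g. ['virt-common']
extra_files = sigvars['extra_files']       # git repo, branch, and GPG key list
project_id = sigvars['project_id']         # Peridot project to sync from

print(sig_repos, extra_files['branch'], project_id)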

View File

@@ -33,3 +33,4 @@ any of the following:
https://keys.openpgp.org/
https://keyserver.ubuntu.com

View File

@@ -123,12 +123,6 @@ class RepoSync:
if 'repoclosure_map' in rlvars and len(rlvars['repoclosure_map']) > 0:
self.repoclosure_map = rlvars['repoclosure_map']
self.staging_dir = os.path.join(
config['staging_root'],
config['category_stub'],
self.revision
)
self.compose_latest_dir = os.path.join(
config['compose_root'],
major,
@@ -261,7 +255,7 @@ class RepoSync:
self.sync(self.repo, sync_root, work_root, log_root, global_work_root, self.arch)
if self.fullrun:
self.deploy_extra_files(sync_root, global_work_root)
Shared.deploy_extra_files(self.extra_files, sync_root, global_work_root, self.log)
self.deploy_treeinfo(self.repo, sync_root, self.arch)
self.tweak_treeinfo(self.repo, sync_root, self.arch)
self.symlink_to_latest(generated_dir)
@@ -270,7 +264,7 @@ class RepoSync:
self.repoclosure_work(sync_root, work_root, log_root)
if self.refresh_extra_files and not self.fullrun:
self.deploy_extra_files(sync_root, global_work_root)
Shared.deploy_extra_files(self.extra_files, sync_root, global_work_root, self.log)
# deploy_treeinfo does NOT overwrite any treeinfo files. However,
# tweak_treeinfo calls out to a method that does. This should not
@@ -601,7 +595,7 @@ class RepoSync:
os.chmod(source_entry_point_sh, 0o755)
# Spawn up all podman processes for repo
self.log.info('Starting podman processes for %s ...' % r)
self.log.info(Color.INFO + 'Starting podman processes for %s ...' % r)
#print(entry_name_list)
for pod in entry_name_list:
@@ -861,66 +855,6 @@ class RepoSync:
for issue in bad_exit_list:
self.log.error(issue)
def deploy_extra_files(self, sync_root, global_work_root):
"""
deploys extra files based on info of rlvars including a
extra_files.json
might also deploy COMPOSE_ID and maybe in the future a metadata dir with
a bunch of compose-esque stuff.
"""
self.log.info(Color.INFO + 'Deploying treeinfo, discinfo, and media.repo')
cmd = Shared.git_cmd(self.log)
tmpclone = '/tmp/clone'
extra_files_dir = os.path.join(
global_work_root,
'extra-files'
)
metadata_dir = os.path.join(
sync_root,
"metadata"
)
if not os.path.exists(extra_files_dir):
os.makedirs(extra_files_dir, exist_ok=True)
if not os.path.exists(metadata_dir):
os.makedirs(metadata_dir, exist_ok=True)
clonecmd = '{} clone {} -b {} -q {}'.format(
cmd,
self.extra_files['git_repo'],
self.extra_files['branch'],
tmpclone
)
git_clone = subprocess.call(
shlex.split(clonecmd),
stdout=subprocess.DEVNULL,
stderr=subprocess.DEVNULL
)
self.log.info(Color.INFO + 'Deploying extra files to work and metadata directories ...')
# Copy files to work root
for extra in self.extra_files['list']:
src = '/tmp/clone/' + extra
# Copy extra files to root of compose here also - The extra files
# are meant to be picked up by our ISO creation process and also
# exist on our mirrors.
try:
shutil.copy2(src, extra_files_dir)
shutil.copy2(src, metadata_dir)
except:
self.log.warn(Color.WARN + 'Extra file not copied: ' + src)
try:
shutil.rmtree(tmpclone)
except OSError as e:
self.log.error(Color.FAIL + 'Directory ' + tmpclone +
' could not be removed: ' + e.strerror
)
def deploy_metadata(self, sync_root):
"""
Deploys metadata that defines information about the compose. Some data
@@ -1524,7 +1458,7 @@ class RepoSync:
lp.close()
images_arch_root = os.path.join(sync_images_root, arch)
images_arch_checksum = os.path.join(sync_images_root, 'CHECKSUM')
images_arch_checksum = os.path.join(images_arch_root, 'CHECKSUM')
if os.path.exists(images_arch_root):
with open(images_arch_checksum, 'w+', encoding='utf-8') as ip:
for icheck in glob.iglob(images_arch_root + '/*.CHECKSUM'):
@@ -1560,6 +1494,7 @@ class SigRepoSync:
dryrun: bool = False,
fullrun: bool = False,
nofail: bool = False,
gpgkey: str = 'stable',
logger=None
):
self.nofail = nofail
@@ -1587,13 +1522,19 @@ class SigRepoSync:
self.distname = config['distname']
self.fullname = rlvars['fullname']
self.shortname = config['shortname']
self.fullversion = rlvars['revision']
self.sigrepo = repo
self.checksum = rlvars['checksum']
# Relevant major version items
self.sigvars = sigvars
self.sigrepos = sigvars.keys()
self.sigrepos = sigvars['repo'].keys()
self.extra_files = sigvars['extra_files']
self.gpgkey = gpgkey
#self.arches = sigvars['allowed_arches']
#self.project_id = sigvars['project_id']
self.sigrepo = repo
self.project_id = sigvars['project_id']
if 'additional_dirs' in sigvars:
self.additional_dirs = sigvars['additional_dirs']
# Templates
file_loader = FileSystemLoader(f"{_rootdir}/templates")
@@ -1617,10 +1558,9 @@ class SigRepoSync:
self.compose_latest_dir = os.path.join(
config['compose_root'],
major,
"latest-{}-{}-SIG-{}".format(
self.shortname,
"latest-SIG-{}-{}".format(
self.sigprofile,
major,
self.sigprofile
)
)
@@ -1654,10 +1594,94 @@ class SigRepoSync:
self.log.info('sig reposync init')
self.log.info(major)
#self.dnf_config = Shared.generate_conf()
# The repo name should be valid
if self.sigrepo is not None:
if self.sigrepo not in self.sigrepos:
self.log.error(
Color.FAIL +
'Invalid SIG repository: ' +
self.sigprofile +
' ' +
self.sigrepo
)
def run(self):
"""
This runs the sig sync.
"""
pass
if self.fullrun and self.sigrepo:
self.log.error('WARNING: repo ignored when doing a full sync')
if self.fullrun and self.dryrun:
self.log.error('A full and dry run is currently not supported.')
raise SystemExit('\nA full and dry run is currently not supported.')
# This should create the initial compose dir and set the path.
# Otherwise, just use the latest link.
if self.fullrun:
simplename = 'SIG-' + self.sigprofile
generated_dir = Shared.generate_compose_dirs(
self.compose_base,
simplename,
self.fullversion,
self.date_stamp,
self.log
)
work_root = os.path.join(
generated_dir,
'work'
)
sync_root = os.path.join(
generated_dir,
'compose'
)
else:
# Put in a verification here.
work_root = os.path.join(
self.compose_latest_dir,
'work'
)
sync_root = self.compose_latest_sync
# Verify if the link even exists
if not os.path.exists(self.compose_latest_dir):
self.log.error('!! Latest compose link is broken or does not exist: %s' % self.compose_latest_dir)
self.log.error('!! Please perform a full run if you have not done so.')
raise SystemExit()
log_root = os.path.join(
work_root,
"logs",
self.date_stamp
)
global_work_root = os.path.join(
work_root,
"global",
)
sig_sync_root = os.path.join(
sync_root,
self.sigprofile
)
# dnf config here
if self.dryrun:
self.log.error('Dry Runs are not supported just yet. Sorry!')
raise SystemExit()
if self.fullrun and self.refresh_extra_files:
self.log.warn(Color.WARN + 'A full run implies extra files are also deployed.')
#self.sync(self.repo, sync_root, work_root, log_root, global_work_root, self.arch)
if self.fullrun:
Shared.deploy_extra_files(self.extra_files, sig_sync_root, global_work_root, self.log)
Shared.symlink_to_latest(simplename, self.major_version,
generated_dir, self.compose_latest_dir, self.log)
print()
if self.refresh_extra_files and not self.fullrun:
Shared.deploy_extra_files(self.extra_files, sig_sync_root, global_work_root, self.log)
print()

View File

@@ -5,6 +5,7 @@ import json
import hashlib
import shlex
import subprocess
import shutil
import yaml
import requests
import boto3
@@ -955,7 +956,7 @@ class Shared:
Write compose info similar to pungi.
arches and repos may be better suited for a dictionary. that is a
future thing we will work on for 0.3.0.
future thing we will work on for 0.5.0.
"""
cijson = file_path + '.json'
ciyaml = file_path + '.yaml'
@@ -979,3 +980,80 @@ class Shared:
with open(ciyaml, 'w+') as ymdump:
yaml.dump(jsonData, ymdump)
ymdump.close()
@staticmethod
def symlink_to_latest(shortname, major_version, generated_dir, compose_latest_dir, logger):
"""
Emulates pungi and symlinks latest-Rocky-X
This link will be what is updated in full runs. Whatever is in this
'latest' directory is what is rsynced on to staging after completion.
This link should not change often.
"""
try:
os.remove(compose_latest_dir)
except:
pass
logger.info('Symlinking to latest-{}-{}...'.format(shortname, major_version))
os.symlink(generated_dir, compose_latest_dir)
@staticmethod
def deploy_extra_files(extra_files, sync_root, global_work_root, logger):
"""
deploys extra files based on info from rlvars, including an
extra_files.json
might also deploy COMPOSE_ID and maybe in the future a metadata dir with
a bunch of compose-esque stuff.
"""
logger.info(Color.INFO + 'Deploying treeinfo, discinfo, and media.repo')
cmd = Shared.git_cmd(logger)
tmpclone = '/tmp/clone'
extra_files_dir = os.path.join(
global_work_root,
'extra-files'
)
metadata_dir = os.path.join(
sync_root,
"metadata"
)
if not os.path.exists(extra_files_dir):
os.makedirs(extra_files_dir, exist_ok=True)
if not os.path.exists(metadata_dir):
os.makedirs(metadata_dir, exist_ok=True)
clonecmd = '{} clone {} -b {} -q {}'.format(
cmd,
extra_files['git_repo'],
extra_files['branch'],
tmpclone
)
git_clone = subprocess.call(
shlex.split(clonecmd),
stdout=subprocess.DEVNULL,
stderr=subprocess.DEVNULL
)
logger.info(Color.INFO + 'Deploying extra files to work and metadata directories ...')
# Copy files to work root
for extra in extra_files['list']:
src = '/tmp/clone/' + extra
# Copy extra files to root of compose here also - The extra files
# are meant to be picked up by our ISO creation process and also
# exist on our mirrors.
try:
shutil.copy2(src, extra_files_dir)
shutil.copy2(src, metadata_dir)
except:
logger.warn(Color.WARN + 'Extra file not copied: ' + src)
try:
shutil.rmtree(tmpclone)
except OSError as e:
logger.error(Color.FAIL + 'Directory ' + tmpclone +
' could not be removed: ' + e.strerror
)

View File

@@ -1,6 +1,6 @@
[tool.poetry]
name = "empanadas"
version = "0.3.0"
version = "0.4.0"
description = "hand crafted ISOs with love and spice"
authors = ["Louis Abel <label@rockylinux.org>", "Neil Hanlon <neil@rockylinux.org>"]

View File

@@ -2,4 +2,4 @@ from empanadas import __version__
def test_version():
assert __version__ == '0.2.0'
assert __version__ == '0.4.0'