Expand and Generalize ISO Functions

* Utils is now empanadas.util.Shared
* Each config now has a profile name to determine latest-Rocky-{} link
  (Results RLBT#0000131)
* Check added to see if an ISO was built at some point, and forcefully
  exit if so
* Lorax tar ball should be in the format of lorax-X.Y-ARCH to ensure
  there's no collisions between stable, beta, and lh builds
This commit is contained in:
Louis Abel 2022-06-27 17:59:21 -07:00
parent 57d51dadc7
commit f4f29d97cc
Signed by: label
GPG key ID: B37E62D143879B36
17 changed files with 126 additions and 91 deletions

View file

@ -21,47 +21,6 @@ class Color:
BOLD = '\033[1m'
END = '\033[0m'
class Utils:
    """
    Quick utilities that may be commonly used
    """
    @staticmethod
    def get_checksum(path, hashtype, logger):
        """
        Generate a checksum string for the file at *path* using *hashtype*.

        The file is read in 8 KiB chunks so large files are never held in
        memory all at once.

        Returns the formatted checksum string (mimicking the sync scripts'
        ``stat`` + ``sha256sum`` style output), or ``False`` when the hash
        type is invalid or the file cannot be read.
        """
        try:
            checksum = hashlib.new(hashtype)
        except ValueError:
            logger.error("Invalid hash type: %s" % hashtype)
            return False

        try:
            # Context manager guarantees the handle is closed even if a
            # read fails partway through the file.
            with open(path, "rb") as input_file:
                while True:
                    chunk = input_file.read(8192)
                    if not chunk:
                        break
                    checksum.update(chunk)
        except IOError as e:
            logger.error("Could not open file %s: %s" % (path, e))
            return False

        stat = os.stat(path)
        base = os.path.basename(path)
        # This emulates our current syncing scripts that run stat and
        # sha256sum and what not with a very specific output.
        return "%s: %s bytes\n%s (%s) = %s\n" % (
            base,
            stat.st_size,
            hashtype.upper(),
            base,
            checksum.hexdigest()
        )
# vars and additional checks
rldict = {}
sigdict = {}

View file

@ -5,6 +5,7 @@
rclvl: 'RC2'
major: '8'
minor: '6'
profile: '8'
bugurl: 'https://bugs.rockylinux.org'
allowed_arches:
- x86_64

View file

@ -1,10 +1,11 @@
---
'9-beta':
fullname: 'Rocky Linux 9'
fullname: 'Rocky Linux 9.1'
revision: '9.1'
rclvl: 'BETA1'
major: '9'
minor: '1'
profile: '9-beta'
bugurl: 'https://bugs.rockylinux.org'
checksum: 'sha256'
allowed_arches:

View file

@ -1,10 +1,11 @@
---
'9':
fullname: 'Rocky Linux 9'
fullname: 'Rocky Linux 9.0'
revision: '9.0'
rclvl: 'RC1'
major: '9'
minor: '0'
profile: '9'
bugurl: 'https://bugs.rockylinux.org'
checksum: 'sha256'
allowed_arches:

View file

@ -1,10 +1,11 @@
---
'9-lookahead':
fullname: 'Rocky Linux 9'
fullname: 'Rocky Linux 9.1'
revision: '9.1'
rclvl: 'LH1'
major: '9'
minor: '1'
profile: '9-lookahead'
bugurl: 'https://bugs.rockylinux.org'
checksum: 'sha256'
allowed_arches:

View file

@ -2,9 +2,10 @@
'rln':
fullname: 'Rocky Linux New'
revision: '10'
rclvl: 'RC1'
rclvl: 'RLN120'
major: '10'
minor: '0'
profile: 'rln'
bugurl: 'https://bugs.rockylinux.org'
checksum: 'sha256'
allowed_arches:

View file

@ -8,7 +8,7 @@ from empanadas.util import IsoBuild
parser = argparse.ArgumentParser(description="ISO Compose")
parser.add_argument('--release', type=str, help="Major Release Version", required=True)
parser.add_argument('--release', type=str, help="Major Release Version or major-type (eg 9-beta)", required=True)
parser.add_argument('--isolation', type=str, help="mock isolation mode")
parser.add_argument('--rc', action='store_true', help="Release Candidate")
parser.add_argument('--local-compose', action='store_true', help="Compose Directory is Here")

View file

@ -8,7 +8,7 @@ from empanadas.util import IsoBuild
parser = argparse.ArgumentParser(description="ISO Compose")
parser.add_argument('--release', type=str, help="Major Release Version", required=True)
parser.add_argument('--release', type=str, help="Major Release Version or major-type (eg 9-beta)", required=True)
parser.add_argument('--rc', action='store_true', help="Release Candidate")
parser.add_argument('--arch', type=str, help="Architecture")
parser.add_argument('--isolation', type=str, help="Mock Isolation")
@ -30,7 +30,7 @@ a = IsoBuild(
extra_iso=results.extra_iso,
extra_iso_mode=results.extra_iso_mode,
compose_dir_is_here=results.local_compose,
logger=results.logger,
logger=results.logger
)
def run():

View file

@ -28,8 +28,9 @@ parser.add_argument('--logger', type=str)
# Parse them
results = parser.parse_args()
rlvars = rldict[results.release]
major = rlvars['major']
r = Checks(rlvars, config['arch'])
r.check_valid_arch()
@ -37,7 +38,7 @@ r.check_valid_arch()
a = RepoSync(
rlvars,
config,
major=rlvars['major'],
major=major,
repo=results.repo,
arch=results.arch,
ignore_debug=results.ignore_debug,

View file

@ -14,4 +14,5 @@ r.check_valid_arch()
a = RepoSync(rlvars, config, major="9", repo="BaseOS", parallel=True, ignore_debug=False, ignore_source=False, hashed=True)
def run():
a.run()
print(rlvars.keys())
print(rlvars)

View file

@ -5,6 +5,7 @@ set -ex
{{ lorax_pkg_cmd }}
mkdir -p {{ compose_work_iso_dir }}/{{ arch }}
cd {{ compose_work_iso_dir }}/{{ arch }}
test -f {{ isoname }} &amp;&amp; { echo "!! ISO ALREADY EXISTS !!"; exit 1; }
{% else %}
cd /builddir

View file

@ -7,8 +7,9 @@ VERSION="{{ revision }}"
PRODUCT="{{ distname }}"
MOCKBLD="{{ builddir }}"
LORAXRES="{{ lorax_work_root }}"
LORAX_TAR="lorax-{{ major }}-{{ arch }}.tar.gz"
LORAX_TAR="lorax-{{ revision }}-{{ arch }}.tar.gz"
LOGFILE="lorax-{{ arch }}.log"
BUGURL="{{ bugurl }}"
{% for pkg in lorax %}
sed -i '/{{ pkg }}/ s/^/#/' /usr/share/lorax/templates.d/80-rhel/runtime-install.tmpl
@ -23,6 +24,7 @@ lorax --product="${PRODUCT}" \
{%- for repo in repos %}
--source={{ repo.url }} \
{%- endfor %}
--bugurl="${BUGURL}" \
--variant="${VARIANT}" \
--nomacboot \
--buildarch="${ARCH}" \
@ -45,7 +47,7 @@ if [ -f "/usr/bin/xorriso" ]; then
/usr/bin/xorriso -dev lorax/images/boot.iso --find |
tail -n+2 |
tr -d "'" |
cut -c2- sort >> lorax/images/boot.iso.manifest
cut -c2- | sort >> lorax/images/boot.iso.manifest
elif [ -f "/usr/bin/isoinfo" ]; then
/usr/bin/isoinfo -R -f -i lorax/images/boot.iso |
grep -v '/TRANS.TBL$' | sort >> lorax/images/boot.iso.manifest

View file

@ -9,7 +9,7 @@ MOCK_RESL="${MOCK_ROOT}/result"
MOCK_CHRO="${MOCK_ROOT}/root"
MOCK_LOG="${MOCK_RESL}/mock-output.log"
LORAX_SCR="/var/tmp/buildImage.sh"
LORAX_TAR="lorax-{{ major }}-{{ arch }}.tar.gz"
LORAX_TAR="lorax-{{ revision }}-{{ arch }}.tar.gz"
ISOLATION="{{ isolation }}"
BUILDDIR="{{ builddir }}"

View file

@ -6,6 +6,10 @@ from empanadas.util.check import (
Checks,
)
from empanadas.util.shared import (
Shared,
)
from empanadas.util.dnf_utils import (
RepoSync,
SigRepoSync
@ -18,5 +22,6 @@ from empanadas.util.iso_utils import (
__all__ = [
'Checks',
'RepoSync'
'RepoSync',
'Shared'
]

View file

@ -19,6 +19,7 @@ import json
from jinja2 import Environment, FileSystemLoader
from empanadas.common import Color, _rootdir
from empanadas.util import Shared
# initial treeinfo data is made here
import productmd.treeinfo
@ -76,6 +77,7 @@ class RepoSync:
self.repo_base_url = config['repo_base_url']
self.compose_root = config['compose_root']
self.compose_base = config['compose_root'] + "/" + major
self.profile = rlvars['profile']
# Relevant major version items
self.shortname = config['shortname']
@ -112,7 +114,7 @@ class RepoSync:
self.compose_latest_dir = os.path.join(
config['compose_root'],
major,
"latest-Rocky-{}".format(major)
"latest-Rocky-{}".format(self.profile)
)
self.compose_latest_sync = os.path.join(

View file

@ -13,7 +13,6 @@ import shlex
import time
import tarfile
import shutil
import hashlib
# lazy person's s3 parser
import requests
@ -35,7 +34,8 @@ import productmd.treeinfo
from jinja2 import Environment, FileSystemLoader
from empanadas.common import Color, _rootdir, Utils
from empanadas.common import Color, _rootdir
from empanadas.util import Shared
class IsoBuild:
"""
@ -90,6 +90,7 @@ class IsoBuild:
self.extra_iso = extra_iso
self.extra_iso_mode = extra_iso_mode
self.checksum = rlvars['checksum']
self.profile = rlvars['profile']
# Relevant major version items
self.arch = arch
@ -102,6 +103,7 @@ class IsoBuild:
self.repo_base_url = config['repo_base_url']
self.project_id = rlvars['project_id']
self.structure = rlvars['structure']
self.bugurl = rlvars['bugurl']
self.extra_files = rlvars['extra_files']
@ -133,7 +135,7 @@ class IsoBuild:
self.compose_latest_dir = os.path.join(
config['compose_root'],
major,
"latest-Rocky-{}".format(major)
"latest-Rocky-{}".format(self.profile)
)
self.compose_latest_sync = os.path.join(
@ -189,7 +191,7 @@ class IsoBuild:
self.log.info('Compose repo directory: %s' % sync_root)
self.log.info('ISO Build Logs: /var/lib/mock/{}-{}-{}/result'.format(
self.shortname, self.major_version, self.current_arch)
self.shortname.lower(), self.major_version, self.current_arch)
)
self.log.info('ISO Build completed.')
@ -282,6 +284,7 @@ class IsoBuild:
isolation=self.mock_isolation,
builddir=self.mock_work_root,
shortname=self.shortname,
revision=self.release,
)
iso_template_output = iso_template.render(
@ -297,6 +300,7 @@ class IsoBuild:
rc=rclevel,
builddir=self.mock_work_root,
lorax_work_root=self.lorax_result_root,
bugurl=self.bugurl,
)
mock_iso_entry = open(mock_iso_path, "w+")
@ -363,7 +367,7 @@ class IsoBuild:
full_drop = '{}/lorax-{}-{}.tar.gz'.format(
lorax_arch_dir,
self.major_version,
self.release,
arch
)
@ -396,7 +400,7 @@ class IsoBuild:
for arch in arches_to_unpack:
tarname = 'lorax-{}-{}.tar.gz'.format(
self.major_version,
self.release,
arch
)
@ -474,7 +478,7 @@ class IsoBuild:
raise SystemExit()
for y in self.s3.list_objects(Bucket=self.s3_bucket)['Contents']:
if 'tar.gz' in y['Key']:
if 'tar.gz' in y['Key'] and self.release in y['Key']:
temp.append(y['Key'])
for arch in self.arches:
@ -525,7 +529,7 @@ class IsoBuild:
resp = xmltodict.parse(bucket_data.content)
for y in resp['ListBucketResult']['Contents']:
if 'tar.gz' in y['Key']:
if 'tar.gz' in y['Key'] and self.release in y['Key']:
temp.append(y['Key'])
for arch in self.arches:
@ -690,7 +694,7 @@ class IsoBuild:
shutil.copy2(path_to_src_image + '.manifest', manifest)
self.log.info('Creating checksum for %s boot iso...' % arch)
checksum = Utils.get_checksum(isobootpath, self.checksum, self.log)
checksum = Shared.get_checksum(isobootpath, self.checksum, self.log)
if not checksum:
self.log.error(
'[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' +
@ -824,31 +828,6 @@ class IsoBuild:
# Set default variant
ti.dump(treeinfo, main_variant=primary)
def discinfo_write(self, file_path, arch):
"""
Ensure discinfo is written correctly
"""
with open(file_path, "w+") as f:
f.write("%s\n" % self.timestamp)
f.write("%s\n" % self.fullname)
f.write("%s\n" % arch)
f.write("ALL\n")
f.close()
def write_media_repo(self):
"""
Ensure media.repo exists
"""
data = [
"[InstallMedia]",
"name=%s" % self.fullname,
"mediaid=%s" % self.timestamp,
"metadata_expire=-1",
"gpgcheck=0",
"cost=500",
"",
]
# Next set of functions are loosely borrowed (in concept) from pungi. Some
# stuff may be combined/mixed together, other things may be simplified or
# reduced in nature.
@ -1075,6 +1054,7 @@ class IsoBuild:
implantmd5=implantmd5,
make_manifest=make_manifest,
lorax_pkg_cmd=lorax_pkg_cmd,
isoname=isoname,
)
mock_iso_entry = open(mock_iso_path, "w+")
@ -1225,7 +1205,7 @@ class IsoBuild:
'[' + Color.BOLD + Color.GREEN + 'INFO' + Color.END + '] ' +
'Performing checksum for ' + p
)
checksum = Utils.get_checksum(path, self.checksum, self.log)
checksum = Shared.get_checksum(path, self.checksum, self.log)
if not checksum:
self.log.error(
'[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' +

View file

@ -0,0 +1,79 @@
# These are shared utilities used
import os
import hashlib
class Shared:
    """
    Quick utilities that may be commonly used
    """
    @staticmethod
    def get_checksum(path, hashtype, logger):
        """
        Generate a checksum string for the file at *path* using *hashtype*.

        The file is read in 8 KiB chunks so large files are never held in
        memory all at once.

        Returns the formatted checksum string (mimicking the sync scripts'
        ``stat`` + ``sha256sum`` style output), or ``False`` when the hash
        type is invalid or the file cannot be read.
        """
        try:
            checksum = hashlib.new(hashtype)
        except ValueError:
            logger.error("Invalid hash type: %s" % hashtype)
            return False

        try:
            # Context manager guarantees the handle is closed even if a
            # read fails partway through the file.
            with open(path, "rb") as input_file:
                while True:
                    chunk = input_file.read(8192)
                    if not chunk:
                        break
                    checksum.update(chunk)
        except IOError as e:
            logger.error("Could not open file %s: %s" % (path, e))
            return False

        stat = os.stat(path)
        base = os.path.basename(path)
        # This emulates our current syncing scripts that run stat and
        # sha256sum and what not with a very specific output.
        return "%s: %s bytes\n%s (%s) = %s\n" % (
            base,
            stat.st_size,
            hashtype.upper(),
            base,
            checksum.hexdigest()
        )

    @staticmethod
    def discinfo_write(timestamp, fullname, arch, file_path):
        """
        Ensure discinfo is written correctly
        """
        data = [
            "%s" % timestamp,
            "%s" % fullname,
            "%s" % arch,
            "ALL",
            # Trailing empty entry so join() emits the conventional final
            # newline, matching media_repo_write below.
            "",
        ]
        with open(file_path, "w+") as f:
            f.write("\n".join(data))

    @staticmethod
    def media_repo_write(timestamp, fullname, file_path):
        """
        Ensure media.repo exists
        """
        data = [
            "[InstallMedia]",
            "name=%s" % fullname,
            "mediaid=%s" % timestamp,
            "metadata_expire=-1",
            "gpgcheck=0",
            "cost=500",
            "",
        ]
        with open(file_path, "w") as f:
            f.write("\n".join(data))