Expand and Generalize ISO Functions

* Utils is now empanadas.util.Shared
* Each config now has a profile name to determine latest-Rocky-{} link
  (Resolves RLBT#0000131)
* Check added to see if an ISO was built at some point, and forcefully
  exit if so
* Lorax tarball should be in the format of lorax-X.Y-ARCH to ensure
  there are no collisions between stable, beta, and lh builds
Louis Abel 2022-06-27 17:59:21 -07:00
parent 57d51dadc7
commit f4f29d97cc
Signed by untrusted user: label
GPG Key ID: B37E62D143879B36
17 changed files with 126 additions and 91 deletions

View File

@@ -21,47 +21,6 @@ class Color:
     BOLD = '\033[1m'
     END = '\033[0m'
 
-class Utils:
-    """
-    Quick utilities that may be commonly used
-    """
-    @staticmethod
-    def get_checksum(path, hashtype, logger):
-        """
-        Generates a checksum from the provided path by doing things in chunks.
-        This way we don't do it in memory.
-        """
-        try:
-            checksum = hashlib.new(hashtype)
-        except ValueError:
-            logger.error("Invalid hash type: %s" % hashtype)
-            return False
-
-        try:
-            input_file = open(path, "rb")
-        except IOError as e:
-            logger.error("Could not open file %s: %s" % (path, e))
-            return False
-
-        while True:
-            chunk = input_file.read(8192)
-            if not chunk:
-                break
-            checksum.update(chunk)
-
-        input_file.close()
-        stat = os.stat(path)
-        base = os.path.basename(path)
-
-        # This emulates our current syncing scripts that runs stat and
-        # sha256sum and what not with a very specific output.
-        return "%s: %s bytes\n%s (%s) = %s\n" % (
-            base,
-            stat.st_size,
-            hashtype.upper(),
-            base,
-            checksum.hexdigest()
-        )
-
 # vars and additional checks
 rldict = {}
 sigdict = {}

View File

@@ -5,6 +5,7 @@
   rclvl: 'RC2'
   major: '8'
   minor: '6'
+  profile: '8'
   bugurl: 'https://bugs.rockylinux.org'
   allowed_arches:
     - x86_64

View File

@@ -1,10 +1,11 @@
 ---
 '9-beta':
-  fullname: 'Rocky Linux 9'
+  fullname: 'Rocky Linux 9.1'
   revision: '9.1'
   rclvl: 'BETA1'
   major: '9'
   minor: '1'
+  profile: '9-beta'
   bugurl: 'https://bugs.rockylinux.org'
   checksum: 'sha256'
   allowed_arches:

View File

@@ -1,10 +1,11 @@
 ---
 '9':
-  fullname: 'Rocky Linux 9'
+  fullname: 'Rocky Linux 9.0'
   revision: '9.0'
   rclvl: 'RC1'
   major: '9'
   minor: '0'
+  profile: '9'
   bugurl: 'https://bugs.rockylinux.org'
   checksum: 'sha256'
   allowed_arches:

View File

@@ -1,10 +1,11 @@
 ---
 '9-lookahead':
-  fullname: 'Rocky Linux 9'
+  fullname: 'Rocky Linux 9.1'
   revision: '9.1'
   rclvl: 'LH1'
   major: '9'
   minor: '1'
+  profile: '9-lookahead'
   bugurl: 'https://bugs.rockylinux.org'
   checksum: 'sha256'
   allowed_arches:

View File

@@ -2,9 +2,10 @@
 'rln':
   fullname: 'Rocky Linux New'
   revision: '10'
-  rclvl: 'RC1'
+  rclvl: 'RLN120'
   major: '10'
   minor: '0'
+  profile: 'rln'
   bugurl: 'https://bugs.rockylinux.org'
   checksum: 'sha256'
   allowed_arches:

View File

@@ -8,7 +8,7 @@ from empanadas.util import IsoBuild
 parser = argparse.ArgumentParser(description="ISO Compose")
 
-parser.add_argument('--release', type=str, help="Major Release Version", required=True)
+parser.add_argument('--release', type=str, help="Major Release Version or major-type (eg 9-beta)", required=True)
 parser.add_argument('--isolation', type=str, help="mock isolation mode")
 parser.add_argument('--rc', action='store_true', help="Release Candidate")
 parser.add_argument('--local-compose', action='store_true', help="Compose Directory is Here")

View File

@@ -8,7 +8,7 @@ from empanadas.util import IsoBuild
 parser = argparse.ArgumentParser(description="ISO Compose")
 
-parser.add_argument('--release', type=str, help="Major Release Version", required=True)
+parser.add_argument('--release', type=str, help="Major Release Version or major-type (eg 9-beta)", required=True)
 parser.add_argument('--rc', action='store_true', help="Release Candidate")
 parser.add_argument('--arch', type=str, help="Architecture")
 parser.add_argument('--isolation', type=str, help="Mock Isolation")
@@ -30,7 +30,7 @@ a = IsoBuild(
         extra_iso=results.extra_iso,
         extra_iso_mode=results.extra_iso_mode,
         compose_dir_is_here=results.local_compose,
-        logger=results.logger,
+        logger=results.logger
 )
 
 def run():

View File

@@ -28,8 +28,9 @@ parser.add_argument('--logger', type=str)
 # Parse them
 results = parser.parse_args()
 
 rlvars = rldict[results.release]
+major = rlvars['major']
 
 r = Checks(rlvars, config['arch'])
 r.check_valid_arch()
@@ -37,7 +38,7 @@ r.check_valid_arch()
 a = RepoSync(
         rlvars,
         config,
-        major=rlvars['major'],
+        major=major,
         repo=results.repo,
         arch=results.arch,
         ignore_debug=results.ignore_debug,

View File

@@ -14,4 +14,5 @@ r.check_valid_arch()
 a = RepoSync(rlvars, config, major="9", repo="BaseOS", parallel=True, ignore_debug=False, ignore_source=False, hashed=True)
 
 def run():
-    a.run()
+    print(rlvars.keys())
+    print(rlvars)

View File

@@ -5,6 +5,7 @@ set -ex
 {{ lorax_pkg_cmd }}
 mkdir -p {{ compose_work_iso_dir }}/{{ arch }}
 cd {{ compose_work_iso_dir }}/{{ arch }}
+test -f {{ isoname }} && { echo "!! ISO ALREADY EXISTS !!"; exit 1; }
 {% else %}
 cd /builddir
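
The intent of the new guard, sketched here in Python (a minimal sketch; the filename is hypothetical, since the template fills it in from the isoname variable):

    import os
    import sys

    # Hypothetical ISO name; the Jinja template renders this from {{ isoname }}.
    isoname = "Rocky-9.0-x86_64-dvd.iso"

    # If a previous run already produced this ISO, stop instead of overwriting it.
    if os.path.exists(isoname):
        print("!! ISO ALREADY EXISTS !!", file=sys.stderr)
        sys.exit(1)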

View File

@@ -7,8 +7,9 @@ VERSION="{{ revision }}"
 PRODUCT="{{ distname }}"
 MOCKBLD="{{ builddir }}"
 LORAXRES="{{ lorax_work_root }}"
-LORAX_TAR="lorax-{{ major }}-{{ arch }}.tar.gz"
+LORAX_TAR="lorax-{{ revision }}-{{ arch }}.tar.gz"
 LOGFILE="lorax-{{ arch }}.log"
+BUGURL="{{ bugurl }}"
 
 {% for pkg in lorax %}
 sed -i '/{{ pkg }}/ s/^/#/' /usr/share/lorax/templates.d/80-rhel/runtime-install.tmpl
@@ -23,6 +24,7 @@ lorax --product="${PRODUCT}" \
 {%- for repo in repos %}
         --source={{ repo.url }} \
 {%- endfor %}
+        --bugurl="${BUGURL}" \
         --variant="${VARIANT}" \
         --nomacboot \
         --buildarch="${ARCH}" \
@@ -45,7 +47,7 @@ if [ -f "/usr/bin/xorriso" ]; then
 /usr/bin/xorriso -dev lorax/images/boot.iso --find |
         tail -n+2 |
         tr -d "'" |
-        cut -c2- sort >> lorax/images/boot.iso.manifest
+        cut -c2- | sort >> lorax/images/boot.iso.manifest
 elif [ -f "/usr/bin/isoinfo" ]; then
 /usr/bin/isoinfo -R -f -i lorax/images/boot.iso |
         grep -v '/TRANS.TBL$' | sort >> lorax/images/boot.iso.manifest

View File

@@ -9,7 +9,7 @@ MOCK_RESL="${MOCK_ROOT}/result"
 MOCK_CHRO="${MOCK_ROOT}/root"
 MOCK_LOG="${MOCK_RESL}/mock-output.log"
 LORAX_SCR="/var/tmp/buildImage.sh"
-LORAX_TAR="lorax-{{ major }}-{{ arch }}.tar.gz"
+LORAX_TAR="lorax-{{ revision }}-{{ arch }}.tar.gz"
 ISOLATION="{{ isolation }}"
 BUILDDIR="{{ builddir }}"
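
A short sketch of why the tarball name is now keyed on the full revision instead of the major version (the values below are illustrative; the real ones come from the release configs):

    arch = "x86_64"

    # Old scheme: every 9.x build produced the same artifact name.
    major = "9"
    print("lorax-{}-{}.tar.gz".format(major, arch))       # lorax-9-x86_64.tar.gz

    # New scheme: the 9.0 stable tarball no longer shares a name with the
    # 9.1 beta/lookahead tarballs, so they cannot overwrite each other.
    for revision in ("9.0", "9.1"):
        print("lorax-{}-{}.tar.gz".format(revision, arch))
    # lorax-9.0-x86_64.tar.gz
    # lorax-9.1-x86_64.tar.gz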

View File

@@ -6,6 +6,10 @@ from empanadas.util.check import (
     Checks,
 )
 
+from empanadas.util.shared import (
+    Shared,
+)
+
 from empanadas.util.dnf_utils import (
     RepoSync,
     SigRepoSync
@@ -18,5 +22,6 @@ from empanadas.util.iso_utils import (
 __all__ = [
     'Checks',
-    'RepoSync'
+    'RepoSync',
+    'Shared'
 ]

View File

@@ -19,6 +19,7 @@ import json
 from jinja2 import Environment, FileSystemLoader
 
 from empanadas.common import Color, _rootdir
+from empanadas.util import Shared
 
 # initial treeinfo data is made here
 import productmd.treeinfo
@@ -76,6 +77,7 @@ class RepoSync:
         self.repo_base_url = config['repo_base_url']
         self.compose_root = config['compose_root']
         self.compose_base = config['compose_root'] + "/" + major
+        self.profile = rlvars['profile']
 
         # Relevant major version items
         self.shortname = config['shortname']
@@ -112,7 +114,7 @@
         self.compose_latest_dir = os.path.join(
             config['compose_root'],
             major,
-            "latest-Rocky-{}".format(major)
+            "latest-Rocky-{}".format(self.profile)
         )
 
         self.compose_latest_sync = os.path.join(
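
A short sketch of what the new profile key changes for the latest-Rocky link (compose_root is a hypothetical value here; in practice it comes from config['compose_root']):

    import os

    compose_root = "/mnt/compose"   # hypothetical; read from config['compose_root'] in practice
    major = "9"

    # Before: every 9.x config resolved to the same link name.
    print(os.path.join(compose_root, major, "latest-Rocky-{}".format(major)))
    # /mnt/compose/9/latest-Rocky-9

    # After: each config's profile keeps stable, beta, and lookahead composes apart.
    for profile in ("9", "9-beta", "9-lookahead"):
        print(os.path.join(compose_root, major, "latest-Rocky-{}".format(profile)))
    # /mnt/compose/9/latest-Rocky-9
    # /mnt/compose/9/latest-Rocky-9-beta
    # /mnt/compose/9/latest-Rocky-9-lookahead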

View File

@@ -13,7 +13,6 @@ import shlex
 import time
 import tarfile
 import shutil
-import hashlib
 
 # lazy person's s3 parser
 import requests
@@ -35,7 +34,8 @@ import productmd.treeinfo
 from jinja2 import Environment, FileSystemLoader
 
-from empanadas.common import Color, _rootdir, Utils
+from empanadas.common import Color, _rootdir
+from empanadas.util import Shared
 
 class IsoBuild:
     """
@@ -90,6 +90,7 @@ class IsoBuild:
         self.extra_iso = extra_iso
         self.extra_iso_mode = extra_iso_mode
         self.checksum = rlvars['checksum']
+        self.profile = rlvars['profile']
 
         # Relevant major version items
         self.arch = arch
@@ -102,6 +103,7 @@
         self.repo_base_url = config['repo_base_url']
         self.project_id = rlvars['project_id']
         self.structure = rlvars['structure']
+        self.bugurl = rlvars['bugurl']
         self.extra_files = rlvars['extra_files']
@@ -133,7 +135,7 @@
         self.compose_latest_dir = os.path.join(
             config['compose_root'],
             major,
-            "latest-Rocky-{}".format(major)
+            "latest-Rocky-{}".format(self.profile)
         )
 
         self.compose_latest_sync = os.path.join(
@@ -189,7 +191,7 @@
         self.log.info('Compose repo directory: %s' % sync_root)
         self.log.info('ISO Build Logs: /var/lib/mock/{}-{}-{}/result'.format(
-            self.shortname, self.major_version, self.current_arch)
+            self.shortname.lower(), self.major_version, self.current_arch)
         )
         self.log.info('ISO Build completed.')
@@ -282,6 +284,7 @@
             isolation=self.mock_isolation,
             builddir=self.mock_work_root,
             shortname=self.shortname,
+            revision=self.release,
         )
 
         iso_template_output = iso_template.render(
@@ -297,6 +300,7 @@
             rc=rclevel,
             builddir=self.mock_work_root,
             lorax_work_root=self.lorax_result_root,
+            bugurl=self.bugurl,
         )
 
         mock_iso_entry = open(mock_iso_path, "w+")
@@ -363,7 +367,7 @@
         full_drop = '{}/lorax-{}-{}.tar.gz'.format(
             lorax_arch_dir,
-            self.major_version,
+            self.release,
             arch
         )
@@ -396,7 +400,7 @@
         for arch in arches_to_unpack:
             tarname = 'lorax-{}-{}.tar.gz'.format(
-                self.major_version,
+                self.release,
                 arch
             )
@@ -474,7 +478,7 @@
             raise SystemExit()
 
         for y in self.s3.list_objects(Bucket=self.s3_bucket)['Contents']:
-            if 'tar.gz' in y['Key']:
+            if 'tar.gz' in y['Key'] and self.release in y['Key']:
                 temp.append(y['Key'])
 
         for arch in self.arches:
@@ -525,7 +529,7 @@
         resp = xmltodict.parse(bucket_data.content)
 
         for y in resp['ListBucketResult']['Contents']:
-            if 'tar.gz' in y['Key']:
+            if 'tar.gz' in y['Key'] and self.release in y['Key']:
                 temp.append(y['Key'])
 
         for arch in self.arches:
@@ -690,7 +694,7 @@
             shutil.copy2(path_to_src_image + '.manifest', manifest)
 
         self.log.info('Creating checksum for %s boot iso...' % arch)
-        checksum = Utils.get_checksum(isobootpath, self.checksum, self.log)
+        checksum = Shared.get_checksum(isobootpath, self.checksum, self.log)
         if not checksum:
             self.log.error(
                 '[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' +
@@ -824,31 +828,6 @@
         # Set default variant
         ti.dump(treeinfo, main_variant=primary)
 
-    def discinfo_write(self, file_path, arch):
-        """
-        Ensure discinfo is written correctly
-        """
-        with open(file_path, "w+") as f:
-            f.write("%s\n" % self.timestamp)
-            f.write("%s\n" % self.fullname)
-            f.write("%s\n" % arch)
-            f.write("ALL\n")
-            f.close()
-
-    def write_media_repo(self):
-        """
-        Ensure media.repo exists
-        """
-        data = [
-            "[InstallMedia]",
-            "name=%s" % self.fullname,
-            "mediaid=%s" % self.timestamp,
-            "metadata_expire=-1",
-            "gpgcheck=0",
-            "cost=500",
-            "",
-        ]
-
     # Next set of functions are loosely borrowed (in concept) from pungi. Some
     # stuff may be combined/mixed together, other things may be simplified or
     # reduced in nature.
@@ -1075,6 +1054,7 @@
             implantmd5=implantmd5,
             make_manifest=make_manifest,
             lorax_pkg_cmd=lorax_pkg_cmd,
+            isoname=isoname,
         )
 
         mock_iso_entry = open(mock_iso_path, "w+")
@@ -1225,7 +1205,7 @@
                 '[' + Color.BOLD + Color.GREEN + 'INFO' + Color.END + '] ' +
                 'Performing checksum for ' + p
             )
-            checksum = Utils.get_checksum(path, self.checksum, self.log)
+            checksum = Shared.get_checksum(path, self.checksum, self.log)
             if not checksum:
                 self.log.error(
                     '[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' +

View File

@@ -0,0 +1,79 @@
+# These are shared utilities used
+
+import os
+import hashlib
+
+class Shared:
+    """
+    Quick utilities that may be commonly used
+    """
+
+    @staticmethod
+    def get_checksum(path, hashtype, logger):
+        """
+        Generates a checksum from the provided path by doing things in chunks.
+        This way we don't do it in memory.
+        """
+        try:
+            checksum = hashlib.new(hashtype)
+        except ValueError:
+            logger.error("Invalid hash type: %s" % hashtype)
+            return False
+
+        try:
+            input_file = open(path, "rb")
+        except IOError as e:
+            logger.error("Could not open file %s: %s" % (path, e))
+            return False
+
+        while True:
+            chunk = input_file.read(8192)
+            if not chunk:
+                break
+            checksum.update(chunk)
+
+        input_file.close()
+        stat = os.stat(path)
+        base = os.path.basename(path)
+
+        # This emulates our current syncing scripts that runs stat and
+        # sha256sum and what not with a very specific output.
+        return "%s: %s bytes\n%s (%s) = %s\n" % (
+            base,
+            stat.st_size,
+            hashtype.upper(),
+            base,
+            checksum.hexdigest()
+        )
+
+    @staticmethod
+    def discinfo_write(timestamp, fullname, arch, file_path):
+        """
+        Ensure discinfo is written correctly
+        """
+        data = [
+            "%s" % timestamp,
+            "%s" % fullname,
+            "%s" % arch,
+            "ALL"
+        ]
+
+        with open(file_path, "w+") as f:
+            f.write("\n".join(data))
+            f.close()
+
+    @staticmethod
+    def media_repo_write(timestamp, fullname, file_path):
+        """
+        Ensure media.repo exists
+        """
+        data = [
+            "[InstallMedia]",
+            "name=%s" % fullname,
+            "mediaid=%s" % timestamp,
+            "metadata_expire=-1",
+            "gpgcheck=0",
+            "cost=500",
+            "",
+        ]
+
+        with open(file_path, "w") as f:
+            f.write("\n".join(data))
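
A minimal usage sketch for the relocated helper (the paths and logger setup are hypothetical; inside empanadas the callers pass their own logger and take the hash type from the release config's checksum key):

    import logging

    from empanadas.util import Shared

    logging.basicConfig(level=logging.INFO)
    logger = logging.getLogger("iso-checksum")

    # get_checksum returns False on error, otherwise a string shaped like the
    # existing sync scripts' output:
    #   <name>: <size> bytes
    #   SHA256 (<name>) = <hexdigest>
    result = Shared.get_checksum("/tmp/boot.iso", "sha256", logger)
    if result:
        with open("/tmp/boot.iso.CHECKSUM", "w+") as f:
            f.write(result)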