Bump main to 0.6.0
commit 166ee1d38a
@@ -1 +1 @@
-__version__ = '0.5.0'
+__version__ = '0.6.0'
@@ -109,6 +109,7 @@
     XFCE: rocky-live-xfce.ks
     KDE: rocky-live-kde.ks
     MATE: rocky-live-mate.ks
     Cinnamon: rocky-live-cinnamon.ks
   allowed_arches:
     - x86_64
+    - aarch64
@@ -9,6 +9,7 @@ from empanadas.util.check import (
 from empanadas.util.shared import (
     Shared,
     ArchCheck,
+    Idents,
 )

 from empanadas.util.dnf_utils import (
@@ -289,7 +289,9 @@ class RepoSync:
         Shared.deploy_extra_files(self.extra_files, sync_root, global_work_root, self.log)
         self.deploy_treeinfo(self.repo, sync_root, self.arch)
         self.tweak_treeinfo(self.repo, sync_root, self.arch)
-        self.symlink_to_latest(generated_dir)
+        #self.symlink_to_latest(generated_dir)
+        Shared.symlink_to_latest(self.shortname, self.major_version,
+                generated_dir, self.compose_latest_dir, self.log)

         if self.repoclosure:
             self.repoclosure_work(sync_root, work_root, log_root)
@@ -323,7 +325,7 @@ class RepoSync:
         if self.parallel:
             self.podman_sync(repo, sync_root, work_root, log_root, global_work_root, arch)
         else:
-            Shared.dnf_sync(repo, sync_root, work_root, arch, self.log)
+            Shared.norm_dnf_sync(self, repo, sync_root, work_root, arch, self.log)

     def podman_sync(
             self,
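Note on the new serial path: `norm_dnf_sync` (added to shared.py later in this diff) is a static method that takes the calling RepoSync object as its first parameter, named `data`, and reads settings such as `data.arches`, `data.fullrun`, and `data.dnf_config` off it (its loop variable `repos_to_sync` is presumably derived elsewhere in shared.py). A toy sketch of that "pass the instance as data" convention, with made-up names for illustration:

    # Hypothetical mini-example of the convention; not code from the commit.
    class Helper:
        @staticmethod
        def sync(data, repo):
            # Reads config off the calling object, the way norm_dnf_sync
            # reads data.arches, data.fullrun, data.dnf_config.
            return [f'{repo}-{a}' for a in data.arches]

    class Caller:
        arches = ['x86_64', 'aarch64']
        def run(self):
            return Helper.sync(self, 'BaseOS')

    print(Caller().run())  # ['BaseOS-x86_64', 'BaseOS-aarch64']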
@@ -709,22 +711,6 @@ class RepoSync:
                 'No issues detected.'
             )

-    def symlink_to_latest(self, generated_dir):
-        """
-        Emulates pungi and symlinks latest-Rocky-X
-
-        This link will be what is updated in full runs. Whatever is in this
-        'latest' directory is what is rsynced on to staging after completion.
-        This link should not change often.
-        """
-        try:
-            os.remove(self.compose_latest_dir)
-        except:
-            pass
-
-        self.log.info('Symlinking to latest-{}-{}...'.format(self.shortname, self.major_version))
-        os.symlink(generated_dir, self.compose_latest_dir)
-
     def repoclosure_work(self, sync_root, work_root, log_root):
         """
         This is where we run repoclosures, based on the configuration of each
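The removed method moves to shared.py as a static helper. A minimal sketch of the relocated version, assuming it mirrors the removed body and the signature visible at the new call site (the real code lives in shared.py and is not shown in this diff):

    # Sketch only; assumes Shared.symlink_to_latest mirrors the old method.
    import os

    class Shared:
        @staticmethod
        def symlink_to_latest(shortname, major_version, generated_dir,
                              compose_latest_dir, logger):
            try:
                os.remove(compose_latest_dir)
            except OSError:
                pass  # link may not exist yet
            logger.info('Symlinking to latest-{}-{}...'.format(shortname, major_version))
            os.symlink(generated_dir, compose_latest_dir)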
@@ -35,7 +35,7 @@ import productmd.treeinfo
 from jinja2 import Environment, FileSystemLoader

 from empanadas.common import Color, _rootdir
-from empanadas.util import Shared, ArchCheck
+from empanadas.util import Shared, ArchCheck, Idents

 class IsoBuild:
     """
@@ -138,10 +138,7 @@ class IsoBuild:
         self.compose_latest_dir = os.path.join(
                 config['compose_root'],
                 major,
-                "latest-{}-{}".format(
-                    self.shortname,
-                    self.profile
-                )
+                f"latest-{self.shortname}-{self.profile}"
         )

         self.compose_latest_sync = os.path.join(
@@ -371,11 +368,7 @@ class IsoBuild:

             source_path = latest_artifacts[arch]

-            full_drop = '{}/lorax-{}-{}.tar.gz'.format(
-                    lorax_arch_dir,
-                    self.release,
-                    arch
-            )
+            full_drop = f'{lorax_arch_dir}/lorax-{self.release}-{arch}.tar.gz'

             if not os.path.exists(lorax_arch_dir):
                 os.makedirs(lorax_arch_dir, exist_ok=True)
@@ -403,10 +396,7 @@ class IsoBuild:
         self.log.info(Color.INFO + 'Beginning unpack phase...')

         for arch in arches_to_unpack:
-            tarname = 'lorax-{}-{}.tar.gz'.format(
-                    self.release,
-                    arch
-            )
+            tarname = f'lorax-{self.release}-{arch}.tar.gz'

             tarball = os.path.join(
                     self.lorax_work_dir,
@@ -523,22 +513,13 @@ class IsoBuild:
         if self.release_candidate:
             rclevel = '-' + self.rclvl

-        discname = '{}-{}.{}{}-{}-{}.iso'.format(
-                self.shortname,
-                self.major_version,
-                self.minor_version,
-                rclevel,
-                arch,
-                'boot'
-        )
+        discname = f'{self.shortname}-{self.major_version}.{self.minor_version}{rclevel}-{arch}-boot.iso'

         isobootpath = os.path.join(iso_to_go, discname)
-        manifest = '{}.manifest'.format(isobootpath)
-        link_name = '{}-{}-boot.iso'.format(self.shortname, arch)
+        manifest = f'{isobootpath}.manifest'
+        link_name = f'{self.shortname}-{arch}-boot.iso'
         link_manifest = link_name + '.manifest'
-        latest_link_name = '{}-{}-latest-{}-boot.iso'.format(self.shortname,
-                self.major_version,
-                arch)
+        latest_link_name = f'{self.shortname}-{self.major_version}-latest-{arch}-boot.iso'
         latest_link_manifest = latest_link_name + '.manifest'
         isobootpath = os.path.join(iso_to_go, discname)
         linkbootpath = os.path.join(iso_to_go, link_name)
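Worked example of the new naming, with hypothetical values (shortname and versions are illustrative, not taken from the commit):

    shortname, major_version, minor_version = 'Rocky', '9', '1'
    rclevel, arch = '-RC1', 'x86_64'
    discname = f'{shortname}-{major_version}.{minor_version}{rclevel}-{arch}-boot.iso'
    print(discname)  # Rocky-9.1-RC1-x86_64-boot.iso
    latest = f'{shortname}-{major_version}-latest-{arch}-boot.iso'
    print(latest)    # Rocky-9-latest-x86_64-boot.iso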
@@ -813,11 +794,11 @@ class IsoBuild:
         xorriso_template = self.tmplenv.get_template('xorriso.tmpl.txt')
         iso_readme_template = self.tmplenv.get_template('ISOREADME.tmpl')

-        mock_iso_path = '/var/tmp/lorax-{}.cfg'.format(self.major_version)
-        mock_sh_path = '{}/extraisobuild-{}-{}.sh'.format(entries_dir, arch, image)
-        iso_template_path = '{}/buildExtraImage-{}-{}.sh'.format(entries_dir, arch, image)
-        xorriso_template_path = '{}/xorriso-{}-{}.txt'.format(entries_dir, arch, image)
-        iso_readme_path = '{}/{}/README'.format(self.iso_work_dir, arch)
+        mock_iso_path = f'/var/tmp/lorax-{self.major_version}.cfg'
+        mock_sh_path = f'{entries_dir}/extraisobuild-{arch}-{image}.sh'
+        iso_template_path = f'{entries_dir}/buildExtraImage-{arch}-{image}.sh'
+        xorriso_template_path = f'{entries_dir}/xorriso-{arch}-{image}.txt'
+        iso_readme_path = f'{self.iso_work_dir}/{arch}/README'
         #print(iso_readme_path)

         log_root = os.path.join(
@@ -829,7 +810,7 @@ class IsoBuild:
         if not os.path.exists(log_root):
             os.makedirs(log_root, exist_ok=True)

-        log_path_command = '| tee -a {}/{}-{}.log'.format(log_root, arch, image)
+        log_path_command = f'| tee -a {log_root}/{arch}-{image}.log'

         # This is kind of a hack. Installing xorrisofs sets the alternatives to
         # it, so backwards compatibility is sort of guaranteed. But we want to
@@ -850,31 +831,10 @@ class IsoBuild:
         if self.updated_image:
             datestamp = '-' + self.updated_image_date

-        volid = '{}-{}-{}{}-{}-{}'.format(
-                self.shortname,
-                self.major_version,
-                self.minor_version,
-                rclevel,
-                arch,
-                volname
-        )
-
-        isoname = '{}-{}{}{}-{}-{}.iso'.format(
-                self.shortname,
-                self.revision,
-                rclevel,
-                datestamp,
-                arch,
-                image
-        )
-
-        generic_isoname = '{}-{}-{}.iso'.format(self.shortname, arch, image)
-        latest_isoname = '{}-{}-latest-{}-{}.iso'.format(
-                self.shortname,
-                self.major_version,
-                arch,
-                image
-        )
+        volid = f'{self.shortname}-{self.major_version}-{self.minor_version}{rclevel}-{arch}-{volname}'
+        isoname = f'{self.shortname}-{self.revision}{rclevel}{datestamp}-{arch}-{image}.iso'
+        generic_isoname = f'{self.shortname}-{arch}-{image}.iso'
+        latest_isoname = f'{self.shortname}-{self.major_version}-latest-{arch}-{image}.iso'

         lorax_pkg_cmd = '/usr/bin/dnf install {} -y {}'.format(
                 ' '.join(required_pkgs),
@@ -986,7 +946,7 @@ class IsoBuild:
         have mock available.
         """
         entries_dir = os.path.join(work_root, "entries")
-        extra_iso_cmd = '/bin/bash {}/extraisobuild-{}-{}.sh'.format(entries_dir, arch, image)
+        extra_iso_cmd = f'/bin/bash {entries_dir}/extraisobuild-{arch}-{image}.sh'
         self.log.info('Starting mock build...')
         p = subprocess.call(shlex.split(extra_iso_cmd))
         if p != 0:
@@ -1022,7 +982,7 @@ class IsoBuild:
             arch_sync = arches.copy()

             for a in arch_sync:
-                entry_name = 'buildExtraImage-{}-{}.sh'.format(a, i)
+                entry_name = f'buildExtraImage-{a}-{i}.sh'
                 entry_name_list.append(entry_name)

                 rclevel = ''
@@ -1080,10 +1040,7 @@ class IsoBuild:
             join_all_pods = ' '.join(entry_name_list)
             time.sleep(3)
             self.log.info(Color.INFO + 'Building ' + i + ' ...')
-            pod_watcher = '{} wait {}'.format(
-                    cmd,
-                    join_all_pods
-            )
+            pod_watcher = f'{cmd} wait {join_all_pods}'

             watch_man = subprocess.call(
                     shlex.split(pod_watcher),
@@ -1095,10 +1052,7 @@ class IsoBuild:
             # code.
             pattern = "Exited (0)"
             for pod in entry_name_list:
-                checkcmd = '{} ps -f status=exited -f name={}'.format(
-                        cmd,
-                        pod
-                )
+                checkcmd = f'{cmd} ps -f status=exited -f name={pod}'
                 podcheck = subprocess.Popen(
                         checkcmd,
                         stdout=subprocess.PIPE,
@@ -1111,10 +1065,7 @@ class IsoBuild:
                     self.log.error(Color.FAIL + pod)
                     bad_exit_list.append(pod)

-            rmcmd = '{} rm {}'.format(
-                    cmd,
-                    join_all_pods
-            )
+            rmcmd = f'{cmd} rm {join_all_pods}'

             rmpod = subprocess.Popen(
                     rmcmd,
@@ -1202,32 +1153,41 @@ class IsoBuild:
             for k, v in self._get_grafts([rd_for_var]).items():
                 files[os.path.join(repo, "repodata", k)] = v

-        grafts = '{}/{}-{}-grafts'.format(
-                lorax_base_dir,
-                iso,
-                arch
-        )
+        grafts = f'{lorax_base_dir}/{iso}-{arch}-grafts'

-        xorrs = '{}/xorriso-{}.txt'.format(
-                lorax_base_dir,
-                arch
-        )
+        xorrs = f'{lorax_base_dir}/xorriso-{iso}-{arch}.txt'

+        # Generate exclusion list/dict from boot.iso manifest
+        boot_manifest = f'{lorax_base_dir}/lorax/images/boot.iso.manifest'
+        # Boot configs and images that may change
+        # It's unlikely these will be changed in empanadas, they're used as is
+        # and it works fine. This is a carry over from a recent pungi commit,
+        # based on an issue I had filed. The above was the original part, the
+        # below is a pungi "buildinstall" thing that we don't do, but may
+        # include as a feature if it ever happens.
+        updatable_files = set(ArchCheck.boot_configs + ArchCheck.boot_images + ['.discinfo'])
+        ignores = set()
+        updatables = set()
+
+        try:
+            with open(boot_manifest) as i:
+                # ignores = set(line.lstrip("/").rstrip("\n") for line in i)
+                for line in i:
+                    path = line.lstrip("/").rstrip("\n")
+                    if path in updatable_files:
+                        updatables.add(path)
+                    else:
+                        ignores.add(path)
+        except Exception as e:
+            self.log.error(Color.FAIL + 'File was likely not found.')
+            raise SystemExit(e)

         self._write_grafts(
                 grafts,
                 xorrs,
                 files,
-                exclude=[
-                    "*/lost+found",
-                    "*/boot.iso",
-                    "*/boot.iso.manifest",
-                    "EFI/*",
-                    "images/*",
-                    "isolinux/*",
-                    "boot/*",
-                    "ppc/*",
-                    "generic.ins"
-                ]
+                exclude=ignores,
+                update=updatables
         )

         if self.iso_map['xorrisofs']:
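Toy demonstration of the manifest partition added above, with a hypothetical three-line boot.iso.manifest (paths are illustrative only):

    updatable_files = {'isolinux/isolinux.cfg', 'EFI/BOOT/grub.cfg', '.discinfo'}
    manifest_lines = ['/isolinux/isolinux.cfg\n', '/images/install.img\n', '/.discinfo\n']
    ignores, updatables = set(), set()
    for line in manifest_lines:
        path = line.lstrip('/').rstrip('\n')
        (updatables if path in updatable_files else ignores).add(path)
    print(sorted(updatables))  # ['.discinfo', 'isolinux/isolinux.cfg']
    print(sorted(ignores))     # ['images/install.img']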
@@ -1249,12 +1209,12 @@ class IsoBuild:
             if isinstance(p, dict):
                 tree = p
             else:
-                tree = self._scanning(p)
-            result = self._merging(result, tree)
+                tree = Idents.scanning(p)
+            result = Idents.merging(result, tree)

         for p in exclusive_paths:
-            tree = self._scanning(p)
-            result = self._merging(result, tree, exclusive=True)
+            tree = Idents.scanning(p)
+            result = Idents.merging(result, tree, exclusive=True)

         # Resolves possible symlinks
         for key in result.keys():
@@ -1267,12 +1227,17 @@ class IsoBuild:

         return result

-    def _write_grafts(self, filepath, xorrspath, u, exclude=None):
+    def _write_grafts(self, filepath, xorrspath, u, exclude=None, update=None):
         """
         Write out the graft points
         """
         seen = set()
+        # There are files that are on the exclude list typically.
         exclude = exclude or []
+        # There is a chance files may get updated before being placed in a
+        # variant ISO - it's rare though. most that will be different is
+        # .discinfo
+        update = update or []
         result = {}
         for zl in sorted(u, reverse=True):
             dirn = os.path.dirname(zl)
@@ -1291,119 +1256,43 @@ class IsoBuild:
                 result[zl] = u[zl]
                 seen.add(dirn)

+        # We check first if a file needs to be updated first before relying on
+        # the boot.iso manifest to exclude a file
         if self.iso_map['xorrisofs']:
             fx = open(xorrspath, "w")
-            for zm in sorted(result, key=self._sorting):
+            for zm in sorted(result, key=Idents.sorting):
                 found = False
+                replace = False
+                for upda in update:
+                    if fnmatch(zm, upda):
+                        #print(f'updating: {zm} {upda}')
+                        replace = True
+                        break
                 for excl in exclude:
                     if fnmatch(zm, excl):
+                        #print(f'ignoring: {zm} {excl}')
                         found = True
                         break
                 if found:
                     continue
-                fx.write("-map %s %s\n" % (u[zm], zm))
+                mcmd = "-update" if replace else "-map"
+                fx.write("%s %s %s\n" % (mcmd, u[zm], zm))
             fx.close()
         else:
             fh = open(filepath, "w")
-            for zl in sorted(result, key=self._sorting):
-                found = False
-                for excl in exclude:
-                    if fnmatch(zl, excl):
-                        found = True
-                        break
-                if found:
-                    continue
+            self.log.info(Color.WARN + 'Nothing should be excluded in legacy ' +
+                    'genisoimage. Ignoring exclude list.')
+            for zl in sorted(result, key=Idents.sorting):
+                #found = False
+                #for excl in exclude:
+                #    if fnmatch(zl, excl):
+                #        found = True
+                #        break
+                #if found:
+                #    continue
                 fh.write("%s=%s\n" % (zl, u[zl]))
             fh.close()

-    def _scanning(self, p):
-        """
-        Scan tree
-        """
-        path = os.path.abspath(p)
-        result = {}
-        for root, dirs, files in os.walk(path):
-            for file in files:
-                abspath = os.path.join(root, file)
-                relpath = kobo.shortcuts.relative_path(abspath, path.rstrip("/") + "/")
-                result[relpath] = abspath
-
-            # Include empty directories too
-            if root != path:
-                abspath = os.path.join(root, "")
-                relpath = kobo.shortcuts.relative_path(abspath, path.rstrip("/") + "/")
-                result[relpath] = abspath
-
-        return result
-
-
-    def _merging(self, tree_a, tree_b, exclusive=False):
-        """
-        Merge tree
-        """
-        result = tree_b.copy()
-        all_dirs = set(
-            [os.path.dirname(dirn).rstrip("/") for dirn in result if os.path.dirname(dirn) != ""]
-        )
-
-        for dirn in tree_a:
-            dn = os.path.dirname(dirn)
-            if exclusive:
-                match = False
-                for x in all_dirs:
-                    if dn == x or dn.startswith("%s/" % x):
-                        match = True
-                        break
-                if match:
-                    continue
-
-            if dirn in result:
-                continue
-
-            result[dirn] = tree_a[dirn]
-        return result
-
-    def _sorting(self, k):
-        """
-        Sorting using the is_rpm and is_image funcs. Images are first, extras
-        next, rpm's last.
-        """
-        rolling = (0 if self._is_image(k) else 2 if self._is_rpm(k) else 1, k)
-        return rolling
-
-    def _is_rpm(self, k):
-        """
-        Is this an RPM? :o
-        """
-        result = k.endswith(".rpm")
-        return result
-
-    def _is_image(self, k):
-        """
-        Is this an image? :o
-        """
-        if (
-                k.startswith("images/") or
-                k.startswith("isolinux/") or
-                k.startswith("EFI/") or
-                k.startswith("etc/") or
-                k.startswith("ppc/")
-        ):
-            return True
-
-        if (
-                k.endswith(".img") or
-                k.endswith(".ins")
-        ):
-            return True
-
-        return False
-
-    def _get_vol_id(self):
-        """
-        Gets a volume ID
-        """
-
     def run_pull_generic_images(self):
         """
         Pulls generic images built in peridot and places them where they need
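Quick check of the control flow in the rewritten xorriso branch, using stdlib fnmatch; file names here are hypothetical (in practice the `update` and `exclude` sets are disjoint, since each manifest path lands in exactly one of them):

    from fnmatch import fnmatch

    update = {'.discinfo', 'isolinux/isolinux.cfg'}
    exclude = {'images/install.img', 'EFI/BOOT/BOOTX64.EFI'}

    def xorriso_line(path, src='/tmp/src'):
        replace = any(fnmatch(path, u) for u in update)
        if any(fnmatch(path, e) for e in exclude):
            return None  # already provided by boot.iso; skipped
        mcmd = '-update' if replace else '-map'
        return '%s %s/%s %s' % (mcmd, src, path, path)

    print(xorriso_line('isolinux/isolinux.cfg'))  # -update ... (refreshed in place)
    print(xorriso_line('images/install.img'))     # None (skipped)
    print(xorriso_line('Packages/foo.rpm'))       # -map ... (grafted in)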
@@ -1500,15 +1389,9 @@ class IsoBuild:
             drop_name = source_path.split('/')[-3] + fsuffix

             checksum_name = drop_name + '.CHECKSUM'
-            full_drop = '{}/{}'.format(
-                    image_arch_dir,
-                    drop_name
-            )
+            full_drop = f'{image_arch_dir}/{drop_name}'

-            checksum_drop = '{}/{}.CHECKSUM'.format(
-                    image_arch_dir,
-                    drop_name
-            )
+            checksum_drop = f'{image_arch_dir}/{drop_name}.CHECKSUM'

             if not os.path.exists(image_arch_dir):
                 os.makedirs(image_arch_dir, exist_ok=True)
@@ -1694,10 +1577,7 @@ class LiveBuild:
         self.compose_latest_dir = os.path.join(
                 config['compose_root'],
                 major,
-                "latest-{}-{}".format(
-                    self.shortname,
-                    self.profile
-                )
+                f"latest-{self.shortname}-{self.profile}"
         )

         self.compose_latest_sync = os.path.join(
@@ -1820,17 +1700,9 @@ class LiveBuild:
         if self.peridot:
             kloc = 'peridot'

-        mock_iso_path = '/var/tmp/live-{}.cfg'.format(self.major_version)
-        mock_sh_path = '{}/liveisobuild-{}-{}.sh'.format(
-                entries_dir,
-                self.current_arch,
-                image
-        )
-        iso_template_path = '{}/buildLiveImage-{}-{}.sh'.format(
-                entries_dir,
-                self.current_arch,
-                image
-        )
+        mock_iso_path = f'/var/tmp/live-{self.major_version}.cfg'
+        mock_sh_path = f'{entries_dir}/liveisobuild-{self.current_arch}-{image}.sh'
+        iso_template_path = f'{entries_dir}/buildLiveImage-{self.current_arch}-{image}.sh'

         log_root = os.path.join(
                 work_root,
@@ -1843,27 +1715,12 @@ class LiveBuild:
         if not os.path.exists(log_root):
             os.makedirs(log_root, exist_ok=True)

-        log_path_command = '| tee -a {}/{}-{}.log'.format(
-                log_root,
-                self.current_arch,
-                image
-        )
+        log_path_command = f'| tee -a {log_root}/{self.current_arch}-{image}.log'
         required_pkgs = self.livemap['required_pkgs']

-        volid = '{}-{}-{}-{}'.format(
-                self.shortname,
-                self.major_version,
-                self.minor_version,
-                image
-        )
+        volid = f'{self.shortname}-{self.major_version}-{self.minor_version}-{image}'

-        isoname = '{}-{}-{}-{}-{}.iso'.format(
-                self.shortname,
-                self.release,
-                image,
-                self.current_arch,
-                self.date
-        )
+        isoname = f'{self.shortname}-{self.release}-{image}-{self.current_arch}-{self.date}.iso'

         live_pkg_cmd = '/usr/bin/dnf install {} -y {}'.format(
                 ' '.join(required_pkgs),
@@ -1960,17 +1817,10 @@ class LiveBuild:
         self.log.warn(Color.WARN + 'This mode does not work properly. It will fail.')
         for i in images:
             image_name = i
-            entry_name = 'buildLiveImage-{}-{}.sh'.format(arch, i)
+            entry_name = f'buildLiveImage-{arch}-{i}.sh'
             entry_name_list.append(entry_name)

-            isoname = '{}/{}-{}-{}-{}-{}.iso'.format(
-                    arch,
-                    self.shortname,
-                    i,
-                    self.major_version,
-                    arch,
-                    self.date
-            )
+            isoname = f'{arch}/{self.shortname}-{i}-{self.major_version}-{arch}-{self.date}.iso'

             checksum_list.append(isoname)

@@ -1998,10 +1848,7 @@ class LiveBuild:
             time.sleep(3)
             self.log.info(Color.INFO + 'Building requested live images ...')

-            pod_watcher = '{} wait {}'.format(
-                    cmd,
-                    join_all_pods
-            )
+            pod_watcher = f'{cmd} wait {join_all_pods}'

             watch_man = subprocess.call(
                     shlex.split(pod_watcher),
@@ -2013,10 +1860,7 @@ class LiveBuild:
             # code.
             pattern = "Exited (0)"
             for pod in entry_name_list:
-                checkcmd = '{} ps -f status=exited -f name={}'.format(
-                        cmd,
-                        pod
-                )
+                checkcmd = f'{cmd} ps -f status=exited -f name={pod}'
                 podcheck = subprocess.Popen(
                         checkcmd,
                         stdout=subprocess.PIPE,
@@ -2029,10 +1873,7 @@ class LiveBuild:
                     self.log.error(Color.FAIL + pod)
                     bad_exit_list.append(pod)

-            rmcmd = '{} rm {}'.format(
-                    cmd,
-                    join_all_pods
-            )
+            rmcmd = f'{cmd} rm {join_all_pods}'

             rmpod = subprocess.Popen(
                     rmcmd,
@@ -2072,25 +1913,9 @@ class LiveBuild:
         """
         entries_dir = os.path.join(work_root, "entries")
         live_dir_arch = os.path.join(self.live_work_dir, arch)
-        isoname = '{}-{}-{}-{}-{}.iso'.format(
-                self.shortname,
-                self.release,
-                image,
-                arch,
-                self.date
-        )
-        isolink = '{}-{}-{}-{}-{}.iso'.format(
-                self.shortname,
-                self.major_version,
-                image,
-                arch,
-                'latest'
-        )
-        live_res_dir = '/var/lib/mock/{}-{}-{}/result'.format(
-                self.shortname.lower(),
-                self.major_version,
-                arch
-        )
+        isoname = f'{self.shortname}-{self.release}-{image}-{arch}-{self.date}.iso'
+        isolink = f'{self.shortname}-{self.major_version}-{image}-{arch}-latest.iso'
+        live_res_dir = f'/var/lib/mock/{self.shortname.lower()}-{self.major_version}-{arch}/result'

         if self.justcopyit:
             if os.path.exists(os.path.join(live_dir_arch, isoname)):
@@ -2101,7 +1926,7 @@ class LiveBuild:
                 self.log.warn(Color.WARN + 'Skipping.')
                 return

-        live_iso_cmd = '/bin/bash {}/liveisobuild-{}-{}.sh'.format(entries_dir, arch, image)
+        live_iso_cmd = f'/bin/bash {entries_dir}/liveisobuild-{arch}-{image}.sh'
         self.log.info('Starting mock build...')
         p = subprocess.call(shlex.split(live_iso_cmd))
         if p != 0:
@@ -40,6 +40,18 @@ class ArchCheck:
         ]
     }

+    # These are files that can potentially change on an image.
+    boot_configs = [
+        "isolinux/isolinux.cfg",
+        "etc/yaboot.conf",
+        "ppc/ppc64/yaboot.conf",
+        "EFI/BOOT/BOOTX64.conf",
+        "EFI/BOOT/grub.cfg"
+    ]
+    boot_images = [
+        "images/efiboot.img"
+    ]
+
 class Shared:
     """
     Quick utilities that may be commonly used
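These lists feed the IsoBuild change earlier in this diff, which seeds the set of files allowed to be refreshed on a variant ISO:

    # Mirrors the consumption site in iso_utils; '.discinfo' is appended there.
    boot_configs = ["isolinux/isolinux.cfg", "etc/yaboot.conf",
                    "ppc/ppc64/yaboot.conf", "EFI/BOOT/BOOTX64.conf",
                    "EFI/BOOT/grub.cfg"]
    boot_images = ["images/efiboot.img"]
    updatable_files = set(boot_configs + boot_images + ['.discinfo'])
    print('isolinux/isolinux.cfg' in updatable_files)  # True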
@@ -73,7 +85,7 @@ class Shared:
         base = os.path.basename(path)
         # This emulates our current syncing scripts that runs stat and
         # sha256sum and what not with a very specific output.
-        return "%s: %s bytes\n%s (%s) = %s\n" % (
+        return "# %s: %s bytes\n%s (%s) = %s\n" % (
             base,
             stat.st_size,
             hashtype.upper(),
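Shape of the emitted CHECKSUM text after this change (the size line is now a `#` comment, matching BSD-style checksum output); file name, size, and digest below are made-up placeholders:

    base, size = 'Rocky-x86_64-boot.iso', 123456789
    hashtype, checksum = 'sha256', 'abc123...'
    print("# %s: %s bytes\n%s (%s) = %s\n" % (base, size, hashtype.upper(), base, checksum))
    # # Rocky-x86_64-boot.iso: 123456789 bytes
    # SHA256 (Rocky-x86_64-boot.iso) = abc123...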
@@ -1141,3 +1153,208 @@ class Shared:
         logger.error('DNF syncing has been removed.')
         logger.error('Please install podman and enable parallel')
         raise SystemExit()
+
+    @staticmethod
+    def norm_dnf_sync(data, repo, sync_root, work_root, arch, logger):
+        """
+        This is for normal dnf syncs. This is very slow.
+        """
+        cmd = Shared.reposync_cmd(logger)
+        sync_single_arch = False
+        arches_to_sync = data.arches
+        if arch:
+            sync_single_arch = True
+            arches_to_sync = [arch]
+
+        logger.info(
+                Color.BOLD + '!! WARNING !! ' + Color.END + 'You are performing a '
+                'local reposync, which will incur delays in your compose.'
+        )
+
+        if data.fullrun:
+            logger.info(
+                    Color.BOLD + '!! WARNING !! ' + Color.END + 'This is a full '
+                    'sync. Expect a few days for it to complete.'
+            )
+
+        for r in repos_to_sync:
+            for a in arches_to_sync:
+                repo_name = r
+                if r in data.repo_renames:
+                    repo_name = data.repo_renames[r]
+
+                os_sync_path = os.path.join(
+                        sync_root,
+                        repo_name,
+                        a,
+                        'os'
+                )
+
+                debug_sync_path = os.path.join(
+                        sync_root,
+                        repo_name,
+                        a,
+                        'debug/tree'
+                )
+
+                sync_cmd = "{} -c {} --download-metadata --repoid={} -p {} --forcearch {} --norepopath".format(
+                        cmd,
+                        data.dnf_config,
+                        r,
+                        os_sync_path,
+                        a
+                )
+
+                debug_sync_cmd = "{} -c {} --download-metadata --repoid={}-debug -p {} --forcearch {} --norepopath".format(
+                        cmd,
+                        data.dnf_config,
+                        r,
+                        debug_sync_path,
+                        a
+                )
+
+                logger.info('Syncing {} {}'.format(r, a))
+                #logger.info(sync_cmd)
+                # Try to figure out where to send the actual output of this...
+                # Also consider on running a try/except here? Basically if
+                # something happens (like a repo doesn't exist for some arch,
+                # eg RT for aarch64), make a note of it somehow (but don't
+                # break the entire sync). As it stands with this
+                # implementation, if something fails, it just continues on.
+                process = subprocess.call(
+                        shlex.split(sync_cmd),
+                        stdout=subprocess.DEVNULL,
+                        stderr=subprocess.DEVNULL
+                )
+
+                if not data.ignore_debug:
+                    logger.info('Syncing {} {} (debug)'.format(r, a))
+                    process_debug = subprocess.call(
+                            shlex.split(debug_sync_cmd),
+                            stdout=subprocess.DEVNULL,
+                            stderr=subprocess.DEVNULL
+                    )
+
+                # There should be a check here that if it's "all" and multilib
+                # is on, i686 should get synced too.
+
+            if not data.ignore_source:
+                source_sync_path = os.path.join(
+                        sync_root,
+                        repo_name,
+                        'source/tree'
+                )
+
+                source_sync_cmd = "{} -c {} --download-metadata --repoid={}-source -p {} --norepopath".format(
+                        cmd,
+                        data.dnf_config,
+                        r,
+                        source_sync_path
+                )
+
+                logger.info('Syncing {} source'.format(r))
+                process_source = subprocess.call(
+                        shlex.split(source_sync_cmd),
+                        stdout=subprocess.DEVNULL,
+                        stderr=subprocess.DEVNULL
+                )
+
+        logger.info('Syncing complete')
+
+class Idents:
+    """
+    Identifiers or locators
+    """
+    @staticmethod
+    def scanning(p):
+        """
+        Scan tree
+        """
+        path = os.path.abspath(p)
+        result = {}
+        for root, dirs, files in os.walk(path):
+            for file in files:
+                abspath = os.path.join(root, file)
+                relpath = kobo.shortcuts.relative_path(abspath, path.rstrip("/") + "/")
+                result[relpath] = abspath
+
+            # Include empty directories too
+            if root != path:
+                abspath = os.path.join(root, "")
+                relpath = kobo.shortcuts.relative_path(abspath, path.rstrip("/") + "/")
+                result[relpath] = abspath
+
+        return result
+
+    @staticmethod
+    def merging(tree_a, tree_b, exclusive=False):
+        """
+        Merge tree
+        """
+        result = tree_b.copy()
+        all_dirs = set(
+            [os.path.dirname(dirn).rstrip("/") for dirn in result if os.path.dirname(dirn) != ""]
+        )
+
+        for dirn in tree_a:
+            dn = os.path.dirname(dirn)
+            if exclusive:
+                match = False
+                for x in all_dirs:
+                    if dn == x or dn.startswith("%s/" % x):
+                        match = True
+                        break
+                if match:
+                    continue
+
+            if dirn in result:
+                continue
+
+            result[dirn] = tree_a[dirn]
+        return result
+
+    @staticmethod
+    def sorting(k):
+        """
+        Sorting using the is_rpm and is_image funcs. Images are first, extras
+        next, rpm's last.
+        """
+        rolling = (0 if Idents.is_image(k) else 2 if Idents.is_rpm(k) else 1, k)
+        return rolling
+
+    @staticmethod
+    def is_rpm(k):
+        """
+        Is this an RPM? :o
+        """
+        result = k.endswith(".rpm")
+        return result
+
+    @staticmethod
+    def is_image(k):
+        """
+        Is this an image? :o
+        """
+        if (
+                k.startswith("images/") or
+                k.startswith("isolinux/") or
+                k.startswith("EFI/") or
+                k.startswith("etc/") or
+                k.startswith("ppc/")
+        ):
+            return True
+
+        if (
+                k.endswith(".img") or
+                k.endswith(".ins")
+        ):
+            return True
+
+        return False
+
+    @staticmethod
+    def get_vol_id(opts):
+        """
+        Gets a volume ID
+        """
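The ordering `Idents.sorting` produces (images first, miscellaneous files next, RPMs last) can be checked with a standalone re-implementation; sample paths below are illustrative:

    def is_rpm(k): return k.endswith('.rpm')
    def is_image(k):
        return (k.startswith(('images/', 'isolinux/', 'EFI/', 'etc/', 'ppc/'))
                or k.endswith(('.img', '.ins')))
    def sorting(k): return (0 if is_image(k) else 2 if is_rpm(k) else 1, k)

    paths = ['Packages/bash-5.1.rpm', 'isolinux/vmlinuz', 'repodata/repomd.xml']
    print(sorted(paths, key=sorting))
    # ['isolinux/vmlinuz', 'repodata/repomd.xml', 'Packages/bash-5.1.rpm']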
167 iso/empanadas/poetry.lock (generated)

@@ -1,6 +1,6 @@
 [[package]]
 name = "atomicwrites"
-version = "1.4.0"
+version = "1.4.1"
 description = "Atomic file writes."
 category = "dev"
 optional = false
@@ -22,14 +22,14 @@ tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>

 [[package]]
 name = "boto3"
-version = "1.24.22"
+version = "1.26.89"
 description = "The AWS SDK for Python"
 category = "main"
 optional = false
 python-versions = ">= 3.7"

 [package.dependencies]
-botocore = ">=1.27.22,<1.28.0"
+botocore = ">=1.29.89,<1.30.0"
 jmespath = ">=0.7.1,<2.0.0"
 s3transfer = ">=0.6.0,<0.7.0"
@@ -38,7 +38,7 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"]

 [[package]]
 name = "botocore"
-version = "1.27.22"
+version = "1.29.89"
 description = "Low-level, data-driven core of boto 3."
 category = "main"
 optional = false
@@ -50,11 +50,11 @@ python-dateutil = ">=2.1,<3.0.0"
 urllib3 = ">=1.25.4,<1.27"

 [package.extras]
-crt = ["awscrt (==0.13.8)"]
+crt = ["awscrt (==0.16.9)"]

 [[package]]
 name = "certifi"
-version = "2022.6.15"
+version = "2022.12.7"
 description = "Python package for providing Mozilla's CA Bundle."
 category = "main"
 optional = false
@@ -62,26 +62,23 @@ python-versions = ">=3.6"

 [[package]]
 name = "charset-normalizer"
-version = "2.1.0"
+version = "3.1.0"
 description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
 category = "main"
 optional = false
-python-versions = ">=3.6.0"
-
-[package.extras]
-unicode_backport = ["unicodedata2"]
+python-versions = ">=3.7.0"

 [[package]]
 name = "colorama"
-version = "0.4.5"
+version = "0.4.6"
 description = "Cross-platform colored terminal text."
 category = "dev"
 optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
+python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"

 [[package]]
 name = "idna"
-version = "3.3"
+version = "3.4"
 description = "Internationalized Domain Names in Applications (IDNA)"
 category = "main"
 optional = false
@@ -89,7 +86,7 @@ python-versions = ">=3.5"

 [[package]]
 name = "importlib-metadata"
-version = "4.12.0"
+version = "6.0.0"
 description = "Read metadata from Python packages"
 category = "dev"
 optional = false
@@ -100,13 +97,13 @@ typing-extensions = {version = ">=3.6.4", markers = "python_version < \"3.8\""}
 zipp = ">=0.5"

 [package.extras]
-docs = ["sphinx", "jaraco.packaging (>=9)", "rst.linker (>=1.9)"]
+docs = ["sphinx (>=3.5)", "jaraco.packaging (>=9)", "rst.linker (>=1.9)", "furo", "sphinx-lint", "jaraco.tidelift (>=1.4)"]
 perf = ["ipython"]
-testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.3)", "packaging", "pyfakefs", "flufl.flake8", "pytest-perf (>=0.9.2)", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)", "importlib-resources (>=1.3)"]
+testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "flake8 (<5)", "pytest-cov", "pytest-enabler (>=1.3)", "packaging", "pyfakefs", "flufl.flake8", "pytest-perf (>=0.9.2)", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)", "pytest-flake8", "importlib-resources (>=1.3)"]

 [[package]]
 name = "importlib-resources"
-version = "5.8.0"
+version = "5.12.0"
 description = "Read resources from Python packages"
 category = "main"
 optional = false
@@ -116,8 +113,8 @@ python-versions = ">=3.7"
 zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""}

 [package.extras]
-docs = ["sphinx", "jaraco.packaging (>=9)", "rst.linker (>=1.9)"]
-testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)"]
+docs = ["sphinx (>=3.5)", "jaraco.packaging (>=9)", "rst.linker (>=1.9)", "furo", "sphinx-lint", "jaraco.tidelift (>=1.4)"]
+testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "flake8 (<5)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)", "pytest-flake8"]

 [[package]]
 name = "jinja2"
||||
@ -162,22 +159,19 @@ python-versions = ">=3.6"
|
||||
|
||||
[[package]]
|
||||
name = "more-itertools"
|
||||
version = "8.13.0"
|
||||
version = "9.1.0"
|
||||
description = "More routines for operating on iterables, beyond itertools"
|
||||
category = "dev"
|
||||
optional = false
|
||||
python-versions = ">=3.5"
|
||||
python-versions = ">=3.7"
|
||||
|
||||
[[package]]
|
||||
name = "packaging"
|
||||
version = "21.3"
|
||||
version = "23.0"
|
||||
description = "Core utilities for Python packages"
|
||||
category = "dev"
|
||||
optional = false
|
||||
python-versions = ">=3.6"
|
||||
|
||||
[package.dependencies]
|
||||
pyparsing = ">=2.0.2,<3.0.5 || >3.0.5"
|
||||
python-versions = ">=3.7"
|
||||
|
||||
[[package]]
|
||||
name = "pluggy"
|
||||
@ -212,17 +206,6 @@ category = "dev"
|
||||
optional = false
|
||||
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
|
||||
|
||||
[[package]]
|
||||
name = "pyparsing"
|
||||
version = "3.0.9"
|
||||
description = "pyparsing module - Classes and methods to define and execute parsing grammars"
|
||||
category = "dev"
|
||||
optional = false
|
||||
python-versions = ">=3.6.8"
|
||||
|
||||
[package.extras]
|
||||
diagrams = ["railroad-diagrams", "jinja2"]
|
||||
|
||||
[[package]]
|
||||
name = "pytest"
|
||||
version = "5.4.3"
|
||||
@@ -267,7 +250,7 @@ python-versions = ">=3.6"

 [[package]]
 name = "requests"
-version = "2.28.1"
+version = "2.28.2"
 description = "Python HTTP for Humans."
 category = "main"
 optional = false
@@ -275,7 +258,7 @@ python-versions = ">=3.7, <4"

 [package.dependencies]
 certifi = ">=2017.4.17"
-charset-normalizer = ">=2,<3"
+charset-normalizer = ">=2,<4"
 idna = ">=2.5,<4"
 urllib3 = ">=1.21.1,<1.27"

@@ -315,7 +298,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"

 [[package]]
 name = "typing-extensions"
-version = "4.3.0"
+version = "4.5.0"
 description = "Backported and Experimental Type Hints for Python 3.7+"
 category = "dev"
 optional = false
@@ -323,20 +306,20 @@ python-versions = ">=3.7"

 [[package]]
 name = "urllib3"
-version = "1.26.9"
+version = "1.26.15"
 description = "HTTP library with thread-safe connection pooling, file post, and more."
 category = "main"
 optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4"
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*"

 [package.extras]
 brotli = ["brotlicffi (>=0.8.0)", "brotli (>=1.0.9)", "brotlipy (>=0.6.0)"]
-secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "ipaddress"]
+secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "urllib3-secure-extra", "ipaddress"]
 socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"]

 [[package]]
 name = "wcwidth"
-version = "0.2.5"
+version = "0.2.6"
 description = "Measures the displayed width of unicode strings in a terminal"
 category = "dev"
 optional = false
@@ -352,15 +335,15 @@ python-versions = ">=3.4"

 [[package]]
 name = "zipp"
-version = "3.8.0"
+version = "3.15.0"
 description = "Backport of pathlib-compatible object wrapper for zip files"
 category = "main"
 optional = false
 python-versions = ">=3.7"

 [package.extras]
-docs = ["sphinx", "jaraco.packaging (>=9)", "rst.linker (>=1.9)"]
-testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)"]
+docs = ["sphinx (>=3.5)", "jaraco.packaging (>=9)", "rst.linker (>=1.9)", "furo", "sphinx-lint", "jaraco.tidelift (>=1.4)"]
+testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "flake8 (<5)", "pytest-cov", "pytest-enabler (>=1.3)", "jaraco.itertools", "jaraco.functools", "more-itertools", "big-o", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)", "pytest-flake8"]

 [metadata]
 lock-version = "1.1"
@@ -368,46 +351,19 @@ python-versions = ">=3.7,<4"
 content-hash = "42676fd0ceb350c8cd90246dc688cfcd404e14d22229052d0527fe342c135b95"

 [metadata.files]
-atomicwrites = [
-    {file = "atomicwrites-1.4.0-py2.py3-none-any.whl", hash = "sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197"},
-    {file = "atomicwrites-1.4.0.tar.gz", hash = "sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"},
-]
+atomicwrites = []
 attrs = [
     {file = "attrs-21.4.0-py2.py3-none-any.whl", hash = "sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4"},
     {file = "attrs-21.4.0.tar.gz", hash = "sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd"},
 ]
-boto3 = [
-    {file = "boto3-1.24.22-py3-none-any.whl", hash = "sha256:c9a9f893561f64f5b81de197714ac4951251a328672a8dba28ad4c4a589c3adf"},
-    {file = "boto3-1.24.22.tar.gz", hash = "sha256:67d404c643091d4aa37fc485193289ad859f1f65f94d0fa544e13bdd1d4187c1"},
-]
-botocore = [
-    {file = "botocore-1.27.22-py3-none-any.whl", hash = "sha256:7145d9b7cae87999a9f074de700d02a1b3222ee7d1863aa631ff56c5fc868035"},
-    {file = "botocore-1.27.22.tar.gz", hash = "sha256:f57cb33446deef92e552b0be0e430d475c73cf64bc9e46cdb4783cdfe39cb6bb"},
-]
-certifi = [
-    {file = "certifi-2022.6.15-py3-none-any.whl", hash = "sha256:fe86415d55e84719d75f8b69414f6438ac3547d2078ab91b67e779ef69378412"},
-    {file = "certifi-2022.6.15.tar.gz", hash = "sha256:84c85a9078b11105f04f3036a9482ae10e4621616db313fe045dd24743a0820d"},
-]
-charset-normalizer = [
-    {file = "charset-normalizer-2.1.0.tar.gz", hash = "sha256:575e708016ff3a5e3681541cb9d79312c416835686d054a23accb873b254f413"},
-    {file = "charset_normalizer-2.1.0-py3-none-any.whl", hash = "sha256:5189b6f22b01957427f35b6a08d9a0bc45b46d3788ef5a92e978433c7a35f8a5"},
-]
-colorama = [
-    {file = "colorama-0.4.5-py2.py3-none-any.whl", hash = "sha256:854bf444933e37f5824ae7bfc1e98d5bce2ebe4160d46b5edf346a89358e99da"},
-    {file = "colorama-0.4.5.tar.gz", hash = "sha256:e6c6b4334fc50988a639d9b98aa429a0b57da6e17b9a44f0451f930b6967b7a4"},
-]
-idna = [
-    {file = "idna-3.3-py3-none-any.whl", hash = "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff"},
-    {file = "idna-3.3.tar.gz", hash = "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"},
-]
-importlib-metadata = [
-    {file = "importlib_metadata-4.12.0-py3-none-any.whl", hash = "sha256:7401a975809ea1fdc658c3aa4f78cc2195a0e019c5cbc4c06122884e9ae80c23"},
-    {file = "importlib_metadata-4.12.0.tar.gz", hash = "sha256:637245b8bab2b6502fcbc752cc4b7a6f6243bb02b31c5c26156ad103d3d45670"},
-]
-importlib-resources = [
-    {file = "importlib_resources-5.8.0-py3-none-any.whl", hash = "sha256:7952325ffd516c05a8ad0858c74dff2c3343f136fe66a6002b2623dd1d43f223"},
-    {file = "importlib_resources-5.8.0.tar.gz", hash = "sha256:568c9f16cb204f9decc8d6d24a572eeea27dacbb4cee9e6b03a8025736769751"},
-]
+boto3 = []
+botocore = []
+certifi = []
+charset-normalizer = []
+colorama = []
+idna = []
+importlib-metadata = []
+importlib-resources = []
 jinja2 = [
     {file = "Jinja2-2.11.3-py2.py3-none-any.whl", hash = "sha256:03e47ad063331dd6a3f04a43eddca8a966a26ba0c5b7207a9a9e4e08f1b29419"},
     {file = "Jinja2-2.11.3.tar.gz", hash = "sha256:a6d58433de0ae800347cab1fa3043cebbabe8baa9d29e668f1c768cb87a333c6"},
@@ -416,9 +372,7 @@ jmespath = [
     {file = "jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980"},
     {file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"},
 ]
-kobo = [
-    {file = "kobo-0.24.2.tar.gz", hash = "sha256:1b3c17260a93d933d2238884373fbf3485ecd417d930acf984285dc012410e2b"},
-]
+kobo = []
 markupsafe = [
     {file = "MarkupSafe-2.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d8446c54dc28c01e5a2dbac5a25f071f6653e6e40f3a8818e8b45d790fe6ef53"},
     {file = "MarkupSafe-2.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:36bc903cbb393720fad60fc28c10de6acf10dc6cc883f3e24ee4012371399a38"},
@@ -490,14 +444,8 @@ markupsafe = [
     {file = "MarkupSafe-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:693ce3f9e70a6cf7d2fb9e6c9d8b204b6b39897a2c4a1aa65728d5ac97dcc1d8"},
     {file = "MarkupSafe-2.0.1.tar.gz", hash = "sha256:594c67807fb16238b30c44bdf74f36c02cdf22d1c8cda91ef8a0ed8dabf5620a"},
 ]
-more-itertools = [
-    {file = "more-itertools-8.13.0.tar.gz", hash = "sha256:a42901a0a5b169d925f6f217cd5a190e32ef54360905b9c39ee7db5313bfec0f"},
-    {file = "more_itertools-8.13.0-py3-none-any.whl", hash = "sha256:c5122bffc5f104d37c1626b8615b511f3427aa5389b94d61e5ef8236bfbc3ddb"},
-]
-packaging = [
-    {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"},
-    {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"},
-]
+more-itertools = []
+packaging = []
 pluggy = [
     {file = "pluggy-0.13.1-py2.py3-none-any.whl", hash = "sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d"},
     {file = "pluggy-0.13.1.tar.gz", hash = "sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0"},
@@ -510,10 +458,6 @@ py = [
     {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"},
     {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"},
 ]
-pyparsing = [
-    {file = "pyparsing-3.0.9-py3-none-any.whl", hash = "sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc"},
-    {file = "pyparsing-3.0.9.tar.gz", hash = "sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb"},
-]
 pytest = [
     {file = "pytest-5.4.3-py3-none-any.whl", hash = "sha256:5c0db86b698e8f170ba4582a492248919255fcd4c79b1ee64ace34301fb589a1"},
     {file = "pytest-5.4.3.tar.gz", hash = "sha256:7979331bfcba207414f5e1263b5a0f8f521d0f457318836a7355531ed1a4c7d8"},
@@ -557,10 +501,7 @@ pyyaml = [
     {file = "PyYAML-6.0-cp39-cp39-win_amd64.whl", hash = "sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c"},
     {file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"},
 ]
-requests = [
-    {file = "requests-2.28.1-py3-none-any.whl", hash = "sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349"},
-    {file = "requests-2.28.1.tar.gz", hash = "sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983"},
-]
+requests = []
 rpm-py-installer = [
     {file = "rpm-py-installer-1.1.0.tar.gz", hash = "sha256:66e5f4f9247752ed386345642683103afaee50fb16928878a204bc12504b9bbe"},
 ]
@@ -572,23 +513,11 @@ six = [
     {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"},
     {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"},
 ]
-typing-extensions = [
-    {file = "typing_extensions-4.3.0-py3-none-any.whl", hash = "sha256:25642c956049920a5aa49edcdd6ab1e06d7e5d467fc00e0506c44ac86fbfca02"},
-    {file = "typing_extensions-4.3.0.tar.gz", hash = "sha256:e6d2677a32f47fc7eb2795db1dd15c1f34eff616bcaf2cfb5e997f854fa1c4a6"},
-]
-urllib3 = [
-    {file = "urllib3-1.26.9-py2.py3-none-any.whl", hash = "sha256:44ece4d53fb1706f667c9bd1c648f5469a2ec925fcf3a776667042d645472c14"},
-    {file = "urllib3-1.26.9.tar.gz", hash = "sha256:aabaf16477806a5e1dd19aa41f8c2b7950dd3c746362d7e3223dbe6de6ac448e"},
-]
-wcwidth = [
-    {file = "wcwidth-0.2.5-py2.py3-none-any.whl", hash = "sha256:beb4802a9cebb9144e99086eff703a642a13d6a0052920003a230f3294bbe784"},
-    {file = "wcwidth-0.2.5.tar.gz", hash = "sha256:c4d647b99872929fdb7bdcaa4fbe7f01413ed3d98077df798530e5b04f116c83"},
-]
+typing-extensions = []
+urllib3 = []
+wcwidth = []
 xmltodict = [
     {file = "xmltodict-0.13.0-py2.py3-none-any.whl", hash = "sha256:aa89e8fd76320154a40d19a0df04a4695fb9dc5ba977cbb68ab3e4eb225e7852"},
     {file = "xmltodict-0.13.0.tar.gz", hash = "sha256:341595a488e3e01a85a9d8911d8912fd922ede5fecc4dce437eb4b6c8d037e56"},
 ]
-zipp = [
-    {file = "zipp-3.8.0-py3-none-any.whl", hash = "sha256:c4f6e5bbf48e74f7a38e7cc5b0480ff42b0ae5178957d564d18932525d5cf099"},
-    {file = "zipp-3.8.0.tar.gz", hash = "sha256:56bf8aadb83c24db6c4b577e13de374ccfb67da2078beba1d037c17980bf43ad"},
-]
+zipp = []
68 mangle/fix-aws-quotas.sh (new file)

@@ -0,0 +1,68 @@
+#!/bin/bash
+
+# Source common variables
+# shellcheck disable=SC2046,1091,1090
+source "$(dirname "${BASH_SOURCE[0]}")/common"
+
+usage() {
+  echo "usage: $0"
+}
+
+aws() {
+  # shellcheck disable=SC2068
+  command aws --profile resf-ami --output json $@
+}
+
+# Get the quota code for Public AMIs once
+quota_code=$(aws service-quotas list-service-quotas --service-code ec2 --region us-east-1 --query "Quotas[*].{QuotaCode:QuotaCode,QuotaName:QuotaName}" | jq '.[] | select(.QuotaName == "Public AMIs") | .QuotaCode' | tr -d '"')
+
+function get_current_quota() {
+  region=$1
+  # Get the current value of the quota
+  current_value=$(aws service-quotas get-service-quota --service-code ec2 --quota-code "$quota_code" --region "$region" 2>/dev/null | jq .Quota.Value 2>/dev/null)
+  # shellcheck disable=SC2181
+  if [[ $? -gt 0 ]]; then
+    echo "ERR"
+  fi
+  echo "$current_value"
+}
+
+function request_quota_increase() {
+  mapfile -t regions <<<"$@"
+  for region in "${regions[@]}"; do
+    # Get the current value of the quota
+    current_value=$(get_current_quota "$region")
+    if ((current_value >= QUOTA)); then
+      echo "Quota for Public AMIs in region $region is already set to $current_value, skipping request."
+    else
+      # Request the quota increase
+      request_output=$(aws service-quotas request-service-quota-increase --service-code ec2 --quota-code "$quota_code" --region "$region" --desired-value "$QUOTA")
+      request_id=$(echo "$request_output" | jq .RequestedQuota.Id | tr -d '"')
+      echo "Successfully submitted request with ID: $request_id"
+    fi
+  done
+}
+
+function list_quotas() {
+  mapfile -t regions <<<"$@"
+  output="Region\tQuota\tDesired\tStatus"
+  for region in "${regions[@]}"; do
+    current_quota=$(get_current_quota "$region")
+    request_info=$(aws service-quotas list-requested-service-quota-change-history-by-quota --service-code ec2 --quota-code "$quota_code" --region "$region" --query "RequestedQuotas[-1:].{DesiredValue:DesiredValue,Status:Status}" 2>/dev/null)
+    requested_value=$(echo "$request_info" | jq .[].DesiredValue)
+    case_status=$(echo "$request_info" | jq .[].Status | tr -d '"')
+    output="$output\n$region $current_quota $requested_value $case_status"
+  done
+  echo -e "$output" | column -t
+}
+
+REGIONS=$(aws ec2 describe-regions \
+  --all-regions \
+  --query "Regions[].{Name:RegionName}" \
+  --output text)
+
+QUOTA=50
+
+list_quotas "$REGIONS"
+
+# request_quota_increase "$REGIONS"
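The same quota lookup can be done from Python with boto3, which is already a locked dependency above. A minimal sketch; the profile name and the "Public AMIs" quota come from the script, everything else is illustrative (note `list_service_quotas` paginates, so a long quota list may need a paginator):

    import boto3

    session = boto3.Session(profile_name='resf-ami')
    sq = session.client('service-quotas', region_name='us-east-1')

    # Find the quota code once, as the script does with jq.
    quota_code = next(
        q['QuotaCode']
        for q in sq.list_service_quotas(ServiceCode='ec2')['Quotas']
        if q['QuotaName'] == 'Public AMIs'
    )
    current = sq.get_service_quota(ServiceCode='ec2', QuotaCode=quota_code)
    print(current['Quota']['Value'])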
4 mangle/generators/.gitignore (vendored)

@@ -1,3 +1,5 @@
-__pycache__/ *.py[cod]
+__pycache__/
+*.py[cod]
 *$py.class
 *.so
+generate_prepopulate_from_self
@@ -17,4 +17,6 @@ if [ "$?" -ne 0 ]; then
 fi

 STREAM_COMPOSE_BASEURL="https://composes.stream.centos.org/production"
+STREAM_KOJI_REPO="https://kojihub.stream.centos.org/kojifiles/repos"
+PERIDOT_REPO="https://yumrepofs.build.resf.org/v1/projects"
 COMPOSE_BASEDIR="/mnt/compose"
@@ -1,6 +1,18 @@
+import sys
+
 class common:
-    def rlver(self, rlver):
+    def rlver(self, rlver, stream=False, all_repo=False):
         default = "Not Supported"
+        if stream and all_repo:
+            print("incompatible options used")
+            sys.exit(1)
+
+        if stream:
+            return getattr(self, 'c' + str(rlver) + 's', lambda: default)()
+
+        if all_repo:
+            return getattr(self, 'rl' + str(rlver) + 'all', lambda: default)()
+
         return getattr(self, 'rl' + str(rlver), lambda: default)()

     def rl8(self):
@@ -28,4 +40,23 @@ class common:
         }
         return REPOS

+    def rl9all(self):
+        REPOS = {
+            'all': ['aarch64', 'ppc64le', 's390x', 'x86_64'],
+        }
+        return REPOS
+
+    # Parse tags of koji
+    def c8s(self):
+        REPOS = {
+            'c8s-build': ['aarch64', 'ppc64le', 'x86_64'],
+        }
+        return REPOS
+
+    def c9s(self):
+        REPOS = {
+            'c9s-build': ['aarch64', 'ppc64le', 's390x', 'x86_64'],
+        }
+        return REPOS
+
 switcher = common()
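Dispatch behavior of the extended rlver(), assuming the class above: stream=True routes to c<ver>s (Stream koji tags), all_repo=True to rl<ver>all (the peridot "all" repo), and anything without a matching method falls back to the default (the rl10 call below is a hypothetical miss):

    print(switcher.rlver(9, stream=True))
    # {'c9s-build': ['aarch64', 'ppc64le', 's390x', 'x86_64']}
    print(switcher.rlver(9, all_repo=True))
    # {'all': ['aarch64', 'ppc64le', 's390x', 'x86_64']}
    print(switcher.rlver(10))
    # Not Supported (assuming no rl10 method is defined)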
@@ -2,3 +2,16 @@

 REPO=("BaseOS" "AppStream" "CRB" "HighAvailability" "ResilientStorage" "NFV" "RT" "SAP" "SAPHANA")
 ARCH=("aarch64" "ppc64le" "s390x" "x86_64")
+
+VER="r${MAJOR}${LH}"
+
+case "$VER" in
+  r9)
+    export PERIDOT_PROJECT_ID=0048077b-1573-4cb7-8ba7-cce823857ba5 ;;
+  r9lh)
+    export PERIDOT_PROJECT_ID=6794b5a8-290b-4d0d-ad5a-47164329cbb0 ;;
+  *)
+    echo "Not supported"
+    exit 1
+    ;;
+esac
20 mangle/generators/generate_catalog_branched (new file)

@@ -0,0 +1,20 @@
+#!/bin/bash
+# Creates a prepopulate from KPR and then creates a catalog
+set -e
+
+if [ -n "$1" ]; then
+  NEWMAJOR=$1
+else
+  echo "Major version not specified"
+  exit 1
+fi
+
+tmpdir=$(mktemp -d)
+bash generate_prepopulate_from_kpr ${NEWMAJOR}
+git clone https://github.com/rocky-linux/peridot-releng.git "${tmpdir}/peridot-releng"
+cp prepopcatalog.py "${tmpdir}/peridot-releng/pungicatalog/"
+pushd "${tmpdir}/peridot-releng" || { echo "Could not change directory"; exit 1; }
+python3 pungicatalog/prepopcatalog.py --major ${NEWMAJOR} --minor 0 --prepopulate-path /tmp/prepopulate.json --output-path "${tmpdir}/branched.cfg"
+popd || { echo "Could not change back..."; exit 1; }
+
+echo "File located at: ${tmpdir}/branched.cfg"
42 mangle/generators/generate_prepopulate_from_kpr (executable file)

@@ -0,0 +1,42 @@
+#!/bin/bash
+# Parses a koji tag for a branch out
+#set -x
+
+if [ -n "$1" ]; then
+  MAJOR=$1
+else
+  echo "Major version not specified"
+  exit 1
+fi
+
+export RLVER=$MAJOR
+source common
+
+drop="${PREPOPDROP}"
+current=$(pwd)
+tmpdir=$(mktemp -d)
+tag_template="c${MAJOR}s-build"
+stream_repo_url="${STREAM_KOJI_REPO}/${tag_template}/latest"
+
+pushd "${tmpdir}" || { echo "Could not change directory"; exit 1; }
+for y in "${ARCH[@]}"; do
+  repodatas=( $(dnf reposync --repofrompath ${tag_template},${stream_repo_url}/${y} --download-metadata --repoid=${tag_template} -p ${tag_template}/${y} --forcearch ${y} --norepopath --remote-time --assumeyes -u | grep repodata) )
+  mkdir -p "${tag_template}/${y}/repodata"
+  pushd "${tag_template}/${y}/repodata" || { echo "Could not change directory"; exit 1; }
+  for z in "${repodatas[@]}"; do
+    wget -q -nc "${z}"
+  done
+  wget -q -nc "${stream_repo_url}/${y}/repodata/repomd.xml"
+  popd || { echo "Could not change back..."; exit 1; }
+done
+/usr/bin/python3 "${current}/prepopulate_parser.py" --version $RLVER --stream
+ret_val=$?
+popd || { echo "Could not change back..."; exit 1; }
+
+if [ "$ret_val" -ne "0" ]; then
+  echo "There was an error running through the parser."
+  exit 1
+fi
+
+sed -i "s|${tag_template}|branched|g" $drop
+echo "File located at: $drop"
@@ -27,7 +27,7 @@ current=$(pwd)
 compose_dir="${COMPOSE_BASEDIR}/${MAJOR}/Rocky-${MAJOR}-${DATE}/compose"

 pushd "${compose_dir}" || { echo "Could not change directory"; exit 1; }
-/usr/bin/python3 "${current}/prepopulate_parser.py"
+/usr/bin/python3 "${current}/prepopulate_parser.py" --version ${RLVER}
 ret_val=$?
 popd || { echo "Could not change back..."; exit 1; }

@@ -17,7 +17,7 @@ current=$(pwd)
 compose_dir="${COMPOSE_BASEDIR}/${MAJOR}/latest-Rocky-${MAJOR}/compose"

 pushd "${compose_dir}" || { echo "Could not change directory"; exit 1; }
-/usr/bin/python3 "${current}/prepopulate_parser.py" --pungi
+/usr/bin/python3 "${current}/prepopulate_parser.py" --pungi --version ${RLVER}
 ret_val=$?
 popd || { echo "Could not change back..."; exit 1; }

@@ -41,7 +41,7 @@ for x in "${REPO[@]}"; do
     popd || { echo "Could not change back..."; exit 1; }
   done
 done
-/usr/bin/python3 "${current}/prepopulate_parser.py"
+/usr/bin/python3 "${current}/prepopulate_parser.py" --version ${MAJOR}
 ret_val=$?
 popd || { echo "Could not change back..."; exit 1; }

41
mangle/generators/generate_versions_from_kpr
Executable file
41
mangle/generators/generate_versions_from_kpr
Executable file
@ -0,0 +1,41 @@
#!/bin/bash
# Parses a CentOS Stream Koji Tag
#set -x

if [ -n "$1" ]; then
  MAJOR="$1"
else
  echo "Major version not specified"
  exit 1
fi

export RLVER="${MAJOR}"
source common

drop="${VERSDROP}"
current=$(pwd)
tmpdir=$(mktemp -d)
tag_template="c${MAJOR}s-build"
stream_repo_url="${STREAM_KOJI_REPO}/${tag_template}/latest"

pushd "${tmpdir}" || { echo "Could not change directory"; exit 1; }
for y in "${ARCH[@]}"; do
  repodatas=( $(dnf reposync --repofrompath ${tag_template},${stream_repo_url}/${y} --download-metadata --repoid=${tag_template} -p ${tag_template}/${y} --forcearch ${y} --norepopath --remote-time --assumeyes -u | grep repodata) )
  mkdir -p "${tag_template}/${y}/repodata"
  pushd "${tag_template}/${y}/repodata" || { echo "Could not change directory"; exit 1; }
  for z in "${repodatas[@]}"; do
    wget -q -nc "${z}"
  done
  wget -q -nc "${stream_repo_url}/${y}/repodata/repomd.xml"
  popd || { echo "Could not change back..."; exit 1; }
done
/usr/bin/python3 "${current}/version_parser.py" --version "${MAJOR}" --stream
ret_val=$?
popd || { echo "Could not change back..."; exit 1; }

if [ "$ret_val" -ne "0" ]; then
  echo "There was an error running through the parser."
  exit 1
fi

echo "File located at: $drop"
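The script above leans on a dnf trick: with -u (--urls), reposync prints the URLs it would fetch instead of downloading packages, and --download-metadata makes it include the repodata files in that listing; grepping for "repodata" and feeding the result to wget yields a metadata-only mirror. A minimal standalone sketch of the same idea, with an illustrative repo URL and paths:

#!/bin/bash
# Sketch: metadata-only mirror of a single repo/arch (URL and paths are illustrative)
repo_url="https://kojihub.stream.example.org/repos/c9s-build/latest/x86_64"
urls=( $(dnf reposync --repofrompath tmp,"${repo_url}" --repoid=tmp \
         --download-metadata --norepopath --assumeyes -u | grep repodata) )
mkdir -p repodata && pushd repodata
for u in "${urls[@]}"; do wget -q -nc "${u}"; done
wget -q -nc "${repo_url}/repodata/repomd.xml"   # fetch the index last, once its files exist
popd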
45
mangle/generators/generate_versions_from_ppr
Executable file
@ -0,0 +1,45 @@
#!/bin/bash
# Parses a peridot project
#set -x

if [ -n "$1" ]; then
  MAJOR="$1"
else
  echo "Major version not specified"
  exit 1
fi

if [ -n "$2" ] && [[ "$2" == "lh" ]]; then
  export LH="lh"
fi

export RLVER="${MAJOR}"
source common

drop="${VERSDROP}"
current=$(pwd)
tmpdir=$(mktemp -d)
tag_template="all"
peridot_repo_url="${PERIDOT_REPO}/${PERIDOT_PROJECT_ID}/repo/${tag_template}"

pushd "${tmpdir}" || { echo "Could not change directory"; exit 1; }
for y in "${ARCH[@]}"; do
  repodatas=( $(dnf reposync --repofrompath ${tag_template},${peridot_repo_url}/${y} --download-metadata --repoid=${tag_template} -p ${tag_template}/${y} --forcearch ${y} --norepopath --remote-time --assumeyes -u | grep repodata) )
  mkdir -p "${tag_template}/${y}/repodata"
  pushd "${tag_template}/${y}/repodata" || { echo "Could not change directory"; exit 1; }
  for z in "${repodatas[@]}"; do
    wget -q -nc "${z}"
  done
  wget -q -nc "${peridot_repo_url}/${y}/repodata/repomd.xml"
  popd || { echo "Could not change back..."; exit 1; }
done
/usr/bin/python3 "${current}/version_parser.py" --version "${MAJOR}" --all-repo
ret_val=$?
popd || { echo "Could not change back..."; exit 1; }

if [ "$ret_val" -ne "0" ]; then
  echo "There was an error running through the parser."
  exit 1
fi

echo "File located at: $drop"
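Both generators take the major version as their first argument, and the peridot variant accepts an optional second argument "lh" to flip on lookahead mode. Probable invocations, assuming common supplies STREAM_KOJI_REPO, PERIDOT_REPO, PERIDOT_PROJECT_ID and the ARCH array:

./generate_versions_from_kpr 9      # versions from the c9s-build koji tag
./generate_versions_from_ppr 9      # versions from the peridot "all" repo
./generate_versions_from_ppr 9 lh   # same, but exporting LH=lh for lookahead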
@ -3,8 +3,8 @@
#set -x

if [ -n "$1" ] && [ -n "$2" ]; then
  MAJOR=$1
  DATE=$2
  MAJOR="$1"
  DATE="$2"
else
  echo "Major version not specified"
  exit 1
@ -18,7 +18,7 @@ if [ "$grep_val" -ne 0 ]; then
  echo "Date format incorrect. You must use: YYYYMMDD.X"
fi

export RLVER=$MAJOR
export RLVER="${MAJOR}"
source common

drop="${VERSDROP}"
@ -40,7 +40,7 @@ for x in "${REPO[@]}"; do
    popd || { echo "Could not change back..."; exit 1; }
  done
done
/usr/bin/python3 "${current}/version_parser.py"
/usr/bin/python3 "${current}/version_parser.py" --version "${MAJOR}"
ret_val=$?
popd || { echo "Could not change back..."; exit 1; }

156
mangle/generators/prepopcatalog.py
Normal file
@ -0,0 +1,156 @@
#!/usr/bin/env python3
# This is copied into peridot-releng and ran. This file is likely temporary
# until we decide if this goes into that git repo.
import argparse
import os
import json

from catalog import (
    PeridotCatalogSync,
    PeridotCatalogSyncPackage,
    PeridotCatalogSyncPackageType,
    PeridotCatalogSyncRepository,
)

def main(prepopulate: str, output_path: str, major: int, minor: int):
    print(f"Using prepopulate file: {prepopulate}")

    with open(prepopulate) as json_file:
        prepop = json.load(json_file)
        json_file.close()

    # Create a catalog
    catalog = PeridotCatalogSync()
    catalog.major = major
    catalog.minor = minor

    # Create indexes
    package_index = {}
    repo_module_index = {}
    module_name_index = {}
    module_defaults = []

    # Read prepopulate json and create package objects
    all_arches = []
    for repo in prepop.keys():
        for arch in prepop[repo].keys():
            if arch not in all_arches:
                all_arches.append(arch)
            for package in prepop[repo][arch].keys():
                if package not in package_index:
                    package_index[package] = {}
                if repo not in package_index[package]:
                    package_index[package][repo] = {
                        "include_filter": [],
                        "multilib": [],
                    }
                na_list = prepop[repo][arch][package]
                for na in na_list:
                    splitted = na.split(".")
                    arch_package = splitted[len(splitted) - 1]
                    if arch != arch_package and arch_package != "noarch":
                        if arch not in package_index[package][repo]["multilib"]:
                            package_index[package][repo]["multilib"].append(arch)
                    if na not in package_index[package][repo]["include_filter"]:
                        package_index[package][repo]["include_filter"].append(na)

    arch_specific_excludes = {}
    na_index = {}
    for pkg in package_index.keys():
        for repo in package_index[pkg].keys():
            na_list = list(
                filter(
                    lambda x: x.endswith(".noarch"),
                    package_index[pkg][repo]["include_filter"],
                )
            )
            if not na_list:
                continue
            exclude_arches = {}
            for na in na_list:
                for arch in all_arches:
                    if (
                        arch not in prepop[repo]
                        or pkg not in prepop[repo][arch]
                        or na not in prepop[repo][arch][pkg]
                    ):
                        if na not in exclude_arches:
                            exclude_arches[na] = []
                        exclude_arches[na].append(arch)
                        na_index[na] = na
            if not exclude_arches:
                continue
            if pkg not in arch_specific_excludes:
                arch_specific_excludes[pkg] = {}
            if repo not in arch_specific_excludes[pkg]:
                arch_specific_excludes[pkg][repo] = []
            arch_specific_excludes[pkg][repo].append(exclude_arches)

    # Index arch specific excludes by repo and arch
    repo_arch_index = {}
    for pkg in arch_specific_excludes.keys():
        for repo in arch_specific_excludes[pkg].keys():
            if repo not in repo_arch_index:
                repo_arch_index[repo] = {}
            for arches2 in arch_specific_excludes[pkg][repo]:
                for na in arches2.keys():
                    for arch in arches2[na]:
                        if arch not in repo_arch_index[repo]:
                            repo_arch_index[repo][arch] = []
                        if na not in repo_arch_index[repo][arch]:
                            repo_arch_index[repo][arch].append(na)

    # Add noarch packages not in a specific arch to exclude filter
    for repo in repo_arch_index.keys():
        repo_key = f"^{repo}$"
        filter_tuple = {}
        for arch in repo_arch_index[repo].keys():
            if arch not in filter_tuple:
                filter_tuple[arch] = []
            for na in repo_arch_index[repo][arch]:
                na = na.removesuffix(".noarch")
                if na not in filter_tuple[arch]:
                    filter_tuple[arch].append(na)
        catalog.exclude_filter.append((repo_key, filter_tuple))

    for package in package_index.keys():
        package_type = PeridotCatalogSyncPackageType.PACKAGE_TYPE_NORMAL_FORK
        if package in module_name_index:
            package_type = PeridotCatalogSyncPackageType.PACKAGE_TYPE_NORMAL_FORK_MODULE
        elif package.startswith("rocky-"):
            package_type = PeridotCatalogSyncPackageType.PACKAGE_TYPE_NORMAL_SRC

        catalog.add_package(
            PeridotCatalogSyncPackage(
                package,
                package_type,
                [
                    PeridotCatalogSyncRepository(
                        x,
                        package_index[package][x]["include_filter"],
                        package_index[package][x]["multilib"],
                        (get_modules_for_repo(package, x, repo_module_index) if x in repo_module_index else None) if package in module_name_index else None,
                    )
                    for x in package_index[package].keys()
                ],
            )
        )

    print(f"Found {len(catalog.packages)} packages")

    f = open(output_path, "w")
    f.write(catalog.to_prototxt())
    f.close()

    print(f"Catalog written to {output_path}")

if __name__ == "__main__":
    parser = argparse.ArgumentParser(
        description="Create a catalog from a standalone prepopulate.json"
    )
    parser.add_argument("--prepopulate-path", type=str, required=True)
    parser.add_argument("--major", type=int, required=True)
    parser.add_argument("--minor", type=int, required=True)
    parser.add_argument("--output-path", type=str, default="hidden.cfg")
    args = parser.parse_args()
    main(args.prepopulate_path, args.output_path, args.major, args.minor)
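The input shape prepopcatalog.py expects, inferred from how main() walks the data, is repo -> arch -> package -> list of "name.arch" strings; an entry whose trailing arch differs from the repo arch (and is not noarch) marks the package as multilib. A hypothetical minimal input and run (the script must sit next to the catalog module in peridot-releng for the import to resolve):

# hypothetical prepopulate.json shaped the way the parser reads it
cat > /tmp/prepopulate.json <<'EOF'
{
  "BaseOS": {
    "x86_64": {
      "bash": ["bash.x86_64"],
      "glibc": ["glibc.x86_64", "glibc.i686"]
    }
  }
}
EOF
python3 prepopcatalog.py --prepopulate-path /tmp/prepopulate.json \
  --major 9 --minor 1 --output-path /tmp/catalog.cfg   # glibc.i686 would be flagged multilib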
@ -7,8 +7,6 @@ import dnf
import createrepo_c as cr
from common import *

REPOS = switcher.rlver((os.environ['RLVER']))

# Source packages we do not ship or are rocky branded
IGNORES = [
    'insights-client',
@ -22,10 +20,16 @@ IGNORES = [
]

parser = argparse.ArgumentParser()
parser.add_argument('--version', type=str, required=True)
parser.add_argument("--pungi", help="local pungi is here", action='store_true')
args = parser.parse_args()
parser.add_argument('--stream', action='store_true', help="Stream koji only")

if args.pungi:
results = parser.parse_args()

REPOS = switcher.rlver(results.version,
                       stream=results.stream)

if results.pungi:
    APPEND_TO_PATH = '/os'
else:
    APPEND_TO_PATH = ''

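With this change the parser no longer pulls RLVER from the environment; the version arrives on the command line, and --stream flips the switcher to the Stream koji layout. Likely invocations, inferred from the flags above:

/usr/bin/python3 prepopulate_parser.py --version 9             # default peridot layout
/usr/bin/python3 prepopulate_parser.py --version 9 --stream    # CentOS Stream koji layout
/usr/bin/python3 prepopulate_parser.py --version 8 --pungi     # local pungi compose (/os suffix)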
@ -10,7 +10,7 @@ conf = base.conf
base.read_all_repos()
all_repos = base.repos.all()
all_repos.disable()
base.repos.add_new_repo('all-source', conf, baseurl=['https://yumrepofs.build.resf.org/v1/projects/55b17281-bc54-4929-8aca-a8a11d628738/repo/all/src'])
base.repos.add_new_repo('all-source', conf, baseurl=['https://yumrepofs.build.resf.org/v1/projects/0048077b-1573-4cb7-8ba7-cce823857ba5/repo/all/src'])
base.fill_sack()

q = base.sack.query()
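The same source listing can be sanity-checked from the shell without the dnf Python API; a rough CLI equivalent of the query above (the URL is the new yumrepofs project from this diff):

dnf repoquery --repofrompath=all-source,"https://yumrepofs.build.resf.org/v1/projects/0048077b-1573-4cb7-8ba7-cce823857ba5/repo/all/src" \
  --repo all-source --queryformat '%{name}-%{version}'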
@ -2,11 +2,20 @@
import os
import os.path
import json
import argparse
import dnf
import createrepo_c as cr
from common import *

REPOS = switcher.rlver((os.environ['RLVER']))
parser = argparse.ArgumentParser(description="Version Parser")
parser.add_argument('--version', type=str, required=True)
parser.add_argument('--stream', action='store_true', help="Stream koji only")
parser.add_argument('--all-repo', action='store_true', help="Looks at the all repo for peridot")
results = parser.parse_args()

REPOS = switcher.rlver(results.version,
                       stream=results.stream,
                       all_repo=results.all_repo)

# Source packages we do not ship or are rocky branded
IGNORES = [

146
mangle/quotas.go
Normal file
@ -0,0 +1,146 @@
package main

import (
	"fmt"
	"log"
	"os"
	"sync"

	"github.com/aws/aws-sdk-go/aws"
	"github.com/aws/aws-sdk-go/aws/session"
	"github.com/aws/aws-sdk-go/service/ec2"
	"github.com/aws/aws-sdk-go/service/servicequotas"
)

func getQuotaCode(sqSvc *servicequotas.ServiceQuotas) string {
	input := &servicequotas.ListServiceQuotasInput{
		ServiceCode: aws.String("ec2"),
	}

	for {
		output, err := sqSvc.ListServiceQuotas(input)
		if err != nil {
			log.Println("Error getting quota code:", err)
			os.Exit(1)
		}

		for _, quota := range output.Quotas {
			if *quota.QuotaName == "Public AMIs" {
				return *quota.QuotaCode
			}
		}

		if output.NextToken == nil {
			break
		}
		input.NextToken = output.NextToken
	}
	log.Println("Quota code not found")
	os.Exit(1)
	return ""
}

func getRegions(ec2Svc *ec2.EC2) ([]*string, error) {
	input := &ec2.DescribeRegionsInput{}

	output, err := ec2Svc.DescribeRegions(input)
	if err != nil {
		return nil, err
	}

	var regions []*string
	for _, region := range output.Regions {
		regions = append(regions, region.RegionName)
	}

	return regions, nil
}

type QuotaInfo struct {
	CurrentQuota float64
	DesiredValue float64
	Status       string
	CaseId       string
}

func getQuotaInfo(sqSvc *servicequotas.ServiceQuotas, quotaCode string, region string) *QuotaInfo {
	input := &servicequotas.GetServiceQuotaInput{
		ServiceCode: aws.String("ec2"),
		QuotaCode:   aws.String(quotaCode),
	}

	output, err := sqSvc.GetServiceQuota(input)
	if err != nil {
		log.Printf("Error getting quota info for %s: %s\n", region, err)
		return nil
		// os.Exit(1)
	}

	currentValue := *output.Quota.Value
	requestOutput, err := sqSvc.ListRequestedServiceQuotaChangeHistoryByQuota(&servicequotas.ListRequestedServiceQuotaChangeHistoryByQuotaInput{
		ServiceCode: aws.String("ec2"),
		QuotaCode:   aws.String(quotaCode),
	})

	if err != nil {
		log.Println("Error getting request info:", err)
		os.Exit(1)
	}
	var desiredValue float64
	var status string
	var caseId string
	if len(requestOutput.RequestedQuotas) > 0 {
		desiredValue = *requestOutput.RequestedQuotas[len(requestOutput.RequestedQuotas)-1].DesiredValue
		status = *requestOutput.RequestedQuotas[len(requestOutput.RequestedQuotas)-1].Status
		caseId = *requestOutput.RequestedQuotas[len(requestOutput.RequestedQuotas)-1].CaseId
	}
	return &QuotaInfo{currentValue, desiredValue, status, caseId}
}

func listQuotas(sess *session.Session, quotaCode string, regions []*string) {
	fmt.Println("Region\tQuota\tDesired\tStatus\tCaseId")

	var wg sync.WaitGroup
	wg.Add(len(regions))

	for _, region := range regions {
		go func(region string) {
			defer wg.Done()
			regionSqSvc := servicequotas.New(sess, &aws.Config{Region: aws.String(region)})
			quotaInfo := getQuotaInfo(regionSqSvc, quotaCode, region)
			if quotaInfo != nil {
				fmt.Printf("%s\t%.0f\t%.0f\t%s\t%s\n", region, quotaInfo.CurrentQuota, quotaInfo.DesiredValue, quotaInfo.Status, quotaInfo.CaseId)
			}
		}(aws.StringValue(region))
	}
	wg.Wait()
}

func main() {
	// Create session
	sess := session.Must(session.NewSessionWithOptions(session.Options{
		SharedConfigState: session.SharedConfigEnable,
	}))

	// Create EC2 client
	ec2Svc := ec2.New(sess, &aws.Config{Region: aws.String("us-east-1")})

	// Create Service Quotas client
	sqSvc := servicequotas.New(sess)

	// Get the quota code for Public AMIs once
	quotaCode := getQuotaCode(sqSvc)

	// Get all regions
	regions, err := getRegions(ec2Svc)
	if err != nil {
		log.Println("Error getting regions:", err)
		os.Exit(1)
	}

	// List quotas for all regions
	listQuotas(sess, quotaCode, regions)

	// Request quota increase for all regions
	// requestQuotaIncrease(sqSvc, quotaCode, regions)
}
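A note on running it: the session is built with SharedConfigEnable, so credentials and default region come from the usual ~/.aws files, and getQuotaInfo is called once per region through the goroutine fan-out in listQuotas. A plausible way to try it (the module setup is hypothetical):

cd mangle
go mod init quotas && go get github.com/aws/aws-sdk-go   # hypothetical one-time module setup
AWS_PROFILE=<profile> go run quotas.go                   # tab-separated: Region Quota Desired Status CaseId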
@ -15,9 +15,14 @@ if [ -z "$RLVER" ]; then
  export RLVER=8
fi

if [ -z "$RLREL" ]; then
  export RLREL=stable
fi

# Override: Not Allowed
STAGING_ROOT="/mnt/repos-staging"
PRODUCTION_ROOT="/mnt/repos-production"
COMPOSE_ROOT="/mnt/compose"

# Override: Not Allowed
# relative to ${ENV}_ROOT

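These two defaults make the sync scripts entirely environment-driven: RLVER selects which common_X file supplies paths, and RLREL (stable, beta, or lh, per the case blocks below) selects the revision and directory suffix within it. Assuming common dispatches on RLVER as the filenames suggest:

RLVER=9 RLREL=beta source sync/common   # would resolve to REVISION=9.2, APPEND_TO_DIR="-beta"
source sync/common                      # defaults: RLVER=8, RLREL=stable -> 8.7-RC1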
@ -1,20 +1,35 @@
# To be sourced by scripts to use

# Revision must always start with a major number
REVISION=8.7

MAJOR="${REVISION:0:1}"
MINOR="${REVISION:2:1}"

# comment or blank if needed
APPEND_TO_DIR="-RC1"
case "${RLREL}" in
  stable)
    REVISION=8.7
    APPEND_TO_DIR="-RC1"
    ;;
  beta)
    REVISION=8.8
    APPEND_TO_DIR="-BETA"
    COMPOSE_APPEND="${APPEND_TO_DIR}"
    ;;
  lh)
    REVISION=8.8
    APPEND_TO_DIR="-LookAhead"
    COMPOSE_APPEND="${APPEND_TO_DIR}"
    ;;
  *)
    echo "option not recognized"
    ;;
esac

MAJOR="${REVISION%.*}"
MINOR="${REVISION#*.}"
STAGING_ROOT="/mnt/repos-staging"
PRODUCTION_ROOT="/mnt/repos-production"

# relative to ${ENV}_ROOT
CATEGORY_STUB="mirror/pub/rocky"
RELEASE_DIR="${CATEGORY_STUB}/${REVISION}${APPEND_TO_DIR}"
RELEASE_COMPOSE_ROOT="${COMPOSE_ROOT}/${MAJOR}${COMPOSE_APPEND}/latest-${SHORT}-${MAJOR}"

# Set all repos that have no comps/groups associated with them. This is even in
# cases where repos will not be available by normal means. It's just for
@ -23,7 +38,6 @@ NONMODS_REPOS=(
  extras
  Devel
  plus
  rockyrpi
)

# These repos were originally separate from the main compose and need symlinks
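The move from substring slicing to suffix/prefix stripping is not cosmetic: positional slicing breaks the moment the major version gains a second digit, while the new expansions key off the dot. A quick demonstration:

REVISION=10.0
echo "${REVISION:0:1}"   # 1  -- the old slice truncates a two-digit major
echo "${REVISION%.*}"    # 10 -- strip from the last dot rightward
echo "${REVISION#*.}"    # 0  -- strip through the first dot leftward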
@ -1,22 +1,34 @@
# To be sourced by scripts to use

# Revision must always start with a major number
REVISION=9.1
case "${RLREL}" in
  stable)
    REVISION=9.1
    APPEND_TO_DIR="-RC4"
    ;;
  beta)
    REVISION=9.2
    APPEND_TO_DIR="-beta"
    ;;
  lh)
    REVISION=9.2
    APPEND_TO_DIR="-lookahead"
    ;;
  *)
    echo "option not recognized"
    ;;
esac

MAJOR="${REVISION%.*}"
MINOR="${REVISION#*.}"
ARCHES=(x86_64 aarch64 ppc64le s390x)

MAJOR="${REVISION:0:1}"
MINOR="${REVISION:2:1}"

# comment or blank if needed
APPEND_TO_DIR="-RC4"

STAGING_ROOT="/mnt/repos-staging"
PRODUCTION_ROOT="/mnt/repos-production"

# relative to ${ENV}_ROOT
CATEGORY_STUB="mirror/pub/rocky"
RELEASE_DIR="${CATEGORY_STUB}/${REVISION}${APPEND_TO_DIR}"
RELEASE_COMPOSE_ROOT="${COMPOSE_ROOT}/${MAJOR}/latest-${SHORT}-${MAJOR}"

# Set all repos that have no comps/groups associated with them. This is even in
# cases where repos will not be available by normal means. It's just for
@ -25,7 +37,6 @@ NONMODS_REPOS=(
  extras
  Devel
  plus
  rockyrpi
)

# These repos were originally separate from the main compose and need symlinks
107
sync/lh-release-sync-to-staging.sh
Normal file
@ -0,0 +1,107 @@
#!/bin/bash
# Performs a full on sync of a minor release, directories and all. It calls the
# other scripts in this directory to assist where necessary.
# Note that this is EL8 specific
#
# Source common variables
# shellcheck disable=SC2046,1091,1090
source $(dirname "$0")/common

# Major Version (eg, 8)
MAJ=${RLVER}

# sync all pieces of a release, including extras, nfv, etc
for COMPOSE in "${NONSIG_COMPOSE[@]}"; do
  echo "${COMPOSE}: Syncing"
  pushd "${RELEASE_COMPOSE_ROOT}/compose" || { echo "${COMPOSE}: Failed to change directory"; break; }

  if [[ "${COMPOSE}" == "Rocky" ]]; then
    # ISO Work before syncing
    mkdir -p isos/{x86_64,aarch64}

    # Sort the ISO's
    for ARCH in "${ARCHES[@]}"; do
      for x in BaseOS Minimal; do
        echo "${x} ${ARCH}: Moving ISO images"
        mv "${x}/${ARCH}/iso/"* "isos/${ARCH}/"
      done
      pushd "isos/${ARCH}" || { echo "${ARCH}: Failed to change directory"; break; }
      ln -s "Rocky-${REVISION}-${ARCH}-boot.iso" "Rocky-${ARCH}-boot.iso"
      ln -s "Rocky-${REVISION}-${ARCH}-dvd1.iso" "Rocky-${ARCH}-dvd1.iso"
      ln -s "Rocky-${REVISION}-${ARCH}-dvd1.iso" "Rocky-${ARCH}-dvd.iso"
      ln -s "Rocky-${REVISION}-${ARCH}-minimal.iso" "Rocky-${ARCH}-minimal.iso"
      for file in *.iso; do
        printf "# %s: %s bytes\n%s\n" \
          "${file}" \
          "$(stat -c %s ${file})" \
          "$(sha256sum --tag ${file})" \
          | sudo tee -a CHECKSUM;
      done
      popd || { echo "Could not change directory"; break; }
    done
  fi

  TARGET="${STAGING_ROOT}/${CATEGORY_STUB}/${REV}"
  mkdir -p "${TARGET}"
  # disabling because none of our files should be starting with dashes. If they
  # are something is *seriously* wrong here.
  # shellcheck disable=SC2035
  sudo -l && find **/* -maxdepth 0 -type d | parallel --will-cite -j 18 sudo rsync -av --chown=10004:10005 --progress --relative --human-readable \
    {} "${TARGET}"

  if [[ "${COMPOSE}" == "Rocky" ]]; then
    cp COMPOSE_ID "${TARGET}"
    chown 10004:10005 "${TARGET}/COMPOSE_ID"
    rsync -av --chown=10004:10005 --progress --relative --human-readable metadata "${TARGET}"
  fi

  # Return back to where we started
  popd || { echo "${COMPOSE}: Failed to change back"; break; }

  # Create extra stuff
  pushd "${TARGET}" || { echo "${COMPOSE}: Failed to change directory"; break; }
  mkdir -p Live/x86_64
  ln -s Live live
  popd || { echo "${COMPOSE}: Failed to change back"; break; }
done


# Create symlinks for repos that were once separate from the main compose
for LINK in "${LINK_REPOS[@]}"; do
  ln -sr "${STAGING_ROOT}/${CATEGORY_STUB}/${REV}/${LINK}" \
    "${STAGING_ROOT}/${CATEGORY_STUB}/${REV}/${LINK_REPOS[$LINK]}"
done

# make a kickstart directory
for ARCH in "${ARCHES[@]}"; do
  for REPO in "${MODS_REPOS[@]}"; do
    SOURCE="${STAGING_ROOT}/${CATEGORY_STUB}/${REV}/${REPO}/${ARCH}/os"
    TARGET="${STAGING_ROOT}/${CATEGORY_STUB}/${REV}/${REPO}/${ARCH}/kickstart"
    echo "Making golden kickstart directory"
    cp -na "${SOURCE}" "${TARGET}"
  done
done

# fix treeinfo
for ARCH in "${ARCHES[@]}"; do
  echo "Ensuring treeinfo is correct"
  treeinfoModder "${ARCH}"
  treeinfoModderKickstart "${ARCH}"
  treeinfoSaver "${ARCH}"
done

# sign all repos
echo "Signing all repositories"
test -f $(dirname "$0")/sign-repos-only.sh
ret_val=$?

if [ "$ret_val" -eq 0 ]; then
  $(dirname "$0")/sign-repos-only.sh
fi

# Change Symlink if required
echo "Setting symlink to ${REV}"
pushd "${STAGING_ROOT}/${CATEGORY_STUB}" || exit
/bin/rm "${STAGING_ROOT}/${CATEGORY_STUB}/latest-8"
ln -sr "${STAGING_ROOT}/${CATEGORY_STUB}/${REV}" latest-8
popd || exit
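Everything the script touches hangs off RELEASE_COMPOSE_ROOT, STAGING_ROOT and REV, all presumably resolved by common, so the LookAhead tree is selected purely by environment. An illustrative invocation:

RLVER=8 RLREL=lh bash sync/lh-release-sync-to-staging.sh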
@ -13,7 +13,7 @@ MAJ=${RLVER}
# sync all pieces of a release, including extras, nfv, etc
for COMPOSE in "${NONSIG_COMPOSE[@]}"; do
  echo "${COMPOSE}: Syncing"
  pushd "/mnt/compose/${MAJ}/latest-${COMPOSE}-${MAJ}/compose" || { echo "${COMPOSE}: Failed to change directory"; break; }
  pushd "${RELEASE_COMPOSE_ROOT}/compose" || { echo "${COMPOSE}: Failed to change directory"; break; }

  if [[ "${COMPOSE}" == "Rocky" ]]; then
    # ISO Work before syncing
@ -23,7 +23,7 @@ for y in "${ALL_REPOS[@]}"; do
  if [ "$ret_val" -eq 0 ]; then
    createrepo "${STAGING_ROOT}/${RELEASE_DIR}/${y}/source/tree" \
      "--distro=cpe:/o:rocky:rocky:${REVISION:0:1},Rocky Linux ${REVISION:0:1}" \
      --workers 8
      --workers 12
    sed -i '/<open-size><\/open-size>/d' \
      "${STAGING_ROOT}/${RELEASE_DIR}/${y}/source/tree/repodata/repomd.xml"
    test -f /root/bin/sign-repo.sh && /root/bin/sign-repo.sh \
@ -46,7 +46,7 @@ for x in "${ARCHES[@]}"; do
  if [ "$ret_val" -eq 0 ]; then
    createrepo "${STAGING_ROOT}/${RELEASE_DIR}/${y}/${x}/${z}" \
      "--distro=cpe:/o:rocky:rocky:${REVISION:0:1},Rocky Linux ${REVISION:0:1}" \
      --workers 8
      --workers 12
    sed -i '/<open-size><\/open-size>/d' \
      "${STAGING_ROOT}/${RELEASE_DIR}/${y}/${x}/${z}/repodata/repomd.xml"
    test -f /root/bin/sign-repo.sh && /root/bin/sign-repo.sh \
@ -63,7 +63,7 @@ for x in "${ARCHES[@]}"; do
  if [ "$ret_val" -eq 0 ]; then
    createrepo --update "${STAGING_ROOT}/${RELEASE_DIR}/${y}/${x}/debug/tree" \
      "--distro=cpe:/o:rocky:rocky:${REVISION:0:1},Rocky Linux ${REVISION:0:1}" \
      --workers 8
      --workers 12 --retain-old-md-by-age 1d
    sed -i '/<open-size><\/open-size>/d' \
      "${STAGING_ROOT}/${RELEASE_DIR}/${y}/${x}/debug/tree/repodata/repomd.xml"
    test -f /root/bin/sign-repo.sh && /root/bin/sign-repo.sh \
@ -83,7 +83,7 @@ for x in "${ARCHES[@]}"; do
      --groupfile="/mnt/compose/8/latest-Rocky-8/work/${x}/comps/comps-${y}.${x}.xml" \
      --xz --revision="${REVISION}" \
      "--distro=cpe:/o:rocky:rocky:${REVISION:0:1},Rocky Linux ${REVISION:0:1}" \
      --workers=8 --checksum=sha256
      --workers=12 --checksum=sha256 --retain-old-md-by-age 1d
    sed -i '/<open-size><\/open-size>/d' \
      "${STAGING_ROOT}/${RELEASE_DIR}/${y}/${x}/os/repodata/repomd.xml"
    sed -i '/<open-size><\/open-size>/d' \

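The workers bump is plain parallelism; the more consequential addition is --retain-old-md-by-age 1d, which has createrepo_c keep superseded repodata files for a day after an --update instead of deleting them immediately, so clients and mirrors that already fetched an older repomd.xml can still resolve the metadata it references. A minimal sketch (path illustrative):

createrepo --update --workers 12 --retain-old-md-by-age 1d \
  /mnt/repos-staging/mirror/pub/rocky/8.7/BaseOS/x86_64/os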
@ -33,6 +33,8 @@ function copy(){
  for region in $REGIONS; do
    if find_image_by_name $region; then
      echo "Found copy of $source_ami in $region - $found_image_id - Skipping"
      unset ami_ids[$region]
      ami_ids[$region]=$(echo $found_image_id | tr -d "'")
      continue
    fi
    echo -n "Creating copy job for $region..."
@ -68,15 +70,32 @@ function change_privacy(){
  local finished=false
  while ! $finished; do
    for region in "${!ami_ids[@]}"; do
      echo -n "Making ${ami_ids[$region]} in $region $status..."
      aws --profile resf-ami ec2 modify-image-attribute \
      image_id=${ami_ids[$region]}
      echo -n "Making ${image_id} in $region $status..."
      if aws --profile resf-ami ec2 modify-image-attribute \
        --region $region \
        --image-id "${ami_ids[$region]}" \
        --launch-permission "${launch_permission}" 2>/dev/null
      if [[ $? -eq 0 ]]; then
        unset ami_ids[$region]
        echo ". Done"
        continue
        --image-id "$image_id" \
        --launch-permission "${launch_permission}" 2>/dev/null; then

        snapshot_id=$(aws --profile resf-ami ec2 describe-images \
          --region $region \
          --image-ids "${image_id}" \
          --query 'Images[*].BlockDeviceMappings[0].Ebs.SnapshotId' \
          --output text 2>&1)
        permissions=$(aws --profile resf-ami ec2 describe-snapshot-attribute \
          --region $region \
          --snapshot-id "${snapshot_id}" \
          --attribute createVolumePermission \
          --query 'CreateVolumePermissions[0].Group' \
          --output text 2>&1)
        if [[ "$permissions" == "all" ]] || aws --profile resf-ami ec2 modify-snapshot-attribute \
          --region $region \
          --snapshot-id "${snapshot_id}" \
          --create-volume-permission "${launch_permission}" 2>/dev/null; then
          unset ami_ids[$region]
          echo ". Done"
          continue
        fi
      fi
      echo ". Still pending"
    done

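The rewrite closes a real gap: modify-image-attribute opens up the AMI itself but not the EBS snapshot behind it, so the new branch looks up the backing snapshot and sets its createVolumePermission as well, and a region only leaves the retry map once both succeed (or the snapshot already reports "all"). To spot-check one region by hand (snapshot ID illustrative):

aws --profile resf-ami ec2 describe-snapshot-attribute \
  --region us-east-1 --snapshot-id snap-0123456789abcdef0 \
  --attribute createVolumePermission \
  --query 'CreateVolumePermissions[0].Group' --output text   # prints "all" once public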
@ -1,6 +1,10 @@
#!/bin/bash
# Syncs everything from staging to production

if [[ "$RLREL" == "lh" ]] || [[ "$RLREL" == "beta" ]]; then
  exit 1
fi

# Source common variables
# shellcheck disable=SC2046,1091,1090
source "$(dirname "$0")/common"

@ -1,6 +1,10 @@
#!/bin/bash
# Syncs everything from staging to production

if [[ "$RLREL" == "lh" ]] || [[ "$RLREL" == "beta" ]]; then
  exit 1
fi

# Source common variables
# shellcheck disable=SC2046,1091,1090
source "$(dirname "$0")/common"

@ -1,5 +1,8 @@
#!/bin/bash

# Short name (eg, NFV, extras, Rocky, gluster9)
SHORT=${1}

# Source common variables
# shellcheck disable=SC2046,1091,1090
source "$(dirname "$0")/common"
@ -11,10 +14,8 @@ fi

# Major Version (eg, 8)
MAJ=${RLVER}
# Short name (eg, NFV, extras, Rocky, gluster9)
SHORT=${1}

cd "/mnt/compose/${MAJ}/latest-${SHORT}-${MAJ}/compose" || { echo "Failed to change directory"; ret_val=1; exit 1; }
cd "${RELEASE_COMPOSE_ROOT}/compose" || { echo "Failed to change directory"; ret_val=1; exit 1; }
ret_val=$?

if [ $ret_val -eq "0" ]; then
@ -1,5 +1,8 @@
#!/bin/bash

# Short name (eg, NFV, extras, Rocky, gluster9)
SHORT=${1}

# Source common variables
# shellcheck disable=SC2046,1091,1090
source "$(dirname "$0")/common"
@ -11,10 +14,8 @@ fi

# Major Version (eg, 8)
MAJ=${RLVER}
# Short name (eg, NFV, extras, Rocky, gluster9)
SHORT=${1}

cd "/mnt/compose/${MAJ}/latest-${SHORT}-${MAJ}/compose" || { echo "Failed to change directory"; ret_val=1; exit 1; }
cd "${RELEASE_COMPOSE_ROOT}/compose" || { echo "Failed to change directory"; ret_val=1; exit 1; }
ret_val=$?

if [ $ret_val -eq "0" ]; then