Compare commits
devel...fresh-empa (16 commits)

SHA1
9951a99803
27c649c141
1376c91f6d
68f06d38e0
92f79885ce
6128dd990a
9639176823
364e92dcb8
8b643fd566
688ef9c8c2
a27ee5f35a
aa89ed109b
85a28fa8fb
d349dff365
43e723ef2b
ee019321ae
@@ -1,4 +1,4 @@
-FROM quay.io/centos/centos:stream9
+FROM quay.io/rockylinux/rockylinux:9

 ADD images/get_arch /get_arch

@@ -38,6 +38,8 @@ RUN dnf install -y \
     sudo \
     mock \
     python-pip \
+    mock \
+    fuse-overlayfs \
     imagefactory \
     imagefactory-plugins*

iso/empanadas/empanadas/backends/__init__.py (new file, 5 lines)

"""Empanadas Backends (fillings)"""

from .imagefactory import ImageFactoryBackend
from .kiwi import KiwiBackend
from .interface import BackendInterface
iso/empanadas/empanadas/backends/imagefactory.py (new file, 318 lines)

"""Backend for ImageFactory"""

import json
import os
import pathlib
import tempfile

from .interface import BackendInterface
from empanadas.builders import utils

from attrs import define, field

from typing import List, Optional, Callable, Union

KICKSTART_PATH = pathlib.Path(os.environ.get("KICKSTART_PATH", "/kickstarts"))
STORAGE_DIR = pathlib.Path("/var/lib/imagefactory/storage")


@define(kw_only=True)
class ImageFactoryBackend(BackendInterface):
    """Build an image using ImageFactory"""
    kickstart_arg: List[str] = field(factory=list)
    kickstart_path: pathlib.Path = field(init=False)
    base_uuid: Optional[str] = field(default="")
    target_uuid: Optional[str] = field(default="")
    tdl_path: pathlib.Path = field(init=False)
    out_type: str = field(init=False)
    command_args: List[str] = field(factory=list)
    common_args: List[str] = field(factory=list)
    package_args: List[str] = field(factory=list)
    metadata: pathlib.Path = field(init=False)
    stage_commands: Optional[List[List[Union[str, Callable]]]] = field(init=False)

    # The url to use in the path when fetching artifacts for the build
    kickstart_dir: str = field()  # 'os' or 'kickstart'

    # The git repository to fetch kickstarts from
    kickstart_repo: str = field()

    def prepare(self):
        self.out_type = self.image_format()

        tdl_template = self.ctx.tmplenv.get_template('icicle/tdl.xml.tmpl')

        self.tdl_path = self.render_icicle_template(tdl_template)
        if not self.tdl_path:
            exit(2)

        self.metadata = pathlib.Path(self.ctx.outdir, ".imagefactory-metadata.json")

        self.kickstart_path = pathlib.Path(f"{KICKSTART_PATH}/Rocky-{self.ctx.architecture.major}-{self.ctx.type_variant}.ks")

        self.checkout_kickstarts()
        self.kickstart_arg = self.kickstart_imagefactory_args()

        try:
            os.mkdir(self.ctx.outdir)
        except FileExistsError:
            self.log.info("Directory already exists for this release. If possible, previously executed steps may be skipped")
        except Exception as e:
            self.log.exception("Some other exception occurred while creating the output directory", e)
            return 0

        if os.path.exists(self.metadata):
            self.ctx.log.info(f"Found metadata at {self.metadata}")
            with open(self.metadata, "r") as f:
                try:
                    o = json.load(f)
                    self.base_uuid = o['base_uuid']
                    self.target_uuid = o['target_uuid']
                except json.decoder.JSONDecodeError as e:
                    self.ctx.log.exception("Couldn't decode metadata file", e)
                finally:
                    f.flush()

        self.command_args = self._command_args()
        self.package_args = self._package_args()
        self.common_args = self._common_args()

        self.setup_staging()

    def build(self) -> int:
        if self.base_uuid:
            return 0

        self.fix_ks()

        # TODO(neil): this should be a lambda which is called from the function
        ret, out, err, uuid = self.ctx.prepare_and_run(self.build_command(), search=True)
        if uuid:
            self.base_uuid = uuid.rstrip()
            self.save()

        if ret > 0:
            return ret

        ret = self.package()

        if ret > 0:
            return ret

    def clean(self):
        pass

    def save(self):
        with open(self.metadata, "w") as f:
            try:
                o = {
                    name: getattr(self, name) for name in [
                        "base_uuid", "target_uuid"
                    ]
                }
                self.ctx.log.debug(o)
                json.dump(o, f)
            except AttributeError as e:
                self.ctx.log.error("Couldn't find attribute in object. Something is probably wrong", e)
            except Exception as e:
                self.ctx.log.exception(e)
            finally:
                f.flush()

    def package(self) -> int:
        # Some build types don't need to be packaged by imagefactory
        # @TODO remove business logic if possible
        if self.ctx.image_type in ["GenericCloud", "EC2", "Azure", "Vagrant", "OCP", "RPI", "GenericArm"]:
            self.target_uuid = self.base_uuid if hasattr(self, 'base_uuid') else ""

        if self.target_uuid:
            return 0

        ret, out, err, uuid = self.ctx.prepare_and_run(self.package_command(), search=True)
        if uuid:
            self.target_uuid = uuid.rstrip()
            self.save()
        return ret

    def stage(self) -> int:
        """Stage the artifacts from wherever they are (unpacking and converting if needed)"""
        self.ctx.log.info("Executing staging commands")
        if not hasattr(self, 'stage_commands'):
            return 0

        returns = []
        for command in self.stage_commands:  # type: ignore
            ret, out, err, _ = self.ctx.prepare_and_run(command, search=False)
            returns.append(ret)

        if (res := all(ret > 0 for ret in returns) > 0):
            raise Exception(res)

        return 0

    def checkout_kickstarts(self) -> int:
        cmd = ["git", "clone", "--branch", f"r{self.ctx.architecture.major}",
               self.kickstart_repo, f"{KICKSTART_PATH}"]
        ret, out, err, _ = self.ctx.prepare_and_run(cmd, search=False)
        self.ctx.log.debug(out)
        self.ctx.log.debug(err)
        if ret > 0:
            ret = self.pull_kickstarts()
        return ret

    def pull_kickstarts(self) -> int:
        cmd: utils.CMD_PARAM_T = ["git", "-C", f"{KICKSTART_PATH}", "reset", "--hard", "HEAD"]
        ret, out, err, _ = self.ctx.prepare_and_run(cmd, search=False)
        self.ctx.log.debug(out)
        self.ctx.log.debug(err)
        if ret == 0:
            cmd = ["git", "-C", f"{KICKSTART_PATH}", "pull"]
            ret, out, err, _ = self.ctx.prepare_and_run(cmd, search=False)
            self.ctx.log.debug(out)
            self.ctx.log.debug(err)
        return ret

    def _command_args(self):
        args_mapping = {
            "debug": "--debug",
        }
        # NOTE(neil): I'm intentionally leaving this as is; deprecated
        return [param for name, param in args_mapping.items() if self.ctx.debug]

    def _package_args(self) -> List[str]:
        if self.ctx.image_type in ["Container"]:
            return ["--parameter", "compress", "xz"]
        return [""]

    def _common_args(self) -> List[str]:
        args = []
        if self.ctx.image_type in ["Container"]:
            args = ["--parameter", "offline_icicle", "true"]
        if self.ctx.image_type in ["GenericCloud", "EC2", "Vagrant", "Azure", "OCP", "RPI", "GenericArm"]:
            args = ["--parameter", "generate_icicle", "false"]
        return args

    def image_format(self) -> str:
        mapping = {
            "Container": "docker"
        }
        return mapping[self.ctx.image_type] if self.ctx.image_type in mapping.keys() else ''

    def kickstart_imagefactory_args(self) -> List[str]:

        if not self.kickstart_path.is_file():
            self.ctx.log.warning(f"Kickstart file is not available: {self.kickstart_path}")
            if not self.ctx.debug:
                self.ctx.log.warning("Exiting because debug mode is not enabled.")
                exit(2)

        return ["--file-parameter", "install_script", str(self.kickstart_path)]

    def render_icicle_template(self, tdl_template) -> pathlib.Path:
        output = tempfile.NamedTemporaryFile(delete=False).name
        return utils.render_template(output, tdl_template,
                                     architecture=self.ctx.architecture.name,
                                     iso8601date=self.ctx.build_time.strftime("%Y%m%d"),
                                     installdir=self.kickstart_dir,
                                     major=self.ctx.architecture.major,
                                     minor=self.ctx.architecture.minor,
                                     release=self.ctx.release,
                                     size="10G",
                                     type=self.ctx.image_type,
                                     utcnow=self.ctx.build_time,
                                     version_variant=self.ctx.architecture.version if not self.ctx.variant else f"{self.ctx.architecture.version}-{self.ctx.variant}",
                                     )

    def build_command(self) -> List[str]:
        build_command = ["imagefactory", "--timeout", self.ctx.timeout,
                         *self.command_args, "base_image", *self.common_args,
                         *self.kickstart_arg, self.tdl_path]
        return build_command

    def package_command(self) -> List[str]:
        package_command = ["imagefactory", *self.command_args, "target_image",
                           self.out_type, *self.common_args,
                           "--id", f"{self.base_uuid}",
                           *self.package_args,
                           "--parameter", "repository", self.ctx.outname]
        return package_command

    def fix_ks(self):
        cmd: utils.CMD_PARAM_T = ["sed", "-i", f"s,$basearch,{self.ctx.architecture.name},", str(self.kickstart_path)]
        self.ctx.prepare_and_run(cmd, search=False)

    def setup_staging(self):
        # Yes, this is gross. I'll fix it later.
        if self.ctx.image_type in ["Container"]:
            self.stage_commands = [
                ["tar", "-C", f"{self.ctx.outdir}", "--strip-components=1", "-x", "-f", lambda: f"{STORAGE_DIR}/{self.target_uuid}.body", "*/layer.tar"],
                ["xz", f"{self.ctx.outdir}/layer.tar"]
            ]
        if self.ctx.image_type in ["RPI"]:
            self.stage_commands = [
                ["cp", lambda: f"{STORAGE_DIR}/{self.target_uuid}.body", f"{self.ctx.outdir}/{self.ctx.outname}.raw"],
                ["xz", f"{self.ctx.outdir}/{self.ctx.outname}.raw"]
            ]
        if self.ctx.image_type in ["GenericCloud", "OCP", "GenericArm"]:
            self.stage_commands = [
                ["qemu-img", "convert", "-c", "-f", "raw", "-O", "qcow2",
                 lambda: f"{STORAGE_DIR}/{self.target_uuid}.body", f"{self.ctx.outdir}/{self.ctx.outname}.qcow2"]
            ]
        if self.ctx.image_type in ["EC2"]:
            self.stage_commands = [
                ["qemu-img", "convert", "-f", "raw", "-O", "qcow2", lambda: f"{STORAGE_DIR}/{self.target_uuid}.body", f"{self.ctx.outdir}/{self.ctx.outname}.qcow2"]
            ]
        if self.ctx.image_type in ["Azure"]:
            self.stage_commands = [
                ["/prep-azure.sh", lambda: f"{STORAGE_DIR}/{self.target_uuid}.body", f"{STORAGE_DIR}"],
                ["cp", lambda: f"{STORAGE_DIR}/{self.target_uuid}.vhd", f"{self.ctx.outdir}/{self.ctx.outname}.vhd"]
            ]
        if self.ctx.image_type in ["Vagrant"]:
            _map = {
                "Vbox": {"format": "vmdk", "provider": "virtualbox"},
                "Libvirt": {"format": "qcow2", "provider": "libvirt", "virtual_size": 10},
                "VMware": {"format": "vmdk", "provider": "vmware_desktop"}
            }
            output = f"{_map[self.ctx.variant]['format']}"  # type: ignore
            provider = f"{_map[self.ctx.variant]['provider']}"  # type: ignore

            # pop from the options map that will be passed to the vagrant metadata.json
            convert_options = _map[self.ctx.variant].pop('convertOptions') if 'convertOptions' in _map[self.ctx.variant].keys() else ''  # type: ignore

            self.stage_commands = [
                ["qemu-img", "convert", "-c", "-f", "raw", "-O", output, *convert_options,
                 lambda: f"{STORAGE_DIR}/{self.target_uuid}.body", f"{self.ctx.outdir}/{self.ctx.outname}.{output}"],
                ["tar", "-C", self.ctx.outdir, "-czf", f"/tmp/{self.ctx.outname}.box", '.'],
                ["mv", f"/tmp/{self.ctx.outname}.box", self.ctx.outdir]
            ]
            self.prepare_vagrant(_map[self.ctx.variant])

        if self.stage_commands:
            self.stage_commands.append(["cp", "-v", lambda: f"{STORAGE_DIR}/{self.target_uuid}.meta", f"{self.ctx.outdir}/build.meta"])

    def prepare_vagrant(self, options):
        """Setup the output directory for the Vagrant type variant, dropping templates as required"""

        templates = {}
        templates['Vagrantfile'] = self.ctx.tmplenv.get_template(f"vagrant/Vagrantfile.{self.ctx.variant}")
        templates['metadata.json'] = self.ctx.tmplenv.get_template('vagrant/metadata.tmpl.json')
        templates['info.json'] = self.ctx.tmplenv.get_template('vagrant/info.tmpl.json')

        if self.ctx.variant == "VMware":
            templates[f"{self.ctx.outname}.vmx"] = self.ctx.tmplenv.get_template('vagrant/vmx.tmpl')

        if self.ctx.variant == "Vbox":
            templates['box.ovf'] = self.ctx.tmplenv.get_template('vagrant/box.tmpl.ovf')

        if self.ctx.variant == "Libvirt":
            # Libvirt vagrant driver expects the qcow2 file to be called box.img.
            qemu_command_index = [i for i, d in enumerate(self.stage_commands) if d[0] == "qemu-img"][0]
            self.stage_commands.insert(qemu_command_index+1, ["mv", f"{self.ctx.outdir}/{self.ctx.outname}.qcow2", f"{self.ctx.outdir}/box.img"])

        for name, template in templates.items():
            utils.render_template(f"{self.ctx.outdir}/{name}", template,
                                  name=self.ctx.outname,
                                  arch=self.ctx.architecture.name,
                                  options=options
                                  )
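
For orientation: the only fields a caller must supply are kickstart_dir and kickstart_repo; everything else comes from the ctx that ImageBuild injects (see builders/imagebuild.py below). A construction sketch, with a hypothetical repository URL:

    backend = ImageFactoryBackend(
        kickstart_dir="os",  # or "kickstart"; used as installdir in the TDL template
        kickstart_repo="https://git.example.org/rocky-kickstarts.git",  # hypothetical
    )

prepare() then renders the TDL, checks out the kickstarts, and reloads any UUIDs cached in .imagefactory-metadata.json so a re-run can skip already-completed imagefactory steps.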
iso/empanadas/empanadas/backends/interface.py (new file, 40 lines)

"""
empanadas backend interface
"""
from abc import ABC, abstractmethod
from attrs import define, field


@define
class BackendInterface(ABC):
    ctx = field(init=False)
    """
    Interface to build images (or whatever)
    """
    @abstractmethod
    def prepare(self):
        """
        Prepares the environment necessary for building the image.
        This might include setting up directories, checking prerequisites, etc.
        """

    @abstractmethod
    def build(self):
        """
        Performs the image build operation. This is the core method
        where the actual image building logic is implemented.
        """

    @abstractmethod
    def stage(self):
        """
        Transforms and copies artifacts from build directory to the
        location expected by the builder (usually in /tmp/)
        """

    @abstractmethod
    def clean(self):
        """
        Cleans up any resources or temporary files created during
        the image building process.
        """
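
Every concrete backend must supply all four hooks; ctx is not an __init__ argument but is injected by ImageBuild after construction. A minimal conforming stub (hypothetical, for illustration):

    from attrs import define
    from empanadas.backends.interface import BackendInterface

    @define
    class NullBackend(BackendInterface):
        def prepare(self):
            pass

        def build(self):
            pass

        def stage(self):
            pass

        def clean(self):
            pass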
iso/empanadas/empanadas/backends/kiwi.py (new file, 241 lines)

"""Backend for Kiwi"""

from .interface import BackendInterface
from .kiwi_imagedata import ImagesData

from empanadas.builders import utils
from empanadas.common import AttributeDict

from attrs import define, field
from functools import wraps
from typing import List

import git
import os
import pathlib
import tempfile
import shutil
import sys

# TODO(neil): this should be part of the config, somewhere
temp = AttributeDict(
    {
        "Azure": {
            "kiwiType": "oem",
            "kiwiProfile": "Cloud-Azure",
            "fileType": "raw",  # post-converted into vhd on MB boundary
            "outputKey": "disk_format_image",
        },
        "OCP": {
            "kiwiType": "oem",
            "kiwiProfile": "Cloud-OCP",
            "fileType": "qcow2",
            "outputKey": "disk_format_image",
        },
        "GenericCloud": {
            "kiwiType": "oem",
            "kiwiProfile": "Cloud-GenericCloud",
            "fileType": "qcow2",
            "outputKey": "disk_format_image",
        },
        "EC2": {
            "kiwiType": "oem",
            "kiwiProfile": "Cloud-EC2",
            "fileType": "qcow2",
            "outputKey": "disk_format_image",
        },
        "Vagrant": {
            "kiwiType": "oem",
            "kiwiProfile": "Vagrant",
            "fileType": "box",
            "outputKey": "disk_format_image",
        },
        "Container": {
            "kiwiType": "oci",
            "kiwiProfile": "Container",
            "fileType": "tar.xz",
            "outputKey": "container"
        }
    }
)


def ensure_kiwi_conf(func):
    @wraps(func)
    def wrapper(self, *args, **kwargs):
        if not hasattr(self, 'kiwi_conf') or self.kiwi_conf is None:
            self.kiwi_conf = temp[self.ctx.image_type]
        return func(self, *args, **kwargs)
    return wrapper


@define
class KiwiBackend(BackendInterface):
    """Build an image using Kiwi"""

    build_args: List[str] = field(factory=list)
    image_result: ImagesData = field(init=False)
    kiwi_conf: AttributeDict = field(init=False)

    def prepare(self):
        """
        Checkout mock-rocky-configs and rocky-kiwi-descriptions,
        init the mock env, and setup to run kiwi
        """
        self.checkout_repos()
        self.setup_mock()
        self.setup_kiwi()

    @ensure_kiwi_conf
    def build(self):
        self.build_args += [f"--type={self.kiwi_conf.kiwiType}", f"--profile={self.kiwi_conf.kiwiProfile}-{self.ctx.variant}"]

        kiwi_command = [
            "kiwi-ng", "--color-output",
            *self.build_args,
        ]
        if self.ctx.debug:
            kiwi_command.append("--debug")

        kiwi_system_command = [
            "system", "build",
            "--description='/builddir/rocky-kiwi-descriptions'",
            "--target-dir", f"/builddir/{self.ctx.outdir}"
        ]

        build_command = [
            "--shell", "--enable-network", "--", *kiwi_command, *kiwi_system_command
        ]
        ret, out, err = self.run_mock_command(build_command)
        if ret > 0:
            raise Exception(f"Kiwi build failed: code {ret}")
            sys.exit(ret)

    @ensure_kiwi_conf
    def stage(self):
        ret, out, err = self.run_mock_command(["--copyout", f"/builddir/{self.ctx.outdir}", self.ctx.outdir])
        if ret > 0:
            raise Exception("failed to copy build result out")

        kiwi_result_path = pathlib.Path(f"{self.ctx.outdir}/kiwi.result.json")
        if not os.path.exists(kiwi_result_path):
            raise Exception("Missing kiwi.result.json. Aborting")

        with open(kiwi_result_path, "r") as kiwi_result:
            self.image_result = ImagesData.from_json(kiwi_result.read()).images

        source = self.image_result[self.kiwi_conf.outputKey].filename
        filetype = self.kiwi_conf.fileType

        source = utils.remove_first_directory(source)
        dest = f"{self.ctx.outdir}/{self.ctx.outname}.{filetype}"

        # NOTE(neil): only because we are preparing the 'final' image in clean step...
        if self.ctx.image_type == 'Container':
            dest = f"{self.ctx.outdir}/{self.ctx.outname}.oci"

        try:
            shutil.move(source, dest)
        except Exception as e:
            raise e

        # TODO(neil): refactor
        if self.ctx.image_type == 'Azure':
            try:
                utils.resize_and_convert_raw_image_to_vhd(dest, self.ctx.outdir)
                # Remove old raw image
                pathlib.Path(f"{self.ctx.outdir}/{self.ctx.outname}.raw").unlink()
            except Exception as e:
                raise e

    def clean(self):
        # TODO(neil): refactor
        if self.ctx.image_type == 'Container':
            # need to do this before we remove it, otherwise we have to extract from the OCI tarball
            root = f"/builddir{self.ctx.outdir}"
            builddir = f"{root}/build/image-root"
            ret, out, err = self.run_mock_command(["--shell", "--", "tar", "-C", builddir, "-cJf", f"{root}/{self.ctx.outname}.tar.xz", "."])
            if ret > 0:
                raise Exception(err)

        ret, out, err = self.run_mock_command(["--shell", "rm", "-fr", f"/builddir/{self.ctx.outdir}/build/"])
        return ret

    def run_mock_command(self, mock_command: List[str]):
        mock_args = ["--configdir", "/tmp/mock-rocky-configs/etc/mock", "-r", f"rl-9-{self.ctx.architecture.name}-core-infra"]
        if self.ctx.image_type != 'Container':
            mock_args.append("--isolation=simple")
        command = [
            "mock",
            *mock_args,
            *mock_command,
        ]
        ret, out, err, _ = self.ctx.prepare_and_run(command)
        return ret, out, err

    def setup_mock(self):
        # TODO(neil): add error checking
        ret, out, err = self.run_mock_command(["--init"])

        packages = [
            "kiwi-boxed-plugin",
            "kiwi-cli",
            "git",
            "dracut-kiwi-live",
            "fuse-overlayfs",
            "kiwi-systemdeps-bootloaders",
            "kiwi-systemdeps-containers",
            "kiwi-systemdeps-core",
            "kiwi-systemdeps-disk-images",
            "kiwi-systemdeps-filesystems",
            "kiwi-systemdeps-image-validation",
            "kiwi-systemdeps-iso-media",
            "epel-release",
            "rocky-release-core"
        ]
        ret, out, err = self.run_mock_command(["--install", *packages])

        ret, out, err = self.run_mock_command(["--copyin", "/tmp/rocky-kiwi-descriptions", "/builddir/"])
        return ret

    def checkout_repos(self):
        """
        Checkout sig_core/mock-rocky-configs and sig_core/rocky-kiwi-descriptions to /tmp
        """
        repos = {
            "mock-rocky-configs": "main",
            "rocky-kiwi-descriptions": "r9"
        }

        for repo, branch in repos.items():
            repo_url = f"https://git.resf.org/sig_core/{repo}"
            clone_dir = f"/tmp/{repo}"

            if os.path.isdir(os.path.join(clone_dir, ".git")):
                try:
                    # The directory exists and is a git repository, so attempt to pull the latest changes
                    git.Repo(clone_dir).remotes.origin.pull(branch)
                    self.ctx.log.info(f"pulled the latest changes for {branch} branch in {clone_dir}")
                except Exception as e:
                    raise Exception(f"Failed to pull the repository: {str(e)}")
                finally:
                    continue

            try:
                git.Repo.clone_from(repo_url, clone_dir, branch=branch)
                print(f"Repository cloned into {clone_dir}")
            except Exception as e:
                print(f"Failed to clone repository: {str(e)}")

    def setup_kiwi(self):
        self.ctx.log.info("Generating kiwi.yml from template")
        template = self.ctx.tmplenv.get_template('kiwi/kiwi.yml.j2')
        output = tempfile.NamedTemporaryFile(delete=False).name
        res = utils.render_template(output, template)

        self.ctx.log.info("Copying generated kiwi.yml into build root")
        ret, out, err = self.run_mock_command(["--copyin", res, "/etc/kiwi.yml"])
        if ret > 0:
            raise Exception("Failed to configure kiwi")

        self.ctx.log.info("Finished setting up kiwi")
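
For reference, run_mock_command() prefixes every call with the shared mock arguments, so the kiwi build for a GenericCloud-Base x86_64 image resolves to roughly this argv (a sketch; the outdir value varies per build):

    ["mock", "--configdir", "/tmp/mock-rocky-configs/etc/mock",
     "-r", "rl-9-x86_64-core-infra", "--isolation=simple",
     "--shell", "--enable-network", "--",
     "kiwi-ng", "--color-output", "--type=oem", "--profile=Cloud-GenericCloud-Base",
     "system", "build", "--description='/builddir/rocky-kiwi-descriptions'",
     "--target-dir", "/builddir/<outdir>"]

(--isolation=simple is omitted for Container builds.)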
iso/empanadas/empanadas/backends/kiwi_imagedata.py (new file, 24 lines)

from attrs import define, field
from typing import Dict

import json


@define(auto_attribs=True, kw_only=True)
class ImageInfo:
    compress: bool
    filename: str
    shasum: bool
    use_for_bundle: bool


@define(auto_attribs=True, kw_only=True)
class ImagesData:
    images: Dict[str, ImageInfo] = field(factory=dict)

    @staticmethod
    def from_json(data: str) -> 'ImagesData':
        json_data = json.loads(data)
        images = {key: ImageInfo(**value) for key, value in json_data.items()}

        return ImagesData(images=images)
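
ImageInfo is keyword-only and expects exactly these four keys per image entry, so a kiwi.result.json consumed by KiwiBackend.stage() looks roughly like this (filename illustrative):

    data = '{"disk_format_image": {"compress": false, "filename": "build/Rocky.qcow2", "shasum": true, "use_for_bundle": true}}'
    images = ImagesData.from_json(data).images
    images["disk_format_image"].filename   # "build/Rocky.qcow2"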
iso/empanadas/empanadas/builders/__init__.py (new file, 1 line)

from .imagebuild import ImageBuild
iso/empanadas/empanadas/builders/imagebuild.py (new file, 118 lines)

"""Build an image with a given backend"""

import datetime
import logging
import os
import pathlib

from attrs import define, field

from empanadas.backends import BackendInterface, KiwiBackend
from empanadas.common import Architecture
from empanadas.common import _rootdir
from . import utils

from jinja2 import Environment, FileSystemLoader, Template
from typing import List, Optional, Tuple, Callable


@define(kw_only=True)
class ImageBuild:  # pylint: disable=too-few-public-methods
    """Image builder using a given backend"""
    tmplenv: Environment = field(init=False)

    # Only things we know we're keeping in this class here
    architecture: Architecture = field()
    backend: BackendInterface = field()
    build_time: datetime.datetime = field()
    debug: bool = field(default=False)
    log: logging.Logger = field()
    release: int = field(default=0)
    timeout: str = field(default='3600')

    image_type: str = field()  # the type of the image
    type_variant: str = field(init=False)
    variant: Optional[str] = field()

    # Kubernetes job template
    job_template: Optional[Template] = field(init=False)  # the kube Job tpl

    # Commands to stage artifacts

    # Where the artifacts should go to
    outdir: pathlib.Path = field(init=False)
    outname: str = field(init=False)

    def __attrs_post_init__(self):
        self.backend.ctx = self

        file_loader = FileSystemLoader(f"{_rootdir}/templates")
        self.tmplenv = Environment(loader=file_loader)

        self.job_template = self.tmplenv.get_template('kube/Job.tmpl')

        self.type_variant = self.type_variant_name()
        self.outdir, self.outname = self.output_name()

    def output_name(self) -> Tuple[pathlib.Path, str]:
        directory = f"Rocky-{self.architecture.major}-{self.type_variant}-{self.architecture.version}-{self.build_time.strftime('%Y%m%d')}.{self.release}"
        name = f"{directory}.{self.architecture.name}"
        outdir = pathlib.Path("/tmp/", directory)
        return outdir, name

    def type_variant_name(self):
        return self.image_type if not self.variant else f"{self.image_type}-{self.variant}"

    def prepare_and_run(self, command: utils.CMD_PARAM_T, search: Callable = None) -> utils.CMD_RESULT_T:
        return utils.runCmd(self, self.prepare_command(command), search)

    def prepare_command(self, command_list: utils.CMD_PARAM_T) -> List[str]:
        """
        Commands may be a callable, which should be a lambda to be evaluated at
        preparation time with available locals. This can be used to, among
        other things, perform lazy evaluations of f-strings which have values
        not available at assignment time. e.g., filling in a second command
        with a value extracted from the previous step or command.
        """

        r = []
        for c in command_list:
            if callable(c) and c.__name__ == '<lambda>':
                r.append(c())
            else:
                r.append(str(c))
        return r

    def render_kubernetes_job(self):
        # TODO(neil): should this be put in the builder class itself to return the right thing for us?
        if self.backend == KiwiBackend:
            self.log.error("Kube not implemented for Kiwi")

        commands = [self.backend.build_command(), self.backend.package_command(), self.backend.copy_command()]
        if not self.job_template:
            return None
        template = self.job_template.render(
            architecture=self.architecture.name,
            backoffLimit=4,
            buildTime=self.build_time.strftime("%s"),
            command=commands,
            imageName="ghcr.io/rockylinux/sig-core-toolkit:latest",
            jobname="buildimage",
            namespace="empanadas",
            major=self.architecture.major,
            minor=self.architecture.minor,
            restartPolicy="Never",
        )
        return template

    def upload(self, skip=False) -> int:
        if not skip:
            self.log.info("Copying files to output directory")
            copy_command = ["aws", "s3", "cp", "--recursive", f"{self.outdir}/",
                            f"s3://resf-empanadas/buildimage-{self.architecture.version}-{self.architecture.name}/{self.outname}/{self.build_time.strftime('%s')}/"
                            ]
            ret, out, err, _ = self.prepare_and_run(copy_command, search=False)
            return ret

        self.ctx.log.info(f"Build complete! Output available in {self.ctx.outdir}/")
        return 0
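
The lambda convention described in prepare_command() is what lets the backends put target_uuid into stage_commands before it has a value: the callable is only resolved immediately before the command runs. A minimal sketch of the behavior:

    cmd = ["cp", lambda: f"{STORAGE_DIR}/{self.target_uuid}.body", "/tmp/out.raw"]
    # self.target_uuid may still be "" at assignment time; prepare_command()
    # calls the lambda just before execution, after package() has filled it in.
    argv = ctx.prepare_command(cmd)  # -> ["cp", ".../storage/<uuid>.body", "/tmp/out.raw"]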
iso/empanadas/empanadas/builders/utils.py (new file, 139 lines)

import json
import os
import logging
import pathlib
import subprocess
import sys

from typing import Callable, List, Tuple, Union

CMD_PARAM_T = List[Union[str, Callable[..., str]]]

STR_NONE_T = Union[bytes, None]
BYTES_NONE_T = Union[bytes, None]
# Tuple of int, stdout, stderr, uuid
CMD_RESULT_T = Tuple[int, BYTES_NONE_T, BYTES_NONE_T, STR_NONE_T]


log = logging.getLogger(__name__)
log.setLevel(logging.INFO)
handler = logging.StreamHandler(sys.stdout)
handler.setLevel(logging.INFO)
formatter = logging.Formatter(
    '%(asctime)s :: %(name)s :: %(message)s',
    '%Y-%m-%d %H:%M:%S'
)
handler.setFormatter(formatter)
log.addHandler(handler)


def render_template(path, template, **kwargs) -> pathlib.Path:
    with open(path, "wb") as f:
        _template = template.render(**kwargs)
        f.write(_template.encode())
        f.flush()
    output = pathlib.Path(path)
    if not output.exists():
        raise Exception("Failed to template")
    return output


def runCmd(ctx, prepared_command: List[str], search: Callable = None) -> CMD_RESULT_T:
    ctx.log.info(f"Running command: {' '.join(prepared_command)}")

    kwargs = {
        "stderr": subprocess.PIPE,
        "stdout": subprocess.PIPE
    }

    if ctx.debug:
        del kwargs["stderr"]

    with subprocess.Popen(prepared_command, **kwargs) as p:
        uuid = None
        # @TODO implement this as a callback?
        if search:
            for _, line in enumerate(p.stdout):  # type: ignore
                ln = line.decode()
                if ln.startswith("UUID: "):
                    uuid = ln.split(" ")[-1]
                    ctx.log.debug(f"found uuid: {uuid}")

        out, err = p.communicate()
        res = p.wait(), out, err, uuid

        if res[0] > 0:
            ctx.log.error(f"Problem while executing command: '{prepared_command}'")
        if search and not res[3]:
            ctx.log.error("UUID not found in stdout. Dumping stdout and stderr")
            log_subprocess(ctx, res)

        return res


def log_subprocess(ctx, result: CMD_RESULT_T):
    def log_lines(title, lines):
        ctx.log.info(f"====={title}=====")
        ctx.log.info(lines.decode())
    ctx.log.info(f"Command return code: {result[0]}")
    stdout = result[1]
    stderr = result[2]
    if stdout:
        log_lines("Command STDOUT", stdout)
    if stderr:
        log_lines("Command STDERR", stderr)


def remove_first_directory(path):
    p = pathlib.Path(path)
    # Check if the path is absolute
    if p.is_absolute():
        # For an absolute path, start the new path with the root
        new_path = pathlib.Path(p.root, *p.parts[2:])
    else:
        # For a relative path, simply skip the first part
        new_path = pathlib.Path(*p.parts[1:])
    return new_path


def resize_and_convert_raw_image_to_vhd(raw_image_path, outdir=None):
    log.info(f"Will resize and convert {raw_image_path}")
    MB = 1024 * 1024  # For calculations - 1048576 bytes

    if outdir is None:
        outdir = os.getcwd()

    # Ensure the output directory exists
    pathlib.Path(outdir).mkdir(parents=True, exist_ok=True)

    # Getting the size of the raw image
    result = subprocess.run(['qemu-img', 'info', '-f', 'raw', '--output', 'json', raw_image_path], capture_output=True, text=True)
    if result.returncode != 0:
        log.error("Error getting image info")
        raise Exception(result)

    image_info = json.loads(result.stdout)
    size = int(image_info['virtual-size'])

    # Calculate the new size rounded to the nearest MB
    rounded_size = ((size + MB - 1) // MB) * MB

    # Prepare output filename (.raw replaced by .vhd)
    outfilename = pathlib.Path(raw_image_path).name.replace("raw", "vhd")
    outfile = os.path.join(outdir, outfilename)

    # Resize the image
    log.info(f"Resizing {raw_image_path} to nearest MB boundary")
    result = subprocess.run(['qemu-img', 'resize', '-f', 'raw', raw_image_path, str(rounded_size)])
    if result.returncode != 0:
        log.error("Error resizing image")
        raise Exception(result)

    # Convert the image
    log.info(f"Converting {raw_image_path} to vhd")
    result = subprocess.run(['qemu-img', 'convert', '-f', 'raw', '-o', 'subformat=fixed,force_size', '-O', 'vpc', raw_image_path, outfile])
    if result.returncode != 0:
        log.error("Error converting image to VHD format")
        raise Exception(result)

    log.info(f"Image converted and saved to {outfile}")
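
The rounding step in resize_and_convert_raw_image_to_vhd() is a ceiling to the next MiB boundary, which the Azure flow above relies on ("post-converted into vhd on MB boundary"). A worked example:

    MB = 1024 * 1024
    size = 10 * 1024**3 + 1                    # 10 GiB plus one byte
    rounded = ((size + MB - 1) // MB) * MB     # 10241 * MB = 10,738,466,816 bytes

An already-aligned image is left at its exact size, since the ceiling of a multiple of MB is itself.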
@@ -1,21 +1,22 @@
 # All imports are here
 import glob
-import hashlib
-import logging
-import os
 import platform
 import time
 from collections import defaultdict
-from typing import Tuple
+from attrs import define, field


 import rpm
 import yaml


 # An implementation from the Fabric python library
-class AttributeDict(defaultdict):
-    def __init__(self):
-        super(AttributeDict, self).__init__(AttributeDict)
+class AttributeDict(dict):
+    def __init__(self, *args, **kwargs):
+        super(AttributeDict, self).__init__(*args, **kwargs)
+        for key, value in self.items():
+            if isinstance(value, dict):
+                self[key] = AttributeDict(value)
+
     def __getattr__(self, key):
         try:

@@ -26,6 +27,11 @@ class AttributeDict(defaultdict):
     def __setattr__(self, key, value):
         self[key] = value

+    def __setitem__(self, key, value):
+        if isinstance(value, dict):
+            value = AttributeDict(value)
+        super(AttributeDict, self).__setitem__(key, value)
+

 # These are a bunch of colors we may use in terminal output
 class Color:
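
With the recursive wrapping now done in both __init__ and __setitem__, nested plain dicts become attribute-addressable at every level, which is what lets the new kiwi backend write self.kiwi_conf.kiwiProfile. A quick sketch of the resulting behavior:

    d = AttributeDict({"Azure": {"kiwiType": "oem"}})
    d.Azure.kiwiType        # "oem"
    d.extra = {"x": 1}      # plain dicts are re-wrapped on assignment too
    d.extra.x               # 1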
@@ -59,6 +65,7 @@ config = {
     "category_stub": "mirror/pub/rocky",
     "sig_category_stub": "mirror/pub/sig",
     "repo_base_url": "https://yumrepofs.build.resf.org/v1/projects",
+    "staging_base_url": "https://dl.rockylinux.org/stg/rocky",
     "mock_work_root": "/builddir",
     "container": "centos:stream9",
     "distname": "Rocky Linux",

@@ -107,7 +114,7 @@ for conf in glob.iglob(f"{_rootdir}/sig/*.yaml"):

 ALLOWED_TYPE_VARIANTS = {
     "Azure": ["Base", "LVM"],
-    "Container": ["Base", "Minimal", "UBI", "WSL"],
+    "Container": ["Base", "Minimal", "UBI", "WSL", "Toolbox"],
     "EC2": ["Base", "LVM"],
     "GenericCloud": ["Base", "LVM"],
     "Vagrant": ["Libvirt", "Vbox", "VMware"],

@@ -120,7 +127,7 @@ ALLOWED_TYPE_VARIANTS = {
 def valid_type_variant(_type: str, variant: str = "") -> bool:
     if _type not in ALLOWED_TYPE_VARIANTS:
         raise Exception(f"Type is invalid: ({_type}, {variant})")
-    if ALLOWED_TYPE_VARIANTS[_type] == None:
+    if ALLOWED_TYPE_VARIANTS[_type] is None:
         if variant is not None:
             raise Exception(f"{_type} Type expects no variant type.")
     return True

@@ -135,9 +142,6 @@ def valid_type_variant(_type: str, variant: str = "") -> bool:
     return True


-from attrs import define, field
-
-
 @define(kw_only=True)
 class Architecture:
     name: str = field()
@@ -31,6 +31,7 @@
   - 'AppStream'
   - 'CRB'
   - 'HighAvailability'
+  - 'ResilientStorage'
  - 'RT'
   - 'NFV'
   - 'SAP'

@@ -189,6 +190,9 @@
   HighAvailability:
     - BaseOS
     - AppStream
+  ResilientStorage:
+    - BaseOS
+    - AppStream
   RT:
     - BaseOS
     - AppStream

@@ -31,6 +31,7 @@
   - 'AppStream'
   - 'CRB'
   - 'HighAvailability'
+  - 'ResilientStorage'
   - 'RT'
   - 'NFV'
   - 'SAP'

@@ -189,6 +190,9 @@
   HighAvailability:
     - BaseOS
     - AppStream
+  ResilientStorage:
+    - BaseOS
+    - AppStream
   RT:
     - BaseOS
     - AppStream
@@ -18,7 +18,7 @@
   - x86_64
   - aarch64
   provide_multilib: False
-  project_id: 'e9cfc87c-d2d2-42d5-a121-852101f1a966'
+  project_id: 'df5bcbfc-ba83-4da8-84d6-ae0168921b4d'
   repo_symlinks:
     devel: 'Devel'
     NFV: 'nfv'
@@ -1,10 +1,10 @@
 ---
 '9-beta':
-  fullname: 'Rocky Linux 9.6'
-  revision: '9.6'
+  fullname: 'Rocky Linux 9.4'
+  revision: '9.4'
   rclvl: 'BETA1'
   major: '9'
-  minor: '6'
+  minor: '4'
   profile: '9-beta'
   disttag: 'el9'
   code: "Blue Onyx"

@@ -20,7 +20,7 @@
   - ppc64le
   - s390x
   provide_multilib: True
-  project_id: 'ae163d6a-f050-484f-bbaa-100ca673f146'
+  project_id: 'df5bcbfc-ba83-4da8-84d6-ae0168921b4d'
   repo_symlinks:
     NFV: 'nfv'
   renames:
@@ -1,10 +1,10 @@
 ---
 '9':
-  fullname: 'Rocky Linux 9.5'
-  revision: '9.5'
+  fullname: 'Rocky Linux 9.4'
+  revision: '9.4'
   rclvl: 'RC1'
   major: '9'
-  minor: '5'
+  minor: '4'
   profile: '9'
   disttag: 'el9'
   code: "Blue Onyx"

@@ -20,7 +20,7 @@
   - ppc64le
   - s390x
   provide_multilib: True
-  project_id: 'ae163d6a-f050-484f-bbaa-100ca673f146'
+  project_id: 'df5bcbfc-ba83-4da8-84d6-ae0168921b4d'
   repo_symlinks:
     NFV: 'nfv'
   renames:
@@ -1,10 +1,10 @@
 ---
 '9-lookahead':
-  fullname: 'Rocky Linux 9.6'
-  revision: '9.6'
+  fullname: 'Rocky Linux 9.5'
+  revision: '9.5'
   rclvl: 'LH1'
   major: '9'
-  minor: '6'
+  minor: '5'
   profile: '9-lookahead'
   disttag: 'el9'
   code: "Blue Onyx"

@@ -20,7 +20,7 @@
   - ppc64le
   - s390x
   provide_multilib: True
-  project_id: 'ae163d6a-f050-484f-bbaa-100ca673f146'
+  project_id: '6794b5a8-290b-4d0d-ad5a-47164329cbb0'
   repo_symlinks:
     NFV: 'nfv'
   renames:
@ -1,44 +1,49 @@
|
|||||||
# Builds an image given a version, type, variant, and architecture
|
# Builds an image given a version, type, variant, anctx.d architecture
|
||||||
# Defaults to the running host's architecture
|
# Defaults to the running host's architecture
|
||||||
|
|
||||||
import argparse
|
import argparse
|
||||||
import datetime
|
import datetime
|
||||||
import json
|
|
||||||
import logging
|
import logging
|
||||||
import os
|
|
||||||
import pathlib
|
|
||||||
import platform
|
import platform
|
||||||
import subprocess
|
|
||||||
import sys
|
import sys
|
||||||
import tempfile
|
|
||||||
import time
|
|
||||||
|
|
||||||
from attrs import define, Factory, field, asdict
|
|
||||||
from botocore import args
|
|
||||||
from jinja2 import Environment, FileSystemLoader, Template
|
|
||||||
from typing import Callable, List, NoReturn, Optional, Tuple, IO, Union
|
|
||||||
|
|
||||||
from empanadas.common import Architecture, rldict, valid_type_variant
|
from empanadas.common import Architecture, rldict, valid_type_variant
|
||||||
from empanadas.common import _rootdir
|
from empanadas.builders import ImageBuild
|
||||||
|
from empanadas.backends import ImageFactoryBackend, KiwiBackend
|
||||||
|
|
||||||
parser = argparse.ArgumentParser(description="ISO Compose")
|
parser = argparse.ArgumentParser(description="ISO Compose")
|
||||||
|
|
||||||
parser.add_argument('--version', type=str, help="Release Version (8.6, 9.1)", required=True)
|
parser.add_argument('--version',
|
||||||
|
type=str, help="Release Version (8.6, 9.1)", required=True)
|
||||||
parser.add_argument('--rc', action='store_true', help="Release Candidate")
|
parser.add_argument('--rc', action='store_true', help="Release Candidate")
|
||||||
parser.add_argument('--kickstartdir', action='store_true', help="Use the kickstart dir instead of the os dir for repositories")
|
parser.add_argument('--kickstartdir', action='store_true',
|
||||||
|
help="Use the kickstart dir instead of the os dir")
|
||||||
parser.add_argument('--debug', action='store_true', help="debug?")
|
parser.add_argument('--debug', action='store_true', help="debug?")
|
||||||
parser.add_argument('--type', type=str, help="Image type (container, genclo, azure, aws, vagrant)", required=True)
|
parser.add_argument('--skip', type=str,
|
||||||
|
help="what stage(s) to skip",
|
||||||
|
required=False)
|
||||||
|
parser.add_argument('--type', type=str,
|
||||||
|
help="Image type (container, genclo, azure, aws, vagrant)",
|
||||||
|
required=True)
|
||||||
parser.add_argument('--variant', type=str, help="", required=False)
|
parser.add_argument('--variant', type=str, help="", required=False)
|
||||||
parser.add_argument('--release', type=str, help="Image release for subsequent builds with the same date stamp (rarely needed)", required=False)
|
parser.add_argument('--release', type=str,
|
||||||
parser.add_argument('--kube', action='store_true', help="output as a K8s job(s)", required=False)
|
help="Image release for builds with the same date stamp",
|
||||||
parser.add_argument('--timeout', type=str, help="change timeout for imagefactory build process (default 3600)", required=False, default='3600')
|
required=False)
|
||||||
|
parser.add_argument('--kube', action='store_true',
|
||||||
|
help="output as a K8s job(s)",
|
||||||
|
required=False)
|
||||||
|
parser.add_argument('--timeout', type=str,
|
||||||
|
help="change timeout for imagefactory build process",
|
||||||
|
required=False, default='3600')
|
||||||
|
parser.add_argument('--backend', type=str,
|
||||||
|
help="which backend to use (kiwi|imagefactory)",
|
||||||
|
required=False, default='kiwi')
|
||||||
|
|
||||||
|
|
||||||
results = parser.parse_args()
|
results = parser.parse_args()
|
||||||
rlvars = rldict[results.version]
|
rlvars = rldict[results.version]
|
||||||
major = rlvars["major"]
|
major = rlvars["major"]
|
||||||
|
|
||||||
|
|
||||||
debug = results.debug
|
debug = results.debug
|
||||||
|
|
||||||
log = logging.getLogger(__name__)
|
log = logging.getLogger(__name__)
|
||||||
@ -52,405 +57,6 @@ formatter = logging.Formatter(
|
|||||||
handler.setFormatter(formatter)
|
handler.setFormatter(formatter)
|
||||||
log.addHandler(handler)
|
log.addHandler(handler)
|
||||||
|
|
||||||
STORAGE_DIR = pathlib.Path("/var/lib/imagefactory/storage")
|
|
||||||
KICKSTART_PATH = pathlib.Path(os.environ.get("KICKSTART_PATH", "/kickstarts"))
|
|
||||||
BUILDTIME = datetime.datetime.utcnow()
|
|
||||||
|
|
||||||
|
|
||||||
CMD_PARAM_T = List[Union[str, Callable[..., str]]]
|
|
||||||
|
|
||||||
@define(kw_only=True)
|
|
||||||
class ImageBuild:
|
|
||||||
architecture: Architecture = field()
|
|
||||||
base_uuid: Optional[str] = field(default="")
|
|
||||||
cli_args: argparse.Namespace = field()
|
|
||||||
command_args: List[str] = field(factory=list)
|
|
||||||
common_args: List[str] = field(factory=list)
|
|
||||||
debug: bool = field(default=False)
|
|
||||||
image_type: str = field()
|
|
||||||
job_template: Optional[Template] = field(init=False)
|
|
||||||
kickstart_arg: List[str] = field(factory=list)
|
|
||||||
kickstart_path: pathlib.Path = field(init=False)
|
|
||||||
metadata: pathlib.Path = field(init=False)
|
|
||||||
out_type: str = field(init=False)
|
|
||||||
outdir: pathlib.Path = field(init=False)
|
|
||||||
outname: str = field(init=False)
|
|
||||||
package_args: List[str] = field(factory=list)
|
|
||||||
release: int = field(default=0)
|
|
||||||
stage_commands: Optional[List[List[Union[str,Callable]]]] = field(init=False)
|
|
||||||
target_uuid: Optional[str] = field(default="")
|
|
||||||
tdl_path: pathlib.Path = field(init=False)
|
|
||||||
template: Template = field()
|
|
||||||
timeout: str = field(default='3600')
|
|
||||||
type_variant: str = field(init=False)
|
|
||||||
variant: Optional[str] = field()
|
|
||||||
|
|
||||||
def __attrs_post_init__(self):
|
|
||||||
self.tdl_path = self.render_icicle_template()
|
|
||||||
if not self.tdl_path:
|
|
||||||
exit(2)
|
|
||||||
self.type_variant = self.type_variant_name()
|
|
||||||
self.outdir, self.outname = self.output_name()
|
|
||||||
self.out_type = self.image_format()
|
|
||||||
self.command_args = self._command_args()
|
|
||||||
self.package_args = self._package_args()
|
|
||||||
self.common_args = self._common_args()
|
|
||||||
|
|
||||||
self.metadata = pathlib.Path(self.outdir, ".imagefactory-metadata.json")
|
|
||||||
|
|
||||||
self.kickstart_path = pathlib.Path(f"{KICKSTART_PATH}/Rocky-{self.architecture.major}-{self.type_variant}.ks")
|
|
||||||
|
|
||||||
self.checkout_kickstarts()
|
|
||||||
self.kickstart_arg = self.kickstart_imagefactory_args()
|
|
||||||
|
|
||||||
try:
|
|
||||||
os.mkdir(self.outdir)
|
|
||||||
except FileExistsError as e:
|
|
||||||
log.info("Directory already exists for this release. If possible, previously executed steps may be skipped")
|
|
||||||
except Exception as e:
|
|
||||||
log.exception("Some other exception occured while creating the output directory", e)
|
|
||||||
return 0
|
|
||||||
|
|
||||||
if os.path.exists(self.metadata):
|
|
||||||
with open(self.metadata, "r") as f:
|
|
||||||
try:
|
|
||||||
o = json.load(f)
|
|
||||||
self.base_uuid = o['base_uuid']
|
|
||||||
self.target_uuid = o['target_uuid']
|
|
||||||
except json.decoder.JSONDecodeError as e:
|
|
||||||
log.exception("Couldn't decode metadata file", e)
|
|
||||||
finally:
|
|
||||||
f.flush()
|
|
||||||
|
|
||||||
# Yes, this is gross. I'll fix it later.
|
|
||||||
if self.image_type in ["Container"]:
|
|
||||||
self.stage_commands = [
|
|
||||||
["tar", "-C", f"{self.outdir}", "--strip-components=1", "-x", "-f", lambda: f"{STORAGE_DIR}/{self.target_uuid}.body", "*/layer.tar"],
|
|
||||||
["xz", f"{self.outdir}/layer.tar"]
|
|
||||||
]
|
|
||||||
if self.image_type in ["RPI"]:
|
|
||||||
self.stage_commands = [
|
|
||||||
["cp", lambda: f"{STORAGE_DIR}/{self.target_uuid}.body", f"{self.outdir}/{self.outname}.raw"],
|
|
||||||
["xz", f"{self.outdir}/{self.outname}.raw"]
|
|
||||||
]
|
|
||||||
if self.image_type in ["GenericCloud", "OCP", "GenericArm"]:
|
|
||||||
self.stage_commands = [
|
|
||||||
["qemu-img", "convert", "-c", "-f", "raw", "-O", "qcow2", lambda: f"{STORAGE_DIR}/{self.target_uuid}.body", f"{self.outdir}/{self.outname}.qcow2"]
|
|
||||||
]
|
|
||||||
if self.image_type in ["EC2"]:
|
|
||||||
self.stage_commands = [
|
|
||||||
["qemu-img", "convert", "-f", "raw", "-O", "qcow2", lambda: f"{STORAGE_DIR}/{self.target_uuid}.body", f"{self.outdir}/{self.outname}.qcow2"]
|
|
||||||
]
|
|
||||||
if self.image_type in ["Azure"]:
|
|
||||||
self.stage_commands = [
|
|
||||||
["/prep-azure.sh", lambda: f"{STORAGE_DIR}/{self.target_uuid}.body", f"{STORAGE_DIR}"],
|
|
||||||
["cp", lambda: f"{STORAGE_DIR}/{self.target_uuid}.vhd", f"{self.outdir}/{self.outname}.vhd"]
|
|
||||||
]
|
|
||||||
if self.image_type in ["Vagrant"]:
|
|
||||||
_map = {
|
|
||||||
"Vbox": {"format": "vmdk", "provider": "virtualbox"},
|
|
||||||
"Libvirt": {"format": "qcow2", "provider": "libvirt", "virtual_size": 10},
|
|
||||||
"VMware": {"format": "vmdk", "provider": "vmware_desktop"}
|
|
||||||
}
|
|
||||||
output = f"{_map[self.variant]['format']}" #type: ignore
|
|
||||||
provider = f"{_map[self.variant]['provider']}" # type: ignore
|
|
||||||
|
|
||||||
# pop from the options map that will be passed to the vagrant metadata.json
|
|
||||||
convert_options = _map[self.variant].pop('convertOptions') if 'convertOptions' in _map[self.variant].keys() else '' #type: ignore
|
|
||||||
|
|
||||||
|
|
||||||
self.stage_commands = [
|
|
||||||
["qemu-img", "convert", "-c", "-f", "raw", "-O", output, *convert_options, lambda: f"{STORAGE_DIR}/{self.target_uuid}.body", f"{self.outdir}/{self.outname}.{output}"],
|
|
||||||
["tar", "-C", self.outdir, "-czf", f"/tmp/{self.outname}.box", '.'],
|
|
||||||
["mv", f"/tmp/{self.outname}.box", self.outdir]
|
|
||||||
]
|
|
||||||
self.prepare_vagrant(_map[self.variant])
|
|
||||||
|
|
||||||
if self.stage_commands:
|
|
||||||
self.stage_commands.append(["cp", "-v", lambda: f"{STORAGE_DIR}/{self.target_uuid}.meta", f"{self.outdir}/build.meta"])
|
|
||||||
|
|
||||||
|
|
||||||
def prepare_vagrant(self, options):
    """Set up the output directory for the Vagrant type variant, dropping templates as required"""
    file_loader = FileSystemLoader(f"{_rootdir}/templates")
    tmplenv = Environment(loader=file_loader)

    templates = {}
    templates['Vagrantfile'] = tmplenv.get_template(f"vagrant/Vagrantfile.{self.variant}")
    templates['metadata.json'] = tmplenv.get_template('vagrant/metadata.tmpl.json')
    templates['info.json'] = tmplenv.get_template('vagrant/info.tmpl.json')

    if self.variant == "VMware":
        templates[f"{self.outname}.vmx"] = tmplenv.get_template('vagrant/vmx.tmpl')

    if self.variant == "Vbox":
        templates['box.ovf'] = tmplenv.get_template('vagrant/box.tmpl.ovf')

    if self.variant == "Libvirt":
        # The Libvirt vagrant driver expects the qcow2 file to be called box.img.
        qemu_command_index = [i for i, d in enumerate(self.stage_commands) if d[0] == "qemu-img"][0]
        self.stage_commands.insert(qemu_command_index + 1, ["mv", f"{self.outdir}/{self.outname}.qcow2", f"{self.outdir}/box.img"])

    for name, template in templates.items():
        self.render_template(f"{self.outdir}/{name}", template,
                             name=self.outname,
                             arch=self.architecture.name,
                             options=options
                             )
def checkout_kickstarts(self) -> int:
    cmd = ["git", "clone", "--branch", f"r{self.architecture.major}", rlvars['livemap']['git_repo'], f"{KICKSTART_PATH}"]
    ret, out, err, _ = self.runCmd(cmd, search=False)
    log.debug(out)
    log.debug(err)
    if ret > 0:
        # The clone failed, most likely because the checkout already
        # exists; fall back to updating it in place.
        ret = self.pull_kickstarts()
    return ret
def pull_kickstarts(self) -> int:
    cmd: CMD_PARAM_T = ["git", "-C", f"{KICKSTART_PATH}", "reset", "--hard", "HEAD"]
    ret, out, err, _ = self.runCmd(cmd, search=False)
    log.debug(out)
    log.debug(err)
    if ret == 0:
        cmd = ["git", "-C", f"{KICKSTART_PATH}", "pull"]
        ret, out, err, _ = self.runCmd(cmd, search=False)
        log.debug(out)
        log.debug(err)
    return ret
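checkout_kickstarts and pull_kickstarts together implement a clone-or-update fallback. A self-contained sketch of the same pattern using subprocess directly (the names here are illustrative, not from this module):

import subprocess

def clone_or_update(repo_url, branch, dest):
    # Clone the kickstart repo; if that fails (e.g. dest already exists),
    # hard-reset the checkout and pull instead.
    clone = subprocess.run(["git", "clone", "--branch", branch, repo_url, dest])
    if clone.returncode == 0:
        return 0
    reset = subprocess.run(["git", "-C", dest, "reset", "--hard", "HEAD"])
    if reset.returncode != 0:
        return reset.returncode
    return subprocess.run(["git", "-C", dest, "pull"]).returncode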
def output_name(self) -> Tuple[pathlib.Path, str]:
    directory = f"Rocky-{self.architecture.major}-{self.type_variant}-{self.architecture.version}-{BUILDTIME.strftime('%Y%m%d')}.{self.release}"
    name = f"{directory}.{self.architecture.name}"
    outdir = pathlib.Path("/tmp", directory)
    return outdir, name
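With illustrative inputs (major 9, type_variant GenericCloud, version 9.4, release 0, built 20240101 on x86_64), the values work out roughly as:

# directory -> "Rocky-9-GenericCloud-9.4-20240101.0"
# name      -> "Rocky-9-GenericCloud-9.4-20240101.0.x86_64"
# outdir    -> pathlib.Path("/tmp/Rocky-9-GenericCloud-9.4-20240101.0")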
def type_variant_name(self):
    return self.image_type if not self.variant else f"{self.image_type}-{self.variant}"
def _command_args(self):
    args_mapping = {
        "debug": "--debug",
    }
    return [param for name, param in args_mapping.items() if getattr(self.cli_args, name)]
def _package_args(self) -> List[str]:
    if self.image_type in ["Container"]:
        return ["--parameter", "compress", "xz"]
    # No extra arguments; an empty list avoids splatting an empty-string
    # argument into the imagefactory command line.
    return []
def _common_args(self) -> List[str]:
    args = []
    if self.image_type in ["Container"]:
        args = ["--parameter", "offline_icicle", "true"]
    if self.image_type in ["GenericCloud", "EC2", "Vagrant", "Azure", "OCP", "RPI", "GenericArm"]:
        args = ["--parameter", "generate_icicle", "false"]
    return args
def image_format(self) -> str:
    mapping = {
        "Container": "docker"
    }
    return mapping.get(self.image_type, '')
def kickstart_imagefactory_args(self) -> List[str]:
    if not self.kickstart_path.is_file():
        log.warning(f"Kickstart file is not available: {self.kickstart_path}")
        if not debug:
            log.warning("Exiting because debug mode is not enabled.")
            exit(2)

    return ["--file-parameter", "install_script", str(self.kickstart_path)]
def render_template(self, path, template, **kwargs) -> pathlib.Path:
    with open(path, "wb") as f:
        _template = template.render(**kwargs)
        f.write(_template.encode())
        f.flush()
    output = pathlib.Path(path)
    if not output.exists():
        log.error("Failed to write template")
        raise Exception("Failed to write template")
    return output
def render_icicle_template(self) -> pathlib.Path:
    output = tempfile.NamedTemporaryFile(delete=False).name
    return self.render_template(output, self.template,
                                architecture=self.architecture.name,
                                iso8601date=BUILDTIME.strftime("%Y%m%d"),
                                installdir="kickstart" if self.cli_args.kickstartdir else "os",
                                major=self.architecture.major,
                                minor=self.architecture.minor,
                                release=self.release,
                                size="10G",
                                type=self.image_type,
                                utcnow=BUILDTIME,
                                version_variant=self.architecture.version if not self.variant else f"{self.architecture.version}-{self.variant}",
                                )
def build_command(self) -> List[str]:
    build_command = ["imagefactory", "--timeout", self.timeout, *self.command_args, "base_image", *self.common_args, *self.kickstart_arg, self.tdl_path]
    return build_command
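Flattened, this is a single imagefactory CLI call. An illustrative shape of the resulting argv for a GenericCloud build (the timeout value and TDL path are placeholders):

# ["imagefactory", "--timeout", "3600", "base_image",
#  "--parameter", "generate_icicle", "false",
#  "--file-parameter", "install_script", "/kickstarts/Rocky-9-GenericCloud.ks",
#  "/tmp/tmpXXXXXX"]   # rendered TDL path (placeholder)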
def package_command(self) -> List[str]:
    package_command = ["imagefactory", *self.command_args, "target_image", self.out_type, *self.common_args,
                       "--id", f"{self.base_uuid}",
                       *self.package_args,
                       "--parameter", "repository", self.outname,
                       ]
    return package_command
def copy_command(self) -> List[str]:
    copy_command = ["aws", "s3", "cp", "--recursive", f"{self.outdir}/",
                    f"s3://resf-empanadas/buildimage-{self.architecture.version}-{self.architecture.name}/{self.outname}/{BUILDTIME.strftime('%s')}/"
                    ]
    return copy_command
def build(self) -> int:
    if self.base_uuid:
        # A base image already exists from a previous run; skip the build.
        return 0

    self.fix_ks()

    ret, out, err, uuid = self.runCmd(self.build_command())
    if uuid:
        self.base_uuid = uuid.rstrip()
        self.save()
    return ret
def package(self) -> int:
    # Some build types don't need to be packaged by imagefactory
    # @TODO remove business logic if possible
    if self.image_type in ["GenericCloud", "EC2", "Azure", "Vagrant", "OCP", "RPI", "GenericArm"]:
        self.target_uuid = self.base_uuid if hasattr(self, 'base_uuid') else ""

    if self.target_uuid:
        return 0

    ret, out, err, uuid = self.runCmd(self.package_command())
    if uuid:
        self.target_uuid = uuid.rstrip()
        self.save()
    return ret
def stage(self) -> int:
    """Stage the artifacts from wherever they are (unpacking and converting if needed)"""
    if not hasattr(self, 'stage_commands'):
        return 0

    returns = []
    for command in self.stage_commands:  # type: ignore
        ret, out, err, _ = self.runCmd(command, search=False)
        returns.append(ret)

    # Report failure if any staging command failed. (A plain `all(...)`
    # here would only report failure when every command failed.)
    return int(any(ret > 0 for ret in returns))
def copy(self, skip=False) -> int:
    # Move or unpack the artifacts if necessary
    log.info("Executing staging commands")
    if (stage := self.stage()) > 0:
        raise Exception(stage)

    if not skip:
        log.info("Copying files to output directory")
        ret, out, err, _ = self.runCmd(self.copy_command(), search=False)
        return ret

    log.info(f"Build complete! Output available in {self.outdir}/")
    return 0
def runCmd(self, command: CMD_PARAM_T, search: bool = True) -> Tuple[int, Union[bytes, None], Union[bytes, None], Union[str, None]]:
    prepared, _ = self.prepare_command(command)
    log.info(f"Running command: {' '.join(prepared)}")

    kwargs = {
        "stderr": subprocess.PIPE,
        "stdout": subprocess.PIPE
    }
    if debug:
        del kwargs["stderr"]

    with subprocess.Popen(prepared, **kwargs) as p:
        uuid = None
        # @TODO implement this as a callback?
        if search:
            for line in p.stdout:  # type: ignore
                ln = line.decode()
                if ln.startswith("UUID: "):
                    uuid = ln.split(" ")[-1]
                    log.debug(f"found uuid: {uuid}")

        out, err = p.communicate()
        res = p.wait(), out, err, uuid

        if res[0] > 0:
            log.error(f"Problem while executing command: '{prepared}'")
        if search and not res[3]:
            log.error("UUID not found in stdout. Dumping stdout and stderr")
            self.log_subprocess(res)

    return res
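The UUID scraping above only keys on lines beginning with "UUID: ". A minimal standalone sketch of that logic, fed illustrative output lines:

def find_uuid(stdout_lines):
    # Return the last token of the first line that starts with "UUID: ".
    for line in stdout_lines:
        if line.startswith("UUID: "):
            return line.split(" ")[-1].rstrip()
    return None

# Illustrative ImageFactory-style output:
print(find_uuid(["Building base_image...", "UUID: 1d2c3b4a-0000-1111-2222-333344445555"]))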
def prepare_command(self, command_list: CMD_PARAM_T) -> Tuple[List[str], List[None]]:
    """
    Commands may contain a callable, which should be a lambda evaluated at
    preparation time with the locals available then. This can be used to,
    among other things, perform lazy evaluation of f-strings whose values
    are not available at assignment time, e.g. filling in a second command
    with a value extracted from a previous step or command.
    """
    r = []
    return r, [r.append(c()) if (callable(c) and c.__name__ == '<lambda>') else r.append(str(c)) for c in command_list]
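Usage sketch for the lambda convention (a simplified stand-in for prepare_command; the UUID value is illustrative):

class Demo:
    target_uuid = None

    def prepare_command(self, command_list):
        r = []
        for c in command_list:
            r.append(c() if (callable(c) and c.__name__ == '<lambda>') else str(c))
        return r

demo = Demo()
cmd = ["cp", lambda: f"/var/lib/imagefactory/storage/{demo.target_uuid}.body", "/tmp/out.raw"]
demo.target_uuid = "1d2c3b4a"     # set by an earlier build step
print(demo.prepare_command(cmd))  # ['cp', '/var/lib/imagefactory/storage/1d2c3b4a.body', '/tmp/out.raw']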
def log_subprocess(self, result: Tuple[int, Union[bytes, None], Union[bytes, None], Union[str, None]]):
    def log_lines(title, lines):
        log.info(f"====={title}=====")
        log.info(lines.decode())
    log.info(f"Command return code: {result[0]}")
    stdout = result[1]
    stderr = result[2]
    if stdout:
        log_lines("Command STDOUT", stdout)
    if stderr:
        log_lines("Command STDERR", stderr)
def fix_ks(self):
    # Substitute $basearch in the kickstart with the concrete architecture.
    cmd: CMD_PARAM_T = ["sed", "-i", f"s,$basearch,{self.architecture.name},", str(self.kickstart_path)]
    self.runCmd(cmd, search=False)
def render_kubernetes_job(self):
    commands = [self.build_command(), self.package_command(), self.copy_command()]
    if not self.job_template:
        return None
    template = self.job_template.render(
        architecture=self.architecture.name,
        backoffLimit=4,
        buildTime=BUILDTIME.strftime("%s"),
        command=commands,
        imageName="ghcr.io/rockylinux/sig-core-toolkit:latest",
        jobname="buildimage",
        namespace="empanadas",
        major=major,
        restartPolicy="Never",
    )
    return template
def save(self):
    with open(self.metadata, "w") as f:
        try:
            o = {name: getattr(self, name) for name in ["base_uuid", "target_uuid"]}
            log.debug(o)
            json.dump(o, f)
        except AttributeError as e:
            log.error(f"Couldn't find attribute in object. Something is probably wrong: {e}")
        except Exception as e:
            log.exception(e)
        finally:
            f.flush()
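save() is what lets build() and package() short-circuit on reruns. The matching load side is not shown in this diff; a hypothetical counterpart (load_metadata is not a method in this codebase) might look like:

import json
import pathlib

def load_metadata(metadata: pathlib.Path) -> dict:
    # Read previously saved base/target UUIDs so a rerun can resume.
    if not metadata.exists():
        return {"base_uuid": "", "target_uuid": ""}
    with open(metadata) as f:
        return json.load(f)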
 def run():
     try:
@@ -459,28 +65,51 @@ def run():
         log.exception(e)
         exit(2)
 
-    file_loader = FileSystemLoader(f"{_rootdir}/templates")
-    tmplenv = Environment(loader=file_loader)
-    tdl_template = tmplenv.get_template('icicle/tdl.xml.tmpl')
-
     arches = rlvars['allowed_arches'] if results.kube else [platform.uname().machine]
 
     for architecture in arches:
+        if results.backend == "kiwi":
+            backend = KiwiBackend()
+        else:
+            backend = ImageFactoryBackend(
+                kickstart_dir="kickstart" if results.kickstartdir else "os",
+                kickstart_repo=rlvars['livemap']['git_repo']
+            )
         IB = ImageBuild(
             architecture=Architecture.from_version(architecture, rlvars['revision']),
-            cli_args=results,
             debug=results.debug,
             image_type=results.type,
             release=results.release if results.release else 0,
-            template=tdl_template,
             variant=results.variant,
+            build_time=datetime.datetime.utcnow(),
+            backend=backend,
+            log=log,
         )
-        if results.kube:
-            IB.job_template = tmplenv.get_template('kube/Job.tmpl')
-            #commands = IB.kube_commands()
-            print(IB.render_kubernetes_job())
-        else:
-            ret = IB.build()
-            ret = IB.package()
-            ret = IB.copy()
+        if results.kube:
+            # commands = IB.kube_commands()
+            print(IB.render_kubernetes_job())
+            sys.exit(0)
+
+        skip_stages = results.skip.split(',') if results.skip else []
+        stages = ["prepare", "build", "clean", "stage"]
+        for i, stage in enumerate(stages):
+            skip_stage = stage in skip_stages
+
+            log.info(f"Stage {i} - {stage}{' SKIP' if skip_stage else ''}")
+
+            if skip_stage:
+                continue
+
+            method = getattr(IB.backend, stage)
+            if callable(method):
+                method()
+            else:
+                log.fatal(f"Unable to execute {stage}")
+
+        if 'upload' in skip_stages:
+            return
+
+        log.info("Final stage - Upload")
+
+        IB.upload(skip='upload' in skip_stages)
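The new run() drives whichever backend was chosen by looking stage names up as attributes. A minimal sketch of that dispatch pattern, with a stub standing in for KiwiBackend/ImageFactoryBackend:

import logging

logging.basicConfig(level=logging.INFO)
log = logging.getLogger(__name__)

class StubBackend:
    """Stands in for a real backend in this sketch."""
    def prepare(self): log.info("preparing")
    def build(self): log.info("building")
    def clean(self): log.info("cleaning")
    def stage(self): log.info("staging")

backend = StubBackend()
for i, stage in enumerate(["prepare", "build", "clean", "stage"]):
    method = getattr(backend, stage, None)
    if callable(method):
        method()
    else:
        log.fatal(f"Unable to execute {stage}")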
@@ -2,8 +2,7 @@
 
 import argparse
 
-from empanadas.common import *
-from empanadas.util import Checks
+from empanadas.common import config, rldict
 from empanadas.util import IsoBuild
 
 parser = argparse.ArgumentParser(description="ISO Compose")
@@ -29,5 +28,6 @@ a = IsoBuild(
     logger=results.logger,
 )
 
+
 def run():
     a.run()
@@ -9,6 +9,7 @@ from empanadas.util import IsoBuild
 parser = argparse.ArgumentParser(description="ISO Compose")
 
 parser.add_argument('--release', type=str, help="Major Release Version or major-type (eg 9-beta)", required=True)
+parser.add_argument('--rc', action='store_true', help="Release Candidate, Beta, RLN")
 parser.add_argument('--arch', type=str, help="Architecture")
 parser.add_argument('--isolation', type=str, help="Mock Isolation")
 parser.add_argument('--local-compose', action='store_true', help="Compose Directory is Here")
@@ -26,6 +27,7 @@ a = IsoBuild(
     rlvars,
     config,
     major=major,
+    rc=results.rc,
     arch=results.arch,
     isolation=results.isolation,
     extra_iso=results.extra_iso,
@@ -11,7 +11,6 @@ parser = argparse.ArgumentParser(description="Peridot Sync and Compose")
 
 # All of our options
 parser.add_argument('--release', type=str, help="Major Release Version or major-type (eg 9-beta)", required=True)
-parser.add_argument('--overwrite', action='store_true', help="Overwrites current treeinfo and discinfo files")
 parser.add_argument('--logger', type=str)
 
 # Parse them
@@ -27,7 +26,6 @@ a = RepoSync(
     rlvars,
     config,
     major=major,
-    refresh_treeinfo=results.overwrite,
     logger=results.logger,
 )
@@ -33,6 +33,7 @@ parser.add_argument('--logger', type=str)
 parser.add_argument('--disable-gpg-check', action='store_false')
 parser.add_argument('--disable-repo-gpg-check', action='store_false')
 parser.add_argument('--clean-old-packages', action='store_true')
+parser.add_argument('--use-staging', action='store_true')
 
 # Parse them
 results = parser.parse_args()
@@ -64,6 +65,7 @@ a = RepoSync(
     gpg_check=results.disable_gpg_check,
     repo_gpg_check=results.disable_repo_gpg_check,
     reposync_clean_old=results.clean_old_packages,
+    use_staging=results.use_staging,
 )
 
 def run():
@@ -22,7 +22,7 @@ lorax --product="${PRODUCT}" \
     --isfinal \
 {%- endif %}
 {%- for repo in repos %}
-    --source={{ repo.url }} \
+    --source='{{ repo.url }}' \
 {%- endfor %}
 {%- if squashfs_only %}
     --squashfs-only \
162 iso/empanadas/empanadas/templates/kiwi/kiwi.yml.j2 Normal file
@@ -0,0 +1,162 @@
+# KIWI - Build configuration file
+#
+# Below all configuration parameters available to control
+# KIWI's build process are listed as comments. The values
+# used here provides the default values applied by KIWI if
+# no other information is specified.
+#
+# To make any of the below effective, please uncomment the
+# respective section(s) and adapt the parameters according
+# to your needs
+#
+
+# Setup access to security keys
+#credentials:
+#   # Specify private key(s) used for signing operations
+#   - verification_metadata_signing_key_file: /path/to/private.pem
+
+# Setup access options for the Open BuildService
+#obs:
+#   # Specify the URL of the Open BuildService download server
+#   - download_url: http://download.opensuse.org/repositories
+#   # Specify if the BuildService download server is public or private.
+#   # This information is used to verify if the request to populate
+#   # the repositories via the imageinclude attribute is possible
+#   - public: true
+
+
+# Setup behaviour of the kiwi result bundle command
+#bundle:
+#   # Specify if the bundle tarball should contain compressed results.
+#   # Note: Already compressed result information will not be touched.
+#   # Build results that generate an encrypted filesystem, i.e.
+#   # luks setup, will not be compressed. The intention for result compression
+#   # is to produce a smaller representation of the original. Encrypted data
+#   # generally grows when an attempt is made to compress the data. This is
+#   # due to the nature of compression algorithms. Therefore this setting is
+#   # ignored when encryption is enabled.
+#   - compress: false
+#   # Specify if the image build result and bundle should contain
+#   # a .changes file. The .changes file contains the package changelog
+#   # information from all packages installed into the image.
+#   - has_package_changes: false
+
+
+# Setup behaviour of XZ compressor
+#xz:
+#   # Specify options used in any xz compression call
+#   - options: '--threads=0'
+
+
+# Setup process parameters for container image creation
+#container:
+#   # Specify compression for container images
+#   # Possible values are true, false, xz or none.
+#   - compress: true
+
+
+# Setup process parameters for ISO image creation
+#iso:
+#   # Specify tool category which should be used to build iso images
+#   # Possible values are: xorriso
+#   - tool_category: xorriso
+
+
+# Setup process parameters for OCI toolchain
+#oci:
+#   # Specify OCI archive tool which should be used on creation of
+#   # container archives for OCI compliant images, e.g docker
+#   # Possible values are umoci and buildah
+#   - archive_tool: buildah
+
+
+# Specify build constraints that applies during the image build
+# process. If one or more constraints are violated the build exits
+# with an appropriate error message.
+#build_constraints:
+#   # Maximum result image size. The value can be specified in
+#   # bytes or it can be specified with m=MB or g=GB. The constraint
+#   # is checked prior to the result bundle creation
+#   - max_size: 700m
+
+# Setup process parameters for partition mapping
+mapper:
+  # # Specify tool to use for creating partition maps
+  # # Possible values are: kpartx and partx
+  - part_mapper: {{ "partx" if architecture in ["s390x"] else "kpartx" }}
+
+# Setup process parameters to handle runtime checks
+#runtime_checks:
+#   # Specify list of runtime checks to disable
+#   - disable:
+#       # verify that the host has the required container tools installed
+#       - check_container_tool_chain_installed
+
+#       # verify that there are repositories configured
+#       - check_repositories_configured
+
+#       # verify that the URL for imageinclude repos is accessable
+#       - check_image_include_repos_publicly_resolvable
+
+#       # verify secure boot setup disabled for overlay configured disk images
+#       - check_efi_mode_for_disk_overlay_correctly_setup
+
+#       # verify for legacy kiwi boot images that they exist on the host
+#       - check_boot_description_exists
+
+#       # verify if kiwi initrd_system was set if a boot attribute exists
+#       - check_initrd_selection_required
+
+#       # verify for legacy kiwi boot images that the same kernel is used
+#       - check_consistent_kernel_in_boot_and_system_image
+
+#       # check for reserved label names used in LVM setup
+#       - check_volume_setup_defines_reserved_labels
+
+#       # verify only one full size volume is specified for LVM images
+#       - check_volume_setup_defines_multiple_fullsize_volumes
+
+#       # verify no / volume setup is setup but the @root volume is used
+#       - check_volume_setup_has_no_root_definition
+
+#       # verify if volume label is really used with a volume setup
+#       - check_volume_label_used_with_lvm
+
+#       # verify that there is a xen domain setup for xen images
+#       - check_xen_uniquely_setup_as_server_or_guest
+
+#       # verify mediacheck is installed for ISO images that requests it
+#       - check_mediacheck_installed
+
+#       # verify dracut-kiwi-live is installed for ISO images
+#       - check_dracut_module_for_live_iso_in_package_list
+
+#       # verify dracut-kiwi-overlay is installed for overlay disk images
+#       - check_dracut_module_for_disk_overlay_in_package_list
+
+#       # verify dracut-kiwi-repart is installed for OEM disk images
+#       - check_dracut_module_for_disk_oem_in_package_list
+
+#       # verify dracut-kiwi-oem-dump is installed for OEM install images
+#       - check_dracut_module_for_oem_install_in_package_list
+
+#       # verify configured firmware is compatible with host architecture
+#       - check_architecture_supports_iso_firmware_setup
+
+#       # verify WSL naming conventions
+#       - check_appx_naming_conventions_valid
+
+#       # check kiwi dracut modules compatible with kiwi builder
+#       - check_dracut_module_versions_compatible_to_kiwi
+
+#       # check for unresolved include statements in the XML description
+#       - check_include_references_unresolvable
+
+#       # validate options passed to cryptsetup via luksformat element
+#       - check_luksformat_options_valid
+
+#       # check devicepersistency compatible with partition table type
+#       - check_partuuid_persistency_type_used_with_mbr
+
+#       # check efifatimagesize does not exceed the max El Torito load size
+#       - check_efi_fat_image_has_correct_size
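The only live setting in this template is the partition mapper, switched to partx on s390x. A sketch of rendering that line with jinja2:

from jinja2 import Template

line = Template('part_mapper: {{ "partx" if architecture in ["s390x"] else "kpartx" }}')
print(line.render(architecture="s390x"))   # part_mapper: partx
print(line.render(architecture="x86_64"))  # part_mapper: kpartx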
File diff suppressed because it is too large
File diff suppressed because it is too large
File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -62,7 +62,9 @@ class RepoSync:
             fpsync: bool = False,
             logger=None,
             log_level='INFO',
+            use_staging: bool = False,
     ):
 
         self.nofail = nofail
         self.dryrun = dryrun
         self.fullrun = fullrun
@@ -80,11 +82,14 @@ class RepoSync:
         # This makes it so every repo is synced at the same time.
         # This is EXTREMELY dangerous.
         self.just_pull_everything = just_pull_everything
+        # Use staging url instead of pulling from peridot (or, for EL8)
+        self.use_staging = use_staging
         # Relevant config items
         self.major_version = major
         self.date_stamp = config['date_stamp']
         self.timestamp = time.time()
         self.repo_base_url = config['repo_base_url']
+        self.staging_base_url = config['staging_base_url']
         self.compose_root = config['compose_root']
         self.compose_base = config['compose_root'] + "/" + major
         self.profile = rlvars['profile']
@@ -102,6 +107,7 @@ class RepoSync:
         self.project_id = rlvars['project_id']
         self.repo_renames = rlvars['renames']
         self.repos = rlvars['all_repos']
+        self.extra_repos = rlvars['extra_repos']
         self.multilib = rlvars['provide_multilib']
         self.repo = repo
         self.extra_files = rlvars['extra_files']
@@ -270,7 +276,9 @@ class RepoSync:
             self.gpg_check,
             self.repo_gpg_check,
             self.tmplenv,
-            self.log
+            self.log,
+            staging_base_url=self.staging_base_url,
+            use_staging=self.use_staging,
         )
 
         if self.dryrun:
@@ -661,7 +669,10 @@ class RepoSync:
         repoclosure_entry_name_list = []
         self.log.info('Setting up repoclosure for {}'.format(repo))
 
-        for arch in self.repoclosure_map['arches']:
+        arches_for_repoclosure = self.arches
+        if self.arch:
+            arches_for_repoclosure = self.arch.split(',')
+        for arch in arches_for_repoclosure:
             repo_combination = []
             repoclosure_entry_name = f'repoclosure-{repo}-{arch}'
             repoclosure_entry_name_list.append(repoclosure_entry_name)
@@ -1243,7 +1254,7 @@ class RepoSync:
         It is rare that this should be called.
         """
         sync_root = self.compose_latest_sync
-        self.deploy_treeinfo(self.repo, sync_root, self.arch, refresh=self.refresh_treeinfo)
+        self.deploy_treeinfo(self.repo, sync_root, self.arch, refresh=True)
         self.tweak_treeinfo(self.repo, sync_root, self.arch)
 
     def run_compose_closeout(self):
@@ -1446,7 +1457,8 @@ class RepoSync:
             self.gpg_check,
             self.repo_gpg_check,
             self.tmplenv,
-            self.log
+            self.log,
+            staging_base_url=self.staging_base_url,
         )
 
 
@@ -110,6 +110,7 @@ class IsoBuild:
         self.revision = rlvars['revision']
         self.rclvl = rlvars['rclvl']
         self.repos = rlvars['iso_map']['lorax']['repos']
+        self.extra_repos = rlvars['extra_repos']
         self.repo_base_url = config['repo_base_url']
         self.project_id = rlvars['project_id']
         self.structure = rlvars['structure']
@@ -202,7 +203,8 @@ class IsoBuild:
             self.current_arch,
             self.compose_latest_sync,
             self.compose_dir_is_here,
-            self.hashed
+            self.hashed,
+            self.extra_repos
         )
         self.log.info(self.revision_level)
 
@@ -268,6 +270,8 @@ class IsoBuild:
             dist=self.disttag,
             repos=self.repolist,
             user_agent='{{ user_agent }}',
+            compose_dir_is_here=self.compose_dir_is_here,
+            compose_dir=self.compose_root,
         )
 
         mock_sh_template_output = mock_sh_template.render(
@@ -771,7 +775,6 @@ class IsoBuild:
         if not os.path.exists(lorax_path):
             self.log.error(Color.FAIL + 'Lorax not found at all. This is considered fatal.')
 
-            # do we need to do a hard exit here?
             raise SystemExit()
 
         grafts = self._generate_graft_points(
@@ -780,12 +783,7 @@ class IsoBuild:
             self.iso_map['images'][y]['repos'],
             reposcan=reposcan
         )
-        try:
-            self._extra_iso_local_config(a, y, grafts, work_root)
-        except ValueError as exc:
-            self.log.error(Color.FAIL + f'An error occured while configuring extra ISO build {exc}')
-            self.log.error(Color.FAIL + f'Error: {exc}')
-            continue
+        self._extra_iso_local_config(a, y, grafts, work_root, volname)
 
         if self.extra_iso_mode == 'local':
             self._extra_iso_local_run(a, y, work_root)
@@ -805,7 +803,7 @@ class IsoBuild:
 
         self._extra_iso_podman_run(arches_to_build, images_to_build_podman, work_root)
 
-    def _extra_iso_local_config(self, arch, image, grafts, work_root):
+    def _extra_iso_local_config(self, arch, image, grafts, work_root, volname):
         """
         Local ISO build configuration - This generates the configuration for
         both mock and podman entries
@@ -825,6 +823,7 @@ class IsoBuild:
         iso_template_path = f'{entries_dir}/buildExtraImage-{arch}-{image}.sh'
         xorriso_template_path = f'{entries_dir}/xorriso-{arch}-{image}.txt'
         iso_readme_path = f'{self.iso_work_dir}/{arch}/README'
+        #print(iso_readme_path)
 
         log_root = os.path.join(
             work_root,
@@ -845,15 +844,12 @@ class IsoBuild:
         if self.updated_image:
             datestamp = '-' + self.updated_image_date
 
-        volid = Idents.get_vol_id(boot_iso)
+        volid = f'{self.shortname}-{self.major_version}-{self.minor_version}{rclevel}-{arch}-{volname}'
         isoname = f'{self.shortname}-{self.release}{rclevel}{datestamp}-{arch}-{image}.iso'
         generic_isoname = f'{self.shortname}-{arch}-{image}.iso'
         latest_isoname = f'{self.shortname}-{self.major_version}-latest-{arch}-{image}.iso'
         required_pkgs = self.iso_map['lorax']['required_pkgs']
 
-        if not volid:
-            raise ValueError('Volume ID could not be determined')
-
         lorax_pkg_cmd = '/usr/bin/dnf install {} -y {}'.format(
             ' '.join(required_pkgs),
             log_path_command
@@ -862,6 +858,7 @@ class IsoBuild:
         mock_iso_template_output = mock_iso_template.render(
             arch=self.current_arch,
             major=self.major_version,
+            releasever=self.release,
             fullname=self.fullname,
             shortname=self.shortname,
             required_pkgs=required_pkgs,
@@ -875,6 +872,7 @@ class IsoBuild:
         mock_sh_template_output = mock_sh_template.render(
             arch=self.current_arch,
             major=self.major_version,
+            releasever=self.release,
             isolation=self.mock_isolation,
             builddir=self.mock_work_root,
             shortname=self.shortname,
@@ -891,8 +889,6 @@ class IsoBuild:
             'iso_level': self.iso_map['iso_level'],
         }
 
-        self.log.info(Color.INFO + f'boot.iso volume name: {volid}')
-
         # Generate a xorriso compatible dialog
         with open(grafts) as xp:
             xorpoint = xp.read()
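The replacement derives the volume ID from compose metadata instead of probing the boot ISO with pycdlib. With illustrative values:

shortname, major_version, minor_version = "Rocky", "9", "4"  # illustrative
rclevel, arch, volname = "", "x86_64", "dvd"                 # illustrative
volid = f"{shortname}-{major_version}-{minor_version}{rclevel}-{arch}-{volname}"
print(volid)  # Rocky-9-4-x86_64-dvd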
@@ -13,8 +13,6 @@ import boto3
 import xmltodict
 import productmd.treeinfo
 import productmd.composeinfo
-import pycdlib
-import magic
 import empanadas
 import kobo.shortcuts
 from empanadas.common import Color
@@ -26,13 +24,8 @@ class ArchCheck:
     archfile = {
         'x86_64': [
             'isolinux/vmlinuz',
-            'images/efiboot.img',
-            'images/eltorito.img',
             'images/grub.conf',
-            'images/install.img',
-            'boot/grub2/grub.cfg',
-            'EFI/BOOT/BOOTX64.EFI',
-            'EFI/BOOT/grub.cfg'
+            'EFI/BOOT/BOOTX64.EFI'
         ],
         'aarch64': [
             'EFI/BOOT/BOOTAA64.EFI'
@@ -453,7 +446,9 @@ class Shared:
             repo_gpg_check,
             templates,
             logger,
-            dest_path='/var/tmp'
+            dest_path='/var/tmp',
+            staging_base_url='https://dl.rockylinux.org/stg',
+            use_staging=False,
     ) -> str:
         """
         Generates the necessary repo conf file for the operation. This repo
@@ -482,9 +477,23 @@ class Shared:
         if not os.path.exists(dest_path):
             os.makedirs(dest_path, exist_ok=True)
         config_file = open(fname, "w+")
 
         repolist = []
         for repo in repos:
 
+            if use_staging:
+                constructed_url = '{}/{}/{}/$basearch/os'.format(
+                    staging_base_url,
+                    major_version,
+                    repo,
+                )
+
+                constructed_url_src = '{}/{}/{}/source/tree'.format(
+                    staging_base_url,
+                    major_version,
+                    repo,
+                )
+            else:
                 constructed_url = '{}/{}/repo/{}{}/$basearch'.format(
                         repo_base_url,
                         project_id,
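The staging switch only changes how the repo URLs are assembled. A sketch of the staging shapes with illustrative inputs:

staging_base_url = "https://dl.rockylinux.org/stg"
major_version, repo = "9", "BaseOS"  # illustrative

binary_url = f"{staging_base_url}/{major_version}/{repo}/$basearch/os"
source_url = f"{staging_base_url}/{major_version}/{repo}/source/tree"

print(binary_url)  # https://dl.rockylinux.org/stg/9/BaseOS/$basearch/os
print(source_url)  # https://dl.rockylinux.org/stg/9/BaseOS/source/tree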
@@ -910,7 +919,9 @@ class Shared:
             compose_latest_sync,
             compose_dir_is_here: bool = False,
             hashed: bool = False,
-            extra_repos: list = None
+            extra_repos: list = None,
+            staging_base_url: str = 'https://dl.rockylinux.org/stg',
+            use_staging: bool = False,
     ):
         """
         Builds the repo dictionary
@@ -945,7 +956,9 @@ class Shared:
             repolist.append(repodata)
 
         if extra_repos:
-            repolist.append(repo for repo in Shared.parse_extra_repos(extra_repos))
+            extras = Shared.parse_extra_repos(extra_repos)
+            for repo in extras:
+                repolist.append(repo)
 
         return repolist
 
@@ -954,6 +967,9 @@ class Shared:
         # must be in format URL[,PRIORITY]
         result = []
        for idx, candidate in enumerate(extra_repos):
+            if isinstance(candidate, dict):
+                url, priority = candidate['url'], candidate.get('priority', None)
+            else:
                 url, priority = candidate.split(',')
             if not priority:
                 priority = 100
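parse_extra_repos now accepts either a 'URL[,PRIORITY]' string or a dict. A standalone sketch of the accepted shapes (partition is used here instead of split so a missing comma is tolerated; names are illustrative):

def parse_extra_repo(candidate):
    # Accept 'URL[,PRIORITY]' strings or {'url': ..., 'priority': ...} dicts.
    if isinstance(candidate, dict):
        url, priority = candidate['url'], candidate.get('priority', None)
    else:
        url, _, priority = candidate.partition(',')
    return url, int(priority) if priority else 100

print(parse_extra_repo("https://example.org/extras/$basearch,50"))
print(parse_extra_repo({"url": "https://example.org/extras/$basearch"}))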
@@ -1344,42 +1360,10 @@ class Idents:
             return False
 
     @staticmethod
-    def get_magic(m):
-        """
-        Gets magic data of a given file
-        """
-        try:
-            meta = magic.detect_from_filename(m)
-        except ValueError as exc:
-            print(exc)
-            return False
-
-        return meta
-
-    @staticmethod
-    def get_vol_id(i):
+    def get_vol_id(opts):
         """
-        Gets a volume ID of a given ISO
+        Gets a volume ID
         """
-        iso = pycdlib.PyCdlib()
-        try:
-            iso.open(i)
-        except pycdlib.pycdlibexception.PyCdlibInvalidISO as exc:
-            print(exc)
-            return False
-        # This is for s390x. a temporary hack
-        except pycdlib.pycdlibexception.PyCdlibInvalidInput as exc:
-            print(exc)
-            print('Trying magic instead')
-            magic_data = Idents.get_magic(i)
-            if magic_data.mime_type == 'application/x-iso9660-image':
-                volume_id = magic_data.name.split("'")[1]
-                return volume_id
-
-        pvd = iso.pvd
-        volume_id = pvd.volume_identifier.decode('UTF-8').strip()
-        iso.close()
-        return volume_id
 
 class Syncs:
     """
463 iso/empanadas/poetry.lock generated
@@ -1,4 +1,4 @@
-# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand.
+# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand.
 
 [[package]]
 name = "atomicwrites"
@@ -12,36 +12,36 @@ files = [
 
 [[package]]
 name = "attrs"
-version = "24.2.0"
+version = "23.2.0"
 description = "Classes Without Boilerplate"
 optional = false
 python-versions = ">=3.7"
 files = [
-    {file = "attrs-24.2.0-py3-none-any.whl", hash = "sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2"},
-    {file = "attrs-24.2.0.tar.gz", hash = "sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346"},
+    {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"},
+    {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"},
 ]
 
 [package.extras]
-benchmark = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins", "pytest-xdist[psutil]"]
-cov = ["cloudpickle", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"]
-dev = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"]
-docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier (<24.7)"]
-tests = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"]
-tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"]
+cov = ["attrs[tests]", "coverage[toml] (>=5.3)"]
+dev = ["attrs[tests]", "pre-commit"]
+docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"]
+tests = ["attrs[tests-no-zope]", "zope-interface"]
+tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"]
+tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"]
 
 [[package]]
 name = "boto3"
-version = "1.35.62"
+version = "1.34.159"
 description = "The AWS SDK for Python"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "boto3-1.35.62-py3-none-any.whl", hash = "sha256:e6574047701ab009c2b2bb17b530a3a2fb34de8698b77f8bbb34dd0c9286c117"},
-    {file = "boto3-1.35.62.tar.gz", hash = "sha256:f80eefe7506aa01799b1027d03eddfd3c4a60548d6db5c32f139e1dec9f3f4f5"},
+    {file = "boto3-1.34.159-py3-none-any.whl", hash = "sha256:21120d23cc37c0e80dc4f64434bc5664d2a5645dcd9bf8a8fa97ed5c82164ca0"},
+    {file = "boto3-1.34.159.tar.gz", hash = "sha256:ffe7bbb88ba81b5d54bc8fa0cfb2f3b7fe63a6cffa0f9207df2ef5c22a1c0587"},
 ]
 
 [package.dependencies]
-botocore = ">=1.35.62,<1.36.0"
+botocore = ">=1.34.159,<1.35.0"
 jmespath = ">=0.7.1,<2.0.0"
 s3transfer = ">=0.10.0,<0.11.0"
 
@@ -50,13 +50,13 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"]
 
 [[package]]
 name = "botocore"
-version = "1.35.62"
+version = "1.34.159"
 description = "Low-level, data-driven core of boto 3."
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "botocore-1.35.62-py3-none-any.whl", hash = "sha256:4c3960a33289371d96eba5116364c41e6b848b5afbed3a43f5d8c7ba36f55e1d"},
-    {file = "botocore-1.35.62.tar.gz", hash = "sha256:9df762294d5c727d9ea1c48b98579729a0ba40fd317c3262a6b8d8e12fb67489"},
+    {file = "botocore-1.34.159-py3-none-any.whl", hash = "sha256:7633062491457419a49f5860c014251ae85689f78266a3ce020c2c8688a76b97"},
+    {file = "botocore-1.34.159.tar.gz", hash = "sha256:dc28806eb21e3c8d690c422530dff8b4b242ac033cbe98f160a9d37796c09cb1"},
 ]
 
 [package.dependencies]
@@ -68,131 +68,116 @@ urllib3 = [
 ]
 
 [package.extras]
-crt = ["awscrt (==0.22.0)"]
+crt = ["awscrt (==0.21.2)"]
 
 [[package]]
 name = "certifi"
-version = "2024.8.30"
+version = "2024.7.4"
 description = "Python package for providing Mozilla's CA Bundle."
 optional = false
 python-versions = ">=3.6"
 files = [
-    {file = "certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8"},
-    {file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"},
+    {file = "certifi-2024.7.4-py3-none-any.whl", hash = "sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90"},
+    {file = "certifi-2024.7.4.tar.gz", hash = "sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b"},
 ]
 
 [[package]]
 name = "charset-normalizer"
-version = "3.4.0"
+version = "3.3.2"
 description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
 optional = false
 python-versions = ">=3.7.0"
 files = [
{file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:4f9fc98dad6c2eaa32fc3af1417d95b5e3d08aff968df0cd320066def971f9a6"},
|
{file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"},
|
||||||
{file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0de7b687289d3c1b3e8660d0741874abe7888100efe14bd0f9fd7141bcbda92b"},
|
{file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"},
|
||||||
{file = "charset_normalizer-3.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5ed2e36c3e9b4f21dd9422f6893dec0abf2cca553af509b10cd630f878d3eb99"},
|
{file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"},
|
||||||
{file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40d3ff7fc90b98c637bda91c89d51264a3dcf210cade3a2c6f838c7268d7a4ca"},
|
{file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"},
|
||||||
{file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1110e22af8ca26b90bd6364fe4c763329b0ebf1ee213ba32b68c73de5752323d"},
|
{file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"},
|
||||||
{file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:86f4e8cca779080f66ff4f191a685ced73d2f72d50216f7112185dc02b90b9b7"},
|
{file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"},
|
||||||
{file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f683ddc7eedd742e2889d2bfb96d69573fde1d92fcb811979cdb7165bb9c7d3"},
|
{file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"},
|
||||||
{file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27623ba66c183eca01bf9ff833875b459cad267aeeb044477fedac35e19ba907"},
|
{file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"},
|
||||||
{file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f606a1881d2663630ea5b8ce2efe2111740df4b687bd78b34a8131baa007f79b"},
|
{file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"},
|
||||||
{file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0b309d1747110feb25d7ed6b01afdec269c647d382c857ef4663bbe6ad95a912"},
|
{file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"},
|
||||||
{file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:136815f06a3ae311fae551c3df1f998a1ebd01ddd424aa5603a4336997629e95"},
|
{file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"},
|
||||||
{file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:14215b71a762336254351b00ec720a8e85cada43b987da5a042e4ce3e82bd68e"},
|
{file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"},
|
||||||
{file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:79983512b108e4a164b9c8d34de3992f76d48cadc9554c9e60b43f308988aabe"},
|
{file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"},
|
||||||
{file = "charset_normalizer-3.4.0-cp310-cp310-win32.whl", hash = "sha256:c94057af19bc953643a33581844649a7fdab902624d2eb739738a30e2b3e60fc"},
|
{file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"},
|
||||||
{file = "charset_normalizer-3.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:55f56e2ebd4e3bc50442fbc0888c9d8c94e4e06a933804e2af3e89e2f9c1c749"},
|
{file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"},
|
||||||
{file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0d99dd8ff461990f12d6e42c7347fd9ab2532fb70e9621ba520f9e8637161d7c"},
|
{file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"},
|
||||||
{file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c57516e58fd17d03ebe67e181a4e4e2ccab1168f8c2976c6a334d4f819fe5944"},
|
{file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"},
|
||||||
{file = "charset_normalizer-3.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6dba5d19c4dfab08e58d5b36304b3f92f3bd5d42c1a3fa37b5ba5cdf6dfcbcee"},
|
{file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"},
|
||||||
{file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf4475b82be41b07cc5e5ff94810e6a01f276e37c2d55571e3fe175e467a1a1c"},
|
{file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"},
|
||||||
{file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce031db0408e487fd2775d745ce30a7cd2923667cf3b69d48d219f1d8f5ddeb6"},
|
{file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"},
|
||||||
{file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ff4e7cdfdb1ab5698e675ca622e72d58a6fa2a8aa58195de0c0061288e6e3ea"},
|
{file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"},
|
||||||
{file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3710a9751938947e6327ea9f3ea6332a09bf0ba0c09cae9cb1f250bd1f1549bc"},
|
{file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"},
|
||||||
{file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82357d85de703176b5587dbe6ade8ff67f9f69a41c0733cf2425378b49954de5"},
|
{file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"},
|
||||||
{file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:47334db71978b23ebcf3c0f9f5ee98b8d65992b65c9c4f2d34c2eaf5bcaf0594"},
|
{file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"},
|
||||||
{file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8ce7fd6767a1cc5a92a639b391891bf1c268b03ec7e021c7d6d902285259685c"},
|
{file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"},
|
||||||
{file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f1a2f519ae173b5b6a2c9d5fa3116ce16e48b3462c8b96dfdded11055e3d6365"},
|
{file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"},
|
||||||
{file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:63bc5c4ae26e4bc6be6469943b8253c0fd4e4186c43ad46e713ea61a0ba49129"},
|
{file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"},
|
||||||
{file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bcb4f8ea87d03bc51ad04add8ceaf9b0f085ac045ab4d74e73bbc2dc033f0236"},
|
{file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"},
|
||||||
{file = "charset_normalizer-3.4.0-cp311-cp311-win32.whl", hash = "sha256:9ae4ef0b3f6b41bad6366fb0ea4fc1d7ed051528e113a60fa2a65a9abb5b1d99"},
|
{file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"},
|
||||||
{file = "charset_normalizer-3.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:cee4373f4d3ad28f1ab6290684d8e2ebdb9e7a1b74fdc39e4c211995f77bec27"},
|
{file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"},
|
||||||
{file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0713f3adb9d03d49d365b70b84775d0a0d18e4ab08d12bc46baa6132ba78aaf6"},
|
{file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"},
|
||||||
{file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:de7376c29d95d6719048c194a9cf1a1b0393fbe8488a22008610b0361d834ecf"},
|
{file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"},
|
||||||
{file = "charset_normalizer-3.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4a51b48f42d9358460b78725283f04bddaf44a9358197b889657deba38f329db"},
|
{file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"},
|
||||||
{file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b295729485b06c1a0683af02a9e42d2caa9db04a373dc38a6a58cdd1e8abddf1"},
|
{file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"},
|
||||||
{file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ee803480535c44e7f5ad00788526da7d85525cfefaf8acf8ab9a310000be4b03"},
|
{file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"},
|
||||||
{file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d59d125ffbd6d552765510e3f31ed75ebac2c7470c7274195b9161a32350284"},
|
{file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"},
|
||||||
{file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cda06946eac330cbe6598f77bb54e690b4ca93f593dee1568ad22b04f347c15"},
|
{file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"},
|
||||||
{file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07afec21bbbbf8a5cc3651aa96b980afe2526e7f048fdfb7f1014d84acc8b6d8"},
|
{file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"},
|
||||||
{file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6b40e8d38afe634559e398cc32b1472f376a4099c75fe6299ae607e404c033b2"},
|
{file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"},
|
||||||
{file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b8dcd239c743aa2f9c22ce674a145e0a25cb1566c495928440a181ca1ccf6719"},
|
{file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"},
|
||||||
{file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:84450ba661fb96e9fd67629b93d2941c871ca86fc38d835d19d4225ff946a631"},
|
{file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"},
|
||||||
{file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:44aeb140295a2f0659e113b31cfe92c9061622cadbc9e2a2f7b8ef6b1e29ef4b"},
|
{file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"},
|
||||||
{file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1db4e7fefefd0f548d73e2e2e041f9df5c59e178b4c72fbac4cc6f535cfb1565"},
|
{file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"},
|
||||||
{file = "charset_normalizer-3.4.0-cp312-cp312-win32.whl", hash = "sha256:5726cf76c982532c1863fb64d8c6dd0e4c90b6ece9feb06c9f202417a31f7dd7"},
|
{file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"},
|
||||||
{file = "charset_normalizer-3.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:b197e7094f232959f8f20541ead1d9862ac5ebea1d58e9849c1bf979255dfac9"},
|
{file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"},
|
||||||
{file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:dd4eda173a9fcccb5f2e2bd2a9f423d180194b1bf17cf59e3269899235b2a114"},
|
{file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"},
|
||||||
{file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e9e3c4c9e1ed40ea53acf11e2a386383c3304212c965773704e4603d589343ed"},
|
{file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"},
|
||||||
{file = "charset_normalizer-3.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:92a7e36b000bf022ef3dbb9c46bfe2d52c047d5e3f3343f43204263c5addc250"},
|
{file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"},
|
||||||
{file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:54b6a92d009cbe2fb11054ba694bc9e284dad30a26757b1e372a1fdddaf21920"},
|
{file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"},
|
||||||
{file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ffd9493de4c922f2a38c2bf62b831dcec90ac673ed1ca182fe11b4d8e9f2a64"},
|
{file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"},
|
||||||
{file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:35c404d74c2926d0287fbd63ed5d27eb911eb9e4a3bb2c6d294f3cfd4a9e0c23"},
|
{file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"},
|
||||||
{file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4796efc4faf6b53a18e3d46343535caed491776a22af773f366534056c4e1fbc"},
|
{file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"},
|
||||||
{file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7fdd52961feb4c96507aa649550ec2a0d527c086d284749b2f582f2d40a2e0d"},
|
{file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"},
|
||||||
{file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:92db3c28b5b2a273346bebb24857fda45601aef6ae1c011c0a997106581e8a88"},
|
{file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"},
|
||||||
{file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ab973df98fc99ab39080bfb0eb3a925181454d7c3ac8a1e695fddfae696d9e90"},
|
{file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"},
|
||||||
{file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4b67fdab07fdd3c10bb21edab3cbfe8cf5696f453afce75d815d9d7223fbe88b"},
|
{file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"},
|
||||||
{file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:aa41e526a5d4a9dfcfbab0716c7e8a1b215abd3f3df5a45cf18a12721d31cb5d"},
|
{file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"},
|
||||||
{file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ffc519621dce0c767e96b9c53f09c5d215578e10b02c285809f76509a3931482"},
|
{file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"},
|
||||||
{file = "charset_normalizer-3.4.0-cp313-cp313-win32.whl", hash = "sha256:f19c1585933c82098c2a520f8ec1227f20e339e33aca8fa6f956f6691b784e67"},
|
{file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"},
|
||||||
{file = "charset_normalizer-3.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:707b82d19e65c9bd28b81dde95249b07bf9f5b90ebe1ef17d9b57473f8a64b7b"},
|
{file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"},
|
||||||
{file = "charset_normalizer-3.4.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:dbe03226baf438ac4fda9e2d0715022fd579cb641c4cf639fa40d53b2fe6f3e2"},
|
{file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"},
|
||||||
{file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd9a8bd8900e65504a305bf8ae6fa9fbc66de94178c420791d0293702fce2df7"},
|
{file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"},
|
||||||
{file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8831399554b92b72af5932cdbbd4ddc55c55f631bb13ff8fe4e6536a06c5c51"},
|
{file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"},
|
||||||
{file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a14969b8691f7998e74663b77b4c36c0337cb1df552da83d5c9004a93afdb574"},
|
{file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"},
|
||||||
{file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dcaf7c1524c0542ee2fc82cc8ec337f7a9f7edee2532421ab200d2b920fc97cf"},
|
{file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"},
|
||||||
{file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425c5f215d0eecee9a56cdb703203dda90423247421bf0d67125add85d0c4455"},
|
{file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"},
|
||||||
{file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:d5b054862739d276e09928de37c79ddeec42a6e1bfc55863be96a36ba22926f6"},
|
{file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"},
|
||||||
{file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:f3e73a4255342d4eb26ef6df01e3962e73aa29baa3124a8e824c5d3364a65748"},
|
{file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"},
|
||||||
{file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:2f6c34da58ea9c1a9515621f4d9ac379871a8f21168ba1b5e09d74250de5ad62"},
|
{file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"},
|
||||||
{file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:f09cb5a7bbe1ecae6e87901a2eb23e0256bb524a79ccc53eb0b7629fbe7677c4"},
|
{file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"},
|
||||||
{file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:0099d79bdfcf5c1f0c2c72f91516702ebf8b0b8ddd8905f97a8aecf49712c621"},
|
{file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"},
|
||||||
{file = "charset_normalizer-3.4.0-cp37-cp37m-win32.whl", hash = "sha256:9c98230f5042f4945f957d006edccc2af1e03ed5e37ce7c373f00a5a4daa6149"},
|
{file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"},
|
||||||
{file = "charset_normalizer-3.4.0-cp37-cp37m-win_amd64.whl", hash = "sha256:62f60aebecfc7f4b82e3f639a7d1433a20ec32824db2199a11ad4f5e146ef5ee"},
|
{file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"},
|
||||||
{file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:af73657b7a68211996527dbfeffbb0864e043d270580c5aef06dc4b659a4b578"},
|
{file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"},
|
||||||
{file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cab5d0b79d987c67f3b9e9c53f54a61360422a5a0bc075f43cab5621d530c3b6"},
|
{file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"},
|
||||||
{file = "charset_normalizer-3.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9289fd5dddcf57bab41d044f1756550f9e7cf0c8e373b8cdf0ce8773dc4bd417"},
|
{file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"},
|
||||||
{file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b493a043635eb376e50eedf7818f2f322eabbaa974e948bd8bdd29eb7ef2a51"},
|
{file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"},
|
||||||
{file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fa2566ca27d67c86569e8c85297aaf413ffab85a8960500f12ea34ff98e4c41"},
|
{file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"},
|
||||||
{file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8e538f46104c815be19c975572d74afb53f29650ea2025bbfaef359d2de2f7f"},
|
{file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"},
|
||||||
{file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fd30dc99682dc2c603c2b315bded2799019cea829f8bf57dc6b61efde6611c8"},
|
{file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"},
|
||||||
{file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2006769bd1640bdf4d5641c69a3d63b71b81445473cac5ded39740a226fa88ab"},
|
{file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"},
|
||||||
{file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:dc15e99b2d8a656f8e666854404f1ba54765871104e50c8e9813af8a7db07f12"},
|
{file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"},
|
||||||
{file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:ab2e5bef076f5a235c3774b4f4028a680432cded7cad37bba0fd90d64b187d19"},
|
{file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"},
|
||||||
{file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:4ec9dd88a5b71abfc74e9df5ebe7921c35cbb3b641181a531ca65cdb5e8e4dea"},
|
{file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"},
|
||||||
{file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:43193c5cda5d612f247172016c4bb71251c784d7a4d9314677186a838ad34858"},
|
{file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"},
|
||||||
{file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:aa693779a8b50cd97570e5a0f343538a8dbd3e496fa5dcb87e29406ad0299654"},
|
{file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"},
|
||||||
{file = "charset_normalizer-3.4.0-cp38-cp38-win32.whl", hash = "sha256:7706f5850360ac01d80c89bcef1640683cc12ed87f42579dab6c5d3ed6888613"},
|
{file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"},
|
||||||
{file = "charset_normalizer-3.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:c3e446d253bd88f6377260d07c895816ebf33ffffd56c1c792b13bff9c3e1ade"},
|
{file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"},
|
||||||
{file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:980b4f289d1d90ca5efcf07958d3eb38ed9c0b7676bf2831a54d4f66f9c27dfa"},
|
{file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"},
|
||||||
{file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f28f891ccd15c514a0981f3b9db9aa23d62fe1a99997512b0491d2ed323d229a"},
|
{file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"},
|
||||||
{file = "charset_normalizer-3.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8aacce6e2e1edcb6ac625fb0f8c3a9570ccc7bfba1f63419b3769ccf6a00ed0"},
|
|
||||||
{file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd7af3717683bea4c87acd8c0d3d5b44d56120b26fd3f8a692bdd2d5260c620a"},
|
|
||||||
{file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ff2ed8194587faf56555927b3aa10e6fb69d931e33953943bc4f837dfee2242"},
|
|
||||||
{file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e91f541a85298cf35433bf66f3fab2a4a2cff05c127eeca4af174f6d497f0d4b"},
|
|
||||||
{file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:309a7de0a0ff3040acaebb35ec45d18db4b28232f21998851cfa709eeff49d62"},
|
|
||||||
{file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:285e96d9d53422efc0d7a17c60e59f37fbf3dfa942073f666db4ac71e8d726d0"},
|
|
||||||
{file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:5d447056e2ca60382d460a604b6302d8db69476fd2015c81e7c35417cfabe4cd"},
|
|
||||||
{file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:20587d20f557fe189b7947d8e7ec5afa110ccf72a3128d61a2a387c3313f46be"},
|
|
||||||
{file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:130272c698667a982a5d0e626851ceff662565379baf0ff2cc58067b81d4f11d"},
|
|
||||||
{file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:ab22fbd9765e6954bc0bcff24c25ff71dcbfdb185fcdaca49e81bac68fe724d3"},
|
|
||||||
{file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7782afc9b6b42200f7362858f9e73b1f8316afb276d316336c0ec3bd73312742"},
|
|
||||||
{file = "charset_normalizer-3.4.0-cp39-cp39-win32.whl", hash = "sha256:2de62e8801ddfff069cd5c504ce3bc9672b23266597d4e4f50eda28846c322f2"},
|
|
||||||
{file = "charset_normalizer-3.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:95c3c157765b031331dd4db3c775e58deaee050a3042fcad72cbc4189d7c8dca"},
|
|
||||||
{file = "charset_normalizer-3.4.0-py3-none-any.whl", hash = "sha256:fe9f97feb71aa9896b81973a7bbada8c49501dc73e58a10fcef6663af95e5079"},
|
|
||||||
{file = "charset_normalizer-3.4.0.tar.gz", hash = "sha256:223217c3d4f82c3ac5e29032b3f1c2eb0fb591b72161f86d93f5719079dae93e"},
|
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
@@ -206,17 +191,6 @@ files = [
     {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
 ]

-[[package]]
-name = "file-magic"
-version = "0.4.1"
-description = "Python front end for libmagic(3)"
-optional = false
-python-versions = ">=2.7"
-files = [
-    {file = "file-magic-0.4.1.tar.gz", hash = "sha256:a91d1483117f7ed48cd0238ad9be36b04824d57e9c38ea7523113989e81b9c53"},
-    {file = "file_magic-0.4.1-py3-none-any.whl", hash = "sha256:cb9496a1656baf75cadd771479f63b53081095e968d0be72b9b7a7ed538e4fb8"},
-]
-
 [[package]]
 name = "gitdb"
 version = "4.0.11"
@@ -251,18 +225,15 @@ test = ["coverage[toml]", "ddt (>=1.1.1,!=1.4.3)", "mock", "mypy", "pre-commit",

 [[package]]
 name = "idna"
-version = "3.10"
+version = "3.7"
 description = "Internationalized Domain Names in Applications (IDNA)"
 optional = false
-python-versions = ">=3.6"
+python-versions = ">=3.5"
 files = [
-    {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"},
-    {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"},
+    {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"},
+    {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"},
 ]

-[package.extras]
-all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"]
-
 [[package]]
 name = "importlib-resources"
 version = "5.13.0"
@@ -311,13 +282,13 @@ files = [

 [[package]]
 name = "kobo"
-version = "0.37.0"
+version = "0.36.2"
 description = "A pile of python modules used by Red Hat release engineering to build their tools"
 optional = false
 python-versions = ">=3.6"
 files = [
-    {file = "kobo-0.37.0-py3-none-any.whl", hash = "sha256:3a76e9748a1d218aa91e7ae6ffaed43dcd25984cb09a9c1db19a1f7648665141"},
-    {file = "kobo-0.37.0.tar.gz", hash = "sha256:05ee956fb40aff3eaa2abf2cdec931245f5d6fb6ba8ea6c19f6e26c7ea602fb9"},
+    {file = "kobo-0.36.2-py3-none-any.whl", hash = "sha256:e41aeb5739d2b3578fb411978ee66f5d84a8d47a0f23fe076f905eb94806ec32"},
+    {file = "kobo-0.36.2.tar.gz", hash = "sha256:e451421280a08df9d794582af9693f03fe2290db5041a08fdbee02a587a51bbf"},
 ]

 [package.dependencies]
@@ -325,94 +296,93 @@ six = "*"

 [[package]]
 name = "markupsafe"
-version = "3.0.2"
+version = "2.1.5"
 description = "Safely add untrusted strings to HTML/XML markup."
 optional = false
-python-versions = ">=3.9"
+python-versions = ">=3.7"
 files = [
-    … (MarkupSafe 3.0.2 wheel and sdist hash entries)
+    … (MarkupSafe 2.1.5 wheel and sdist hash entries)
 ]

 [[package]]
 name = "more-itertools"
-version = "10.5.0"
+version = "10.4.0"
 description = "More routines for operating on iterables, beyond itertools"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "more-itertools-10.5.0.tar.gz", hash = "sha256:5482bfef7849c25dc3c6dd53a6173ae4795da2a41a80faea6700d9f5846c5da6"},
-    {file = "more_itertools-10.5.0-py3-none-any.whl", hash = "sha256:037b0d3203ce90cca8ab1defbbdac29d5f993fc20131f3664dc8d6acfa872aef"},
+    {file = "more-itertools-10.4.0.tar.gz", hash = "sha256:fe0e63c4ab068eac62410ab05cccca2dc71ec44ba8ef29916a0090df061cf923"},
+    {file = "more_itertools-10.4.0-py3-none-any.whl", hash = "sha256:0f7d9f83a0a8dcfa8a2694a770590d98a67ea943e3d9f5298309a484758c4e27"},
 ]

 [[package]]
 name = "packaging"
-version = "24.2"
+version = "24.1"
 description = "Core utilities for Python packages"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"},
-    {file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"},
+    {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"},
+    {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"},
 ]

 [[package]]
@@ -454,17 +424,6 @@ files = [
     {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"},
 ]

-[[package]]
-name = "pycdlib"
-version = "1.14.0"
-description = "Pure python ISO manipulation library"
-optional = false
-python-versions = "*"
-files = [
-    {file = "pycdlib-1.14.0-py2.py3-none-any.whl", hash = "sha256:a905827335f0066af3fd416c5cf9b1f29dffaf4d0914b714555213d1809f38d4"},
-    {file = "pycdlib-1.14.0.tar.gz", hash = "sha256:8ec306b31d9c850f28c5fda52438d904edd1e8fcf862c5ffd756272efac9f422"},
-]
-
 [[package]]
 name = "pytest"
 version = "5.4.3"
@@ -589,13 +548,13 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]

 [[package]]
 name = "rpm"
-version = "0.3.1"
+version = "0.2.0"
 description = "Shim RPM module for use in virtualenvs."
 optional = false
 python-versions = ">=3.6"
 files = [
-    {file = "rpm-0.3.1-py3-none-any.whl", hash = "sha256:6a130be8b953781bdbe652927dd9c50d2160f26344ec6e3c0defe823aeda1789"},
-    {file = "rpm-0.3.1.tar.gz", hash = "sha256:d75c5dcb581f1e9c4f89cb6667e938e944c6e7c17dd96829e1553c39f3a4c961"},
+    {file = "rpm-0.2.0-py3-none-any.whl", hash = "sha256:4050b6033f7403be0a34f42a742c49ba74f2b0c6129f0247115b6078b24ddd71"},
+    {file = "rpm-0.2.0.tar.gz", hash = "sha256:b92285f65c9ddf77678cb3e51aa67827426408fac34cdd8d537d8c14e3eaffbf"},
 ]

 [package.extras]
@@ -603,13 +562,13 @@ testing = ["tox"]

 [[package]]
 name = "s3transfer"
-version = "0.10.3"
+version = "0.10.2"
 description = "An Amazon S3 Transfer Manager"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "s3transfer-0.10.3-py3-none-any.whl", hash = "sha256:263ed587a5803c6c708d3ce44dc4dfedaab4c1a32e8329bab818933d79ddcf5d"},
-    {file = "s3transfer-0.10.3.tar.gz", hash = "sha256:4f50ed74ab84d474ce614475e0b8d5047ff080810aac5d01ea25231cfc944b0c"},
+    {file = "s3transfer-0.10.2-py3-none-any.whl", hash = "sha256:eca1c20de70a39daee580aef4986996620f365c4e0fda6a86100231d62f1bf69"},
+    {file = "s3transfer-0.10.2.tar.gz", hash = "sha256:0711534e9356d3cc692fdde846b4a1e4b0cb6519971860796e6bc4c7aea00ef6"},
 ]

 [package.dependencies]
@@ -642,13 +601,13 @@ files = [

 [[package]]
 name = "urllib3"
-version = "1.26.20"
+version = "1.26.19"
 description = "HTTP library with thread-safe connection pooling, file post, and more."
 optional = false
 python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7"
 files = [
-    {file = "urllib3-1.26.20-py2.py3-none-any.whl", hash = "sha256:0ed14ccfbf1c30a9072c7ca157e4319b70d65f623e91e7b32fadb2853431016e"},
-    {file = "urllib3-1.26.20.tar.gz", hash = "sha256:40c2dc0c681e47eb8f90e7e27bf6ff7df2e677421fd46756da1161c39ca70d32"},
+    {file = "urllib3-1.26.19-py2.py3-none-any.whl", hash = "sha256:37a0344459b199fce0e80b0d3569837ec6b6937435c5244e7fd73fa6006830f3"},
+    {file = "urllib3-1.26.19.tar.gz", hash = "sha256:3e3d753a8618b86d7de333b4223005f68720bcd6a7d2bcb9fbd2229ec7c1e429"},
 ]

 [package.extras]
@@ -658,13 +617,13 @@ socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"]

 [[package]]
 name = "urllib3"
-version = "2.2.3"
+version = "2.2.2"
 description = "HTTP library with thread-safe connection pooling, file post, and more."
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac"},
-    {file = "urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9"},
+    {file = "urllib3-2.2.2-py3-none-any.whl", hash = "sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472"},
+    {file = "urllib3-2.2.2.tar.gz", hash = "sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168"},
 ]

 [package.extras]
@@ -686,35 +645,31 @@ files = [

 [[package]]
 name = "xmltodict"
-version = "0.14.2"
+version = "0.13.0"
 description = "Makes working with XML feel like you are working with JSON"
 optional = false
-python-versions = ">=3.6"
+python-versions = ">=3.4"
 files = [
-    {file = "xmltodict-0.14.2-py2.py3-none-any.whl", hash = "sha256:20cc7d723ed729276e808f26fb6b3599f786cbc37e06c65e192ba77c40f20aac"},
-    {file = "xmltodict-0.14.2.tar.gz", hash = "sha256:201e7c28bb210e374999d1dde6382923ab0ed1a8a5faeece48ab525b7810a553"},
+    {file = "xmltodict-0.13.0-py2.py3-none-any.whl", hash = "sha256:aa89e8fd76320154a40d19a0df04a4695fb9dc5ba977cbb68ab3e4eb225e7852"},
+    {file = "xmltodict-0.13.0.tar.gz", hash = "sha256:341595a488e3e01a85a9d8911d8912fd922ede5fecc4dce437eb4b6c8d037e56"},
 ]

 [[package]]
 name = "zipp"
-version = "3.21.0"
+version = "3.20.0"
 description = "Backport of pathlib-compatible object wrapper for zip files"
 optional = false
-python-versions = ">=3.9"
+python-versions = ">=3.8"
 files = [
-    {file = "zipp-3.21.0-py3-none-any.whl", hash = "sha256:ac1bbe05fd2991f160ebce24ffbac5f6d11d83dc90891255885223d42b3cd931"},
-    {file = "zipp-3.21.0.tar.gz", hash = "sha256:2c9958f6430a2040341a52eb608ed6dd93ef4392e02ffe219417c1b28b5dd1f4"},
+    {file = "zipp-3.20.0-py3-none-any.whl", hash = "sha256:58da6168be89f0be59beb194da1250516fdaa062ccebd30127ac65d30045e10d"},
+    {file = "zipp-3.20.0.tar.gz", hash = "sha256:0145e43d89664cfe1a2e533adc75adafed82fe2da404b4bbb6b026c0157bdb31"},
 ]

 [package.extras]
-check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"]
-cover = ["pytest-cov"]
 doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
-enabler = ["pytest-enabler (>=2.2)"]
-test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"]
-type = ["pytest-mypy"]
+test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"]

 [metadata]
 lock-version = "2.0"
 python-versions = ">=3.9,<4"
-content-hash = "60742cc3e07d8fe7b8bbe48db7be51cb2b5e09c194eedb0c4aef064f2d2c5eba"
+content-hash = "4ef7a9006f0f5485e840d95417574a6dc2f25981fceeaf938690eccd9ce0c73e"
@@ -1,25 +1,23 @@
 [tool.poetry]
 name = "empanadas"
-version = "0.7.1"
+version = "0.7.0"
 description = "hand crafted ISOs with love and spice"
 authors = ["Louis Abel <label@rockylinux.org>", "Neil Hanlon <neil@rockylinux.org>"]

 [tool.poetry.dependencies]
 python = ">=3.9,<4"
 rpm = ">=0.1.0"
-MarkupSafe = "==3.0.2"
-PyYAML = "~6.0.2"
+MarkupSafe = "==2.1.5"
+PyYAML = "~6.0.1"
 Jinja2 = "~3"
 productmd = "~1.38"
 importlib-resources = "^5.8.0"
-boto3 = "^1.35.55"
-xmltodict = "^0.14.2"
+boto3 = "^1.34.110"
+xmltodict = "^0.13.0"
 requests = "^2.32.2"
-kobo = "^0.37.0"
-attrs = "^24.2.0"
+kobo = "^0.36.0"
+attrs = "^23.2.0"
 GitPython = ">=3.1.30"
-file-magic = ">=0.4.0"
-pycdlib = ">=1.14.0"

 [tool.poetry.dev-dependencies]
 pytest = "~5"
@@ -41,6 +39,16 @@ generate-compose = "empanadas.scripts.generate_compose:run"
 peridot-repoclosure = "empanadas.scripts.peridot_repoclosure:run"
 refresh-all-treeinfo = "empanadas.scripts.refresh_all_treeinfo:run"

+[tool.pylint.main]
+init-hook ="""
+try:
+    import pylint_venv
+except ImportError:
+    pass
+else:
+    pylint_venv.inithook()
+"""
+
 [build-system]
 requires = ["poetry-core>=1.0.0"]
 build-backend = "poetry.core.masonry.api"
iso/empanadas/tox.ini (new file)
@@ -0,0 +1,2 @@
+[pycodestyle]
+max-line-length = 160
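The new tox.ini carries only pycodestyle settings. As a usage note, the same 160-column limit can be exercised through pycodestyle's Python API; a minimal sketch (the target path is a placeholder, not from the repo):

import pycodestyle

# Mirrors tox.ini's [pycodestyle] max-line-length = 160
style = pycodestyle.StyleGuide(max_line_length=160)
report = style.check_files(["empanadas"])
print(f"{report.total_errors} style error(s)")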
@@ -47,6 +47,7 @@ class common:
         'CRB': ['aarch64', 'ppc64le', 's390x', 'x86_64'],
         'HighAvailability': ['aarch64', 'ppc64le', 's390x', 'x86_64'],
         'NFV': ['x86_64'],
+        'ResilientStorage': ['ppc64le', 's390x', 'x86_64'],
         'RT': ['x86_64'],
         'SAP': ['ppc64le', 's390x', 'x86_64'],
         'SAPHANA': ['ppc64le', 'x86_64']
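This table maps each repository to the architectures it is composed for, so adding 'ResilientStorage' here is what brings the new repo into per-arch work. A hypothetical illustration of how such a map gates iteration (names invented, not code from the repo):

repos = {
    'NFV': ['x86_64'],
    'ResilientStorage': ['ppc64le', 's390x', 'x86_64'],
}
for repo, arches in repos.items():
    for arch in arches:
        print(f"compose {repo} for {arch}")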
@@ -1,6 +1,6 @@
 # To be sourced by scripts to use

-REPO=("BaseOS" "AppStream" "CRB" "HighAvailability" "NFV" "RT" "SAP" "SAPHANA")
+REPO=("BaseOS" "AppStream" "CRB" "HighAvailability" "ResilientStorage" "NFV" "RT" "SAP" "SAPHANA")
 ARCH=("aarch64" "ppc64le" "s390x" "x86_64")

 MAJOR="10"
@@ -9,12 +9,6 @@ else
     exit 1
 fi

-if [ -f /usr/bin/dnf4 ]; then
-    SAFEDNF=/usr/bin/dnf4
-else
-    SAFEDNF=/usr/bin/dnf
-fi
-
 export RLVER=$MAJOR
 source common

@@ -26,7 +20,7 @@ eln_repo_url="${ELN_KOJI_REPO}/${tag_template}/latest"

 pushd "${tmpdir}" || { echo "Could not change directory"; exit 1; }
 for y in "${ARCH[@]}"; do
-    repodatas=( $($SAFEDNF reposync --repofrompath ${tag_template},${eln_repo_url}/${y} --download-metadata --repoid=${tag_template} -p ${tag_template}/${y} --forcearch ${y} --norepopath --remote-time --assumeyes -u | grep repodata) )
+    repodatas=( $(dnf reposync --repofrompath ${tag_template},${eln_repo_url}/${y} --download-metadata --repoid=${tag_template} -p ${tag_template}/${y} --forcearch ${y} --norepopath --remote-time --assumeyes -u | grep repodata) )
     mkdir -p "${tag_template}/${y}/repodata"
     pushd "${tag_template}/${y}/repodata" || { echo "Could not change directory"; exit 1; }
     for z in "${repodatas[@]}"; do
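Each of these sync-script hunks makes the same two changes: the dnf4 fallback block goes away and the reposync call goes back to plain dnf. For readers who want the loop's effect in one place, here is a minimal Python sketch of a single iteration (a hypothetical helper, not code from the repo; the flags are the ones in the hunk):

import subprocess

def sync_repodata(repoid: str, baseurl: str, arch: str, dest: str) -> list[str]:
    """Run `dnf reposync` and return the output lines that mention repodata,
    mirroring the `... | grep repodata` capture in the shell loop above."""
    cmd = [
        "dnf", "reposync",
        "--repofrompath", f"{repoid},{baseurl}",
        "--download-metadata", f"--repoid={repoid}",
        "-p", dest, f"--forcearch={arch}",
        "--norepopath", "--remote-time", "--assumeyes", "-u",
    ]
    out = subprocess.run(cmd, check=True, capture_output=True, text=True).stdout
    return [line for line in out.splitlines() if "repodata" in line]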
@@ -9,12 +9,6 @@ else
     exit 1
 fi

-if [ -f /usr/bin/dnf4 ]; then
-    SAFEDNF=/usr/bin/dnf4
-else
-    SAFEDNF=/usr/bin/dnf
-fi
-
 export RLVER=$MAJOR
 source common

@@ -26,7 +20,7 @@ stream_repo_url="${STREAM_KOJI_REPO}/${tag_template}/latest"

 pushd "${tmpdir}" || { echo "Could not change directory"; exit 1; }
 for y in "${ARCH[@]}"; do
-    repodatas=( $($SAFEDNF reposync --repofrompath ${tag_template},${stream_repo_url}/${y} --download-metadata --repoid=${tag_template} -p ${tag_template}/${y} --forcearch ${y} --norepopath --remote-time --assumeyes -u | grep repodata) )
+    repodatas=( $(dnf reposync --repofrompath ${tag_template},${stream_repo_url}/${y} --download-metadata --repoid=${tag_template} -p ${tag_template}/${y} --forcearch ${y} --norepopath --remote-time --assumeyes -u | grep repodata) )
     mkdir -p "${tag_template}/${y}/repodata"
     pushd "${tag_template}/${y}/repodata" || { echo "Could not change directory"; exit 1; }
     for z in "${repodatas[@]}"; do
@@ -10,12 +10,6 @@ else
     exit 1
 fi

-if [ -f /usr/bin/dnf4 ]; then
-    SAFEDNF=/usr/bin/dnf4
-else
-    SAFEDNF=/usr/bin/dnf
-fi
-
 # Verify the date format
 echo "${DATE}" | grep -Eq '[0-9]+\.[0-9]'
 grep_val=$?
@@ -9,12 +9,6 @@ else
     exit 1
 fi

-if [ -f /usr/bin/dnf4 ]; then
-    SAFEDNF=/usr/bin/dnf4
-else
-    SAFEDNF=/usr/bin/dnf
-fi
-
 export RLVER=$MAJOR
 source common

@@ -9,12 +9,6 @@ else
     exit 1
 fi

-if [ -f /usr/bin/dnf4 ]; then
-    SAFEDNF=/usr/bin/dnf4
-else
-    SAFEDNF=/usr/bin/dnf
-fi
-
 export RLVER=$MAJOR
 source common

@@ -27,7 +21,7 @@ stream_repo_url="https://kojidev.rockylinux.org/kojifiles/repos/${tag_template}/

 pushd "${tmpdir}" || { echo "Could not change directory"; exit 1; }
 for y in x86_64 aarch64 i386; do
-    repodatas=( $($SAFEDNF reposync --repofrompath ${str_template},${stream_repo_url}/${y} --download-metadata --repoid=${str_template} -p ${str_template}/${y} --forcearch ${y} --norepopath --remote-time --assumeyes -u | grep repodata) )
+    repodatas=( $(dnf reposync --repofrompath ${str_template},${stream_repo_url}/${y} --download-metadata --repoid=${str_template} -p ${str_template}/${y} --forcearch ${y} --norepopath --remote-time --assumeyes -u | grep repodata) )
     mkdir -p "${str_template}/${y}/repodata"
     pushd "${str_template}/${y}/repodata" || { echo "Could not change directory"; exit 1; }
     for z in "${repodatas[@]}"; do
@@ -10,12 +10,6 @@ else
     exit 1
 fi

-if [ -f /usr/bin/dnf4 ]; then
-    SAFEDNF=/usr/bin/dnf4
-else
-    SAFEDNF=/usr/bin/dnf
-fi
-
 # Verify the date format
 echo "${DATE}" | grep -Eq '[0-9]+\.[0-9]'
 grep_val=$?
@@ -37,7 +31,7 @@ pushd "${tmpdir}" || { echo "Could not change directory"; exit 1; }
 for x in "${REPO[@]}"; do
     echo "Working on ${x}"
     for y in "${ARCH[@]}"; do
-        repodatas=( $($SAFEDNF reposync --repofrompath ${x},${stream_compose_url}/${x}/${y}/os --download-metadata --repoid=${x} -p ${x}/${y} --forcearch ${y} --norepopath --remote-time --assumeyes -u | grep repodata) )
+        repodatas=( $(dnf reposync --repofrompath ${x},${stream_compose_url}/${x}/${y}/os --download-metadata --repoid=${x} -p ${x}/${y} --forcearch ${y} --norepopath --remote-time --assumeyes -u | grep repodata) )
         mkdir -p "${x}/${y}/repodata"
         pushd "${x}/${y}/repodata" || { echo "Could not change directory"; exit 1; }
         for z in "${repodatas[@]}"; do
@@ -10,12 +10,6 @@ else
     exit 1
 fi

-if [ -f /usr/bin/dnf4 ]; then
-    SAFEDNF=/usr/bin/dnf4
-else
-    SAFEDNF=/usr/bin/dnf
-fi
-
 # Verify the date format
 echo "${DATE}" | grep -Eq '[0-9]+\.[0-9]'
 grep_val=$?
@@ -33,17 +27,11 @@ current=$(pwd)
 tmpdir=$(mktemp -d)
 stream_compose_url="https://composes.stream.centos.org/stream-${MAJOR}/production/CentOS-Stream-${MAJOR}-${DATE}/compose"

-if [ -f /usr/bin/dnf4 ]; then
-    SAFEDNF=/usr/bin/dnf4
-else
-    SAFEDNF=/usr/bin/dnf
-fi
-
 pushd "${tmpdir}" || { echo "Could not change directory"; exit 1; }
 for x in "${REPO[@]}"; do
     echo "Working on ${x}"
     for y in "${ARCH[@]}"; do
-        repodatas=( $($SAFEDNF reposync --repofrompath ${x},${stream_compose_url}/${x}/${y}/os --download-metadata --repoid=${x} -p ${x}/${y} --forcearch ${y} --norepopath --remote-time --assumeyes -u | grep repodata) )
+        repodatas=( $(dnf reposync --repofrompath ${x},${stream_compose_url}/${x}/${y}/os --download-metadata --repoid=${x} -p ${x}/${y} --forcearch ${y} --norepopath --remote-time --assumeyes -u | grep repodata) )
         mkdir -p "${x}/${y}/repodata"
         pushd "${x}/${y}/repodata" || { echo "Could not change directory"; exit 1; }
         for z in "${repodatas[@]}"; do
@@ -9,12 +9,6 @@ else
     exit 1
 fi

-if [ -f /usr/bin/dnf4 ]; then
-    SAFEDNF=/usr/bin/dnf4
-else
-    SAFEDNF=/usr/bin/dnf
-fi
-
 export RLVER="${MAJOR}"
 source common

@@ -26,7 +20,7 @@ stream_repo_url="${STREAM_KOJI_REPO}/${tag_template}/latest"

 pushd "${tmpdir}" || { echo "Could not change directory"; exit 1; }
 for y in "${ARCH[@]}"; do
-    repodatas=( $($SAFEDNF reposync --repofrompath ${tag_template},${stream_repo_url}/${y} --download-metadata --repoid=${tag_template} -p ${tag_template}/${y} --forcearch ${y} --norepopath --remote-time --assumeyes -u | grep repodata) )
+    repodatas=( $(dnf reposync --repofrompath ${tag_template},${stream_repo_url}/${y} --download-metadata --repoid=${tag_template} -p ${tag_template}/${y} --forcearch ${y} --norepopath --remote-time --assumeyes -u | grep repodata) )
     mkdir -p "${tag_template}/${y}/repodata"
     pushd "${tag_template}/${y}/repodata" || { echo "Could not change directory"; exit 1; }
     for z in "${repodatas[@]}"; do
@@ -13,12 +13,6 @@ if [ -n "$2" ] && [[ "$2" == "lh" ]]; then
     export LH="lh"
 fi

-if [ -f /usr/bin/dnf4 ]; then
-    SAFEDNF=/usr/bin/dnf4
-else
-    SAFEDNF=/usr/bin/dnf
-fi
-
 export RLVER="${MAJOR}"
 source common

@@ -30,7 +24,7 @@ peridot_repo_url="${PERIDOT_REPO}/${PERIDOT_PROJECT_ID}/repo/${tag_template}"

 pushd "${tmpdir}" || { echo "Could not change directory"; exit 1; }
 for y in "${ARCH[@]}"; do
-    repodatas=( $($SAFEDNF reposync --repofrompath ${tag_template},${peridot_repo_url}/${y} --download-metadata --repoid=${tag_template} -p ${tag_template}/${y} --forcearch ${y} --norepopath --remote-time --assumeyes -u | grep repodata) )
+    repodatas=( $(dnf reposync --repofrompath ${tag_template},${peridot_repo_url}/${y} --download-metadata --repoid=${tag_template} -p ${tag_template}/${y} --forcearch ${y} --norepopath --remote-time --assumeyes -u | grep repodata) )
     mkdir -p "${tag_template}/${y}/repodata"
     pushd "${tag_template}/${y}/repodata" || { echo "Could not change directory"; exit 1; }
     for z in "${repodatas[@]}"; do
@@ -18,12 +18,6 @@ if [ "$grep_val" -ne 0 ]; then
     echo "Date format incorrect. You must use: YYYYMMDD.X"
 fi

-if [ -f /usr/bin/dnf4 ]; then
-    SAFEDNF=/usr/bin/dnf4
-else
-    SAFEDNF=/usr/bin/dnf
-fi
-
 export RLVER="${MAJOR}"
 source common

@@ -36,7 +30,7 @@ pushd "${tmpdir}" || { echo "Could not change directory"; exit 1; }
 for x in "${REPO[@]}"; do
     echo "Working on ${x}"
     for y in "${ARCH[@]}"; do
-        repodatas=( $($SAFEDNF reposync --repofrompath ${x},${stream_compose_url}/${x}/${y}/os --download-metadata --repoid=${x} -p ${x}/${y} --forcearch ${y} --norepopath --remote-time --assumeyes -u | grep repodata) )
+        repodatas=( $(dnf reposync --repofrompath ${x},${stream_compose_url}/${x}/${y}/os --download-metadata --repoid=${x} -p ${x}/${y} --forcearch ${y} --norepopath --remote-time --assumeyes -u | grep repodata) )
         mkdir -p "${x}/${y}/repodata"
         pushd "${x}/${y}/repodata" || { echo "Could not change directory"; exit 1; }
         for z in "${repodatas[@]}"; do
@@ -18,12 +18,6 @@ if [ "$grep_val" -ne 0 ]; then
     echo "Date format incorrect. You must use: YYYYMMDD.X"
 fi

-if [ -f /usr/bin/dnf4 ]; then
-    SAFEDNF=/usr/bin/dnf4
-else
-    SAFEDNF=/usr/bin/dnf
-fi
-
 export RLVER="${MAJOR}"
 source common

@@ -37,7 +31,7 @@ pushd "${tmpdir}" || { echo "Could not change directory"; exit 1; }
 for x in "${REPO[@]}"; do
     echo "Working on ${x}"
     for y in "${ARCH[@]}"; do
-        repodatas=( $($SAFEDNF reposync --repofrompath ${x},${stream_compose_url}/${x}/${y}/os --download-metadata --repoid=${x} -p ${x}/${y} --forcearch ${y} --norepopath --remote-time --assumeyes -u | grep repodata) )
+        repodatas=( $(dnf reposync --repofrompath ${x},${stream_compose_url}/${x}/${y}/os --download-metadata --repoid=${x} -p ${x}/${y} --forcearch ${y} --norepopath --remote-time --assumeyes -u | grep repodata) )
         mkdir -p "${x}/${y}/repodata"
         pushd "${x}/${y}/repodata" || { echo "Could not change directory"; exit 1; }
         for z in "${repodatas[@]}"; do
@@ -12,8 +12,6 @@ IGNORES = [
     'insights-client',
     'lorax-templates-rhel',
     'shim',
-    'shim-unsigned-x64',
-    'shim-unsigned-aarch64',
     'redhat-cloud-client-configuration',
     'rhc',
     'rhc-worker-playbook',
@@ -20,9 +20,6 @@ REPOS = switcher.rlver(results.version,
 # Source packages we do not ship or are rocky branded
 IGNORES = [
     'insights-client',
-    'shim',
-    'shim-unsigned-x64',
-    'shim-unsigned-aarch64',
     'redhat-cloud-client-configuration',
     'rhc',
     'rhc-worker-playbook',
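Both IGNORES hunks adjust the same denylist of source packages that are never shipped (or are Rocky-branded). Reduced to a sketch, the list acts as a simple membership filter; the candidate package names below are invented for illustration:

IGNORES = [
    'insights-client',
    'shim',
]
candidates = ['bash', 'shim', 'insights-client', 'coreutils']
shipped = [pkg for pkg in candidates if pkg not in IGNORES]
print(shipped)  # ['bash', 'coreutils']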
@@ -304,7 +304,7 @@ class IPAAudit:
         }

         print('User Information')
-        print('------------------------------------------')
+        print('----------------------------------------')
         for key, value in starter_user.items():
             if len(value) > 0:
                 print(f'{key: <16}{value}')
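The separator width here is cosmetic, but it tracks the `{key: <16}` format spec used below it, which left-aligns each key in a 16-column field so the values line up. For example:

for key, value in {'User name': 'jdoe', 'UID': '5001'}.items():
    print(f'{key: <16}{value}')
# User name       jdoe
# UID             5001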
@@ -312,54 +312,14 @@ class IPAAudit:

         if deep:
             group_list = [] if not user_results.get('memberof_group', None) else user_results['memberof_group']
-            hbac_list = [] if not user_results.get('memberof_hbacrule', None) else user_results['memberof_hbacrule']
-            IPAAudit.user_deep_list(api, name, group_list, hbac_list)
+            IPAAudit.user_deep_list(api, name, group_list)

     @staticmethod
     def group_pull(api, name, deep):
         """
         Gets requested rbac info
         """
-        try:
-            group_results = IPAQuery.group_data(api, name)
-        except:
-            print(f'Could not find {name}', sys.stderr)
-            sys.exit(1)
-
-        group_name = '' if not group_results.get('cn', None) else group_results['cn'][0]
-        group_gidnum = '' if not group_results.get('gidnumber', None) else group_results['gidnumber'][0]
-        group_members_direct = [] if not group_results.get('member_user', None) else group_results['member_user']
-        group_members_indirect = [] if not group_results.get('memberindirect_user', None) else group_results['memberindirect_user']
-        group_members = list(group_members_direct) + list(group_members_indirect)
-        num_of_group_members = str(len(group_members))
-
-        group_hbacs_direct = [] if not group_results.get('memberof_hbacrule', None) else group_results['memberof_hbacrule']
-        group_hbacs_indirect = [] if not group_results.get('memberofindirect_hbacrule', None) else group_results['memberofindirect_hbacrule']
-        group_hbacs = list(group_hbacs_direct) + list(group_hbacs_indirect)
-        num_of_hbacs = str(len(group_hbacs))
-
-        group_sudo_direct = [] if not group_results.get('memberof_sudorule', None) else group_results['memberof_sudorule']
-        group_sudo_indirect = [] if not group_results.get('memberofindirect_sudorule', None) else group_results['memberofindirect_sudorule']
-        group_sudos = list(group_sudo_direct) + list(group_sudo_indirect)
-        num_of_sudos = str(len(group_sudos))
-
-        starter_group = {
-            'Group name': group_name,
-            'GID': group_gidnum,
-            'Number of Users': num_of_group_members,
-            'Number of HBAC Rules': num_of_hbacs,
-            'Number of SUDO Rules': num_of_sudos,
-        }
-
-        print('Group Information')
-        print('------------------------------------------')
-        for key, value in starter_group.items():
-            if len(value) > 0:
-                print(f'{key: <24}{value}')
-        print('')
-
-        if deep:
-            IPAAudit.group_deep_list(api, name, group_members, group_hbacs, group_sudos)
+        print()

     @staticmethod
     def hbac_pull(api, name, deep):
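The group_pull body being dropped in this hunk builds its summary with one recurring pattern: fetch direct and indirect membership separately, merge them, then count. Reduced to a sketch with invented data:

results = {'member_user': ['alice'], 'memberindirect_user': ['bob', 'carol']}
direct = results.get('member_user') or []
indirect = results.get('memberindirect_user') or []
members = list(direct) + list(indirect)  # merged view; duplicates are possible
print('Number of Users:', len(members))  # 3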
@@ -503,13 +463,14 @@ class IPAAudit:
                 print(f'{key: <24}{value}')

     @staticmethod
-    def user_deep_list(api, user, groups, hbacs):
+    def user_deep_list(api, user, groups):
         """
         Does a recursive dig on a user
         """
-        hbac_rule_list = list(hbacs)
+        hbac_rule_list = []
         hbac_rule_all_hosts = []
         host_list = []
+        hostgroup_list = []
         for group in groups:
             group_results = IPAQuery.group_data(api, group)
             hbac_list = [] if not group_results.get('memberof_hbacrule', None) else group_results['memberof_hbacrule']
@@ -520,13 +481,12 @@ class IPAAudit:
             # TODO: Add HBAC list (including services)
             # TODO: Add RBAC list

-        hbac_host_dict = {}
-        for hbac in hbac_rule_list:
-            hbac_hosts = []
+        hbac_hosts = []
+        for hbac in hbac_rule_list:
             hbac_results = IPAQuery.hbac_data(api, hbac)
             hbac_host_list = [] if not hbac_results.get('memberhost_host', None) else hbac_results['memberhost_host']
             hbac_hostgroup_list = [] if not hbac_results.get('memberhost_hostgroup', None) else hbac_results['memberhost_hostgroup']
-            if hbac_results.get('hostcategory'):
+            if hbac_results.get('servicecategory'):
                 hbac_rule_all_hosts.append(hbac)

             for host in hbac_host_list:
@@ -537,29 +497,19 @@ class IPAAudit:
                 host_list = [] if not hostgroup_data.get('member_host', None) else hostgroup_data['member_host']
                 hbac_hosts.extend(host_list)

-            hbac_host_dict[hbac] = hbac_hosts
+        new_hbac_hosts = sorted(set(hbac_hosts))

-        #new_hbac_hosts = sorted(set(hbac_hosts))
         print('User Has Access To These Hosts')
         print('------------------------------------------')
+        for hhost in new_hbac_hosts:
+            print(hhost)
         if len(hbac_rule_all_hosts) > 0:
             print('!! Notice: User has access to ALL hosts from the following rules:')
             hbac_rule_all_hosts = sorted(set(hbac_rule_all_hosts))
             for allrule in hbac_rule_all_hosts:
                 print(allrule)
-        else:
-            for hrule in hbac_host_dict:
-                print()
-                print(f'HBAC Rule: {hrule}')
-                print('==========================================')
-                for h in hbac_host_dict[hrule]:
-                    print(h)
-
-                if len(hbac_host_dict[hrule]) == 0:
-                    print('(No hosts set for this rule)')

     @staticmethod
-    def group_deep_list(api, group, members, hbacs, sudos):
+    def group_deep_list(api, group):
         """
         Does a recursive dig on a group
         """
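The two variants above differ in how hosts are reported: one side flattens every rule's hosts into a single sorted list, while the other keeps a dict keyed by rule and prints each rule's hosts under its own heading. A reduced sketch of the dict-based report (the rule names and hosts are invented):

hbac_host_dict = {
    'allow_ssh': ['host1.example.com', 'host2.example.com'],
    'allow_web': [],
}
for hrule, hosts in hbac_host_dict.items():
    print(f'HBAC Rule: {hrule}')
    for h in hosts:
        print(h)
    if not hosts:
        print('(No hosts set for this rule)')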
@@ -3,19 +3,19 @@
 # Revision must always start with a major number
 case "${RLREL}" in
   stable)
-    REVISION=9.5
-    PREREV=9.4
+    REVISION=9.4
+    PREREV=9.3
     APPEND_TO_DIR="-RC1"
     ;;
   beta)
-    REVISION=9.6
-    PREREV=9.5
+    REVISION=9.5
+    PREREV=9.4
     APPEND_TO_DIR="-beta"
     COMPOSE_APPEND="-beta"
     ;;
   lh)
-    REVISION=9.6
-    PREREV=9.5
+    REVISION=9.5
+    PREREV=9.4
     APPEND_TO_DIR="-lookahead"
     COMPOSE_APPEND="-lookahead"
     ;;