Containerize and refactor image building

* Add a new Fedora container to run imagefactory
* Make the Architecture class an attrs class and make it raise exceptions
  (a brief usage sketch follows below)
* Change the build-image script to primarily invoke imagefactory directly.
  A second wrapper will be added to support running in Kubernetes if needed.
Neil Hanlon 2022-07-03 22:27:08 -04:00
parent 957bf5ef3f
commit 361c155481
Signed by: neil
GPG Key ID: 705BC21EC3C70F34
7 changed files with 441 additions and 138 deletions
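
Illustrative usage of the refactored helpers described above (a sketch only; it assumes rldict carries an entry for major version 9 whose allowed_arches includes x86_64, and the names match those exported by empanadas.common in the diff below):

    # Sketch: exercise the attrs-based Architecture class and the validator,
    # which now raises exceptions instead of returning (ok, message) tuples.
    from empanadas.common import Architecture, valid_type_variant

    valid_type_variant("Container", variant="Base")      # returns True
    try:
        valid_type_variant("Azure", variant="Minimal")   # Azure takes no variant
    except Exception as err:
        print(err)  # "Azure Type expects no variant type."

    arch = Architecture.New("x86_64", 9)  # prints and exits if the arch is not allowed for 9
    print(arch.name, arch.version)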

@@ -0,0 +1,68 @@
FROM docker.io/fedora:36
ADD images/get_arch /get_arch
ENV TINI_VERSION v0.19.0
RUN curl -o /tini -L "https://github.com/krallin/tini/releases/download/${TINI_VERSION}/tini-$(/get_arch)"
RUN chmod +x /tini
RUN dnf install -y \
bash \
bzip2 \
cpio \
diffutils \
findutils \
gawk \
gcc \
gcc-c++ \
git \
grep \
gzip \
info \
make \
patch \
python3 \
redhat-rpm-config \
rpm-build \
scl-utils-build \
sed \
shadow-utils \
tar \
unzip \
util-linux \
which \
xz \
dnf-plugins-core \
createrepo_c \
rpm-sign \
sudo \
mock \
python-pip \
imagefactory \
imagefactory-plugins*
RUN sed -i -e 's/# memory = 1024/memory = 2048/' /etc/oz/oz.cfg
RUN ssh-keygen -t rsa -q -f "$HOME/.ssh/id_rsa" -N ""
RUN dnf clean all
RUN rm -rf /etc/yum.repos.d/*.repo /get_arch
# RUN useradd -o -d /var/peridot -u 1002 peridotbuilder && usermod -a -G mock peridotbuilder
# RUN chown -R peridotbuilder:mock /etc/dnf && chown -R peridotbuilder:mock /etc/rpm && chown -R peridotbuilder:mock /etc/yum.repos.d && chown -R peridotbuilder:mock /var/lib/imagefactory/storage
RUN pip install awscli
ENV BRANCH r9
RUN git clone https://git.rockylinux.org/rocky/kickstarts.git --branch $BRANCH /kickstarts
RUN cp /kickstarts/Rocky-9-Container.ks /kickstarts/Rocky-9-Container-Base.ks
RUN sed -i "s/\$basearch/$(uname -m)/" /kickstarts/Rocky-9-Container-Base.ks
# devel only
COPY . /empanadas
RUN pip install -e /empanadas
# prod
#RUN pip install 'git+https://git.rockylinux.org/release-engineering/public/toolkit.git@devel#egg=empanadas&subdirectory=iso/empanadas'
ENV LIBGUESTFS_BACKEND direct
ENTRYPOINT ["/tini", "--"]

@@ -97,24 +97,35 @@ for conf in glob.iglob(f"{_rootdir}/sig/*.yaml"):
#COMPOSE_ISO_WORKDIR = COMPOSE_ROOT + "work/" + arch + "/" + date_stamp
def valid_type_variant(_type: str, variant: str="") -> Tuple[bool, str]:
ALLOWED_TYPE_VARIANTS = {
"Container": ["Base", "Minimal"],
"GenericCloud": [],
}
ALLOWED_TYPE_VARIANTS = {
"Azure": None,
"Container": ["Base", "Minimal"],
"EC2": None,
"GenericCloud": None,
"Vagrant": ["Libvirt", "VBox"]
}
def valid_type_variant(_type: str, variant: str="") -> bool:
if _type not in ALLOWED_TYPE_VARIANTS:
return False, f"Type is invalid: ({_type}, {variant})"
elif variant not in ALLOWED_TYPE_VARIANTS[_type]:
raise Exception(f"Type is invalid: ({_type}, {variant})")
if ALLOWED_TYPE_VARIANTS[_type] == None:
if variant is not None:
raise Exception(f"{_type} Type expects no variant type.")
return True
if variant not in ALLOWED_TYPE_VARIANTS[_type]:
if variant.capitalize() in ALLOWED_TYPE_VARIANTS[_type]:
return False, f"Capitalization mismatch. Found: ({_type}, {variant}). Expected: ({_type}, {variant.capitalize()})"
return False, f"Type/Variant Combination is not allowed: ({_type}, {variant})"
return True, ""
raise Exception(f"Capitalization mismatch. Found: ({_type}, {variant}). Expected: ({_type}, {variant.capitalize()})")
raise Exception(f"Type/Variant Combination is not allowed: ({_type}, {variant})")
return True
class Architecture(str):
@staticmethod
def New(architecture: str, version: int):
from attrs import define, field
@define
class Architecture:
name: str = field()
version: str = field()
@classmethod
def New(cls, architecture: str, version: int):
if architecture not in rldict[version]["allowed_arches"]:
print("Invalid architecture/version combo, skipping")
exit()
return Architecture(architecture)
return cls(architecture, version)

@@ -3,12 +3,21 @@
import argparse
import datetime
import json
import logging
import subprocess
import sys
import time
import os
import tempfile
import pathlib
import platform
from botocore import args
from attrs import define, Factory, field, asdict
from jinja2 import Environment, FileSystemLoader, Template
from typing import List, Tuple
from typing import Callable, List, NoReturn, Optional, Tuple, IO, Union
from empanadas.common import Architecture, rldict, valid_type_variant
from empanadas.common import _rootdir
@@ -22,123 +31,337 @@ parser.add_argument('--debug', action='store_true', help="debug?")
parser.add_argument('--type', type=str, help="Image type (container, genclo, azure, aws, vagrant)", required=True)
parser.add_argument('--variant', type=str, help="", required=False)
parser.add_argument('--release', type=str, help="Image release for subsequent builds with the same date stamp (rarely needed)", required=False)
parser.add_argument('--kube', action='store_true', help="output as a K8s job(s)", required=False)
results = parser.parse_args()
rlvars = rldict[results.version]
major = rlvars["major"]
debug = results.debug
log = logging.getLogger(__name__)
log.setLevel(logging.INFO if not debug else logging.DEBUG)
handler = logging.StreamHandler(sys.stdout)
handler.setLevel(logging.INFO if not debug else logging.DEBUG)
formatter = logging.Formatter(
'%(asctime)s :: %(name)s :: %(message)s',
'%Y-%m-%d %H:%M:%S'
)
handler.setFormatter(formatter)
log.addHandler(handler)
STORAGE_DIR = pathlib.Path("/var/lib/imagefactory/storage")
KICKSTART_PATH = pathlib.Path(os.environ.get("KICKSTART_PATH", "/kickstarts"))
BUILDTIME = datetime.datetime.utcnow()
@define(kw_only=True)
class ImageBuild:
architecture: Architecture = field()
base_uuid: Optional[str] = field(default="")
command_args: List[str] = field(factory=list)
common_args: List[str] = field(factory=list)
debug: bool = field(default=False)
image_type: str = field()
job_template: Optional[Template] = field(init=False)
kickstart_arg: List[str] = field(factory=list)
out_type: str = field(init=False)
outdir: pathlib.Path = field(init=False)
outname: str = field(init=False)
package_args: List[str] = field(factory=list)
target_uuid: Optional[str] = field(default="")
tdl_path: pathlib.Path = field(init=False)
template: Template = field()
type_variant: str = field(init=False)
stage_commands: Optional[List[List[Union[str,Callable]]]] = field(init=False)
variant: Optional[str] = field()
revision: Optional[int] = field()
metadata: pathlib.Path = field(init=False)
fedora_release: int = field()
def render_icicle_template(template: Template, architecture: Architecture) -> str:
handle, output = tempfile.mkstemp()
if not handle:
exit(3)
with os.fdopen(handle, "wb") as tmp:
_template = template.render(
architecture=architecture,
fedora_version=rlvars["fedora_release"],
iso8601date=BUILDTIME.strftime("%Y%m%d"),
installdir="kickstart" if results.kickstartdir else "os",
major=major,
release=results.release if results.release else 0,
size="10G",
type=results.type.capitalize(),
utcnow=BUILDTIME,
version_variant=rlvars["revision"] if not results.variant else f"{rlvars['revision']}-{results.variant.capitalize()}",
)
tmp.write(_template.encode())
return output
def generate_kickstart_imagefactory_args(debug: bool = False) -> str:
type_variant = results.type if not results.variant else f"{results.type}-{results.variant}" # todo -cleanup
kickstart_path = pathlib.Path(f"{KICKSTART_PATH}/Rocky-{major}-{type_variant}.ks")
if not kickstart_path.is_file():
print(f"Kickstart file is not available: {kickstart_path}")
if not debug:
def __attrs_post_init__(self):
self.tdl_path = self.render_icicle_template()
if not self.tdl_path:
exit(2)
self.type_variant = self.type_variant_name()
self.outname = self.output_name()
self.outdir = pathlib.Path(f"/tmp/{self.outname}")
self.out_type = self.image_format()
self.command_args = self._command_args()
self.package_args = self._package_args()
self.common_args = self._common_args()
self.kickstart_arg = self.kickstart_imagefactory_args()
return f"--file-parameter install_script {kickstart_path}"
self.metadata = pathlib.Path(self.outdir, "metadata.json")
def get_image_format(_type: str) -> str:
mapping = {
"Container": "docker"
}
return mapping[_type] if _type in mapping.keys() else ''
def generate_imagefactory_commands(tdl_template: Template, architecture: Architecture) -> List[List[str]]:
template_path = render_icicle_template(tdl_template, architecture)
if not template_path:
exit(2)
args_mapping = {
"debug": "--debug"
}
# only supports boolean flags right now?
args = [param for name, param in args_mapping.items() if getattr(results,name)]
package_args = []
kickstart_arg = generate_kickstart_imagefactory_args(True) # REMOVE DEBUG ARG
if results.type == "Container":
args += ["--parameter", "offline_icicle", "true"]
package_args += ["--parameter", "compress", "xz"]
tar_command = ["tar", "-Oxf", f"{STORAGE_DIR}/*.body" "./layer.tar"]
type_variant = results.type if not results.variant else f"{results.type}-{results.variant}" # todo -cleanup
outname = f"Rocky-{rlvars['major']}-{type_variant}.{BUILDTIME.strftime('%Y%m%d')}.{results.release if results.release else 0}.{architecture}"
outdir = pathlib.Path(f"/tmp/{outname}")
build_command = (f"imagefactory base_image {kickstart_arg} {' '.join(args)} {template_path}"
f" | tee -a {outdir}/logs/base_image-{outname}.out"
f" | tail -n4 > {outdir}/base.meta || exit 2"
)
out_type = get_image_format(results.type)
package_command = ["imagefactory", "target_image", *args, template_path,
"--id", "$(awk '$1==\"UUID:\"{print $NF}'"+f" /tmp/{outname}/base.meta)",
*package_args,
"--parameter", "repository", outname, out_type,
"|", "tee", "-a", f"{outdir}/base_image-{outname}.out",
"|", "tail", "-n4", ">", f"{outdir}/target.meta", "||", "exit", "3"
if self.image_type == "Container":
self.stage_commands = [
["tar", "-C", f"{self.outdir}", "--strip-components=1", "-x", "-f", lambda: f"{STORAGE_DIR}/{self.target_uuid}.body", "*/layer.tar"]
]
if self.image_type == "GenericCloud":
self.stage_commands = [
["qemu-img", "convert", "-f", "raw", "-O", "qcow2", lambda: f"{STORAGE_DIR}/{self.target_uuid}.body", f"{self.outdir}/{self.outname}.qcow2"]
]
copy_command = (f"aws s3 cp --recursive {outdir}/* s3://resf-empanadas/buildimage-{ outname }/{ BUILDTIME.strftime('%s') }/")
commands = [build_command, package_command, copy_command]
return commands
try:
os.mkdir(self.outdir)
except FileExistsError as e:
log.info("Directory already exists for this release. If possible, previously executed steps may be skipped")
except Exception as e:
log.exception("Some other exception occured while creating the output directory", e)
return 0
if os.path.exists(self.metadata):
with open(self.metadata, "r") as f:
try:
o = json.load(f)
self.base_uuid = o['base_uuid']
self.target_uuid = o['target_uuid']
except json.decoder.JSONDecodeError as e:
log.exception("Couldn't decode metadata file", e)
def output_name(self):
return f"Rocky-{self.architecture.version}-{self.type_variant}.{BUILDTIME.strftime('%Y%m%d')}.{results.release if results.release else 0}.{self.architecture.name}"
def type_variant_name(self):
return self.image_type if not self.variant else f"{self.image_type}-{self.variant.capitalize()}"
def _command_args(self):
args_mapping = {
"debug": "--debug"
}
return [param for name, param in args_mapping.items() if getattr(results,name)]
def _package_args(self) -> List[str]:
if results.type == "Container":
return ["--parameter", "compress", "xz"]
return [""]
def _common_args(self) -> List[str]:
args = []
if self.image_type == "Container":
args = ["--parameter", "offline_icicle", "true"]
if self.image_type == "GenericCloud":
args = ["--parameter", "generate_icicle", "false"]
return args
def image_format(self) -> str:
mapping = {
"Container": "docker"
}
return mapping[self.image_type] if self.image_type in mapping.keys() else ''
def kickstart_imagefactory_args(self) -> List[str]:
kickstart_path = pathlib.Path(f"{KICKSTART_PATH}/Rocky-{self.architecture.version}-{self.type_variant}.ks")
if not kickstart_path.is_file():
log.warn(f"Kickstart file is not available: {kickstart_path}")
if not debug:
log.warn("Exiting because debug mode is not enabled.")
exit(2)
return ["--file-parameter", "install_script", str(kickstart_path)]
def render_icicle_template(self) -> pathlib.Path:
handle, output = tempfile.mkstemp()
if not handle:
exit(3)
with os.fdopen(handle, "wb") as tmp:
_template = self.template.render(
architecture=self.architecture.name,
fedora_version=self.fedora_release,
iso8601date=BUILDTIME.strftime("%Y%m%d"),
installdir="kickstart" if results.kickstartdir else "os",
major=self.architecture.version,
release=results.release if results.release else 0,
size="10G",
type=self.image_type,
utcnow=BUILDTIME,
version_variant=self.revision if not self.variant else f"{self.revision}-{self.variant}",
)
tmp.write(_template.encode())
tmp.flush()
return pathlib.Path(output)
def build_command(self) -> List[str]:
build_command = ["imagefactory", *self.command_args, "base_image", *self.common_args, *self.kickstart_arg, self.tdl_path
# "|", "tee", "-a", f"{outdir}/logs/base_image-{outname}.out",
# "|", "tail", "-n4", ">", f"{outdir}/base.meta", "||", "exit", "2"
]
return build_command
def package_command(self) -> List[str]:
package_command = ["imagefactory", *self.command_args, "target_image", self.out_type, *self.common_args,
"--id", f"{self.base_uuid}",
*self.package_args,
"--parameter", "repository", self.outname,
# "|", "tee", "-a", f"{outdir}/base_image-{outname}.out",
# "|", "tail", "-n4", ">", f"{outdir}/target.meta", "||", "exit", "3"
]
return package_command
def copy_command(self) -> List[str]:
copy_command = ["aws", "s3", "cp", "--recursive", f"{self.outdir}/", f"s3://resf-empanadas/buildimage-{ self.outname }/{ BUILDTIME.strftime('%s') }/"]
return copy_command
def build(self) -> int:
if self.base_uuid:
return 0
ret, out, err, uuid = self.runCmd(self.build_command())
if ret > 0:
#error in build command
log.error("Problem during build.")
if not uuid:
log.error("Build UUID not found in stdout. Dumping stdout and stderr")
self.log_subprocess(ret, out, err)
return ret
self.base_uuid = uuid.rstrip()
self.save()
return ret
def package(self) -> int:
# Some build types don't need to be packaged by imagefactory
if self.image_type == "GenericCloud":
self.target_uuid = self.base_uuid if hasattr(self, 'base_uuid') else ""
if self.target_uuid:
return 0
ret, out, err, uuid = self.runCmd(self.package_command())
if ret > 0:
log.error("Problem during packaging")
if not uuid:
log.error("Target Image UUID not found in stdout. Dumping stdout and stderr")
self.log_subprocess(ret, out, err)
return ret
self.target_uuid = uuid.rstrip()
self.save()
return ret
def stage(self) -> int:
""" Stage the artifacst from wherever they are (unpacking and converting if needed)"""
if not self.stage_commands:
return 0
returns = []
for command in self.stage_commands:
ret, out, err, _ = self.runCmd(command, search=False)
if ret > 0:
log.error("Problem during unpack.")
self.log_subprocess(ret, out, err)
returns.append(ret)
return all(ret > 0 for ret in returns)
def copy(self) -> int:
# move or unpack if necessary
if (stage := self.stage() > 0):
raise Exception(stage)
ret, out, err, _ = self.runCmd(self.copy_command(), search=False)
if ret > 0:
#error in copy command
log.error("Problem during copy.")
return ret
def runCmd(self, command: List[Union[str, Callable]], search: bool = True) -> Tuple[int, Union[IO[bytes],None], Union[IO[bytes],None], Union[str,None]]:
prepared, _ = self.prepare_command(command)
log.info(f"Running command: {' '.join(prepared)}")
kwargs = {
"stderr": subprocess.PIPE,
"stdout": subprocess.PIPE
}
if debug: del kwargs["stderr"]
with subprocess.Popen(prepared, **kwargs) as p:
uuid = None
if search:
for _, line in enumerate(p.stdout): # type: ignore
ln = line.decode()
if ln.startswith("UUID: "):
uuid = ln.split(" ")[-1]
log.debug(f"found uuid: {uuid}")
return p.wait(), p.stdout, p.stdin, uuid
def prepare_command(self, command_list: List[Union[str, Callable]]) -> Tuple[List[str],List[None]]:
"""
Elements of a command may be callables (lambdas) which are evaluated at
preparation time with the locals available then. This can be used to, among
other things, lazily evaluate f-strings whose values are not available at
assignment time, e.g. filling in a second command with a value extracted
from a previous step or command.
"""
r = []
return r, [r.append(c()) if (callable(c) and c.__name__ == '<lambda>') else r.append(str(c)) for c in command_list]
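# Illustrative example, not part of this change: a deferred value inside a staged
# command is written as a lambda and is resolved only when prepare_command runs,
# i.e. after package() has populated self.target_uuid:
#
#   cmd = ["qemu-img", "info", lambda: f"{STORAGE_DIR}/{self.target_uuid}.body"]
#   prepared, _ = self.prepare_command(cmd)
#   # prepared == ["qemu-img", "info", "/var/lib/imagefactory/storage/<target_uuid>.body"]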
def log_subprocess(self, return_code: int, stdout: Union[IO[bytes], None], stderr: Union[IO[bytes], None]):
def log_lines(title, lines):
log.info(f"====={title}=====")
for _, line in lines:
log.info(line.decode())
log.info(f"Command return code: {return_code}")
log_lines("Command STDOUT", enumerate(stdout)) # type: ignore
log_lines("Command STDERR", enumerate(stderr)) # type: ignore
def render_kubernetes_job(self):
commands = [self.build_command(), self.package_command(), self.copy_command()]
if not self.job_template:
return None
template = self.job_template.render(
architecture=self.architecture.name,
backoffLimit=4,
buildTime=BUILDTIME.strftime("%s"),
command=commands,
imageName="ghcr.io/rockylinux/sig-core-toolkit:latest",
jobname="buildimage",
namespace="empanadas",
major=major,
restartPolicy="Never",
)
return template
def save(self):
with open(pathlib.Path(self.outdir, "metadata.json"), "w") as f:
o = { name: getattr(self, name) for name in ["base_uuid", "target_uuid"] }
log.debug(o)
json.dump(o, f)
def run():
result, error = valid_type_variant(results.type, results.variant)
if not result:
print(error)
try:
valid_type_variant(results.type, results.variant)
except Exception as e:
log.exception(e)
exit(2)
file_loader = FileSystemLoader(f"{_rootdir}/templates")
tmplenv = Environment(loader=file_loader)
tdl_template = tmplenv.get_template('icicle/tdl.xml.tmpl')
job_template = tmplenv.get_template('kube/Job.tmpl')
for architecture in rlvars["allowed_arches"]:
architecture = Architecture.New(architecture, major)
arches = rlvars['allowed_arches'] if results.kube else [platform.uname().machine]
commands = generate_imagefactory_commands(tdl_template, architecture)
for architecture in arches:
IB = ImageBuild(
image_type=results.type,
variant=results.variant,
architecture=Architecture.New(architecture, major),
template=tdl_template,
revision=rlvars['revision'],
fedora_release=rlvars['fedora_release'],
debug=True
)
if results.kube:
IB.job_template = tmplenv.get_template('kube/Job.tmpl')
#commands = IB.kube_commands()
print(IB.render_kubernetes_job())
else:
ret = IB.build()
ret = IB.package()
ret = IB.copy()
print(job_template.render(
architecture=architecture,
backoffLimit=4,
buildTime=datetime.datetime.utcnow().strftime("%s"),
command=commands,
imageName="ghcr.io/rockylinux/sig-core-toolkit:latest",
jobname="buildimage",
namespace="empanadas",
major=major,
restartPolicy="Never",
))

@@ -5,7 +5,7 @@
<version>{{fedora_version}}</version>
<arch>{{architecture}}</arch>
<install type='url'>
<url>https://dl.rockylinux.org/stg/rocky/{{major}}/BaseOS/{{architecture}}/{{installdir}}/</url>
<url>https://dl.rockylinux.org/stg/rocky/{{major}}/BaseOS/{{architecture}}/{{installdir}}</url>
</install>
<icicle>
<extra_command>rpm -qa --qf '%{NAME},%{VERSION},%{RELEASE},%{ARCH},%{EPOCH},%{SIZE},%{SIGMD5},%{BUILDTIME}

@@ -10,7 +10,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
name = "attrs"
version = "21.4.0"
description = "Classes Without Boilerplate"
category = "dev"
category = "main"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
@@ -22,14 +22,14 @@ tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>
[[package]]
name = "boto3"
version = "1.24.14"
version = "1.24.22"
description = "The AWS SDK for Python"
category = "main"
optional = false
python-versions = ">= 3.7"
[package.dependencies]
botocore = ">=1.27.14,<1.28.0"
botocore = ">=1.27.22,<1.28.0"
jmespath = ">=0.7.1,<2.0.0"
s3transfer = ">=0.6.0,<0.7.0"
@@ -38,7 +38,7 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"]
[[package]]
name = "botocore"
version = "1.27.14"
version = "1.27.22"
description = "Low-level, data-driven core of boto 3."
category = "main"
optional = false
@@ -62,11 +62,11 @@ python-versions = ">=3.6"
[[package]]
name = "charset-normalizer"
version = "2.0.12"
version = "2.1.0"
description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
category = "main"
optional = false
python-versions = ">=3.5.0"
python-versions = ">=3.6.0"
[package.extras]
unicode_backport = ["unicodedata2"]
@@ -89,7 +89,7 @@ python-versions = ">=3.5"
[[package]]
name = "importlib-metadata"
version = "4.11.4"
version = "4.12.0"
description = "Read metadata from Python packages"
category = "dev"
optional = false
@@ -102,7 +102,7 @@ zipp = ">=0.5"
[package.extras]
docs = ["sphinx", "jaraco.packaging (>=9)", "rst.linker (>=1.9)"]
perf = ["ipython"]
testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "packaging", "pyfakefs", "flufl.flake8", "pytest-perf (>=0.9.2)", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)", "importlib-resources (>=1.3)"]
testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.3)", "packaging", "pyfakefs", "flufl.flake8", "pytest-perf (>=0.9.2)", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)", "importlib-resources (>=1.3)"]
[[package]]
name = "importlib-resources"
@@ -143,7 +143,7 @@ python-versions = ">=3.7"
[[package]]
name = "kobo"
version = "0.24.1"
version = "0.24.2"
description = "A pile of python modules used by Red Hat release engineering to build their tools"
category = "main"
optional = false
@@ -267,7 +267,7 @@ python-versions = ">=3.6"
[[package]]
name = "requests"
version = "2.28.0"
version = "2.28.1"
description = "Python HTTP for Humans."
category = "main"
optional = false
@@ -275,13 +275,13 @@ python-versions = ">=3.7, <4"
[package.dependencies]
certifi = ">=2017.4.17"
charset-normalizer = ">=2.0.0,<2.1.0"
charset-normalizer = ">=2,<3"
idna = ">=2.5,<4"
urllib3 = ">=1.21.1,<1.27"
[package.extras]
socks = ["PySocks (>=1.5.6,!=1.5.7)"]
use_chardet_on_py3 = ["chardet (>=3.0.2,<5)"]
use_chardet_on_py3 = ["chardet (>=3.0.2,<6)"]
[[package]]
name = "rpm-py-installer"
@@ -315,7 +315,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
[[package]]
name = "typing-extensions"
version = "4.2.0"
version = "4.3.0"
description = "Backported and Experimental Type Hints for Python 3.7+"
category = "dev"
optional = false
@@ -365,7 +365,7 @@ testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-
[metadata]
lock-version = "1.1"
python-versions = ">=3.7,<4"
content-hash = "ccd47ad1b0819968dbad34b68c3f9afd98bd657ee639f9037731fd2a0746bd16"
content-hash = "42676fd0ceb350c8cd90246dc688cfcd404e14d22229052d0527fe342c135b95"
[metadata.files]
atomicwrites = [
@@ -377,20 +377,20 @@ attrs = [
{file = "attrs-21.4.0.tar.gz", hash = "sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd"},
]
boto3 = [
{file = "boto3-1.24.14-py3-none-any.whl", hash = "sha256:490f5e88f5551b33ae3019a37412158b76426d63d1fb910968ade9b6a024e5fe"},
{file = "boto3-1.24.14.tar.gz", hash = "sha256:e284705da36faa668c715ae1f74ebbff4320dbfbe3a733df3a8ab076d1ed1226"},
{file = "boto3-1.24.22-py3-none-any.whl", hash = "sha256:c9a9f893561f64f5b81de197714ac4951251a328672a8dba28ad4c4a589c3adf"},
{file = "boto3-1.24.22.tar.gz", hash = "sha256:67d404c643091d4aa37fc485193289ad859f1f65f94d0fa544e13bdd1d4187c1"},
]
botocore = [
{file = "botocore-1.27.14-py3-none-any.whl", hash = "sha256:df1e9b208ff93daac7c645b0b04fb6dccd7f20262eae24d87941727025cbeece"},
{file = "botocore-1.27.14.tar.gz", hash = "sha256:bb56fa77b8fa1ec367c2e16dee62d60000451aac5140dcce3ebddc167fd5c593"},
{file = "botocore-1.27.22-py3-none-any.whl", hash = "sha256:7145d9b7cae87999a9f074de700d02a1b3222ee7d1863aa631ff56c5fc868035"},
{file = "botocore-1.27.22.tar.gz", hash = "sha256:f57cb33446deef92e552b0be0e430d475c73cf64bc9e46cdb4783cdfe39cb6bb"},
]
certifi = [
{file = "certifi-2022.6.15-py3-none-any.whl", hash = "sha256:fe86415d55e84719d75f8b69414f6438ac3547d2078ab91b67e779ef69378412"},
{file = "certifi-2022.6.15.tar.gz", hash = "sha256:84c85a9078b11105f04f3036a9482ae10e4621616db313fe045dd24743a0820d"},
]
charset-normalizer = [
{file = "charset-normalizer-2.0.12.tar.gz", hash = "sha256:2857e29ff0d34db842cd7ca3230549d1a697f96ee6d3fb071cfa6c7393832597"},
{file = "charset_normalizer-2.0.12-py3-none-any.whl", hash = "sha256:6881edbebdb17b39b4eaaa821b438bf6eddffb4468cf344f09f89def34a8b1df"},
{file = "charset-normalizer-2.1.0.tar.gz", hash = "sha256:575e708016ff3a5e3681541cb9d79312c416835686d054a23accb873b254f413"},
{file = "charset_normalizer-2.1.0-py3-none-any.whl", hash = "sha256:5189b6f22b01957427f35b6a08d9a0bc45b46d3788ef5a92e978433c7a35f8a5"},
]
colorama = [
{file = "colorama-0.4.5-py2.py3-none-any.whl", hash = "sha256:854bf444933e37f5824ae7bfc1e98d5bce2ebe4160d46b5edf346a89358e99da"},
@@ -401,8 +401,8 @@ idna = [
{file = "idna-3.3.tar.gz", hash = "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"},
]
importlib-metadata = [
{file = "importlib_metadata-4.11.4-py3-none-any.whl", hash = "sha256:c58c8eb8a762858f49e18436ff552e83914778e50e9d2f1660535ffb364552ec"},
{file = "importlib_metadata-4.11.4.tar.gz", hash = "sha256:5d26852efe48c0a32b0509ffbc583fda1a2266545a78d104a6f4aff3db17d700"},
{file = "importlib_metadata-4.12.0-py3-none-any.whl", hash = "sha256:7401a975809ea1fdc658c3aa4f78cc2195a0e019c5cbc4c06122884e9ae80c23"},
{file = "importlib_metadata-4.12.0.tar.gz", hash = "sha256:637245b8bab2b6502fcbc752cc4b7a6f6243bb02b31c5c26156ad103d3d45670"},
]
importlib-resources = [
{file = "importlib_resources-5.8.0-py3-none-any.whl", hash = "sha256:7952325ffd516c05a8ad0858c74dff2c3343f136fe66a6002b2623dd1d43f223"},
@@ -417,7 +417,7 @@ jmespath = [
{file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"},
]
kobo = [
{file = "kobo-0.24.1.tar.gz", hash = "sha256:d5a30cc20c323f3e9d9b4b2e511650c4b98929b88859bd8cf57463876686e407"},
{file = "kobo-0.24.2.tar.gz", hash = "sha256:1b3c17260a93d933d2238884373fbf3485ecd417d930acf984285dc012410e2b"},
]
markupsafe = [
{file = "MarkupSafe-2.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d8446c54dc28c01e5a2dbac5a25f071f6653e6e40f3a8818e8b45d790fe6ef53"},
@@ -558,8 +558,8 @@ pyyaml = [
{file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"},
]
requests = [
{file = "requests-2.28.0-py3-none-any.whl", hash = "sha256:bc7861137fbce630f17b03d3ad02ad0bf978c844f3536d0edda6499dafce2b6f"},
{file = "requests-2.28.0.tar.gz", hash = "sha256:d568723a7ebd25875d8d1eaf5dfa068cd2fc8194b2e483d7b1f7c81918dbec6b"},
{file = "requests-2.28.1-py3-none-any.whl", hash = "sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349"},
{file = "requests-2.28.1.tar.gz", hash = "sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983"},
]
rpm-py-installer = [
{file = "rpm-py-installer-1.1.0.tar.gz", hash = "sha256:66e5f4f9247752ed386345642683103afaee50fb16928878a204bc12504b9bbe"},
@@ -573,8 +573,8 @@ six = [
{file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"},
]
typing-extensions = [
{file = "typing_extensions-4.2.0-py3-none-any.whl", hash = "sha256:6657594ee297170d19f67d55c05852a874e7eb634f4f753dbd667855e07c1708"},
{file = "typing_extensions-4.2.0.tar.gz", hash = "sha256:f1c24655a0da0d1b67f07e17a5e6b2a105894e6824b92096378bb3668ef02376"},
{file = "typing_extensions-4.3.0-py3-none-any.whl", hash = "sha256:25642c956049920a5aa49edcdd6ab1e06d7e5d467fc00e0506c44ac86fbfca02"},
{file = "typing_extensions-4.3.0.tar.gz", hash = "sha256:e6d2677a32f47fc7eb2795db1dd15c1f34eff616bcaf2cfb5e997f854fa1c4a6"},
]
urllib3 = [
{file = "urllib3-1.26.9-py2.py3-none-any.whl", hash = "sha256:44ece4d53fb1706f667c9bd1c648f5469a2ec925fcf3a776667042d645472c14"},

@@ -16,6 +16,7 @@ boto3 = "^1.24.12"
xmltodict = "^0.13.0"
requests = "^2.28.0"
kobo = "^0.24.1"
attrs = "^21.4.0"
[tool.poetry.dev-dependencies]
pytest = "~5"