forked from sig_core/toolkit
refactor and support multiple image backends
This commit is contained in:
parent
333f3614f9
commit
ee019321ae
5
iso/empanadas/empanadas/backends/__init__.py
Normal file
5
iso/empanadas/empanadas/backends/__init__.py
Normal file
@ -0,0 +1,5 @@
|
|||||||
|
"""Empanadas Backends (fillings)"""
|
||||||
|
|
||||||
|
from .imagefactory import ImageFactoryBackend
|
||||||
|
from .kiwi import KiwiBackend
|
||||||
|
from .interface import BackendInterface
|
313
iso/empanadas/empanadas/backends/imagefactory.py
Normal file
313
iso/empanadas/empanadas/backends/imagefactory.py
Normal file
@ -0,0 +1,313 @@
|
|||||||
|
"""Backend for ImageFactory"""
|
||||||
|
|
||||||
|
import json
|
||||||
|
import os
|
||||||
|
import pathlib
|
||||||
|
import tempfile
|
||||||
|
|
||||||
|
from .interface import BackendInterface
|
||||||
|
from empanadas.builders import utils
|
||||||
|
|
||||||
|
from attrs import define, field
|
||||||
|
|
||||||
|
from typing import List, Optional, Callable, Union
|
||||||
|
|
||||||
|
KICKSTART_PATH = pathlib.Path(os.environ.get("KICKSTART_PATH", "/kickstarts"))
|
||||||
|
STORAGE_DIR = pathlib.Path("/var/lib/imagefactory/storage")
|
||||||
|
|
||||||
|
|
||||||
|
@define(kw_only=True)
class ImageFactoryBackend(BackendInterface):
    """Build an image using ImageFactory.

    Expects ``ctx`` to be set to the owning builder (ImageBuild) before
    ``prepare()`` is called; ``ctx`` supplies logging, templating, output
    locations and command execution (``prepare_and_run``).
    """
    kickstart_arg: List[str] = field(factory=list)
    kickstart_path: pathlib.Path = field(init=False)
    base_uuid: Optional[str] = field(default="")
    target_uuid: Optional[str] = field(default="")
    tdl_path: pathlib.Path = field(init=False)
    out_type: str = field(init=False)
    command_args: List[str] = field(factory=list)
    common_args: List[str] = field(factory=list)
    package_args: List[str] = field(factory=list)
    metadata: pathlib.Path = field(init=False)
    ctx = field(init=False)  # back-reference to the ImageBuild that owns this backend
    stage_commands: Optional[List[List[Union[str, Callable]]]] = field(init=False)

    # The url to use in the path when fetching artifacts for the build
    kickstart_dir: str = field()  # 'os' or 'kickstart'

    # The git repository to fetch kickstarts from
    kickstart_repo: str = field()

    def prepare(self):
        """Render the TDL template, fetch kickstarts and restore prior metadata."""
        self.out_type = self.image_format()

        tdl_template = self.ctx.tmplenv.get_template('icicle/tdl.xml.tmpl')

        self.tdl_path = self.render_icicle_template(tdl_template)
        if not self.tdl_path:
            exit(2)

        self.metadata = pathlib.Path(self.ctx.outdir, ".imagefactory-metadata.json")

        self.kickstart_path = pathlib.Path(f"{KICKSTART_PATH}/Rocky-{self.ctx.architecture.major}-{self.ctx.type_variant}.ks")

        self.checkout_kickstarts()
        self.kickstart_arg = self.kickstart_imagefactory_args()

        try:
            os.mkdir(self.ctx.outdir)
        except FileExistsError:
            self.ctx.log.info("Directory already exists for this release. If possible, previously executed steps may be skipped")
        except Exception:
            # BUG FIX: log.exception takes %-style format args; passing the
            # exception object as an extra argument broke log formatting.
            # The active traceback is recorded automatically.
            self.ctx.log.exception("Some other exception occured while creating the output directory")
            return 0

        if os.path.exists(self.metadata):
            self.ctx.log.info(f"Found metadata at {self.metadata}")
            with open(self.metadata, "r") as f:
                try:
                    o = json.load(f)
                    self.base_uuid = o['base_uuid']
                    self.target_uuid = o['target_uuid']
                except json.decoder.JSONDecodeError:
                    # BUG FIX: same %-formatting problem as above.
                    self.ctx.log.exception("Couldn't decode metadata file")
                finally:
                    f.flush()

        self.command_args = self._command_args()
        self.package_args = self._package_args()
        self.common_args = self._common_args()

        self.setup_staging()

    def build(self) -> int:
        """Build the base image, then package and copy the artifacts.

        Returns 0 on success (or when a previously-built base image was
        restored from metadata), otherwise the failing step's return code.
        """
        if self.base_uuid:
            # NOTE(review): a restored base_uuid short-circuits the whole
            # build, including package()/copy() — confirm this is intended.
            return 0

        self.fix_ks()

        # TODO(neil): this should be a lambda which is called from the function
        ret, out, err, uuid = self.ctx.prepare_and_run(self.build_command(), search=True)
        if uuid:
            self.base_uuid = uuid.rstrip()
            self.save()

        if ret > 0:
            return ret

        ret = self.package()
        if ret > 0:
            return ret

        return self.copy()

    def clean(self):
        """Nothing to clean up for ImageFactory builds (yet)."""

    def save(self):
        """Persist base/target UUIDs to the metadata file for later resumption."""
        with open(self.metadata, "w") as f:
            try:
                o = {
                    name: getattr(self, name) for name in [
                        "base_uuid", "target_uuid"
                    ]
                }
                self.ctx.log.debug(o)
                json.dump(o, f)
            except AttributeError:
                # BUG FIX: dropped the stray exception argument that broke
                # %-style log formatting.
                self.ctx.log.error("Couldn't find attribute in object. Something is probably wrong")
            except Exception as e:
                self.ctx.log.exception(e)
            finally:
                f.flush()

    def package(self) -> int:
        """Run the imagefactory target_image step (skipped for types that don't need it)."""
        # Some build types don't need to be packaged by imagefactory
        # @TODO remove business logic if possible
        if self.ctx.image_type in ["GenericCloud", "EC2", "Azure", "Vagrant", "OCP", "RPI", "GenericArm"]:
            self.target_uuid = self.base_uuid if hasattr(self, 'base_uuid') else ""

        if self.target_uuid:
            return 0

        ret, out, err, uuid = self.ctx.prepare_and_run(self.package_command(), search=True)
        if uuid:
            self.target_uuid = uuid.rstrip()
            self.save()
        return ret

    def stage(self) -> int:
        """Stage the artifacts from wherever they are (unpacking and converting if needed).

        Returns non-zero when ANY staging command failed.
        """
        if not hasattr(self, 'stage_commands'):
            return 0

        returns = []
        for command in self.stage_commands:  # type: ignore
            ret, out, err, _ = self.ctx.prepare_and_run(command, search=False)
            returns.append(ret)

        # BUG FIX: the original `all(ret > 0 ...)` reported failure only when
        # *every* command failed; a single failed command must fail the stage.
        return int(any(ret > 0 for ret in returns))

    def copy(self, skip=False) -> int:
        """Stage artifacts and (unless *skip*) upload the output directory to S3."""
        # move or unpack if necessary
        self.ctx.log.info("Executing staging commands")
        # BUG FIX: the original walrus bound the *comparison* result
        # (`stage := self.stage() > 0`), so failures raised Exception(True)
        # and lost the return code; bind the code itself instead.
        if (ret := self.stage()) > 0:
            raise Exception(ret)

        if not skip:
            self.ctx.log.info("Copying files to output directory")
            ret, out, err, _ = self.ctx.prepare_and_run(self.copy_command(), search=False)
            return ret

        self.ctx.log.info(f"Build complete! Output available in {self.ctx.outdir}/")
        return 0

    def checkout_kickstarts(self) -> int:
        """Clone the kickstart repo; fall back to updating an existing clone."""
        cmd = ["git", "clone", "--branch", f"r{self.ctx.architecture.major}",
               self.kickstart_repo, f"{KICKSTART_PATH}"]
        ret, out, err, _ = self.ctx.prepare_and_run(cmd, search=False)
        self.ctx.log.debug(out)
        self.ctx.log.debug(err)
        if ret > 0:
            # clone failed — most likely the checkout already exists
            ret = self.pull_kickstarts()
        return ret

    def pull_kickstarts(self) -> int:
        """Hard-reset the existing kickstart checkout, then pull the latest changes."""
        cmd: utils.CMD_PARAM_T = ["git", "-C", f"{KICKSTART_PATH}", "reset", "--hard", "HEAD"]
        ret, out, err, _ = self.ctx.prepare_and_run(cmd, search=False)
        self.ctx.log.debug(out)
        self.ctx.log.debug(err)
        if ret == 0:
            cmd = ["git", "-C", f"{KICKSTART_PATH}", "pull"]
            ret, out, err, _ = self.ctx.prepare_and_run(cmd, search=False)
            self.ctx.log.debug(out)
            self.ctx.log.debug(err)
        return ret

    def _command_args(self):
        """Global imagefactory flags (currently just --debug when ctx.debug is set)."""
        args_mapping = {
            "debug": "--debug",
        }
        # NOTE(neil): i'm intentionally leaving this as is; deprecated
        return [param for name, param in args_mapping.items() if self.ctx.debug]

    def _package_args(self) -> List[str]:
        """Arguments for the target_image (packaging) step."""
        if self.ctx.image_type in ["Container"]:
            return ["--parameter", "compress", "xz"]
        return [""]

    def _common_args(self) -> List[str]:
        """Arguments shared by the base_image and target_image steps."""
        args = []
        if self.ctx.image_type in ["Container"]:
            args = ["--parameter", "offline_icicle", "true"]
        if self.ctx.image_type in ["GenericCloud", "EC2", "Vagrant", "Azure", "OCP", "RPI", "GenericArm"]:
            args = ["--parameter", "generate_icicle", "false"]
        return args

    def image_format(self) -> str:
        """Map the image type to an imagefactory output format ('' means default)."""
        mapping = {
            "Container": "docker"
        }
        return mapping[self.ctx.image_type] if self.ctx.image_type in mapping.keys() else ''

    def kickstart_imagefactory_args(self) -> List[str]:
        """Return the --file-parameter args pointing imagefactory at the kickstart.

        Exits with status 2 when the kickstart is missing and debug is off.
        """
        if not self.kickstart_path.is_file():
            self.ctx.log.warning(f"Kickstart file is not available: {self.kickstart_path}")
            if not self.ctx.debug:
                self.ctx.log.warning("Exiting because debug mode is not enabled.")
                exit(2)

        return ["--file-parameter", "install_script", str(self.kickstart_path)]

    def render_icicle_template(self, tdl_template) -> pathlib.Path:
        """Render the TDL (template description) XML into a temp file and return its path."""
        output = tempfile.NamedTemporaryFile(delete=False).name
        return utils.render_template(output, tdl_template,
                                     architecture=self.ctx.architecture.name,
                                     iso8601date=self.ctx.build_time.strftime("%Y%m%d"),
                                     installdir=self.kickstart_dir,
                                     major=self.ctx.architecture.major,
                                     minor=self.ctx.architecture.minor,
                                     release=self.ctx.release,
                                     size="10G",
                                     type=self.ctx.image_type,
                                     utcnow=self.ctx.build_time,
                                     version_variant=self.ctx.architecture.version if not self.ctx.variant else f"{self.ctx.architecture.version}-{self.ctx.variant}",
                                     )

    def build_command(self) -> List[str]:
        """Command line for the imagefactory base_image step."""
        build_command = ["imagefactory", "--timeout", self.ctx.timeout,
                         *self.command_args, "base_image", *self.common_args,
                         *self.kickstart_arg, self.tdl_path]
        return build_command

    def package_command(self) -> List[str]:
        """Command line for the imagefactory target_image step."""
        package_command = ["imagefactory", *self.command_args, "target_image",
                           self.out_type, *self.common_args,
                           "--id", f"{self.base_uuid}",
                           *self.package_args,
                           "--parameter", "repository", self.ctx.outname]
        return package_command

    def copy_command(self) -> List[str]:
        """Command line to upload the output directory to S3."""
        copy_command = ["aws", "s3", "cp", "--recursive", f"{self.ctx.outdir}/",
                        f"s3://resf-empanadas/buildimage-{self.ctx.architecture.version}-{self.ctx.architecture.name}/{self.ctx.outname}/{self.ctx.build_time.strftime('%s')}/"
                        ]
        return copy_command

    def fix_ks(self):
        """Substitute $basearch in the kickstart with the concrete architecture name."""
        cmd: utils.CMD_PARAM_T = ["sed", "-i", f"s,$basearch,{self.ctx.architecture.name},", str(self.kickstart_path)]
        self.ctx.prepare_and_run(cmd, search=False)

    def setup_staging(self):
        """Choose the per-image-type staging commands (executed later by stage()).

        The lambdas defer interpolation of target_uuid, which is only known
        after packaging.
        """
        # Yes, this is gross. I'll fix it later.
        if self.ctx.image_type in ["Container"]:
            self.stage_commands = [
                ["tar", "-C", f"{self.ctx.outdir}", "--strip-components=1", "-x", "-f", lambda: f"{STORAGE_DIR}/{self.target_uuid}.body", "*/layer.tar"],
                ["xz", f"{self.ctx.outdir}/layer.tar"]
            ]
        if self.ctx.image_type in ["RPI"]:
            self.stage_commands = [
                ["cp", lambda: f"{STORAGE_DIR}/{self.target_uuid}.body", f"{self.ctx.outdir}/{self.ctx.outname}.raw"],
                ["xz", f"{self.ctx.outdir}/{self.ctx.outname}.raw"]
            ]
        if self.ctx.image_type in ["GenericCloud", "OCP", "GenericArm"]:
            self.stage_commands = [
                ["qemu-img", "convert", "-c", "-f", "raw", "-O", "qcow2",
                 lambda: f"{STORAGE_DIR}/{self.target_uuid}.body", f"{self.ctx.outdir}/{self.ctx.outname}.qcow2"]
            ]
        if self.ctx.image_type in ["EC2"]:
            self.stage_commands = [
                ["qemu-img", "convert", "-f", "raw", "-O", "qcow2", lambda: f"{STORAGE_DIR}/{self.target_uuid}.body", f"{self.ctx.outdir}/{self.ctx.outname}.qcow2"]
            ]
        if self.ctx.image_type in ["Azure"]:
            self.stage_commands = [
                ["/prep-azure.sh", lambda: f"{STORAGE_DIR}/{self.target_uuid}.body", f"{STORAGE_DIR}"],
                ["cp", lambda: f"{STORAGE_DIR}/{self.target_uuid}.vhd", f"{self.ctx.outdir}/{self.ctx.outname}.vhd"]
            ]
        if self.ctx.image_type in ["Vagrant"]:
            _map = {
                "Vbox": {"format": "vmdk", "provider": "virtualbox"},
                "Libvirt": {"format": "qcow2", "provider": "libvirt", "virtual_size": 10},
                "VMware": {"format": "vmdk", "provider": "vmware_desktop"}
            }
            output = f"{_map[self.ctx.variant]['format']}"  # type: ignore

            # pop from the options map that will be passed to the vagrant metadata.json
            convert_options = _map[self.ctx.variant].pop('convertOptions') if 'convertOptions' in _map[self.ctx.variant].keys() else ''  # type: ignore

            self.stage_commands = [
                ["qemu-img", "convert", "-c", "-f", "raw", "-O", output, *convert_options,
                 lambda: f"{STORAGE_DIR}/{self.target_uuid}.body", f"{self.ctx.outdir}/{self.ctx.outname}.{output}"],
                ["tar", "-C", self.ctx.outdir, "-czf", f"/tmp/{self.ctx.outname}.box", '.'],
                ["mv", f"/tmp/{self.ctx.outname}.box", self.ctx.outdir]
            ]
            # NOTE(review): prepare_vagrant is not defined on this class or its
            # base — this call will raise AttributeError at runtime; the helper
            # appears to still live (commented out) in builders/utils.py.
            self.prepare_vagrant(_map[self.ctx.variant])

        # BUG FIX: stage_commands is only assigned for the types above; reading
        # the bare attribute crashed with AttributeError for any other type.
        if getattr(self, 'stage_commands', None):
            self.stage_commands.append(["cp", "-v", lambda: f"{STORAGE_DIR}/{self.target_uuid}.meta", f"{self.ctx.outdir}/build.meta"])
|
37
iso/empanadas/empanadas/backends/interface.py
Normal file
37
iso/empanadas/empanadas/backends/interface.py
Normal file
@ -0,0 +1,37 @@
|
|||||||
|
"""
|
||||||
|
empanadas backend interface
|
||||||
|
"""
|
||||||
|
from abc import ABC, abstractmethod
|
||||||
|
|
||||||
|
|
||||||
|
class BackendInterface(ABC):
    """
    Interface to build images (or whatever)
    """

    @abstractmethod
    def prepare(self):
        """
        Set up everything the backend needs before a build can start
        (output directories, prerequisite checks, and so on).
        """

    @abstractmethod
    def build(self):
        """
        Produce the image. The core image-building logic of the backend
        lives here.
        """

    @abstractmethod
    def stage(self):
        """
        Transform and copy artifacts out of the build directory to the
        location the builder expects (usually somewhere under /tmp/).
        """

    @abstractmethod
    def clean(self):
        """
        Remove temporary files and any other resources created while
        building the image.
        """
|
16
iso/empanadas/empanadas/backends/kiwi.py
Normal file
16
iso/empanadas/empanadas/backends/kiwi.py
Normal file
@ -0,0 +1,16 @@
|
|||||||
|
|
||||||
|
"""Backend for Kiwi"""
|
||||||
|
from .interface import BackendInterface
|
||||||
|
|
||||||
|
|
||||||
|
class KiwiBackend(BackendInterface):
    """Build an image using Kiwi (stub — not implemented yet)."""

    def prepare(self):
        """No preparation is required yet."""

    def build(self):
        """Building is not implemented for Kiwi yet."""

    def stage(self):
        """Staging is not implemented for Kiwi yet.

        BUG FIX: BackendInterface declares ``stage`` as abstract; without
        this override KiwiBackend could not be instantiated (TypeError:
        Can't instantiate abstract class).
        """

    def clean(self):
        """Nothing to clean up yet."""
|
1
iso/empanadas/empanadas/builders/__init__.py
Normal file
1
iso/empanadas/empanadas/builders/__init__.py
Normal file
@ -0,0 +1 @@
|
|||||||
|
from .imagebuild import ImageBuild
|
106
iso/empanadas/empanadas/builders/imagebuild.py
Normal file
106
iso/empanadas/empanadas/builders/imagebuild.py
Normal file
@ -0,0 +1,106 @@
|
|||||||
|
"""Build an image with a given backend"""
|
||||||
|
|
||||||
|
import datetime
|
||||||
|
import logging
|
||||||
|
import pathlib
|
||||||
|
|
||||||
|
from attrs import define, field
|
||||||
|
|
||||||
|
from empanadas.backends import BackendInterface, KiwiBackend
|
||||||
|
from empanadas.common import Architecture
|
||||||
|
from empanadas.common import _rootdir
|
||||||
|
from . import utils
|
||||||
|
|
||||||
|
from jinja2 import Environment, FileSystemLoader, Template
|
||||||
|
from typing import List, Optional, Tuple, Callable
|
||||||
|
|
||||||
|
|
||||||
|
@define(kw_only=True)
class ImageBuild:  # pylint: disable=too-few-public-methods
    """Image builder using a given backend.

    Wires itself into the backend (``backend.ctx = self``) so the backend
    can reach the logger, templates and output locations.
    """
    tmplenv: Environment = field(init=False)

    # Only things we know we're keeping in this class here
    architecture: Architecture = field()
    backend: BackendInterface = field()
    build_time: datetime.datetime = field()
    debug: bool = field(default=False)
    log: logging.Logger = field()
    release: int = field(default=0)
    timeout: str = field(default='3600')

    image_type: str = field()  # the type of the image
    type_variant: str = field(init=False)
    variant: Optional[str] = field()

    # Kubernetes job template
    job_template: Optional[Template] = field(init=False)  # the kube Job tpl

    # Where the artifacts should go to
    outdir: pathlib.Path = field(init=False)
    outname: str = field(init=False)

    def __attrs_post_init__(self):
        # Give the backend a back-reference to this builder's context.
        self.backend.ctx = self

        file_loader = FileSystemLoader(f"{_rootdir}/templates")
        self.tmplenv = Environment(loader=file_loader)

        self.job_template = self.tmplenv.get_template('kube/Job.tmpl')

        self.type_variant = self.type_variant_name()
        self.outdir, self.outname = self.output_name()

    def output_name(self) -> Tuple[pathlib.Path, str]:
        """Return (output directory, artifact base name) for this build."""
        directory = f"Rocky-{self.architecture.major}-{self.type_variant}-{self.architecture.version}-{self.build_time.strftime('%Y%m%d')}.{self.release}"
        name = f"{directory}.{self.architecture.name}"
        outdir = pathlib.Path("/tmp/", directory)
        return outdir, name

    def type_variant_name(self):
        """Combine image type and optional variant into one identifier."""
        return self.image_type if not self.variant else f"{self.image_type}-{self.variant}"

    def prepare_and_run(self, command: utils.CMD_PARAM_T, search: Callable = None) -> utils.CMD_RESULT_T:
        """Resolve lazy command parts, then execute the command via utils.runCmd."""
        return utils.runCmd(self, self.prepare_command(command), search)

    def prepare_command(self, command_list: utils.CMD_PARAM_T) -> List[str]:
        """
        Commands may be a callable, which should be a lambda to be evaluated at
        preparation time with available locals. This can be used to, among
        other things, perform lazy evaluations of f-strings which have values
        not available at assignment time. e.g., filling in a second command
        with a value extracted from the previous step or command.
        """
        r = []
        for c in command_list:
            if callable(c) and c.__name__ == '<lambda>':
                r.append(c())
            else:
                r.append(str(c))
        return r

    def render_kubernetes_job(self):
        """Render the Kubernetes Job manifest that reproduces this build."""
        # TODO(neil): should this be put in the builder class itself to return the right thing for us?
        # BUG FIX: the original compared the backend *instance* to the class
        # with `==`, which is always False; use isinstance instead.
        if isinstance(self.backend, KiwiBackend):
            self.log.error("Kube not implemented for Kiwi")

        commands = [self.backend.build_command(), self.backend.package_command(), self.backend.copy_command()]
        if not self.job_template:
            return None
        template = self.job_template.render(
            architecture=self.architecture.name,
            backoffLimit=4,
            buildTime=self.build_time.strftime("%s"),
            command=commands,
            imageName="ghcr.io/rockylinux/sig-core-toolkit:latest",
            jobname="buildimage",
            namespace="empanadas",
            major=self.architecture.major,
            minor=self.architecture.minor,
            restartPolicy="Never",
        )
        return template
|
98
iso/empanadas/empanadas/builders/utils.py
Normal file
98
iso/empanadas/empanadas/builders/utils.py
Normal file
@ -0,0 +1,98 @@
|
|||||||
|
import pathlib
|
||||||
|
import subprocess
|
||||||
|
|
||||||
|
from functools import partial
|
||||||
|
from typing import Callable, List, Tuple, Union
|
||||||
|
|
||||||
|
CMD_PARAM_T = List[Union[str, Callable[..., str]]]
|
||||||
|
|
||||||
|
STR_NONE_T = Union[bytes, None]
|
||||||
|
BYTES_NONE_T = Union[bytes, None]
|
||||||
|
# Tuple of int, stdout, stderr, uuid
|
||||||
|
CMD_RESULT_T = Tuple[int, BYTES_NONE_T, BYTES_NONE_T, STR_NONE_T]
|
||||||
|
|
||||||
|
|
||||||
|
# def prepare_vagrant(options):
|
||||||
|
# """Setup the output directory for the Vagrant type variant, dropping templates as required"""
|
||||||
|
# file_loader = FileSystemLoader(f"{_rootdir}/templates")
|
||||||
|
# tmplenv = Environment(loader=file_loader)
|
||||||
|
#
|
||||||
|
# templates = {}
|
||||||
|
# templates['Vagrantfile'] = tmplenv.get_template(f"vagrant/Vagrantfile.{self.variant}")
|
||||||
|
# templates['metadata.json'] = tmplenv.get_template('vagrant/metadata.tmpl.json')
|
||||||
|
# templates['info.json'] = tmplenv.get_template('vagrant/info.tmpl.json')
|
||||||
|
#
|
||||||
|
# if self.variant == "VMware":
|
||||||
|
# templates[f"{self.outname}.vmx"] = tmplenv.get_template('vagrant/vmx.tmpl')
|
||||||
|
#
|
||||||
|
# if self.variant == "Vbox":
|
||||||
|
# templates['box.ovf'] = tmplenv.get_template('vagrant/box.tmpl.ovf')
|
||||||
|
#
|
||||||
|
# if self.variant == "Libvirt":
|
||||||
|
# # Libvirt vagrant driver expects the qcow2 file to be called box.img.
|
||||||
|
# qemu_command_index = [i for i, d in enumerate(self.stage_commands) if d[0] == "qemu-img"][0]
|
||||||
|
# self.stage_commands.insert(qemu_command_index+1, ["mv", f"{self.outdir}/{self.outname}.qcow2", f"{self.outdir}/box.img"])
|
||||||
|
#
|
||||||
|
# for name, template in templates.items():
|
||||||
|
# self.render_template(f"{self.outdir}/{name}", template,
|
||||||
|
# name=self.outname,
|
||||||
|
# arch=self.architecture.name,
|
||||||
|
# options=options
|
||||||
|
# )
|
||||||
|
|
||||||
|
|
||||||
|
def render_template(path, template, **kwargs) -> pathlib.Path:
    """Render *template* with *kwargs* into *path* and return it as a Path.

    Raises Exception when the file does not exist after writing.
    """
    with open(path, "wb") as fh:
        fh.write(template.render(**kwargs).encode())
        fh.flush()
    rendered = pathlib.Path(path)
    if not rendered.exists():
        raise Exception("Failed to template")
    return rendered
|
||||||
|
|
||||||
|
|
||||||
|
def runCmd(ctx, prepared_command: List[str], search: Callable = None) -> CMD_RESULT_T:
    """Execute *prepared_command*, optionally scanning stdout for a 'UUID: ' line.

    Returns (returncode, stdout, stderr, uuid); stdout/stderr are the
    captured bytes from communicate(), uuid is the trailing token of the
    first matching line (or None).
    """
    ctx.log.info(f"Running command: {' '.join(prepared_command)}")

    popen_kwargs = {"stdout": subprocess.PIPE}
    # in debug mode let stderr flow straight to the terminal
    if not ctx.debug:
        popen_kwargs["stderr"] = subprocess.PIPE

    with subprocess.Popen(prepared_command, **popen_kwargs) as proc:
        found_uuid = None
        # @TODO implement this as a callback?
        if search:
            for raw_line in proc.stdout:  # type: ignore
                text = raw_line.decode()
                if text.startswith("UUID: "):
                    found_uuid = text.split(" ")[-1]
                    ctx.log.debug(f"found uuid: {found_uuid}")

        stdout_data, stderr_data = proc.communicate()
        result = proc.wait(), stdout_data, stderr_data, found_uuid

        if result[0] > 0:
            ctx.log.error(f"Problem while executing command: '{prepared_command}'")
        if search and not result[3]:
            ctx.log.error("UUID not found in stdout. Dumping stdout and stderr")
            log_subprocess(ctx, result)

        return result
|
||||||
|
|
||||||
|
|
||||||
|
def log_subprocess(ctx, result: CMD_RESULT_T):
    """Log a command's return code, stdout and stderr at INFO level."""
    def banner(title, payload):
        ctx.log.info(f"====={title}=====")
        ctx.log.info(payload.decode())

    ctx.log.info(f"Command return code: {result[0]}")
    captured_out, captured_err = result[1], result[2]
    if captured_out:
        banner("Command STDOUT", captured_out)
    if captured_err:
        banner("Command STDERR", captured_err)
|
@ -1,12 +1,10 @@
|
|||||||
# All imports are here
|
# All imports are here
|
||||||
import glob
|
import glob
|
||||||
import hashlib
|
|
||||||
import logging
|
|
||||||
import os
|
|
||||||
import platform
|
import platform
|
||||||
import time
|
import time
|
||||||
from collections import defaultdict
|
from collections import defaultdict
|
||||||
from typing import Tuple
|
from attrs import define, field
|
||||||
|
|
||||||
|
|
||||||
import rpm
|
import rpm
|
||||||
import yaml
|
import yaml
|
||||||
@ -120,7 +118,7 @@ ALLOWED_TYPE_VARIANTS = {
|
|||||||
def valid_type_variant(_type: str, variant: str = "") -> bool:
|
def valid_type_variant(_type: str, variant: str = "") -> bool:
|
||||||
if _type not in ALLOWED_TYPE_VARIANTS:
|
if _type not in ALLOWED_TYPE_VARIANTS:
|
||||||
raise Exception(f"Type is invalid: ({_type}, {variant})")
|
raise Exception(f"Type is invalid: ({_type}, {variant})")
|
||||||
if ALLOWED_TYPE_VARIANTS[_type] == None:
|
if ALLOWED_TYPE_VARIANTS[_type] is None:
|
||||||
if variant is not None:
|
if variant is not None:
|
||||||
raise Exception(f"{_type} Type expects no variant type.")
|
raise Exception(f"{_type} Type expects no variant type.")
|
||||||
return True
|
return True
|
||||||
@ -135,8 +133,6 @@ def valid_type_variant(_type: str, variant: str = "") -> bool:
|
|||||||
return True
|
return True
|
||||||
|
|
||||||
|
|
||||||
from attrs import define, field
|
|
||||||
|
|
||||||
|
|
||||||
@define(kw_only=True)
|
@define(kw_only=True)
|
||||||
class Architecture:
|
class Architecture:
|
||||||
|
@ -1,44 +1,43 @@
|
|||||||
# Builds an image given a version, type, variant, and architecture
|
# Builds an image given a version, type, variant, and architecture
|
||||||
# Defaults to the running host's architecture
|
# Defaults to the running host's architecture
|
||||||
|
|
||||||
import argparse
|
import argparse
|
||||||
import datetime
|
import datetime
|
||||||
import json
|
|
||||||
import logging
|
import logging
|
||||||
import os
|
|
||||||
import pathlib
|
|
||||||
import platform
|
import platform
|
||||||
import subprocess
|
|
||||||
import sys
|
import sys
|
||||||
import tempfile
|
|
||||||
import time
|
|
||||||
|
|
||||||
from attrs import define, Factory, field, asdict
|
|
||||||
from botocore import args
|
|
||||||
from jinja2 import Environment, FileSystemLoader, Template
|
|
||||||
from typing import Callable, List, NoReturn, Optional, Tuple, IO, Union
|
|
||||||
|
|
||||||
from empanadas.common import Architecture, rldict, valid_type_variant
|
from empanadas.common import Architecture, rldict, valid_type_variant
|
||||||
from empanadas.common import _rootdir
|
from empanadas.builders import ImageBuild
|
||||||
|
from empanadas.backends import ImageFactoryBackend # , KiwiBackend
|
||||||
|
|
||||||
parser = argparse.ArgumentParser(description="ISO Compose")
|
parser = argparse.ArgumentParser(description="ISO Compose")
|
||||||
|
|
||||||
parser.add_argument('--version', type=str, help="Release Version (8.6, 9.1)", required=True)
|
parser.add_argument('--version',
|
||||||
|
type=str, help="Release Version (8.6, 9.1)", required=True)
|
||||||
parser.add_argument('--rc', action='store_true', help="Release Candidate")
|
parser.add_argument('--rc', action='store_true', help="Release Candidate")
|
||||||
parser.add_argument('--kickstartdir', action='store_true', help="Use the kickstart dir instead of the os dir for repositories")
|
parser.add_argument('--kickstartdir', action='store_true',
|
||||||
|
help="Use the kickstart dir instead of the os dir")
|
||||||
parser.add_argument('--debug', action='store_true', help="debug?")
|
parser.add_argument('--debug', action='store_true', help="debug?")
|
||||||
parser.add_argument('--type', type=str, help="Image type (container, genclo, azure, aws, vagrant)", required=True)
|
parser.add_argument('--type', type=str,
|
||||||
|
help="Image type (container, genclo, azure, aws, vagrant)",
|
||||||
|
required=True)
|
||||||
parser.add_argument('--variant', type=str, help="", required=False)
|
parser.add_argument('--variant', type=str, help="", required=False)
|
||||||
parser.add_argument('--release', type=str, help="Image release for subsequent builds with the same date stamp (rarely needed)", required=False)
|
parser.add_argument('--release', type=str,
|
||||||
parser.add_argument('--kube', action='store_true', help="output as a K8s job(s)", required=False)
|
help="Image release for builds with the same date stamp",
|
||||||
parser.add_argument('--timeout', type=str, help="change timeout for imagefactory build process (default 3600)", required=False, default='3600')
|
required=False)
|
||||||
|
parser.add_argument('--kube', action='store_true',
|
||||||
|
help="output as a K8s job(s)",
|
||||||
|
required=False)
|
||||||
|
parser.add_argument('--timeout', type=str,
|
||||||
|
help="change timeout for imagefactory build process",
|
||||||
|
required=False, default='3600')
|
||||||
|
|
||||||
|
|
||||||
results = parser.parse_args()
|
results = parser.parse_args()
|
||||||
rlvars = rldict[results.version]
|
rlvars = rldict[results.version]
|
||||||
major = rlvars["major"]
|
major = rlvars["major"]
|
||||||
|
|
||||||
|
|
||||||
debug = results.debug
|
debug = results.debug
|
||||||
|
|
||||||
log = logging.getLogger(__name__)
|
log = logging.getLogger(__name__)
|
||||||
@ -52,405 +51,6 @@ formatter = logging.Formatter(
|
|||||||
handler.setFormatter(formatter)
|
handler.setFormatter(formatter)
|
||||||
log.addHandler(handler)
|
log.addHandler(handler)
|
||||||
|
|
||||||
STORAGE_DIR = pathlib.Path("/var/lib/imagefactory/storage")
|
|
||||||
KICKSTART_PATH = pathlib.Path(os.environ.get("KICKSTART_PATH", "/kickstarts"))
|
|
||||||
BUILDTIME = datetime.datetime.utcnow()
|
|
||||||
|
|
||||||
|
|
||||||
CMD_PARAM_T = List[Union[str, Callable[..., str]]]
|
|
||||||
|
|
||||||
@define(kw_only=True)
|
|
||||||
class ImageBuild:
|
|
||||||
architecture: Architecture = field()
|
|
||||||
base_uuid: Optional[str] = field(default="")
|
|
||||||
cli_args: argparse.Namespace = field()
|
|
||||||
command_args: List[str] = field(factory=list)
|
|
||||||
common_args: List[str] = field(factory=list)
|
|
||||||
debug: bool = field(default=False)
|
|
||||||
image_type: str = field()
|
|
||||||
job_template: Optional[Template] = field(init=False)
|
|
||||||
kickstart_arg: List[str] = field(factory=list)
|
|
||||||
kickstart_path: pathlib.Path = field(init=False)
|
|
||||||
metadata: pathlib.Path = field(init=False)
|
|
||||||
out_type: str = field(init=False)
|
|
||||||
outdir: pathlib.Path = field(init=False)
|
|
||||||
outname: str = field(init=False)
|
|
||||||
package_args: List[str] = field(factory=list)
|
|
||||||
release: int = field(default=0)
|
|
||||||
stage_commands: Optional[List[List[Union[str,Callable]]]] = field(init=False)
|
|
||||||
target_uuid: Optional[str] = field(default="")
|
|
||||||
tdl_path: pathlib.Path = field(init=False)
|
|
||||||
template: Template = field()
|
|
||||||
timeout: str = field(default='3600')
|
|
||||||
type_variant: str = field(init=False)
|
|
||||||
variant: Optional[str] = field()
|
|
||||||
|
|
||||||
def __attrs_post_init__(self):
|
|
||||||
self.tdl_path = self.render_icicle_template()
|
|
||||||
if not self.tdl_path:
|
|
||||||
exit(2)
|
|
||||||
self.type_variant = self.type_variant_name()
|
|
||||||
self.outdir, self.outname = self.output_name()
|
|
||||||
self.out_type = self.image_format()
|
|
||||||
self.command_args = self._command_args()
|
|
||||||
self.package_args = self._package_args()
|
|
||||||
self.common_args = self._common_args()
|
|
||||||
|
|
||||||
self.metadata = pathlib.Path(self.outdir, ".imagefactory-metadata.json")
|
|
||||||
|
|
||||||
self.kickstart_path = pathlib.Path(f"{KICKSTART_PATH}/Rocky-{self.architecture.major}-{self.type_variant}.ks")
|
|
||||||
|
|
||||||
self.checkout_kickstarts()
|
|
||||||
self.kickstart_arg = self.kickstart_imagefactory_args()
|
|
||||||
|
|
||||||
try:
|
|
||||||
os.mkdir(self.outdir)
|
|
||||||
except FileExistsError as e:
|
|
||||||
log.info("Directory already exists for this release. If possible, previously executed steps may be skipped")
|
|
||||||
except Exception as e:
|
|
||||||
log.exception("Some other exception occured while creating the output directory", e)
|
|
||||||
return 0
|
|
||||||
|
|
||||||
if os.path.exists(self.metadata):
|
|
||||||
with open(self.metadata, "r") as f:
|
|
||||||
try:
|
|
||||||
o = json.load(f)
|
|
||||||
self.base_uuid = o['base_uuid']
|
|
||||||
self.target_uuid = o['target_uuid']
|
|
||||||
except json.decoder.JSONDecodeError as e:
|
|
||||||
log.exception("Couldn't decode metadata file", e)
|
|
||||||
finally:
|
|
||||||
f.flush()
|
|
||||||
|
|
||||||
# Yes, this is gross. I'll fix it later.
|
|
||||||
if self.image_type in ["Container"]:
|
|
||||||
self.stage_commands = [
|
|
||||||
["tar", "-C", f"{self.outdir}", "--strip-components=1", "-x", "-f", lambda: f"{STORAGE_DIR}/{self.target_uuid}.body", "*/layer.tar"],
|
|
||||||
["xz", f"{self.outdir}/layer.tar"]
|
|
||||||
]
|
|
||||||
if self.image_type in ["RPI"]:
|
|
||||||
self.stage_commands = [
|
|
||||||
["cp", lambda: f"{STORAGE_DIR}/{self.target_uuid}.body", f"{self.outdir}/{self.outname}.raw"],
|
|
||||||
["xz", f"{self.outdir}/{self.outname}.raw"]
|
|
||||||
]
|
|
||||||
if self.image_type in ["GenericCloud", "OCP", "GenericArm"]:
|
|
||||||
self.stage_commands = [
|
|
||||||
["qemu-img", "convert", "-c", "-f", "raw", "-O", "qcow2", lambda: f"{STORAGE_DIR}/{self.target_uuid}.body", f"{self.outdir}/{self.outname}.qcow2"]
|
|
||||||
]
|
|
||||||
if self.image_type in ["EC2"]:
|
|
||||||
self.stage_commands = [
|
|
||||||
["qemu-img", "convert", "-f", "raw", "-O", "qcow2", lambda: f"{STORAGE_DIR}/{self.target_uuid}.body", f"{self.outdir}/{self.outname}.qcow2"]
|
|
||||||
]
|
|
||||||
if self.image_type in ["Azure"]:
|
|
||||||
self.stage_commands = [
|
|
||||||
["/prep-azure.sh", lambda: f"{STORAGE_DIR}/{self.target_uuid}.body", f"{STORAGE_DIR}"],
|
|
||||||
["cp", lambda: f"{STORAGE_DIR}/{self.target_uuid}.vhd", f"{self.outdir}/{self.outname}.vhd"]
|
|
||||||
]
|
|
||||||
if self.image_type in ["Vagrant"]:
|
|
||||||
_map = {
|
|
||||||
"Vbox": {"format": "vmdk", "provider": "virtualbox"},
|
|
||||||
"Libvirt": {"format": "qcow2", "provider": "libvirt", "virtual_size": 10},
|
|
||||||
"VMware": {"format": "vmdk", "provider": "vmware_desktop"}
|
|
||||||
}
|
|
||||||
output = f"{_map[self.variant]['format']}" #type: ignore
|
|
||||||
provider = f"{_map[self.variant]['provider']}" # type: ignore
|
|
||||||
|
|
||||||
# pop from the options map that will be passed to the vagrant metadata.json
|
|
||||||
convert_options = _map[self.variant].pop('convertOptions') if 'convertOptions' in _map[self.variant].keys() else '' #type: ignore
|
|
||||||
|
|
||||||
|
|
||||||
self.stage_commands = [
|
|
||||||
["qemu-img", "convert", "-c", "-f", "raw", "-O", output, *convert_options, lambda: f"{STORAGE_DIR}/{self.target_uuid}.body", f"{self.outdir}/{self.outname}.{output}"],
|
|
||||||
["tar", "-C", self.outdir, "-czf", f"/tmp/{self.outname}.box", '.'],
|
|
||||||
["mv", f"/tmp/{self.outname}.box", self.outdir]
|
|
||||||
]
|
|
||||||
self.prepare_vagrant(_map[self.variant])
|
|
||||||
|
|
||||||
if self.stage_commands:
|
|
||||||
self.stage_commands.append(["cp", "-v", lambda: f"{STORAGE_DIR}/{self.target_uuid}.meta", f"{self.outdir}/build.meta"])
|
|
||||||
|
|
||||||
|
|
||||||
def prepare_vagrant(self, options):
|
|
||||||
"""Setup the output directory for the Vagrant type variant, dropping templates as required"""
|
|
||||||
file_loader = FileSystemLoader(f"{_rootdir}/templates")
|
|
||||||
tmplenv = Environment(loader=file_loader)
|
|
||||||
|
|
||||||
templates = {}
|
|
||||||
templates['Vagrantfile'] = tmplenv.get_template(f"vagrant/Vagrantfile.{self.variant}")
|
|
||||||
templates['metadata.json'] = tmplenv.get_template('vagrant/metadata.tmpl.json')
|
|
||||||
templates['info.json'] = tmplenv.get_template('vagrant/info.tmpl.json')
|
|
||||||
|
|
||||||
if self.variant == "VMware":
|
|
||||||
templates[f"{self.outname}.vmx"] = tmplenv.get_template('vagrant/vmx.tmpl')
|
|
||||||
|
|
||||||
if self.variant == "Vbox":
|
|
||||||
templates['box.ovf'] = tmplenv.get_template('vagrant/box.tmpl.ovf')
|
|
||||||
|
|
||||||
if self.variant == "Libvirt":
|
|
||||||
# Libvirt vagrant driver expects the qcow2 file to be called box.img.
|
|
||||||
qemu_command_index = [i for i, d in enumerate(self.stage_commands) if d[0] == "qemu-img"][0]
|
|
||||||
self.stage_commands.insert(qemu_command_index+1, ["mv", f"{self.outdir}/{self.outname}.qcow2", f"{self.outdir}/box.img"])
|
|
||||||
|
|
||||||
for name, template in templates.items():
|
|
||||||
self.render_template(f"{self.outdir}/{name}", template,
|
|
||||||
name=self.outname,
|
|
||||||
arch=self.architecture.name,
|
|
||||||
options=options
|
|
||||||
)
|
|
||||||
|
|
||||||
def checkout_kickstarts(self) -> int:
|
|
||||||
cmd = ["git", "clone", "--branch", f"r{self.architecture.major}", rlvars['livemap']['git_repo'], f"{KICKSTART_PATH}"]
|
|
||||||
ret, out, err, _ = self.runCmd(cmd, search=False)
|
|
||||||
log.debug(out)
|
|
||||||
log.debug(err)
|
|
||||||
if ret > 0:
|
|
||||||
ret = self.pull_kickstarts()
|
|
||||||
return ret
|
|
||||||
|
|
||||||
def pull_kickstarts(self) -> int:
|
|
||||||
cmd: CMD_PARAM_T = ["git", "-C", f"{KICKSTART_PATH}", "reset", "--hard", "HEAD"]
|
|
||||||
ret, out, err, _ = self.runCmd(cmd, search=False)
|
|
||||||
log.debug(out)
|
|
||||||
log.debug(err)
|
|
||||||
if ret == 0:
|
|
||||||
cmd = ["git", "-C", f"{KICKSTART_PATH}", "pull"]
|
|
||||||
ret, out, err, _ = self.runCmd(cmd, search=False)
|
|
||||||
log.debug(out)
|
|
||||||
log.debug(err)
|
|
||||||
return ret
|
|
||||||
|
|
||||||
|
|
||||||
def output_name(self) -> Tuple[pathlib.Path, str]:
|
|
||||||
directory = f"Rocky-{self.architecture.major}-{self.type_variant}-{self.architecture.version}-{BUILDTIME.strftime('%Y%m%d')}.{self.release}"
|
|
||||||
name = f"{directory}.{self.architecture.name}"
|
|
||||||
outdir = pathlib.Path(f"/tmp/", directory)
|
|
||||||
return outdir, name
|
|
||||||
|
|
||||||
def type_variant_name(self):
|
|
||||||
return self.image_type if not self.variant else f"{self.image_type}-{self.variant}"
|
|
||||||
|
|
||||||
def _command_args(self):
|
|
||||||
args_mapping = {
|
|
||||||
"debug": "--debug",
|
|
||||||
}
|
|
||||||
return [param for name, param in args_mapping.items() if getattr(self.cli_args, name)]
|
|
||||||
|
|
||||||
def _package_args(self) -> List[str]:
|
|
||||||
if self.image_type in ["Container"]:
|
|
||||||
return ["--parameter", "compress", "xz"]
|
|
||||||
return [""]
|
|
||||||
|
|
||||||
def _common_args(self) -> List[str]:
|
|
||||||
args = []
|
|
||||||
if self.image_type in ["Container"]:
|
|
||||||
args = ["--parameter", "offline_icicle", "true"]
|
|
||||||
if self.image_type in ["GenericCloud", "EC2", "Vagrant", "Azure", "OCP", "RPI", "GenericArm"]:
|
|
||||||
args = ["--parameter", "generate_icicle", "false"]
|
|
||||||
return args
|
|
||||||
|
|
||||||
def image_format(self) -> str:
|
|
||||||
mapping = {
|
|
||||||
"Container": "docker"
|
|
||||||
}
|
|
||||||
return mapping[self.image_type] if self.image_type in mapping.keys() else ''
|
|
||||||
|
|
||||||
def kickstart_imagefactory_args(self) -> List[str]:
|
|
||||||
|
|
||||||
if not self.kickstart_path.is_file():
|
|
||||||
log.warning(f"Kickstart file is not available: {self.kickstart_path}")
|
|
||||||
if not debug:
|
|
||||||
log.warning("Exiting because debug mode is not enabled.")
|
|
||||||
exit(2)
|
|
||||||
|
|
||||||
return ["--file-parameter", "install_script", str(self.kickstart_path)]
|
|
||||||
|
|
||||||
def render_template(self, path, template, **kwargs) -> pathlib.Path:
|
|
||||||
with open(path, "wb") as f:
|
|
||||||
_template = template.render(**kwargs)
|
|
||||||
f.write(_template.encode())
|
|
||||||
f.flush()
|
|
||||||
output = pathlib.Path(path)
|
|
||||||
if not output.exists():
|
|
||||||
log.error("Failed to write template")
|
|
||||||
raise Exception("Failed to template")
|
|
||||||
return output
|
|
||||||
|
|
||||||
def render_icicle_template(self) -> pathlib.Path:
|
|
||||||
output = tempfile.NamedTemporaryFile(delete=False).name
|
|
||||||
return self.render_template(output, self.template,
|
|
||||||
architecture=self.architecture.name,
|
|
||||||
iso8601date=BUILDTIME.strftime("%Y%m%d"),
|
|
||||||
installdir="kickstart" if self.cli_args.kickstartdir else "os",
|
|
||||||
major=self.architecture.major,
|
|
||||||
minor=self.architecture.minor,
|
|
||||||
release=self.release,
|
|
||||||
size="10G",
|
|
||||||
type=self.image_type,
|
|
||||||
utcnow=BUILDTIME,
|
|
||||||
version_variant=self.architecture.version if not self.variant else f"{self.architecture.version}-{self.variant}",
|
|
||||||
)
|
|
||||||
|
|
||||||
def build_command(self) -> List[str]:
|
|
||||||
build_command = ["imagefactory", "--timeout", self.timeout, *self.command_args, "base_image", *self.common_args, *self.kickstart_arg, self.tdl_path]
|
|
||||||
return build_command
|
|
||||||
def package_command(self) -> List[str]:
|
|
||||||
package_command = ["imagefactory", *self.command_args, "target_image", self.out_type, *self.common_args,
|
|
||||||
"--id", f"{self.base_uuid}",
|
|
||||||
*self.package_args,
|
|
||||||
"--parameter", "repository", self.outname,
|
|
||||||
]
|
|
||||||
return package_command
|
|
||||||
|
|
||||||
def copy_command(self) -> List[str]:
|
|
||||||
|
|
||||||
copy_command = ["aws", "s3", "cp", "--recursive", f"{self.outdir}/",
|
|
||||||
f"s3://resf-empanadas/buildimage-{self.architecture.version}-{self.architecture.name}/{ self.outname }/{ BUILDTIME.strftime('%s') }/"
|
|
||||||
]
|
|
||||||
|
|
||||||
return copy_command
|
|
||||||
|
|
||||||
def build(self) -> int:
|
|
||||||
if self.base_uuid:
|
|
||||||
return 0
|
|
||||||
|
|
||||||
self.fix_ks()
|
|
||||||
|
|
||||||
ret, out, err, uuid = self.runCmd(self.build_command())
|
|
||||||
if uuid:
|
|
||||||
self.base_uuid = uuid.rstrip()
|
|
||||||
self.save()
|
|
||||||
return ret
|
|
||||||
|
|
||||||
def package(self) -> int:
|
|
||||||
# Some build types don't need to be packaged by imagefactory
|
|
||||||
# @TODO remove business logic if possible
|
|
||||||
if self.image_type in ["GenericCloud", "EC2", "Azure", "Vagrant", "OCP", "RPI", "GenericArm"]:
|
|
||||||
self.target_uuid = self.base_uuid if hasattr(self, 'base_uuid') else ""
|
|
||||||
|
|
||||||
if self.target_uuid:
|
|
||||||
return 0
|
|
||||||
|
|
||||||
ret, out, err, uuid = self.runCmd(self.package_command())
|
|
||||||
if uuid:
|
|
||||||
self.target_uuid = uuid.rstrip()
|
|
||||||
self.save()
|
|
||||||
return ret
|
|
||||||
|
|
||||||
def stage(self) -> int:
|
|
||||||
""" Stage the artifacst from wherever they are (unpacking and converting if needed)"""
|
|
||||||
if not hasattr(self,'stage_commands'):
|
|
||||||
return 0
|
|
||||||
|
|
||||||
returns = []
|
|
||||||
for command in self.stage_commands: #type: ignore
|
|
||||||
ret, out, err, _ = self.runCmd(command, search=False)
|
|
||||||
returns.append(ret)
|
|
||||||
|
|
||||||
return all(ret > 0 for ret in returns)
|
|
||||||
|
|
||||||
def copy(self, skip=False) -> int:
|
|
||||||
# move or unpack if necessary
|
|
||||||
log.info("Executing staging commands")
|
|
||||||
if (stage := self.stage() > 0):
|
|
||||||
raise Exception(stage)
|
|
||||||
|
|
||||||
if not skip:
|
|
||||||
log.info("Copying files to output directory")
|
|
||||||
ret, out, err, _ = self.runCmd(self.copy_command(), search=False)
|
|
||||||
return ret
|
|
||||||
|
|
||||||
log.info(f"Build complete! Output available in {self.outdir}/")
|
|
||||||
return 0
|
|
||||||
|
|
||||||
def runCmd(self, command: CMD_PARAM_T, search: bool = True) -> Tuple[int, Union[bytes,None], Union[bytes,None], Union[str,None]]:
|
|
||||||
prepared, _ = self.prepare_command(command)
|
|
||||||
log.info(f"Running command: {' '.join(prepared)}")
|
|
||||||
|
|
||||||
kwargs = {
|
|
||||||
"stderr": subprocess.PIPE,
|
|
||||||
"stdout": subprocess.PIPE
|
|
||||||
}
|
|
||||||
if debug: del kwargs["stderr"]
|
|
||||||
|
|
||||||
with subprocess.Popen(prepared, **kwargs) as p:
|
|
||||||
uuid = None
|
|
||||||
# @TODO implement this as a callback?
|
|
||||||
if search:
|
|
||||||
for _, line in enumerate(p.stdout): # type: ignore
|
|
||||||
ln = line.decode()
|
|
||||||
if ln.startswith("UUID: "):
|
|
||||||
uuid = ln.split(" ")[-1]
|
|
||||||
log.debug(f"found uuid: {uuid}")
|
|
||||||
|
|
||||||
out, err = p.communicate()
|
|
||||||
res = p.wait(), out, err, uuid
|
|
||||||
|
|
||||||
if res[0] > 0:
|
|
||||||
log.error(f"Problem while executing command: '{prepared}'")
|
|
||||||
if search and not res[3]:
|
|
||||||
log.error("UUID not found in stdout. Dumping stdout and stderr")
|
|
||||||
self.log_subprocess(res)
|
|
||||||
|
|
||||||
return res
|
|
||||||
|
|
||||||
def prepare_command(self, command_list: CMD_PARAM_T) -> Tuple[List[str],List[None]]:
|
|
||||||
"""
|
|
||||||
Commands may be a callable, which should be a lambda to be evaluated at
|
|
||||||
preparation time with available locals. This can be used to, among
|
|
||||||
other things, perform lazy evaluations of f-strings which have values
|
|
||||||
not available at assignment time. e.g., filling in a second command
|
|
||||||
with a value extracted from the previous step or command.
|
|
||||||
|
|
||||||
"""
|
|
||||||
|
|
||||||
r = []
|
|
||||||
return r, [r.append(c()) if (callable(c) and c.__name__ == '<lambda>') else r.append(str(c)) for c in command_list]
|
|
||||||
|
|
||||||
def log_subprocess(self, result: Tuple[int, Union[bytes, None], Union[bytes, None], Union[str, None]]):
|
|
||||||
def log_lines(title, lines):
|
|
||||||
log.info(f"====={title}=====")
|
|
||||||
log.info(lines.decode())
|
|
||||||
log.info(f"Command return code: {result[0]}")
|
|
||||||
stdout = result[1]
|
|
||||||
stderr = result[2]
|
|
||||||
if stdout:
|
|
||||||
log_lines("Command STDOUT", stdout)
|
|
||||||
if stderr:
|
|
||||||
log_lines("Command STDERR", stderr)
|
|
||||||
|
|
||||||
def fix_ks(self):
|
|
||||||
cmd: CMD_PARAM_T = ["sed", "-i", f"s,$basearch,{self.architecture.name},", str(self.kickstart_path)]
|
|
||||||
self.runCmd(cmd, search=False)
|
|
||||||
|
|
||||||
def render_kubernetes_job(self):
|
|
||||||
commands = [self.build_command(), self.package_command(), self.copy_command()]
|
|
||||||
if not self.job_template:
|
|
||||||
return None
|
|
||||||
template = self.job_template.render(
|
|
||||||
architecture=self.architecture.name,
|
|
||||||
backoffLimit=4,
|
|
||||||
buildTime=BUILDTIME.strftime("%s"),
|
|
||||||
command=commands,
|
|
||||||
imageName="ghcr.io/rockylinux/sig-core-toolkit:latest",
|
|
||||||
jobname="buildimage",
|
|
||||||
namespace="empanadas",
|
|
||||||
major=major,
|
|
||||||
restartPolicy="Never",
|
|
||||||
)
|
|
||||||
return template
|
|
||||||
|
|
||||||
def save(self):
|
|
||||||
with open(self.metadata, "w") as f:
|
|
||||||
try:
|
|
||||||
o = { name: getattr(self, name) for name in ["base_uuid", "target_uuid"] }
|
|
||||||
log.debug(o)
|
|
||||||
json.dump(o, f)
|
|
||||||
except AttributeError as e:
|
|
||||||
log.error("Couldn't find attribute in object. Something is probably wrong", e)
|
|
||||||
except Exception as e:
|
|
||||||
log.exception(e)
|
|
||||||
finally:
|
|
||||||
f.flush()
|
|
||||||
|
|
||||||
def run():
|
def run():
|
||||||
try:
|
try:
|
||||||
@ -459,28 +59,29 @@ def run():
|
|||||||
log.exception(e)
|
log.exception(e)
|
||||||
exit(2)
|
exit(2)
|
||||||
|
|
||||||
file_loader = FileSystemLoader(f"{_rootdir}/templates")
|
|
||||||
tmplenv = Environment(loader=file_loader)
|
|
||||||
tdl_template = tmplenv.get_template('icicle/tdl.xml.tmpl')
|
|
||||||
|
|
||||||
arches = rlvars['allowed_arches'] if results.kube else [platform.uname().machine]
|
arches = rlvars['allowed_arches'] if results.kube else [platform.uname().machine]
|
||||||
|
|
||||||
for architecture in arches:
|
for architecture in arches:
|
||||||
|
backend = ImageFactoryBackend(
|
||||||
|
kickstart_dir="kickstart" if results.kickstartdir else "os",
|
||||||
|
kickstart_repo=rlvars['livemap']['git_repo']
|
||||||
|
)
|
||||||
IB = ImageBuild(
|
IB = ImageBuild(
|
||||||
architecture=Architecture.from_version(architecture, rlvars['revision']),
|
architecture=Architecture.from_version(architecture, rlvars['revision']),
|
||||||
cli_args=results,
|
|
||||||
debug=results.debug,
|
debug=results.debug,
|
||||||
image_type=results.type,
|
image_type=results.type,
|
||||||
release=results.release if results.release else 0,
|
release=results.release if results.release else 0,
|
||||||
template=tdl_template,
|
|
||||||
variant=results.variant,
|
variant=results.variant,
|
||||||
|
build_time=datetime.datetime.utcnow(),
|
||||||
|
backend=backend,
|
||||||
|
log=log,
|
||||||
)
|
)
|
||||||
if results.kube:
|
|
||||||
IB.job_template = tmplenv.get_template('kube/Job.tmpl')
|
|
||||||
#commands = IB.kube_commands()
|
|
||||||
print(IB.render_kubernetes_job())
|
|
||||||
else:
|
|
||||||
ret = IB.build()
|
|
||||||
ret = IB.package()
|
|
||||||
ret = IB.copy()
|
|
||||||
|
|
||||||
|
if results.kube:
|
||||||
|
# commands = IB.kube_commands()
|
||||||
|
print(IB.render_kubernetes_job())
|
||||||
|
sys.exit(0)
|
||||||
|
|
||||||
|
IB.backend.prepare()
|
||||||
|
IB.backend.build()
|
||||||
|
IB.backend.clean()
|
||||||
|
@ -21,6 +21,7 @@ GitPython = ">=3.1.30"
|
|||||||
|
|
||||||
[tool.poetry.dev-dependencies]
|
[tool.poetry.dev-dependencies]
|
||||||
pytest = "~5"
|
pytest = "~5"
|
||||||
|
attrs = "^23.1.0"
|
||||||
|
|
||||||
[tool.poetry.scripts]
|
[tool.poetry.scripts]
|
||||||
test-module = "empanadas.scripts.test_module:run"
|
test-module = "empanadas.scripts.test_module:run"
|
||||||
@ -39,6 +40,16 @@ generate-compose = "empanadas.scripts.generate_compose:run"
|
|||||||
peridot-repoclosure = "empanadas.scripts.peridot_repoclosure:run"
|
peridot-repoclosure = "empanadas.scripts.peridot_repoclosure:run"
|
||||||
refresh-all-treeinfo = "empanadas.scripts.refresh_all_treeinfo:run"
|
refresh-all-treeinfo = "empanadas.scripts.refresh_all_treeinfo:run"
|
||||||
|
|
||||||
|
[tool.pylint.main]
|
||||||
|
init-hook ="""
|
||||||
|
try:
|
||||||
|
import pylint_venv
|
||||||
|
except ImportError:
|
||||||
|
pass
|
||||||
|
else:
|
||||||
|
pylint_venv.inithook()
|
||||||
|
"""
|
||||||
|
|
||||||
[build-system]
|
[build-system]
|
||||||
requires = ["poetry-core>=1.0.0"]
|
requires = ["poetry-core>=1.0.0"]
|
||||||
build-backend = "poetry.core.masonry.api"
|
build-backend = "poetry.core.masonry.api"
|
||||||
|
2
iso/empanadas/tox.ini
Normal file
2
iso/empanadas/tox.ini
Normal file
@ -0,0 +1,2 @@
|
|||||||
|
[pycodestyle]
|
||||||
|
max-line-length = 160
|
Loading…
Reference in New Issue
Block a user