implement kiwi backend

parent ee019321ae
commit 43e723ef2b
@@ -1,4 +1,4 @@
-FROM quay.io/centos/centos:stream9
+FROM quay.io/rockylinux/rockylinux:9
 
 ADD images/get_arch /get_arch
 

@@ -38,6 +38,8 @@ RUN dnf install -y \
     sudo \
    mock \
     python-pip \
+    mock \
+    fuse-overlayfs \
     imagefactory \
     imagefactory-plugins*
 
@@ -29,7 +29,6 @@ class ImageFactoryBackend(BackendInterface):
     common_args: List[str] = field(factory=list)
     package_args: List[str] = field(factory=list)
     metadata: pathlib.Path = field(init=False)
-    ctx = field(init=False)
     stage_commands: Optional[List[List[Union[str, Callable]]]] = field(init=False)
 
     # The url to use in the path when fetching artifacts for the build

@@ -57,9 +56,9 @@ class ImageFactoryBackend(BackendInterface):
         try:
             os.mkdir(self.ctx.outdir)
         except FileExistsError:
-            self.ctx.log.info("Directory already exists for this release. If possible, previously executed steps may be skipped")
+            self.log.info("Directory already exists for this release. If possible, previously executed steps may be skipped")
         except Exception as e:
-            self.ctx.log.exception("Some other exception occured while creating the output directory", e)
+            self.log.exception("Some other exception occured while creating the output directory", e)
             return 0
 
         if os.path.exists(self.metadata):

@@ -100,8 +99,6 @@ class ImageFactoryBackend(BackendInterface):
         if ret > 0:
             return ret
 
-        ret = self.copy()
-        return ret
 
     def clean(self):
         pass

@@ -140,6 +137,7 @@ class ImageFactoryBackend(BackendInterface):
 
     def stage(self) -> int:
         """ Stage the artifacst from wherever they are (unpacking and converting if needed)"""
+        self.ctx.log.info("Executing staging commands")
         if not hasattr(self, 'stage_commands'):
             return 0
 

@@ -148,22 +146,12 @@ class ImageFactoryBackend(BackendInterface):
             ret, out, err, _ = self.ctx.prepare_and_run(command, search=False)
             returns.append(ret)
 
-        return all(ret > 0 for ret in returns)
-
-    def copy(self, skip=False) -> int:
-        # move or unpack if necessary
-        self.ctx.log.info("Executing staging commands")
-        if (stage := self.stage() > 0):
-            raise Exception(stage)
-
-        if not skip:
-            self.ctx.log.info("Copying files to output directory")
-            ret, out, err, _ = self.ctx.prepare_and_run(self.copy_command(), search=False)
-            return ret
-
-        self.ctx.log.info(f"Build complete! Output available in {self.ctx.outdir}/")
-        return 0
+        if (res := all(ret > 0 for ret in returns) > 0):
+            raise Exception(res)
+
+        ret = self.copy()
+        return ret
 
     def checkout_kickstarts(self) -> int:
         cmd = ["git", "clone", "--branch", f"r{self.ctx.architecture.major}",
                self.kickstart_repo, f"{KICKSTART_PATH}"]

@@ -251,14 +239,6 @@ class ImageFactoryBackend(BackendInterface):
                            "--parameter", "repository", self.ctx.outname]
         return package_command
 
-    def copy_command(self) -> List[str]:
-
-        copy_command = ["aws", "s3", "cp", "--recursive", f"{self.ctx.outdir}/",
-                        f"s3://resf-empanadas/buildimage-{self.ctx.architecture.version}-{self.ctx.architecture.name}/{self.ctx.outname}/{self.ctx.build_time.strftime('%s')}/"
-                        ]
-
-        return copy_command
-
     def fix_ks(self):
         cmd: utils.CMD_PARAM_T = ["sed", "-i", f"s,$basearch,{self.ctx.architecture.name},", str(self.kickstart_path)]
         self.ctx.prepare_and_run(cmd, search=False)
@@ -2,9 +2,12 @@
 empanadas backend interface
 """
 from abc import ABC, abstractmethod
+from attrs import define, field
 
 
+@define
 class BackendInterface(ABC):
+    ctx = field(init=False)
     """
     Interface to build images (or whatever)
     """
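Note: with `ctx` declared on the shared `BackendInterface`, concrete backends no longer define the field themselves and simply read `self.ctx` for logging, paths, and `prepare_and_run`. A minimal sketch of the pattern, assuming the builder assigns itself to `backend.ctx` after construction (that wiring is outside this hunk); `EchoBackend` and the dict stand-in for the context are illustrative only:

    from abc import ABC
    from attrs import define, field

    @define
    class BackendInterface(ABC):
        ctx = field(init=False)

    @define
    class EchoBackend(BackendInterface):
        def build(self):
            # backend methods rely on self.ctx having been set by the builder
            print(f"building into {self.ctx['outdir']}")

    backend = EchoBackend()
    backend.ctx = {"outdir": "/tmp/out"}   # stand-in for the ImageBuild instance
    backend.build()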
@@ -1,16 +1,194 @@
 
 """Backend for Kiwi"""
 
 from .interface import BackendInterface
+from .kiwi_imagedata import ImagesData
 
+from empanadas.builders import utils
+from empanadas.common import AttributeDict
+
+from attrs import define, field
+from functools import wraps
+from typing import List
+
+import git
+import os
+import pathlib
+import tempfile
+import shutil
+import sys
+
+# TODO(neil): this should be part of the config, somewhere
+temp = AttributeDict(
+    {
+        "GenericCloud": {
+            "kiwiType": "oem",
+            "kiwiProfile": "Cloud-GenericCloud",
+            "fileType": "qcow2",
+            "outputKey": "disk_format_image"
+        },
+        "Container": {
+            "kiwiType": "oci",
+            "kiwiProfile": "Container",
+            "fileType": "tar.xz",
+            "outputKey": "container"
+        }
+    }
+)
+
+
+def ensure_kiwi_conf(func):
+    @wraps(func)
+    def wrapper(self, *args, **kwargs):
+        if not hasattr(self, 'kiwi_conf') or self.kiwi_conf is None:
+            self.kiwi_conf = temp[self.ctx.image_type]
+        return func(self, *args, **kwargs)
+    return wrapper
+
+
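Note: `ensure_kiwi_conf` lazily resolves the per-image kiwi settings from the `temp` table the first time any decorated method runs, keyed on the build context's `image_type`. A standalone sketch of the same mechanism (the `FakeCtx` and `Demo` names are illustrative, and a plain dict stands in for `AttributeDict`):

    from functools import wraps

    TABLE = {"GenericCloud": {"kiwiType": "oem", "fileType": "qcow2"}}

    def ensure_kiwi_conf(func):
        @wraps(func)
        def wrapper(self, *args, **kwargs):
            # populate kiwi_conf on first use, based on the context's image type
            if not hasattr(self, 'kiwi_conf') or self.kiwi_conf is None:
                self.kiwi_conf = TABLE[self.ctx.image_type]
            return func(self, *args, **kwargs)
        return wrapper

    class FakeCtx:
        image_type = "GenericCloud"

    class Demo:
        ctx = FakeCtx()

        @ensure_kiwi_conf
        def build(self):
            return self.kiwi_conf["kiwiType"]

    assert Demo().build() == "oem"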
+@define
 class KiwiBackend(BackendInterface):
     """Build an image using Kiwi"""
 
-    def prepare(self):
-        pass
+    build_args: List[str] = field(factory=list)
+    image_result: ImagesData = field(init=False)
+    kiwi_conf: AttributeDict = field(init=False)
+
+    def prepare(self):
+        """
+        Checkout mock-rocky-configs and rocky-kiwi-descriptions,
+        init the mock env, and setup to run kiwi
+        """
+        self.checkout_repos()
+        self.setup_mock()
+        self.setup_kiwi()
+
+    @ensure_kiwi_conf
     def build(self):
-        pass
+        self.build_args += [f"--type={self.kiwi_conf.kiwiType}", f"--profile={self.kiwi_conf.kiwiProfile}-{self.ctx.variant}"]
+
+        kiwi_command = [
+            "kiwi-ng", "--color-output",
+            *self.build_args,
+        ]
+        if self.ctx.debug:
+            kiwi_command.append("--debug")
+
+        kiwi_system_command = [
+            "system", "build",
+            "--description='/builddir/rocky-kiwi-descriptions'",
+            "--target-dir", f"/builddir/{self.ctx.outdir}"
+        ]
+
+        build_command = [
+            "--shell", "--enable-network", "--", *kiwi_command, *kiwi_system_command
+        ]
+        ret, out, err = self.run_mock_command(build_command)
+        if ret > 0:
+            raise Exception(f"Kiwi build failed: code {ret}")
+            sys.exit(ret)
+
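Note: `build()` only assembles the kiwi-ng arguments; the actual invocation goes through `run_mock_command()` below, which wraps everything in a `mock --shell` call inside the `rl-9-<arch>-core-infra` chroot. A rough sketch of the argv that ends up being executed for a GenericCloud build (paths and profile pieces are taken from this file; the architecture and the `Base` variant are just example values):

    # illustrative values; empanadas derives these from the build context
    arch = "x86_64"
    outdir = "output"

    kiwi_command = ["kiwi-ng", "--color-output", "--type=oem", "--profile=Cloud-GenericCloud-Base"]
    kiwi_system_command = [
        "system", "build",
        "--description='/builddir/rocky-kiwi-descriptions'",
        "--target-dir", f"/builddir/{outdir}",
    ]
    mock_args = [
        "--configdir", "/workdir/mock-rocky-configs/etc/mock",
        "-r", f"rl-9-{arch}-core-infra",
        "--isolation=simple",          # appended only for GenericCloud builds
    ]

    command = ["mock", *mock_args, "--shell", "--enable-network", "--",
               *kiwi_command, *kiwi_system_command]
    print(" ".join(command))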
+    @ensure_kiwi_conf
+    def stage(self):
+        ret, out, err = self.run_mock_command(["--copyout", f"/builddir/{self.ctx.outdir}", self.ctx.outdir])
+        if ret > 0:
+            raise Exception("failed to copy build result out")
+
+        kiwi_result_path = pathlib.Path(f"{self.ctx.outdir}/kiwi.result.json")
+        if not os.path.exists(kiwi_result_path):
+            raise Exception("Missing kiwi.result.json. Aborting")
+
+        with open(kiwi_result_path, "r") as kiwi_result:
+            self.image_result = ImagesData.from_json(kiwi_result.read()).images
+
+        source = self.image_result[self.kiwi_conf.outputKey].filename
+        filetype = self.kiwi_conf.fileType
+
+        source = utils.remove_first_directory(source)
+        dest = f"{self.ctx.outdir}/{self.ctx.outname}.{filetype}"
+
+        try:
+            shutil.move(source, dest)
+        except Exception as e:
+            raise e
+
     def clean(self):
-        pass
+        ret, out, err = self.run_mock_command(["--shell", "rm", "-fr", f"/builddir/{self.ctx.outdir}/build/"])
+        return ret
+
+    def run_mock_command(self, mock_command: List[str]):
+        mock_args = ["--configdir", "/workdir/mock-rocky-configs/etc/mock", "-r", f"rl-9-{self.ctx.architecture.name}-core-infra"]
+        if self.ctx.image_type == 'GenericCloud':
+            mock_args.append("--isolation=simple")
+        command = [
+            "mock",
+            *mock_args,
+            *mock_command,
+        ]
+        ret, out, err, _ = self.ctx.prepare_and_run(command)
+        return ret, out, err
+
+    def setup_mock(self):
+        # TODO(neil): add error checking
+        ret, out, err = self.run_mock_command(["--init"])
+
+        packages = [
+            "kiwi-cli",
+            "git",
+            "dracut-kiwi-live",
+            "fuse-overlayfs",
+            "kiwi-systemdeps-bootloaders",
+            "kiwi-systemdeps-containers",
+            "kiwi-systemdeps-core",
+            "kiwi-systemdeps-disk-images",
+            "kiwi-systemdeps-filesystems",
+            "kiwi-systemdeps-image-validation",
+            "kiwi-systemdeps-iso-media",
+            "epel-release",
+            "rocky-release-core"
+        ]
+        ret, out, err = self.run_mock_command(["--install", *packages])
+
+        ret, out, err = self.run_mock_command(["--copyin", "/workdir/rocky-kiwi-descriptions", "/builddir/"])
+        return ret
+
+    def checkout_repos(self):
+        """
+        Checkout sig_core/mock-rocky-configs and sig_core/rocky-kiwi-descriptions to /workdir
+        """
+        repos = {
+            "mock-rocky-configs": "main",
+            "rocky-kiwi-descriptions": "r9"
+        }
+
+        for repo, branch in repos.items():
+            repo_url = f"https://git.resf.org/sig_core/{repo}"
+            clone_dir = f"/workdir/{repo}"
+
+            if os.path.isdir(os.path.join(clone_dir, ".git")):
+                try:
+                    # The directory exists and is a git repository, so attempt to pull the latest changes
+                    git.Repo(clone_dir).remotes.origin.pull(branch)
+                    self.ctx.log.info(f"pulled the latest changes for {branch} branch in {clone_dir}")
+                except Exception as e:
+                    raise Exception(f"Failed to pull the repository: {str(e)}")
+                finally:
+                    continue
+
+            try:
+                git.Repo.clone_from(repo_url, clone_dir, branch=branch)
+                print(f"Repository cloned into {clone_dir}")
+            except Exception as e:
+                print(f"Failed to clone repository: {str(e)}")
+
+    def setup_kiwi(self):
+        self.ctx.log.info("Generating kiwi.yml from template")
+        template = self.ctx.tmplenv.get_template('kiwi/kiwi.yml.j2')
+        output = tempfile.NamedTemporaryFile(delete=False).name
+        res = utils.render_template(output, template)
+
+        self.ctx.log.info("Copying generated kiwi.yml into build root")
+        ret, out, err = self.run_mock_command(["--copyin", res, "/etc/kiwi.yml"])
+        if ret > 0:
+            raise Exception("Failed to configure kiwi")
+
+        self.ctx.log.info("Finished setting up kiwi")
iso/empanadas/empanadas/backends/kiwi_imagedata.py (new file, 24 lines)
@@ -0,0 +1,24 @@
+from attrs import define, field
+from typing import Dict
+
+import json
+
+
+@define(auto_attribs=True, kw_only=True)
+class ImageInfo:
+    compress: bool
+    filename: str
+    shasum: bool
+    use_for_bundle: bool
+
+
+@define(auto_attribs=True, kw_only=True)
+class ImagesData:
+    images: Dict[str, ImageInfo] = field(factory=dict)
+
+    @staticmethod
+    def from_json(data: str) -> 'ImagesData':
+        json_data = json.loads(data)
+        images = {key: ImageInfo(**value) for key, value in json_data.items()}
+
+        return ImagesData(images=images)
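Note: `kiwi.result.json` maps a result key (e.g. `disk_format_image`) to the produced file's metadata, and `from_json` turns that mapping into typed `ImageInfo` records that `KiwiBackend.stage()` then indexes by `outputKey`. A small usage sketch with a made-up payload (the field values are illustrative; only the shape matches what `ImageInfo` expects):

    from empanadas.backends.kiwi_imagedata import ImagesData

    sample = '''{
        "disk_format_image": {
            "compress": false,
            "filename": "build/Rocky-9-GenericCloud.qcow2",
            "shasum": true,
            "use_for_bundle": true
        }
    }'''

    images = ImagesData.from_json(sample).images
    print(images["disk_format_image"].filename)   # build/Rocky-9-GenericCloud.qcow2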
@@ -2,6 +2,7 @@
 
 import datetime
 import logging
+import os
 import pathlib
 
 from attrs import define, field

@@ -72,7 +73,6 @@ class ImageBuild: # pylint: disable=too-few-public-methods
         other things, perform lazy evaluations of f-strings which have values
         not available at assignment time. e.g., filling in a second command
         with a value extracted from the previous step or command.
-
         """
 
         r = []

@@ -104,3 +104,15 @@ class ImageBuild: # pylint: disable=too-few-public-methods
             restartPolicy="Never",
         )
         return template
+
+    def copy(self, skip=False) -> int:
+        if not skip:
+            self.log.info("Copying files to output directory")
+            copy_command = ["aws", "s3", "cp", "--recursive", f"{self.outdir}/",
+                            f"s3://resf-empanadas/buildimage-{self.architecture.version}-{self.architecture.name}/{self.outname}/{self.build_time.strftime('%s')}/"
+                            ]
+            ret, out, err, _ = self.prepare_and_run(copy_command, search=False)
+            return ret
+
+        self.ctx.log.info(f"Build complete! Output available in {self.ctx.outdir}/")
+        return 0
@@ -1,7 +1,6 @@
 import pathlib
 import subprocess
 
-from functools import partial
 from typing import Callable, List, Tuple, Union
 
 CMD_PARAM_T = List[Union[str, Callable[..., str]]]

@@ -96,3 +95,15 @@ def log_subprocess(ctx, result: CMD_RESULT_T):
         log_lines("Command STDOUT", stdout)
     if stderr:
         log_lines("Command STDERR", stderr)
+
+
+def remove_first_directory(path):
+    p = pathlib.Path(path)
+    # Check if the path is absolute
+    if p.is_absolute():
+        # For an absolute path, start the new path with the root
+        new_path = pathlib.Path(p.root, *p.parts[2:])
+    else:
+        # For a relative path, simply skip the first part
+        new_path = pathlib.Path(*p.parts[1:])
+    return new_path
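Note: `remove_first_directory` strips the first path component; `KiwiBackend.stage()` uses it to rebase the filename reported in kiwi.result.json onto the local output directory. For example (paths are illustrative, results follow from the function above):

    from empanadas.builders.utils import remove_first_directory

    print(remove_first_directory("/builddir/output/image.qcow2"))  # /output/image.qcow2
    print(remove_first_directory("builddir/output/image.qcow2"))   # output/image.qcow2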
@@ -11,9 +11,12 @@ import yaml
 
 
 # An implementation from the Fabric python library
-class AttributeDict(defaultdict):
-    def __init__(self):
-        super(AttributeDict, self).__init__(AttributeDict)
+class AttributeDict(dict):
+    def __init__(self, *args, **kwargs):
+        super(AttributeDict, self).__init__(*args, **kwargs)
+        for key, value in self.items():
+            if isinstance(value, dict):
+                self[key] = AttributeDict(value)
 
     def __getattr__(self, key):
         try:

@@ -24,6 +27,11 @@ class AttributeDict(defaultdict):
     def __setattr__(self, key, value):
         self[key] = value
 
+    def __setitem__(self, key, value):
+        if isinstance(value, dict):
+            value = AttributeDict(value)
+        super(AttributeDict, self).__setitem__(key, value)
+
 
 # These are a bunch of colors we may use in terminal output
 class Color:
|
|||||||
|
|
||||||
ALLOWED_TYPE_VARIANTS = {
|
ALLOWED_TYPE_VARIANTS = {
|
||||||
"Azure": ["Base", "LVM"],
|
"Azure": ["Base", "LVM"],
|
||||||
"Container": ["Base", "Minimal", "UBI", "WSL"],
|
"Container": ["Base", "Minimal", "UBI", "WSL", "Toolbox"],
|
||||||
"EC2": ["Base", "LVM"],
|
"EC2": ["Base", "LVM"],
|
||||||
"GenericCloud": ["Base", "LVM"],
|
"GenericCloud": ["Base", "LVM"],
|
||||||
"Vagrant": ["Libvirt", "Vbox", "VMware"],
|
"Vagrant": ["Libvirt", "Vbox", "VMware"],
|
||||||
@ -133,7 +141,6 @@ def valid_type_variant(_type: str, variant: str = "") -> bool:
|
|||||||
return True
|
return True
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
@define(kw_only=True)
|
@define(kw_only=True)
|
||||||
class Architecture:
|
class Architecture:
|
||||||
name: str = field()
|
name: str = field()
|
||||||
|
@@ -9,7 +9,7 @@ import sys
 
 from empanadas.common import Architecture, rldict, valid_type_variant
 from empanadas.builders import ImageBuild
-from empanadas.backends import ImageFactoryBackend # , KiwiBackend
+from empanadas.backends import ImageFactoryBackend, KiwiBackend
 
 parser = argparse.ArgumentParser(description="ISO Compose")
 

@@ -19,6 +19,9 @@ parser.add_argument('--rc', action='store_true', help="Release Candidate")
 parser.add_argument('--kickstartdir', action='store_true',
                     help="Use the kickstart dir instead of the os dir")
 parser.add_argument('--debug', action='store_true', help="debug?")
+parser.add_argument('--skip', type=str,
+                    help="what stage(s) to skip",
+                    required=False)
 parser.add_argument('--type', type=str,
                     help="Image type (container, genclo, azure, aws, vagrant)",
                     required=True)

@@ -62,6 +65,10 @@ def run():
     arches = rlvars['allowed_arches'] if results.kube else [platform.uname().machine]
 
     for architecture in arches:
+        if results.type in ["Container", "GenericCloud"]:
+            backend = KiwiBackend(
+            )
+        else:
             backend = ImageFactoryBackend(
                 kickstart_dir="kickstart" if results.kickstartdir else "os",
                 kickstart_repo=rlvars['livemap']['git_repo']

@@ -82,6 +89,18 @@ def run():
         print(IB.render_kubernetes_job())
         sys.exit(0)
 
-    IB.backend.prepare()
-    IB.backend.build()
-    IB.backend.clean()
+    skip_stages = results.skip.split(',') if results.skip else []
+    stages = ["prepare", "build", "clean", "stage"]
+    for i, stage in enumerate(stages):
+        skip_stage = stage in skip_stages
+
+        log.info(f"Stage {i} - {stage}{' SKIP' if skip_stage else ''}")
+
+        if skip_stage:
+            continue
+
+        method = getattr(IB.backend, stage)
+        if callable(method):
+            method()
+        else:
+            log.fatal(f"Unable to execute {stage}")
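Note: the driver now walks the backend through prepare, build, clean, stage in that order, and `--skip` takes a comma-separated subset of those names (e.g. `--skip clean,stage`). A stripped-down sketch of the dispatch, with a stub backend standing in for the real one:

    class StubBackend:
        def prepare(self): print("prepare")
        def build(self):   print("build")
        def clean(self):   print("clean")
        def stage(self):   print("stage")

    skip = "clean"                                  # what --skip would carry
    skip_stages = skip.split(',') if skip else []

    for i, stage in enumerate(["prepare", "build", "clean", "stage"]):
        if stage in skip_stages:
            print(f"Stage {i} - {stage} SKIP")
            continue
        getattr(StubBackend(), stage)()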
iso/empanadas/empanadas/templates/kiwi/kiwi.yml.j2 (new file, 162 lines)
@@ -0,0 +1,162 @@
+# KIWI - Build configuration file
+#
+# Below all configuration parameters available to control
+# KIWI's build process are listed as comments. The values
+# used here provides the default values applied by KIWI if
+# no other information is specified.
+#
+# To make any of the below effective, please uncomment the
+# respective section(s) and adapt the parameters according
+# to your needs
+#
+
+# Setup access to security keys
+#credentials:
+#   # Specify private key(s) used for signing operations
+#   - verification_metadata_signing_key_file: /path/to/private.pem
+
+# Setup access options for the Open BuildService
+#obs:
+#   # Specify the URL of the Open BuildService download server
+#   - download_url: http://download.opensuse.org/repositories
+#   # Specify if the BuildService download server is public or private.
+#   # This information is used to verify if the request to populate
+#   # the repositories via the imageinclude attribute is possible
+#   - public: true
+
+
+# Setup behaviour of the kiwi result bundle command
+#bundle:
+#   # Specify if the bundle tarball should contain compressed results.
+#   # Note: Already compressed result information will not be touched.
+#   # Build results that generate an encrypted filesystem, i.e.
+#   # luks setup, will not be compressed. The intention for result compression
+#   # is to produce a smaller representation of the original. Encrypted data
+#   # generally grows when an attempt is made to compress the data. This is
+#   # due to the nature of compression algorithms. Therefore this setting is
+#   # ignored when encryption is enabled.
+#   - compress: false
+#   # Specify if the image build result and bundle should contain
+#   # a .changes file. The .changes file contains the package changelog
+#   # information from all packages installed into the image.
+#   - has_package_changes: false
+
+
+# Setup behaviour of XZ compressor
+#xz:
+#   # Specify options used in any xz compression call
+#   - options: '--threads=0'
+
+
+# Setup process parameters for container image creation
+#container:
+#   # Specify compression for container images
+#   # Possible values are true, false, xz or none.
+#   - compress: true
+
+
+# Setup process parameters for ISO image creation
+#iso:
+#   # Specify tool category which should be used to build iso images
+#   # Possible values are: xorriso
+#   - tool_category: xorriso
+
+
+# Setup process parameters for OCI toolchain
+#oci:
+#   # Specify OCI archive tool which should be used on creation of
+#   # container archives for OCI compliant images, e.g docker
+#   # Possible values are umoci and buildah
+#   - archive_tool: buildah
+
+
+# Specify build constraints that applies during the image build
+# process. If one or more constraints are violated the build exits
+# with an appropriate error message.
+#build_constraints:
+#   # Maximum result image size. The value can be specified in
+#   # bytes or it can be specified with m=MB or g=GB. The constraint
+#   # is checked prior to the result bundle creation
+#   - max_size: 700m
+
+# Setup process parameters for partition mapping
+mapper:
+#   # Specify tool to use for creating partition maps
+#   # Possible values are: kpartx and partx
+  - part_mapper: {{ "partx" if architecture in ["s390x"] else "kpartx" }}
+
+# Setup process parameters to handle runtime checks
+#runtime_checks:
+#   # Specify list of runtime checks to disable
+#   - disable:
+#       # verify that the host has the required container tools installed
+#       - check_container_tool_chain_installed
+
+#       # verify that there are repositories configured
+#       - check_repositories_configured
+
+#       # verify that the URL for imageinclude repos is accessable
+#       - check_image_include_repos_publicly_resolvable
+
+#       # verify secure boot setup disabled for overlay configured disk images
+#       - check_efi_mode_for_disk_overlay_correctly_setup
+
+#       # verify for legacy kiwi boot images that they exist on the host
+#       - check_boot_description_exists
+
+#       # verify if kiwi initrd_system was set if a boot attribute exists
+#       - check_initrd_selection_required
+
+#       # verify for legacy kiwi boot images that the same kernel is used
+#       - check_consistent_kernel_in_boot_and_system_image
+
+#       # check for reserved label names used in LVM setup
+#       - check_volume_setup_defines_reserved_labels
+
+#       # verify only one full size volume is specified for LVM images
+#       - check_volume_setup_defines_multiple_fullsize_volumes
+
+#       # verify no / volume setup is setup but the @root volume is used
+#       - check_volume_setup_has_no_root_definition
+
+#       # verify if volume label is really used with a volume setup
+#       - check_volume_label_used_with_lvm
+
+#       # verify that there is a xen domain setup for xen images
+#       - check_xen_uniquely_setup_as_server_or_guest
+
+#       # verify mediacheck is installed for ISO images that requests it
+#       - check_mediacheck_installed
+
+#       # verify dracut-kiwi-live is installed for ISO images
+#       - check_dracut_module_for_live_iso_in_package_list
+
+#       # verify dracut-kiwi-overlay is installed for overlay disk images
+#       - check_dracut_module_for_disk_overlay_in_package_list
+
+#       # verify dracut-kiwi-repart is installed for OEM disk images
+#       - check_dracut_module_for_disk_oem_in_package_list
+
+#       # verify dracut-kiwi-oem-dump is installed for OEM install images
+#       - check_dracut_module_for_oem_install_in_package_list
+
+#       # verify configured firmware is compatible with host architecture
+#       - check_architecture_supports_iso_firmware_setup
+
+#       # verify WSL naming conventions
+#       - check_appx_naming_conventions_valid
+
+#       # check kiwi dracut modules compatible with kiwi builder
+#       - check_dracut_module_versions_compatible_to_kiwi
+
+#       # check for unresolved include statements in the XML description
+#       - check_include_references_unresolvable
+
+#       # validate options passed to cryptsetup via luksformat element
+#       - check_luksformat_options_valid
+
+#       # check devicepersistency compatible with partition table type
+#       - check_partuuid_persistency_type_used_with_mbr
+
+#       # check efifatimagesize does not exceed the max El Torito load size
+#       - check_efi_fat_image_has_correct_size
@@ -21,7 +21,6 @@ GitPython = ">=3.1.30"
 
 [tool.poetry.dev-dependencies]
 pytest = "~5"
-attrs = "^23.1.0"
 
 [tool.poetry.scripts]
 test-module = "empanadas.scripts.test_module:run"