Update Configs + Extra ISO Build Progress

* Update configs with repo structure and extra ISO image settings
* Add graft point process
* Start local and podman processes for extra ISO builds
Louis Abel 2022-06-23 13:12:53 -07:00
parent 9463f96381
commit 2e5c4778f4
Signed by: label
GPG Key ID: B37E62D143879B36
12 changed files with 466 additions and 35 deletions

View File

@@ -5,6 +5,7 @@
rclvl: 'RC2'
major: '8'
minor: '6'
bugurl: 'https://bugs.rockylinux.org'
allowed_arches:
- x86_64
- aarch64
@@ -64,6 +65,34 @@
- 'isomd5sum'
- 'lorax-templates-rhel'
- 'lorax-templates-generic'
structure:
packages: 'os/Packages'
repodata: 'os/repodata'
iso_map:
hosts:
x86_64: ''
aarch64: ''
images:
dvd:
repos:
- 'BaseOS'
- 'AppStream'
lorax_variants:
- dvd
- minimal
- BaseOS
repos:
- 'BaseOS'
- 'AppStream'
variant: 'BaseOS'
lorax_removes:
- 'libreport-rhel-anaconda-bugzilla'
required_pkgs:
- 'lorax'
- 'genisoimage'
- 'isomd5sum'
- 'lorax-templates-rhel'
- 'lorax-templates-generic'
repoclosure_map:
arches:
x86_64: '--arch=x86_64 --arch=athlon --arch=i686 --arch=i586 --arch=i486 --arch=i386 --arch=noarch'
@@ -91,10 +120,15 @@
- AppStream
extra_files:
git_repo: 'https://git.rockylinux.org/staging/src/rocky-release.git'
git_raw_path: 'https://git.rockylinux.org/staging/src/rocky-release/-/raw/r8/'
branch: 'r8'
gpg:
stable: 'SOURCES/RPM-GPG-KEY-rockyofficial'
testing: 'SOURCES/RPM-GPG-KEY-rockytesting'
list:
- 'SOURCES/COMMUNITY-CHARTER'
- 'SOURCES/EULA'
- 'SOURCES/LICENSE'
- 'SOURCES/RPM-GPG-KEY-rockyofficial'
- 'SOURCES/RPM-GPG-KEY-rockytesting'
...
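
The new structure and iso_map image entries above drive the extra ISO code later in this commit. A minimal sketch of how the keys relate (hypothetical inline values mirroring the config shown here; the real data is loaded into rldict by empanadas.common and pulled out as rlvars in the new build_iso_extra script below):

# Illustrative only: inline stand-in for the rlvars dict produced from this YAML.
rlvars = {
    'structure': {'packages': 'os/Packages', 'repodata': 'os/repodata'},
    'iso_map': {
        'images': {'dvd': {'repos': ['BaseOS', 'AppStream']}},
    },
}

image = 'dvd'
for repo in rlvars['iso_map']['images'][image]['repos']:
    # mirrors how _generate_graft_points() below joins repo/arch with the
    # structure paths when collecting Packages/ and repodata/ for the extra ISO
    print(repo, '->', '/'.join([repo, 'x86_64', rlvars['structure']['packages']]))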

View File

@@ -47,6 +47,9 @@
has_modules:
- 'AppStream'
- 'CRB'
structure:
packages: 'os/Packages'
repodata: 'os/repodata'
iso_map:
hosts:
x86_64: ''
@@ -54,7 +57,10 @@
ppc64le: ''
s390x: ''
images:
dvd:
repos:
- 'BaseOS'
- 'AppStream'
lorax_variants:
- dvd
- minimal

View File

@@ -47,6 +47,9 @@
has_modules:
- 'AppStream'
- 'CRB'
structure:
packages: 'os/Packages'
repodata: 'os/repodata'
iso_map:
hosts:
x86_64: ''
@@ -54,7 +57,10 @@
ppc64le: ''
s390x: ''
images:
dvd:
repos:
- 'BaseOS'
- 'AppStream'
lorax_variants:
- dvd
- minimal

View File

@@ -47,6 +47,9 @@
has_modules:
- 'AppStream'
- 'CRB'
structure:
packages: 'os/Packages'
repodata: 'os/repodata'
iso_map:
hosts:
x86_64: ''
@@ -54,7 +57,10 @@
ppc64le: ''
s390x: ''
images:
dvd:
repos:
- 'BaseOS'
- 'AppStream'
lorax_variants:
- dvd
- minimal

View File

@@ -0,0 +1,37 @@
# Builds extra ISOs
import argparse
from empanadas.common import *
from empanadas.util import Checks
from empanadas.util import IsoBuild
parser = argparse.ArgumentParser(description="ISO Compose")
parser.add_argument('--release', type=str, help="Major Release Version", required=True)
parser.add_argument('--rc', action='store_true', help="Release Candidate")
parser.add_argument('--arch', type=str, help="Architecture")
parser.add_argument('--isolation', type=str, help="Mock Isolation")
parser.add_argument('--local-compose', action='store_true', help="Compose Directory is Here")
parser.add_argument('--logger', type=str)
parser.add_argument('--extra-iso', type=str, help="Granular choice in which iso is built")
parser.add_argument('--extra-iso-mode', type=str)
results = parser.parse_args()
rlvars = rldict[results.release]
major = rlvars['major']
a = IsoBuild(
    rlvars,
    config,
    major=major,
    rc=results.rc,
    arch=results.arch,
    isolation=results.isolation,
    extra_iso=results.extra_iso,
    extra_iso_mode=results.extra_iso_mode,
    compose_dir_is_here=results.local_compose,
    logger=results.logger,
)

def run():
    a.run_build_extra_iso()
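
Once the package is installed, this script is exposed as the build-iso-extra entry point registered in pyproject.toml at the end of this commit; a typical local run would presumably look like: build-iso-extra --release 8 --extra-iso dvd --extra-iso-mode local (all flags defined by the argparse options above), while --extra-iso-mode podman is the container path that is still stubbed out in IsoBuild below.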

View File

@@ -1,6 +1,10 @@
#!/bin/bash
set -ex
{% if inside_podman %}
mkdir /builddir
{% endif %}
cd /builddir
if ! TEMPLATE="$($(head -n1 $(which lorax) | cut -c3-) -c 'import pylorax; print(pylorax.find_templates())')"; then
@@ -14,3 +18,9 @@ fi
{{ implantmd5 }}
{{ make_manifest }}
{% if inside_podman %}
mkdir -p {{ compose_work_iso_dir }}/{{ arch }}
cp /builddir/*.iso {{ compose_work_iso_dir }}/{{ arch }}
cp /builddir/*.iso.manifest {{ compose_work_iso_dir }}/{{ arch }}
{% endif %}
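
The new inside_podman blocks are plain Jinja conditionals; IsoBuild below renders this template through self.tmplenv. A minimal self-contained sketch of how the flag switches the copy-out step on (inline template text and an invented compose path, not the project's real values):

# Self-contained illustration of the inside_podman toggle; the real template is
# rendered from the templates directory via self.tmplenv in IsoBuild.
from jinja2 import Template

snippet = Template("""
{% if inside_podman %}
mkdir -p {{ compose_work_iso_dir }}/{{ arch }}
cp /builddir/*.iso {{ compose_work_iso_dir }}/{{ arch }}
{% endif %}
""")

# Podman run: results are copied out of /builddir into the shared compose tree.
print(snippet.render(inside_podman=True,
                     compose_work_iso_dir="/mnt/compose/8/latest/work/iso",  # invented path
                     arch="x86_64"))

# Local/mock run: the block renders to nothing and the ISO stays in /builddir.
print(snippet.render(inside_podman=False))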

View File

@@ -0,0 +1,49 @@
#!/bin/bash
# This is a template that is used to build extra ISOs for Rocky Linux. Only
# under extreme circumstances should you be filling this out and running it
# manually.
# Vars
MOCK_CFG="/var/tmp/lorax-{{ major }}.cfg"
MOCK_ROOT="/var/lib/mock/{{ shortname|lower }}-{{ major }}-{{ arch }}"
MOCK_RESL="${MOCK_ROOT}/result"
MOCK_CHRO="${MOCK_ROOT}/root"
MOCK_LOG="${MOCK_RESL}/mock-output.log"
IMAGE_SCR="/var/tmp/buildExtraImage.sh"
IMAGE_ISO="{{ shortname }}-{{ major }}.{{ minor }}{{ rc }}-{{ arch }}-dvd{{ discnum|default('1') }}.iso"
ISOLATION="{{ isolation }}"
BUILDDIR="{{ builddir }}"
# Init the container
mock \
-r "${MOCK_CFG}" \
--isolation="${ISOLATION}" \
--enable-network \
--init
init_ret_val=$?
if [ $init_ret_val -ne 0 ]; then
echo "!! MOCK INIT FAILED !!"
exit 1
fi
mkdir -p "${MOCK_RESL}"
cp "${IMAGE_SCR}" "${MOCK_CHRO}${IMAGE_SCR}"
mock \
-r "${MOCK_CFG}" \
--shell \
--isolation="${ISOLATION}" \
--enable-network -- /bin/bash "${IMAGE_SCR}" | tee -a "${MOCK_LOG}"
mock_ret_val=$?
if [ $mock_ret_val -eq 0 ]; then
# Copy resulting data to /var/lib/mock/{{ shortname|lower }}-{{ major }}-{{ arch }}/result
mkdir -p "${MOCK_RESL}"
cp "${MOCK_CHRO}${BUILDDIR}/${IMAGE_ISO}" "${MOCK_RESL}"
else
echo "!! EXTRA ISO RUN FAILED !!"
exit 1
fi
# Clean up?

View File

@@ -33,7 +33,7 @@ mock \
-r "${MOCK_CFG}" \
--shell \
--isolation="${ISOLATION}" \
--enable-network -- /bin/bash "${LORAX_SCR}" | tee -a "${MOCK_LOG}"
mock_ret_val=$?
if [ $mock_ret_val -eq 0 ]; then

View File

@@ -4,17 +4,23 @@ name={{repo.name}}
baseurl={{ repo.baseurl }}
enabled=1
gpgcheck=1
repo_gpgcheck=1
gpgkey={{ repo.gpgkey }}
[{{ repo.name }}-debug]
name={{repo.name}}
baseurl={{ repo.baseurl }}-debug
enabled=1
gpgcheck=1
repo_gpgcheck=1
gpgkey={{ repo.gpgkey }}
[{{ repo.name }}-source]
name={{repo.name}}
baseurl={{ repo.srcbaseurl }}
enabled=1
gpgcheck=1
repo_gpgcheck=1
gpgkey={{ repo.gpgkey }}
{% endfor %}

View File

@@ -360,7 +360,7 @@ class RepoSync:
sync_cmd = ("/usr/bin/dnf reposync -c {}.{} --download-metadata "
"--repoid={} -p {} --forcearch {} --norepopath "
"--gpgcheck --assumeyes 2>&1").format(
self.dnf_config,
a,
r,
@@ -370,7 +370,7 @@ class RepoSync:
debug_sync_cmd = ("/usr/bin/dnf reposync -c {}.{} "
"--download-metadata --repoid={}-debug -p {} --forcearch {} "
"--gpgcheck --norepopath --assumeyes 2>&1").format(
self.dnf_config,
a,
r,
@@ -433,7 +433,7 @@ class RepoSync:
source_sync_cmd = ("/usr/bin/dnf reposync -c {} "
"--download-metadata --repoid={}-source -p {} "
"--gpgcheck --norepopath --assumeyes 2>&1").format(
self.dnf_config,
r,
source_sync_path
@@ -626,7 +626,8 @@ class RepoSync:
repodata = {
'name': repo,
'baseurl': constructed_url,
'srcbaseurl': constructed_url_src,
'gpgkey': self.extra_files['git_raw_path'] + self.extra_files['gpg'][self.gpgkey]
}
repolist.append(repodata)
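
The gpgkey value added to each repodata dict above feeds the new gpgkey= lines in the dnf repo template earlier in this commit. A standalone sketch of that string construction, using the extra_files values from the r8 config hunk at the top (the stable/testing selection is an assumption here, since self.gpgkey is set elsewhere):

# Illustrative only: mirrors self.extra_files['git_raw_path'] + self.extra_files['gpg'][self.gpgkey]
extra_files = {
    'git_raw_path': 'https://git.rockylinux.org/staging/src/rocky-release/-/raw/r8/',
    'gpg': {
        'stable': 'SOURCES/RPM-GPG-KEY-rockyofficial',
        'testing': 'SOURCES/RPM-GPG-KEY-rockytesting',
    },
}
gpgkey_choice = 'stable'  # assumption: self.gpgkey resolves to 'stable' or 'testing'
gpgkey_url = extra_files['git_raw_path'] + extra_files['gpg'][gpgkey_choice]
print(gpgkey_url)
# https://git.rockylinux.org/staging/src/rocky-release/-/raw/r8/SOURCES/RPM-GPG-KEY-rockyofficial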

View File

@@ -22,6 +22,7 @@ import xmltodict
# if we can access s3
import boto3
import kobo.shortcuts
from fnmatch import fnmatch
# This is for treeinfo
from configparser import ConfigParser
@@ -56,6 +57,8 @@
force_download: bool = False,
force_unpack: bool = False,
isolation: str = 'auto',
extra_iso=None,
extra_iso_mode: str = 'local',
compose_dir_is_here: bool = False,
image=None,
logger=None
@@ -83,6 +86,8 @@
self.s3 = s3
self.force_unpack = force_unpack
self.force_download = force_download
self.extra_iso = extra_iso
self.extra_iso_mode = extra_iso_mode
# Relevant major version items
self.arch = arch
@@ -94,6 +99,7 @@
self.repos = rlvars['iso_map']['repos']
self.repo_base_url = config['repo_base_url']
self.project_id = rlvars['project_id']
self.structure = rlvars['structure']
self.extra_files = rlvars['extra_files']
@@ -134,11 +140,11 @@
"work/logs"
)
#self.iso_work_dir = os.path.join(
# self.compose_latest_dir,
# "work/iso",
# config['arch']
#)
self.lorax_work_dir = os.path.join(
self.compose_latest_dir,
@@ -306,12 +312,6 @@
self.log.error('See the logs for more information.')
raise SystemExit()
def run_image_build(self, arch):
"""
Builds the other images
"""
print()
def run_pull_lorax_artifacts(self):
"""
Pulls the required artifacts and unpacks it to work/lorax/$arch
@@ -665,52 +665,322 @@
# Next set of functions are loosely borrowed (in concept) from pungi. Some
# stuff may be combined/mixed together, other things may be simplified or
# reduced in nature.
def run_build_extra_iso(self):
"""
Builds DVD images based on the data created from the initial lorax on
each arch. This should NOT be called during the usual run() section.
"""
sync_root = self.compose_latest_sync
self.log.info(
'[' + Color.BOLD + Color.GREEN + 'INFO' + Color.END + '] ' +
'Starting Extra ISOs phase'
)
self._extra_iso_build_wrap()
self.log.info('Compose repo directory: %s' % sync_root)
self.log.info('ISO result directory: %s/$arch' % self.lorax_work_dir)
self.log.info(
'[' + Color.BOLD + Color.GREEN + 'INFO' + Color.END + '] ' +
'Extra ISO phase completed.'
)
def _extra_iso_build_wrap(self):
"""
Try to figure out where the build is going, we only support mock for
now.
"""
arches_to_build = self.arches
if self.arch:
arches_to_build = [self.arch]
images_to_build = self.iso_map['images']
if self.extra_iso:
images_to_build = [self.extra_iso]
for y in images_to_build:
for a in arches_to_build:
grafts = self._generate_graft_points(
a,
y,
self.iso_map['images'][y]['repos'],
)
if self.extra_iso_mode == 'local':
self._extra_iso_local_config(a, y, grafts)
self._extra_iso_local_run()
elif self.extra_iso_mode == 'podman':
continue
else:
self.log.info(
'[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' +
'Mode specified is not valid.'
)
raise SystemExit()
if self.extra_iso_mode == 'podman':
print()
def _extra_iso_local_config(self, arch, image, grafts):
"""
Local ISO build mode - this should build in mock
"""
self.log.info('Generating Extra ISO configuration and script')
mock_iso_template = self.tmplenv.get_template('isomock.tmpl.cfg')
mock_sh_template = self.tmplenv.get_template('extraisobuild.tmpl.sh')
iso_template = self.tmplenv.get_template('buildExtraImage.tmpl.sh')
mock_iso_path = '/var/tmp/lorax-' + self.major_version + '.cfg'
mock_sh_path = '/var/tmp/extraisobuild.sh'
iso_template_path = '/var/tmp/buildExtraImage.sh'
rclevel = ''
if self.release_candidate:
rclevel = '-' + self.rclvl
mock_iso_template_output = mock_iso_template.render(
arch=self.current_arch,
major=self.major_version,
fullname=self.fullname,
shortname=self.shortname,
required_pkgs=self.required_pkgs,
dist=self.disttag,
repos=self.repolist,
user_agent='{{ user_agent }}',
)
mock_sh_template_output = mock_sh_template.render(
arch=self.current_arch,
major=self.major_version,
isolation=self.mock_isolation,
builddir=self.mock_work_root,
shortname=self.shortname,
)
def _extra_iso_local_run(self):
"""
Runs the actual local process
"""
def _generate_graft_points(
self,
arch,
iso,
variants,
):
"""
Get a list of packages for an extras ISO. This should NOT be called
during the usual run() section.
"""
lorax_base_dir = os.path.join(self.lorax_work_dir, arch)
global_work_dir = os.path.join(self.compose_latest_dir, "work/global")
self.log.info(
'[' + Color.BOLD + Color.GREEN + 'INFO' + Color.END + '] ' +
'Generating graft points for extra iso: (' + arch + ') ' + iso
)
files = {}
# This is the data we need to actually boot
lorax_for_var = os.path.join(lorax_base_dir, iso)
if not os.path.exists(lorax_for_var + '/.treeinfo'):
self.log.info(
'[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' +
'!! .treeinfo is missing, does this variant actually exist? !!'
)
return
# extra files
extra_files_for_var = os.path.join(
global_work_dir,
"extra-files"
)
# actually get the boot data
files = self._get_grafts([lorax_for_var, extra_files_for_var])
# This is to get all the packages for each repo
for repo in variants:
pkg_for_var = os.path.join(
self.compose_latest_sync,
repo,
arch,
self.structure['packages']
)
rd_for_var = os.path.join(
self.compose_latest_sync,
repo,
arch,
self.structure['repodata']
)
for k, v in self._get_grafts([pkg_for_var]).items():
files[os.path.join(repo, "Packages", k)] = v
for k, v in self._get_grafts([rd_for_var]).items():
files[os.path.join(repo, "repodata", k)] = v
grafts = '{}/{}-{}-grafts'.format(
lorax_base_dir,
iso,
arch
)
self._write_grafts(
grafts,
files,
exclude=["*/lost+found", "*/boot.iso"]
)
return grafts
def _get_grafts(self, paths, exclusive_paths=None, exclude=None):
"""
Actually get some grafts (get_iso_contents), called by generate grafts
"""
result = {}
exclude = exclude or []
exclusive_paths = exclusive_paths or []
for p in paths:
if isinstance(p, dict):
tree = p
else:
tree = self._scanning(p)
result = self._merging(result, tree)
for p in exclusive_paths:
tree = self._scanning(p)
result = self._merging(result, tree, exclusive=True)
# Resolves possible symlinks
for key in result.keys():
path = result[key]
if os.path.islink(path):
real_path = os.readlink(path)
abspath = os.path.normpath(os.path.join(os.path.dirname(path), real_path))
if not abspath.startswith(self.compose_base):
result[key] = abspath
return result
def _write_grafts(self, filepath, u, exclude=None):
"""
Write out the graft points
"""
seen = set()
exclude = exclude or []
result = {}
for zl in sorted(u, reverse=True):
dirn = os.path.dirname(zl)
if not zl.endswith("/"):
result[zl] = u[zl]
seen.add(dirn)
continue
found = False
for j in seen:
if j.startswith(dirn):
found = True
break
if not found:
result[zl] = u[zl]
seen.add(dirn)
fh = open(filepath, "w")
for zl in sorted(result, key=self._sorting):
found = False
for excl in exclude:
if fnmatch(zl, excl):
found = True
break
if found:
continue
fh.write("%s=%s\n" % (zl, u[zl]))
fh.close()
def _scanning(self, p):
"""
Scan tree
"""
path = os.path.abspath(p)
result = {}
for root, dirs, files in os.walk(path):
for file in files:
abspath = os.path.join(root, file)
relpath = kobo.shortcuts.relative_path(abspath, path.rstrip("/") + "/")
result[relpath] = abspath
# Include empty directories too
if root != path:
abspath = os.path.join(root, "")
relpath = kobo.shortcuts.relative_path(abspath, path.rstrip("/") + "/")
result[relpath] = abspath
return result
def _merging(self, tree_a, tree_b, exclusive=False):
"""
Merge tree
"""
result = tree_b.copy()
all_dirs = set(
[os.path.dirname(dirn).rstrip("/") for dirn in result if os.path.dirname(dirn) != ""]
)
for dirn in tree_a:
dn = os.path.dirname(dirn)
if exclusive:
match = False
for x in all_dirs:
if dn == x or dn.startswith("%s/" % x):
match = True
break
if match:
continue
if dirn in result:
continue
result[dirn] = tree_a[dirn]
return result
def _sorting(self, k):
"""
Sorting using the is_rpm and is_image funcs. Images are first, extras
next, RPMs last.
"""
rolling = (0 if self._is_image(k) else 2 if self._is_rpm(k) else 1, k)
return rolling
def _is_rpm(self, k):
"""
Is this an RPM? :o
"""
result = k.endswith(".rpm")
return result
def _is_image(self, k):
"""
Is this an image? :o
"""
if (
k.startswith("images/") or
k.startswith("isolinux/") or
k.startswith("EFI/") or
k.startswith("etc/") or
k.startswith("ppc/")
):
return True
if (
k.endswith(".img") or
k.endswith(".ins")
):
return True
return False
def _get_vol_id(self):
"""
@@ -819,8 +1089,13 @@
#joliet = True
#joliet_long = True
#rock = True
cmd = ["/usr/bin/xorrisofs" if use_xorrisofs else "/usr/bin/genisoimage"]
if not os.path.exists(cmd[0]):
self.log.error('%s was not found. Good bye.' % cmd[0])
raise SystemExit("\n\n" + cmd[0] + " was not found.\n\nPlease "
"ensure that you have installed the necessary packages on "
"this system."
)
if iso_level:
cmd.extend(["-iso-level", str(iso_level)])
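
The graft-point file written by _write_grafts() above is a sorted list of iso-path=disk-path pairs: boot and image files first, extra files next, RPMs last, with lost+found and boot.iso filtered out. A minimal sketch of that output format with invented example paths (the real inputs come from the lorax tree and the synced repos):

# Minimal, self-contained sketch of the graft-point file format produced by
# _write_grafts() above (illustrative paths only, not the project's real data).
from fnmatch import fnmatch

files = {
    "images/boot.img": "/mnt/compose/work/lorax/x86_64/dvd/images/boot.img",
    ".treeinfo": "/mnt/compose/work/lorax/x86_64/dvd/.treeinfo",
    "isolinux/isolinux.bin": "/mnt/compose/work/lorax/x86_64/dvd/isolinux/isolinux.bin",
    "BaseOS/Packages/b/bash-0.0-0.el8.x86_64.rpm": "/mnt/compose/latest/BaseOS/x86_64/os/Packages/b/bash-0.0-0.el8.x86_64.rpm",
}
exclude = ["*/lost+found", "*/boot.iso"]

def is_image(k):
    return k.startswith(("images/", "isolinux/", "EFI/", "etc/", "ppc/")) or k.endswith((".img", ".ins"))

def is_rpm(k):
    return k.endswith(".rpm")

def sorting(k):
    # images first, extra files second, RPMs last (mirrors _sorting above)
    return (0 if is_image(k) else 2 if is_rpm(k) else 1, k)

with open("/tmp/dvd-x86_64-grafts", "w") as fh:
    for k in sorted(files, key=sorting):
        if any(fnmatch(k, pat) for pat in exclude):
            continue
        fh.write("%s=%s\n" % (k, files[k]))

A list in this form is what mkisofs-style tools (the genisoimage/xorrisofs selection above) typically consume via -graft-points; wiring the grafts file into the final ISO command is not yet shown in this commit.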

View File

@@ -25,6 +25,7 @@ sync_from_peridot = "empanadas.scripts.sync_from_peridot:run"
sync_from_peridot_test = "empanadas.scripts.sync_from_peridot_test:run"
sync_sig = "empanadas.scripts.sync_sig:run"
build-iso = "empanadas.scripts.build_iso:run"
build-iso-extra = "empanadas.scripts.build_iso_extra:run"
pull-unpack-tree = "empanadas.scripts.pull_unpack_tree:run"
launch-builds = "empanadas.scripts.launch_builds:run"