forked from sig_core/toolkit
Tarball pull and extract
This commit is contained in:
parent db55677a88
commit 72f98dcdb6
35  iso/empanadas/empanadas/scripts/pull_unpack_artifact.py  (Executable file)

@@ -0,0 +1,35 @@
+# builds ISO's
+
+import argparse
+
+from empanadas.common import *
+from empanadas.util import Checks
+from empanadas.util import IsoBuild
+
+parser = argparse.ArgumentParser(description="ISO Artifact Builder")
+
+parser.add_argument('--release', type=str, help="Major Release Version", required=True)
+parser.add_argument('--s3', action='store_true', help="Release Candidate")
+parser.add_argument('--arch', type=str, help="Architecture")
+parser.add_argument('--local-compose', action='store_true', help="Compose Directory is Here")
+parser.add_argument('--force-unpack', action='store_true', help="Force an unpack")
+parser.add_argument('--force-download', action='store_true', help="Force a download")
+parser.add_argument('--logger', type=str)
+results = parser.parse_args()
+rlvars = rldict[results.release]
+major = rlvars['major']
+
+a = IsoBuild(
+    rlvars,
+    config,
+    major=major,
+    s3=results.s3,
+    arch=results.arch,
+    force_unpack=results.force_unpack,
+    force_download=results.force_download,
+    compose_dir_is_here=results.local_compose,
+    logger=results.logger,
+)
+
+def run():
+    a.run_pull_lorax_artifacts()
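For orientation, a minimal sketch of how this entry point is expected to be driven once the pyproject.toml hunk further down registers it as the `pull-unpack-artifact` console script. The '9' release key and the x86_64 arch are illustrative assumptions, not fixed by this diff:

# Sketch only: the pyproject.toml hunk below wires this module up as
#   poetry run pull-unpack-artifact --release 9 --s3 --arch x86_64
# which is roughly equivalent to driving IsoBuild directly:
from empanadas.common import rldict, config
from empanadas.util import IsoBuild

rlvars = rldict['9']              # assumes rldict has a '9' release entry
a = IsoBuild(
    rlvars,
    config,
    major=rlvars['major'],
    s3=True,                      # pull with boto3 instead of plain HTTP
    arch='x86_64',                # restrict to a single architecture
    force_unpack=True,            # re-extract even if .treeinfo exists
)
a.run_pull_lorax_artifacts()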
@@ -452,7 +452,10 @@ class RepoSync:
 
         join_all_pods = ' '.join(entry_name_list)
         time.sleep(3)
-        self.log.info('Syncing %s ...' % r)
+        self.log.info(
+                '[' + Color.BOLD + Color.GREEN + 'INFO' + Color.END + '] ' +
+                'Syncing ' + r + ' ...'
+        )
         pod_watcher = '{} wait {}'.format(
             cmd,
             join_all_pods
@@ -500,7 +503,10 @@ class RepoSync:
         )
 
         entry_name_list.clear()
-        self.log.info('Syncing %s completed' % r)
+        self.log.info(
+                '[' + Color.BOLD + Color.GREEN + 'INFO' + Color.END + '] ' +
+                'Syncing ' + r + ' completed'
+        )
 
         if len(bad_exit_list) > 0:
             self.log.error(
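The colorized prefix in these log calls leans on the `Color` constants from empanadas.common; a minimal sketch of the assumed shape (plain ANSI escape codes — the real class may differ in detail):

# Assumed shape of the Color constants referenced above (ANSI escapes);
# the real definitions live in empanadas.common and may differ.
class Color:
    BOLD = '\033[1m'
    RED = '\033[91m'
    GREEN = '\033[92m'
    YELLOW = '\033[93m'
    END = '\033[0m'   # reset attributes

# Renders e.g. "[INFO] Syncing BaseOS ..." with a bold green INFO tag;
# 'BaseOS' stands in for the repo name r used in the real code.
print('[' + Color.BOLD + Color.GREEN + 'INFO' + Color.END + '] ' +
      'Syncing ' + 'BaseOS' + ' ...')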
@@ -11,7 +11,14 @@ import os.path
 import subprocess
 import shlex
 import time
-import re
+import tarfile
 
+# lazy person's s3 parser
+import requests
+import json
+import xmltodict
+# if we can access s3
+import boto3
+
 # This is for treeinfo
 from configparser import ConfigParser
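The new imports back two listing paths: boto3 when S3 API access is available, and requests plus xmltodict for scraping the bucket's anonymous HTTP index. A self-contained sketch of the xmltodict side, using a hand-written stand-in for S3's ListBucketResult document (not real bucket output):

# Sketch: how xmltodict turns an S3 ListBucketResult document into the
# dict the new code walks.
import xmltodict

xml = """<?xml version="1.0" encoding="UTF-8"?>
<ListBucketResult xmlns="http://s3.amazonaws.com/doc/2006-03-01/">
  <Name>example-bucket</Name>
  <Contents><Key>lorax-9-aarch64-20220614.tar.gz</Key></Contents>
  <Contents><Key>lorax-9-x86_64-20220614.tar.gz</Key></Contents>
</ListBucketResult>"""

resp = xmltodict.parse(xml)
# Repeated <Contents> elements parse to a list of dicts, so the same
# loop as _reqs_determine_latest() below applies:
for y in resp['ListBucketResult']['Contents']:
    if 'tar.gz' in y['Key']:
        print(y['Key'])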
@@ -21,13 +28,6 @@ from productmd.extra_files import ExtraFiles
 import productmd.treeinfo
 # End treeinfo
 
-# lazy person's s3 parser
-import urllib
-import json
-import xmltodict
-# if we can access s3
-import boto3
-
 from jinja2 import Environment, FileSystemLoader
 
 from empanadas.common import Color, _rootdir
@@ -49,6 +49,7 @@ class IsoBuild:
             arch=None,
             rc: bool = False,
             s3: bool = False,
+            force_download: bool = False,
             force_unpack: bool = False,
             isolation: str = 'auto',
             compose_dir_is_here: bool = False,
@@ -77,6 +78,7 @@ class IsoBuild:
         self.release_candidate = rc
         self.s3 = s3
         self.force_unpack = force_unpack
+        self.force_download = force_download
 
         # Relevant major version items
         self.arch = arch
@@ -102,6 +104,9 @@ class IsoBuild:
         self.s3_bucket = config['bucket']
         self.s3_bucket_url = config['bucket_url']
 
+        if s3:
+            self.s3 = boto3.client('s3')
+
         # Templates
         file_loader = FileSystemLoader(f"{_rootdir}/templates")
         self.tmplenv = Environment(loader=file_loader)
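Note that `boto3.client('s3')` is constructed with no explicit keys, so it resolves credentials through boto3's usual chain (environment variables, ~/.aws files, instance profile). A sketch of the access this branch assumes, with a hypothetical bucket name:

# Sketch of the access the `if s3:` branch assumes; 'example-bucket'
# is a hypothetical name.
import boto3

s3 = boto3.client('s3')
resp = s3.list_objects(Bucket='example-bucket')
for obj in resp.get('Contents', []):
    print(obj['Key'])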
@@ -302,21 +307,223 @@ class IsoBuild:
 
-    def run_pull_lorax_artifacts(self):
-        """
-        Pulls the required artifacts and unacps it to work/lorax/$arch
-        """
-        self.log.info('Determining the latest pull...')
-        print()
-
-    def _download_artifacts(self, force_unpack, arch=None):
-        """
-        Download the requested artifact(s)
-        """
-        print()
-
-    def _unpack_artifacts(self, force_unpack, arch=None):
-        """
-        Unpack the requested artifacts(s)
-        """
+    def run_pull_lorax_artifacts(self):
+        """
+        Pulls the required artifacts and unpacks it to work/lorax/$arch
+        """
+        # Determine if we're only managing one architecture out of all of them.
+        # It does not hurt to do everything at once. But the option is there.
+        unpack_single_arch = False
+        arches_to_unpack = self.arches
+        if self.arch:
+            unpack_single_arch = True
+            arches_to_unpack = [self.arch]
+
+        self.log.info(
+                '[' + Color.BOLD + Color.GREEN + 'INFO' + Color.END + '] ' +
+                'Determining the latest pulls...'
+        )
+        if self.s3:
+            latest_artifacts = self._s3_determine_latest()
+        else:
+            latest_artifacts = self._reqs_determine_latest()
+
+        self.log.info(
+                '[' + Color.BOLD + Color.GREEN + 'INFO' + Color.END + '] ' +
+                'Downloading requested artifact(s)'
+        )
+        for arch in arches_to_unpack:
+            lorax_arch_dir = os.path.join(
+                self.lorax_work_dir,
+                arch
+            )
+
+            source_path = latest_artifacts[arch]
+
+            full_drop = '{}/lorax-{}-{}.tar.gz'.format(
+                lorax_arch_dir,
+                self.major_version,
+                arch
+            )
+
+            if not os.path.exists(lorax_arch_dir):
+                os.makedirs(lorax_arch_dir, exist_ok=True)
+
+            self.log.info(
+                'Downloading artifact for ' + Color.BOLD + arch + Color.END
+            )
+            if self.s3:
+                self._s3_download_artifacts(
+                    self.force_download,
+                    source_path,
+                    full_drop
+                )
+            else:
+                self._reqs_download_artifacts(
+                    self.force_download,
+                    source_path,
+                    full_drop
+                )
+        self.log.info(
+                '[' + Color.BOLD + Color.GREEN + 'INFO' + Color.END + '] ' +
+                'Download phase completed'
+        )
+        self.log.info(
+                '[' + Color.BOLD + Color.GREEN + 'INFO' + Color.END + '] ' +
+                'Beginning unpack phase...'
+        )
+
+        for arch in arches_to_unpack:
+            tarname = 'lorax-{}-{}.tar.gz'.format(
+                self.major_version,
+                arch
+            )
+
+            tarball = os.path.join(
+                self.lorax_work_dir,
+                arch,
+                tarname
+            )
+
+            if not os.path.exists(tarball):
+                self.log.error(
+                        '[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' +
+                        'Artifact does not exist: ' + tarball
+                )
+                continue
+
+            self._unpack_artifacts(self.force_unpack, arch, tarball)
+
+        self.log.info(
+                '[' + Color.BOLD + Color.GREEN + 'INFO' + Color.END + '] ' +
+                'Unpack phase completed'
+        )
+
+    def _s3_determine_latest(self):
+        """
+        Using native s3, determine the latest artifacts and return a list
+        """
+        temp = []
+        data = {}
+        try:
+            self.s3.list_objects(Bucket=self.s3_bucket)['Contents']
+        except:
+            self.log.error(
+                    '[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' +
+                    'Cannot access s3 bucket.'
+            )
+            raise SystemExit()
+
+        for y in self.s3.list_objects(Bucket=self.s3_bucket)['Contents']:
+            if 'tar.gz' in y['Key']:
+                temp.append(y['Key'])
+
+        for arch in self.arches:
+            temps = []
+            for y in temp:
+                if arch in y:
+                    temps.append(y)
+            temps.sort(reverse=True)
+            data[arch] = temps[0]
+
+        return data
+
+    def _s3_download_artifacts(self, force_download, source, dest):
+        """
+        Download the requested artifact(s) via s3
+        """
+        if os.path.exists(dest):
+            if not force_download:
+                self.log.warn(
+                        '[' + Color.BOLD + Color.YELLOW + 'WARN' + Color.END + '] ' +
+                        'Artifact at ' + dest + ' already exists'
+                )
+                return
+
+        self.log.info('Downloading to: %s' % dest)
+        try:
+            self.s3.download_file(
+                    Bucket=self.s3_bucket,
+                    Key=source,
+                    Filename=dest
+            )
+        except:
+            self.log.error('There was an issue downloading from %s' % self.s3_bucket)
+
+    def _reqs_determine_latest(self):
+        """
+        Using requests, determine the latest artifacts and return a list
+        """
+        temp = []
+        data = {}
+
+        try:
+            bucket_data = requests.get(self.s3_bucket_url)
+        except requests.exceptions.RequestException as e:
+            self.log.error('The s3 bucket http endpoint is inaccessible')
+            raise SystemExit(e)
+
+        resp = xmltodict.parse(bucket_data.content)
+
+        for y in resp['ListBucketResult']['Contents']:
+            if 'tar.gz' in y['Key']:
+                temp.append(y['Key'])
+
+        for arch in self.arches:
+            temps = []
+            for y in temp:
+                if arch in y:
+                    temps.append(y)
+            temps.sort(reverse=True)
+            data[arch] = temps[0]
+
+        return data
+
+    def _reqs_download_artifacts(self, force_download, source, dest):
+        """
+        Download the requested artifact(s) via requests only
+        """
+        if os.path.exists(dest):
+            if not force_download:
+                self.log.warn(
+                        '[' + Color.BOLD + Color.YELLOW + 'WARN' + Color.END + '] ' +
+                        'Artifact at ' + dest + ' already exists'
+                )
+                return
+        unurl = self.s3_bucket_url + '/' + source
+
+        self.log.info('Downloading to: %s' % dest)
+        try:
+            with requests.get(unurl, allow_redirects=True) as r:
+                with open(dest, 'wb') as f:
+                    f.write(r.content)
+                    f.close()
+                r.close()
+        except requests.exceptions.RequestException as e:
+            self.log.error('There was a problem downloading the artifact')
+            raise SystemExit(e)
+
+    def _unpack_artifacts(self, force_unpack, arch, tarball):
+        """
+        Unpack the requested artifacts(s)
+        """
+        unpack_dir = os.path.join(self.lorax_work_dir, arch)
+        if not force_unpack:
+            file_check = os.path.join(unpack_dir, 'lorax/.treeinfo')
+            if os.path.exists(file_check):
+                self.log.warn(
+                        '[' + Color.BOLD + Color.YELLOW + 'WARN' + Color.END + '] ' +
+                        'Artifact (' + arch + ') already unpacked'
+                )
+                return
+
+        self.log.info('Unpacking %s' % tarball)
+        with tarfile.open(tarball) as t:
+            t.extractall(unpack_dir)
+            t.close()
+
+    def _copy_lorax_to_variant(self, force_unpack, arch):
+        """
+        Copy to variants for easy access of mkiso and copying to compose dirs
+        """
         print()
 
     def run_boot_sync(self):
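One detail worth noting: both `_s3_determine_latest` and `_reqs_determine_latest` pick the "latest" artifact by a reverse lexicographic sort of the matching keys, which only tracks recency when the key names embed a sortable timestamp. A sketch with hypothetical key names:

# Sketch of the per-arch "latest" selection used by both determine-latest
# helpers. Key names are hypothetical; the sort is purely lexicographic,
# so it only finds the newest build when keys embed a sortable date.
keys = [
    'lorax-9-x86_64-20220601.tar.gz',
    'lorax-9-x86_64-20220614.tar.gz',
    'lorax-9-aarch64-20220614.tar.gz',
]

data = {}
for arch in ('x86_64', 'aarch64'):
    temps = sorted((k for k in keys if arch in k), reverse=True)
    data[arch] = temps[0]   # lexicographically greatest key per arch

print(data['x86_64'])   # lorax-9-x86_64-20220614.tar.gz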
63  iso/empanadas/poetry.lock  (generated)

@@ -52,6 +52,25 @@ urllib3 = ">=1.25.4,<1.27"
 [package.extras]
 crt = ["awscrt (==0.13.8)"]
 
+[[package]]
+name = "certifi"
+version = "2022.6.15"
+description = "Python package for providing Mozilla's CA Bundle."
+category = "main"
+optional = false
+python-versions = ">=3.6"
+
+[[package]]
+name = "charset-normalizer"
+version = "2.0.12"
+description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
+category = "main"
+optional = false
+python-versions = ">=3.5.0"
+
+[package.extras]
+unicode_backport = ["unicodedata2"]
+
 [[package]]
 name = "colorama"
 version = "0.4.5"
@@ -60,6 +79,14 @@ category = "dev"
 optional = false
 python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
 
+[[package]]
+name = "idna"
+version = "3.3"
+description = "Internationalized Domain Names in Applications (IDNA)"
+category = "main"
+optional = false
+python-versions = ">=3.5"
+
 [[package]]
 name = "importlib-metadata"
 version = "4.8.3"
@@ -227,6 +254,24 @@ category = "main"
 optional = false
 python-versions = ">=3.6"
 
+[[package]]
+name = "requests"
+version = "2.28.0"
+description = "Python HTTP for Humans."
+category = "main"
+optional = false
+python-versions = ">=3.7, <4"
+
+[package.dependencies]
+certifi = ">=2017.4.17"
+charset-normalizer = ">=2.0.0,<2.1.0"
+idna = ">=2.5,<4"
+urllib3 = ">=1.21.1,<1.27"
+
+[package.extras]
+socks = ["PySocks (>=1.5.6,!=1.5.7)"]
+use_chardet_on_py3 = ["chardet (>=3.0.2,<5)"]
+
 [[package]]
 name = "rpm-py-installer"
 version = "1.1.0"
@@ -309,7 +354,7 @@ testing = ["pytest (>=4.6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytes
 [metadata]
 lock-version = "1.1"
 python-versions = ">=3.7,<4"
-content-hash = "93600aadcd1d588e33fc16d0fd7f505ee10484722c85bdadb612f57b10e9439b"
+content-hash = "d011f4622c248f6aa107fd679616eaa19a897147398c6f52dd0dea0ab1d74486"
 
 [metadata.files]
 atomicwrites = [
@@ -328,10 +373,22 @@ botocore = [
     {file = "botocore-1.27.12-py3-none-any.whl", hash = "sha256:b8ac156e55267da6e728ea0b806bfcd97adf882801cffe7849c4b88ce4780326"},
     {file = "botocore-1.27.12.tar.gz", hash = "sha256:17d3ec9f684d21e06b64d9cb224934557bcd95031e2ecb551bf16271e8722fec"},
 ]
+certifi = [
+    {file = "certifi-2022.6.15-py3-none-any.whl", hash = "sha256:fe86415d55e84719d75f8b69414f6438ac3547d2078ab91b67e779ef69378412"},
+    {file = "certifi-2022.6.15.tar.gz", hash = "sha256:84c85a9078b11105f04f3036a9482ae10e4621616db313fe045dd24743a0820d"},
+]
+charset-normalizer = [
+    {file = "charset-normalizer-2.0.12.tar.gz", hash = "sha256:2857e29ff0d34db842cd7ca3230549d1a697f96ee6d3fb071cfa6c7393832597"},
+    {file = "charset_normalizer-2.0.12-py3-none-any.whl", hash = "sha256:6881edbebdb17b39b4eaaa821b438bf6eddffb4468cf344f09f89def34a8b1df"},
+]
 colorama = [
     {file = "colorama-0.4.5-py2.py3-none-any.whl", hash = "sha256:854bf444933e37f5824ae7bfc1e98d5bce2ebe4160d46b5edf346a89358e99da"},
     {file = "colorama-0.4.5.tar.gz", hash = "sha256:e6c6b4334fc50988a639d9b98aa429a0b57da6e17b9a44f0451f930b6967b7a4"},
 ]
+idna = [
+    {file = "idna-3.3-py3-none-any.whl", hash = "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff"},
+    {file = "idna-3.3.tar.gz", hash = "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"},
+]
 importlib-metadata = [
     {file = "importlib_metadata-4.8.3-py3-none-any.whl", hash = "sha256:65a9576a5b2d58ca44d133c42a241905cc45e34d2c06fd5ba2bafa221e5d7b5e"},
     {file = "importlib_metadata-4.8.3.tar.gz", hash = "sha256:766abffff765960fcc18003801f7044eb6755ffae4521c8e8ce8e83b9c9b0668"},
@@ -486,6 +543,10 @@ pyyaml = [
     {file = "PyYAML-6.0-cp39-cp39-win_amd64.whl", hash = "sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c"},
     {file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"},
 ]
+requests = [
+    {file = "requests-2.28.0-py3-none-any.whl", hash = "sha256:bc7861137fbce630f17b03d3ad02ad0bf978c844f3536d0edda6499dafce2b6f"},
+    {file = "requests-2.28.0.tar.gz", hash = "sha256:d568723a7ebd25875d8d1eaf5dfa068cd2fc8194b2e483d7b1f7c81918dbec6b"},
+]
 rpm-py-installer = [
     {file = "rpm-py-installer-1.1.0.tar.gz", hash = "sha256:66e5f4f9247752ed386345642683103afaee50fb16928878a204bc12504b9bbe"},
 ]
@@ -14,6 +14,7 @@ productmd = "~1.33"
 importlib-resources = "^5.8.0"
 boto3 = "^1.24.12"
 xmltodict = "^0.13.0"
+requests = "^2.28.0"
 
 [tool.poetry.dev-dependencies]
 pytest = "~5"
@@ -23,6 +24,7 @@ sync_from_peridot = "empanadas.scripts.sync_from_peridot:run"
 sync_from_peridot_test = "empanadas.scripts.sync_from_peridot_test:run"
 sync_sig = "empanadas.scripts.sync_sig:run"
 build-iso = "empanadas.scripts.build_iso:run"
+pull-unpack-artifact = "empanadas.scripts.pull_unpack_artifact:run"
 
 [build-system]
 requires = ["poetry-core>=1.0.0"]