Bump to 0.2.0
* Add metadata with README information
* Bump to 0.2.0
* Move more functions into shared
This commit is contained in:
parent 7365ca6b06
commit 04e7e1d164
@@ -1 +1 @@
-__version__ = '0.1.0'
+__version__ = '0.2.0'
@@ -2,5 +2,26 @@ This set of repositories (or "compose") is for {{ fullname }} and was generated
 using Empanadas {{ version }} from the SIG/Core Toolkit.
 
 As this is not a traditional compose, there will be things that you might be
-expecting and do not see, or not expecting and do see.. While we attempted to
-recreate a lot of those elements, it's not perfect.
+expecting and do not see, or not expecting and do see. While we attempted to
+recreate a lot of those elements, it's not perfect. In the future, we do plan
+on having more metadata and providing client libraries that can ingest this
+type of metadata that we produce for easy consumption.
+
+# Notes #
+
+## Checksums ##
+
+CHECKSUM Validation: https://github.com/rocky-linux/checksums
+
+Traditionally, we would "sign" the checksum files with the current GPG key
+of a major release. However, due to how the new build system operates, and to
+ensure strong security within the build system as it pertains to the signing
+keys, this is no longer possible. SIG/Core (Release Engineering) determined
+that we would instead provide verified, signed commits to a proper git
+repository, using our keys with RESF/Rocky Linux email domain names.
+
+With that being said, if you are looking for "verification" of the ISO
+checksums, it is highly recommended to visit the link above.
+
+These are *always* updated with new releases or new images. This includes
+live images as we release them.
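For illustration (not part of this commit), checking a downloaded ISO against a digest published in that checksums repository is a plain SHA-256 comparison; the file name and digest below are placeholders:

    import hashlib

    def sha256sum(path, bufsize=1024 * 1024):
        # Hash in 1 MiB chunks so a multi-GiB ISO never has to fit in memory
        h = hashlib.sha256()
        with open(path, 'rb') as f:
            for block in iter(lambda: f.read(bufsize), b''):
                h.update(block)
        return h.hexdigest()

    # Placeholder values; take the real digest from the checksums repository
    expected = '0000000000000000000000000000000000000000000000000000000000000000'
    print(sha256sum('Rocky-x86_64-dvd.iso') == expected)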
@@ -3,6 +3,8 @@
 # under extreme circumstances should you be filling this out and running
 # manually.
 
+set -o pipefail
+
 # Vars
 MOCK_CFG="/var/tmp/lorax-{{ major }}.cfg"
 MOCK_ROOT="/var/lib/mock/{{ shortname|lower }}-{{ major }}-{{ arch }}"
@@ -2,6 +2,8 @@
 # This is a template that is used to build ISO's for Rocky Linux. Only under
 # extreme circumstances should you be filling this out and running manually.
 
+set -o pipefail
+
 # Vars
 MOCK_CFG="/var/tmp/lorax-{{ major }}.cfg"
 MOCK_ROOT="/var/lib/mock/{{ shortname|lower }}-{{ major }}-{{ arch }}"
@@ -18,6 +18,7 @@ import json
 
 from jinja2 import Environment, FileSystemLoader
 
+import empanadas
 from empanadas.common import Color, _rootdir
 from empanadas.util import Shared
 
@@ -987,6 +988,17 @@ class RepoSync:
             'Metadata files phase completed.'
         )
 
+        # Deploy README to metadata directory
+        readme_template = self.tmplenv.get_template('README.tmpl')
+        readme_output = readme_template.render(
+            fullname=self.fullname,
+            version=empanadas.__version__
+        )
+
+        with open(metadata_dir + '/README', 'w') as readme_file:
+            readme_file.write(readme_output)
+            readme_file.close()
+
 
     def deploy_treeinfo(self, repo, sync_root, arch):
         """
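For readers unfamiliar with the pattern in the hunk above, this is the standard Jinja2 render-and-write flow. A minimal standalone sketch, with a hypothetical template directory in place of the empanadas tmplenv setup:

    from jinja2 import Environment, FileSystemLoader

    # Hypothetical template directory; empanadas resolves its own via _rootdir
    env = Environment(loader=FileSystemLoader('/path/to/templates'))
    template = env.get_template('README.tmpl')

    # Same variables the template references: {{ fullname }} and {{ version }}
    output = template.render(fullname='Rocky Linux', version='0.2.0')

    # Write mode is required here; opening read-only would make write() fail
    with open('/tmp/README', 'w') as readme:
        readme.write(output)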
@@ -15,11 +15,11 @@ import tarfile
 import shutil
 
 # lazy person's s3 parser
-import requests
-import json
-import xmltodict
+#import requests
+#import json
+#import xmltodict
 # if we can access s3
-import boto3
+#import boto3
 # relative_path, compute_file_checksums
 import kobo.shortcuts
 from fnmatch import fnmatch
@@ -122,8 +122,8 @@ class IsoBuild:
         self.s3_bucket = config['bucket']
         self.s3_bucket_url = config['bucket_url']
 
-        if s3:
-            self.s3 = boto3.client('s3')
+        #if s3:
+        #    self.s3 = boto3.client('s3')
 
         # arch specific
         self.hfs_compat = hfs_compat
@@ -352,9 +352,21 @@ class IsoBuild:
             'Determining the latest pulls...'
         )
         if self.s3:
-            latest_artifacts = self._s3_determine_latest()
+            latest_artifacts = Shared.s3_determine_latest(
+                self.s3_bucket,
+                self.release,
+                self.arches,
+                'tar.gz',
+                self.log
+            )
         else:
-            latest_artifacts = self._reqs_determine_latest()
+            latest_artifacts = Shared.reqs_determine_latest(
+                self.s3_bucket_url,
+                self.release,
+                self.arches,
+                'tar.gz',
+                self.log
+            )
 
         self.log.info(
             '[' + Color.BOLD + Color.GREEN + 'INFO' + Color.END + '] ' +
@@ -381,16 +393,20 @@ class IsoBuild:
                 'Downloading artifact for ' + Color.BOLD + arch + Color.END
             )
             if self.s3:
-                self._s3_download_artifacts(
+                Shared.s3_download_artifacts(
                     self.force_download,
+                    self.s3_bucket,
                     source_path,
-                    full_drop
+                    full_drop,
+                    self.log
                 )
             else:
-                self._reqs_download_artifacts(
+                Shared.reqs_download_artifacts(
                     self.force_download,
+                    self.s3_bucket_url,
                     source_path,
-                    full_drop
+                    full_drop,
+                    self.log
                 )
             self.log.info(
                 '[' + Color.BOLD + Color.GREEN + 'INFO' + Color.END + '] ' +
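One caveat with the requests fallback taken here: the helper it now calls (Shared.reqs_download_artifacts, added further below) buffers the whole artifact in memory through r.content before writing it out. A streamed variant (an alternative sketch, not what this commit implements) keeps memory flat for large tarballs:

    import requests

    def download_streamed(url, dest, chunk_size=1024 * 1024):
        # stream=True defers the body; iter_content writes it out in 1 MiB chunks
        with requests.get(url, stream=True, allow_redirects=True) as r:
            r.raise_for_status()
            with open(dest, 'wb') as f:
                for chunk in r.iter_content(chunk_size=chunk_size):
                    f.write(chunk)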
@@ -464,111 +480,6 @@ class IsoBuild:
             )
             self._copy_nondisc_to_repo(self.force_unpack, arch, variant)
 
-
-    def _s3_determine_latest(self):
-        """
-        Using native s3, determine the latest artifacts and return a dict
-        """
-        temp = []
-        data = {}
-        try:
-            self.s3.list_objects(Bucket=self.s3_bucket)['Contents']
-        except:
-            self.log.error(
-                '[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' +
-                'Cannot access s3 bucket.'
-            )
-            raise SystemExit()
-
-        for y in self.s3.list_objects(Bucket=self.s3_bucket)['Contents']:
-            if 'tar.gz' in y['Key'] and self.release in y['Key']:
-                temp.append(y['Key'])
-
-        for arch in self.arches:
-            temps = []
-            for y in temp:
-                if arch in y:
-                    temps.append(y)
-            temps.sort(reverse=True)
-            data[arch] = temps[0]
-
-        return data
-
-    def _s3_download_artifacts(self, force_download, source, dest):
-        """
-        Download the requested artifact(s) via s3
-        """
-        if os.path.exists(dest):
-            if not force_download:
-                self.log.warn(
-                    '[' + Color.BOLD + Color.YELLOW + 'WARN' + Color.END + '] ' +
-                    'Artifact at ' + dest + ' already exists'
-                )
-                return
-
-        self.log.info('Downloading ({}) to: {}'.format(source, dest))
-        try:
-            self.s3.download_file(
-                Bucket=self.s3_bucket,
-                Key=source,
-                Filename=dest
-            )
-        except:
-            self.log.error('There was an issue downloading from %s' % self.s3_bucket)
-
-    def _reqs_determine_latest(self):
-        """
-        Using requests, determine the latest artifacts and return a list
-        """
-        temp = []
-        data = {}
-
-        try:
-            bucket_data = requests.get(self.s3_bucket_url)
-        except requests.exceptions.RequestException as e:
-            self.log.error('The s3 bucket http endpoint is inaccessible')
-            raise SystemExit(e)
-
-        resp = xmltodict.parse(bucket_data.content)
-
-        for y in resp['ListBucketResult']['Contents']:
-            if 'tar.gz' in y['Key'] and self.release in y['Key']:
-                temp.append(y['Key'])
-
-        for arch in self.arches:
-            temps = []
-            for y in temp:
-                if arch in y:
-                    temps.append(y)
-            temps.sort(reverse=True)
-            data[arch] = temps[0]
-
-        return data
-
-    def _reqs_download_artifacts(self, force_download, source, dest):
-        """
-        Download the requested artifact(s) via requests only
-        """
-        if os.path.exists(dest):
-            if not force_download:
-                self.log.warn(
-                    '[' + Color.BOLD + Color.YELLOW + 'WARN' + Color.END + '] ' +
-                    'Artifact at ' + dest + ' already exists'
-                )
-                return
-        unurl = self.s3_bucket_url + '/' + source
-
-        self.log.info('Downloading ({}) to: {}'.format(source, dest))
-        try:
-            with requests.get(unurl, allow_redirects=True) as r:
-                with open(dest, 'wb') as f:
-                    f.write(r.content)
-                    f.close()
-                r.close()
-        except requests.exceptions.RequestException as e:
-            self.log.error('There was a problem downloading the artifact')
-            raise SystemExit(e)
-
     def _unpack_artifacts(self, force_unpack, arch, tarball):
         """
         Unpack the requested artifacts(s)
@@ -729,7 +640,8 @@ class IsoBuild:
         if not os.path.exists(pathway):
             self.log.error(
                 '[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' +
-                'Repo and Image variant do NOT match'
+                'Repo and Image variant either do NOT match or do ' +
+                'NOT exist. Are you sure you have synced the repository?'
             )
 
         if not force_unpack:
@@ -6,7 +6,11 @@ import hashlib
 import shlex
 import subprocess
 import yaml
+import requests
+import boto3
+import xmltodict
 import productmd.treeinfo
+import empanadas
 from empanadas.common import Color
 
 class ArchCheck:
@@ -233,7 +237,7 @@ class Shared:
         metadata = {
                 "header": {
                     "name": "empanadas",
-                    "version": "0.2.0",
+                    "version": empanadas.__version__,
                     "type": "toolkit",
                     "maintainer": "SIG/Core"
                 },
@@ -507,3 +511,114 @@ class Shared:
         os.makedirs(dest, exist_ok=True)
 
         return 'Not available', 1
+
+    @staticmethod
+    def s3_determine_latest(s3_bucket, release, arches, filetype, logger):
+        """
+        Using native s3, determine the latest artifacts and return a dict
+        """
+        temp = []
+        data = {}
+        s3 = boto3.client('s3')
+
+        try:
+            s3.list_objects(Bucket=s3_bucket)['Contents']
+        except:
+            logger.error(
+                '[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' +
+                'Cannot access s3 bucket.'
+            )
+            raise SystemExit()
+
+        for y in s3.list_objects(Bucket=s3_bucket)['Contents']:
+            if filetype in y['Key'] and release in y['Key']:
+                temp.append(y['Key'])
+
+        for arch in arches:
+            temps = []
+            for y in temp:
+                if arch in y:
+                    temps.append(y)
+            temps.sort(reverse=True)
+            data[arch] = temps[0]
+
+        return data
+
+    @staticmethod
+    def s3_download_artifacts(force_download, s3_bucket, source, dest, logger):
+        """
+        Download the requested artifact(s) via s3
+        """
+        s3 = boto3.client('s3')
+        if os.path.exists(dest):
+            if not force_download:
+                logger.warn(
+                    '[' + Color.BOLD + Color.YELLOW + 'WARN' + Color.END + '] ' +
+                    'Artifact at ' + dest + ' already exists'
+                )
+                return
+
+        logger.info('Downloading ({}) to: {}'.format(source, dest))
+        try:
+            s3.download_file(
+                Bucket=s3_bucket,
+                Key=source,
+                Filename=dest
+            )
+        except:
+            logger.error('There was an issue downloading from %s' % s3_bucket)
+
+    @staticmethod
+    def reqs_determine_latest(s3_bucket_url, release, arches, filetype, logger):
+        """
+        Using requests, determine the latest artifacts and return a list
+        """
+        temp = []
+        data = {}
+
+        try:
+            bucket_data = requests.get(s3_bucket_url)
+        except requests.exceptions.RequestException as e:
+            logger.error('The s3 bucket http endpoint is inaccessible')
+            raise SystemExit(e)
+
+        resp = xmltodict.parse(bucket_data.content)
+
+        for y in resp['ListBucketResult']['Contents']:
+            if filetype in y['Key'] and release in y['Key']:
+                temp.append(y['Key'])
+
+        for arch in arches:
+            temps = []
+            for y in temp:
+                if arch in y:
+                    temps.append(y)
+            temps.sort(reverse=True)
+            data[arch] = temps[0]
+
+        return data
+
+    @staticmethod
+    def reqs_download_artifacts(force_download, s3_bucket_url, source, dest, logger):
+        """
+        Download the requested artifact(s) via requests only
+        """
+        if os.path.exists(dest):
+            if not force_download:
+                logger.warn(
+                    '[' + Color.BOLD + Color.YELLOW + 'WARN' + Color.END + '] ' +
+                    'Artifact at ' + dest + ' already exists'
+                )
+                return
+        unurl = s3_bucket_url + '/' + source
+
+        logger.info('Downloading ({}) to: {}'.format(source, dest))
+        try:
+            with requests.get(unurl, allow_redirects=True) as r:
+                with open(dest, 'wb') as f:
+                    f.write(r.content)
+                    f.close()
+                r.close()
+        except requests.exceptions.RequestException as e:
+            logger.error('There was a problem downloading the artifact')
+            raise SystemExit(e)
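Both determine_latest helpers share one selection rule: filter the bucket keys by release and file type, then take the lexicographically greatest match per architecture, which is the newest key as long as key names embed sortable timestamps. A minimal sketch of that logic, with hypothetical key names:

    # Hypothetical bucket keys; real ones come from list_objects / ListBucketResult
    keys = [
        'buildimage-9.0-x86_64-20220620.tar.gz',
        'buildimage-9.0-x86_64-20220701.tar.gz',
        'buildimage-9.0-aarch64-20220701.tar.gz',
    ]

    latest = {}
    for arch in ('x86_64', 'aarch64'):
        # Reverse lexicographic sort puts the newest timestamped key first
        matches = sorted((k for k in keys if arch in k), reverse=True)
        latest[arch] = matches[0]

    # latest['x86_64'] -> 'buildimage-9.0-x86_64-20220701.tar.gz'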
@@ -2,4 +2,4 @@ from empanadas import __version__
 
 
 def test_version():
-    assert __version__ == '0.1.0'
+    assert __version__ == '0.2.0'
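This assertion has to be bumped by hand on every release. One way to avoid that (a sketch assuming the package is installed, not what this repo does) is to compare the module constant against the installed distribution metadata:

    from importlib.metadata import version as dist_version

    from empanadas import __version__


    def test_version():
        # The module constant must match the installed package version,
        # so pyproject.toml and __init__.py cannot silently drift apart.
        assert __version__ == dist_version('empanadas')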