forked from sig_core/toolkit
images should be pulled based on format and name
This commit is contained in:
parent 4c426ca1e3
commit a4ee9ecc02
@@ -1,12 +1,21 @@
 # iso
 
 ## Setup / Install
 
 1. Install [Poetry](https://python-poetry.org/docs/)
 
 2. Setup: `poetry install`
 
+3. Install dependencies: `dnf install podman mock`
+
 3. Have fun
 
+## Reliance on podman and mock
+
+### Why podman?
+
+Podman is required for performing reposyncs: spinning up several podman containers proved easier than managing several mock chroots, and it is faster than syncing one repository at a time in a loop. Podman is also used to parallelize ISO builds.
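The parallel pattern described above can be sketched in a few lines of Python. Everything here is illustrative rather than the toolkit's real entry point: the container image name and repo IDs are hypothetical, though `podman run --rm` and `dnf reposync --repoid` are real invocations.

```python
import subprocess
from concurrent.futures import ThreadPoolExecutor

# Hypothetical repo IDs; the real list comes from the toolkit's configs.
REPOS = ['BaseOS', 'AppStream', 'CRB']

def sync_one(repo):
    # One throwaway podman container per repository, instead of one
    # mock chroot at a time in a loop.
    return subprocess.run(
        ['podman', 'run', '--rm', 'localhost/sync-image',  # image name is made up
         'dnf', 'reposync', '--repoid', repo],
        check=False,
    ).returncode

# Running the containers concurrently is what makes this faster than
# a serial loop.
with ThreadPoolExecutor(max_workers=len(REPOS)) as pool:
    rcodes = list(pool.map(sync_one, REPOS))
```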
+
+### Why mock?
+
+There are cases where running `mock` is preferred, for example when building lorax images. Since you cannot build a lorax image for an architecture your system does not support, "parallelizing" those builds was out of the question. Mock support was added not only for local testing without podman, but also so the tooling can run in our peridot kube cluster, once per architecture.
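In code, the serial per-architecture mock path amounts to something like this sketch; the chroot config name is hypothetical, while `mock -r`, `--init`, and `--install` are standard mock options.

```python
import platform
import subprocess

# lorax can only build for the architecture it runs on, so this runs
# once per native arch (e.g. one kube pod per architecture) rather
# than being parallelized across arches on a single host.
arch = platform.machine()              # 'x86_64', 'aarch64', ...
cfg = 'rocky-9-{}'.format(arch)        # hypothetical mock root name

subprocess.run(['mock', '-r', cfg, '--init'], check=True)
subprocess.run(['mock', '-r', cfg, '--install', 'lorax'], check=True)
```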
 
 ## Updating dependencies
@@ -16,9 +25,8 @@ Changes to the poetry.lock should be committed if dependencies are added or updated
 
 ## TODO
 
-Verbose mode should exist to output everything that's being called or ran.
-
-There should be additional logging regardless, not just to stdout, but also to a file.
+* Verbose mode should exist to output everything that's being called or run.
+* There should be additional logging regardless, not just to stdout, but also to a file.
 
 ## scripts
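The second TODO item maps directly onto the standard library; a minimal sketch (the logger name and file path are illustrative):

```python
import logging

# One logger, two handlers: everything printed to the console is also
# appended to a file, which is what the TODO above asks for.
log = logging.getLogger('empanadas')
log.setLevel(logging.INFO)

console = logging.StreamHandler()
logfile = logging.FileHandler('/tmp/empanadas.log')  # illustrative path
fmt = logging.Formatter('%(asctime)s %(levelname)s %(message)s')

for handler in (console, logfile):
    handler.setFormatter(fmt)
    log.addHandler(handler)

log.info('Image download phase completed')  # lands in both places
```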
@@ -27,6 +35,10 @@ There should be additional logging regardless, not just to stdout, but also to a file.
 * sync_sig -> Syncs SIG repositories from Peridot
 * build-iso -> Builds initial ISOs using Lorax
 * build-iso-extra -> Builds DVDs and other images based on Lorax data
+* build-iso-live -> Builds live images
+* pull-unpack-tree -> Pulls the latest lorax data from an S3 bucket and configures treeinfo
+* pull-cloud-image -> Pulls the latest cloud images from an S3 bucket
+* finalize_compose -> Finalizes a compose with metadata and checksums, as well as copies images
 * launch-builds -> Creates a kube config to run build-iso
 * build-image -> Runs build-iso
 ```
@@ -86,11 +86,10 @@
     - 'xorriso'
   cloudimages:
     images:
-      - EC2
-      - GenericCloud
-    formats:
-      - qcow2
-      - raw
+      EC2:
+        format: raw
+      GenericCloud:
+        format: qcow2
   livemap:
     git_repo: 'https://git.resf.org/sig_core/kickstarts.git'
     branch: 'r9'
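This schema change is the point of the commit: each image now declares its own format instead of every image being tried against a flat `formats` list. A minimal sketch of how the new mapping is consumed, assuming PyYAML; the lookup mirrors the `IsoBuild` change further down.

```python
import yaml  # assumes PyYAML is available

config = yaml.safe_load("""
cloudimages:
  images:
    EC2:
      format: raw
    GenericCloud:
      format: qcow2
""")

# One (name, format) pair per image: EC2 is only pulled as raw,
# GenericCloud only as qcow2.
for imagename, settings in config['cloudimages']['images'].items():
    formattype = settings['format']
    print(imagename, formattype)
```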
@@ -74,11 +74,10 @@
     - 'xorriso'
   cloudimages:
     images:
-      - EC2
-      - GenericCloud
-    formats:
-      - qcow2
-      - raw
+      EC2:
+        format: raw
+      GenericCloud:
+        format: qcow2
   livemap:
     git_repo: 'https://git.resf.org/sig_core/kickstarts.git'
     branch: 'r9-beta'
@@ -74,11 +74,10 @@
     - 'xorriso'
   cloudimages:
     images:
-      - EC2
-      - GenericCloud
-    formats:
-      - qcow2
-      - raw
+      EC2:
+        format: raw
+      GenericCloud:
+        format: qcow2
   livemap:
     git_repo: 'https://git.resf.org/sig_core/kickstarts.git'
     branch: 'r9'
@@ -74,11 +74,10 @@
     - 'xorriso'
   cloudimages:
     images:
-      - EC2
-      - GenericCloud
-    formats:
-      - qcow2
-      - raw
+      EC2:
+        format: raw
+      GenericCloud:
+        format: qcow2
   livemap:
     git_repo: 'https://git.resf.org/sig_core/kickstarts.git'
     branch: 'r9lh'
@@ -74,11 +74,10 @@
     - 'xorriso'
   cloudimages:
     images:
-      - EC2
-      - GenericCloud
-    formats:
-      - qcow2
-      - raw
+      EC2:
+        format: raw
+      GenericCloud:
+        format: qcow2
   livemap:
     git_repo: 'https://git.resf.org/sig_core/kickstarts.git'
     branch: 'rln'
BIN iso/empanadas/empanadas/util/.shared.py.swp (new file)
Binary file not shown.
@@ -1329,71 +1329,85 @@ class IsoBuild:
 
         for imagename in self.cloudimages['images']:
             self.log.info(Color.INFO + 'Determining the latest images for ' + imagename + ' ...')
+            formattype = self.cloudimages['images'][imagename]['format']
 
-            for formattype in self.cloudimages['formats']:
-                if self.s3:
-                    latest_artifacts = Shared.s3_determine_latest(
-                            self.s3_bucket,
-                            self.release,
-                            self.arches,
-                            formattype,
-                            imagename,
-                            self.log
-                    )
-                else:
-                    latest_artifacts = Shared.reqs_determine_latest(
-                            self.s3_bucket_url,
-                            self.release,
-                            self.arches,
-                            formattype,
-                            imagename,
-                            self.log
-                    )
-
-                if not len(latest_artifacts) > 0:
-                    self.log.warn(Color.WARN + 'No images found.')
-                    continue
-
-                self.log.info(Color.INFO + 'Attempting to download requested artifacts')
-                for arch in arches_to_unpack:
-                    image_arch_dir = os.path.join(
-                            self.image_work_dir,
-                            arch
-                    )
-
-                    if arch not in latest_artifacts.keys():
-                        self.log.warn(Color.WARN + 'Artifact for ' + imagename +
-                                ' ' + arch + ' (' + formattype + ') does not exist.')
-                        continue
-
-                    source_path = latest_artifacts[arch]
-                    drop_name = source_path.split('/')[-1]
-                    full_drop = '{}/{}'.format(
-                            image_arch_dir,
-                            drop_name
-                    )
-
-                    if not os.path.exists(image_arch_dir):
-                        os.makedirs(image_arch_dir, exist_ok=True)
-
-                    self.log.info('Downloading artifact for ' + Color.BOLD + arch + Color.END)
-                    if self.s3:
-                        Shared.s3_download_artifacts(
-                                self.force_download,
-                                self.s3_bucket,
-                                source_path,
-                                full_drop,
-                                self.log
-                        )
-                    else:
-                        Shared.reqs_download_artifacts(
-                                self.force_download,
-                                self.s3_bucket_url,
-                                source_path,
-                                full_drop,
-                                self.log
-                        )
+            if self.s3:
+                latest_artifacts = Shared.s3_determine_latest(
+                        self.s3_bucket,
+                        self.release,
+                        arches_to_unpack,
+                        formattype,
+                        imagename,
+                        self.log
+                )
+            else:
+                latest_artifacts = Shared.reqs_determine_latest(
+                        self.s3_bucket_url,
+                        self.release,
+                        arches_to_unpack,
+                        formattype,
+                        imagename,
+                        self.log
+                )
+
+            if not len(latest_artifacts) > 0:
+                self.log.warn(Color.WARN + 'No images found.')
+                continue
+
+            self.log.info(Color.INFO + 'Attempting to download requested artifacts')
+            for arch in arches_to_unpack:
+                image_arch_dir = os.path.join(
+                        self.image_work_dir,
+                        arch
+                )
+
+                if arch not in latest_artifacts.keys():
+                    self.log.warn(Color.WARN + 'Artifact for ' + imagename +
+                            ' ' + arch + ' (' + formattype + ') does not exist.')
+                    continue
+
+                source_path = latest_artifacts[arch]
+                drop_name = source_path.split('/')[-1]
+                full_drop = '{}/{}'.format(
+                        image_arch_dir,
+                        drop_name
+                )
+
+                checksum_drop = '{}/{}.CHECKSUM'.format(
+                        image_arch_dir,
+                        drop_name
+                )
+
+                if not os.path.exists(image_arch_dir):
+                    os.makedirs(image_arch_dir, exist_ok=True)
+
+                self.log.info('Downloading artifact for ' + Color.BOLD + arch + Color.END)
+                if self.s3:
+                    Shared.s3_download_artifacts(
+                            self.force_download,
+                            self.s3_bucket,
+                            source_path,
+                            full_drop,
+                            self.log
+                    )
+                else:
+                    Shared.reqs_download_artifacts(
+                            self.force_download,
+                            self.s3_bucket_url,
+                            source_path,
+                            full_drop,
+                            self.log
+                    )
+
+                self.log.info('Creating checksum ...')
+                checksum = Shared.get_checksum(full_drop, self.checksum, self.log)
+                if not checksum:
+                    self.log.error(Color.FAIL + full_drop + ' not found! Are you sure we copied it?')
+                    continue
+                with open(checksum_drop, 'w+') as c:
+                    c.write(checksum)
+                    c.close()
 
         self.log.info(Color.INFO + 'Image download phase completed')
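`Shared.get_checksum` itself is not shown in this diff; a stand-in along these lines would satisfy the call above. The chunked read and the BSD-style output line are assumptions, not the actual implementation.

```python
import hashlib
import os


def get_checksum(path, hashtype, logger):
    """Hypothetical stand-in for Shared.get_checksum: returns a line
    for the .CHECKSUM drop, or None when the artifact is missing."""
    if not os.path.exists(path):
        logger.error('%s does not exist', path)
        return None

    hasher = hashlib.new(hashtype)  # e.g. 'sha256'
    with open(path, 'rb') as f:
        # Chunked reads keep multi-gigabyte cloud images out of memory.
        for chunk in iter(lambda: f.read(1024 * 1024), b''):
            hasher.update(chunk)

    return '{} ({}) = {}\n'.format(
        hashtype.upper(), os.path.basename(path), hasher.hexdigest()
    )
```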