Compare commits
98 Commits
190e1b4b22
...
4a4c1c0aeb
SHA1
4a4c1c0aeb
929aa97e8b
03d0c585ae
ca47a82d99
beebdaa105
813acedf84
9aada45f95
4a278cb091
37714cabd6
f1fbcff0ef
8017026bc8
3207fd4ad0
20bf0812db
6b236b7b5f
911f835bfb
f10a172c17
76012c8549
1ba76a849a
cee688b2c2
9e4f89cf6d
a3c5d33ae8
520db534be
30d1c317cd
9568b0cbcb
1f94680924
193c3f9b73
40b3af462d
5f2f3cae5b
007f571224
3cccd03d55
767362aceb
9e9955a0de
a4ee9ecc02
4c426ca1e3
aaa89c74a4
6880403c38
343fe053d9
8f9f2646b7
e9e37384ae
22184f30a4
d482019ca5
73f08780d9
b54447571b
6946b737fc
a7cf5db050
cf13fb0a02
1d5447b1fb
3aa640ec45
138d1076f5
554937009e
8954987365
fe4daffb25
1c90edaa70
b89ebe777a
340a6a3377
843f412923
9536ab0743
55abe763ef
04e7e1d164
7365ca6b06
d84a686102
b78c4a774d
f308e87b15
93d6bae08c
f9166541f4
79682d0e98
b9037585c7
77178e9657
3cf47dd85c
361c155481
957bf5ef3f
462ea264b3
c081f6f202
2884bb0eaa
412a7ab089
e47ca962af
c1f1be9353
4bc377cd44
7f6f7babf5
2c3409de51
49b001e31d
5a02fe5a25
4922e283d6
1d710a6d42
1acca22141
2e38400255
69317f3881
1a04399851
f342046f25
f65a331826
7d7163a156
760967211b
710cb08134
0deaae0c65
56799df270
79425e848e
e60f6524a2
4bf6fb6618
47  .github/workflows/imagefactory-image.yml (vendored, new file)
@@ -0,0 +1,47 @@
---
name: Build empanada images for imagefactory

on:
  push:
    branches: [ $default-branch, "devel" ]
  pull_request:
    branches: [ $default-branch ]
  workflow_dispatch:

jobs:
  buildx:
    runs-on:
      - ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v2

      - name: Set up QEMU
        uses: docker/setup-qemu-action@v1
      # https://github.com/docker/setup-buildx-action
      - name: Set up Docker Buildx
        id: buildx
        uses: docker/setup-buildx-action@v1
        with:
          install: true

      - name: Login to ghcr
        if: github.event_name != 'pull_request'
        uses: docker/login-action@v1
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Build and push
        id: docker_build
        uses: docker/build-push-action@v2
        with:
          builder: ${{ steps.buildx.outputs.name }}
          platforms: linux/amd64,linux/arm64,linux/s390x,linux/ppc64le
          context: ./iso/empanadas
          file: ./iso/empanadas/Containerfile.imagefactory
          push: ${{ github.event_name != 'pull_request' }}
          tags: ghcr.io/rocky-linux/empanadas-imagefactory:latest
          cache-from: type=gha
          cache-to: type=gha,mode=max
6  .github/workflows/mix-empanadas.yml (vendored)
@@ -1,9 +1,9 @@
 ---
-name: Build empanada container images
+name: Build empanada container images for lorax

 on:
   push:
-    branches: [ $default-branch ]
+    branches: [ $default-branch, "devel" ]
   pull_request:
     branches: [ $default-branch ]
   workflow_dispatch:
@@ -42,6 +42,6 @@ jobs:
           context: ./iso/empanadas
           file: ./iso/empanadas/Containerfile
           push: ${{ github.event_name != 'pull_request' }}
-          tags: ghcr.io/neilhanlon/sig-core-toolkit:latest
+          tags: ghcr.io/rocky-linux/sig-core-toolkit:latest
           cache-from: type=gha
           cache-to: type=gha,mode=max
1  .gitignore (vendored, new file)
@@ -0,0 +1 @@
.swp
13  README.md
@@ -3,10 +3,9 @@ sig-core-toolkit

 Release Engineering toolkit for repeatable operations or functionality testing.

-Currently mirrored at our [github](https://github.com/rocky-linux),
-[Rocky Linux Git Service](https://git.rockylinux.org), and the
-[RESF Git Service](https://git.resf.org). Changes either occur at the Rocky
-Linux Git Service or RESF Git Service.
+Currently mirrored at our [github](https://github.com/rocky-linux), and the
+[RESF Git Service](https://git.resf.org). Changes will typically occur at the
+RESF Git Service.

 What does this have?
 --------------------
@@ -14,10 +13,10 @@ What does this have?
 * analyze -> Analysis utilities (such as download stats)
 * chat    -> mattermost related utilities
 * func    -> (mostly defunct) testing scripts and tools to test base functionality
-* iso     -> ISO related utilities
+* iso     -> ISO, Compose, and Sync related utilities, primarily for Rocky Linux 9+
 * live    -> Live image related utilities
 * mangle  -> Manglers and other misc stuff
-* sync    -> Sync tools, primarily for Rocky Linux 8
+* sync    -> Sync tools, primarily for Rocky Linux 8 and will eventually be deprecated

 How can I help?
 ---------------
@@ -28,7 +27,7 @@ when you make changes:
 * Have pre-commit installed
 * Have shellcheck installed
 * Shell Scripts: These must pass a shellcheck test!
-* Python scripts: Try your best to follow PEP8 guidelines
+* Python scripts: Try your best to follow PEP8 guidelines (even the best linters get things wrong)

 Your PR should be against the devel branch at all times. PR's against the main
 branch will be closed.
59  func/ipa.sh (new file)
@@ -0,0 +1,59 @@
#!/bin/bash
# Release Engineering Core Functionality Testing
# Louis Abel <label@rockylinux.org> @nazunalika

################################################################################
# Settings and variables

# Exits on any non-zero exit status - Disabled for now.
#set -e
# Undefined variables will cause an exit
set -u

COMMON_EXPORTS='./common/exports.sh'
COMMON_IMPORTS='./common/imports.sh'
SELINUX=$(getenforce)

# End
################################################################################

# shellcheck source=/dev/null disable=SC2015
[ -f $COMMON_EXPORTS ] && source $COMMON_EXPORTS || { echo -e "\n[-] $(date): Variables cannot be sourced."; exit 1; }
# shellcheck source=/dev/null disable=SC2015
[ -f $COMMON_IMPORTS ] && source $COMMON_IMPORTS || { echo -e "\n[-] $(date): Functions cannot be sourced."; exit 1; }
# Init log
# shellcheck disable=SC2015
[ -e "$LOGFILE" ] && m_recycleLog || touch "$LOGFILE"
# SELinux check
if [ "$SELINUX" != "Enforcing" ]; then
  echo -e "\n[-] $(date): SELinux is not enforcing."
  exit 1
fi

r_log "internal" "Starting Release Engineering Core Tests"

################################################################################
# Script Work

# Skip tests in a list - some tests are already -x, so it won't be an issue
if [ -e skip.list ]; then
  r_log "internal" "Disabling tests"
  # shellcheck disable=SC2162
  grep -E "^${RL_VER}" skip.list | while read line; do
    # shellcheck disable=SC2086
    testFile="$(echo $line | cut -d '|' -f 2)"
    r_log "internal" "SKIP ${testFile}"
    chmod -x "${testFile}"
  done
  r_log "internal" "WARNING: Tests above were disabled."
fi

# TODO: should we let $1 judge what directory is ran?
# TODO: get some stacks and lib in there

#r_processor <(/usr/bin/find ./core -type f | sort -t'/')
#r_processor <(/usr/bin/find ./lib -type f | sort -t'/')
r_processor <(/usr/bin/find ./stacks/ipa -type f | sort -t'/')

r_log "internal" "Core Tests completed"
exit 0
0  func/stacks/ipa/00-ipa-pregame.sh (Normal file → Executable file)
2  func/stacks/ipa/10-install-ipa.sh (Normal file → Executable file)
@@ -11,4 +11,4 @@ if [ "$RL_VER" -eq 8 ]; then
   p_enableModule idm:DL1/{client,common,dns,server}
 fi

-p_installPackageNormal ipa-server ipa-server-dns
+p_installPackageNormal ipa-server ipa-server-dns expect
0  func/stacks/ipa/11-configure-ipa.sh (Normal file → Executable file)
0  func/stacks/ipa/12-verify-ipa.sh (Normal file → Executable file)
35  func/stacks/ipa/20-ipa-user.sh (Normal file → Executable file)
@@ -13,42 +13,43 @@ kdestroy &> /dev/null
 klist 2>&1 | grep -E "(No credentials|Credentials cache .* not found)" &> /dev/null
 r_checkExitStatus $?

-expect -f - <<EOF
-set send_human {.1 .3 1 .05 2}
-spawn kinit admin
-sleep 1
-expect "Password for admin@RLIPA.LOCAL:"
-send -h "b1U3OnyX!\r"
-sleep 5
-close
-EOF
+echo "b1U3OnyX!" | kinit admin@RLIPA.LOCAL

 klist | grep "admin@RLIPA.LOCAL" &> /dev/null
 r_checkExitStatus $?

 r_log "ipa" "Test adding a user"
-userDetails="$(ipa user-add --first=test --last=user --random ipatestuser)"
-echo "$userDetails" | grep -q 'Added user "ipatestuser"'
-r_checkExitStatus $?
+ipa user-add --first=test --last=user --random ipatestuser > /tmp/ipatestuser
+grep -q 'Added user "ipatestuser"' /tmp/ipatestuser

-echo "$userDetails" | grep -q 'First name: test'
+ret_val=$?
+if [ "$ret_val" -ne 0 ]; then
+  r_log "ipa" "User was not created, this is considered fatal"
+  r_checkExitStatus 1
+  exit 1
+fi
+
+sed -i 's|^ ||g' /tmp/ipatestuser
+grep -q 'First name: test' /tmp/ipatestuser
 r_checkExitStatus $?
-echo "$userDetails" | grep -q 'Last name: user'
+grep -q 'Last name: user' /tmp/ipatestuser
 r_checkExitStatus $?
-echo "$userDetails" | grep -q 'Full name: test user'
+grep -q 'Full name: test user' /tmp/ipatestuser
 r_checkExitStatus $?
-echo "$userDetails" | grep -q 'Home directory: /home/ipatestuser'
+grep -q 'Home directory: /home/ipatestuser' /tmp/ipatestuser
 r_checkExitStatus $?

 r_log "ipa" "Changing password of the user"
 kdestroy &> /dev/null
+userPassword="$(awk '/Random password/ { print $3 }' /tmp/ipatestuser)"
+/bin/rm /tmp/ipatestuser

 expect -f - <<EOF
 set send_human {.1 .3 1 .05 2}
 spawn kinit ipatestuser
 sleep 1
 expect "Password for ipatestuser@RLIPA.LOCAL: "
-send -h -- "$(echo "$userDetails" | awk '$0 ~ /Random password/ {print $3}')\r"
+send -h -- "$(echo "$userPassword")\r"
 sleep 1
 expect "Enter new password: "
 send -h -- "gr@YAm3thy5st!\r"
18  func/stacks/ipa/21-ipa-service.sh (Normal file → Executable file)
@@ -13,29 +13,21 @@ kdestroy &> /dev/null
 klist 2>&1 | grep -E "(No credentials|Credentials cache .* not found)" &> /dev/null
 r_checkExitStatus $?

-expect -f - <<EOF
-set send_human {.1 .3 1 .05 2}
-spawn kinit admin
-sleep 1
-expect "Password for admin@RLIPA.LOCAL:"
-send -h "b1U3OnyX!\r"
-sleep 5
-close
-EOF
+echo "b1U3OnyX!" | kinit admin@RLIPA.LOCAL

 klist | grep "admin@RLIPA.LOCAL" &> /dev/null
 r_checkExitStatus $?

 r_log "ipa" "Adding test service"
-ipa service-add testservice/rltest.rlipa.local &> /dev/null
+ipa service-add testservice/onyxtest.rlipa.local &> /dev/null
 r_checkExitStatus $?

 r_log "ipa" "Getting keytab for service"
-ipa-getkeytab -s rltest.rlipa.local -p testservice/rltest.rlipa.local -k /tmp/testservice.keytab &> /dev/null
+ipa-getkeytab -s onyxtest.rlipa.local -p testservice/onyxtest.rlipa.local -k /tmp/testservice.keytab &> /dev/null
 r_checkExitStatus $?

 r_log "ipa" "Getting a certificate for service"
-ipa-getcert request -K testservice/rltest.rlipa.local -D rltest.rlipa.local -f /etc/pki/tls/certs/testservice.crt -k /etc/pki/tls/private/testservice.key &> /dev/null
+ipa-getcert request -K testservice/onyxtest.rlipa.local -D onyxtest.rlipa.local -f /etc/pki/tls/certs/testservice.crt -k /etc/pki/tls/private/testservice.key &> /dev/null
 r_checkExitStatus $?

 while true; do
@@ -57,7 +49,7 @@ while ! stat /etc/pki/tls/certs/testservice.crt &> /dev/null; do
 done

 r_log "ipa" "Verifying keytab"
-klist -k /tmp/testservice.keytab | grep "testservice/rltest.rlipa.local" &> /dev/null
+klist -k /tmp/testservice.keytab | grep "testservice/onyxtest.rlipa.local" &> /dev/null
 r_checkExitStatus $?

 r_log "ipa" "Verifying key matches the certificate"
16  func/stacks/ipa/22-ipa-dns.sh (Normal file → Executable file)
@@ -13,21 +13,13 @@ kdestroy &> /dev/null
 klist 2>&1 | grep -qE "(No credentials|Credentials cache .* not found)" &> /dev/null
 r_checkExitStatus $?

-expect -f - <<EOF
-set send_human {.1 .3 1 .05 2}
-spawn kinit admin
-sleep 1
-expect "Password for admin@RLIPA.LOCAL:"
-send -h "b1U3OnyX!\r"
-sleep 5
-close
-EOF
+echo "b1U3OnyX!" | kinit admin@RLIPA.LOCAL

 klist | grep "admin@RLIPA.LOCAL" &> /dev/null
 r_checkExitStatus $?

 r_log "ipa" "Adding testzone subdomain"
-ipa dnszone-add --name-server=rltest.rlipa.local. --admin-email=hostmaster.testzone.rlipa.local. testzone.rlipa.local &> /dev/null
+ipa dnszone-add --name-server=onyxtest.rlipa.local. --admin-email=hostmaster.testzone.rlipa.local. testzone.rlipa.local &> /dev/null
 r_checkExitStatus $?
 sleep 5

@@ -36,7 +28,7 @@ dig @localhost SOA testzone.rlipa.local | grep -q "status: NOERROR" &> /dev/null
 r_checkExitStatus $?

 r_log "ipa" "Adding a CNAME record to the primary domain"
-ipa dnsrecord-add rlipa.local testrecord --cname-hostname=rltest &> /dev/null
+ipa dnsrecord-add rlipa.local testrecord --cname-hostname=onyxtest &> /dev/null
 r_checkExitStatus $?
 sleep 5

@@ -45,7 +37,7 @@ dig @localhost CNAME testrecord.rlipa.local | grep -q "status: NOERROR" &> /dev/
 r_checkExitStatus $?

 r_log "ipa" "Adding a CNAME to subdomain"
-ipa dnsrecord-add testzone.rlipa.local testrecord --cname-hostname=rltest.rlipa.local. &> /dev/null
+ipa dnsrecord-add testzone.rlipa.local testrecord --cname-hostname=onyxtest.rlipa.local. &> /dev/null
 r_checkExitStatus $?
 sleep 5
54  func/stacks/ipa/23-ipa-sudo.sh (Normal file → Executable file)
@@ -9,19 +9,51 @@ if [ "$IPAINSTALLED" -eq 1 ]; then
   r_checkExitStatus 1
 fi

-kdestroy &> /dev/null
-klist 2>&1 | grep -E "(No credentials|Credentials cache .* not found)" &> /dev/null
+kdestroy -A
+klist 2>&1 | grep -E "(No credentials|Credentials cache .* not found)"
 r_checkExitStatus $?

-expect -f - <<EOF
-set send_human {.1 .3 1 .05 2}
-spawn kinit admin
-sleep 1
-expect "Password for admin@RLIPA.LOCAL:"
-send -h "b1U3OnyX!\r"
-sleep 5
-close
-EOF
-
-klist | grep "admin@RLIPA.LOCAL" &> /dev/null
+echo "b1U3OnyX!" | kinit admin@RLIPA.LOCAL
+
+klist | grep -q "admin@RLIPA.LOCAL"
+r_checkExitStatus $?
+
+r_log "ipa" "Creating a test sudo rule"
+ipa sudorule-add testrule --desc="Test rule in IPA" --hostcat=all --cmdcat=all --runasusercat=all --runasgroupcat=all &> /dev/null
+r_checkExitStatus $?
+
+r_log "ipa" "Adding user to test sudo rule"
+ipa sudorule-add-user testrule --users="ipatestuser" &> /dev/null
+r_checkExitStatus $?
+
+r_log "ipa" "Verifying rule..."
+ipa sudorule-show testrule > /tmp/testrule
+grep -q 'Rule name: testrule' /tmp/testrule
+r_checkExitStatus $?
+grep -q 'Description: Test rule in IPA' /tmp/testrule
+r_checkExitStatus $?
+grep -q 'Enabled: TRUE' /tmp/testrule
+r_checkExitStatus $?
+grep -q 'Host category: all' /tmp/testrule
+r_checkExitStatus $?
+grep -q 'Command category: all' /tmp/testrule
+r_checkExitStatus $?
+grep -q 'RunAs User category: all' /tmp/testrule
+r_checkExitStatus $?
+grep -q 'RunAs Group category: all' /tmp/testrule
+r_checkExitStatus $?
+grep -q 'Users: ipatestuser' /tmp/testrule
+r_checkExitStatus $?
+
+m_serviceCycler sssd stop
+rm -rf /var/lib/sss/db/*
+m_serviceCycler sssd start
+
 sleep 5
+
+r_log "ipa" "Verifying sudo abilities"
+sudo -l -U ipatestuser > /tmp/sudooutput
+grep -q 'ipatestuser may run the following commands' /tmp/sudooutput
+r_checkExitStatus $?
+grep -q 'ALL) ALL' /tmp/sudooutput
 r_checkExitStatus $?
0  func/stacks/ipa/50-cleanup-ipa.sh (Normal file → Executable file)
0  func/stacks/lamp/00-install-lamp.sh (Executable file → Normal file)
0  func/stacks/lamp/01-verification.sh (Executable file → Normal file)
0  func/stacks/lamp/10-test-lamp.sh (Executable file → Normal file)
1  iso/empanadas/.gitignore (vendored)
@@ -2,3 +2,4 @@ __pycache__/
 *.py[cod]
 *$py.class
 *.so
+Containerfile*.devel
@@ -56,7 +56,7 @@ RUN rm -rf /etc/yum.repos.d/*.repo
 RUN useradd -o -d /var/peridot -u 1002 peridotbuilder && usermod -a -G mock peridotbuilder
 RUN chown peridotbuilder:mock /etc/yum.conf && chown -R peridotbuilder:mock /etc/dnf && chown -R peridotbuilder:mock /etc/rpm && chown -R peridotbuilder:mock /etc/yum.repos.d

-RUN pip install 'git+https://git.rockylinux.org/release-engineering/public/toolkit.git@feature/iso-kube#egg=empanadas&subdirectory=iso/empanadas'
+RUN pip install 'git+https://git.resf.org/sig_core/toolkit.git@devel#egg=empanadas&subdirectory=iso/empanadas'

 RUN pip install awscli
70  iso/empanadas/Containerfile.imagefactory (new file)
@@ -0,0 +1,70 @@
FROM docker.io/fedora:36

ADD images/get_arch /get_arch

ENV TINI_VERSION v0.19.0
RUN curl -o /tini -L "https://github.com/krallin/tini/releases/download/${TINI_VERSION}/tini-$(/get_arch)"
RUN chmod +x /tini

RUN dnf install -y \
    bash \
    bzip2 \
    cpio \
    diffutils \
    findutils \
    gawk \
    gcc \
    gcc-c++ \
    git \
    grep \
    gzip \
    info \
    make \
    patch \
    python3 \
    redhat-rpm-config \
    rpm-build \
    scl-utils-build \
    sed \
    shadow-utils \
    tar \
    unzip \
    util-linux \
    which \
    xz \
    dnf-plugins-core \
    createrepo_c \
    rpm-sign \
    sudo \
    mock \
    python-pip \
    imagefactory \
    imagefactory-plugins*

RUN sed -i -e 's/# memory = 1024/memory = 2048/' /etc/oz/oz.cfg

COPY imagefactory.patch /
COPY oz.rpm /

RUN dnf -y install /oz.rpm
RUN (cd /usr/lib/python3.10/site-packages/; patch -p1 </imagefactory.patch)

RUN ssh-keygen -t rsa -q -f "$HOME/.ssh/id_rsa" -N ""
RUN dnf clean all
RUN rm -rf /etc/yum.repos.d/*.repo /get_arch
# RUN useradd -o -d /var/peridot -u 1002 peridotbuilder && usermod -a -G mock peridotbuilder
# RUN chown -R peridotbuilder:mock /etc/dnf && chown -R peridotbuilder:mock /etc/rpm && chown -R peridotbuilder:mock /etc/yum.repos.d && chown -R peridotbuilder:mock /var/lib/imagefactory/storage

RUN pip install awscli

ARG BRANCH r9
RUN git clone https://git.resf.org/sig_core/kickstarts.git --branch $BRANCH /kickstarts

RUN pip install 'git+https://git.resf.org/sig_core/toolkit.git@devel#egg=empanadas&subdirectory=iso/empanadas'

ENV LIBGUESTFS_BACKEND direct

COPY prep-azure.sh /prep-azure.sh
RUN chmod +x /prep-azure.sh

ENTRYPOINT ["/tini", "--"]
@@ -1,12 +1,21 @@
 # iso

 ## Setup / Install

 1. Install [Poetry](https://python-poetry.org/docs/)
 2. Setup: `poetry install`
+3. Install dependencies: `dnf install podman mock`
 3. Have fun

+## Reliance on podman and mock
+
+### Why podman?
+
+Podman is a requirement for performing reposyncs. This was done because it was found to be easier to spin up several podman containers than several mock chroots and it was faster than doing one at a time in a loop. Podman is also used to parallelize ISO builds.
+
+### Why mock?
+
+There are cases where running `mock` is the preferred go-to: For example, building lorax images. Since you cannot build a lorax image for an architecture your system does not support, trying to "parallelize" it was out of the question. Adding this support in was not only for local testing without podman, it was also done so it can be run in our peridot kube cluster for each architecture.

 ## Updating dependencies
@@ -16,24 +25,35 @@ Changes to the poetry.lock should be commited if dependencies are added or updat

 ## TODO

-Verbose mode should exist to output everything that's being called or ran.
-There should be additional logging regardless, not just to stdout, but also to a file.
+* Verbose mode should exist to output everything that's being called or ran.
+* There should be additional logging regardless, not just to stdout, but also to a file.

 ## scripts

-* sync-variant-pungi
-* sync-variant-peridot
-* sync-from-pungi
-* sync-from-peridot
-* sync-sig
-* build-all-iso
-* sign-repos-only
+```
+* sync_from_peridot -> Syncs repositories from Peridot
+* sync_sig          -> Syncs SIG repositories from Peridot
+* build-iso         -> Builds initial ISO's using Lorax
+* build-iso-extra   -> Builds DVD's and other images based on Lorax data
+* build-iso-live    -> Builds live images
+* pull-unpack-tree  -> Pulls the latest lorax data from an S3 bucket and configures treeinfo
+* pull-cloud-image  -> Pulls the latest cloud images from an S3 bucket
+* finalize_compose  -> Finalizes a compose with metadata and checksums, as well as copies images
+* launch-builds     -> Creates a kube config to run build-iso
+* build-image       -> Runs build-iso
+* generate_compose  -> Creates a compose directory right away and optionally links it as latest
+                       (You should only use this if you are running into errors with images)
+```

 ## wrappers

-* lorax-generators
-* sync-generators
+```
+* common    -> The starting point
+* iso_utils -> Does work for ISO building and generation
+* dnf_utils -> Does work for repo building and generation
+* check     -> Checks if the architecture/release combination are valid
+* shared    -> Shared utilities between all wrappers
+```

 ## rules
@@ -43,8 +63,9 @@ When making a script, you *must* import common. This is insanely bad practice,
 but we would prefer if we started out this way:

 ```
-from common import *
 import argparse
+from empanadas.common import *
+from empanadas.util import Checks
 ```

 Whatever is imported in common will effectively be imported in your scripts as
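The "Why podman?" note in the README hunk above boils down to a fan-out pattern: start one container per repository instead of looping over mock chroots. A minimal sketch of that idea only, assuming a hypothetical container image and sync command rather than the toolkit's real invocation:

```python
# Illustrative sketch of the parallel-reposync idea; the image name and the
# dnf command below are hypothetical placeholders, not empanadas' actual code.
import subprocess

repos = ["BaseOS", "AppStream", "CRB"]

# Popen returns immediately, so one container per repo runs in parallel.
procs = {
    repo: subprocess.Popen([
        "podman", "run", "--rm",
        "example.org/reposync:latest",          # hypothetical image
        "dnf", "reposync", f"--repoid={repo}",  # hypothetical sync command
    ])
    for repo in repos
}

# Reap every container; a non-zero exit marks that repo's sync as failed.
failed = [repo for repo, p in procs.items() if p.wait() != 0]
print("failed syncs:", failed)
```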
@@ -1 +1 @@
-__version__ = '0.1.0'
+__version__ = '0.2.0'
@@ -8,6 +8,24 @@ import yaml
 import logging
 import hashlib

+from collections import defaultdict
+from typing import Tuple
+
+# An implementation from the Fabric python library
+class AttributeDict(defaultdict):
+    def __init__(self):
+        super(AttributeDict, self).__init__(AttributeDict)
+
+    def __getattr__(self, key):
+        try:
+            return self[key]
+        except KeyError:
+            raise AttributeError(key)
+
+    def __setattr__(self, key, value):
+        self[key] = value
+
 # These are a bunch of colors we may use in terminal output
 class Color:
     RED = '\033[91m'
@@ -20,10 +38,14 @@ class Color:
     UNDERLINE = '\033[4m'
     BOLD = '\033[1m'
     END = '\033[0m'
+    INFO = '[' + BOLD + GREEN + 'INFO' + END + '] '
+    WARN = '[' + BOLD + YELLOW + 'WARN' + END + '] '
+    FAIL = '[' + BOLD + RED + 'FAIL' + END + '] '
+    STAT = '[' + BOLD + CYAN + 'STAT' + END + '] '

 # vars and additional checks
-rldict = {}
-sigdict = {}
+rldict = AttributeDict()
+sigdict = AttributeDict()
 config = {
     "rlmacro": rpm.expandMacro('%rhel'),
     "dist": 'el' + rpm.expandMacro('%rhel'),
@@ -77,3 +99,40 @@ for conf in glob.iglob(f"{_rootdir}/sig/*.yaml"):
 #rlvars = rldict[rlver]
 #rlvars = rldict[rlmacro]
 #COMPOSE_ISO_WORKDIR = COMPOSE_ROOT + "work/" + arch + "/" + date_stamp
+
+ALLOWED_TYPE_VARIANTS = {
+    "Azure": None,
+    "Container": ["Base", "Minimal", "UBI"],
+    "EC2": None,
+    "GenericCloud": None,
+    "Vagrant": ["Libvirt", "Vbox"]
+}
+
+def valid_type_variant(_type: str, variant: str="") -> bool:
+    if _type not in ALLOWED_TYPE_VARIANTS:
+        raise Exception(f"Type is invalid: ({_type}, {variant})")
+    if ALLOWED_TYPE_VARIANTS[_type] == None:
+        if variant is not None:
+            raise Exception(f"{_type} Type expects no variant type.")
+        return True
+    if variant not in ALLOWED_TYPE_VARIANTS[_type]:
+        if variant.capitalize() in ALLOWED_TYPE_VARIANTS[_type]:
+            raise Exception(f"Capitalization mismatch. Found: ({_type}, {variant}). Expected: ({_type}, {variant.capitalize()})")
+        raise Exception(f"Type/Variant Combination is not allowed: ({_type}, {variant})")
+    return True
+
+from attrs import define, field
+
+@define(kw_only=True)
+class Architecture:
+    name: str = field()
+    version: str = field()
+    major: int = field(converter=int)
+    minor: int = field(converter=int)
+
+    @classmethod
+    def from_version(cls, architecture: str, version: str):
+        major, minor = str.split(version, ".")
+        if architecture not in rldict[major]["allowed_arches"]:
+            print("Invalid architecture/version combo, skipping")
+            exit()
+        return cls(name=architecture, version=version, major=major, minor=minor)
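A minimal sketch of how the two additions above behave, assuming the `AttributeDict` and `valid_type_variant` definitions exactly as diffed (values are illustrative):

```python
# AttributeDict is a defaultdict of itself, so nested keys auto-vivify and
# entries can be read back with attribute syntax:
rldict = AttributeDict()
rldict["9"]["fullname"] = "Rocky Linux 9.0"       # "9" springs into existence
assert rldict["9"].fullname == "Rocky Linux 9.0"  # attribute-style read

# valid_type_variant() returns True for an allowed pair and raises otherwise:
valid_type_variant("Vagrant", "Libvirt")        # True: allowed combination
try:
    valid_type_variant("Container", "minimal")  # wrong capitalization
except Exception as e:
    print(e)  # Capitalization mismatch. Found: (Container, minimal). Expected: (Container, Minimal)
```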
@@ -44,64 +44,78 @@
   has_modules:
     - 'AppStream'
     - 'PowerTools'
-  iso_map:
-    hosts:
-      x86_64: ''
-      aarch64: ''
-      ppc64le: ''
-      s390x: ''
-    images:
-      - dvd1
-      - minimal
-      - boot
-    repos:
-      - 'BaseOS'
-      - 'AppStream'
-    variant: 'BaseOS'
-    lorax_removes:
-      - 'libreport-rhel-anaconda-bugzilla'
-    required_packages:
-      - 'lorax'
-      - 'genisoimage'
-      - 'isomd5sum'
-      - 'lorax-templates-rhel'
-      - 'lorax-templates-generic'
   structure:
     packages: 'os/Packages'
     repodata: 'os/repodata'
   iso_map:
     xorrisofs: False
     iso_level: False
-    hosts:
-      x86_64: ''
-      aarch64: ''
     images:
       dvd:
+        disc: True
+        variant: 'AppStream'
         repos:
           - 'BaseOS'
           - 'AppStream'
-    lorax_variants:
-      - dvd
-      - minimal
-      - BaseOS
-    repos:
-      - 'BaseOS'
-      - 'AppStream'
-    variant: 'BaseOS'
-    lorax_removes:
-      - 'libreport-rhel-anaconda-bugzilla'
-    required_pkgs:
-      - 'lorax'
-      - 'genisoimage'
-      - 'isomd5sum'
-      - 'lorax-templates-rhel'
-      - 'lorax-templates-generic'
+      minimal:
+        disc: True
+        isoskip: True
+        repos:
+          - 'minimal'
+          - 'BaseOS'
+        variant: 'minimal'
+      BaseOS:
+        disc: False
+        isoskip: True
+        variant: 'BaseOS'
+        repos:
+          - 'BaseOS'
+          - 'AppStream'
+    lorax:
+      repos:
+        - 'BaseOS'
+        - 'AppStream'
+      variant: 'BaseOS'
+      lorax_removes:
+        - 'libreport-rhel-anaconda-bugzilla'
+      required_pkgs:
+        - 'lorax'
+        - 'genisoimage'
+        - 'isomd5sum'
+        - 'lorax-templates-rhel'
+        - 'lorax-templates-generic'
+        - 'xorriso'
+  cloudimages:
+    images:
+      EC2:
+        format: raw
+      GenericCloud:
+        format: qcow2
+  livemap:
+    git_repo: 'https://git.resf.org/sig_core/kickstarts.git'
+    branch: 'r9'
+    ksentry:
+      Workstation: rocky-live-workstation.ks
+      Workstation-Lite: rocky-live-workstation-lite.ks
+      XFCE: rocky-live-xfce.ks
+      KDE: rocky-live-kde.ks
+    allowed_arches:
+      - x86_64
+    required_pkgs:
+      - 'lorax-lmc-novirt'
+      - 'vim-minimal'
+      - 'pykickstart'
+      - 'git'
+  variantmap:
+    git_repo: 'https://git.rockylinux.org/rocky/pungi-rocky.git'
+    branch: 'r8'
+    git_raw_path: 'https://git.rockylinux.org/rocky/pungi-rocky/-/raw/r8/'
   repoclosure_map:
     arches:
-      x86_64: '--arch=x86_64 --arch=athlon --arch=i686 --arch=i586 --arch=i486 --arch=i386 --arch=noarch'
-      aarch64: '--arch=aarch64 --arch=noarch'
-      ppc64le: '--arch=ppc64le --arch=noarch'
-      s390x: '--arch=s390x --arch=noarch'
+      x86_64: '--forcearch=x86_64 --arch=x86_64 --arch=athlon --arch=i686 --arch=i586 --arch=i486 --arch=i386 --arch=noarch'
+      aarch64: '--forcearch=aarch64 --arch=aarch64 --arch=noarch'
+      ppc64le: '--forcearch=ppc64le --arch=ppc64le --arch=noarch'
+      s390x: '--forcearch=s390x --arch=s390x --arch=noarch'
     repos:
       BaseOS: []
       AppStream:
@@ -50,6 +50,7 @@
     isoskip: True
     repos:
       - 'minimal'
+      - 'BaseOS'
     variant: 'minimal'
   BaseOS:
     disc: False
@@ -72,6 +73,31 @@
       - 'lorax-templates-rhel'
       - 'lorax-templates-generic'
       - 'xorriso'
+  cloudimages:
+    images:
+      EC2:
+        format: raw
+      GenericCloud:
+        format: qcow2
+  livemap:
+    git_repo: 'https://git.resf.org/sig_core/kickstarts.git'
+    branch: 'r9-beta'
+    ksentry:
+      Workstation: rocky-live-workstation.ks
+      Workstation-Lite: rocky-live-workstation-lite.ks
+      XFCE: rocky-live-xfce.ks
+      KDE: rocky-live-kde.ks
+    allowed_arches:
+      - x86_64
+    required_pkgs:
+      - 'lorax-lmc-novirt'
+      - 'vim-minimal'
+      - 'pykickstart'
+      - 'git'
+  variantmap:
+    git_repo: 'https://git.rockylinux.org/rocky/pungi-rocky.git'
+    branch: 'r9-beta'
+    git_raw_path: 'https://git.rockylinux.org/rocky/pungi-rocky/-/raw/r9-beta/'
   repoclosure_map:
     arches:
       x86_64: '--forcearch=x86_64 --arch=x86_64 --arch=athlon --arch=i686 --arch=i586 --arch=i486 --arch=i386 --arch=noarch'
@@ -2,7 +2,7 @@
 '9':
   fullname: 'Rocky Linux 9.0'
   revision: '9.0'
-  rclvl: 'RC1'
+  rclvl: 'RC2'
   major: '9'
   minor: '0'
   profile: '9'
@@ -50,7 +50,9 @@
     isoskip: True
     repos:
       - 'minimal'
+      - 'BaseOS'
     variant: 'minimal'
+    volname: 'dvd'
   BaseOS:
     disc: False
     isoskip: True
@@ -72,6 +74,31 @@
       - 'lorax-templates-rhel'
       - 'lorax-templates-generic'
       - 'xorriso'
+  cloudimages:
+    images:
+      EC2:
+        format: raw
+      GenericCloud:
+        format: qcow2
+  livemap:
+    git_repo: 'https://git.resf.org/sig_core/kickstarts.git'
+    branch: 'r9'
+    ksentry:
+      Workstation: rocky-live-workstation.ks
+      Workstation-Lite: rocky-live-workstation-lite.ks
+      XFCE: rocky-live-xfce.ks
+      KDE: rocky-live-kde.ks
+    allowed_arches:
+      - x86_64
+    required_pkgs:
+      - 'lorax-lmc-novirt'
+      - 'vim-minimal'
+      - 'pykickstart'
+      - 'git'
+  variantmap:
+    git_repo: 'https://git.rockylinux.org/rocky/pungi-rocky.git'
+    branch: 'r9'
+    git_raw_path: 'https://git.rockylinux.org/rocky/pungi-rocky/-/raw/r9/'
   repoclosure_map:
     arches:
       x86_64: '--forcearch=x86_64 --arch=x86_64 --arch=athlon --arch=i686 --arch=i586 --arch=i486 --arch=i386 --arch=noarch'
@@ -50,6 +50,7 @@
     isoskip: True
     repos:
       - 'minimal'
+      - 'BaseOS'
     variant: 'minimal'
   BaseOS:
     disc: False
@@ -72,6 +73,31 @@
       - 'lorax-templates-rhel'
       - 'lorax-templates-generic'
       - 'xorriso'
+  cloudimages:
+    images:
+      EC2:
+        format: raw
+      GenericCloud:
+        format: qcow2
+  livemap:
+    git_repo: 'https://git.resf.org/sig_core/kickstarts.git'
+    branch: 'r9lh'
+    ksentry:
+      Workstation: rocky-live-workstation.ks
+      Workstation-Lite: rocky-live-workstation-lite.ks
+      XFCE: rocky-live-xfce.ks
+      KDE: rocky-live-kde.ks
+    allowed_arches:
+      - x86_64
+    required_pkgs:
+      - 'lorax-lmc-novirt'
+      - 'vim-minimal'
+      - 'pykickstart'
+      - 'git'
+  variantmap:
+    git_repo: 'https://git.rockylinux.org/rocky/pungi-rocky.git'
+    branch: 'r9lh'
+    git_raw_path: 'https://git.rockylinux.org/rocky/pungi-rocky/-/raw/r9lh/'
   repoclosure_map:
     arches:
       x86_64: '--forcearch=x86_64 --arch=x86_64 --arch=athlon --arch=i686 --arch=i586 --arch=i486 --arch=i386 --arch=noarch'
@@ -40,18 +40,19 @@
     iso_level: False
     images:
       dvd:
-        discnum: '1'
+        disc: True
         variant: 'AppStream'
         repos:
           - 'BaseOS'
           - 'AppStream'
       minimal:
-        discnum: '1'
+        disc: True
         isoskip: True
         repos:
           - 'minimal'
         variant: 'minimal'
       BaseOS:
+        disc: False
         isoskip: True
         variant: 'BaseOS'
         repos:
@@ -66,10 +67,32 @@
       - 'libreport-rhel-anaconda-bugzilla'
     required_pkgs:
       - 'lorax'
+      - 'genisoimage'
       - 'isomd5sum'
       - 'lorax-templates-rhel'
       - 'lorax-templates-generic'
       - 'xorriso'
+  cloudimages:
+    images:
+      EC2:
+        format: raw
+      GenericCloud:
+        format: qcow2
+  livemap:
+    git_repo: 'https://git.resf.org/sig_core/kickstarts.git'
+    branch: 'rln'
+    ksentry:
+      Workstation: rocky-live-workstation.ks
+      Workstation-Lite: rocky-live-workstation-lite.ks
+      XFCE: rocky-live-xfce.ks
+      KDE: rocky-live-kde.ks
+    allowed_arches:
+      - x86_64
+    required_pkgs:
+      - 'lorax-lmc-novirt'
+      - 'vim-minimal'
+      - 'pykickstart'
+      - 'git'
   repoclosure_map:
     arches:
       x86_64: '--forcearch=x86_64 --arch=x86_64 --arch=athlon --arch=i686 --arch=i586 --arch=i486 --arch=i386 --arch=noarch'
424  iso/empanadas/empanadas/scripts/build_image.py (new file)
@ -0,0 +1,424 @@
|
|||||||
|
# Builds an image given a version, type, variant, and architecture
|
||||||
|
# Defaults to the running host's architecture
|
||||||
|
|
||||||
|
import argparse
|
||||||
|
import datetime
|
||||||
|
import json
|
||||||
|
import logging
|
||||||
|
import os
|
||||||
|
import pathlib
|
||||||
|
import platform
|
||||||
|
import subprocess
|
||||||
|
import sys
|
||||||
|
import tempfile
|
||||||
|
import time
|
||||||
|
|
||||||
|
from attrs import define, Factory, field, asdict
|
||||||
|
from botocore import args
|
||||||
|
from jinja2 import Environment, FileSystemLoader, Template
|
||||||
|
from typing import Callable, List, NoReturn, Optional, Tuple, IO, Union
|
||||||
|
|
||||||
|
from empanadas.common import Architecture, rldict, valid_type_variant
|
||||||
|
from empanadas.common import _rootdir
|
||||||
|
|
||||||
|
parser = argparse.ArgumentParser(description="ISO Compose")
|
||||||
|
|
||||||
|
parser.add_argument('--version', type=str, help="Release Version (8.6, 9.1)", required=True)
|
||||||
|
parser.add_argument('--rc', action='store_true', help="Release Candidate")
|
||||||
|
parser.add_argument('--kickstartdir', action='store_true', help="Use the kickstart dir instead of the os dir for repositories")
|
||||||
|
parser.add_argument('--debug', action='store_true', help="debug?")
|
||||||
|
parser.add_argument('--type', type=str, help="Image type (container, genclo, azure, aws, vagrant)", required=True)
|
||||||
|
parser.add_argument('--variant', type=str, help="", required=False)
|
||||||
|
parser.add_argument('--release', type=str, help="Image release for subsequent builds with the same date stamp (rarely needed)", required=False)
|
||||||
|
parser.add_argument('--kube', action='store_true', help="output as a K8s job(s)", required=False)
|
||||||
|
|
||||||
|
|
||||||
|
results = parser.parse_args()
|
||||||
|
rlvars = rldict[results.version]
|
||||||
|
major = rlvars["major"]
|
||||||
|
|
||||||
|
|
||||||
|
debug = results.debug
|
||||||
|
|
||||||
|
log = logging.getLogger(__name__)
|
||||||
|
log.setLevel(logging.INFO if not debug else logging.DEBUG)
|
||||||
|
handler = logging.StreamHandler(sys.stdout)
|
||||||
|
handler.setLevel(logging.INFO if not debug else logging.DEBUG)
|
||||||
|
formatter = logging.Formatter(
|
||||||
|
'%(asctime)s :: %(name)s :: %(message)s',
|
||||||
|
'%Y-%m-%d %H:%M:%S'
|
||||||
|
)
|
||||||
|
handler.setFormatter(formatter)
|
||||||
|
log.addHandler(handler)
|
||||||
|
|
||||||
|
STORAGE_DIR = pathlib.Path("/var/lib/imagefactory/storage")
|
||||||
|
KICKSTART_PATH = pathlib.Path(os.environ.get("KICKSTART_PATH", "/kickstarts"))
|
||||||
|
BUILDTIME = datetime.datetime.utcnow()
|
||||||
|
|
||||||
|
@define(kw_only=True)
|
||||||
|
class ImageBuild:
|
||||||
|
architecture: Architecture = field()
|
||||||
|
base_uuid: Optional[str] = field(default="")
|
||||||
|
cli_args: argparse.Namespace = field()
|
||||||
|
command_args: List[str] = field(factory=list)
|
||||||
|
common_args: List[str] = field(factory=list)
|
||||||
|
debug: bool = field(default=False)
|
||||||
|
image_type: str = field()
|
||||||
|
job_template: Optional[Template] = field(init=False)
|
||||||
|
kickstart_arg: List[str] = field(factory=list)
|
||||||
|
metadata: pathlib.Path = field(init=False)
|
||||||
|
out_type: str = field(init=False)
|
||||||
|
outdir: pathlib.Path = field(init=False)
|
||||||
|
outname: str = field(init=False)
|
||||||
|
package_args: List[str] = field(factory=list)
|
||||||
|
release: int = field(default=0)
|
||||||
|
stage_commands: Optional[List[List[Union[str,Callable]]]] = field(init=False)
|
||||||
|
target_uuid: Optional[str] = field(default="")
|
||||||
|
tdl_path: pathlib.Path = field(init=False)
|
||||||
|
template: Template = field()
|
||||||
|
type_variant: str = field(init=False)
|
||||||
|
variant: Optional[str] = field()
|
||||||
|
|
||||||
|
def __attrs_post_init__(self):
|
||||||
|
self.tdl_path = self.render_icicle_template()
|
||||||
|
if not self.tdl_path:
|
||||||
|
exit(2)
|
||||||
|
self.type_variant = self.type_variant_name()
|
||||||
|
self.outdir, self.outname = self.output_name()
|
||||||
|
self.out_type = self.image_format()
|
||||||
|
self.command_args = self._command_args()
|
||||||
|
self.package_args = self._package_args()
|
||||||
|
self.common_args = self._common_args()
|
||||||
|
self.kickstart_arg = self.kickstart_imagefactory_args()
|
||||||
|
|
||||||
|
self.metadata = pathlib.Path(self.outdir, "metadata.json")
|
||||||
|
|
||||||
|
# Yes, this is gross. I'll fix it later.
|
||||||
|
if self.image_type in ["Container"]:
|
||||||
|
self.stage_commands = [
|
||||||
|
["tar", "-C", f"{self.outdir}", "--strip-components=1", "-x", "-f", lambda: f"{STORAGE_DIR}/{self.target_uuid}.body", "*/layer.tar"],
|
||||||
|
["xz", f"{self.outdir}/layer.tar"]
|
||||||
|
]
|
||||||
|
if self.image_type in ["GenericCloud"]:
|
||||||
|
self.stage_commands = [
|
||||||
|
["qemu-img", "convert", "-c", "-f", "raw", "-O", "qcow2", lambda: f"{STORAGE_DIR}/{self.target_uuid}.body", f"{self.outdir}/{self.outname}.qcow2"]
|
||||||
|
]
|
||||||
|
if self.image_type in ["EC2"]:
|
||||||
|
self.stage_commands = [
|
||||||
|
["qemu-img", "convert", "-f", "raw", "-O", "qcow2", lambda: f"{STORAGE_DIR}/{self.target_uuid}.body", f"{self.outdir}/{self.outname}.qcow2"]
|
||||||
|
]
|
||||||
|
if self.image_type in ["Azure"]:
|
||||||
|
self.stage_commands = [
|
||||||
|
["/prep-azure.sh", lambda: f"{STORAGE_DIR}/{self.target_uuid}.body", f"{STORAGE_DIR}"],
|
||||||
|
["cp", lambda: f"{STORAGE_DIR}/{self.target_uuid}.vhd", f"{self.outdir}/{self.outname}.vhd"]
|
||||||
|
]
|
||||||
|
# ["qemu-img", "resize", "-f", "raw", lambda: f"{STORAGE_DIR}/{self.target_uuid}.body", lambda: f"{self.rounded_size()}"],
|
||||||
|
# ["qemu-img", "convert", "-f", "raw", "-o", "subformat=fixed,force_size" ,"-O", "vpc", lambda: f"{STORAGE_DIR}/{self.target_uuid}.body", f"{self.outdir}/{self.outname}.vhd"]
|
||||||
|
if self.image_type in ["Vagrant"]:
|
||||||
|
_map = {
|
||||||
|
"Vbox": "vmdk",
|
||||||
|
"Libvirt": "qcow2"
|
||||||
|
}
|
||||||
|
output = f"{_map[self.variant]}" #type: ignore
|
||||||
|
self.stage_commands = [
|
||||||
|
["qemu-img", "convert", "-c", "-f", "raw", "-O", output, lambda: f"{STORAGE_DIR}/{self.target_uuid}.body", f"{self.outdir}/{self.outname}.{output}"]
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
if self.stage_commands:
|
||||||
|
self.stage_commands.append(["cp", "-v", lambda: f"{STORAGE_DIR}/{self.target_uuid}.meta", f"{self.outdir}/build.meta"])
|
||||||
|
|
||||||
|
try:
|
||||||
|
os.mkdir(self.outdir)
|
||||||
|
except FileExistsError as e:
|
||||||
|
log.info("Directory already exists for this release. If possible, previously executed steps may be skipped")
|
||||||
|
except Exception as e:
|
||||||
|
log.exception("Some other exception occured while creating the output directory", e)
|
||||||
|
return 0
|
||||||
|
|
||||||
|
if os.path.exists(self.metadata):
|
||||||
|
with open(self.metadata, "r") as f:
|
||||||
|
try:
|
||||||
|
o = json.load(f)
|
||||||
|
self.base_uuid = o['base_uuid']
|
||||||
|
self.target_uuid = o['target_uuid']
|
||||||
|
except json.decoder.JSONDecodeError as e:
|
||||||
|
log.exception("Couldn't decode metadata file", e)
|
||||||
|
finally:
|
||||||
|
f.flush()
|
||||||
|
|
||||||
|
# def rounded_size(self) -> int:
|
||||||
|
# # Azure images need to be rounded to the nearest 1MB boundary.
|
||||||
|
# MB=1024*1024
|
||||||
|
#
|
||||||
|
# raw_size = pathlib.Path(STORAGE_DIR},f"{self.target_uuid}.body").stat().st_size
|
||||||
|
# rounded_size = raw
|
||||||
|
|
||||||
|
def output_name(self) -> Tuple[pathlib.Path, str]:
|
||||||
|
directory = f"Rocky-{self.architecture.major}-{self.type_variant}-{self.architecture.version}-{BUILDTIME.strftime('%Y%m%d')}.{self.release}"
|
||||||
|
name = f"{directory}.{self.architecture.name}"
|
||||||
|
outdir = pathlib.Path(f"/tmp/", directory)
|
||||||
|
return outdir, name
|
||||||
|
|
||||||
|
def type_variant_name(self):
|
||||||
|
return self.image_type if not self.variant else f"{self.image_type}-{self.variant}"
|
||||||
|
|
||||||
|
def _command_args(self):
|
||||||
|
args_mapping = {
|
||||||
|
"debug": "--debug"
|
||||||
|
}
|
||||||
|
return [param for name, param in args_mapping.items() if getattr(self.cli_args, name)]
|
||||||
|
|
||||||
|
def _package_args(self) -> List[str]:
|
||||||
|
if self.image_type == "Container":
|
||||||
|
return ["--parameter", "compress", "xz"]
|
||||||
|
return [""]
|
||||||
|
|
||||||
|
def _common_args(self) -> List[str]:
|
||||||
|
args = []
|
||||||
|
if self.image_type == "Container":
|
||||||
|
args = ["--parameter", "offline_icicle", "true"]
|
||||||
|
if self.image_type in ["GenericCloud", "EC2", "Vagrant", "Azure"]:
|
||||||
|
args = ["--parameter", "generate_icicle", "false"]
|
||||||
|
return args
|
||||||
|
|
||||||
|
def image_format(self) -> str:
|
||||||
|
mapping = {
|
||||||
|
"Container": "docker"
|
||||||
|
}
|
||||||
|
return mapping[self.image_type] if self.image_type in mapping.keys() else ''
|
||||||
|
|
||||||
|
def kickstart_imagefactory_args(self) -> List[str]:
|
||||||
|
kickstart_path = pathlib.Path(f"{KICKSTART_PATH}/Rocky-{self.architecture.major}-{self.type_variant}.ks")
|
||||||
|
|
||||||
|
if not kickstart_path.is_file():
|
||||||
|
log.warn(f"Kickstart file is not available: {kickstart_path}")
|
||||||
|
if not debug:
|
||||||
|
log.warn("Exiting because debug mode is not enabled.")
|
||||||
|
exit(2)
|
||||||
|
|
||||||
|
return ["--file-parameter", "install_script", str(kickstart_path)]
|
||||||
|
|
||||||
|
def render_icicle_template(self) -> pathlib.Path:
|
||||||
|
handle, output = tempfile.mkstemp()
|
||||||
|
if not handle:
|
||||||
|
exit(3)
|
||||||
|
with os.fdopen(handle, "wb") as tmp:
|
||||||
|
_template = self.template.render(
|
||||||
|
architecture=self.architecture.name,
|
||||||
|
iso8601date=BUILDTIME.strftime("%Y%m%d"),
|
||||||
|
installdir="kickstart" if self.cli_args.kickstartdir else "os",
|
||||||
|
major=self.architecture.major,
|
||||||
|
minor=self.architecture.minor,
|
||||||
|
release=self.release,
|
||||||
|
size="10G",
|
||||||
|
type=self.image_type,
|
||||||
|
utcnow=BUILDTIME,
|
||||||
|
version_variant=self.architecture.version if not self.variant else f"{self.architecture.version}-{self.variant}",
|
||||||
|
)
|
||||||
|
tmp.write(_template.encode())
|
||||||
|
tmp.flush()
|
||||||
|
output = pathlib.Path(output)
|
||||||
|
if not output.exists():
|
||||||
|
log.error("Failed to write TDL template")
|
||||||
|
raise Exception("Failed to write TDL template")
|
||||||
|
return output
|
||||||
|
|
||||||
|
def build_command(self) -> List[str]:
|
||||||
|
build_command = ["imagefactory", *self.command_args, "base_image", *self.common_args, *self.kickstart_arg, self.tdl_path
|
||||||
|
# "|", "tee", "-a", f"{outdir}/logs/base_image-{outname}.out",
|
||||||
|
# "|", "tail", "-n4", ">", f"{outdir}/base.meta", "||", "exit", "2"
|
||||||
|
]
|
||||||
|
return build_command
|
||||||
|
def package_command(self) -> List[str]:
|
||||||
|
package_command = ["imagefactory", *self.command_args, "target_image", self.out_type, *self.common_args,
|
||||||
|
"--id", f"{self.base_uuid}",
|
||||||
|
*self.package_args,
|
||||||
|
"--parameter", "repository", self.outname,
|
||||||
|
# "|", "tee", "-a", f"{outdir}/base_image-{outname}.out",
|
||||||
|
# "|", "tail", "-n4", ">", f"{outdir}/target.meta", "||", "exit", "3"
|
||||||
|
]
|
||||||
|
return package_command
|
||||||
|
|
||||||
|
def copy_command(self) -> List[str]:
|
||||||
|
|
||||||
|
copy_command = ["aws", "s3", "cp", "--recursive", f"{self.outdir}/",
|
||||||
|
f"s3://resf-empanadas/buildimage-{self.architecture.version}-{self.architecture.name}/{ self.outname }/{ BUILDTIME.strftime('%s') }/"
|
||||||
|
]
|
||||||
|
|
||||||
|
return copy_command
|
||||||
|
|
||||||
|
    def build(self) -> int:
        if self.base_uuid:
            return 0

        self.fix_ks()

        ret, out, err, uuid = self.runCmd(self.build_command())
        if uuid:
            self.base_uuid = uuid.rstrip()
            self.save()
        return ret

    def package(self) -> int:
        # Some build types don't need to be packaged by imagefactory
        # @TODO remove business logic if possible
        if self.image_type in ["GenericCloud", "EC2", "Azure", "Vagrant"]:
            self.target_uuid = self.base_uuid if hasattr(self, 'base_uuid') else ""

        if self.target_uuid:
            return 0

        ret, out, err, uuid = self.runCmd(self.package_command())
        if uuid:
            self.target_uuid = uuid.rstrip()
            self.save()
        return ret

    def stage(self) -> int:
        """Stage the artifacts from wherever they are (unpacking and converting if needed)"""
        if not hasattr(self, 'stage_commands'):
            return 0

        returns = []
        for command in self.stage_commands:  # type: ignore
            ret, out, err, _ = self.runCmd(command, search=False)
            returns.append(ret)

        # A nonzero return code from any staging command signals failure
        return any(ret > 0 for ret in returns)

    def copy(self, skip=False) -> int:
        # move or unpack if necessary
        log.info("Executing staging commands")
        if (stage := self.stage()) > 0:
            raise Exception(stage)

        if not skip:
            log.info("Copying files to output directory")
            ret, out, err, _ = self.runCmd(self.copy_command(), search=False)
            return ret

        log.info(f"Build complete! Output available in {self.outdir}/")
        return 0

    def runCmd(self, command: List[Union[str, Callable]], search: bool = True) -> Tuple[int, Union[bytes, None], Union[bytes, None], Union[str, None]]:
        prepared, _ = self.prepare_command(command)
        log.info(f"Running command: {' '.join(prepared)}")

        kwargs = {
            "stderr": subprocess.PIPE,
            "stdout": subprocess.PIPE
        }
        if debug: del kwargs["stderr"]

        with subprocess.Popen(prepared, **kwargs) as p:
            uuid = None
            # @TODO implement this as a callback?
            if search:
                for _, line in enumerate(p.stdout):  # type: ignore
                    ln = line.decode()
                    if ln.startswith("UUID: "):
                        uuid = ln.split(" ")[-1]
                        log.debug(f"found uuid: {uuid}")

            out, err = p.communicate()
            res = p.wait(), out, err, uuid

            if res[0] > 0:
                log.error(f"Problem while executing command: '{prepared}'")
            if search and not res[3]:
                log.error("UUID not found in stdout. Dumping stdout and stderr")
                self.log_subprocess(res)

            return res

    def prepare_command(self, command_list: List[Union[str, Callable]]) -> Tuple[List[str], List[None]]:
        """
        Commands may be a callable, which should be a lambda to be evaluated at
        preparation time with available locals. This can be used to, among
        other things, perform lazy evaluations of f-strings which have values
        not available at assignment time, e.g., filling in a second command
        with a value extracted from the previous step or command.
        """
        r = []
        for c in command_list:
            # Lambdas are evaluated now; everything else is stringified
            r.append(c() if (callable(c) and c.__name__ == '<lambda>') else str(c))
        return r, []
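
As a concrete illustration of the lazy evaluation described in the docstring, here is a minimal sketch; the command and the --id flag usage below are illustrative, not taken from the toolkit:

# Hypothetical follow-up command: the lambda defers reading self.base_uuid
# until prepare_command() runs, i.e. after build() has populated it.
command = [
    "imagefactory",
    "target_image",
    lambda: f"--id={self.base_uuid}",
]
prepared, _ = self.prepare_command(command)
# prepared is now a flat List[str] with the lambda evaluated in place
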
    def log_subprocess(self, result: Tuple[int, Union[bytes, None], Union[bytes, None], Union[str, None]]):
        def log_lines(title, lines):
            log.info(f"====={title}=====")
            log.info(lines.decode())
        log.info(f"Command return code: {result[0]}")
        stdout = result[1]
        stderr = result[2]
        if stdout:
            log_lines("Command STDOUT", stdout)
        if stderr:
            log_lines("Command STDERR", stderr)

    def fix_ks(self):
        self.runCmd(["sed", "-i", f"s,$basearch,{self.architecture.name},", self.kickstart_arg[-1]], search=False)
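
fix_ks shells out to sed to substitute dnf's $basearch placeholder in the kickstart with the concrete architecture name. A pure-Python equivalent, shown as a sketch (fix_ks_native is a hypothetical name; kickstart_arg[-1] is the path built by kickstart_imagefactory_args):

def fix_ks_native(self):
    # Same substitution as the sed call, without a subprocess
    ks = pathlib.Path(self.kickstart_arg[-1])
    ks.write_text(ks.read_text().replace("$basearch", self.architecture.name))
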
    def render_kubernetes_job(self):
        commands = [self.build_command(), self.package_command(), self.copy_command()]
        if not self.job_template:
            return None
        template = self.job_template.render(
            architecture=self.architecture.name,
            backoffLimit=4,
            buildTime=BUILDTIME.strftime("%s"),
            command=commands,
            imageName="ghcr.io/rockylinux/sig-core-toolkit:latest",
            jobname="buildimage",
            namespace="empanadas",
            major=major,
            restartPolicy="Never",
        )
        return template

    def save(self):
        with open(pathlib.Path(self.outdir, "metadata.json"), "w") as f:
            try:
                o = { name: getattr(self, name) for name in ["base_uuid", "target_uuid"] }
                log.debug(o)
                json.dump(o, f)
            except AttributeError as e:
                log.error("Couldn't find attribute in object. Something is probably wrong: %s", e)
            except Exception as e:
                log.exception(e)
            finally:
                f.flush()

def run():
    try:
        valid_type_variant(results.type, results.variant)
    except Exception as e:
        log.exception(e)
        exit(2)

    file_loader = FileSystemLoader(f"{_rootdir}/templates")
    tmplenv = Environment(loader=file_loader)
    tdl_template = tmplenv.get_template('icicle/tdl.xml.tmpl')

    arches = rlvars['allowed_arches'] if results.kube else [platform.uname().machine]

    for architecture in arches:
        IB = ImageBuild(
            architecture=Architecture.from_version(architecture, rlvars['revision']),
            cli_args=results,
            debug=results.debug,
            image_type=results.type,
            release=results.release if results.release else 0,
            template=tdl_template,
            variant=results.variant,
        )
        if results.kube:
            IB.job_template = tmplenv.get_template('kube/Job.tmpl')
            #commands = IB.kube_commands()
            print(IB.render_kubernetes_job())
        else:
            ret = IB.build()
            ret = IB.package()
            ret = IB.copy()

@@ -13,6 +13,7 @@ parser.add_argument('--isolation', type=str, help="mock isolation mode")
 parser.add_argument('--rc', action='store_true', help="Release Candidate, Beta, RLN")
 parser.add_argument('--local-compose', action='store_true', help="Compose Directory is Here")
 parser.add_argument('--logger', type=str)
+parser.add_argument('--hashed', action='store_true')
 results = parser.parse_args()
 rlvars = rldict[results.release]
 major = rlvars['major']
@@ -24,6 +25,7 @@ a = IsoBuild(
     rc=results.rc,
     isolation=results.isolation,
     compose_dir_is_here=results.local_compose,
+    hashed=results.hashed,
     logger=results.logger,
 )

@@ -16,6 +16,7 @@ parser.add_argument('--local-compose', action='store_true', help="Compose Directory is Here")
 parser.add_argument('--logger', type=str)
 parser.add_argument('--extra-iso', type=str, help="Granular choice in which iso is built")
 parser.add_argument('--extra-iso-mode', type=str, default='local')
+parser.add_argument('--hashed', action='store_true')
 results = parser.parse_args()
 rlvars = rldict[results.release]
 major = rlvars['major']
@@ -30,6 +31,7 @@ a = IsoBuild(
     extra_iso=results.extra_iso,
     extra_iso_mode=results.extra_iso_mode,
     compose_dir_is_here=results.local_compose,
+    hashed=results.hashed,
     logger=results.logger
 )

39  iso/empanadas/empanadas/scripts/build_iso_live.py  (Executable file)
@@ -0,0 +1,39 @@
# builds ISOs

import argparse

from empanadas.common import *
from empanadas.util import Checks
from empanadas.util import LiveBuild

parser = argparse.ArgumentParser(description="Live ISO Compose")

parser.add_argument('--release', type=str, help="Major Release Version or major-type (eg 9-beta)", required=True)
parser.add_argument('--isolation', type=str, help="Mock Isolation")
parser.add_argument('--local-compose', action='store_true', help="Compose Directory is Here")
parser.add_argument('--image', type=str, help="Granular choice in which live image is built")
parser.add_argument('--logger', type=str)
parser.add_argument('--live-iso-mode', type=str, default='local')
parser.add_argument('--hashed', action='store_true')
parser.add_argument('--just-copy-it', action='store_true', help="Just copy the images to the compose dir")
parser.add_argument('--force-build', action='store_true', help="Just build and overwrite the images")
results = parser.parse_args()
rlvars = rldict[results.release]
major = rlvars['major']

a = LiveBuild(
    rlvars,
    config,
    major=major,
    isolation=results.isolation,
    live_iso_mode=results.live_iso_mode,
    image=results.image,
    compose_dir_is_here=results.local_compose,
    hashed=results.hashed,
    justcopyit=results.just_copy_it,
    force_build=results.force_build,
    logger=results.logger
)

def run():
    a.run_build_live_iso()

34  iso/empanadas/empanadas/scripts/build_iso_live_test.py  (Executable file)
@@ -0,0 +1,34 @@
# builds ISOs

import argparse

from empanadas.common import *
from empanadas.util import Checks
from empanadas.util import LiveBuild

parser = argparse.ArgumentParser(description="Live ISO Compose")

parser.add_argument('--release', type=str, help="Major Release Version or major-type (eg 9-beta)", required=True)
parser.add_argument('--isolation', type=str, help="Mock Isolation")
parser.add_argument('--local-compose', action='store_true', help="Compose Directory is Here")
parser.add_argument('--image', type=str, help="Live image name")
parser.add_argument('--logger', type=str)
parser.add_argument('--live-iso-mode', type=str, default='local')
results = parser.parse_args()
rlvars = rldict[results.release]
major = rlvars['major']

a = LiveBuild(
    rlvars,
    config,
    major=major,
    isolation=results.isolation,
    live_iso_mode=results.live_iso_mode,
    image=results.image,
    compose_dir_is_here=results.local_compose,
    logger=results.logger
)

def run():
    print(a.livemap['ksentry'])
    print(a.livemap['ksentry'].keys())

35  iso/empanadas/empanadas/scripts/finalize_compose.py  (Executable file)
@@ -0,0 +1,35 @@
# This script can be called to do single syncs or full on syncs.

import argparse

from empanadas.common import *
from empanadas.util import Checks
from empanadas.util import RepoSync

# Start up the parser baby
parser = argparse.ArgumentParser(description="Peridot Sync and Compose")

# All of our options
parser.add_argument('--release', type=str, help="Major Release Version or major-type (eg 9-beta)", required=True)
parser.add_argument('--arch', type=str, help="Architecture")
parser.add_argument('--logger', type=str)

# Parse them
results = parser.parse_args()
rlvars = rldict[results.release]
major = rlvars['major']

r = Checks(rlvars, config['arch'])
r.check_valid_arch()

# Send them and do whatever I guess
a = RepoSync(
    rlvars,
    config,
    major=major,
    arch=results.arch,
    logger=results.logger,
)

def run():
    a.run_compose_closeout()

73  iso/empanadas/empanadas/scripts/generate_compose.py  (Executable file)
@@ -0,0 +1,73 @@
# This script can be called to do single syncs or full on syncs.

import argparse
import logging
import sys

from empanadas.common import *
from empanadas.util import Checks
from empanadas.util import RepoSync
from empanadas.util import Shared

# Start up the parser baby
parser = argparse.ArgumentParser(description="Peridot Sync and Compose")

# All of our options
parser.add_argument('--release', type=str, help="Major Release Version or major-type (eg 9-beta)", required=True)
parser.add_argument('--symlink', action='store_true', help="symlink")
parser.add_argument('--logger', type=str)

# Parse them
results = parser.parse_args()
rlvars = rldict[results.release]
major = rlvars['major']

r = Checks(rlvars, config['arch'])
r.check_valid_arch()

# Send them and do whatever I guess
def run():
    if results.logger is None:
        log = logging.getLogger("generate")
        log.setLevel(logging.INFO)
        handler = logging.StreamHandler(sys.stdout)
        handler.setLevel(logging.INFO)
        formatter = logging.Formatter(
            '%(asctime)s :: %(name)s :: %(message)s',
            '%Y-%m-%d %H:%M:%S'
        )
        handler.setFormatter(formatter)
        log.addHandler(handler)
    else:
        log = results.logger

    compose_base = config['compose_root'] + "/" + major
    shortname = config['shortname']
    version = rlvars['revision']
    date_stamp = config['date_stamp']
    profile = rlvars['profile']
    logger = log

    generated_dir = Shared.generate_compose_dirs(
        compose_base,
        shortname,
        version,
        date_stamp,
        logger
    )

    if results.symlink:
        compose_latest_dir = os.path.join(
            config['compose_root'],
            major,
            "latest-{}-{}".format(
                shortname,
                profile,
            )
        )
        if os.path.exists(compose_latest_dir):
            os.remove(compose_latest_dir)

        os.symlink(generated_dir, compose_latest_dir)

    log.info('Generated compose dirs.')

@@ -8,10 +8,11 @@ from empanadas.common import _rootdir

 from jinja2 import Environment, FileSystemLoader

-parser = argparse.ArgumentParser(description="ISO Compose")
+parser = argparse.ArgumentParser(description="Generate Kubernetes Jobs to run lorax in mock and upload the result. Pipe into kubectl for the appropriate cluster")

-parser.add_argument('--release', type=str, help="Major Release Version", required=True)
+parser.add_argument('--release', type=str, help="Major Release Version: (8|9)", required=True)
-parser.add_argument('--env', type=str, help="environment", required=True)
+parser.add_argument('--env', type=str, help="environment: one of (eks|ext|all). presently jobs are scheduled on different kubernetes clusters", required=True)
+parser.add_argument('--rc', action='store_true', help="Release Candidate, Beta, RLN")
 results = parser.parse_args()
 rlvars = rldict[results.release]
 major = rlvars['major']
@@ -30,16 +31,25 @@ def run():
     elif results.env == "all":
         arches = EKSARCH+EXTARCH

-    command = ["build-iso", "--release", f"{results.release}", "--rc", "--isolation", "simple"]
+    command = ["build-iso", "--release", f"{results.release}", "--isolation", "simple", "--hashed"]
+    if results.rc:
+        command += ["--rc"]
+
+    buildstamp = datetime.datetime.utcnow()

     out = ""
-    for arch in arches:
+    for architecture in arches:
+        copy_command = (f"aws s3 cp --recursive --exclude=* --include=lorax* "
+                f"/var/lib/mock/rocky-{ major }-$(uname -m)/root/builddir/ "
+                f"s3://resf-empanadas/buildiso-{ major }-{ architecture }/{ buildstamp.strftime('%s') }/"
+        )
         out += job_template.render(
-            architecture=arch,
+            architecture=architecture,
             backoffLimit=4,
-            buildTime=datetime.datetime.utcnow().strftime("%s"),
+            buildTime=buildstamp.strftime("%s"),
-            command=command,
+            command=[command, copy_command],
-            imageName="ghcr.io/neilhanlon/sig-core-toolkit:latest",
+            imageName="ghcr.io/rocky-linux/sig-core-toolkit:latest",
+            jobname="buildiso",
             namespace="empanadas",
             major=major,
             restartPolicy="Never",

33  iso/empanadas/empanadas/scripts/pull_cloud_image.py  (Executable file)
@@ -0,0 +1,33 @@
# pulls cloud images

import argparse

from empanadas.common import *
from empanadas.util import Checks
from empanadas.util import IsoBuild

parser = argparse.ArgumentParser(description="ISO Artifact Builder")

parser.add_argument('--release', type=str, help="Major Release Version", required=True)
parser.add_argument('--s3', action='store_true', help="Use S3")
parser.add_argument('--arch', type=str, help="Architecture")
parser.add_argument('--local-compose', action='store_true', help="Compose Directory is Here")
parser.add_argument('--force-download', action='store_true', help="Force a download")
parser.add_argument('--logger', type=str)
results = parser.parse_args()
rlvars = rldict[results.release]
major = rlvars['major']

a = IsoBuild(
    rlvars,
    config,
    major=major,
    s3=results.s3,
    arch=results.arch,
    force_download=results.force_download,
    compose_dir_is_here=results.local_compose,
    logger=results.logger,
)

def run():
    a.run_pull_generic_images()

@@ -22,6 +22,7 @@ parser.add_argument('--dry-run', action='store_true')
 parser.add_argument('--full-run', action='store_true')
 parser.add_argument('--no-fail', action='store_true')
 parser.add_argument('--refresh-extra-files', action='store_true')
+parser.add_argument('--refresh-treeinfo', action='store_true')
 # I am aware this is confusing, I want podman to be the default option
 parser.add_argument('--simple', action='store_false')
 parser.add_argument('--logger', type=str)
@@ -52,6 +53,7 @@ a = RepoSync(
     nofail=results.no_fail,
     logger=results.logger,
     refresh_extra_files=results.refresh_extra_files,
+    refresh_treeinfo=results.refresh_treeinfo,
 )

 def run():

@@ -2,6 +2,7 @@

 import argparse

+import empanadas
 from empanadas.common import *
 from empanadas.util import Checks
 from empanadas.util import RepoSync
@@ -16,3 +17,5 @@ a = RepoSync(rlvars, config, major="9", repo="BaseOS", parallel=True, ignore_deb
 def run():
     print(rlvars.keys())
     print(rlvars)
+    print(empanadas.__version__)
+    print(a.hashed)

@@ -1,6 +1,7 @@
 ---
 cloud:
   '8':
+    profile: 'cloud'
     cloud-kernel:
       project_id: 'f91da90d-5bdb-4cf2-80ea-e07f8dae5a5c'
       allowed_arches:
@@ -10,8 +11,23 @@ cloud:
       allowed_arches:
         - aarch64
         - x86_64
-      project_id: ''
+      project_id: 'f91da90d-5bdb-4cf2-80ea-e07f8dae5a5c'
+    extra_files:
+      git_repo: 'https://git.rockylinux.org/staging/src/rocky-release-cloud.git'
+      git_raw_path: 'https://git.rockylinux.org/staging/src/rocky-release-cloud/-/raw/r8/'
+      branch: 'r8'
+      gpg:
+        stable: 'SOURCES/RPM-GPG-KEY-Rocky-SIG-Cloud'
+      list:
+        - 'SOURCES/RPM-GPG-KEY-Rocky-SIG-Cloud'
   '9':
+    cloud-kernel:
+      project_id: ''
+      allowed_arches:
+        - aarch64
+        - x86_64
+        - ppc64le
+        - s390x
     cloud-common:
       project_id: ''
       allowed_arches:

35  iso/empanadas/empanadas/templates/README.tmpl  (Normal file)
@@ -0,0 +1,35 @@
This set of repositories (or "compose") is for {{ fullname }} and was generated
using Empanadas {{ version }} from the SIG/Core Toolkit.

As this is not a traditional compose (via pungi), there will be things that you
might be expecting and do not see, or not expecting and do see. While we
attempted to recreate a lot of those elements, it's not perfect and we don't
expect that it ever will be. With that being said, in the future, we do plan on
having more metadata and providing client libraries that can ingest this type of
metadata that we produce for easy consumption, on top of extending what our
metadata provides.

# Notes #

## Checksums ##

CHECKSUM Validation: https://github.com/rocky-linux/checksums
                     https://git.resf.org/rocky-linux/checksums (mirror)

Traditionally, we would "sign" the checksum files with the current GPG key of a
major release. However, due to how the new build system operates and for
ensuring strong security within the new build ecosystem as it pertains to the
signing keys, this is no longer a viable approach. It was determined by SIG/Core
(or Release Engineering) to instead provide verified signed commits using our
keys with RESF/Rocky Linux email domain names to a proper git repository. Our
signing keys are attached to our GitHub and RESF Git Service profiles.

If you are looking for "verification" of the ISO checksums and were expecting a
`CHECKSUM.sig`, it is highly recommended to visit the link above instead.

To verify our signature, click on "commits" and click the green "Verified"
button where you will see a GPG key ID. You can then search for this ID at
any of the following:

https://keys.openpgp.org/
https://keyserver.ubuntu.com

@@ -5,7 +5,7 @@ set -ex
 {{ lorax_pkg_cmd }}
 mkdir -p {{ compose_work_iso_dir }}/{{ arch }}
 cd {{ compose_work_iso_dir }}/{{ arch }}
-test -f {{ isoname }} || { echo "!! ISO ALREDY EXISTS !!"; exit 1; }
+test -f {{ isoname }} && { echo "ERROR: ISO ALREADY EXISTS!"; exit 1; }
 {% else %}
 cd /builddir

36  iso/empanadas/empanadas/templates/buildLiveImage.tmpl.sh  (Normal file)
@@ -0,0 +1,36 @@
#!/bin/bash
set -ex

{% if live_iso_mode == "podman" %}
{{ live_pkg_cmd }}
mkdir -p {{ compose_live_work_dir }}/{{ arch }}
cd {{ compose_live_work_dir }}/{{ arch }}
test -f {{ isoname }} && { echo "ERROR: ISO ALREADY EXISTS!"; exit 1; }

major=$(grep loop /proc/devices | cut -c3)
for index in 0 1 2 3 4 5; do
  mknod /dev/loop$index $major $index
done
{% else %}
cd /builddir

{% endif %}

{{ git_clone }}
if [ -d "/builddir/ks/live/{{ major }}/peridot" ]; then
  pushd /builddir/ks/live/{{ major }}/peridot || { echo "Could not change directory"; exit 1; }
else
  pushd /builddir/ks/live/{{ major }}/staging || { echo "Could not change directory"; exit 1; }
fi
ksflatten -c {{ ks_file }} -o /builddir/ks.cfg
if [ $? -ne 0 ]; then
  echo "Error flattening kickstart"
  exit 1
fi
popd || { echo "Could not leave directory"; exit 1; }

{{ make_image }}

{% if live_iso_mode == "podman" %}
cp /builddir/lmc/{{ isoname }} {{ compose_live_work_dir }}/{{ arch }}/{{ isoname }}
{% endif %}

@@ -3,6 +3,8 @@
 # under extreme circumstances should you be filling this out and running
 # manually.

+set -o pipefail
+
 # Vars
 MOCK_CFG="/var/tmp/lorax-{{ major }}.cfg"
 MOCK_ROOT="/var/lib/mock/{{ shortname|lower }}-{{ major }}-{{ arch }}"

21  iso/empanadas/empanadas/templates/icicle/tdl.xml.tmpl  (Normal file)
@@ -0,0 +1,21 @@
<template>
  <name>Rocky-{{major}}-{{type}}-{{version_variant}}.{{iso8601date}}.{{release}}.{{architecture}}</name>
  <os>
    <name>RHEL-{{major}}</name>
    <version>{{minor}}</version>
    <arch>{{architecture}}</arch>
    <install type='url'>
      <url>https://download.rockylinux.org/stg/rocky/{{major}}/BaseOS/{{architecture}}/{{installdir}}</url>
    </install>
    <icicle>
      <extra_command>rpm -qa --qf '%{NAME},%{VERSION},%{RELEASE},%{ARCH},%{EPOCH},%{SIZE},%{SIGMD5},%{BUILDTIME}\n'</extra_command>
    </icicle>
    <kernelparam>console=tty0 inst.usefbx</kernelparam>
  </os>
  <description>Rocky-{{major}}-{{type}}-{{version_variant}}.{{iso8601date}}.{{release}}.{{architecture}} Generated on {{utcnow}}</description>
  <disk>
    <size>{{size}}</size>
  </disk>
</template>
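
This is the template render_icicle_template() feeds to imagefactory. A minimal standalone rendering sketch, with illustrative values:

from jinja2 import Environment, FileSystemLoader

env = Environment(loader=FileSystemLoader("empanadas/templates"))
tdl = env.get_template("icicle/tdl.xml.tmpl").render(
    architecture="x86_64",
    iso8601date="20220707",
    installdir="os",
    major="9",
    minor="0",
    release="1",
    size="10G",
    type="GenericCloud",
    utcnow="2022-07-07 00:00:00",   # illustrative timestamp
    version_variant="9.0",
)
print(tdl)  # the rendered <template> XML imagefactory consumes
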
@@ -2,6 +2,8 @@
 # This is a template that is used to build ISOs for Rocky Linux. Only under
 # extreme circumstances should you be filling this out and running manually.

+set -o pipefail
+
 # Vars
 MOCK_CFG="/var/tmp/lorax-{{ major }}.cfg"
 MOCK_ROOT="/var/lib/mock/{{ shortname|lower }}-{{ major }}-{{ arch }}"

@@ -2,7 +2,7 @@
 apiVersion: batch/v1
 kind: Job
 metadata:
-  name: build-iso-{{ major }}-{{ architecture }}
+  name: {{ jobname }}-{{ major }}-{{ architecture }}
   namespace: {{ namespace }}
 spec:
   template:
@@ -11,15 +11,18 @@ spec:
         peridot.rockylinux.org/workflow-tolerates-arch: {{ architecture }}
     spec:
       containers:
-        - name: buildiso-{{ major }}-{{ architecture }}
+        - name: {{ jobname }}-{{ major }}-{{ architecture }}
           image: {{ imageName }}
           command: ["/bin/bash", "-c"]
           args:
             - |
-              {{ command | join(' ') }}
-              aws s3 cp --recursive --exclude=* --include=lorax* \
-                /var/lib/mock/rocky-{{ major }}-$(uname -m)/root/builddir/ \
-                "s3://resf-empanadas/buildiso-{{ major }}-{{ architecture }}/{{ buildTime }}/"
+              {%- for c in command -%}
+              {%- if c is string %}
+              {{ c }}
+              {%- else %}
+              {{ ' '.join(c) }}
+              {%- endif %}
+              {%- endfor %}
       securityContext:
         runAsUser: 0
         runAsGroup: 0

58  iso/empanadas/empanadas/templates/liveisobuild.tmpl.sh  (Normal file)
@@ -0,0 +1,58 @@
#!/bin/bash
# This is a template that is used to build extra ISOs for Rocky Linux. Only
# under extreme circumstances should you be filling this out and running
# manually.

set -o pipefail

# Vars
MOCK_CFG="/var/tmp/live-{{ major }}.cfg"
MOCK_ROOT="/var/lib/mock/{{ shortname|lower }}-{{ major }}-{{ arch }}"
MOCK_RESL="${MOCK_ROOT}/result"
MOCK_CHRO="${MOCK_ROOT}/root"
MOCK_LOG="${MOCK_RESL}/mock-output.log"
IMAGE_SCR="{{ entries_dir }}/buildLiveImage-{{ arch }}-{{ image }}.sh"
IMAGE_ISO="{{ isoname }}"
ISOLATION="{{ isolation }}"
BUILDDIR="{{ builddir }}"

#if [ -f "/usr/sbin/setenforce" ]; then
#  sudo setenforce 0
#fi

# Init the container
mock \
  -r "${MOCK_CFG}" \
  --isolation="${ISOLATION}" \
  --enable-network \
  --init

init_ret_val=$?
if [ $init_ret_val -ne 0 ]; then
  echo "!! MOCK INIT FAILED !!"
  exit 1
fi

mkdir -p "${MOCK_RESL}"
cp "${IMAGE_SCR}" "${MOCK_CHRO}${IMAGE_SCR}"

mock \
  -r "${MOCK_CFG}" \
  --shell \
  --isolation="${ISOLATION}" \
  --enable-network -- /bin/bash "${IMAGE_SCR}" | tee -a "${MOCK_LOG}"

mock_ret_val=$?
if [ $mock_ret_val -eq 0 ]; then
  # Copy resulting data to /var/lib/mock/{{ shortname|lower }}-{{ major }}-{{ arch }}/result
  mkdir -p "${MOCK_RESL}"
  cp "${MOCK_CHRO}${BUILDDIR}/lmc/${IMAGE_ISO}" "${MOCK_RESL}"
else
  echo "!! EXTRA ISO RUN FAILED !!"
  exit 1
fi

# Clean up?
#if [ -f "/usr/sbin/setenforce" ]; then
#  sudo setenforce 1
#fi

38  iso/empanadas/empanadas/templates/minimal/9/aarch64  (Normal file)
@@ -0,0 +1,38 @@
@minimal-environment
@core
@standard
@base
@guest-agents
kernel
lvm2
bubblewrap
efibootmgr
efi-filesystem
efivar-libs
flashrom
fwupd
fwupd-plugin-flashrom
gdisk
glibc-langpack-en
grub2
grub2-efi-aa64
langpacks-core-en
langpacks-en
libatasmart
libblockdev
libblockdev-crypto
libblockdev-fs
libblockdev-loop
libblockdev-mdraid
libblockdev-part
libblockdev-swap
libblockdev-utils
libbytesize
libgcab1
libjcat
libudisks2
libxmlb
mokutil
shim-aa64
udisks2
volume_key-libs

33  iso/empanadas/empanadas/templates/minimal/9/ppc64le  (Normal file)
@@ -0,0 +1,33 @@
@minimal-environment
@core
@standard
@base
@guest-agents
kernel
lvm2
bubblewrap
efi-filesystem
flashrom
fwupd
fwupd-plugin-flashrom
gdisk
glibc-langpack-en
grub2
langpacks-core-en
langpacks-en
libatasmart
libblockdev
libblockdev-crypto
libblockdev-fs
libblockdev-loop
libblockdev-mdraid
libblockdev-part
libblockdev-swap
libblockdev-utils
libbytesize
libgcab1
libjcat
libudisks2
libxmlb
udisks2
volume_key-libs

29  iso/empanadas/empanadas/templates/minimal/9/s390x  (Normal file)
@@ -0,0 +1,29 @@
@minimal-environment
@core
@standard
@base
kernel
lvm2
bubblewrap
efi-filesystem
fwupd
gdisk
glibc-langpack-en
langpacks-core-en
langpacks-en
libatasmart
libblockdev
libblockdev-crypto
libblockdev-fs
libblockdev-loop
libblockdev-mdraid
libblockdev-part
libblockdev-swap
libblockdev-utils
libbytesize
libgcab1
libjcat
libudisks2
libxmlb
udisks2
volume_key-libs

39  iso/empanadas/empanadas/templates/minimal/9/x86_64  (Normal file)
@@ -0,0 +1,39 @@
@minimal-environment
@core
@standard
@base
@guest-agents
kernel
lvm2
bubblewrap
efibootmgr
efi-filesystem
efivar-libs
flashrom
fwupd
fwupd-plugin-flashrom
gdisk
glibc-langpack-en
grub2
grub2-efi-x64
langpacks-core-en
langpacks-en
libatasmart
libblockdev
libblockdev-crypto
libblockdev-fs
libblockdev-loop
libblockdev-mdraid
libblockdev-part
libblockdev-swap
libblockdev-utils
libbytesize
libgcab1
libjcat
libsmbios
libudisks2
libxmlb
mokutil
shim-x64
udisks2
volume_key-libs

@@ -2,6 +2,24 @@
 set -o pipefail
 {{ import_gpg_cmd }} | tee -a {{ sync_log }}
 {{ dnf_plugin_cmd }} | tee -a {{ sync_log }}
+sed -i 's/enabled=1/enabled=0/g' /etc/yum.repos.d/*.repo
 {{ sync_cmd }} | tee -a {{ sync_log }}
+
+# Yes this is a bit hacky. Can't think of a better way to do this.
+ret_val=$?
+if [ "$ret_val" -ne 0 ]; then
+  echo "SYNCING FAILED" | tee -a {{ sync_log }}
+  exit 1
+fi
+
+if [ "$ret_val" -eq 0 ]; then
+  recs=$(grep '\[FAILED\]' {{ sync_log }})
+  if [[ -n "${recs}" ]]; then
+    echo "SOME PACKAGES DID NOT DOWNLOAD" | tee -a {{ sync_log }}
+    exit 1
+  else
+    exit 0
+  fi
+fi
+
 # {{ check_cmd }} | tee -a {{ sync_log }}

@@ -3,6 +3,31 @@ set -o pipefail
 {{ import_gpg_cmd }} | tee -a {{ sync_log }}
 {{ arch_force_cp }} | tee -a {{ sync_log }}
 {{ dnf_plugin_cmd }} | tee -a {{ sync_log }}
+sed -i 's/enabled=1/enabled=0/g' /etc/yum.repos.d/*.repo
 {{ sync_cmd }} | tee -a {{ sync_log }}
+
+# Yes this is a bit hacky. Can't think of a better way to do this.
+ret_val=$?
+if [ "$ret_val" -ne 0 ]; then
+  echo "SYNCING FAILED" | tee -a {{ sync_log }}
+  exit 1
+fi
+
+if [ "$ret_val" -eq 0 ]; then
+  recs=$(grep '\[FAILED\]' {{ sync_log }})
+  if [[ -n "${recs}" ]]; then
+    echo "SOME PACKAGES DID NOT DOWNLOAD" | tee -a {{ sync_log }}
+    exit 1
+  else
+    # This is kind of a hack too.
+    #FOUND=$(grep -A20 'git\.rockylinux\.org' {{ sync_log }} | egrep -c '^\([0-9]+\/[0-9]+\)|\[SKIPPED\]|\.rpm')
+    #if [ "$FOUND" -eq "0" ]; then
+    #  echo "Repository is empty." | tee -a {{ sync_log }}
+    #  rm -rf {{ download_path }}
+    #fi
+    exit 0
+  fi
+fi
+
 # {{ check_cmd }} | tee -a {{ sync_log }}

@@ -8,6 +8,7 @@ from empanadas.util.check import (

 from empanadas.util.shared import (
     Shared,
+    ArchCheck,
 )

 from empanadas.util.dnf_utils import (
File diff suppressed because it is too large (Load Diff)
0  iso/empanadas/empanadas/util/imagebuild.py  (Normal file)
File diff suppressed because it is too large (Load Diff)
@@ -1,7 +1,42 @@
 # These are shared utilities used

 import os
+import json
 import hashlib
+import shlex
+import subprocess
+import yaml
+import requests
+import boto3
+import xmltodict
+import productmd.treeinfo
+import productmd.composeinfo
+import empanadas
+import kobo.shortcuts
+from empanadas.common import Color
+
+class ArchCheck:
+    """
+    Arches and their files
+    """
+    archfile = {
+        'x86_64': [
+            'isolinux/vmlinuz',
+            'images/grub.conf',
+            'EFI/BOOT/BOOTX64.EFI'
+        ],
+        'aarch64': [
+            'EFI/BOOT/BOOTAA64.EFI'
+        ],
+        'ppc64le': [
+            'ppc/bootinfo.txt',
+            'ppc/ppc64/vmlinuz'
+        ],
+        's390x': [
+            'generic.ins',
+            'images/generic.prm'
+        ]
+    }
+
 class Shared:
     """
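
ArchCheck.archfile maps each architecture to the boot files expected in its install tree. A sketch of how it can be used to sanity-check a tree; the function name and example path are hypothetical:

import os

def missing_boot_files(os_tree_path: str, arch: str) -> list:
    # Return any expected boot files absent from the tree
    return [
        f for f in ArchCheck.archfile[arch]
        if not os.path.exists(os.path.join(os_tree_path, f))
    ]

# e.g. missing_boot_files("/mnt/compose/8/BaseOS/x86_64/os", "x86_64")
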
@@ -44,6 +79,189 @@ class Shared:
            checksum.hexdigest()
        )

    @staticmethod
    def treeinfo_new_write(
            file_path,
            distname,
            shortname,
            release,
            arch,
            time,
            repo
    ):
        """
        Writes really basic treeinfo, this is for single repository treeinfo
        data. This is usually called in the case of a fresh run and each repo
        needs one. This basic info may be overwritten later either by lorax
        data or a full run.
        """
        ti = productmd.treeinfo.TreeInfo()
        ti.release.name = distname
        ti.release.short = shortname
        ti.release.version = release
        ti.tree.arch = arch
        ti.tree.build_timestamp = time
        # Variants (aka repos)
        variant = productmd.treeinfo.Variant(ti)
        variant.id = repo
        variant.uid = repo
        variant.name = repo
        variant.type = "variant"
        variant.paths.repository = "."
        variant.paths.packages = "Packages"
        ti.variants.add(variant)
        ti.dump(file_path)
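
A sketch of calling treeinfo_new_write for a fresh, single-repository tree; the literal values and the path are illustrative:

import time

Shared.treeinfo_new_write(
    "/mnt/compose/8/BaseOS/x86_64/os/.treeinfo",  # hypothetical path
    "Rocky Linux",
    "Rocky",
    "8.6",
    "x86_64",
    int(time.time()),
    "BaseOS",
)
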
    @staticmethod
    def treeinfo_modify_write(data, imagemap, logger):
        """
        Modifies a specific treeinfo with already available data. This is in
        the case of modifying treeinfo for primary repos or images.
        """
        arch = data['arch']
        variant = data['variant']
        variant_path = data['variant_path']
        checksum = data['checksum']
        distname = data['distname']
        fullname = data['fullname']
        shortname = data['shortname']
        release = data['release']
        timestamp = data['timestamp']

        os_or_ks = ''
        if '/os' in variant_path or not imagemap['disc']:
            os_or_ks = 'os'
        if '/kickstart' in variant_path:
            os_or_ks = 'kickstart'

        image = os.path.join(variant_path)
        treeinfo = os.path.join(image, '.treeinfo')
        discinfo = os.path.join(image, '.discinfo')
        mediarepo = os.path.join(image, 'media.repo')
        #imagemap = self.iso_map['images'][variant]
        primary = imagemap['variant']
        repos = imagemap['repos']
        is_disc = False

        if imagemap['disc']:
            is_disc = True
            discnum = 1

        # load up productmd
        ti = productmd.treeinfo.TreeInfo()
        ti.load(treeinfo)

        # Set the name
        ti.release.name = distname
        ti.release.short = shortname
        # Set the version (the initial lorax run does this, but we are setting
        # it just in case)
        ti.release.version = release
        # Assign the present images into a var as a copy. For each platform,
        # clear out the present dictionary. For each item and path in the
        # assigned var, assign it back to the platform dictionary. If the path
        # is empty, continue. Do checksums afterwards.
        plats = ti.images.images.copy()
        for platform in ti.images.images:
            ti.images.images[platform] = {}
            for i, p in plats[platform].items():
                if not p:
                    continue
                if 'boot.iso' in i and is_disc:
                    continue
                ti.images.images[platform][i] = p
                ti.checksums.add(p, checksum, root_dir=image)

        # stage2 checksums
        if ti.stage2.mainimage:
            ti.checksums.add(ti.stage2.mainimage, checksum, root_dir=image)

        if ti.stage2.instimage:
            ti.checksums.add(ti.stage2.instimage, checksum, root_dir=image)

        # If we are a disc, set the media section appropriately.
        if is_disc:
            ti.media.discnum = discnum
            ti.media.totaldiscs = discnum

        # Create variants
        # Note to self: There's a lot of legacy stuff running around for
        # Fedora, ELN, and RHEL in general. This is the general structure,
        # apparently. But there could be a chance it'll change. We may need to
        # put in a configuration to deal with it at some point.
        #ti.variants.variants.clear()
        for y in repos:
            if y in ti.variants.variants.keys():
                vari = ti.variants.variants[y]
            else:
                vari = productmd.treeinfo.Variant(ti)

            vari.id = y
            vari.uid = y
            vari.name = y
            vari.type = "variant"
            if is_disc:
                vari.paths.repository = y
                vari.paths.packages = y + "/Packages"
            else:
                if y == primary:
                    vari.paths.repository = "."
                    vari.paths.packages = "Packages"
                else:
                    vari.paths.repository = "../../../" + y + "/" + arch + "/" + os_or_ks
                    vari.paths.packages = "../../../" + y + "/" + arch + "/" + os_or_ks + "/Packages"

            if y not in ti.variants.variants.keys():
                ti.variants.add(vari)

            del vari

        # Set default variant
        logger.info('Writing treeinfo')
        ti.dump(treeinfo, main_variant=primary)
        # Set discinfo
        logger.info('Writing discinfo')
        Shared.discinfo_write(timestamp, fullname, arch, discinfo)
        # Set media.repo
        logger.info('Writing media.repo')
        Shared.media_repo_write(timestamp, fullname, mediarepo)
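
treeinfo_modify_write expects its inputs bundled in a data dict. A sketch of the expected keys, with illustrative values, where imagemap follows the iso_map['images'] shape referenced in the commented-out line above:

data = {
    'arch': 'x86_64',
    'variant': 'BaseOS',
    'variant_path': '/mnt/compose/8/BaseOS/x86_64/os',  # hypothetical path
    'checksum': 'sha256',            # checksum type handed to productmd
    'distname': 'Rocky Linux',
    'fullname': 'Rocky Linux 8.6',
    'shortname': 'Rocky',
    'release': '8.6',
    'timestamp': 1657152000,
}
imagemap = {'variant': 'BaseOS', 'repos': ['BaseOS', 'AppStream'], 'disc': False}
Shared.treeinfo_modify_write(data, imagemap, logger)
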
    @staticmethod
    def write_metadata(
            timestamp,
            datestamp,
            fullname,
            release,
            compose_id,
            file_path
    ):

        metadata = {
            "header": {
                "name": "empanadas",
                "version": empanadas.__version__,
                "type": "toolkit",
                "maintainer": "SIG/Core"
            },
            "payload": {
                "compose": {
                    "date": datestamp,
                    "id": compose_id,
                    "fullname": fullname,
                    "release": release,
                    "timestamp": timestamp
                }
            }
        }

        with open(file_path + ".json", "w+") as fp:
            json.dump(metadata, fp, indent=4)
            fp.close()

        with open(file_path + ".yaml", "w+") as yp:
            yaml.dump(metadata, yp)
            yp.close()
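
write_metadata emits the same metadata once as JSON and once as YAML next to the compose. A usage sketch, with illustrative values:

Shared.write_metadata(
    1657152000,                  # timestamp
    "20220707",                  # datestamp
    "Rocky Linux 8.6",           # fullname
    "8.6",                       # release
    "Rocky-8.6-20220707.0",      # compose_id (hypothetical)
    "/mnt/compose/8/metadata",   # file_path; ".json" and ".yaml" are appended
)
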
    @staticmethod
    def discinfo_write(timestamp, fullname, arch, file_path):
        """
@@ -53,7 +271,8 @@ class Shared:
            "%s" % timestamp,
            "%s" % fullname,
            "%s" % arch,
-           "ALL"
+           "ALL",
+           ""
        ]

        with open(file_path, "w+") as f:
@@ -77,3 +296,668 @@ class Shared:

        with open(file_path, "w") as f:
            f.write("\n".join(data))

    @staticmethod
    def generate_compose_dirs(
            compose_base,
            shortname,
            version,
            date_stamp,
            logger
    ) -> str:
        """
        Generate compose dirs for full runs
        """
        compose_base_dir = os.path.join(
            compose_base,
            "{}-{}-{}".format(
                shortname,
                version,
                date_stamp
            )
        )
        logger.info('Creating compose directory %s' % compose_base_dir)
        if not os.path.exists(compose_base_dir):
            os.makedirs(compose_base_dir)
            os.makedirs(compose_base_dir + '/work')
            os.makedirs(compose_base_dir + '/work/entries')
            os.makedirs(compose_base_dir + '/work/logs')
            os.makedirs(compose_base_dir + '/compose')

        return compose_base_dir
    @staticmethod
    def podman_cmd(logger) -> str:
        """
        This generates the podman run command. This is in the case that we want
        to do reposyncs in parallel as we cannot reasonably run multiple
        instances of dnf reposync on a single system.
        """
        cmd = None
        if os.path.exists("/usr/bin/podman"):
            cmd = "/usr/bin/podman"
        else:
            logger.error(Color.FAIL + '/usr/bin/podman was not found. Good bye.')
            raise SystemExit("\n\n/usr/bin/podman was not found.\n\nPlease "
                    " ensure that you have installed the necessary packages on "
                    " this system. " + Color.BOLD + "Note that docker is not "
                    "supported." + Color.END
            )
        return cmd

    @staticmethod
    def reposync_cmd(logger) -> str:
        """
        This generates the reposync command. We don't support reposync by
        itself and will raise an error.

        :return: The path to the reposync command. If dnf exists, we'll use
        that. Otherwise, fail immediately.
        """
        cmd = None
        if os.path.exists("/usr/bin/dnf"):
            cmd = "/usr/bin/dnf reposync"
        else:
            logger.error(Color.FAIL + '/usr/bin/dnf was not found. Good bye.')
            raise SystemExit("/usr/bin/dnf was not found. \n\n/usr/bin/reposync "
                    "is not sufficient and you are likely running on an el7 "
                    "system or a grossly modified EL8+ system, " + Color.BOLD +
                    "which tells us that you probably made changes to these tools "
                    "expecting them to work and got to this point." + Color.END)
        return cmd

    @staticmethod
    def git_cmd(logger) -> str:
        """
        This generates the git command. This is when we need to pull down extra
        files or do work from a git repository.
        """
        cmd = None
        if os.path.exists("/usr/bin/git"):
            cmd = "/usr/bin/git"
        else:
            logger.error(Color.FAIL + '/usr/bin/git was not found. Good bye.')
            raise SystemExit("\n\n/usr/bin/git was not found.\n\nPlease "
                    " ensure that you have installed the necessary packages on "
                    " this system. "
            )
        return cmd

    @staticmethod
    def mock_cmd(logger) -> str:
        """
        This generates the mock command. This is when we are building or
        performing any kind of operation in mock.
        """
        cmd = None
        if os.path.exists("/usr/bin/mock"):
            cmd = "/usr/bin/mock"
        else:
            logger.error(Color.FAIL + '/usr/bin/mock was not found. Good bye.')
            raise SystemExit("\n\n/usr/bin/mock was not found.\n\nPlease "
                    " ensure that you have installed the necessary packages on "
                    " this system. "
            )
        return cmd
    @staticmethod
    def generate_conf(data, logger, dest_path='/var/tmp') -> str:
        """
        Generates the necessary repo conf file for the operation. This repo
        file should be temporary in nature. This will generate a repo file
        with all repos by default. If a repo is chosen for sync, that will be
        the only one synced.

        :param data: The sync object carrying the repo information
        :param logger: The logger to report to
        :param dest_path: The destination where the temporary conf goes
        """
        fname = os.path.join(
            dest_path,
            "{}-{}-config.repo".format(data.shortname, data.major_version)
        )
        data.log.info('Generating the repo configuration: %s' % fname)

        if data.repo_base_url.startswith("/"):
            logger.error("Local file syncs are not supported.")
            raise SystemExit(Color.BOLD + "Local file syncs are not "
                "supported." + Color.END)

        prehashed = ''
        if data.hashed:
            prehashed = "hashed-"
        # create dest_path
        if not os.path.exists(dest_path):
            os.makedirs(dest_path, exist_ok=True)
        config_file = open(fname, "w+")
        repolist = []
        for repo in data.repos:

            constructed_url = '{}/{}/repo/{}{}/$basearch'.format(
                data.repo_base_url,
                data.project_id,
                prehashed,
                repo,
            )

            constructed_url_src = '{}/{}/repo/{}{}/src'.format(
                data.repo_base_url,
                data.project_id,
                prehashed,
                repo,
            )

            repodata = {
                'name': repo,
                'baseurl': constructed_url,
                'srcbaseurl': constructed_url_src,
                'gpgkey': data.extra_files['git_raw_path'] + data.extra_files['gpg'][data.gpgkey]
            }
            repolist.append(repodata)

        template = data.tmplenv.get_template('repoconfig.tmpl')
        output = template.render(repos=repolist)
        config_file.write(output)

        config_file.close()
        return fname
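
A usage sketch for generate_conf. Here 'sync' stands in for a RepoSync-like object carrying the attributes the method reads (shortname, major_version, repo_base_url, project_id, hashed, repos, extra_files, gpgkey, log, tmplenv); the returned path is presumably handed to dnf reposync:

repo_conf_path = Shared.generate_conf(sync, sync.log)
print(repo_conf_path)   # e.g. /var/tmp/Rocky-8-config.repo
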
@staticmethod
|
||||||
|
def quick_sync(src, dest, logger, tmp_dir):
|
||||||
|
"""
|
||||||
|
Does a quick sync from one place to another. This determines the method
|
||||||
|
in which will be used. We will look for fpsync and fall back to
|
||||||
|
parallel | rsync if that is also available. It will fail if parallel is
|
||||||
|
not available.
|
||||||
|
|
||||||
|
Return true or false on completion?
|
||||||
|
"""
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def simple_sync(src, dest):
|
||||||
|
"""
|
||||||
|
This is for simple syncs only, using rsync or copytree.
|
||||||
|
"""
|
||||||
|
|
||||||
|
@staticmethod
def fpsync_method(src, dest, tmp_dir):
    """
    Runs fpsync for the sync and returns a (message, return code) tuple.
    """
    cmd = '/usr/bin/fpsync'
    rsync_switches = '-av --numeric-ids --no-compress --chown=10004:10005'
    if not os.path.exists(cmd):
        message = 'fpsync not found'
        retval = 1
        return message, retval

    os.makedirs(tmp_dir, exist_ok=True)

    fpsync_cmd = '{} -o "{}" -n 18 -t {} {} {}'.format(
            cmd,
            rsync_switches,
            tmp_dir,
            src,
            dest
    )

    process = subprocess.call(
            shlex.split(fpsync_cmd),
            stdout=subprocess.DEVNULL,
            stderr=subprocess.DEVNULL,
    )

    if process != 0:
        message = 'Syncing (fpsync) failed'
        retval = process
        return message, retval

    if os.path.exists(dest):
        message = 'Syncing (fpsync) succeeded'
        retval = process
    else:
        message = 'Path synced does not seem to exist for some reason.'
        retval = 1

    #shutil.rmtree(tmp_dir)

    return message, retval
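Callers get a (message, return code) tuple back, so a typical invocation (paths hypothetical) looks like:

msg, code = Shared.fpsync_method('/mnt/compose/src/', '/mnt/repos/dest/',
                                 '/mnt/compose/partitions')
if code != 0:
    raise SystemExit(msg)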
@staticmethod
def rsync_method(src, dest):
    """
    Returns a string for the rsync command plus parallel. Yes, this is a
    hack.
    """
    find_cmd = '/usr/bin/find'
    parallel_cmd = '/usr/bin/parallel'
    rsync_cmd = '/usr/bin/rsync'
    switches = '-av --chown=10004:10005 --progress --relative --human-readable'

    os.makedirs(dest, exist_ok=True)

    # Not implemented yet; callers currently always get a failure tuple.
    return 'Not available', 1
@staticmethod
def s3_determine_latest(s3_bucket, release, arches, filetype, name, logger):
    """
    Using native s3, determine the latest artifacts and return a dict
    """
    temp = []
    data = {}
    s3 = boto3.client('s3')

    try:
        s3.list_objects(Bucket=s3_bucket)['Contents']
    except Exception:
        logger.error(
                '[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' +
                'Cannot access s3 bucket.'
        )
        raise SystemExit()

    for y in s3.list_objects(Bucket=s3_bucket)['Contents']:
        if filetype in y['Key'] and release in y['Key'] and name in y['Key']:
            temp.append(y['Key'])

    for arch in arches:
        temps = []
        for y in temp:
            if arch in y:
                temps.append(y)
        temps.sort(reverse=True)
        if len(temps) > 0:
            data[arch] = temps[0]

    return data
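The returned dict maps each requested architecture to the lexicographically greatest (typically the newest datestamped) matching key. An illustrative call, with a hypothetical bucket name and key shapes:

latest = Shared.s3_determine_latest(
    'resf-empanadas',                  # hypothetical bucket name
    '9.0',
    ['x86_64', 'aarch64'],
    'qcow2',
    'GenericCloud',
    logger,                            # any stdlib-style logger
)
# -> {'x86_64': '<newest x86_64 qcow2 key>', 'aarch64': '<newest aarch64 qcow2 key>'}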
@staticmethod
def s3_download_artifacts(force_download, s3_bucket, source, dest, logger):
    """
    Download the requested artifact(s) via s3
    """
    s3 = boto3.client('s3')
    if os.path.exists(dest):
        if not force_download:
            logger.warning(
                    '[' + Color.BOLD + Color.YELLOW + 'WARN' + Color.END + '] ' +
                    'Artifact at ' + dest + ' already exists'
            )
            return

    logger.info('Downloading ({}) to: {}'.format(source, dest))
    try:
        s3.download_file(
                Bucket=s3_bucket,
                Key=source,
                Filename=dest
        )
    except Exception:
        logger.error('There was an issue downloading from %s' % s3_bucket)
@staticmethod
def reqs_determine_latest(s3_bucket_url, release, arches, filetype, name, logger):
    """
    Using requests, determine the latest artifacts and return a dict
    """
    temp = []
    data = {}

    try:
        bucket_data = requests.get(s3_bucket_url)
    except requests.exceptions.RequestException as e:
        logger.error('The s3 bucket http endpoint is inaccessible')
        raise SystemExit(e)

    resp = xmltodict.parse(bucket_data.content)

    for y in resp['ListBucketResult']['Contents']:
        if filetype in y['Key'] and release in y['Key'] and name in y['Key']:
            temp.append(y['Key'])

    for arch in arches:
        temps = []
        for y in temp:
            if arch in y:
                temps.append(y)
        temps.sort(reverse=True)
        if len(temps) > 0:
            data[arch] = temps[0]

    return data
@staticmethod
def reqs_download_artifacts(force_download, s3_bucket_url, source, dest, logger):
    """
    Download the requested artifact(s) via requests only
    """
    if os.path.exists(dest):
        if not force_download:
            logger.warning(
                    '[' + Color.BOLD + Color.YELLOW + 'WARN' + Color.END + '] ' +
                    'Artifact at ' + dest + ' already exists'
            )
            return
    unurl = s3_bucket_url + '/' + source

    logger.info('Downloading ({}) to: {}'.format(source, dest))
    try:
        with requests.get(unurl, allow_redirects=True) as r:
            with open(dest, 'wb') as f:
                f.write(r.content)
    except requests.exceptions.RequestException as e:
        logger.error('There was a problem downloading the artifact')
        raise SystemExit(e)
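Note that reqs_download_artifacts buffers the entire body in memory via r.content. For multi-gigabyte artifacts a streaming variant would avoid that; a sketch (not the current behavior) using the standard requests streaming API:

import requests

def stream_download(url, dest, chunk_size=1024 * 1024):
    # stream=True keeps the body off the heap; iter_content yields chunks.
    with requests.get(url, stream=True, allow_redirects=True) as r:
        r.raise_for_status()
        with open(dest, 'wb') as f:
            for chunk in r.iter_content(chunk_size=chunk_size):
                f.write(chunk)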
# ISO related
@staticmethod
def get_boot_options(arch, createfrom, efi=True, hfs_compat=False):
    """
    Gets boot options based on architecture; the iso commands are not
    universal.
    """
    if arch in ("armhfp",):
        result = []
        return result

    if arch in ("aarch64",):
        result = [
            "-eltorito-alt-boot",
            "-e",
            "images/efiboot.img",
            "-no-emul-boot",
        ]
        return result

    if arch in ("i386", "i686", "x86_64"):
        result = [
            "-b",
            "isolinux/isolinux.bin",
            "-c",
            "isolinux/boot.cat",
            "-no-emul-boot",
            "-boot-load-size",
            "4",
            "-boot-info-table",
        ]

        # EFI args
        if arch == "x86_64":
            result.extend(
                [
                    "-eltorito-alt-boot",
                    "-e",
                    "images/efiboot.img",
                    "-no-emul-boot"
                ]
            )
        return result

    # need to go double check if this is needed with stream 9
    if arch == "ppc64le" and hfs_compat:
        result = [
            "-part",
            "-hfs",
            "-r",
            "-l",
            "-sysid",
            "PPC",
            "-no-desktop",
            "-allow-multidot",
            "-chrp-boot",
            "-map",
            os.path.join(createfrom, "mapping"),
            "-hfs-bless",
            "/ppc/mac"
        ]
        return result

    if arch == "ppc64le" and not hfs_compat:
        result = [
            "-r",
            "-l",
            "-sysid",
            "PPC",
            "-chrp-boot",
        ]
        return result

    if arch in ("s390x",):
        result = [
            "-eltorito-boot",
            "images/cdboot.img",
            "-no-emul-boot",
        ]
        return result

    raise ValueError("Architecture %s%s%s is NOT known" % (Color.BOLD, arch, Color.END))
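As a concrete example of the arch-specific output, x86_64 yields the BIOS isolinux arguments followed by the EFI El Torito entry (createfrom is only consulted for ppc64le, so any placeholder works here):

boot_args = Shared.get_boot_options('x86_64', '/unused/for/x86_64')
# ['-b', 'isolinux/isolinux.bin', '-c', 'isolinux/boot.cat', '-no-emul-boot',
#  '-boot-load-size', '4', '-boot-info-table',
#  '-eltorito-alt-boot', '-e', 'images/efiboot.img', '-no-emul-boot']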
@staticmethod
def get_mkisofs_cmd(
        iso,
        appid=None,
        volid=None,
        volset=None,
        exclude=None,
        boot_args=None,
        input_charset="utf-8",
        grafts=None,
        use_xorrisofs=False,
        iso_level=None,
):
    # I should hardcode this I think
    #untranslated_filenames = True
    translation_table = True
    #joliet = True
    #joliet_long = True
    #rock = True
    cmd = ["/usr/bin/xorrisofs" if use_xorrisofs else "/usr/bin/genisoimage"]
    if not os.path.exists(cmd[0]):
        #logger.error('%s was not found. Good bye.' % cmd[0])
        raise SystemExit("\n\n" + cmd[0] + " was not found.\n\nPlease "
                         "ensure that you have installed the necessary packages "
                         "on this system."
        )

    if iso_level:
        cmd.extend(["-iso-level", str(iso_level)])

    if appid:
        cmd.extend(["-appid", appid])

    #if untranslated_filenames:
    cmd.append("-untranslated-filenames")

    if volid:
        cmd.extend(["-volid", volid])

    #if joliet:
    cmd.append("-J")

    #if joliet_long:
    cmd.append("-joliet-long")

    if volset:
        cmd.extend(["-volset", volset])

    #if rock:
    cmd.append("-rational-rock")

    if not use_xorrisofs and translation_table:
        cmd.append("-translation-table")

    if input_charset:
        cmd.extend(["-input-charset", input_charset])

    if exclude:
        for i in kobo.shortcuts.force_list(exclude):
            cmd.extend(["-x", i])

    if boot_args:
        cmd.extend(boot_args)

    cmd.extend(["-o", iso])

    if grafts:
        cmd.append("-graft-points")
        cmd.extend(["-path-list", grafts])

    return cmd
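Joined with spaces, a typical command produced by this builder looks like the following. The volume id and paths are hypothetical; note the function raises SystemExit if the chosen binary is not installed.

cmd = Shared.get_mkisofs_cmd(
    '/mnt/iso/Rocky-dvd.iso',              # hypothetical output path
    volid='Rocky-9-0-x86_64-dvd',
    exclude=['./lost+found'],
    grafts='/mnt/iso/graft-points',
    iso_level=3,
    boot_args=Shared.get_boot_options('x86_64', ''),
)
# cmd[0] is /usr/bin/genisoimage here; pass use_xorrisofs=True to build an
# /usr/bin/xorrisofs command instead.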
@staticmethod
def get_make_image_cmd(opts, hfs_compat):
    """
    Generates the command to actually make the image in the first place
    """
    isokwargs = {}
    isokwargs["boot_args"] = Shared.get_boot_options(
            opts['arch'],
            os.path.join("$TEMPLATE", "config_files/ppc"),
            hfs_compat=hfs_compat,
    )

    if opts['arch'] in ("ppc64", "ppc64le"):
        isokwargs["input_charset"] = None

    if opts['use_xorrisofs']:
        cmd = ['/usr/bin/xorriso', '-dialog', 'on', '<', opts['graft_points']]
    else:
        cmd = Shared.get_mkisofs_cmd(
                opts['iso_name'],
                volid=opts['volid'],
                exclude=["./lost+found"],
                grafts=opts['graft_points'],
                use_xorrisofs=False,
                iso_level=opts['iso_level'],
                **isokwargs
        )

    returned_cmd = ' '.join(cmd)
    return returned_cmd
@staticmethod
def get_isohybrid_cmd(opts):
    cmd = []
    # isohybrid only applies to the genisoimage path on x86_64; everything
    # else gets an empty command string.
    if not opts['use_xorrisofs'] and opts['arch'] == "x86_64":
        cmd = ["/usr/bin/isohybrid"]
        cmd.append("--uefi")
        cmd.append(opts['iso_name'])
        returned_cmd = ' '.join(cmd)
    else:
        returned_cmd = ''

    return returned_cmd
@staticmethod
def get_implantisomd5_cmd(opts):
    """
    Implants md5 into iso
    """
    cmd = ["/usr/bin/implantisomd5", "--supported-iso", opts['iso_name']]
    returned_cmd = ' '.join(cmd)
    return returned_cmd
@staticmethod
def get_manifest_cmd(opts):
    """
    Gets an ISO manifest
    """
    if opts['use_xorrisofs']:
        return """/usr/bin/xorriso -dev %s --find |
tail -n+2 |
tr -d "'" |
cut -c2- | sort >> %s.manifest""" % (
            shlex.quote(opts['iso_name']),
            shlex.quote(opts['iso_name']),
        )
    else:
        return "/usr/bin/isoinfo -R -f -i %s | grep -v '/TRANS.TBL$' | sort >> %s.manifest" % (
            shlex.quote(opts['iso_name']),
            shlex.quote(opts['iso_name']),
        )
@staticmethod
def build_repo_list(
        repo_base_url,
        repos,
        project_id,
        current_arch,
        compose_latest_sync,
        compose_dir_is_here: bool = False,
        hashed: bool = False,
):
    """
    Builds the repo dictionary
    """
    repolist = []
    prehashed = ''
    if hashed:
        prehashed = 'hashed-'

    for name in repos:
        if not compose_dir_is_here:
            constructed_url = '{}/{}/repo/{}{}/{}'.format(
                    repo_base_url,
                    project_id,
                    prehashed,
                    name,
                    current_arch
            )
        else:
            constructed_url = 'file://{}/{}/{}/os'.format(
                    compose_latest_sync,
                    name,
                    current_arch
            )

        repodata = {
                'name': name,
                'url': constructed_url
        }

        repolist.append(repodata)

    return repolist
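The result is a list of name/url dicts ready for template rendering. With compose_dir_is_here=True the URLs point at the local compose tree instead of the peridot repo endpoint (values below are hypothetical):

repolist = Shared.build_repo_list(
    'https://yumrepofs.example.invalid',   # hypothetical base URL
    ['BaseOS', 'AppStream'],
    'project-uuid',
    'x86_64',
    '/mnt/compose/9/latest-Rocky-9',
    compose_dir_is_here=True,
)
# [{'name': 'BaseOS',
#   'url': 'file:///mnt/compose/9/latest-Rocky-9/BaseOS/x86_64/os'}, ...]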
@staticmethod
def composeinfo_write(
        file_path,
        distname,
        shortname,
        release,
        release_type,
        datestamp,
        arches: list = [],
        repos: list = []
):
    """
    Write compose info similar to pungi.

    arches and repos may be better suited for a dictionary. that is a
    future thing we will work on for 0.3.0.
    """
    cijson = file_path + '.json'
    ciyaml = file_path + '.yaml'
    ci = productmd.composeinfo.ComposeInfo()
    ci.release.name = distname
    ci.release.short = shortname
    ci.release.version = release
    ci.release.type = release_type

    ci.compose.id = '{}-{}-{}'.format(shortname, release, datestamp)
    ci.compose.type = "production"
    ci.compose.date = datestamp
    ci.compose.respin = 0

    ci.dump(cijson)

    with open(cijson, 'r') as cidump:
        jsonData = json.load(cidump)

    with open(ciyaml, 'w+') as ymdump:
        yaml.dump(jsonData, ymdump)
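A call mirroring a Rocky 9.0 compose (all argument values hypothetical) writes both <file_path>.json via productmd and a YAML mirror of the same data:

Shared.composeinfo_write(
    '/mnt/compose/9/latest-Rocky-9/COMPOSE_INFO',   # hypothetical path
    'Rocky Linux',
    'Rocky',
    '9.0',
    'ga',
    '20220705',
)
# Produces COMPOSE_INFO.json and COMPOSE_INFO.yaml with compose id
# 'Rocky-9.0-20220705'.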
16 iso/empanadas/imagefactory.patch Normal file
@@ -0,0 +1,16 @@
diff --git a/imagefactory_plugins/TinMan/TinMan.info b/imagefactory_plugins/TinMan/TinMan.info
index bd61a02..00a8112 100644
--- a/imagefactory_plugins/TinMan/TinMan.info
+++ b/imagefactory_plugins/TinMan/TinMan.info
@@ -3,7 +3,10 @@
     "targets": [ ["Fedora", null, null], ["RHEL-6", null, null], ["RHEL-5", null, null],
                  ["Ubuntu", null, null], ["CentOS-6", null, null], ["CentOS-5", null, null],
                  ["ScientificLinux-6", null, null], ["ScientificLinux-5", null, null], ["OpenSUSE", null, null],
-                 [ "RHEL-7", null, null ], [ "CentOS-7", null, null ], [ "ScientificLinux-7", null, null ] ],
+                 [ "RHEL-7", null, null ], [ "CentOS-7", null, null ], [ "ScientificLinux-7", null, null ],
+                 [ "RHEL-8", null, null ], [ "CentOS-8", null, null ], [ "Rocky-8", null, null ],
+                 [ "RHEL-9", null, null ], [ "CentOS-9", null, null ], [ "Rocky-9", null, null ]
+               ],
     "description": "Plugin to support most Oz customize capable guest types",
     "maintainer": {
         "name": "Red Hat, Inc.",
BIN iso/empanadas/oz.rpm Normal file
Binary file not shown.
54 iso/empanadas/poetry.lock generated
@@ -10,7 +10,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
 name = "attrs"
 version = "21.4.0"
 description = "Classes Without Boilerplate"
-category = "dev"
+category = "main"
 optional = false
 python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
 
@@ -22,14 +22,14 @@ tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>
 
 [[package]]
 name = "boto3"
-version = "1.24.14"
+version = "1.24.22"
 description = "The AWS SDK for Python"
 category = "main"
 optional = false
 python-versions = ">= 3.7"
 
 [package.dependencies]
-botocore = ">=1.27.14,<1.28.0"
+botocore = ">=1.27.22,<1.28.0"
 jmespath = ">=0.7.1,<2.0.0"
 s3transfer = ">=0.6.0,<0.7.0"
 
@@ -38,7 +38,7 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"]
 
 [[package]]
 name = "botocore"
-version = "1.27.14"
+version = "1.27.22"
 description = "Low-level, data-driven core of boto 3."
 category = "main"
 optional = false
@@ -62,11 +62,11 @@ python-versions = ">=3.6"
 
 [[package]]
 name = "charset-normalizer"
-version = "2.0.12"
+version = "2.1.0"
 description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
 category = "main"
 optional = false
-python-versions = ">=3.5.0"
+python-versions = ">=3.6.0"
 
 [package.extras]
 unicode_backport = ["unicodedata2"]
@@ -89,7 +89,7 @@ python-versions = ">=3.5"
 
 [[package]]
 name = "importlib-metadata"
-version = "4.11.4"
+version = "4.12.0"
 description = "Read metadata from Python packages"
 category = "dev"
 optional = false
@@ -102,7 +102,7 @@ zipp = ">=0.5"
 [package.extras]
 docs = ["sphinx", "jaraco.packaging (>=9)", "rst.linker (>=1.9)"]
 perf = ["ipython"]
-testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "packaging", "pyfakefs", "flufl.flake8", "pytest-perf (>=0.9.2)", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)", "importlib-resources (>=1.3)"]
+testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.3)", "packaging", "pyfakefs", "flufl.flake8", "pytest-perf (>=0.9.2)", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)", "importlib-resources (>=1.3)"]
 
 [[package]]
 name = "importlib-resources"
@@ -143,7 +143,7 @@ python-versions = ">=3.7"
 
 [[package]]
 name = "kobo"
-version = "0.24.1"
+version = "0.24.2"
 description = "A pile of python modules used by Red Hat release engineering to build their tools"
 category = "main"
 optional = false
@@ -267,7 +267,7 @@ python-versions = ">=3.6"
 
 [[package]]
 name = "requests"
-version = "2.28.0"
+version = "2.28.1"
 description = "Python HTTP for Humans."
 category = "main"
 optional = false
@@ -275,13 +275,13 @@ python-versions = ">=3.7, <4"
 
 [package.dependencies]
 certifi = ">=2017.4.17"
-charset-normalizer = ">=2.0.0,<2.1.0"
+charset-normalizer = ">=2,<3"
 idna = ">=2.5,<4"
 urllib3 = ">=1.21.1,<1.27"
 
 [package.extras]
 socks = ["PySocks (>=1.5.6,!=1.5.7)"]
-use_chardet_on_py3 = ["chardet (>=3.0.2,<5)"]
+use_chardet_on_py3 = ["chardet (>=3.0.2,<6)"]
 
 [[package]]
 name = "rpm-py-installer"
@@ -315,7 +315,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
 
 [[package]]
 name = "typing-extensions"
-version = "4.2.0"
+version = "4.3.0"
 description = "Backported and Experimental Type Hints for Python 3.7+"
 category = "dev"
 optional = false
@@ -365,7 +365,7 @@ testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-
 [metadata]
 lock-version = "1.1"
 python-versions = ">=3.7,<4"
-content-hash = "ccd47ad1b0819968dbad34b68c3f9afd98bd657ee639f9037731fd2a0746bd16"
+content-hash = "42676fd0ceb350c8cd90246dc688cfcd404e14d22229052d0527fe342c135b95"
 
 [metadata.files]
 atomicwrites = [
@@ -377,20 +377,20 @@ attrs = [
     {file = "attrs-21.4.0.tar.gz", hash = "sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd"},
 ]
 boto3 = [
-    {file = "boto3-1.24.14-py3-none-any.whl", hash = "sha256:490f5e88f5551b33ae3019a37412158b76426d63d1fb910968ade9b6a024e5fe"},
+    {file = "boto3-1.24.22-py3-none-any.whl", hash = "sha256:c9a9f893561f64f5b81de197714ac4951251a328672a8dba28ad4c4a589c3adf"},
-    {file = "boto3-1.24.14.tar.gz", hash = "sha256:e284705da36faa668c715ae1f74ebbff4320dbfbe3a733df3a8ab076d1ed1226"},
+    {file = "boto3-1.24.22.tar.gz", hash = "sha256:67d404c643091d4aa37fc485193289ad859f1f65f94d0fa544e13bdd1d4187c1"},
 ]
 botocore = [
-    {file = "botocore-1.27.14-py3-none-any.whl", hash = "sha256:df1e9b208ff93daac7c645b0b04fb6dccd7f20262eae24d87941727025cbeece"},
+    {file = "botocore-1.27.22-py3-none-any.whl", hash = "sha256:7145d9b7cae87999a9f074de700d02a1b3222ee7d1863aa631ff56c5fc868035"},
-    {file = "botocore-1.27.14.tar.gz", hash = "sha256:bb56fa77b8fa1ec367c2e16dee62d60000451aac5140dcce3ebddc167fd5c593"},
+    {file = "botocore-1.27.22.tar.gz", hash = "sha256:f57cb33446deef92e552b0be0e430d475c73cf64bc9e46cdb4783cdfe39cb6bb"},
 ]
 certifi = [
     {file = "certifi-2022.6.15-py3-none-any.whl", hash = "sha256:fe86415d55e84719d75f8b69414f6438ac3547d2078ab91b67e779ef69378412"},
     {file = "certifi-2022.6.15.tar.gz", hash = "sha256:84c85a9078b11105f04f3036a9482ae10e4621616db313fe045dd24743a0820d"},
 ]
 charset-normalizer = [
-    {file = "charset-normalizer-2.0.12.tar.gz", hash = "sha256:2857e29ff0d34db842cd7ca3230549d1a697f96ee6d3fb071cfa6c7393832597"},
+    {file = "charset-normalizer-2.1.0.tar.gz", hash = "sha256:575e708016ff3a5e3681541cb9d79312c416835686d054a23accb873b254f413"},
-    {file = "charset_normalizer-2.0.12-py3-none-any.whl", hash = "sha256:6881edbebdb17b39b4eaaa821b438bf6eddffb4468cf344f09f89def34a8b1df"},
+    {file = "charset_normalizer-2.1.0-py3-none-any.whl", hash = "sha256:5189b6f22b01957427f35b6a08d9a0bc45b46d3788ef5a92e978433c7a35f8a5"},
 ]
 colorama = [
     {file = "colorama-0.4.5-py2.py3-none-any.whl", hash = "sha256:854bf444933e37f5824ae7bfc1e98d5bce2ebe4160d46b5edf346a89358e99da"},
@@ -401,8 +401,8 @@ idna = [
     {file = "idna-3.3.tar.gz", hash = "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"},
 ]
 importlib-metadata = [
-    {file = "importlib_metadata-4.11.4-py3-none-any.whl", hash = "sha256:c58c8eb8a762858f49e18436ff552e83914778e50e9d2f1660535ffb364552ec"},
+    {file = "importlib_metadata-4.12.0-py3-none-any.whl", hash = "sha256:7401a975809ea1fdc658c3aa4f78cc2195a0e019c5cbc4c06122884e9ae80c23"},
-    {file = "importlib_metadata-4.11.4.tar.gz", hash = "sha256:5d26852efe48c0a32b0509ffbc583fda1a2266545a78d104a6f4aff3db17d700"},
+    {file = "importlib_metadata-4.12.0.tar.gz", hash = "sha256:637245b8bab2b6502fcbc752cc4b7a6f6243bb02b31c5c26156ad103d3d45670"},
 ]
 importlib-resources = [
     {file = "importlib_resources-5.8.0-py3-none-any.whl", hash = "sha256:7952325ffd516c05a8ad0858c74dff2c3343f136fe66a6002b2623dd1d43f223"},
@@ -417,7 +417,7 @@ jmespath = [
     {file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"},
 ]
 kobo = [
-    {file = "kobo-0.24.1.tar.gz", hash = "sha256:d5a30cc20c323f3e9d9b4b2e511650c4b98929b88859bd8cf57463876686e407"},
+    {file = "kobo-0.24.2.tar.gz", hash = "sha256:1b3c17260a93d933d2238884373fbf3485ecd417d930acf984285dc012410e2b"},
 ]
 markupsafe = [
     {file = "MarkupSafe-2.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d8446c54dc28c01e5a2dbac5a25f071f6653e6e40f3a8818e8b45d790fe6ef53"},
@@ -558,8 +558,8 @@ pyyaml = [
     {file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"},
 ]
 requests = [
-    {file = "requests-2.28.0-py3-none-any.whl", hash = "sha256:bc7861137fbce630f17b03d3ad02ad0bf978c844f3536d0edda6499dafce2b6f"},
+    {file = "requests-2.28.1-py3-none-any.whl", hash = "sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349"},
-    {file = "requests-2.28.0.tar.gz", hash = "sha256:d568723a7ebd25875d8d1eaf5dfa068cd2fc8194b2e483d7b1f7c81918dbec6b"},
+    {file = "requests-2.28.1.tar.gz", hash = "sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983"},
 ]
 rpm-py-installer = [
     {file = "rpm-py-installer-1.1.0.tar.gz", hash = "sha256:66e5f4f9247752ed386345642683103afaee50fb16928878a204bc12504b9bbe"},
@@ -573,8 +573,8 @@ six = [
     {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"},
 ]
 typing-extensions = [
-    {file = "typing_extensions-4.2.0-py3-none-any.whl", hash = "sha256:6657594ee297170d19f67d55c05852a874e7eb634f4f753dbd667855e07c1708"},
+    {file = "typing_extensions-4.3.0-py3-none-any.whl", hash = "sha256:25642c956049920a5aa49edcdd6ab1e06d7e5d467fc00e0506c44ac86fbfca02"},
-    {file = "typing_extensions-4.2.0.tar.gz", hash = "sha256:f1c24655a0da0d1b67f07e17a5e6b2a105894e6824b92096378bb3668ef02376"},
+    {file = "typing_extensions-4.3.0.tar.gz", hash = "sha256:e6d2677a32f47fc7eb2795db1dd15c1f34eff616bcaf2cfb5e997f854fa1c4a6"},
 ]
 urllib3 = [
     {file = "urllib3-1.26.9-py2.py3-none-any.whl", hash = "sha256:44ece4d53fb1706f667c9bd1c648f5469a2ec925fcf3a776667042d645472c14"},
55 iso/empanadas/prep-azure.sh Executable file
@@ -0,0 +1,55 @@
#!/bin/bash

usage() {
    cat << EOF
$0: prep raw image for azure

usage: $0 raw_image [outdir]

Description: Takes a raw image, calculates the closest whole-MiB size,
resizes a copy of the raw image to it, and prints the path to the resulting
'vpc' image (a .vhd file to upload).

Dumps the VHD in \$PWD by default. Override by passing an output directory
as the second argument.

Don't try to compress it.
EOF
}

log() {
    local level="$1"; shift
    local msg="$@"
    local out=$([ "$level" == "error" ] && echo 2 || echo 1)
    printf "[%s] %s: %s\n" "$(date '+%Y-%m-%d %H:%M:%S')" "${level}" "${msg}" >&${out}
    if [[ "${level}" == "error" ]]; then
        exit
    fi
}

MB=$((1024*1024)) # for calculations - 1048576 bytes

if ! command -v qemu-img 2>&1 >/dev/null; then
    log error "Need qemu-img."
    usage
    exit
fi

rawdisk="$1"

if [[ -z "$rawdisk" ]]; then
    usage
    log error "need path to a raw image to prep"
fi

outdir="${2:-${PWD}}"

size=$(qemu-img info -f raw --output json "${rawdisk}" | gawk 'match($0, /"virtual-size": ([0-9]+),/, val) {print val[1]}')

rounded_size=$(((($size+$MB-1)/$MB)*$MB)) # round the size in bytes up to the next whole MiB

outfilename=$(basename "${rawdisk//body/vhd}")
outfile="${outdir}/${outfilename}"
qemu-img resize -f raw "${rawdisk}" "${rounded_size}" || log error "failed to resize"
qemu-img convert -f raw -o subformat=fixed,force_size -O vpc "${rawdisk}" "${outfile}" || log error "failed to convert to VHD format"

echo "${outfile}"
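The rounding arithmetic is the important part for Azure, which rejects VHDs whose virtual size is not MiB-aligned. The same ceiling-to-MiB computation as the shell expression, in Python for reference:

MB = 1024 * 1024

def round_up_to_mib(size_bytes: int) -> int:
    # Equivalent to the shell's ((($size+$MB-1)/$MB)*$MB): integer ceiling
    # division to the next whole MiB, never rounding down.
    return ((size_bytes + MB - 1) // MB) * MB

assert round_up_to_mib(10 * MB) == 10 * MB       # already aligned, unchanged
assert round_up_to_mib(10 * MB + 1) == 11 * MB   # anything over rounds up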
@@ -1,8 +1,8 @@
 [tool.poetry]
 name = "empanadas"
-version = "0.1.0"
+version = "0.3.0"
 description = "hand crafted ISOs with love and spice"
-authors = ["Louis Abel <louis@rockylinux.org>", "Neil Hanlon <neil@rockylinux.org>"]
+authors = ["Louis Abel <label@rockylinux.org>", "Neil Hanlon <neil@rockylinux.org>"]
 
 [tool.poetry.dependencies]
 python = ">=3.7,<4"
@@ -16,6 +16,7 @@ boto3 = "^1.24.12"
 xmltodict = "^0.13.0"
 requests = "^2.28.0"
 kobo = "^0.24.1"
+attrs = "^21.4.0"
 
 [tool.poetry.dev-dependencies]
 pytest = "~5"
@@ -26,8 +27,13 @@ sync_from_peridot_test = "empanadas.scripts.sync_from_peridot_test:run"
 sync_sig = "empanadas.scripts.sync_sig:run"
 build-iso = "empanadas.scripts.build_iso:run"
 build-iso-extra = "empanadas.scripts.build_iso_extra:run"
+build-iso-live = "empanadas.scripts.build_iso_live:run"
 pull-unpack-tree = "empanadas.scripts.pull_unpack_tree:run"
 launch-builds = "empanadas.scripts.launch_builds:run"
+build-image = "empanadas.scripts.build_image:run"
+finalize_compose = "empanadas.scripts.finalize_compose:run"
+pull-cloud-image = "empanadas.scripts.pull_cloud_image:run"
+generate_compose = "empanadas.scripts.generate_compose:run"
 
 [build-system]
 requires = ["poetry-core>=1.0.0"]
@@ -2,4 +2,4 @@ from empanadas import __version__
 
 
 def test_version():
-    assert __version__ == '0.1.0'
+    assert __version__ == '0.2.0'
@@ -3,6 +3,7 @@
 # To be sourced by scripts as needed
 
 # The mirrorlist url
-MIRRORLIST_BASE="http://mirrors.rockylinux.org/mirrorlist"
+LIST=${LIST:-mirrorlist}
+MIRRORLIST_BASE="http://mirrors.rockylinux.org/${LIST}"
 
 MIRROR_DISPLAY_COUNT=1
@@ -1,11 +1,14 @@
 #!/usr/bin/env bash
+
+###
+# Use RLVER=9 for rocky 9
+
 # Source mangle vars
 # shellcheck source=./common disable=SC1091,1090
-source "$(dirname "$0")/common"
+source "$(dirname "${BASH_SOURCE[0]}")/common"
 # Source sync / migrate vars for repository information
 # shellcheck source=../sync/common disable=SC1091,1090
-source "$(dirname "$0")/../sync/common"
+source "$(dirname "${BASH_SOURCE[0]}")/../sync/common"
 
 # How many
 ARG1=${1}
@@ -30,12 +33,16 @@ cleanup_repo () {
     repo="${repo^^}"
   fi
 
-  # Everything has an 8 appended to it
-  repo="${repo}-8"
+  # Append the major version from sync/common to support 8 and 9
+  repo="${repo}-${MAJOR}"
   return 0
 }
 
-for repo in "${ALL_REPOS[@]}"; do
+# Sort the array
+IFS=$'\n' sorted=($(sort <<<"${ALL_REPOS[*]}"))
+unset IFS
+
+for repo in "${sorted[@]}"; do
+
   # Business logic must be done, sometimes...
   cleanup_repo "${repo}"
@@ -52,8 +59,8 @@ for repo in "${ALL_REPOS[@]}"; do
   result=$(curl -s "${MIRRORLIST_BASE}?repo=${repo}&arch=${arch}&time&country=global")
   print_result
 
-  # x86 and a64 have 'debug' types, as well
+  # x86 and a64 have 'debug' types, as well ("arch" != "source")
-  if [[ "${arch}" =~ ^(x86_|aarch)64$ ]]; then
+  if [[ "${arch}" =~ ^(x86_|aarch)64|(s390x|ppc64le)$ ]]; then
     result=$(curl -s "${MIRRORLIST_BASE}?repo=${repo}-debug&arch=${arch}&time&country=global")
     print_result
   fi
@@ -32,7 +32,7 @@ ARCHES=(x86_64 aarch64)
 
 # Source Major common
 # Override: Not Allowed
-test -f "$(dirname "$0")/common_${RLVER}" && source "$(dirname "$0")/common_${RLVER}"
+test -f "$(dirname "${BASH_SOURCE[0]}")/common_${RLVER}" && source "$(dirname "${BASH_SOURCE[0]}")/common_${RLVER}"
 if [ "$?" -ne 0 ]; then
   echo "Could not source common_${RLVER}"
   exit 1
@@ -9,7 +9,7 @@ MAJOR="${REVISION:0:1}"
 MINOR="${REVISION:2:1}"
 
 # comment or blank if needed
-APPEND_TO_DIR="-RC1"
+APPEND_TO_DIR="-RC2"
 
 STAGING_ROOT="/mnt/repos-staging"
 PRODUCTION_ROOT="/mnt/repos-production"
@@ -3,7 +3,7 @@
 
 # Source common variables
 # shellcheck disable=SC2046,1091,1090
-source "$(dirname "$0")/common"
+source "$(dirname "${BASH_SOURCE[0]}")/common"
 
 NAME=gen-torrents
 
46 sync/sync-to-prod-9.sh Normal file
@@ -0,0 +1,46 @@
#!/bin/bash
# Syncs everything from staging to production

# Source common variables
# shellcheck disable=SC2046,1091,1090
source "$(dirname "$0")/common"

REV=${REVISION}${APPEND_TO_DIR}

cd "${STAGING_ROOT}/${CATEGORY_STUB}/${REV}" || { echo "Failed to change directory"; ret_val=1; exit 1; }
ret_val=$?

if [ $ret_val -eq "0" ]; then
    TARGET="${PRODUCTION_ROOT}/${CATEGORY_STUB}/${REV:0:3}"
    mkdir -p "${TARGET}"
    echo "Syncing ${REVISION}"
    sudo -l && time fpsync -o '-av --numeric-ids --no-compress --chown=10004:10005' -n 24 -t /mnt/compose/partitions "${STAGING_ROOT}/${CATEGORY_STUB}/${REV}/" "${TARGET}/"

    # Full file list update for production root
    cd "${PRODUCTION_ROOT}/" || { echo "Failed to change directory"; exit 1; }
    echo "Getting a full file list for the root dir"
    find . > fullfilelist
    if [[ -f /usr/local/bin/create-filelist ]]; then
        # We're already here, but Justin Case wanted this
        cd "${PRODUCTION_ROOT}/" || { echo "Failed to change directory"; exit 1; }
        /bin/cp fullfiletimelist-rocky fullfiletimelist-rocky-old
        /usr/local/bin/create-filelist > fullfiletimelist-rocky
        cp fullfiletimelist-rocky fullfiletimelist
    fi
    # Full file list update for rocky linux itself
    cd "${PRODUCTION_ROOT}/${CATEGORY_STUB}/" || { echo "Failed to change directory"; exit 1; }
    # Hardlink everything except xml files
    echo "Hard linking"
    hardlink -x '.*\.xml.*' "${REVISION}"
    echo "Getting a full file list for the rocky dir"
    find . > fullfilelist
    if [[ -f /usr/local/bin/create-filelist ]]; then
        # We're already here, but Justin Case wanted this
        cd "${PRODUCTION_ROOT}/${CATEGORY_STUB}/" || { echo "Failed to change directory"; exit 1; }
        /bin/cp fullfiletimelist-rocky fullfiletimelist-rocky-old
        /usr/local/bin/create-filelist > fullfiletimelist-rocky
        cp fullfiletimelist-rocky fullfiletimelist
    fi
    chown 10004:10005 fullfilelist fullfiletimelist-rocky fullfiletimelist
fi
@@ -22,7 +22,27 @@ if [ $ret_val -eq "0" ]; then
   sudo -l && find ** -maxdepth 0 -type l | parallel --will-cite -j 18 sudo rsync -av --chown=10004:10005 --progress --relative --human-readable \
       {} "${TARGET}"
 
-  # Full file list update
+  # Temporary until empanadas has this support
+  if [ -f "COMPOSE_ID" ]; then
+      cp COMPOSE_ID "${TARGET}"
+      chown 10004:10005 "${TARGET}/COMPOSE_ID"
+  fi
+
+  if [ -d "metadata" ]; then
+      rsync -av --chown=10004:10005 --progress --relative --human-readable metadata "${TARGET}"
+  fi
+
+  # Full file list update for production root
+  cd "${PRODUCTION_ROOT}/" || { echo "Failed to change directory"; exit 1; }
+  find . > fullfilelist
+  if [[ -f /usr/local/bin/create-filelist ]]; then
+      # We're already here, but Justin Case wanted this
+      cd "${PRODUCTION_ROOT}/" || { echo "Failed to change directory"; exit 1; }
+      /bin/cp fullfiletimelist-rocky fullfiletimelist-rocky-old
+      /usr/local/bin/create-filelist > fullfiletimelist-rocky
+      cp fullfiletimelist-rocky fullfiletimelist
+  fi
+  # Full file list update for rocky linux itself
   cd "${PRODUCTION_ROOT}/${CATEGORY_STUB}/" || { echo "Failed to change directory"; exit 1; }
   # Hardlink everything except xml files
   hardlink -x '.*\.xml.*' "${REVISION}"
40 sync/sync-to-staging-9.sh Normal file
@@ -0,0 +1,40 @@
#!/bin/bash

# Source common variables
# shellcheck disable=SC2046,1091,1090
source "$(dirname "$0")/common"

if [[ $# -eq 0 ]]; then
    echo "You must specify a short name."
    exit 1
fi

# Major Version (eg, 8)
MAJ=${RLVER}
# Short name (eg, NFV, extras, Rocky, gluster9)
SHORT=${1}
PROFILE=${2}

cd "/mnt/compose/${MAJ}/latest-${SHORT}-${MAJ}${PROFILE}/compose" || { echo "Failed to change directory"; ret_val=1; exit 1; }
ret_val=$?

if [ $ret_val -eq "0" ]; then
    TARGET="${STAGING_ROOT}/${CATEGORY_STUB}/${REV}"
    mkdir -p "${TARGET}"
    # disabling because none of our files should be starting with dashes. If they
    # are something is *seriously* wrong here.
    # shellcheck disable=SC2035
    #sudo -l && find **/* -maxdepth 0 -type d | parallel --will-cite -j 18 sudo rsync -av --chown=10004:10005 --progress --relative --human-readable \
    #    {} "${TARGET}"
    sudo -l && time fpsync -o '-av --numeric-ids --no-compress --chown=10004:10005' -n 24 -t /mnt/compose/partitions "/mnt/compose/${MAJ}/latest-${SHORT}-${MAJ}${PROFILE}/compose/" "${TARGET}/"

    # This is temporary until we implement rsync into empanadas
    #if [ -f "COMPOSE_ID" ]; then
    #    cp COMPOSE_ID "${TARGET}"
    #    chown 10004:10005 "${TARGET}/COMPOSE_ID"
    #fi

    #if [ -d "metadata" ]; then
    #    rsync -av --chown=10004:10005 --progress --relative --human-readable metadata "${TARGET}"
    #fi
fi
@@ -25,4 +25,14 @@ if [ $ret_val -eq "0" ]; then
   # shellcheck disable=SC2035
   sudo -l && find **/* -maxdepth 0 -type d | parallel --will-cite -j 18 sudo rsync -av --chown=10004:10005 --progress --relative --human-readable \
       {} "${TARGET}"
+
+  # This is temporary until we implement rsync into empanadas
+  if [ -f "COMPOSE_ID" ]; then
+      cp COMPOSE_ID "${TARGET}"
+      chown 10004:10005 "${TARGET}/COMPOSE_ID"
+  fi
+
+  if [ -d "metadata" ]; then
+      rsync -av --chown=10004:10005 --progress --relative --human-readable metadata "${TARGET}"
+  fi
 fi