forked from sig_core/toolkit

Merge branch 'devel' into 'main'

Sync into Main

See merge request release-engineering/public/toolkit!47

commit 190e1b4b22

.github/workflows/mix-empanadas.yml (vendored, new file, 47 lines)
@@ -0,0 +1,47 @@
---
name: Build empanada container images

on:
  push:
    branches: [ $default-branch ]
  pull_request:
    branches: [ $default-branch ]
  workflow_dispatch:

jobs:
  buildx:
    runs-on:
      - ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v2

      - name: Set up QEMU
        uses: docker/setup-qemu-action@v1
      # https://github.com/docker/setup-buildx-action
      - name: Set up Docker Buildx
        id: buildx
        uses: docker/setup-buildx-action@v1
        with:
          install: true

      - name: Login to ghcr
        if: github.event_name != 'pull_request'
        uses: docker/login-action@v1
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Build and push
        id: docker_build
        uses: docker/build-push-action@v2
        with:
          builder: ${{ steps.buildx.outputs.name }}
          platforms: linux/amd64,linux/arm64,linux/s390x,linux/ppc64le
          context: ./iso/empanadas
          file: ./iso/empanadas/Containerfile
          push: ${{ github.event_name != 'pull_request' }}
          tags: ghcr.io/neilhanlon/sig-core-toolkit:latest
          cache-from: type=gha
          cache-to: type=gha,mode=max
@@ -28,9 +28,11 @@ How to Run

There are two ways to run through the tests (see the sample invocation after this list):

* By running `/bin/bash runtests.sh`
  * Runs all tests
  * Runs all core tests
* By running `/bin/bash stacktests.sh`
  * Runs all stack tests (eg, lamp, ipa)
* By running `/bin/bash monotests.sh`
  * Runs all tests one by one to help identify failures as they happen
  * Supposed to run all tests one by one to help identify failures as they happen (not functional)
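
A quick usage sketch (assuming the suites are launched from the directory that contains the scripts and run as root; both details are assumptions, not stated in this excerpt):

```bash
# Run the core test suite; output is appended to the dated log under ./log/
/bin/bash runtests.sh

# Run only the stack tests (eg, lamp, ipa)
/bin/bash stacktests.sh
```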

Adding Tests
------------
@ -316,6 +318,9 @@ Current Tree
|
||||
│ │ ├── 00-install-lsof.sh
|
||||
│ │ ├── 10-test-lsof.sh
|
||||
│ │ └── README.md
|
||||
│ ├── pkg_mdadm
|
||||
│ │ ├── 00-install-mdadm.sh
|
||||
│ │ └── 01-test-mdadm.sh
|
||||
│ ├── pkg_network
|
||||
│ │ ├── 00-install-packages.sh
|
||||
│ │ ├── 10-tracepath.sh
|
||||
@ -348,6 +353,13 @@ Current Tree
|
||||
│ │ ├── 30-postfix-sasl.sh
|
||||
│ │ ├── 40-postfix-tls.sh
|
||||
│ │ └── README.md
|
||||
│ ├── pkg_postgresql
|
||||
│ │ ├── 00-install-postgresql.sh
|
||||
│ │ ├── 01-configure-postgresql.sh
|
||||
│ │ ├── 10-create-db.sh
|
||||
│ │ ├── 11-create-user.sh
|
||||
│ │ ├── 20-drop-db.sh
|
||||
│ │ └── 21-drop-user.sh
|
||||
│ ├── pkg_python
|
||||
│ │ ├── 00-install-python.sh
|
||||
│ │ ├── 10-test-python3.sh
|
||||
@ -423,6 +435,11 @@ Current Tree
|
||||
│ ├── pkg_telnet
|
||||
│ │ ├── 00-install-telnet.sh
|
||||
│ │ └── 10-test-telnet.sh
|
||||
│ ├── pkg_tftp-server
|
||||
│ │ ├── 00-install-tftp.sh
|
||||
│ │ ├── 01-configure-tftp.sh
|
||||
│ │ ├── 10-get-test.sh
|
||||
│ │ └── 11-put-test.sh
|
||||
│ ├── pkg_vsftpd
|
||||
│ │ ├── 00-install-vsftpd.sh
|
||||
│ │ ├── 10-anonymous-vsftpd.sh
|
||||
@ -447,20 +464,21 @@ Current Tree
|
||||
├── README.md
|
||||
├── runtests.sh
|
||||
├── skip.list
|
||||
└── stacks
|
||||
├── ipa
|
||||
│ ├── 00-ipa-pregame.sh
|
||||
│ ├── 10-install-ipa.sh
|
||||
│ ├── 11-configure-ipa.sh
|
||||
│ ├── 12-verify-ipa.sh
|
||||
│ ├── 20-ipa-user.sh
|
||||
│ ├── 21-ipa-service.sh
|
||||
│ ├── 22-ipa-dns.sh
|
||||
│ ├── 23-ipa-sudo.sh
|
||||
│ ├── 50-cleanup-ipa.sh
|
||||
│ └── README.md
|
||||
└── lamp
|
||||
├── 00-install-lamp.sh
|
||||
├── 01-verification.sh
|
||||
└── 10-test-lamp.sh
|
||||
├── stacks
|
||||
│ ├── ipa
|
||||
│ │ ├── 00-ipa-pregame.sh
|
||||
│ │ ├── 10-install-ipa.sh
|
||||
│ │ ├── 11-configure-ipa.sh
|
||||
│ │ ├── 12-verify-ipa.sh
|
||||
│ │ ├── 20-ipa-user.sh
|
||||
│ │ ├── 21-ipa-service.sh
|
||||
│ │ ├── 22-ipa-dns.sh
|
||||
│ │ ├── 23-ipa-sudo.sh
|
||||
│ │ ├── 50-cleanup-ipa.sh
|
||||
│ │ └── README.md
|
||||
│ └── lamp
|
||||
│ ├── 00-install-lamp.sh
|
||||
│ ├── 01-verification.sh
|
||||
│ └── 10-test-lamp.sh
|
||||
└── stacks.sh
|
||||
```
|
||||
|
@@ -11,5 +11,5 @@ export readonly RELEASE_NAME=rocky
# A 0 means it was successful. It can be changed to 1 on failure.
export IPAINSTALLED=0

LOGFILE="./log/$(date +'%m-%d-%Y')-tests.log"
LOGFILE="$(pwd)/log/$(date +'%m-%d-%Y')-tests.log"
export LOGFILE

@ -2,4 +2,4 @@
|
||||
r_log "archive" "Installing appropriate archive formats"
|
||||
|
||||
# We might need expect for zmore - does anyone actually use zmore?
|
||||
p_installPackageNormal bzip2 diffutils gzip less ncompress tar unzip util-linux-ng zip lzop
|
||||
p_installPackageNormal bzip2 diffutils gzip less tar unzip util-linux-ng zip lzop
|
||||
|
@ -89,8 +89,12 @@ gzip $FILE $FILE.1 || r_checkExitStatus 1
|
||||
r_log "archive" "Verify that .Z files can be handled"
|
||||
gunzip $FILE.gz
|
||||
ls -l /var/tmp >> $FILE
|
||||
compress $FILE || r_checkExitStatus 1
|
||||
gunzip $FILE.Z || r_checkExitStatus 1
|
||||
if [ "$RL_VER" -eq 8 ]; then
|
||||
compress $FILE || r_checkExitStatus 1
|
||||
gunzip $FILE.Z || r_checkExitStatus 1
|
||||
else
|
||||
r_log "archive" "Skipping for 9"
|
||||
fi
|
||||
|
||||
# handle some zip files
|
||||
r_log "archive" "Verify that .zip files can be handled"
|
||||
|
@ -9,7 +9,7 @@ fi
|
||||
|
||||
COUNTS="$(grep -c rockylinux.org $FIREPATH)"
|
||||
|
||||
if [ "$COUNTS" -eq 2 ]; then
|
||||
if [ "$COUNTS" -ge 2 ]; then
|
||||
r_checkExitStatus 0
|
||||
else
|
||||
r_checkExitStatus 1
|
||||
|
@ -2,7 +2,7 @@
|
||||
r_log "httpd" "Verify httpd branding"
|
||||
|
||||
r_log "httpd" "Token"
|
||||
curl -sI http://localhost/ | grep -i "Server:\ Apache.*\ (Rocky)" > /dev/null 2>&1
|
||||
curl -sI http://localhost/ | grep -i "Server:\ Apache.*\ (Rocky Linux)" > /dev/null 2>&1
|
||||
r_checkExitStatus $?
|
||||
|
||||
r_log "httpd" "index"
|
||||
|
@ -1,3 +1,8 @@
|
||||
#!/bin/bash
|
||||
r_log "lsb" "Install LSB package"
|
||||
if [ "$RL_VER" -ge 8 ]; then
|
||||
r_log "lsb" "redhat-lsb is not in EL9"
|
||||
exit $PASS
|
||||
fi
|
||||
|
||||
p_installPackageNormal redhat-lsb
|
||||
|
@ -1,5 +1,10 @@
|
||||
#!/bin/bash
|
||||
r_log "lsb" "Test LSB branding"
|
||||
if [ "$RL_VER" -ge 8 ]; then
|
||||
r_log "lsb" "redhat-lsb is not in EL9"
|
||||
exit $PASS
|
||||
fi
|
||||
|
||||
lsb_release -i | grep -q "Rocky"
|
||||
r_checkExitStatus $?
|
||||
lsb_release -d | grep -q "Rocky"
|
||||
|
func/core/pkg_mdadm/00-install-mdadm.sh (new executable file, 3 lines)
@ -0,0 +1,3 @@
|
||||
#!/bin/bash
|
||||
r_log "mdadm" "Install mdadm"
|
||||
p_installPackageNormal mdadm
|
func/core/pkg_mdadm/01-test-mdadm.sh (new executable file, 13 lines)
@ -0,0 +1,13 @@
|
||||
#!/bin/bash
|
||||
r_log "mdadm" "Check that mdadm will operate and return the right exit codes"
|
||||
[ ${EUID} -eq 0 ] || { r_log "mdadm" "Not running as root. Skipping." ; exit "$PASS"; }
|
||||
MDADM=$(which mdadm)
|
||||
|
||||
[ -z "${MDADM}" ] && { r_log "mdadm" "which reported the binary but it doesn't exist, why?"; exit "$FAIL"; }
|
||||
|
||||
${MDADM} --detail --scan &> /dev/null
|
||||
ret_val=$?
|
||||
|
||||
[ "$ret_val" -eq 0 ] || { r_log "mdadm" "There was a non-zero exit. This is likely fatal."; exit "$FAIL"; }
|
||||
|
||||
r_checkExitStatus $ret_val
|
@ -1,3 +1,7 @@
|
||||
#!/bin/bash
|
||||
r_log "network" "Install necessary network packages and utilities"
|
||||
p_installPackageNormal traceroute iputils iproute mtr arpwatch psmisc net-tools which iptraf
|
||||
pkgs=(traceroute iputils iproute mtr psmisc net-tools which iptraf)
|
||||
if [ "$RL_VER" -eq 8 ]; then
|
||||
pkgs+=( arpwatch )
|
||||
fi
|
||||
p_installPackageNormal "${pkgs[@]}"
|
||||
|
func/core/pkg_postgresql/00-install-postgresql.sh (new executable file, 3 lines)
@ -0,0 +1,3 @@
|
||||
#!/bin/bash
|
||||
r_log "postgresql" "Installing postgresql"
|
||||
p_installPackageNormal postgresql-server postgresql
|
func/core/pkg_postgresql/01-configure-postgresql.sh (new executable file, 5 lines)
@ -0,0 +1,5 @@
|
||||
#!/bin/bash
|
||||
r_log "postgresql" "Initialize postgresql"
|
||||
postgresql-setup --initdb
|
||||
m_serviceCycler postgresql cycle
|
||||
sleep 15
|
func/core/pkg_postgresql/10-create-db.sh (new executable file, 4 lines)
@ -0,0 +1,4 @@
|
||||
#!/bin/bash
|
||||
r_log "postgresql" "Creating db"
|
||||
su - postgres -c 'createdb pg_test'
|
||||
r_checkExitStatus $?
|
func/core/pkg_postgresql/11-create-user.sh (new executable file, 4 lines)
@ -0,0 +1,4 @@
|
||||
#!/bin/bash
|
||||
r_log "postgresql" "Creating user"
|
||||
su - postgres -c 'createuser -S -R -D testuser' > /dev/null 2>&1
|
||||
r_checkExitStatus $?
|
func/core/pkg_postgresql/20-drop-db.sh (new executable file, 4 lines)
@ -0,0 +1,4 @@
|
||||
#!/bin/bash
|
||||
r_log "postgresql" "Dropping database"
|
||||
su - postgres -c 'dropdb pg_test' > /dev/null 2>&1
|
||||
r_checkExitStatus $?
|
func/core/pkg_postgresql/21-drop-user.sh (new executable file, 4 lines)
@ -0,0 +1,4 @@
|
||||
#!/bin/bash
|
||||
r_log "postgresql" "Dropping user"
|
||||
su - postgres -c 'dropuser testuser' > /dev/null 2>&1
|
||||
r_checkExitStatus $?
|
@ -4,8 +4,8 @@ if [ "$RL_VER" -eq 8 ]; then
|
||||
file /etc/pki/rpm-gpg/RPM-GPG-KEY-rockyofficial > /dev/null 2>&1 && \
|
||||
file /etc/pki/rpm-gpg/RPM-GPG-KEY-rockytesting > /dev/null 2>&1
|
||||
else
|
||||
file "/etc/pki/rpm-gpg/RPM-GPG-KEY-Rocky-${RL_VER}" > /ev/null 2>&1 && \
|
||||
file "/etc/pki/rpm-gpg/RPM-GPG-KEY-Rocky-${RL_VER}-Testing" > /ev/null 2>&1
|
||||
file "/etc/pki/rpm-gpg/RPM-GPG-KEY-Rocky-${RL_VER}" > /dev/null 2>&1 && \
|
||||
file "/etc/pki/rpm-gpg/RPM-GPG-KEY-Rocky-${RL_VER}-Testing" > /dev/null 2>&1
|
||||
fi
|
||||
|
||||
r_checkExitStatus $?
|
||||
|
@ -4,10 +4,10 @@ r_log "rocky" "Check /etc/os-release stuff"
|
||||
r_log "rocky" "Verify support directives"
|
||||
for s in NAME=\"Rocky\ Linux\" \
|
||||
ID=\"rocky\" \
|
||||
ROCKY_SUPPORT_PRODUCT=\"Rocky\ Linux\" \
|
||||
ROCKY_SUPPORT_PRODUCT_VERSION=\"$RL_VER\"; do
|
||||
ROCKY_SUPPORT_PRODUCT=\"Rocky-Linux-$RL_VER\" \
|
||||
ROCKY_SUPPORT_PRODUCT_VERSION=\"$RL_VER\..*\"; do
|
||||
if ! grep -q "$s" /etc/os-release; then
|
||||
r_log "rocky" "Missing string in /etc/os-release"
|
||||
r_log "rocky" "Missing string ($s) in /etc/os-release"
|
||||
r_checkExitStatus 1
|
||||
fi
|
||||
done
|
||||
|
@ -110,6 +110,6 @@ r_log "shadow" "Test sg"
|
||||
sg onyxuser "touch /var/tmp/onyxsg"
|
||||
r_checkExitStatus $?
|
||||
r_log "shadow" "Verify sg worked"
|
||||
stat --format="%U" /var/tmp/onyxsg | grep -q onyxuser
|
||||
stat --format="%G" /var/tmp/onyxsg | grep -q onyxuser
|
||||
r_checkExitStatus $?
|
||||
rm /var/tmp/onyxsg
|
||||
|
func/core/pkg_tftp-server/00-install-tftp.sh (new executable file, 3 lines)
@ -0,0 +1,3 @@
|
||||
#!/bin/bash
|
||||
r_log "tftp" "Installing packages"
|
||||
p_installPackageNormal tftp-server tftp
|
func/core/pkg_tftp-server/01-configure-tftp.sh (new executable file, 23 lines)
@ -0,0 +1,23 @@
|
||||
#!/bin/bash
|
||||
r_log "tftp" "Configure tftp"
|
||||
|
||||
if [ "$RL_VER" -eq 8 ]; then
|
||||
cat <<EOF > /etc/xinetd.d/tftp
|
||||
service tftp
|
||||
{
|
||||
socket_type = dgram
|
||||
protocol = udp
|
||||
wait = yes
|
||||
user = root
|
||||
server = /usr/sbin/in.tftpd
|
||||
server_args = -s /var/lib/tftpboot
|
||||
disable = no
|
||||
per_source = 11
|
||||
cps = 100 2
|
||||
flags = IPv4
|
||||
}
|
||||
EOF
|
||||
|
||||
fi
|
||||
|
||||
m_serviceCycler tftp.socket start
|
func/core/pkg_tftp-server/10-get-test.sh (new executable file, 10 lines)
@ -0,0 +1,10 @@
|
||||
#!/bin/bash
|
||||
r_log "tftp" "Getting a file from tftp"
|
||||
|
||||
chmod 777 /var/lib/tftpboot
|
||||
echo "rocky func" > /var/lib/tftpboot/tftptest
|
||||
tftp 127.0.0.1 -c get tftptest
|
||||
|
||||
grep -q "rocky func" tftptest
|
||||
r_checkExitStatus
|
||||
/bin/rm tftptest
|
func/core/pkg_tftp-server/11-put-test.sh (new executable file, 14 lines)
@ -0,0 +1,14 @@
|
||||
#!/bin/bash
|
||||
r_log "tftp" "Testing anon write"
|
||||
TFTPDIR=/var/lib/tftpboot
|
||||
setsebool tftp_anon_write 1
|
||||
chmod 777 $TFTPDIR
|
||||
echo "rocky func" > puttest
|
||||
touch $TFTPDIR > $TFTPDIR/puttest
|
||||
chmod 666 $TFTPDIR/puttest
|
||||
tftp 127.0.0.1 -c put puttest
|
||||
sleep 2
|
||||
grep -q 'rocky func' $TFTPDIR/puttest
|
||||
r_checkExitStatus $?
|
||||
/bin/rm puttest
|
||||
/bin/rm $TFTPDIR/puttest
|
@ -10,9 +10,12 @@
|
||||
# -> Must be a URL to bugs.rl.o, a github issue number, or a code, such as:
|
||||
# * NEEDINFO
|
||||
# * NOTREADY
|
||||
# * NOPKG
|
||||
8|./core/pkg_archive/26-zmore.sh|nazunalika|NEEDINFO
|
||||
8|./core/pkg_nfs/12-prepare-autofs.sh|nazunalika|NEEDINFO
|
||||
8|./core/pkg_diffutils/00-install-diff.sh|nazunalika|NOTREADY
|
||||
8|./core/pkg_snmp/12-test-snmp-3.sh|nazunalika|NOTWORKING
|
||||
8|./core/pkg_samba/00-install-samba.sh|nazunalika|NOTWORKING
|
||||
8|./core/pkg_samba/10-test-samba.sh|nazunalika|NOTWORKING
|
||||
9|./core/pkg_archive/27-znew.sh|nazunalika|NOPKG
|
||||
9|./core/pkg_network/30-test-arpwatch.sh|nazunalika|NOPKG
|
||||
|
func/stacks.sh (new file, 59 lines)
@ -0,0 +1,59 @@
|
||||
#!/bin/bash
|
||||
# Release Engineering Core Functionality Testing
|
||||
# Louis Abel <label@rockylinux.org> @nazunalika
|
||||
|
||||
################################################################################
|
||||
# Settings and variables
|
||||
|
||||
# Exits on any non-zero exit status - Disabled for now.
|
||||
#set -e
|
||||
# Undefined variables will cause an exit
|
||||
set -u
|
||||
|
||||
COMMON_EXPORTS='./common/exports.sh'
|
||||
COMMON_IMPORTS='./common/imports.sh'
|
||||
SELINUX=$(getenforce)
|
||||
|
||||
# End
|
||||
################################################################################
|
||||
|
||||
# shellcheck source=/dev/null disable=SC2015
|
||||
[ -f $COMMON_EXPORTS ] && source $COMMON_EXPORTS || { echo -e "\n[-] $(date): Variables cannot be sourced."; exit 1; }
|
||||
# shellcheck source=/dev/null disable=SC2015
|
||||
[ -f $COMMON_IMPORTS ] && source $COMMON_IMPORTS || { echo -e "\n[-] $(date): Functions cannot be sourced."; exit 1; }
|
||||
# Init log
|
||||
# shellcheck disable=SC2015
|
||||
[ -e "$LOGFILE" ] && m_recycleLog || touch "$LOGFILE"
|
||||
# SELinux check
|
||||
if [ "$SELINUX" != "Enforcing" ]; then
|
||||
echo -e "\n[-] $(date): SELinux is not enforcing."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
r_log "internal" "Starting Release Engineering Core Tests"
|
||||
|
||||
################################################################################
|
||||
# Script Work
|
||||
|
||||
# Skip tests in a list - some tests are already -x, so it won't be an issue
|
||||
if [ -e skip.list ]; then
|
||||
r_log "internal" "Disabling tests"
|
||||
# shellcheck disable=SC2162
|
||||
grep -E "^${RL_VER}" skip.list | while read line; do
|
||||
# shellcheck disable=SC2086
|
||||
testFile="$(echo $line | cut -d '|' -f 2)"
|
||||
r_log "internal" "SKIP ${testFile}"
|
||||
chmod -x "${testFile}"
|
||||
done
|
||||
r_log "internal" "WARNING: Tests above were disabled."
|
||||
fi
|
||||
|
||||
# TODO: should we let $1 judge what directory is ran?
|
||||
# TODO: get some stacks and lib in there
|
||||
|
||||
#r_processor <(/usr/bin/find ./core -type f | sort -t'/')
|
||||
#r_processor <(/usr/bin/find ./lib -type f | sort -t'/')
|
||||
r_processor <(/usr/bin/find ./stacks -type f | sort -t'/')
|
||||
|
||||
r_log "internal" "Core Tests completed"
|
||||
exit 0
|
@ -9,5 +9,6 @@ fi
|
||||
# going to be the same thing or not so this check is there just in case.
|
||||
if [ "$RL_VER" -eq 8 ]; then
|
||||
p_enableModule idm:DL1/{client,common,dns,server}
|
||||
p_installPackageNormal ipa-server ipa-server-dns
|
||||
fi
|
||||
|
||||
p_installPackageNormal ipa-server ipa-server-dns
|
||||
|
@ -4,8 +4,77 @@ if m_getArch aarch64 | grep -qE 'aarch64'; then
|
||||
exit 0
|
||||
fi
|
||||
|
||||
if [ "$IPAINSTALLED" -eq 1 ]; then
|
||||
if [ "$IPAINSTALLED" -eq 1 ]; then
|
||||
r_log "ipa" "IPA was not successfully installed. Aborting."
|
||||
r_checkExitStatus 1
|
||||
fi
|
||||
|
||||
kdestroy &> /dev/null
|
||||
klist 2>&1 | grep -E "(No credentials|Credentials cache .* not found)" &> /dev/null
|
||||
r_checkExitStatus $?
|
||||
|
||||
expect -f - <<EOF
|
||||
set send_human {.1 .3 1 .05 2}
|
||||
spawn kinit admin
|
||||
sleep 1
|
||||
expect "Password for admin@RLIPA.LOCAL:"
|
||||
send -h "b1U3OnyX!\r"
|
||||
sleep 5
|
||||
close
|
||||
EOF
|
||||
|
||||
klist | grep "admin@RLIPA.LOCAL" &> /dev/null
|
||||
r_checkExitStatus $?
|
||||
|
||||
r_log "ipa" "Test adding a user"
|
||||
userDetails="$(ipa user-add --first=test --last=user --random ipatestuser)"
|
||||
echo "$userDetails" | grep -q 'Added user "ipatestuser"'
|
||||
r_checkExitStatus $?
|
||||
|
||||
echo "$userDetails" | grep -q 'First name: test'
|
||||
r_checkExitStatus $?
|
||||
echo "$userDetails" | grep -q 'Last name: user'
|
||||
r_checkExitStatus $?
|
||||
echo "$userDetails" | grep -q 'Full name: test user'
|
||||
r_checkExitStatus $?
|
||||
echo "$userDetails" | grep -q 'Home directory: /home/ipatestuser'
|
||||
r_checkExitStatus $?
|
||||
|
||||
r_log "ipa" "Changing password of the user"
|
||||
kdestroy &> /dev/null
|
||||
|
||||
expect -f - <<EOF
|
||||
set send_human {.1 .3 1 .05 2}
|
||||
spawn kinit ipatestuser
|
||||
sleep 1
|
||||
expect "Password for ipatestuser@RLIPA.LOCAL: "
|
||||
send -h -- "$(echo "$userDetails" | awk '$0 ~ /Random password/ {print $3}')\r"
|
||||
sleep 1
|
||||
expect "Enter new password: "
|
||||
send -h -- "gr@YAm3thy5st!\r"
|
||||
sleep 1
|
||||
expect "Enter it again: "
|
||||
send -h -- "gr@YAm3thy5st!\r"
|
||||
sleep 5
|
||||
close
|
||||
EOF
|
||||
|
||||
r_log "ipa" "Re-doing a kinit"
|
||||
expect -f - <<EOF
|
||||
set send_human {.1 .3 1 .05 2}
|
||||
spawn kinit ipatestuser
|
||||
sleep 1
|
||||
expect "Password for ipatestuser@C6IPA.LOCAL:"
|
||||
send -h "gr@YAm3thy5st!\r"
|
||||
sleep 1
|
||||
close
|
||||
EOF
|
||||
|
||||
klist | grep "ipatestuser@RLIPA.LOCAL" &> /dev/null
|
||||
r_checkExitStatus $?
|
||||
|
||||
kdestroy &> /dev/null
|
||||
|
||||
r_log "ipa" "Testing for user in getent"
|
||||
getent passwd ipatestuser &> /dev/null
|
||||
r_checkExitStatus $?
|
||||
|
@ -1,6 +1,6 @@
|
||||
#!/bin/bash
|
||||
if m_getArch aarch64 | grep -qE 'aarch64'; then
|
||||
r_log "ipa -bash" "Skipping for aarch64"
|
||||
r_log "ipa" "Skipping for aarch64"
|
||||
exit 0
|
||||
fi
|
||||
|
||||
@ -9,3 +9,61 @@ if [ "$IPAINSTALLED" -eq 1 ]; then
|
||||
r_checkExitStatus 1
|
||||
fi
|
||||
|
||||
kdestroy &> /dev/null
|
||||
klist 2>&1 | grep -E "(No credentials|Credentials cache .* not found)" &> /dev/null
|
||||
r_checkExitStatus $?
|
||||
|
||||
expect -f - <<EOF
|
||||
set send_human {.1 .3 1 .05 2}
|
||||
spawn kinit admin
|
||||
sleep 1
|
||||
expect "Password for admin@RLIPA.LOCAL:"
|
||||
send -h "b1U3OnyX!\r"
|
||||
sleep 5
|
||||
close
|
||||
EOF
|
||||
|
||||
klist | grep "admin@RLIPA.LOCAL" &> /dev/null
|
||||
r_checkExitStatus $?
|
||||
|
||||
r_log "ipa" "Adding test service"
|
||||
ipa service-add testservice/rltest.rlipa.local &> /dev/null
|
||||
r_checkExitStatus $?
|
||||
|
||||
r_log "ipa" "Getting keytab for service"
|
||||
ipa-getkeytab -s rltest.rlipa.local -p testservice/rltest.rlipa.local -k /tmp/testservice.keytab &> /dev/null
|
||||
r_checkExitStatus $?
|
||||
|
||||
r_log "ipa" "Getting a certificate for service"
|
||||
ipa-getcert request -K testservice/rltest.rlipa.local -D rltest.rlipa.local -f /etc/pki/tls/certs/testservice.crt -k /etc/pki/tls/private/testservice.key &> /dev/null
|
||||
r_checkExitStatus $?
|
||||
|
||||
while true; do
|
||||
entry="$(ipa-getcert list -r | sed -n '/Request ID/,/auto-renew: yes/p')"
|
||||
if [[ $entry =~ "status:" ]] && [[ $entry =~ "CA_REJECTED" ]]; then
|
||||
r_checkExitStatus 1
|
||||
break
|
||||
fi
|
||||
if [[ $entry =~ "" ]]; then
|
||||
r_checkExitStatus 0
|
||||
break
|
||||
fi
|
||||
sleep 1
|
||||
done
|
||||
|
||||
while ! stat /etc/pki/tls/certs/testservice.crt &> /dev/null; do
|
||||
sync
|
||||
sleep 1
|
||||
done
|
||||
|
||||
r_log "ipa" "Verifying keytab"
|
||||
klist -k /tmp/testservice.keytab | grep "testservice/rltest.rlipa.local" &> /dev/null
|
||||
r_checkExitStatus $?
|
||||
|
||||
r_log "ipa" "Verifying key matches the certificate"
|
||||
diff <(openssl x509 -in /etc/pki/tls/certs/testservice.crt -noout -modulus 2>&1 ) <(openssl rsa -in /etc/pki/tls/private/testservice.key -noout -modulus 2>&1 )
|
||||
r_checkExitStatus $?
|
||||
|
||||
r_log "ipa" "Verifying the certificate against our CA"
|
||||
openssl verify -CAfile /etc/ipa/ca.crt /etc/pki/tls/certs/testservice.crt | grep "/etc/pki/tls/certs/testservice.crt: OK" &> /dev/null
|
||||
r_checkExitStatus $?
|
||||
|
@ -9,3 +9,46 @@ if [ "$IPAINSTALLED" -eq 1 ]; then
|
||||
r_checkExitStatus 1
|
||||
fi
|
||||
|
||||
kdestroy &> /dev/null
|
||||
klist 2>&1 | grep -qE "(No credentials|Credentials cache .* not found)" &> /dev/null
|
||||
r_checkExitStatus $?
|
||||
|
||||
expect -f - <<EOF
|
||||
set send_human {.1 .3 1 .05 2}
|
||||
spawn kinit admin
|
||||
sleep 1
|
||||
expect "Password for admin@RLIPA.LOCAL:"
|
||||
send -h "b1U3OnyX!\r"
|
||||
sleep 5
|
||||
close
|
||||
EOF
|
||||
|
||||
klist | grep "admin@RLIPA.LOCAL" &> /dev/null
|
||||
r_checkExitStatus $?
|
||||
|
||||
r_log "ipa" "Adding testzone subdomain"
|
||||
ipa dnszone-add --name-server=rltest.rlipa.local. --admin-email=hostmaster.testzone.rlipa.local. testzone.rlipa.local &> /dev/null
|
||||
r_checkExitStatus $?
|
||||
sleep 5
|
||||
|
||||
r_log "ipa" "Get SOA from testzone subdomain"
|
||||
dig @localhost SOA testzone.rlipa.local | grep -q "status: NOERROR" &> /dev/null
|
||||
r_checkExitStatus $?
|
||||
|
||||
r_log "ipa" "Adding a CNAME record to the primary domain"
|
||||
ipa dnsrecord-add rlipa.local testrecord --cname-hostname=rltest &> /dev/null
|
||||
r_checkExitStatus $?
|
||||
sleep 5
|
||||
|
||||
r_log "ipa" "Retrieving CNAME record"
|
||||
dig @localhost CNAME testrecord.rlipa.local | grep -q "status: NOERROR" &> /dev/null
|
||||
r_checkExitStatus $?
|
||||
|
||||
r_log "ipa" "Adding a CNAME to subdomain"
|
||||
ipa dnsrecord-add testzone.rlipa.local testrecord --cname-hostname=rltest.rlipa.local. &> /dev/null
|
||||
r_checkExitStatus $?
|
||||
sleep 5
|
||||
|
||||
r_log "ipa" "Testing can retrieve record from subdomain"
|
||||
dig @localhost CNAME testrecord.testzone.rlipa.local | grep -q "status: NOERROR" &> /dev/null
|
||||
r_checkExitStatus $?
|
||||
|
@ -9,3 +9,19 @@ if [ "$IPAINSTALLED" -eq 1 ]; then
|
||||
r_checkExitStatus 1
|
||||
fi
|
||||
|
||||
kdestroy &> /dev/null
|
||||
klist 2>&1 | grep -E "(No credentials|Credentials cache .* not found)" &> /dev/null
|
||||
r_checkExitStatus $?
|
||||
|
||||
expect -f - <<EOF
|
||||
set send_human {.1 .3 1 .05 2}
|
||||
spawn kinit admin
|
||||
sleep 1
|
||||
expect "Password for admin@RLIPA.LOCAL:"
|
||||
send -h "b1U3OnyX!\r"
|
||||
sleep 5
|
||||
close
|
||||
EOF
|
||||
|
||||
klist | grep "admin@RLIPA.LOCAL" &> /dev/null
|
||||
r_checkExitStatus $?
|
||||
|
@ -1,6 +1,6 @@
|
||||
#!/bin/bash
|
||||
if m_getArch aarch64 | grep -qE 'aarch64'; then
|
||||
r_log "ipa -bash" "Skipping for aarch64"
|
||||
r_log "ipa" "Skipping for aarch64"
|
||||
exit 0
|
||||
fi
|
||||
|
||||
|
iso/empanadas/Containerfile (new file, 66 lines)
@ -0,0 +1,66 @@
|
||||
FROM quay.io/centos/centos:stream9
|
||||
|
||||
ADD images/get_arch /get_arch
|
||||
|
||||
ENV TINI_VERSION v0.19.0
|
||||
RUN curl -o /tini -L "https://github.com/krallin/tini/releases/download/${TINI_VERSION}/tini-$(/get_arch)"
|
||||
RUN chmod +x /tini
|
||||
|
||||
RUN rm -rf /etc/yum.repos.d/*.repo
|
||||
ADD images/epelkey.gpg /etc/pki/rpm-gpg/RPM-GPG-KEY-EPEL-9
|
||||
ADD images/rhel.repo /etc/yum.repos.d/rhel.repo
|
||||
|
||||
RUN dnf update -y && dnf install -y \
|
||||
bash \
|
||||
bzip2 \
|
||||
cpio \
|
||||
diffutils \
|
||||
findutils \
|
||||
gawk \
|
||||
gcc \
|
||||
gcc-c++ \
|
||||
git \
|
||||
grep \
|
||||
gzip \
|
||||
info \
|
||||
make \
|
||||
patch \
|
||||
python3 \
|
||||
redhat-rpm-config \
|
||||
rpm-build \
|
||||
scl-utils-build \
|
||||
sed \
|
||||
shadow-utils \
|
||||
tar \
|
||||
unzip \
|
||||
util-linux \
|
||||
which \
|
||||
xz \
|
||||
dnf-plugins-core \
|
||||
createrepo_c \
|
||||
rpm-sign \
|
||||
sudo \
|
||||
mock \
|
||||
python-pip \
|
||||
genisoimage \
|
||||
isomd5sum \
|
||||
lorax \
|
||||
lorax-templates-rhel \
|
||||
lorax-templates-generic
|
||||
|
||||
RUN sed -i '/libreport-rhel-anaconda-bugzilla/ s/^/#/' /usr/share/lorax/templates.d/80-rhel/runtime-install.tmpl
|
||||
|
||||
RUN ssh-keygen -t rsa -q -f "$HOME/.ssh/id_rsa" -N ""
|
||||
RUN dnf clean all
|
||||
RUN rm -rf /etc/yum.repos.d/*.repo
|
||||
RUN useradd -o -d /var/peridot -u 1002 peridotbuilder && usermod -a -G mock peridotbuilder
|
||||
RUN chown peridotbuilder:mock /etc/yum.conf && chown -R peridotbuilder:mock /etc/dnf && chown -R peridotbuilder:mock /etc/rpm && chown -R peridotbuilder:mock /etc/yum.repos.d
|
||||
|
||||
RUN pip install 'git+https://git.rockylinux.org/release-engineering/public/toolkit.git@feature/iso-kube#egg=empanadas&subdirectory=iso/empanadas'
|
||||
|
||||
RUN pip install awscli
|
||||
|
||||
ENV USER=1002
|
||||
USER 1002
|
||||
|
||||
ENTRYPOINT ["/tini", "--"]
|
@@ -1,5 +1,19 @@
# iso

## Setup / Install

1. Install [Poetry](https://python-poetry.org/docs/)
2. Setup: `poetry install`
3. Have fun

## Updating dependencies

Dependencies can be manipulated via the pyproject.toml file or with the poetry add/remove commands.

Changes to the poetry.lock should be committed if dependencies are added or updated.
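
For example, a sketch of that workflow (the package name `requests` is purely illustrative, not a dependency this README names):

```bash
# Add or remove a dependency; Poetry updates both pyproject.toml and poetry.lock
poetry add requests
poetry remove requests

# Re-resolve poetry.lock after editing pyproject.toml by hand, then commit the result
poetry lock
git add pyproject.toml poetry.lock
```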

## TODO

Verbose mode should exist to output everything that's being called or run.

@@ -52,5 +66,6 @@ r.check_valid_arch()

### script names and permissions

* Callable scripts should *not* end in `.py`
* They should have at least `775` or `+x` permissions
* Callable scripts should always end in `.py` and live in the empanadas/scripts folder
* Poetry will handle the installation of these executables with setuptools for distribution, and they can be invoked by name using `poetry run script-name`, too (see the sketch after this list).
* Configure the script and function to be executed in pyproject.toml (TODO: dynamically load scripts from this directory as well as standardize on the script input/outputs)
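
As a usage sketch, one of the entry points added in this change (`build-iso`, which presumably maps to the `run()` function in `empanadas/scripts/build_iso.py`) can be invoked by name through Poetry; the flag values below simply mirror the command assembled in `launch_builds.py`:

```bash
# Invoke the ISO build entry point by name inside Poetry's environment
poetry run build-iso --release 9 --rc --isolation simple
```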

iso/empanadas/build-container.sh (new file, 14 lines)
@ -0,0 +1,14 @@
|
||||
#!/bin/bash
|
||||
|
||||
MANIFEST_NAME="peridotempanadas"
|
||||
BUILD_PATH="."
|
||||
REGISTRY="docker.io"
|
||||
USER="neilresf"
|
||||
IMAGE_TAG="v0.1.0"
|
||||
IMAGE_NAME="peridotempanadas"
|
||||
|
||||
podman buildx build \
|
||||
--platform linux/amd64,linux/arm64,linux/s390x,linux/ppc64le \
|
||||
--tag "${REGISTRY}/${USER}/${IMAGE_NAME}:${IMAGE_TAG}" \
|
||||
$PWD
|
||||
|
iso/empanadas/empanadas/__init__.py (new file, 1 line)
@ -0,0 +1 @@
|
||||
__version__ = '0.1.0'
|
@ -6,6 +6,7 @@ import glob
|
||||
import rpm
|
||||
import yaml
|
||||
import logging
|
||||
import hashlib
|
||||
|
||||
# These are a bunch of colors we may use in terminal output
|
||||
class Color:
|
||||
@ -25,6 +26,7 @@ rldict = {}
|
||||
sigdict = {}
|
||||
config = {
|
||||
"rlmacro": rpm.expandMacro('%rhel'),
|
||||
"dist": 'el' + rpm.expandMacro('%rhel'),
|
||||
"arch": platform.machine(),
|
||||
"date_stamp": time.strftime("%Y%m%d.%H%M%S", time.localtime()),
|
||||
"compose_root": "/mnt/compose",
|
||||
@ -33,16 +35,31 @@ config = {
|
||||
"category_stub": "mirror/pub/rocky",
|
||||
"sig_category_stub": "mirror/pub/sig",
|
||||
"repo_base_url": "https://yumrepofs.build.resf.org/v1/projects",
|
||||
"container": "centos:stream9"
|
||||
"mock_work_root": "/builddir",
|
||||
"container": "centos:stream9",
|
||||
"distname": "Rocky Linux",
|
||||
"shortname": "Rocky",
|
||||
"translators": {
|
||||
"x86_64": "amd64",
|
||||
"aarch64": "arm64",
|
||||
"ppc64le": "ppc64le",
|
||||
"s390x": "s390x"
|
||||
},
|
||||
"aws_region": "us-east-2",
|
||||
"bucket": "resf-empanadas",
|
||||
"bucket_url": "https://resf-empanadas.s3.us-east-2.amazonaws.com"
|
||||
}
|
||||
|
||||
# Importing the config from yaml
|
||||
for conf in glob.iglob('configs/*.yaml'):
|
||||
import importlib_resources
|
||||
_rootdir = importlib_resources.files("empanadas")
|
||||
|
||||
for conf in glob.iglob(f"{_rootdir}/configs/*.yaml"):
|
||||
with open(conf, 'r', encoding="utf-8") as file:
|
||||
rldict.update(yaml.safe_load(file))
|
||||
|
||||
# Import all SIG configs from yaml
|
||||
for conf in glob.iglob('sig/*.yaml'):
|
||||
for conf in glob.iglob(f"{_rootdir}/sig/*.yaml"):
|
||||
with open(conf, 'r', encoding="utf-8") as file:
|
||||
sigdict.update(yaml.safe_load(file))
|
||||
|
@ -1,16 +1,17 @@
|
||||
---
|
||||
'8':
|
||||
fullname: 'Rocky Linux 8'
|
||||
revision: '8.6'
|
||||
rclvl: 'RC2'
|
||||
major: '8'
|
||||
minor: '6'
|
||||
profile: '8'
|
||||
bugurl: 'https://bugs.rockylinux.org'
|
||||
allowed_arches:
|
||||
- x86_64
|
||||
- aarch64
|
||||
provide_multilib: False
|
||||
project_id: ''
|
||||
required_packages:
|
||||
- 'lorax'
|
||||
- 'genisoimage'
|
||||
- 'isomd5sum'
|
||||
repo_symlinks:
|
||||
devel: 'Devel'
|
||||
NFV: 'nfv'
|
||||
@ -53,6 +54,48 @@
|
||||
- dvd1
|
||||
- minimal
|
||||
- boot
|
||||
repos:
|
||||
- 'BaseOS'
|
||||
- 'AppStream'
|
||||
variant: 'BaseOS'
|
||||
lorax_removes:
|
||||
- 'libreport-rhel-anaconda-bugzilla'
|
||||
required_packages:
|
||||
- 'lorax'
|
||||
- 'genisoimage'
|
||||
- 'isomd5sum'
|
||||
- 'lorax-templates-rhel'
|
||||
- 'lorax-templates-generic'
|
||||
structure:
|
||||
packages: 'os/Packages'
|
||||
repodata: 'os/repodata'
|
||||
iso_map:
|
||||
xorrisofs: False
|
||||
iso_level: False
|
||||
hosts:
|
||||
x86_64: ''
|
||||
aarch64: ''
|
||||
images:
|
||||
dvd:
|
||||
repos:
|
||||
- 'BaseOS'
|
||||
- 'AppStream'
|
||||
lorax_variants:
|
||||
- dvd
|
||||
- minimal
|
||||
- BaseOS
|
||||
repos:
|
||||
- 'BaseOS'
|
||||
- 'AppStream'
|
||||
variant: 'BaseOS'
|
||||
lorax_removes:
|
||||
- 'libreport-rhel-anaconda-bugzilla'
|
||||
required_pkgs:
|
||||
- 'lorax'
|
||||
- 'genisoimage'
|
||||
- 'isomd5sum'
|
||||
- 'lorax-templates-rhel'
|
||||
- 'lorax-templates-generic'
|
||||
repoclosure_map:
|
||||
arches:
|
||||
x86_64: '--arch=x86_64 --arch=athlon --arch=i686 --arch=i586 --arch=i486 --arch=i386 --arch=noarch'
|
||||
@ -80,10 +123,15 @@
|
||||
- AppStream
|
||||
extra_files:
|
||||
git_repo: 'https://git.rockylinux.org/staging/src/rocky-release.git'
|
||||
git_raw_path: 'https://git.rockylinux.org/staging/src/rocky-release/-/raw/r8/'
|
||||
branch: 'r8'
|
||||
gpg:
|
||||
stable: 'SOURCES/RPM-GPG-KEY-rockyofficial'
|
||||
testing: 'SOURCES/RPM-GPG-KEY-rockytesting'
|
||||
list:
|
||||
- 'SOURCES/COMMUNITY-CHARTER'
|
||||
- 'SOURCES/EULA'
|
||||
- 'SOURCES/LICENSE'
|
||||
- 'SOURCES/RPM-GPG-KEY-rockyofficial'
|
||||
- 'SOURCES/RPM-GPG-KEY-rockytesting'
|
||||
...
|
iso/empanadas/empanadas/configs/el9-beta.yaml (new file, 123 lines)
@ -0,0 +1,123 @@
|
||||
---
|
||||
'9-beta':
|
||||
fullname: 'Rocky Linux 9.1'
|
||||
revision: '9.1'
|
||||
rclvl: 'BETA1'
|
||||
major: '9'
|
||||
minor: '1'
|
||||
profile: '9-beta'
|
||||
bugurl: 'https://bugs.rockylinux.org'
|
||||
checksum: 'sha256'
|
||||
allowed_arches:
|
||||
- x86_64
|
||||
- aarch64
|
||||
- ppc64le
|
||||
- s390x
|
||||
provide_multilib: True
|
||||
project_id: ''
|
||||
repo_symlinks:
|
||||
NFV: 'nfv'
|
||||
renames:
|
||||
all: 'devel'
|
||||
all_repos:
|
||||
- 'all'
|
||||
- 'BaseOS'
|
||||
- 'AppStream'
|
||||
- 'CRB'
|
||||
- 'HighAvailability'
|
||||
- 'ResilientStorage'
|
||||
- 'RT'
|
||||
- 'NFV'
|
||||
- 'SAP'
|
||||
- 'SAPHANA'
|
||||
- 'extras'
|
||||
- 'plus'
|
||||
structure:
|
||||
packages: 'os/Packages'
|
||||
repodata: 'os/repodata'
|
||||
iso_map:
|
||||
xorrisofs: True
|
||||
iso_level: False
|
||||
images:
|
||||
dvd:
|
||||
disc: True
|
||||
variant: 'AppStream'
|
||||
repos:
|
||||
- 'BaseOS'
|
||||
- 'AppStream'
|
||||
minimal:
|
||||
disc: True
|
||||
isoskip: True
|
||||
repos:
|
||||
- 'minimal'
|
||||
variant: 'minimal'
|
||||
BaseOS:
|
||||
disc: False
|
||||
isoskip: True
|
||||
variant: 'BaseOS'
|
||||
repos:
|
||||
- 'BaseOS'
|
||||
- 'AppStream'
|
||||
lorax:
|
||||
repos:
|
||||
- 'BaseOS'
|
||||
- 'AppStream'
|
||||
variant: 'BaseOS'
|
||||
lorax_removes:
|
||||
- 'libreport-rhel-anaconda-bugzilla'
|
||||
required_pkgs:
|
||||
- 'lorax'
|
||||
- 'genisoimage'
|
||||
- 'isomd5sum'
|
||||
- 'lorax-templates-rhel'
|
||||
- 'lorax-templates-generic'
|
||||
- 'xorriso'
|
||||
repoclosure_map:
|
||||
arches:
|
||||
x86_64: '--forcearch=x86_64 --arch=x86_64 --arch=athlon --arch=i686 --arch=i586 --arch=i486 --arch=i386 --arch=noarch'
|
||||
aarch64: '--forcearch=aarch64 --arch=aarch64 --arch=noarch'
|
||||
ppc64le: '--forcearch=ppc64le --arch=ppc64le --arch=noarch'
|
||||
s390x: '--forcearch=s390x --arch=s390x --arch=noarch'
|
||||
repos:
|
||||
devel: []
|
||||
BaseOS: []
|
||||
AppStream:
|
||||
- BaseOS
|
||||
CRB:
|
||||
- BaseOS
|
||||
- AppStream
|
||||
HighAvailability:
|
||||
- BaseOS
|
||||
- AppStream
|
||||
ResilientStorage:
|
||||
- BaseOS
|
||||
- AppStream
|
||||
RT:
|
||||
- BaseOS
|
||||
- AppStream
|
||||
NFV:
|
||||
- BaseOS
|
||||
- AppStream
|
||||
SAP:
|
||||
- BaseOS
|
||||
- AppStream
|
||||
- HighAvailability
|
||||
SAPHANA:
|
||||
- BaseOS
|
||||
- AppStream
|
||||
- HighAvailability
|
||||
extra_files:
|
||||
git_repo: 'https://git.rockylinux.org/staging/src/rocky-release.git'
|
||||
git_raw_path: 'https://git.rockylinux.org/staging/src/rocky-release/-/raw/r9/'
|
||||
branch: 'r9'
|
||||
gpg:
|
||||
stable: 'SOURCES/RPM-GPG-KEY-Rocky-9'
|
||||
testing: 'SOURCES/RPM-GPG-KEY-Rocky-9-Testing'
|
||||
list:
|
||||
- 'SOURCES/Contributors'
|
||||
- 'SOURCES/COMMUNITY-CHARTER'
|
||||
- 'SOURCES/EULA'
|
||||
- 'SOURCES/LICENSE'
|
||||
- 'SOURCES/RPM-GPG-KEY-Rocky-9'
|
||||
- 'SOURCES/RPM-GPG-KEY-Rocky-9-Testing'
|
||||
...
|
@ -1,7 +1,13 @@
|
||||
---
|
||||
'9':
|
||||
fullname: 'Rocky Linux 9.0'
|
||||
revision: '9.0'
|
||||
rclvl: 'RC1'
|
||||
major: '9'
|
||||
minor: '0'
|
||||
profile: '9'
|
||||
bugurl: 'https://bugs.rockylinux.org'
|
||||
checksum: 'sha256'
|
||||
allowed_arches:
|
||||
- x86_64
|
||||
- aarch64
|
||||
@ -9,15 +15,10 @@
|
||||
- s390x
|
||||
provide_multilib: True
|
||||
project_id: '55b17281-bc54-4929-8aca-a8a11d628738'
|
||||
required_packages:
|
||||
- 'lorax'
|
||||
- 'genisoimage'
|
||||
- 'isomd5sum'
|
||||
repo_symlinks:
|
||||
devel: 'Devel'
|
||||
NFV: 'nfv'
|
||||
renames:
|
||||
all: 'nplb'
|
||||
all: 'devel'
|
||||
all_repos:
|
||||
- 'all'
|
||||
- 'BaseOS'
|
||||
@ -30,39 +31,47 @@
|
||||
- 'SAP'
|
||||
- 'SAPHANA'
|
||||
- 'extras'
|
||||
- 'devel'
|
||||
- 'plus'
|
||||
no_comps_or_groups:
|
||||
- 'all'
|
||||
- 'extras'
|
||||
- 'devel'
|
||||
- 'plus'
|
||||
comps_or_groups:
|
||||
- 'BaseOS'
|
||||
- 'AppStream'
|
||||
- 'CRB'
|
||||
- 'HighAvailability'
|
||||
- 'ResilientStorage'
|
||||
- 'RT'
|
||||
- 'NFV'
|
||||
- 'SAP'
|
||||
- 'SAPHANA'
|
||||
has_modules:
|
||||
- 'AppStream'
|
||||
- 'CRB'
|
||||
structure:
|
||||
packages: 'os/Packages'
|
||||
repodata: 'os/repodata'
|
||||
iso_map:
|
||||
hosts:
|
||||
x86_64: ''
|
||||
aarch64: ''
|
||||
ppc64le: ''
|
||||
s390x: ''
|
||||
xorrisofs: True
|
||||
iso_level: False
|
||||
images:
|
||||
- dvd1
|
||||
- minimal
|
||||
- boot
|
||||
repos:
|
||||
- 'BaseOS'
|
||||
- 'AppStream'
|
||||
dvd:
|
||||
disc: True
|
||||
variant: 'AppStream'
|
||||
repos:
|
||||
- 'BaseOS'
|
||||
- 'AppStream'
|
||||
minimal:
|
||||
disc: True
|
||||
isoskip: True
|
||||
repos:
|
||||
- 'minimal'
|
||||
variant: 'minimal'
|
||||
BaseOS:
|
||||
disc: False
|
||||
isoskip: True
|
||||
variant: 'BaseOS'
|
||||
repos:
|
||||
- 'BaseOS'
|
||||
- 'AppStream'
|
||||
lorax:
|
||||
repos:
|
||||
- 'BaseOS'
|
||||
- 'AppStream'
|
||||
variant: 'BaseOS'
|
||||
lorax_removes:
|
||||
- 'libreport-rhel-anaconda-bugzilla'
|
||||
required_pkgs:
|
||||
- 'lorax'
|
||||
- 'genisoimage'
|
||||
- 'isomd5sum'
|
||||
- 'lorax-templates-rhel'
|
||||
- 'lorax-templates-generic'
|
||||
- 'xorriso'
|
||||
repoclosure_map:
|
||||
arches:
|
||||
x86_64: '--forcearch=x86_64 --arch=x86_64 --arch=athlon --arch=i686 --arch=i586 --arch=i486 --arch=i386 --arch=noarch'
|
||||
@ -70,7 +79,7 @@
|
||||
ppc64le: '--forcearch=ppc64le --arch=ppc64le --arch=noarch'
|
||||
s390x: '--forcearch=s390x --arch=s390x --arch=noarch'
|
||||
repos:
|
||||
nplb: []
|
||||
devel: []
|
||||
BaseOS: []
|
||||
AppStream:
|
||||
- BaseOS
|
||||
@ -99,8 +108,13 @@
|
||||
- HighAvailability
|
||||
extra_files:
|
||||
git_repo: 'https://git.rockylinux.org/staging/src/rocky-release.git'
|
||||
git_raw_path: 'https://git.rockylinux.org/staging/src/rocky-release/-/raw/r9/'
|
||||
branch: 'r9'
|
||||
gpg:
|
||||
stable: 'SOURCES/RPM-GPG-KEY-Rocky-9'
|
||||
testing: 'SOURCES/RPM-GPG-KEY-Rocky-9-Testing'
|
||||
list:
|
||||
- 'SOURCES/Contributors'
|
||||
- 'SOURCES/COMMUNITY-CHARTER'
|
||||
- 'SOURCES/EULA'
|
||||
- 'SOURCES/LICENSE'
|
iso/empanadas/empanadas/configs/el9lh.yaml (new file, 123 lines)
@ -0,0 +1,123 @@
|
||||
---
|
||||
'9-lookahead':
|
||||
fullname: 'Rocky Linux 9.1'
|
||||
revision: '9.1'
|
||||
rclvl: 'LH1'
|
||||
major: '9'
|
||||
minor: '1'
|
||||
profile: '9-lookahead'
|
||||
bugurl: 'https://bugs.rockylinux.org'
|
||||
checksum: 'sha256'
|
||||
allowed_arches:
|
||||
- x86_64
|
||||
- aarch64
|
||||
- ppc64le
|
||||
- s390x
|
||||
provide_multilib: True
|
||||
project_id: ''
|
||||
repo_symlinks:
|
||||
NFV: 'nfv'
|
||||
renames:
|
||||
all: 'devel'
|
||||
all_repos:
|
||||
- 'all'
|
||||
- 'BaseOS'
|
||||
- 'AppStream'
|
||||
- 'CRB'
|
||||
- 'HighAvailability'
|
||||
- 'ResilientStorage'
|
||||
- 'RT'
|
||||
- 'NFV'
|
||||
- 'SAP'
|
||||
- 'SAPHANA'
|
||||
- 'extras'
|
||||
- 'plus'
|
||||
structure:
|
||||
packages: 'os/Packages'
|
||||
repodata: 'os/repodata'
|
||||
iso_map:
|
||||
xorrisofs: True
|
||||
iso_level: False
|
||||
images:
|
||||
dvd:
|
||||
disc: True
|
||||
variant: 'AppStream'
|
||||
repos:
|
||||
- 'BaseOS'
|
||||
- 'AppStream'
|
||||
minimal:
|
||||
disc: True
|
||||
isoskip: True
|
||||
repos:
|
||||
- 'minimal'
|
||||
variant: 'minimal'
|
||||
BaseOS:
|
||||
disc: False
|
||||
isoskip: True
|
||||
variant: 'BaseOS'
|
||||
repos:
|
||||
- 'BaseOS'
|
||||
- 'AppStream'
|
||||
lorax:
|
||||
repos:
|
||||
- 'BaseOS'
|
||||
- 'AppStream'
|
||||
variant: 'BaseOS'
|
||||
lorax_removes:
|
||||
- 'libreport-rhel-anaconda-bugzilla'
|
||||
required_pkgs:
|
||||
- 'lorax'
|
||||
- 'genisoimage'
|
||||
- 'isomd5sum'
|
||||
- 'lorax-templates-rhel'
|
||||
- 'lorax-templates-generic'
|
||||
- 'xorriso'
|
||||
repoclosure_map:
|
||||
arches:
|
||||
x86_64: '--forcearch=x86_64 --arch=x86_64 --arch=athlon --arch=i686 --arch=i586 --arch=i486 --arch=i386 --arch=noarch'
|
||||
aarch64: '--forcearch=aarch64 --arch=aarch64 --arch=noarch'
|
||||
ppc64le: '--forcearch=ppc64le --arch=ppc64le --arch=noarch'
|
||||
s390x: '--forcearch=s390x --arch=s390x --arch=noarch'
|
||||
repos:
|
||||
devel: []
|
||||
BaseOS: []
|
||||
AppStream:
|
||||
- BaseOS
|
||||
CRB:
|
||||
- BaseOS
|
||||
- AppStream
|
||||
HighAvailability:
|
||||
- BaseOS
|
||||
- AppStream
|
||||
ResilientStorage:
|
||||
- BaseOS
|
||||
- AppStream
|
||||
RT:
|
||||
- BaseOS
|
||||
- AppStream
|
||||
NFV:
|
||||
- BaseOS
|
||||
- AppStream
|
||||
SAP:
|
||||
- BaseOS
|
||||
- AppStream
|
||||
- HighAvailability
|
||||
SAPHANA:
|
||||
- BaseOS
|
||||
- AppStream
|
||||
- HighAvailability
|
||||
extra_files:
|
||||
git_repo: 'https://git.rockylinux.org/staging/src/rocky-release.git'
|
||||
git_raw_path: 'https://git.rockylinux.org/staging/src/rocky-release/-/raw/r9/'
|
||||
branch: 'r9lh'
|
||||
gpg:
|
||||
stable: 'SOURCES/RPM-GPG-KEY-Rocky-9'
|
||||
testing: 'SOURCES/RPM-GPG-KEY-Rocky-9-Testing'
|
||||
list:
|
||||
- 'SOURCES/Contributors'
|
||||
- 'SOURCES/COMMUNITY-CHARTER'
|
||||
- 'SOURCES/EULA'
|
||||
- 'SOURCES/LICENSE'
|
||||
- 'SOURCES/RPM-GPG-KEY-Rocky-9'
|
||||
- 'SOURCES/RPM-GPG-KEY-Rocky-9-Testing'
|
||||
...
|
iso/empanadas/empanadas/configs/rln.yaml (new file, 121 lines)
@ -0,0 +1,121 @@
|
||||
---
|
||||
'rln':
|
||||
fullname: 'Rocky Linux New'
|
||||
revision: '10'
|
||||
rclvl: 'RLN120'
|
||||
major: '10'
|
||||
minor: '0'
|
||||
profile: 'rln'
|
||||
bugurl: 'https://bugs.rockylinux.org'
|
||||
checksum: 'sha256'
|
||||
allowed_arches:
|
||||
- x86_64
|
||||
- aarch64
|
||||
- ppc64le
|
||||
- s390x
|
||||
provide_multilib: True
|
||||
project_id: ''
|
||||
repo_symlinks:
|
||||
NFV: 'nfv'
|
||||
renames:
|
||||
all: 'devel'
|
||||
all_repos:
|
||||
- 'all'
|
||||
- 'BaseOS'
|
||||
- 'AppStream'
|
||||
- 'CRB'
|
||||
- 'HighAvailability'
|
||||
- 'ResilientStorage'
|
||||
- 'RT'
|
||||
- 'NFV'
|
||||
- 'SAP'
|
||||
- 'SAPHANA'
|
||||
- 'extras'
|
||||
- 'plus'
|
||||
structure:
|
||||
packages: 'os/Packages'
|
||||
repodata: 'os/repodata'
|
||||
iso_map:
|
||||
xorrisofs: True
|
||||
iso_level: False
|
||||
images:
|
||||
dvd:
|
||||
discnum: '1'
|
||||
variant: 'AppStream'
|
||||
repos:
|
||||
- 'BaseOS'
|
||||
- 'AppStream'
|
||||
minimal:
|
||||
discnum: '1'
|
||||
isoskip: True
|
||||
repos:
|
||||
- 'minimal'
|
||||
variant: 'minimal'
|
||||
BaseOS:
|
||||
isoskip: True
|
||||
variant: 'BaseOS'
|
||||
repos:
|
||||
- 'BaseOS'
|
||||
- 'AppStream'
|
||||
lorax:
|
||||
repos:
|
||||
- 'BaseOS'
|
||||
- 'AppStream'
|
||||
variant: 'BaseOS'
|
||||
lorax_removes:
|
||||
- 'libreport-rhel-anaconda-bugzilla'
|
||||
required_pkgs:
|
||||
- 'lorax'
|
||||
- 'isomd5sum'
|
||||
- 'lorax-templates-rhel'
|
||||
- 'lorax-templates-generic'
|
||||
- 'xorriso'
|
||||
repoclosure_map:
|
||||
arches:
|
||||
x86_64: '--forcearch=x86_64 --arch=x86_64 --arch=athlon --arch=i686 --arch=i586 --arch=i486 --arch=i386 --arch=noarch'
|
||||
aarch64: '--forcearch=aarch64 --arch=aarch64 --arch=noarch'
|
||||
ppc64le: '--forcearch=ppc64le --arch=ppc64le --arch=noarch'
|
||||
s390x: '--forcearch=s390x --arch=s390x --arch=noarch'
|
||||
repos:
|
||||
devel: []
|
||||
BaseOS: []
|
||||
AppStream:
|
||||
- BaseOS
|
||||
CRB:
|
||||
- BaseOS
|
||||
- AppStream
|
||||
HighAvailability:
|
||||
- BaseOS
|
||||
- AppStream
|
||||
ResilientStorage:
|
||||
- BaseOS
|
||||
- AppStream
|
||||
RT:
|
||||
- BaseOS
|
||||
- AppStream
|
||||
NFV:
|
||||
- BaseOS
|
||||
- AppStream
|
||||
SAP:
|
||||
- BaseOS
|
||||
- AppStream
|
||||
- HighAvailability
|
||||
SAPHANA:
|
||||
- BaseOS
|
||||
- AppStream
|
||||
- HighAvailability
|
||||
extra_files:
|
||||
git_repo: 'https://git.rockylinux.org/staging/src/rocky-release.git'
|
||||
git_raw_path: 'https://git.rockylinux.org/staging/src/rocky-release/-/raw/rln/'
|
||||
branch: 'rln'
|
||||
gpg:
|
||||
stable: 'SOURCES/RPM-GPG-KEY-Rocky-RLN'
|
||||
testing: 'SOURCES/RPM-GPG-KEY-Rocky-RLN-Testing'
|
||||
list:
|
||||
- 'SOURCES/Contributors'
|
||||
- 'SOURCES/COMMUNITY-CHARTER'
|
||||
- 'SOURCES/EULA'
|
||||
- 'SOURCES/LICENSE'
|
||||
- 'SOURCES/RPM-GPG-KEY-Rocky-RLN'
|
||||
- 'SOURCES/RPM-GPG-KEY-Rocky-RLN'
|
||||
...
|
iso/empanadas/empanadas/scripts/build_iso.py (new executable file, 31 lines)
@ -0,0 +1,31 @@
|
||||
# builds ISO's
|
||||
|
||||
import argparse
|
||||
|
||||
from empanadas.common import *
|
||||
from empanadas.util import Checks
|
||||
from empanadas.util import IsoBuild
|
||||
|
||||
parser = argparse.ArgumentParser(description="ISO Compose")
|
||||
|
||||
parser.add_argument('--release', type=str, help="Major Release Version or major-type (eg 9-beta)", required=True)
|
||||
parser.add_argument('--isolation', type=str, help="mock isolation mode")
|
||||
parser.add_argument('--rc', action='store_true', help="Release Candidate, Beta, RLN")
|
||||
parser.add_argument('--local-compose', action='store_true', help="Compose Directory is Here")
|
||||
parser.add_argument('--logger', type=str)
|
||||
results = parser.parse_args()
|
||||
rlvars = rldict[results.release]
|
||||
major = rlvars['major']
|
||||
|
||||
a = IsoBuild(
|
||||
rlvars,
|
||||
config,
|
||||
major=major,
|
||||
rc=results.rc,
|
||||
isolation=results.isolation,
|
||||
compose_dir_is_here=results.local_compose,
|
||||
logger=results.logger,
|
||||
)
|
||||
|
||||
def run():
|
||||
a.run()
|
iso/empanadas/empanadas/scripts/build_iso_extra.py (new executable file, 37 lines)
@ -0,0 +1,37 @@
|
||||
# builds ISO's
|
||||
|
||||
import argparse
|
||||
|
||||
from empanadas.common import *
|
||||
from empanadas.util import Checks
|
||||
from empanadas.util import IsoBuild
|
||||
|
||||
parser = argparse.ArgumentParser(description="ISO Compose")
|
||||
|
||||
parser.add_argument('--release', type=str, help="Major Release Version or major-type (eg 9-beta)", required=True)
|
||||
parser.add_argument('--rc', action='store_true', help="Release Candidate, Beta, RLN")
|
||||
parser.add_argument('--arch', type=str, help="Architecture")
|
||||
parser.add_argument('--isolation', type=str, help="Mock Isolation")
|
||||
parser.add_argument('--local-compose', action='store_true', help="Compose Directory is Here")
|
||||
parser.add_argument('--logger', type=str)
|
||||
parser.add_argument('--extra-iso', type=str, help="Granular choice in which iso is built")
|
||||
parser.add_argument('--extra-iso-mode', type=str, default='local')
|
||||
results = parser.parse_args()
|
||||
rlvars = rldict[results.release]
|
||||
major = rlvars['major']
|
||||
|
||||
a = IsoBuild(
|
||||
rlvars,
|
||||
config,
|
||||
major=major,
|
||||
rc=results.rc,
|
||||
arch=results.arch,
|
||||
isolation=results.isolation,
|
||||
extra_iso=results.extra_iso,
|
||||
extra_iso_mode=results.extra_iso_mode,
|
||||
compose_dir_is_here=results.local_compose,
|
||||
logger=results.logger
|
||||
)
|
||||
|
||||
def run():
|
||||
a.run_build_extra_iso()
|
iso/empanadas/empanadas/scripts/launch_builds.py (new executable file, 48 lines)
@ -0,0 +1,48 @@
|
||||
# Launches the builds of ISOs
|
||||
|
||||
import argparse
|
||||
import datetime
|
||||
|
||||
from empanadas.common import *
|
||||
from empanadas.common import _rootdir
|
||||
|
||||
from jinja2 import Environment, FileSystemLoader
|
||||
|
||||
parser = argparse.ArgumentParser(description="ISO Compose")
|
||||
|
||||
parser.add_argument('--release', type=str, help="Major Release Version", required=True)
|
||||
parser.add_argument('--env', type=str, help="environment", required=True)
|
||||
results = parser.parse_args()
|
||||
rlvars = rldict[results.release]
|
||||
major = rlvars['major']
|
||||
|
||||
EXTARCH=["s390x", "ppc64le"]
|
||||
EKSARCH=["amd64", "arm64"]
|
||||
|
||||
def run():
|
||||
file_loader = FileSystemLoader(f"{_rootdir}/templates")
|
||||
tmplenv = Environment(loader=file_loader)
|
||||
job_template = tmplenv.get_template('kube/Job.tmpl')
|
||||
|
||||
arches = EKSARCH
|
||||
if results.env == "ext" and results.env != "all":
|
||||
arches = EXTARCH
|
||||
elif results.env == "all":
|
||||
arches = EKSARCH+EXTARCH
|
||||
|
||||
command = ["build-iso", "--release", f"{results.release}", "--rc", "--isolation", "simple"]
|
||||
|
||||
out = ""
|
||||
for arch in arches:
|
||||
out += job_template.render(
|
||||
architecture=arch,
|
||||
backoffLimit=4,
|
||||
buildTime=datetime.datetime.utcnow().strftime("%s"),
|
||||
command=command,
|
||||
imageName="ghcr.io/neilhanlon/sig-core-toolkit:latest",
|
||||
namespace="empanadas",
|
||||
major=major,
|
||||
restartPolicy="Never",
|
||||
)
|
||||
|
||||
print(out)
|
iso/empanadas/empanadas/scripts/pull_unpack_tree.py (new executable file, 37 lines)
@ -0,0 +1,37 @@
|
||||
# builds ISO's
|
||||
|
||||
import argparse
|
||||
|
||||
from empanadas.common import *
|
||||
from empanadas.util import Checks
|
||||
from empanadas.util import IsoBuild
|
||||
|
||||
parser = argparse.ArgumentParser(description="ISO Artifact Builder")
|
||||
|
||||
parser.add_argument('--release', type=str, help="Major Release Version", required=True)
|
||||
parser.add_argument('--s3', action='store_true', help="Release Candidate")
|
||||
parser.add_argument('--rc', action='store_true', help="Release Candidate")
|
||||
parser.add_argument('--arch', type=str, help="Architecture")
|
||||
parser.add_argument('--local-compose', action='store_true', help="Compose Directory is Here")
|
||||
parser.add_argument('--force-unpack', action='store_true', help="Force an unpack")
|
||||
parser.add_argument('--force-download', action='store_true', help="Force a download")
|
||||
parser.add_argument('--logger', type=str)
|
||||
results = parser.parse_args()
|
||||
rlvars = rldict[results.release]
|
||||
major = rlvars['major']
|
||||
|
||||
a = IsoBuild(
|
||||
rlvars,
|
||||
config,
|
||||
major=major,
|
||||
rc=results.rc,
|
||||
s3=results.s3,
|
||||
arch=results.arch,
|
||||
force_unpack=results.force_unpack,
|
||||
force_download=results.force_download,
|
||||
compose_dir_is_here=results.local_compose,
|
||||
logger=results.logger,
|
||||
)
|
||||
|
||||
def run():
|
||||
a.run_pull_lorax_artifacts()
|
@ -1,21 +1,16 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
# This script can be called to do single syncs or full on syncs.
|
||||
|
||||
import argparse
|
||||
from common import *
|
||||
from util import Checks
|
||||
from util import RepoSync
|
||||
|
||||
#rlvars = rldict['9']
|
||||
#r = Checks(rlvars, config['arch'])
|
||||
#r.check_valid_arch()
|
||||
from empanadas.common import *
|
||||
from empanadas.util import Checks
|
||||
from empanadas.util import RepoSync
|
||||
|
||||
# Start up the parser baby
|
||||
parser = argparse.ArgumentParser(description="Peridot Sync and Compose")
|
||||
|
||||
# All of our options
|
||||
parser.add_argument('--release', type=str, help="Major Release Version", required=True)
|
||||
parser.add_argument('--release', type=str, help="Major Release Version or major-type (eg 9-beta)", required=True)
|
||||
parser.add_argument('--repo', type=str, help="Repository name")
|
||||
parser.add_argument('--arch', type=str, help="Architecture")
|
||||
parser.add_argument('--ignore-debug', action='store_true')
|
||||
@ -26,14 +21,16 @@ parser.add_argument('--hashed', action='store_true')
|
||||
parser.add_argument('--dry-run', action='store_true')
|
||||
parser.add_argument('--full-run', action='store_true')
|
||||
parser.add_argument('--no-fail', action='store_true')
|
||||
parser.add_argument('--refresh-extra-files', action='store_true')
|
||||
# I am aware this is confusing, I want podman to be the default option
|
||||
parser.add_argument('--simple', action='store_false')
|
||||
parser.add_argument('--logger', type=str)
|
||||
|
||||
# Parse them
|
||||
results = parser.parse_args()
|
||||
|
||||
rlvars = rldict[results.release]
|
||||
major = rlvars['major']
|
||||
|
||||
r = Checks(rlvars, config['arch'])
|
||||
r.check_valid_arch()
|
||||
|
||||
@ -41,7 +38,7 @@ r.check_valid_arch()
|
||||
a = RepoSync(
|
||||
rlvars,
|
||||
config,
|
||||
major=results.release,
|
||||
major=major,
|
||||
repo=results.repo,
|
||||
arch=results.arch,
|
||||
ignore_debug=results.ignore_debug,
|
||||
@ -53,7 +50,9 @@ a = RepoSync(
|
||||
dryrun=results.dry_run,
|
||||
fullrun=results.full_run,
|
||||
nofail=results.no_fail,
|
||||
logger=results.logger
|
||||
logger=results.logger,
|
||||
refresh_extra_files=results.refresh_extra_files,
|
||||
)
|
||||
|
||||
a.run()
|
||||
def run():
|
||||
a.run()
|
iso/empanadas/empanadas/scripts/sync_from_peridot_test.py (new executable file, 18 lines)
@ -0,0 +1,18 @@
|
||||
# This is a testing script to ensure the RepoSync class is working as intended.
|
||||
|
||||
import argparse
|
||||
|
||||
from empanadas.common import *
|
||||
from empanadas.util import Checks
|
||||
from empanadas.util import RepoSync
|
||||
|
||||
rlvars = rldict['9-lookahead']
|
||||
r = Checks(rlvars, config['arch'])
|
||||
r.check_valid_arch()
|
||||
|
||||
#a = RepoSync(rlvars, config, major="9", repo="ResilientStorage", parallel=True, ignore_debug=False, ignore_source=False)
|
||||
a = RepoSync(rlvars, config, major="9", repo="BaseOS", parallel=True, ignore_debug=False, ignore_source=False, hashed=True)
|
||||
|
||||
def run():
|
||||
print(rlvars.keys())
|
||||
print(rlvars)
|
@ -1,11 +1,9 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
# This script can be called to do single syncs or full on syncs.
|
||||
|
||||
import argparse
|
||||
from common import *
|
||||
from util import Checks
|
||||
from util import SigRepoSync
|
||||
from empanadas.common import *
|
||||
from empanadas.util import Checks
|
||||
from empanadas.util import SigRepoSync
|
||||
|
||||
#rlvars = rldict['9']
|
||||
#r = Checks(rlvars, config['arch'])
|
||||
@ -58,4 +56,6 @@ a = SigRepoSync(
|
||||
logger=results.logger
|
||||
)
|
||||
|
||||
a.run()
|
||||
|
||||
def run():
|
||||
a.run()
|
iso/empanadas/empanadas/templates/buildExtraImage.tmpl.sh (new file, 25 lines)
@ -0,0 +1,25 @@
|
||||
#!/bin/bash
|
||||
set -ex
|
||||
|
||||
{% if extra_iso_mode == "podman" %}
|
||||
{{ lorax_pkg_cmd }}
|
||||
mkdir -p {{ compose_work_iso_dir }}/{{ arch }}
|
||||
cd {{ compose_work_iso_dir }}/{{ arch }}
|
||||
test -f {{ isoname }} || { echo "!! ISO ALREADY EXISTS !!"; exit 1; }
|
||||
{% else %}
|
||||
cd /builddir
|
||||
|
||||
if ! TEMPLATE="$($(head -n1 $(which lorax) | cut -c3-) -c 'import pylorax; print(pylorax.find_templates())')"; then
|
||||
TEMPLATE="/usr/share/lorax"
|
||||
fi
|
||||
{% endif %}
|
||||
|
||||
|
||||
{{ make_image }}
|
||||
|
||||
{{ isohybrid }}
|
||||
|
||||
{{ implantmd5 }}
|
||||
|
||||
{{ make_manifest }}
|
||||
|
iso/empanadas/empanadas/templates/buildImage.tmpl.sh (new file, 64 lines)
@ -0,0 +1,64 @@
|
||||
#!/bin/bash
|
||||
|
||||
VOLID="{{ shortname }}-{{ major }}-{{ minor }}{{ rc }}-{{ arch }}-dvd"
|
||||
VARIANT="{{ variant }}"
|
||||
ARCH="{{ arch }}"
|
||||
VERSION="{{ revision }}"
|
||||
PRODUCT="{{ distname }}"
|
||||
MOCKBLD="{{ builddir }}"
|
||||
LORAXRES="{{ lorax_work_root }}"
|
||||
LORAX_TAR="lorax-{{ revision }}-{{ arch }}.tar.gz"
|
||||
LOGFILE="lorax-{{ arch }}.log"
|
||||
BUGURL="{{ bugurl }}"
|
||||
|
||||
{% for pkg in lorax %}
|
||||
sed -i '/{{ pkg }}/ s/^/#/' /usr/share/lorax/templates.d/80-rhel/runtime-install.tmpl
|
||||
{% endfor %}
|
||||
|
||||
lorax --product="${PRODUCT}" \
|
||||
--version="${VERSION}" \
|
||||
--release="${VERSION}" \
|
||||
{%- if rc == '' %}
|
||||
--isfinal \
|
||||
{%- endif %}
|
||||
{%- for repo in repos %}
|
||||
--source={{ repo.url }} \
|
||||
{%- endfor %}
|
||||
--bugurl="${BUGURL}" \
|
||||
--variant="${VARIANT}" \
|
||||
--nomacboot \
|
||||
--buildarch="${ARCH}" \
|
||||
--volid="${VOLID}" \
|
||||
--logfile="${MOCKBLD}/${LOGFILE}" \
|
||||
--rootfs-size=3 \
|
||||
"${LORAXRES}"
|
||||
|
||||
ret_val=$?
|
||||
if [ $ret_val -ne 0 ]; then
|
||||
echo "!! LORAX FAILED !!"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# If we didn't fail, let's pack up everything!
|
||||
cd "${MOCKBLD}"
|
||||
|
||||
# Get ISO manifest
|
||||
if [ -f "/usr/bin/xorriso" ]; then
|
||||
/usr/bin/xorriso -dev lorax/images/boot.iso --find |
|
||||
tail -n+2 |
|
||||
tr -d "'" |
|
||||
cut -c2- | sort >> lorax/images/boot.iso.manifest
|
||||
elif [ -f "/usr/bin/isoinfo" ]; then
|
||||
/usr/bin/isoinfo -R -f -i lorax/images/boot.iso |
|
||||
grep -v '/TRANS.TBL$' | sort >> lorax/images/boot.iso.manifest
|
||||
fi
|
||||
|
||||
tar czf "${LORAX_TAR}" lorax "${LOGFILE}"
|
||||
|
||||
tar_ret_val=$?
|
||||
if [ $tar_ret_val -ne 0 ]; then
|
||||
echo "!! PROBLEM CREATING ARCHIVE !!"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
exit 0
|
50
iso/empanadas/empanadas/templates/extraisobuild.tmpl.sh
Normal file
@ -0,0 +1,50 @@
|
||||
#!/bin/bash
|
||||
# This is a template that is used to build extra ISOs for Rocky Linux. Only
|
||||
# under extreme circumstances should you be filling this out and running
|
||||
# manually.
|
||||
|
||||
# Vars
|
||||
MOCK_CFG="/var/tmp/lorax-{{ major }}.cfg"
|
||||
MOCK_ROOT="/var/lib/mock/{{ shortname|lower }}-{{ major }}-{{ arch }}"
|
||||
MOCK_RESL="${MOCK_ROOT}/result"
|
||||
MOCK_CHRO="${MOCK_ROOT}/root"
|
||||
MOCK_LOG="${MOCK_RESL}/mock-output.log"
|
||||
IMAGE_SCR="{{ entries_dir }}/buildExtraImage-{{ arch }}-{{ image }}.sh"
|
||||
IMAGE_ISO="{{ isoname }}"
|
||||
ISOLATION="{{ isolation }}"
|
||||
BUILDDIR="{{ builddir }}"
|
||||
|
||||
# Init the container
|
||||
mock \
|
||||
-r "${MOCK_CFG}" \
|
||||
--isolation="${ISOLATION}" \
|
||||
--enable-network \
|
||||
--init
|
||||
|
||||
init_ret_val=$?
|
||||
if [ $init_ret_val -ne 0 ]; then
|
||||
echo "!! MOCK INIT FAILED !!"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
mkdir -p "${MOCK_RESL}"
|
||||
cp "${IMAGE_SCR}" "${MOCK_CHRO}${IMAGE_SCR}"
|
||||
|
||||
mock \
|
||||
-r "${MOCK_CFG}" \
|
||||
--shell \
|
||||
--isolation="${ISOLATION}" \
|
||||
--enable-network -- /bin/bash "${IMAGE_SCR}" | tee -a "${MOCK_LOG}"
|
||||
|
||||
mock_ret_val=$?
|
||||
if [ $mock_ret_val -eq 0 ]; then
|
||||
# Copy resulting data to /var/lib/mock/{{ shortname|lower }}-{{ major }}-{{ arch }}/result
|
||||
mkdir -p "${MOCK_RESL}"
|
||||
cp "${MOCK_CHRO}${BUILDDIR}/${IMAGE_ISO}" "${MOCK_RESL}"
|
||||
cp "${MOCK_CHRO}${BUILDDIR}/${IMAGE_ISO}.manifest" "${MOCK_RESL}"
|
||||
else
|
||||
echo "!! EXTRA ISO RUN FAILED !!"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Clean up?
|
48
iso/empanadas/empanadas/templates/isobuild.tmpl.sh
Normal file
@ -0,0 +1,48 @@
|
||||
#!/bin/bash
|
||||
# This is a template that is used to build ISOs for Rocky Linux. Only under
|
||||
# extreme circumstances should you be filling this out and running manually.
|
||||
|
||||
# Vars
|
||||
MOCK_CFG="/var/tmp/lorax-{{ major }}.cfg"
|
||||
MOCK_ROOT="/var/lib/mock/{{ shortname|lower }}-{{ major }}-{{ arch }}"
|
||||
MOCK_RESL="${MOCK_ROOT}/result"
|
||||
MOCK_CHRO="${MOCK_ROOT}/root"
|
||||
MOCK_LOG="${MOCK_RESL}/mock-output.log"
|
||||
LORAX_SCR="/var/tmp/buildImage.sh"
|
||||
LORAX_TAR="lorax-{{ revision }}-{{ arch }}.tar.gz"
|
||||
ISOLATION="{{ isolation }}"
|
||||
BUILDDIR="{{ builddir }}"
|
||||
|
||||
# Init the container
|
||||
mock \
|
||||
-r "${MOCK_CFG}" \
|
||||
--isolation="${ISOLATION}" \
|
||||
--enable-network \
|
||||
--init
|
||||
|
||||
init_ret_val=$?
|
||||
if [ $init_ret_val -ne 0 ]; then
|
||||
echo "!! MOCK INIT FAILED !!"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
mkdir -p "${MOCK_RESL}"
|
||||
cp "${LORAX_SCR}" "${MOCK_CHRO}${LORAX_SCR}"
|
||||
|
||||
mock \
|
||||
-r "${MOCK_CFG}" \
|
||||
--shell \
|
||||
--isolation="${ISOLATION}" \
|
||||
--enable-network -- /bin/bash "${LORAX_SCR}" | tee -a "${MOCK_LOG}"
|
||||
|
||||
mock_ret_val=$?
|
||||
if [ $mock_ret_val -eq 0 ]; then
|
||||
# Copy resulting data to /var/lib/mock/{{ shortname|lower }}-{{ major }}-{{ arch }}/result
|
||||
mkdir -p "${MOCK_RESL}"
|
||||
cp "${MOCK_CHRO}${BUILDDIR}/${LORAX_TAR}" "${MOCK_RESL}"
|
||||
else
|
||||
echo "!! LORAX RUN FAILED !!"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Clean up?
|
49
iso/empanadas/empanadas/templates/isomock.tmpl.cfg
Normal file
@ -0,0 +1,49 @@
|
||||
config_opts['root'] = '{{ shortname|lower }}-{{ major }}-{{ arch }}'
|
||||
config_opts['description'] = '{{ fullname }}'
|
||||
config_opts['target_arch'] = '{{ arch }}'
|
||||
config_opts['legal_host_arches'] = ('{{ arch }}',)
|
||||
|
||||
config_opts['chroot_setup_cmd'] = 'install bash bzip2 coreutils cpio diffutils redhat-release findutils gawk glibc-minimal-langpack grep gzip info patch redhat-rpm-config rpm-build sed shadow-utils tar unzip util-linux which xz {{ required_pkgs|join(' ') }}'
|
||||
config_opts['dist'] = '{{ dist }}' # only useful for --resultdir variable subst
|
||||
config_opts['releasever'] = '{{ major }}'
|
||||
config_opts['package_manager'] = '{{ pkgmanager|default("dnf") }}'
|
||||
config_opts['extra_chroot_dirs'] = [ '/run/lock', ]
|
||||
# config_opts['bootstrap_image'] = 'quay.io/{{ shortname|lower }}/{{ shortname|lower }}:{{ major }}'
|
||||
|
||||
# If compose is local, the bind mounts will be here
|
||||
{% if compose_dir_is_here %}
|
||||
config_opts['plugin_conf']['bind_mount_enable'] = True
|
||||
config_opts['plugin_conf']['bind_mount_opts']['dirs'].append(('{{ compose_dir }}', '{{ compose_dir }}'))
|
||||
{% endif %}
|
||||
|
||||
config_opts['dnf.conf'] = """
|
||||
[main]
|
||||
keepcache=1
|
||||
debuglevel=2
|
||||
reposdir=/dev/null
|
||||
logfile=/var/log/yum.log
|
||||
retries=20
|
||||
obsoletes=1
|
||||
gpgcheck=0
|
||||
assumeyes=1
|
||||
syslog_ident=mock
|
||||
syslog_device=
|
||||
metadata_expire=0
|
||||
mdpolicy=group:primary
|
||||
best=1
|
||||
install_weak_deps=0
|
||||
protected_packages=
|
||||
module_platform_id=platform:{{ dist }}
|
||||
user_agent={{ user_agent }}
|
||||
|
||||
{% for repo in repos %}
|
||||
[{{ repo.name }}]
|
||||
name={{ repo.name }}
|
||||
baseurl={{ repo.url }}
|
||||
enabled=1
|
||||
gpgcheck=0
|
||||
|
||||
{% endfor %}
|
||||
|
||||
"""
|
||||
|
61
iso/empanadas/empanadas/templates/kube/Job.tmpl
Normal file
@ -0,0 +1,61 @@
|
||||
---
|
||||
apiVersion: batch/v1
|
||||
kind: Job
|
||||
metadata:
|
||||
name: build-iso-{{ major }}-{{ architecture }}
|
||||
namespace: {{ namespace }}
|
||||
spec:
|
||||
template:
|
||||
metadata:
|
||||
labels:
|
||||
peridot.rockylinux.org/workflow-tolerates-arch: {{ architecture }}
|
||||
spec:
|
||||
containers:
|
||||
- name: buildiso-{{ major }}-{{ architecture }}
|
||||
image: {{ imageName }}
|
||||
command: ["/bin/bash", "-c"]
|
||||
args:
|
||||
- |
|
||||
{{ command | join(' ') }}
|
||||
aws s3 cp --recursive --exclude=* --include=lorax* \
|
||||
/var/lib/mock/rocky-{{ major }}-$(uname -m)/root/builddir/ \
|
||||
"s3://resf-empanadas/buildiso-{{ major }}-{{ architecture }}/{{ buildTime }}/"
|
||||
securityContext:
|
||||
runAsUser: 0
|
||||
runAsGroup: 0
|
||||
privileged: true
|
||||
runAsNonRoot: false
|
||||
allowPrivilegeEscalation: true
|
||||
volumeMounts:
|
||||
- mountPath: /etc/resolv.conf
|
||||
name: resolv-conf
|
||||
- mountPath: /var/lib/mock/
|
||||
name: mock
|
||||
env:
|
||||
- name: AWS_REGION
|
||||
value: us-east-2
|
||||
- name: AWS_ACCESS_KEY_ID
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: empanadas-s3
|
||||
key: ID
|
||||
- name: AWS_SECRET_ACCESS_KEY
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: empanadas-s3
|
||||
key: SECRET
|
||||
tolerations:
|
||||
- effect: NoSchedule
|
||||
key: peridot.rockylinux.org/workflow-tolerates-arch
|
||||
operator: Equal
|
||||
value: {{ architecture }}
|
||||
restartPolicy: {{ restartPolicy }}
|
||||
volumes:
|
||||
- name: resolv-conf
|
||||
hostPath:
|
||||
path: /etc/resolv.conf
|
||||
type: File
|
||||
- name: mock
|
||||
emptyDir: {}
|
||||
backoffLimit: {{ backoffLimit }}
|
||||
|
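
The Job template above is plain Jinja2, so it can be rendered outside the toolkit and handed to kubectl. The sketch below is illustrative only and not part of this commit: the template path, namespace, image name, and command are placeholder assumptions; the variable names simply mirror the placeholders the template references.

# Illustrative sketch only; every value below is a placeholder, not a project default.
from jinja2 import Environment, FileSystemLoader

env = Environment(loader=FileSystemLoader("iso/empanadas/empanadas/templates"))
manifest = env.get_template("kube/Job.tmpl").render(
    major="9",
    architecture="x86_64",
    namespace="example-namespace",                      # assumed namespace
    imageName="registry.example.com/empanadas:latest",  # assumed image
    command=["build-iso", "--release", "9"],            # hypothetical command list
    buildTime="20220620.0",
    restartPolicy="Never",
    backoffLimit=4,
)

# Write the rendered manifest; it could then be submitted with kubectl apply.
with open("buildiso-job.yaml", "w") as f:
    f.write(manifest)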
26
iso/empanadas/empanadas/templates/repoconfig.tmpl
Normal file
@ -0,0 +1,26 @@
{%- for repo in repos -%}
[{{ repo.name }}]
name={{repo.name}}
baseurl={{ repo.baseurl }}
enabled=1
gpgcheck=1
repo_gpgcheck=1
gpgkey={{ repo.gpgkey }}

[{{ repo.name }}-debug]
name={{repo.name}}
baseurl={{ repo.baseurl }}-debug
enabled=1
gpgcheck=1
repo_gpgcheck=1
gpgkey={{ repo.gpgkey }}

[{{ repo.name }}-source]
name={{repo.name}}
baseurl={{ repo.srcbaseurl }}
enabled=1
gpgcheck=1
repo_gpgcheck=1
gpgkey={{ repo.gpgkey }}

{% endfor %}
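
For reference, the generate_conf() change later in this diff renders this template from a list of dicts carrying name, baseurl, srcbaseurl, and gpgkey keys. A minimal rendering sketch with a made-up repository entry (the URLs are placeholders, not real endpoints) might look like:

# Sketch only: the repository entry below is invented for illustration.
from jinja2 import Environment, FileSystemLoader

env = Environment(loader=FileSystemLoader("iso/empanadas/empanadas/templates"))
repolist = [
    {
        "name": "BaseOS",
        "baseurl": "https://repo.example.com/BaseOS/$basearch",        # placeholder URL
        "srcbaseurl": "https://repo.example.com/BaseOS/source/tree",   # placeholder URL
        "gpgkey": "https://repo.example.com/keys/RPM-GPG-KEY-example", # placeholder URL
    }
]
print(env.get_template("repoconfig.tmpl").render(repos=repolist))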
7
iso/empanadas/empanadas/templates/reposync-src.tmpl
Normal file
@ -0,0 +1,7 @@
#!/bin/bash
set -o pipefail
{{ import_gpg_cmd }} | tee -a {{ sync_log }}
{{ dnf_plugin_cmd }} | tee -a {{ sync_log }}
{{ sync_cmd }} | tee -a {{ sync_log }}

# {{ check_cmd }} | tee -a {{ sync_log }}
8
iso/empanadas/empanadas/templates/reposync.tmpl
Normal file
@ -0,0 +1,8 @@
#!/bin/bash
set -o pipefail
{{ import_gpg_cmd }} | tee -a {{ sync_log }}
{{ arch_force_cp }} | tee -a {{ sync_log }}
{{ dnf_plugin_cmd }} | tee -a {{ sync_log }}
{{ sync_cmd }} | tee -a {{ sync_log }}

# {{ check_cmd }} | tee -a {{ sync_log }}
6
iso/empanadas/empanadas/templates/xorriso.tmpl.txt
Normal file
@ -0,0 +1,6 @@
-indev {{ boot_iso }}
-outdev {{ isoname }}
-boot_image any replay
-volid {{ volid }}
{{ graft }}
-end
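
How this command file is consumed lives in iso_utils.py, whose diff is suppressed below, so as a rough sketch only: a rendered copy of it could be fed to xorriso through its -options_from_file command. The path below is an assumed placeholder, not a path taken from the commit.

# Rough sketch, not taken from the commit; the rendered command file path is assumed.
import subprocess

subprocess.run(
    ["/usr/bin/xorriso", "-options_from_file", "/var/tmp/xorriso-x86_64.txt"],
    check=True,
)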
27
iso/empanadas/empanadas/util/__init__.py
Normal file
@ -0,0 +1,27 @@
|
||||
"""
|
||||
Imports all of our classes for this local module
|
||||
"""
|
||||
|
||||
from empanadas.util.check import (
|
||||
Checks,
|
||||
)
|
||||
|
||||
from empanadas.util.shared import (
|
||||
Shared,
|
||||
)
|
||||
|
||||
from empanadas.util.dnf_utils import (
|
||||
RepoSync,
|
||||
SigRepoSync
|
||||
)
|
||||
|
||||
from empanadas.util.iso_utils import (
|
||||
IsoBuild,
|
||||
LiveBuild
|
||||
)
|
||||
|
||||
__all__ = [
|
||||
'Checks',
|
||||
'RepoSync',
|
||||
'Shared'
|
||||
]
|
@ -1,6 +1,7 @@
|
||||
# Is our arch allowed for this particular release? Some previous releases do
|
||||
# not support ppc or s390x
|
||||
from common import Color
|
||||
from empanadas.common import Color
|
||||
|
||||
class Checks:
|
||||
"""This class helps check some things"""
|
||||
def __init__(self, rlvars, arch):
|
@ -10,10 +10,19 @@ import os
|
||||
import os.path
|
||||
import subprocess
|
||||
import shlex
|
||||
import shutil
|
||||
import time
|
||||
import re
|
||||
import json
|
||||
#import pipes
|
||||
from common import Color
|
||||
|
||||
from jinja2 import Environment, FileSystemLoader
|
||||
|
||||
from empanadas.common import Color, _rootdir
|
||||
from empanadas.util import Shared
|
||||
|
||||
# initial treeinfo data is made here
|
||||
import productmd.treeinfo
|
||||
|
||||
#HAS_LIBREPO = True
|
||||
#try:
|
||||
@ -37,12 +46,16 @@ class RepoSync:
|
||||
ignore_debug: bool = False,
|
||||
ignore_source: bool = False,
|
||||
repoclosure: bool = False,
|
||||
refresh_extra_files: bool = False,
|
||||
refresh_treeinfo: bool = False,
|
||||
skip_all: bool = False,
|
||||
hashed: bool = False,
|
||||
parallel: bool = False,
|
||||
dryrun: bool = False,
|
||||
fullrun: bool = False,
|
||||
nofail: bool = False,
|
||||
gpgkey: str = 'stable',
|
||||
rlmode: str = 'stable',
|
||||
logger=None
|
||||
):
|
||||
self.nofail = nofail
|
||||
@ -54,6 +67,8 @@ class RepoSync:
|
||||
self.skip_all = skip_all
|
||||
self.hashed = hashed
|
||||
self.repoclosure = repoclosure
|
||||
self.refresh_extra_files = refresh_extra_files
|
||||
self.refresh_treeinfo = refresh_treeinfo
|
||||
# Enables podman syncing, which should effectively speed up operations
|
||||
self.parallel = parallel
|
||||
# Relevant config items
|
||||
@ -62,9 +77,12 @@ class RepoSync:
|
||||
self.repo_base_url = config['repo_base_url']
|
||||
self.compose_root = config['compose_root']
|
||||
self.compose_base = config['compose_root'] + "/" + major
|
||||
self.profile = rlvars['profile']
|
||||
|
||||
# Relevant major version items
|
||||
self.shortname = config['shortname']
|
||||
self.revision = rlvars['revision'] + "-" + rlvars['rclvl']
|
||||
self.fullversion = rlvars['revision']
|
||||
self.arches = rlvars['allowed_arches']
|
||||
self.project_id = rlvars['project_id']
|
||||
self.repo_renames = rlvars['renames']
|
||||
@ -72,6 +90,11 @@ class RepoSync:
|
||||
self.multilib = rlvars['provide_multilib']
|
||||
self.repo = repo
|
||||
self.extra_files = rlvars['extra_files']
|
||||
self.gpgkey = gpgkey
|
||||
|
||||
# Templates
|
||||
file_loader = FileSystemLoader(f"{_rootdir}/templates")
|
||||
self.tmplenv = Environment(loader=file_loader)
|
||||
|
||||
# each el can have its own designated container to run stuff in,
|
||||
# otherwise we'll just default to the default config.
|
||||
@ -91,7 +114,7 @@ class RepoSync:
|
||||
self.compose_latest_dir = os.path.join(
|
||||
config['compose_root'],
|
||||
major,
|
||||
"latest-Rocky-{}".format(major)
|
||||
"latest-Rocky-{}".format(self.profile)
|
||||
)
|
||||
|
||||
self.compose_latest_sync = os.path.join(
|
||||
@ -104,6 +127,11 @@ class RepoSync:
|
||||
"work/logs"
|
||||
)
|
||||
|
||||
self.compose_global_work_root = os.path.join(
|
||||
self.compose_latest_dir,
|
||||
"work/global"
|
||||
)
|
||||
|
||||
# This is temporary for now.
|
||||
if logger is None:
|
||||
self.log = logging.getLogger("reposync")
|
||||
@ -169,27 +197,46 @@ class RepoSync:
|
||||
|
||||
log_root = os.path.join(
|
||||
work_root,
|
||||
"logs"
|
||||
"logs",
|
||||
self.date_stamp
|
||||
)
|
||||
|
||||
global_work_root = os.path.join(
|
||||
work_root,
|
||||
"global",
|
||||
)
|
||||
|
||||
if self.dryrun:
|
||||
self.log.error('Dry Runs are not supported just yet. Sorry!')
|
||||
raise SystemExit()
|
||||
|
||||
self.sync(self.repo, sync_root, work_root, log_root, self.arch)
|
||||
if self.fullrun and self.refresh_extra_files:
|
||||
self.log.warn(
|
||||
'[' + Color.BOLD + Color.YELLOW + 'WARN' + Color.END + '] ' +
|
||||
'A full run implies extra files are also deployed.'
|
||||
)
|
||||
|
||||
self.sync(self.repo, sync_root, work_root, log_root, global_work_root, self.arch)
|
||||
|
||||
if self.fullrun:
|
||||
self.deploy_extra_files()
|
||||
self.symlink_to_latest()
|
||||
self.deploy_extra_files(global_work_root)
|
||||
self.deploy_treeinfo(self.repo, sync_root, self.arch)
|
||||
self.symlink_to_latest(generated_dir)
|
||||
|
||||
if self.repoclosure:
|
||||
self.repoclosure_work(sync_root, work_root, log_root)
|
||||
|
||||
if self.refresh_extra_files and not self.fullrun:
|
||||
self.deploy_extra_files(global_work_root)
|
||||
|
||||
if self.refresh_treeinfo and not self.fullrun:
|
||||
self.deploy_treeinfo(self.repo, sync_root, self.arch)
|
||||
|
||||
self.log.info('Compose repo directory: %s' % sync_root)
|
||||
self.log.info('Compose logs: %s' % log_root)
|
||||
self.log.info('Compose completed.')
|
||||
|
||||
def sync(self, repo, sync_root, work_root, log_root, arch=None):
|
||||
def sync(self, repo, sync_root, work_root, log_root, global_work_root, arch=None):
|
||||
"""
|
||||
Calls out syncing of the repos. We generally sync each component of a
|
||||
repo:
|
||||
@ -200,7 +247,7 @@ class RepoSync:
|
||||
If parallel is true, we will run in podman.
|
||||
"""
|
||||
if self.parallel:
|
||||
self.podman_sync(repo, sync_root, work_root, log_root, arch)
|
||||
self.podman_sync(repo, sync_root, work_root, log_root, global_work_root, arch)
|
||||
else:
|
||||
self.dnf_sync(repo, sync_root, work_root, arch)
|
||||
|
||||
@ -212,7 +259,15 @@ class RepoSync:
|
||||
self.log.error('Please install podman and enable parallel')
|
||||
raise SystemExit()
|
||||
|
||||
def podman_sync(self, repo, sync_root, work_root, log_root, arch):
|
||||
def podman_sync(
|
||||
self,
|
||||
repo,
|
||||
sync_root,
|
||||
work_root,
|
||||
log_root,
|
||||
global_work_root,
|
||||
arch
|
||||
):
|
||||
"""
|
||||
This is for podman syncs
|
||||
|
||||
@ -230,6 +285,9 @@ class RepoSync:
|
||||
os.makedirs(entries_dir, exist_ok=True)
|
||||
|
||||
# yeah, I know.
|
||||
if not os.path.exists(global_work_root):
|
||||
os.makedirs(global_work_root, exist_ok=True)
|
||||
|
||||
if not os.path.exists(log_root):
|
||||
os.makedirs(log_root, exist_ok=True)
|
||||
|
||||
@ -293,6 +351,11 @@ class RepoSync:
|
||||
'debug/tree'
|
||||
)
|
||||
|
||||
import_gpg_cmd = ("/usr/bin/rpm --import {}{}").format(
|
||||
self.extra_files['git_raw_path'],
|
||||
self.extra_files['gpg'][self.gpgkey]
|
||||
)
|
||||
|
||||
arch_force_cp = ("/usr/bin/sed 's|$basearch|{}|g' {} > {}.{}".format(
|
||||
a,
|
||||
self.dnf_config,
|
||||
@ -300,48 +363,63 @@ class RepoSync:
|
||||
a
|
||||
))
|
||||
|
||||
sync_log = ("{}/{}-{}.log").format(
|
||||
log_root,
|
||||
repo_name,
|
||||
a
|
||||
)
|
||||
|
||||
debug_sync_log = ("{}/{}-{}-debug.log").format(
|
||||
log_root,
|
||||
repo_name,
|
||||
a
|
||||
)
|
||||
|
||||
sync_cmd = ("/usr/bin/dnf reposync -c {}.{} --download-metadata "
|
||||
"--repoid={} -p {} --forcearch {} --norepopath 2>&1 "
|
||||
"| tee -a {}/{}-{}-{}.log").format(
|
||||
"--repoid={} -p {} --forcearch {} --norepopath "
|
||||
"--gpgcheck --assumeyes 2>&1").format(
|
||||
self.dnf_config,
|
||||
a,
|
||||
r,
|
||||
os_sync_path,
|
||||
a,
|
||||
log_root,
|
||||
repo_name,
|
||||
a,
|
||||
self.date_stamp
|
||||
a
|
||||
)
|
||||
|
||||
debug_sync_cmd = ("/usr/bin/dnf reposync -c {}.{} "
|
||||
"--download-metadata --repoid={}-debug -p {} --forcearch {} "
|
||||
"--norepopath 2>&1 | tee -a {}/{}-{}-debug-{}.log").format(
|
||||
"--gpgcheck --norepopath --assumeyes 2>&1").format(
|
||||
self.dnf_config,
|
||||
a,
|
||||
r,
|
||||
debug_sync_path,
|
||||
a,
|
||||
log_root,
|
||||
repo_name,
|
||||
a,
|
||||
self.date_stamp
|
||||
a
|
||||
)
|
||||
|
||||
dnf_plugin_cmd = "/usr/bin/dnf install dnf-plugins-core -y"
|
||||
|
||||
sync_template = self.tmplenv.get_template('reposync.tmpl')
|
||||
sync_output = sync_template.render(
|
||||
import_gpg_cmd=import_gpg_cmd,
|
||||
arch_force_cp=arch_force_cp,
|
||||
dnf_plugin_cmd=dnf_plugin_cmd,
|
||||
sync_cmd=sync_cmd,
|
||||
sync_log=sync_log
|
||||
)
|
||||
|
||||
debug_sync_template = self.tmplenv.get_template('reposync.tmpl')
|
||||
debug_sync_output = debug_sync_template.render(
|
||||
import_gpg_cmd=import_gpg_cmd,
|
||||
arch_force_cp=arch_force_cp,
|
||||
dnf_plugin_cmd=dnf_plugin_cmd,
|
||||
sync_cmd=debug_sync_cmd,
|
||||
sync_log=debug_sync_log
|
||||
)
|
||||
|
||||
entry_point_open = open(entry_point_sh, "w+")
|
||||
debug_entry_point_open = open(debug_entry_point_sh, "w+")
|
||||
|
||||
entry_point_open.write('#!/bin/bash\n')
|
||||
entry_point_open.write('set -o pipefail\n')
|
||||
entry_point_open.write(arch_force_cp + '\n')
|
||||
entry_point_open.write('/usr/bin/dnf install dnf-plugins-core -y\n')
|
||||
entry_point_open.write(sync_cmd + '\n')
|
||||
|
||||
debug_entry_point_open.write('#!/bin/bash\n')
|
||||
debug_entry_point_open.write('set -o pipefail\n')
|
||||
debug_entry_point_open.write(arch_force_cp + '\n')
|
||||
debug_entry_point_open.write('/usr/bin/dnf install dnf-plugins-core -y\n')
|
||||
debug_entry_point_open.write(debug_sync_cmd + '\n')
|
||||
entry_point_open.write(sync_output)
|
||||
debug_entry_point_open.write(debug_sync_output)
|
||||
|
||||
entry_point_open.close()
|
||||
debug_entry_point_open.close()
|
||||
@ -365,21 +443,29 @@ class RepoSync:
|
||||
'source/tree'
|
||||
)
|
||||
|
||||
source_sync_log = ("{}/{}-source.log").format(
|
||||
log_root,
|
||||
repo_name
|
||||
)
|
||||
|
||||
source_sync_cmd = ("/usr/bin/dnf reposync -c {} "
|
||||
"--download-metadata --repoid={}-source -p {} "
|
||||
"--norepopath | tee -a {}/{}-source-{}.log").format(
|
||||
"--gpgcheck --norepopath --assumeyes 2>&1").format(
|
||||
self.dnf_config,
|
||||
r,
|
||||
source_sync_path,
|
||||
log_root,
|
||||
repo_name,
|
||||
self.date_stamp
|
||||
source_sync_path
|
||||
)
|
||||
|
||||
source_sync_template = self.tmplenv.get_template('reposync-src.tmpl')
|
||||
source_sync_output = source_sync_template.render(
|
||||
import_gpg_cmd=import_gpg_cmd,
|
||||
dnf_plugin_cmd=dnf_plugin_cmd,
|
||||
sync_cmd=source_sync_cmd,
|
||||
sync_log=source_sync_log
|
||||
)
|
||||
|
||||
source_entry_point_open = open(source_entry_point_sh, "w+")
|
||||
source_entry_point_open.write('#!/bin/bash\n')
|
||||
source_entry_point_open.write('set -o pipefail\n')
|
||||
source_entry_point_open.write('/usr/bin/dnf install dnf-plugins-core -y\n')
|
||||
source_entry_point_open.write(source_sync_cmd + '\n')
|
||||
source_entry_point_open.write(source_sync_output)
|
||||
source_entry_point_open.close()
|
||||
os.chmod(source_entry_point_sh, 0o755)
|
||||
|
||||
@ -410,7 +496,10 @@ class RepoSync:
|
||||
|
||||
join_all_pods = ' '.join(entry_name_list)
|
||||
time.sleep(3)
|
||||
self.log.info('Syncing %s ...' % r)
|
||||
self.log.info(
|
||||
'[' + Color.BOLD + Color.GREEN + 'INFO' + Color.END + '] ' +
|
||||
'Syncing ' + r + ' ...'
|
||||
)
|
||||
pod_watcher = '{} wait {}'.format(
|
||||
cmd,
|
||||
join_all_pods
|
||||
@ -439,10 +528,10 @@ class RepoSync:
|
||||
)
|
||||
|
||||
output, errors = podcheck.communicate()
|
||||
if 'Exited (0)' in output.decode():
|
||||
self.log.info('%s seems ok' % pod)
|
||||
else:
|
||||
self.log.error('%s had issues syncing' % pod)
|
||||
if 'Exited (0)' not in output.decode():
|
||||
self.log.error(
|
||||
'[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' + pod
|
||||
)
|
||||
bad_exit_list.append(pod)
|
||||
|
||||
rmcmd = '{} rm {}'.format(
|
||||
@ -458,7 +547,10 @@ class RepoSync:
|
||||
)
|
||||
|
||||
entry_name_list.clear()
|
||||
self.log.info('Syncing %s completed' % r)
|
||||
self.log.info(
|
||||
'[' + Color.BOLD + Color.GREEN + 'INFO' + Color.END + '] ' +
|
||||
'Syncing ' + r + ' completed'
|
||||
)
|
||||
|
||||
if len(bad_exit_list) > 0:
|
||||
self.log.error(
|
||||
@ -467,6 +559,11 @@ class RepoSync:
|
||||
)
|
||||
for issue in bad_exit_list:
|
||||
self.log.error(issue)
|
||||
else:
|
||||
self.log.info(
|
||||
'[' + Color.BOLD + Color.GREEN + ' OK ' + Color.END + '] '
|
||||
'No issues detected.'
|
||||
)
|
||||
|
||||
def generate_compose_dirs(self) -> str:
|
||||
"""
|
||||
@ -474,7 +571,7 @@ class RepoSync:
|
||||
"""
|
||||
compose_base_dir = os.path.join(
|
||||
self.compose_base,
|
||||
"Rocky-{}-{}".format(self.major_version, self.date_stamp)
|
||||
"Rocky-{}-{}".format(self.fullversion, self.date_stamp)
|
||||
)
|
||||
self.log.info('Creating compose directory %s' % compose_base_dir)
|
||||
if not os.path.exists(compose_base_dir):
|
||||
@ -482,7 +579,7 @@ class RepoSync:
|
||||
|
||||
return compose_base_dir
|
||||
|
||||
def symlink_to_latest(self):
|
||||
def symlink_to_latest(self, generated_dir):
|
||||
"""
|
||||
Emulates pungi and symlinks latest-Rocky-X
|
||||
|
||||
@ -490,7 +587,13 @@ class RepoSync:
|
||||
'latest' directory is what is rsynced on to staging after completion.
|
||||
This link should not change often.
|
||||
"""
|
||||
pass
|
||||
try:
|
||||
os.remove(self.compose_latest_dir)
|
||||
except:
|
||||
pass
|
||||
|
||||
self.log.info('Symlinking to latest-{}-{}...'.format(self.shortname, self.major_version))
|
||||
os.symlink(generated_dir, self.compose_latest_dir)
|
||||
|
||||
def generate_conf(self, dest_path='/var/tmp') -> str:
|
||||
"""
|
||||
@ -520,15 +623,10 @@ class RepoSync:
|
||||
if not os.path.exists(dest_path):
|
||||
os.makedirs(dest_path, exist_ok=True)
|
||||
config_file = open(fname, "w+")
|
||||
repolist = []
|
||||
for repo in self.repos:
|
||||
constructed_url = '{}/{}/repo/{}{}/$basearch'.format(
|
||||
self.repo_base_url,
|
||||
self.project_id,
|
||||
prehashed,
|
||||
repo,
|
||||
)
|
||||
|
||||
constructed_url_debug = '{}/{}/repo/{}{}/$basearch-debug'.format(
|
||||
constructed_url = '{}/{}/repo/{}{}/$basearch'.format(
|
||||
self.repo_base_url,
|
||||
self.project_id,
|
||||
prehashed,
|
||||
@ -542,27 +640,17 @@ class RepoSync:
|
||||
repo,
|
||||
)
|
||||
|
||||
# normal
|
||||
config_file.write('[%s]\n' % repo)
|
||||
config_file.write('name=%s\n' % repo)
|
||||
config_file.write('baseurl=%s\n' % constructed_url)
|
||||
config_file.write("enabled=1\n")
|
||||
config_file.write("gpgcheck=0\n\n")
|
||||
|
||||
# debug
|
||||
config_file.write('[%s-debug]\n' % repo)
|
||||
config_file.write('name=%s debug\n' % repo)
|
||||
config_file.write('baseurl=%s\n' % constructed_url_debug)
|
||||
config_file.write("enabled=1\n")
|
||||
config_file.write("gpgcheck=0\n\n")
|
||||
|
||||
# src
|
||||
config_file.write('[%s-source]\n' % repo)
|
||||
config_file.write('name=%s source\n' % repo)
|
||||
config_file.write('baseurl=%s\n' % constructed_url_src)
|
||||
config_file.write("enabled=1\n")
|
||||
config_file.write("gpgcheck=0\n\n")
|
||||
repodata = {
|
||||
'name': repo,
|
||||
'baseurl': constructed_url,
|
||||
'srcbaseurl': constructed_url_src,
|
||||
'gpgkey': self.extra_files['git_raw_path'] + self.extra_files['gpg'][self.gpgkey]
|
||||
}
|
||||
repolist.append(repodata)
|
||||
|
||||
template = self.tmplenv.get_template('repoconfig.tmpl')
|
||||
output = template.render(repos=repolist)
|
||||
config_file.write(output)
|
||||
|
||||
config_file.close()
|
||||
return fname
|
||||
@ -605,6 +693,22 @@ class RepoSync:
|
||||
)
|
||||
return cmd
|
||||
|
||||
def git_cmd(self) -> str:
|
||||
"""
|
||||
This generates the git command. This is when we need to pull down extra
|
||||
files or do work from a git repository.
|
||||
"""
|
||||
cmd = None
|
||||
if os.path.exists("/usr/bin/git"):
|
||||
cmd = "/usr/bin/git"
|
||||
else:
|
||||
self.log.error('/usr/bin/git was not found. Good bye.')
|
||||
raise SystemExit("\n\n/usr/bin/git was not found.\n\nPlease "
|
||||
" ensure that you have installed the necessary packages on "
|
||||
" this system. "
|
||||
)
|
||||
return cmd
|
||||
|
||||
def repoclosure_work(self, sync_root, work_root, log_root):
|
||||
"""
|
||||
This is where we run repoclosures, based on the configuration of each
|
||||
@ -662,7 +766,7 @@ class RepoSync:
|
||||
)
|
||||
repoclosure_cmd = ('/usr/bin/dnf repoclosure {} '
|
||||
'--repofrompath={},file://{}/{}/{}/os --repo={} --check={} {} '
|
||||
'| tee -a {}/{}-repoclosure-{}-{}.log').format(
|
||||
'| tee -a {}/{}-repoclosure-{}.log').format(
|
||||
repoclosure_arch_list,
|
||||
repo,
|
||||
sync_root,
|
||||
@ -673,8 +777,7 @@ class RepoSync:
|
||||
join_repo_comb,
|
||||
log_root,
|
||||
repo,
|
||||
arch,
|
||||
self.date_stamp
|
||||
arch
|
||||
)
|
||||
repoclosure_entry_point_open = open(repoclosure_entry_point_sh, "w+")
|
||||
repoclosure_entry_point_open.write('#!/bin/bash\n')
|
||||
@ -735,10 +838,10 @@ class RepoSync:
|
||||
)
|
||||
|
||||
output, errors = podcheck.communicate()
|
||||
if 'Exited (0)' in output.decode():
|
||||
self.log.info('%s seems ok' % pod)
|
||||
else:
|
||||
self.log.error('%s had issues closing' % pod)
|
||||
if 'Exited (0)' not in output.decode():
|
||||
self.log.error(
|
||||
'[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' + pod
|
||||
)
|
||||
bad_exit_list.append(pod)
|
||||
|
||||
rmcmd = '{} rm {}'.format(
|
||||
@ -764,11 +867,84 @@ class RepoSync:
|
||||
for issue in bad_exit_list:
|
||||
self.log.error(issue)
|
||||
|
||||
def deploy_extra_files(self):
|
||||
def deploy_extra_files(self, global_work_root):
|
||||
"""
|
||||
deploys extra files based on info of rlvars
|
||||
deploys extra files based on info of rlvars, including an
|
||||
extra_files.json
|
||||
|
||||
might also deploy COMPOSE_ID and maybe in the future a metadata dir with
|
||||
a bunch of compose-esque stuff.
|
||||
"""
|
||||
pass
|
||||
cmd = self.git_cmd()
|
||||
tmpclone = '/tmp/clone'
|
||||
extra_files_dir = os.path.join(
|
||||
global_work_root,
|
||||
'extra-files'
|
||||
)
|
||||
self.log.info(
|
||||
'[' + Color.BOLD + Color.GREEN + 'INFO' + Color.END + '] ' +
|
||||
'Deploying extra files to work directory ...'
|
||||
)
|
||||
|
||||
if not os.path.exists(extra_files_dir):
|
||||
os.makedirs(extra_files_dir, exist_ok=True)
|
||||
|
||||
clonecmd = '{} clone {} -b {} -q {}'.format(
|
||||
cmd,
|
||||
self.extra_files['git_repo'],
|
||||
self.extra_files['branch'],
|
||||
tmpclone
|
||||
)
|
||||
|
||||
git_clone = subprocess.call(
|
||||
shlex.split(clonecmd),
|
||||
stdout=subprocess.DEVNULL,
|
||||
stderr=subprocess.DEVNULL
|
||||
)
|
||||
|
||||
# Copy files to work root
|
||||
for extra in self.extra_files['list']:
|
||||
src = '/tmp/clone/' + extra
|
||||
# Copy extra files to root of compose here also - The extra files
|
||||
# are meant to be picked up by our ISO creation process and also
|
||||
# exist on our mirrors.
|
||||
try:
|
||||
shutil.copy2(src, extra_files_dir)
|
||||
except:
|
||||
self.log.warn(
|
||||
'[' + Color.BOLD + Color.YELLOW + 'WARN' + Color.END + '] ' +
|
||||
'Extra file not copied: ' + src
|
||||
)
|
||||
|
||||
try:
|
||||
shutil.rmtree(tmpclone)
|
||||
except OSError as e:
|
||||
self.log.error(
|
||||
'[' + Color.BOLD + Color.RED + 'FAIL' + Color.END + '] ' +
|
||||
'Directory ' + tmpclone + ' could not be removed: ' +
|
||||
e.strerror
|
||||
)
|
||||
|
||||
# Create metadata here?
|
||||
|
||||
self.log.info(
|
||||
'[' + Color.BOLD + Color.GREEN + 'INFO' + Color.END + '] ' +
|
||||
'Extra files phase completed.'
|
||||
)
|
||||
|
||||
def deploy_treeinfo(self, repo, sync_root, arch):
|
||||
"""
|
||||
Deploys initial treeinfo files. These have the potential of being
|
||||
overwritten by our ISO process, which is fine.
|
||||
"""
|
||||
arches_to_tree = self.arches
|
||||
if arch:
|
||||
arches_to_tree = [arch]
|
||||
|
||||
repos_to_tree = self.repos
|
||||
if repo and not self.fullrun:
|
||||
repos_to_tree = [repo]
|
||||
|
||||
|
||||
class SigRepoSync:
|
||||
"""
|
||||
@ -785,6 +961,7 @@ class SigRepoSync:
|
||||
arch=None,
|
||||
ignore_source: bool = False,
|
||||
repoclosure: bool = False,
|
||||
refresh_extra_files: bool = False,
|
||||
skip_all: bool = False,
|
||||
hashed: bool = False,
|
||||
parallel: bool = False,
|
||||
@ -801,6 +978,7 @@ class SigRepoSync:
|
||||
self.skip_all = skip_all
|
||||
self.hashed = hashed
|
||||
self.repoclosure = repoclosure
|
||||
self.refresh_extra_files = refresh_extra_files
|
||||
# Enables podman syncing, which should effectively speed up operations
|
||||
self.parallel = parallel
|
||||
# Relevant config items
|
||||
@ -848,6 +1026,11 @@ class SigRepoSync:
|
||||
"work/logs"
|
||||
)
|
||||
|
||||
self.compose_global_work_root = os.path.join(
|
||||
self.compose_latest_dir,
|
||||
"work/global"
|
||||
)
|
||||
|
||||
# This is temporary for now.
|
||||
if logger is None:
|
||||
self.log = logging.getLogger("sigreposync")
|
1731
iso/empanadas/empanadas/util/iso_utils.py
Normal file
File diff suppressed because it is too large
79
iso/empanadas/empanadas/util/shared.py
Normal file
@ -0,0 +1,79 @@
|
||||
# These are shared utilities used across empanadas
|
||||
|
||||
import os
|
||||
import hashlib
|
||||
|
||||
class Shared:
|
||||
"""
|
||||
Quick utilities that may be commonly used
|
||||
"""
|
||||
@staticmethod
|
||||
def get_checksum(path, hashtype, logger):
|
||||
"""
|
||||
Generates a checksum from the provided path by reading the file in
chunks, so the whole file is never held in memory at once.
|
||||
"""
|
||||
try:
|
||||
checksum = hashlib.new(hashtype)
|
||||
except ValueError:
|
||||
logger.error("Invalid hash type: %s" % hashtype)
|
||||
return False
|
||||
|
||||
try:
|
||||
input_file = open(path, "rb")
|
||||
except IOError as e:
|
||||
logger.error("Could not open file %s: %s" % (path, e))
|
||||
return False
|
||||
|
||||
while True:
|
||||
chunk = input_file.read(8192)
|
||||
if not chunk:
|
||||
break
|
||||
checksum.update(chunk)
|
||||
|
||||
input_file.close()
|
||||
stat = os.stat(path)
|
||||
base = os.path.basename(path)
|
||||
# This emulates our current syncing scripts, which run stat and
# sha256sum and produce a very specific output format.
|
||||
return "%s: %s bytes\n%s (%s) = %s\n" % (
|
||||
base,
|
||||
stat.st_size,
|
||||
hashtype.upper(),
|
||||
base,
|
||||
checksum.hexdigest()
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def discinfo_write(timestamp, fullname, arch, file_path):
|
||||
"""
|
||||
Ensure discinfo is written correctly
|
||||
"""
|
||||
data = [
|
||||
"%s" % timestamp,
|
||||
"%s" % fullname,
|
||||
"%s" % arch,
|
||||
"ALL"
|
||||
]
|
||||
|
||||
with open(file_path, "w+") as f:
|
||||
f.write("\n".join(data))
|
||||
f.close()
|
||||
|
||||
@staticmethod
|
||||
def media_repo_write(timestamp, fullname, file_path):
|
||||
"""
|
||||
Ensure media.repo exists
|
||||
"""
|
||||
data = [
|
||||
"[InstallMedia]",
|
||||
"name=%s" % fullname,
|
||||
"mediaid=%s" % timestamp,
|
||||
"metadata_expire=-1",
|
||||
"gpgcheck=0",
|
||||
"cost=500",
|
||||
"",
|
||||
]
|
||||
|
||||
with open(file_path, "w") as f:
|
||||
f.write("\n".join(data))
|
29
iso/empanadas/images/epelkey.gpg
Normal file
@ -0,0 +1,29 @@
|
||||
-----BEGIN PGP PUBLIC KEY BLOCK-----
|
||||
|
||||
mQINBGE3mOsBEACsU+XwJWDJVkItBaugXhXIIkb9oe+7aadELuVo0kBmc3HXt/Yp
|
||||
CJW9hHEiGZ6z2jwgPqyJjZhCvcAWvgzKcvqE+9i0NItV1rzfxrBe2BtUtZmVcuE6
|
||||
2b+SPfxQ2Hr8llaawRjt8BCFX/ZzM4/1Qk+EzlfTcEcpkMf6wdO7kD6ulBk/tbsW
|
||||
DHX2lNcxszTf+XP9HXHWJlA2xBfP+Dk4gl4DnO2Y1xR0OSywE/QtvEbN5cY94ieu
|
||||
n7CBy29AleMhmbnx9pw3NyxcFIAsEZHJoU4ZW9ulAJ/ogttSyAWeacW7eJGW31/Z
|
||||
39cS+I4KXJgeGRI20RmpqfH0tuT+X5Da59YpjYxkbhSK3HYBVnNPhoJFUc2j5iKy
|
||||
XLgkapu1xRnEJhw05kr4LCbud0NTvfecqSqa+59kuVc+zWmfTnGTYc0PXZ6Oa3rK
|
||||
44UOmE6eAT5zd/ToleDO0VesN+EO7CXfRsm7HWGpABF5wNK3vIEF2uRr2VJMvgqS
|
||||
9eNwhJyOzoca4xFSwCkc6dACGGkV+CqhufdFBhmcAsUotSxe3zmrBjqA0B/nxIvH
|
||||
DVgOAMnVCe+Lmv8T0mFgqZSJdIUdKjnOLu/GRFhjDKIak4jeMBMTYpVnU+HhMHLq
|
||||
uDiZkNEvEEGhBQmZuI8J55F/a6UURnxUwT3piyi3Pmr2IFD7ahBxPzOBCQARAQAB
|
||||
tCdGZWRvcmEgKGVwZWw5KSA8ZXBlbEBmZWRvcmFwcm9qZWN0Lm9yZz6JAk4EEwEI
|
||||
ADgWIQT/itE0RZcQbs6BO5GKOHK/MihGfAUCYTeY6wIbDwULCQgHAgYVCgkICwIE
|
||||
FgIDAQIeAQIXgAAKCRCKOHK/MihGfFX/EACBPWv20+ttYu1A5WvtHJPzwbj0U4yF
|
||||
3zTQpBglQ2UfkRpYdipTlT3Ih6j5h2VmgRPtINCc/ZE28adrWpBoeFIS2YAKOCLC
|
||||
nZYtHl2nCoLq1U7FSttUGsZ/t8uGCBgnugTfnIYcmlP1jKKA6RJAclK89evDQX5n
|
||||
R9ZD+Cq3CBMlttvSTCht0qQVlwycedH8iWyYgP/mF0W35BIn7NuuZwWhgR00n/VG
|
||||
4nbKPOzTWbsP45awcmivdrS74P6mL84WfkghipdmcoyVb1B8ZP4Y/Ke0RXOnLhNe
|
||||
CfrXXvuW+Pvg2RTfwRDtehGQPAgXbmLmz2ZkV69RGIr54HJv84NDbqZovRTMr7gL
|
||||
9k3ciCzXCiYQgM8yAyGHV0KEhFSQ1HV7gMnt9UmxbxBE2pGU7vu3CwjYga5DpwU7
|
||||
w5wu1TmM5KgZtZvuWOTDnqDLf0cKoIbW8FeeCOn24elcj32bnQDuF9DPey1mqcvT
|
||||
/yEo/Ushyz6CVYxN8DGgcy2M9JOsnmjDx02h6qgWGWDuKgb9jZrvRedpAQCeemEd
|
||||
fhEs6ihqVxRFl16HxC4EVijybhAL76SsM2nbtIqW1apBQJQpXWtQwwdvgTVpdEtE
|
||||
r4ArVJYX5LrswnWEQMOelugUG6S3ZjMfcyOa/O0364iY73vyVgaYK+2XtT2usMux
|
||||
VL469Kj5m13T6w==
|
||||
=Mjs/
|
||||
-----END PGP PUBLIC KEY BLOCK-----
|
12
iso/empanadas/images/get_arch
Executable file
@ -0,0 +1,12 @@
#!/usr/bin/env bash
case "$(uname -m)" in
    x86_64 | amd64)
        echo -n "amd64"
        ;;
    arm64 | aarch64)
        echo -n "arm64"
        ;;
    *)
        echo -n "$(uname -m)"
        ;;
esac
37
iso/empanadas/images/rhel.repo
Normal file
@ -0,0 +1,37 @@
|
||||
[baseos]
|
||||
name=CentOS Stream $releasever - BaseOS
|
||||
baseurl=https://ord.mirror.rackspace.com/centos-stream/9-stream/BaseOS/$arch/os
|
||||
gpgkey=file:///etc/pki/rpm-gpg/RPM-GPG-KEY-centosofficial
|
||||
gpgcheck=1
|
||||
repo_gpgcheck=0
|
||||
metadata_expire=6h
|
||||
countme=1
|
||||
enabled=1
|
||||
|
||||
[appstream]
|
||||
name=CentOS Stream $releasever - AppStream
|
||||
baseurl=https://ord.mirror.rackspace.com/centos-stream/9-stream/AppStream/$arch/os
|
||||
gpgkey=file:///etc/pki/rpm-gpg/RPM-GPG-KEY-centosofficial
|
||||
gpgcheck=1
|
||||
repo_gpgcheck=0
|
||||
metadata_expire=6h
|
||||
countme=1
|
||||
enabled=1
|
||||
|
||||
[extras-common]
|
||||
name=CentOS Stream $releasever - Extras packages
|
||||
baseurl=http://mirror.stream.centos.org/SIGs/9-stream/extras/$arch/extras-common
|
||||
gpgkey=file:///etc/pki/rpm-gpg/RPM-GPG-KEY-CentOS-SIG-Extras-SHA512
|
||||
gpgcheck=1
|
||||
repo_gpgcheck=0
|
||||
metadata_expire=6h
|
||||
countme=1
|
||||
enabled=1
|
||||
|
||||
[epel]
|
||||
name=Extra Packages for Enterprise Linux $releasever - $basearch
|
||||
baseurl=https://download-ib01.fedoraproject.org/pub/epel/9/Everything/$arch
|
||||
enabled=1
|
||||
gpgcheck=1
|
||||
countme=1
|
||||
gpgkey=file:///etc/pki/rpm-gpg/RPM-GPG-KEY-EPEL-9
|
2
iso/empanadas/images/yum-sudo
Normal file
@ -0,0 +1,2 @@
#!/bin/sh
sudo yum "$@"
594
iso/empanadas/poetry.lock
generated
Normal file
@ -0,0 +1,594 @@
|
||||
[[package]]
|
||||
name = "atomicwrites"
|
||||
version = "1.4.0"
|
||||
description = "Atomic file writes."
|
||||
category = "dev"
|
||||
optional = false
|
||||
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
|
||||
|
||||
[[package]]
|
||||
name = "attrs"
|
||||
version = "21.4.0"
|
||||
description = "Classes Without Boilerplate"
|
||||
category = "dev"
|
||||
optional = false
|
||||
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
|
||||
|
||||
[package.extras]
|
||||
dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit", "cloudpickle"]
|
||||
docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"]
|
||||
tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "cloudpickle"]
|
||||
tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "cloudpickle"]
|
||||
|
||||
[[package]]
|
||||
name = "boto3"
|
||||
version = "1.24.14"
|
||||
description = "The AWS SDK for Python"
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = ">= 3.7"
|
||||
|
||||
[package.dependencies]
|
||||
botocore = ">=1.27.14,<1.28.0"
|
||||
jmespath = ">=0.7.1,<2.0.0"
|
||||
s3transfer = ">=0.6.0,<0.7.0"
|
||||
|
||||
[package.extras]
|
||||
crt = ["botocore[crt] (>=1.21.0,<2.0a0)"]
|
||||
|
||||
[[package]]
|
||||
name = "botocore"
|
||||
version = "1.27.14"
|
||||
description = "Low-level, data-driven core of boto 3."
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = ">= 3.7"
|
||||
|
||||
[package.dependencies]
|
||||
jmespath = ">=0.7.1,<2.0.0"
|
||||
python-dateutil = ">=2.1,<3.0.0"
|
||||
urllib3 = ">=1.25.4,<1.27"
|
||||
|
||||
[package.extras]
|
||||
crt = ["awscrt (==0.13.8)"]
|
||||
|
||||
[[package]]
|
||||
name = "certifi"
|
||||
version = "2022.6.15"
|
||||
description = "Python package for providing Mozilla's CA Bundle."
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = ">=3.6"
|
||||
|
||||
[[package]]
|
||||
name = "charset-normalizer"
|
||||
version = "2.0.12"
|
||||
description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = ">=3.5.0"
|
||||
|
||||
[package.extras]
|
||||
unicode_backport = ["unicodedata2"]
|
||||
|
||||
[[package]]
|
||||
name = "colorama"
|
||||
version = "0.4.5"
|
||||
description = "Cross-platform colored terminal text."
|
||||
category = "dev"
|
||||
optional = false
|
||||
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
|
||||
|
||||
[[package]]
|
||||
name = "idna"
|
||||
version = "3.3"
|
||||
description = "Internationalized Domain Names in Applications (IDNA)"
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = ">=3.5"
|
||||
|
||||
[[package]]
|
||||
name = "importlib-metadata"
|
||||
version = "4.11.4"
|
||||
description = "Read metadata from Python packages"
|
||||
category = "dev"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
|
||||
[package.dependencies]
|
||||
typing-extensions = {version = ">=3.6.4", markers = "python_version < \"3.8\""}
|
||||
zipp = ">=0.5"
|
||||
|
||||
[package.extras]
|
||||
docs = ["sphinx", "jaraco.packaging (>=9)", "rst.linker (>=1.9)"]
|
||||
perf = ["ipython"]
|
||||
testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "packaging", "pyfakefs", "flufl.flake8", "pytest-perf (>=0.9.2)", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)", "importlib-resources (>=1.3)"]
|
||||
|
||||
[[package]]
|
||||
name = "importlib-resources"
|
||||
version = "5.8.0"
|
||||
description = "Read resources from Python packages"
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
|
||||
[package.dependencies]
|
||||
zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""}
|
||||
|
||||
[package.extras]
|
||||
docs = ["sphinx", "jaraco.packaging (>=9)", "rst.linker (>=1.9)"]
|
||||
testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)"]
|
||||
|
||||
[[package]]
|
||||
name = "jinja2"
|
||||
version = "2.11.3"
|
||||
description = "A very fast and expressive template engine."
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
|
||||
|
||||
[package.dependencies]
|
||||
MarkupSafe = ">=0.23"
|
||||
|
||||
[package.extras]
|
||||
i18n = ["Babel (>=0.8)"]
|
||||
|
||||
[[package]]
|
||||
name = "jmespath"
|
||||
version = "1.0.1"
|
||||
description = "JSON Matching Expressions"
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
|
||||
[[package]]
|
||||
name = "kobo"
|
||||
version = "0.24.1"
|
||||
description = "A pile of python modules used by Red Hat release engineering to build their tools"
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = ">2.6"
|
||||
|
||||
[package.dependencies]
|
||||
six = "*"
|
||||
|
||||
[[package]]
|
||||
name = "markupsafe"
|
||||
version = "2.0.1"
|
||||
description = "Safely add untrusted strings to HTML/XML markup."
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = ">=3.6"
|
||||
|
||||
[[package]]
|
||||
name = "more-itertools"
|
||||
version = "8.13.0"
|
||||
description = "More routines for operating on iterables, beyond itertools"
|
||||
category = "dev"
|
||||
optional = false
|
||||
python-versions = ">=3.5"
|
||||
|
||||
[[package]]
|
||||
name = "packaging"
|
||||
version = "21.3"
|
||||
description = "Core utilities for Python packages"
|
||||
category = "dev"
|
||||
optional = false
|
||||
python-versions = ">=3.6"
|
||||
|
||||
[package.dependencies]
|
||||
pyparsing = ">=2.0.2,<3.0.5 || >3.0.5"
|
||||
|
||||
[[package]]
|
||||
name = "pluggy"
|
||||
version = "0.13.1"
|
||||
description = "plugin and hook calling mechanisms for python"
|
||||
category = "dev"
|
||||
optional = false
|
||||
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
|
||||
|
||||
[package.dependencies]
|
||||
importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""}
|
||||
|
||||
[package.extras]
|
||||
dev = ["pre-commit", "tox"]
|
||||
|
||||
[[package]]
|
||||
name = "productmd"
|
||||
version = "1.33"
|
||||
description = "Product, compose and installation media metadata library"
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = "*"
|
||||
|
||||
[package.dependencies]
|
||||
six = "*"
|
||||
|
||||
[[package]]
|
||||
name = "py"
|
||||
version = "1.11.0"
|
||||
description = "library with cross-python path, ini-parsing, io, code, log facilities"
|
||||
category = "dev"
|
||||
optional = false
|
||||
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
|
||||
|
||||
[[package]]
|
||||
name = "pyparsing"
|
||||
version = "3.0.9"
|
||||
description = "pyparsing module - Classes and methods to define and execute parsing grammars"
|
||||
category = "dev"
|
||||
optional = false
|
||||
python-versions = ">=3.6.8"
|
||||
|
||||
[package.extras]
|
||||
diagrams = ["railroad-diagrams", "jinja2"]
|
||||
|
||||
[[package]]
|
||||
name = "pytest"
|
||||
version = "5.4.3"
|
||||
description = "pytest: simple powerful testing with Python"
|
||||
category = "dev"
|
||||
optional = false
|
||||
python-versions = ">=3.5"
|
||||
|
||||
[package.dependencies]
|
||||
atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""}
|
||||
attrs = ">=17.4.0"
|
||||
colorama = {version = "*", markers = "sys_platform == \"win32\""}
|
||||
importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""}
|
||||
more-itertools = ">=4.0.0"
|
||||
packaging = "*"
|
||||
pluggy = ">=0.12,<1.0"
|
||||
py = ">=1.5.0"
|
||||
wcwidth = "*"
|
||||
|
||||
[package.extras]
|
||||
checkqa-mypy = ["mypy (==v0.761)"]
|
||||
testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"]
|
||||
|
||||
[[package]]
|
||||
name = "python-dateutil"
|
||||
version = "2.8.2"
|
||||
description = "Extensions to the standard Python datetime module"
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"
|
||||
|
||||
[package.dependencies]
|
||||
six = ">=1.5"
|
||||
|
||||
[[package]]
|
||||
name = "pyyaml"
|
||||
version = "6.0"
|
||||
description = "YAML parser and emitter for Python"
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = ">=3.6"
|
||||
|
||||
[[package]]
|
||||
name = "requests"
|
||||
version = "2.28.0"
|
||||
description = "Python HTTP for Humans."
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = ">=3.7, <4"
|
||||
|
||||
[package.dependencies]
|
||||
certifi = ">=2017.4.17"
|
||||
charset-normalizer = ">=2.0.0,<2.1.0"
|
||||
idna = ">=2.5,<4"
|
||||
urllib3 = ">=1.21.1,<1.27"
|
||||
|
||||
[package.extras]
|
||||
socks = ["PySocks (>=1.5.6,!=1.5.7)"]
|
||||
use_chardet_on_py3 = ["chardet (>=3.0.2,<5)"]
|
||||
|
||||
[[package]]
|
||||
name = "rpm-py-installer"
|
||||
version = "1.1.0"
|
||||
description = "RPM Python binding Installer"
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = "*"
|
||||
|
||||
[[package]]
|
||||
name = "s3transfer"
|
||||
version = "0.6.0"
|
||||
description = "An Amazon S3 Transfer Manager"
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = ">= 3.7"
|
||||
|
||||
[package.dependencies]
|
||||
botocore = ">=1.12.36,<2.0a.0"
|
||||
|
||||
[package.extras]
|
||||
crt = ["botocore[crt] (>=1.20.29,<2.0a.0)"]
|
||||
|
||||
[[package]]
|
||||
name = "six"
|
||||
version = "1.16.0"
|
||||
description = "Python 2 and 3 compatibility utilities"
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
|
||||
|
||||
[[package]]
|
||||
name = "typing-extensions"
|
||||
version = "4.2.0"
|
||||
description = "Backported and Experimental Type Hints for Python 3.7+"
|
||||
category = "dev"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
|
||||
[[package]]
|
||||
name = "urllib3"
|
||||
version = "1.26.9"
|
||||
description = "HTTP library with thread-safe connection pooling, file post, and more."
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4"
|
||||
|
||||
[package.extras]
|
||||
brotli = ["brotlicffi (>=0.8.0)", "brotli (>=1.0.9)", "brotlipy (>=0.6.0)"]
|
||||
secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "ipaddress"]
|
||||
socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"]
|
||||
|
||||
[[package]]
|
||||
name = "wcwidth"
|
||||
version = "0.2.5"
|
||||
description = "Measures the displayed width of unicode strings in a terminal"
|
||||
category = "dev"
|
||||
optional = false
|
||||
python-versions = "*"
|
||||
|
||||
[[package]]
|
||||
name = "xmltodict"
|
||||
version = "0.13.0"
|
||||
description = "Makes working with XML feel like you are working with JSON"
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = ">=3.4"
|
||||
|
||||
[[package]]
|
||||
name = "zipp"
|
||||
version = "3.8.0"
|
||||
description = "Backport of pathlib-compatible object wrapper for zip files"
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
|
||||
[package.extras]
|
||||
docs = ["sphinx", "jaraco.packaging (>=9)", "rst.linker (>=1.9)"]
|
||||
testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)"]
|
||||
|
||||
[metadata]
|
||||
lock-version = "1.1"
|
||||
python-versions = ">=3.7,<4"
|
||||
content-hash = "ccd47ad1b0819968dbad34b68c3f9afd98bd657ee639f9037731fd2a0746bd16"
|
||||
|
||||
[metadata.files]
|
||||
atomicwrites = [
|
||||
{file = "atomicwrites-1.4.0-py2.py3-none-any.whl", hash = "sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197"},
|
||||
{file = "atomicwrites-1.4.0.tar.gz", hash = "sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"},
|
||||
]
|
||||
attrs = [
|
||||
{file = "attrs-21.4.0-py2.py3-none-any.whl", hash = "sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4"},
|
||||
{file = "attrs-21.4.0.tar.gz", hash = "sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd"},
|
||||
]
|
||||
boto3 = [
|
||||
{file = "boto3-1.24.14-py3-none-any.whl", hash = "sha256:490f5e88f5551b33ae3019a37412158b76426d63d1fb910968ade9b6a024e5fe"},
|
||||
{file = "boto3-1.24.14.tar.gz", hash = "sha256:e284705da36faa668c715ae1f74ebbff4320dbfbe3a733df3a8ab076d1ed1226"},
|
||||
]
|
||||
botocore = [
|
||||
{file = "botocore-1.27.14-py3-none-any.whl", hash = "sha256:df1e9b208ff93daac7c645b0b04fb6dccd7f20262eae24d87941727025cbeece"},
|
||||
{file = "botocore-1.27.14.tar.gz", hash = "sha256:bb56fa77b8fa1ec367c2e16dee62d60000451aac5140dcce3ebddc167fd5c593"},
|
||||
]
|
||||
certifi = [
|
||||
{file = "certifi-2022.6.15-py3-none-any.whl", hash = "sha256:fe86415d55e84719d75f8b69414f6438ac3547d2078ab91b67e779ef69378412"},
|
||||
{file = "certifi-2022.6.15.tar.gz", hash = "sha256:84c85a9078b11105f04f3036a9482ae10e4621616db313fe045dd24743a0820d"},
|
||||
]
|
||||
charset-normalizer = [
|
||||
{file = "charset-normalizer-2.0.12.tar.gz", hash = "sha256:2857e29ff0d34db842cd7ca3230549d1a697f96ee6d3fb071cfa6c7393832597"},
|
||||
{file = "charset_normalizer-2.0.12-py3-none-any.whl", hash = "sha256:6881edbebdb17b39b4eaaa821b438bf6eddffb4468cf344f09f89def34a8b1df"},
|
||||
]
|
||||
colorama = [
|
||||
{file = "colorama-0.4.5-py2.py3-none-any.whl", hash = "sha256:854bf444933e37f5824ae7bfc1e98d5bce2ebe4160d46b5edf346a89358e99da"},
|
||||
{file = "colorama-0.4.5.tar.gz", hash = "sha256:e6c6b4334fc50988a639d9b98aa429a0b57da6e17b9a44f0451f930b6967b7a4"},
|
||||
]
|
||||
idna = [
|
||||
{file = "idna-3.3-py3-none-any.whl", hash = "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff"},
|
||||
{file = "idna-3.3.tar.gz", hash = "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"},
|
||||
]
|
||||
importlib-metadata = [
|
||||
{file = "importlib_metadata-4.11.4-py3-none-any.whl", hash = "sha256:c58c8eb8a762858f49e18436ff552e83914778e50e9d2f1660535ffb364552ec"},
|
||||
{file = "importlib_metadata-4.11.4.tar.gz", hash = "sha256:5d26852efe48c0a32b0509ffbc583fda1a2266545a78d104a6f4aff3db17d700"},
|
||||
]
|
||||
importlib-resources = [
|
||||
{file = "importlib_resources-5.8.0-py3-none-any.whl", hash = "sha256:7952325ffd516c05a8ad0858c74dff2c3343f136fe66a6002b2623dd1d43f223"},
|
||||
{file = "importlib_resources-5.8.0.tar.gz", hash = "sha256:568c9f16cb204f9decc8d6d24a572eeea27dacbb4cee9e6b03a8025736769751"},
|
||||
]
|
||||
jinja2 = [
|
||||
{file = "Jinja2-2.11.3-py2.py3-none-any.whl", hash = "sha256:03e47ad063331dd6a3f04a43eddca8a966a26ba0c5b7207a9a9e4e08f1b29419"},
|
||||
{file = "Jinja2-2.11.3.tar.gz", hash = "sha256:a6d58433de0ae800347cab1fa3043cebbabe8baa9d29e668f1c768cb87a333c6"},
|
||||
]
|
||||
jmespath = [
|
||||
{file = "jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980"},
|
||||
{file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"},
|
||||
]
|
||||
kobo = [
|
||||
{file = "kobo-0.24.1.tar.gz", hash = "sha256:d5a30cc20c323f3e9d9b4b2e511650c4b98929b88859bd8cf57463876686e407"},
|
||||
]
|
||||
markupsafe = [
|
||||
{file = "MarkupSafe-2.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d8446c54dc28c01e5a2dbac5a25f071f6653e6e40f3a8818e8b45d790fe6ef53"},
|
||||
{file = "MarkupSafe-2.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:36bc903cbb393720fad60fc28c10de6acf10dc6cc883f3e24ee4012371399a38"},
|
||||
{file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d7d807855b419fc2ed3e631034685db6079889a1f01d5d9dac950f764da3dad"},
|
||||
{file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:add36cb2dbb8b736611303cd3bfcee00afd96471b09cda130da3581cbdc56a6d"},
|
||||
{file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:168cd0a3642de83558a5153c8bd34f175a9a6e7f6dc6384b9655d2697312a646"},
|
||||
{file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4dc8f9fb58f7364b63fd9f85013b780ef83c11857ae79f2feda41e270468dd9b"},
|
||||
{file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:20dca64a3ef2d6e4d5d615a3fd418ad3bde77a47ec8a23d984a12b5b4c74491a"},
|
||||
{file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:cdfba22ea2f0029c9261a4bd07e830a8da012291fbe44dc794e488b6c9bb353a"},
|
||||
{file = "MarkupSafe-2.0.1-cp310-cp310-win32.whl", hash = "sha256:99df47edb6bda1249d3e80fdabb1dab8c08ef3975f69aed437cb69d0a5de1e28"},
|
||||
{file = "MarkupSafe-2.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:e0f138900af21926a02425cf736db95be9f4af72ba1bb21453432a07f6082134"},
|
||||
{file = "MarkupSafe-2.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f9081981fe268bd86831e5c75f7de206ef275defcb82bc70740ae6dc507aee51"},
|
||||
{file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:0955295dd5eec6cb6cc2fe1698f4c6d84af2e92de33fbcac4111913cd100a6ff"},
|
||||
{file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:0446679737af14f45767963a1a9ef7620189912317d095f2d9ffa183a4d25d2b"},
|
||||
{file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:f826e31d18b516f653fe296d967d700fddad5901ae07c622bb3705955e1faa94"},
|
||||
{file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:fa130dd50c57d53368c9d59395cb5526eda596d3ffe36666cd81a44d56e48872"},
|
||||
{file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:905fec760bd2fa1388bb5b489ee8ee5f7291d692638ea5f67982d968366bef9f"},
|
||||
{file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf5d821ffabf0ef3533c39c518f3357b171a1651c1ff6827325e4489b0e46c3c"},
|
||||
{file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0d4b31cc67ab36e3392bbf3862cfbadac3db12bdd8b02a2731f509ed5b829724"},
|
||||
{file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:baa1a4e8f868845af802979fcdbf0bb11f94f1cb7ced4c4b8a351bb60d108145"},
|
||||
{file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:deb993cacb280823246a026e3b2d81c493c53de6acfd5e6bfe31ab3402bb37dd"},
|
||||
{file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:63f3268ba69ace99cab4e3e3b5840b03340efed0948ab8f78d2fd87ee5442a4f"},
|
||||
{file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:8d206346619592c6200148b01a2142798c989edcb9c896f9ac9722a99d4e77e6"},
|
||||
{file = "MarkupSafe-2.0.1-cp36-cp36m-win32.whl", hash = "sha256:6c4ca60fa24e85fe25b912b01e62cb969d69a23a5d5867682dd3e80b5b02581d"},
|
||||
{file = "MarkupSafe-2.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b2f4bf27480f5e5e8ce285a8c8fd176c0b03e93dcc6646477d4630e83440c6a9"},
{file = "MarkupSafe-2.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0717a7390a68be14b8c793ba258e075c6f4ca819f15edfc2a3a027c823718567"},
{file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:6557b31b5e2c9ddf0de32a691f2312a32f77cd7681d8af66c2692efdbef84c18"},
{file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:49e3ceeabbfb9d66c3aef5af3a60cc43b85c33df25ce03d0031a608b0a8b2e3f"},
{file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:d7f9850398e85aba693bb640262d3611788b1f29a79f0c93c565694658f4071f"},
{file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:6a7fae0dd14cf60ad5ff42baa2e95727c3d81ded453457771d02b7d2b3f9c0c2"},
{file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:b7f2d075102dc8c794cbde1947378051c4e5180d52d276987b8d28a3bd58c17d"},
{file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e9936f0b261d4df76ad22f8fee3ae83b60d7c3e871292cd42f40b81b70afae85"},
{file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:2a7d351cbd8cfeb19ca00de495e224dea7e7d919659c2841bbb7f420ad03e2d6"},
{file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:60bf42e36abfaf9aff1f50f52644b336d4f0a3fd6d8a60ca0d054ac9f713a864"},
{file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d6c7ebd4e944c85e2c3421e612a7057a2f48d478d79e61800d81468a8d842207"},
{file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f0567c4dc99f264f49fe27da5f735f414c4e7e7dd850cfd8e69f0862d7c74ea9"},
{file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:89c687013cb1cd489a0f0ac24febe8c7a666e6e221b783e53ac50ebf68e45d86"},
{file = "MarkupSafe-2.0.1-cp37-cp37m-win32.whl", hash = "sha256:a30e67a65b53ea0a5e62fe23682cfe22712e01f453b95233b25502f7c61cb415"},
{file = "MarkupSafe-2.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:611d1ad9a4288cf3e3c16014564df047fe08410e628f89805e475368bd304914"},
{file = "MarkupSafe-2.0.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5bb28c636d87e840583ee3adeb78172efc47c8b26127267f54a9c0ec251d41a9"},
{file = "MarkupSafe-2.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:be98f628055368795d818ebf93da628541e10b75b41c559fdf36d104c5787066"},
{file = "MarkupSafe-2.0.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:1d609f577dc6e1aa17d746f8bd3c31aa4d258f4070d61b2aa5c4166c1539de35"},
{file = "MarkupSafe-2.0.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7d91275b0245b1da4d4cfa07e0faedd5b0812efc15b702576d103293e252af1b"},
{file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:01a9b8ea66f1658938f65b93a85ebe8bc016e6769611be228d797c9d998dd298"},
{file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:47ab1e7b91c098ab893b828deafa1203de86d0bc6ab587b160f78fe6c4011f75"},
{file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:97383d78eb34da7e1fa37dd273c20ad4320929af65d156e35a5e2d89566d9dfb"},
{file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6fcf051089389abe060c9cd7caa212c707e58153afa2c649f00346ce6d260f1b"},
{file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5855f8438a7d1d458206a2466bf82b0f104a3724bf96a1c781ab731e4201731a"},
{file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:3dd007d54ee88b46be476e293f48c85048603f5f516008bee124ddd891398ed6"},
{file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:aca6377c0cb8a8253e493c6b451565ac77e98c2951c45f913e0b52facdcff83f"},
{file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:04635854b943835a6ea959e948d19dcd311762c5c0c6e1f0e16ee57022669194"},
{file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6300b8454aa6930a24b9618fbb54b5a68135092bc666f7b06901f897fa5c2fee"},
{file = "MarkupSafe-2.0.1-cp38-cp38-win32.whl", hash = "sha256:023cb26ec21ece8dc3907c0e8320058b2e0cb3c55cf9564da612bc325bed5e64"},
{file = "MarkupSafe-2.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:984d76483eb32f1bcb536dc27e4ad56bba4baa70be32fa87152832cdd9db0833"},
{file = "MarkupSafe-2.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2ef54abee730b502252bcdf31b10dacb0a416229b72c18b19e24a4509f273d26"},
{file = "MarkupSafe-2.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3c112550557578c26af18a1ccc9e090bfe03832ae994343cfdacd287db6a6ae7"},
{file = "MarkupSafe-2.0.1-cp39-cp39-manylinux1_i686.whl", hash = "sha256:53edb4da6925ad13c07b6d26c2a852bd81e364f95301c66e930ab2aef5b5ddd8"},
{file = "MarkupSafe-2.0.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:f5653a225f31e113b152e56f154ccbe59eeb1c7487b39b9d9f9cdb58e6c79dc5"},
{file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:4efca8f86c54b22348a5467704e3fec767b2db12fc39c6d963168ab1d3fc9135"},
{file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:ab3ef638ace319fa26553db0624c4699e31a28bb2a835c5faca8f8acf6a5a902"},
{file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:f8ba0e8349a38d3001fae7eadded3f6606f0da5d748ee53cc1dab1d6527b9509"},
{file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c47adbc92fc1bb2b3274c4b3a43ae0e4573d9fbff4f54cd484555edbf030baf1"},
{file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:37205cac2a79194e3750b0af2a5720d95f786a55ce7df90c3af697bfa100eaac"},
{file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1f2ade76b9903f39aa442b4aadd2177decb66525062db244b35d71d0ee8599b6"},
{file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4296f2b1ce8c86a6aea78613c34bb1a672ea0e3de9c6ba08a960efe0b0a09047"},
{file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f02365d4e99430a12647f09b6cc8bab61a6564363f313126f775eb4f6ef798e"},
{file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5b6d930f030f8ed98e3e6c98ffa0652bdb82601e7a016ec2ab5d7ff23baa78d1"},
{file = "MarkupSafe-2.0.1-cp39-cp39-win32.whl", hash = "sha256:10f82115e21dc0dfec9ab5c0223652f7197feb168c940f3ef61563fc2d6beb74"},
{file = "MarkupSafe-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:693ce3f9e70a6cf7d2fb9e6c9d8b204b6b39897a2c4a1aa65728d5ac97dcc1d8"},
{file = "MarkupSafe-2.0.1.tar.gz", hash = "sha256:594c67807fb16238b30c44bdf74f36c02cdf22d1c8cda91ef8a0ed8dabf5620a"},
]
more-itertools = [
{file = "more-itertools-8.13.0.tar.gz", hash = "sha256:a42901a0a5b169d925f6f217cd5a190e32ef54360905b9c39ee7db5313bfec0f"},
{file = "more_itertools-8.13.0-py3-none-any.whl", hash = "sha256:c5122bffc5f104d37c1626b8615b511f3427aa5389b94d61e5ef8236bfbc3ddb"},
]
packaging = [
{file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"},
{file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"},
]
pluggy = [
{file = "pluggy-0.13.1-py2.py3-none-any.whl", hash = "sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d"},
{file = "pluggy-0.13.1.tar.gz", hash = "sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0"},
]
productmd = [
{file = "productmd-1.33-py3-none-any.whl", hash = "sha256:467dfeb84e74834b6a65508536ccd8ec2d81c24a0ecee5e77d2c358e97eae164"},
{file = "productmd-1.33.tar.gz", hash = "sha256:aaf49bdd2a5cb97f7c6b5011f669dbed153efc7bc61e6935fa796a1b94d16b7e"},
]
py = [
{file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"},
{file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"},
]
pyparsing = [
{file = "pyparsing-3.0.9-py3-none-any.whl", hash = "sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc"},
{file = "pyparsing-3.0.9.tar.gz", hash = "sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb"},
]
pytest = [
{file = "pytest-5.4.3-py3-none-any.whl", hash = "sha256:5c0db86b698e8f170ba4582a492248919255fcd4c79b1ee64ace34301fb589a1"},
{file = "pytest-5.4.3.tar.gz", hash = "sha256:7979331bfcba207414f5e1263b5a0f8f521d0f457318836a7355531ed1a4c7d8"},
]
python-dateutil = [
{file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"},
{file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"},
]
pyyaml = [
{file = "PyYAML-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53"},
{file = "PyYAML-6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c"},
{file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc"},
{file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b"},
{file = "PyYAML-6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5"},
{file = "PyYAML-6.0-cp310-cp310-win32.whl", hash = "sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513"},
{file = "PyYAML-6.0-cp310-cp310-win_amd64.whl", hash = "sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a"},
{file = "PyYAML-6.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86"},
{file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f"},
{file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92"},
{file = "PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4"},
{file = "PyYAML-6.0-cp36-cp36m-win32.whl", hash = "sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293"},
{file = "PyYAML-6.0-cp36-cp36m-win_amd64.whl", hash = "sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57"},
{file = "PyYAML-6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c"},
{file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0"},
{file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4"},
{file = "PyYAML-6.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9"},
{file = "PyYAML-6.0-cp37-cp37m-win32.whl", hash = "sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737"},
{file = "PyYAML-6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d"},
{file = "PyYAML-6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b"},
{file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba"},
{file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34"},
{file = "PyYAML-6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287"},
{file = "PyYAML-6.0-cp38-cp38-win32.whl", hash = "sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78"},
{file = "PyYAML-6.0-cp38-cp38-win_amd64.whl", hash = "sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07"},
{file = "PyYAML-6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b"},
{file = "PyYAML-6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174"},
{file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803"},
{file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3"},
{file = "PyYAML-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0"},
{file = "PyYAML-6.0-cp39-cp39-win32.whl", hash = "sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb"},
{file = "PyYAML-6.0-cp39-cp39-win_amd64.whl", hash = "sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c"},
{file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"},
]
requests = [
{file = "requests-2.28.0-py3-none-any.whl", hash = "sha256:bc7861137fbce630f17b03d3ad02ad0bf978c844f3536d0edda6499dafce2b6f"},
{file = "requests-2.28.0.tar.gz", hash = "sha256:d568723a7ebd25875d8d1eaf5dfa068cd2fc8194b2e483d7b1f7c81918dbec6b"},
]
rpm-py-installer = [
{file = "rpm-py-installer-1.1.0.tar.gz", hash = "sha256:66e5f4f9247752ed386345642683103afaee50fb16928878a204bc12504b9bbe"},
]
s3transfer = [
{file = "s3transfer-0.6.0-py3-none-any.whl", hash = "sha256:06176b74f3a15f61f1b4f25a1fc29a4429040b7647133a463da8fa5bd28d5ecd"},
{file = "s3transfer-0.6.0.tar.gz", hash = "sha256:2ed07d3866f523cc561bf4a00fc5535827981b117dd7876f036b0c1aca42c947"},
]
six = [
{file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"},
{file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"},
]
typing-extensions = [
{file = "typing_extensions-4.2.0-py3-none-any.whl", hash = "sha256:6657594ee297170d19f67d55c05852a874e7eb634f4f753dbd667855e07c1708"},
{file = "typing_extensions-4.2.0.tar.gz", hash = "sha256:f1c24655a0da0d1b67f07e17a5e6b2a105894e6824b92096378bb3668ef02376"},
]
urllib3 = [
{file = "urllib3-1.26.9-py2.py3-none-any.whl", hash = "sha256:44ece4d53fb1706f667c9bd1c648f5469a2ec925fcf3a776667042d645472c14"},
{file = "urllib3-1.26.9.tar.gz", hash = "sha256:aabaf16477806a5e1dd19aa41f8c2b7950dd3c746362d7e3223dbe6de6ac448e"},
]
wcwidth = [
{file = "wcwidth-0.2.5-py2.py3-none-any.whl", hash = "sha256:beb4802a9cebb9144e99086eff703a642a13d6a0052920003a230f3294bbe784"},
{file = "wcwidth-0.2.5.tar.gz", hash = "sha256:c4d647b99872929fdb7bdcaa4fbe7f01413ed3d98077df798530e5b04f116c83"},
]
xmltodict = [
{file = "xmltodict-0.13.0-py2.py3-none-any.whl", hash = "sha256:aa89e8fd76320154a40d19a0df04a4695fb9dc5ba977cbb68ab3e4eb225e7852"},
{file = "xmltodict-0.13.0.tar.gz", hash = "sha256:341595a488e3e01a85a9d8911d8912fd922ede5fecc4dce437eb4b6c8d037e56"},
]
zipp = [
{file = "zipp-3.8.0-py3-none-any.whl", hash = "sha256:c4f6e5bbf48e74f7a38e7cc5b0480ff42b0ae5178957d564d18932525d5cf099"},
{file = "zipp-3.8.0.tar.gz", hash = "sha256:56bf8aadb83c24db6c4b577e13de374ccfb67da2078beba1d037c17980bf43ad"},
]
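The sha256 digests above are what pip and Poetry compare downloaded artifacts against before installing them. A minimal sketch of the same verification, assuming a wheel has already been downloaded locally; the file name is taken from the lock entries above, and the expected digest is a placeholder to be filled in from the matching entry:

# Illustrative integrity check, mirroring what the lock file hashes are used for.
import hashlib

def sha256_of(path):
    digest = hashlib.sha256()
    with open(path, "rb") as handle:
        for chunk in iter(lambda: handle.read(65536), b""):
            digest.update(chunk)
    return digest.hexdigest()

expected = "paste the sha256 value from the matching lock entry above"
wheel = "zipp-3.8.0-py3-none-any.whl"  # example artifact name from this lock file

if sha256_of(wheel) == expected:
    print("hash matches the lock file entry")
else:
    print("hash mismatch, refusing to install")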
34
iso/empanadas/pyproject.toml
Normal file
@@ -0,0 +1,34 @@
[tool.poetry]
name = "empanadas"
version = "0.1.0"
description = "hand crafted ISOs with love and spice"
authors = ["Louis Abel <louis@rockylinux.org>", "Neil Hanlon <neil@rockylinux.org>"]

[tool.poetry.dependencies]
python = ">=3.7,<4"
rpm-py-installer = "~1.1.0"
MarkupSafe = "<=2.0.1"
PyYAML = "~6.0"
Jinja2 = "~2"
productmd = "~1.33"
importlib-resources = "^5.8.0"
boto3 = "^1.24.12"
xmltodict = "^0.13.0"
requests = "^2.28.0"
kobo = "^0.24.1"

[tool.poetry.dev-dependencies]
pytest = "~5"

[tool.poetry.scripts]
sync_from_peridot = "empanadas.scripts.sync_from_peridot:run"
sync_from_peridot_test = "empanadas.scripts.sync_from_peridot_test:run"
sync_sig = "empanadas.scripts.sync_sig:run"
build-iso = "empanadas.scripts.build_iso:run"
build-iso-extra = "empanadas.scripts.build_iso_extra:run"
pull-unpack-tree = "empanadas.scripts.pull_unpack_tree:run"
launch-builds = "empanadas.scripts.launch_builds:run"

[build-system]
requires = ["poetry-core>=1.0.0"]
build-backend = "poetry.core.masonry.api"
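Each entry under [tool.poetry.scripts] maps a console command to a module:function target, so after "poetry install" the build-iso command calls empanadas.scripts.build_iso:run. A hypothetical sketch of what such an entry-point module could look like; this is not the actual empanadas code, which is not shown in this diff:

# Hypothetical empanadas/scripts/build_iso.py, for illustration only.
import argparse


def run():
    # Entry point wired up by [tool.poetry.scripts]; parses args and starts a build.
    parser = argparse.ArgumentParser(description="Build Rocky Linux ISOs")
    parser.add_argument("--release", default="9", help="major release to build")
    args = parser.parse_args()
    print("building ISOs for Rocky Linux", args.release)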
0
iso/empanadas/tests/__init__.py
Normal file
5
iso/empanadas/tests/test_empanadas.py
Normal file
@@ -0,0 +1,5 @@
from empanadas import __version__


def test_version():
    assert __version__ == '0.1.0'
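With pytest = "~5" pinned as a dev dependency in the pyproject above, this version check is presumably collected and run by pytest. A minimal sketch of driving it programmatically; the test path is an assumption based on the file layout shown here:

# Illustrative test runner; assumes the tests live under iso/empanadas/tests.
import sys

import pytest

# pytest.main() returns an exit code, which is propagated to the shell.
sys.exit(pytest.main(["-q", "iso/empanadas/tests"]))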
BIN
iso/py/__pycache__/common.cpython-310.pyc
Normal file
Binary file not shown.
@@ -1,7 +0,0 @@
#!/usr/bin/env python3
# builds ISO's

import argparse
from common import *
from util import Checks
from util import IsoBuild
Binary file not shown.
@@ -1,16 +0,0 @@
#!/usr/bin/env python3

# This is a testing script to ensure the RepoSync class is working as intended.

from common import *
import argparse
from util import Checks
from util import RepoSync

rlvars = rldict['9']
r = Checks(rlvars, config['arch'])
r.check_valid_arch()

#a = RepoSync(rlvars, config, major="9", repo="ResilientStorage", parallel=True, ignore_debug=False, ignore_source=False)
a = RepoSync(rlvars, config, major="9", repo="ResilientStorage", parallel=True, ignore_debug=False, ignore_source=False)
a.run()
@@ -1,22 +0,0 @@
"""
Imports all of our classes for this local module
"""

from .check import (
    Checks,
)

from .dnf_utils import (
    RepoSync,
    SigRepoSync
)

from .iso_utils import (
    IsoBuild,
    LiveBuild
)

__all__ = [
    'Checks',
    'RepoSync'
]
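One detail of the removed util/__init__.py worth noting: __all__ listed only Checks and RepoSync, so a wildcard import would not expose SigRepoSync, IsoBuild, or LiveBuild even though the module imported them; explicit imports, as the removed build script used, still reach them. A small illustration of that Python behavior, as a hypothetical consumer snippet:

# Illustration only; assumes the old iso/py layout with the util package on the path.
from util import *          # __all__ limits this to Checks and RepoSync
from util import IsoBuild   # explicit import still reaches the other classes

print(Checks, RepoSync, IsoBuild)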
BIN
iso/py/util/__pycache__/__init__.cpython-310.pyc
Normal file
Binary file not shown.
BIN
iso/py/util/__pycache__/check.cpython-310.pyc
Normal file
Binary file not shown.
BIN
iso/py/util/__pycache__/dnf_utils.cpython-310.pyc
Normal file
Binary file not shown.
BIN
iso/py/util/__pycache__/iso_utils.cpython-310.pyc
Normal file
Binary file not shown.
@@ -1,152 +0,0 @@
"""
Builds ISO's for Rocky Linux.

Louis Abel <label AT rockylinux.org>
"""

import logging
import sys
import os
import os.path
import subprocess
import shlex
import time
import re
from common import Color

class IsoBuild:
    """
    This helps us build the generic ISO's for a Rocky Linux release. In
    particular, this is for the boot and dvd images.

    Live images are built in another class.
    """
    def __init__(
            self,
            rlvars,
            config,
            major,
            host=None,
            image=None,
            arch=None,
            logger=None
    ):
        self.arch = arch
        self.image = image
        self.host = host
        # Relevant config items
        self.major_version = major
        self.date_stamp = config['date_stamp']
        self.compose_root = config['compose_root']
        self.compose_base = config['compose_root'] + "/" + major
        self.iso_base = config['compose_root'] + "/" + major + "/isos"
        self.current_arch = config['arch']
        self.extra_files = rlvars['extra_files']

        # Relevant major version items
        self.revision = rlvars['revision'] + "-" + rlvars['rclvl']
        self.arches = rlvars['allowed_arches']

        self.staging_dir = os.path.join(
                config['staging_root'],
                config['category_stub'],
                self.revision
        )

        self.compose_latest_dir = os.path.join(
                config['compose_root'],
                major,
                "latest-Rocky-{}".format(major)
        )

        self.compose_latest_sync = os.path.join(
                self.compose_latest_dir,
                "compose"
        )

        self.compose_log_dir = os.path.join(
                self.compose_latest_dir,
                "work/logs"
        )

        # This is temporary for now.
        if logger is None:
            self.log = logging.getLogger("iso")
            self.log.setLevel(logging.INFO)
            handler = logging.StreamHandler(sys.stdout)
            handler.setLevel(logging.INFO)
            formatter = logging.Formatter(
                    '%(asctime)s :: %(name)s :: %(message)s',
                    '%Y-%m-%d %H:%M:%S'
            )
            handler.setFormatter(formatter)
            self.log.addHandler(handler)

        self.log.info('iso build init')
        self.log.info(self.revision)

    def run(self):
        work_root = os.path.join(
                self.compose_latest_dir,
                'work'
        )
        sync_root = self.compose_latest_sync

        log_root = os.path.join(
                work_root,
                "logs"
        )

        self.iso_build(
                sync_root,
                work_root,
                log_root,
                self.arch,
                self.host
        )

        self.log.info('Compose repo directory: %s' % sync_root)
        self.log.info('ISO Build Logs: %s' % log_root)
        self.log.info('ISO Build completed.')

    def iso_build(self, sync_root, work_root, log_root, arch, host):
        """
        Calls out the ISO builds to the individual hosts listed in the map.
        Each architecture is expected to build their own ISOs, similar to
        runroot operations of koji and pungi.

        It IS possible to run locally, but that would mean this only builds
        ISOs for the architecture of the running machine. Please keep this in
        mind when stating host=local.
        """
        # Check for local build, build accordingly
        # Check for arch specific build, build accordingly
        # local AND arch cannot be used together, local supersedes. print
        # warning.
        local_only = False
        if 'local' in self.host:
            local_only = True

        arch = self.arch.copy()
        if local_only and self.arch:
            self.log.warn('You cannot set local build AND an architecture.')
            self.log.warn('The architecture %s will be set' % self.current_arch)
            arch = self.current_arch

    def iso_build_local(self, sync_root, work_root, log_root):
        """
        Local iso builds only. Architecture is locked.
        """
        print()

    def iso_build_remote(self, sync_root, work_root, log_root, arch):
        """
        Remote ISO builds. Architecture is all or single.
        """
        print()


class LiveBuild:
    """
    This helps us build the live images for Rocky Linux.
    """
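For context on the class removed above, here is a rough usage sketch based solely on the constructor and run() signatures shown; every config and rlvars value is a stand-in, not the real toolkit configuration:

# Hypothetical driver; values are placeholders chosen to satisfy the constructor above.
config = {
    'date_stamp': '20220620',
    'compose_root': '/mnt/compose',
    'staging_root': '/mnt/repos-staging',
    'category_stub': 'mirror/pub/rocky',
    'arch': 'x86_64',
}
rlvars = {
    'revision': '9.0',
    'rclvl': 'RC1',
    'allowed_arches': ['x86_64', 'aarch64'],
    'extra_files': {},
}

# iso_build() copies self.arch, so the architecture is passed as a list here.
builder = IsoBuild(rlvars, config, major="9", host="local", arch=["x86_64"])
builder.run()  # logs the compose repo directory and the ISO build log location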
@@ -169,6 +169,6 @@ EOF
/bin/cp "${TREEINFO_VAR}" "${PRISTINE_TREE}"
}

export -f treeinfoFixer
export -f treeinfoModder
export -f treeinfoModderKickstart
#export -f treeinfoFixer
#export -f treeinfoModder
#export -f treeinfoModderKickstart