forked from sig_core/toolkit

commit 171cb7f2c7
chore: merge devel -> main
@@ -57,7 +57,7 @@ RUN rm -rf /etc/yum.repos.d/*.repo /get_arch
 RUN pip install awscli

-ARG BRANCH r9
+ARG BRANCH=r9
 RUN git clone https://git.resf.org/sig_core/kickstarts.git --branch $BRANCH /kickstarts

 RUN pip install 'git+https://git.resf.org/sig_core/toolkit.git@devel#egg=empanadas&subdirectory=iso/empanadas'
@@ -1 +1 @@
-__version__ = '0.2.0'
+__version__ = '0.4.0'
iso/empanadas/empanadas/configs/el8-beta.yaml (new file, 137 lines):

---
'8':
  fullname: 'Rocky Linux 8'
  revision: '8.7'
  rclvl: 'RC1'
  major: '8'
  minor: '7'
  profile: '8'
  bugurl: 'https://bugs.rockylinux.org'
  checksum: 'sha256'
  fedora_major: '20'
  allowed_arches:
    - x86_64
    - aarch64
  provide_multilib: False
  project_id: '26694529-26cd-44bd-bc59-1c1195364322'
  repo_symlinks:
    devel: 'Devel'
    NFV: 'nfv'
  renames:
    all: 'devel'
  all_repos:
    - 'BaseOS'
    - 'AppStream'
    - 'PowerTools'
    - 'HighAvailability'
    - 'ResilientStorage'
    - 'RT'
    - 'NFV'
    - 'extras'
    - 'devel'
    - 'plus'
  structure:
    packages: 'os/Packages'
    repodata: 'os/repodata'
  iso_map:
    xorrisofs: False
    iso_level: False
    images:
      dvd:
        disc: True
        variant: 'AppStream'
        repos:
          - 'BaseOS'
          - 'AppStream'
      minimal:
        disc: True
        isoskip: True
        repos:
          - 'minimal'
          - 'BaseOS'
        variant: 'minimal'
      BaseOS:
        disc: False
        isoskip: True
        variant: 'BaseOS'
        repos:
          - 'BaseOS'
          - 'AppStream'
    lorax:
      repos:
        - 'BaseOS'
        - 'AppStream'
      variant: 'BaseOS'
      lorax_removes:
        - 'libreport-rhel-anaconda-bugzilla'
      required_pkgs:
        - 'lorax'
        - 'genisoimage'
        - 'isomd5sum'
        - 'lorax-templates-rhel'
        - 'lorax-templates-generic'
        - 'xorriso'
  cloudimages:
    images:
      EC2:
        format: raw
      GenericCloud:
        format: qcow2
  livemap:
    git_repo: 'https://git.resf.org/sig_core/kickstarts.git'
    branch: 'r8'
    ksentry:
      Workstation: rocky-live-workstation.ks
      Workstation-Lite: rocky-live-workstation-lite.ks
      XFCE: rocky-live-xfce.ks
      KDE: rocky-live-kde.ks
    allowed_arches:
      - x86_64
    required_pkgs:
      - 'lorax-lmc-novirt'
      - 'vim-minimal'
      - 'pykickstart'
      - 'git'
  variantmap:
    git_repo: 'https://git.rockylinux.org/rocky/pungi-rocky.git'
    branch: 'r8-beta'
    git_raw_path: 'https://git.rockylinux.org/rocky/pungi-rocky/-/raw/r8-beta/'
  repoclosure_map:
    arches:
      x86_64: '--forcearch=x86_64 --arch=x86_64 --arch=athlon --arch=i686 --arch=i586 --arch=i486 --arch=i386 --arch=noarch'
      aarch64: '--forcearch=aarch64 --arch=aarch64 --arch=noarch'
      ppc64le: '--forcearch=ppc64le --arch=ppc64le --arch=noarch'
      s390x: '--forcearch=s390x --arch=s390x --arch=noarch'
    repos:
      BaseOS: []
      AppStream:
        - BaseOS
      PowerTools:
        - BaseOS
        - AppStream
      HighAvailability:
        - BaseOS
        - AppStream
      ResilientStorage:
        - BaseOS
        - AppStream
      RT:
        - BaseOS
        - AppStream
      NFV:
        - BaseOS
        - AppStream
  extra_files:
    git_repo: 'https://git.rockylinux.org/staging/src/rocky-release.git'
    git_raw_path: 'https://git.rockylinux.org/staging/src/rocky-release/-/raw/r8/'
    branch: 'r8'
    gpg:
      stable: 'SOURCES/RPM-GPG-KEY-rockyofficial'
      testing: 'SOURCES/RPM-GPG-KEY-rockytesting'
    list:
      - 'SOURCES/COMMUNITY-CHARTER'
      - 'SOURCES/EULA'
      - 'SOURCES/LICENSE'
      - 'SOURCES/RPM-GPG-KEY-rockyofficial'
      - 'SOURCES/RPM-GPG-KEY-rockytesting'
...
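As a quick illustration of how a profile config like the one above is consumed, here is a minimal sketch that loads it with PyYAML and picks the '8' profile. The helper function and printed keys are illustrative only and are not empanadas' actual loader.

    # Minimal sketch, not part of this commit: read a release profile with PyYAML.
    import yaml

    def load_profile(path: str, profile: str) -> dict:
        with open(path, "r") as f:
            data = yaml.safe_load(f)
        return data[profile]

    rlvars = load_profile("iso/empanadas/empanadas/configs/el8-beta.yaml", "8")
    print(rlvars["revision"], rlvars["rclvl"])          # 8.7 RC1
    print(rlvars["iso_map"]["lorax"]["required_pkgs"])  # ['lorax', 'genisoimage', ...]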
@@ -1,21 +1,24 @@
 ---
 '8':
   fullname: 'Rocky Linux 8'
-  revision: '8.6'
+  revision: '8.7'
-  rclvl: 'RC2'
+  rclvl: 'RC1'
   major: '8'
-  minor: '6'
+  minor: '7'
   profile: '8'
   bugurl: 'https://bugs.rockylinux.org'
+  checksum: 'sha256'
+  fedora_major: '20'
   allowed_arches:
     - x86_64
     - aarch64
   provide_multilib: False
-  project_id: ''
+  project_id: 'e9cfc87c-d2d2-42d5-a121-852101f1a966'
   repo_symlinks:
     devel: 'Devel'
     NFV: 'nfv'
-  renames: {}
+  renames:
+    all: 'devel'
   all_repos:
     - 'BaseOS'
     - 'AppStream'
@@ -27,23 +30,6 @@
     - 'extras'
     - 'devel'
     - 'plus'
-    - 'rockyrpi'
-  no_comps_or_groups:
-    - 'extras'
-    - 'devel'
-    - 'plus'
-    - 'rockyrpi'
-  comps_or_groups:
-    - 'BaseOS'
-    - 'AppStream'
-    - 'PowerTools'
-    - 'HighAvailability'
-    - 'ResilientStorage'
-    - 'RT'
-    - 'NFV'
-  has_modules:
-    - 'AppStream'
-    - 'PowerTools'
   structure:
     packages: 'os/Packages'
     repodata: 'os/repodata'
@@ -93,7 +79,7 @@
       format: qcow2
   livemap:
     git_repo: 'https://git.resf.org/sig_core/kickstarts.git'
-    branch: 'r9'
+    branch: 'r8'
     ksentry:
       Workstation: rocky-live-workstation.ks
       Workstation-Lite: rocky-live-workstation-lite.ks
iso/empanadas/empanadas/configs/el8lh.yaml (new file, 137 lines):

---
'8':
  fullname: 'Rocky Linux 8'
  revision: '8.8'
  rclvl: 'RC1'
  major: '8'
  minor: '8'
  profile: '8'
  bugurl: 'https://bugs.rockylinux.org'
  checksum: 'sha256'
  fedora_major: '20'
  allowed_arches:
    - x86_64
    - aarch64
  provide_multilib: False
  project_id: '3b0e9ec7-0679-4176-b253-8528eb3255eb'
  repo_symlinks:
    devel: 'Devel'
    NFV: 'nfv'
  renames:
    all: 'devel'
  all_repos:
    - 'BaseOS'
    - 'AppStream'
    - 'PowerTools'
    - 'HighAvailability'
    - 'ResilientStorage'
    - 'RT'
    - 'NFV'
    - 'extras'
    - 'devel'
    - 'plus'
  structure:
    packages: 'os/Packages'
    repodata: 'os/repodata'
  iso_map:
    xorrisofs: False
    iso_level: False
    images:
      dvd:
        disc: True
        variant: 'AppStream'
        repos:
          - 'BaseOS'
          - 'AppStream'
      minimal:
        disc: True
        isoskip: True
        repos:
          - 'minimal'
          - 'BaseOS'
        variant: 'minimal'
      BaseOS:
        disc: False
        isoskip: True
        variant: 'BaseOS'
        repos:
          - 'BaseOS'
          - 'AppStream'
    lorax:
      repos:
        - 'BaseOS'
        - 'AppStream'
      variant: 'BaseOS'
      lorax_removes:
        - 'libreport-rhel-anaconda-bugzilla'
      required_pkgs:
        - 'lorax'
        - 'genisoimage'
        - 'isomd5sum'
        - 'lorax-templates-rhel'
        - 'lorax-templates-generic'
        - 'xorriso'
  cloudimages:
    images:
      EC2:
        format: raw
      GenericCloud:
        format: qcow2
  livemap:
    git_repo: 'https://git.resf.org/sig_core/kickstarts.git'
    branch: 'r8'
    ksentry:
      Workstation: rocky-live-workstation.ks
      Workstation-Lite: rocky-live-workstation-lite.ks
      XFCE: rocky-live-xfce.ks
      KDE: rocky-live-kde.ks
    allowed_arches:
      - x86_64
    required_pkgs:
      - 'lorax-lmc-novirt'
      - 'vim-minimal'
      - 'pykickstart'
      - 'git'
  variantmap:
    git_repo: 'https://git.rockylinux.org/rocky/pungi-rocky.git'
    branch: 'r8s'
    git_raw_path: 'https://git.rockylinux.org/rocky/pungi-rocky/-/raw/r8s/'
  repoclosure_map:
    arches:
      x86_64: '--forcearch=x86_64 --arch=x86_64 --arch=athlon --arch=i686 --arch=i586 --arch=i486 --arch=i386 --arch=noarch'
      aarch64: '--forcearch=aarch64 --arch=aarch64 --arch=noarch'
      ppc64le: '--forcearch=ppc64le --arch=ppc64le --arch=noarch'
      s390x: '--forcearch=s390x --arch=s390x --arch=noarch'
    repos:
      BaseOS: []
      AppStream:
        - BaseOS
      PowerTools:
        - BaseOS
        - AppStream
      HighAvailability:
        - BaseOS
        - AppStream
      ResilientStorage:
        - BaseOS
        - AppStream
      RT:
        - BaseOS
        - AppStream
      NFV:
        - BaseOS
        - AppStream
  extra_files:
    git_repo: 'https://git.rockylinux.org/staging/src/rocky-release.git'
    git_raw_path: 'https://git.rockylinux.org/staging/src/rocky-release/-/raw/r8/'
    branch: 'r8'
    gpg:
      stable: 'SOURCES/RPM-GPG-KEY-rockyofficial'
      testing: 'SOURCES/RPM-GPG-KEY-rockytesting'
    list:
      - 'SOURCES/COMMUNITY-CHARTER'
      - 'SOURCES/EULA'
      - 'SOURCES/LICENSE'
      - 'SOURCES/RPM-GPG-KEY-rockyofficial'
      - 'SOURCES/RPM-GPG-KEY-rockytesting'
...
@@ -8,13 +8,14 @@
   profile: '9-beta'
   bugurl: 'https://bugs.rockylinux.org'
   checksum: 'sha256'
+  fedora_major: '20'
   allowed_arches:
     - x86_64
     - aarch64
     - ppc64le
     - s390x
   provide_multilib: True
-  project_id: ''
+  project_id: '0048077b-1573-4cb7-8ba7-cce823857ba5'
   repo_symlinks:
     NFV: 'nfv'
   renames:
@@ -87,8 +88,10 @@
       Workstation-Lite: rocky-live-workstation-lite.ks
       XFCE: rocky-live-xfce.ks
       KDE: rocky-live-kde.ks
+      MATE: rocky-live-mate.ks
     allowed_arches:
       - x86_64
+      - aarch64
     required_pkgs:
       - 'lorax-lmc-novirt'
       - 'vim-minimal'
@@ -8,6 +8,7 @@
   profile: '9'
   bugurl: 'https://bugs.rockylinux.org'
   checksum: 'sha256'
+  fedora_major: '20'
   allowed_arches:
     - x86_64
     - aarch64
@@ -88,8 +89,10 @@
       Workstation-Lite: rocky-live-workstation-lite.ks
       XFCE: rocky-live-xfce.ks
       KDE: rocky-live-kde.ks
+      MATE: rocky-live-mate.ks
     allowed_arches:
       - x86_64
+      - aarch64
     required_pkgs:
       - 'lorax-lmc-novirt'
      - 'vim-minimal'
iso/empanadas/empanadas/configs/el9alt.yaml (new file, 58 lines):

# This is specifically for secondary/tertiary architectures
---
'9altarch':
  fullname: 'Rocky Linux 9.0'
  revision: '9.0'
  rclvl: 'RC2'
  major: '9'
  minor: '0'
  profile: '9'
  bugurl: 'https://bugs.rockylinux.org'
  checksum: 'sha256'
  fedora_major: '20'
  allowed_arches:
    - armv7hl
    - riscv64
  provide_multilib: False
  project_id: ''
  renames:
    all: 'devel'
  all_repos:
    - 'all'
    - 'BaseOS'
    - 'AppStream'
    - 'CRB'
    - 'extras'
    - 'plus'
  structure:
    packages: 'os/Packages'
    repodata: 'os/repodata'
  iso_map: {}
  livemap: {}
  repoclosure_map:
    arches:
      armv7hl: '--forcearch=armv7hl --arch=noarch'
      riscv64: '--forcearch=riscv64 --arch=noarch'
    repos:
      devel: []
      BaseOS: []
      AppStream:
        - BaseOS
      CRB:
        - BaseOS
        - AppStream
  extra_files:
    git_repo: 'https://git.rockylinux.org/staging/src/rocky-release.git'
    git_raw_path: 'https://git.rockylinux.org/staging/src/rocky-release/-/raw/r9/'
    branch: 'r9'
    gpg:
      stable: 'SOURCES/RPM-GPG-KEY-Rocky-9'
      testing: 'SOURCES/RPM-GPG-KEY-Rocky-9-Testing'
    list:
      - 'SOURCES/Contributors'
      - 'SOURCES/COMMUNITY-CHARTER'
      - 'SOURCES/EULA'
      - 'SOURCES/LICENSE'
      - 'SOURCES/RPM-GPG-KEY-Rocky-9'
      - 'SOURCES/RPM-GPG-KEY-Rocky-9-Testing'
...
@@ -1,20 +1,21 @@
 ---
 '9-lookahead':
-  fullname: 'Rocky Linux 9.1'
+  fullname: 'Rocky Linux 9.2'
-  revision: '9.1'
+  revision: '9.2'
   rclvl: 'LH1'
   major: '9'
-  minor: '1'
+  minor: '2'
   profile: '9-lookahead'
   bugurl: 'https://bugs.rockylinux.org'
   checksum: 'sha256'
+  fedora_major: '20'
   allowed_arches:
     - x86_64
     - aarch64
     - ppc64le
     - s390x
   provide_multilib: True
-  project_id: ''
+  project_id: '6794b5a8-290b-4d0d-ad5a-47164329cbb0'
   repo_symlinks:
     NFV: 'nfv'
   renames:
@@ -87,8 +88,10 @@
       Workstation-Lite: rocky-live-workstation-lite.ks
       XFCE: rocky-live-xfce.ks
       KDE: rocky-live-kde.ks
+      MATE: rocky-live-mate.ks
     allowed_arches:
       - x86_64
+      - aarch64
     required_pkgs:
       - 'lorax-lmc-novirt'
       - 'vim-minimal'
@@ -88,6 +88,7 @@
       KDE: rocky-live-kde.ks
     allowed_arches:
       - x86_64
+      - aarch64
     required_pkgs:
       - 'lorax-lmc-novirt'
       - 'vim-minimal'
@@ -17,6 +17,8 @@ parser.add_argument('--logger', type=str)
 parser.add_argument('--extra-iso', type=str, help="Granular choice in which iso is built")
 parser.add_argument('--extra-iso-mode', type=str, default='local')
 parser.add_argument('--hashed', action='store_true')
+parser.add_argument('--updated-image', action='store_true')
+parser.add_argument('--image-increment', type=str, default='0')
 results = parser.parse_args()
 rlvars = rldict[results.release]
 major = rlvars['major']
@@ -32,7 +34,9 @@ a = IsoBuild(
     extra_iso_mode=results.extra_iso_mode,
     compose_dir_is_here=results.local_compose,
     hashed=results.hashed,
-    logger=results.logger
+    logger=results.logger,
+    updated_image=results.updated_image,
+    image_increment=results.image_increment
 )

 def run():
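For context, a minimal sketch of what these two new flags end up producing. It mirrors the IsoBuild changes later in this diff (time.strftime("%Y%m%d") joined with the increment, prefixed with a dash in the ISO name); the standalone function here is illustrative only, not empanadas code.

    # Illustrative only: how --updated-image / --image-increment become the
    # "-YYYYMMDD.N" suffix that IsoBuild inserts into rebuilt ISO names.
    import time

    def updated_image_suffix(updated_image: bool, image_increment: str = '0') -> str:
        if not updated_image:
            return ''
        return '-' + time.strftime("%Y%m%d", time.localtime()) + '.' + image_increment

    # e.g. "--updated-image --image-increment 1" on 2022-07-20 -> "-20220720.1"
    print(updated_image_suffix(True, '1'))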
@@ -14,7 +14,8 @@ parser = argparse.ArgumentParser(description="Peridot Sync and Compose")

 # All of our options
 parser.add_argument('--release', type=str, help="Major Release Version or major-type (eg 9-beta)", required=True)
-parser.add_argument('--symlink', action='store_true', help="symlink")
+parser.add_argument('--sig', type=str, help="SIG Name if applicable")
+parser.add_argument('--symlink', action='store_true', help="symlink to latest")
 parser.add_argument('--logger', type=str)

 # Parse them
@@ -48,6 +49,9 @@ def run():
     profile = rlvars['profile']
     logger = log

+    if results.sig is not None:
+        shortname = 'SIG-' + results.sig
+
     generated_dir = Shared.generate_compose_dirs(
             compose_base,
             shortname,
iso/empanadas/empanadas/scripts/peridot_repoclosure.py (new file, 38 lines):

# This is for doing repoclosures upstream

import argparse

from empanadas.common import *
from empanadas.util import Checks
from empanadas.util import RepoSync

# Start up the parser baby
parser = argparse.ArgumentParser(description="Peridot Upstream Repoclosure")

# All of our options
parser.add_argument('--release', type=str, help="Major Release Version or major-type (eg 9-beta)", required=True)
parser.add_argument('--simple', action='store_false')
parser.add_argument('--enable-repo-gpg-check', action='store_true')
parser.add_argument('--hashed', action='store_true')
parser.add_argument('--logger', type=str)

# Parse them
results = parser.parse_args()
rlvars = rldict[results.release]
major = rlvars['major']

r = Checks(rlvars, config['arch'])
r.check_valid_arch()

a = RepoSync(
    rlvars,
    config,
    major=major,
    hashed=results.hashed,
    parallel=results.simple,
    repo_gpg_check=results.enable_repo_gpg_check,
    logger=results.logger,
)

def run():
    a.run_upstream_repoclosure()
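One subtlety worth spelling out: --simple is declared with action='store_false' and is fed to RepoSync as parallel, so the default (flag omitted) is the parallel mode and passing --simple turns it off; the same pattern carries the "I want podman to be the default option" comment in the reposync scripts below. A tiny check of that behavior, illustrative only:

    # Illustrative only: store_false means the flag defaults to True.
    import argparse

    p = argparse.ArgumentParser()
    p.add_argument('--simple', action='store_false')
    print(p.parse_args([]).simple)            # True  -> parallel=True (default)
    print(p.parse_args(['--simple']).simple)  # False -> parallel=False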
@@ -9,7 +9,7 @@ from empanadas.util import IsoBuild
 parser = argparse.ArgumentParser(description="ISO Artifact Builder")

 parser.add_argument('--release', type=str, help="Major Release Version", required=True)
-parser.add_argument('--s3', action='store_true', help="Release Candidate")
+parser.add_argument('--s3', action='store_true', help="S3")
 parser.add_argument('--arch', type=str, help="Architecture")
 parser.add_argument('--local-compose', action='store_true', help="Compose Directory is Here")
 parser.add_argument('--force-download', action='store_true', help="Force a download")
@@ -9,7 +9,7 @@ from empanadas.util import IsoBuild
 parser = argparse.ArgumentParser(description="ISO Artifact Builder")

 parser.add_argument('--release', type=str, help="Major Release Version", required=True)
-parser.add_argument('--s3', action='store_true', help="Release Candidate")
+parser.add_argument('--s3', action='store_true', help="S3")
 parser.add_argument('--rc', action='store_true', help="Release Candidate")
 parser.add_argument('--arch', type=str, help="Architecture")
 parser.add_argument('--local-compose', action='store_true', help="Compose Directory is Here")
@@ -26,6 +26,9 @@ parser.add_argument('--refresh-treeinfo', action='store_true')
 # I am aware this is confusing, I want podman to be the default option
 parser.add_argument('--simple', action='store_false')
 parser.add_argument('--logger', type=str)
+parser.add_argument('--disable-gpg-check', action='store_false')
+parser.add_argument('--disable-repo-gpg-check', action='store_false')
+parser.add_argument('--clean-old-packages', action='store_true')

 # Parse them
 results = parser.parse_args()
@@ -54,6 +57,9 @@ a = RepoSync(
     logger=results.logger,
     refresh_extra_files=results.refresh_extra_files,
     refresh_treeinfo=results.refresh_treeinfo,
+    gpg_check=results.disable_gpg_check,
+    repo_gpg_check=results.disable_repo_gpg_check,
+    reposync_clean_old=results.clean_old_packages,
 )

 def run():
@@ -15,7 +15,7 @@ parser = argparse.ArgumentParser(description="Peridot Sync and Compose")
 # All of our options
 parser.add_argument('--release', type=str, help="Major Release Version", required=True)
 parser.add_argument('--repo', type=str, help="Repository name")
-parser.add_argument('--sig', type=str, help="SIG name")
+parser.add_argument('--sig', type=str, help="SIG name", required=True)
 parser.add_argument('--arch', type=str, help="Architecture")
 parser.add_argument('--ignore-debug', action='store_true')
 parser.add_argument('--ignore-source', action='store_true')
@@ -25,9 +25,13 @@ parser.add_argument('--hashed', action='store_true')
 parser.add_argument('--dry-run', action='store_true')
 parser.add_argument('--full-run', action='store_true')
 parser.add_argument('--no-fail', action='store_true')
+parser.add_argument('--refresh-extra-files', action='store_true')
 # I am aware this is confusing, I want podman to be the default option
 parser.add_argument('--simple', action='store_false')
 parser.add_argument('--logger', type=str)
+parser.add_argument('--disable-gpg-check', action='store_false')
+parser.add_argument('--disable-repo-gpg-check', action='store_false')
+parser.add_argument('--clean-old-packages', action='store_true')

 # Parse them
 results = parser.parse_args()
@@ -46,6 +50,7 @@ a = SigRepoSync(
     repo=results.repo,
     arch=results.arch,
     ignore_source=results.ignore_source,
+    ignore_debug=results.ignore_debug,
     repoclosure=results.repoclosure,
     skip_all=results.skip_all,
     hashed=results.hashed,
@@ -53,7 +58,11 @@ a = SigRepoSync(
     dryrun=results.dry_run,
     fullrun=results.full_run,
     nofail=results.no_fail,
-    logger=results.logger
+    refresh_extra_files=results.refresh_extra_files,
+    logger=results.logger,
+    gpg_check=results.disable_gpg_check,
+    repo_gpg_check=results.disable_repo_gpg_check,
+    reposync_clean_old=results.clean_old_packages,
 )

@@ -1,17 +1,43 @@
 ---
 altarch:
   '8':
-    rockyrpi:
+    profile: 'altarch'
+    project_id: 'a1aac235-dd66-4d5b-8ff0-87467732f322'
+    repo:
+      altarch-common:
+        allowed_arches:
+          - aarch64
+      altarch-rockyrpi:
         allowed_arches:
           - aarch64
-      project_id: ''
     additional_dirs:
       - 'images'
+    extra_files:
+      git_repo: 'https://git.rockylinux.org/staging/src/rocky-release-altarch.git'
+      git_raw_path: 'https://git.rockylinux.org/staging/src/rocky-release-altarch/-/raw/r8/'
+      branch: 'r8'
+      gpg:
+        stable: 'SOURCES/RPM-GPG-KEY-Rocky-SIG-AltArch'
+      list:
+        - 'SOURCES/RPM-GPG-KEY-Rocky-SIG-AltArch'
   '9':
-    rockyrpi:
+    profile: 'altarch'
+    project_id: '6047887d-a395-4bc7-a0bd-fc1873b5d13d'
+    repo:
+      altarch-common:
+        allowed_arches:
+          - aarch64
+      altarch-rockyrpi:
         allowed_arches:
           - aarch64
-      project_id: ''
     additional_dirs:
       - 'images'
+    extra_files:
+      git_repo: 'https://git.rockylinux.org/staging/src/rocky-release-altarch.git'
+      git_raw_path: 'https://git.rockylinux.org/staging/src/rocky-release-altarch/-/raw/r9/'
+      branch: 'r9'
+      gpg:
+        stable: 'SOURCES/RPM-GPG-KEY-Rocky-SIG-AltArch'
+      list:
+        - 'SOURCES/RPM-GPG-KEY-Rocky-SIG-AltArch'
 ...
@@ -2,8 +2,10 @@
 cloud:
   '8':
     profile: 'cloud'
-    cloud-kernel:
     project_id: 'f91da90d-5bdb-4cf2-80ea-e07f8dae5a5c'
+    addtional_dirs: []
+    repo:
+      cloud-kernel:
     allowed_arches:
       - aarch64
       - x86_64
@@ -11,7 +13,6 @@ cloud:
     allowed_arches:
       - aarch64
       - x86_64
-    project_id: 'f91da90d-5bdb-4cf2-80ea-e07f8dae5a5c'
     extra_files:
       git_repo: 'https://git.rockylinux.org/staging/src/rocky-release-cloud.git'
       git_raw_path: 'https://git.rockylinux.org/staging/src/rocky-release-cloud/-/raw/r8/'
@@ -21,18 +22,28 @@ cloud:
       list:
         - 'SOURCES/RPM-GPG-KEY-Rocky-SIG-Cloud'
   '9':
+    profile: 'cloud'
+    project_id: '15016370-1410-4459-a1a2-a1576041fd19'
+    addtional_dirs: []
+    repo:
       cloud-kernel:
-        project_id: ''
         allowed_arches:
           - aarch64
           - x86_64
           - ppc64le
           - s390x
       cloud-common:
-        project_id: ''
         allowed_arches:
           - aarch64
           - x86_64
           - ppc64le
           - s390x
+    extra_files:
+      git_repo: 'https://git.rockylinux.org/staging/src/rocky-release-cloud.git'
+      git_raw_path: 'https://git.rockylinux.org/staging/src/rocky-release-cloud/-/raw/r9/'
+      branch: 'r9'
+      gpg:
+        stable: 'SOURCES/RPM-GPG-KEY-Rocky-SIG-Cloud'
+      list:
+        - 'SOURCES/RPM-GPG-KEY-Rocky-SIG-Cloud'
 ...
@@ -1,13 +1,49 @@
 ---
 core:
   '8':
+    profile: 'core'
+    project_id: ''
+    addtional_dirs: []
+    repo:
       core-common:
-        project_id: ''
+        allowed_arches:
+          - x86_64
+          - aarch64
       core-infra:
-        project_id: ''
+        allowed_arches:
+          - x86_64
+          - aarch64
+    extra_files:
+      git_repo: 'https://git.rockylinux.org/staging/src/rocky-release-core.git'
+      git_raw_path: 'https://git.rockylinux.org/staging/src/rocky-release-core/-/raw/r8/'
+      branch: 'r8'
+      gpg:
+        stable: 'SOURCES/RPM-GPG-KEY-Rocky-SIG-Core'
+      list:
+        - 'SOURCES/RPM-GPG-KEY-Rocky-SIG-Core'
   '9':
+    profile: 'core'
+    project_id: ''
+    addtional_dirs: []
+    repo:
       core-common:
-        project_id: ''
+        allowed_arches:
+          - x86_64
+          - aarch64
+          - ppc64le
+          - s390x
       core-infra:
-        project_id: ''
+        allowed_arches:
+          - x86_64
+          - aarch64
+          - ppc64le
+          - s390x
+    extra_files:
+      git_repo: 'https://git.rockylinux.org/staging/src/rocky-release-core.git'
+      git_raw_path: 'https://git.rockylinux.org/staging/src/rocky-release-core/-/raw/r9/'
+      branch: 'r9'
+      gpg:
+        stable: 'SOURCES/RPM-GPG-KEY-Rocky-SIG-Core'
+      list:
+        - 'SOURCES/RPM-GPG-KEY-Rocky-SIG-Core'
 ...
iso/empanadas/empanadas/sig/desktop.yaml (new file, 37 lines):

---
desktop:
  '8':
    profile: 'desktop'
    project_id: '8b3c9b53-0633-47bd-98a3-1ca3ec141278'
    addtional_dirs: []
    repo:
      desktop-common:
        allowed_arches:
          - x86_64
          - aarch64
    extra_files:
      git_repo: 'https://git.rockylinux.org/staging/src/rocky-release-desktop.git'
      git_raw_path: 'https://git.rockylinux.org/staging/src/rocky-release-desktop/-/raw/r8/'
      branch: 'r8'
      gpg:
        stable: 'SOURCES/RPM-GPG-KEY-Rocky-SIG-Desktop'
      list:
        - 'SOURCES/RPM-GPG-KEY-Rocky-SIG-Desktop'
  '9':
    profile: 'desktop'
    project_id: 'b0460c25-22cf-496c-a3a3-067b9a2af14a'
    addtional_dirs: []
    repo:
      desktop-common:
        allowed_arches:
          - x86_64
          - aarch64
    extra_files:
      git_repo: 'https://git.rockylinux.org/staging/src/rocky-release-desktop.git'
      git_raw_path: 'https://git.rockylinux.org/staging/src/rocky-release-desktop/-/raw/r9/'
      branch: 'r9'
      gpg:
        stable: 'SOURCES/RPM-GPG-KEY-Rocky-SIG-Desktop'
      list:
        - 'SOURCES/RPM-GPG-KEY-Rocky-SIG-Desktop'
...
iso/empanadas/empanadas/sig/virt.yaml (new file, 38 lines):

---
virt:
  '8':
    profile: 'virt'
    project_id: 'd911867a-658e-4f41-8343-5ceac6c41f67'
    addtional_dirs: []
    repo:
      virt-common:
        allowed_arches:
          - x86_64
          - aarch64
    extra_files:
      git_repo: 'https://git.rockylinux.org/staging/src/rocky-release-virt.git'
      git_raw_path: 'https://git.rockylinux.org/staging/src/rocky-release-virt/-/raw/r8/'
      branch: 'r8'
      gpg:
        stable: 'SOURCES/RPM-GPG-KEY-Rocky-SIG-Virt'
      list:
        - 'SOURCES/RPM-GPG-KEY-Rocky-SIG-Virt'
  '9':
    profile: 'virt'
    project_id: '925ceece-47ce-4f51-90f7-ff8689e4fe5e'
    addtional_dirs: []
    repo:
      virt-common:
        allowed_arches:
          - x86_64
          - aarch64
          - ppc64le
    extra_files:
      git_repo: 'https://git.rockylinux.org/staging/src/rocky-release-virt.git'
      git_raw_path: 'https://git.rockylinux.org/staging/src/rocky-release-virt/-/raw/r9/'
      branch: 'r9'
      gpg:
        stable: 'SOURCES/RPM-GPG-KEY-Rocky-SIG-Virt'
      list:
        - 'SOURCES/RPM-GPG-KEY-Rocky-SIG-Virt'
...
iso/empanadas/empanadas/templates/ISOREADME.tmpl (new file, 41 lines):

This directory contains ISOs for the {{ arch }} architecture. You will see
multiple types of ISOs, including accompanying manifests and CHECKSUM files
that match the given ISO. You will see the following formats:

* Rocky-X.Y-ARCH-TYPE (ISO File)
* Rocky-X.Y-DATE-ARCH-TYPE (ISO File)
* Rocky-ARCH-TYPE (Symlink)

X is the major release.
Y is the minor release.
ARCH is the given architecture.
DATE is the date the ISO was built (if applicable).
TYPE is the type of ISO (boot, dvd, minimal).

The first format is the most common and is the day-of-release ISO.

The second format is used for rebuilt ISOs that address a bug or provide an
updated image (eg for a newer kernel, a newer secure boot shim, and so on).

The third format in the list is a symlink to the "latest" ISO. Currently, this
is not advertised on the main site, but there may be potential for this in
the future as a value add.

This "unversioned" ISO symlink exists for these cases:

* A pre-determined download location for users/mirrors/service providers who
  want an always available and deterministic download location, which can be
  easier to script

* osinfo database / libvirt use, where if a user selects Rocky Linux X, it
  should be aware of and be able to download from that location. This should
  be fully supported in Rocky Linux 8.7 and 9.1, and future Fedora versions.

If you have any questions, please reach out to us:

* https://chat.rockylinux.org ~Development, ~Infrastructure, ~General
* https://forums.rockylinux.org
* https://lists.resf.org/mailman3/lists/rocky.lists.resf.org/ - Rocky General Mail List
* https://lists.resf.org/mailman3/lists/rocky-mirror.lists.resf.org/ - Mirror Mail List
@@ -11,6 +11,15 @@ metadata provides.

 # Notes #

+## Unversioned ISO Files ##
+
+There are unversioned ISO files in the isos and live directories per
+architecture. This is to allow libvirt users an easy way to download an ISO
+for a given release of their choosing. It also allows users as a whole to
+always have a pre-determined path to download the latest ISO of a given
+release by relying on it being in the URL itself rather than in the ISO name.
+Note that these unversioned ISO files may or may not be advertised on the
+main site.
+
 ## Checksums ##

 CHECKSUM Validation: https://github.com/rocky-linux/checksums
|
|||||||
|
|
||||||
{{ make_manifest }}
|
{{ make_manifest }}
|
||||||
|
|
||||||
|
{% if extra_iso_mode == "podman" %}
|
||||||
|
# symlink to unversioned image name
|
||||||
|
ln -sf {{ isoname }} {{ generic_isoname }}
|
||||||
|
ln -sf {{ isoname }}.manifest {{ generic_isoname }}.manifest
|
||||||
|
|
||||||
|
{% endif %}
|
||||||
|
@ -53,6 +53,8 @@ elif [ -f "/usr/bin/isoinfo" ]; then
|
|||||||
grep -v '/TRANS.TBL$' | sort >> lorax/images/boot.iso.manifest
|
grep -v '/TRANS.TBL$' | sort >> lorax/images/boot.iso.manifest
|
||||||
fi
|
fi
|
||||||
|
|
||||||
|
find lorax -perm 700 -exec chmod 755 {} \;
|
||||||
|
|
||||||
tar czf "${LORAX_TAR}" lorax "${LOGFILE}"
|
tar czf "${LORAX_TAR}" lorax "${LOGFILE}"
|
||||||
|
|
||||||
tar_ret_val=$?
|
tar_ret_val=$?
|
||||||
|
@@ -18,9 +18,9 @@ cd /builddir

 {{ git_clone }}
 if [ -d "/builddir/ks/live/{{ major }}/peridot" ]; then
-    pushd /builddir/ks/live/{{ major }}/peridot || { echo "Could not change directory"; exit 1; }
+    pushd /builddir/ks/live/{{ major }}/{{ arch }}/peridot || { echo "Could not change directory"; exit 1; }
 else
-    pushd /builddir/ks/live/{{ major }}/staging || { echo "Could not change directory"; exit 1; }
+    pushd /builddir/ks/live/{{ major }}/{{ arch }}/stage || { echo "Could not change directory"; exit 1; }
 fi
 ksflatten -c {{ ks_file }} -o /builddir/ks.cfg
 if [ $? -ne 0 ]; then
iso/empanadas/empanadas/templates/icicle/tdl-new.xml.tmpl (new file, 21 lines):

<template>
  <name>Rocky-{{major}}-{{type}}-{{version_variant}}.{{iso8601date}}.{{release}}.{{architecture}}</name>
  <os>
    <name>Fedora</name>
    <version>{{fedora_major}}</version>
    <arch>{{architecture}}</arch>
    <install type='url'>
      <url>https://download.rockylinux.org/stg/rocky/{{major}}/BaseOS/{{architecture}}/{{installdir}}</url>
    </install>
    <icicle>
      <extra_command>rpm -qa --qf '%{NAME},%{VERSION},%{RELEASE},%{ARCH},%{EPOCH},%{SIZE},%{SIGMD5},%{BUILDTIME}\n'</extra_command>
    </icicle>
    <kernelparam>console=tty0 inst.usefbx</kernelparam>
  </os>
  <description>Rocky-{{major}}-{{type}}-{{version_variant}}.{{iso8601date}}.{{release}}.{{architecture}} Generated on {{utcnow}}</description>
  <disk>
    <size>{{size}}</size>
  </disk>
</template>
@@ -10,7 +10,6 @@
     <icicle>
       <extra_command>rpm -qa --qf '%{NAME},%{VERSION},%{RELEASE},%{ARCH},%{EPOCH},%{SIZE},%{SIGMD5},%{BUILDTIME}\n'</extra_command>
     </icicle>
-    <kernelparam>console=tty0 inst.usefbx</kernelparam>
   </os>
   <description>Rocky-{{major}}-{{type}}-{{version_variant}}.{{iso8601date}}.{{release}}.{{architecture}} Generated on {{utcnow}}</description>
   <disk>
@@ -3,24 +3,24 @@
 name={{repo.name}}
 baseurl={{ repo.baseurl }}
 enabled=1
-gpgcheck=1
+gpgcheck={{ gpg_check }}
-repo_gpgcheck=1
+repo_gpgcheck={{ repo_gpg_check }}
 gpgkey={{ repo.gpgkey }}

 [{{ repo.name }}-debug]
 name={{repo.name}}
 baseurl={{ repo.baseurl }}-debug
 enabled=1
-gpgcheck=1
+gpgcheck={{ gpg_check }}
-repo_gpgcheck=1
+repo_gpgcheck={{ repo_gpg_check }}
 gpgkey={{ repo.gpgkey }}

 [{{ repo.name }}-source]
 name={{repo.name}}
 baseurl={{ repo.srcbaseurl }}
 enabled=1
-gpgcheck=1
+gpgcheck={{ gpg_check }}
-repo_gpgcheck=1
+repo_gpgcheck={{ repo_gpg_check }}
 gpgkey={{ repo.gpgkey }}

 {% endfor %}
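A minimal sketch of how this template change plays out when rendered with plain jinja2. The values passed below are examples only; in empanadas they are derived from the --disable-gpg-check / --disable-repo-gpg-check flags added earlier in this diff, and the exact value types may differ.

    # Illustrative only: render the new gpgcheck/repo_gpgcheck lines.
    from jinja2 import Template

    snippet = Template(
        "gpgcheck={{ gpg_check }}\n"
        "repo_gpgcheck={{ repo_gpg_check }}"
    )
    print(snippet.render(gpg_check=1, repo_gpg_check=0))
    # gpgcheck=1
    # repo_gpgcheck=0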
@@ -3,6 +3,7 @@ set -o pipefail
 {{ import_gpg_cmd }} | tee -a {{ sync_log }}
 {{ dnf_plugin_cmd }} | tee -a {{ sync_log }}
 sed -i 's/enabled=1/enabled=0/g' /etc/yum.repos.d/*.repo
+{{ metadata_cmd }} | tee -a {{ sync_log }}
 {{ sync_cmd }} | tee -a {{ sync_log }}

 # Yes this is a bit hacky. Can't think of a better way to do this.
@@ -18,6 +19,11 @@ if [ "$ret_val" -eq 0 ]; then
     echo "SOME PACKAGES DID NOT DOWNLOAD" | tee -a {{ sync_log }}
     exit 1
   else
+    {% if deploy_extra_files %}
+    pushd {{ download_path }}
+    curl -RO {{ gpg_key_url }}
+    popd
+    {% endif %}
     exit 0
   fi
 fi
|
@ -4,6 +4,7 @@ set -o pipefail
|
|||||||
{{ arch_force_cp }} | tee -a {{ sync_log }}
|
{{ arch_force_cp }} | tee -a {{ sync_log }}
|
||||||
{{ dnf_plugin_cmd }} | tee -a {{ sync_log }}
|
{{ dnf_plugin_cmd }} | tee -a {{ sync_log }}
|
||||||
sed -i 's/enabled=1/enabled=0/g' /etc/yum.repos.d/*.repo
|
sed -i 's/enabled=1/enabled=0/g' /etc/yum.repos.d/*.repo
|
||||||
|
{{ metadata_cmd }} | tee -a {{ sync_log }}
|
||||||
{{ sync_cmd }} | tee -a {{ sync_log }}
|
{{ sync_cmd }} | tee -a {{ sync_log }}
|
||||||
|
|
||||||
# Yes this is a bit hacky. Can't think of a better way to do this.
|
# Yes this is a bit hacky. Can't think of a better way to do this.
|
||||||
@ -25,6 +26,11 @@ if [ "$ret_val" -eq 0 ]; then
|
|||||||
# echo "Repository is empty." | tee -a {{ sync_log }}
|
# echo "Repository is empty." | tee -a {{ sync_log }}
|
||||||
# rm -rf {{ download_path }}
|
# rm -rf {{ download_path }}
|
||||||
#fi
|
#fi
|
||||||
|
{%- if deploy_extra_files %}
|
||||||
|
pushd {{ download_path }}
|
||||||
|
curl -RO {{ gpg_key_url }}
|
||||||
|
popd
|
||||||
|
{% endif %}
|
||||||
exit 0
|
exit 0
|
||||||
fi
|
fi
|
||||||
fi
|
fi
|
||||||
|
@@ -1,6 +1,10 @@
 -indev {{ boot_iso }}
 -outdev {{ isoname }}
 -boot_image any replay
+-joliet on
+-system_id {{ 'PPC' if arch == 'ppc64le' else 'LINUX' }}
+-compliance joliet_long_names
+{{ '-compliance untranslated_names' if arch == 'ppc64le' }}
 -volid {{ volid }}
 {{ graft }}
 -end
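To see what the arch-conditional lines in the xorriso template render to, here is a small jinja2 sketch. It only covers the two conditional lines; empanadas also supplies boot_iso, isoname, volid and graft when it renders the full template.

    # Illustrative only: render the arch-dependent xorriso options.
    from jinja2 import Template

    lines = Template(
        "-system_id {{ 'PPC' if arch == 'ppc64le' else 'LINUX' }}\n"
        "{{ '-compliance untranslated_names' if arch == 'ppc64le' }}"
    )
    print(lines.render(arch="ppc64le"))
    # -system_id PPC
    # -compliance untranslated_names
    print(lines.render(arch="x86_64"))
    # -system_id LINUX   (the second line renders empty)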
iso/empanadas/empanadas/util/dnf_utils.backup (new file, 1663 lines)
File diff suppressed because it is too large.
@ -61,6 +61,8 @@ class IsoBuild:
|
|||||||
extra_iso_mode: str = 'local',
|
extra_iso_mode: str = 'local',
|
||||||
compose_dir_is_here: bool = False,
|
compose_dir_is_here: bool = False,
|
||||||
hashed: bool = False,
|
hashed: bool = False,
|
||||||
|
updated_image: bool = False,
|
||||||
|
image_increment: str = '0',
|
||||||
image=None,
|
image=None,
|
||||||
logger=None
|
logger=None
|
||||||
):
|
):
|
||||||
@ -93,13 +95,18 @@ class IsoBuild:
|
|||||||
self.checksum = rlvars['checksum']
|
self.checksum = rlvars['checksum']
|
||||||
self.profile = rlvars['profile']
|
self.profile = rlvars['profile']
|
||||||
self.hashed = hashed
|
self.hashed = hashed
|
||||||
|
self.updated_image = updated_image
|
||||||
|
self.updated_image_increment = "." + image_increment
|
||||||
|
self.updated_image_date = (time.strftime("%Y%m%d", time.localtime())
|
||||||
|
+ self.updated_image_increment)
|
||||||
|
|
||||||
# Relevant major version items
|
# Relevant major version items
|
||||||
self.arch = arch
|
self.arch = arch
|
||||||
self.arches = rlvars['allowed_arches']
|
self.arches = rlvars['allowed_arches']
|
||||||
self.release = rlvars['revision']
|
self.release = rlvars['revision']
|
||||||
self.minor_version = rlvars['minor']
|
self.minor_version = rlvars['minor']
|
||||||
self.revision = rlvars['revision'] + "-" + rlvars['rclvl']
|
self.revision_level = rlvars['revision'] + "-" + rlvars['rclvl']
|
||||||
|
self.revision = rlvars['revision']
|
||||||
self.rclvl = rlvars['rclvl']
|
self.rclvl = rlvars['rclvl']
|
||||||
self.repos = rlvars['iso_map']['lorax']['repos']
|
self.repos = rlvars['iso_map']['lorax']['repos']
|
||||||
self.repo_base_url = config['repo_base_url']
|
self.repo_base_url = config['repo_base_url']
|
||||||
@ -113,12 +120,6 @@ class IsoBuild:
|
|||||||
if 'container' in rlvars and len(rlvars['container']) > 0:
|
if 'container' in rlvars and len(rlvars['container']) > 0:
|
||||||
self.container = rlvars['container']
|
self.container = rlvars['container']
|
||||||
|
|
||||||
self.staging_dir = os.path.join(
|
|
||||||
config['staging_root'],
|
|
||||||
config['category_stub'],
|
|
||||||
self.revision
|
|
||||||
)
|
|
||||||
|
|
||||||
# all bucket related info
|
# all bucket related info
|
||||||
self.s3_region = config['aws_region']
|
self.s3_region = config['aws_region']
|
||||||
self.s3_bucket = config['bucket']
|
self.s3_bucket = config['bucket']
|
||||||
@ -196,7 +197,7 @@ class IsoBuild:
|
|||||||
self.compose_dir_is_here,
|
self.compose_dir_is_here,
|
||||||
self.hashed
|
self.hashed
|
||||||
)
|
)
|
||||||
self.log.info(self.revision)
|
self.log.info(self.revision_level)
|
||||||
|
|
||||||
def run(self):
|
def run(self):
|
||||||
work_root = os.path.join(
|
work_root = os.path.join(
|
||||||
@ -502,6 +503,7 @@ class IsoBuild:
|
|||||||
self.log.info('Removing boot.iso from %s' % image)
|
self.log.info('Removing boot.iso from %s' % image)
|
||||||
try:
|
try:
|
||||||
os.remove(path_to_image + '/images/boot.iso')
|
os.remove(path_to_image + '/images/boot.iso')
|
||||||
|
os.remove(path_to_image + '/images/boot.iso.manifest')
|
||||||
except:
|
except:
|
||||||
self.log.error(
|
self.log.error(
|
||||||
'[' + Color.BOLD + Color.YELLOW + 'FAIL' + Color.END + '] ' +
|
'[' + Color.BOLD + Color.YELLOW + 'FAIL' + Color.END + '] ' +
|
||||||
@ -509,21 +511,9 @@ class IsoBuild:
|
|||||||
)
|
)
|
||||||
|
|
||||||
def _copy_boot_to_work(self, force_unpack, arch):
|
def _copy_boot_to_work(self, force_unpack, arch):
|
||||||
src_to_image = os.path.join(
|
src_to_image = os.path.join(self.lorax_work_dir, arch, 'lorax')
|
||||||
self.lorax_work_dir,
|
iso_to_go = os.path.join(self.iso_work_dir, arch)
|
||||||
arch,
|
path_to_src_image = os.path.join(src_to_image, 'images/boot.iso')
|
||||||
'lorax'
|
|
||||||
)
|
|
||||||
|
|
||||||
iso_to_go = os.path.join(
|
|
||||||
self.iso_work_dir,
|
|
||||||
arch
|
|
||||||
)
|
|
||||||
|
|
||||||
path_to_src_image = '{}/{}'.format(
|
|
||||||
src_to_image,
|
|
||||||
'/images/boot.iso'
|
|
||||||
)
|
|
||||||
|
|
||||||
rclevel = ''
|
rclevel = ''
|
||||||
if self.release_candidate:
|
if self.release_candidate:
|
||||||
@ -538,15 +528,13 @@ class IsoBuild:
|
|||||||
'boot'
|
'boot'
|
||||||
)
|
)
|
||||||
|
|
||||||
isobootpath = '{}/{}'.format(
|
isobootpath = os.path.join(iso_to_go, discname)
|
||||||
iso_to_go,
|
manifest = '{}.manifest'.format(isobootpath)
|
||||||
discname
|
link_name = '{}-{}-boot.iso'.format(self.shortname, arch)
|
||||||
)
|
link_manifest = link_name + '.manifest'
|
||||||
|
isobootpath = os.path.join(iso_to_go, discname)
|
||||||
manifest = '{}.{}'.format(
|
linkbootpath = os.path.join(iso_to_go, link_name)
|
||||||
isobootpath,
|
manifestlink = os.path.join(iso_to_go, link_manifest)
|
||||||
'manifest'
|
|
||||||
)
|
|
||||||
|
|
||||||
if not force_unpack:
|
if not force_unpack:
|
||||||
file_check = isobootpath
|
file_check = isobootpath
|
||||||
@ -556,9 +544,18 @@ class IsoBuild:
|
|||||||
|
|
||||||
self.log.info('Copying %s boot iso to work directory...' % arch)
|
self.log.info('Copying %s boot iso to work directory...' % arch)
|
||||||
os.makedirs(iso_to_go, exist_ok=True)
|
os.makedirs(iso_to_go, exist_ok=True)
|
||||||
|
try:
|
||||||
shutil.copy2(path_to_src_image, isobootpath)
|
shutil.copy2(path_to_src_image, isobootpath)
|
||||||
|
if os.path.exists(linkbootpath):
|
||||||
|
os.remove(linkbootpath)
|
||||||
|
os.symlink(discname, linkbootpath)
|
||||||
|
except Exception as e:
|
||||||
|
self.log.error(Color.FAIL + 'We could not copy the image or create a symlink.')
|
||||||
|
raise SystemExit(e)
|
||||||
|
|
||||||
if os.path.exists(path_to_src_image + '.manifest'):
|
if os.path.exists(path_to_src_image + '.manifest'):
|
||||||
shutil.copy2(path_to_src_image + '.manifest', manifest)
|
shutil.copy2(path_to_src_image + '.manifest', manifest)
|
||||||
|
os.symlink(manifest.split('/')[-1], manifestlink)
|
||||||
|
|
||||||
self.log.info('Creating checksum for %s boot iso...' % arch)
|
self.log.info('Creating checksum for %s boot iso...' % arch)
|
||||||
checksum = Shared.get_checksum(isobootpath, self.checksum, self.log)
|
checksum = Shared.get_checksum(isobootpath, self.checksum, self.log)
|
||||||
@ -569,6 +566,14 @@ class IsoBuild:
|
|||||||
c.write(checksum)
|
c.write(checksum)
|
||||||
c.close()
|
c.close()
|
||||||
|
|
||||||
|
linksum = Shared.get_checksum(linkbootpath, self.checksum, self.log)
|
||||||
|
if not linksum:
|
||||||
|
self.log.error(Color.FAIL + linkbootpath + ' not found! Did we actually make the symlink?')
|
||||||
|
return
|
||||||
|
with open(linkbootpath + '.CHECKSUM', "w+") as l:
|
||||||
|
l.write(linksum)
|
||||||
|
l.close()
|
||||||
|
|
||||||
def _copy_nondisc_to_repo(self, force_unpack, arch, repo):
|
def _copy_nondisc_to_repo(self, force_unpack, arch, repo):
|
||||||
"""
|
"""
|
||||||
Syncs data from a non-disc set of images to the appropriate repo. Repo
|
Syncs data from a non-disc set of images to the appropriate repo. Repo
|
||||||
@ -581,6 +586,13 @@ class IsoBuild:
|
|||||||
'os'
|
'os'
|
||||||
)
|
)
|
||||||
|
|
||||||
|
kspathway = os.path.join(
|
||||||
|
self.compose_latest_sync,
|
||||||
|
repo,
|
||||||
|
arch,
|
||||||
|
'kickstart'
|
||||||
|
)
|
||||||
|
|
||||||
src_to_image = os.path.join(
|
src_to_image = os.path.join(
|
||||||
self.lorax_work_dir,
|
self.lorax_work_dir,
|
||||||
arch,
|
arch,
|
||||||
@ -614,6 +626,7 @@ class IsoBuild:
|
|||||||
|
|
||||||
try:
|
try:
|
||||||
shutil.copytree(src_to_image, pathway, copy_function=shutil.copy2, dirs_exist_ok=True)
|
shutil.copytree(src_to_image, pathway, copy_function=shutil.copy2, dirs_exist_ok=True)
|
||||||
|
shutil.copytree(src_to_image, kspathway, copy_function=shutil.copy2, dirs_exist_ok=True)
|
||||||
except:
|
except:
|
||||||
self.log.error('%s already exists??' % repo)
|
self.log.error('%s already exists??' % repo)
|
||||||
|
|
||||||
@ -769,11 +782,14 @@ class IsoBuild:
|
|||||||
mock_sh_template = self.tmplenv.get_template('extraisobuild.tmpl.sh')
|
mock_sh_template = self.tmplenv.get_template('extraisobuild.tmpl.sh')
|
||||||
iso_template = self.tmplenv.get_template('buildExtraImage.tmpl.sh')
|
iso_template = self.tmplenv.get_template('buildExtraImage.tmpl.sh')
|
||||||
xorriso_template = self.tmplenv.get_template('xorriso.tmpl.txt')
|
xorriso_template = self.tmplenv.get_template('xorriso.tmpl.txt')
|
||||||
|
iso_readme_template = self.tmplenv.get_template('ISOREADME.tmpl')
|
||||||
|
|
||||||
mock_iso_path = '/var/tmp/lorax-{}.cfg'.format(self.major_version)
|
mock_iso_path = '/var/tmp/lorax-{}.cfg'.format(self.major_version)
|
||||||
mock_sh_path = '{}/extraisobuild-{}-{}.sh'.format(entries_dir, arch, image)
|
mock_sh_path = '{}/extraisobuild-{}-{}.sh'.format(entries_dir, arch, image)
|
||||||
iso_template_path = '{}/buildExtraImage-{}-{}.sh'.format(entries_dir, arch, image)
|
iso_template_path = '{}/buildExtraImage-{}-{}.sh'.format(entries_dir, arch, image)
|
||||||
xorriso_template_path = '{}/xorriso-{}-{}.txt'.format(entries_dir, arch, image)
|
xorriso_template_path = '{}/xorriso-{}-{}.txt'.format(entries_dir, arch, image)
|
||||||
|
iso_readme_path = '{}/{}/README'.format(self.iso_work_dir, arch)
|
||||||
|
print(iso_readme_path)
|
||||||
|
|
||||||
log_root = os.path.join(
|
log_root = os.path.join(
|
||||||
work_root,
|
work_root,
|
||||||
@@ -801,6 +817,10 @@ class IsoBuild:
         if self.release_candidate:
             rclevel = '-' + self.rclvl
 
+        datestamp = ''
+        if self.updated_image:
+            datestamp = '-' + self.updated_image_date
+
         volid = '{}-{}-{}{}-{}-{}'.format(
                 self.shortname,
                 self.major_version,
@@ -810,15 +830,17 @@ class IsoBuild:
                 volname
         )
 
-        isoname = '{}-{}.{}{}-{}-{}.iso'.format(
+        isoname = '{}-{}{}{}-{}-{}.iso'.format(
                 self.shortname,
-                self.major_version,
-                self.minor_version,
+                self.revision,
                 rclevel,
+                datestamp,
                 arch,
                 image
         )
 
+        generic_isoname = '{}-{}-{}.iso'.format(self.shortname, arch, image)
+
         lorax_pkg_cmd = '/usr/bin/dnf install {} -y {}'.format(
                 ' '.join(required_pkgs),
                 log_path_command
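To see what the renamed format string produces, a small illustration with made-up values (the real ones come from the release configuration, not this diff):

    # Illustrative only: assumed values, not taken from a real compose.
    shortname, revision, rclevel, datestamp = 'Rocky', '9.1', '-RC1', '-20221108.1'
    arch, image = 'x86_64', 'dvd'

    isoname = '{}-{}{}{}-{}-{}.iso'.format(shortname, revision, rclevel, datestamp, arch, image)
    generic_isoname = '{}-{}-{}.iso'.format(shortname, arch, image)

    print(isoname)          # Rocky-9.1-RC1-20221108.1-x86_64-dvd.iso
    print(generic_isoname)  # Rocky-x86_64-dvd.iso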
@@ -867,6 +889,7 @@ class IsoBuild:
                 isoname=isoname,
                 volid=volid,
                 graft=xorpoint,
+                arch=arch,
         )
         xorriso_template_entry = open(xorriso_template_path, "w+")
         xorriso_template_entry.write(xorriso_template_output)
@@ -894,6 +917,11 @@ class IsoBuild:
                 make_manifest=make_manifest,
                 lorax_pkg_cmd=lorax_pkg_cmd,
                 isoname=isoname,
+                generic_isoname=generic_isoname,
+        )
+
+        iso_readme_template_output = iso_readme_template.render(
+                arch=arch
         )
 
         mock_iso_entry = open(mock_iso_path, "w+")
@@ -908,6 +936,10 @@ class IsoBuild:
         iso_template_entry.write(iso_template_output)
         iso_template_entry.close()
 
+        iso_readme_entry = open(iso_readme_path, "w+")
+        iso_readme_entry.write(iso_readme_template_output)
+        iso_readme_entry.close()
+
         os.chmod(mock_sh_path, 0o755)
         os.chmod(iso_template_path, 0o755)
 
@@ -943,6 +975,11 @@ class IsoBuild:
         isos_dir = os.path.join(work_root, "isos")
         bad_exit_list = []
         checksum_list = []
 
+        datestamp = ''
+        if self.updated_image:
+            datestamp = '-' + self.updated_image_date
+
         for i in images:
             entry_name_list = []
             image_name = i
@@ -956,17 +993,25 @@ class IsoBuild:
             if self.release_candidate:
                 rclevel = '-' + self.rclvl
 
-            isoname = '{}/{}-{}.{}{}-{}-{}.iso'.format(
+            isoname = '{}/{}-{}{}{}-{}-{}.iso'.format(
                     a,
                     self.shortname,
-                    self.major_version,
-                    self.minor_version,
+                    self.revision,
                     rclevel,
+                    datestamp,
+                    a,
+                    i
+            )
+
+            genericname = '{}/{}-{}-{}.iso'.format(
+                    a,
+                    self.shortname,
                     a,
                     i
             )
 
             checksum_list.append(isoname)
+            checksum_list.append(genericname)
 
             for pod in entry_name_list:
                 podman_cmd_entry = '{} run -d -it -v "{}:{}" -v "{}:{}" --name {} --entrypoint {}/{} {}'.format(
@@ -1358,6 +1403,7 @@ class IsoBuild:
 
             source_path = latest_artifacts[arch]
             drop_name = source_path.split('/')[-1]
+            checksum_name = drop_name + '.CHECKSUM'
             full_drop = '{}/{}'.format(
                     image_arch_dir,
                     drop_name
@@ -1407,6 +1453,15 @@ class IsoBuild:
                     arch,
                     formattype
             )
+            latest_path = latest_name.split('/')[-1]
+            latest_checksum = '{}/{}-{}-{}.latest.{}.{}.CHECKSUM'.format(
+                    image_arch_dir,
+                    self.shortname,
+                    self.major_version,
+                    imagename,
+                    arch,
+                    formattype
+            )
             # For some reason python doesn't have a "yeah just change this
             # link" part of the function
             if os.path.exists(latest_name):
@@ -1414,6 +1469,17 @@ class IsoBuild:
 
             os.symlink(drop_name, latest_name)
 
+            self.log.info('Creating checksum for latest symlinked image...')
+            shutil.copy2(checksum_drop, latest_checksum)
+            with open(latest_checksum, 'r') as link:
+                checkdata = link.read()
+
+            checkdata = checkdata.replace(drop_name, latest_path)
+
+            with open(latest_checksum, 'w+') as link:
+                link.write(checkdata)
+                link.close()
+
         self.log.info(Color.INFO + 'Image download phase completed')
 
 
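The copied .CHECKSUM still names the dated artifact, so the replace() call above rewrites it to match the "latest" symlink. An illustration with hypothetical file names (not taken from a real compose):

    # Hypothetical contents; real names come from the build pipeline.
    drop_name = 'Rocky-9-EC2-9.1-20221108.0.x86_64.qcow2'
    latest_path = 'Rocky-9-EC2.latest.x86_64.qcow2'

    checkdata = 'SHA256 ({}) = 0123abcd...\n'.format(drop_name)
    checkdata = checkdata.replace(drop_name, latest_path)
    print(checkdata)  # SHA256 (Rocky-9-EC2.latest.x86_64.qcow2) = 0123abcd...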
@@ -1436,6 +1502,8 @@ class LiveBuild:
             image=None,
             justcopyit: bool = False,
             force_build: bool = False,
+            updated_image: bool = False,
+            image_increment: str = '0',
             logger=None
         ):
 
@@ -1449,7 +1517,6 @@ class LiveBuild:
         self.major_version = major
         self.compose_dir_is_here = compose_dir_is_here
         self.date_stamp = config['date_stamp']
-        self.date = time.strftime("%Y%m%d", time.localtime())
         self.compose_root = config['compose_root']
         self.compose_base = config['compose_root'] + "/" + major
         self.current_arch = config['arch']
@@ -1483,6 +1550,11 @@ class LiveBuild:
         if 'container' in rlvars and len(rlvars['container']) > 0:
             self.container = rlvars['container']
 
+        self.updated_image = updated_image
+        self.updated_image_increment = "." + image_increment
+        self.date = (time.strftime("%Y%m%d", time.localtime())
+                     + self.updated_image_increment)
+
         # Templates
         file_loader = FileSystemLoader(f"{_rootdir}/templates")
         self.tmplenv = Environment(loader=file_loader)
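With the new constructor arguments, the live image date string now carries an increment suffix. For example (increment value assumed; it defaults to '0' in the signature above):

    import time

    image_increment = '0'
    updated_image_increment = "." + image_increment
    date = time.strftime("%Y%m%d", time.localtime()) + updated_image_increment
    print(date)  # e.g. 20220718.0 -- bumping the increment yields 20220718.1, and so on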
@@ -1643,16 +1715,17 @@ class LiveBuild:
         )
         required_pkgs = self.livemap['required_pkgs']
 
-        volid = '{}-{}-{}'.format(
+        volid = '{}-{}-{}-{}'.format(
                 self.shortname,
-                image,
-                self.release
+                self.major_version,
+                self.minor_version,
+                image
         )
 
         isoname = '{}-{}-{}-{}-{}.iso'.format(
                 self.shortname,
-                image,
                 self.release,
+                image,
                 self.current_arch,
                 self.date
         )
@@ -1865,11 +1938,18 @@ class LiveBuild:
         live_dir_arch = os.path.join(self.live_work_dir, arch)
         isoname = '{}-{}-{}-{}-{}.iso'.format(
                 self.shortname,
-                image,
                 self.release,
+                image,
                 arch,
                 self.date
         )
+        isolink = '{}-{}-{}-{}-{}.iso'.format(
+                self.shortname,
+                self.major_version,
+                image,
+                arch,
+                'latest'
+        )
         live_res_dir = '/var/lib/mock/{}-{}-{}/result'.format(
                 self.shortname.lower(),
                 self.major_version,
|
|||||||
self.log.info(Color.INFO + 'Copying image to work directory')
|
self.log.info(Color.INFO + 'Copying image to work directory')
|
||||||
source_path = os.path.join(live_res_dir, isoname)
|
source_path = os.path.join(live_res_dir, isoname)
|
||||||
dest_path = os.path.join(live_dir_arch, isoname)
|
dest_path = os.path.join(live_dir_arch, isoname)
|
||||||
|
link_path = os.path.join(live_dir_arch, isolink)
|
||||||
os.makedirs(live_dir_arch, exist_ok=True)
|
os.makedirs(live_dir_arch, exist_ok=True)
|
||||||
|
try:
|
||||||
shutil.copy2(source_path, dest_path)
|
shutil.copy2(source_path, dest_path)
|
||||||
|
if os.path.exists(link_path):
|
||||||
|
os.remove(link_path)
|
||||||
|
os.symlink(isoname, link_path)
|
||||||
|
except:
|
||||||
|
self.log.error(Color.FAIL + 'We could not copy the image or create a symlink.')
|
||||||
|
return
|
||||||
|
|
||||||
self.log.info(Color.INFO + 'Generating checksum')
|
self.log.info(Color.INFO + 'Generating checksum')
|
||||||
checksum = Shared.get_checksum(dest_path, self.checksum, self.log)
|
checksum = Shared.get_checksum(dest_path, self.checksum, self.log)
|
||||||
if not checksum:
|
if not checksum:
|
||||||
@@ -1917,3 +2006,11 @@ class LiveBuild:
         with open(dest_path + '.CHECKSUM', "w+") as c:
             c.write(checksum)
             c.close()
+
+        linksum = Shared.get_checksum(link_path, self.checksum, self.log)
+        if not linksum:
+            self.log.error(Color.FAIL + link_path + ' not found. Did we copy it?')
+            return
+        with open(link_path + '.CHECKSUM', "w+") as c:
+            c.write(linksum)
+            c.close()
@@ -5,6 +5,7 @@ import json
 import hashlib
 import shlex
 import subprocess
+import shutil
 import yaml
 import requests
 import boto3
@@ -401,7 +402,21 @@ class Shared:
         return cmd
 
     @staticmethod
-    def generate_conf(data, logger, dest_path='/var/tmp') -> str:
+    def generate_conf(
+            shortname,
+            major_version,
+            repos,
+            repo_base_url,
+            project_id,
+            hashed,
+            extra_files,
+            gpgkey,
+            gpg_check,
+            repo_gpg_check,
+            templates,
+            logger,
+            dest_path='/var/tmp'
+    ) -> str:
         """
         Generates the necessary repo conf file for the operation. This repo
         file should be temporary in nature. This will generate a repo file
@@ -413,35 +428,35 @@ class Shared:
         """
         fname = os.path.join(
                 dest_path,
-                "{}-{}-config.repo".format(data.shortname, data.major_version)
+                "{}-{}-config.repo".format(shortname, major_version)
         )
-        data.log.info('Generating the repo configuration: %s' % fname)
+        logger.info('Generating the repo configuration: %s' % fname)
 
-        if data.repo_base_url.startswith("/"):
+        if repo_base_url.startswith("/"):
             logger.error("Local file syncs are not supported.")
             raise SystemExit(Color.BOLD + "Local file syncs are not "
                     "supported." + Color.END)
 
         prehashed = ''
-        if data.hashed:
+        if hashed:
             prehashed = "hashed-"
         # create dest_path
         if not os.path.exists(dest_path):
             os.makedirs(dest_path, exist_ok=True)
         config_file = open(fname, "w+")
         repolist = []
-        for repo in data.repos:
+        for repo in repos:
 
             constructed_url = '{}/{}/repo/{}{}/$basearch'.format(
-                    data.repo_base_url,
-                    data.project_id,
+                    repo_base_url,
+                    project_id,
                     prehashed,
                     repo,
             )
 
             constructed_url_src = '{}/{}/repo/{}{}/src'.format(
-                    data.repo_base_url,
-                    data.project_id,
+                    repo_base_url,
+                    project_id,
                     prehashed,
                     repo,
             )
@@ -450,12 +465,16 @@ class Shared:
                 'name': repo,
                 'baseurl': constructed_url,
                 'srcbaseurl': constructed_url_src,
-                'gpgkey': data.extra_files['git_raw_path'] + data.extra_files['gpg'][data.gpgkey]
+                'gpgkey': extra_files['git_raw_path'] + extra_files['gpg'][gpgkey]
             }
             repolist.append(repodata)
 
-        template = data.tmplenv.get_template('repoconfig.tmpl')
-        output = template.render(repos=repolist)
+        template = templates.get_template('repoconfig.tmpl')
+        output = template.render(
+                repos=repolist,
+                gpg_check=gpg_check,
+                repo_gpg_check=repo_gpg_check
+        )
         config_file.write(output)
 
         config_file.close()
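After this refactor generate_conf no longer reads attributes off a data object; callers hand in each value plus a Jinja2 environment. A sketch of a call site with placeholder values (the real call sites live elsewhere in empanadas and are not shown in this diff; the import path is an assumption):

    import logging
    from jinja2 import Environment, FileSystemLoader
    from empanadas.util import Shared  # assumed import path

    # All values below are placeholders for illustration.
    templates = Environment(loader=FileSystemLoader('./templates'))

    repo_path = Shared.generate_conf(
        shortname='Rocky',
        major_version='9',
        repos=['BaseOS', 'AppStream'],
        repo_base_url='https://yumrepofs.build.resf.org',
        project_id='00000000-0000-0000-0000-000000000000',
        hashed=True,
        extra_files={'git_raw_path': 'https://git.example/raw/', 'gpg': {'stable': 'SOURCES/RPM-GPG-KEY-Example'}},
        gpgkey='stable',
        gpg_check=True,
        repo_gpg_check=True,
        templates=templates,
        logger=logging.getLogger(__name__),
    )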
@@ -821,7 +840,14 @@ class Shared:
             isokwargs["input_charset"] = None
 
         if opts['use_xorrisofs']:
-            cmd = ['/usr/bin/xorriso', '-dialog', 'on', '<', opts['graft_points']]
+            cmd = [
+                    '/usr/bin/xorriso',
+                    '-dialog',
+                    'on',
+                    '<',
+                    opts['graft_points'],
+                    '2>&1'
+            ]
         else:
             cmd = Shared.get_mkisofs_cmd(
                 opts['iso_name'],
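'<' and '2>&1' are shell redirection operators, so this list presumably gets joined into a shell command line before it runs; passed argv-style they would reach xorriso as literal arguments. For reference, a shell-free way to drive xorriso's dialog mode would look roughly like this (illustrative helper, not empanadas code):

    import subprocess

    # Sketch only: assumes graft_points is a path to an xorriso dialog/commands file.
    def run_xorriso_dialog(graft_points, logfile):
        with open(graft_points, 'rb') as commands, open(logfile, 'wb') as log:
            proc = subprocess.run(
                ['/usr/bin/xorriso', '-dialog', 'on'],
                stdin=commands,
                stdout=log,
                stderr=subprocess.STDOUT,
            )
        return proc.returncode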
@@ -937,7 +963,7 @@ class Shared:
         Write compose info similar to pungi.
 
         arches and repos may be better suited for a dictionary. that is a
-        future thing we will work on for 0.3.0.
+        future thing we will work on for 0.5.0.
         """
         cijson = file_path + '.json'
         ciyaml = file_path + '.yaml'
@@ -961,3 +987,90 @@ class Shared:
         with open(ciyaml, 'w+') as ymdump:
             yaml.dump(jsonData, ymdump)
             ymdump.close()
+
+    @staticmethod
+    def symlink_to_latest(shortname, major_version, generated_dir, compose_latest_dir, logger):
+        """
+        Emulates pungi and symlinks latest-Rocky-X
+
+        This link will be what is updated in full runs. Whatever is in this
+        'latest' directory is what is rsynced on to staging after completion.
+        This link should not change often.
+        """
+        try:
+            os.remove(compose_latest_dir)
+        except:
+            pass
+
+        logger.info('Symlinking to latest-{}-{}...'.format(shortname, major_version))
+        os.symlink(generated_dir, compose_latest_dir)
+
+    @staticmethod
+    def deploy_extra_files(extra_files, sync_root, global_work_root, logger):
+        """
+        deploys extra files based on info of rlvars including a
+        extra_files.json
+
+        might also deploy COMPOSE_ID and maybe in the future a metadata dir with
+        a bunch of compose-esque stuff.
+        """
+        #logger.info(Color.INFO + 'Deploying treeinfo, discinfo, and media.repo')
+
+        cmd = Shared.git_cmd(logger)
+        tmpclone = '/tmp/clone'
+        extra_files_dir = os.path.join(
+                global_work_root,
+                'extra-files'
+        )
+        metadata_dir = os.path.join(
+                sync_root,
+                "metadata"
+        )
+        if not os.path.exists(extra_files_dir):
+            os.makedirs(extra_files_dir, exist_ok=True)
+
+        if not os.path.exists(metadata_dir):
+            os.makedirs(metadata_dir, exist_ok=True)
+
+        clonecmd = '{} clone {} -b {} -q {}'.format(
+                cmd,
+                extra_files['git_repo'],
+                extra_files['branch'],
+                tmpclone
+        )
+
+        git_clone = subprocess.call(
+                shlex.split(clonecmd),
+                stdout=subprocess.DEVNULL,
+                stderr=subprocess.DEVNULL
+        )
+
+        logger.info(Color.INFO + 'Deploying extra files to work and metadata directories ...')
+
+        # Copy files to work root
+        for extra in extra_files['list']:
+            src = '/tmp/clone/' + extra
+            # Copy extra files to root of compose here also - The extra files
+            # are meant to be picked up by our ISO creation process and also
+            # exist on our mirrors.
+            try:
+                shutil.copy2(src, extra_files_dir)
+                shutil.copy2(src, metadata_dir)
+            except:
+                logger.warn(Color.WARN + 'Extra file not copied: ' + src)
+
+        try:
+            shutil.rmtree(tmpclone)
+        except OSError as e:
+            logger.error(Color.FAIL + 'Directory ' + tmpclone +
+                    ' could not be removed: ' + e.strerror
+            )
+
+    @staticmethod
+    def dnf_sync(repo, sync_root, work_root, arch, logger):
+        """
+        This is for normal dnf syncs. This is very slow.
+        """
+        logger.error('DNF syncing has been removed.')
+        logger.error('Please install podman and enable parallel')
+        raise SystemExit()
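deploy_extra_files and generate_conf only ever touch a few keys of extra_files; from those accesses the expected shape is roughly the following (keys inferred from the code above, values invented for illustration):

    # Inferred from the accesses above; actual values live in the release configs.
    extra_files = {
        'git_repo': 'https://git.example.org/staging/src/rocky-release.git',
        'git_raw_path': 'https://git.example.org/staging/src/rocky-release/-/raw/r9/',
        'branch': 'r9',
        'gpg': {
            'stable': 'SOURCES/RPM-GPG-KEY-Example',
        },
        'list': [
            'SOURCES/Contributors',
            'SOURCES/COMMUNITY-CHARTER',
        ],
    }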
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "empanadas"
-version = "0.3.0"
+version = "0.4.0"
 description = "hand crafted ISOs with love and spice"
 authors = ["Louis Abel <label@rockylinux.org>", "Neil Hanlon <neil@rockylinux.org>"]
 
@@ -34,6 +34,7 @@ build-image = "empanadas.scripts.build_image:run"
 finalize_compose = "empanadas.scripts.finalize_compose:run"
 pull-cloud-image = "empanadas.scripts.pull_cloud_image:run"
 generate_compose = "empanadas.scripts.generate_compose:run"
+peridot_repoclosure = "empanadas.scripts.peridot_repoclosure:run"
 
 [build-system]
 requires = ["poetry-core>=1.0.0"]
@@ -2,4 +2,4 @@ from empanadas import __version__
 
 
 def test_version():
-    assert __version__ == '0.2.0'
+    assert __version__ == '0.4.0'
@@ -1 +1 @@
-# Mirrormanager Mangling tools
+# Mirrormanager Mangling tools and other Accessories
@@ -7,3 +7,4 @@ LIST=${LIST:-mirrorlist}
 MIRRORLIST_BASE="http://mirrors.rockylinux.org/${LIST}"
 
 MIRROR_DISPLAY_COUNT=1
+GIT_URL="https://git.rockylinux.org"

3 mangle/generators/README.md Normal file
@@ -0,0 +1,3 @@
# Generators

These help generate comps or other data we need for peridot or pungi

20 mangle/generators/common Normal file
@@ -0,0 +1,20 @@
# To be sourced by scripts to use

if [ -z "$RLVER" ]; then
  echo "You must set RLVER."
  exit 1
fi

PREPOPDROP="/tmp/prepopulate.json"
VERSDROP="/tmp/versions.list"

# Source Major common
# Override: Not Allowed
test -f "$(dirname "${BASH_SOURCE[0]}")/common_${RLVER}" && source "$(dirname "${BASH_SOURCE[0]}")/common_${RLVER}"
if [ "$?" -ne 0 ]; then
  echo "Could not source common_${RLVER}"
  exit 1
fi

STREAM_COMPOSE_BASEURL="https://composes.stream.centos.org/production"
COMPOSE_BASEDIR="/mnt/compose"

31 mangle/generators/common.py Normal file
@@ -0,0 +1,31 @@
class common:
    def rlver(self, rlver):
        default = "Not Supported"
        return getattr(self, 'rl' + str(rlver), lambda: default)()

    def rl8(self):
        REPOS = {
                'AppStream': ['aarch64', 'x86_64'],
                'BaseOS': ['aarch64', 'x86_64'],
                'HighAvailability': ['aarch64', 'x86_64'],
                'PowerTools': ['aarch64', 'x86_64'],
                'ResilientStorage': ['aarch64', 'x86_64'],
                'RT': ['x86_64'],
        }
        return REPOS

    def rl9(self):
        REPOS = {
                'AppStream': ['aarch64', 'ppc64le', 's390x', 'x86_64'],
                'BaseOS': ['aarch64', 'ppc64le', 's390x', 'x86_64'],
                'CRB': ['aarch64', 'ppc64le', 's390x', 'x86_64'],
                'HighAvailability': ['aarch64', 'ppc64le', 's390x', 'x86_64'],
                'NFV': ['x86_64'],
                'ResilientStorage': ['ppc64le', 's390x', 'x86_64'],
                'RT': ['x86_64'],
                'SAP': ['ppc64le', 's390x', 'x86_64'],
                'SAPHANA': ['ppc64le', 'x86_64']
        }
        return REPOS

switcher = common()

4 mangle/generators/common_8 Normal file
@@ -0,0 +1,4 @@
# To be sourced by scripts to use

REPO=("BaseOS" "AppStream" "CRB" "HighAvailability" "ResilientStorage" "NFV" "RT")
ARCH=("aarch64" "x86_64")

4 mangle/generators/common_9 Normal file
@@ -0,0 +1,4 @@
# To be sourced by scripts to use

REPO=("BaseOS" "AppStream" "CRB" "HighAvailability" "ResilientStorage" "NFV" "RT" "SAP" "SAPHANA")
ARCH=("aarch64" "ppc64le" "s390x" "x86_64")

18 mangle/generators/generate_comps Executable file
@@ -0,0 +1,18 @@
#!/bin/bash
source common

if [ ! -f "/usr/bin/pungi-koji" ]; then
  echo "You must have pungi installed."
  exit 1
fi

VER="${RLVER}"
BRANCH="r${VER}"
REPO="${GIT_URL}/rocky/pungi-rocky.git"

CMD="/usr/bin/pungi-koji --production --no-latest-link --just-phase init --config rocky.conf --compose-dir /var/tmp/composes/ --label RC-${VER}"

git clone ${REPO} -b ${BRANCH} /var/tmp/pungi
pushd /var/tmp/pungi
${CMD}
popd

38 mangle/generators/generate_prepopulate_from_pungi Executable file
@@ -0,0 +1,38 @@
#!/bin/bash
# Parses a local compose's repos
#set -x

if [ -n "$1" ] && [ -n "$2" ]; then
  MAJOR=$1
  DATE=$2
else
  echo "Major version not specified"
  exit 1
fi

# Verify the date format
echo "${DATE}" | grep -Eq '[0-9]+\.[0-9]'
grep_val=$?

if [ "$grep_val" -ne 0 ]; then
  echo "Date format incorrect. You must use: YYYYMMDD.X"
fi

export RLVER=$MAJOR
source common

drop="${PREPOPDROP}"
current=$(pwd)
compose_dir="${COMPOSE_BASEDIR}/Rocky-${MAJOR}-${DATE}/compose"

pushd "${compose_dir}" || { echo "Could not change directory"; exit 1; }
/usr/bin/python3 "${current}/prepopulate_parser.py"
ret_val=$?
popd || { echo "Could not change back..."; exit 1; }

if [ "$ret_val" -ne "0" ]; then
  echo "There was an error running through the parser."
  exit 1
fi

echo "File located at: $drop"

52 mangle/generators/generate_prepopulate_from_stream Executable file
@@ -0,0 +1,52 @@
#!/bin/bash
# Parses a CentOS Stream compose's repos
#set -x

if [ -n "$1" ] && [ -n "$2" ]; then
  MAJOR=$1
  DATE=$2
else
  echo "Major version not specified"
  exit 1
fi

# Verify the date format
echo "${DATE}" | grep -Eq '[0-9]+\.[0-9]'
grep_val=$?

if [ "$grep_val" -ne 0 ]; then
  echo "Date format incorrect. You must use: YYYYMMDD.X"
fi

export RLVER=$MAJOR
source common

drop="${PREPOPDROP}"
current=$(pwd)
tmpdir=$(mktemp -d)
stream_compose_url="${STREAM_COMPOSE_BASEURL}/CentOS-Stream-${MAJOR}-${DATE}/compose"

pushd "${tmpdir}" || { echo "Could not change directory"; exit 1; }
for x in "${REPO[@]}"; do
  echo "Working on ${x}"
  for y in "${ARCH[@]}"; do
    repodatas=( $(dnf reposync --repofrompath ${x},${stream_compose_url}/${x}/${y}/os --download-metadata --repoid=${x} -p ${x}/${y} --forcearch ${y} --norepopath --remote-time --assumeyes -u | grep repodata) )
    mkdir -p "${x}/${y}/repodata"
    pushd "${x}/${y}/repodata" || { echo "Could not change directory"; exit 1; }
    for z in "${repodatas[@]}"; do
      wget -q -nc "${z}"
    done
    wget -q -nc "${stream_compose_url}/${x}/${y}/os/repodata/repomd.xml"
    popd || { echo "Could not change back..."; exit 1; }
  done
done
/usr/bin/python3 "${current}/prepopulate_parser.py"
ret_val=$?
popd || { echo "Could not change back..."; exit 1; }

if [ "$ret_val" -ne "0" ]; then
  echo "There was an error running through the parser."
  exit 1
fi

echo "File located at: $drop"

52 mangle/generators/generate_versions_from_stream Executable file
@@ -0,0 +1,52 @@
#!/bin/bash
# Parses a CentOS Stream compose's repos
#set -x

if [ -n "$1" ] && [ -n "$2" ]; then
  MAJOR=$1
  DATE=$2
else
  echo "Major version not specified"
  exit 1
fi

# Verify the date format
echo "${DATE}" | grep -Eq '[0-9]+\.[0-9]'
grep_val=$?

if [ "$grep_val" -ne 0 ]; then
  echo "Date format incorrect. You must use: YYYYMMDD.X"
fi

export RLVER=$MAJOR
source common

drop="${VERSDROP}"
current=$(pwd)
tmpdir=$(mktemp -d)
stream_compose_url="${STREAM_COMPOSE_BASEURL}/CentOS-Stream-${MAJOR}-${DATE}/compose"

pushd "${tmpdir}" || { echo "Could not change directory"; exit 1; }
for x in "${REPO[@]}"; do
  echo "Working on ${x}"
  for y in "${ARCH[@]}"; do
    repodatas=( $(dnf reposync --repofrompath ${x},${stream_compose_url}/${x}/${y}/os --download-metadata --repoid=${x} -p ${x}/${y} --forcearch ${y} --norepopath --remote-time --assumeyes -u | grep repodata) )
    mkdir -p "${x}/${y}/repodata"
    pushd "${x}/${y}/repodata" || { echo "Could not change directory"; exit 1; }
    for z in "${repodatas[@]}"; do
      wget -q -nc "${z}"
    done
    wget -q -nc "${stream_compose_url}/${x}/${y}/os/repodata/repomd.xml"
    popd || { echo "Could not change back..."; exit 1; }
  done
done
/usr/bin/python3 "${current}/version_parser.py"
ret_val=$?
popd || { echo "Could not change back..."; exit 1; }

if [ "$ret_val" -ne "0" ]; then
  echo "There was an error running through the parser."
  exit 1
fi

echo "File located at: $drop"

69 mangle/generators/prepopulate_parser.py Executable file
@@ -0,0 +1,69 @@
#!/usr/bin/env python3
import os
import os.path
import json
import dnf
import createrepo_c as cr
from common import *

REPOS = switcher.rlver((os.environ['RLVER']))

# Source packages we do not ship or are rocky branded
IGNORES = [
    'insights-client',
    'rhc',
    'centos-indexhtml',
    'centos-logos',
    'centos-stream-release',
    'redhat-indexhtml',
    'redhat-logos',
    'redhat-release'
]

def warningcb(warning_type, message):
    print("WARNING: %s" % message)
    return True

repo_prepop = {}
for k in REPOS:
    repo_prepop[k] = {}
    for arch in REPOS[k]:
        PRIMARY_XML_PATH = None
        FILELISTS_XML_PATH = None
        OTHER_XML_PATH = None
        REPO_PATH = k + '/' + arch
        repomd = cr.Repomd()
        cr.xml_parse_repomd(os.path.join(REPO_PATH, "repodata/repomd.xml"), repomd, warningcb)
        for record in repomd.records:
            if record.type == "primary":
                PRIMARY_XML_PATH = os.path.join(REPO_PATH, record.location_href)
            elif record.type == "filelists":
                FILELISTS_XML_PATH = os.path.join(REPO_PATH, record.location_href)
            elif record.type == "other":
                OTHER_XML_PATH = os.path.join(REPO_PATH, record.location_href)

        package_iterator = cr.PackageIterator(primary_path=PRIMARY_XML_PATH, filelists_path=FILELISTS_XML_PATH, other_path=OTHER_XML_PATH, warningcb=warningcb)
        repo_prepop[k][arch] = {}
        for pkg in package_iterator:
            name = pkg.name + '.' + pkg.arch
            subject = dnf.subject.Subject(pkg.rpm_sourcerpm)
            possible_nevra = subject.get_nevra_possibilities()
            srcname = possible_nevra[0].name
            # Ignore packages (by source) that we do not ship
            if srcname in IGNORES:
                continue

            # Create the initial list if the package (by source) does not exist
            if srcname not in repo_prepop[k][arch]:
                repo_prepop[k][arch][srcname] = []

            # Avoids duplicate entries - This is especially helpful for modules
            if name not in repo_prepop[k][arch][srcname]:
                repo_prepop[k][arch][srcname].append(name)

            # Sorts the list items
            repo_prepop[k][arch][srcname].sort()

entry_point = open('/tmp/prepopulate.json', 'w+')
json.dump(repo_prepop, entry_point, indent=2, sort_keys=True)
entry_point.close()

72 mangle/generators/version_parser.py Executable file
@@ -0,0 +1,72 @@
#!/usr/bin/env python3
import os
import os.path
import json
import dnf
import createrepo_c as cr
from common import *

REPOS = switcher.rlver((os.environ['RLVER']))

# Source packages we do not ship or are rocky branded
IGNORES = [
    'insights-client',
    'rhc',
    'centos-indexhtml',
    'centos-logos',
    'centos-stream-release',
    'redhat-indexhtml',
    'redhat-logos',
    'redhat-release'
]

def warningcb(warning_type, message):
    print("WARNING: %s" % message)
    return True

repo_prepop = {}
with open('/tmp/versions.list', 'w+') as fp:
    for k in REPOS:
        repo_prepop[k] = {}
        for arch in REPOS[k]:
            PRIMARY_XML_PATH = None
            FILELISTS_XML_PATH = None
            OTHER_XML_PATH = None
            REPO_PATH = k + '/' + arch
            repomd = cr.Repomd()
            cr.xml_parse_repomd(os.path.join(REPO_PATH, "repodata/repomd.xml"), repomd, warningcb)
            for record in repomd.records:
                if record.type == "primary":
                    PRIMARY_XML_PATH = os.path.join(REPO_PATH, record.location_href)
                elif record.type == "filelists":
                    FILELISTS_XML_PATH = os.path.join(REPO_PATH, record.location_href)
                elif record.type == "other":
                    OTHER_XML_PATH = os.path.join(REPO_PATH, record.location_href)

            package_iterator = cr.PackageIterator(primary_path=PRIMARY_XML_PATH, filelists_path=FILELISTS_XML_PATH, other_path=OTHER_XML_PATH, warningcb=warningcb)
            repo_prepop[k][arch] = {}
            for pkg in package_iterator:
                subject = dnf.subject.Subject(pkg.rpm_sourcerpm)
                possible_nevra = subject.get_nevra_possibilities()
                srcname = possible_nevra[0].name
                srcvers = possible_nevra[0].version
                srcrele = possible_nevra[0].release
                full = srcname + '-' + srcvers + '-' + srcrele
                # Ignore packages (by source) that we do not ship
                if srcname in IGNORES:
                    continue

                # Create the initial list if the package (by source) does not exist
                if srcname not in repo_prepop[k][arch]:
                    repo_prepop[k][arch][srcname] = {}

                # Avoids duplicate entries - This is especially helpful for modules
                repo_prepop[k][arch][srcname]['version'] = srcvers
                repo_prepop[k][arch][srcname]['release'] = srcrele

                fp.write(full + '\n')
    fp.close()

entry_point = open('/tmp/versions.json', 'w+')
json.dump(repo_prepop, entry_point, indent=2, sort_keys=True)
entry_point.close()
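Both parsers nest their output as repo, then arch, then source package. A hedged illustration of the resulting JSON documents (package names and versions are examples only, not taken from a real compose):

    # /tmp/prepopulate.json maps each source package to the binary names it produces:
    prepopulate_example = {
        "BaseOS": {
            "x86_64": {
                "bash": ["bash.x86_64", "bash-devel.x86_64"],
            }
        }
    }

    # /tmp/versions.json (and versions.list) records the source version-release instead:
    versions_example = {
        "BaseOS": {
            "x86_64": {
                "bash": {"version": "5.1.8", "release": "4.el9"},
            }
        }
    }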
@@ -22,6 +22,7 @@ PRODUCTION_ROOT="/mnt/repos-production"
 # Override: Not Allowed
 # relative to ${ENV}_ROOT
 CATEGORY_STUB="mirror/pub/rocky"
+SIG_CATEGORY_STUB="mirror/pub/sig"
 
 # Override: Required
 #RELEASE_DIR="${CATEGORY_STUB}/${REVISION}${APPEND_TO_DIR}"

51 sync/sync-file-list-parallel.sh Normal file
@@ -0,0 +1,51 @@
#!/bin/bash
# Syncs everything from staging to production

# Source common variables
# shellcheck disable=SC2046,1091,1090
source "$(dirname "$0")/common"

REV=${REVISION}${APPEND_TO_DIR}

cd "${STAGING_ROOT}/${CATEGORY_STUB}/${REV}" || { echo "Failed to change directory"; ret_val=1; exit 1; }
ret_val=$?

if [ $ret_val -eq "0" ]; then
  # Full file list update for rocky linux itself
  echo "Starting full file list for category"
  cd "${PRODUCTION_ROOT}/${CATEGORY_STUB}/" || { echo "Failed to change directory"; exit 1; }
  find . > "${PRODUCTION_ROOT}/${CATEGORY_STUB}/fullfilelist" & CATEPID=$!
  echo "Starting full file list for root"
  cd "${PRODUCTION_ROOT}/" || { echo "Failed to change directory"; exit 1; }
  find . > "${PRODUCTION_ROOT}/fullfilelist" & ROOTPID=$!

  wait $CATEPID
  wait $ROOTPID

  echo "Generating filelist for quick-fedora-mirror users"
  if [[ -f /usr/local/bin/create-filelist ]]; then
    # We're already here, but Justin Case wanted this
    cd "${PRODUCTION_ROOT}/${CATEGORY_STUB}/" || { echo "Failed to change directory"; exit 1; }
    /bin/cp fullfiletimelist-rocky fullfiletimelist-rocky-old
    /usr/local/bin/create-filelist > fullfiletimelist-rocky & CREALIPID=$!

    # We're already here, but Justin Case wanted this
    cd "${PRODUCTION_ROOT}/" || { echo "Failed to change directory"; exit 1; }
    /bin/cp fullfiletimelist-rocky fullfiletimelist-rocky-old
    /usr/local/bin/create-filelist > fullfiletimelist-rocky & ROOTLIPID=$!

    wait $CREALIPID
    wait $ROOTLIPID

    cd "${PRODUCTION_ROOT}/${CATEGORY_STUB}/" || { echo "Failed to change directory"; exit 1; }
    chown 10004:10005 fullfilelist fullfiletimelist-rocky fullfiletimelist fullfiletimelist-rocky-linux
    cp fullfiletimelist-rocky fullfiletimelist
    cp fullfiletimelist-rocky fullfiletimelist-rocky-linux

    cd "${PRODUCTION_ROOT}/" || { echo "Failed to change directory"; exit 1; }
    chown 10004:10005 fullfilelist fullfiletimelist-rocky fullfiletimelist
    cp fullfiletimelist-rocky fullfiletimelist
  fi

fi

31 sync/sync-full-file-list.sh Normal file
@@ -0,0 +1,31 @@
#!/bin/bash
# Syncs everything from staging to production

# Source common variables
# shellcheck disable=SC2046,1091,1090
source "$(dirname "$0")/common"

REV=${REVISION}${APPEND_TO_DIR}

cd "${STAGING_ROOT}/${CATEGORY_STUB}/${REV}" || { echo "Failed to change directory"; ret_val=1; exit 1; }
ret_val=$?

if [ $ret_val -eq "0" ]; then
  # Full file list update for rocky linux itself
  cd "${PRODUCTION_ROOT}/${CATEGORY_STUB}/" || { echo "Failed to change directory"; exit 1; }
  # Hardlink everything except xml files
  #hardlink -x '.*\.xml.*' "${REVISION}"
  echo "Starting full file list for category"
  find . > fullfilelist
  echo "Generating filelist for quick-fedora-mirror users"
  if [[ -f /usr/local/bin/create-filelist ]]; then
    # We're already here, but Justin Case wanted this
    cd "${PRODUCTION_ROOT}/${CATEGORY_STUB}/" || { echo "Failed to change directory"; exit 1; }
    /bin/cp fullfiletimelist-rocky fullfiletimelist-rocky-old
    /usr/local/bin/create-filelist > fullfiletimelist-rocky
    cp fullfiletimelist-rocky fullfiletimelist
    cp fullfiletimelist-rocky fullfiletimelist-rocky-linux
  fi
  chown 10004:10005 fullfilelist fullfiletimelist-rocky fullfiletimelist fullfiletimelist-rocky-linux
fi

28 sync/sync-root-file-list.sh Normal file
@@ -0,0 +1,28 @@
#!/bin/bash
# Syncs everything from staging to production

# Source common variables
# shellcheck disable=SC2046,1091,1090
source "$(dirname "$0")/common"

REV=${REVISION}${APPEND_TO_DIR}

cd "${STAGING_ROOT}/${CATEGORY_STUB}/${REV}" || { echo "Failed to change directory"; ret_val=1; exit 1; }
ret_val=$?

if [ $ret_val -eq "0" ]; then
  # Full file list update for production root
  cd "${PRODUCTION_ROOT}/" || echo { echo "Failed to change directory"; exit 1; }
  echo "Starting full file list for root"
  find . > fullfilelist
  echo "Generating filelist for quick-fedora-mirror users"
  if [[ -f /usr/local/bin/create-filelist ]]; then
    # We're already here, but Justin Case wanted this
    cd "${PRODUCTION_ROOT}/" || { echo "Failed to change directory"; exit 1; }
    /bin/cp fullfiletimelist-rocky fullfiletimelist-rocky-old
    /usr/local/bin/create-filelist > fullfiletimelist-rocky
    cp fullfiletimelist-rocky fullfiletimelist
  fi
  chown 10004:10005 fullfilelist fullfiletimelist-rocky fullfiletimelist
fi

28 sync/sync-sig-file-list.sh Normal file
@@ -0,0 +1,28 @@
#!/bin/bash
# Syncs everything from staging to production

# Source common variables
# shellcheck disable=SC2046,1091,1090
source "$(dirname "$0")/common"

REV=${REVISION}${APPEND_TO_DIR}

cd "${PRODUCTION_ROOT}/${SIG_CATEGORY_STUB}" || { echo "Failed to change directory"; ret_val=1; exit 1; }
ret_val=$?

if [ $ret_val -eq "0" ]; then
  # Full file list update for production root
  cd "${PRODUCTION_ROOT}/${SIG_CATEGORY_STUB}" || { echo "Failed to change directory"; exit 1; }
  echo "Starting full file list for root"
  find . > fullfilelist
  echo "Generating filelist for quick-fedora-mirror users"
  if [[ -f /usr/local/bin/create-filelist ]]; then
    # We're already here, but Justin Case wanted this
    cd "${PRODUCTION_ROOT}/${SIG_CATEGORY_STUB}" || { echo "Failed to change directory"; exit 1; }
    /bin/cp fullfiletimelist-sig fullfiletimelist-sig-old
    /usr/local/bin/create-filelist > fullfiletimelist-sig
    cp fullfiletimelist-sig fullfiletimelist
  fi
  chown 10004:10005 fullfilelist fullfiletimelist-sig fullfiletimelist
fi
@@ -14,33 +14,34 @@ if [ $ret_val -eq "0" ]; then
   TARGET="${PRODUCTION_ROOT}/${CATEGORY_STUB}/${REV:0:3}"
   mkdir -p "${TARGET}"
   echo "Syncing ${REVISION}"
-  sudo -l && time fpsync -o '-av --numeric-ids --no-compress --chown=10004:10005' -n 24 -t /mnt/compose/partitions "${STAGING_ROOT}/${CATEGORY_STUB}/${REV}/" "${TARGET}/"
+  sudo -l && time fpsync -n 24 -o '-a --numeric-ids --no-compress --chown=10004:10005' -t /mnt/compose/partitions "${STAGING_ROOT}/${CATEGORY_STUB}/${REV}/" "${TARGET}/"
 
   # Full file list update for production root
-  cd "${PRODUCTION_ROOT}/" || { echo "Failed to change directory"; exit 1; }
-  echo "Getting a full file list for the root dir"
-  find . > fullfilelist
-  if [[ -f /usr/local/bin/create-filelist ]]; then
-    # We're already here, but Justin Case wanted this
-    cd "${PRODUCTION_ROOT}/" || { echo "Failed to change directory"; exit 1; }
-    /bin/cp fullfiletimelist-rocky fullfiletimelist-rocky-old
-    /usr/local/bin/create-filelist > fullfiletimelist-rocky
-    cp fullfiletimelist-rocky fullfiletimelist
-  fi
-  # Full file list update for rocky linux itself
+  #cd "${PRODUCTION_ROOT}/" || { echo "Failed to change directory"; exit 1; }
+  #echo "Getting a full file list for the root dir"
+  #find . > fullfilelist
+  #if [[ -f /usr/local/bin/create-filelist ]]; then
+  #  # We're already here, but Justin Case wanted this
+  #  cd "${PRODUCTION_ROOT}/" || { echo "Failed to change directory"; exit 1; }
+  #  /bin/cp fullfiletimelist-rocky fullfiletimelist-rocky-old
+  #  /usr/local/bin/create-filelist > fullfiletimelist-rocky
+  #  cp fullfiletimelist-rocky fullfiletimelist
+  #fi
+  ## Full file list update for rocky linux itself
   cd "${PRODUCTION_ROOT}/${CATEGORY_STUB}/" || { echo "Failed to change directory"; exit 1; }
-  # Hardlink everything except xml files
+  ## Hardlink everything except xml files
   echo "Hard linking"
   hardlink -x '.*\.xml.*' "${REVISION}"
-  echo "Getting a full file list for the rocky dir"
-  find . > fullfilelist
-  if [[ -f /usr/local/bin/create-filelist ]]; then
-    # We're already here, but Justin Case wanted this
-    cd "${PRODUCTION_ROOT}/${CATEGORY_STUB}/" || { echo "Failed to change directory"; exit 1; }
-    /bin/cp fullfiletimelist-rocky fullfiletimelist-rocky-old
-    /usr/local/bin/create-filelist > fullfiletimelist-rocky
-    cp fullfiletimelist-rocky fullfiletimelist
-  fi
-  chown 10004:10005 fullfilelist fullfiletimelist-rocky fullfiletimelist
+  #echo "Getting a full file list for the rocky dir"
+  #find . > fullfilelist
+  #if [[ -f /usr/local/bin/create-filelist ]]; then
+  #  # We're already here, but Justin Case wanted this
+  #  cd "${PRODUCTION_ROOT}/${CATEGORY_STUB}/" || { echo "Failed to change directory"; exit 1; }
+  #  /bin/cp fullfiletimelist-rocky fullfiletimelist-rocky-old
+  #  /usr/local/bin/create-filelist > fullfiletimelist-rocky
+  #  cp fullfiletimelist-rocky fullfiletimelist
+  #fi
+  #chown 10004:10005 fullfilelist fullfiletimelist-rocky fullfiletimelist
+  echo "Syncing to prod completed. Please run the file list script."
 fi

29 sync/sync-to-prod-sig.sh Normal file
@@ -0,0 +1,29 @@
#!/bin/bash

# Source common variables
# shellcheck disable=SC2046,1091,1090
source "$(dirname "$0")/common"

#if [[ $# -eq 0 ]] || [[ $# -eq 1 ]]; then
#  echo "Not enough information."
#  echo "You must use: shortname sig"
#  exit 1
#fi

cd "${STAGING_ROOT}/${SIG_CATEGORY_STUB}/${MAJOR}" || { echo "Failed to change directory"; ret_val=1; exit 1; }
ret_val=$?

if [ $ret_val -eq "0" ]; then
  TARGET=${PRODUCTION_ROOT}/${SIG_CATEGORY_STUB}/${MAJOR}/
  mkdir -p "${TARGET}"
  # disabling because none of our files should be starting with dashes. If they
  # are something is *seriously* wrong here.
  # shellcheck disable=SC2035
  sudo -l && find **/* -maxdepth 0 -type d | parallel --will-cite -j 18 sudo rsync -av --chown=10004:10005 --progress --relative --human-readable \
    {} "${TARGET}"

  cd "${PRODUCTION_ROOT}/${SIG_CATEGORY_STUB}/" || { echo "Failed to change directory"; exit 1; }
  echo "Hard linking"
  hardlink -x '.*\.xml.*' "${MAJOR}"
  echo "Syncing to prod completed. Please run the file list script."
fi
@@ -33,27 +33,29 @@ if [ $ret_val -eq "0" ]; then
   fi
 
   # Full file list update for production root
-  cd "${PRODUCTION_ROOT}/" || echo { echo "Failed to change directory"; exit 1; }
-  find . > fullfilelist
-  if [[ -f /usr/local/bin/create-filelist ]]; then
-    # We're already here, but Justin Case wanted this
-    cd "${PRODUCTION_ROOT}/" || { echo "Failed to change directory"; exit 1; }
-    /bin/cp fullfiletimelist-rocky fullfiletimelist-rocky-old
-    /usr/local/bin/create-filelist > fullfiletimelist-rocky
-    cp fullfiletimelist-rocky fullfiletimelist
-  fi
-  # Full file list update for rocky linux itself
+  #cd "${PRODUCTION_ROOT}/" || echo { echo "Failed to change directory"; exit 1; }
+  #find . > fullfilelist
+  #if [[ -f /usr/local/bin/create-filelist ]]; then
+  #  # We're already here, but Justin Case wanted this
+  #  cd "${PRODUCTION_ROOT}/" || { echo "Failed to change directory"; exit 1; }
+  #  /bin/cp fullfiletimelist-rocky fullfiletimelist-rocky-old
+  #  /usr/local/bin/create-filelist > fullfiletimelist-rocky
+  #  cp fullfiletimelist-rocky fullfiletimelist
+  #fi
+  ## Full file list update for rocky linux itself
   cd "${PRODUCTION_ROOT}/${CATEGORY_STUB}/" || { echo "Failed to change directory"; exit 1; }
-  # Hardlink everything except xml files
+  ## Hardlink everything except xml files
+  echo "Hard linking"
   hardlink -x '.*\.xml.*' "${REVISION}"
-  find . > fullfilelist
-  if [[ -f /usr/local/bin/create-filelist ]]; then
-    # We're already here, but Justin Case wanted this
-    cd "${PRODUCTION_ROOT}/${CATEGORY_STUB}/" || { echo "Failed to change directory"; exit 1; }
-    /bin/cp fullfiletimelist-rocky fullfiletimelist-rocky-old
-    /usr/local/bin/create-filelist > fullfiletimelist-rocky
-    cp fullfiletimelist-rocky fullfiletimelist
-  fi
-  chown 10004:10005 fullfilelist fullfiletimelist-rocky fullfiletimelist
+  #find . > fullfilelist
+  #if [[ -f /usr/local/bin/create-filelist ]]; then
+  #  # We're already here, but Justin Case wanted this
+  #  cd "${PRODUCTION_ROOT}/${CATEGORY_STUB}/" || { echo "Failed to change directory"; exit 1; }
+  #  /bin/cp fullfiletimelist-rocky fullfiletimelist-rocky-old
+  #  /usr/local/bin/create-filelist > fullfiletimelist-rocky
+  #  cp fullfiletimelist-rocky fullfiletimelist
+  #fi
+  #chown 10004:10005 fullfilelist fullfiletimelist-rocky fullfiletimelist
+  echo "Syncing to prod completed. Please run the file list script."
 fi
 
@@ -26,7 +26,7 @@ if [ $ret_val -eq "0" ]; then
   # shellcheck disable=SC2035
   #sudo -l && find **/* -maxdepth 0 -type d | parallel --will-cite -j 18 sudo rsync -av --chown=10004:10005 --progress --relative --human-readable \
   # {} "${TARGET}"
-  sudo -l && time fpsync -o '-av --numeric-ids --no-compress --chown=10004:10005' -n 24 -t /mnt/compose/partitions "/mnt/compose/${MAJ}/latest-${SHORT}-${MAJ}${PROFILE}/compose/" "${TARGET}/"
+  sudo -l && time fpsync -n 24 -o '-a --numeric-ids --no-compress --chown=10004:10005' -t /mnt/compose/partitions "/mnt/compose/${MAJ}/latest-${SHORT}-${MAJ}${PROFILE}/compose/" "${TARGET}/"
 
   # This is temporary until we implement rsync into empanadas
   #if [ -f "COMPOSE_ID" ]; then

40 sync/sync-to-staging-fpsync-testing.sh Normal file
@@ -0,0 +1,40 @@
#!/bin/bash

# Source common variables
# shellcheck disable=SC2046,1091,1090
source "$(dirname "$0")/common"

if [[ $# -eq 0 ]]; then
  echo "You must specify a short name."
  exit 1
fi

# Major Version (eg, 8)
MAJ=${RLVER}
# Short name (eg, NFV, extras, Rocky, gluster9)
SHORT=${1}
PROFILE=${2}

cd "/mnt/compose/${MAJ}/latest-${SHORT}-${MAJ}${PROFILE}/compose" || { echo "Failed to change directory"; ret_val=1; exit 1; }
ret_val=$?

if [ $ret_val -eq "0" ]; then
  TARGET="${STAGING_ROOT}/${CATEGORY_STUB}/${REV}"
  mkdir -p "${TARGET}"
  # disabling because none of our files should be starting with dashes. If they
  # are something is *seriously* wrong here.
  # shellcheck disable=SC2035
  #sudo -l && find **/* -maxdepth 0 -type d | parallel --will-cite -j 18 sudo rsync -av --chown=10004:10005 --progress --relative --human-readable \
  # {} "${TARGET}"
  sudo -l && time fpsync -n 18 -o '-a --numeric-ids --no-compress --chown=10004:10005' -t /mnt/compose/partitions "/mnt/compose/${MAJ}/latest-${SHORT}-${MAJ}${PROFILE}/compose/" "${TARGET}/"

  # This is temporary until we implement rsync into empanadas
  #if [ -f "COMPOSE_ID" ]; then
  #  cp COMPOSE_ID "${TARGET}"
  #  chown 10004:10005 "${TARGET}/COMPOSE_ID"
  #fi

  #if [ -d "metadata" ]; then
  #  rsync -av --chown=10004:10005 --progress --relative --human-readable metadata "${TARGET}"
  #fi
fi
@@ -4,9 +4,9 @@
 # shellcheck disable=SC2046,1091,1090
 source "$(dirname "$0")/common"
 
-if [[ $# -eq 0 ]] || [[ $# -eq 1 ]]; then
+if [[ $# -eq 0 ]]; then
     echo "Not enough information."
-    echo "You must use: shortname sig"
+    echo "You must use: sig"
     exit 1
 fi
 
@@ -17,11 +17,11 @@ SHORT=${1}
 # Note, this should be lowercase. eg, storage.
 SIG=${2}
 
-cd "/mnt/compose/${MAJ}/latest-${SHORT}-${MAJ}/compose" || { echo "Failed to change directory"; ret_val=1; exit 1; }
+cd "/mnt/compose/${MAJ}/latest-SIG-${SHORT}-${MAJ}/compose" || { echo "Failed to change directory"; ret_val=1; exit 1; }
 ret_val=$?
 
 if [ $ret_val -eq "0" ]; then
-    TARGET=${STAGING_ROOT}/${CATEGORY_STUB}/${REV}/${SIG}
+    TARGET=${STAGING_ROOT}/${SIG_CATEGORY_STUB}/
     mkdir -p "${TARGET}"
     # disabling because none of our files should be starting with dashes. If they
     # are something is *seriously* wrong here.