add delete old packages option

Louis Abel 2022-08-03 23:07:47 -07:00
parent 94a530f4b5
commit 18b985cdcc
Signed by: label
GPG Key ID: B37E62D143879B36
6 changed files with 74 additions and 18 deletions

View File

@@ -28,6 +28,7 @@ parser.add_argument('--simple', action='store_false')
parser.add_argument('--logger', type=str)
parser.add_argument('--disable-gpg-check', action='store_false')
parser.add_argument('--disable-repo-gpg-check', action='store_false')
+parser.add_argument('--clean-old-packages', action='store_true')
# Parse them
results = parser.parse_args()
@@ -58,6 +59,7 @@ a = RepoSync(
refresh_treeinfo=results.refresh_treeinfo,
gpg_check=results.disable_gpg_check,
repo_gpg_check=results.disable_repo_gpg_check,
+reposync_clean_old=results.clean_old_packages,
)
def run():
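
For orientation, here is a minimal, self-contained sketch of how the new flag travels from the command line into the sync class. The FakeRepoSync stub and the hard-coded argument list are illustrative assumptions; the real wiring is the RepoSync call shown in the hunk above.

import argparse

class FakeRepoSync:
    # Stand-in for empanadas' RepoSync; only the new keyword is modeled here.
    def __init__(self, reposync_clean_old: bool = False):
        self.reposync_clean_old = reposync_clean_old

parser = argparse.ArgumentParser()
# store_true: defaults to False, flips to True only when the flag is passed.
parser.add_argument('--clean-old-packages', action='store_true')
results = parser.parse_args(['--clean-old-packages'])

sync = FakeRepoSync(reposync_clean_old=results.clean_old_packages)
print(sync.reposync_clean_old)  # True; omit the flag and this prints False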

View File

@@ -31,6 +31,7 @@ parser.add_argument('--simple', action='store_false')
parser.add_argument('--logger', type=str)
parser.add_argument('--disable-gpg-check', action='store_false')
parser.add_argument('--disable-repo-gpg-check', action='store_false')
+parser.add_argument('--clean-old-packages', action='store_true')
# Parse them
results = parser.parse_args()
@@ -61,6 +62,7 @@ a = SigRepoSync(
logger=results.logger,
gpg_check=results.disable_gpg_check,
repo_gpg_check=results.disable_repo_gpg_check,
+reposync_clean_old=results.clean_old_packages,
)

View File

@@ -10,8 +10,8 @@ altarch:
altarch-rockyrpi:
allowed_arches:
- aarch64
additional_dirs:
- 'images'
extra_files:
git_repo: 'https://git.rockylinux.org/staging/src/rocky-release-altarch.git'
git_raw_path: 'https://git.rockylinux.org/staging/src/rocky-release-altarch/-/raw/r8/'
@@ -30,8 +30,8 @@ altarch:
altarch-rockyrpi:
allowed_arches:
- aarch64
additional_dirs:
- 'images'
extra_files:
git_repo: 'https://git.rockylinux.org/staging/src/rocky-release-altarch.git'
git_raw_path: 'https://git.rockylinux.org/staging/src/rocky-release-altarch/-/raw/r9/'

View File

@@ -11,6 +11,15 @@ metadata provides.
# Notes #
+## Unversioned ISO Files ##
+There are unversioned ISO files in the isos and live directories for each
+architecture. These give libvirt users an easy way to download an ISO for the
+release of their choosing, and they give all users a predictable path to the
+latest ISO of a given release: the release is part of the URL rather than of
+the ISO name. Note that these unversioned ISO files may or may not be
+advertised on the main site.
## Checksums ##
CHECKSUM Validation: https://github.com/rocky-linux/checksums
@@ -33,4 +42,3 @@ any of the following:
https://keys.openpgp.org/
https://keyserver.ubuntu.com

View File

@@ -61,6 +61,8 @@ class RepoSync:
repo_gpg_check: bool = True,
rlmode: str = 'stable',
just_pull_everything: bool = False,
+extra_dnf_args=None,
+reposync_clean_old: bool = False,
logger=None
):
self.nofail = nofail
@@ -118,6 +120,17 @@ class RepoSync:
file_loader = FileSystemLoader(f"{_rootdir}/templates")
self.tmplenv = Environment(loader=file_loader)
+# dnf args
+dnf_args_to_add = []
+if extra_dnf_args:
+    if '--delete' in extra_dnf_args:
+        raise SystemExit('Please use the --reposync-clean option instead.')
+    dnf_args_to_add.extend(extra_dnf_args.split(' '))
+self.extra_dnf_args = dnf_args_to_add.copy()
+self.reposync_clean_old = reposync_clean_old
# each el can have its own designated container to run stuff in,
# otherwise we'll just default to the default config.
self.container = config['container']
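
As a rough sanity check of the constructor logic added above, the following standalone sketch reproduces the same behaviour outside the class; the helper name and the sample flag (--newest-only) are assumptions for illustration, not part of the commit.

def collect_extra_dnf_args(extra_dnf_args=None):
    # Mirrors the added __init__ logic: reject a raw --delete so callers go
    # through the dedicated clean-old-packages switch, otherwise split the
    # space-separated string into a list of individual dnf arguments.
    dnf_args_to_add = []
    if extra_dnf_args:
        if '--delete' in extra_dnf_args:
            raise SystemExit('Please use the --reposync-clean option instead.')
        dnf_args_to_add.extend(extra_dnf_args.split(' '))
    return dnf_args_to_add

print(collect_extra_dnf_args('--newest-only'))  # ['--newest-only']
print(collect_extra_dnf_args())                 # []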
@@ -321,6 +334,8 @@ class RepoSync:
cmd = Shared.podman_cmd(self.log)
contrunlist = []
bad_exit_list = []
+extra_dnf_args = ' '.join(self.extra_dnf_args.copy())
+reposync_delete = '--delete' if self.reposync_clean_old else ''
self.log.info('Generating container entries')
entries_dir = os.path.join(work_root, "entries")
gpg_key_url = self.extra_files['git_raw_path'] + self.extra_files['gpg'][self.gpgkey]
@@ -422,12 +437,13 @@ class RepoSync:
sync_cmd = ("/usr/bin/dnf reposync -c {}.{} --download-metadata "
"--repoid={} -p {} --forcearch {} --norepopath --remote-time "
-"--gpgcheck --assumeyes 2>&1").format(
+"--gpgcheck --assumeyes {} 2>&1").format(
self.dnf_config,
a,
r,
os_sync_path,
-a
+a,
+reposync_delete
)
debug_metadata_cmd = ("/usr/bin/dnf makecache -c {}.{} --repoid={}-debug "
@@ -441,12 +457,13 @@ class RepoSync:
debug_sync_cmd = ("/usr/bin/dnf reposync -c {}.{} "
"--download-metadata --repoid={}-debug -p {} --forcearch {} "
-"--gpgcheck --norepopath --remote-time --assumeyes 2>&1").format(
+"--gpgcheck --norepopath --remote-time --assumeyes {} 2>&1").format(
self.dnf_config,
a,
r,
debug_sync_path,
-a
+a,
+reposync_delete
)
dnf_plugin_cmd = "/usr/bin/dnf install dnf-plugins-core -y"
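
To make the template change concrete, this throwaway snippet shows what the extra {} slot in these reposync commands expands to; the config path, repo id, and sync path are placeholder values, not ones taken from an actual compose.

reposync_clean_old = True
reposync_delete = '--delete' if reposync_clean_old else ''
sync_cmd = ("/usr/bin/dnf reposync -c {}.{} --download-metadata "
            "--repoid={} -p {} --forcearch {} --norepopath --remote-time "
            "--gpgcheck --assumeyes {} 2>&1").format(
    '/tmp/dnf.conf', 'x86_64', 'BaseOS', '/mnt/compose/BaseOS/x86_64/os',
    'x86_64', reposync_delete
)
print(sync_cmd)
# ends with: --gpgcheck --assumeyes --delete 2>&1
# With the option disabled, reposync_delete is '' and the slot collapses to a
# doubled space before 2>&1, which the shell ignores.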
@@ -571,10 +588,11 @@ class RepoSync:
source_sync_cmd = ("/usr/bin/dnf reposync -c {} "
"--download-metadata --repoid={}-source -p {} "
-"--gpgcheck --norepopath --remote-time --assumeyes 2>&1").format(
+"--gpgcheck --norepopath --remote-time --assumeyes {} 2>&1").format(
self.dnf_config,
r,
-source_sync_path
+source_sync_path,
+reposync_delete
)
source_sync_template = self.tmplenv.get_template('reposync-src.tmpl')
@@ -1494,6 +1512,8 @@ class SigRepoSync:
gpgkey: str = 'stable',
gpg_check: bool = True,
repo_gpg_check: bool = True,
+extra_dnf_args=None,
+reposync_clean_old: bool = False,
logger=None
):
self.nofail = nofail
@@ -1534,7 +1554,7 @@ class SigRepoSync:
self.gpgkey = gpgkey
#self.arches = sigvars['allowed_arches']
self.project_id = sigvars['project_id']
-if 'additional_vars' in sigvars:
+if 'additional_dirs' in sigvars:
self.additional_dirs = sigvars['additional_dirs']
self.compose_id = '{}-{}-{}'.format(
@@ -1547,6 +1567,17 @@ class SigRepoSync:
file_loader = FileSystemLoader(f"{_rootdir}/templates")
self.tmplenv = Environment(loader=file_loader)
+# dnf args
+dnf_args_to_add = []
+if extra_dnf_args:
+    if '--delete' in extra_dnf_args:
+        raise SystemExit('Please use the --reposync-clean option instead.')
+    dnf_args_to_add.extend(extra_dnf_args.split(' '))
+self.extra_dnf_args = dnf_args_to_add.copy()
+self.reposync_clean_old = reposync_clean_old
# each el can have its own designated container to run stuff in,
# otherwise we'll just default to the default config.
self.container = config['container']
@@ -1724,6 +1755,7 @@ class SigRepoSync:
else:
Shared.dnf_sync(repo, sync_root, work_root, arch, self.log)
+self.create_additional_dirs(sync_root)
def podman_sync(
self,
@@ -1744,6 +1776,8 @@ class SigRepoSync:
"""
cmd = Shared.podman_cmd(self.log)
bad_exit_list = []
+extra_dnf_args = ' '.join(self.extra_dnf_args.copy())
+reposync_delete = '--delete' if self.reposync_clean_old else ''
self.log.info('Generating container entries')
entries_dir = os.path.join(work_root, "entries")
gpg_key_url = self.extra_files['git_raw_path'] + self.extra_files['gpg'][self.gpgkey]
@@ -1837,12 +1871,13 @@ class SigRepoSync:
sync_cmd = ("/usr/bin/dnf reposync -c {}.{} --download-metadata "
"--repoid={} -p {} --forcearch {} --norepopath --remote-time "
-"--gpgcheck --assumeyes 2>&1").format(
+"--gpgcheck --assumeyes {} 2>&1").format(
self.dnf_config,
a,
r,
os_sync_path,
-a
+a,
+reposync_delete
)
debug_metadata_cmd = ("/usr/bin/dnf makecache -c {}.{} --repoid={}-debug "
@@ -1856,12 +1891,13 @@ class SigRepoSync:
debug_sync_cmd = ("/usr/bin/dnf reposync -c {}.{} "
"--download-metadata --repoid={}-debug -p {} --forcearch {} "
-"--gpgcheck --norepopath --remote-time --assumeyes 2>&1").format(
+"--gpgcheck --norepopath --remote-time --assumeyes {} 2>&1").format(
self.dnf_config,
a,
r,
debug_sync_path,
-a
+a,
+reposync_delete
)
dnf_plugin_cmd = "/usr/bin/dnf install dnf-plugins-core -y"
@@ -1936,10 +1972,11 @@ class SigRepoSync:
source_sync_cmd = ("/usr/bin/dnf reposync -c {} "
"--download-metadata --repoid={}-source -p {} "
-"--gpgcheck --norepopath --remote-time --assumeyes 2>&1").format(
+"--gpgcheck --norepopath --remote-time --assumeyes {} 2>&1").format(
self.dnf_config,
r,
-source_sync_path
+source_sync_path,
+reposync_delete
)
source_sync_template = self.tmplenv.get_template('reposync-src.tmpl')
@@ -2093,3 +2130,9 @@ class SigRepoSync:
)
self.log.info(Color.INFO + 'Metadata files phase completed.')
+def create_additional_dirs(self, sync_root):
+    """
+    Creates additional directories
+    """
+    self.log.info(Color.INFO + 'Ensuring additional directories exist')
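
The new create_additional_dirs lands here as a logging stub with no directory creation yet. Purely as a sketch of what a later implementation might look like (an assumption, not part of this commit), supposing additional_dirs holds names relative to the sync root, as in the altarch-rockyrpi config above:

import os
import logging

log = logging.getLogger(__name__)

def create_additional_dirs(sync_root, additional_dirs):
    # Hypothetical follow-up; the commit itself only logs a message.
    log.info('Ensuring additional directories exist')
    for dirname in additional_dirs or []:
        target = os.path.join(sync_root, dirname)
        os.makedirs(target, exist_ok=True)  # tolerate pre-existing directories
        log.info('Created %s', target)

create_additional_dirs('/tmp/sync/altarch-rockyrpi', ['images'])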

View File

@@ -508,6 +508,7 @@ class IsoBuild:
self.log.info('Removing boot.iso from %s' % image)
try:
os.remove(path_to_image + '/images/boot.iso')
+os.remove(path_to_image + '/images/boot.iso.manifest')
except:
self.log.error(
'[' + Color.BOLD + Color.YELLOW + 'FAIL' + Color.END + '] ' +