Compare commits


No commits in common. "devel" and "main" have entirely different histories.
devel...main

10 changed files with 44 additions and 198 deletions

View File

@@ -47,7 +47,6 @@
images:
dvd:
disc: True
reposcan: False
variant: 'AppStream'
repos:
- 'BaseOS'
@@ -55,7 +54,6 @@
minimal:
disc: True
isoskip: True
reposcan: False
repos:
- 'minimal'
- 'BaseOS'

View File

@@ -47,7 +47,6 @@
images:
dvd:
disc: True
reposcan: True
variant: 'AppStream'
repos:
- 'BaseOS'
@@ -55,7 +54,6 @@
minimal:
disc: True
isoskip: True
reposcan: False
repos:
- 'minimal'
- 'BaseOS'

View File

@@ -53,14 +53,12 @@
images:
dvd:
disc: True
reposcan: True
variant: 'AppStream'
repos:
- 'BaseOS'
- 'AppStream'
minimal:
disc: True
reposcan: False
isoskip: True
repos:
- 'minimal'

View File

@@ -53,7 +53,6 @@
images:
dvd:
disc: True
reposcan: True
variant: 'AppStream'
repos:
- 'BaseOS'
@@ -61,7 +60,6 @@
minimal:
disc: True
isoskip: True
reposcan: False
repos:
- 'minimal'
- 'BaseOS'

View File

@@ -53,7 +53,6 @@
images:
dvd:
disc: True
reposcan: True
variant: 'AppStream'
repos:
- 'BaseOS'
@@ -61,7 +60,6 @@
minimal:
disc: True
isoskip: True
reposcan: False
repos:
- 'minimal'
- 'BaseOS'
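
Taken together, the five hunks above make the same change to the per-image map: one side of each hunk carries a reposcan flag (the longer side) that the other drops, leaving disc, isoskip, variant, and repos for each image. A minimal sketch of reading an image map shaped like these hunks with PyYAML follows; the file name, top-level layout, and printed fields are stand-ins for illustration, not taken from the repository.

# Minimal sketch, not the project's loader: parse an image map shaped like
# the hunks above and report what each image declares.
import yaml  # PyYAML

with open('iso_map.yaml') as fh:      # hypothetical file name
    iso_map = yaml.safe_load(fh)      # e.g. {'images': {'dvd': {...}, 'minimal': {...}}}

for name, image in iso_map['images'].items():
    repos = ', '.join(image.get('repos', []))
    print(f"{name}: disc={image.get('disc')} isoskip={image.get('isoskip', False)} repos=[{repos}]")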

View File

@@ -560,7 +560,7 @@ class RepoSync:
#print(entry_name_list)
for pod in entry_name_list:
podman_cmd_entry = '{} run -d -it --security-opt label=disable -v "{}:{}" -v "{}:{}" -v "{}:{}" --name {} --entrypoint {}/{} {}'.format(
podman_cmd_entry = '{} run -d -it -v "{}:{}" -v "{}:{}:z" -v "{}:{}" --name {} --entrypoint {}/{} {}'.format(
cmd,
self.compose_root,
self.compose_root,
@@ -714,7 +714,7 @@ class RepoSync:
self.log.info('Spawning pods for %s' % repo)
for pod in repoclosure_entry_name_list:
podman_cmd_entry = '{} run -d -it --security-opt label=disable -v "{}:{}" -v "{}:{}" -v "{}:{}" --name {} --entrypoint {}/{} {}'.format(
podman_cmd_entry = '{} run -d -it -v "{}:{}" -v "{}:{}:z" -v "{}:{}" --name {} --entrypoint {}/{} {}'.format(
cmd,
self.compose_root,
self.compose_root,
@@ -1509,7 +1509,7 @@ class RepoSync:
self.log.info('Spawning pods for %s' % repo)
for pod in repoclosure_entry_name_list:
podman_cmd_entry = '{} run -d -it --security-opt label=disable -v "{}:{}" -v "{}:{}" -v "{}:{}" --name {} --entrypoint {}/{} {}'.format(
podman_cmd_entry = '{} run -d -it -v "{}:{}" -v "{}:{}:z" -v "{}:{}" --name {} --entrypoint {}/{} {}'.format(
cmd,
self.compose_root,
self.compose_root,
@@ -2045,7 +2045,7 @@ class SigRepoSync:
#print(entry_name_list)
for pod in entry_name_list:
podman_cmd_entry = '{} run -d -it --security-opt label=disable -v "{}:{}" -v "{}:{}" -v "{}:{}" --name {} --entrypoint {}/{} {}'.format(
podman_cmd_entry = '{} run -d -it -v "{}:{}" -v "{}:{}:z" -v "{}:{}" --name {} --entrypoint {}/{} {}'.format(
cmd,
self.compose_root,
self.compose_root,
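
Every hunk in this file swaps between two ways of handling SELinux for the spawned pods: one form passes --security-opt label=disable (turn label separation off for the whole container), the other drops that flag and instead marks one bind mount with a :z suffix (relabel that directory with a shared content label so containers can read and write it). Both are standard podman options. A minimal sketch of the two invocations follows; the paths, image name, and use of subprocess are stand-ins, not the compose tool's real values.

# Sketch only: the two SELinux strategies seen in the hunks above, built as
# plain podman command strings the way the surrounding code does.
import subprocess

compose_root = '/mnt/compose/10'          # stand-in paths
entries_dir = '/var/tmp/entries'
image = 'localhost/sync-runner:latest'    # stand-in container image

# Variant A: disable SELinux label separation for the container entirely.
cmd_disable = (
    'podman run -d -it --security-opt label=disable '
    f'-v "{compose_root}:{compose_root}" -v "{entries_dir}:{entries_dir}" {image}'
)

# Variant B: keep label separation, relabel only the shared entries mount (:z).
cmd_relabel = (
    'podman run -d -it '
    f'-v "{compose_root}:{compose_root}" -v "{entries_dir}:{entries_dir}:z" {image}'
)

subprocess.run(cmd_relabel, shell=True, check=True)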

View File

@@ -81,11 +81,12 @@ class IsoBuild:
self.compose_root = config['compose_root']
self.compose_base = config['compose_root'] + "/" + major
self.current_arch = config['arch']
#self.required_pkgs = rlvars['iso_map']['lorax']['required_pkgs']
self.required_pkgs = rlvars['iso_map']['lorax']['required_pkgs']
self.mock_work_root = config['mock_work_root']
self.lorax_result_root = config['mock_work_root'] + "/" + "lorax"
self.mock_isolation = isolation
self.iso_map = rlvars['iso_map']
#self.livemap = rlvars['livemap']
self.cloudimages = rlvars['cloudimages']
self.release_candidate = rc
self.s3 = s3
@@ -252,7 +253,6 @@
mock_iso_path = '/var/tmp/lorax-' + self.release + '.cfg'
mock_sh_path = '/var/tmp/isobuild.sh'
iso_template_path = '/var/tmp/buildImage.sh'
required_pkgs = self.iso_map['lorax']['required_pkgs']
rclevel = ''
if self.release_candidate:
@@ -294,7 +294,7 @@
builddir=self.mock_work_root,
lorax_work_root=self.lorax_result_root,
bugurl=self.bugurl,
squashfs_only=self.iso_map['lorax'].get('squashfs_only', None),
squashfs_only=self.iso_map['lorax']['squashfs_only'],
)
with open(mock_iso_path, "w+") as mock_iso_entry:
@@ -725,7 +725,8 @@
def _extra_iso_build_wrap(self):
"""
Try to figure out where the build is going, podman or mock.
Try to figure out where the build is going, we only support mock for
now.
"""
work_root = os.path.join(
self.compose_latest_dir,
@@ -736,23 +737,15 @@
if self.arch:
arches_to_build = [self.arch]
images_to_build = list(self.iso_map['images'].keys())
images_to_build = self.iso_map['images']
if self.extra_iso:
images_to_build = [self.extra_iso]
images_to_skip = []
for y in images_to_build:
if 'isoskip' in self.iso_map['images'][y] and self.iso_map['images'][y]['isoskip']:
self.log.info(Color.WARN + f'Skipping {y} image')
images_to_skip.append(y)
self.log.info(Color.WARN + 'Skipping ' + y + ' image')
continue
reposcan = True
if 'reposcan' in self.iso_map['images'][y] and not self.iso_map['images'][y]['reposcan']:
self.log.info(Color.WARN + f"Skipping compose repository scans for {y}")
reposcan = False
# Kind of hacky, but if we decide to have more than boot/dvd iso's,
# we need to make sure volname matches the initial lorax image,
# which the volid contains "dvd". AKA, file name doesn't always
@@ -777,7 +770,6 @@
a,
y,
self.iso_map['images'][y]['repos'],
reposcan=reposcan
)
self._extra_iso_local_config(a, y, grafts, work_root, volname)
@@ -790,14 +782,7 @@
raise SystemExit()
if self.extra_iso_mode == 'podman':
# I can't think of a better way to do this
images_to_build_podman = images_to_build.copy()
for item in images_to_build_podman[:]:
for skip in images_to_skip:
if item == skip:
images_to_build_podman.remove(item)
self._extra_iso_podman_run(arches_to_build, images_to_build_podman, work_root)
self._extra_iso_podman_run(arches_to_build, images_to_build, work_root)
def _extra_iso_local_config(self, arch, image, grafts, work_root, volname):
"""
@@ -844,7 +829,6 @@
isoname = f'{self.shortname}-{self.release}{rclevel}{datestamp}-{arch}-{image}.iso'
generic_isoname = f'{self.shortname}-{arch}-{image}.iso'
latest_isoname = f'{self.shortname}-{self.major_version}-latest-{arch}-{image}.iso'
required_pkgs = self.iso_map['lorax']['required_pkgs']
lorax_pkg_cmd = '/usr/bin/dnf install {} -y {}'.format(
' '.join(required_pkgs),
@@ -1022,7 +1006,7 @@
checksum_list.append(latestname)
for pod in entry_name_list:
podman_cmd_entry = '{} run -d -it --security-opt label=disable -v "{}:{}" -v "{}:{}" --name {} --entrypoint {}/{} {}'.format(
podman_cmd_entry = '{} run -d -it -v "{}:{}" -v "{}:{}" --name {} --entrypoint {}/{} {}'.format(
cmd,
self.compose_root,
self.compose_root,
@@ -1106,7 +1090,6 @@
arch,
iso,
variants,
reposcan: bool = True,
):
"""
Get a list of packages for an extras ISO. This should NOT be called
@@ -1136,28 +1119,26 @@
# actually get the boot data
files = self._get_grafts([lorax_for_var, extra_files_for_var])
# Some variants cannot go through a proper scan.
if reposcan:
# This is to get all the packages for each repo
for repo in variants:
pkg_for_var = os.path.join(
self.compose_latest_sync,
repo,
arch,
self.structure['packages']
)
rd_for_var = os.path.join(
self.compose_latest_sync,
repo,
arch,
self.structure['repodata']
)
# This is to get all the packages for each repo
for repo in variants:
pkg_for_var = os.path.join(
self.compose_latest_sync,
repo,
arch,
self.structure['packages']
)
rd_for_var = os.path.join(
self.compose_latest_sync,
repo,
arch,
self.structure['repodata']
)
for k, v in self._get_grafts([pkg_for_var]).items():
files[os.path.join(repo, "Packages", k)] = v
for k, v in self._get_grafts([pkg_for_var]).items():
files[os.path.join(repo, "Packages", k)] = v
for k, v in self._get_grafts([rd_for_var]).items():
files[os.path.join(repo, "repodata", k)] = v
for k, v in self._get_grafts([rd_for_var]).items():
files[os.path.join(repo, "repodata", k)] = v
grafts = f'{lorax_base_dir}/{iso}-{arch}-grafts'
@@ -1537,7 +1518,7 @@ class LiveBuild:
self.compose_base = config['compose_root'] + "/" + major
self.current_arch = config['arch']
self.livemap = rlvars['livemap']
#self.required_pkgs = rlvars['livemap']['required_pkgs']
self.required_pkgs = rlvars['livemap']['required_pkgs']
self.mock_work_root = config['mock_work_root']
self.live_result_root = config['mock_work_root'] + "/lmc"
self.mock_isolation = isolation
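
The largest hunk in this file re-indents the per-repo loop that builds the extras-ISO graft map: for every variant repo it maps Packages/ and repodata/ paths inside the ISO to their locations in the synced compose tree. A condensed sketch of that pattern follows; the directory layout and the standalone helper are illustrative stand-ins, not the class's actual _get_grafts implementation.

# Sketch of the per-repo graft map built above. As the surrounding code
# suggests, key = relative path inside the ISO, value = source path in the
# synced tree. Layout and helper are stand-ins.
import os

def graft_map(sync_root, arch, variants):
    files = {}
    for repo in variants:
        for subdir in ('Packages', 'repodata'):
            src_dir = os.path.join(sync_root, repo, arch, subdir)
            for entry in os.listdir(src_dir):
                files[os.path.join(repo, subdir, entry)] = os.path.join(src_dir, entry)
    return files

# e.g. graft_map('/mnt/compose/10/latest/compose', 'x86_64', ['BaseOS', 'AppStream'])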

View File

@@ -4,22 +4,6 @@
# modified version of repo-rss from yum utils
# changelog
# -> 20230912: do not xmlescape entire description variable
# -> 20240819: remove commented commands and imports
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
# seth vidal 2005 (c) etc etc
import sys
import os
@@ -35,6 +19,8 @@ from xml.etree.ElementTree import ElementTree, TreeBuilder, tostring
from xml.dom import minidom
import dnf
import dnf.exceptions
#from dnf.comps import Comps
#import libxml2
def to_unicode(string: str) -> str:
"""
@@ -291,7 +277,9 @@ def main(options):
modobj.disable(['*'])
sack_query = dnfobj.sack.query().available()
#recent = sack_query.filter(latest_per_arch=1)
recent = dnfobj.get_recent(days=days)
#sorted_recents = sorted(set(recent.run()), key=lambda pkg: pkg.buildtime)
sorted_recents = sorted(set(recent), key=lambda pkg: pkg.buildtime)
sorted_recents.reverse()
make_rss_feed(options.filename, options.title, options.link,
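
The only functional change in this hunk is how the recent package set is ordered: the commented-out line sorted the result of a query .run(), while the active line sorts the iterable directly by each package's buildtime and then reverses it for newest-first output. A standalone sketch of that ordering with the DNF Python API follows; repository setup and the script's day-window filtering are omitted, so this is illustrative rather than the script itself.

# Sketch: order available packages newest-first by build time with python3-dnf.
import dnf

base = dnf.Base()
base.read_all_repos()
base.fill_sack(load_system_repo=False)

available = base.sack.query().available()
newest_first = sorted(set(available), key=lambda pkg: pkg.buildtime, reverse=True)
for pkg in newest_first[:10]:
    print(pkg.name, pkg.evr, pkg.buildtime)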

View File

@@ -1,30 +0,0 @@
#!/bin/bash
# Wrapper for ipaauditor.py audit
source /etc/os-release
case "$ID" in
rocky|centos|rhel)
case "${VERSION_ID:0:1}" in
5|6|7)
echo "Not supported."
exit 3
;;
8)
PYTHON_EXEC="/usr/libexec/platform-python"
;;
*)
PYTHON_EXEC="/usr/bin/python3"
;;
esac ;;
ubuntu|debian)
PYTHON_EXEC="/usr/bin/python3"
;;
fedora)
PYTHON_EXEC="/usr/bin/python3"
esac
$PYTHON_EXEC ipaauditor.py --user test \
--password test \
--server test \
--library python_freeipa \
audit "$@"

View File

@@ -58,9 +58,6 @@ audit_parser = subparser.add_parser('audit', epilog='Use this to perform audits
parser.add_argument('--library', type=str, default='ipalib',
help='Choose the ipa library to use for the auditor',
choices=('ipalib', 'python_freeipa'))
parser.add_argument('--user', type=str, default='', help='Set the username (python_freeipa only)')
parser.add_argument('--password', type=str, default='', help='Set the password (python_freeipa only)')
parser.add_argument('--server', type=str, default='', help='Set the server (python_freeipa only)')
audit_parser.add_argument('--type', type=str, required=True,
help='Type of audit: hbac, rbac, group, user',
@@ -109,7 +106,7 @@ class EtcIPADefault:
outter_info['ipa_joined_name'] = __config['global']['host']
outter_info['ipa_domain'] = __config['global']['domain']
outter_info['ipa_realm'] = __config['global']['realm']
outter_info['registered_dc'] = __config['global']['host'] if not __config['global'].get('server', None) else __config['global']['server']
outter_info['registered_dc'] = __config['global']['server']
return outter_info
class SssctlInfo:
@@ -277,42 +274,9 @@ class IPAAudit:
@staticmethod
def user_pull(api, name, deep):
"""
Gets requested user info
Gets requested rbac info
"""
try:
user_results = IPAQuery.user_data(api, name)
except:
print(f'Could not find {name}', sys.stderr)
sys.exit(1)
user_first = '' if not user_results.get('givenname', None) else user_results['givenname'][0]
user_last = '' if not user_results.get('sn', None) else user_results['sn'][0]
user_uid = '' if not user_results.get('uid', None) else user_results['uid'][0]
user_uidnum = '' if not user_results.get('uidnumber', None) else user_results['uidnumber'][0]
user_gidnum = '' if not user_results.get('gidnumber', None) else user_results['gidnumber'][0]
user_groups = '' if not user_results.get('memberof_group', None) else '\n '.join(user_results['memberof_group'])
user_hbachosts = '' if not user_results.get('memberof_hbacrule', None) else '\n '.join(user_results['memberof_hbacrule'])
user_indhbachosts = '' if not user_results.get('memberofindirect_hbacrule', None) else '\n '.join(user_results['memberofindirect_hbacrule'])
starter_user = {
'User name': user_uid,
'First name': user_first,
'Last name': user_last,
'UID': user_uidnum,
'GID': user_gidnum,
'Groups': user_groups,
}
print('User Information')
print('----------------------------------------')
for key, value in starter_user.items():
if len(value) > 0:
print(f'{key: <16}{value}')
print('')
if deep:
group_list = [] if not user_results.get('memberof_group', None) else user_results['memberof_group']
IPAAudit.user_deep_list(api, name, group_list)
print()
@staticmethod
def group_pull(api, name, deep):
@@ -405,7 +369,7 @@ class IPAAudit:
if perm not in starting_perms:
starting_perms.append(perm)
print('Permissions Applied to this Role')
print(f'Permissions Applied to this Role')
print('----------------------------------------')
for item in starting_perms:
print(item)
@@ -463,50 +427,10 @@ class IPAAudit:
print(f'{key: <24}{value}')
@staticmethod
def user_deep_list(api, user, groups):
def user_deep_list(api, user):
"""
Does a recursive dig on a user
"""
hbac_rule_list = []
hbac_rule_all_hosts = []
host_list = []
hostgroup_list = []
for group in groups:
group_results = IPAQuery.group_data(api, group)
hbac_list = [] if not group_results.get('memberof_hbacrule', None) else group_results['memberof_hbacrule']
hbacind_list = [] if not group_results.get('memberofindirect_hbacrule', None) else group_results['memberofindirect_hbacrule']
hbac_rule_list.extend(hbac_list)
hbac_rule_list.extend(hbacind_list)
# TODO: Add HBAC list (including services)
# TODO: Add RBAC list
hbac_hosts = []
for hbac in hbac_rule_list:
hbac_results = IPAQuery.hbac_data(api, hbac)
hbac_host_list = [] if not hbac_results.get('memberhost_host', None) else hbac_results['memberhost_host']
hbac_hostgroup_list = [] if not hbac_results.get('memberhost_hostgroup', None) else hbac_results['memberhost_hostgroup']
if hbac_results.get('servicecategory'):
hbac_rule_all_hosts.append(hbac)
for host in hbac_host_list:
hbac_hosts.append(host)
for hostgroup in hbac_hostgroup_list:
hostgroup_data = IPAQuery.hostgroup_data(api, hostgroup)
host_list = [] if not hostgroup_data.get('member_host', None) else hostgroup_data['member_host']
hbac_hosts.extend(host_list)
new_hbac_hosts = sorted(set(hbac_hosts))
print('User Has Access To These Hosts')
print('------------------------------------------')
for hhost in new_hbac_hosts:
print(hhost)
if len(hbac_rule_all_hosts) > 0:
print('!! Notice: User has access to ALL hosts from the following rules:')
hbac_rule_all_hosts = sorted(set(hbac_rule_all_hosts))
for allrule in hbac_rule_all_hosts:
print(allrule)
@staticmethod
def group_deep_list(api, group):
@@ -643,7 +567,7 @@ memberOf:{groups}
return api.hbacsvcgroup_show(hbacsvcgroup)['result']
# start main
def get_api(ipa_library='ipalib', user='', password='', server=''):
def get_api(ipa_library='ipalib'):
"""
Gets and returns the right API entrypoint
"""
@@ -662,13 +586,7 @@ def get_api(ipa_library='ipalib', user='', password='', server=''):
print('WARNING: No kerberos credentials\n')
command_api = None
elif ipa_library == 'python_freeipa':
api = ClientMeta(server)
try:
api.login(user, password)
command_api = api
except:
print('ERROR: Unable to login, check user/password/server')
command_api = None
print()
else:
print('Unsupported ipa library', sys.stderr)
sys.exit(1)
@@ -679,8 +597,7 @@
"""
Main function entrypoint
"""
command_api = get_api(ipa_library=results.library, user=results.user,
password=results.password, server=results.server)
command_api = get_api()
if command == 'audit':
IPAAudit.entry(command_api, results.type, results.name, results.deep)
elif command == 'info':
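
These last hunks strip the python_freeipa path: the longer side of the diff accepts --user/--password/--server, builds a ClientMeta client, and logs in, while the other side keeps only the kerberos-backed ipalib path. For reference, a minimal sketch of the python_freeipa login flow shown on the longer side follows; ClientMeta and login() are real python_freeipa calls, the host and credentials are placeholders, and error handling plus the ipalib branch are omitted.

# Sketch of the python_freeipa path from the hunks above.
from python_freeipa import ClientMeta

def freeipa_api(server, user, password):
    client = ClientMeta(server)       # talks to the given IPA server over HTTPS
    client.login(user, password)      # raises a python_freeipa exception on failure
    return client

# e.g. api = freeipa_api('ipa.example.com', 'auditor', 'changeme')
#      api.user_show('someuser')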