Switch to black for formatting
commit 147aedf354 (parent 9e91db39a1)
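
Every hunk below is the result of reformatting the repository's Python tools with black: string literals are normalized to double quotes and statements are re-wrapped against black's default 88-column limit, with no change in runtime behavior. A minimal sketch of the transformation, assuming the black package and its format_str/Mode API are available:

# Sketch: one line of the old style through black (assumed API:
# black.format_str with the default Mode). Quotes are normalized to
# double quotes; the code's behavior is unchanged.
import black

old = "root = ET.Element('comps')\n"
new = black.format_str(old, mode=black.Mode())
assert new == 'root = ET.Element("comps")\n'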
@@ -28,6 +28,7 @@
 # POSSIBILITY OF SUCH DAMAGE.

 import argparse
+
 # noinspection PyPep8Naming
 import xml.etree.ElementTree as ET
 from xml.dom import minidom
@@ -36,49 +37,47 @@ from group import Group, PackageReq, Environment, EnvGroup


 def write_variant(groups, environments, categories, out):
-    root = ET.Element('comps')
+    root = ET.Element("comps")
     for group in groups:
-        group_elem = ET.SubElement(root, 'group')
-        ET.SubElement(group_elem, 'id').text = group.id
+        group_elem = ET.SubElement(root, "group")
+        ET.SubElement(group_elem, "id").text = group.id
         for lang in group.name:
-            name = ET.SubElement(group_elem, 'name')
+            name = ET.SubElement(group_elem, "name")
             if lang != "":
-                name.set('xml:lang', lang)
+                name.set("xml:lang", lang)
             name.text = group.name[lang]
         for lang in group.description:
-            description = ET.SubElement(group_elem, 'description')
+            description = ET.SubElement(group_elem, "description")
             if lang != "":
-                description.set('xml:lang', lang)
+                description.set("xml:lang", lang)
             description.text = group.description[lang]
-        ET.SubElement(group_elem, 'default').text = str(group.default).lower()
-        ET.SubElement(group_elem, 'uservisible').text = str(
-            group.user_visible).lower()
-        package_list = ET.SubElement(group_elem, 'packagelist')
+        ET.SubElement(group_elem, "default").text = str(group.default).lower()
+        ET.SubElement(group_elem, "uservisible").text = str(group.user_visible).lower()
+        package_list = ET.SubElement(group_elem, "packagelist")
         for package in group.packages:
-            package_elem = ET.SubElement(package_list, 'packagereq')
-            package_elem.set('type', package.type)
+            package_elem = ET.SubElement(package_list, "packagereq")
+            package_elem.set("type", package.type)
             package_elem.text = package.name
     for environment in environments:
-        env_elem = ET.SubElement(root, 'environment')
-        ET.SubElement(env_elem, 'id').text = environment.id
+        env_elem = ET.SubElement(root, "environment")
+        ET.SubElement(env_elem, "id").text = environment.id
         for lang in environment.name:
-            name = ET.SubElement(env_elem, 'name')
+            name = ET.SubElement(env_elem, "name")
             if lang != "":
-                name.set('xml:lang', lang)
+                name.set("xml:lang", lang)
             name.text = environment.name[lang]
         for lang in environment.description:
-            description = ET.SubElement(env_elem, 'description')
+            description = ET.SubElement(env_elem, "description")
             if lang != "":
-                description.set('xml:lang', lang)
+                description.set("xml:lang", lang)
             description.text = environment.description[lang]
-        ET.SubElement(env_elem, 'display_order').text = str(
-            environment.display_order)
-        group_list = ET.SubElement(env_elem, 'grouplist')
+        ET.SubElement(env_elem, "display_order").text = str(environment.display_order)
+        group_list = ET.SubElement(env_elem, "grouplist")
         for group in environment.group_list:
-            ET.SubElement(group_list, 'groupid').text = group.name
-        option_list = ET.SubElement(env_elem, 'optionlist')
+            ET.SubElement(group_list, "groupid").text = group.name
+        option_list = ET.SubElement(env_elem, "optionlist")
         for option in environment.option_list:
-            ET.SubElement(option_list, 'optionid').text = option.name
+            ET.SubElement(option_list, "optionid").text = option.name
     for category_name in categories.keys():
         category = categories[category_name]
         new_group_list = []
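
Besides quote normalization, this hunk shows black's wrapping rule in both directions: continuations that fit within 88 columns are joined onto one line (the uservisible and display_order statements), while anything longer is split instead. A sketch of the join, again assuming black's Python API:

# Sketch: the old two-line str(...) continuation fits in 88 columns
# once joined, so black collapses it, matching the + side above.
import black

old = (
    'ET.SubElement(group_elem, "uservisible").text = str(\n'
    "    group.user_visible).lower()\n"
)
print(black.format_str(old, mode=black.Mode()))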
@@ -89,37 +88,41 @@ def write_variant(groups, environments, categories, out):
                     break
         if len(new_group_list) == 0:
             continue
-        category_elem = ET.SubElement(root, 'category')
-        ET.SubElement(category_elem, 'id').text = category_name
+        category_elem = ET.SubElement(root, "category")
+        ET.SubElement(category_elem, "id").text = category_name
         for lang in category.name:
-            name = ET.SubElement(category_elem, 'name')
+            name = ET.SubElement(category_elem, "name")
             if lang != "":
-                name.set('xml:lang', lang)
+                name.set("xml:lang", lang)
             name.text = category.name[lang]
         for lang in category.description:
-            description = ET.SubElement(category_elem, 'description')
+            description = ET.SubElement(category_elem, "description")
             if lang != "":
-                description.set('xml:lang', lang)
+                description.set("xml:lang", lang)
             description.text = category.description[lang]
-        ET.SubElement(category_elem, 'display_order').text = str(
-            category.display_order)
-        group_list = ET.SubElement(category_elem, 'grouplist')
+        ET.SubElement(category_elem, "display_order").text = str(category.display_order)
+        group_list = ET.SubElement(category_elem, "grouplist")
         for group in new_group_list:
-            ET.SubElement(group_list, 'groupid').text = group.name
-    ET.ElementTree(root).write(out, encoding='utf-8', xml_declaration=False)
+            ET.SubElement(group_list, "groupid").text = group.name
+    ET.ElementTree(root).write(out, encoding="utf-8", xml_declaration=False)

-    with open(out, 'r') as f:
+    with open(out, "r") as f:
         data = f.read()
-    with open(out, 'w') as f:
-        f.writelines("""<?xml version="1.0" encoding="UTF-8"?>
+    with open(out, "w") as f:
+        f.writelines(
+            """<?xml version="1.0" encoding="UTF-8"?>
 <!DOCTYPE comps
   PUBLIC '-//Red Hat, Inc.//DTD Comps info//EN'
   'comps.dtd'>
-""" + minidom.parseString(data).toprettyxml(indent="  ").replace('<?xml version="1.0" ?>\n', ''))
+"""
+            + minidom.parseString(data)
+            .toprettyxml(indent="  ")
+            .replace('<?xml version="1.0" ?>\n', "")
+        )


 def main(comps_path: str, variants_path: str, output_path: str):
-    default_arches = ['x86_64', 'aarch64', 'ppc64le', 's390x']
+    default_arches = ["x86_64", "aarch64", "ppc64le", "s390x"]
     variants = {}
     environments = {}
     categories = {}
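
The writelines() call being re-wrapped here is the script's pretty-printing pass: ElementTree writes compact XML, minidom re-indents it, and the declaration that toprettyxml() emits is stripped so the comps DOCTYPE header can be prepended instead. A self-contained sketch of that trick:

# Serialize with ElementTree, re-indent with minidom, then drop the
# default XML declaration that toprettyxml() adds.
import xml.etree.ElementTree as ET
from xml.dom import minidom

root = ET.Element("comps")
ET.SubElement(root, "group")
compact = ET.tostring(root, encoding="unicode")
pretty = minidom.parseString(compact).toprettyxml(indent="  ")
pretty = pretty.replace('<?xml version="1.0" ?>\n', "")
print(pretty)  # <comps>\n  <group/>\n</comps>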
@@ -127,53 +130,55 @@ def main(comps_path: str, variants_path: str, output_path: str):
     tree = ET.parse(comps_path)
     root = tree.getroot()
     for gchild in root:
-        if gchild.tag == 'group':
+        if gchild.tag == "group":
             group_name = {}
             group_desc = {}
-            group_id = ''
+            group_id = ""
             is_default = False
             is_visible = False
-            variant = ''
+            variant = ""
             package_list_xml = None
-            if 'variant' in gchild.attrib:
-                variant = gchild.attrib['variant']
-            if 'arch' in gchild.attrib:
-                arches = gchild.attrib['arch'].split(',')
+            if "variant" in gchild.attrib:
+                variant = gchild.attrib["variant"]
+            if "arch" in gchild.attrib:
+                arches = gchild.attrib["arch"].split(",")
             else:
                 arches = default_arches
             for gattr in gchild:
-                if gattr.tag == 'id':
+                if gattr.tag == "id":
                     group_id = gattr.text
-                elif gattr.tag == 'name':
-                    if '{http://www.w3.org/XML/1998/namespace}lang' in gattr.attrib:
-                        group_name[gattr.attrib[
-                            '{http://www.w3.org/XML/1998/namespace}lang']] = gattr.text
+                elif gattr.tag == "name":
+                    if "{http://www.w3.org/XML/1998/namespace}lang" in gattr.attrib:
+                        group_name[
+                            gattr.attrib["{http://www.w3.org/XML/1998/namespace}lang"]
+                        ] = gattr.text
                     else:
                         group_name[""] = gattr.text
-                elif gattr.tag == 'description':
-                    if '{http://www.w3.org/XML/1998/namespace}lang' in gattr.attrib:
-                        group_desc[gattr.attrib[
-                            '{http://www.w3.org/XML/1998/namespace}lang']] = gattr.text
+                elif gattr.tag == "description":
+                    if "{http://www.w3.org/XML/1998/namespace}lang" in gattr.attrib:
+                        group_desc[
+                            gattr.attrib["{http://www.w3.org/XML/1998/namespace}lang"]
+                        ] = gattr.text
                     else:
                         group_desc[""] = gattr.text
-                elif gattr.tag == 'default':
-                    is_default = gattr.text == 'true'
-                elif gattr.tag == 'uservisible':
-                    is_visible = gattr.text == 'true'
-                elif gattr.tag == 'packagelist':
+                elif gattr.tag == "default":
+                    is_default = gattr.text == "true"
+                elif gattr.tag == "uservisible":
+                    is_visible = gattr.text == "true"
+                elif gattr.tag == "packagelist":
                     package_list_xml = gattr
             package_list = {}
-            if variant != '':
+            if variant != "":
                 package_list[variant] = {}
             for reqxml in package_list_xml:
                 req_variant = variant
-                req_type = 'default'
-                if 'variant' in reqxml.attrib:
-                    req_variant = reqxml.attrib['variant']
-                if 'type' in reqxml.attrib:
-                    req_type = reqxml.attrib['type']
-                if 'arch' in reqxml.attrib:
-                    req_arches = reqxml.attrib['arch'].split(',')
+                req_type = "default"
+                if "variant" in reqxml.attrib:
+                    req_variant = reqxml.attrib["variant"]
+                if "type" in reqxml.attrib:
+                    req_type = reqxml.attrib["type"]
+                if "arch" in reqxml.attrib:
+                    req_arches = reqxml.attrib["arch"].split(",")
                 else:
                     req_arches = arches
                 if req_variant not in package_list:
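
The long "{http://www.w3.org/XML/1998/namespace}lang" keys that black re-indents above are ElementTree's Clark notation for namespaced attributes: xml:lang arrives with the xml namespace URI expanded into the key. In isolation:

# ElementTree expands the built-in xml: prefix, so xml:lang is looked
# up under its Clark-notation key.
import xml.etree.ElementTree as ET

elem = ET.fromstring('<name xml:lang="de">Basis</name>')
lang_key = "{http://www.w3.org/XML/1998/namespace}lang"
assert elem.attrib[lang_key] == "de"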
@@ -182,7 +187,8 @@ def main(comps_path: str, variants_path: str, output_path: str):
                     if arch not in package_list[req_variant]:
                         package_list[req_variant][arch] = []
                     package_list[req_variant][arch].append(
-                        PackageReq(reqxml.text, req_type, req_arches))
+                        PackageReq(reqxml.text, req_type, req_arches)
+                    )
             for variant in package_list:
                 if variant not in variants:
                     variants[variant] = {}
@@ -191,59 +197,62 @@ def main(comps_path: str, variants_path: str, output_path: str):
                         package_list[variant][arch] = []
                     if group_id not in variants[variant]:
                         variants[variant][group_id] = {}
-                    variants[variant][group_id][arch] = Group(group_id,
-                                                              group_name,
-                                                              group_desc,
-                                                              is_default,
-                                                              is_visible,
-                                                              package_list[
-                                                                  variant][
-                                                                  arch])
-        elif gchild.tag == 'environment' or gchild.tag == 'category':
+                    variants[variant][group_id][arch] = Group(
+                        group_id,
+                        group_name,
+                        group_desc,
+                        is_default,
+                        is_visible,
+                        package_list[variant][arch],
+                    )
+        elif gchild.tag == "environment" or gchild.tag == "category":
             env_name = {}
             env_desc = {}
-            env_id = ''
+            env_id = ""
             display_order = 0
             group_list = []
             option_list = []
             for gattr in gchild:
-                if gattr.tag == 'id':
+                if gattr.tag == "id":
                     env_id = gattr.text
-                elif gattr.tag == 'name':
-                    if '{http://www.w3.org/XML/1998/namespace}lang' in gattr.attrib:
-                        env_name[gattr.attrib[
-                            '{http://www.w3.org/XML/1998/namespace}lang']] = gattr.text
+                elif gattr.tag == "name":
+                    if "{http://www.w3.org/XML/1998/namespace}lang" in gattr.attrib:
+                        env_name[
+                            gattr.attrib["{http://www.w3.org/XML/1998/namespace}lang"]
+                        ] = gattr.text
                     else:
                         env_name[""] = gattr.text
-                elif gattr.tag == 'description':
-                    if '{http://www.w3.org/XML/1998/namespace}lang' in gattr.attrib:
-                        env_desc[gattr.attrib[
-                            '{http://www.w3.org/XML/1998/namespace}lang']] = gattr.text
+                elif gattr.tag == "description":
+                    if "{http://www.w3.org/XML/1998/namespace}lang" in gattr.attrib:
+                        env_desc[
+                            gattr.attrib["{http://www.w3.org/XML/1998/namespace}lang"]
+                        ] = gattr.text
                     else:
                         env_desc[""] = gattr.text
-                elif gattr.tag == 'display_order':
+                elif gattr.tag == "display_order":
                     display_order = gattr.text
-                elif gattr.tag == 'grouplist':
+                elif gattr.tag == "grouplist":
                     for group in gattr:
-                        if 'arch' in group.attrib:
-                            arches = group.attrib['arch'].split(',')
+                        if "arch" in group.attrib:
+                            arches = group.attrib["arch"].split(",")
                         else:
                             arches = default_arches
                         group_list.append(EnvGroup(group.text, arches))
-                elif gattr.tag == 'optionlist':
+                elif gattr.tag == "optionlist":
                     for group in gattr:
-                        if 'arch' in group.attrib:
-                            arches = group.attrib['arch'].split(',')
+                        if "arch" in group.attrib:
+                            arches = group.attrib["arch"].split(",")
                         else:
                             arches = default_arches
                         option_list.append(EnvGroup(group.text, arches))
-            new_env = Environment(env_id, env_name, env_desc, display_order,
-                                  group_list, option_list)
+            new_env = Environment(
+                env_id, env_name, env_desc, display_order, group_list, option_list
+            )
             dictmap = categories
-            if gchild.tag == 'environment':
+            if gchild.tag == "environment":
                 dictmap = environments
-            if 'arch' in gchild.attrib:
-                arches = gchild.attrib['arch'].split(',')
+            if "arch" in gchild.attrib:
+                arches = gchild.attrib["arch"].split(",")
             else:
                 arches = default_arches
             for arch in arches:
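
The Group(...) rewrite above is black's other signature change: a call that cannot fit on one line is exploded to one argument per line with a trailing comma, replacing the old paren-aligned continuation style. A sketch under the same assumed API:

# An over-long call comes back exploded, one argument per line with a
# trailing comma, matching the + side of the hunk.
import black

src = (
    "variants[variant][group_id][arch] = Group(group_id, group_name, "
    "group_desc, is_default, is_visible, package_list[variant][arch])\n"
)
print(black.format_str(src, mode=black.Mode()))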
@@ -262,18 +271,18 @@ def main(comps_path: str, variants_path: str, output_path: str):
     environment_arch_index = {}
     pungi_variants_tree = ET.parse(variants_path).getroot()
     for pungi_variant in pungi_variants_tree:
-        if pungi_variant.tag == 'variant':
-            if pungi_variant.attrib['type'] != 'variant':
+        if pungi_variant.tag == "variant":
+            if pungi_variant.attrib["type"] != "variant":
                 continue
             arches = []
             groups = {}
             n_environments = {}
-            variant_id = pungi_variant.attrib['id']
+            variant_id = pungi_variant.attrib["id"]
             for child in pungi_variant:
-                if child.tag == 'arches':
+                if child.tag == "arches":
                     for arch in child:
                         arches.append(arch.text)
-                elif child.tag == 'groups':
+                elif child.tag == "groups":
                     for group in child:
                         groupbase = variants[""]
                         if variant_id in variants:
@@ -284,17 +293,20 @@ def main(comps_path: str, variants_path: str, output_path: str):
                         for arch_group in groupind.keys():
                             if arch_group not in groups:
                                 groups[arch_group] = []
-                            if 'default' in group.attrib:
-                                groupind[arch_group].default = group.attrib['default'] == 'true'
+                            if "default" in group.attrib:
+                                groupind[arch_group].default = (
+                                    group.attrib["default"] == "true"
+                                )
                             groups[arch_group].append(groupind[arch_group])
-                elif child.tag == 'environments':
+                elif child.tag == "environments":
                     for environment in child:
                         envind = environment_id_index[environment.text]
                         for arch_environment in envind.keys():
                             if arch_environment not in n_environments:
                                 n_environments[arch_environment] = []
                             n_environments[arch_environment].append(
-                                envind[arch_environment])
+                                envind[arch_environment]
+                            )
             for arch in arches:
                 if arch in groups:
                     if arch not in variant_arch_index:
@@ -308,25 +320,29 @@ def main(comps_path: str, variants_path: str, output_path: str):
                     if variant_id not in environment_arch_index[arch]:
                         environment_arch_index[arch][variant_id] = []
                     environment_arch_index[arch][variant_id].extend(
-                        n_environments[arch])
+                        n_environments[arch]
+                    )

     for arch in variant_arch_index.keys():
         for variant in variant_arch_index[arch].keys():
-            write_variant(variant_arch_index[arch][variant] if variant in
-                          variant_arch_index[
-                              arch] else [],
-                          environment_arch_index[arch][variant] if variant in
-                          environment_arch_index[
-                              arch] else [],
-                          categories[arch].copy(),
-                          f'{output_path}/{variant}-{arch}.xml')
+            write_variant(
+                variant_arch_index[arch][variant]
+                if variant in variant_arch_index[arch]
+                else [],
+                environment_arch_index[arch][variant]
+                if variant in environment_arch_index[arch]
+                else [],
+                categories[arch].copy(),
+                f"{output_path}/{variant}-{arch}.xml",
+            )


-if __name__ == '__main__':
+if __name__ == "__main__":
     parser = argparse.ArgumentParser(
-        description='Convert comps to Peridot compatible configuration.')
-    parser.add_argument('--comps-path', type=str, required=True)
-    parser.add_argument('--variants-path', type=str, required=True)
-    parser.add_argument('--output-path', type=str, default=".")
+        description="Convert comps to Peridot compatible configuration."
+    )
+    parser.add_argument("--comps-path", type=str, required=True)
+    parser.add_argument("--variants-path", type=str, required=True)
+    parser.add_argument("--output-path", type=str, default=".")
     args = parser.parse_args()
     main(args.comps_path, args.variants_path, args.output_path)
@@ -38,8 +38,12 @@ class PeridotCatalogSyncPackageType(str, Enum):
     PACKAGE_TYPE_MODULE_FORK = "PACKAGE_TYPE_MODULE_FORK"
     PACKAGE_TYPE_MODULE_FORK_COMPONENT = "PACKAGE_TYPE_MODULE_FORK_COMPONENT"
     PACKAGE_TYPE_NORMAL_FORK_MODULE = "PACKAGE_TYPE_NORMAL_FORK_MODULE"
-    PACKAGE_TYPE_NORMAL_FORK_MODULE_COMPONENT = "PACKAGE_TYPE_NORMAL_FORK_MODULE_COMPONENT"
-    PACKAGE_TYPE_MODULE_FORK_MODULE_COMPONENT = "PACKAGE_TYPE_MODULE_FORK_MODULE_COMPONENT"
+    PACKAGE_TYPE_NORMAL_FORK_MODULE_COMPONENT = (
+        "PACKAGE_TYPE_NORMAL_FORK_MODULE_COMPONENT"
+    )
+    PACKAGE_TYPE_MODULE_FORK_MODULE_COMPONENT = (
+        "PACKAGE_TYPE_MODULE_FORK_MODULE_COMPONENT"
+    )


 @dataclass
@@ -49,12 +53,12 @@ class PeridotCatalogSyncRepository:
     multilib: list[str]

     def include_filter_to_prototxt(self):
-        return '\n' + '\n'.join(
-            [f" include_filter: \"{f}\"" for f in self.include_filter])
+        return "\n" + "\n".join(
+            [f' include_filter: "{f}"' for f in self.include_filter]
+        )

     def multilib_to_prototxt(self):
-        return '\n' + '\n'.join(
-            [f" multilib: \"{f}\"" for f in self.multilib])
+        return "\n" + "\n".join([f' multilib: "{f}"' for f in self.multilib])


 @dataclass
@@ -65,19 +69,26 @@ class PeridotCatalogSyncPackage:
     repositories: list[PeridotCatalogSyncRepository]

     def mc_to_prototxt(self):
-        return '\n' + '\n'.join(
-            [f" module_component: \"{component}\"" for component in
-             self.module_components])
+        return "\n" + "\n".join(
+            [
+                f' module_component: "{component}"'
+                for component in self.module_components
+            ]
+        )

     def repos_to_prototxt(self):
-        return '\n'.join(
-            [f""" repository {{
+        return "\n".join(
+            [
+                f""" repository {{
 name: \"{repo.name}\"{
 repo.include_filter_to_prototxt() if repo.include_filter else ""
 }{
 repo.multilib_to_prototxt() if repo.multilib else ""
 }
-}}""" for repo in self.repositories])
+}}"""
+                for repo in self.repositories
+            ]
+        )


 class PeridotCatalogSync:
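
These helpers render fragments of a Peridot CatalogSync prototxt. A hypothetical usage, assuming the field order (name, include_filter, multilib) seen at the construction site later in this diff:

# Hypothetical example values; assumes the PeridotCatalogSyncRepository
# dataclass from this file is importable. The method returns a
# newline-prefixed block with one include_filter line per pattern.
repo = PeridotCatalogSyncRepository("BaseOS", ["bash.x86_64", "bash.i686"], ["x86_64"])
print(repo.include_filter_to_prototxt())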
@@ -91,42 +102,53 @@ class PeridotCatalogSync:
         self.packages.append(package)

     def additional_multilib_to_prototxt(self):
-        return '\n'.join(
-            [f"additional_multilib: \"{f}\"" for f in
-             self.additional_multilib])
+        return "\n".join(
+            [f'additional_multilib: "{f}"' for f in self.additional_multilib]
+        )

     def exclude_multilib_filter_to_prototxt(self):
-        return '\n' + '\n'.join(
-            [f"exclude_multilib_filter: \"{f}\"" for f in
-             self.exclude_multilib_filter])
+        return "\n" + "\n".join(
+            [f'exclude_multilib_filter: "{f}"' for f in self.exclude_multilib_filter]
+        )

     def filter_arch_to_prototxt(self, arch: dict):
-        nl = '\n'
+        nl = "\n"
         glob_match = {}
         for k, v in arch.items():
-            glob_match[k] = [f" glob_match: \"{f}\"" for f in v]
+            glob_match[k] = [f' glob_match: "{f}"' for f in v]
         for k in glob_match.keys():
             if len(glob_match[k]) > 0:
-                glob_match[k][0] = '\n' + glob_match[k][0]
-        return '\n'.join([f""" arch {{
+                glob_match[k][0] = "\n" + glob_match[k][0]
+        return "\n".join(
+            [
+                f""" arch {{
 key: \"{f}\"{nl.join(glob_match[f])}
-}}""" for f in arch.keys()])
+}}"""
+                for f in arch.keys()
+            ]
+        )

     def exclude_filter_to_prototxt(self):
-        return '\n' + '\n'.join(
-            [f"""exclude_filter {{
+        return "\n" + "\n".join(
+            [
+                f"""exclude_filter {{
 repo_match: \"{f[0]}\"
 {self.filter_arch_to_prototxt(f[1])}
-}}""" for f in
-             self.exclude_filter])
+}}"""
+                for f in self.exclude_filter
+            ]
+        )

     def include_filter_to_prototxt(self):
-        return '\n' + '\n'.join(
-            [f"""include_filter {{
+        return "\n" + "\n".join(
+            [
+                f"""include_filter {{
 repo_match: \"{f[0]}\"
 {self.filter_arch_to_prototxt(f[1])}
-}}""" for f in
-             self.include_filter])
+}}"""
+                for f in self.include_filter
+            ]
+        )

     def to_prototxt(self):
         ret = f"""# kind: resf.peridot.v1.CatalogSync
@@ -32,8 +32,12 @@ import os

 import kobo.conf

-from catalog import PeridotCatalogSync, PeridotCatalogSyncPackage, \
-    PeridotCatalogSyncPackageType, PeridotCatalogSyncRepository
+from catalog import (
+    PeridotCatalogSync,
+    PeridotCatalogSyncPackage,
+    PeridotCatalogSyncPackageType,
+    PeridotCatalogSyncRepository,
+)
 from scm import SCM


@@ -51,10 +55,10 @@ def main(pungi_conf_path: str, output_path: str):
     catalog = PeridotCatalogSync()

     # Set multilib filters
-    catalog.additional_multilib.extend(
-        list(conf.get("multilib_whitelist").values())[0])
+    catalog.additional_multilib.extend(list(conf.get("multilib_whitelist").values())[0])
     catalog.exclude_multilib_filter.extend(
-        list(conf.get("multilib_blacklist").values())[0])
+        list(conf.get("multilib_blacklist").values())[0]
+    )

     # Set additional packages/filters
     catalog.exclude_filter.extend(conf.get("filter_packages"))
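
conf is the parsed Pungi configuration; multilib_whitelist and multilib_blacklist map an arch pattern to a list of package globs, and the script keeps only the first value list. A sketch of the shape being unpacked (values are hypothetical):

# Hypothetical pungi-style mapping: one arch-pattern key, one list of
# package names; list(...values())[0] grabs the first list.
multilib_whitelist = {"*": ["libgcc", "glibc", "zlib"]}
first_list = list(multilib_whitelist.values())[0]
assert first_list == ["libgcc", "glibc", "zlib"]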
@@ -73,31 +77,40 @@ def main(pungi_conf_path: str, output_path: str):
                 if package not in package_index:
                     package_index[package] = {}
                 if repo not in package_index[package]:
-                    package_index[package][repo] = {"include_filter": [],
-                                                    "multilib": []}
+                    package_index[package][repo] = {
+                        "include_filter": [],
+                        "multilib": [],
+                    }
                 na_list = gpjson[repo][arch][package]
                 for na in na_list:
                     splitted = na.split(".")
                     arch_package = splitted[len(splitted) - 1]
                     if arch != arch_package and arch_package != "noarch":
                         if arch not in package_index[package][repo]["multilib"]:
-                            package_index[package][repo]["multilib"].append(
-                                arch)
+                            package_index[package][repo]["multilib"].append(arch)
                         if na not in package_index[package][repo]["include_filter"]:
-                            package_index[package][repo]["include_filter"].append(
-                                na)
+                            package_index[package][repo]["include_filter"].append(na)

     arch_specific_excludes = {}
     na_index = {}
     for pkg in package_index.keys():
         for repo in package_index[pkg].keys():
-            na_list = list(filter(lambda x: x.endswith('.noarch'), package_index[pkg][repo]["include_filter"]))
+            na_list = list(
+                filter(
+                    lambda x: x.endswith(".noarch"),
+                    package_index[pkg][repo]["include_filter"],
+                )
+            )
             if not na_list:
                 continue
             exclude_arches = {}
             for na in na_list:
                 for arch in all_arches:
-                    if arch not in gpjson[repo] or pkg not in gpjson[repo][arch] or na not in gpjson[repo][arch][pkg]:
+                    if (
+                        arch not in gpjson[repo]
+                        or pkg not in gpjson[repo][arch]
+                        or na not in gpjson[repo][arch][pkg]
+                    ):
                         if na not in exclude_arches:
                             exclude_arches[na] = []
                         exclude_arches[na].append(arch)
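
The noarch bookkeeping above relies on the name.arch convention: the token after the final dot of an entry is its arch, so noarch subpackages can be detected and, where missing from some arches in the gathered-package JSON, turned into per-arch exclude filters. The split in isolation:

# The token after the last dot is the package arch; "noarch" marks
# arch-independent packages.
na = "python3-pip-wheel.noarch"
splitted = na.split(".")
arch_package = splitted[len(splitted) - 1]
assert arch_package == "noarch"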
@@ -132,7 +145,7 @@ def main(pungi_conf_path: str, output_path: str):
             if arch not in filter_tuple:
                 filter_tuple[arch] = []
             for na in repo_arch_index[repo][arch]:
-                na = na.removesuffix('.noarch')
+                na = na.removesuffix(".noarch")
                 if na not in filter_tuple[arch]:
                     filter_tuple[arch].append(na)
         catalog.exclude_filter.append((repo_key, filter_tuple))
@@ -141,12 +154,18 @@ def main(pungi_conf_path: str, output_path: str):
         catalog.add_package(
             PeridotCatalogSyncPackage(
                 package,
-                PeridotCatalogSyncPackageType.PACKAGE_TYPE_NORMAL_FORK if not package.startswith("rocky-") else PeridotCatalogSyncPackageType.PACKAGE_TYPE_NORMAL_SRC,
+                PeridotCatalogSyncPackageType.PACKAGE_TYPE_NORMAL_FORK
+                if not package.startswith("rocky-")
+                else PeridotCatalogSyncPackageType.PACKAGE_TYPE_NORMAL_SRC,
                 [],
-                [PeridotCatalogSyncRepository(x, package_index[package][x][
-                    "include_filter"], package_index[package][x]["multilib"])
-                 for
-                 x in package_index[package].keys()]
+                [
+                    PeridotCatalogSyncRepository(
+                        x,
+                        package_index[package][x]["include_filter"],
+                        package_index[package][x]["multilib"],
+                    )
+                    for x in package_index[package].keys()
+                ],
             )
         )

@@ -159,9 +178,9 @@ def main(pungi_conf_path: str, output_path: str):

 if __name__ == "__main__":
     parser = argparse.ArgumentParser(
-        description='Convert Pungi configuration to Peridot compatible '
-                    'catalogs.')
-    parser.add_argument('--pungi-conf-path', type=str, required=True)
-    parser.add_argument('--output-path', type=str, default="catalog.cfg")
+        description="Convert Pungi configuration to Peridot compatible " "catalogs."
+    )
+    parser.add_argument("--pungi-conf-path", type=str, required=True)
+    parser.add_argument("--output-path", type=str, default="catalog.cfg")
     args = parser.parse_args()
     main(args.pungi_conf_path, args.output_path)
@@ -27,16 +27,16 @@
 # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 # POSSIBILITY OF SUCH DAMAGE.

-BASE_URL = 'https://peridot-api.build.resf.org/v1'
-PROJECT_ID_PROD = '55b17281-bc54-4929-8aca-a8a11d628738'
+BASE_URL = "https://peridot-api.build.resf.org/v1"
+PROJECT_ID_PROD = "55b17281-bc54-4929-8aca-a8a11d628738"


 def construct_url(path, project_id=PROJECT_ID_PROD):
-    return f'{BASE_URL}/projects/{project_id}{path}'
+    return f"{BASE_URL}/projects/{project_id}{path}"


-def build_batches_url(batch_type, task_id, page, status,
-                      project_id=PROJECT_ID_PROD):
+def build_batches_url(batch_type, task_id, page, status, project_id=PROJECT_ID_PROD):
     return construct_url(
-        f'/{batch_type}_batches/{task_id}?page={page}&limit=100&filter.status={status}',
-        project_id)
+        f"/{batch_type}_batches/{task_id}?page={page}&limit=100&filter.status={status}",
+        project_id,
+    )
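
With the constants above, these helpers expand to predictable Peridot API URLs. A runnable restatement with an example call (the query string is chosen for illustration):

# construct_url prepends BASE_URL plus the production project id.
BASE_URL = "https://peridot-api.build.resf.org/v1"
PROJECT_ID_PROD = "55b17281-bc54-4929-8aca-a8a11d628738"

def construct_url(path, project_id=PROJECT_ID_PROD):
    return f"{BASE_URL}/projects/{project_id}{path}"

print(construct_url("/packages?limit=100&page=0"))
# https://peridot-api.build.resf.org/v1/projects/55b17281-bc54-4929-8aca-a8a11d628738/packages?limit=100&page=0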
@@ -38,7 +38,7 @@ from common import build_batches_url

 def get_batch(batch_type, task_id, status, page):
     r = requests.get(build_batches_url(batch_type, task_id, page, status))
-    return r.json()[f'{batch_type}s']
+    return r.json()[f"{batch_type}s"]


 def process_batch(batch_type, task_id, status):
@@ -52,18 +52,16 @@ def process_batch(batch_type, task_id, status):
         page = page + 1


-if __name__ == '__main__':
+if __name__ == "__main__":
     batch_type = sys.argv[1]
     task_id = sys.argv[2]

     batch_items = process_batch(batch_type, task_id, 4)

     req = {}
-    key = f'{batch_type}s'
+    key = f"{batch_type}s"
     req[key] = []
     for item in batch_items:
-        req[key].append({
-            'package_name': item['name']
-        })
+        req[key].append({"package_name": item["name"]})

     print(json.dumps(req))
@@ -38,13 +38,14 @@ from common import construct_url

 def chunks(lst, n):
     for i in range(0, len(lst), n):
-        yield lst[i:i + n]
+        yield lst[i : i + n]


 def get_packages(page):
     r = requests.get(
-        construct_url(f'/packages?limit=100&page={page}&filters.no_builds=1'))
-    return r.json()['packages']
+        construct_url(f"/packages?limit=100&page={page}&filters.no_builds=1")
+    )
+    return r.json()["packages"]


 def process_packages():
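
chunks() is a plain fixed-size slicer whose final chunk may be shorter; it is what lets the script submit builds in batches of 400 further down. With toy values:

# The generator defined above, restated with example input.
def chunks(lst, n):
    for i in range(0, len(lst), n):
        yield lst[i : i + n]

assert list(chunks([1, 2, 3, 4, 5], 2)) == [[1, 2], [3, 4], [5]]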
@@ -58,13 +59,11 @@ def process_packages():
         page = page + 1


-if __name__ == '__main__':
+if __name__ == "__main__":
     batch_items = process_packages()

     builds = []
     for item in batch_items:
-        builds.append({
-            'package_name': item['name']
-        })
+        builds.append({"package_name": item["name"]})
     for chunk in chunks(builds, 400):
         print(json.dumps({"builds": chunk}))
@@ -30,15 +30,13 @@
 import sys
 import json

-if __name__ == '__main__':
+if __name__ == "__main__":
     build_type = sys.argv[1]
-    key = f'{build_type}s'
+    key = f"{build_type}s"

     req = {}
     req[key] = []
     for line in sys.stdin:
-        req[key].append({
-            'package_name': line.strip()
-        })
+        req[key].append({"package_name": line.strip()})

     print(json.dumps(req))