Mirror of https://github.com/resf/distro-tools.git, synced 2024-11-21 20:51:27 +00:00
Hydra API can return list[str] for boostVersion (#12)
* Hydra API can return list[str] for boostVersion
* Max row count is now 999
parent 9648bfba93
commit f89f71f3b0
@@ -87,7 +87,7 @@ class Advisory(JSONWizard):
     portal_child_ids: list[str] = None
     portal_product_filter: list[str] = None
     boostProduct: str = None
-    boostVersion: int = None
+    boostVersion: int | list[str] = None
     detectedProducts: list[str] = None
     caseCount: int = None
     caseCount_365: int = None
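The widened annotation reflects the commit message: Hydra may return boostVersion either as a single integer or as a list of strings. A minimal sketch of how consuming code might cope with both shapes; the normalize_boost_version helper below is hypothetical and not part of the repository:

# Hypothetical helper (not in distro-tools): fold the two payload shapes
# Hydra can return for boostVersion into a single list of strings.
def normalize_boost_version(value: int | list[str] | None) -> list[str]:
    if value is None:
        return []
    if isinstance(value, list):
        return value
    return [str(value)]

assert normalize_boost_version(8) == ["8"]
assert normalize_boost_version(["8.6", "9.0"]) == ["8.6", "9.0"]
assert normalize_boost_version(None) == []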
@@ -208,4 +208,5 @@ class API:
                 raise Exception((await response.text()))
             elif body.get("response", {}).get("numFound", 0) == 0:
                 return []
-            return Advisory.from_list(list(body["response"]["docs"]))
+            advisory_list = list(body["response"]["docs"])
+            return Advisory.from_list(advisory_list)
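For context, the body handled here follows the Solr-style envelope the surrounding code already reads (response.numFound and response.docs). A small illustrative payload, with invented field values, showing the path the new advisory_list extraction takes:

# Illustrative payload only; the values are invented, but the envelope matches
# what the code above reads: body["response"]["numFound"] and ["docs"].
body = {
    "response": {
        "numFound": 2,
        "docs": [
            {"id": "RHSA-2023:0001", "boostVersion": 9},
            {"id": "RHSA-2023:0002", "boostVersion": ["8.6", "9.0"]},
        ],
    }
}

if body.get("response", {}).get("numFound", 0) == 0:
    advisories = []  # no hits: the method returns an empty list
else:
    advisory_list = list(body["response"]["docs"])
    # the real method then returns Advisory.from_list(advisory_list)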
@@ -6,9 +6,9 @@ async def main():
     api = API()
     res = await api.search(
         detected_product="rhel",
-        rows=1000,
+        rows=999,
         from_date="2019-05-05T22:00:00Z",
-        sort_asc=True
+        sort_asc=True,
     )
     contains_9 = 0
     contains_8 = 0
@@ -9,7 +9,11 @@ from temporalio import activity
 from tortoise.transactions import in_transaction
 
 from apollo.db import RedHatIndexState, RedHatAdvisory, RedHatAdvisoryPackage
-from apollo.db import RedHatAdvisoryBugzillaBug, RedHatAdvisoryAffectedProduct, RedHatAdvisoryCVE
+from apollo.db import (
+    RedHatAdvisoryBugzillaBug,
+    RedHatAdvisoryAffectedProduct,
+    RedHatAdvisoryCVE,
+)
 from apollo.rherrata import API
 
 from common.logger import Logger
@@ -25,16 +29,22 @@ def parse_red_hat_date(rhdate: str) -> datetime.datetime:
 @activity.defn
 async def get_last_indexed_date() -> Optional[str]:
     state = await RedHatIndexState.get_or_none()
-    return re.sub(
-        r"\+\d\d:\d\d",
-        "",
-        state.last_indexed_at.isoformat("T") + "Z",
-    ) if state else None
+    return (
+        re.sub(
+            r"\+\d\d:\d\d",
+            "",
+            state.last_indexed_at.isoformat("T") + "Z",
+        )
+        if state
+        else None
+    )
 
 
 async def fetch_mapped_oval() -> dict[str, ET.ElementTree]:
     # Download the oval_url using aiohttp, decompress using bzip and parse
-    oval_url = "https://access.redhat.com/security/data/oval/com.redhat.rhsa-all.xml.bz2"
+    oval_url = (
+        "https://access.redhat.com/security/data/oval/com.redhat.rhsa-all.xml.bz2"
+    )
     async with aiohttp.ClientSession() as session:
         async with session.get(oval_url) as response:
             if response.status == 200:
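The reshaped return expression keeps the same behaviour: the last indexed timestamp is emitted in ISO-8601 form with the +HH:MM UTC offset stripped and a literal "Z" suffix instead, and None is returned when no index state exists yet. A worked example with an invented datetime standing in for state.last_indexed_at:

import datetime
import re

# Invented value standing in for state.last_indexed_at.
last_indexed_at = datetime.datetime(2019, 5, 5, 22, 0, tzinfo=datetime.timezone.utc)

# isoformat("T") + "Z" gives "2019-05-05T22:00:00+00:00Z"; the sub removes
# the "+00:00" offset, leaving "2019-05-05T22:00:00Z".
stamp = re.sub(r"\+\d\d:\d\d", "", last_indexed_at.isoformat("T") + "Z")
print(stamp)  # 2019-05-05T22:00:00Z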
@@ -58,9 +68,7 @@ async def fetch_mapped_oval() -> dict[str, ET.ElementTree]:
 @activity.defn
 async def get_rh_advisories(from_timestamp: str = None) -> None:
     logger = Logger()
-    advisories = await API().search(
-        from_date=from_timestamp, rows=10000, sort_asc=True
-    )
+    advisories = await API().search(from_date=from_timestamp, rows=999, sort_asc=True)
     oval = await fetch_mapped_oval()
 
     for advisory in advisories:
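With the cap lowered, each search call asks for at most 999 advisories, while the stored index state (updated as advisories are processed) lets a later run continue from the last indexed timestamp. A hedged sketch of chaining the two activities for a one-off local run; in the real project they are driven by a Temporal workflow, not called directly like this:

import asyncio

# Hedged sketch only: call the activities directly, outside a Temporal worker.
async def index_once() -> None:
    last_indexed = await get_last_indexed_date()  # None on the very first run
    await get_rh_advisories(from_timestamp=last_indexed)

asyncio.run(index_once())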
@@ -73,14 +81,13 @@ async def get_rh_advisories(from_timestamp: str = None) -> None:
             state.last_indexed_at = advisory_last_indexed_at
             await state.save()
         else:
-            await RedHatIndexState().create(
-                last_index_at=advisory_last_indexed_at
-            )
+            await RedHatIndexState().create(last_index_at=advisory_last_indexed_at)
 
         logger.info("Processing advisory %s", advisory.id)
 
-        existing_advisory = await RedHatAdvisory.filter(name=advisory.id
-                                                        ).get_or_none()
+        existing_advisory = await RedHatAdvisory.filter(
+            name=advisory.id
+        ).get_or_none()
         if existing_advisory:
             logger.info("Advisory %s already exists, skipping", advisory.id)
             continue
@@ -110,13 +117,11 @@ async def get_rh_advisories(from_timestamp: str = None) -> None:
         await RedHatAdvisoryPackage.bulk_create(
             [
                 RedHatAdvisoryPackage(
-                    **{
-                        "red_hat_advisory_id": ra.id,
-                        "nevra": nevra
-                    }
-                ) for nevra in advisory.portal_package
+                    **{"red_hat_advisory_id": ra.id, "nevra": nevra}
+                )
+                for nevra in advisory.portal_package
             ],
-            ignore_conflicts=True
+            ignore_conflicts=True,
         )
 
         if advisory.portal_CVE:
@@ -147,10 +152,10 @@ async def get_rh_advisories(from_timestamp: str = None) -> None:
                 cvss3 = cve.attrib.get("cvss3")
                 if cvss3:
                     cvss3_raw = cvss3.split("/", 1)
-                    cvss3_scoring_vector = cvss3_raw[
-                        1] if cvss3_raw else "UNKNOWN"
-                    cvss3_base_score = cvss3_raw[
-                        0] if cvss3_raw else "UNKNOWN"
+                    cvss3_scoring_vector = (
+                        cvss3_raw[1] if cvss3_raw else "UNKNOWN"
+                    )
+                    cvss3_base_score = cvss3_raw[0] if cvss3_raw else "UNKNOWN"
 
                 cwe = cve.attrib.get("cwe")
                 if not cwe:
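The reflowed assignments keep the original split semantics: as the index usage implies, the cvss3 attribute carries the base score first and the scoring vector second, separated by a slash, so split("/", 1) yields a two-element list. A worked example with an illustrative attribute value (the exact CVSS string is made up):

# Illustrative cvss3 attribute value; in practice it comes from the OVAL feed.
cvss3 = "7.5/CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:N/I:N/A:H"

cvss3_raw = cvss3.split("/", 1)
cvss3_base_score = cvss3_raw[0] if cvss3_raw else "UNKNOWN"      # "7.5"
cvss3_scoring_vector = cvss3_raw[1] if cvss3_raw else "UNKNOWN"  # "CVSS:3.1/AV:N/..."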
@@ -159,16 +164,11 @@ async def get_rh_advisories(from_timestamp: str = None) -> None:
                 cves_to_save.append(
                     RedHatAdvisoryCVE(
                         **{
-                            "red_hat_advisory_id":
-                                ra.id,
-                            "cve":
-                                cve.text,
-                            "cvss3_scoring_vector":
-                                cvss3_scoring_vector,
-                            "cvss3_base_score":
-                                cvss3_base_score,
-                            "cwe":
-                                cwe,
+                            "red_hat_advisory_id": ra.id,
+                            "cve": cve.text,
+                            "cvss3_scoring_vector": cvss3_scoring_vector,
+                            "cvss3_base_score": cvss3_base_score,
+                            "cwe": cwe,
                         }
                     )
                 )
@@ -203,9 +203,10 @@ async def get_rh_advisories(from_timestamp: str = None) -> None:
                         "bugzilla_bug_id": bugzilla_bug_id,
                         "description": bz_map.get(bugzilla_bug_id, ""),
                     }
-                ) for bugzilla_bug_id in advisory.portal_BZ
+                )
+                for bugzilla_bug_id in advisory.portal_BZ
             ],
-            ignore_conflicts=True
+            ignore_conflicts=True,
         )
 
         affected_products = advisory.get_products()
@@ -219,11 +220,12 @@ async def get_rh_advisories(from_timestamp: str = None) -> None:
                         "name": product.name,
                         "major_version": product.major_version,
                         "minor_version": product.minor_version,
-                        "arch": product.arch
+                        "arch": product.arch,
                     }
-                ) for product in affected_products
+                )
+                for product in affected_products
             ],
-            ignore_conflicts=True
+            ignore_conflicts=True,
         )
 
         logger.info("Processed advisory %s", advisory.id)