Add OSV API

This commit is contained in:
Mustafa Gezen 2023-02-05 00:24:31 +01:00
parent 5c60d387d8
commit 4afa718da4
Signed by untrusted user who does not match committer: mustafa
GPG Key ID: DCDF010D946438C1
9 changed files with 464 additions and 117 deletions

View File

@ -2,7 +2,10 @@ load("@aspect_rules_py//py:defs.bzl", "py_library")
py_library(
name = "db_lib",
srcs = ["__init__.py"],
srcs = [
"__init__.py",
"advisory.py",
],
imports = ["../.."],
visibility = ["//:__subpackages__"],
deps = ["@pypi_tortoise_orm//:pkg"],

100
apollo/db/advisory.py Normal file
View File

@ -0,0 +1,100 @@
import datetime
from typing import Optional
from tortoise import connections
from apollo.db import Advisory
async def fetch_advisories(
    size: int,
    page_offset: int,
    keyword: Optional[str],
    product: Optional[str],
    before: Optional[datetime.datetime],
    after: Optional[datetime.datetime],
    cve: Optional[str],
    synopsis: Optional[str],
    severity: Optional[str],
    kind: Optional[str],
    fetch_related: bool = False,
) -> tuple[int, list[Advisory]]:
    """Fetch a page of advisories matching the given filters.

    Args:
        size: page size (SQL ``limit``).
        page_offset: row offset (SQL ``offset``).
        keyword: free-text search applied to synopsis, description, name,
            affected product names, CVE ids and fix ticket ids.
        product: substring match against affected product names.
        before/after: bounds on ``published_at`` (exclusive).
        cve: case-insensitive substring match against CVE ids.
        synopsis: case-insensitive substring match against the synopsis.
        severity: exact severity match.
        kind: exact advisory kind match.
        fetch_related: when True, prefetch packages, CVEs, fixes and
            affected products on each returned advisory.

    Returns:
        ``(total_matching_count, advisories_for_this_page)``; the total comes
        from the ``count(...) over ()`` window column of the query.
    """
    a = """
        with vars (search, size, page_offset, product, before, after, cve, synopsis, severity, kind) as (
            values ($1 :: text, $2 :: bigint, $3 :: bigint, $4 :: text, $5 :: timestamp, $6 :: timestamp, $7 :: text, $8 :: text, $9 :: text, $10 :: text)
        )
        select
            a.id,
            a.created_at,
            a.updated_at,
            a.published_at,
            a.name,
            a.synopsis,
            a.description,
            a.kind,
            a.severity,
            a.topic,
            a.red_hat_advisory_id,
            count(a.*) over () as total
        from
            advisories a
        left outer join advisory_affected_products ap on ap.advisory_id = a.id
        left outer join advisory_cves c on c.advisory_id = a.id
        left outer join advisory_fixes f on f.advisory_id = a.id
        where
            ((select product from vars) is null or exists (select name from advisory_affected_products where advisory_id = a.id and name like '%' || (select product from vars) || '%'))
            and ((select before from vars) is null or a.published_at < (select before from vars))
            and ((select after from vars) is null or a.published_at > (select after from vars))
            and (a.published_at is not null)
            and ((select cve from vars) is null or exists (select cve from advisory_cves where advisory_id = a.id and cve ilike '%' || (select cve from vars) || '%'))
            and ((select synopsis from vars) is null or a.synopsis ilike '%' || (select synopsis from vars) || '%')
            and ((select severity from vars) is null or a.severity = (select severity from vars))
            and ((select kind from vars) is null or a.kind = (select kind from vars))
            and ((select search from vars) is null or
                ap.name like '%' || (select product from vars) || '%' or
                a.synopsis ilike '%' || (select search from vars) || '%' or
                a.description ilike '%' || (select search from vars) || '%' or
                exists (select cve from advisory_cves where advisory_id = a.id and cve ilike '%' || (select search from vars) || '%') or
                exists (select ticket_id from advisory_fixes where advisory_id = a.id and ticket_id ilike '%' || (select search from vars) || '%') or
                a.name ilike '%' || (select search from vars) || '%')
        group by a.id
        order by a.published_at desc
        limit (select size from vars) offset (select page_offset from vars)
    """
    connection = connections.get("default")
    results = await connection.execute_query(
        a, [
            keyword,
            size,
            page_offset,
            product,
            before,
            after,
            cve,
            synopsis,
            severity,
            kind,
        ]
    )
    count = 0
    advisories = []
    if results and results[1]:
        # Every row carries the same window-function total.
        count = results[1][0]["total"]
        advisories = [Advisory(**x) for x in results[1]]
    # Honor the fetch_related flag instead of always prefetching: callers that
    # only need the advisory rows skip the extra queries.
    if fetch_related:
        for advisory in advisories:
            await advisory.fetch_related(
                "packages",
                "cves",
                "fixes",
                "affected_products",
                "packages__supported_product",
                "packages__supported_products_rh_mirror",
            )
    return (
        count,
        advisories,
    )

View File

@ -9,6 +9,7 @@ py_library(
"routes/advisories.py",
"routes/api_advisories.py",
"routes/api_compat.py",
"routes/api_osv.py",
"routes/api_red_hat.py",
"routes/api_updateinfo.py",
"routes/login.py",

View File

@ -1,12 +1,14 @@
from typing import TypeVar, Generic
from typing import TypeVar, Generic, Optional
from fastapi import APIRouter
from fastapi import APIRouter, Depends
from fastapi.exceptions import HTTPException
from fastapi_pagination import Params
from fastapi_pagination.links import Page
from fastapi_pagination.ext.tortoise import paginate
from apollo.db import Advisory
from apollo.db.serialize import Advisory_Pydantic
from apollo.db.advisory import fetch_advisories
router = APIRouter(tags=["advisories"])
@ -23,7 +25,17 @@ class Pagination(Page[T], Generic[T]):
"/",
response_model=Pagination[Advisory_Pydantic],
)
async def list_advisories():
async def list_advisories(
params: Params = Depends(),
product: Optional[str] = None,
before_raw: Optional[str] = None,
after_raw: Optional[str] = None,
cve: Optional[str] = None,
synopsis: Optional[str] = None,
keyword: Optional[str] = None,
severity: Optional[str] = None,
kind: Optional[str] = None,
):
advisories = await paginate(
Advisory.all().prefetch_related(
"red_hat_advisory",
@ -47,8 +59,10 @@ async def get_advisory(advisory_name: str):
"cves",
"fixes",
"affected_products",
"red_hat_advisory",
).first()
"packages",
"packages__supported_product",
"packages__supported_products_rh_mirror",
).get_or_none()
if advisory is None:
raise HTTPException(404)

View File

@ -5,8 +5,6 @@ This module implements the compatibility API for Apollo V2 advisories
import datetime
from typing import TypeVar, Generic, Optional, Any, Sequence
from tortoise import connections
from fastapi import APIRouter, Depends, Query, Response
from fastapi.exceptions import HTTPException
from fastapi_pagination import pagination_ctx
@ -20,10 +18,11 @@ from pydantic import BaseModel
from rssgen.feed import RssGenerator
from apollo.db import Advisory, RedHatIndexState
from apollo.db.advisory import fetch_advisories
from apollo.db.serialize import Advisory_Pydantic_V2, Advisory_Pydantic_V2_CVE, Advisory_Pydantic_V2_Fix, Advisory_Pydantic_V2_RPMs
from apollo.server.settings import UI_URL, COMPANY_NAME, MANAGING_EDITOR, get_setting
from common.fastapi import RenderErrorTemplateException
from common.fastapi import RenderErrorTemplateException, parse_rfc3339_date
router = APIRouter(tags=["v2_compat"])
@ -124,7 +123,7 @@ def v3_advisory_to_v2(
rpms = {}
if include_rpms:
for pkg in advisory.packages:
name = f"{pkg.supported_product.variant} {pkg.supported_products_rh_mirror.match_major_version}"
name = f"{pkg.supported_product.name} {pkg.supported_products_rh_mirror.match_major_version}"
if name not in rpms:
rpms[name] = Advisory_Pydantic_V2_RPMs(nvras=[])
if pkg.nevra not in rpms[name].nvras:
@ -159,30 +158,24 @@ def v3_advisory_to_v2(
async def fetch_advisories_compat(
params: CompatParams,
product: str,
before_raw: str,
after_raw: str,
cve: str,
synopsis: str,
keyword: str,
severity: str,
kind: str,
product: Optional[str] = None,
before_raw: Optional[str] = None,
after_raw: Optional[str] = None,
cve: Optional[str] = None,
synopsis: Optional[str] = None,
keyword: Optional[str] = None,
severity: Optional[str] = None,
kind: Optional[str] = None,
):
before = None
after = None
try:
if before_raw:
before = datetime.datetime.fromisoformat(
before_raw.removesuffix("Z")
)
except:
before = parse_rfc3339_date(before_raw)
if not before:
raise RenderErrorTemplateException("Invalid before date", 400) # noqa # pylint: disable=raise-missing-from
try:
if after_raw:
after = datetime.datetime.fromisoformat(after_raw.removesuffix("Z"))
except:
after = parse_rfc3339_date(after_raw)
if not after:
raise RenderErrorTemplateException("Invalid after date", 400) # noqa # pylint: disable=raise-missing-from
q_kind = kind
@ -205,55 +198,10 @@ async def fetch_advisories_compat(
elif q_severity == "SEVERITY_CRITICAL":
q_severity = "Critical"
a = """
with vars (search, size, page_offset, product, before, after, cve, synopsis, severity, kind) as (
values ($1 :: text, $2 :: bigint, $3 :: bigint, $4 :: text, $5 :: timestamp, $6 :: timestamp, $7 :: text, $8 :: text, $9 :: text, $10 :: text)
)
select
a.id,
a.created_at,
a.updated_at,
a.published_at,
a.name,
a.synopsis,
a.description,
a.kind,
a.severity,
a.topic,
a.red_hat_advisory_id,
count(a.*) over () as total
from
advisories a
left outer join advisory_affected_products ap on ap.advisory_id = a.id
left outer join advisory_cves c on c.advisory_id = a.id
left outer join advisory_fixes f on f.advisory_id = a.id
where
((select product from vars) is null or exists (select name from advisory_affected_products where advisory_id = a.id and name like '%' || (select product from vars) || '%'))
and ((select before from vars) is null or a.published_at < (select before from vars))
and ((select after from vars) is null or a.published_at > (select after from vars))
and (a.published_at is not null)
and ((select cve from vars) is null or exists (select cve from advisory_cves where advisory_id = a.id and cve ilike '%' || (select cve from vars) || '%'))
and ((select synopsis from vars) is null or a.synopsis ilike '%' || (select synopsis from vars) || '%')
and ((select severity from vars) is null or a.severity = (select severity from vars))
and ((select kind from vars) is null or a.kind = (select kind from vars))
and ((select search from vars) is null or
ap.name like '%' || (select product from vars) || '%' or
a.synopsis ilike '%' || (select search from vars) || '%' or
a.description ilike '%' || (select search from vars) || '%' or
exists (select cve from advisory_cves where advisory_id = a.id and cve ilike '%' || (select search from vars) || '%') or
exists (select ticket_id from advisory_fixes where advisory_id = a.id and ticket_id ilike '%' || (select search from vars) || '%') or
a.name ilike '%' || (select search from vars) || '%')
group by a.id
order by a.published_at desc
limit (select size from vars) offset (select page_offset from vars)
"""
connection = connections.get("default")
results = await connection.execute_query(
a, [
keyword,
return await fetch_advisories(
params.get_size(),
params.get_offset(),
keyword,
product,
before,
after,
@ -261,18 +209,7 @@ async def fetch_advisories_compat(
synopsis,
q_severity,
q_kind,
]
)
count = 0
if results:
if results[1]:
count = results[1][0]["total"]
advisories = [Advisory(**x) for x in results[1]]
return (
count,
advisories,
fetch_related=True,
)
@ -308,23 +245,11 @@ async def list_advisories_compat_v2(
kind,
)
count = fetch_adv[0]
advisories = fetch_adv[1]
advisories = []
for adv in fetch_adv[1]:
await adv.fetch_related(
"packages",
"cves",
"fixes",
"affected_products",
"packages",
"packages__supported_product",
"packages__supported_products_rh_mirror",
)
advisories.append(adv)
v2_advisories: list[Advisory_Pydantic_V2] = []
for advisory in advisories:
v2_advisories.append(v3_advisory_to_v2(advisory))
v2_advisories: list[Advisory_Pydantic_V2] = [
v3_advisory_to_v2(x) for x in advisories
]
page = create_page(v2_advisories, count, params)
page.lastUpdated = state.last_indexed_at.isoformat("T").replace(

View File

@ -0,0 +1,242 @@
from typing import TypeVar, Generic, Optional
from fastapi import APIRouter, Depends, Query, Response
from fastapi_pagination import create_page
from fastapi_pagination.links import Page
from pydantic import BaseModel
from slugify import slugify
from apollo.db import Advisory
from apollo.db.advisory import fetch_advisories
from apollo.rpmworker.repomd import EPOCH_RE, NVRA_RE
from common.fastapi import Params, to_rfc3339_date
router = APIRouter(tags=["osv"])
T = TypeVar("T")
class Pagination(Page[T], Generic[T]):
    """Paginated response envelope whose item list serializes as "advisories"."""

    class Config:
        # Expose the inherited ``items`` field under the alias "advisories",
        # while still allowing population by the field's real name.
        allow_population_by_field_name = True
        fields = {"items": {"alias": "advisories"}}
class OSVSeverity(BaseModel):
    """OSV severity entry: a scoring-system identifier plus its score string."""

    type: str  # e.g. "CVSS_V3" (the only type produced by to_osv_advisory)
    score: str
class OSVPackage(BaseModel):
    """Package identity within an OSV ecosystem; ``purl`` is optional."""

    ecosystem: str
    name: str
    purl: Optional[str] = None  # package URL, when one could be derived
class OSVEvent(BaseModel):
    """One event in an OSV version range; only the relevant field is set."""

    introduced: Optional[str] = None
    fixed: Optional[str] = None
    last_affected: Optional[str] = None
    limit: Optional[str] = None
class OSVRangeDatabaseSpecific(BaseModel):
    """Database-specific payload for a range; intentionally empty for now."""
    pass
class OSVRange(BaseModel):
    """OSV version range: range type, source repo and ordered events."""

    type: str  # e.g. "ECOSYSTEM" (the only type produced by to_osv_advisory)
    repo: str
    events: list[OSVEvent]
    database_specific: OSVRangeDatabaseSpecific
class OSVEcosystemSpecific(BaseModel):
    """Ecosystem-specific payload for an affected entry; intentionally empty."""
    pass
class OSVAffectedDatabaseSpecific(BaseModel):
    """Database-specific payload for an affected entry; intentionally empty."""
    pass
class OSVAffected(BaseModel):
    """One affected package with its version ranges and metadata blobs."""

    package: OSVPackage
    ranges: list[OSVRange]
    versions: list[str]
    ecosystem_specific: OSVEcosystemSpecific
    database_specific: OSVAffectedDatabaseSpecific
class OSVReference(BaseModel):
    """External reference attached to an advisory: a reference type and URL."""

    type: str
    url: str
class OSVCredit(BaseModel):
    """Credit entry: a name plus contact strings."""

    name: str
    contact: list[str]
class OSVDatabaseSpecific(BaseModel):
    """Database-specific payload for the advisory; intentionally empty."""
    pass
class OSVAdvisory(BaseModel):
    """Top-level OSV advisory document (schema version 1.3.1)."""

    schema_version: str = "1.3.1"
    id: str
    modified: str    # RFC3339 timestamp string
    published: str   # RFC3339 timestamp string
    withdrawn: Optional[str]
    aliases: list[str]   # CVE ids associated with the advisory
    related: list[str]
    summary: str
    details: str
    severity: list[OSVSeverity]
    affected: list[OSVAffected]
    references: list[OSVReference]
    credits: list[OSVCredit]
    database_specific: OSVDatabaseSpecific
def to_osv_advisory(advisory: Advisory) -> OSVAdvisory:
    """Convert an internal Advisory into an OSV 1.3.1 advisory document.

    Assumes ``advisory.packages`` and ``advisory.cves`` (and their related
    objects) are already fetched — callers obtain advisories via
    ``fetch_advisories(..., fetch_related=True)``; TODO confirm for any
    other call sites.
    """
    affected_pkgs = []
    # Group packages by package name, then by product/ecosystem name:
    # {package_name: {product_name: [packages]}}
    pkg_name_map = {}
    for pkg in advisory.packages:
        if pkg.package_name not in pkg_name_map:
            pkg_name_map[pkg.package_name] = {}
        product_name = pkg.product_name
        if pkg.supported_products_rh_mirror:
            # Mirrored packages use "<variant>:<major_version>" as the
            # ecosystem name instead of the raw product name.
            product_name = f"{pkg.supported_product.variant}:{pkg.supported_products_rh_mirror.match_major_version}"
        if product_name not in pkg_name_map[pkg.package_name]:
            pkg_name_map[pkg.package_name][product_name] = []
        pkg_name_map[pkg.package_name][product_name].append(pkg)
    for pkg_name, affected_products in pkg_name_map.items():
        for product_name, affected_packages in affected_products.items():
            if not affected_packages:
                continue
            # Pick a representative package to derive arch and purl from:
            # prefer a mirror entry or concrete-arch package, fall back to
            # a noarch package; "src" packages are never representative.
            first_pkg = None
            noarch_pkg = None
            arch = None
            nvra = None
            ver_rel = None
            for x in affected_packages:
                # Strip the epoch prefix, then parse name-version-release-arch.
                nvra = NVRA_RE.search(EPOCH_RE.sub("", x.nevra))
                if not nvra:
                    continue
                ver_rel = f"{nvra.group(2)}-{nvra.group(3)}"
                if x.supported_products_rh_mirror:
                    # Mirror entries carry their arch explicitly.
                    first_pkg = x
                    arch = x.supported_products_rh_mirror.match_arch
                    break
                arch = nvra.group(4).lower()
                if arch == "src":
                    continue
                if arch == "noarch":
                    noarch_pkg = x
                    continue
                first_pkg = x
                break
            if not first_pkg and noarch_pkg:
                first_pkg = noarch_pkg
            if not ver_rel:
                # No parseable NEVRA in this group; skip it entirely.
                continue
            purl = None
            if first_pkg:
                slugified = slugify(first_pkg.supported_product.variant)
                slugified_distro = slugify(first_pkg.product_name)
                # Drop the "-<arch>" suffix from the distro slug so the purl
                # distro qualifier is arch-independent.
                slugified_distro = slugified_distro.replace(
                    f"-{slugify(arch)}",
                    "",
                )
                purl = f"pkg:rpm/{slugified}/{pkg_name}@{ver_rel}?arch={arch}&distro={slugified_distro}"
            affected = OSVAffected(
                package=OSVPackage(
                    ecosystem=product_name,
                    name=pkg_name,
                    purl=purl,
                ),
                ranges=[],
                versions=[],
                ecosystem_specific=OSVEcosystemSpecific(),
                database_specific=OSVAffectedDatabaseSpecific(),
            )
            # One ECOSYSTEM range per package row. NOTE(review): ver_rel is
            # whatever the representative-selection loop last parsed, not
            # necessarily x's own version-release — confirm this is intended.
            for x in affected_packages:
                ranges = [
                    OSVRange(
                        type="ECOSYSTEM",
                        repo=x.repo_name,
                        events=[
                            OSVEvent(introduced="0"),
                            OSVEvent(fixed=ver_rel),
                        ],
                        database_specific=OSVRangeDatabaseSpecific(),
                    )
                ]
                affected.ranges.extend(ranges)
            affected_pkgs.append(affected)
    return OSVAdvisory(
        id=advisory.name,
        modified=to_rfc3339_date(advisory.updated_at),
        published=to_rfc3339_date(advisory.published_at),
        withdrawn=None,
        aliases=[x.cve for x in advisory.cves],
        related=[],
        summary=advisory.synopsis,
        details=advisory.description,
        severity=[
            OSVSeverity(type="CVSS_V3", score=x.cvss3_scoring_vector)
            for x in advisory.cves
        ],
        affected=affected_pkgs,
        references=[],
        credits=[],
        database_specific=OSVDatabaseSpecific(),
    )
@router.get("/", response_model=Pagination[OSVAdvisory])
async def get_advisories_osv(
    params: Params = Depends(),
    product: Optional[str] = None,
    before: Optional[str] = None,
    after: Optional[str] = None,
    cve: Optional[str] = None,
    synopsis: Optional[str] = None,
    keyword: Optional[str] = None,
    severity: Optional[str] = None,
    kind: Optional[str] = None,
):
    """List advisories in OSV format, filtered and paginated.

    ``before``/``after`` are RFC3339 date strings bounding ``published_at``;
    the remaining filters are forwarded to ``fetch_advisories``.
    """
    # Function-scope import to avoid touching the module import block.
    from common.fastapi import parse_rfc3339_date

    # fetch_advisories binds before/after to "$n :: timestamp" parameters and
    # declares them as Optional[datetime.datetime] — parse the raw query
    # strings rather than forwarding them verbatim. Unparseable values fall
    # back to None (i.e. the bound is ignored).
    before_parsed = parse_rfc3339_date(before) if before else None
    after_parsed = parse_rfc3339_date(after) if after else None
    fetch_adv = await fetch_advisories(
        params.get_size(),
        params.get_offset(),
        keyword,
        product,
        before_parsed,
        after_parsed,
        cve,
        synopsis,
        severity,
        kind,
        fetch_related=True,
    )
    count = fetch_adv[0]
    advisories = fetch_adv[1]
    osv_advisories = [to_osv_advisory(x) for x in advisories]
    return create_page(osv_advisories, count, params)

View File

@ -14,11 +14,12 @@ from apollo.server.routes.statistics import router as statistics_router
from apollo.server.routes.login import router as login_router
from apollo.server.routes.logout import router as logout_router
from apollo.server.routes.admin_index import router as admin_index_router
from apollo.server.routes.red_hat_advisories import router as red_hat_advisories_router
from apollo.server.routes.api_advisories import router as api_advisories_router
from apollo.server.routes.api_updateinfo import router as api_updateinfo_router
from apollo.server.routes.api_red_hat import router as api_red_hat_router
from apollo.server.routes.api_compat import router as api_compat_router
from apollo.server.routes.red_hat_advisories import router as red_hat_advisories_router
from apollo.server.routes.api_osv import router as api_osv_router
from apollo.server.settings import SECRET_KEY, SettingsMiddleware, get_setting
from apollo.server.utils import admin_user_scheme, templates
from apollo.db import Settings
@ -53,6 +54,7 @@ app.include_router(api_advisories_router, prefix="/api/v3/advisories")
app.include_router(api_updateinfo_router, prefix="/api/v3/updateinfo")
app.include_router(api_red_hat_router, prefix="/api/v3/red_hat")
app.include_router(api_compat_router, prefix="/v2/advisories")
app.include_router(api_osv_router, prefix="/api/v3/osv")
add_pagination(app)

View File

@ -9,11 +9,13 @@ py_library(
"info.py",
"logger.py",
"temporal.py",
"testing.py",
],
imports = [".."],
visibility = ["//:__subpackages__"],
deps = [
"@pypi_fastapi//:pkg",
"@pypi_pydantic//:pkg",
"@pypi_temporalio//:pkg",
"@pypi_tortoise_orm//:pkg",
],

View File

@ -1,6 +1,12 @@
import datetime
import os
from typing import Any, Optional
from fastapi import Query
from fastapi.staticfiles import StaticFiles
from fastapi_pagination import Params as FastAPIParams
from pydantic import BaseModel, root_validator
class StaticFilesSym(StaticFiles):
@ -20,3 +26,55 @@ class RenderErrorTemplateException(Exception):
def __init__(self, msg=None, status_code=404):
self.msg = msg
self.status_code = status_code
class Params(FastAPIParams):
    """Pagination parameters extended with size/offset helpers for raw SQL."""

    def get_size(self) -> int:
        # Requested page size, straight from the client.
        return self.size

    def get_offset(self) -> int:
        # Zero-based row offset for the current page; pages are 1-based.
        return (self.page - 1) * self.size
class DateTimeParams(BaseModel):
    """Optional ``before``/``after`` RFC3339 query parameters.

    The raw strings arrive in ``before``/``after``; the root validator parses
    them into ``before_parsed``/``after_parsed`` and rejects unparseable
    values with a 400 error.
    """

    before: str = Query(default=None)
    after: str = Query(default=None)
    # Populated by the root validator; not meant to be supplied directly.
    before_parsed: datetime.datetime = None
    after_parsed: datetime.datetime = None

    @root_validator(pre=True)
    def __root_validator__(cls, value: Any) -> Any:  # pylint: disable=no-self-argument
        # With pre=True, `value` is the raw input mapping, so either key may
        # be absent entirely — use .get() instead of indexing to avoid a
        # KeyError when a parameter is not supplied.
        before_raw = value.get("before")
        if before_raw:
            before = parse_rfc3339_date(before_raw)
            if not before:
                raise RenderErrorTemplateException("Invalid before date", 400)
            value["before_parsed"] = before
        after_raw = value.get("after")
        if after_raw:
            after = parse_rfc3339_date(after_raw)
            if not after:
                raise RenderErrorTemplateException("Invalid after date", 400)
            value["after_parsed"] = after
        return value

    def get_before(self) -> datetime.datetime:
        return self.before_parsed

    def get_after(self) -> datetime.datetime:
        return self.after_parsed
def parse_rfc3339_date(date: str) -> Optional[datetime.datetime]:
    """Parse an RFC3339/ISO-8601 timestamp string.

    A trailing "Z" is stripped before parsing. Returns None for a falsy
    input or an unparseable string.
    """
    if not date:
        return None
    try:
        return datetime.datetime.fromisoformat(date.removesuffix("Z"))
    except ValueError:
        return None
def to_rfc3339_date(date: datetime.datetime) -> str:
    """Render a datetime as an RFC3339 string with a trailing "Z".

    Any "+00:00" UTC offset in the ISO form is dropped before the "Z"
    suffix is appended.
    """
    iso = date.isoformat("T")
    return iso.replace("+00:00", "") + "Z"