From 2e94e2dff175290b652a38a6e583597f309066aa Mon Sep 17 00:00:00 2001
From: Mustafa Gezen
Date: Mon, 26 Jun 2023 19:57:57 +0200
Subject: [PATCH] Initial commit

---
 .gitignore                                  |   2 +
 README.md                                   |  22 ++++
 alembic.ini                                 | 110 ++++++++++++++++++
 alembic/README                              |   1 +
 alembic/env.py                              |  78 +++++++++++++
 alembic/script.py.mako                      |  24 ++++
 .../9fbf9dd04e30_create_entries_table.py    |  33 ++++++
 data/rh_public_key.asc                      |  29 +++++
 mothership/db/__init__.py                   |   7 ++
 mothership/models/__init__.py               |  10 ++
 mothership/models/entry.py                  |  20 ++++
 mothership_coordinator/route_entries.py     |  81 +++++++++++++
 mothership_coordinator/route_upload_srpm.py |  68 +++++++++++
 mothership_coordinator/server.py            |  35 ++++++
 mothership_ui/server.py                     |  80 +++++++++++++
 mothership_ui/templates/details.jinja       |  50 ++++++++
 mothership_ui/templates/error.jinja         |   6 +
 mothership_ui/templates/index.jinja         |  22 ++++
 mothership_ui/templates/layout.jinja        |  33 ++++++
 mothership_ui/utils.py                      |   3 +
 mothership_worker/.keep                     |   0
 requirements.txt                            |  11 ++
 tests/__init__.py                           |   0
 23 files changed, 725 insertions(+)
 create mode 100644 .gitignore
 create mode 100644 README.md
 create mode 100644 alembic.ini
 create mode 100644 alembic/README
 create mode 100644 alembic/env.py
 create mode 100644 alembic/script.py.mako
 create mode 100644 alembic/versions/9fbf9dd04e30_create_entries_table.py
 create mode 100644 data/rh_public_key.asc
 create mode 100644 mothership/db/__init__.py
 create mode 100644 mothership/models/__init__.py
 create mode 100644 mothership/models/entry.py
 create mode 100644 mothership_coordinator/route_entries.py
 create mode 100644 mothership_coordinator/route_upload_srpm.py
 create mode 100644 mothership_coordinator/server.py
 create mode 100644 mothership_ui/server.py
 create mode 100644 mothership_ui/templates/details.jinja
 create mode 100644 mothership_ui/templates/error.jinja
 create mode 100644 mothership_ui/templates/index.jinja
 create mode 100644 mothership_ui/templates/layout.jinja
 create mode 100644 mothership_ui/utils.py
 create mode 100644 mothership_worker/.keep
 create mode 100644 requirements.txt
 create mode 100644 tests/__init__.py

diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..489f97c
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,2 @@
+__pycache__
+/.venv
\ No newline at end of file
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..c2af76e
--- /dev/null
+++ b/README.md
@@ -0,0 +1,22 @@
+# mothership
+Tool to archive RPM packages and attest to their authenticity
+
+# Introduction
+This tool is used by the Rocky Linux project to archive RPM packages and attest to their authenticity.
+
+The sources are staged first; pushing the fully debranded sources afterwards is optional.
+
+The import/debrand process is connected to Peridot, while attestation and archival are handled entirely by this tool.
+
+# Configuration
+```
+port: 8080
+rekor_endpoint: http://rekor:8090
+git_endpoint: ssh://git@git.rockylinux.org:22220/srpm-attest-test
+git_ssh_key_path: PATH_TO_SSH_KEY
+peridot:
+  endpoint: https://peridot-api.build.resf.org
+  client_id: CLIENT_ID
+  client_secret: CLIENT_SECRET
+  project_id: PROJECT_ID
+```
diff --git a/alembic.ini b/alembic.ini
new file mode 100644
index 0000000..5dc47e8
--- /dev/null
+++ b/alembic.ini
@@ -0,0 +1,110 @@
+# A generic, single database configuration.
+ +[alembic] +# path to migration scripts +script_location = alembic + +# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s +# Uncomment the line below if you want the files to be prepended with date and time +# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file +# for all available tokens +# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s + +# sys.path path, will be prepended to sys.path if present. +# defaults to the current working directory. +prepend_sys_path = . + +# timezone to use when rendering the date within the migration file +# as well as the filename. +# If specified, requires the python-dateutil library that can be +# installed by adding `alembic[tz]` to the pip requirements +# string value is passed to dateutil.tz.gettz() +# leave blank for localtime +# timezone = + +# max length of characters to apply to the +# "slug" field +# truncate_slug_length = 40 + +# set to 'true' to run the environment during +# the 'revision' command, regardless of autogenerate +# revision_environment = false + +# set to 'true' to allow .pyc and .pyo files without +# a source .py file to be detected as revisions in the +# versions/ directory +# sourceless = false + +# version location specification; This defaults +# to alembic/versions. When using multiple version +# directories, initial revisions must be specified with --version-path. +# The path separator used here should be the separator specified by "version_path_separator" below. +# version_locations = %(here)s/bar:%(here)s/bat:alembic/versions + +# version path separator; As mentioned above, this is the character used to split +# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep. +# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas. +# Valid values for version_path_separator are: +# +# version_path_separator = : +# version_path_separator = ; +# version_path_separator = space +version_path_separator = os # Use os.pathsep. Default configuration used for new projects. + +# set to 'true' to search source files recursively +# in each "version_locations" directory +# new in Alembic version 1.10 +# recursive_version_locations = false + +# the output encoding used when revision files +# are written from script.py.mako +# output_encoding = utf-8 + +sqlalchemy.url = postgresql://postgres:postgres@localhost/mothership + + +[post_write_hooks] +# post_write_hooks defines scripts or Python functions that are run +# on newly generated revision scripts. 
See the documentation for further +# detail and examples + +# format using "black" - use the console_scripts runner, against the "black" entrypoint +# hooks = black +# black.type = console_scripts +# black.entrypoint = black +# black.options = -l 79 REVISION_SCRIPT_FILENAME + +# Logging configuration +[loggers] +keys = root,sqlalchemy,alembic + +[handlers] +keys = console + +[formatters] +keys = generic + +[logger_root] +level = WARN +handlers = console +qualname = + +[logger_sqlalchemy] +level = WARN +handlers = +qualname = sqlalchemy.engine + +[logger_alembic] +level = INFO +handlers = +qualname = alembic + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatter_generic] +format = %(levelname)-5.5s [%(name)s] %(message)s +datefmt = %H:%M:%S diff --git a/alembic/README b/alembic/README new file mode 100644 index 0000000..98e4f9c --- /dev/null +++ b/alembic/README @@ -0,0 +1 @@ +Generic single-database configuration. \ No newline at end of file diff --git a/alembic/env.py b/alembic/env.py new file mode 100644 index 0000000..36112a3 --- /dev/null +++ b/alembic/env.py @@ -0,0 +1,78 @@ +from logging.config import fileConfig + +from sqlalchemy import engine_from_config +from sqlalchemy import pool + +from alembic import context + +# this is the Alembic Config object, which provides +# access to the values within the .ini file in use. +config = context.config + +# Interpret the config file for Python logging. +# This line sets up loggers basically. +if config.config_file_name is not None: + fileConfig(config.config_file_name) + +# add your model's MetaData object here +# for 'autogenerate' support +# from myapp import mymodel +# target_metadata = mymodel.Base.metadata +target_metadata = None + +# other values from the config, defined by the needs of env.py, +# can be acquired: +# my_important_option = config.get_main_option("my_important_option") +# ... etc. + + +def run_migrations_offline() -> None: + """Run migrations in 'offline' mode. + + This configures the context with just a URL + and not an Engine, though an Engine is acceptable + here as well. By skipping the Engine creation + we don't even need a DBAPI to be available. + + Calls to context.execute() here emit the given string to the + script output. + + """ + url = config.get_main_option("sqlalchemy.url") + context.configure( + url=url, + target_metadata=target_metadata, + literal_binds=True, + dialect_opts={"paramstyle": "named"}, + ) + + with context.begin_transaction(): + context.run_migrations() + + +def run_migrations_online() -> None: + """Run migrations in 'online' mode. + + In this scenario we need to create an Engine + and associate a connection with the context. 
+ + """ + connectable = engine_from_config( + config.get_section(config.config_ini_section, {}), + prefix="sqlalchemy.", + poolclass=pool.NullPool, + ) + + with connectable.connect() as connection: + context.configure( + connection=connection, target_metadata=target_metadata + ) + + with context.begin_transaction(): + context.run_migrations() + + +if context.is_offline_mode(): + run_migrations_offline() +else: + run_migrations_online() diff --git a/alembic/script.py.mako b/alembic/script.py.mako new file mode 100644 index 0000000..55df286 --- /dev/null +++ b/alembic/script.py.mako @@ -0,0 +1,24 @@ +"""${message} + +Revision ID: ${up_revision} +Revises: ${down_revision | comma,n} +Create Date: ${create_date} + +""" +from alembic import op +import sqlalchemy as sa +${imports if imports else ""} + +# revision identifiers, used by Alembic. +revision = ${repr(up_revision)} +down_revision = ${repr(down_revision)} +branch_labels = ${repr(branch_labels)} +depends_on = ${repr(depends_on)} + + +def upgrade() -> None: + ${upgrades if upgrades else "pass"} + + +def downgrade() -> None: + ${downgrades if downgrades else "pass"} diff --git a/alembic/versions/9fbf9dd04e30_create_entries_table.py b/alembic/versions/9fbf9dd04e30_create_entries_table.py new file mode 100644 index 0000000..a356b5b --- /dev/null +++ b/alembic/versions/9fbf9dd04e30_create_entries_table.py @@ -0,0 +1,33 @@ +"""create entries table + +Revision ID: 9fbf9dd04e30 +Revises: +Create Date: 2023-06-26 05:24:50.925093 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = "9fbf9dd04e30" +down_revision = None +branch_labels = None +depends_on = None + + +def upgrade() -> None: + op.create_table( + "entries", + sa.Column("id", sa.Integer, primary_key=True), + sa.Column("entry_uuid", sa.String(), nullable=False), + sa.Column("package_name", sa.String(), nullable=False), + sa.Column("package_version", sa.String(), nullable=False), + sa.Column("package_release", sa.String(), nullable=False), + sa.Column("package_epoch", sa.String(), nullable=False), + sa.Column("os_release", sa.String(), nullable=False), + ) + + +def downgrade() -> None: + op.drop_table("entries") diff --git a/data/rh_public_key.asc b/data/rh_public_key.asc new file mode 100644 index 0000000..32892ec --- /dev/null +++ b/data/rh_public_key.asc @@ -0,0 +1,29 @@ +-----BEGIN PGP PUBLIC KEY BLOCK----- +Version: GnuPG v1.4.5 (GNU/Linux) + +mQINBErgSTsBEACh2A4b0O9t+vzC9VrVtL1AKvUWi9OPCjkvR7Xd8DtJxeeMZ5eF +0HtzIG58qDRybwUe89FZprB1ffuUKzdE+HcL3FbNWSSOXVjZIersdXyH3NvnLLLF +0DNRB2ix3bXG9Rh/RXpFsNxDp2CEMdUvbYCzE79K1EnUTVh1L0Of023FtPSZXX0c +u7Pb5DI5lX5YeoXO6RoodrIGYJsVBQWnrWw4xNTconUfNPk0EGZtEnzvH2zyPoJh +XGF+Ncu9XwbalnYde10OCvSWAZ5zTCpoLMTvQjWpbCdWXJzCm6G+/hx9upke546H +5IjtYm4dTIVTnc3wvDiODgBKRzOl9rEOCIgOuGtDxRxcQkjrC+xvg5Vkqn7vBUyW +9pHedOU+PoF3DGOM+dqv+eNKBvh9YF9ugFAQBkcG7viZgvGEMGGUpzNgN7XnS1gj +/DPo9mZESOYnKceve2tIC87p2hqjrxOHuI7fkZYeNIcAoa83rBltFXaBDYhWAKS1 +PcXS1/7JzP0ky7d0L6Xbu/If5kqWQpKwUInXtySRkuraVfuK3Bpa+X1XecWi24JY +HVtlNX025xx1ewVzGNCTlWn1skQN2OOoQTV4C8/qFpTW6DTWYurd4+fE0OJFJZQF +buhfXYwmRlVOgN5i77NTIJZJQfYFj38c/Iv5vZBPokO6mffrOTv3MHWVgQARAQAB +tDNSZWQgSGF0LCBJbmMuIChyZWxlYXNlIGtleSAyKSA8c2VjdXJpdHlAcmVkaGF0 +LmNvbT6JAjYEEwECACAFAkrgSTsCGwMGCwkIBwMCBBUCCAMEFgIDAQIeAQIXgAAK +CRAZni+R/UMdUWzpD/9s5SFR/ZF3yjY5VLUFLMXIKUztNN3oc45fyLdTI3+UClKC +2tEruzYjqNHhqAEXa2sN1fMrsuKec61Ll2NfvJjkLKDvgVIh7kM7aslNYVOP6BTf +C/JJ7/ufz3UZmyViH/WDl+AYdgk3JqCIO5w5ryrC9IyBzYv2m0HqYbWfphY3uHw5 
+un3ndLJcu8+BGP5F+ONQEGl+DRH58Il9Jp3HwbRa7dvkPgEhfFR+1hI+Btta2C7E +0/2NKzCxZw7Lx3PBRcU92YKyaEihfy/aQKZCAuyfKiMvsmzs+4poIX7I9NQCJpyE +IGfINoZ7VxqHwRn/d5mw2MZTJjbzSf+Um9YJyA0iEEyD6qjriWQRbuxpQXmlAJbh +8okZ4gbVFv1F8MzK+4R8VvWJ0XxgtikSo72fHjwha7MAjqFnOq6eo6fEC/75g3NL +Ght5VdpGuHk0vbdENHMC8wS99e5qXGNDued3hlTavDMlEAHl34q2H9nakTGRF5Ki +JUfNh3DVRGhg8cMIti21njiRh7gyFI2OccATY7bBSr79JhuNwelHuxLrCFpY7V25 +OFktl15jZJaMxuQBqYdBgSay2G0U6D1+7VsWufpzd/Abx1/c3oi9ZaJvW22kAggq +dzdA27UUYjWvx42w9menJwh/0jeQcTecIUd0d0rFcw/c1pvgMMl/Q73yzKgKYw== +=zbHE +-----END PGP PUBLIC KEY BLOCK----- diff --git a/mothership/db/__init__.py b/mothership/db/__init__.py new file mode 100644 index 0000000..713c510 --- /dev/null +++ b/mothership/db/__init__.py @@ -0,0 +1,7 @@ +from sqlalchemy.ext.asyncio import AsyncEngine, create_async_engine + + +def new_engine() -> AsyncEngine: + return create_async_engine( + "postgresql+asyncpg://postgres:postgres@localhost:5432/mothership" + ) diff --git a/mothership/models/__init__.py b/mothership/models/__init__.py new file mode 100644 index 0000000..9d20880 --- /dev/null +++ b/mothership/models/__init__.py @@ -0,0 +1,10 @@ +import pydantic +from sqlalchemy.orm import DeclarativeBase, MappedAsDataclass + + +class Base( + MappedAsDataclass, + DeclarativeBase, + dataclass_callable=pydantic.dataclasses.dataclass, +): + pass diff --git a/mothership/models/entry.py b/mothership/models/entry.py new file mode 100644 index 0000000..89bd887 --- /dev/null +++ b/mothership/models/entry.py @@ -0,0 +1,20 @@ +from typing import Optional + +from sqlalchemy.orm import Mapped, mapped_column + +from mothership.models import Base + + +class Entry(Base): + __tablename__ = "entries" + + id: Mapped[Optional[int]] = mapped_column(primary_key=True) + entry_uuid: Mapped[str] = mapped_column(nullable=False) + package_name: Mapped[str] = mapped_column(nullable=False) + package_version: Mapped[str] = mapped_column(nullable=False) + package_release: Mapped[str] = mapped_column(nullable=False) + package_epoch: Mapped[str] = mapped_column(nullable=False) + os_release: Mapped[str] = mapped_column(nullable=False) + + def __repr__(self) -> str: + return f"Entry(id={self.id}, entry_uuid={self.entry_uuid}, package_name={self.package_name}, package_version={self.package_version}, package_release={self.package_release}, package_epoch={self.package_epoch})" diff --git a/mothership_coordinator/route_entries.py b/mothership_coordinator/route_entries.py new file mode 100644 index 0000000..a68a96d --- /dev/null +++ b/mothership_coordinator/route_entries.py @@ -0,0 +1,81 @@ +from typing import TypeVar, Generic +from dataclasses import asdict +from json import loads +from base64 import b64decode + +from fastapi import APIRouter, HTTPException, Request, Depends +from fastapi.responses import JSONResponse +from fastapi_pagination import Page, Params +from fastapi_pagination.ext.sqlalchemy import paginate +from pydantic import BaseModel +from sqlalchemy import select +from sqlalchemy.ext.asyncio import AsyncSession + +import rekor_sdk + +from mothership.models.entry import Entry + +router = APIRouter(prefix="/entries") + +T = TypeVar("T") + + +class Pagination(Page[T], Generic[T]): + class Config: + allow_population_by_field_name = True + fields = {"items": {"alias": "entries"}} + + +class DetailedEntry(BaseModel): + entry: Entry + rekor_entry: dict + + +def paginate_entries(session, params): + return paginate(session.query(Entry), params=params) + + +# This method has a lot of hacky stuff, maybe SQLAlchemy was a mistake. 
+# I miss Tortoise +@router.get("/", response_model=Pagination[Entry], response_model_exclude_none=True) +async def get_entries(req: Request, params: Params = Depends()): + async with AsyncSession(req.app.state.db) as session: + page = await session.run_sync(paginate_entries, params=params) + # Convert items to dict + page.items = [asdict(item) for item in page.items] + # Delete ID field + for item in page.items: + del item["id"] + return JSONResponse(content=page.dict()) + + +@router.get("/{entry_id}", response_class=DetailedEntry) +async def get_entry(req: Request, entry_id: str): + async with AsyncSession(req.app.state.db) as session: + result = await session.execute( + select(Entry).where(Entry.entry_uuid == entry_id) + ) + entry = result.scalars().first() + + # Fetch entry from Rekor + try: + res = req.app.state.entries_api.get_log_entry_by_uuid(entry_id) + + # Get first value + val = list(res.values())[0] + + # Get base64 encoded RPM body + body = loads(b64decode(val.get("body")).decode()) + + entry_dict = asdict(entry) + del entry_dict["id"] + + return JSONResponse( + content={ + "entry": entry_dict, + "rekor_entry": body, + }, + ) + except rekor_sdk.rest.ApiException as exc: + err = loads(exc.body.decode()) + raise HTTPException(status_code=400, detail=err.get("message")) from exc diff --git a/mothership_coordinator/route_upload_srpm.py b/mothership_coordinator/route_upload_srpm.py new file mode 100644 index 0000000..092de2d --- /dev/null +++ b/mothership_coordinator/route_upload_srpm.py @@ -0,0 +1,68 @@ +from typing import Annotated +from base64 import b64encode, b64decode +from json import loads + +import rekor_sdk + +from fastapi import APIRouter, Form, File, Request, HTTPException +from sqlalchemy.ext.asyncio import AsyncSession + +from mothership.models.entry import Entry + +router = APIRouter(prefix="/upload_srpm") + + +@router.post("/", response_model=Entry) +async def upload_srpm( + file: Annotated[bytes, File()], + os_release: Annotated[str, Form()], + req: Request, +) -> Entry: + entry = { + "kind": "rpm", + "apiVersion": "0.0.1", + "spec": { + "package": {"content": b64encode(file).decode()}, + "publicKey": {"content": req.app.state.public_key}, + }, + } + + try: + res: rekor_sdk.LogEntry = req.app.state.entries_api.create_log_entry(entry) + except rekor_sdk.rest.ApiException as exc: + err = loads(exc.body.decode()) + raise HTTPException(status_code=400, detail=err["message"]) + + # Entry uuid is the key + entry_uuid: str = list(res.keys())[0] + + # Res should have one value + val: dict = list(res.values())[0] + + # Get base64 encoded RPM body + body = loads(b64decode(val.get("body")).decode()) + + # From body get the headers (spec.package.headers) + headers = body.get("spec").get("package").get("headers") + + # Get the name, version, release, and epoch from the headers + name = headers.get("Name") + version = headers.get("Version") + release = headers.get("Release") + epoch = headers.get("Epoch") + + entry_db = Entry( + id=None, + entry_uuid=entry_uuid, + package_name=name, + package_version=version, + package_release=release, + package_epoch=epoch, + os_release=os_release, + ) + + async with AsyncSession(req.app.state.db, expire_on_commit=False) as session: + session.add(entry_db) + await session.commit() + + return entry_db diff --git a/mothership_coordinator/server.py b/mothership_coordinator/server.py new file mode 100644 index 0000000..e91f7bb --- /dev/null +++ b/mothership_coordinator/server.py @@ -0,0 +1,35 @@ +from contextlib import asynccontextmanager +from 
base64 import b64encode + +from fastapi import FastAPI +from fastapi_pagination import add_pagination + +import rekor_sdk + +from mothership.db import new_engine +from mothership_coordinator.route_upload_srpm import router as upload_srpm_router +from mothership_coordinator.route_entries import router as entries_router + + +@asynccontextmanager +async def lifespan(app: FastAPI): + rekor_conf = rekor_sdk.Configuration() + rekor_conf.host = "http://localhost:3000" + entries_api = rekor_sdk.EntriesApi(rekor_sdk.ApiClient(rekor_conf)) + app.state.entries_api = entries_api + + with open("data/rh_public_key.asc", "rb") as f: + app.state.public_key = b64encode(f.read()).decode() + + engine = new_engine() + app.state.db = engine + yield + await engine.dispose() + + +app = FastAPI(lifespan=lifespan) + +app.include_router(upload_srpm_router) +app.include_router(entries_router) + +add_pagination(app) diff --git a/mothership_ui/server.py b/mothership_ui/server.py new file mode 100644 index 0000000..f41c4f1 --- /dev/null +++ b/mothership_ui/server.py @@ -0,0 +1,80 @@ +from typing import List +from json import dumps + +import aiohttp +from fastapi import FastAPI, Request, HTTPException +from fastapi.responses import FileResponse, HTMLResponse + +import pv2_ui_base +from mothership.models.entry import Entry +from mothership_coordinator.route_entries import DetailedEntry + +from mothership_ui.utils import templates + +app = FastAPI() + +css_response = FileResponse(pv2_ui_base.get_css_min_path()) + + +@app.get("/_/healthz") +def health(): + return {"status": "ok"} + + +@app.get("/pv2-ui/pv2.min.css", response_class=FileResponse) +def get_css(): + return css_response + + +@app.get("/favicon.ico") +def get_favicon(): + raise HTTPException(status_code=404) + + +@app.get("/", response_class=HTMLResponse) +async def index(request: Request): + async with aiohttp.ClientSession() as session: + async with session.get("http://127.0.0.1:8000/entries/") as response: + body = await response.json() + entry_list: List[Entry] = [] + for item in body.get("items"): + entry_list.append(Entry(id=None, **item)) + + return templates.TemplateResponse( + "index.jinja", + { + "request": request, + "entries": entry_list, + }, + ) + + +@app.get("/{entry_id}", response_class=HTMLResponse) +async def index(request: Request, entry_id: str): + async with aiohttp.ClientSession() as session: + async with session.get(f"http://127.0.0.1:8000/entries/{entry_id}") as response: + if response.status != 200: + return templates.TemplateResponse( + "error.jinja", + { + "request": request, + "status_code": response.status, + "reason": response.reason, + }, + ) + + body = await response.json() + body["entry"]["id"] = None + detailed_entry = DetailedEntry(**body) + detailed_dict = detailed_entry.dict() + del detailed_dict["rekor_entry"]["spec"]["publicKey"] + rekor_entry = dumps(detailed_dict.get("rekor_entry"), indent=4) + + return templates.TemplateResponse( + "details.jinja", + { + "request": request, + "entry": detailed_entry, + "rekor_entry": rekor_entry, + }, + ) diff --git a/mothership_ui/templates/details.jinja b/mothership_ui/templates/details.jinja new file mode 100644 index 0000000..1a97e13 --- /dev/null +++ b/mothership_ui/templates/details.jinja @@ -0,0 +1,50 @@ +{% extends "layout.jinja" %} + +{% block content %} +
+{{ entry.entry.package_name }}-{{ entry.entry.package_version }}-{{ entry.entry.package_release }}.src.rpm
+Name: {{ entry.entry.package_name }}
+Package version: {{ entry.entry.package_version }}
+Package release: {{ entry.entry.package_release }}
+Package epoch: {{ entry.entry.package_epoch }}
+{{ entry.entry.os_release }}
+{{ rekor_entry }}
+{% endblock %}
\ No newline at end of file
diff --git a/mothership_ui/templates/error.jinja b/mothership_ui/templates/error.jinja
new file mode 100644
index 0000000..b6bceef
--- /dev/null
+++ b/mothership_ui/templates/error.jinja
@@ -0,0 +1,6 @@
+{% extends "layout.jinja" %}
+
+{% block content %}
+{{ status_code }}
+{{ reason }}
+{% endblock %}
\ No newline at end of file
diff --git a/mothership_ui/templates/index.jinja b/mothership_ui/templates/index.jinja
new file mode 100644
index 0000000..3f486a5
--- /dev/null
+++ b/mothership_ui/templates/index.jinja
@@ -0,0 +1,22 @@
+{% extends "layout.jinja" %}
+
+{% block content %}
+Supply Chain Entry ID
+Package name
+Package version
+Package release
+OS Release
+{% for entry in entries %}
+{{ entry.entry_uuid }}
+{{ entry.package_name }}
+{{ entry.package_version }}
+{{ entry.package_release }}
+{{ entry.os_release }}
+{% endfor %}
+{% endblock %}
\ No newline at end of file
diff --git a/mothership_ui/templates/layout.jinja b/mothership_ui/templates/layout.jinja
new file mode 100644
index 0000000..8ab0c13
--- /dev/null
+++ b/mothership_ui/templates/layout.jinja
@@ -0,0 +1,33 @@
+{% block title %}Peridot Mothership{% endblock %}
+{% block head %}{% endblock %}
+{% block content %}{% endblock %}
\ No newline at end of file
diff --git a/mothership_ui/utils.py b/mothership_ui/utils.py
new file mode 100644
index 0000000..46ab8cf
--- /dev/null
+++ b/mothership_ui/utils.py
@@ -0,0 +1,3 @@
+from fastapi.templating import Jinja2Templates
+
+templates = Jinja2Templates(directory="mothership_ui/templates")
diff --git a/mothership_worker/.keep b/mothership_worker/.keep
new file mode 100644
index 0000000..e69de29
diff --git a/requirements.txt b/requirements.txt
new file mode 100644
index 0000000..61ec0ec
--- /dev/null
+++ b/requirements.txt
@@ -0,0 +1,11 @@
+fastapi==0.98.0
+fastapi-pagination==0.12.4
+python-multipart==0.0.6
+uvicorn[standard]==0.22.0
+Jinja2==3.1.2
+psycopg2==2.9.6
+asyncpg==0.27.0
+sqlalchemy[asyncio]==2.0.17
+alembic==1.11.1
+aiohttp==3.8.4
+rekor-python-sdk @ git+https://github.com/peridotbuild/rekor-python-sdk.git@main
\ No newline at end of file
diff --git a/tests/__init__.py b/tests/__init__.py
new file mode 100644
index 0000000..e69de29
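
For reference, a minimal client sketch against the coordinator added in this patch — assuming `mothership_coordinator.server:app` is served locally on port 8000 (for example via uvicorn) with Rekor and PostgreSQL reachable as wired up in the code above. The host, port, `example.src.rpm` filename, and `os_release` value below are placeholder assumptions for illustration, not part of the commit.

```python
# Illustrative only: upload an SRPM to /upload_srpm/ and list archived entries.
# Assumes the coordinator is reachable at http://127.0.0.1:8000 and that a
# local file named "example.src.rpm" exists.
import asyncio

import aiohttp


async def main() -> None:
    async with aiohttp.ClientSession() as session:
        # /upload_srpm/ expects a multipart form with the file bytes plus an
        # "os_release" form field (see route_upload_srpm.py).
        form = aiohttp.FormData()
        with open("example.src.rpm", "rb") as f:
            form.add_field("file", f.read(), filename="example.src.rpm")
        form.add_field("os_release", "Rocky Linux 9")
        async with session.post(
            "http://127.0.0.1:8000/upload_srpm/", data=form
        ) as resp:
            print(resp.status, await resp.json())

        # List archived entries; the paginated response carries them under
        # "items" (this is what mothership_ui/server.py reads as well).
        async with session.get("http://127.0.0.1:8000/entries/") as resp:
            print(await resp.json())


if __name__ == "__main__":
    asyncio.run(main())
```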