diff --git a/.bazelrc b/.bazelrc
index 546eee51..c8ad09c3 100644
--- a/.bazelrc
+++ b/.bazelrc
@@ -13,11 +13,17 @@ build:inmemory --experimental_inmemory_dotd_files
 
 # Minimize what is downloaded
 build:toplevel --config=inmemory
-build:toplevel --experimental_remote_download_outputs=toplevel
+build:toplevel --remote_download_outputs=toplevel
 
 build:remote --config=toplevel
-build:remote --google_credentials=/tmp/credentials.json
-build:remote --remote_cache=https://storage.googleapis.com/resf-temporary-bazel-cache
+build:remote --remote_cache=grpc://buildcache.default.svc.cluster.local:9092
+build:remote --experimental_remote_downloader=grpc://buildcache.default.svc.cluster.local:9092
+build:remote --noremote_upload_local_results
+build:remote --remote_timeout=3600
+build:remote --bes_results_url=https://bz.build.resf.org/invocation/
+build:remote --bes_backend=grpc://buildbuddy-grpc.default.svc.cluster.local:1985
 
 run:remote --experimental_remote_download_outputs=all --noexperimental_inmemory_jdeps_files --noexperimental_inmemory_dotd_files
+common:ci --config=remote
+
 build --stamp=true
diff --git a/.envrc.prod.mustafarocky b/.envrc.prod.mustafarocky
new file mode 100644
index 00000000..1aa5bc67
--- /dev/null
+++ b/.envrc.prod.mustafarocky
@@ -0,0 +1,6 @@
+unset STABLE_LOCAL_ENVIRONMENT STABLE_OCI_REGISTRY STABLE_OCI_REGISTRY_REPO STABLE_OCI_REGISTRY_NO_NESTED_SUPPORT_IN_2022_SHAME_ON_YOU_AWS STABLE_SITE
+
+export STABLE_STAGE="-prod"
+export STABLE_REGISTRY_SECRET="none"
+export STABLE_OCI_REGISTRY="docker.io"
+export STABLE_OCI_REGISTRY_REPO="mustafarocky"
diff --git a/README.md b/README.md
index 01535f98..63af3a42 100644
--- a/README.md
+++ b/README.md
@@ -6,8 +6,7 @@ __Other components pending__
 
 * publisher - `Composer for Peridot (currently only includes legacy mode)`
 * peridot - `Modern build system`
-* secparse - `Errata mirroring and publishing platform`
-* ui - `Product Errata UI`
+* apollo - `Errata mirroring and publishing platform`
 * utils - `Common utilities`
 * modulemd - `Modulemd parser in Go`
 
@@ -50,6 +49,6 @@ For best experience use IntelliJ+Bazel but `govendor` creates structure that is
 #### Vendor Go dependencies
 `./hack/govendor`
 #### Run UI in development mode
-`ibazel run //TARGET:TARGET.server` - example: `ibazel run //secparse/ui:secparse.server`
+`ibazel run //TARGET:TARGET.server` - example: `ibazel run //apollo/ui:apollo.server`
 #### Find UI server targets
 `bazel query 'attr(tags, "byc_frontend_server", //...)'`
diff --git a/secparse/BUILD b/apollo/BUILD.bazel
similarity index 100%
rename from secparse/BUILD
rename to apollo/BUILD.bazel
diff --git a/apollo/cmd/apollo/BUILD.bazel b/apollo/cmd/apollo/BUILD.bazel
new file mode 100644
index 00000000..afe84b00
--- /dev/null
+++ b/apollo/cmd/apollo/BUILD.bazel
@@ -0,0 +1,21 @@
+load("@io_bazel_rules_go//go:def.bzl", "go_binary", "go_library")
+
+go_library(
+    name = "apollo_lib",
+    srcs = ["main.go"],
+    importpath = "peridot.resf.org/apollo/cmd/apollo",
+    visibility = ["//visibility:private"],
+    deps = [
+        "//apollo/db/connector",
+        "//apollo/impl/v1:impl",
+        "//utils",
+        "//vendor/github.com/sirupsen/logrus",
+        "//vendor/github.com/spf13/cobra",
+    ],
+)
+
+go_binary(
+    name = "apollo",
+    embed = [":apollo_lib"],
+    visibility = ["//visibility:public"],
+)
diff --git a/apollo/cmd/apollo/ci/BUILD.bazel b/apollo/cmd/apollo/ci/BUILD.bazel
new file mode 100644
index 00000000..473d0c65
--- /dev/null
+++ b/apollo/cmd/apollo/ci/BUILD.bazel
@@ -0,0 +1,19 @@
+load("//rules_byc:defs.bzl", "BYCDEPLOY_OUTS_MIGRATE", "container", "peridot_k8s")
+
+container(
+    base = "//bases/bazel/go",
+    files = [
+        "//apollo/cmd/apollo",
+    ],
+    image_name = "apollo",
+    tars_to_layer = [
+        "//apollo/migrate",
+    ],
+)
+
+peridot_k8s(
+    name = "apollo",
+    src = "deploy.jsonnet",
+    outs = BYCDEPLOY_OUTS_MIGRATE,
+    deps = ["//ci"],
+)
diff --git a/apollo/cmd/apollo/ci/deploy.jsonnet b/apollo/cmd/apollo/ci/deploy.jsonnet
new file mode 100644
index 00000000..7498dcb2
--- /dev/null
+++ b/apollo/cmd/apollo/ci/deploy.jsonnet
@@ -0,0 +1,48 @@
+local bycdeploy = import 'ci/bycdeploy.jsonnet';
+local db = import 'ci/db.jsonnet';
+local kubernetes = import 'ci/kubernetes.jsonnet';
+local temporal = import 'ci/temporal.jsonnet';
+local utils = import 'ci/utils.jsonnet';
+
+bycdeploy.new({
+  name: 'apollo',
+  replicas: 1,
+  dbname: 'apollo',
+  backend: true,
+  migrate: true,
+  legacyDb: true,
+  command: '/bundle/apollo',
+  image: kubernetes.tag('apollo'),
+  tag: kubernetes.version,
+  dsn: {
+    name: 'APOLLO_DATABASE_URL',
+    value: db.dsn_legacy('apollo'),
+  },
+  requests: if kubernetes.prod() then {
+    cpu: '0.5',
+    memory: '512M',
+  },
+  ports: [
+    {
+      name: 'http',
+      containerPort: 9100,
+      protocol: 'TCP',
+      expose: true,
+    },
+    {
+      name: 'grpc',
+      containerPort: 9101,
+      protocol: 'TCP',
+    },
+  ],
+  health: {
+    port: 9100,
+  },
+  env: [
+    {
+      name: 'APOLLO_PRODUCTION',
+      value: if kubernetes.dev() then 'false' else 'true',
+    },
+    $.dsn,
+  ] + temporal.kube_env('APOLLO'),
+})
diff --git a/secparse/cmd/secparse/main.go b/apollo/cmd/apollo/main.go
similarity index 84%
rename from secparse/cmd/secparse/main.go
rename to apollo/cmd/apollo/main.go
index 0a5b147a..001dab69 100644
--- a/secparse/cmd/secparse/main.go
+++ b/apollo/cmd/apollo/main.go
@@ -33,30 +33,30 @@ package main
 import (
 	"github.com/sirupsen/logrus"
 	"github.com/spf13/cobra"
-	"peridot.resf.org/secparse/db/connector"
-	"peridot.resf.org/secparse/impl"
+	apolloconnector "peridot.resf.org/apollo/db/connector"
+	apolloimpl "peridot.resf.org/apollo/impl/v1"
 	"peridot.resf.org/utils"
 )
 
 var root = &cobra.Command{
-	Use: "secparse",
+	Use: "apollo",
 	Run: mn,
 }
 
 var cnf = utils.NewFlagConfig()
 
 func init() {
-	cnf.DefaultPort = 9008
+	cnf.DefaultPort = 9100
 
-	dname := "secparse"
-	cnf.DatabaseName = &dname
-	cnf.Name = "secparse"
+	cnf.DatabaseName = utils.Pointer[string]("apollo")
+	cnf.Name = *cnf.DatabaseName
+	root.PersistentFlags().String("homepage", "https://errata.build.resf.org", "Frontend root URL")
 
 	utils.AddFlags(root.PersistentFlags(), cnf)
 }
 
 func mn(_ *cobra.Command, _ []string) {
-	impl.NewServer(connector.MustAuto()).Run()
+	apolloimpl.NewServer(apolloconnector.MustAuto()).Run()
 }
 
 func main() {
diff --git a/apollo/cmd/apollostarter/BUILD.bazel b/apollo/cmd/apollostarter/BUILD.bazel
new file mode 100644
index 00000000..be4f65c8
--- /dev/null
+++ b/apollo/cmd/apollostarter/BUILD.bazel
@@ -0,0 +1,24 @@
+load("@io_bazel_rules_go//go:def.bzl", "go_binary", "go_library")
+
+go_library(
+    name = "apollostarter_lib",
+    srcs = ["main.go"],
+    importpath = "peridot.resf.org/apollo/cmd/apollostarter",
+    visibility = ["//visibility:private"],
+    deps = [
+        "//apollo/db/connector",
+        "//apollo/worker",
+        "//proto:common",
+        "//temporalutils",
+        "//utils",
+        "//vendor/github.com/sirupsen/logrus",
+        "//vendor/github.com/spf13/cobra",
+        "//vendor/go.temporal.io/sdk/client",
+    ],
+)
+
+go_binary(
+    name = "apollostarter",
+    embed = [":apollostarter_lib"],
+    visibility = ["//visibility:public"],
+)
diff --git a/apollo/cmd/apollostarter/ci/BUILD.bazel b/apollo/cmd/apollostarter/ci/BUILD.bazel
new file
mode 100644 index 00000000..da2ceacf --- /dev/null +++ b/apollo/cmd/apollostarter/ci/BUILD.bazel @@ -0,0 +1,18 @@ +load("//rules_byc:defs.bzl", "BYCDEPLOY_OUTS_MIGRATE", "container", "peridot_k8s") + +container( + base = "//bases/bazel/go", + files = [ + "//apollo/cmd/apollostarter", + ], + image_name = "apollostarter", +) + +peridot_k8s( + name = "apollostarter", + src = "deploy.jsonnet", + outs = BYCDEPLOY_OUTS_MIGRATE, + deps = [ + "//ci", + ], +) diff --git a/apollo/cmd/apollostarter/ci/deploy.jsonnet b/apollo/cmd/apollostarter/ci/deploy.jsonnet new file mode 100644 index 00000000..9872c894 --- /dev/null +++ b/apollo/cmd/apollostarter/ci/deploy.jsonnet @@ -0,0 +1,46 @@ +local bycdeploy = import 'ci/bycdeploy.jsonnet'; +local db = import 'ci/db.jsonnet'; +local kubernetes = import 'ci/kubernetes.jsonnet'; +local temporal = import 'ci/temporal.jsonnet'; +local utils = import 'ci/utils.jsonnet'; + +local site = std.extVar('site'); + +bycdeploy.new({ + name: 'apollostarter', + replicas: 1, + dbname: 'apollo', + backend: true, + migrate: true, + migrate_command: ['/bin/sh'], + migrate_args: ['-c', 'exit 0'], + legacyDb: true, + command: '/bundle/apollostarter', + image: kubernetes.tag('apollostarter'), + tag: kubernetes.version, + dsn: { + name: 'APOLLOSTARTER_DATABASE_URL', + value: db.dsn_legacy('apollo', false, 'apollostarter'), + }, + requests: if kubernetes.prod() then { + cpu: '1', + memory: '2G', + }, + ports: [ + { + name: 'http', + containerPort: 31209, + protocol: 'TCP', + }, + ], + health: { + port: 31209, + }, + env: [ + { + name: 'APOLLOSTARTER_PRODUCTION', + value: if kubernetes.dev() then 'false' else 'true', + }, + $.dsn, + ] + temporal.kube_env('APOLLOSTARTER'), +}) diff --git a/apollo/cmd/apollostarter/main.go b/apollo/cmd/apollostarter/main.go new file mode 100644 index 00000000..979b9baa --- /dev/null +++ b/apollo/cmd/apollostarter/main.go @@ -0,0 +1,153 @@ +// Copyright (c) All respective contributors to the Peridot Project. All rights reserved. +// Copyright (c) 2021-2022 Rocky Enterprise Software Foundation, Inc. All rights reserved. +// Copyright (c) 2021-2022 Ctrl IQ, Inc. All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are met: +// +// 1. Redistributions of source code must retain the above copyright notice, +// this list of conditions and the following disclaimer. +// +// 2. Redistributions in binary form must reproduce the above copyright notice, +// this list of conditions and the following disclaimer in the documentation +// and/or other materials provided with the distribution. +// +// 3. Neither the name of the copyright holder nor the names of its contributors +// may be used to endorse or promote products derived from this software without +// specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +// ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE +// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +// POSSIBILITY OF SUCH DAMAGE. + +package main + +import ( + "context" + "github.com/sirupsen/logrus" + "github.com/spf13/cobra" + "go.temporal.io/sdk/client" + "log" + apolloconnector "peridot.resf.org/apollo/db/connector" + "peridot.resf.org/apollo/worker" + commonpb "peridot.resf.org/common" + "peridot.resf.org/temporalutils" + "peridot.resf.org/utils" +) + +var root = &cobra.Command{ + Use: "apollostarter", + Run: mn, +} + +var cnf = utils.NewFlagConfig() + +func init() { + cnf.DefaultPort = 31209 + + cnf.DatabaseName = utils.Pointer[string]("apollo") + cnf.Name = "apollostarter" + + temporalutils.AddFlags(root.PersistentFlags()) + utils.AddFlags(root.PersistentFlags(), cnf) +} + +func mn(_ *cobra.Command, _ []string) { + c, err := temporalutils.NewClient(client.Options{}) + if err != nil { + logrus.Fatalln("unable to create Temporal client", err) + } + defer c.Close() + + db := apolloconnector.MustAuto() + + w, err := worker.NewWorker(&worker.NewWorkerInput{ + Temporal: c, + Database: db, + TaskQueue: "apollo-v1-main-queue", + }) + defer w.Client.Close() + + // Poll Red Hat for new CVEs and advisories every two hours + cveWfOpts := client.StartWorkflowOptions{ + ID: "cron_cve_mirror", + TaskQueue: w.TaskQueue, + CronSchedule: "0 */2 * * *", + } + _, err = w.Client.ExecuteWorkflow(context.Background(), cveWfOpts, w.WorkflowController.PollRedHatCVEsWorkflow) + if err != nil { + log.Fatalf("unable to start cve workflow: %v", err) + } + errataWfOpts := client.StartWorkflowOptions{ + ID: "cron_errata_mirror", + TaskQueue: w.TaskQueue, + CronSchedule: "0 */2 * * *", + } + _, err = w.Client.ExecuteWorkflow(context.Background(), errataWfOpts, w.WorkflowController.PollRedHatErrataWorkflow) + if err != nil { + log.Fatalf("unable to start errata workflow: %v", err) + } + + // Poll unresolved CVE status and update every hour + cveStatusWfOpts := client.StartWorkflowOptions{ + ID: "cron_cve_status", + TaskQueue: w.TaskQueue, + CronSchedule: "0 */1 * * *", + } + _, err = w.Client.ExecuteWorkflow(context.Background(), cveStatusWfOpts, w.WorkflowController.UpdateCVEStateWorkflow) + if err != nil { + log.Fatalf("unable to start cve status workflow: %v", err) + } + + // Check if CVE is fixed downstream every 10 minutes + cveDownstreamWfOpts := client.StartWorkflowOptions{ + ID: "cron_cve_downstream", + TaskQueue: w.TaskQueue, + CronSchedule: "*/10 * * * *", + } + _, err = w.Client.ExecuteWorkflow(context.Background(), cveDownstreamWfOpts, w.WorkflowController.DownstreamCVECheckWorkflow) + if err != nil { + log.Fatalf("unable to start cve downstream workflow: %v", err) + } + + // Auto create advisory for fixed CVEs every 30 minutes + cveAdvisoryWfOpts := client.StartWorkflowOptions{ + ID: "cron_cve_advisory", + TaskQueue: w.TaskQueue, + CronSchedule: "*/10 * * * *", + } + _, err = w.Client.ExecuteWorkflow(context.Background(), cveAdvisoryWfOpts, w.WorkflowController.AutoCreateAdvisoryWorkflow) + if err != nil { + log.Fatalf("unable to start cve advisory workflow: %v", err) + } + + // only added so we 
get a health endpoint + s := utils.NewGRPCServer( + nil, + func(r *utils.Register) { + err := commonpb.RegisterHealthCheckServiceHandlerFromEndpoint(r.Context, r.Mux, r.Endpoint, r.Options) + if err != nil { + logrus.Fatalf("could not register health service: %v", err) + } + }, + func(r *utils.RegisterServer) { + commonpb.RegisterHealthCheckServiceServer(r.Server, &utils.HealthServer{}) + }, + ) + s.WaitGroup.Wait() +} + +func main() { + utils.Main() + if err := root.Execute(); err != nil { + log.Fatal(err) + } +} diff --git a/apollo/cmd/apolloworker/BUILD.bazel b/apollo/cmd/apolloworker/BUILD.bazel new file mode 100644 index 00000000..ccffae58 --- /dev/null +++ b/apollo/cmd/apolloworker/BUILD.bazel @@ -0,0 +1,27 @@ +load("@io_bazel_rules_go//go:def.bzl", "go_binary", "go_library") + +go_library( + name = "apolloworker_lib", + srcs = ["main.go"], + importpath = "peridot.resf.org/apollo/cmd/apolloworker", + visibility = ["//visibility:private"], + deps = [ + "//apollo/db/connector", + "//apollo/rherrata", + "//apollo/rhsecurity", + "//apollo/worker", + "//apollo/workflow", + "//proto:common", + "//temporalutils", + "//utils", + "//vendor/github.com/sirupsen/logrus", + "//vendor/github.com/spf13/cobra", + "//vendor/go.temporal.io/sdk/client", + ], +) + +go_binary( + name = "apolloworker", + embed = [":apolloworker_lib"], + visibility = ["//visibility:public"], +) diff --git a/apollo/cmd/apolloworker/ci/BUILD.bazel b/apollo/cmd/apolloworker/ci/BUILD.bazel new file mode 100644 index 00000000..8dcde682 --- /dev/null +++ b/apollo/cmd/apolloworker/ci/BUILD.bazel @@ -0,0 +1,18 @@ +load("//rules_byc:defs.bzl", "BYCDEPLOY_OUTS_MIGRATE", "container", "peridot_k8s") + +container( + base = "//bases/bazel/go", + files = [ + "//apollo/cmd/apolloworker", + ], + image_name = "apolloworker", +) + +peridot_k8s( + name = "apolloworker", + src = "deploy.jsonnet", + outs = BYCDEPLOY_OUTS_MIGRATE, + deps = [ + "//ci", + ], +) diff --git a/apollo/cmd/apolloworker/ci/deploy.jsonnet b/apollo/cmd/apolloworker/ci/deploy.jsonnet new file mode 100644 index 00000000..9af7f880 --- /dev/null +++ b/apollo/cmd/apolloworker/ci/deploy.jsonnet @@ -0,0 +1,46 @@ +local bycdeploy = import 'ci/bycdeploy.jsonnet'; +local db = import 'ci/db.jsonnet'; +local kubernetes = import 'ci/kubernetes.jsonnet'; +local temporal = import 'ci/temporal.jsonnet'; +local utils = import 'ci/utils.jsonnet'; + +local site = std.extVar('site'); + +bycdeploy.new({ + name: 'apolloworker', + replicas: 1, + dbname: 'apollo', + backend: true, + migrate: true, + migrate_command: ['/bin/sh'], + migrate_args: ['-c', 'exit 0'], + legacyDb: true, + command: '/bundle/apolloworker', + image: kubernetes.tag('apolloworker'), + tag: kubernetes.version, + dsn: { + name: 'APOLLOWORKER_DATABASE_URL', + value: db.dsn_legacy('apollo', false, 'apolloworker'), + }, + requests: if kubernetes.prod() then { + cpu: '1', + memory: '2G', + }, + ports: [ + { + name: 'http', + containerPort: 29209, + protocol: 'TCP', + }, + ], + health: { + port: 29209, + }, + env: [ + { + name: 'APOLLOWORKER_PRODUCTION', + value: if kubernetes.dev() then 'false' else 'true', + }, + $.dsn, + ] + temporal.kube_env('APOLLOWORKER'), +}) diff --git a/apollo/cmd/apolloworker/main.go b/apollo/cmd/apolloworker/main.go new file mode 100644 index 00000000..bd843c4a --- /dev/null +++ b/apollo/cmd/apolloworker/main.go @@ -0,0 +1,143 @@ +// Copyright (c) All respective contributors to the Peridot Project. All rights reserved. +// Copyright (c) 2021-2022 Rocky Enterprise Software Foundation, Inc. 
All rights reserved. +// Copyright (c) 2021-2022 Ctrl IQ, Inc. All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are met: +// +// 1. Redistributions of source code must retain the above copyright notice, +// this list of conditions and the following disclaimer. +// +// 2. Redistributions in binary form must reproduce the above copyright notice, +// this list of conditions and the following disclaimer in the documentation +// and/or other materials provided with the distribution. +// +// 3. Neither the name of the copyright holder nor the names of its contributors +// may be used to endorse or promote products derived from this software without +// specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE +// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +// POSSIBILITY OF SUCH DAMAGE. + +package main + +import ( + "github.com/sirupsen/logrus" + "github.com/spf13/cobra" + "go.temporal.io/sdk/client" + "log" + apolloconnector "peridot.resf.org/apollo/db/connector" + "peridot.resf.org/apollo/rherrata" + "peridot.resf.org/apollo/rhsecurity" + "peridot.resf.org/apollo/worker" + "peridot.resf.org/apollo/workflow" + commonpb "peridot.resf.org/common" + "peridot.resf.org/temporalutils" + "peridot.resf.org/utils" + "sync" +) + +var root = &cobra.Command{ + Use: "apolloworker", + Run: mn, +} + +var cnf = utils.NewFlagConfig() + +func init() { + cnf.DefaultPort = 29209 + + cnf.DatabaseName = utils.Pointer[string]("apollo") + cnf.Name = "apolloworker" + + pflags := root.PersistentFlags() + pflags.String("vendor", "Rocky Enterprise Software Foundation", "Vendor name that is publishing the advisories") + + temporalutils.AddFlags(root.PersistentFlags()) + utils.AddFlags(root.PersistentFlags(), cnf) +} + +func mn(_ *cobra.Command, _ []string) { + c, err := temporalutils.NewClient(client.Options{}) + if err != nil { + logrus.Fatalln("unable to create Temporal client", err) + } + defer c.Close() + + db := apolloconnector.MustAuto() + + options := []workflow.Option{ + workflow.WithSecurityAPI(rhsecurity.NewAPIClient(rhsecurity.NewConfiguration()).DefaultApi), + workflow.WithErrataAPI(rherrata.NewClient()), + } + + w, err := worker.NewWorker( + &worker.NewWorkerInput{ + Temporal: c, + Database: db, + TaskQueue: "apollo-v1-main-queue", + }, + options..., + ) + defer w.Client.Close() + + w.Worker.RegisterWorkflow(w.WorkflowController.AutoCreateAdvisoryWorkflow) + w.Worker.RegisterWorkflow(w.WorkflowController.DownstreamCVECheckWorkflow) + w.Worker.RegisterWorkflow(w.WorkflowController.PollRedHatCVEsWorkflow) + w.Worker.RegisterWorkflow(w.WorkflowController.PollRedHatErrataWorkflow) + w.Worker.RegisterWorkflow(w.WorkflowController.UpdateCVEStateWorkflow) + + 
w.Worker.RegisterActivity(w.WorkflowController.AutoCreateAdvisoryActivity) + w.Worker.RegisterActivity(w.WorkflowController.GetAllShortCodesActivity) + w.Worker.RegisterActivity(w.WorkflowController.DownstreamCVECheckActivity) + w.Worker.RegisterActivity(w.WorkflowController.PollCVEProcessShortCodeActivity) + w.Worker.RegisterActivity(w.WorkflowController.ProcessRedHatErrataShortCodeActivity) + w.Worker.RegisterActivity(w.WorkflowController.UpdateCVEStateActivity) + + w.Worker.RegisterWorkflow(w.WorkflowController.CollectCVEDataWorkflow) + w.Worker.RegisterActivity(w.WorkflowController.CollectCVEDataActivity) + + var wg sync.WaitGroup + wg.Add(2) + + go func() { + w.Run() + wg.Done() + }() + + go func() { + // only added so we get a health endpoint + s := utils.NewGRPCServer( + nil, + func(r *utils.Register) { + err := commonpb.RegisterHealthCheckServiceHandlerFromEndpoint(r.Context, r.Mux, r.Endpoint, r.Options) + if err != nil { + logrus.Fatalf("could not register health service: %v", err) + } + }, + func(r *utils.RegisterServer) { + commonpb.RegisterHealthCheckServiceServer(r.Server, &utils.HealthServer{}) + }, + ) + s.WaitGroup.Wait() + wg.Done() + }() + + wg.Wait() +} + +func main() { + utils.Main() + if err := root.Execute(); err != nil { + log.Fatal(err) + } +} diff --git a/secparse/db/BUILD.bazel b/apollo/db/BUILD.bazel similarity index 72% rename from secparse/db/BUILD.bazel rename to apollo/db/BUILD.bazel index dbc20ec3..092b0641 100644 --- a/secparse/db/BUILD.bazel +++ b/apollo/db/BUILD.bazel @@ -6,13 +6,12 @@ go_library( "convert.go", "db.go", ], - importpath = "peridot.resf.org/secparse/db", + importpath = "peridot.resf.org/apollo/db", visibility = ["//visibility:public"], deps = [ - "//secparse/admin/proto/v1:proto", - "//secparse/proto/v1:proto", - "//secparse/rpmutils", + "//apollo/proto/v1:pb", "//utils", + "//vendor/github.com/jmoiron/sqlx/types", "//vendor/github.com/lib/pq", "@org_golang_google_protobuf//types/known/timestamppb:go_default_library", "@org_golang_google_protobuf//types/known/wrapperspb:go_default_library", diff --git a/secparse/db/connector/BUILD.bazel b/apollo/db/connector/BUILD.bazel similarity index 68% rename from secparse/db/connector/BUILD.bazel rename to apollo/db/connector/BUILD.bazel index 10e1161d..d1dee1ca 100644 --- a/secparse/db/connector/BUILD.bazel +++ b/apollo/db/connector/BUILD.bazel @@ -3,11 +3,11 @@ load("@io_bazel_rules_go//go:def.bzl", "go_library") go_library( name = "connector", srcs = ["connector.go"], - importpath = "peridot.resf.org/secparse/db/connector", + importpath = "peridot.resf.org/apollo/db/connector", visibility = ["//visibility:public"], deps = [ - "//secparse/db", - "//secparse/db/psql", + "//apollo/db", + "//apollo/db/psql", "//utils", "//vendor/github.com/sirupsen/logrus", ], diff --git a/secparse/db/connector/connector.go b/apollo/db/connector/connector.go similarity index 92% rename from secparse/db/connector/connector.go rename to apollo/db/connector/connector.go index b574f7c5..08ae7c1c 100644 --- a/secparse/db/connector/connector.go +++ b/apollo/db/connector/connector.go @@ -28,21 +28,21 @@ // ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE // POSSIBILITY OF SUCH DAMAGE. 
-package connector +package apolloconnector import ( "github.com/sirupsen/logrus" - "peridot.resf.org/secparse/db" - "peridot.resf.org/secparse/db/psql" + apollodb "peridot.resf.org/apollo/db" + apollopsql "peridot.resf.org/apollo/db/psql" "peridot.resf.org/utils" ) // MustAuto automatically returns the correct access interface or fatally fails -func MustAuto() db.Access { +func MustAuto() apollodb.Access { dbType := utils.GetDbType() switch dbType { case utils.DbPostgres: - return psql.New() + return apollopsql.New() default: logrus.Fatal("invalid database url supplied") return nil diff --git a/secparse/db/convert.go b/apollo/db/convert.go similarity index 59% rename from secparse/db/convert.go rename to apollo/db/convert.go index cff686d6..5bc9d62f 100644 --- a/secparse/db/convert.go +++ b/apollo/db/convert.go @@ -28,21 +28,19 @@ // ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE // POSSIBILITY OF SUCH DAMAGE. -package db +package apollodb import ( "fmt" "google.golang.org/protobuf/types/known/timestamppb" "google.golang.org/protobuf/types/known/wrapperspb" - secparseadminpb "peridot.resf.org/secparse/admin/proto/v1" - secparsepb "peridot.resf.org/secparse/proto/v1" - "peridot.resf.org/secparse/rpmutils" + apollopb "peridot.resf.org/apollo/pb" + "strings" ) -func DTOShortCodeToPB(sc *ShortCode) *secparseadminpb.ShortCode { - ret := &secparseadminpb.ShortCode{ +func DTOShortCodeToPB(sc *ShortCode) *apollopb.ShortCode { + ret := &apollopb.ShortCode{ Code: sc.Code, - Mode: secparseadminpb.ShortCodeMode(sc.Mode), } if sc.ArchivedAt.Valid { @@ -52,8 +50,8 @@ func DTOShortCodeToPB(sc *ShortCode) *secparseadminpb.ShortCode { return ret } -func DTOListShortCodesToPB(scs []*ShortCode) []*secparseadminpb.ShortCode { - var ret []*secparseadminpb.ShortCode +func DTOListShortCodesToPB(scs []*ShortCode) []*apollopb.ShortCode { + var ret []*apollopb.ShortCode for _, v := range scs { ret = append(ret, DTOShortCodeToPB(v)) @@ -62,16 +60,16 @@ func DTOListShortCodesToPB(scs []*ShortCode) []*secparseadminpb.ShortCode { return ret } -func DTOAdvisoryToPB(sc *Advisory) *secparsepb.Advisory { +func DTOAdvisoryToPB(sc *Advisory) *apollopb.Advisory { var errataType string - switch secparsepb.Advisory_Type(sc.Type) { - case secparsepb.Advisory_Security: + switch apollopb.Advisory_Type(sc.Type) { + case apollopb.Advisory_TYPE_SECURITY: errataType = "SA" break - case secparsepb.Advisory_BugFix: + case apollopb.Advisory_TYPE_BUGFIX: errataType = "BA" break - case secparsepb.Advisory_Enhancement: + case apollopb.Advisory_TYPE_ENHANCEMENT: errataType = "EA" break default: @@ -84,34 +82,67 @@ func DTOAdvisoryToPB(sc *Advisory) *secparsepb.Advisory { publishedAt = timestamppb.New(sc.PublishedAt.Time) } - ret := &secparsepb.Advisory{ - Type: secparsepb.Advisory_Type(sc.Type), + ret := &apollopb.Advisory{ + Type: apollopb.Advisory_Type(sc.Type), ShortCode: sc.ShortCodeCode, Name: fmt.Sprintf("%s%s-%d:%d", sc.ShortCodeCode, errataType, sc.Year, sc.Num), Synopsis: sc.Synopsis, - Severity: secparsepb.Advisory_Severity(sc.Severity), + Severity: apollopb.Advisory_Severity(sc.Severity), Topic: sc.Topic, Description: sc.Description, AffectedProducts: sc.AffectedProducts, - Fixes: sc.Fixes, - Cves: sc.Cves, + Fixes: nil, + Cves: []*apollopb.CVE{}, References: sc.References, PublishedAt: publishedAt, - Rpms: sc.RPMs, + Rpms: nil, + RebootSuggested: sc.RebootSuggested, } if sc.Solution.Valid { ret.Solution = &wrapperspb.StringValue{Value: sc.Solution.String} } + for _, cve := range sc.Cves { + split := 
strings.SplitN(cve, ":::", 6) + ret.Cves = append(ret.Cves, &apollopb.CVE{ + Name: split[2], + SourceBy: wrapperspb.String(split[0]), + SourceLink: wrapperspb.String(split[1]), + Cvss3ScoringVector: wrapperspb.String(split[3]), + Cvss3BaseScore: wrapperspb.String(split[4]), + Cwe: wrapperspb.String(split[5]), + }) + } + if len(sc.Fixes) > 0 { + ret.Fixes = []*apollopb.Fix{} + } + for _, fix := range sc.Fixes { + split := strings.SplitN(fix, ":::", 4) + ret.Fixes = append(ret.Fixes, &apollopb.Fix{ + Ticket: wrapperspb.String(split[0]), + SourceBy: wrapperspb.String(split[1]), + SourceLink: wrapperspb.String(split[2]), + Description: wrapperspb.String(split[3]), + }) + } + if len(sc.RPMs) > 0 { + ret.Rpms = map[string]*apollopb.RPMs{} + } + for _, rpm := range sc.RPMs { + split := strings.SplitN(rpm, ":::", 2) + nvra := split[0] + productName := split[1] + if ret.Rpms[productName] == nil { + ret.Rpms[productName] = &apollopb.RPMs{} + } - for i, rpm := range sc.RPMs { - sc.RPMs[i] = rpmutils.Epoch().ReplaceAllString(rpm, "") + ret.Rpms[productName].Nvras = append(ret.Rpms[productName].Nvras, nvra) } return ret } -func DTOListAdvisoriesToPB(scs []*Advisory) []*secparsepb.Advisory { - var ret []*secparsepb.Advisory +func DTOListAdvisoriesToPB(scs []*Advisory) []*apollopb.Advisory { + var ret []*apollopb.Advisory for _, v := range scs { ret = append(ret, DTOAdvisoryToPB(v)) @@ -120,10 +151,9 @@ func DTOListAdvisoriesToPB(scs []*Advisory) []*secparsepb.Advisory { return ret } -func DTOCVEToPB(cve *CVE) *secparseadminpb.CVE { - ret := &secparseadminpb.CVE{ - Name: cve.ID, - State: secparseadminpb.CVEState(cve.State), +func DTOCVEToPB(cve *CVE) *apollopb.CVE { + ret := &apollopb.CVE{ + Name: cve.ID, } if cve.SourceBy.Valid { @@ -136,8 +166,8 @@ func DTOCVEToPB(cve *CVE) *secparseadminpb.CVE { return ret } -func DTOListCVEsToPB(cves []*CVE) []*secparseadminpb.CVE { - var ret []*secparseadminpb.CVE +func DTOListCVEsToPB(cves []*CVE) []*apollopb.CVE { + var ret []*apollopb.CVE for _, v := range cves { ret = append(ret, DTOCVEToPB(v)) diff --git a/secparse/db/db.go b/apollo/db/db.go similarity index 61% rename from secparse/db/db.go rename to apollo/db/db.go index b0c84409..3785ac93 100644 --- a/secparse/db/db.go +++ b/apollo/db/db.go @@ -28,27 +28,26 @@ // ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE // POSSIBILITY OF SUCH DAMAGE. 
-package db +package apollodb import ( "database/sql" + "github.com/jmoiron/sqlx/types" "github.com/lib/pq" - secparseadminpb "peridot.resf.org/secparse/admin/proto/v1" + apollopb "peridot.resf.org/apollo/pb" "peridot.resf.org/utils" "time" ) -// ShortCode is the DTO struct for `resf.secparse.admin.ShortCode` +// ShortCode is the DTO struct for `resf.apollo.ShortCode` type ShortCode struct { - Code string `db:"code"` - Mode int8 `db:"mode"` - CreatedAt *time.Time `db:"created_at"` - ArchivedAt sql.NullTime `db:"archived_at"` - MirrorFromDate sql.NullTime `db:"mirror_from_date"` - RedHatProductPrefix sql.NullString `db:"redhat_product_prefix"` + Code string `json:"code" db:"code"` + Mode int8 `json:"mode" db:"mode"` + CreatedAt *time.Time `json:"createdAt" db:"created_at"` + ArchivedAt sql.NullTime `json:"archivedAt" db:"archived_at"` } -// Advisory is the DTO struct for `resf.secparse.Advisory` +// Advisory is the DTO struct for `resf.apollo.Advisory` type Advisory struct { ID int64 `db:"id"` CreatedAt *time.Time `db:"created_at"` @@ -63,32 +62,40 @@ type Advisory struct { Description string `db:"description"` Solution sql.NullString `db:"solution"` - RedHatIssuedAt sql.NullTime `db:"redhat_issued_at"` - ShortCodeCode string `db:"short_code_code"` - PublishedAt sql.NullTime `db:"published_at"` + RedHatIssuedAt sql.NullTime `db:"redhat_issued_at"` + ShortCodeCode string `db:"short_code_code"` + RebootSuggested bool `db:"reboot_suggested"` + PublishedAt sql.NullTime `db:"published_at"` AffectedProducts pq.StringArray `db:"affected_products"` Fixes pq.StringArray `db:"fixes"` Cves pq.StringArray `db:"cves"` + CveIds pq.StringArray `db:"cve_ids"` References pq.StringArray `db:"references"` RPMs pq.StringArray `db:"rpms"` BuildArtifacts pq.StringArray `db:"build_artifacts"` + + // Only used for list/search queries + Total int64 `json:"total" db:"total"` } -// CVE is the DTO struct for `resf.secparse.admin.CVE` +// CVE is the DTO struct for `resf.apollo.CVE` type CVE struct { ID string `db:"id"` CreatedAt *time.Time `db:"created_at"` - State int `db:"state"` AdvisoryId sql.NullInt64 `db:"advisory_id"` ShortCode string `db:"short_code_code"` SourceBy sql.NullString `db:"source_by"` SourceLink sql.NullString `db:"source_link"` + + Content types.NullJSONText `db:"content"` + + AffectedProductId sql.NullInt64 `db:"affected_product_id"` } -// AffectedProduct is the DTO struct for `ctlriq.secparse.admin.AffectedProduct` +// AffectedProduct is the DTO struct for `resf.apollo.AffectedProduct` type AffectedProduct struct { ID int64 `db:"id"` ProductID int64 `db:"product_id"` @@ -99,29 +106,43 @@ type AffectedProduct struct { Advisory sql.NullString `db:"advisory"` } -// Product is the DTO struct for `ctlriq.secparse.admin.Product` +// Product is the DTO struct for `resf.apollo.Product` type Product struct { ID int64 `db:"id"` Name string `db:"name"` - CurrentFullVersion string `db:"current_full_version"` - RedHatMajorVersion sql.NullInt32 `db:"redhat_major_version"` - ShortCode string `db:"short_code_code"` - Archs pq.StringArray `db:"archs"` + CurrentFullVersion string `db:"current_full_version"` + RedHatMajorVersion sql.NullInt32 `db:"redhat_major_version"` + ShortCode string `db:"short_code_code"` + Archs pq.StringArray `db:"archs"` + MirrorFromDate sql.NullTime `json:"mirrorFromDate" db:"mirror_from_date"` + RedHatProductPrefix sql.NullString `json:"redHatProductPrefix" db:"redhat_product_prefix"` + Cpe sql.NullString `json:"cpe" db:"cpe"` + EolAt sql.NullTime `json:"eolAt" db:"eol_at"` + + 
BuildSystem string `json:"buildSystem" db:"build_system"` + BuildSystemEndpoint string `json:"buildSystemEndpoint" db:"build_system_endpoint"` + KojiCompose sql.NullString `json:"kojiCompose" db:"koji_compose"` + KojiModuleCompose sql.NullString `json:"kojiModuleCompose" db:"koji_module_compose"` + PeridotProjectID sql.NullString `json:"peridotProjectID" db:"peridot_project_id"` } type BuildReference struct { - ID int64 `db:"id"` - AffectedProductId int64 `db:"affected_product_id"` - Rpm string `db:"rpm"` - SrcRpm string `db:"src_rpm"` - CveID string `db:"cve_id"` - KojiID string `db:"koji_id"` + ID int64 `db:"id"` + AffectedProductId int64 `db:"affected_product_id"` + Rpm string `db:"rpm"` + SrcRpm string `db:"src_rpm"` + CveID string `db:"cve_id"` + Sha256Sum string `db:"sha256_sum"` + KojiID sql.NullString `db:"koji_id"` + PeridotID sql.NullString `db:"peridot_id"` } type Fix struct { ID int64 `db:"id"` Ticket sql.NullString `db:"ticket"` + SourceBy sql.NullString `db:"source_by"` + SourceLink sql.NullString `db:"source_link"` Description sql.NullString `db:"description"` } @@ -132,8 +153,9 @@ type AdvisoryReference struct { } type MirrorState struct { - ShortCode string `db:"short_code_code"` - LastSync sql.NullTime `db:"last_sync"` + ShortCode string `db:"short_code_code"` + LastSync sql.NullTime `db:"last_sync"` + ErrataAfter sql.NullTime `db:"errata_after"` } type AdvisoryCVE struct { @@ -148,21 +170,27 @@ type AdvisoryFix struct { type IgnoredUpstreamPackage struct { ID int64 `db:"id"` - ShortCode string `db:"short_code_code"` + ProductID int64 `db:"product_id"` Package string `db:"package"` } +type RebootSuggestedPackage struct { + CreatedAt *time.Time `db:"created_at"` + Name string `db:"name"` +} + type AdvisoryRPM struct { AdvisoryID int64 `db:"advisory_id"` Name string `db:"name"` + ProductID int64 `db:"product_id"` } type Access interface { GetAllShortCodes() ([]*ShortCode, error) GetShortCodeByCode(code string) (*ShortCode, error) - CreateShortCode(code string, mode secparseadminpb.ShortCodeMode) (*ShortCode, error) + CreateShortCode(code string, mode apollopb.ShortCode_Mode) (*ShortCode, error) - GetAllAdvisories(publishedOnly bool) ([]*Advisory, error) + GetAllAdvisories(filters *apollopb.AdvisoryFilters, page int32, limit int32) ([]*Advisory, error) // Advisory is a broad entity with lots of fields // mustafa: It is in my opinion better to accept the same struct // to create and update it. 
@@ -179,11 +207,12 @@ type Access interface { GetAdvisoryByCodeAndYearAndNum(code string, year int, num int) (*Advisory, error) GetAllUnresolvedCVEs() ([]*CVE, error) - GetAllCVEsWithAllProductsFixed() ([]*CVE, error) + GetPendingAffectedProducts() ([]*AffectedProduct, error) GetAllCVEsFixedDownstream() ([]*CVE, error) GetCVEByID(id string) (*CVE, error) - CreateCVE(cveId string, state secparseadminpb.CVEState, shortCode string, sourceBy *string, sourceLink *string) (*CVE, error) - UpdateCVEState(cve string, state secparseadminpb.CVEState) error + GetAllCVEs() ([]*CVE, error) + CreateCVE(cveId string, shortCode string, sourceBy *string, sourceLink *string, content types.NullJSONText) (*CVE, error) + SetCVEContent(cveId string, content types.JSONText) error GetProductsByShortCode(code string) ([]*Product, error) GetProductByNameAndShortCode(product string, code string) (*Product, error) @@ -193,25 +222,29 @@ type Access interface { GetAllAffectedProductsByCVE(cve string) ([]*AffectedProduct, error) GetAffectedProductByCVEAndPackage(cve string, pkg string) (*AffectedProduct, error) GetAffectedProductByAdvisory(advisory string) (*AffectedProduct, error) + GetAffectedProductByID(id int64) (*AffectedProduct, error) CreateAffectedProduct(productId int64, cveId string, state int, version string, pkg string, advisory *string) (*AffectedProduct, error) UpdateAffectedProductStateAndPackageAndAdvisory(id int64, state int, pkg string, advisory *string) error DeleteAffectedProduct(id int64) error - CreateFix(ticket string, description string) (int64, error) + CreateFix(ticket string, sourceBy string, sourceLink string, description string) (int64, error) // This will return nil rather than an error if no rows are found - GetMirrorStateLastSync(code string) (*time.Time, error) + GetMirrorState(code string) (*MirrorState, error) UpdateMirrorState(code string, lastSync *time.Time) error + UpdateMirrorStateErrata(code string, lastSync *time.Time) error + GetMaxLastSync() (*time.Time, error) - CreateBuildReference(affectedProductId int64, rpm string, srcRpm string, cveId string, kojiId string) (*BuildReference, error) + CreateBuildReference(affectedProductId int64, rpm string, srcRpm string, cveId string, sha256Sum string, kojiId *string, peridotId *string) (*BuildReference, error) CreateAdvisoryReference(advisoryId int64, url string) error - GetAllIgnoredPackagesByShortCode(code string) ([]string, error) + GetAllIgnoredPackagesByProductID(productID int64) ([]string, error) + GetAllRebootSuggestedPackages() ([]string, error) // These add methods is treated like an upsert. 
They're only added if one doesn't exist AddAdvisoryFix(advisoryId int64, fixId int64) error AddAdvisoryCVE(advisoryId int64, cveId string) error - AddAdvisoryRPM(advisoryId int64, name string) error + AddAdvisoryRPM(advisoryId int64, name string, productID int64) error Begin() (utils.Tx, error) UseTransaction(tx utils.Tx) Access diff --git a/secparse/db/mock/BUILD b/apollo/db/mock/BUILD.bazel similarity index 55% rename from secparse/db/mock/BUILD rename to apollo/db/mock/BUILD.bazel index 1058425f..3e9340ec 100644 --- a/secparse/db/mock/BUILD +++ b/apollo/db/mock/BUILD.bazel @@ -3,11 +3,12 @@ load("@io_bazel_rules_go//go:def.bzl", "go_library") go_library( name = "mock", srcs = ["mock.go"], - importpath = "peridot.resf.org/secparse/db/mock", + importpath = "peridot.resf.org/apollo/db/mock", visibility = ["//visibility:public"], deps = [ - "//secparse/admin/proto/v1:proto", - "//secparse/db", + "//apollo/db", + "//apollo/proto/v1:pb", "//utils", + "//vendor/github.com/jmoiron/sqlx/types", ], ) diff --git a/secparse/db/mock/mock.go b/apollo/db/mock/mock.go similarity index 58% rename from secparse/db/mock/mock.go rename to apollo/db/mock/mock.go index 7abbd8b7..df12feff 100644 --- a/secparse/db/mock/mock.go +++ b/apollo/db/mock/mock.go @@ -28,56 +28,59 @@ // ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE // POSSIBILITY OF SUCH DAMAGE. -package mock +package apollomock import ( "database/sql" "fmt" - secparseadminpb "peridot.resf.org/secparse/admin/proto/v1" - "peridot.resf.org/secparse/db" + "github.com/jmoiron/sqlx/types" + apollodb "peridot.resf.org/apollo/db" + apollopb "peridot.resf.org/apollo/pb" "peridot.resf.org/utils" "time" ) type Access struct { - ShortCodes []*db.ShortCode - Advisories []*db.Advisory - Cves []*db.CVE - Fixes []*db.Fix - AdvisoryReferences []*db.AdvisoryReference - Products []*db.Product - AffectedProducts []*db.AffectedProduct - BuildReferences []*db.BuildReference - MirrorStates []*db.MirrorState - AdvisoryCVEs []*db.AdvisoryCVE - AdvisoryFixes []*db.AdvisoryFix - IgnoredUpstreamPackages []*db.IgnoredUpstreamPackage - AdvisoryRPMs []*db.AdvisoryRPM + ShortCodes []*apollodb.ShortCode + Advisories []*apollodb.Advisory + Cves []*apollodb.CVE + Fixes []*apollodb.Fix + AdvisoryReferences []*apollodb.AdvisoryReference + Products []*apollodb.Product + AffectedProducts []*apollodb.AffectedProduct + BuildReferences []*apollodb.BuildReference + MirrorStates []*apollodb.MirrorState + AdvisoryCVEs []*apollodb.AdvisoryCVE + AdvisoryFixes []*apollodb.AdvisoryFix + IgnoredUpstreamPackages []*apollodb.IgnoredUpstreamPackage + RebootSuggestedPackages []*apollodb.RebootSuggestedPackage + AdvisoryRPMs []*apollodb.AdvisoryRPM } func New() *Access { return &Access{ - ShortCodes: []*db.ShortCode{}, - Advisories: []*db.Advisory{}, - Cves: []*db.CVE{}, - Fixes: []*db.Fix{}, - AdvisoryReferences: []*db.AdvisoryReference{}, - Products: []*db.Product{}, - AffectedProducts: []*db.AffectedProduct{}, - BuildReferences: []*db.BuildReference{}, - MirrorStates: []*db.MirrorState{}, - AdvisoryCVEs: []*db.AdvisoryCVE{}, - AdvisoryFixes: []*db.AdvisoryFix{}, - IgnoredUpstreamPackages: []*db.IgnoredUpstreamPackage{}, - AdvisoryRPMs: []*db.AdvisoryRPM{}, + ShortCodes: []*apollodb.ShortCode{}, + Advisories: []*apollodb.Advisory{}, + Cves: []*apollodb.CVE{}, + Fixes: []*apollodb.Fix{}, + AdvisoryReferences: []*apollodb.AdvisoryReference{}, + Products: []*apollodb.Product{}, + AffectedProducts: []*apollodb.AffectedProduct{}, + BuildReferences: []*apollodb.BuildReference{}, + 
MirrorStates: []*apollodb.MirrorState{}, + AdvisoryCVEs: []*apollodb.AdvisoryCVE{}, + AdvisoryFixes: []*apollodb.AdvisoryFix{}, + IgnoredUpstreamPackages: []*apollodb.IgnoredUpstreamPackage{}, + RebootSuggestedPackages: []*apollodb.RebootSuggestedPackage{}, + AdvisoryRPMs: []*apollodb.AdvisoryRPM{}, } } -func (a *Access) GetAllShortCodes() ([]*db.ShortCode, error) { +func (a *Access) GetAllShortCodes() ([]*apollodb.ShortCode, error) { return a.ShortCodes, nil } -func (a *Access) GetShortCodeByCode(code string) (*db.ShortCode, error) { +func (a *Access) GetShortCodeByCode(code string) (*apollodb.ShortCode, error) { for _, val := range a.ShortCodes { if val.Code == code { return val, nil @@ -87,24 +90,22 @@ func (a *Access) GetShortCodeByCode(code string) (*db.ShortCode, error) { return nil, sql.ErrNoRows } -func (a *Access) CreateShortCode(code string, mode secparseadminpb.ShortCodeMode) (*db.ShortCode, error) { +func (a *Access) CreateShortCode(code string, mode apollopb.ShortCode_Mode) (*apollodb.ShortCode, error) { now := time.Now() - shortCode := db.ShortCode{ - Code: code, - Mode: int8(mode), - CreatedAt: &now, - ArchivedAt: sql.NullTime{}, - MirrorFromDate: sql.NullTime{}, - RedHatProductPrefix: sql.NullString{}, + shortCode := apollodb.ShortCode{ + Code: code, + Mode: int8(mode), + CreatedAt: &now, + ArchivedAt: sql.NullTime{}, } a.ShortCodes = append(a.ShortCodes, &shortCode) return &shortCode, nil } -func (a *Access) getAdvisoriesWithJoin(filter func(*db.Advisory) bool) []*db.Advisory { - var advisories []*db.Advisory +func (a *Access) getAdvisoriesWithJoin(filter func(*apollodb.Advisory) bool) []*apollodb.Advisory { + var advisories []*apollodb.Advisory for _, val := range a.Advisories { if filter(val) { advisories = append(advisories, val) @@ -193,9 +194,30 @@ func (a *Access) getAdvisoriesWithJoin(filter func(*db.Advisory) bool) []*db.Adv return advisories } -func (a *Access) GetAllAdvisories(publishedOnly bool) ([]*db.Advisory, error) { - return a.getAdvisoriesWithJoin(func(advisory *db.Advisory) bool { - if publishedOnly { +func (a *Access) GetAllAdvisories(filters *apollopb.AdvisoryFilters, page int32, limit int32) ([]*apollodb.Advisory, error) { + return a.getAdvisoriesWithJoin(func(advisory *apollodb.Advisory) bool { + if filters.Product != nil { + if !utils.StrContains(filters.Product.Value, advisory.AffectedProducts) { + return false + } + } + if advisory.PublishedAt.Valid { + if filters.Before != nil { + if advisory.PublishedAt.Time.After(filters.Before.AsTime()) { + return false + } + } + if filters.After != nil { + if advisory.PublishedAt.Time.Before(filters.After.AsTime()) { + return false + } + } + } + if filters.IncludeUnpublished != nil { + if !filters.IncludeUnpublished.Value && !advisory.PublishedAt.Valid { + return false + } + } else { if !advisory.PublishedAt.Valid { return false } @@ -205,8 +227,8 @@ func (a *Access) GetAllAdvisories(publishedOnly bool) ([]*db.Advisory, error) { }), nil } -func (a *Access) GetAdvisoryByCodeAndYearAndNum(code string, year int, num int) (*db.Advisory, error) { - advisories := a.getAdvisoriesWithJoin(func(advisory *db.Advisory) bool { +func (a *Access) GetAdvisoryByCodeAndYearAndNum(code string, year int, num int) (*apollodb.Advisory, error) { + advisories := a.getAdvisoriesWithJoin(func(advisory *apollodb.Advisory) bool { if advisory.ShortCodeCode == code && advisory.Year == year && advisory.Num == num { return true } @@ -220,14 +242,14 @@ func (a *Access) GetAdvisoryByCodeAndYearAndNum(code string, year int, num int) return 
advisories[0], nil } -func (a *Access) CreateAdvisory(advisory *db.Advisory) (*db.Advisory, error) { +func (a *Access) CreateAdvisory(advisory *apollodb.Advisory) (*apollodb.Advisory, error) { var lastId int64 = 1 if len(a.Advisories) > 0 { lastId = a.Advisories[len(a.Advisories)-1].ID + 1 } now := time.Now() - ret := &db.Advisory{ + ret := &apollodb.Advisory{ ID: lastId, CreatedAt: &now, Year: advisory.Year, @@ -246,7 +268,7 @@ func (a *Access) CreateAdvisory(advisory *db.Advisory) (*db.Advisory, error) { return ret, nil } -func (a *Access) UpdateAdvisory(advisory *db.Advisory) (*db.Advisory, error) { +func (a *Access) UpdateAdvisory(advisory *apollodb.Advisory) (*apollodb.Advisory, error) { for _, val := range a.Advisories { if val.ID == advisory.ID { val.Year = advisory.Year @@ -267,47 +289,58 @@ func (a *Access) UpdateAdvisory(advisory *db.Advisory) (*db.Advisory, error) { return nil, sql.ErrNoRows } -func (a *Access) GetAllUnresolvedCVEs() ([]*db.CVE, error) { - var cves []*db.CVE +func (a *Access) GetAllUnresolvedCVEs() ([]*apollodb.CVE, error) { + var cves []*apollodb.CVE + var addedCVEIds []string for _, cve := range a.Cves { - switch cve.State { - case - int(secparseadminpb.CVEState_NewFromUpstream), - int(secparseadminpb.CVEState_NewOriginal): + for _, affectedProduct := range a.AffectedProducts { + if affectedProduct.CveID.String == cve.ID { + switch affectedProduct.State { + case + int(apollopb.AffectedProduct_STATE_UNDER_INVESTIGATION_UPSTREAM), + int(apollopb.AffectedProduct_STATE_UNDER_INVESTIGATION_DOWNSTREAM), + int(apollopb.AffectedProduct_STATE_AFFECTED_UPSTREAM), + int(apollopb.AffectedProduct_STATE_AFFECTED_DOWNSTREAM): + nCve := *cve + nCve.AffectedProductId = sql.NullInt64{Valid: true, Int64: affectedProduct.ID} + cves = append(cves, &nCve) + break + } + } + } + } + for _, cve := range a.Cves { + if !utils.StrContains(cve.ID, addedCVEIds) { cves = append(cves, cve) - break } } return cves, nil } -func (a *Access) GetAllCVEsWithAllProductsFixed() ([]*db.CVE, error) { - var cves []*db.CVE - var fixedAffectedProducts []*db.AffectedProduct +func (a *Access) GetPendingAffectedProducts() ([]*apollodb.AffectedProduct, error) { + var ret []*apollodb.AffectedProduct for _, affectedProduct := range a.AffectedProducts { - switch affectedProduct.State { - case - int(secparseadminpb.AffectedProductState_FixedUpstream), - int(secparseadminpb.AffectedProductState_WillNotFixUpstream), - int(secparseadminpb.AffectedProductState_WillNotFixDownstream), - int(secparseadminpb.AffectedProductState_OutOfSupportScope): - fixedAffectedProducts = append(fixedAffectedProducts, affectedProduct) + if affectedProduct.State == int(apollopb.AffectedProduct_STATE_FIXED_UPSTREAM) { + ret = append(ret, affectedProduct) } } + return ret, nil +} + +func (a *Access) GetAllCVEsFixedDownstream() ([]*apollodb.CVE, error) { + var cves []*apollodb.CVE + for _, cve := range a.Cves { - switch cve.State { - case - int(secparseadminpb.CVEState_NewFromUpstream), - int(secparseadminpb.CVEState_NewOriginal), - int(secparseadminpb.CVEState_ResolvedUpstream), - int(secparseadminpb.CVEState_ResolvedDownstream): - for _, fixed := range fixedAffectedProducts { - if fixed.CveID.String == cve.ID { - cves = append(cves, cve) + for _, affectedProduct := range a.AffectedProducts { + if affectedProduct.CveID.String == cve.ID { + if affectedProduct.State == int(apollopb.AffectedProduct_STATE_FIXED_DOWNSTREAM) { + nCve := *cve + nCve.AffectedProductId = sql.NullInt64{Valid: true, Int64: affectedProduct.ID} + cves = append(cves, 
&nCve) break } } @@ -317,19 +350,7 @@ func (a *Access) GetAllCVEsWithAllProductsFixed() ([]*db.CVE, error) { return cves, nil } -func (a *Access) GetAllCVEsFixedDownstream() ([]*db.CVE, error) { - var cves []*db.CVE - - for _, cve := range a.Cves { - if cve.State == int(secparseadminpb.CVEState_ResolvedDownstream) { - cves = append(cves, cve) - } - } - - return cves, nil -} - -func (a *Access) GetCVEByID(id string) (*db.CVE, error) { +func (a *Access) GetCVEByID(id string) (*apollodb.CVE, error) { for _, cve := range a.Cves { if cve.ID == id { return cve, nil @@ -339,7 +360,11 @@ func (a *Access) GetCVEByID(id string) (*db.CVE, error) { return nil, sql.ErrNoRows } -func (a *Access) CreateCVE(cveId string, state secparseadminpb.CVEState, shortCode string, sourceBy *string, sourceLink *string) (*db.CVE, error) { +func (a *Access) GetAllCVEs() ([]*apollodb.CVE, error) { + return a.Cves, nil +} + +func (a *Access) CreateCVE(cveId string, shortCode string, sourceBy *string, sourceLink *string, content types.NullJSONText) (*apollodb.CVE, error) { var sby sql.NullString var sl sql.NullString @@ -354,32 +379,33 @@ func (a *Access) CreateCVE(cveId string, state secparseadminpb.CVEState, shortCo } now := time.Now() - cve := &db.CVE{ + cve := &apollodb.CVE{ ID: cveId, CreatedAt: &now, - State: int(state), AdvisoryId: sql.NullInt64{}, ShortCode: shortCode, SourceBy: sby, SourceLink: sl, + Content: content, } a.Cves = append(a.Cves, cve) return cve, nil } -func (a *Access) UpdateCVEState(cve string, state secparseadminpb.CVEState) error { - for _, c := range a.Cves { - if c.ID == cve { - c.State = int(state) +func (a *Access) SetCVEContent(cveId string, content types.JSONText) error { + for _, cve := range a.Cves { + if cve.ID == cveId { + cve.Content = types.NullJSONText{Valid: true, JSONText: content} + return nil } } - return nil + return sql.ErrNoRows } -func (a *Access) GetProductsByShortCode(code string) ([]*db.Product, error) { - var products []*db.Product +func (a *Access) GetProductsByShortCode(code string) ([]*apollodb.Product, error) { + var products []*apollodb.Product for _, product := range a.Products { if product.ShortCode == code { @@ -390,7 +416,7 @@ func (a *Access) GetProductsByShortCode(code string) ([]*db.Product, error) { return products, nil } -func (a *Access) GetProductByNameAndShortCode(name string, code string) (*db.Product, error) { +func (a *Access) GetProductByNameAndShortCode(name string, code string) (*apollodb.Product, error) { for _, product := range a.Products { if product.Name == name && product.ShortCode == code { return product, nil @@ -400,7 +426,7 @@ func (a *Access) GetProductByNameAndShortCode(name string, code string) (*db.Pro return nil, sql.ErrNoRows } -func (a *Access) GetProductByID(id int64) (*db.Product, error) { +func (a *Access) GetProductByID(id int64) (*apollodb.Product, error) { for _, product := range a.Products { if product.ID == id { return product, nil @@ -410,7 +436,7 @@ func (a *Access) GetProductByID(id int64) (*db.Product, error) { return nil, sql.ErrNoRows } -func (a *Access) CreateProduct(name string, currentFullVersion string, redHatMajorVersion *int32, code string, archs []string) (*db.Product, error) { +func (a *Access) CreateProduct(name string, currentFullVersion string, redHatMajorVersion *int32, code string, archs []string) (*apollodb.Product, error) { var lastId int64 = 1 if len(a.Products) > 0 { lastId = a.Products[len(a.Products)-1].ID + 1 @@ -422,21 +448,23 @@ func (a *Access) CreateProduct(name string, currentFullVersion 
string, redHatMaj rhmv.Valid = true } - product := &db.Product{ - ID: lastId, - Name: name, - CurrentFullVersion: currentFullVersion, - RedHatMajorVersion: rhmv, - ShortCode: code, - Archs: archs, + product := &apollodb.Product{ + ID: lastId, + Name: name, + CurrentFullVersion: currentFullVersion, + RedHatMajorVersion: rhmv, + ShortCode: code, + Archs: archs, + MirrorFromDate: sql.NullTime{}, + RedHatProductPrefix: sql.NullString{}, } a.Products = append(a.Products, product) return product, nil } -func (a *Access) GetAllAffectedProductsByCVE(cve string) ([]*db.AffectedProduct, error) { - var affectedProducts []*db.AffectedProduct +func (a *Access) GetAllAffectedProductsByCVE(cve string) ([]*apollodb.AffectedProduct, error) { + var affectedProducts []*apollodb.AffectedProduct for _, affectedProduct := range a.AffectedProducts { if affectedProduct.CveID.String == cve { @@ -447,7 +475,7 @@ func (a *Access) GetAllAffectedProductsByCVE(cve string) ([]*db.AffectedProduct, return affectedProducts, nil } -func (a *Access) GetAffectedProductByCVEAndPackage(cve string, pkg string) (*db.AffectedProduct, error) { +func (a *Access) GetAffectedProductByCVEAndPackage(cve string, pkg string) (*apollodb.AffectedProduct, error) { for _, affectedProduct := range a.AffectedProducts { if affectedProduct.CveID.String == cve && affectedProduct.Package == pkg { return affectedProduct, nil @@ -457,7 +485,7 @@ func (a *Access) GetAffectedProductByCVEAndPackage(cve string, pkg string) (*db. return nil, sql.ErrNoRows } -func (a *Access) GetAffectedProductByAdvisory(advisory string) (*db.AffectedProduct, error) { +func (a *Access) GetAffectedProductByAdvisory(advisory string) (*apollodb.AffectedProduct, error) { for _, affectedProduct := range a.AffectedProducts { if affectedProduct.Advisory.String == advisory { return affectedProduct, nil @@ -467,7 +495,17 @@ func (a *Access) GetAffectedProductByAdvisory(advisory string) (*db.AffectedProd return nil, sql.ErrNoRows } -func (a *Access) CreateAffectedProduct(productId int64, cveId string, state int, version string, pkg string, advisory *string) (*db.AffectedProduct, error) { +func (a *Access) GetAffectedProductByID(id int64) (*apollodb.AffectedProduct, error) { + for _, affectedProduct := range a.AffectedProducts { + if affectedProduct.ID == id { + return affectedProduct, nil + } + } + + return nil, sql.ErrNoRows +} + +func (a *Access) CreateAffectedProduct(productId int64, cveId string, state int, version string, pkg string, advisory *string) (*apollodb.AffectedProduct, error) { var lastId int64 = 1 if len(a.AffectedProducts) > 0 { lastId = a.AffectedProducts[len(a.AffectedProducts)-1].ID + 1 @@ -479,7 +517,7 @@ func (a *Access) CreateAffectedProduct(productId int64, cveId string, state int, adv.Valid = true } - affectedProduct := &db.AffectedProduct{ + affectedProduct := &apollodb.AffectedProduct{ ID: lastId, ProductID: productId, CveID: sql.NullString{Valid: true, String: cveId}, @@ -529,15 +567,17 @@ func (a *Access) DeleteAffectedProduct(id int64) error { return nil } -func (a *Access) CreateFix(ticket string, description string) (int64, error) { +func (a *Access) CreateFix(ticket string, sourceBy string, sourceLink string, description string) (int64, error) { var lastId int64 = 1 if len(a.Fixes) > 0 { lastId = a.Fixes[len(a.Fixes)-1].ID + 1 } - fix := &db.Fix{ + fix := &apollodb.Fix{ ID: lastId, Ticket: sql.NullString{Valid: true, String: ticket}, + SourceBy: sql.NullString{Valid: true, String: sourceBy}, + SourceLink: sql.NullString{Valid: true, String: 
sourceLink}, Description: sql.NullString{Valid: true, String: description}, } a.Fixes = append(a.Fixes, fix) @@ -545,13 +585,13 @@ func (a *Access) CreateFix(ticket string, description string) (int64, error) { return lastId, nil } -func (a *Access) GetMirrorStateLastSync(code string) (*time.Time, error) { - var lastSync *time.Time +func (a *Access) GetMirrorState(code string) (*apollodb.MirrorState, error) { + var lastSync *apollodb.MirrorState for _, mirrorState := range a.MirrorStates { if mirrorState.ShortCode == code { if mirrorState.LastSync.Valid { - lastSync = &mirrorState.LastSync.Time + lastSync = mirrorState } } } @@ -573,7 +613,7 @@ func (a *Access) UpdateMirrorState(code string, lastSync *time.Time) error { } } - mirrorState := &db.MirrorState{ + mirrorState := &apollodb.MirrorState{ ShortCode: code, LastSync: sql.NullTime{Valid: true, Time: *lastSync}, } @@ -582,19 +622,62 @@ func (a *Access) UpdateMirrorState(code string, lastSync *time.Time) error { return nil } -func (a *Access) CreateBuildReference(affectedProductId int64, rpm string, srcRpm string, cveId string, kojiId string) (*db.BuildReference, error) { +func (a *Access) UpdateMirrorStateErrata(code string, lastSync *time.Time) error { + for _, mirrorState := range a.MirrorStates { + if mirrorState.ShortCode == code { + mirrorState.ErrataAfter.Time = *lastSync + mirrorState.ErrataAfter.Valid = true + + return nil + } + } + + mirrorState := &apollodb.MirrorState{ + ShortCode: code, + ErrataAfter: sql.NullTime{Valid: true, Time: *lastSync}, + } + a.MirrorStates = append(a.MirrorStates, mirrorState) + + return nil +} + +func (a *Access) GetMaxLastSync() (*time.Time, error) { + var maxLastSync *time.Time + + for _, mirrorState := range a.MirrorStates { + if mirrorState.LastSync.Valid { + if maxLastSync == nil || mirrorState.LastSync.Time.After(*maxLastSync) { + maxLastSync = &mirrorState.LastSync.Time + } + } + } + + if maxLastSync == nil { + return nil, sql.ErrNoRows + } + + return maxLastSync, nil +} + +func (a *Access) CreateBuildReference(affectedProductId int64, rpm string, srcRpm string, cveId string, sha256Sum string, kojiId *string, peridotId *string) (*apollodb.BuildReference, error) { var lastId int64 = 1 if len(a.BuildReferences) > 0 { lastId = a.BuildReferences[len(a.BuildReferences)-1].ID + 1 } - buildReference := &db.BuildReference{ + buildReference := &apollodb.BuildReference{ ID: lastId, AffectedProductId: affectedProductId, Rpm: rpm, SrcRpm: srcRpm, CveID: cveId, - KojiID: kojiId, + Sha256Sum: sha256Sum, + } + if kojiId != nil { + buildReference.KojiID = sql.NullString{Valid: true, String: *kojiId} + } + if peridotId != nil { + buildReference.PeridotID = sql.NullString{Valid: true, String: *peridotId} } a.BuildReferences = append(a.BuildReferences, buildReference) @@ -608,7 +691,7 @@ func (a *Access) CreateAdvisoryReference(advisoryId int64, url string) error { lastId = a.AdvisoryReferences[len(a.AdvisoryReferences)-1].ID + 1 } - advisoryReference := &db.AdvisoryReference{ + advisoryReference := &apollodb.AdvisoryReference{ ID: lastId, URL: url, AdvisoryId: advisoryId, @@ -618,11 +701,11 @@ func (a *Access) CreateAdvisoryReference(advisoryId int64, url string) error { return nil } -func (a *Access) GetAllIgnoredPackagesByShortCode(code string) ([]string, error) { +func (a *Access) GetAllIgnoredPackagesByProductID(productID int64) ([]string, error) { var packages []string for _, ignoredPackage := range a.IgnoredUpstreamPackages { - if ignoredPackage.ShortCode == code { + if ignoredPackage.ProductID == 
productID { packages = append(packages, ignoredPackage.Package) } } @@ -630,8 +713,18 @@ func (a *Access) GetAllIgnoredPackagesByShortCode(code string) ([]string, error) return packages, nil } +func (a *Access) GetAllRebootSuggestedPackages() ([]string, error) { + var packages []string + + for _, p := range a.RebootSuggestedPackages { + packages = append(packages, p.Name) + } + + return packages, nil +} + func (a *Access) AddAdvisoryFix(advisoryId int64, fixId int64) error { - advisoryFix := &db.AdvisoryFix{ + advisoryFix := &apollodb.AdvisoryFix{ AdvisoryID: advisoryId, FixID: fixId, } @@ -641,7 +734,7 @@ func (a *Access) AddAdvisoryFix(advisoryId int64, fixId int64) error { } func (a *Access) AddAdvisoryCVE(advisoryId int64, cveId string) error { - advisoryCVE := &db.AdvisoryCVE{ + advisoryCVE := &apollodb.AdvisoryCVE{ AdvisoryID: advisoryId, CveID: cveId, } @@ -650,10 +743,11 @@ func (a *Access) AddAdvisoryCVE(advisoryId int64, cveId string) error { return nil } -func (a *Access) AddAdvisoryRPM(advisoryId int64, name string) error { - advisoryRPM := &db.AdvisoryRPM{ +func (a *Access) AddAdvisoryRPM(advisoryId int64, name string, productID int64) error { + advisoryRPM := &apollodb.AdvisoryRPM{ AdvisoryID: advisoryId, Name: name, + ProductID: productID, } a.AdvisoryRPMs = append(a.AdvisoryRPMs, advisoryRPM) @@ -664,6 +758,6 @@ func (a *Access) Begin() (utils.Tx, error) { return &utils.MockTx{}, nil } -func (a *Access) UseTransaction(_ utils.Tx) db.Access { +func (a *Access) UseTransaction(_ utils.Tx) apollodb.Access { return a } diff --git a/secparse/db/psql/BUILD.bazel b/apollo/db/psql/BUILD.bazel similarity index 60% rename from secparse/db/psql/BUILD.bazel rename to apollo/db/psql/BUILD.bazel index 1a14f79c..243f0ef3 100644 --- a/secparse/db/psql/BUILD.bazel +++ b/apollo/db/psql/BUILD.bazel @@ -3,12 +3,13 @@ load("@io_bazel_rules_go//go:def.bzl", "go_library") go_library( name = "psql", srcs = ["psql.go"], - importpath = "peridot.resf.org/secparse/db/psql", + importpath = "peridot.resf.org/apollo/db/psql", visibility = ["//visibility:public"], deps = [ - "//secparse/admin/proto/v1:proto", - "//secparse/db", + "//apollo/db", + "//apollo/proto/v1:pb", "//utils", "//vendor/github.com/jmoiron/sqlx", + "//vendor/github.com/jmoiron/sqlx/types", ], ) diff --git a/apollo/db/psql/psql.go b/apollo/db/psql/psql.go new file mode 100644 index 00000000..3810f0f8 --- /dev/null +++ b/apollo/db/psql/psql.go @@ -0,0 +1,802 @@ +// Copyright (c) All respective contributors to the Peridot Project. All rights reserved. +// Copyright (c) 2021-2022 Rocky Enterprise Software Foundation, Inc. All rights reserved. +// Copyright (c) 2021-2022 Ctrl IQ, Inc. All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are met: +// +// 1. Redistributions of source code must retain the above copyright notice, +// this list of conditions and the following disclaimer. +// +// 2. Redistributions in binary form must reproduce the above copyright notice, +// this list of conditions and the following disclaimer in the documentation +// and/or other materials provided with the distribution. +// +// 3. Neither the name of the copyright holder nor the names of its contributors +// may be used to endorse or promote products derived from this software without +// specific prior written permission. 
+// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE +// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +// POSSIBILITY OF SUCH DAMAGE. + +package apollopsql + +import ( + "database/sql" + "github.com/jmoiron/sqlx/types" + apollodb "peridot.resf.org/apollo/db" + apollopb "peridot.resf.org/apollo/pb" + "time" + + "github.com/jmoiron/sqlx" + "peridot.resf.org/utils" +) + +type Access struct { + db *sqlx.DB + query utils.SqlQuery +} + +func New() *Access { + pgx := utils.PgInitx() + return &Access{ + db: pgx, + query: pgx, + } +} + +func (a *Access) GetAllShortCodes() ([]*apollodb.ShortCode, error) { + var shortCodes []*apollodb.ShortCode + err := a.query.Select( + &shortCodes, + ` + select + code, + mode, + created_at, + archived_at + from short_codes + order by created_at desc + `, + ) + if err != nil { + return nil, err + } + + return shortCodes, nil +} + +func (a *Access) GetShortCodeByCode(code string) (*apollodb.ShortCode, error) { + var shortCode apollodb.ShortCode + err := a.query.Get(&shortCode, "select code, mode, created_at from short_codes where code = $1", code) + if err != nil { + return nil, err + } + + return &shortCode, nil +} + +func (a *Access) CreateShortCode(code string, mode apollopb.ShortCode_Mode) (*apollodb.ShortCode, error) { + var shortCode apollodb.ShortCode + err := a.query.Get(&shortCode, "insert into short_codes (code, mode) values ($1, $2) returning code, mode, created_at", code, int(mode)) + if err != nil { + return nil, err + } + + return &shortCode, nil +} + +func (a *Access) GetAllAdvisories(filters *apollopb.AdvisoryFilters, page int32, limit int32) ([]*apollodb.Advisory, error) { + if filters == nil { + filters = &apollopb.AdvisoryFilters{} + } + + var advisories []*apollodb.Advisory + err := a.query.Select( + &advisories, + ` + select + q1.* + from + ( + select + a.id, + a.created_at, + a.year, + a.num, + a.synopsis, + a.topic, + a.severity, + a.type, + a.description, + a.solution, + a.redhat_issued_at, + a.short_code_code, + a.reboot_suggested, + a.published_at, + array_remove(array_agg(distinct p.name), NULL) as affected_products, + (select array_agg(distinct( + case when c.content is null then c.source_by || ':::' || c.source_link || ':::' || c.id || ':::::::::' + else c.source_by || ':::' || c.source_link || ':::' || c.id || ':::' || jsonb_extract_path_text(c.content, 'cvss3', 'cvss3_scoring_vector') || ':::' || jsonb_extract_path_text(c.content, 'cvss3', 'cvss3_base_score') || ':::' || jsonb_extract_path_text(c.content, 'cwe') + end + )) from advisory_cves ac inner join cves c on c.id = ac.cve_id where ac.advisory_id = a.id) as cves, + (select array_agg(distinct(url)) from advisory_references where advisory_id = a.id) as references, + case when $4 :: bool = true then array(select distinct concat(rpm, ':::', src_rpm) from build_references where affected_product_id in (select id from 
affected_products where advisory = 'RH' || (case when a.type=1 then 'SA' when a.type=2 then 'BA' else 'EA' end) || '-' || a.year || ':' || a.num)) + else array [] :: text[] + end as build_artifacts, + case when $7 :: bool = true then array(select distinct(ar.name || ':::' || p.name) from advisory_rpms ar inner join products p on p.id = ar.product_id where advisory_id = a.id) + else array [] :: text[] + end as rpms, + count(a.*) over() as total + from advisories a + inner join affected_products ap on ap.advisory = 'RH' || (case when a.type=1 then 'SA' when a.type=2 then 'BA' else 'EA' end) || '-' || a.year || ':' || a.num + inner join products p on ap.product_id = p.id + where + ($1 :: text is null or p.name = $1 :: text) + and ($2 :: timestamp is null or a.published_at < $2 :: timestamp) + and ($3 :: timestamp is null or a.published_at > $3 :: timestamp) + and (a.published_at is not null or $4 :: bool = true) + and ($6 :: text is null or a.synopsis ilike '%' || $6 :: text || '%') + and ($9 :: numeric = 0 or a.severity = $9 :: numeric) + and ($10 :: numeric = 0 or a.type = $10 :: numeric) + group by a.id + order by a.published_at desc + limit $11 offset $12 + ) as q1 + where + ($8 :: text is null or ((q1.synopsis ilike '%' || $8 :: text || '%') or (q1.topic ilike '%' || $8 :: text || '%') or (q1.description ilike '%' || $8 :: text || '%') or (q1.solution ilike '%' || $8 :: text || '%') or exists (select from unnest(q1.cves) e where e ilike '%' || $8 :: text || '%'))) + and ($5 :: text is null or exists (select from unnest(q1.cves) e where e ilike '%' || $5 :: text || '%')) + `, + utils.StringValueToNullString(filters.Product), + utils.TimestampToNullTime(filters.Before), + utils.TimestampToNullTime(filters.After), + utils.BoolValueP(filters.IncludeUnpublished), + utils.StringValueToNullString(filters.Cve), + utils.StringValueToNullString(filters.Synopsis), + utils.BoolValueP(filters.IncludeRpms), + utils.StringValueToNullString(filters.Keyword), + int32(filters.Severity), + int32(filters.Type), + utils.UnlimitedLimit(limit), + utils.GetOffset(page, limit), + ) + if err != nil { + return nil, err + } + + return advisories, nil +} + +func (a *Access) GetAdvisoryByCodeAndYearAndNum(code string, year int, num int) (*apollodb.Advisory, error) { + var advisory apollodb.Advisory + err := a.query.Get( + &advisory, + ` + select + a.id, + a.created_at, + a.year, + a.num, + a.synopsis, + a.topic, + a.severity, + a.type, + a.description, + a.solution, + a.redhat_issued_at, + a.short_code_code, + a.reboot_suggested, + a.published_at, + array_remove(array_agg(distinct p.name), NULL) as affected_products, + (select array_agg(distinct(f.ticket || ':::' || f.source_by || ':::' || f.source_link || ':::' || f.description)) from advisory_fixes adf inner join fixes f on f.id = adf.fix_id where adf.advisory_id = a.id) as fixes, + (select array_agg(distinct( + case when c.content is null then c.source_by || ':::' || c.source_link || ':::' || c.id || ':::::::::' + else c.source_by || ':::' || c.source_link || ':::' || c.id || ':::' || jsonb_extract_path_text(c.content, 'cvss3', 'cvss3_scoring_vector') || ':::' || jsonb_extract_path_text(c.content, 'cvss3', 'cvss3_base_score') || ':::' || jsonb_extract_path_text(c.content, 'cwe') + end + )) from advisory_cves ac inner join cves c on c.id = ac.cve_id where ac.advisory_id = a.id) as cves, + (select array_agg(distinct(url)) from advisory_references where advisory_id = a.id) as references, + (select array_agg(distinct(ar.name || ':::' || p.name)) from advisory_rpms ar 
inner join products p on p.id = ar.product_id where advisory_id = a.id) as rpms + from advisories a + inner join affected_products ap on ap.advisory = 'RH' || (case when a.type=1 then 'SA' when a.type=2 then 'BA' else 'EA' end) || '-' || a.year || ':' || a.num + inner join products p on ap.product_id = p.id + where + a.year = $1 + and a.num = $2 + and a.short_code_code = $3 + group by a.id + `, + year, + num, + code, + ) + if err != nil { + return nil, err + } + + return &advisory, nil +} + +func (a *Access) CreateAdvisory(advisory *apollodb.Advisory) (*apollodb.Advisory, error) { + var ret apollodb.Advisory + + var redHatIssuedAt *time.Time + var publishedAt *time.Time + + if advisory.RedHatIssuedAt.Valid { + redHatIssuedAt = &advisory.RedHatIssuedAt.Time + } + if advisory.PublishedAt.Valid { + publishedAt = &advisory.PublishedAt.Time + } + + err := a.query.Get( + &ret, + ` + insert into advisories + (year, num, synopsis, topic, severity, type, description, solution, + redhat_issued_at, short_code_code, reboot_suggested, published_at) + values ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12) + returning + id, + created_at, + year, + num, + synopsis, + topic, + severity, + type, + description, + solution, + redhat_issued_at, + short_code_code, + reboot_suggested, + published_at + `, + advisory.Year, + advisory.Num, + advisory.Synopsis, + advisory.Topic, + advisory.Severity, + advisory.Type, + advisory.Description, + advisory.Solution, + redHatIssuedAt, + advisory.ShortCodeCode, + advisory.RebootSuggested, + publishedAt, + ) + if err != nil { + return nil, err + } + + return &ret, nil +} + +func (a *Access) UpdateAdvisory(advisory *apollodb.Advisory) (*apollodb.Advisory, error) { + var ret apollodb.Advisory + + var publishedAt *time.Time + + if advisory.PublishedAt.Valid { + publishedAt = &advisory.PublishedAt.Time + } + + err := a.query.Get( + &ret, + ` + update advisories + set + year = $1, + num = $2, + synopsis = $3, + topic = $4, + severity = $5, + type = $6, + description = $7, + solution = $8, + short_code_code = $9, + reboot_suggested = $10, + published_at = $11 + where + id = $12 + returning + id, + created_at, + year, + num, + synopsis, + topic, + severity, + type, + description, + solution, + redhat_issued_at, + short_code_code, + reboot_suggested, + published_at + `, + advisory.Year, + advisory.Num, + advisory.Synopsis, + advisory.Topic, + advisory.Severity, + advisory.Type, + advisory.Description, + advisory.Solution, + advisory.ShortCodeCode, + advisory.RebootSuggested, + publishedAt, + advisory.ID, + ) + if err != nil { + return nil, err + } + + return &ret, nil +} + +func (a *Access) GetAllUnresolvedCVEs() ([]*apollodb.CVE, error) { + var cves []*apollodb.CVE + err := a.query.Select( + &cves, + ` + select + c.id, + c.created_at, + c.short_code_code, + c.source_by, + c.source_link, + c.content, + ap.id as affected_product_id + from cves c + left join affected_products ap on ap.cve_id = c.id + where (ap.state is null or ap.state in (1, 2, 8, 9)) + `, + ) + if err != nil { + return nil, err + } + + return cves, nil +} + +func (a *Access) GetPendingAffectedProducts() ([]*apollodb.AffectedProduct, error) { + var ret []*apollodb.AffectedProduct + err := a.query.Select( + &ret, + ` + select + ap.id, + ap.product_id, + ap.cve_id, + ap.state, + ap.version, + ap.package, + ap.advisory + from affected_products ap + where ap.state = 3 + `, + ) + if err != nil { + return nil, err + } + + return ret, nil +} + +func (a *Access) GetAllCVEsFixedDownstream() ([]*apollodb.CVE, error) { + 
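+	// State 4 is STATE_FIXED_DOWNSTREAM (see apollo/proto/v1/affected_product.proto): the fix has shipped downstream, so these CVEs are ready to be attached to errata.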
var cves []*apollodb.CVE + err := a.query.Select( + &cves, + ` + select + c.id, + c.created_at, + c.short_code_code, + c.source_by, + c.source_link, + c.content, + ap.id as affected_product_id + from cves c + inner join affected_products ap on ap.cve_id = c.id + where + ap.state = 4 + `, + ) + if err != nil { + return nil, err + } + + return cves, nil +} + +func (a *Access) GetCVEByID(id string) (*apollodb.CVE, error) { + var cve apollodb.CVE + err := a.query.Get(&cve, "select id, created_at, short_code_code, source_by, source_link, content from cves where id = $1", id) + if err != nil { + return nil, err + } + + return &cve, nil +} + +func (a *Access) GetAllCVEs() ([]*apollodb.CVE, error) { + var cves []*apollodb.CVE + err := a.query.Select(&cves, "select id, created_at, short_code_code, source_by, source_link, content from cves") + if err != nil { + return nil, err + } + + return cves, nil +} + +func (a *Access) CreateCVE(cveId string, shortCode string, sourceBy *string, sourceLink *string, content types.NullJSONText) (*apollodb.CVE, error) { + var cve apollodb.CVE + err := a.query.Get(&cve, "insert into cves (id, short_code_code, source_by, source_link, content) values ($1, $2, $3, $4, $5) returning id, created_at, short_code_code, source_by, source_link, content", cveId, shortCode, sourceBy, sourceLink, content) + if err != nil { + return nil, err + } + + return &cve, nil +} + +func (a *Access) SetCVEContent(cveId string, content types.JSONText) error { + _, err := a.query.Exec("update cves set content = $1 where id = $2", content, cveId) + return err +} + +func (a *Access) GetProductsByShortCode(code string) ([]*apollodb.Product, error) { + var products []*apollodb.Product + err := a.query.Select( + &products, + ` + select + id, + name, + current_full_version, + redhat_major_version, + short_code_code, + archs, + mirror_from_date, + redhat_product_prefix, + cpe, + eol_at, + build_system, + build_system_endpoint, + koji_compose, + koji_module_compose, + peridot_project_id + from products + where + short_code_code = $1 + and (eol_at < now() or eol_at is null) + `, + code, + ) + if err != nil { + return nil, err + } + + return products, nil +} + +func (a *Access) GetProductByNameAndShortCode(name string, code string) (*apollodb.Product, error) { + var product apollodb.Product + err := a.query.Get( + &product, + ` + select + id, + name, + current_full_version, + redhat_major_version, + short_code_code, + archs, + mirror_from_date, + redhat_product_prefix, + cpe, + eol_at, + build_system, + build_system_endpoint, + koji_compose, + koji_module_compose, + peridot_project_id + from products + where + name = $1 + and short_code_code = $2 + `, + name, + code, + ) + if err != nil { + return nil, err + } + + return &product, nil +} + +func (a *Access) GetProductByID(id int64) (*apollodb.Product, error) { + var product apollodb.Product + err := a.query.Get( + &product, + ` + select + id, + name, + current_full_version, + redhat_major_version, + short_code_code, + archs, + mirror_from_date, + redhat_product_prefix, + cpe, + eol_at, + build_system, + build_system_endpoint, + koji_compose, + koji_module_compose, + peridot_project_id + from products + where + id = $1 + `, + id, + ) + if err != nil { + return nil, err + } + + return &product, nil +} + +func (a *Access) CreateProduct(name string, currentFullVersion string, redHatMajorVersion *int32, code string, archs []string) (*apollodb.Product, error) { + var product apollodb.Product + err := a.query.Get(&product, "insert into products (name, 
current_full_version, redhat_major_version, short_code_code, archs) values ($1, $2, $3, $4) returning id, name, current_full_version, redhat_major_version, short_code_code, archs", name, currentFullVersion, redHatMajorVersion, code, archs) + if err != nil { + return nil, err + } + + return &product, nil +} + +func (a *Access) GetAllAffectedProductsByCVE(cve string) ([]*apollodb.AffectedProduct, error) { + var affectedProducts []*apollodb.AffectedProduct + err := a.query.Select(&affectedProducts, "select id, product_id, cve_id, state, version, package, advisory from affected_products where cve_id = $1", cve) + if err != nil { + return nil, err + } + + return affectedProducts, nil +} + +func (a *Access) GetAffectedProductByCVEAndPackage(cve string, pkg string) (*apollodb.AffectedProduct, error) { + var affectedProduct apollodb.AffectedProduct + err := a.query.Get(&affectedProduct, "select id, product_id, cve_id, state, version, package, advisory from affected_products where cve_id = $1 and package = $2", cve, pkg) + if err != nil { + return nil, err + } + + return &affectedProduct, nil +} + +func (a *Access) GetAffectedProductByAdvisory(advisory string) (*apollodb.AffectedProduct, error) { + var affectedProduct apollodb.AffectedProduct + err := a.query.Get(&affectedProduct, "select id, product_id, cve_id, state, version, package, advisory from affected_products where advisory = $1", advisory) + if err != nil { + return nil, err + } + + return &affectedProduct, nil +} + +func (a *Access) GetAffectedProductByID(id int64) (*apollodb.AffectedProduct, error) { + var affectedProduct apollodb.AffectedProduct + err := a.query.Get(&affectedProduct, "select id, product_id, cve_id, state, version, package, advisory from affected_products where id = $1", id) + if err != nil { + return nil, err + } + + return &affectedProduct, nil +} + +func (a *Access) CreateAffectedProduct(productId int64, cveId string, state int, version string, pkg string, advisory *string) (*apollodb.AffectedProduct, error) { + var affectedProduct apollodb.AffectedProduct + err := a.query.Get(&affectedProduct, "insert into affected_products (product_id, cve_id, state, version, package, advisory) values ($1, $2, $3, $4, $5, $6) returning id, product_id, cve_id, state, version, package, advisory", productId, cveId, state, version, pkg, advisory) + if err != nil { + return nil, err + } + + return &affectedProduct, nil +} + +func (a *Access) UpdateAffectedProductStateAndPackageAndAdvisory(id int64, state int, pkg string, advisory *string) error { + _, err := a.query.Exec( + ` + update affected_products + set + state = $1, + package = $2, + advisory = $3 + where id = $4 + `, + state, + pkg, + advisory, + id, + ) + return err +} + +func (a *Access) DeleteAffectedProduct(id int64) error { + _, err := a.query.Exec( + ` + delete from affected_products + where id = $1 + `, + id, + ) + return err +} + +func (a *Access) CreateFix(ticket string, sourceBy string, sourceLink, description string) (int64, error) { + var id int64 + err := a.query.Get(&id, "insert into fixes (ticket, source_by, source_link, description) values ($1, $2, $3, $4) returning id", ticket, sourceBy, sourceLink, description) + return id, err +} + +func (a *Access) GetMirrorState(code string) (*apollodb.MirrorState, error) { + var lastSync apollodb.MirrorState + err := a.query.Get(&lastSync, "select short_code_code, last_sync, errata_after from mirror_state where short_code_code = $1", code) + if err != nil { + if err == sql.ErrNoRows { + return nil, nil + } + + return nil, 
err + } + + return &lastSync, nil +} + +func (a *Access) UpdateMirrorState(code string, lastSync *time.Time) error { + _, err := a.query.Exec( + ` + insert into mirror_state (short_code_code, last_sync) + values ($1, $2) + on conflict (short_code_code) do + update + set last_sync = EXCLUDED.last_sync + `, + code, + lastSync, + ) + return err +} + +func (a *Access) UpdateMirrorStateErrata(code string, lastSync *time.Time) error { + _, err := a.query.Exec( + ` + insert into mirror_state (short_code_code, errata_after) + values ($1, $2) + on conflict (short_code_code) do + update + set errata_after = EXCLUDED.errata_after + `, + code, + lastSync, + ) + return err +} + +func (a *Access) GetMaxLastSync() (*time.Time, error) { + var lastSync time.Time + err := a.query.Get(&lastSync, "select max(last_sync) from mirror_state") + if err != nil { + return nil, err + } + + return &lastSync, nil +} + +func (a *Access) CreateBuildReference(affectedProductId int64, rpm string, srcRpm string, cveId string, sha256Sum string, kojiId *string, peridotId *string) (*apollodb.BuildReference, error) { + var buildReference apollodb.BuildReference + err := a.query.Get( + &buildReference, + ` + insert into build_references + (affected_product_id, rpm, src_rpm, cve_id, sha256_sum, koji_id, peridot_id) + values ($1, $2, $3, $4, $5, $6, $7) + returning id, affected_product_id, rpm, src_rpm, cve_id, sha256_sum, koji_id, peridot_id + `, + affectedProductId, + rpm, + srcRpm, + cveId, + sha256Sum, + kojiId, + peridotId, + ) + if err != nil { + return nil, err + } + + return &buildReference, nil +} + +func (a *Access) CreateAdvisoryReference(advisoryId int64, url string) error { + _, err := a.query.Exec("insert into advisory_references (advisory_id, url) values ($1, $2)", advisoryId, url) + return err +} + +func (a *Access) GetAllIgnoredPackagesByProductID(productID int64) ([]string, error) { + var packages []string + err := a.query.Select(&packages, "select package from ignored_upstream_packages where product_id = $1", productID) + if err != nil { + return nil, err + } + + return packages, nil +} + +func (a *Access) GetAllRebootSuggestedPackages() ([]string, error) { + var packages []string + err := a.query.Select(&packages, "select name from reboot_suggested_packages") + if err != nil { + return nil, err + } + + return packages, nil +} + +func (a *Access) AddAdvisoryFix(advisoryId int64, fixId int64) error { + _, err := a.query.Exec("insert into advisory_fixes (advisory_id, fix_id) values ($1, $2) on conflict do nothing", advisoryId, fixId) + if err != nil { + return err + } + + return nil +} + +func (a *Access) AddAdvisoryCVE(advisoryId int64, cveId string) error { + _, err := a.query.Exec("insert into advisory_cves (advisory_id, cve_id) values ($1, $2) on conflict do nothing", advisoryId, cveId) + if err != nil { + return err + } + + return nil +} + +func (a *Access) AddAdvisoryRPM(advisoryId int64, name string, productID int64) error { + _, err := a.query.Exec("insert into advisory_rpms (advisory_id, name, product_id) values ($1, $2, $3) on conflict do nothing", advisoryId, name, productID) + if err != nil { + return err + } + + return nil +} + +func (a *Access) Begin() (utils.Tx, error) { + tx, err := a.db.Beginx() + if err != nil { + return nil, err + } + + return tx, nil +} + +func (a *Access) UseTransaction(tx utils.Tx) apollodb.Access { + newAccess := *a + newAccess.query = tx + + return &newAccess +} diff --git a/secparse/impl/BUILD.bazel b/apollo/impl/v1/BUILD.bazel similarity index 51% rename from 
secparse/impl/BUILD.bazel rename to apollo/impl/v1/BUILD.bazel index e007d809..a666e6cf 100644 --- a/secparse/impl/BUILD.bazel +++ b/apollo/impl/v1/BUILD.bazel @@ -6,16 +6,21 @@ go_library( "advisory.go", "server.go", ], - importpath = "peridot.resf.org/secparse/impl", + importpath = "peridot.resf.org/apollo/impl/v1", visibility = ["//visibility:public"], deps = [ - "//secparse/db", - "//secparse/proto/v1:proto", - "//secparse/rpmutils", + "//apollo/db", + "//apollo/proto/v1:pb", + "//apollo/rpmutils", + "//proto:common", "//utils", + "//vendor/github.com/gorilla/feeds", "//vendor/github.com/sirupsen/logrus", + "//vendor/github.com/spf13/viper", + "@go_googleapis//google/api:httpbody_go_proto", "@org_golang_google_grpc//:go_default_library", "@org_golang_google_grpc//codes", "@org_golang_google_grpc//status", + "@org_golang_google_protobuf//types/known/timestamppb:go_default_library", ], ) diff --git a/apollo/impl/v1/advisory.go b/apollo/impl/v1/advisory.go new file mode 100644 index 00000000..de6a0e81 --- /dev/null +++ b/apollo/impl/v1/advisory.go @@ -0,0 +1,182 @@ +// Copyright (c) All respective contributors to the Peridot Project. All rights reserved. +// Copyright (c) 2021-2022 Rocky Enterprise Software Foundation, Inc. All rights reserved. +// Copyright (c) 2021-2022 Ctrl IQ, Inc. All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are met: +// +// 1. Redistributions of source code must retain the above copyright notice, +// this list of conditions and the following disclaimer. +// +// 2. Redistributions in binary form must reproduce the above copyright notice, +// this list of conditions and the following disclaimer in the documentation +// and/or other materials provided with the distribution. +// +// 3. Neither the name of the copyright holder nor the names of its contributors +// may be used to endorse or promote products derived from this software without +// specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE +// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +// POSSIBILITY OF SUCH DAMAGE. 
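+// advisory.go implements the advisory-facing RPCs of ApolloService: ListAdvisories, ListAdvisoriesRSS and GetAdvisory.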
+ +package apolloimpl + +import ( + "context" + "database/sql" + "fmt" + "github.com/gorilla/feeds" + "github.com/sirupsen/logrus" + "google.golang.org/genproto/googleapis/api/httpbody" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/status" + "google.golang.org/protobuf/types/known/timestamppb" + apollodb "peridot.resf.org/apollo/db" + apollopb "peridot.resf.org/apollo/pb" + "peridot.resf.org/apollo/rpmutils" + "peridot.resf.org/utils" + "strconv" + "time" +) + +func (s *Server) ListAdvisories(_ context.Context, req *apollopb.ListAdvisoriesRequest) (*apollopb.ListAdvisoriesResponse, error) { + if err := req.ValidateAll(); err != nil { + return nil, err + } + if req.Filters != nil { + req.Filters.IncludeUnpublished = nil + } + + page := utils.MinPage(req.Page) + limit := utils.MinLimit(req.Limit) + ret, err := s.db.GetAllAdvisories(req.Filters, page, limit) + if err != nil { + s.log.Errorf("could not get advisories, error: %s", err) + return nil, status.Error(codes.Internal, "failed to list advisories") + } + total := int64(0) + if len(ret) > 0 { + total = ret[0].Total + } + + var lastUpdatedPb *timestamppb.Timestamp + lastUpdated, err := s.db.GetMaxLastSync() + if err != nil && err != sql.ErrNoRows { + s.log.Errorf("could not get last sync time, error: %s", err) + return nil, status.Error(codes.Internal, "failed to get last updated") + } + if lastUpdated != nil { + lastUpdatedPb = timestamppb.New(*lastUpdated) + } + + return &apollopb.ListAdvisoriesResponse{ + Advisories: apollodb.DTOListAdvisoriesToPB(ret), + Total: total, + Page: page, + Size: limit, + LastUpdated: lastUpdatedPb, + }, nil +} + +func (s *Server) ListAdvisoriesRSS(_ context.Context, req *apollopb.ListAdvisoriesRSSRequest) (*httpbody.HttpBody, error) { + if err := req.ValidateAll(); err != nil { + return nil, err + } + if req.Filters == nil { + req.Filters = &apollopb.AdvisoryFilters{} + } + req.Filters.IncludeUnpublished = nil + + ret, err := s.db.GetAllAdvisories(req.Filters, 0, 25) + if err != nil { + s.log.Errorf("could not get advisories, error: %s", err) + return nil, status.Error(codes.Internal, "failed to list advisories") + } + total := int64(0) + if len(ret) > 0 { + total = ret[0].Total + } + + var updated time.Time + if total != 0 { + updated = ret[0].PublishedAt.Time + } + + feed := &feeds.Feed{ + Title: "Apollo Security RSS Feed", + Link: &feeds.Link{Href: s.homepage}, + Description: "Security advisories issued using Apollo Errata Management", + Author: &feeds.Author{ + Name: "Rocky Enterprise Software Foundation, Inc.", + Email: "releng@rockylinux.org", + }, + Updated: updated, + Items: []*feeds.Item{}, + Copyright: "(C) Rocky Enterprise Software Foundation, Inc. 2022. All rights reserved. 
CVE sources are copyright of their respective owners.", + } + if s.rssFeedTitle != "" { + feed.Title = s.rssFeedTitle + } + if s.rssFeedDescription != "" { + feed.Description = s.rssFeedDescription + } + for _, a := range ret { + dtoToPB := apollodb.DTOAdvisoryToPB(a) + item := &feeds.Item{ + Title: fmt.Sprintf("%s: %s", dtoToPB.Name, a.Synopsis), + Link: &feeds.Link{Href: fmt.Sprintf("%s/%s", s.homepage, dtoToPB.Name)}, + Description: a.Topic, + Id: fmt.Sprintf("%d", a.ID), + Created: a.PublishedAt.Time, + } + feed.Items = append(feed.Items, item) + } + + rss, err := feed.ToRss() + if err != nil { + s.log.Errorf("could not generate RSS feed, error: %s", err) + return nil, status.Error(codes.Internal, "failed to generate RSS feed") + } + + return &httpbody.HttpBody{ + ContentType: "application/rss+xml", + Data: []byte(rss), + }, nil +} + +func (s *Server) GetAdvisory(_ context.Context, req *apollopb.GetAdvisoryRequest) (*apollopb.GetAdvisoryResponse, error) { + if err := req.ValidateAll(); err != nil { + return nil, err + } + advisoryId := rpmutils.AdvisoryId().FindStringSubmatch(req.Id) + code := advisoryId[1] + year, err := strconv.Atoi(advisoryId[3]) + if err != nil { + return nil, status.Error(codes.InvalidArgument, "invalid year") + } + num, err := strconv.Atoi(advisoryId[4]) + if err != nil { + return nil, status.Error(codes.InvalidArgument, "invalid num") + } + + advisory, err := s.db.GetAdvisoryByCodeAndYearAndNum(code, year, num) + if err != nil { + logrus.Error(err) + } + if err != nil || !advisory.PublishedAt.Valid { + return nil, utils.CouldNotFindObject + } + + return &apollopb.GetAdvisoryResponse{ + Advisory: apollodb.DTOAdvisoryToPB(advisory), + }, nil +} diff --git a/secparse/admin/impl/server.go b/apollo/impl/v1/server.go similarity index 67% rename from secparse/admin/impl/server.go rename to apollo/impl/v1/server.go index a1a6c959..165179a8 100644 --- a/secparse/admin/impl/server.go +++ b/apollo/impl/v1/server.go @@ -28,50 +28,41 @@ // ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE // POSSIBILITY OF SUCH DAMAGE. 
-package impl +package apolloimpl import ( "context" - hydraclient "github.com/ory/hydra-client-go/client" "github.com/sirupsen/logrus" + "github.com/spf13/viper" "google.golang.org/grpc" - "net/url" - secparseadminpb "peridot.resf.org/secparse/admin/proto/v1" - "peridot.resf.org/secparse/db" - "peridot.resf.org/servicecatalog" + apollodb "peridot.resf.org/apollo/db" + apollopb "peridot.resf.org/apollo/pb" + commonpb "peridot.resf.org/common" "peridot.resf.org/utils" ) type Server struct { - secparseadminpb.UnimplementedSecparseAdminServer + apollopb.UnimplementedApolloServiceServer - log *logrus.Logger - db db.Access - hydra *hydraclient.OryHydra + log *logrus.Logger + db apollodb.Access + rssFeedTitle string + rssFeedDescription string + homepage string } -func NewServer(db db.Access) *Server { - publicURL, err := url.Parse(servicecatalog.HydraPublic()) - if err != nil { - logrus.Fatalf("failed to parse hydra public url: %s", err) - } - - hydraSDK := hydraclient.NewHTTPClientWithConfig(nil, &hydraclient.TransportConfig{ - Schemes: []string{publicURL.Scheme}, - Host: publicURL.Host, - BasePath: publicURL.Path, - }) - +func NewServer(db apollodb.Access) *Server { return &Server{ - log: logrus.New(), - db: db, - hydra: hydraSDK, + log: logrus.New(), + db: db, + rssFeedTitle: "RESF Errata Feed", + rssFeedDescription: "Advisories issued by the Rocky Enterprise Software Foundation", + homepage: viper.GetString("homepage"), } } func (s *Server) interceptor(ctx context.Context, req interface{}, usi *grpc.UnaryServerInfo, handler grpc.UnaryHandler) (interface{}, error) { n := utils.EndInterceptor - n = utils.AuthInterceptor(s.hydra, nil, []string{}, true, n) return n(ctx, req, usi, handler) } @@ -82,18 +73,22 @@ func (s *Server) Run() { Interceptor: s.interceptor, }, func(r *utils.Register) { - err := secparseadminpb.RegisterSecparseAdminHandlerFromEndpoint( - r.Context, - r.Mux, - r.Endpoint, - r.Options, - ) - if err != nil { - s.log.Fatalf("could not register handler - %s", err) + endpoints := []utils.GrpcEndpointRegister{ + commonpb.RegisterHealthCheckServiceHandlerFromEndpoint, + apollopb.RegisterApolloServiceHandlerFromEndpoint, + } + + for _, endpoint := range endpoints { + err := endpoint(r.Context, r.Mux, r.Endpoint, r.Options) + if err != nil { + s.log.Fatalf("could not register handler - %v", err) + } } }, func(r *utils.RegisterServer) { - secparseadminpb.RegisterSecparseAdminServer(r.Server, s) + commonpb.RegisterHealthCheckServiceServer(r.Server, &utils.HealthServer{}) + + apollopb.RegisterApolloServiceServer(r.Server, s) }, ) diff --git a/secparse/migrate/20210702021142_create_short_codes.down.sql b/apollo/migrate/20210702021142_create_short_codes.down.sql similarity index 100% rename from secparse/migrate/20210702021142_create_short_codes.down.sql rename to apollo/migrate/20210702021142_create_short_codes.down.sql diff --git a/secparse/migrate/20210702021142_create_short_codes.up.sql b/apollo/migrate/20210702021142_create_short_codes.up.sql similarity index 96% rename from secparse/migrate/20210702021142_create_short_codes.up.sql rename to apollo/migrate/20210702021142_create_short_codes.up.sql index d4d11bf8..35cd61b0 100644 --- a/secparse/migrate/20210702021142_create_short_codes.up.sql +++ b/apollo/migrate/20210702021142_create_short_codes.up.sql @@ -34,8 +34,6 @@ create table short_codes ( code text not null primary key, mode numeric not null, - mirror_from_date timestamp, - redhat_product_prefix text, created_at timestamp default now() not null, archived_at timestamp diff 
--git a/secparse/migrate/20210702041952_create_products.down.sql b/apollo/migrate/20210702021143_create_products.down.sql similarity index 100% rename from secparse/migrate/20210702041952_create_products.down.sql rename to apollo/migrate/20210702021143_create_products.down.sql diff --git a/secparse/migrate/20210702041952_create_products.up.sql b/apollo/migrate/20210702021143_create_products.up.sql similarity index 71% rename from secparse/migrate/20210702041952_create_products.up.sql rename to apollo/migrate/20210702021143_create_products.up.sql index 64cef3bf..16f79a78 100644 --- a/secparse/migrate/20210702041952_create_products.up.sql +++ b/apollo/migrate/20210702021143_create_products.up.sql @@ -32,11 +32,20 @@ create table products ( - id bigserial primary key, - name text not null, - current_full_version text not null, - redhat_major_version numeric, - short_code_code text references short_codes (code) not null, - archs text[] not null, - eol_at timestamp + id bigserial primary key, + name text not null, + current_full_version text not null, + redhat_major_version numeric, + short_code_code text references short_codes (code) not null, + archs text[] not null, + mirror_from_date timestamp, + redhat_product_prefix text, + cpe text, + eol_at timestamp, + + build_system text not null, + build_system_endpoint text not null, + koji_compose text, + koji_module_compose text, + peridot_project_id text ) diff --git a/secparse/migrate/20210702041656_create_advisories.down.sql b/apollo/migrate/20210702041656_create_advisories.down.sql similarity index 100% rename from secparse/migrate/20210702041656_create_advisories.down.sql rename to apollo/migrate/20210702041656_create_advisories.down.sql diff --git a/secparse/migrate/20210702041656_create_advisories.up.sql b/apollo/migrate/20210702041656_create_advisories.up.sql similarity index 66% rename from secparse/migrate/20210702041656_create_advisories.up.sql rename to apollo/migrate/20210702041656_create_advisories.up.sql index ab0e944e..eb65c454 100644 --- a/secparse/migrate/20210702041656_create_advisories.up.sql +++ b/apollo/migrate/20210702041656_create_advisories.up.sql @@ -32,20 +32,21 @@ create table advisories ( - id bigserial primary key not null, - created_at timestamp default now() not null, + id bigserial primary key not null, + created_at timestamp default now() not null, - year numeric not null, - num numeric not null, + year numeric not null, + num numeric not null, - synopsis text not null, - topic text not null, - severity numeric not null, - type numeric not null, - description text not null, - solution text null, + synopsis text not null, + topic text not null, + severity numeric not null, + type numeric not null, + description text not null, + solution text null, - redhat_issued_at timestamp null, - short_code_code text references short_codes (code) not null, - published_at timestamp null + redhat_issued_at timestamp null, + short_code_code text references short_codes (code) not null, + reboot_suggested bool default false not null, + published_at timestamp null ) diff --git a/secparse/migrate/20210702041659_create_cves.down.sql b/apollo/migrate/20210702041659_create_cves.down.sql similarity index 100% rename from secparse/migrate/20210702041659_create_cves.down.sql rename to apollo/migrate/20210702041659_create_cves.down.sql diff --git a/secparse/migrate/20210702041659_create_cves.up.sql b/apollo/migrate/20210702041659_create_cves.up.sql similarity index 96% rename from secparse/migrate/20210702041659_create_cves.up.sql rename to 
apollo/migrate/20210702041659_create_cves.up.sql index 1b41d897..eb99444e 100644 --- a/secparse/migrate/20210702041659_create_cves.up.sql +++ b/apollo/migrate/20210702041659_create_cves.up.sql @@ -34,7 +34,6 @@ create table cves ( id text primary key, created_at timestamp default now() not null, - state numeric not null, short_code_code text references short_codes (code) not null, source_by text, diff --git a/secparse/migrate/20210702041701_create_fixes.down.sql b/apollo/migrate/20210702041701_create_fixes.down.sql similarity index 100% rename from secparse/migrate/20210702041701_create_fixes.down.sql rename to apollo/migrate/20210702041701_create_fixes.down.sql diff --git a/secparse/migrate/20210702041701_create_fixes.up.sql b/apollo/migrate/20210702041701_create_fixes.up.sql similarity index 93% rename from secparse/migrate/20210702041701_create_fixes.up.sql rename to apollo/migrate/20210702041701_create_fixes.up.sql index 4451353c..e108b73a 100644 --- a/secparse/migrate/20210702041701_create_fixes.up.sql +++ b/apollo/migrate/20210702041701_create_fixes.up.sql @@ -32,7 +32,9 @@ create table fixes ( - id bigserial primary key, - ticket text, - description text + id bigserial primary key, + ticket text, + source_by text, + source_link text, + description text ) diff --git a/secparse/migrate/20210702041756_create_advisory_references.down.sql b/apollo/migrate/20210702041756_create_advisory_references.down.sql similarity index 100% rename from secparse/migrate/20210702041756_create_advisory_references.down.sql rename to apollo/migrate/20210702041756_create_advisory_references.down.sql diff --git a/secparse/migrate/20210702041756_create_advisory_references.up.sql b/apollo/migrate/20210702041756_create_advisory_references.up.sql similarity index 100% rename from secparse/migrate/20210702041756_create_advisory_references.up.sql rename to apollo/migrate/20210702041756_create_advisory_references.up.sql diff --git a/secparse/migrate/20210702041956_create_affected_products.down.sql b/apollo/migrate/20210702041956_create_affected_products.down.sql similarity index 100% rename from secparse/migrate/20210702041956_create_affected_products.down.sql rename to apollo/migrate/20210702041956_create_affected_products.down.sql diff --git a/secparse/migrate/20210702041956_create_affected_products.up.sql b/apollo/migrate/20210702041956_create_affected_products.up.sql similarity index 100% rename from secparse/migrate/20210702041956_create_affected_products.up.sql rename to apollo/migrate/20210702041956_create_affected_products.up.sql diff --git a/secparse/migrate/20210702043031_create_build_references.down.sql b/apollo/migrate/20210702043031_create_build_references.down.sql similarity index 100% rename from secparse/migrate/20210702043031_create_build_references.down.sql rename to apollo/migrate/20210702043031_create_build_references.down.sql diff --git a/secparse/migrate/20210702043031_create_build_references.up.sql b/apollo/migrate/20210702043031_create_build_references.up.sql similarity index 81% rename from secparse/migrate/20210702043031_create_build_references.up.sql rename to apollo/migrate/20210702043031_create_build_references.up.sql index 84b3a580..ae81e888 100644 --- a/secparse/migrate/20210702043031_create_build_references.up.sql +++ b/apollo/migrate/20210702043031_create_build_references.up.sql @@ -32,11 +32,12 @@ create table build_references ( - id bigserial primary key, - affected_product_id bigint references affected_products (id), - rpm text not null, - src_rpm text not null, - cve_id text 
references cves (id) not null, - koji_id text, - peridot_id text + id bigserial primary key, + affected_product_id bigint references affected_products (id), + rpm text not null, + src_rpm text not null, + cve_id text references cves (id) not null, + sha256_sum text not null, + koji_id text, + peridot_id text ) diff --git a/secparse/migrate/20210703202420_create_mirror_state.down.sql b/apollo/migrate/20210703202420_create_mirror_state.down.sql similarity index 100% rename from secparse/migrate/20210703202420_create_mirror_state.down.sql rename to apollo/migrate/20210703202420_create_mirror_state.down.sql diff --git a/secparse/migrate/20210703202420_create_mirror_state.up.sql b/apollo/migrate/20210703202420_create_mirror_state.up.sql similarity index 93% rename from secparse/migrate/20210703202420_create_mirror_state.up.sql rename to apollo/migrate/20210703202420_create_mirror_state.up.sql index 8021ff89..49f66d94 100644 --- a/secparse/migrate/20210703202420_create_mirror_state.up.sql +++ b/apollo/migrate/20210703202420_create_mirror_state.up.sql @@ -32,6 +32,7 @@ create table mirror_state ( - short_code_code text references short_codes (code) primary key, - last_sync timestamp + short_code_code text references short_codes (code) primary key, + last_sync timestamp, + errata_after timestamp ); diff --git a/secparse/migrate/20210711014759_create_advisory_cves.down.sql b/apollo/migrate/20210711014759_create_advisory_cves.down.sql similarity index 100% rename from secparse/migrate/20210711014759_create_advisory_cves.down.sql rename to apollo/migrate/20210711014759_create_advisory_cves.down.sql diff --git a/secparse/migrate/20210711014759_create_advisory_cves.up.sql b/apollo/migrate/20210711014759_create_advisory_cves.up.sql similarity index 100% rename from secparse/migrate/20210711014759_create_advisory_cves.up.sql rename to apollo/migrate/20210711014759_create_advisory_cves.up.sql diff --git a/secparse/migrate/20210711020255_create_advisory_fixes.down.sql b/apollo/migrate/20210711020255_create_advisory_fixes.down.sql similarity index 100% rename from secparse/migrate/20210711020255_create_advisory_fixes.down.sql rename to apollo/migrate/20210711020255_create_advisory_fixes.down.sql diff --git a/secparse/migrate/20210711020255_create_advisory_fixes.up.sql b/apollo/migrate/20210711020255_create_advisory_fixes.up.sql similarity index 100% rename from secparse/migrate/20210711020255_create_advisory_fixes.up.sql rename to apollo/migrate/20210711020255_create_advisory_fixes.up.sql diff --git a/secparse/migrate/20210713031253_create_ignored_upstream_packages.down.sql b/apollo/migrate/20210713031253_create_ignored_upstream_packages.down.sql similarity index 100% rename from secparse/migrate/20210713031253_create_ignored_upstream_packages.down.sql rename to apollo/migrate/20210713031253_create_ignored_upstream_packages.down.sql diff --git a/secparse/migrate/20210713031253_create_ignored_upstream_packages.up.sql b/apollo/migrate/20210713031253_create_ignored_upstream_packages.up.sql similarity index 91% rename from secparse/migrate/20210713031253_create_ignored_upstream_packages.up.sql rename to apollo/migrate/20210713031253_create_ignored_upstream_packages.up.sql index e06709bc..6f94701d 100644 --- a/secparse/migrate/20210713031253_create_ignored_upstream_packages.up.sql +++ b/apollo/migrate/20210713031253_create_ignored_upstream_packages.up.sql @@ -32,7 +32,7 @@ create table ignored_upstream_packages ( - id bigserial primary key, - short_code_code text references short_codes (code) not null, - package 
text not null + id bigserial primary key, + product_id bigint references products (id) not null, + package text not null ); diff --git a/secparse/migrate/20210717040945_create_advisory_rpms.down.sql b/apollo/migrate/20210717040945_create_advisory_rpms.down.sql similarity index 100% rename from secparse/migrate/20210717040945_create_advisory_rpms.down.sql rename to apollo/migrate/20210717040945_create_advisory_rpms.down.sql diff --git a/secparse/migrate/20210717040945_create_advisory_rpms.up.sql b/apollo/migrate/20210717040945_create_advisory_rpms.up.sql similarity index 89% rename from secparse/migrate/20210717040945_create_advisory_rpms.up.sql rename to apollo/migrate/20210717040945_create_advisory_rpms.up.sql index ee63fd8e..198441a1 100644 --- a/secparse/migrate/20210717040945_create_advisory_rpms.up.sql +++ b/apollo/migrate/20210717040945_create_advisory_rpms.up.sql @@ -32,8 +32,9 @@ create table advisory_rpms ( - advisory_id bigint references advisories (id) not null, - name text not null, + advisory_id bigint references advisories (id) not null, + name text not null, + product_id bigint references products (id) not null, - unique (advisory_id, name) + unique (advisory_id, name) ) diff --git a/secparse/migrate/20210803052113_updateinfo_history.down.sql b/apollo/migrate/20210803052113_updateinfo_history.down.sql similarity index 100% rename from secparse/migrate/20210803052113_updateinfo_history.down.sql rename to apollo/migrate/20210803052113_updateinfo_history.down.sql diff --git a/secparse/migrate/20210803052113_updateinfo_history.up.sql b/apollo/migrate/20210803052113_updateinfo_history.up.sql similarity index 100% rename from secparse/migrate/20210803052113_updateinfo_history.up.sql rename to apollo/migrate/20210803052113_updateinfo_history.up.sql diff --git a/apollo/migrate/20220913121538_create_reboot_suggested_packages.down.sql b/apollo/migrate/20220913121538_create_reboot_suggested_packages.down.sql new file mode 100644 index 00000000..66b788f5 --- /dev/null +++ b/apollo/migrate/20220913121538_create_reboot_suggested_packages.down.sql @@ -0,0 +1 @@ +drop table reboot_suggested_packages; diff --git a/apollo/migrate/20220913121538_create_reboot_suggested_packages.up.sql b/apollo/migrate/20220913121538_create_reboot_suggested_packages.up.sql new file mode 100644 index 00000000..d3fd23c5 --- /dev/null +++ b/apollo/migrate/20220913121538_create_reboot_suggested_packages.up.sql @@ -0,0 +1,6 @@ +create table reboot_suggested_packages +( + created_at timestamp default now() not null, + + name text unique not null +) diff --git a/apollo/migrate/20220921231425_add_indexes.down.sql b/apollo/migrate/20220921231425_add_indexes.down.sql new file mode 100644 index 00000000..458b43b8 --- /dev/null +++ b/apollo/migrate/20220921231425_add_indexes.down.sql @@ -0,0 +1,9 @@ +drop index affected_products_advisoryx; +drop index affected_products_product_idx; +drop index advisory_fixes_advisory_idx; +drop index advisory_fixes_fix_idx; +drop index advisory_cves_advisory_idx; +drop index advisory_cves_cve_idx; +drop index advisory_references_advisory_idx; +drop index advisory_rpms_advisory_idx; +drop index advisory_rpms_product_idx; diff --git a/apollo/migrate/20220921231425_add_indexes.up.sql b/apollo/migrate/20220921231425_add_indexes.up.sql new file mode 100644 index 00000000..1232742f --- /dev/null +++ b/apollo/migrate/20220921231425_add_indexes.up.sql @@ -0,0 +1,9 @@ +create index affected_products_advisoryx on affected_products (advisory); +create index affected_products_product_idx on 
affected_products (product_id); +create index advisory_fixes_advisory_idx on advisory_fixes (advisory_id); +create index advisory_fixes_fix_idx on advisory_fixes (fix_id); +create index advisory_cves_advisory_idx on advisory_cves (advisory_id); +create index advisory_cves_cve_idx on advisory_cves (cve_id); +create index advisory_references_advisory_idx on advisory_references (advisory_id); +create index advisory_rpms_advisory_idx on advisory_rpms (advisory_id); +create index advisory_rpms_product_idx on advisory_rpms (product_id); diff --git a/apollo/migrate/20221020182658_add_content_to_cves.down.sql b/apollo/migrate/20221020182658_add_content_to_cves.down.sql new file mode 100644 index 00000000..41914a84 --- /dev/null +++ b/apollo/migrate/20221020182658_add_content_to_cves.down.sql @@ -0,0 +1 @@ +alter table cves drop column content; diff --git a/apollo/migrate/20221020182658_add_content_to_cves.up.sql b/apollo/migrate/20221020182658_add_content_to_cves.up.sql new file mode 100644 index 00000000..557265d3 --- /dev/null +++ b/apollo/migrate/20221020182658_add_content_to_cves.up.sql @@ -0,0 +1 @@ +alter table cves add column content jsonb; diff --git a/secparse/migrate/BUILD b/apollo/migrate/BUILD similarity index 100% rename from secparse/migrate/BUILD rename to apollo/migrate/BUILD diff --git a/secparse/proto/v1/BUILD.bazel b/apollo/proto/v1/BUILD.bazel similarity index 70% rename from secparse/proto/v1/BUILD.bazel rename to apollo/proto/v1/BUILD.bazel index cf4a0e1e..75b73f5c 100644 --- a/secparse/proto/v1/BUILD.bazel +++ b/apollo/proto/v1/BUILD.bazel @@ -1,48 +1,58 @@ -load("@io_bazel_rules_go//go:def.bzl", "go_library") load("@rules_proto//proto:defs.bzl", "proto_library") +load("@io_bazel_rules_go//go:def.bzl", "go_library") load("@io_bazel_rules_go//proto:def.bzl", "go_proto_library") load("@com_github_grpc_ecosystem_grpc_gateway_v2//protoc-gen-openapiv2:defs.bzl", "protoc_gen_openapiv2") load("@openapi_tools_generator_bazel//:defs.bzl", "openapi_generator") proto_library( - name = "secparsepb_proto", + name = "apollopb_proto", srcs = [ "advisory.proto", - "secparse.proto", + "affected_product.proto", + "apollo.proto", + "build.proto", + "cve.proto", + "fix.proto", + "short_code.proto", ], visibility = ["//visibility:public"], deps = [ - "//proto:commonpb_proto", "@com_envoyproxy_protoc_gen_validate//validate:validate_proto", - "@com_github_grpc_ecosystem_grpc_gateway_v2//protoc-gen-openapiv2/options:options_proto", "@com_google_protobuf//:timestamp_proto", "@com_google_protobuf//:wrappers_proto", "@go_googleapis//google/api:annotations_proto", + "@go_googleapis//google/api:httpbody_proto", ], ) go_proto_library( - name = "secparsepb_go_proto", + name = "apollopb_go_proto", compilers = [ "//:go_apiv2", "//:go_grpc", "//:go_validate", "@com_github_grpc_ecosystem_grpc_gateway_v2//protoc-gen-grpc-gateway:go_gen_grpc_gateway", ], - importpath = "peridot.resf.org/secparse/proto/v1", - proto = ":secparsepb_proto", + importpath = "peridot.resf.org/apollo/pb", + proto = ":apollopb_proto", visibility = ["//visibility:public"], deps = [ - "//proto:common", "@com_envoyproxy_protoc_gen_validate//validate:validate_go_proto", - "@com_github_grpc_ecosystem_grpc_gateway_v2//protoc-gen-openapiv2/options:options_go_proto", "@go_googleapis//google/api:annotations_go_proto", + "@go_googleapis//google/api:httpbody_go_proto", ], ) +go_library( + name = "pb", + embed = [":apollopb_go_proto"], + importpath = "peridot.resf.org/apollo/pb", + visibility = ["//visibility:public"], +) + protoc_gen_openapiv2( 
name = "openapi", - proto = ":secparsepb_proto", + proto = ":apollopb_proto", simple_operation_ids = True, single_output = True, ) @@ -53,10 +63,3 @@ openapi_generator( spec = ":openapi", visibility = ["//visibility:public"], ) - -go_library( - name = "proto", - embed = [":secparsepb_go_proto"], - importpath = "peridot.resf.org/secparse/proto/v1", - visibility = ["//visibility:public"], -) diff --git a/secparse/proto/v1/advisory.proto b/apollo/proto/v1/advisory.proto similarity index 52% rename from secparse/proto/v1/advisory.proto rename to apollo/proto/v1/advisory.proto index fe5b9c68..7dc1a279 100644 --- a/secparse/proto/v1/advisory.proto +++ b/apollo/proto/v1/advisory.proto @@ -1,22 +1,27 @@ syntax = "proto3"; -package resf.secparse; +package resf.apollo.v1; import "google/protobuf/timestamp.proto"; import "google/protobuf/wrappers.proto"; -import "validate/validate.proto"; +import "apollo/proto/v1/cve.proto"; +import "apollo/proto/v1/fix.proto"; -option go_package = "peridot.resf.org/secparse/proto/v1;secparsepb"; +option go_package = "peridot.resf.org/apollo/pb;apollopb"; + +message RPMs { + repeated string nvras = 1; +} // Advisory // // Product advisory message Advisory { enum Type { - UnknownType = 0; - Security = 1; - BugFix = 2; - Enhancement = 3; + TYPE_UNKNOWN = 0; + TYPE_SECURITY = 1; + TYPE_BUGFIX = 2; + TYPE_ENHANCEMENT = 3; } // Type @@ -42,11 +47,11 @@ message Advisory { string synopsis = 4; enum Severity { - UnknownSeverity = 0; - Low = 1; - Moderate = 2; - Important = 3; - Critical = 4; + SEVERITY_UNKNOWN = 0; + SEVERITY_LOW = 1; + SEVERITY_MODERATE = 2; + SEVERITY_IMPORTANT = 3; + SEVERITY_CRITICAL = 4; } // Severity @@ -78,12 +83,12 @@ message Advisory { // Fixes // // A list of tickets from upstream bug trackers - repeated string fixes = 10; + repeated Fix fixes = 10; // CVEs // // A list of CVEs assigned to this advisory - repeated string cves = 11; + repeated CVE cves = 11; // References // @@ -98,60 +103,10 @@ message Advisory { // RPMs // // Affected RPMs - repeated string rpms = 14; -} + map rpms = 14; -// ListAdvisoriesRequest -// -// Request body for `ListAdvisories` -// All fields are optional -message ListAdvisoriesRequest { - // Product + // Reboot suggested // - // The product to fetch advisories for - // For example: Rocky Linux, RL or Rocky - string product = 1; - - // Version - // - // The version to fetch advisories for - // For example: 8.3 or 8.4 - string version = 2; - - // Before - // - // Advisories published before timestamp - google.protobuf.Timestamp before = 3; - - // After - // - // Advisories published after timestamp - google.protobuf.Timestamp after = 4; -} - -// ListAdvisoriesResponse -// -// Response body for `ListAdvisories` -message ListAdvisoriesResponse { - repeated Advisory advisories = 1; -} - -// GetAdvisoryRequest -// -// Request body for `GetAdvisory` -message GetAdvisoryRequest { - // ID - // - // Errata ID - // Example: RLSA:2021-1515 - string id = 1 [(validate.rules).string = { - pattern: "^(.+)([SEB]A)-([0-9]{4}):([0-9]+)$", - }]; -} - -// GetAdvisoryResponse -// -// Response body for `GetAdvisory` -message GetAdvisoryResponse { - Advisory advisory = 1; + // Whether a system reboot should be suggested after applying this advisory + bool reboot_suggested = 15; } diff --git a/apollo/proto/v1/affected_product.proto b/apollo/proto/v1/affected_product.proto new file mode 100644 index 00000000..ca3458c4 --- /dev/null +++ b/apollo/proto/v1/affected_product.proto @@ -0,0 +1,41 @@ +syntax = "proto3"; + +package resf.apollo.v1; + 
+import "google/protobuf/wrappers.proto"; + +option go_package = "peridot.resf.org/apollo/pb;apollopb"; + +message AffectedProduct { + int64 product_id = 1; + google.protobuf.StringValue cve_id = 2; + string version = 3; + + enum State { + STATE_UNKNOWN = 0; + // CVE only affects downstream + STATE_UNDER_INVESTIGATION_DOWNSTREAM = 1; + // CVE affecting upstream and a fix still hasn't been issued + STATE_UNDER_INVESTIGATION_UPSTREAM = 2; + // CVE has been fixed upstream + STATE_FIXED_UPSTREAM = 3; + // CVE has been fixed downstream + // At this stage the CVE can be included in errata + STATE_FIXED_DOWNSTREAM = 4; + // CVE will NOT be fixed upstream + STATE_WILL_NOT_FIX_UPSTREAM = 5; + // CVE will NOT be fixed downstream + // This will probably never happen with Core, but may happen for SIGs + STATE_WILL_NOT_FIX_DOWNSTREAM = 6; + // CVE is out of support scope + STATE_OUT_OF_SUPPORT_SCOPE = 7; + // CVE affects product and upstream is working on a fix + STATE_AFFECTED_UPSTREAM = 8; + // CVE affects product and a fix is being worked out + STATE_AFFECTED_DOWNSTREAM = 9; + } + State state = 4; + + string package = 5; + google.protobuf.StringValue advisory = 6; +} diff --git a/apollo/proto/v1/apollo.proto b/apollo/proto/v1/apollo.proto new file mode 100644 index 00000000..7923444f --- /dev/null +++ b/apollo/proto/v1/apollo.proto @@ -0,0 +1,161 @@ +syntax = "proto3"; + +package resf.apollo.v1; + +import "google/api/annotations.proto"; +import "google/api/httpbody.proto"; +import "google/protobuf/wrappers.proto"; +import "google/protobuf/timestamp.proto"; +import "validate/validate.proto"; +import "apollo/proto/v1/advisory.proto"; + +option go_package = "peridot.resf.org/apollo/pb;apollopb"; + +service ApolloService { + // ListAdvisories + // + // Return a list of advisories by given filters. 
+ // No filters returns all advisories + // This method is paginated + rpc ListAdvisories (ListAdvisoriesRequest) returns (ListAdvisoriesResponse) { + option (google.api.http) = { + get: "/v2/advisories" + }; + } + + // ListAdvisoriesRSS + // + // Same as ListAdvisories but returns an RSS feed + // Only returns 25 latest advisories + // Supports filters + rpc ListAdvisoriesRSS (ListAdvisoriesRSSRequest) returns (google.api.HttpBody) { + option (google.api.http) = { + get: "/v2/advisories:rss" + }; + } + + // GetAdvisory + // + // Returns an advisory with given ID if found, else returns NotFound + rpc GetAdvisory (GetAdvisoryRequest) returns (GetAdvisoryResponse) { + option (google.api.http) = { + get: "/v2/advisories/{id=*}" + }; + } +} + +message AdvisoryFilters { + // Product + // + // The product to fetch advisories for + // For example: Rocky Linux 8 + google.protobuf.StringValue product = 1; + + // Before + // + // Advisories published before timestamp + google.protobuf.Timestamp before = 2; + + // After + // + // Advisories published after timestamp + google.protobuf.Timestamp after = 3; + + // Include unpublished + // + // Whether to include unpublished advisories + // apollo/impl never respects this, but internal services + // may rely on this + google.protobuf.BoolValue include_unpublished = 4; + + // CVE + // + // Only return advisories with given CVE + google.protobuf.StringValue cve = 5; + + // Synopsis + // + // Only return advisories if synopsis contains given text + google.protobuf.StringValue synopsis = 6; + + // Include RPMs + // + // Includes RPMs in list response (slow) + google.protobuf.BoolValue include_rpms = 7; + + // Keyword + // + // Searches all fields for given keyword + google.protobuf.StringValue keyword = 8; + + // Severity + // + // Only return advisories with given severity + Advisory.Severity severity = 9; + + // Type + // + // Only return advisories with given type + Advisory.Type type = 10; +} + +// ListAdvisoriesRequest +// +// Request body for `ListAdvisories` +// All fields are optional +message ListAdvisoriesRequest { + // Filters for the given query + // No filters returns all advisories + AdvisoryFilters filters = 1; + + int32 page = 2; + int32 limit = 3 [(validate.rules).int32.lte = 100]; +} + +// ListAdvisoriesResponse +// +// Response body for `ListAdvisories` +message ListAdvisoriesResponse { + repeated Advisory advisories = 1; + + // Total packages from server + int64 total = 2; + + // Limit from request + int32 size = 3; + + // Current page + int32 page = 4; + + // Last updated + google.protobuf.Timestamp last_updated = 5; +} + +// ListAdvisoriesRSSRequest +// Request body for `ListAdvisoriesRSS` +// All fields are optional +message ListAdvisoriesRSSRequest { + // Filters for the given query + // No filters returns all advisories + AdvisoryFilters filters = 1; +} + +// GetAdvisoryRequest +// +// Request body for `GetAdvisory` +message GetAdvisoryRequest { + // ID + // + // Errata ID + // Example: RLSA:2021-1515 + string id = 1 [(validate.rules).string = { + pattern: "^(.+)([SEB]A)-([0-9]{4}):([0-9]+)$", + }]; +} + +// GetAdvisoryResponse +// +// Response body for `GetAdvisory` +message GetAdvisoryResponse { + Advisory advisory = 1; +} diff --git a/apollo/proto/v1/build.proto b/apollo/proto/v1/build.proto new file mode 100644 index 00000000..1ff48c5c --- /dev/null +++ b/apollo/proto/v1/build.proto @@ -0,0 +1,13 @@ +syntax = "proto3"; + +package resf.apollo.v1; + +option go_package = "peridot.resf.org/apollo/pb;apollopb"; + +enum BuildStatus { + 
BUILD_STATUS_UNKNOWN = 0; + BUILD_STATUS_FIXED = 1; + BUILD_STATUS_NOT_FIXED = 2; + BUILD_STATUS_WILL_NOT_FIX = 3; + BUILD_STATUS_SKIP = 4; +} diff --git a/apollo/proto/v1/cve.proto b/apollo/proto/v1/cve.proto new file mode 100644 index 00000000..f9c8aae1 --- /dev/null +++ b/apollo/proto/v1/cve.proto @@ -0,0 +1,28 @@ +syntax = "proto3"; + +package resf.apollo.v1; + +import "google/protobuf/wrappers.proto"; + +option go_package = "peridot.resf.org/apollo/pb;apollopb"; + +message CVE { + string name = 1; + + google.protobuf.StringValue source_by = 2; + google.protobuf.StringValue source_link = 3; + + google.protobuf.StringValue cvss3_scoring_vector = 4; + google.protobuf.StringValue cvss3_base_score = 5; + google.protobuf.StringValue cwe = 6; +} + +message ListUnresolvedCVEsRequest {} +message ListUnresolvedCVEsResponse { + repeated CVE cves = 1; +} + +message ListFixedCVEsRequest {} +message ListFixedCVEsResponse { + repeated CVE cves = 1; +} diff --git a/apollo/proto/v1/fix.proto b/apollo/proto/v1/fix.proto new file mode 100644 index 00000000..6c0e69ed --- /dev/null +++ b/apollo/proto/v1/fix.proto @@ -0,0 +1,14 @@ +syntax = "proto3"; + +package resf.apollo.v1; + +import "google/protobuf/wrappers.proto"; + +option go_package = "peridot.resf.org/apollo/pb;apollopb"; + +message Fix { + google.protobuf.StringValue ticket = 1; + google.protobuf.StringValue source_by = 2; + google.protobuf.StringValue source_link = 3; + google.protobuf.StringValue description = 4; +} diff --git a/apollo/proto/v1/short_code.proto b/apollo/proto/v1/short_code.proto new file mode 100644 index 00000000..8f7c9d44 --- /dev/null +++ b/apollo/proto/v1/short_code.proto @@ -0,0 +1,29 @@ +syntax = "proto3"; + +package resf.apollo.v1; + +option go_package = "peridot.resf.org/apollo/pb;apollopb"; + +message ShortCode { + // Code + // + // Full short code + string code = 1; + + enum Mode { + MODE_UNKNOWN = 0; + MODE_PUBLISH = 1; + MODE_MIRROR = 2; + } + // Mode + // + // Mode for short code + // Currently only publish and mirror is supported + Mode mode = 2; + + // Archived + // + // Whether the short code is archived or not + // An archived short code CANNOT be used to issue errata + bool archived = 3; +} diff --git a/secparse/rherrata/BUILD.bazel b/apollo/rherrata/BUILD.bazel similarity index 82% rename from secparse/rherrata/BUILD.bazel rename to apollo/rherrata/BUILD.bazel index 380a784d..af10f137 100644 --- a/secparse/rherrata/BUILD.bazel +++ b/apollo/rherrata/BUILD.bazel @@ -8,10 +8,10 @@ go_library( "errata.go", "mock.go", ], - importpath = "peridot.resf.org/secparse/rherrata", + importpath = "peridot.resf.org/apollo/rherrata", visibility = ["//visibility:public"], deps = [ - "//secparse/proto/v1:proto", + "//apollo/proto/v1:pb", "//vendor/github.com/PuerkitoBio/goquery", "//vendor/github.com/go-chi/chi", "//vendor/github.com/gocolly/colly/v2:colly", @@ -24,7 +24,7 @@ go_test( data = glob(["testdata/**"]), embed = [":rherrata"], deps = [ - "//secparse/proto/v1:proto", + "//apollo/proto/v1:pb", "//vendor/github.com/stretchr/testify/require", ], ) diff --git a/secparse/rherrata/api.go b/apollo/rherrata/api.go similarity index 92% rename from secparse/rherrata/api.go rename to apollo/rherrata/api.go index 324cb23a..4f638a1f 100644 --- a/secparse/rherrata/api.go +++ b/apollo/rherrata/api.go @@ -38,7 +38,7 @@ import ( type APIService interface { GetErrata(advisory string) (*Errata, error) - GetAdvisories(currentVersion string) ([]*CompactErrata, error) + GetAdvisories(currentVersion string, after *time.Time) 
([]*CompactErrata, error) } // API is the APIService implementation. Should not be used directly @@ -54,9 +54,9 @@ func NewClient() *API { client: &http.Client{ Timeout: 30 * time.Second, }, - userAgent: "secparse/rherrata/0.1", + userAgent: "apollo/rherrata/0.2", baseURLErrata: "https://access.redhat.com/errata", - baseURLAPI: "https://api.access.redhat.com/rs/search", + baseURLAPI: "https://access.redhat.com/hydra/rest/search/kcs", } } diff --git a/secparse/rherrata/compact_errata.go b/apollo/rherrata/compact_errata.go similarity index 86% rename from secparse/rherrata/compact_errata.go rename to apollo/rherrata/compact_errata.go index f09e140e..4e7a959f 100644 --- a/secparse/rherrata/compact_errata.go +++ b/apollo/rherrata/compact_errata.go @@ -35,10 +35,12 @@ import ( "fmt" "net/url" "strings" + "time" ) var internalAfterDates = map[string]string{ "8.4": "2021-04-29T00:00:00Z", + "9.0": "2022-05-17T00:00:00Z", } type CompactErrata struct { @@ -61,7 +63,7 @@ type internalAdvisoriesResponse struct { Response *internalAdvisoriesInnerResponse `json:"response"` } -func (a *API) GetAdvisories(currentVersion string) ([]*CompactErrata, error) { +func (a *API) GetAdvisories(currentVersion string, after *time.Time) ([]*CompactErrata, error) { req, err := a.newRequest("GET", a.baseURLAPI, nil) if err != nil { return nil, err @@ -71,12 +73,14 @@ func (a *API) GetAdvisories(currentVersion string) ([]*CompactErrata, error) { usableVersion := strings.Replace(currentVersion, ".", "%5C.", -1) fq2 := fmt.Sprintf("portal_product_filter:Red%%5C+Hat%%5C+Enterprise%%5C+Linux%%7C*%%7C%s%%7C*", usableVersion) var fq3 string - if afterDate := internalAfterDates[currentVersion]; afterDate != "" { + if after != nil { + fq3 = "&fq=" + url.QueryEscape(fmt.Sprintf("portal_publication_date:[%s TO NOW]", after.Format(time.RFC3339))) + } else if afterDate := internalAfterDates[currentVersion]; afterDate != "" { fq3 = "&fq=" + url.QueryEscape(fmt.Sprintf("portal_publication_date:[%s TO NOW]", afterDate)) } - req.URL.RawQuery = fmt.Sprintf("fq=%s&fq=%s%s&q=*:*&rows=1000&sort=portal_publication_date+desc&start=0", fq1, fq2, fq3) + req.URL.RawQuery = fmt.Sprintf("fq=%s&fq=%s%s&q=*:*&rows=10000&sort=portal_publication_date+desc&start=0", fq1, fq2, fq3) - req.Header.Set("Accept", "application/vnd.redhat.solr+json") + req.Header.Set("Accept", "application/json") res, err := a.client.Do(req) if err != nil { diff --git a/secparse/rherrata/errata.go b/apollo/rherrata/errata.go similarity index 92% rename from secparse/rherrata/errata.go rename to apollo/rherrata/errata.go index a5eb80a8..7269d5d4 100644 --- a/secparse/rherrata/errata.go +++ b/apollo/rherrata/errata.go @@ -35,7 +35,7 @@ import ( "fmt" "github.com/PuerkitoBio/goquery" "github.com/gocolly/colly/v2" - secparsepb "peridot.resf.org/secparse/proto/v1" + apollopb "peridot.resf.org/apollo/pb" "strings" "time" ) @@ -62,8 +62,8 @@ type UpdatedPackages struct { type Errata struct { Synopsis string - Type secparsepb.Advisory_Type - Severity secparsepb.Advisory_Severity + Type apollopb.Advisory_Type + Severity apollopb.Advisory_Severity Topic []string Description []string Solution []string @@ -178,9 +178,9 @@ func (a *API) GetErrata(advisory string) (*Errata, error) { c.OnHTML("div#type-severity", func(element *colly.HTMLElement) { typeSeverity := strings.Split(element.DOM.Find("p").Text(), ":") if typeSeverity[0] == "Product Enhancement Advisory" { - errata.Type = secparsepb.Advisory_Enhancement + errata.Type = apollopb.Advisory_TYPE_ENHANCEMENT } else if typeSeverity[0] == 
"Bug Fix Advisory" { - errata.Type = secparsepb.Advisory_BugFix + errata.Type = apollopb.Advisory_TYPE_BUGFIX } else { if len(typeSeverity) != 2 { err = errors.New("invalid type/severity") @@ -195,28 +195,28 @@ func (a *API) GetErrata(advisory string) (*Errata, error) { switch strings.TrimSpace(typeSplit[0]) { case "Security": - errata.Type = secparsepb.Advisory_Security + errata.Type = apollopb.Advisory_TYPE_SECURITY break case "BugFix": - errata.Type = secparsepb.Advisory_BugFix + errata.Type = apollopb.Advisory_TYPE_BUGFIX break case "Enhancement": - errata.Type = secparsepb.Advisory_Enhancement + errata.Type = apollopb.Advisory_TYPE_ENHANCEMENT break } switch strings.TrimSpace(typeSeverity[1]) { case "Low": - errata.Severity = secparsepb.Advisory_Low + errata.Severity = apollopb.Advisory_SEVERITY_LOW break case "Moderate": - errata.Severity = secparsepb.Advisory_Moderate + errata.Severity = apollopb.Advisory_SEVERITY_MODERATE break case "Important": - errata.Severity = secparsepb.Advisory_Important + errata.Severity = apollopb.Advisory_SEVERITY_IMPORTANT break case "Critical": - errata.Severity = secparsepb.Advisory_Critical + errata.Severity = apollopb.Advisory_SEVERITY_CRITICAL break } } diff --git a/secparse/rherrata/errata_test.go b/apollo/rherrata/errata_test.go similarity index 97% rename from secparse/rherrata/errata_test.go rename to apollo/rherrata/errata_test.go index ecb55775..0ad8c886 100644 --- a/secparse/rherrata/errata_test.go +++ b/apollo/rherrata/errata_test.go @@ -33,7 +33,7 @@ package rherrata import ( "github.com/stretchr/testify/require" "io/ioutil" - secparsepb "peridot.resf.org/secparse/proto/v1" + apollopb "peridot.resf.org/apollo/pb" "testing" ) @@ -53,7 +53,7 @@ func TestRHBA20212759(t *testing.T) { require.Nil(t, err) require.Equal(t, "firefox bugfix update", errata.Synopsis) - require.Equal(t, secparsepb.Advisory_BugFix, errata.Type) + require.Equal(t, apollopb.Advisory_TYPE_BUGFIX, errata.Type) require.Len(t, errata.Topic, 1) require.Equal(t, "An update for firefox is now available for Red Hat Enterprise Linux 8.", errata.Topic[0]) require.Len(t, errata.Description, 3) @@ -95,8 +95,8 @@ func TestRHBA20212743(t *testing.T) { require.Nil(t, err) require.Equal(t, "Important: firefox security update", errata.Synopsis) - require.Equal(t, secparsepb.Advisory_Security, errata.Type) - require.Equal(t, secparsepb.Advisory_Important, errata.Severity) + require.Equal(t, apollopb.Advisory_TYPE_SECURITY, errata.Type) + require.Equal(t, apollopb.Advisory_SEVERITY_IMPORTANT, errata.Severity) require.Len(t, errata.Topic, 2) require.Equal(t, "An update for firefox is now available for Red Hat Enterprise Linux 8.", errata.Topic[0]) require.Equal(t, "Red Hat Product Security has rated this update as having a security impact of Important. 
A Common Vulnerability Scoring System (CVSS) base score, which gives a detailed severity rating, is available for each vulnerability from the CVE link(s) in the References section.", errata.Topic[1]) diff --git a/secparse/rherrata/mock.go b/apollo/rherrata/mock.go similarity index 100% rename from secparse/rherrata/mock.go rename to apollo/rherrata/mock.go diff --git a/secparse/rherrata/testdata/RHBA-2021-2759.html b/apollo/rherrata/testdata/RHBA-2021-2759.html similarity index 100% rename from secparse/rherrata/testdata/RHBA-2021-2759.html rename to apollo/rherrata/testdata/RHBA-2021-2759.html diff --git a/secparse/rherrata/testdata/RHSA-2021-2743.html b/apollo/rherrata/testdata/RHSA-2021-2743.html similarity index 100% rename from secparse/rherrata/testdata/RHSA-2021-2743.html rename to apollo/rherrata/testdata/RHSA-2021-2743.html diff --git a/secparse/rhsecurity/BUILD.bazel b/apollo/rhsecurity/BUILD.bazel similarity index 90% rename from secparse/rhsecurity/BUILD.bazel rename to apollo/rhsecurity/BUILD.bazel index a08c7288..b2ed8b42 100644 --- a/secparse/rhsecurity/BUILD.bazel +++ b/apollo/rhsecurity/BUILD.bazel @@ -16,7 +16,7 @@ go_library( "response.go", "utils.go", ], - importpath = "peridot.resf.org/secparse/rhsecurity", + importpath = "peridot.resf.org/apollo/rhsecurity", visibility = ["//visibility:public"], deps = ["//vendor/golang.org/x/oauth2"], ) diff --git a/secparse/rhsecurity/README.md b/apollo/rhsecurity/README.md similarity index 100% rename from secparse/rhsecurity/README.md rename to apollo/rhsecurity/README.md diff --git a/secparse/rhsecurity/Red-Hat-Security-Data-API.yaml b/apollo/rhsecurity/Red-Hat-Security-Data-API.yaml similarity index 99% rename from secparse/rhsecurity/Red-Hat-Security-Data-API.yaml rename to apollo/rhsecurity/Red-Hat-Security-Data-API.yaml index 0d9d133d..b0231e3b 100644 --- a/secparse/rhsecurity/Red-Hat-Security-Data-API.yaml +++ b/apollo/rhsecurity/Red-Hat-Security-Data-API.yaml @@ -1,4 +1,6 @@ openapi: 3.0.0 +x-stoplight: + id: yjn90w5p8y4ly info: title: Red Hat Security Data API version: '1.0' @@ -63,7 +65,7 @@ paths: name: cwe description: CVEs with CWE - schema: - type: string + type: number in: query name: cvss_score description: CVEs with CVSS score greater than or equal to this value @@ -181,7 +183,7 @@ components: type: string minLength: 1 cvss_score: - type: string + type: number cvss_scoring_vector: type: string CWE: diff --git a/secparse/rhsecurity/api_default.go b/apollo/rhsecurity/api_default.go similarity index 95% rename from secparse/rhsecurity/api_default.go rename to apollo/rhsecurity/api_default.go index 9fdd8744..8f7e9d4b 100644 --- a/secparse/rhsecurity/api_default.go +++ b/apollo/rhsecurity/api_default.go @@ -61,12 +61,11 @@ type DefaultApi interface { type DefaultApiService service type ApiGetCveRequest struct { - ctx _context.Context + ctx _context.Context ApiService DefaultApi - cVE string + cVE string } - func (r ApiGetCveRequest) Execute() (CVEDetailed, *_nethttp.Response, error) { return r.ApiService.GetCveExecute(r) } @@ -81,8 +80,8 @@ func (r ApiGetCveRequest) Execute() (CVEDetailed, *_nethttp.Response, error) { func (a *DefaultApiService) GetCve(ctx _context.Context, cVE string) ApiGetCveRequest { return ApiGetCveRequest{ ApiService: a, - ctx: ctx, - cVE: cVE, + ctx: ctx, + cVE: cVE, } } @@ -167,21 +166,21 @@ func (a *DefaultApiService) GetCveExecute(r ApiGetCveRequest) (CVEDetailed, *_ne } type ApiGetCvesRequest struct { - ctx _context.Context - ApiService DefaultApi - before *string - after *string - ids 
*string - bug *string - advisory *string - severity *string - package_ *string - product *string - cwe *string - cvssScore *string - cvss3Score *string - page *float32 - perPage *float32 + ctx _context.Context + ApiService DefaultApi + before *string + after *string + ids *string + bug *string + advisory *string + severity *string + package_ *string + product *string + cwe *string + cvssScore *float32 + cvss3Score *string + page *float32 + perPage *float32 createdDaysAgo *float32 } @@ -221,7 +220,7 @@ func (r ApiGetCvesRequest) Cwe(cwe string) ApiGetCvesRequest { r.cwe = &cwe return r } -func (r ApiGetCvesRequest) CvssScore(cvssScore string) ApiGetCvesRequest { +func (r ApiGetCvesRequest) CvssScore(cvssScore float32) ApiGetCvesRequest { r.cvssScore = &cvssScore return r } @@ -255,7 +254,7 @@ func (r ApiGetCvesRequest) Execute() ([]CVE, *_nethttp.Response, error) { func (a *DefaultApiService) GetCves(ctx _context.Context) ApiGetCvesRequest { return ApiGetCvesRequest{ ApiService: a, - ctx: ctx, + ctx: ctx, } } diff --git a/secparse/rhsecurity/client.go b/apollo/rhsecurity/client.go similarity index 100% rename from secparse/rhsecurity/client.go rename to apollo/rhsecurity/client.go diff --git a/secparse/rhsecurity/configuration.go b/apollo/rhsecurity/configuration.go similarity index 95% rename from secparse/rhsecurity/configuration.go rename to apollo/rhsecurity/configuration.go index 40a1902e..d4da8ca7 100644 --- a/secparse/rhsecurity/configuration.go +++ b/apollo/rhsecurity/configuration.go @@ -78,9 +78,9 @@ type ServerVariable struct { // ServerConfiguration stores the information about a server type ServerConfiguration struct { - URL string + URL string Description string - Variables map[string]ServerVariable + Variables map[string]ServerVariable } // ServerConfigurations stores multiple ServerConfiguration items @@ -101,17 +101,16 @@ type Configuration struct { // NewConfiguration returns a new Configuration object func NewConfiguration() *Configuration { cfg := &Configuration{ - DefaultHeader: make(map[string]string), - UserAgent: "OpenAPI-Generator/1.0.0/go", - Debug: false, - Servers: ServerConfigurations{ + DefaultHeader: make(map[string]string), + UserAgent: "OpenAPI-Generator/1.0.0/go", + Debug: false, + Servers: ServerConfigurations{ { - URL: "https://access.redhat.com/hydra/rest/securitydata", + URL: "https://access.redhat.com/hydra/rest/securitydata", Description: "No description provided", }, }, - OperationServers: map[string]ServerConfigurations{ - }, + OperationServers: map[string]ServerConfigurations{}, } return cfg } diff --git a/secparse/rhsecurity/docs/CVE.md b/apollo/rhsecurity/docs/CVE.md similarity index 97% rename from secparse/rhsecurity/docs/CVE.md rename to apollo/rhsecurity/docs/CVE.md index 1d4542db..7353ae7a 100644 --- a/secparse/rhsecurity/docs/CVE.md +++ b/apollo/rhsecurity/docs/CVE.md @@ -10,7 +10,7 @@ Name | Type | Description | Notes **Advisories** | **[]string** | | **Bugzilla** | **string** | | **BugzillaDescription** | **string** | | -**CvssScore** | Pointer to **string** | | [optional] +**CvssScore** | Pointer to **float32** | | [optional] **CvssScoringVector** | Pointer to **string** | | [optional] **CWE** | **string** | | **AffectedPackages** | **[]string** | | @@ -159,20 +159,20 @@ SetBugzillaDescription sets BugzillaDescription field to given value. ### GetCvssScore -`func (o *CVE) GetCvssScore() string` +`func (o *CVE) GetCvssScore() float32` GetCvssScore returns the CvssScore field if non-nil, zero value otherwise. 
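Note: the regenerated Red Hat Security Data API client above switches `cvss_score` from string to float32, both on the `CVE` model and on the `GetCves` request builder. Below is a minimal, illustrative sketch of calling the regenerated builder; the `GetCves`/`CvssScore`/`Execute` calls, the `CvssScore *float32` field, and the client construction are taken from this diff, while the surrounding program is an assumption for demonstration only.

```go
package main

import (
	"context"
	"fmt"

	"peridot.resf.org/apollo/rhsecurity"
)

func main() {
	// Same construction the mock package uses: default configuration,
	// pointing at the generated securitydata server URL.
	api := rhsecurity.NewAPIClient(rhsecurity.NewConfiguration()).DefaultApi

	// cvss_score is now a numeric filter (float32), not a string.
	cves, _, err := api.GetCves(context.TODO()).CvssScore(7.0).Execute()
	if err != nil {
		panic(err)
	}
	for _, cve := range cves {
		// CvssScore is *float32 and optional; some CVEs carry no v2 score.
		if cve.CvssScore != nil {
			fmt.Printf("%s cvss2=%.1f\n", cve.CVE, *cve.CvssScore)
		}
	}
}
```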
### GetCvssScoreOk -`func (o *CVE) GetCvssScoreOk() (*string, bool)` +`func (o *CVE) GetCvssScoreOk() (*float32, bool)` GetCvssScoreOk returns a tuple with the CvssScore field if it's non-nil, zero value otherwise and a boolean to check if the value has been set. ### SetCvssScore -`func (o *CVE) SetCvssScore(v string)` +`func (o *CVE) SetCvssScore(v float32)` SetCvssScore sets CvssScore field to given value. diff --git a/secparse/rhsecurity/docs/CVEDetailed.md b/apollo/rhsecurity/docs/CVEDetailed.md similarity index 100% rename from secparse/rhsecurity/docs/CVEDetailed.md rename to apollo/rhsecurity/docs/CVEDetailed.md diff --git a/secparse/rhsecurity/docs/CVEDetailedAffectedRelease.md b/apollo/rhsecurity/docs/CVEDetailedAffectedRelease.md similarity index 100% rename from secparse/rhsecurity/docs/CVEDetailedAffectedRelease.md rename to apollo/rhsecurity/docs/CVEDetailedAffectedRelease.md diff --git a/secparse/rhsecurity/docs/CVEDetailedBugzilla.md b/apollo/rhsecurity/docs/CVEDetailedBugzilla.md similarity index 100% rename from secparse/rhsecurity/docs/CVEDetailedBugzilla.md rename to apollo/rhsecurity/docs/CVEDetailedBugzilla.md diff --git a/secparse/rhsecurity/docs/CVEDetailedCvss3.md b/apollo/rhsecurity/docs/CVEDetailedCvss3.md similarity index 100% rename from secparse/rhsecurity/docs/CVEDetailedCvss3.md rename to apollo/rhsecurity/docs/CVEDetailedCvss3.md diff --git a/secparse/rhsecurity/docs/CVEDetailedPackageState.md b/apollo/rhsecurity/docs/CVEDetailedPackageState.md similarity index 100% rename from secparse/rhsecurity/docs/CVEDetailedPackageState.md rename to apollo/rhsecurity/docs/CVEDetailedPackageState.md diff --git a/secparse/rhsecurity/docs/DefaultApi.md b/apollo/rhsecurity/docs/DefaultApi.md similarity index 96% rename from secparse/rhsecurity/docs/DefaultApi.md rename to apollo/rhsecurity/docs/DefaultApi.md index 38420fb8..8d8cfec0 100644 --- a/secparse/rhsecurity/docs/DefaultApi.md +++ b/apollo/rhsecurity/docs/DefaultApi.md @@ -110,7 +110,7 @@ func main() { package_ := "package__example" // string | CVEs which affect the package (optional) product := "product_example" // string | CVEs which affect the product. The parameter supports Perl compatible regular expressions. (optional) cwe := "cwe_example" // string | CVEs with CWE (optional) - cvssScore := "cvssScore_example" // string | CVEs with CVSS score greater than or equal to this value (optional) + cvssScore := float32(8.14) // float32 | CVEs with CVSS score greater than or equal to this value (optional) cvss3Score := "cvss3Score_example" // string | CVEs with CVSSv3 score greater than or equal to this value (optional) page := float32(8.14) // float32 | CVEs for page number (optional) perPage := float32(8.14) // float32 | Number of CVEs to return per page (optional) @@ -148,7 +148,7 @@ Name | Type | Description | Notes **package_** | **string** | CVEs which affect the package | **product** | **string** | CVEs which affect the product. The parameter supports Perl compatible regular expressions. 
| **cwe** | **string** | CVEs with CWE | - **cvssScore** | **string** | CVEs with CVSS score greater than or equal to this value | + **cvssScore** | **float32** | CVEs with CVSS score greater than or equal to this value | **cvss3Score** | **string** | CVEs with CVSSv3 score greater than or equal to this value | **page** | **float32** | CVEs for page number | **perPage** | **float32** | Number of CVEs to return per page | diff --git a/secparse/rhsecurity/gen.go b/apollo/rhsecurity/gen.go similarity index 100% rename from secparse/rhsecurity/gen.go rename to apollo/rhsecurity/gen.go diff --git a/secparse/rhsecurity/model_cve.go b/apollo/rhsecurity/model_cve.go similarity index 90% rename from secparse/rhsecurity/model_cve.go rename to apollo/rhsecurity/model_cve.go index ed04e5e5..127615cc 100644 --- a/secparse/rhsecurity/model_cve.go +++ b/apollo/rhsecurity/model_cve.go @@ -17,19 +17,19 @@ import ( // CVE CVE model used in listing type CVE struct { - CVE string `json:"CVE"` - Severity string `json:"severity"` - PublicDate string `json:"public_date"` - Advisories []string `json:"advisories"` - Bugzilla string `json:"bugzilla"` - BugzillaDescription string `json:"bugzilla_description"` - CvssScore *string `json:"cvss_score,omitempty"` - CvssScoringVector *string `json:"cvss_scoring_vector,omitempty"` - CWE string `json:"CWE"` - AffectedPackages []string `json:"affected_packages"` - ResourceUrl string `json:"resource_url"` - Cvss3ScoringVector string `json:"cvss3_scoring_vector"` - Cvss3Score string `json:"cvss3_score"` + CVE string `json:"CVE"` + Severity string `json:"severity"` + PublicDate string `json:"public_date"` + Advisories []string `json:"advisories"` + Bugzilla string `json:"bugzilla"` + BugzillaDescription string `json:"bugzilla_description"` + CvssScore *float32 `json:"cvss_score,omitempty"` + CvssScoringVector *string `json:"cvss_scoring_vector,omitempty"` + CWE string `json:"CWE"` + AffectedPackages []string `json:"affected_packages"` + ResourceUrl string `json:"resource_url"` + Cvss3ScoringVector string `json:"cvss3_scoring_vector"` + Cvss3Score string `json:"cvss3_score"` } // NewCVE instantiates a new CVE object @@ -73,7 +73,7 @@ func (o *CVE) GetCVE() string { // GetCVEOk returns a tuple with the CVE field value // and a boolean to check if the value has been set. func (o *CVE) GetCVEOk() (*string, bool) { - if o == nil { + if o == nil { return nil, false } return &o.CVE, true @@ -97,7 +97,7 @@ func (o *CVE) GetSeverity() string { // GetSeverityOk returns a tuple with the Severity field value // and a boolean to check if the value has been set. func (o *CVE) GetSeverityOk() (*string, bool) { - if o == nil { + if o == nil { return nil, false } return &o.Severity, true @@ -121,7 +121,7 @@ func (o *CVE) GetPublicDate() string { // GetPublicDateOk returns a tuple with the PublicDate field value // and a boolean to check if the value has been set. func (o *CVE) GetPublicDateOk() (*string, bool) { - if o == nil { + if o == nil { return nil, false } return &o.PublicDate, true @@ -145,7 +145,7 @@ func (o *CVE) GetAdvisories() []string { // GetAdvisoriesOk returns a tuple with the Advisories field value // and a boolean to check if the value has been set. func (o *CVE) GetAdvisoriesOk() (*[]string, bool) { - if o == nil { + if o == nil { return nil, false } return &o.Advisories, true @@ -169,7 +169,7 @@ func (o *CVE) GetBugzilla() string { // GetBugzillaOk returns a tuple with the Bugzilla field value // and a boolean to check if the value has been set. 
func (o *CVE) GetBugzillaOk() (*string, bool) { - if o == nil { + if o == nil { return nil, false } return &o.Bugzilla, true @@ -193,7 +193,7 @@ func (o *CVE) GetBugzillaDescription() string { // GetBugzillaDescriptionOk returns a tuple with the BugzillaDescription field value // and a boolean to check if the value has been set. func (o *CVE) GetBugzillaDescriptionOk() (*string, bool) { - if o == nil { + if o == nil { return nil, false } return &o.BugzillaDescription, true @@ -205,9 +205,9 @@ func (o *CVE) SetBugzillaDescription(v string) { } // GetCvssScore returns the CvssScore field value if set, zero value otherwise. -func (o *CVE) GetCvssScore() string { +func (o *CVE) GetCvssScore() float32 { if o == nil || o.CvssScore == nil { - var ret string + var ret float32 return ret } return *o.CvssScore @@ -215,7 +215,7 @@ func (o *CVE) GetCvssScore() string { // GetCvssScoreOk returns a tuple with the CvssScore field value if set, nil otherwise // and a boolean to check if the value has been set. -func (o *CVE) GetCvssScoreOk() (*string, bool) { +func (o *CVE) GetCvssScoreOk() (*float32, bool) { if o == nil || o.CvssScore == nil { return nil, false } @@ -231,8 +231,8 @@ func (o *CVE) HasCvssScore() bool { return false } -// SetCvssScore gets a reference to the given string and assigns it to the CvssScore field. -func (o *CVE) SetCvssScore(v string) { +// SetCvssScore gets a reference to the given float32 and assigns it to the CvssScore field. +func (o *CVE) SetCvssScore(v float32) { o.CvssScore = &v } @@ -281,7 +281,7 @@ func (o *CVE) GetCWE() string { // GetCWEOk returns a tuple with the CWE field value // and a boolean to check if the value has been set. func (o *CVE) GetCWEOk() (*string, bool) { - if o == nil { + if o == nil { return nil, false } return &o.CWE, true @@ -305,7 +305,7 @@ func (o *CVE) GetAffectedPackages() []string { // GetAffectedPackagesOk returns a tuple with the AffectedPackages field value // and a boolean to check if the value has been set. func (o *CVE) GetAffectedPackagesOk() (*[]string, bool) { - if o == nil { + if o == nil { return nil, false } return &o.AffectedPackages, true @@ -329,7 +329,7 @@ func (o *CVE) GetResourceUrl() string { // GetResourceUrlOk returns a tuple with the ResourceUrl field value // and a boolean to check if the value has been set. func (o *CVE) GetResourceUrlOk() (*string, bool) { - if o == nil { + if o == nil { return nil, false } return &o.ResourceUrl, true @@ -353,7 +353,7 @@ func (o *CVE) GetCvss3ScoringVector() string { // GetCvss3ScoringVectorOk returns a tuple with the Cvss3ScoringVector field value // and a boolean to check if the value has been set. func (o *CVE) GetCvss3ScoringVectorOk() (*string, bool) { - if o == nil { + if o == nil { return nil, false } return &o.Cvss3ScoringVector, true @@ -377,7 +377,7 @@ func (o *CVE) GetCvss3Score() string { // GetCvss3ScoreOk returns a tuple with the Cvss3Score field value // and a boolean to check if the value has been set. 
func (o *CVE) GetCvss3ScoreOk() (*string, bool) { - if o == nil { + if o == nil { return nil, false } return &o.Cvss3Score, true @@ -467,5 +467,3 @@ func (v *NullableCVE) UnmarshalJSON(src []byte) error { v.isSet = true return json.Unmarshal(src, &v.value) } - - diff --git a/secparse/rhsecurity/model_cve_detailed.go b/apollo/rhsecurity/model_cve_detailed.go similarity index 92% rename from secparse/rhsecurity/model_cve_detailed.go rename to apollo/rhsecurity/model_cve_detailed.go index eceeff07..9b9dd1f8 100644 --- a/secparse/rhsecurity/model_cve_detailed.go +++ b/apollo/rhsecurity/model_cve_detailed.go @@ -17,17 +17,17 @@ import ( // CVEDetailed CVE model used when retrieving a specific CVE type CVEDetailed struct { - ThreatSeverity string `json:"threat_severity"` - PublicDate string `json:"public_date"` - Bugzilla CVEDetailedBugzilla `json:"bugzilla"` - Cvss3 CVEDetailedCvss3 `json:"cvss3"` - Cwe string `json:"cwe"` - Details []string `json:"details"` - Acknowledgement string `json:"acknowledgement"` + ThreatSeverity string `json:"threat_severity"` + PublicDate string `json:"public_date"` + Bugzilla CVEDetailedBugzilla `json:"bugzilla"` + Cvss3 CVEDetailedCvss3 `json:"cvss3"` + Cwe string `json:"cwe"` + Details []string `json:"details"` + Acknowledgement string `json:"acknowledgement"` AffectedRelease *[]CVEDetailedAffectedRelease `json:"affected_release,omitempty"` - Name string `json:"name"` - Csaw bool `json:"csaw"` - PackageState *[]CVEDetailedPackageState `json:"package_state,omitempty"` + Name string `json:"name"` + Csaw bool `json:"csaw"` + PackageState *[]CVEDetailedPackageState `json:"package_state,omitempty"` } // NewCVEDetailed instantiates a new CVEDetailed object @@ -69,7 +69,7 @@ func (o *CVEDetailed) GetThreatSeverity() string { // GetThreatSeverityOk returns a tuple with the ThreatSeverity field value // and a boolean to check if the value has been set. func (o *CVEDetailed) GetThreatSeverityOk() (*string, bool) { - if o == nil { + if o == nil { return nil, false } return &o.ThreatSeverity, true @@ -93,7 +93,7 @@ func (o *CVEDetailed) GetPublicDate() string { // GetPublicDateOk returns a tuple with the PublicDate field value // and a boolean to check if the value has been set. func (o *CVEDetailed) GetPublicDateOk() (*string, bool) { - if o == nil { + if o == nil { return nil, false } return &o.PublicDate, true @@ -117,7 +117,7 @@ func (o *CVEDetailed) GetBugzilla() CVEDetailedBugzilla { // GetBugzillaOk returns a tuple with the Bugzilla field value // and a boolean to check if the value has been set. func (o *CVEDetailed) GetBugzillaOk() (*CVEDetailedBugzilla, bool) { - if o == nil { + if o == nil { return nil, false } return &o.Bugzilla, true @@ -141,7 +141,7 @@ func (o *CVEDetailed) GetCvss3() CVEDetailedCvss3 { // GetCvss3Ok returns a tuple with the Cvss3 field value // and a boolean to check if the value has been set. func (o *CVEDetailed) GetCvss3Ok() (*CVEDetailedCvss3, bool) { - if o == nil { + if o == nil { return nil, false } return &o.Cvss3, true @@ -165,7 +165,7 @@ func (o *CVEDetailed) GetCwe() string { // GetCweOk returns a tuple with the Cwe field value // and a boolean to check if the value has been set. func (o *CVEDetailed) GetCweOk() (*string, bool) { - if o == nil { + if o == nil { return nil, false } return &o.Cwe, true @@ -189,7 +189,7 @@ func (o *CVEDetailed) GetDetails() []string { // GetDetailsOk returns a tuple with the Details field value // and a boolean to check if the value has been set. 
func (o *CVEDetailed) GetDetailsOk() (*[]string, bool) { - if o == nil { + if o == nil { return nil, false } return &o.Details, true @@ -213,7 +213,7 @@ func (o *CVEDetailed) GetAcknowledgement() string { // GetAcknowledgementOk returns a tuple with the Acknowledgement field value // and a boolean to check if the value has been set. func (o *CVEDetailed) GetAcknowledgementOk() (*string, bool) { - if o == nil { + if o == nil { return nil, false } return &o.Acknowledgement, true @@ -269,7 +269,7 @@ func (o *CVEDetailed) GetName() string { // GetNameOk returns a tuple with the Name field value // and a boolean to check if the value has been set. func (o *CVEDetailed) GetNameOk() (*string, bool) { - if o == nil { + if o == nil { return nil, false } return &o.Name, true @@ -293,7 +293,7 @@ func (o *CVEDetailed) GetCsaw() bool { // GetCsawOk returns a tuple with the Csaw field value // and a boolean to check if the value has been set. func (o *CVEDetailed) GetCsawOk() (*bool, bool) { - if o == nil { + if o == nil { return nil, false } return &o.Csaw, true @@ -409,5 +409,3 @@ func (v *NullableCVEDetailed) UnmarshalJSON(src []byte) error { v.isSet = true return json.Unmarshal(src, &v.value) } - - diff --git a/secparse/rhsecurity/model_cve_detailed_affected_release.go b/apollo/rhsecurity/model_cve_detailed_affected_release.go similarity index 95% rename from secparse/rhsecurity/model_cve_detailed_affected_release.go rename to apollo/rhsecurity/model_cve_detailed_affected_release.go index 2a371bce..247a46e1 100644 --- a/secparse/rhsecurity/model_cve_detailed_affected_release.go +++ b/apollo/rhsecurity/model_cve_detailed_affected_release.go @@ -17,11 +17,11 @@ import ( // CVEDetailedAffectedRelease struct for CVEDetailedAffectedRelease type CVEDetailedAffectedRelease struct { - ProductName string `json:"product_name"` - ReleaseDate string `json:"release_date"` - Advisory string `json:"advisory"` - Cpe string `json:"cpe"` - Package *string `json:"package,omitempty"` + ProductName string `json:"product_name"` + ReleaseDate string `json:"release_date"` + Advisory string `json:"advisory"` + Cpe string `json:"cpe"` + Package *string `json:"package,omitempty"` } // NewCVEDetailedAffectedRelease instantiates a new CVEDetailedAffectedRelease object @@ -58,7 +58,7 @@ func (o *CVEDetailedAffectedRelease) GetProductName() string { // GetProductNameOk returns a tuple with the ProductName field value // and a boolean to check if the value has been set. func (o *CVEDetailedAffectedRelease) GetProductNameOk() (*string, bool) { - if o == nil { + if o == nil { return nil, false } return &o.ProductName, true @@ -82,7 +82,7 @@ func (o *CVEDetailedAffectedRelease) GetReleaseDate() string { // GetReleaseDateOk returns a tuple with the ReleaseDate field value // and a boolean to check if the value has been set. func (o *CVEDetailedAffectedRelease) GetReleaseDateOk() (*string, bool) { - if o == nil { + if o == nil { return nil, false } return &o.ReleaseDate, true @@ -106,7 +106,7 @@ func (o *CVEDetailedAffectedRelease) GetAdvisory() string { // GetAdvisoryOk returns a tuple with the Advisory field value // and a boolean to check if the value has been set. func (o *CVEDetailedAffectedRelease) GetAdvisoryOk() (*string, bool) { - if o == nil { + if o == nil { return nil, false } return &o.Advisory, true @@ -130,7 +130,7 @@ func (o *CVEDetailedAffectedRelease) GetCpe() string { // GetCpeOk returns a tuple with the Cpe field value // and a boolean to check if the value has been set. 
func (o *CVEDetailedAffectedRelease) GetCpeOk() (*string, bool) { - if o == nil { + if o == nil { return nil, false } return &o.Cpe, true @@ -228,5 +228,3 @@ func (v *NullableCVEDetailedAffectedRelease) UnmarshalJSON(src []byte) error { v.isSet = true return json.Unmarshal(src, &v.value) } - - diff --git a/secparse/rhsecurity/model_cve_detailed_bugzilla.go b/apollo/rhsecurity/model_cve_detailed_bugzilla.go similarity index 97% rename from secparse/rhsecurity/model_cve_detailed_bugzilla.go rename to apollo/rhsecurity/model_cve_detailed_bugzilla.go index 366b4b71..01ca8530 100644 --- a/secparse/rhsecurity/model_cve_detailed_bugzilla.go +++ b/apollo/rhsecurity/model_cve_detailed_bugzilla.go @@ -18,8 +18,8 @@ import ( // CVEDetailedBugzilla struct for CVEDetailedBugzilla type CVEDetailedBugzilla struct { Description string `json:"description"` - Id string `json:"id"` - Url string `json:"url"` + Id string `json:"id"` + Url string `json:"url"` } // NewCVEDetailedBugzilla instantiates a new CVEDetailedBugzilla object @@ -55,7 +55,7 @@ func (o *CVEDetailedBugzilla) GetDescription() string { // GetDescriptionOk returns a tuple with the Description field value // and a boolean to check if the value has been set. func (o *CVEDetailedBugzilla) GetDescriptionOk() (*string, bool) { - if o == nil { + if o == nil { return nil, false } return &o.Description, true @@ -79,7 +79,7 @@ func (o *CVEDetailedBugzilla) GetId() string { // GetIdOk returns a tuple with the Id field value // and a boolean to check if the value has been set. func (o *CVEDetailedBugzilla) GetIdOk() (*string, bool) { - if o == nil { + if o == nil { return nil, false } return &o.Id, true @@ -103,7 +103,7 @@ func (o *CVEDetailedBugzilla) GetUrl() string { // GetUrlOk returns a tuple with the Url field value // and a boolean to check if the value has been set. func (o *CVEDetailedBugzilla) GetUrlOk() (*string, bool) { - if o == nil { + if o == nil { return nil, false } return &o.Url, true @@ -163,5 +163,3 @@ func (v *NullableCVEDetailedBugzilla) UnmarshalJSON(src []byte) error { v.isSet = true return json.Unmarshal(src, &v.value) } - - diff --git a/secparse/rhsecurity/model_cve_detailed_cvss3.go b/apollo/rhsecurity/model_cve_detailed_cvss3.go similarity index 96% rename from secparse/rhsecurity/model_cve_detailed_cvss3.go rename to apollo/rhsecurity/model_cve_detailed_cvss3.go index 3db3a581..8402bc29 100644 --- a/secparse/rhsecurity/model_cve_detailed_cvss3.go +++ b/apollo/rhsecurity/model_cve_detailed_cvss3.go @@ -17,9 +17,9 @@ import ( // CVEDetailedCvss3 struct for CVEDetailedCvss3 type CVEDetailedCvss3 struct { - Cvss3BaseScore string `json:"cvss3_base_score"` + Cvss3BaseScore string `json:"cvss3_base_score"` Cvss3ScoringVector string `json:"cvss3_scoring_vector"` - Status string `json:"status"` + Status string `json:"status"` } // NewCVEDetailedCvss3 instantiates a new CVEDetailedCvss3 object @@ -55,7 +55,7 @@ func (o *CVEDetailedCvss3) GetCvss3BaseScore() string { // GetCvss3BaseScoreOk returns a tuple with the Cvss3BaseScore field value // and a boolean to check if the value has been set. func (o *CVEDetailedCvss3) GetCvss3BaseScoreOk() (*string, bool) { - if o == nil { + if o == nil { return nil, false } return &o.Cvss3BaseScore, true @@ -79,7 +79,7 @@ func (o *CVEDetailedCvss3) GetCvss3ScoringVector() string { // GetCvss3ScoringVectorOk returns a tuple with the Cvss3ScoringVector field value // and a boolean to check if the value has been set. 
func (o *CVEDetailedCvss3) GetCvss3ScoringVectorOk() (*string, bool) { - if o == nil { + if o == nil { return nil, false } return &o.Cvss3ScoringVector, true @@ -103,7 +103,7 @@ func (o *CVEDetailedCvss3) GetStatus() string { // GetStatusOk returns a tuple with the Status field value // and a boolean to check if the value has been set. func (o *CVEDetailedCvss3) GetStatusOk() (*string, bool) { - if o == nil { + if o == nil { return nil, false } return &o.Status, true @@ -163,5 +163,3 @@ func (v *NullableCVEDetailedCvss3) UnmarshalJSON(src []byte) error { v.isSet = true return json.Unmarshal(src, &v.value) } - - diff --git a/secparse/rhsecurity/model_cve_detailed_package_state.go b/apollo/rhsecurity/model_cve_detailed_package_state.go similarity index 97% rename from secparse/rhsecurity/model_cve_detailed_package_state.go rename to apollo/rhsecurity/model_cve_detailed_package_state.go index 523563ec..e604c56b 100644 --- a/secparse/rhsecurity/model_cve_detailed_package_state.go +++ b/apollo/rhsecurity/model_cve_detailed_package_state.go @@ -18,9 +18,9 @@ import ( // CVEDetailedPackageState struct for CVEDetailedPackageState type CVEDetailedPackageState struct { ProductName string `json:"product_name"` - FixState string `json:"fix_state"` + FixState string `json:"fix_state"` PackageName string `json:"package_name"` - Cpe string `json:"cpe"` + Cpe string `json:"cpe"` } // NewCVEDetailedPackageState instantiates a new CVEDetailedPackageState object @@ -57,7 +57,7 @@ func (o *CVEDetailedPackageState) GetProductName() string { // GetProductNameOk returns a tuple with the ProductName field value // and a boolean to check if the value has been set. func (o *CVEDetailedPackageState) GetProductNameOk() (*string, bool) { - if o == nil { + if o == nil { return nil, false } return &o.ProductName, true @@ -81,7 +81,7 @@ func (o *CVEDetailedPackageState) GetFixState() string { // GetFixStateOk returns a tuple with the FixState field value // and a boolean to check if the value has been set. func (o *CVEDetailedPackageState) GetFixStateOk() (*string, bool) { - if o == nil { + if o == nil { return nil, false } return &o.FixState, true @@ -105,7 +105,7 @@ func (o *CVEDetailedPackageState) GetPackageName() string { // GetPackageNameOk returns a tuple with the PackageName field value // and a boolean to check if the value has been set. func (o *CVEDetailedPackageState) GetPackageNameOk() (*string, bool) { - if o == nil { + if o == nil { return nil, false } return &o.PackageName, true @@ -129,7 +129,7 @@ func (o *CVEDetailedPackageState) GetCpe() string { // GetCpeOk returns a tuple with the Cpe field value // and a boolean to check if the value has been set. 
func (o *CVEDetailedPackageState) GetCpeOk() (*string, bool) { - if o == nil { + if o == nil { return nil, false } return &o.Cpe, true @@ -192,5 +192,3 @@ func (v *NullableCVEDetailedPackageState) UnmarshalJSON(src []byte) error { v.isSet = true return json.Unmarshal(src, &v.value) } - - diff --git a/secparse/rhsecurity/response.go b/apollo/rhsecurity/response.go similarity index 100% rename from secparse/rhsecurity/response.go rename to apollo/rhsecurity/response.go diff --git a/secparse/rhsecurity/utils.go b/apollo/rhsecurity/utils.go similarity index 100% rename from secparse/rhsecurity/utils.go rename to apollo/rhsecurity/utils.go diff --git a/secparse/rhsecuritymock/BUILD.bazel b/apollo/rhsecuritymock/BUILD.bazel similarity index 68% rename from secparse/rhsecuritymock/BUILD.bazel rename to apollo/rhsecuritymock/BUILD.bazel index dc257de9..925e65bd 100644 --- a/secparse/rhsecuritymock/BUILD.bazel +++ b/apollo/rhsecuritymock/BUILD.bazel @@ -3,10 +3,10 @@ load("@io_bazel_rules_go//go:def.bzl", "go_library") go_library( name = "rhsecuritymock", srcs = ["client.go"], - importpath = "peridot.resf.org/secparse/rhsecuritymock", + importpath = "peridot.resf.org/apollo/rhsecuritymock", visibility = ["//visibility:public"], deps = [ - "//secparse/rhsecurity", + "//apollo/rhsecurity", "//utils", ], ) diff --git a/secparse/rhsecuritymock/client.go b/apollo/rhsecuritymock/client.go similarity index 90% rename from secparse/rhsecuritymock/client.go rename to apollo/rhsecuritymock/client.go index 28156c62..24abcf17 100644 --- a/secparse/rhsecuritymock/client.go +++ b/apollo/rhsecuritymock/client.go @@ -33,20 +33,23 @@ package rhsecuritymock import ( _context "context" _nethttp "net/http" - "peridot.resf.org/secparse/rhsecurity" + "peridot.resf.org/apollo/rhsecurity" "peridot.resf.org/utils" ) type Client struct { orig rhsecurity.DefaultApi - ActiveCVE *rhsecurity.CVEDetailed - Cves []*rhsecurity.CVE + ActivePage float32 + ActiveCVE *rhsecurity.CVEDetailed + Cves map[float32][]*rhsecurity.CVE } func New() *Client { return &Client{ - orig: rhsecurity.NewAPIClient(rhsecurity.NewConfiguration()).DefaultApi, + orig: rhsecurity.NewAPIClient(rhsecurity.NewConfiguration()).DefaultApi, + ActivePage: 1, + Cves: map[float32][]*rhsecurity.CVE{}, } } @@ -80,6 +83,7 @@ func (c *Client) GetCveExecute(_ rhsecurity.ApiGetCveRequest) (rhsecurity.CVEDet * @return ApiGetCvesRequest */ func (c *Client) GetCves(ctx _context.Context) rhsecurity.ApiGetCvesRequest { + c.ActivePage = 1 return c.orig.GetCves(ctx) } @@ -89,9 +93,10 @@ func (c *Client) GetCves(ctx _context.Context) rhsecurity.ApiGetCvesRequest { */ func (c *Client) GetCvesExecute(_ rhsecurity.ApiGetCvesRequest) ([]rhsecurity.CVE, *_nethttp.Response, error) { var cves []rhsecurity.CVE - for _, cve := range c.Cves { + for _, cve := range c.Cves[c.ActivePage] { cves = append(cves, *cve) } + c.ActivePage++ return cves, &_nethttp.Response{}, nil } diff --git a/secparse/rpmutils/BUILD.bazel b/apollo/rpmutils/BUILD.bazel similarity index 74% rename from secparse/rpmutils/BUILD.bazel rename to apollo/rpmutils/BUILD.bazel index d352217e..199018fc 100644 --- a/secparse/rpmutils/BUILD.bazel +++ b/apollo/rpmutils/BUILD.bazel @@ -3,6 +3,6 @@ load("@io_bazel_rules_go//go:def.bzl", "go_library") go_library( name = "rpmutils", srcs = ["regex.go"], - importpath = "peridot.resf.org/secparse/rpmutils", + importpath = "peridot.resf.org/apollo/rpmutils", visibility = ["//visibility:public"], ) diff --git a/secparse/rpmutils/regex.go b/apollo/rpmutils/regex.go similarity index 100% 
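Note: the reworked rhsecuritymock above keys seeded CVEs by page (`Cves map[float32][]*rhsecurity.CVE`) and advances `ActivePage` on every `GetCvesExecute` call, so paginated fetch loops can be exercised in tests. A hypothetical test sketch of that behaviour follows; `New`, `Cves`, `GetCves` and `GetCvesExecute` come from this diff, the test itself is illustrative only.

```go
package rhsecuritymock_test

import (
	"context"
	"testing"

	"peridot.resf.org/apollo/rhsecurity"
	"peridot.resf.org/apollo/rhsecuritymock"
)

func TestPagedMock(t *testing.T) {
	mock := rhsecuritymock.New()

	// Seed two "pages" of upstream CVEs; the mock serves Cves[ActivePage]
	// and increments ActivePage after each execute.
	mock.Cves[1] = []*rhsecurity.CVE{{CVE: "CVE-2022-0001"}}
	mock.Cves[2] = []*rhsecurity.CVE{{CVE: "CVE-2022-0002"}}

	req := mock.GetCves(context.TODO()) // resets ActivePage to 1
	first, _, err := mock.GetCvesExecute(req)
	if err != nil || len(first) != 1 || first[0].CVE != "CVE-2022-0001" {
		t.Fatalf("unexpected first page: %v, %v", first, err)
	}
	second, _, _ := mock.GetCvesExecute(req)
	if len(second) != 1 || second[0].CVE != "CVE-2022-0002" {
		t.Fatalf("unexpected second page: %v", second)
	}
}
```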
rename from secparse/rpmutils/regex.go rename to apollo/rpmutils/regex.go diff --git a/apollo/seed.sql b/apollo/seed.sql new file mode 100644 index 00000000..8181889b --- /dev/null +++ b/apollo/seed.sql @@ -0,0 +1,82 @@ +/* + * Copyright (c) All respective contributors to the Peridot Project. All rights reserved. + * Copyright (c) 2021-2022 Rocky Enterprise Software Foundation, Inc. All rights reserved. + * Copyright (c) 2021-2022 Ctrl IQ, Inc. All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1. Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2. Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3. Neither the name of the copyright holder nor the names of its contributors + * may be used to endorse or promote products derived from this software without + * specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. 
+ */ + +insert into short_codes (code, mode) +values ('RL', 2); +insert into short_codes (code, mode) +values ('RK', 1); +--insert into products (id, name, current_full_version, redhat_major_version, short_code_code, archs, mirror_from_date, redhat_product_prefix, cpe) +--values (1, 'Rocky Linux 9', '9.0', 9, 'RL', array ['x86_64', 'aarch64', 'ppc64le', 's390x'], '2022-05-15'::timestamp, 'Rocky Linux', 'cpe:/o:redhat:enterprise_linux:9'); +insert into products (id, name, current_full_version, redhat_major_version, short_code_code, archs, mirror_from_date, redhat_product_prefix, cpe, build_system, build_system_endpoint, koji_compose, koji_module_compose) +values (2, 'Rocky Linux 8', '8.6', 8, 'RL', array ['x86_64', 'aarch64'], '2022-05-15'::timestamp, 'Rocky Linux', 'cpe:/o:redhat:enterprise_linux:8', 'koji', 'https://koji.rockylinux.org/kojihub', 'dist-rocky8-compose', 'dist-rocky8-module-compose'); +--insert into ignored_upstream_packages (product_id, package) +--values +-- (1, 'tfm-rubygem-unicode*'), +-- (1, 'katello-host-tools*'), +-- (1, 'openssl-ibmca*'), +-- (1, 'insights-client*'), +-- (1, 'tfm-rubygem-unicode-display_width*'), +-- (1, 'pulp*'), +-- (1, 'satellite*'), +-- (1, 'tfm-rubygem-unf_ext*'), +-- (1, 'foreman*'), +-- (1, 'kpatch*'), +-- (1, 'rhc-worker-playbook*'); +insert into ignored_upstream_packages (product_id, package) +values + (2, 'tfm-rubygem-unicode*'), + (2, 'katello-host-tools*'), + (2, 'openssl-ibmca*'), + (2, 'insights-client*'), + (2, 'tfm-rubygem-unicode-display_width*'), + (2, 'pulp*'), + (2, 'satellite*'), + (2, 'tfm-rubygem-unf_ext*'), + (2, 'foreman*'), + (2, 'kpatch*'), + (2, 'rhc-worker-playbook*'); +insert into reboot_suggested_packages (name) +values + ('kernel'), + ('kernel-PAE'), + ('kernel-rt'), + ('kernel-smp'), + ('kernel-xen'), + ('linux-firmware'), + ('*-firmware-*'), + ('dbus'), + ('glibc'), + ('hal'), + ('systemd'), + ('udev'), + ('gnutls'), + ('openssl-libs'); diff --git a/secparse/ui/BUILD.bazel b/apollo/ui/BUILD.bazel similarity index 62% rename from secparse/ui/BUILD.bazel rename to apollo/ui/BUILD.bazel index 1c84b70f..31a9920a 100644 --- a/secparse/ui/BUILD.bazel +++ b/apollo/ui/BUILD.bazel @@ -1,6 +1,6 @@ -package(default_visibility = ["//visibility:public"]) +load("//rules_byc:defs.bzl", "BYCDEPLOY_OUTS_BASE", "byc_frontend", "container", "peridot_k8s") -load("//rules_byc:defs.bzl", "byc_frontend", "container") +package(default_visibility = ["//visibility:public"]) server_deps = ["//common/frontend_server"] @@ -9,20 +9,22 @@ server_entrypoint = "server/index.mjs" server_srcs = glob(["server/**/*.mjs"]) byc_frontend( - name = "secparse", + name = "apollo", srcs = glob([ "src/**/*.tsx", "src/**/*.ts", ]), - entrypoint = "secparse/ui/src/entrypoint.tsx", + entrypoint = "apollo/ui/src/entrypoint.tsx", index_html = "//rules_byc/internal/byc_bundle:index_no_mobile.hbs", server_deps = server_deps, server_entrypoint = server_entrypoint, server_srcs = server_srcs, - title = "Rocky Linux Product Errata", + tailwind_config = "//rules_byc/internal/byc_bundle:tailwind.config.nopreflight.js", + title = "Rocky Enterprise Software Foundation Product Errata", deps = [ + "//apollo/proto/v1:client_typescript", + "//common/mui", "//common/ui", - "//secparse/proto/v1:client_typescript", "//tailwind:css", "@npm//@mui/icons-material", "@npm//@mui/material", @@ -39,10 +41,17 @@ byc_frontend( container( base = "//bases/bazel/node", files = [ - ":secparse.bundle", + ":apollo.bundle", ], frontend = True, - image_name = "secparse-frontend", + image_name = 
"apollo-frontend", server_entrypoint = server_entrypoint, server_files = server_srcs + server_deps, ) + +peridot_k8s( + name = "apollo-frontend", + src = "deploy.jsonnet", + outs = BYCDEPLOY_OUTS_BASE, + deps = ["//ci"], +) diff --git a/apollo/ui/deploy.jsonnet b/apollo/ui/deploy.jsonnet new file mode 100644 index 00000000..a5768462 --- /dev/null +++ b/apollo/ui/deploy.jsonnet @@ -0,0 +1,25 @@ +local bycdeploy = import 'ci/bycdeploy.jsonnet'; +local kubernetes = import 'ci/kubernetes.jsonnet'; +local frontend = import 'ci/frontend.jsonnet'; + +local tag = std.extVar('tag'); + +bycdeploy.new({ + name: 'apollo-frontend', + backend: false, + migrate: false, + image: kubernetes.tag($.name), + tag: tag, + env: frontend.server_env, + ports: [ + { + name: 'http', + containerPort: 8086, + protocol: 'TCP', + expose: true, + }, + ], + health: { + port: 8086, + }, +}) diff --git a/secparse/ui/server/index.mjs b/apollo/ui/server/index.mjs similarity index 80% rename from secparse/ui/server/index.mjs rename to apollo/ui/server/index.mjs index 0a09c5dd..b942f86d 100644 --- a/secparse/ui/server/index.mjs +++ b/apollo/ui/server/index.mjs @@ -31,23 +31,23 @@ */ import server from '../../../common/frontend_server/index.mjs'; +import { svcNameHttp, endpointHttp, NS } from '../../../common/frontend_server/upstream.mjs'; export default async function run(webpackConfig) { + const devFrontendUrl = 'http://errata.pdot.localhost:9007'; + const envPublicUrl = process.env['APOLLO_FRONTEND_HTTP_PUBLIC_URL']; + const frontendUrl = process.env['BYC_NS'] ? envPublicUrl : devFrontendUrl; + server({ - prodFrontendUrl: 'https://errata.rockylinux.org', - devFrontendUrl: 'http://errata.pdot.localhost:9007', + baseURL: frontendUrl, apis: { '/api': { - prodApiUrl: 'http://secparse-backend-http-prod-service.secparse-backend.svc.cluster.local', - devApiUrl: 'http://localhost:9008' + prodApiUrl: endpointHttp(svcNameHttp('apollo'), NS('apollo')), + devApiUrl: `https://apollo-dev.internal.rdev.ciq.localhost` }, - '/manage/api': { - prodApiUrl: 'http://secparse-admin-backend-http-prod-service.secparse-admin-backend.svc.cluster.local', - devApiUrl: 'http://localhost:12000' - } }, - authenticatedPrefix: '/manage', port: 9007, + disableAuth: true, webpackConfig }).then(); } diff --git a/secparse/ui/src/api.ts b/apollo/ui/src/api.ts similarity index 92% rename from secparse/ui/src/api.ts rename to apollo/ui/src/api.ts index 607965f8..39de095d 100644 --- a/secparse/ui/src/api.ts +++ b/apollo/ui/src/api.ts @@ -30,10 +30,10 @@ * POSSIBILITY OF SUCH DAMAGE. 
*/ -import * as secparse from 'bazel-bin/secparse/proto/v1/client_typescript'; +import * as apollo from 'bazel-bin/apollo/proto/v1/client_typescript'; -export const api = new secparse.SecparseApi( - new secparse.Configuration({ +export const api = new apollo.ApolloServiceApi( + new apollo.Configuration({ basePath: '/api', }) ); diff --git a/secparse/ui/src/components/Overview.tsx b/apollo/ui/src/components/Overview.tsx similarity index 50% rename from secparse/ui/src/components/Overview.tsx rename to apollo/ui/src/components/Overview.tsx index 3328e3d7..e6797956 100644 --- a/secparse/ui/src/components/Overview.tsx +++ b/apollo/ui/src/components/Overview.tsx @@ -35,47 +35,69 @@ import { DataGrid, GridColDef, GridColumns, - GridPageChangeParams, GridRowsProp, -} from '@material-ui/data-grid'; +} from '@mui/x-data-grid'; import { FormControl, InputLabel, MenuItem, Select, CircularProgress, -} from '@material-ui/core'; + TextField, + Container, + Typography, + Divider, +} from '@mui/material'; import { Link } from 'react-router-dom'; import { - SecparseAdvisory, - SecparseListAdvisoriesResponse, -} from 'bazel-bin/secparse/proto/v1/client_typescript/models'; + V1Advisory, + V1ListAdvisoriesResponse, +} from 'bazel-bin/apollo/proto/v1/client_typescript/models'; import { api } from '../api'; import { reqap } from 'common/ui/reqap'; +import { severityToBadge, typeToBadge } from 'apollo/ui/src/enumToText'; export const Overview = () => { // When advisories is set to null that means an error has occurred // Undefined means loading const [advisories, setAdvisories] = React.useState< - SecparseAdvisory[] | undefined | null + V1Advisory[] | undefined | null >(); const [pageSize, setPageSize] = React.useState(25); + const [page, setPage] = React.useState(0); + const [total, setTotal] = React.useState(0); + const [filterSynopsis, setFilterSynopsis] = React.useState< + string | undefined + >(); + const [filterCve, setFilterCve] = React.useState(); React.useEffect(() => { - (async () => { - let err, res: void | SecparseListAdvisoriesResponse | undefined; - [err, res] = await reqap(() => api.listAdvisories({})); - if (err || !res) { - setAdvisories(null); - return; - } + const timer = setTimeout(() => { + (async () => { + let err, res: void | V1ListAdvisoriesResponse | undefined; + [err, res] = await reqap(() => + api.listAdvisories({ + page, + limit: pageSize, + filtersSynopsis: filterSynopsis, + filtersCve: filterCve, + }) + ); + if (err || !res) { + setAdvisories(null); + return; + } - if (res) { - setAdvisories(res.advisories); - } - })().then(); - }, []); + if (res) { + setAdvisories(res.advisories); + setTotal(parseInt(res.total || '0')); + } + })().then(); + }, 500); + + return () => clearTimeout(timer); + }, [pageSize, page, filterSynopsis, filterCve]); const columns: GridColDef[] = [ { @@ -103,6 +125,7 @@ export const Overview = () => { headerName: 'Severity', width: 150, sortable: false, + renderCell: (params) => severityToBadge(params.value, 'small'), }, { field: 'products', @@ -118,12 +141,8 @@ export const Overview = () => { }, ]; - const handlePageSizeChange = (params: GridPageChangeParams) => { - setPageSize(params.pageSize); - }; - return ( -
+
{advisories === undefined && <CircularProgress />} {advisories === null && (

@@ -131,26 +150,59 @@ export const Overview = () => {

)} {advisories && ( - ({ - id: advisory.name, - synopsis: advisory.synopsis, - severity: advisory.severity, - products: advisory.affectedProducts?.join(', '), - publish_date: Intl.DateTimeFormat('en-US', { - day: '2-digit', - month: 'short', - year: 'numeric', - }).format(advisory.publishedAt), - }))} - columns={columns} - disableSelectionOnClick - pageSize={pageSize} - onPageSizeChange={handlePageSizeChange} - /> + <> + + Filters + setFilterSynopsis(e.target.value)} + /> + setFilterCve(e.target.value)} + /> + + + + ({ + id: advisory.name, + synopsis: advisory.synopsis, + severity: advisory.severity, + products: advisory.affectedProducts?.join(', '), + publish_date: Intl.DateTimeFormat('en-US', { + day: '2-digit', + month: 'short', + year: 'numeric', + }).format(advisory.publishedAt), + }))} + rowsPerPageOptions={[10, 25, 50, 100]} + rowCount={total} + paginationMode="server" + columns={columns} + density="compact" + pageSize={pageSize} + onPageChange={(page) => setPage(page)} + onPageSizeChange={(newPageSize) => setPageSize(newPageSize)} + /> + + )}
); diff --git a/apollo/ui/src/components/Root.tsx b/apollo/ui/src/components/Root.tsx new file mode 100644 index 00000000..c5ce7f62 --- /dev/null +++ b/apollo/ui/src/components/Root.tsx @@ -0,0 +1,137 @@ +/* + * Copyright (c) All respective contributors to the Peridot Project. All rights reserved. + * Copyright (c) 2021-2022 Rocky Enterprise Software Foundation, Inc. All rights reserved. + * Copyright (c) 2021-2022 Ctrl IQ, Inc. All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1. Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2. Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3. Neither the name of the copyright holder nor the names of its contributors + * may be used to endorse or promote products derived from this software without + * specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +import React from 'react'; + +import { + AppBar, + Toolbar, + Container, + CssBaseline, + Drawer, + Divider, + IconButton, + List, +} from '@mui/material'; +import ChevronLeftIcon from '@mui/icons-material/ChevronLeft'; +import MenuIcon from '@mui/icons-material/Menu'; + +import { useStyles } from '../styles'; +import { Switch, Route } from 'react-router'; +import { Overview } from './Overview'; +import { Link } from 'react-router-dom'; +import { RESFLogo } from 'common/ui/RESFLogo'; +import classnames from 'classnames'; +import { ShowErrata } from './ShowErrata'; + +export const Root = () => { + const [open, setOpen] = React.useState(false); + const classes = useStyles(); + + const handleDrawerClose = () => { + setOpen(false); + }; + + const handleDrawerOpen = () => { + setOpen(true); + }; + + const inManage = location.pathname.startsWith('/manage'); + + return ( +
+ + + {inManage && ( + + + + )} + +
+ +
+ Product Errata{inManage && ' (Admin)'} +
+
+ +
+
+ {inManage && ( + +
+ + + +
+ +
+ )} +
+
+ + + + +
+
+ ); +}; diff --git a/apollo/ui/src/components/ShowErrata.tsx b/apollo/ui/src/components/ShowErrata.tsx new file mode 100644 index 00000000..175ce83b --- /dev/null +++ b/apollo/ui/src/components/ShowErrata.tsx @@ -0,0 +1,239 @@ +/* + * Copyright (c) All respective contributors to the Peridot Project. All rights reserved. + * Copyright (c) 2021-2022 Rocky Enterprise Software Foundation, Inc. All rights reserved. + * Copyright (c) 2021-2022 Ctrl IQ, Inc. All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1. Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2. Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3. Neither the name of the copyright holder nor the names of its contributors + * may be used to endorse or promote products derived from this software without + * specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +import React from 'react'; +import { + V1Advisory, + V1GetAdvisoryResponse, +} from 'bazel-bin/apollo/proto/v1/client_typescript'; +import { reqap } from 'common/ui/reqap'; +import { api } from '../api'; +import { RouteComponentProps } from 'react-router'; +import { + Card, + CardContent, + Chip, + CircularProgress, + Paper, + Tab, + Tabs, + Typography, +} from '@mui/material'; +import { severityToBadge, severityToText, typeToText } from 'apollo/ui/src/enumToText'; + +interface ShowErrataParams { + id: string; +} + +export interface ShowErrataProps + extends RouteComponentProps {} + +export const ShowErrata = (props: ShowErrataProps) => { + const [errata, setErrata] = React.useState< + V1Advisory | undefined | null + >(); + const [tabValue, setTabValue] = React.useState(0); + + React.useEffect(() => { + (async () => { + let err, res: void | V1GetAdvisoryResponse | undefined; + [err, res] = await reqap(() => + api.getAdvisory({ id: props.match.params.id }) + ); + if (err || !res) { + setErrata(null); + return; + } + + if (res) { + setErrata(res.advisory); + } + })().then(); + }, []); + + const handleTabChange = ({}, val: number) => { + setTabValue(val); + }; + + return ( +
+ {errata === undefined && } + {errata === null && ( + + Oh no! Something has gone wrong! + + )} + {errata && ( + <> +
+ {errata.name} +
+ {severityToBadge(errata.severity)} + +
+
+ + + + + + + + {tabValue === 0 && ( + +
+ Synopsis + {errata.synopsis} +
+
+ Type + {typeToText(errata.type)} +
+
+ Severity + {severityToText(errata.severity)} +
+
+ Topic + {errata.topic?.split('\n').map((x) => ( +

{x}

+ ))} +
+
+ Description + {errata.description?.split('\n').map((x) => ( +

{x}

+ ))} +
+
+ Affected products +
    + {errata.affectedProducts?.map((x) => ( +
  • {x}
  • + ))} +
+
+
+ Fixes + +
+
+ CVEs +
    + {errata.cves?.map((x) => { + let text = `${x.name}${ + x.sourceBy !== '' && ` (Source: ${x.sourceBy})` + }`; + + return ( +
  • + {x.sourceLink === '' ? ( + {text} + ) : ( + + {text} + + )} +
  • + ); + })} + {errata.cves?.length === 0 &&
  • No CVEs
  • } +
+
+
+ References +
    + {errata.references?.map((x) => ( +
  • {x}
  • + ))} + {errata.references?.length === 0 &&
  • No references
  • } +
+
+
+ )} + {tabValue === 1 && ( + +
+ {Object.keys(errata.rpms || {}).map(product => ( +
+ {product} +
+ SRPMs +
    + {errata.rpms[product].nvras + ?.filter((x) => x.indexOf('.src.rpm') !== -1) + .map((x) => ( +
  • {x}
  • + ))} +
+
+
+ RPMs +
    + {errata.rpms[product].nvras + ?.filter((x) => x.indexOf('.src.rpm') === -1) + .map((x) => ( +
  • {x}
  • + ))} +
+
+
+ ))} +
+
+ )} +
+ + )} +
+ ); +}; diff --git a/secparse/ui/src/entrypoint.tsx b/apollo/ui/src/entrypoint.tsx similarity index 85% rename from secparse/ui/src/entrypoint.tsx rename to apollo/ui/src/entrypoint.tsx index 95034da6..df3d7167 100644 --- a/secparse/ui/src/entrypoint.tsx +++ b/apollo/ui/src/entrypoint.tsx @@ -32,12 +32,14 @@ import React from 'react'; import ReactDOM from 'react-dom'; +import { BrowserRouter } from 'react-router-dom'; +import CssBaseline from '@mui/material/CssBaseline'; import { Root } from './components/Root'; import 'tailwind/tailwind.css'; -import { green } from '@material-ui/core/colors'; -import { createMuiTheme, ThemeProvider } from '@material-ui/core'; +import { createMuiTheme, ThemeProvider } from '@mui/material'; +import { PeridotThemeProvider } from 'common/mui/theme'; const theme = createMuiTheme({ palette: { @@ -52,9 +54,12 @@ const theme = createMuiTheme({ export const app = () => { ReactDOM.render( - - - , + + + + + + , document.getElementById('root') ); }; diff --git a/apollo/ui/src/enumToText.tsx b/apollo/ui/src/enumToText.tsx new file mode 100644 index 00000000..087da649 --- /dev/null +++ b/apollo/ui/src/enumToText.tsx @@ -0,0 +1,104 @@ +/* + * Copyright (c) All respective contributors to the Peridot Project. All rights reserved. + * Copyright (c) 2021-2022 Rocky Enterprise Software Foundation, Inc. All rights reserved. + * Copyright (c) 2021-2022 Ctrl IQ, Inc. All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1. Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2. Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3. Neither the name of the copyright holder nor the names of its contributors + * may be used to endorse or promote products derived from this software without + * specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. 
+ */ +import React from 'react'; + +import { + AdvisorySeverity, + V1AdvisoryType, +} from 'bazel-bin/apollo/proto/v1/client_typescript'; +import Chip from '@mui/material/Chip'; + +export const severityToText = (severity?: AdvisorySeverity): string => { + switch (severity) { + case AdvisorySeverity.Critical: + return 'Critical'; + case AdvisorySeverity.Important: + return 'Important'; + case AdvisorySeverity.Moderate: + return 'Moderate'; + case AdvisorySeverity.Low: + return 'Low'; + default: + return 'None'; + } +}; + +export const severityToBadge = ( + severity?: AdvisorySeverity, + size?: 'small', +): React.ReactNode => { + let color: 'primary' | 'secondary' | 'success' | 'info' | 'error' | 'warning' = 'success'; + + switch (severity) { + case AdvisorySeverity.Critical: + color = 'error'; + break; + case AdvisorySeverity.Important: + color = 'warning'; + break; + case AdvisorySeverity.Moderate: + color = 'secondary'; + break; + case AdvisorySeverity.Low: + color = 'primary'; + break; + } + + return ; +}; + +export const typeToText = (type?: V1AdvisoryType): string => { + switch (type) { + case V1AdvisoryType.Bugfix: + return 'Bug Fix'; + case V1AdvisoryType.Security: + return 'Security'; + case V1AdvisoryType.Enhancement: + return 'Enhancement'; + default: + return 'Unknown'; + } +}; + +export const typeToBadge = ( + type?: V1AdvisoryType, + size?: 'small', +): React.ReactNode => { + let color: 'info' | 'warning' = 'info'; + + switch (type) { + case V1AdvisoryType.Security: + color = 'warning'; + } + + return ; +}; diff --git a/secparse/ui/src/styles.ts b/apollo/ui/src/styles.ts similarity index 98% rename from secparse/ui/src/styles.ts rename to apollo/ui/src/styles.ts index 87913b79..2ba908d7 100644 --- a/secparse/ui/src/styles.ts +++ b/apollo/ui/src/styles.ts @@ -30,7 +30,7 @@ * POSSIBILITY OF SUCH DAMAGE. */ -import { makeStyles } from '@material-ui/core/styles'; +import { makeStyles } from '@mui/styles'; const drawerWidth = 240; diff --git a/apollo/worker/BUILD.bazel b/apollo/worker/BUILD.bazel new file mode 100644 index 00000000..e240224e --- /dev/null +++ b/apollo/worker/BUILD.bazel @@ -0,0 +1,15 @@ +load("@io_bazel_rules_go//go:def.bzl", "go_library") + +go_library( + name = "worker", + srcs = ["worker.go"], + importpath = "peridot.resf.org/apollo/worker", + visibility = ["//visibility:public"], + deps = [ + "//apollo/db", + "//apollo/workflow", + "//vendor/github.com/sirupsen/logrus", + "//vendor/go.temporal.io/sdk/client", + "//vendor/go.temporal.io/sdk/worker", + ], +) diff --git a/secparse/admin/impl/short_code.go b/apollo/worker/worker.go similarity index 62% rename from secparse/admin/impl/short_code.go rename to apollo/worker/worker.go index e5fce1c6..07de4050 100644 --- a/secparse/admin/impl/short_code.go +++ b/apollo/worker/worker.go @@ -28,40 +28,55 @@ // ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE // POSSIBILITY OF SUCH DAMAGE. 
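The Worker wrapper introduced in the worker.go hunk below builds on the plain register-and-run pattern from go.temporal.io/sdk. For orientation, here is a minimal, generic sketch of that pattern; it is illustrative only, with hypothetical names, and is not the actual apollo wiring:

package main

import (
	"context"
	"log"
	"time"

	"go.temporal.io/sdk/client"
	"go.temporal.io/sdk/worker"
	"go.temporal.io/sdk/workflow"
)

// demoActivity and demoWorkflow are placeholders standing in for the
// Controller activities/workflows the real worker registers.
func demoActivity(ctx context.Context) error { return nil }

func demoWorkflow(ctx workflow.Context) error {
	actCtx := workflow.WithActivityOptions(ctx, workflow.ActivityOptions{
		StartToCloseTimeout: time.Minute,
	})
	return workflow.ExecuteActivity(actCtx, demoActivity).Get(ctx, nil)
}

func main() {
	// Connects to localhost:7233 unless client.Options specifies HostPort.
	c, err := client.NewClient(client.Options{})
	if err != nil {
		log.Fatalf("could not create temporal client: %v", err)
	}
	defer c.Close()

	w := worker.New(c, "example-task-queue", worker.Options{})
	w.RegisterWorkflow(demoWorkflow)
	w.RegisterActivity(demoActivity)

	// Blocks until interrupted, same idea as the Worker.Run wrapper below.
	if err := w.Run(worker.InterruptCh()); err != nil {
		log.Fatalf("could not run worker: %v", err)
	}
}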
-package impl +package worker import ( - "context" - secparseadminpb "peridot.resf.org/secparse/admin/proto/v1" - "peridot.resf.org/secparse/db" - "peridot.resf.org/utils" + "github.com/sirupsen/logrus" + "go.temporal.io/sdk/client" + "go.temporal.io/sdk/worker" + apollodb "peridot.resf.org/apollo/db" + "peridot.resf.org/apollo/workflow" ) -func (s *Server) ListShortCodes(_ context.Context, _ *secparseadminpb.ListShortCodesRequest) (*secparseadminpb.ListShortCodesResponse, error) { - shortCodes, err := s.db.GetAllShortCodes() - if err != nil { - return nil, utils.CouldNotRetrieveObjects - } +type Worker struct { + Client client.Client + TaskQueue string + WorkflowController *workflow.Controller + Worker worker.Worker - return &secparseadminpb.ListShortCodesResponse{ - ShortCodes: db.DTOListShortCodesToPB(shortCodes), - }, nil + log *logrus.Logger } -func (s *Server) CreateShortCode(_ context.Context, req *secparseadminpb.CreateShortCodeRequest) (*secparseadminpb.CreateShortCodeResponse, error) { - if err := req.ValidateAll(); err != nil { +type NewWorkerInput struct { + Temporal client.Client + Database apollodb.Access + TaskQueue string +} + +func NewWorker(input *NewWorkerInput, workflowOpts ...workflow.Option) (*Worker, error) { + log := logrus.New() + + controller, err := workflow.NewController(&workflow.NewControllerInput{ + Temporal: input.Temporal, + Database: input.Database, + MainQueue: input.TaskQueue, + }, workflowOpts...) + if err != nil { return nil, err } - shortCode, err := s.db.CreateShortCode(req.Code, req.Mode) - if err != nil { - if utils.IsUniqueViolation(err) { - return nil, utils.ObjectAlreadyExists - } - return nil, utils.CouldNotCreateObject - } - - return &secparseadminpb.CreateShortCodeResponse{ - ShortCode: db.DTOShortCodeToPB(shortCode), + return &Worker{ + Client: input.Temporal, + TaskQueue: input.TaskQueue, + WorkflowController: controller, + Worker: worker.New(input.Temporal, input.TaskQueue, worker.Options{}), + log: log, }, nil } + +func (w *Worker) Run() { + err := w.Worker.Run(worker.InterruptCh()) + if err != nil { + w.log.Fatalf("could not run worker: %v", err) + } +} diff --git a/apollo/workflow/BUILD.bazel b/apollo/workflow/BUILD.bazel new file mode 100644 index 00000000..0828990f --- /dev/null +++ b/apollo/workflow/BUILD.bazel @@ -0,0 +1,59 @@ +load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") + +go_library( + name = "workflow", + srcs = [ + "autocreate_advisory.go", + "collect_cve_data.go", + "common.go", + "downstream_check.go", + "poll_mirror_cve.go", + "poll_mirror_errata.go", + "update_cve_state.go", + "workflow.go", + ], + importpath = "peridot.resf.org/apollo/workflow", + visibility = ["//visibility:public"], + deps = [ + "//apollo/db", + "//apollo/proto/v1:pb", + "//apollo/rherrata", + "//apollo/rhsecurity", + "//apollo/rpmutils", + "//koji", + "//utils", + "//vendor/github.com/gobwas/glob", + "//vendor/github.com/jmoiron/sqlx/types", + "//vendor/github.com/pkg/errors", + "//vendor/github.com/sirupsen/logrus", + "//vendor/github.com/spf13/viper", + "//vendor/go.temporal.io/sdk/activity", + "//vendor/go.temporal.io/sdk/client", + "//vendor/go.temporal.io/sdk/workflow", + ], +) + +go_test( + name = "workflow_test", + srcs = [ + "autocreate_advisory_test.go", + "downstream_check_test.go", + "poll_mirror_cve_test.go", + "poll_mirror_errata_test.go", + "update_cve_state_test.go", + "workflow_test.go", + ], + data = glob(["testdata/**"]), + embed = [":workflow"], + deps = [ + "//apollo/db", + "//apollo/db/mock", + 
"//apollo/proto/v1:pb", + "//apollo/rherrata", + "//apollo/rhsecurity", + "//apollo/rhsecuritymock", + "//koji", + "//vendor/github.com/stretchr/testify/require", + "//vendor/go.temporal.io/sdk/testsuite", + ], +) diff --git a/secparse/cron/autocreate_advisory.go b/apollo/workflow/autocreate_advisory.go similarity index 69% rename from secparse/cron/autocreate_advisory.go rename to apollo/workflow/autocreate_advisory.go index 66e61bbe..f5e1d4aa 100644 --- a/secparse/cron/autocreate_advisory.go +++ b/apollo/workflow/autocreate_advisory.go @@ -28,31 +28,40 @@ // ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE // POSSIBILITY OF SUCH DAMAGE. -package cron +package workflow import ( "database/sql" + "fmt" "github.com/sirupsen/logrus" - secparseadminpb "peridot.resf.org/secparse/admin/proto/v1" - "peridot.resf.org/secparse/db" + "go.temporal.io/sdk/workflow" + apollodb "peridot.resf.org/apollo/db" + "peridot.resf.org/apollo/rpmutils" "strconv" "strings" + "time" ) -func (i *Instance) CreateAdvisoryForFixedCVEs() { - cves, err := i.db.GetAllCVEsFixedDownstream() +func (c *Controller) AutoCreateAdvisoryActivity() error { + cves, err := c.db.GetAllCVEsFixedDownstream() if err != nil { - logrus.Errorf("Could not get cves fixed downstream: %v", err) - return + c.log.Errorf("Could not get cves fixed downstream: %v", err) + return fmt.Errorf("could not get cves fixed downstream") + } + + rebootSuggestedPackages, err := c.db.GetAllRebootSuggestedPackages() + if err != nil { + c.log.Errorf("Could not get reboot suggested packages: %v", err) + return fmt.Errorf("could not get reboot suggested packages") } for _, cve := range cves { - beginTx, err := i.db.Begin() + beginTx, err := c.db.Begin() if err != nil { logrus.Errorf("could not begin tx: %v", err) continue } - tx := i.db.UseTransaction(beginTx) + tx := c.db.UseTransaction(beginTx) affectedProducts, err := tx.GetAllAffectedProductsByCVE(cve.ID) if err != nil { @@ -61,13 +70,18 @@ func (i *Instance) CreateAdvisoryForFixedCVEs() { continue } - var existingAdvisory *db.Advisory + var existingAdvisory *apollodb.Advisory didSkip := false for _, affectedProduct := range affectedProducts { if !affectedProduct.Advisory.Valid { continue } + product, err := c.db.GetProductByID(affectedProduct.ProductID) + if err != nil { + c.log.Errorf("could not get product by id %d: %v", affectedProduct.ProductID, err) + return fmt.Errorf("could not get product by id %d", affectedProduct.ProductID) + } advisorySplit := strings.Split(affectedProduct.Advisory.String, "-") numYearSplit := strings.Split(advisorySplit[1], ":") @@ -83,10 +97,10 @@ func (i *Instance) CreateAdvisoryForFixedCVEs() { continue } - existingAdvisory, err = tx.GetAdvisoryByCodeAndYearAndNum(cve.ShortCode, year, num) + existingAdvisory, err = tx.GetAdvisoryByCodeAndYearAndNum(product.ShortCode, year, num) if err != nil { if err == sql.ErrNoRows { - errata, err := i.errata.GetErrata(affectedProduct.Advisory.String) + errata, err := c.errata.GetErrata(affectedProduct.Advisory.String) if err != nil { logrus.Errorf("could not get errata from Red Hat: %v", err) didSkip = true @@ -94,15 +108,20 @@ func (i *Instance) CreateAdvisoryForFixedCVEs() { } for i, _ := range errata.Topic { - errata.Topic[i] = strings.Replace(errata.Topic[i], "Red Hat Enterprise Linux", "Rocky Linux", -1) - errata.Topic[i] = strings.Replace(errata.Topic[i], "Red Hat", "Rocky Linux", -1) + errata.Topic[i] = strings.Replace(errata.Topic[i], "Red Hat Enterprise Linux", product.RedHatProductPrefix.String, -1) + 
errata.Topic[i] = strings.Replace(errata.Topic[i], "Red Hat", c.vendor, -1) } for i, _ := range errata.Description { - errata.Description[i] = strings.Replace(errata.Description[i], "Red Hat Enterprise Linux", "Rocky Linux", -1) - errata.Description[i] = strings.Replace(errata.Description[i], "Red Hat", "Rocky Linux", -1) + errata.Description[i] = strings.Replace(errata.Description[i], "Red Hat Enterprise Linux", product.RedHatProductPrefix.String, -1) + errata.Description[i] = strings.Replace(errata.Description[i], "Red Hat", c.vendor, -1) } - newAdvisory := &db.Advisory{ + var packageName string + if !rpmutils.Module().MatchString(affectedProduct.Package) { + packageName = affectedProduct.Package + } + + newAdvisory := &apollodb.Advisory{ Year: year, Num: num, Synopsis: errata.Synopsis, @@ -111,9 +130,20 @@ func (i *Instance) CreateAdvisoryForFixedCVEs() { Type: int(errata.Type), Description: strings.Join(errata.Description, "\n"), RedHatIssuedAt: sql.NullTime{Valid: true, Time: errata.IssuedAt}, - ShortCodeCode: cve.ShortCode, + ShortCodeCode: product.ShortCode, PublishedAt: sql.NullTime{}, } + if packageName != "" { + match, err := c.checkForRebootSuggestedPackage(rebootSuggestedPackages, packageName) + if err != nil { + c.log.Errorf("could not check for reboot suggested package: %v", err) + return fmt.Errorf("could not check for reboot suggested package") + } + if match { + newAdvisory.RebootSuggested = true + } + } + newAdvisory, err = tx.CreateAdvisory(newAdvisory) if err != nil { logrus.Errorf("Could not create new advisory: %v", err) @@ -123,7 +153,8 @@ func (i *Instance) CreateAdvisoryForFixedCVEs() { existingAdvisory = newAdvisory for _, fix := range errata.Fixes { - id, err := tx.CreateFix(fix.BugzillaID, fix.Description) + sourceLink := fmt.Sprintf("https://bugzilla.redhat.com/show_bug.cgi?id=%s", fix.BugzillaID) + id, err := tx.CreateFix(fix.BugzillaID, "Red Hat", sourceLink, fix.Description) if err != nil { logrus.Errorf("Could not create fix for BZ#%s", fix.BugzillaID) didSkip = true @@ -172,13 +203,6 @@ func (i *Instance) CreateAdvisoryForFixedCVEs() { didSkip = true break } - - err = tx.UpdateCVEState(cve.ID, secparseadminpb.CVEState_IncludedInAdvisory) - if err != nil { - logrus.Errorf("Could not update CVE state: %v", err) - didSkip = true - break - } } if didSkip { @@ -194,4 +218,14 @@ func (i *Instance) CreateAdvisoryForFixedCVEs() { logrus.Infof("Created/updated advisory %s-%d:%d for %s", existingAdvisory.ShortCodeCode, existingAdvisory.Year, existingAdvisory.Num, cve.ID) } + + return nil +} + +func (c *Controller) AutoCreateAdvisoryWorkflow(ctx workflow.Context) error { + activityCtx := workflow.WithActivityOptions(ctx, workflow.ActivityOptions{ + ScheduleToStartTimeout: 5 * time.Minute, + StartToCloseTimeout: time.Hour, + }) + return workflow.ExecuteActivity(activityCtx, c.AutoCreateAdvisoryActivity).Get(ctx, nil) } diff --git a/secparse/cmd/secparseadmin/main.go b/apollo/workflow/autocreate_advisory_test.go similarity index 73% rename from secparse/cmd/secparseadmin/main.go rename to apollo/workflow/autocreate_advisory_test.go index 5e590c58..42e7e2c7 100644 --- a/secparse/cmd/secparseadmin/main.go +++ b/apollo/workflow/autocreate_advisory_test.go @@ -28,40 +28,4 @@ // ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE // POSSIBILITY OF SUCH DAMAGE. 
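AutoCreateAdvisoryWorkflow above is only a thin wrapper that runs the activity with timeouts; something still has to start it periodically. A hedged sketch of one way a client could do that with Temporal's cron support follows; the workflow ID, task queue and schedule are assumptions and are not taken from this diff:

package main

import (
	"context"
	"log"

	"go.temporal.io/sdk/client"
)

func main() {
	c, err := client.NewClient(client.Options{})
	if err != nil {
		log.Fatalf("could not create temporal client: %v", err)
	}
	defer c.Close()

	// The workflow is referenced by its registered name; ID, queue and
	// schedule below are placeholders for illustration only.
	_, err = c.ExecuteWorkflow(context.Background(), client.StartWorkflowOptions{
		ID:           "apollo-auto-create-advisory",
		TaskQueue:    "apollo",
		CronSchedule: "0 * * * *", // hourly
	}, "AutoCreateAdvisoryWorkflow")
	if err != nil {
		log.Fatalf("could not start workflow: %v", err)
	}
}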
-package main - -import ( - "github.com/sirupsen/logrus" - "github.com/spf13/cobra" - "peridot.resf.org/secparse/admin/impl" - "peridot.resf.org/secparse/db/connector" - "peridot.resf.org/utils" -) - -var root = &cobra.Command{ - Use: "secparseadmin", - Run: mn, -} - -var cnf = utils.NewFlagConfig() - -func init() { - cnf.DefaultPort = 12000 - - dname := "secparse" - cnf.DatabaseName = &dname - cnf.Name = "secparse" - - utils.AddFlags(root.PersistentFlags(), cnf) -} - -func mn(_ *cobra.Command, _ []string) { - impl.NewServer(connector.MustAuto()).Run() -} - -func main() { - utils.Main() - if err := root.Execute(); err != nil { - logrus.Fatal(err) - } -} +package workflow diff --git a/secparse/impl/server.go b/apollo/workflow/collect_cve_data.go similarity index 59% rename from secparse/impl/server.go rename to apollo/workflow/collect_cve_data.go index fe9a9099..e83cff14 100644 --- a/secparse/impl/server.go +++ b/apollo/workflow/collect_cve_data.go @@ -28,63 +28,54 @@ // ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE // POSSIBILITY OF SUCH DAMAGE. -package impl +package workflow import ( "context" - "github.com/sirupsen/logrus" - "google.golang.org/grpc" - "peridot.resf.org/secparse/db" - secparsepb "peridot.resf.org/secparse/proto/v1" - "peridot.resf.org/secparse/rpmutils" - "peridot.resf.org/utils" - "regexp" + "encoding/json" + "github.com/pkg/errors" + "go.temporal.io/sdk/workflow" + "strings" + "time" ) -type Server struct { - secparsepb.UnimplementedSecparseServer - - log *logrus.Logger - db db.Access - - advisoryIdRegex *regexp.Regexp -} - -func NewServer(db db.Access) *Server { - return &Server{ - log: logrus.New(), - db: db, - advisoryIdRegex: rpmutils.AdvisoryId(), +func (c *Controller) CollectCVEDataActivity(ctx context.Context) error { + cves, err := c.db.GetAllCVEs() + if err != nil { + return errors.Wrap(err, "could not get cves") } + + // Go through each CVE and set CVE content by fetching from rhsecurity + for _, cve := range cves { + if cve.Content.Valid { + continue + } + if !strings.HasPrefix(cve.ID, "CVE") { + continue + } + + cveRh, _, err := c.security.GetCveExecute(c.security.GetCve(ctx, cve.ID)) + if err != nil { + return errors.Wrap(err, "could not get cve") + } + + cveBytes, err := json.Marshal(cveRh) + if err != nil { + return errors.Wrap(err, "could not marshal cve") + } + err = c.db.SetCVEContent(cve.ID, cveBytes) + if err != nil { + return errors.Wrap(err, "could not set cve content") + } + } + + return nil } -func (s *Server) interceptor(ctx context.Context, req interface{}, usi *grpc.UnaryServerInfo, handler grpc.UnaryHandler) (interface{}, error) { - n := utils.EndInterceptor - - return n(ctx, req, usi, handler) -} - -func (s *Server) Run() { - res := utils.NewGRPCServer( - &utils.GRPCOptions{ - Interceptor: s.interceptor, - }, - func(r *utils.Register) { - err := secparsepb.RegisterSecparseHandlerFromEndpoint( - r.Context, - r.Mux, - r.Endpoint, - r.Options, - ) - if err != nil { - s.log.Fatalf("could not register handler - %s", err) - } - }, - func(r *utils.RegisterServer) { - secparsepb.RegisterSecparseServer(r.Server, s) - }, - ) - - defer res.Cancel() - res.WaitGroup.Wait() +func (c *Controller) CollectCVEDataWorkflow(ctx workflow.Context) error { + activityCtx := workflow.WithActivityOptions(ctx, workflow.ActivityOptions{ + ScheduleToStartTimeout: 5 * time.Minute, + StartToCloseTimeout: 12 * time.Hour, + }) + return workflow.ExecuteActivity(activityCtx, c.CollectCVEDataActivity).Get(ctx, nil) } diff --git 
a/secparse/admin/impl/cve.go b/apollo/workflow/common.go similarity index 66% rename from secparse/admin/impl/cve.go rename to apollo/workflow/common.go index 70480b35..42e7e2c7 100644 --- a/secparse/admin/impl/cve.go +++ b/apollo/workflow/common.go @@ -28,33 +28,4 @@ // ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE // POSSIBILITY OF SUCH DAMAGE. -package impl - -import ( - "context" - secparseadminpb "peridot.resf.org/secparse/admin/proto/v1" - "peridot.resf.org/secparse/db" - "peridot.resf.org/utils" -) - -func (s *Server) ListUnresolvedCVEs(_ context.Context, _ *secparseadminpb.ListUnresolvedCVEsRequest) (*secparseadminpb.ListUnresolvedCVEsResponse, error) { - cves, err := s.db.GetAllUnresolvedCVEs() - if err != nil { - return nil, utils.CouldNotRetrieveObjects - } - - return &secparseadminpb.ListUnresolvedCVEsResponse{ - Cves: db.DTOListCVEsToPB(cves), - }, nil -} - -func (s *Server) ListFixedCVEs(_ context.Context, _ *secparseadminpb.ListFixedCVEsRequest) (*secparseadminpb.ListFixedCVEsResponse, error) { - cves, err := s.db.GetAllCVEsWithAllProductsFixed() - if err != nil { - return nil, utils.CouldNotRetrieveObjects - } - - return &secparseadminpb.ListFixedCVEsResponse{ - Cves: db.DTOListCVEsToPB(cves), - }, nil -} +package workflow diff --git a/apollo/workflow/downstream_check.go b/apollo/workflow/downstream_check.go new file mode 100644 index 00000000..705bd111 --- /dev/null +++ b/apollo/workflow/downstream_check.go @@ -0,0 +1,203 @@ +// Copyright (c) All respective contributors to the Peridot Project. All rights reserved. +// Copyright (c) 2021-2022 Rocky Enterprise Software Foundation, Inc. All rights reserved. +// Copyright (c) 2021-2022 Ctrl IQ, Inc. All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are met: +// +// 1. Redistributions of source code must retain the above copyright notice, +// this list of conditions and the following disclaimer. +// +// 2. Redistributions in binary form must reproduce the above copyright notice, +// this list of conditions and the following disclaimer in the documentation +// and/or other materials provided with the distribution. +// +// 3. Neither the name of the copyright holder nor the names of its contributors +// may be used to endorse or promote products derived from this software without +// specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE +// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +// POSSIBILITY OF SUCH DAMAGE. 
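The long-running activities in downstream_check.go below, and the pollers after it, keep themselves alive by recording heartbeats from a background goroutine. As a sketch of that pattern, here is a context-aware variant that also stops cleanly; keepAlive is a hypothetical helper and not code from this diff:

package heartbeat

import (
	"context"
	"time"

	"go.temporal.io/sdk/activity"
)

// keepAlive records an activity heartbeat at the given interval until the
// returned stop function is called or the activity context is cancelled.
// Usage inside an activity: defer keepAlive(ctx, 10*time.Second)()
func keepAlive(ctx context.Context, every time.Duration) (stop func()) {
	ctx, cancel := context.WithCancel(ctx)
	go func() {
		t := time.NewTicker(every)
		defer t.Stop()
		for {
			select {
			case <-ctx.Done():
				return
			case <-t.C:
				activity.RecordHeartbeat(ctx)
			}
		}
	}()
	return cancel
}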
+ +package workflow + +import ( + "context" + "fmt" + "github.com/sirupsen/logrus" + "go.temporal.io/sdk/activity" + "go.temporal.io/sdk/workflow" + apollodb "peridot.resf.org/apollo/db" + apollopb "peridot.resf.org/apollo/pb" + "peridot.resf.org/apollo/rpmutils" + "peridot.resf.org/utils" + "strings" + "time" +) + +func (c *Controller) DownstreamCVECheckActivity(ctx context.Context) error { + go func() { + for { + activity.RecordHeartbeat(ctx) + time.Sleep(10 * time.Second) + } + }() + + pendingProducts, err := c.db.GetPendingAffectedProducts() + if err != nil { + logrus.Errorf("could not get fixed cves: %v", err) + return fmt.Errorf("could not get fixed cves") + } + + for _, affectedProduct := range pendingProducts { + if !affectedProduct.CveID.Valid { + continue + } + + err = func() error { + willNotFixOnly := true + allFixed := true + + switch affectedProduct.State { + case + int(apollopb.AffectedProduct_STATE_WILL_NOT_FIX_UPSTREAM), + int(apollopb.AffectedProduct_STATE_OUT_OF_SUPPORT_SCOPE): + return nil + case + int(apollopb.AffectedProduct_STATE_UNDER_INVESTIGATION_UPSTREAM), + int(apollopb.AffectedProduct_STATE_AFFECTED_UPSTREAM): + allFixed = false + willNotFixOnly = false + return nil + } + + product, err := c.db.GetProductByID(affectedProduct.ProductID) + if err != nil { + logrus.Errorf("could not get product with id %d: %v", affectedProduct.ProductID, err) + return err + } + + ignoredUpstreamPackages, err := c.db.GetAllIgnoredPackagesByProductID(product.ID) + if err != nil { + logrus.Errorf("could not get ignored packages: %v", err) + return err + } + + beginTx, err := c.db.Begin() + if err != nil { + logrus.Errorf("could not begin transaction: %v", err) + return err + } + tx := c.db.UseTransaction(beginTx) + + skipProduct := false + defer func(skipProduct *bool, affectedProduct apollodb.AffectedProduct) { + if *skipProduct { + logrus.Infof("%s: Skipping package for now", affectedProduct.Package) + _ = beginTx.Rollback() + } + }(&skipProduct, *affectedProduct) + + cve, err := c.db.GetCVEByID(affectedProduct.CveID.String) + if err != nil { + return err + } + + nvrOnly := strings.Replace(affectedProduct.Package, ":", "-", 1) + if rpmutils.Module().MatchString(nvrOnly) { + if !affectedProduct.Advisory.Valid { + skipProduct = true + } + + redHatAdvisory, err := c.errata.GetErrata(affectedProduct.Advisory.String) + if err != nil { + logrus.Errorf("Could not get Red Hat Advisory: %v", err) + skipProduct = true + } + + for _, arch := range product.Archs { + redHatProductName := affectedProductNameForArchAndVersion(arch, product.RedHatMajorVersion.Int32) + affected := redHatAdvisory.AffectedProducts[redHatProductName] + if affected == nil { + continue + } + srpms := affected.SRPMs + for _, srpm := range srpms { + status := c.checkKojiForBuild(tx, ignoredUpstreamPackages, srpm, affectedProduct, cve) + if status == apollopb.BuildStatus_BUILD_STATUS_SKIP { + skipProduct = true + break + } else if status == apollopb.BuildStatus_BUILD_STATUS_FIXED { + willNotFixOnly = false + } else if status == apollopb.BuildStatus_BUILD_STATUS_NOT_FIXED { + allFixed = false + willNotFixOnly = false + } + } + break + } + if skipProduct { + logrus.Errorf("%s has not been fixed for NVR %s", cve.ID, nvrOnly) + } + } else { + nvrOnly = rpmutils.Epoch().ReplaceAllString(affectedProduct.Package, "") + status := c.checkKojiForBuild(tx, ignoredUpstreamPackages, nvrOnly, affectedProduct, cve) + if status == apollopb.BuildStatus_BUILD_STATUS_SKIP { + skipProduct = true + } else if status == 
apollopb.BuildStatus_BUILD_STATUS_FIXED { + willNotFixOnly = false + } else if status == apollopb.BuildStatus_BUILD_STATUS_NOT_FIXED { + allFixed = false + willNotFixOnly = false + } + } + + if !skipProduct { + newState := apollopb.AffectedProduct_STATE_FIXED_UPSTREAM + if allFixed { + newState = apollopb.AffectedProduct_STATE_FIXED_DOWNSTREAM + } + if willNotFixOnly { + newState = apollopb.AffectedProduct_STATE_WILL_NOT_FIX_UPSTREAM + } + err := tx.UpdateAffectedProductStateAndPackageAndAdvisory(affectedProduct.ID, int(newState), affectedProduct.Package, utils.NullStringToPointer(affectedProduct.Advisory)) + if err != nil { + logrus.Errorf("Could not save new CVE state: %v", err) + return err + } + err = beginTx.Commit() + if err != nil { + logrus.Errorf("could not commit transaction: %v", err) + return err + } + + logrus.Infof("%s is now set to %s", cve.ID, newState.String()) + } + + return nil + }() + if err != nil { + return err + } + } + + return nil +} + +func (c *Controller) DownstreamCVECheckWorkflow(ctx workflow.Context) error { + activityCtx := workflow.WithActivityOptions(ctx, workflow.ActivityOptions{ + ScheduleToStartTimeout: 30 * time.Minute, + StartToCloseTimeout: 6 * time.Hour, + HeartbeatTimeout: 30 * time.Second, + }) + return workflow.ExecuteActivity(activityCtx, c.DownstreamCVECheckActivity).Get(ctx, nil) +} diff --git a/secparse/cron/cve_resolved_downstream_test.go b/apollo/workflow/downstream_check_test.go similarity index 53% rename from secparse/cron/cve_resolved_downstream_test.go rename to apollo/workflow/downstream_check_test.go index db6eabb0..e04e7340 100644 --- a/secparse/cron/cve_resolved_downstream_test.go +++ b/apollo/workflow/downstream_check_test.go @@ -28,17 +28,26 @@ // ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE // POSSIBILITY OF SUCH DAMAGE. 
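The rewritten tests below drive these workflows through go.temporal.io/sdk/testsuite instead of invoking cron methods directly. This is the general shape of that pattern, with placeholder workflow and activity names; the real tests register the Controller's methods and use mocked DB, Koji and errata clients:

package example

import (
	"context"
	"testing"
	"time"

	"github.com/stretchr/testify/require"
	"go.temporal.io/sdk/testsuite"
	"go.temporal.io/sdk/workflow"
)

// demoActivity and demoWorkflow are placeholders for illustration only.
func demoActivity(ctx context.Context) error { return nil }

func demoWorkflow(ctx workflow.Context) error {
	ctx = workflow.WithActivityOptions(ctx, workflow.ActivityOptions{
		StartToCloseTimeout: time.Minute,
	})
	return workflow.ExecuteActivity(ctx, demoActivity).Get(ctx, nil)
}

func TestDemoWorkflow(t *testing.T) {
	var s testsuite.WorkflowTestSuite
	env := s.NewTestWorkflowEnvironment()
	env.RegisterActivity(demoActivity)

	env.ExecuteWorkflow(demoWorkflow)

	require.True(t, env.IsWorkflowCompleted())
	require.Nil(t, env.GetWorkflowError())
}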
-package cron +package workflow import ( "github.com/stretchr/testify/require" + "go.temporal.io/sdk/testsuite" "io/ioutil" + apollopb "peridot.resf.org/apollo/pb" + "peridot.resf.org/apollo/rherrata" "peridot.resf.org/koji" - secparseadminpb "peridot.resf.org/secparse/admin/proto/v1" - "peridot.resf.org/secparse/rherrata" "testing" ) +func getDownstreamCheckEnv() *testsuite.TestWorkflowEnvironment { + env := getPollRedHatErrataEnv() + env.RegisterActivity(controller.UpdateCVEStateActivity) + env.RegisterActivity(controller.DownstreamCVECheckActivity) + + return env +} + func TestInstance_CheckIfCVEResolvedDownstream_RHBA20212593_NotFixedDownstream(t *testing.T) { resetDb() @@ -71,13 +80,18 @@ func TestInstance_CheckIfCVEResolvedDownstream_RHBA20212593_NotFixedDownstream(t }, } - cronInstance.ScanRedHatErrata() - cronInstance.CheckIfCVEResolvedDownstream() + env := getDownstreamCheckEnv() + env.ExecuteWorkflow(controller.PollRedHatErrataWorkflow) + require.Nil(t, env.GetWorkflowError()) - affectedProducts, _ := cronInstance.db.GetAllAffectedProductsByCVE("RHBA-2021:2593") + env = getDownstreamCheckEnv() + env.ExecuteWorkflow(controller.DownstreamCVECheckWorkflow) + require.Nil(t, env.GetWorkflowError()) + + affectedProducts, _ := controller.db.GetAllAffectedProductsByCVE("RHBA-2021:2593") require.Len(t, affectedProducts, 1) require.Equal(t, "cmake-3.18.2-11.el8_4", affectedProducts[0].Package) - require.Equal(t, int(secparseadminpb.AffectedProductState_FixedUpstream), affectedProducts[0].State) + require.Equal(t, int(apollopb.AffectedProduct_STATE_FIXED_UPSTREAM), affectedProducts[0].State) } func TestInstance_CheckIfCVEResolvedDownstream_RHBA20212593_FixedDownstream(t *testing.T) { @@ -117,7 +131,13 @@ func TestInstance_CheckIfCVEResolvedDownstream_RHBA20212593_FixedDownstream(t *t }, } - cronInstance.ScanRedHatErrata() + env := getDownstreamCheckEnv() + env.ExecuteWorkflow(controller.PollRedHatErrataWorkflow) + require.Nil(t, env.GetWorkflowError()) + + env = getDownstreamCheckEnv() + env.ExecuteWorkflow(controller.UpdateCVEStateWorkflow) + require.Nil(t, env.GetWorkflowError()) kojiMock.Tagged = []*koji.Build{ { @@ -268,13 +288,175 @@ func TestInstance_CheckIfCVEResolvedDownstream_RHBA20212593_FixedDownstream(t *t }, } - cronInstance.CheckIfCVEResolvedDownstream() + env = getDownstreamCheckEnv() + env.ExecuteWorkflow(controller.DownstreamCVECheckWorkflow) + require.Nil(t, env.GetWorkflowError()) - affectedProducts, _ := cronInstance.db.GetAllAffectedProductsByCVE("RHBA-2021:2593") + affectedProducts, _ := controller.db.GetAllAffectedProductsByCVE("RHBA-2021:2593") require.Len(t, affectedProducts, 1) require.Equal(t, "cmake-3.18.2-11.el8_4", affectedProducts[0].Package) - require.Equal(t, int(secparseadminpb.AffectedProductState_FixedDownstream), affectedProducts[0].State) + require.Equal(t, int(apollopb.AffectedProduct_STATE_FIXED_DOWNSTREAM), affectedProducts[0].State) require.Len(t, mockDb.BuildReferences, 14) - require.Equal(t, "10", mockDb.BuildReferences[0].KojiID) + require.Equal(t, "10", mockDb.BuildReferences[0].KojiID.String) } + +/*func TestInstance_CheckIfCVEResolvedDownstream_RHSA20221642_FixedDownstream(t *testing.T) { + resetDb() + + htmlFile, err := ioutil.ReadFile("testdata/RHSA-2022-1642.html") + require.Nil(t, err) + + errataMock.HTMLResponses["RHSA-2022:1642"] = string(htmlFile[:]) + + errataMock.Advisories.Response.Docs = []*rherrata.CompactErrata{ + { + Name: "RHSA-2022:1642", + Description: "", + Synopsis: "", + Severity: "Important", + Type: "Security", + 
AffectedPackages: []string{ + "zlib-1.2.11-18.el8_5.src.rpm", + "zlib-1.2.11-18.el8_5.i686.rpm", + "zlib-1.2.11-18.el8_5.x86_64.rpm", + "zlib-debuginfo-1.2.11-18.el8_5.i686.rpm", + "zlib-debuginfo-1.2.11-18.el8_5.x86_64.rpm", + "zlib-debugsource-1.2.11-18.el8_5.i686.rpm", + "zlib-debugsource-1.2.11-18.el8_5.x86_64.rpm", + "zlib-devel-1.2.11-18.el8_5.i686.rpm", + "zlib-devel-1.2.11-18.el8_5.x86_64.rpm", + }, + CVEs: []string{ + "CVE-2018-25032", + }, + Fixes: []string{}, + PublicationDate: "2022-04-28T00:00:00Z", + }, + } + + env := getDownstreamCheckEnv() + env.ExecuteWorkflow(controller.PollRedHatErrataWorkflow) + require.Nil(t, env.GetWorkflowError()) + + kojiMock.Tagged = []*koji.Build{ + { + BuildId: 10, + CompletionTime: "", + CompletionTs: 0, + CreationEventId: 0, + CreationTime: "", + CreationTs: 0, + Epoch: "", + Extra: &koji.ListBuildsExtra{ + Source: &koji.ListBuildsExtraSource{ + OriginalUrl: "git+https://git.rockylinux.org/staging/rpms/zlib.git?#cc63be52ed1ba4f25d2015fd014558a3e7e19b08", + }, + }, + Name: "zlib", + Nvr: "zlib-1.2.11-18.el8_5", + OwnerId: 0, + OwnerName: "distrobuild", + PackageId: 0, + PackageName: "zlib", + Release: "18.el8_5", + Source: "", + StartTime: "", + StartTs: 0, + State: 0, + TaskId: 0, + Version: "1.2.11", + VolumeId: 0, + VolumeName: "", + TagId: 0, + TagName: "", + }, + } + + kojiMock.RPMs = []*koji.RPM{ + { + Arch: "src", + BuildId: 10, + Name: "zlib", + Nvr: "zlib-1.2.11-18.el8_5", + Release: "18.el8_5", + Version: "1.2.11", + }, + { + Arch: "i686", + BuildId: 10, + Name: "zlib", + Nvr: "zlib-1.2.11-18.el8_5", + Release: "18.el8_5", + Version: "1.2.11", + }, + { + Arch: "x86_64", + BuildId: 10, + Name: "zlib", + Nvr: "zlib-1.2.11-18.el8_5", + Release: "18.el8_5", + Version: "1.2.11", + }, + { + Arch: "i686", + BuildId: 10, + Name: "zlib-debuginfo", + Nvr: "zlib-debuginfo-1.2.11-18.el8_5", + Release: "18.el8_5", + Version: "1.2.11", + }, + { + Arch: "x86_64", + BuildId: 10, + Name: "zlib-debuginfo", + Nvr: "zlib-debuginfo-1.2.11-18.el8_5", + Release: "18.el8_5", + Version: "1.2.11", + }, + { + Arch: "i686", + BuildId: 10, + Name: "zlib-debugsource", + Nvr: "zlib-debugsource-1.2.11-18.el8_5", + Release: "18.el8_5", + Version: "1.2.11", + }, + { + Arch: "x86_64", + BuildId: 10, + Name: "zlib-debugsource", + Nvr: "zlib-debugsource-1.2.11-18.el8_5", + Release: "18.el8_5", + Version: "1.2.11", + }, + { + Arch: "i686", + BuildId: 10, + Name: "zlib-devel", + Nvr: "zlib-devel-1.2.11-18.el8_5", + Release: "18.el8_5", + Version: "1.2.11", + }, + { + Arch: "x86_64", + BuildId: 10, + Name: "zlib-devel", + Nvr: "zlib-devel-1.2.11-18.el8_5", + Release: "18.el8_5", + Version: "1.2.11", + }, + } + + env = getDownstreamCheckEnv() + env.ExecuteWorkflow(controller.DownstreamCVECheckWorkflow) + require.Nil(t, env.GetWorkflowError()) + + affectedProducts, _ := controller.db.GetAllAffectedProductsByCVE("RHSA-2022:1642") + require.Len(t, affectedProducts, 1) + require.Equal(t, "zlib-1.2.11-18.el8_5", affectedProducts[0].Package) + require.Equal(t, int(apollopb.AffectedProduct_STATE_FIXED_DOWNSTREAM), affectedProducts[0].State) + + require.Len(t, mockDb.BuildReferences, 14) + require.Equal(t, "10", mockDb.BuildReferences[0].KojiID) +}*/ diff --git a/apollo/workflow/poll_mirror_cve.go b/apollo/workflow/poll_mirror_cve.go new file mode 100644 index 00000000..abb4d83f --- /dev/null +++ b/apollo/workflow/poll_mirror_cve.go @@ -0,0 +1,212 @@ +// Copyright (c) All respective contributors to the Peridot Project. All rights reserved. 
+// Copyright (c) 2021-2022 Rocky Enterprise Software Foundation, Inc. All rights reserved. +// Copyright (c) 2021-2022 Ctrl IQ, Inc. All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are met: +// +// 1. Redistributions of source code must retain the above copyright notice, +// this list of conditions and the following disclaimer. +// +// 2. Redistributions in binary form must reproduce the above copyright notice, +// this list of conditions and the following disclaimer in the documentation +// and/or other materials provided with the distribution. +// +// 3. Neither the name of the copyright holder nor the names of its contributors +// may be used to endorse or promote products derived from this software without +// specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE +// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +// POSSIBILITY OF SUCH DAMAGE. + +package workflow + +import ( + "context" + "database/sql" + "encoding/json" + "fmt" + "github.com/jmoiron/sqlx/types" + "github.com/pkg/errors" + "go.temporal.io/sdk/activity" + "go.temporal.io/sdk/workflow" + apollodb "peridot.resf.org/apollo/db" + apollopb "peridot.resf.org/apollo/pb" + "peridot.resf.org/utils" + "strings" + "time" +) + +type ShortCodesRes struct { + ShortCodes []*apollodb.ShortCode `json:"short_codes"` +} + +func (c *Controller) pollCVEProcessProduct(ctx context.Context, product *apollodb.Product, shortCode *apollodb.ShortCode) error { + // Skip if the product doesn't define a valid Red Hat version + if !product.RedHatMajorVersion.Valid { + return nil + } + // Skip if product doesn't have correct Red Hat prefix + if !strings.HasPrefix(product.Name, product.RedHatProductPrefix.String) { + return nil + } + + var lastSync *time.Time + mirrorState, err := c.db.GetMirrorState(shortCode.Code) + if err != nil { + if err != sql.ErrNoRows { + c.log.Errorf("could not get last sync for code %s: %v", shortCode.Code, err) + // The cron will retry this + return nil + } + } else { + if mirrorState != nil && mirrorState.LastSync.Valid { + lastSync = &mirrorState.LastSync.Time + } + } + if lastSync == nil { + now := time.Now() + if product.MirrorFromDate.Valid { + now = product.MirrorFromDate.Time + } + lastSync = &now + } + + req := c.security.GetCves(ctx) + req = req.Product(productName(product.RedHatMajorVersion.Int32)) + if lastSync != nil { + req = req.After(lastSync.Format("2006-01-02")) + } + + page := 1 + for { + reqNew := req.Page(float32(page)) + cves, _, err := c.security.GetCvesExecute(reqNew) + if err != nil { + c.log.Errorf("could not get cves: %v", err) + return fmt.Errorf("could not get cves") + } + if len(cves) == 0 { + break + } + + for _, cve := range cves { + _, err := c.db.GetCVEByID(cve.CVE) + if err == nil { + 
continue + } + if err != sql.ErrNoRows { + c.log.Errorf("an unknown error occurred: %v", err) + return fmt.Errorf("an unknown error occurred") + } + + cveRh, _, err := c.security.GetCveExecute(c.security.GetCve(ctx, cve.CVE)) + if err != nil { + return errors.Wrap(err, "could not get cve") + } + cveBytes, err := json.Marshal(cveRh) + if err != nil { + return fmt.Errorf("could not marshal cve: %v", err) + } + + sourceBy := "Red Hat" + _, err = c.db.CreateCVE(cve.CVE, shortCode.Code, &sourceBy, &cve.ResourceUrl, types.NullJSONText{Valid: true, JSONText: cveBytes}) + if err != nil { + c.log.Errorf("could not create cve: %v", err) + return fmt.Errorf("could not create cve") + } + c.log.Infof("Added %s to %s with state NewFromUpstream", cve.CVE, shortCode.Code) + } + page++ + } + + err = c.db.UpdateMirrorState(shortCode.Code, utils.Pointer[time.Time](time.Now())) + if err != nil { + c.log.Errorf("could not update mirroring state: %v", err) + } + + return nil +} + +func (c *Controller) PollCVEProcessShortCodeActivity(ctx context.Context, shortCode *apollodb.ShortCode) error { + go func() { + for { + activity.RecordHeartbeat(ctx) + time.Sleep(10 * time.Second) + } + }() + + if int32(shortCode.Mode) != int32(apollopb.ShortCode_MODE_MIRROR) { + // This is not a mirrored short code, continue + return nil + } + + allProducts, err := c.db.GetProductsByShortCode(shortCode.Code) + if err != nil { + c.log.Errorf("could not get all products for code %s: %v", shortCode.Code, err) + // Returning nil since the cron will retry this + // We can set up an alert on the Grafana side to alert us + // if this happens too often + return nil + } + + for _, product := range allProducts { + err := c.pollCVEProcessProduct(ctx, product, shortCode) + if err != nil { + return err + } + } + + return nil +} + +func (c *Controller) GetAllShortCodesActivity() (*ShortCodesRes, error) { + s, err := c.db.GetAllShortCodes() + if err != nil { + return nil, err + } + + return &ShortCodesRes{ + ShortCodes: s, + }, nil +} + +func (c *Controller) PollRedHatCVEsWorkflow(ctx workflow.Context) error { + shortCodeCtx := workflow.WithActivityOptions(ctx, workflow.ActivityOptions{ + ScheduleToStartTimeout: time.Minute, + StartToCloseTimeout: time.Minute, + }) + var shortCodeRes ShortCodesRes + err := workflow.ExecuteActivity(shortCodeCtx, c.GetAllShortCodesActivity).Get(ctx, &shortCodeRes) + if err != nil { + return err + } + + var futures []workflow.Future + for _, shortCode := range shortCodeRes.ShortCodes { + activityCtx := workflow.WithActivityOptions(ctx, workflow.ActivityOptions{ + ScheduleToStartTimeout: 5 * time.Minute, + StartToCloseTimeout: 12 * time.Hour, + HeartbeatTimeout: 30 * time.Second, + }) + futures = append(futures, workflow.ExecuteActivity(activityCtx, c.PollCVEProcessShortCodeActivity, shortCode)) + } + + for _, future := range futures { + err := future.Get(ctx, nil) + if err != nil { + return err + } + } + + return nil +} diff --git a/secparse/cron/poll_redhat_for_cves_test.go b/apollo/workflow/poll_mirror_cve_test.go similarity index 73% rename from secparse/cron/poll_redhat_for_cves_test.go rename to apollo/workflow/poll_mirror_cve_test.go index d1ab0edd..70972b9f 100644 --- a/secparse/cron/poll_redhat_for_cves_test.go +++ b/apollo/workflow/poll_mirror_cve_test.go @@ -28,18 +28,27 @@ // ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE // POSSIBILITY OF SUCH DAMAGE. 
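One detail in pollCVEProcessProduct above that is easy to misread: req.After(lastSync.Format("2006-01-02")) passes a Go time layout, not a literal date. Go layouts are spelled in terms of the reference time, so "2006-01-02" simply means YYYY-MM-DD:

package main

import (
	"fmt"
	"time"
)

func main() {
	// Layout strings use Go's reference time (Mon Jan 2 15:04:05 MST 2006).
	ts := time.Date(2022, time.April, 28, 0, 0, 0, 0, time.UTC)
	fmt.Println(ts.Format("2006-01-02")) // prints 2022-04-28
}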
-package cron +package workflow import ( "github.com/stretchr/testify/require" - "peridot.resf.org/secparse/rhsecurity" + "go.temporal.io/sdk/testsuite" + "peridot.resf.org/apollo/rhsecurity" "testing" ) +func getPollRedHatCVEsEnv() *testsuite.TestWorkflowEnvironment { + env := testWfSuite.NewTestWorkflowEnvironment() + env.RegisterActivity(controller.GetAllShortCodesActivity) + env.RegisterActivity(controller.PollCVEProcessShortCodeActivity) + + return env +} + func TestInstance_PollRedHatForNewCVEs_AddNewCVE(t *testing.T) { resetDb() - securityMock.Cves = []*rhsecurity.CVE{ + securityMock.Cves[1] = []*rhsecurity.CVE{ { CVE: "CVE-2021-3602", Severity: "moderate", @@ -56,10 +65,14 @@ func TestInstance_PollRedHatForNewCVEs_AddNewCVE(t *testing.T) { Cvss3Score: "5.6", }, } + securityMock.ActiveCVE = &rhsecurity.CVEDetailed{} + require.Nil(t, readTestDataJson("testdata/CVE-2021-3602.json", securityMock.ActiveCVE)) - cronInstance.PollRedHatForNewCVEs() + env := getPollRedHatCVEsEnv() + env.ExecuteWorkflow(controller.PollRedHatCVEsWorkflow) + require.Nil(t, env.GetWorkflowError()) - cves, _ := cronInstance.db.GetAllUnresolvedCVEs() + cves, _ := controller.db.GetAllUnresolvedCVEs() require.Len(t, cves, 1) require.Equal(t, "CVE-2021-3602", cves[0].ID) } @@ -67,7 +80,7 @@ func TestInstance_PollRedHatForNewCVEs_AddNewCVE(t *testing.T) { func TestPollRedHatForNewCVEs_SkipExistingCVE(t *testing.T) { resetDb() - securityMock.Cves = []*rhsecurity.CVE{ + securityMock.Cves[1] = []*rhsecurity.CVE{ { CVE: "CVE-2021-3602", Severity: "moderate", @@ -84,16 +97,22 @@ func TestPollRedHatForNewCVEs_SkipExistingCVE(t *testing.T) { Cvss3Score: "5.6", }, } + securityMock.ActiveCVE = &rhsecurity.CVEDetailed{} + require.Nil(t, readTestDataJson("testdata/CVE-2021-3602.json", securityMock.ActiveCVE)) - cronInstance.PollRedHatForNewCVEs() + env := getPollRedHatCVEsEnv() + env.ExecuteWorkflow(controller.PollRedHatCVEsWorkflow) + require.Nil(t, env.GetWorkflowError()) - cves, _ := cronInstance.db.GetAllUnresolvedCVEs() + cves, _ := controller.db.GetAllUnresolvedCVEs() require.Len(t, cves, 1) require.Equal(t, "CVE-2021-3602", cves[0].ID) - cronInstance.PollRedHatForNewCVEs() + env = getPollRedHatCVEsEnv() + env.ExecuteWorkflow(controller.PollRedHatCVEsWorkflow) + require.Nil(t, env.GetWorkflowError()) - cves, _ = cronInstance.db.GetAllUnresolvedCVEs() + cves, _ = controller.db.GetAllUnresolvedCVEs() require.Len(t, cves, 1) require.Equal(t, "CVE-2021-3602", cves[0].ID) } diff --git a/apollo/workflow/poll_mirror_errata.go b/apollo/workflow/poll_mirror_errata.go new file mode 100644 index 00000000..5a6ee616 --- /dev/null +++ b/apollo/workflow/poll_mirror_errata.go @@ -0,0 +1,289 @@ +// Copyright (c) All respective contributors to the Peridot Project. All rights reserved. +// Copyright (c) 2021-2022 Rocky Enterprise Software Foundation, Inc. All rights reserved. +// Copyright (c) 2021-2022 Ctrl IQ, Inc. All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are met: +// +// 1. Redistributions of source code must retain the above copyright notice, +// this list of conditions and the following disclaimer. +// +// 2. Redistributions in binary form must reproduce the above copyright notice, +// this list of conditions and the following disclaimer in the documentation +// and/or other materials provided with the distribution. +// +// 3. 
Neither the name of the copyright holder nor the names of its contributors +// may be used to endorse or promote products derived from this software without +// specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE +// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +// POSSIBILITY OF SUCH DAMAGE. + +package workflow + +import ( + "context" + "database/sql" + "encoding/json" + "fmt" + "github.com/jmoiron/sqlx/types" + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + "go.temporal.io/sdk/activity" + "go.temporal.io/sdk/workflow" + apollodb "peridot.resf.org/apollo/db" + apollopb "peridot.resf.org/apollo/pb" + "peridot.resf.org/apollo/rpmutils" + "peridot.resf.org/utils" + "strconv" + "strings" + "time" +) + +func (c *Controller) processErrataShortCodeProduct(shortCode *apollodb.ShortCode, product *apollodb.Product) error { + if !product.RedHatMajorVersion.Valid { + return nil + } + if !strings.HasPrefix(product.Name, product.RedHatProductPrefix.String) { + return nil + } + + ignoredUpstreamPackages, err := c.db.GetAllIgnoredPackagesByProductID(product.ID) + if err != nil { + logrus.Errorf("could not get ignored packages: %v", err) + return fmt.Errorf("could not get ignored packages") + } + + var lastSync *time.Time + mirrorState, err := c.db.GetMirrorState(shortCode.Code) + if err == nil { + if mirrorState.ErrataAfter.Valid { + lastSync = &mirrorState.ErrataAfter.Time + } + } + + advisories, err := c.errata.GetAdvisories(product.CurrentFullVersion, lastSync) + if err != nil { + logrus.Errorf("Could not get Red Hat Advisories: %v", err) + return fmt.Errorf("could not get Red Hat Advisories") + } + + var newLastSync *time.Time + + for _, advisory := range advisories { + if newLastSync == nil { + parsedTime, err := time.Parse(time.RFC3339, advisory.PublicationDate) + if err == nil { + newLastSync = &parsedTime + _ = c.db.UpdateMirrorStateErrata(shortCode.Code, newLastSync) + } + } + + advisoryId := rpmutils.AdvisoryId().FindStringSubmatch(advisory.Name) + if len(advisoryId) < 5 { + logrus.Errorf("Invalid advisory %s", advisory.Name) + return nil + } + code := advisoryId[1] + year, err := strconv.Atoi(advisoryId[3]) + if err != nil { + logrus.Errorf("Invalid advisory %s", advisory.Name) + return nil + } + num, err := strconv.Atoi(advisoryId[4]) + if err != nil { + logrus.Errorf("Invalid advisory %s", advisory.Name) + return nil + } + + beginTx, err := c.db.Begin() + if err != nil { + logrus.Errorf("Could not begin tx: %v", err) + return fmt.Errorf("could not begin tx") + } + tx := c.db.UseTransaction(beginTx) + + _, err = tx.GetAdvisoryByCodeAndYearAndNum(code, year, num) + if err != nil { + if err == sql.ErrNoRows { + // If security then just add CVEs, the rest should be automatic + if strings.HasPrefix(advisory.Name, "RHSA") { + for _, cve := range advisory.CVEs { + _, err := tx.GetCVEByID(cve) + 
if err == nil { + continue + } + if err != sql.ErrNoRows { + logrus.Errorf("an unknown error occurred: %v", err) + return fmt.Errorf("an unknown error occurred") + } + + sourceBy := "Red Hat" + resourceUrl := fmt.Sprintf("https://access.redhat.com/hydra/rest/securitydata/cve/%s.json", cve) + + cveRh, _, err := c.security.GetCveExecute(c.security.GetCve(context.TODO(), cve)) + if err != nil { + return errors.Wrap(err, "could not get cve") + } + cveBytes, err := json.Marshal(cveRh) + if err != nil { + return fmt.Errorf("could not marshal cve: %v", err) + } + + _, err = tx.CreateCVE(cve, shortCode.Code, &sourceBy, &resourceUrl, types.NullJSONText{Valid: true, JSONText: cveBytes}) + if err != nil { + logrus.Errorf("could not create cve: %v", err) + _ = beginTx.Rollback() + return fmt.Errorf("could not create cve") + } + logrus.Infof("Added %s to %s (%s)", cve, shortCode.Code, advisory.Name) + } + } else if strings.HasPrefix(advisory.Name, "RHBA") || strings.HasPrefix(advisory.Name, "RHEA") { + doRollback := false + _, err := tx.GetAffectedProductByAdvisory(advisory.Name) + if err != nil { + if err == sql.ErrNoRows { + _, err := tx.GetCVEByID(advisory.Name) + if err == nil { + continue + } + if err != sql.ErrNoRows { + logrus.Errorf("an unknown error occurred: %v", err) + return fmt.Errorf("an unknown error occurred") + } + + sourceBy := "Red Hat" + resourceUrl := fmt.Sprintf("https://access.redhat.com/errata/%s", advisory.Name) + _, err = tx.CreateCVE(advisory.Name, product.ShortCode, &sourceBy, &resourceUrl, types.NullJSONText{}) + if err != nil { + logrus.Errorf("Could not create cve: %v", err) + _ = beginTx.Rollback() + continue + } + + for _, srpm := range advisory.AffectedPackages { + if !strings.Contains(srpm, ".src.rpm") { + continue + } + + pkg := strings.Replace(srpm, ".src.rpm", "", 1) + + nvr := rpmutils.NVR().FindStringSubmatch(pkg) + var packageName string + if len(nvr) >= 2 { + packageName = nvr[1] + } else { + packageName = pkg + } + if utils.StrContains(packageName, ignoredUpstreamPackages) { + continue + } + dist := fmt.Sprintf("el%d", product.RedHatMajorVersion.Int32) + if !strings.Contains(pkg, dist) { + continue + } + if strings.Contains(pkg, dist+"sat") { + continue + } + _, err := tx.CreateAffectedProduct(product.ID, advisory.Name, int(apollopb.AffectedProduct_STATE_FIXED_UPSTREAM), product.CurrentFullVersion, pkg, &advisory.Name) + if err != nil { + logrus.Errorf("Could not create affected product for srpm: %v", err) + doRollback = true + break + } + } + if doRollback { + _ = beginTx.Rollback() + continue + } + logrus.Infof("Added %s to %s", advisory.Name, shortCode.Code) + } else { + logrus.Errorf("Could not get affected product by advisory: %v", err) + continue + } + } + } + } else { + logrus.Errorf("Could not fetch advisory: %v", err) + continue + } + } + + err = beginTx.Commit() + if err != nil { + logrus.Errorf("Could not commit new advisory tx: %v", err) + continue + } + } + + return nil +} + +func (c *Controller) ProcessRedHatErrataShortCodeActivity(ctx context.Context, shortCode *apollodb.ShortCode) error { + go func() { + for { + activity.RecordHeartbeat(ctx) + time.Sleep(10 * time.Second) + } + }() + + if int32(shortCode.Mode) != int32(apollopb.ShortCode_MODE_MIRROR) { + // This is not a mirrored short code, continue + return nil + } + + allProducts, err := c.db.GetProductsByShortCode(shortCode.Code) + if err != nil { + logrus.Errorf("could not get all products for code %s: %v", shortCode.Code, err) + return fmt.Errorf("could not get all products for code %s", 
shortCode.Code) + } + + for _, product := range allProducts { + err := c.processErrataShortCodeProduct(shortCode, product) + if err != nil { + return err + } + } + + return nil +} + +func (c *Controller) PollRedHatErrataWorkflow(ctx workflow.Context) error { + shortCodeCtx := workflow.WithActivityOptions(ctx, workflow.ActivityOptions{ + ScheduleToStartTimeout: time.Minute, + StartToCloseTimeout: time.Minute, + }) + var shortCodeRes ShortCodesRes + err := workflow.ExecuteActivity(shortCodeCtx, c.GetAllShortCodesActivity).Get(ctx, &shortCodeRes) + if err != nil { + return err + } + + var futures []workflow.Future + for _, shortCode := range shortCodeRes.ShortCodes { + activityCtx := workflow.WithActivityOptions(ctx, workflow.ActivityOptions{ + ScheduleToStartTimeout: 5 * time.Minute, + StartToCloseTimeout: 12 * time.Hour, + HeartbeatTimeout: 30 * time.Second, + }) + futures = append(futures, workflow.ExecuteActivity(activityCtx, c.ProcessRedHatErrataShortCodeActivity, shortCode)) + } + + for _, future := range futures { + err := future.Get(ctx, nil) + if err != nil { + return err + } + } + + return nil +} diff --git a/secparse/cron/scan_redhat_errata_test.go b/apollo/workflow/poll_mirror_errata_test.go similarity index 84% rename from secparse/cron/scan_redhat_errata_test.go rename to apollo/workflow/poll_mirror_errata_test.go index 712b6a52..0efc7459 100644 --- a/secparse/cron/scan_redhat_errata_test.go +++ b/apollo/workflow/poll_mirror_errata_test.go @@ -28,16 +28,24 @@ // ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE // POSSIBILITY OF SUCH DAMAGE. -package cron +package workflow import ( "github.com/stretchr/testify/require" - secparseadminpb "peridot.resf.org/secparse/admin/proto/v1" - "peridot.resf.org/secparse/db" - "peridot.resf.org/secparse/rherrata" + "go.temporal.io/sdk/testsuite" + apollopb "peridot.resf.org/apollo/pb" + "peridot.resf.org/apollo/rherrata" "testing" ) +func getPollRedHatErrataEnv() *testsuite.TestWorkflowEnvironment { + env := testWfSuite.NewTestWorkflowEnvironment() + env.RegisterActivity(controller.GetAllShortCodesActivity) + env.RegisterActivity(controller.ProcessRedHatErrataShortCodeActivity) + + return env +} + func TestInstance_ScanRedHatErrata_RHSA20212595_Security_CVE(t *testing.T) { resetDb() @@ -77,16 +85,18 @@ func TestInstance_ScanRedHatErrata_RHSA20212595_Security_CVE(t *testing.T) { }, } - cronInstance.ScanRedHatErrata() + env := getPollRedHatErrataEnv() + env.ExecuteWorkflow(controller.PollRedHatErrataWorkflow) + require.Nil(t, env.GetWorkflowError()) - cves, _ := cronInstance.db.GetAllUnresolvedCVEs() + cves, _ := controller.db.GetAllUnresolvedCVEs() require.Len(t, cves, 1) require.Equal(t, "CVE-2021-3514", cves[0].ID) - - mockDb.Cves = []*db.CVE{} } func TestInstance_ScanRedHatErrata_BugFix_Erratum(t *testing.T) { + resetDb() + errataMock.Advisories.Response.Docs = []*rherrata.CompactErrata{ { Name: "RHBA-2021:2593", @@ -116,14 +126,16 @@ func TestInstance_ScanRedHatErrata_BugFix_Erratum(t *testing.T) { }, } - cronInstance.ScanRedHatErrata() + env := getPollRedHatErrataEnv() + env.ExecuteWorkflow(controller.PollRedHatErrataWorkflow) + require.Nil(t, env.GetWorkflowError()) cves := mockDb.Cves require.Len(t, cves, 1) require.Equal(t, "RHBA-2021:2593", cves[0].ID) - affectedProducts, _ := cronInstance.db.GetAllAffectedProductsByCVE(cves[0].ID) + affectedProducts, _ := controller.db.GetAllAffectedProductsByCVE(cves[0].ID) require.Len(t, affectedProducts, 1) require.Equal(t, "cmake-3.18.2-11.el8_4", 
affectedProducts[0].Package) - require.Equal(t, int(secparseadminpb.AffectedProductState_FixedUpstream), affectedProducts[0].State) + require.Equal(t, int(apollopb.AffectedProduct_STATE_FIXED_UPSTREAM), affectedProducts[0].State) } diff --git a/apollo/workflow/testdata/CVE-2021-3602.json b/apollo/workflow/testdata/CVE-2021-3602.json new file mode 100644 index 00000000..6a30594f --- /dev/null +++ b/apollo/workflow/testdata/CVE-2021-3602.json @@ -0,0 +1,112 @@ +{ + "threat_severity": "Moderate", + "public_date": "2021-07-15T14:00:00Z", + "bugzilla": { + "description": "CVE-2021-3602 buildah: Host environment variables leaked in build container when using chroot isolation", + "id": "1969264", + "url": "https://bugzilla.redhat.com/show_bug.cgi?id=1969264" + }, + "cvss3": { + "cvss3_base_score": "5.6", + "cvss3_scoring_vector": "CVSS:3.1/AV:L/AC:H/PR:L/UI:N/S:C/C:H/I:N/A:N", + "status": "verified" + }, + "cwe": "CWE-200", + "details": [ + "An information disclosure flaw was found in Buildah, when building containers using chroot isolation. Running processes in container builds (e.g. Dockerfile RUN commands) can access environment variables from parent and grandparent processes. When run in a container in a CI/CD environment, environment variables may include sensitive information that was shared with the container in order to be used only by Buildah itself (e.g. container registry credentials).", + "An information disclosure flaw was found in Buildah, when building containers using chroot isolation. Running processes in container builds (e.g. Dockerfile RUN commands) can access environment variables from parent and grandparent processes. When run in a container in a CI/CD environment, environment variables may include sensitive information that was shared with the container in order to be used only by Buildah itself (e.g. container registry credentials)." + ], + "statement": "OpenShift Container Platform 4's builder container is not vulnerable to this flaw as it uses OCI isolation (i.e. 
using runc) and does not use chroot isolation.", + "acknowledgement": "Red Hat would like to thank Blake Burkhart for reporting this issue.", + "affected_release": [ + { + "product_name": "Red Hat Enterprise Linux 8", + "release_date": "2021-11-09T00:00:00Z", + "advisory": "RHSA-2021:4154", + "cpe": "cpe:/a:redhat:enterprise_linux:8", + "package": "container-tools:rhel8-8050020210921082437.faa19cc5" + }, + { + "product_name": "Red Hat Enterprise Linux 8", + "release_date": "2021-11-09T00:00:00Z", + "advisory": "RHSA-2021:4221", + "cpe": "cpe:/a:redhat:enterprise_linux:8", + "package": "container-tools:2.0-8050020210817115648.faa19cc5" + }, + { + "product_name": "Red Hat Enterprise Linux 8", + "release_date": "2021-11-09T00:00:00Z", + "advisory": "RHSA-2021:4222", + "cpe": "cpe:/a:redhat:enterprise_linux:8", + "package": "container-tools:3.0-8050020210915114620.faa19cc5" + } + ], + "package_state": [ + { + "product_name": "Red Hat Enterprise Linux 7", + "fix_state": "Out of support scope", + "package_name": "buildah", + "cpe": "cpe:/o:redhat:enterprise_linux:7" + }, + { + "product_name": "Red Hat Enterprise Linux 7", + "fix_state": "Out of support scope", + "package_name": "podman", + "cpe": "cpe:/o:redhat:enterprise_linux:7" + }, + { + "product_name": "Red Hat Enterprise Linux 9", + "fix_state": "Not affected", + "package_name": "buildah", + "cpe": "cpe:/o:redhat:enterprise_linux:9" + }, + { + "product_name": "Red Hat Enterprise Linux 9", + "fix_state": "Not affected", + "package_name": "podman", + "cpe": "cpe:/o:redhat:enterprise_linux:9" + }, + { + "product_name": "Red Hat OpenShift Container Platform 3.11", + "fix_state": "Will not fix", + "package_name": "podman", + "cpe": "cpe:/a:redhat:openshift:3.11" + }, + { + "product_name": "Red Hat OpenShift Container Platform 4", + "fix_state": "Will not fix", + "package_name": "buildah", + "cpe": "cpe:/a:redhat:openshift:4" + }, + { + "product_name": "Red Hat OpenShift Container Platform 4", + "fix_state": "Not affected", + "package_name": "cri-o", + "cpe": "cpe:/a:redhat:openshift:4" + }, + { + "product_name": "Red Hat OpenShift Container Platform 4", + "fix_state": "Not affected", + "package_name": "openshift4/ose-docker-builder", + "cpe": "cpe:/a:redhat:openshift:4" + }, + { + "product_name": "Red Hat OpenShift Container Platform 4", + "fix_state": "Will not fix", + "package_name": "podman", + "cpe": "cpe:/a:redhat:openshift:4" + }, + { + "product_name": "Red Hat Quay 3", + "fix_state": "Affected", + "package_name": "quay/quay-builder-rhel8", + "cpe": "cpe:/a:redhat:quay:3" + } + ], + "upstream_fix": "buildah 1.16.8, buildah 1.19.9, buildah 1.17.2, buildah 1.21.3", + "references": [ + "https://github.com/containers/buildah/security/advisories/GHSA-7638-r9r3-rmjj" + ], + "name": "CVE-2021-3602", + "csaw": false +} diff --git a/secparse/cron/testdata/RHBA-2021-2593.html b/apollo/workflow/testdata/RHBA-2021-2593.html similarity index 96% rename from secparse/cron/testdata/RHBA-2021-2593.html rename to apollo/workflow/testdata/RHBA-2021-2593.html index 28a940c1..38b30150 100644 --- a/secparse/cron/testdata/RHBA-2021-2593.html +++ b/apollo/workflow/testdata/RHBA-2021-2593.html @@ -1,35 +1,3 @@ - - +
+
+ + diff --git a/secparse/cron/update_cve_state.go b/apollo/workflow/update_cve_state.go similarity index 54% rename from secparse/cron/update_cve_state.go rename to apollo/workflow/update_cve_state.go index 84fe3f5a..1437ea01 100644 --- a/secparse/cron/update_cve_state.go +++ b/apollo/workflow/update_cve_state.go @@ -28,30 +28,45 @@ // ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE // POSSIBILITY OF SUCH DAMAGE. -package cron +package workflow import ( "context" + "fmt" "github.com/sirupsen/logrus" - secparseadminpb "peridot.resf.org/secparse/admin/proto/v1" - "peridot.resf.org/secparse/db" + "go.temporal.io/sdk/activity" + "go.temporal.io/sdk/workflow" + apollodb "peridot.resf.org/apollo/db" + apollopb "peridot.resf.org/apollo/pb" + "strings" + "time" ) -func (i *Instance) UpdateCVEState() { - cves, err := i.db.GetAllUnresolvedCVEs() +func (c *Controller) UpdateCVEStateActivity(ctx context.Context) error { + go func() { + for { + activity.RecordHeartbeat(ctx) + time.Sleep(10 * time.Second) + } + }() + + cves, err := c.db.GetAllUnresolvedCVEs() if err != nil { - logrus.Errorf("could not get unresolved cves: %v", err) - return + c.log.Errorf("could not get unresolved cves: %v", err) + return fmt.Errorf("could not get unresolved cves") } - shortCodeBuffer := map[string]*db.ShortCode{} - productBuffer := map[string][]*db.Product{} - ignoredPackagesBuffer := map[string][]string{} + shortCodeBuffer := map[string]*apollodb.ShortCode{} + productBuffer := map[string][]*apollodb.Product{} + ignoredPackagesBuffer := map[int64][]string{} - ctx := context.TODO() for _, cve := range cves { + if !strings.HasPrefix(cve.ID, "CVE") { + continue + } + if shortCodeBuffer[cve.ShortCode] == nil { - shortCode, err := i.db.GetShortCodeByCode(cve.ShortCode) + shortCode, err := c.db.GetShortCodeByCode(cve.ShortCode) if err != nil { logrus.Errorf("could not get short code: %v", err) continue @@ -62,7 +77,7 @@ func (i *Instance) UpdateCVEState() { shortCode := shortCodeBuffer[cve.ShortCode] if productBuffer[shortCode.Code] == nil { - products, err := i.db.GetProductsByShortCode(shortCode.Code) + products, err := c.db.GetProductsByShortCode(shortCode.Code) if err != nil { logrus.Errorf("could not get products for code: %s: %v", shortCode.Code, err) continue @@ -71,53 +86,62 @@ func (i *Instance) UpdateCVEState() { } products := productBuffer[shortCode.Code] - if ignoredPackagesBuffer[shortCode.Code] == nil { - ignoredUpstreamPackages, err := i.db.GetAllIgnoredPackagesByShortCode(shortCode.Code) - if err != nil { - logrus.Errorf("could not get ignored packages: %v", err) - continue - } - ignoredPackagesBuffer[shortCode.Code] = ignoredUpstreamPackages - } - ignoredUpstreamPackages := ignoredPackagesBuffer[shortCode.Code] - - cveRh, _, err := i.api.GetCveExecute(i.api.GetCve(ctx, cve.ID)) + // Please do not simplify next statement + // During testing we're mocking pagination as well, and this is the + // easiest way to "wrap" and represent a new request restarting it from page 1 + cveRh, _, err := c.security.GetCveExecute(c.security.GetCve(ctx, cve.ID)) if err != nil { logrus.Errorf("could not retrieve new state for %s from Red Hat: %v", cve.ID, err) continue } for _, product := range products { + if ignoredPackagesBuffer[product.ID] == nil { + ignoredUpstreamPackages, err := c.db.GetAllIgnoredPackagesByProductID(product.ID) + if err != nil { + logrus.Errorf("could not get ignored packages: %v", err) + continue + } + ignoredPackagesBuffer[product.ID] = ignoredUpstreamPackages + } + 
ignoredUpstreamPackages := ignoredPackagesBuffer[product.ID] + pName := productName(product.RedHatMajorVersion.Int32) - beginTx, err := i.db.Begin() + beginTx, err := c.db.Begin() if err != nil { - logrus.Errorf("could not begin transaction: %v", err) + c.log.Errorf("could not begin transaction: %v", err) continue } - tx := i.db.UseTransaction(beginTx) + tx := c.db.UseTransaction(beginTx) skipCve := false + defer func() { + if skipCve { + _ = beginTx.Rollback() + } + }() + if cveRh.AffectedRelease != nil { for _, state := range *cveRh.AffectedRelease { - if state.ProductName == pName { - st := secparseadminpb.AffectedProductState_FixedUpstream + if (product.Cpe.Valid && state.Cpe == product.Cpe.String) || state.ProductName == pName { + st := apollopb.AffectedProduct_STATE_FIXED_UPSTREAM packageName := "TBD" if state.Package != nil { packageName = *state.Package - match, err := i.checkForIgnoredPackage(ignoredUpstreamPackages, packageName) + match, err := c.checkForIgnoredPackage(ignoredUpstreamPackages, packageName) if err != nil { - logrus.Errorf("Invalid glob: %v", err) + c.log.Errorf("Invalid glob: %v", err) continue } if match { - st = secparseadminpb.AffectedProductState_UnknownProductState + st = apollopb.AffectedProduct_STATE_UNKNOWN } } else { - st = secparseadminpb.AffectedProductState_UnknownProductState + st = apollopb.AffectedProduct_STATE_UNKNOWN } - skipCve = i.checkProduct(tx, cve, shortCode, product, st, packageName, &state.Advisory) + skipCve = c.checkProduct(tx, cve, shortCode, product, st, packageName, &state.Advisory) if skipCve { break } @@ -126,22 +150,22 @@ func (i *Instance) UpdateCVEState() { } if cveRh.PackageState != nil { for _, state := range *cveRh.PackageState { - if state.ProductName == pName { + if (product.Cpe.Valid && state.Cpe == product.Cpe.String) || state.ProductName == pName { pState := productState(state.FixState) packageName := "TBD" if state.PackageName != "" { packageName = state.PackageName - match, err := i.checkForIgnoredPackage(ignoredUpstreamPackages, packageName) + match, err := c.checkForIgnoredPackage(ignoredUpstreamPackages, packageName) if err != nil { - logrus.Errorf("Invalid glob: %v", err) + c.log.Errorf("Invalid glob: %v", err) continue } if match { - pState = secparseadminpb.AffectedProductState_UnknownProductState + pState = apollopb.AffectedProduct_STATE_UNKNOWN } } - skipCve = i.checkProduct(tx, cve, shortCode, product, pState, packageName, nil) + skipCve = c.checkProduct(tx, cve, shortCode, product, pState, packageName, nil) if skipCve { break } @@ -149,16 +173,22 @@ func (i *Instance) UpdateCVEState() { } } - if skipCve { - _ = beginTx.Rollback() - continue - } - err = beginTx.Commit() if err != nil { - logrus.Errorf("could not commit transaction: %v", err) + c.log.Errorf("could not commit transaction: %v", err) continue } } } + + return nil +} + +func (c *Controller) UpdateCVEStateWorkflow(ctx workflow.Context) error { + activityCtx := workflow.WithActivityOptions(ctx, workflow.ActivityOptions{ + ScheduleToStartTimeout: 5 * time.Minute, + StartToCloseTimeout: 12 * time.Hour, + HeartbeatTimeout: 30 * time.Second, + }) + return workflow.ExecuteActivity(activityCtx, c.UpdateCVEStateActivity).Get(ctx, nil) } diff --git a/secparse/cron/update_cve_state_test.go b/apollo/workflow/update_cve_state_test.go similarity index 80% rename from secparse/cron/update_cve_state_test.go rename to apollo/workflow/update_cve_state_test.go index 847dce3d..4d5c421b 100644 --- a/secparse/cron/update_cve_state_test.go +++ 
b/apollo/workflow/update_cve_state_test.go @@ -28,19 +28,27 @@ // ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE // POSSIBILITY OF SUCH DAMAGE. -package cron +package workflow import ( "github.com/stretchr/testify/require" - secparseadminpb "peridot.resf.org/secparse/admin/proto/v1" - "peridot.resf.org/secparse/rhsecurity" + "go.temporal.io/sdk/testsuite" + apollopb "peridot.resf.org/apollo/pb" + "peridot.resf.org/apollo/rhsecurity" "testing" ) +func getUpdateCVEStateEnv() *testsuite.TestWorkflowEnvironment { + env := getPollRedHatCVEsEnv() + env.RegisterActivity(controller.UpdateCVEStateActivity) + + return env +} + func TestInstance_UpdateCVEState_AffectedUpstream(t *testing.T) { resetDb() - securityMock.Cves = []*rhsecurity.CVE{ + securityMock.Cves[1] = []*rhsecurity.CVE{ { CVE: "CVE-2021-3602", Severity: "moderate", @@ -76,17 +84,22 @@ func TestInstance_UpdateCVEState_AffectedUpstream(t *testing.T) { }, } - cronInstance.PollRedHatForNewCVEs() - cronInstance.UpdateCVEState() + env := getUpdateCVEStateEnv() + env.ExecuteWorkflow(controller.PollRedHatCVEsWorkflow) + require.Nil(t, env.GetWorkflowError()) + + env = getUpdateCVEStateEnv() + env.ExecuteWorkflow(controller.UpdateCVEStateWorkflow) + require.Nil(t, env.GetWorkflowError()) require.Len(t, mockDb.AffectedProducts, 1) - require.Equal(t, int(secparseadminpb.AffectedProductState_AffectedUpstream), mockDb.AffectedProducts[0].State) + require.Equal(t, int(apollopb.AffectedProduct_STATE_AFFECTED_UPSTREAM), mockDb.AffectedProducts[0].State) } func TestInstance_UpdateCVEState_WillNotFixUpstream(t *testing.T) { resetDb() - securityMock.Cves = []*rhsecurity.CVE{ + securityMock.Cves[1] = []*rhsecurity.CVE{ { CVE: "CVE-2021-3602", Severity: "moderate", @@ -122,17 +135,22 @@ func TestInstance_UpdateCVEState_WillNotFixUpstream(t *testing.T) { }, } - cronInstance.PollRedHatForNewCVEs() - cronInstance.UpdateCVEState() + env := getUpdateCVEStateEnv() + env.ExecuteWorkflow(controller.PollRedHatCVEsWorkflow) + require.Nil(t, env.GetWorkflowError()) + + env = getUpdateCVEStateEnv() + env.ExecuteWorkflow(controller.UpdateCVEStateWorkflow) + require.Nil(t, env.GetWorkflowError()) require.Len(t, mockDb.AffectedProducts, 1) - require.Equal(t, int(secparseadminpb.AffectedProductState_WillNotFixUpstream), mockDb.AffectedProducts[0].State) + require.Equal(t, int(apollopb.AffectedProduct_STATE_WILL_NOT_FIX_UPSTREAM), mockDb.AffectedProducts[0].State) } func TestInstance_UpdateCVEState_NotAffectedUpstream(t *testing.T) { resetDb() - securityMock.Cves = []*rhsecurity.CVE{ + securityMock.Cves[1] = []*rhsecurity.CVE{ { CVE: "CVE-2021-3602", Severity: "moderate", @@ -168,8 +186,13 @@ func TestInstance_UpdateCVEState_NotAffectedUpstream(t *testing.T) { }, } - cronInstance.PollRedHatForNewCVEs() - cronInstance.UpdateCVEState() + env := getUpdateCVEStateEnv() + env.ExecuteWorkflow(controller.PollRedHatCVEsWorkflow) + require.Nil(t, env.GetWorkflowError()) + + env = getUpdateCVEStateEnv() + env.ExecuteWorkflow(controller.UpdateCVEStateWorkflow) + require.Nil(t, env.GetWorkflowError()) require.Len(t, mockDb.AffectedProducts, 0) } @@ -177,7 +200,7 @@ func TestInstance_UpdateCVEState_NotAffectedUpstream(t *testing.T) { func TestInstance_UpdateCVEState_OutOfSupportScope(t *testing.T) { resetDb() - securityMock.Cves = []*rhsecurity.CVE{ + securityMock.Cves[1] = []*rhsecurity.CVE{ { CVE: "CVE-2021-3602", Severity: "moderate", @@ -213,9 +236,14 @@ func TestInstance_UpdateCVEState_OutOfSupportScope(t *testing.T) { }, } - 
cronInstance.PollRedHatForNewCVEs() - cronInstance.UpdateCVEState() + env := getUpdateCVEStateEnv() + env.ExecuteWorkflow(controller.PollRedHatCVEsWorkflow) + require.Nil(t, env.GetWorkflowError()) + + env = getUpdateCVEStateEnv() + env.ExecuteWorkflow(controller.UpdateCVEStateWorkflow) + require.Nil(t, env.GetWorkflowError()) require.Len(t, mockDb.AffectedProducts, 1) - require.Equal(t, int(secparseadminpb.AffectedProductState_OutOfSupportScope), mockDb.AffectedProducts[0].State) + require.Equal(t, int(apollopb.AffectedProduct_STATE_OUT_OF_SUPPORT_SCOPE), mockDb.AffectedProducts[0].State) } diff --git a/secparse/cron/cron.go b/apollo/workflow/workflow.go similarity index 64% rename from secparse/cron/cron.go rename to apollo/workflow/workflow.go index 627c4aa8..853e4c2f 100644 --- a/secparse/cron/cron.go +++ b/apollo/workflow/workflow.go @@ -28,7 +28,7 @@ // ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE // POSSIBILITY OF SUCH DAMAGE. -package cron +package workflow import ( "database/sql" @@ -36,67 +36,75 @@ import ( "github.com/gobwas/glob" "github.com/sirupsen/logrus" "github.com/spf13/viper" + "go.temporal.io/sdk/client" + apollodb "peridot.resf.org/apollo/db" + apollopb "peridot.resf.org/apollo/pb" + "peridot.resf.org/apollo/rherrata" + "peridot.resf.org/apollo/rhsecurity" + "peridot.resf.org/apollo/rpmutils" "peridot.resf.org/koji" - secparseadminpb "peridot.resf.org/secparse/admin/proto/v1" - "peridot.resf.org/secparse/db" - "peridot.resf.org/secparse/rherrata" - "peridot.resf.org/secparse/rhsecurity" - "peridot.resf.org/secparse/rpmutils" - "regexp" + "peridot.resf.org/utils" "strconv" "strings" ) -type Instance struct { - db db.Access - api rhsecurity.DefaultApi - errata rherrata.APIService +var forceKoji koji.API - koji koji.API - kojiCompose string - kojiModuleCompose string +type Controller struct { + log *logrus.Logger + temporal client.Client + db apollodb.Access + mainQueue string - nvr *regexp.Regexp - epoch *regexp.Regexp - module *regexp.Regexp - dist *regexp.Regexp - moduleDist *regexp.Regexp - advisoryIdRegex *regexp.Regexp + errata rherrata.APIService + security rhsecurity.DefaultApi + + vendor string } -type BuildStatus int +type Koji struct { + Endpoint string + Compose string + ModuleCompose string +} -const ( - Fixed BuildStatus = iota - NotFixed - WillNotFix - Skip -) +type NewControllerInput struct { + Temporal client.Client + Database apollodb.Access + MainQueue string +} -func New(access db.Access) (*Instance, error) { - instance := &Instance{ - db: access, - api: rhsecurity.NewAPIClient(rhsecurity.NewConfiguration()).DefaultApi, - errata: rherrata.NewClient(), - nvr: rpmutils.NVR(), - epoch: rpmutils.Epoch(), - module: rpmutils.Module(), - dist: rpmutils.Dist(), - moduleDist: rpmutils.ModuleDist(), - advisoryIdRegex: rpmutils.AdvisoryId(), +type Option func(c *Controller) + +func WithSecurityAPI(api rhsecurity.DefaultApi) Option { + return func(c *Controller) { + c.security = api + } +} + +func WithErrataAPI(api rherrata.APIService) Option { + return func(c *Controller) { + c.errata = api + } +} + +// NewController returns a new workflow controller. It is the entry point for the Temporal worker. +// Usually each project share a common controller with different workflows and activities enabled +// in the `cmd` package. 
+func NewController(input *NewControllerInput, opts ...Option) (*Controller, error) { + c := &Controller{ + log: logrus.New(), + temporal: input.Temporal, + db: input.Database, + mainQueue: input.MainQueue, + vendor: viper.GetString("vendor"), } - if kojiEndpoint := viper.GetString("koji-endpoint"); kojiEndpoint != "" { - var err error - instance.koji, err = koji.New(kojiEndpoint) - if err != nil { - return nil, err - } - instance.kojiCompose = viper.GetString("koji-compose") - instance.kojiModuleCompose = viper.GetString("koji-module-compose") + for _, opt := range opts { + opt(c) } - return instance, nil + return c, nil } // productName simply appends major version to `Red Hat Enterprise Linux` @@ -129,29 +137,29 @@ func affectedProductNameForArchAndVersion(arch string, majorVersion int32) strin } // productState returns appropriate proto type for string states -func productState(state string) secparseadminpb.AffectedProductState { +func productState(state string) apollopb.AffectedProduct_State { switch state { case "Under investigation": - return secparseadminpb.AffectedProductState_UnderInvestigationUpstream + return apollopb.AffectedProduct_STATE_UNDER_INVESTIGATION_UPSTREAM case "Not affected": - return secparseadminpb.AffectedProductState_UnknownProductState + return apollopb.AffectedProduct_STATE_UNKNOWN case "Will not fix": - return secparseadminpb.AffectedProductState_WillNotFixUpstream + return apollopb.AffectedProduct_STATE_WILL_NOT_FIX_UPSTREAM case "Out of support scope": - return secparseadminpb.AffectedProductState_OutOfSupportScope + return apollopb.AffectedProduct_STATE_OUT_OF_SUPPORT_SCOPE case "Affected": - return secparseadminpb.AffectedProductState_AffectedUpstream + return apollopb.AffectedProduct_STATE_AFFECTED_UPSTREAM default: - return secparseadminpb.AffectedProductState_UnderInvestigationUpstream + return apollopb.AffectedProduct_STATE_UNDER_INVESTIGATION_UPSTREAM } } // checkProduct is used to check and validate CVE package states and releases -func (i *Instance) checkProduct(tx db.Access, cve *db.CVE, shortCode *db.ShortCode, product *db.Product, productState secparseadminpb.AffectedProductState, packageName string, advisory *string) bool { +func (c *Controller) checkProduct(tx apollodb.Access, cve *apollodb.CVE, shortCode *apollodb.ShortCode, product *apollodb.Product, productState apollopb.AffectedProduct_State, packageName string, advisory *string) bool { // Re-create a valid product name using the short code prefix and major version. // Example: Red Hat Enterprise Linux 8 translates to Rocky Linux 8 for the short code `RL`. 
- // Check `//secparse:seed.sql` for more info - mirrorProductName := fmt.Sprintf("%s %d", shortCode.RedHatProductPrefix.String, product.RedHatMajorVersion.Int32) + // Check `//apollo:seed.sql` for more info + mirrorProductName := fmt.Sprintf("%s %d", product.RedHatProductPrefix.String, product.RedHatMajorVersion.Int32) // Get the affected product if exists affectedProduct, err := tx.GetAffectedProductByCVEAndPackage(cve.ID, packageName) @@ -160,9 +168,9 @@ func (i *Instance) checkProduct(tx db.Access, cve *db.CVE, shortCode *db.ShortCo if err == sql.ErrNoRows { // Check if the current package name matches an NVR and if we have a non-NVR variant skipCreate := false - epochlessPackage := i.epoch.ReplaceAllString(packageName, "") - if i.nvr.MatchString(epochlessPackage) { - nvr := i.nvr.FindStringSubmatch(epochlessPackage) + epochlessPackage := rpmutils.Epoch().ReplaceAllString(packageName, "") + if rpmutils.NVR().MatchString(epochlessPackage) { + nvr := rpmutils.NVR().FindStringSubmatch(epochlessPackage) affectedProduct, err = tx.GetAffectedProductByCVEAndPackage(cve.ID, nvr[1]) if err == nil { skipCreate = true @@ -188,7 +196,7 @@ func (i *Instance) checkProduct(tx db.Access, cve *db.CVE, shortCode *db.ShortCo // If product state isn't set to unknown (usually when product isn't affected) // create a new affected product entry for the CVE - if productState != secparseadminpb.AffectedProductState_UnknownProductState { + if productState != apollopb.AffectedProduct_STATE_UNKNOWN { affectedProduct, err = tx.CreateAffectedProduct(product.ID, cve.ID, int(productState), product.CurrentFullVersion, packageName, advisory) if err != nil { logrus.Errorf("could not create affected product: %v", err) @@ -206,14 +214,14 @@ func (i *Instance) checkProduct(tx db.Access, cve *db.CVE, shortCode *db.ShortCo // We don't use else because this may change if a non-NVR variant is found if err == nil { // If the state isn't set to unknown (it is then usually queued for deletion) - if productState != secparseadminpb.AffectedProductState_UnknownProductState { + if productState != apollopb.AffectedProduct_STATE_UNKNOWN { // If it's already in that state, skip if int(productState) == affectedProduct.State { return true } // If the affected product is set to FixedDownstream and we're trying to set it to FixedUpstream, skip - if affectedProduct.State == int(secparseadminpb.AffectedProductState_FixedDownstream) && productState == secparseadminpb.AffectedProductState_FixedUpstream { + if affectedProduct.State == int(apollopb.AffectedProduct_STATE_FIXED_DOWNSTREAM) && productState == apollopb.AffectedProduct_STATE_FIXED_UPSTREAM { return true } @@ -238,15 +246,15 @@ func (i *Instance) checkProduct(tx db.Access, cve *db.CVE, shortCode *db.ShortCo return false } -func (i *Instance) isNvrIdentical(build *koji.Build, nvr []string) bool { +func (c *Controller) isNvrIdentical(build *koji.Build, nvr []string) bool { // Join all release bits and remove the dist tag (because sometimes downstream forks do not match the upstream dist tag) // Example: Rocky Linux 8.3 initial build did not tag updated RHEL packages as el8_3, but as el8 - joinedRelease := i.dist.ReplaceAllString(strings.TrimSuffix(strings.Join(nvr[2:], "."), "."), "") + joinedRelease := rpmutils.Dist().ReplaceAllString(strings.TrimSuffix(strings.Join(nvr[2:], "."), "."), "") // Remove all module release bits (to make it possible to actually match NVR) - joinedRelease = i.moduleDist.ReplaceAllString(joinedRelease, "") + joinedRelease = 
rpmutils.ModuleDist().ReplaceAllString(joinedRelease, "") // Same operations for the build release - buildRelease := i.dist.ReplaceAllString(build.Release, "") - buildRelease = i.moduleDist.ReplaceAllString(buildRelease, "") + buildRelease := rpmutils.Dist().ReplaceAllString(build.Release, "") + buildRelease = rpmutils.ModuleDist().ReplaceAllString(buildRelease, "") // Check if package name, version matches and that the release prefix matches // The reason we're only checking for prefix in release is that downstream @@ -259,7 +267,7 @@ func (i *Instance) isNvrIdentical(build *koji.Build, nvr []string) bool { return false } -func (i *Instance) checkForIgnoredPackage(ignoredPackages []string, packageName string) (bool, error) { +func (c *Controller) checkForIgnoredPackage(ignoredPackages []string, packageName string) (bool, error) { for _, ignoredPackage := range ignoredPackages { g, err := glob.Compile(ignoredPackage) if err != nil { @@ -274,22 +282,57 @@ func (i *Instance) checkForIgnoredPackage(ignoredPackages []string, packageName return false, nil } -func (i *Instance) checkKojiForBuild(tx db.Access, ignoredPackages []string, nvrOnly string, affectedProduct *db.AffectedProduct, cve *db.CVE) BuildStatus { +func (c *Controller) checkForRebootSuggestedPackage(pkgs []string, packageName string) (bool, error) { + for _, p := range pkgs { + g, err := glob.Compile(p) + if err != nil { + return false, err + } + + if g.Match(packageName) { + return true, nil + } + } + + return false, nil +} + +func (c *Controller) checkKojiForBuild(tx apollodb.Access, ignoredPackages []string, nvrOnly string, affectedProduct *apollodb.AffectedProduct, cve *apollodb.CVE) apollopb.BuildStatus { + product, err := tx.GetProductByID(affectedProduct.ProductID) + if err != nil { + c.log.Errorf("could not get product: %v", err) + return apollopb.BuildStatus_BUILD_STATUS_SKIP + } + if product.BuildSystem != "koji" { + return apollopb.BuildStatus_BUILD_STATUS_SKIP + } + + var k koji.API + if forceKoji != nil { + k = forceKoji + } else { + k, err = koji.New(product.BuildSystemEndpoint) + if err != nil { + c.log.Errorf("could not create koji client: %v", err) + return apollopb.BuildStatus_BUILD_STATUS_SKIP + } + } + // Check if the submitted NVR is valid - nvr := i.nvr.FindStringSubmatch(nvrOnly) + nvr := rpmutils.NVR().FindStringSubmatch(nvrOnly) if len(nvr) < 3 { logrus.Errorf("Invalid NVR %s", nvrOnly) - return Skip + return apollopb.BuildStatus_BUILD_STATUS_SKIP } nvr = nvr[1:] - match, err := i.checkForIgnoredPackage(ignoredPackages, nvr[0]) + match, err := c.checkForIgnoredPackage(ignoredPackages, nvr[0]) if err != nil { logrus.Errorf("Invalid glob: %v", err) - return Skip + return apollopb.BuildStatus_BUILD_STATUS_SKIP } if match { - return WillNotFix + return apollopb.BuildStatus_BUILD_STATUS_WILL_NOT_FIX } var tagged []*koji.Build @@ -298,34 +341,34 @@ func (i *Instance) checkKojiForBuild(tx db.Access, ignoredPackages []string, nvr // rather than check in the compose tag if strings.Contains(nvrOnly, ".module") { // We need to find the package id - packageRes, err := i.koji.GetPackage(&koji.GetPackageRequest{ + packageRes, err := k.GetPackage(&koji.GetPackageRequest{ PackageName: nvr[0], }) if err != nil { logrus.Errorf("Could not get package information from Koji: %v", err) - return Skip + return apollopb.BuildStatus_BUILD_STATUS_SKIP } // Use package id to get builds - buildsRes, err := i.koji.ListBuilds(&koji.ListBuildsRequest{ + buildsRes, err := k.ListBuilds(&koji.ListBuildsRequest{ PackageID: packageRes.ID, }) 
if err != nil { logrus.Errorf("Could not get builds from Koji: %v", err) - return Skip + return apollopb.BuildStatus_BUILD_STATUS_SKIP } tagged = buildsRes.Builds } else { // Non-module packages can be queried using the list tagged operation. // We only check the compose tag - taggedRes, err := i.koji.ListTagged(&koji.ListTaggedRequest{ - Tag: i.kojiCompose, + taggedRes, err := k.ListTagged(&koji.ListTaggedRequest{ + Tag: product.KojiCompose.String, Package: nvr[0], }) if err != nil { logrus.Errorf("Could not get tagged builds for package %s: %v", nvr[0], err) - return Skip + return apollopb.BuildStatus_BUILD_STATUS_SKIP } tagged = taggedRes.Builds @@ -334,7 +377,7 @@ func (i *Instance) checkKojiForBuild(tx db.Access, ignoredPackages []string, nvr // No valid builds found usually means that we don't ship that package if len(tagged) <= 0 { logrus.Errorf("No valid builds found for package %s", nvr[0]) - return NotFixed + return apollopb.BuildStatus_BUILD_STATUS_NOT_FIXED } // Use a top-level fixed state to track if the NVR exists (at least once for modules) @@ -350,21 +393,21 @@ func (i *Instance) checkKojiForBuild(tx db.Access, ignoredPackages []string, nvr kojiNvr := fmt.Sprintf("%s-%s-%s", latestBuild.PackageName, latestBuild.Version, latestBuild.Release) // If the NVR is identical, that means that the fix has been built - if i.isNvrIdentical(latestBuild, nvr) { + if c.isNvrIdentical(latestBuild, nvr) { logrus.Infof("%s has been fixed downstream with build %d (%s)", cve.ID, latestBuild.BuildId, kojiNvr) - err := tx.UpdateAffectedProductStateAndPackageAndAdvisory(affectedProduct.ID, int(secparseadminpb.AffectedProductState_FixedDownstream), affectedProduct.Package, &affectedProduct.Advisory.String) + err := tx.UpdateAffectedProductStateAndPackageAndAdvisory(affectedProduct.ID, int(apollopb.AffectedProduct_STATE_FIXED_DOWNSTREAM), affectedProduct.Package, &affectedProduct.Advisory.String) if err != nil { logrus.Errorf("Could not update affected product %d: %v", affectedProduct.ID, err) - return Skip + return apollopb.BuildStatus_BUILD_STATUS_SKIP } // Get all RPMs for build - rpms, err := i.koji.ListRPMs(&koji.ListRPMsRequest{ + rpms, err := k.ListRPMs(&koji.ListRPMsRequest{ BuildID: latestBuild.BuildId, }) if err != nil { logrus.Errorf("Could not get RPMs from Koji: %v", err) - return Skip + return apollopb.BuildStatus_BUILD_STATUS_SKIP } var srcRpm string @@ -383,17 +426,12 @@ func (i *Instance) checkKojiForBuild(tx db.Access, ignoredPackages []string, nvr // Add all RPMs as a build reference to the CVE // This is the "Affected packages" section of an advisory for _, rpm := range rpms.RPMs { - epochInt := 0 - if rpm.Epoch != nil { - epochInt = *rpm.Epoch - } - // Construct a valid rpm name (this is what the repos will contain) - rpmStr := fmt.Sprintf("%s-%d:%s-%s.%s.rpm", rpm.Name, epochInt, rpm.Version, rpm.Release, rpm.Arch) - _, err = tx.CreateBuildReference(affectedProduct.ID, rpmStr, srcRpm, cve.ID, strconv.Itoa(latestBuild.BuildId)) + rpmStr := fmt.Sprintf("%s-%d:%s-%s.%s.rpm", rpm.Name, utils.Default[int](rpm.Epoch), rpm.Version, rpm.Release, rpm.Arch) + _, err = tx.CreateBuildReference(affectedProduct.ID, rpmStr, srcRpm, cve.ID, "", utils.Pointer[string](strconv.Itoa(latestBuild.BuildId)), nil) if err != nil { logrus.Errorf("Could not create build reference: %v", err) - return Skip + return apollopb.BuildStatus_BUILD_STATUS_SKIP } } @@ -407,8 +445,8 @@ func (i *Instance) checkKojiForBuild(tx db.Access, ignoredPackages []string, nvr // No fix has been detected, will mark as 
FixedUpstream if !fixed { logrus.Errorf("%s has not been fixed for NVR %s", cve.ID, nvrOnly) - return NotFixed + return apollopb.BuildStatus_BUILD_STATUS_NOT_FIXED } - return Fixed + return apollopb.BuildStatus_BUILD_STATUS_FIXED } diff --git a/secparse/cron/main_test.go b/apollo/workflow/workflow_test.go similarity index 58% rename from secparse/cron/main_test.go rename to apollo/workflow/workflow_test.go index c0954e1a..76032bad 100644 --- a/secparse/cron/main_test.go +++ b/apollo/workflow/workflow_test.go @@ -28,64 +28,90 @@ // ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE // POSSIBILITY OF SUCH DAMAGE. -package cron +package workflow import ( "database/sql" + "encoding/json" + "go.temporal.io/sdk/testsuite" "os" + apollodb "peridot.resf.org/apollo/db" + apollomock "peridot.resf.org/apollo/db/mock" + apollopb "peridot.resf.org/apollo/pb" + "peridot.resf.org/apollo/rherrata" + "peridot.resf.org/apollo/rhsecuritymock" "peridot.resf.org/koji" - secparseadminpb "peridot.resf.org/secparse/admin/proto/v1" - "peridot.resf.org/secparse/db" - "peridot.resf.org/secparse/db/mock" - "peridot.resf.org/secparse/rherrata" - "peridot.resf.org/secparse/rhsecuritymock" "testing" "time" ) var ( - cronInstance *Instance - mockDb *mock.Access + mockDb *apollomock.Access securityMock *rhsecuritymock.Client errataMock *rherrata.MockInstance kojiMock *koji.Mock + testWfSuite *testsuite.WorkflowTestSuite + controller *Controller ) func resetDb() { - *mockDb = *mock.New() + *mockDb = *apollomock.New() now := time.Now() mirrorFromDate, _ := time.Parse("2006-01-02", "2021-06-01") - mockDb.ShortCodes = append(mockDb.ShortCodes, &db.ShortCode{ - Code: "RL", - Mode: int8(secparseadminpb.ShortCodeMode_MirrorRedHatMode), - CreatedAt: &now, - ArchivedAt: sql.NullTime{}, + mockDb.ShortCodes = append(mockDb.ShortCodes, &apollodb.ShortCode{ + Code: "RL", + Mode: int8(apollopb.ShortCode_MODE_MIRROR), + CreatedAt: &now, + ArchivedAt: sql.NullTime{}, + }) + mockDb.Products = append(mockDb.Products, &apollodb.Product{ + ID: 1, + Name: "Rocky Linux 8", + CurrentFullVersion: "8.4", + RedHatMajorVersion: sql.NullInt32{Valid: true, Int32: 8}, + ShortCode: "RL", + Archs: []string{"x86_64", "aarch64"}, MirrorFromDate: sql.NullTime{Valid: true, Time: mirrorFromDate}, RedHatProductPrefix: sql.NullString{Valid: true, String: "Rocky Linux"}, - }) - mockDb.Products = append(mockDb.Products, &db.Product{ - ID: 1, - Name: "Rocky Linux 8", - CurrentFullVersion: "8.4", - RedHatMajorVersion: sql.NullInt32{Valid: true, Int32: 8}, - ShortCode: "RL", - Archs: []string{"x86_64", "aarch64"}, + BuildSystem: "koji", // we're testing koji only for now + BuildSystemEndpoint: "local", + KojiCompose: sql.NullString{Valid: true, String: "Rocky-8.4"}, + KojiModuleCompose: sql.NullString{Valid: true, String: "Rocky-8.4-module"}, }) } +func readTestDataJson(file string, target interface{}) error { + data, err := os.ReadFile(file) + if err != nil { + return err + } + + return json.Unmarshal(data, target) +} + func TestMain(m *testing.M) { - mockDb = mock.New() + mockDb = apollomock.New() securityMock = rhsecuritymock.New() errataMock = rherrata.NewMock() kojiMock = koji.NewMock() + forceKoji = kojiMock - instance, _ := New(mockDb) - instance.api = securityMock - instance.errata = errataMock.API - instance.koji = kojiMock + testWfSuite = &testsuite.WorkflowTestSuite{} - cronInstance = instance + input := &NewControllerInput{ + Database: mockDb, + } + instance, err := NewController( + input, + WithSecurityAPI(securityMock), + 
WithErrataAPI(errataMock.API), + ) + if err != nil { + panic(err.(any)) + } + + controller = instance resetDb() diff --git a/bases/bazel/go/BUILD b/bases/bazel/go/BUILD index 546c1aa5..c7a793fa 100644 --- a/bases/bazel/go/BUILD +++ b/bases/bazel/go/BUILD @@ -8,29 +8,34 @@ container_image( "//platforms:s390x": "@go_base_s390x//image", "//platforms:ppc64le": "@go_base_ppc64le//image", }), + tags = ["manual"], visibility = ["//visibility:public"], ) container_image( name = "go_arm64", base = "@go_base_arm64//image", + tags = ["manual"], visibility = ["//visibility:public"], ) container_image( name = "go_amd64", base = "@go_base_amd64//image", + tags = ["manual"], visibility = ["//visibility:public"], ) container_image( name = "go_s390x", base = "@go_base_s390x//image", + tags = ["manual"], visibility = ["//visibility:public"], ) container_image( name = "go_ppc64le", base = "@go_base_ppc64le//image", + tags = ["manual"], visibility = ["//visibility:public"], ) diff --git a/bases/build/BUILD b/bases/build/BUILD index 5b0914d6..ff3b11ff 100644 --- a/bases/build/BUILD +++ b/bases/build/BUILD @@ -3,24 +3,28 @@ load("@io_bazel_rules_docker//container:container.bzl", "container_image") container_image( name = "build_arm64", base = "@build_base_arm64//image", + tags = ["manual"], visibility = ["//visibility:public"], ) container_image( name = "build_amd64", base = "@build_base_amd64//image", + tags = ["manual"], visibility = ["//visibility:public"], ) container_image( name = "build_s390x", base = "@build_base_s390x//image", + tags = ["manual"], visibility = ["//visibility:public"], ) container_image( name = "build_ppc64le", base = "@build_base_ppc64le//image", + tags = ["manual"], visibility = ["//visibility:public"], ) @@ -32,5 +36,6 @@ container_image( "//platforms:s390x": "@build_base_s390x//image", "//platforms:ppc64le": "@build_base_ppc64le//image", }), + tags = ["manual"], visibility = ["//visibility:public"], ) diff --git a/ci/bycdeploy.jsonnet b/ci/bycdeploy.jsonnet index 84024e1d..2ffd1690 100644 --- a/ci/bycdeploy.jsonnet +++ b/ci/bycdeploy.jsonnet @@ -37,8 +37,14 @@ local labels = { local envs = [stageNoDash]; + local ports = info.ports + (if std.objectHas(info, 'disableMetrics') && info.disableMetrics then [] else [{ + name: 'metrics', + containerPort: 7332, + protocol: 'TCP', + }]); + local services = if std.objectHas(info, 'services') then info.services else - [{ name: '%s-%s-%s' % [metadata.name, port.name, env], port: port.containerPort, portName: port.name, expose: if std.objectHas(port, 'expose') then port.expose else false } for env in envs for port in info.ports]; + [{ name: '%s-%s-%s' % [metadata.name, port.name, env], port: port.containerPort, portName: port.name, expose: if std.objectHas(port, 'expose') then port.expose else false } for env in envs for port in ports]; local nssa = '001-ns-sa.yaml'; local migrate = '002-migrate.yaml'; @@ -176,6 +182,7 @@ local labels = { ], annotations: { 'sidecar.istio.io/inject': 'false', + 'linkerd.io/inject': 'disabled', }, }) else {}, ]), @@ -191,12 +198,15 @@ local labels = { fsUser: if std.objectHas(info, 'fsUser') then info.fsUser else null, imagePullSecrets: imagePullSecrets, labels: db.label(), - annotations: if std.objectHas(info, 'annotations') then info.annotations, + annotations: (if std.objectHas(info, 'annotations') then info.annotations else {}) + { + 'prometheus.io/scrape': 'true', + 'prometheus.io/port': '7332', + }, initContainers: if !legacyDb && info.backend then [kubernetes.request_cdb_certs('%s%s' % [metadata.name, 
stageNoDash]) + { serviceAccount: '%s-%s-serviceaccount' % [stageNoDash, fixed.name], }], volumes: (if std.objectHas(info, 'volumes') then info.volumes(metadata) else []) + (if !legacyDb then kubernetes.request_cdb_certs_volumes() else []), - ports: std.map(function(x) x { expose: null, external: null }, info.ports), + ports: std.map(function(x) x { expose: null, external: null }, ports), health: if std.objectHas(info, 'health') then info.health, env: env + (if dbname != '' && info.backend then ([dbPassEnv]) else []) + [ { @@ -218,7 +228,20 @@ local labels = { ]), [svcVsDr]: std.manifestYamlStream( - [kubernetes.define_service(metadata { name: srv.name }, srv.port, srv.port, portName=srv.portName, selector=metadata.name, env=mappings.get_env_from_svc(srv.name)) for srv in services] + + [kubernetes.define_service( + metadata { + name: srv.name, + annotations: { + 'konghq.com/protocol': std.strReplace(std.strReplace(std.strReplace(srv.name, metadata.name, ''), stage, ''), '-', ''), + 'ingress.kubernetes.io/service-upstream': 'true', + } + }, + srv.port, + srv.port, + portName=srv.portName, + selector=metadata.name, + env=mappings.get_env_from_svc(srv.name), + ) for srv in services] + [kubernetes.define_virtual_service(metadata { name: srv.name + '-internal' }, { hosts: [vshost(srv)], gateways: [], diff --git a/ci/kubernetes.jsonnet b/ci/kubernetes.jsonnet index 21294f92..2e989c92 100644 --- a/ci/kubernetes.jsonnet +++ b/ci/kubernetes.jsonnet @@ -65,6 +65,7 @@ local define_volumes(volumes) = [ emptyDir: if std.objectHas(vm, 'emptyDir') then {}, secret: if std.objectHas(vm, 'secret') then vm.secret, configMap: if std.objectHas(vm, 'configMap') then vm.configMap, + hostPath: if std.objectHas(vm, 'hostPath') then vm.hostPath, } for vm in volumes ]; @@ -133,7 +134,7 @@ local dev() = stage == '-dev'; env: if !std.objectHas(deporig, 'env') then [] else deporig.env, ports: if !std.objectHas(deporig, 'ports') then [{ containerPort: 80, protocol: 'TCP' }] else deporig.ports, initContainers: if !std.objectHas(deporig, 'initContainers') then [] else deporig.initContainers, - limits: if !std.objectHas(deporig, 'limits') || deporig.limits == null then { cpu: '0.1', memory: '256M' } else deporig.limits, + limits: if std.objectHas(deporig, 'limits') then deporig.limits, requests: if !std.objectHas(deporig, 'requests') || deporig.requests == null then { cpu: '0.001', memory: '128M' } else deporig.requests, }; @@ -275,11 +276,11 @@ local dev() = stage == '-dev'; }, // Ingress - define_ingress(metadataOrig, host, path='/', port=80):: + define_ingress(metadataOrig, host, srvName=null, path='/', port=80):: local metadata = fix_metadata(metadataOrig); { - apiVersion: 'extensions/v1beta1', + apiVersion: 'networking.k8s.io/v1', kind: 'Ingress', metadata: metadata { name: metadata.name + '-ingress', @@ -291,15 +292,27 @@ local dev() = stage == '-dev'; paths: [ { path: path, + pathType: 'Prefix', backend: { - serviceName: metadata.name + '-service', - servicePort: port, + service: { + name: if srvName != null then srvName else metadata.name + '-service', + port: { + number: port, + } + } }, }, ], }, }], - }, + } + ({ + tls: [{ + hosts: [ + host, + ], + secretName: metadata.name + '-tls', + }], + }), }, // Service @@ -318,7 +331,12 @@ local dev() = stage == '-dev'; port: externalPort, protocol: protocol, targetPort: internalPort, - }], + }] + (if portName == 'http' && externalPort != 80 then [{ + name: portName + "-80", + port: 80, + protocol: protocol, + targetPort: internalPort, + }] else []), selector: { app: if 
selector != '' then selector else metadata.name, env: env, @@ -611,7 +629,7 @@ local dev() = stage == '-dev'; tag(name, extra=false):: '%s/%s%s%s%s' % [ - std.strReplace(ociRegistry, 'host.docker.internal.local', 'host.docker.internal'), + std.strReplace(ociRegistry, 'host.docker.internal.local', 'registry'), ociRegistryRepo, if ociNoNestedSupport then ':' else '/', name, diff --git a/ci/service_mappings.jsonnet b/ci/service_mappings.jsonnet index f0afe767..75429b29 100644 --- a/ci/service_mappings.jsonnet +++ b/ci/service_mappings.jsonnet @@ -39,5 +39,13 @@ id: 'id', external: true, }, + 'apollo-http': { + id: 'apollo', + external: true, + }, + 'apollo-frontend-http': { + id: 'errata', + external: true, + }, } } diff --git a/common/frontend_server/index.mjs b/common/frontend_server/index.mjs index b44b2f81..27bd5b0d 100644 --- a/common/frontend_server/index.mjs +++ b/common/frontend_server/index.mjs @@ -148,7 +148,6 @@ export default async function(opts) { // Bypassing auth here doesn't accomplish anything. let middlewares = []; - // If requireEmailSuffix is present, let's validate post callback // that the signed in email ends with a suffix in the allowlist // Again, a bypass here doesn't accomplish anything. @@ -248,8 +247,11 @@ export default async function(opts) { const prodEnvName = `URL_${x.substr(1).replace('/', '_').toUpperCase()}`; - const apiUrl = prod ? (process.env[prodEnvName] - || opts.apis[x].prodApiUrl) : opts.apis[x].devApiUrl; + const apiUrl = process.env[prodEnvName] + ? process.env[prodEnvName] + : prod + ? opts.apis[x].prodApiUrl + : opts.apis[x].devApiUrl; createProxyMiddleware({ target: apiUrl, diff --git a/common/ui/RESFLogo.tsx b/common/ui/RESFLogo.tsx new file mode 100644 index 00000000..871c3ab4 --- /dev/null +++ b/common/ui/RESFLogo.tsx @@ -0,0 +1,76 @@ +/* + * Copyright (c) All respective contributors to the Peridot Project. All rights reserved. + * Copyright (c) 2021-2022 Rocky Enterprise Software Foundation, Inc. All rights reserved. + * Copyright (c) 2021-2022 Ctrl IQ, Inc. All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1. Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2. Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3. Neither the name of the copyright holder nor the names of its contributors + * may be used to endorse or promote products derived from this software without + * specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +import React from 'react'; +import classnames from 'classnames'; + +export interface RESFLogoProps { + className?: string; +} + +export const RESFLogo = (props: RESFLogoProps) => { + return ( + + + + + + + + + ); +}; diff --git a/config/jobs/presubmits.yaml b/config/jobs/presubmits.yaml index 3d182cb0..da955180 100644 --- a/config/jobs/presubmits.yaml +++ b/config/jobs/presubmits.yaml @@ -21,6 +21,17 @@ presubmits: decorate: true spec: containers: - - image: alpine + - image: quay.io/peridot/ci:v0.3.16 command: - - /bin/date + - hack/unit_tests.sh + + - name: pull-peridot-non-manual-build + branches: + - main + always_run: true + decorate: true + spec: + containers: + - image: quay.io/peridot/ci:v0.3.16 + command: + - hack/non_manual_build.sh diff --git a/config/prow/config.yaml b/config/prow/config.yaml index 383d42a8..d356893f 100644 --- a/config/prow/config.yaml +++ b/config/prow/config.yaml @@ -48,7 +48,6 @@ tide: - lgtm - approved missingLabels: - - needs-rebase - do-not-merge/hold - do-not-merge/work-in-progress - do-not-merge/invalid-owners-file diff --git a/config/prow/plugins.yaml b/config/prow/plugins.yaml index ef151321..d1b2f11d 100644 --- a/config/prow/plugins.yaml +++ b/config/prow/plugins.yaml @@ -17,13 +17,6 @@ plugins: - wip - yuks -external_plugins: - rocky-linux/peridot: - - name: needs-rebase - events: - - issue_comment - - pull_request - config_updater: maps: config/prow/config.yaml: diff --git a/hack/non_manual_build.sh b/hack/non_manual_build.sh new file mode 100755 index 00000000..5856a0a1 --- /dev/null +++ b/hack/non_manual_build.sh @@ -0,0 +1,6 @@ +#!/usr/bin/env bash + +set -o errexit +set -x + +bazel build --config=ci $(bazel query "//... except attr(tags, 'manual', //...) except //vendor/...") diff --git a/hack/unit_tests.sh b/hack/unit_tests.sh new file mode 100755 index 00000000..268d372a --- /dev/null +++ b/hack/unit_tests.sh @@ -0,0 +1,6 @@ +#!/usr/bin/env bash + +set -o errexit +set -x + +bazel test --config=ci //... 
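Reviewer note: the diffs above convert the old secparse cron entry points into Temporal workflows and activities (heartbeating long-running activities, generous StartToClose timeouts, per-short-code fan-out from PollRedHatErrataWorkflow). The actual worker/starter wiring lives in the apollo cmd binaries and is not shown in full in this excerpt; the sketch below is only an illustration, built on the public Temporal Go SDK, of how a controller like this is typically registered on a task queue and scheduled as a cron workflow. The exampleController type, queue name, workflow ID, and cron interval are placeholders and are not taken from this repository.

package main

import (
	"context"
	"log"
	"time"

	"go.temporal.io/sdk/activity"
	"go.temporal.io/sdk/client"
	"go.temporal.io/sdk/worker"
	"go.temporal.io/sdk/workflow"
)

// exampleController stands in for apollo's workflow.Controller; the real one
// carries the database handle, Red Hat API clients, logger, etc.
type exampleController struct{}

// PollActivity mirrors the heartbeat pattern used by the activities above:
// a background goroutine records heartbeats while the real work runs.
func (c *exampleController) PollActivity(ctx context.Context) error {
	go func() {
		for {
			activity.RecordHeartbeat(ctx)
			time.Sleep(10 * time.Second)
		}
	}()
	// ... the actual polling/mirroring work would go here ...
	return nil
}

// PollWorkflow runs the activity with the kind of generous timeouts the
// diffed workflows use, relying on heartbeats to detect stuck workers.
func (c *exampleController) PollWorkflow(ctx workflow.Context) error {
	actx := workflow.WithActivityOptions(ctx, workflow.ActivityOptions{
		ScheduleToStartTimeout: 5 * time.Minute,
		StartToCloseTimeout:    12 * time.Hour,
		HeartbeatTimeout:       30 * time.Second,
	})
	return workflow.ExecuteActivity(actx, c.PollActivity).Get(ctx, nil)
}

func main() {
	// Older SDK releases use client.NewClient; newer ones prefer client.Dial.
	c, err := client.NewClient(client.Options{})
	if err != nil {
		log.Fatalf("temporal client: %v", err)
	}
	defer c.Close()

	ctrl := &exampleController{}

	// Placeholder queue name; apollo passes its queue in via NewControllerInput.MainQueue.
	w := worker.New(c, "apollo-example-queue", worker.Options{})
	w.RegisterWorkflow(ctrl.PollWorkflow)
	w.RegisterActivity(ctrl.PollActivity)

	// Schedule the poller as a Temporal cron workflow rather than an in-process loop.
	_, err = c.ExecuteWorkflow(context.Background(), client.StartWorkflowOptions{
		ID:           "poll-example",
		TaskQueue:    "apollo-example-queue",
		CronSchedule: "*/30 * * * *", // placeholder interval
	}, ctrl.PollWorkflow)
	if err != nil {
		log.Fatalf("start cron workflow: %v", err)
	}

	if err := w.Run(worker.InterruptCh()); err != nil {
		log.Fatalf("worker: %v", err)
	}
}

Driving the poller through a server-side cron workflow is what lets the activities in this patch lean on heartbeats and multi-hour StartToClose timeouts instead of the previous in-process cron scheduler.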
diff --git a/infrastructure/bazel-remote/001-pvcs.yaml b/infrastructure/bazel-remote/001-pvcs.yaml new file mode 100644 index 00000000..17bce49e --- /dev/null +++ b/infrastructure/bazel-remote/001-pvcs.yaml @@ -0,0 +1,14 @@ +--- +kind: PersistentVolumeClaim +apiVersion: v1 +metadata: + name: buildcache + namespace: default + labels: + app: buildcache +spec: + accessModes: + - ReadWriteOnce + resources: + requests: + storage: 100Gi diff --git a/infrastructure/bazel-remote/002-serviceaccounts.yaml b/infrastructure/bazel-remote/002-serviceaccounts.yaml new file mode 100644 index 00000000..85fff0ed --- /dev/null +++ b/infrastructure/bazel-remote/002-serviceaccounts.yaml @@ -0,0 +1,7 @@ +kind: ServiceAccount +apiVersion: v1 +metadata: + name: buildcache + namespace: default + annotations: + eks.amazonaws.com/role-arn: arn:aws:iam::893168113496:role/resf-buildcache-peridot diff --git a/infrastructure/bazel-remote/003-deployment.yaml b/infrastructure/bazel-remote/003-deployment.yaml new file mode 100644 index 00000000..727d46fb --- /dev/null +++ b/infrastructure/bazel-remote/003-deployment.yaml @@ -0,0 +1,61 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: buildcache + namespace: default + labels: + app: buildcache +spec: + replicas: 1 + strategy: + type: RollingUpdate + rollingUpdate: + maxSurge: 1 + maxUnavailable: 1 + selector: + matchLabels: + app: buildcache + template: + metadata: + labels: + app: buildcache + spec: + serviceAccountName: "buildcache" + terminationGracePeriodSeconds: 180 + securityContext: + fsGroup: 1000 + containers: + - name: buildcache + image: quay.io/bazel-remote/bazel-remote:v2.3.9 + args: + - --s3.auth_method=iam_role + - --s3.region=us-east-2 + - --s3.bucket=resf-prod-buildcache + - --s3.endpoint=s3.us-east-2.amazonaws.com + - --max_size=98 + - --dir=/buildcache + ports: + - name: http + containerPort: 8080 + - name: grpc + containerPort: 9092 + volumeMounts: + - name: buildcache + mountPath: /buildcache + livenessProbe: + httpGet: + path: /status + port: 8080 + initialDelaySeconds: 3 + periodSeconds: 3 + readinessProbe: + httpGet: + path: /status + port: 8080 + initialDelaySeconds: 10 + periodSeconds: 3 + timeoutSeconds: 600 + volumes: + - name: buildcache + persistentVolumeClaim: + claimName: buildcache diff --git a/infrastructure/bazel-remote/004-service.yaml b/infrastructure/bazel-remote/004-service.yaml new file mode 100644 index 00000000..5af12e98 --- /dev/null +++ b/infrastructure/bazel-remote/004-service.yaml @@ -0,0 +1,10 @@ +apiVersion: v1 +kind: Service +metadata: + name: buildcache + namespace: default +spec: + selector: + app: buildcache + ports: + - port: 9092 diff --git a/infrastructure/buildbuddy/001-configmap.yaml b/infrastructure/buildbuddy/001-configmap.yaml new file mode 100644 index 00000000..17ca7f7a --- /dev/null +++ b/infrastructure/buildbuddy/001-configmap.yaml @@ -0,0 +1,16 @@ +apiVersion: v1 +kind: ConfigMap +metadata: + name: buildbuddy + namespace: default +data: + config.yaml: | + app: + build_buddy_url: "https://bz.build.resf.org" + default_to_dense_mode: true + database: + data_source: "sqlite3:///data/buildbuddy.db" + storage: + aws_s3: + region: us-east-2 + bucket: resf-prod-buildbuddy-bz diff --git a/infrastructure/buildbuddy/002-pvcs.yaml b/infrastructure/buildbuddy/002-pvcs.yaml new file mode 100644 index 00000000..11e25de9 --- /dev/null +++ b/infrastructure/buildbuddy/002-pvcs.yaml @@ -0,0 +1,14 @@ +--- +kind: PersistentVolumeClaim +apiVersion: v1 +metadata: + name: buildbuddy + namespace: default + labels: + app: 
buildbuddy +spec: + accessModes: + - ReadWriteOnce + resources: + requests: + storage: 100Gi diff --git a/infrastructure/buildbuddy/002-serviceaccounts.yaml b/infrastructure/buildbuddy/002-serviceaccounts.yaml new file mode 100644 index 00000000..5599927f --- /dev/null +++ b/infrastructure/buildbuddy/002-serviceaccounts.yaml @@ -0,0 +1,7 @@ +kind: ServiceAccount +apiVersion: v1 +metadata: + name: buildbuddy + namespace: default + annotations: + eks.amazonaws.com/role-arn: arn:aws:iam::893168113496:role/resf-buildbuddy-peridot diff --git a/infrastructure/buildbuddy/003-statefulset.yaml b/infrastructure/buildbuddy/003-statefulset.yaml new file mode 100644 index 00000000..59c882bf --- /dev/null +++ b/infrastructure/buildbuddy/003-statefulset.yaml @@ -0,0 +1,45 @@ +apiVersion: apps/v1 +kind: StatefulSet +metadata: + name: buildbuddy-app + namespace: default +spec: + replicas: 1 + serviceName: buildbuddy-app + selector: + matchLabels: + app: buildbuddy-app + template: + metadata: + labels: + app: buildbuddy-app + spec: + serviceAccountName: buildbuddy + containers: + - name: buildbuddy-app + image: gcr.io/flame-public/buildbuddy-app-onprem:v2.12.1 + env: + - name: ENV + value: "onprem" + - name: SERVICE_NAME + value: "app-backend" + - name: VERSION + value: "prod-1.0" + ports: + - name: http + containerPort: 8080 + - name: grpc + containerPort: 1985 + volumeMounts: + - mountPath: /data + name: database + - mountPath: "/config.yaml" + name: config + subPath: "config.yaml" + volumes: + - name: config + configMap: + name: buildbuddy + - name: database + persistentVolumeClaim: + claimName: buildbuddy diff --git a/infrastructure/buildbuddy/004-services.yaml b/infrastructure/buildbuddy/004-services.yaml new file mode 100644 index 00000000..3067869f --- /dev/null +++ b/infrastructure/buildbuddy/004-services.yaml @@ -0,0 +1,21 @@ +apiVersion: v1 +kind: Service +metadata: + name: buildbuddy + namespace: default +spec: + selector: + app: buildbuddy-app + ports: + - port: 8080 +--- +apiVersion: v1 +kind: Service +metadata: + name: buildbuddy-grpc + namespace: default +spec: + selector: + app: buildbuddy-app + ports: + - port: 1985 diff --git a/infrastructure/buildbuddy/005-virtualservice.yaml b/infrastructure/buildbuddy/005-virtualservice.yaml new file mode 100644 index 00000000..58dcee56 --- /dev/null +++ b/infrastructure/buildbuddy/005-virtualservice.yaml @@ -0,0 +1,23 @@ +--- +apiVersion: networking.istio.io/v1beta1 +kind: VirtualService +metadata: + name: buildbuddy + namespace: default + annotations: + external-dns.alpha.kubernetes.io/target: ingress.build.resf.org +spec: + gateways: + - istio-system/base-gateway-public + hosts: + - bz.build.resf.org + http: + - name: buildbuddy + match: + - uri: + prefix: "/invocation" + route: + - destination: + host: buildbuddy.default.svc.cluster.local + port: + number: 8080 diff --git a/secparse/admin/BUILD b/infrastructure/prow/BUILD.bazel similarity index 100% rename from secparse/admin/BUILD rename to infrastructure/prow/BUILD.bazel diff --git a/infrastructure/prow/configmaps.yaml b/infrastructure/prow/configmaps.yaml new file mode 100644 index 00000000..88956144 --- /dev/null +++ b/infrastructure/prow/configmaps.yaml @@ -0,0 +1,127 @@ +# Updated using inrepoconfig, initial should be uncommented +# --- +# apiVersion: v1 +# kind: ConfigMap +# metadata: +# namespace: prow +# name: plugins +# data: +# plugins.yaml: | +# plugins: +# rocky-linux/peridot: +# plugins: +# - approve +# - assign +# - blunderbuss +# - cat +# - config-updater +# - dog +# - help +# - 
heart +# - hold +# - label +# - lgtm +# - trigger +# - verify-owners +# - wip +# - yuks +# config_updater: +# maps: +# config/prow/config.yaml: +# name: config +# config/prow/plugins.yaml: +# name: plugins +# config/jobs/**/*.{yaml,yml}: +# name: job-config +# gzip: true +# --- +# apiVersion: v1 +# kind: ConfigMap +# metadata: +# namespace: prow +# name: config +# data: +# config.yaml: | +# prowjob_namespace: prow +# pod_namespace: test-pods +# allowed_clusters: +# rocky-linux/peridot: +# - default +# deck: +# spyglass: +# lenses: +# - lens: +# name: metadata +# required_files: +# - started.json|finished.json +# - lens: +# config: +# name: buildlog +# required_files: +# - build-log.txt +# - lens: +# name: junit +# required_files: +# - bazel-testlogs/.*/test.xml +# - lens: +# name: podinfo +# required_files: +# - podinfo.json +# plank: +# job_url_prefix_config: +# "*": https://prow.build.resf.org/view/ +# report_templates: +# '*': >- +# [Full PR test history](https://prow.build.resf.org/pr-history?org={{.Spec.Refs.Org}}&repo={{.Spec.Refs.Repo}}&pr={{with index .Spec.Refs.Pulls 0}}{{.Number}}{{end}}). +# [Your PR dashboard](https://prow.build.resf.org/pr?query=is:pr+state:open+author:{{with +# index .Spec.Refs.Pulls 0}}{{.Author}}{{end}}). +# default_decoration_configs: +# "*": +# gcs_configuration: +# bucket: s3://resf-prod-prow-logs +# path_strategy: explicit +# s3_credentials_secret: s3-credentials +# utility_images: +# clonerefs: gcr.io/k8s-prow/clonerefs:v20221028-a8625c1f93 +# entrypoint: gcr.io/k8s-prow/entrypoint:v20221028-a8625c1f93 +# initupload: gcr.io/k8s-prow/initupload:v20221028-a8625c1f93 +# sidecar: gcr.io/k8s-prow/sidecar:v20221028-a8625c1f93 +# tide: +# queries: +# - labels: +# - lgtm +# - approved +# missingLabels: +# - needs-rebase +# - do-not-merge/hold +# - do-not-merge/work-in-progress +# - do-not-merge/invalid-owners-file +# repos: +# - rocky-linux/peridot +# decorate_all_jobs: true +# presubmits: +# rocky-linux/peridot: +# - name: pull-peridot-validate-prow-yaml +# run_if_changed: '^(config/prow/(config|plugins).yaml$|config/jobs/.*.yaml$)' +# decorate: true +# spec: +# containers: +# - image: gcr.io/k8s-prow/checkconfig:v20221028-a8625c1f93 +# command: +# - checkconfig +# args: +# - --config-path=config/prow/config.yaml +# - --job-config-path=config/jobs +# - --plugin-config=config/prow/plugins.yaml +# - --strict +# +# - name: pull-peridot-unit-test +# branches: +# - main +# always_run: true +# decorate: true +# spec: +# containers: +# - image: alpine +# command: +# - /bin/date diff --git a/infrastructure/prow/deployments.yaml b/infrastructure/prow/deployments.yaml new file mode 100644 index 00000000..ab3c49b9 --- /dev/null +++ b/infrastructure/prow/deployments.yaml @@ -0,0 +1,518 @@ +--- +apiVersion: apps/v1 +kind: Deployment +metadata: + namespace: prow + name: hook + labels: + app: hook +spec: + replicas: 2 + strategy: + type: RollingUpdate + rollingUpdate: + maxSurge: 1 + maxUnavailable: 1 + selector: + matchLabels: + app: hook + template: + metadata: + labels: + app: hook + spec: + serviceAccountName: "hook" + terminationGracePeriodSeconds: 180 + containers: + - name: hook + image: gcr.io/k8s-prow/hook:v20221028-a8625c1f93 + imagePullPolicy: Always + args: + - --dry-run=false + - --config-path=/etc/config/config.yaml + - --github-endpoint=http://ghproxy + - --github-endpoint=https://api.github.com + - --github-app-id=$(GITHUB_APP_ID) + - --github-app-private-key-path=/etc/github/cert + env: + - name: GITHUB_APP_ID + valueFrom: + secretKeyRef: + name: 
github-token + key: appid + ports: + - name: http + containerPort: 8888 + volumeMounts: + - name: hmac + mountPath: /etc/webhook + readOnly: true + - name: github-token + mountPath: /etc/github + readOnly: true + - name: config + mountPath: /etc/config + readOnly: true + - name: plugins + mountPath: /etc/plugins + readOnly: true + livenessProbe: + httpGet: + path: /healthz + port: 8081 + initialDelaySeconds: 3 + periodSeconds: 3 + readinessProbe: + httpGet: + path: /healthz/ready + port: 8081 + initialDelaySeconds: 10 + periodSeconds: 3 + timeoutSeconds: 600 + volumes: + - name: hmac + secret: + secretName: hmac-token + - name: github-token + secret: + secretName: github-token + - name: config + configMap: + name: config + - name: plugins + configMap: + name: plugins +--- +apiVersion: apps/v1 +kind: Deployment +metadata: + namespace: prow + name: sinker + labels: + app: sinker +spec: + selector: + matchLabels: + app: sinker + replicas: 1 + template: + metadata: + labels: + app: sinker + spec: + serviceAccountName: "sinker" + containers: + - name: sinker + image: gcr.io/k8s-prow/sinker:v20221028-a8625c1f93 + args: + - --config-path=/etc/config/config.yaml + - --dry-run=false + volumeMounts: + - name: config + mountPath: /etc/config + readOnly: true + volumes: + - name: config + configMap: + name: config +--- +apiVersion: apps/v1 +kind: Deployment +metadata: + namespace: prow + name: deck + labels: + app: deck +spec: + replicas: 2 + strategy: + type: RollingUpdate + rollingUpdate: + maxSurge: 1 + maxUnavailable: 1 + selector: + matchLabels: + app: deck + template: + metadata: + labels: + app: deck + spec: + serviceAccountName: "deck" + terminationGracePeriodSeconds: 30 + containers: + - name: deck + image: gcr.io/k8s-prow/deck:v20221028-a8625c1f93 + args: + - --config-path=/etc/config/config.yaml + - --plugin-config=/etc/plugins/plugins.yaml + - --tide-url=http://tide/ + - --hook-url=http://hook:8888/plugin-help + - --github-endpoint=http://ghproxy + - --github-endpoint=https://api.github.com + - --github-graphql-endpoint=http://ghproxy/graphql + - --s3-credentials-file=/etc/s3-credentials/service-account.json + - --spyglass=true + - --github-app-id=$(GITHUB_APP_ID) + - --github-app-private-key-path=/etc/github/cert + env: + - name: GITHUB_APP_ID + valueFrom: + secretKeyRef: + name: github-token + key: appid + ports: + - name: http + containerPort: 8080 + volumeMounts: + - name: config + mountPath: /etc/config + readOnly: true + - name: github-token + mountPath: /etc/github + readOnly: true + - name: plugins + mountPath: /etc/plugins + readOnly: true + - name: s3-credentials + mountPath: /etc/s3-credentials + readOnly: true + livenessProbe: + httpGet: + path: /healthz + port: 8081 + initialDelaySeconds: 3 + periodSeconds: 3 + readinessProbe: + httpGet: + path: /healthz/ready + port: 8081 + initialDelaySeconds: 10 + periodSeconds: 3 + timeoutSeconds: 600 + volumes: + - name: config + configMap: + name: config + - name: github-token + secret: + secretName: github-token + - name: plugins + configMap: + name: plugins + - name: s3-credentials + secret: + secretName: s3-credentials +--- +apiVersion: apps/v1 +kind: Deployment +metadata: + namespace: prow + name: horologium + labels: + app: horologium +spec: + replicas: 1 # Do not scale up. 
+ strategy: + type: Recreate + selector: + matchLabels: + app: horologium + template: + metadata: + labels: + app: horologium + spec: + serviceAccountName: "horologium" + terminationGracePeriodSeconds: 30 + containers: + - name: horologium + image: gcr.io/k8s-prow/horologium:v20221028-a8625c1f93 + args: + - --dry-run=false + - --config-path=/etc/config/config.yaml + volumeMounts: + - name: config + mountPath: /etc/config + readOnly: true + volumes: + - name: config + configMap: + name: config +--- +apiVersion: apps/v1 +kind: Deployment +metadata: + namespace: prow + name: tide + labels: + app: tide +spec: + replicas: 1 # Do not scale up. + strategy: + type: Recreate + selector: + matchLabels: + app: tide + template: + metadata: + labels: + app: tide + spec: + serviceAccountName: "tide" + containers: + - name: tide + image: gcr.io/k8s-prow/tide:v20221028-a8625c1f93 + args: + - --dry-run=false + - --config-path=/etc/config/config.yaml + - --github-endpoint=http://ghproxy + - --github-endpoint=https://api.github.com + - --github-graphql-endpoint=http://ghproxy/graphql + - --s3-credentials-file=/etc/s3-credentials/service-account.json + - --status-path=s3://resf-prod-prow-tide/tide-status + - --history-uri=s3://resf-prod-prow-tide/tide-history.json + - --github-app-id=$(GITHUB_APP_ID) + - --github-app-private-key-path=/etc/github/cert + env: + - name: GITHUB_APP_ID + valueFrom: + secretKeyRef: + name: github-token + key: appid + ports: + - name: http + containerPort: 8888 + volumeMounts: + - name: github-token + mountPath: /etc/github + readOnly: true + - name: config + mountPath: /etc/config + readOnly: true + - name: s3-credentials + mountPath: /etc/s3-credentials + readOnly: true + volumes: + - name: github-token + secret: + secretName: github-token + - name: config + configMap: + name: config + - name: s3-credentials + secret: + secretName: s3-credentials +--- +apiVersion: apps/v1 +kind: Deployment +metadata: + name: statusreconciler + namespace: prow + labels: + app: statusreconciler +spec: + replicas: 1 + selector: + matchLabels: + app: statusreconciler + template: + metadata: + labels: + app: statusreconciler + spec: + serviceAccountName: statusreconciler + terminationGracePeriodSeconds: 180 + containers: + - name: statusreconciler + image: gcr.io/k8s-prow/status-reconciler:v20221028-a8625c1f93 + args: + - --dry-run=false + - --continue-on-error=true + - --plugin-config=/etc/plugins/plugins.yaml + - --config-path=/etc/config/config.yaml + - --github-endpoint=http://ghproxy + - --github-endpoint=https://api.github.com + - --s3-credentials-file=/etc/s3-credentials/service-account.json + - --status-path=s3://resf-prod-prow-status-reconciler/status-reconciler-status + - --github-app-id=$(GITHUB_APP_ID) + - --github-app-private-key-path=/etc/github/cert + env: + - name: GITHUB_APP_ID + valueFrom: + secretKeyRef: + name: github-token + key: appid + volumeMounts: + - name: github-token + mountPath: /etc/github + readOnly: true + - name: config + mountPath: /etc/config + readOnly: true + - name: plugins + mountPath: /etc/plugins + readOnly: true + - name: s3-credentials + mountPath: /etc/s3-credentials + readOnly: true + volumes: + - name: github-token + secret: + secretName: github-token + - name: config + configMap: + name: config + - name: plugins + configMap: + name: plugins + - name: s3-credentials + secret: + secretName: s3-credentials +--- +apiVersion: apps/v1 +kind: Deployment +metadata: + namespace: prow + name: ghproxy + labels: + app: ghproxy +spec: + selector: + matchLabels: + app: 
ghproxy + strategy: + type: Recreate + # GHProxy does not support HA + replicas: 1 + template: + metadata: + labels: + app: ghproxy + spec: + containers: + - name: ghproxy + image: gcr.io/k8s-prow/ghproxy:v20221028-a8625c1f93 + args: + - --cache-dir=/cache + - --cache-sizeGB=99 + - --serve-metrics=true + ports: + - containerPort: 8888 + volumeMounts: + - name: cache + mountPath: /cache + volumes: + - name: cache + persistentVolumeClaim: + claimName: ghproxy +--- +apiVersion: apps/v1 +kind: Deployment +metadata: + namespace: prow + name: prow-controller-manager + labels: + app: prow-controller-manager +spec: + replicas: 1 + selector: + matchLabels: + app: prow-controller-manager + template: + metadata: + labels: + app: prow-controller-manager + spec: + serviceAccountName: prow-controller-manager + containers: + - name: prow-controller-manager + args: + - --dry-run=false + - --config-path=/etc/config/config.yaml + - --github-endpoint=http://ghproxy + - --github-endpoint=https://api.github.com + - --enable-controller=plank + - --github-app-id=$(GITHUB_APP_ID) + - --github-app-private-key-path=/etc/github/cert + env: + - name: GITHUB_APP_ID + valueFrom: + secretKeyRef: + name: github-token + key: appid + image: gcr.io/k8s-prow/prow-controller-manager:v20221028-a8625c1f93 + volumeMounts: + - name: github-token + mountPath: /etc/github + readOnly: true + - name: config + mountPath: /etc/config + readOnly: true + volumes: + - name: github-token + secret: + secretName: github-token + - name: config + configMap: + name: config +--- +apiVersion: apps/v1 +kind: Deployment +metadata: + namespace: prow + name: crier + labels: + app: crier +spec: + replicas: 1 + selector: + matchLabels: + app: crier + template: + metadata: + labels: + app: crier + spec: + serviceAccountName: crier + terminationGracePeriodSeconds: 30 + containers: + - name: crier + image: gcr.io/k8s-prow/crier:v20221028-a8625c1f93 + args: + - --blob-storage-workers=10 + - --config-path=/etc/config/config.yaml + - --s3-credentials-file=/etc/s3-credentials/service-account.json + - --github-endpoint=http://ghproxy + - --github-endpoint=https://api.github.com + - --github-workers=10 + - --kubernetes-blob-storage-workers=10 + - --github-app-id=$(GITHUB_APP_ID) + - --github-app-private-key-path=/etc/github/cert + env: + - name: GITHUB_APP_ID + valueFrom: + secretKeyRef: + name: github-token + key: appid + volumeMounts: + - name: config + mountPath: /etc/config + readOnly: true + - name: github-token + mountPath: /etc/github + readOnly: true + - name: s3-credentials + mountPath: /etc/s3-credentials + readOnly: true + volumes: + - name: config + configMap: + name: config + - name: github-token + secret: + secretName: github-token + - name: s3-credentials + secret: + secretName: s3-credentials +--- diff --git a/infrastructure/prow/namespaces.yaml b/infrastructure/prow/namespaces.yaml new file mode 100644 index 00000000..026b906a --- /dev/null +++ b/infrastructure/prow/namespaces.yaml @@ -0,0 +1,12 @@ +--- +apiVersion: v1 +kind: Namespace +metadata: + name: prow + labels: + istio-injection: enabled +--- +apiVersion: v1 +kind: Namespace +metadata: + name: test-pods diff --git a/infrastructure/prow/pvcs.yaml b/infrastructure/prow/pvcs.yaml new file mode 100644 index 00000000..36144407 --- /dev/null +++ b/infrastructure/prow/pvcs.yaml @@ -0,0 +1,14 @@ +--- +kind: PersistentVolumeClaim +apiVersion: v1 +metadata: + namespace: prow + labels: + app: ghproxy + name: ghproxy +spec: + accessModes: + - ReadWriteOnce + resources: + requests: + storage: 100Gi 
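A rough sketch of how these new infrastructure manifests might be rolled out, assuming kubectl already targets the intended cluster, the IAM roles and S3 buckets referenced above exist, and the github-token/hmac-token secrets (left commented out in secrets.yaml further below) have been created by hand; per the comment at the top of configmaps.yaml, the initial config/plugins ConfigMaps are uncommented once for bootstrap and afterwards kept current by the config-updater plugin:

    kubectl apply -f infrastructure/bazel-remote/ -f infrastructure/buildbuddy/
    # create the prow namespaces first so the namespaced objects that follow have a target
    kubectl apply -f infrastructure/prow/namespaces.yaml
    kubectl apply -f infrastructure/prow/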
diff --git a/infrastructure/prow/rolebindings.yaml b/infrastructure/prow/rolebindings.yaml new file mode 100644 index 00000000..1a2c021b --- /dev/null +++ b/infrastructure/prow/rolebindings.yaml @@ -0,0 +1,162 @@ +--- +kind: RoleBinding +apiVersion: rbac.authorization.k8s.io/v1 +metadata: + namespace: prow + name: "deck" +roleRef: + apiGroup: rbac.authorization.k8s.io + kind: Role + name: "deck" +subjects: + - kind: ServiceAccount + name: "deck" +--- +kind: RoleBinding +apiVersion: rbac.authorization.k8s.io/v1 +metadata: + namespace: test-pods + name: "deck" +roleRef: + apiGroup: rbac.authorization.k8s.io + kind: Role + name: "deck" +subjects: + - kind: ServiceAccount + name: "deck" + namespace: prow +--- +kind: RoleBinding +apiVersion: rbac.authorization.k8s.io/v1 +metadata: + namespace: prow + name: "horologium" +roleRef: + apiGroup: rbac.authorization.k8s.io + kind: Role + name: "horologium" +subjects: + - kind: ServiceAccount + name: "horologium" +--- +kind: RoleBinding +apiVersion: rbac.authorization.k8s.io/v1 +metadata: + namespace: prow + name: "sinker" +roleRef: + apiGroup: rbac.authorization.k8s.io + kind: Role + name: "sinker" +subjects: + - kind: ServiceAccount + name: "sinker" +--- +kind: RoleBinding +apiVersion: rbac.authorization.k8s.io/v1 +metadata: + namespace: test-pods + name: "sinker" +roleRef: + apiGroup: rbac.authorization.k8s.io + kind: Role + name: "sinker" +subjects: + - kind: ServiceAccount + name: "sinker" + namespace: prow +--- +kind: RoleBinding +apiVersion: rbac.authorization.k8s.io/v1 +metadata: + namespace: prow + name: "hook" +roleRef: + apiGroup: rbac.authorization.k8s.io + kind: Role + name: "hook" +subjects: + - kind: ServiceAccount + name: "hook" +--- +kind: RoleBinding +apiVersion: rbac.authorization.k8s.io/v1 +metadata: + namespace: prow + name: "tide" +roleRef: + apiGroup: rbac.authorization.k8s.io + kind: Role + name: "tide" +subjects: + - kind: ServiceAccount + name: "tide" +--- +kind: RoleBinding +apiVersion: rbac.authorization.k8s.io/v1 +metadata: + namespace: prow + name: "statusreconciler" +roleRef: + apiGroup: rbac.authorization.k8s.io + kind: Role + name: "statusreconciler" +subjects: + - kind: ServiceAccount + name: "statusreconciler" +--- +kind: RoleBinding +apiVersion: rbac.authorization.k8s.io/v1 +metadata: + namespace: prow + name: prow-controller-manager +roleRef: + apiGroup: rbac.authorization.k8s.io + kind: Role + name: prow-controller-manager +subjects: + - kind: ServiceAccount + name: prow-controller-manager +--- +kind: RoleBinding +apiVersion: rbac.authorization.k8s.io/v1 +metadata: + namespace: test-pods + name: prow-controller-manager +roleRef: + apiGroup: rbac.authorization.k8s.io + kind: Role + name: prow-controller-manager +subjects: + - kind: ServiceAccount + name: prow-controller-manager + namespace: prow +--- +kind: RoleBinding +apiVersion: rbac.authorization.k8s.io/v1 +metadata: + name: crier + namespace: prow +roleRef: + apiGroup: rbac.authorization.k8s.io + kind: Role + name: crier +subjects: + - kind: ServiceAccount + name: crier + namespace: prow +--- +kind: RoleBinding +apiVersion: rbac.authorization.k8s.io/v1 +metadata: + name: crier + namespace: test-pods +roleRef: + apiGroup: rbac.authorization.k8s.io + kind: Role + name: crier +subjects: + - kind: ServiceAccount + name: crier + namespace: prow + diff --git a/infrastructure/prow/roles.yaml b/infrastructure/prow/roles.yaml new file mode 100644 index 00000000..81c5223c --- /dev/null +++ b/infrastructure/prow/roles.yaml @@ -0,0 +1,262 @@ +--- +kind: Role +apiVersion: 
rbac.authorization.k8s.io/v1 +metadata: + namespace: prow + name: "deck" +rules: + - apiGroups: + - "prow.k8s.io" + resources: + - prowjobs + verbs: + - get + - list + - watch +--- +kind: Role +apiVersion: rbac.authorization.k8s.io/v1 +metadata: + namespace: test-pods + name: "deck" +rules: + - apiGroups: + - "" + resources: + - pods/log + verbs: + - get +--- +kind: Role +apiVersion: rbac.authorization.k8s.io/v1 +metadata: + namespace: prow + name: "horologium" +rules: + - apiGroups: + - "prow.k8s.io" + resources: + - prowjobs + verbs: + - create + - list + - watch +--- +kind: Role +apiVersion: rbac.authorization.k8s.io/v1 +metadata: + namespace: prow + name: "sinker" +rules: + - apiGroups: + - "prow.k8s.io" + resources: + - prowjobs + verbs: + - delete + - list + - watch + - get + - apiGroups: + - coordination.k8s.io + resources: + - leases + resourceNames: + - prow-sinker-leaderlock + verbs: + - get + - update + - apiGroups: + - coordination.k8s.io + resources: + - leases + verbs: + - create + - apiGroups: + - "" + resources: + - configmaps + resourceNames: + - prow-sinker-leaderlock + verbs: + - get + - update + - apiGroups: + - "" + resources: + - configmaps + - events + verbs: + - create +--- +kind: Role +apiVersion: rbac.authorization.k8s.io/v1 +metadata: + namespace: test-pods + name: "sinker" +rules: + - apiGroups: + - "" + resources: + - pods + verbs: + - delete + - list + - watch + - get + - patch +--- +kind: Role +apiVersion: rbac.authorization.k8s.io/v1 +metadata: + namespace: prow + name: "hook" +rules: + - apiGroups: + - "prow.k8s.io" + resources: + - prowjobs + verbs: + - create + - get + - list + - update + - apiGroups: + - "" + resources: + - configmaps + verbs: + - create + - get + - update +--- +kind: Role +apiVersion: rbac.authorization.k8s.io/v1 +metadata: + namespace: prow + name: "tide" +rules: + - apiGroups: + - "prow.k8s.io" + resources: + - prowjobs + verbs: + - create + - list + - get + - watch +--- +kind: Role +apiVersion: rbac.authorization.k8s.io/v1 +metadata: + namespace: prow + name: "statusreconciler" +rules: + - apiGroups: + - "prow.k8s.io" + resources: + - prowjobs + verbs: + - create +--- +kind: Role +apiVersion: rbac.authorization.k8s.io/v1 +metadata: + namespace: prow + name: prow-controller-manager +rules: + - apiGroups: + - "prow.k8s.io" + resources: + - prowjobs + verbs: + - get + - list + - watch + - update + - patch + - apiGroups: + - coordination.k8s.io + resources: + - leases + resourceNames: + - prow-controller-manager-leader-lock + verbs: + - get + - update + - apiGroups: + - coordination.k8s.io + resources: + - leases + verbs: + - create + - apiGroups: + - "" + resources: + - configmaps + resourceNames: + - prow-controller-manager-leader-lock + verbs: + - get + - update + - apiGroups: + - "" + resources: + - configmaps + - events + verbs: + - create +--- +kind: Role +apiVersion: rbac.authorization.k8s.io/v1 +metadata: + namespace: test-pods + name: prow-controller-manager +rules: + - apiGroups: + - "" + resources: + - pods + verbs: + - delete + - list + - watch + - create + - patch +--- +kind: Role +apiVersion: rbac.authorization.k8s.io/v1 +metadata: + namespace: prow + name: crier +rules: + - apiGroups: + - "prow.k8s.io" + resources: + - "prowjobs" + verbs: + - "get" + - "watch" + - "list" + - "patch" +--- +kind: Role +apiVersion: rbac.authorization.k8s.io/v1 +metadata: + namespace: test-pods + name: crier +rules: + - apiGroups: + - "" + resources: + - "pods" + - "events" + verbs: + - "get" + - "list" + - apiGroups: + - "" + resources: + - 
"pods" + verbs: + - "patch" diff --git a/infrastructure/prow/secrets.yaml b/infrastructure/prow/secrets.yaml new file mode 100644 index 00000000..6d7189bd --- /dev/null +++ b/infrastructure/prow/secrets.yaml @@ -0,0 +1,41 @@ +--- +# Get this from Github +# apiVersion: v1 +# kind: Secret +# metadata: +# namespace: prow +# name: github-token +# stringData: +# cert: <> +# appid: <> +# --- +# Generate this manually +# apiVersion: v1 +# kind: Secret +# metadata: +# namespace: prow +# name: hmac-token +# stringData: +# # Generate via `openssl rand -hex 20`. This is the secret used in the GitHub webhook configuration +# hmac: << insert-hmac-token-here >> +apiVersion: v1 +kind: Secret +metadata: + namespace: prow + name: s3-credentials +stringData: + service-account.json: | + { + "region": "us-east-2" + } +--- +apiVersion: v1 +kind: Secret +metadata: + namespace: test-pods + name: s3-credentials +stringData: + service-account.json: | + { + "region": "us-east-2" + } diff --git a/infrastructure/prow/serviceaccounts.yaml b/infrastructure/prow/serviceaccounts.yaml new file mode 100644 index 00000000..bdf4a64f --- /dev/null +++ b/infrastructure/prow/serviceaccounts.yaml @@ -0,0 +1,64 @@ +--- +kind: ServiceAccount +apiVersion: v1 +metadata: + namespace: prow + name: "deck" + annotations: + eks.amazonaws.com/role-arn: arn:aws:iam::893168113496:role/resf-prow-peridot +--- +kind: ServiceAccount +apiVersion: v1 +metadata: + namespace: prow + name: "horologium" + annotations: + eks.amazonaws.com/role-arn: arn:aws:iam::893168113496:role/resf-prow-peridot +--- +kind: ServiceAccount +apiVersion: v1 +metadata: + namespace: prow + name: "sinker" + annotations: + eks.amazonaws.com/role-arn: arn:aws:iam::893168113496:role/resf-prow-peridot +--- +apiVersion: v1 +kind: ServiceAccount +metadata: + namespace: prow + name: "hook" + annotations: + eks.amazonaws.com/role-arn: arn:aws:iam::893168113496:role/resf-prow-peridot +--- +apiVersion: v1 +kind: ServiceAccount +metadata: + namespace: prow + name: "tide" + annotations: + eks.amazonaws.com/role-arn: arn:aws:iam::893168113496:role/resf-prow-peridot +--- +apiVersion: v1 +kind: ServiceAccount +metadata: + namespace: prow + name: "statusreconciler" + annotations: + eks.amazonaws.com/role-arn: arn:aws:iam::893168113496:role/resf-prow-peridot +--- +apiVersion: v1 +kind: ServiceAccount +metadata: + namespace: prow + name: prow-controller-manager + annotations: + eks.amazonaws.com/role-arn: arn:aws:iam::893168113496:role/resf-prow-peridot +--- +kind: ServiceAccount +apiVersion: v1 +metadata: + name: crier + namespace: prow + annotations: + eks.amazonaws.com/role-arn: arn:aws:iam::893168113496:role/resf-prow-peridot diff --git a/infrastructure/prow/services.yaml b/infrastructure/prow/services.yaml new file mode 100644 index 00000000..008325f3 --- /dev/null +++ b/infrastructure/prow/services.yaml @@ -0,0 +1,54 @@ +--- +apiVersion: v1 +kind: Service +metadata: + namespace: prow + name: hook +spec: + selector: + app: hook + ports: + - port: 8888 +--- +apiVersion: v1 +kind: Service +metadata: + namespace: prow + name: deck +spec: + selector: + app: deck + ports: + - port: 80 + targetPort: 8080 +--- +apiVersion: v1 +kind: Service +metadata: + namespace: prow + name: tide +spec: + selector: + app: tide + ports: + - port: 80 + targetPort: 8888 +--- +apiVersion: v1 +kind: Service +metadata: + labels: + app: ghproxy + namespace: prow + name: ghproxy +spec: + ports: + - name: main + port: 80 + protocol: TCP + targetPort: 8888 + - name: metrics + port: 9090 + selector: + app: ghproxy + 
type: ClusterIP diff --git a/infrastructure/prow/virtualservice.yaml b/infrastructure/prow/virtualservice.yaml new file mode 100644 index 00000000..e79b5a27 --- /dev/null +++ b/infrastructure/prow/virtualservice.yaml @@ -0,0 +1,29 @@ +--- +apiVersion: networking.istio.io/v1beta1 +kind: VirtualService +metadata: + name: prow + namespace: prow + annotations: + external-dns.alpha.kubernetes.io/target: ingress.build.resf.org +spec: + gateways: + - istio-system/base-gateway-public + hosts: + - prow.build.resf.org + http: + - name: hook + match: + - uri: + prefix: "/hook" + route: + - destination: + host: hook.prow.svc.cluster.local + port: + number: 8888 + - name: deck + route: + - destination: + host: deck.prow.svc.cluster.local + port: + number: 80 diff --git a/initdb/BUILD.bazel b/initdb/BUILD.bazel index 15968234..603a4ef3 100644 --- a/initdb/BUILD.bazel +++ b/initdb/BUILD.bazel @@ -21,8 +21,15 @@ go_library( go_binary( name = "initdb", embed = [":initdb_lib"], + visibility = ["//visibility:public"], +) + +go_binary( + name = "initdb_amd64", + embed = [":initdb_lib"], goarch = "amd64", goos = "linux", + tags = ["manual"], visibility = ["//visibility:public"], ) @@ -31,6 +38,7 @@ go_binary( embed = [":initdb_lib"], goarch = "arm64", goos = "linux", + tags = ["manual"], visibility = ["//visibility:public"], ) @@ -39,6 +47,7 @@ go_binary( embed = [":initdb_lib"], goarch = "s390x", goos = "linux", + tags = ["manual"], visibility = ["//visibility:public"], ) @@ -47,6 +56,7 @@ go_binary( embed = [":initdb_lib"], goarch = "ppc64le", goos = "linux", + tags = ["manual"], visibility = ["//visibility:public"], ) @@ -54,7 +64,7 @@ container( architecture = "amd64", base = "//bases/bazel/go:go_amd64", files = [ - ":initdb", + ":initdb_amd64", ], image_name = "initdb", ) diff --git a/package.json b/package.json index 79661a3e..c9078572 100644 --- a/package.json +++ b/package.json @@ -25,10 +25,11 @@ "@mui/lab": "^5.0.0-alpha.60", "@mui/material": "^5.2.4", "@mui/styles": "^5.2.3", + "@mui/system": "^5.10.6", "@mui/x-data-grid": "^5.2.0", "@ory/hydra-client": "^1.10.6", "@pmmmwh/react-refresh-webpack-plugin": "^0.5.1", - "@tailwindcss/forms": "^0.4.0", + "@tailwindcss/forms": "^0.5.3", "@types/classnames": "^2.2.11", "@types/node": "^14.14.22", "@types/react": "^17.0.24", @@ -82,7 +83,7 @@ "strip-ansi": "^6.0.0", "style-loader": "^2.0.0", "styled-components": "^5.3.3", - "tailwindcss": "^3.0.6", + "tailwindcss": "^3.1.8", "terser-webpack-plugin": "^5.1.2", "tslib": "^2.1.0", "type-fest": "^0.20.2", diff --git a/peridot/builder/v1/workflow/BUILD.bazel b/peridot/builder/v1/workflow/BUILD.bazel index 05745149..da031fd8 100644 --- a/peridot/builder/v1/workflow/BUILD.bazel +++ b/peridot/builder/v1/workflow/BUILD.bazel @@ -18,6 +18,7 @@ go_library( importpath = "peridot.resf.org/peridot/builder/v1/workflow", visibility = ["//visibility:public"], deps = [ + "//apollo/rpmutils", "//modulemd", "//peridot/composetools", "//peridot/db", @@ -29,7 +30,6 @@ go_library( "//peridot/proto/v1/yumrepofs:pb", "//peridot/rpmbuild", "//peridot/yummeta", - "//secparse/rpmutils", "//servicecatalog", "//utils", "//vendor/cirello.io/dynamolock", @@ -58,7 +58,6 @@ go_library( "//vendor/go.temporal.io/sdk/client", "//vendor/go.temporal.io/sdk/temporal", "//vendor/go.temporal.io/sdk/workflow", - "//vendor/golang.org/x/sys/unix", "//vendor/gopkg.in/yaml.v3:yaml_v3", "//vendor/k8s.io/api/core/v1:core", "//vendor/k8s.io/apimachinery/pkg/api/resource", diff --git a/peridot/builder/v1/workflow/arch.go b/peridot/builder/v1/workflow/arch.go 
index 4be20f9b..7b125c9d 100644 --- a/peridot/builder/v1/workflow/arch.go +++ b/peridot/builder/v1/workflow/arch.go @@ -52,10 +52,10 @@ import ( "github.com/google/uuid" "go.temporal.io/sdk/activity" "google.golang.org/protobuf/types/known/wrapperspb" + "peridot.resf.org/apollo/rpmutils" "peridot.resf.org/peridot/db/models" peridotpb "peridot.resf.org/peridot/pb" "peridot.resf.org/peridot/rpmbuild" - "peridot.resf.org/secparse/rpmutils" "peridot.resf.org/servicecatalog" ) @@ -668,9 +668,11 @@ func (c *Controller) BuildArchActivity(ctx context.Context, projectId string, pa pkgGroup = project.BuildStagePackages } - if len(pkgEo.DependsOn) != 0 { - for _, pkg := range pkgEo.DependsOn { - pkgGroup = append(pkgGroup, pkg) + if pkgEo != nil { + if len(pkgEo.DependsOn) != 0 { + for _, pkg := range pkgEo.DependsOn { + pkgGroup = append(pkgGroup, pkg) + } } } @@ -736,11 +738,16 @@ func (c *Controller) UploadArchActivity(ctx context.Context, projectId string, p var ret []*UploadActivityResult for _, rpm := range rpms { - rpmFilename := filepath.Base(rpm) - if !rpmutils.NVR().MatchString(rpmFilename) { + var nvr []string + base := strings.TrimSuffix(filepath.Base(rpm), ".rpm") + if rpmutils.NVRUnusualRelease().MatchString(base) { + nvr = rpmutils.NVRUnusualRelease().FindStringSubmatch(base) + } else if rpmutils.NVR().MatchString(base) { + nvr = rpmutils.NVR().FindStringSubmatch(base) + } + if !rpmutils.NVR().MatchString(base) { return nil, errors.New("invalid rpm") } - nvr := rpmutils.NVR().FindStringSubmatch(rpmFilename) res, err := c.uploadArtifact(projectId, parentTaskId, rpm, nvr[4], peridotpb.TaskType_TASK_TYPE_BUILD_ARCH_UPLOAD) if err != nil { return nil, err diff --git a/peridot/builder/v1/workflow/build.go b/peridot/builder/v1/workflow/build.go index a96fbd50..f5bd03fe 100644 --- a/peridot/builder/v1/workflow/build.go +++ b/peridot/builder/v1/workflow/build.go @@ -43,10 +43,10 @@ import ( "google.golang.org/protobuf/types/known/anypb" "google.golang.org/protobuf/types/known/wrapperspb" "path/filepath" + "peridot.resf.org/apollo/rpmutils" "peridot.resf.org/peridot/db/models" peridotpb "peridot.resf.org/peridot/pb" yumrepofspb "peridot.resf.org/peridot/yumrepofs/pb" - "peridot.resf.org/secparse/rpmutils" "peridot.resf.org/utils" "strings" "time" diff --git a/peridot/builder/v1/workflow/import.go b/peridot/builder/v1/workflow/import.go index c29110fe..49019edb 100644 --- a/peridot/builder/v1/workflow/import.go +++ b/peridot/builder/v1/workflow/import.go @@ -64,15 +64,14 @@ import ( "google.golang.org/protobuf/types/known/anypb" "google.golang.org/protobuf/types/known/wrapperspb" "io" - "io/ioutil" http2 "net/http" "net/url" "os" "path/filepath" + "peridot.resf.org/apollo/rpmutils" "peridot.resf.org/peridot/db/models" peridotpb "peridot.resf.org/peridot/pb" "peridot.resf.org/peridot/rpmbuild" - "peridot.resf.org/secparse/rpmutils" "peridot.resf.org/utils" "regexp" "strings" @@ -171,10 +170,6 @@ func compressFolder(path string, stripHeaderName string, buf io.Writer, fs billy return innerCompress(path, stripHeaderName, tw, fs) } -func genRefSpec(bp string, mv int) config.RefSpec { - return config.RefSpec(fmt.Sprintf("+refs/heads/%s%d:refs/remotes/origin/%s%d", bp, mv, bp, mv)) -} - func genPushBranch(bp string, suffix string, mv int) string { return fmt.Sprintf("%s%d%s", bp, mv, suffix) } @@ -204,157 +199,6 @@ func recursiveRemove(path string, fs billy.Filesystem) error { return nil } -func copyFile(fromPath string, toPath string, fs billy.Filesystem) error { - from, err := fs.Open(fromPath) - if 
err != nil { - return err - } - defer from.Close() - - to, err := fs.OpenFile(toPath, os.O_RDWR|os.O_CREATE|os.O_TRUNC, 0644) - if err != nil { - return err - } - defer to.Close() - - _, err = io.Copy(to, from) - if err != nil { - return err - } - - return nil -} - -func recursiveCopy(fromPath string, toPath string, fs billy.Filesystem) error { - read, err := fs.ReadDir(fromPath) - if err != nil { - return fmt.Errorf("could not read dir: %v", err) - } - - for _, fi := range read { - fullFromPath := filepath.Join(fromPath, fi.Name()) - fullToPath := filepath.Join(toPath, fi.Name()) - - if fi.IsDir() { - err := os.MkdirAll(fullToPath, 0755) - if err != nil { - return err - } - err = recursiveCopy(fullFromPath, fullToPath, fs) - if err != nil { - return err - } - } else { - err = copyFile(fullFromPath, fullToPath, fs) - if err != nil { - return err - } - } - } - - return nil -} - -func recursiveCopyRpms(fromDir string, toDir string, fs billy.Filesystem) error { - read, err := fs.ReadDir(fromDir) - if err != nil { - return fmt.Errorf("could not read dir: %v", err) - } - - for _, fi := range read { - fullFromPath := filepath.Join(fromDir, fi.Name()) - fullToPath := filepath.Join(toDir, fi.Name()) - - if fi.IsDir() { - err = recursiveCopyRpms(fullFromPath, toDir, fs) - if err != nil { - return err - } - } else { - if strings.HasSuffix(fullFromPath, ".rpm") { - err = copyFile(fullFromPath, fullToPath, fs) - if err != nil { - return err - } - } - } - } - - return nil -} - -func recursiveChown(path string, uid int, gid int) error { - read, err := os.Open(path) - if err != nil { - return fmt.Errorf("could not open dir: %v", err) - } - defer read.Close() - - files, err := read.Readdirnames(0) - if err != nil { - return fmt.Errorf("could not read dir: %v", err) - } - - for _, file := range files { - fullPath := filepath.Join(path, file) - - err = os.Chown(fullPath, uid, gid) - if err != nil { - continue - } - - fi, err := os.Stat(fullPath) - if err != nil { - return err - } - - if fi.IsDir() { - err = recursiveChown(fullPath, uid, gid) - if err != nil { - return err - } - } - } - - return nil -} - -func recursiveChmod(path string, num int) error { - read, err := os.Open(path) - if err != nil { - return fmt.Errorf("could not open dir: %v", err) - } - defer read.Close() - - files, err := read.Readdirnames(0) - if err != nil { - return fmt.Errorf("could not read dir: %v", err) - } - - for _, file := range files { - fullPath := filepath.Join(path, file) - - err = os.Chmod(fullPath, os.FileMode(num)) - if err != nil { - return err - } - - fi, err := os.Stat(fullPath) - if err != nil { - return err - } - - if fi.IsDir() { - err = recursiveChmod(fullPath, num) - if err != nil { - return err - } - } - } - - return nil -} - func checkoutRepo(project *models.Project, sourceBranchPrefix string, remoteUrl string, authenticator transport.AuthMethod, tagMode git.TagMode, onDisk bool) (*git.Repository, *git.Worktree, error) { var fs billy.Filesystem if onDisk { @@ -393,64 +237,6 @@ func GetTargetScmUrl(project *models.Project, packageName string, section OpenPa return strings.Replace(strings.Replace(fmt.Sprintf("%s/%s/%s/%s.git", project.TargetGitlabHost, project.TargetPrefix, section, gitlabify(packageName)), "//", "/", -1), ":/", "://", 1) } -func getVersionAndRelease(project *models.Project, fs billy.Filesystem) (string, string, error) { - ls, err := fs.ReadDir("SPECS") - if err != nil { - return "", "", err - } - if len(ls) != 1 { - return "", "", fmt.Errorf("only one spec file is allowed") - } - - f, err := 
fs.Open(fmt.Sprintf("SPECS/%s", ls[0].Name())) - if err != nil { - return "", "", err - } - - specBytes, err := ioutil.ReadAll(f) - if err != nil { - return "", "", err - } - specString := string(specBytes) - - var version string - var release string - - distTag := fmt.Sprintf(".el%d", project.MajorVersion) - if project.DistTagOverride.Valid { - distTag = fmt.Sprintf(".%s", project.DistTagOverride.String) - } - - distReplacer := strings.NewReplacer("%{?dist}", distTag) - - for _, line := range strings.Split(specString, "\n") { - if fieldValueRegex.MatchString(line) { - fieldValue := strings.SplitN(line, ":", 2) - field := strings.TrimSpace(fieldValue[0]) - value := strings.TrimSpace(fieldValue[1]) - - if field == "Version" { - version = value - } - - if field == "Release" { - release = distReplacer.Replace(value) - } - } - } - - if version == "" { - return "", "", temporal.NewNonRetryableApplicationError("no version was found", "SRC_NO_VERSION", nil) - } - if release == "" { - return "", "", temporal.NewNonRetryableApplicationError("no release was found", "SRC_NO_RELEASE", nil) - } - - normalizer := strings.NewReplacer("-", ".", "~", ".") - - return normalizer.Replace(version), normalizer.Replace(release), nil -} - func (c *Controller) getAuthenticator(projectId string) (transport.AuthMethod, error) { // Retrieve keys for the project projectKeys, err := c.db.GetProjectKeys(projectId) @@ -756,22 +542,6 @@ func (c *Controller) ImportPackageWorkflow(ctx workflow.Context, req *peridotpb. // Loop through all revisions and deactivate previous import revisions (if exists) // The latest import revisions should be the only one active if !req.SetInactive { - for _, revision := range importRevisions { - packageVersionId, err := tx.GetPackageVersionId(pkg.ID.String(), revision.Vre.Version.Value, revision.Vre.Release.Value) - if err != nil { - if err != sql.ErrNoRows { - return nil, err - } else { - continue - } - } - err = tx.DeactivateImportRevisionsByPackageVersionId(packageVersionId) - if err != nil { - setInternalError(errorDetails, err) - return nil, err - } - } - // Deactivate previous package version (newer versions even if lower take precedent) // todo(mustafa): Maybe we should add a config option later? 
err = tx.DeactivateProjectPackageVersionByPackageIdAndProjectId(pkg.ID.String(), project.ID.String()) diff --git a/peridot/builder/v1/workflow/infrastructure.go b/peridot/builder/v1/workflow/infrastructure.go index 37768a77..4691ead8 100644 --- a/peridot/builder/v1/workflow/infrastructure.go +++ b/peridot/builder/v1/workflow/infrastructure.go @@ -708,12 +708,12 @@ func (c *Controller) CreateK8sPodActivity(ctx context.Context, req *ProvisionWor // The privileges are dropped soon after if req.Privileged { podConfig.Spec.Containers[0].SecurityContext = &v1.SecurityContext{ - RunAsUser: utils.Int64(0), - RunAsGroup: utils.Int64(0), - Privileged: utils.Bool(true), - RunAsNonRoot: utils.Bool(false), - ReadOnlyRootFilesystem: utils.Bool(false), - AllowPrivilegeEscalation: utils.Bool(true), + RunAsUser: utils.Pointer[int64](0), + RunAsGroup: utils.Pointer[int64](0), + RunAsNonRoot: utils.Pointer[bool](false), + ReadOnlyRootFilesystem: utils.Pointer[bool](false), + AllowPrivilegeEscalation: utils.Pointer[bool](true), + Privileged: utils.Pointer[bool](true), } } diff --git a/peridot/builder/v1/workflow/rpmimport.go b/peridot/builder/v1/workflow/rpmimport.go index b56bed31..adc37d91 100644 --- a/peridot/builder/v1/workflow/rpmimport.go +++ b/peridot/builder/v1/workflow/rpmimport.go @@ -48,11 +48,11 @@ import ( "io/ioutil" "os" "path/filepath" + "peridot.resf.org/apollo/rpmutils" "peridot.resf.org/peridot/db/models" peridotpb "peridot.resf.org/peridot/pb" "peridot.resf.org/peridot/rpmbuild" yumrepofspb "peridot.resf.org/peridot/yumrepofs/pb" - "peridot.resf.org/secparse/rpmutils" "peridot.resf.org/utils" "strings" "time" diff --git a/peridot/builder/v1/workflow/srpm.go b/peridot/builder/v1/workflow/srpm.go index b23d1495..2774c36c 100644 --- a/peridot/builder/v1/workflow/srpm.go +++ b/peridot/builder/v1/workflow/srpm.go @@ -421,9 +421,11 @@ func (c *Controller) BuildSRPMActivity(ctx context.Context, upstreamPrefix strin if len(project.SrpmStagePackages) != 0 { pkgGroup = project.SrpmStagePackages } - if len(pkgEo.DependsOn) != 0 { - for _, pkg := range pkgEo.DependsOn { - pkgGroup = append(pkgGroup, pkg) + if pkgEo != nil { + if len(pkgEo.DependsOn) != 0 { + for _, pkg := range pkgEo.DependsOn { + pkgGroup = append(pkgGroup, pkg) + } } } diff --git a/peridot/builder/v1/workflow/yumrepofs.go b/peridot/builder/v1/workflow/yumrepofs.go index 50259378..89fd1c35 100644 --- a/peridot/builder/v1/workflow/yumrepofs.go +++ b/peridot/builder/v1/workflow/yumrepofs.go @@ -55,6 +55,7 @@ import ( "io" "io/ioutil" "path/filepath" + "peridot.resf.org/apollo/rpmutils" "peridot.resf.org/modulemd" "peridot.resf.org/peridot/composetools" peridotdb "peridot.resf.org/peridot/db" @@ -63,7 +64,6 @@ import ( peridotpb "peridot.resf.org/peridot/pb" "peridot.resf.org/peridot/yummeta" yumrepofspb "peridot.resf.org/peridot/yumrepofs/pb" - "peridot.resf.org/secparse/rpmutils" "peridot.resf.org/utils" "regexp" "strings" @@ -1227,8 +1227,12 @@ func (c *Controller) makeRepoChanges(tx peridotdb.Access, req *UpdateRepoRequest } var name string - if rpmutils.NVR().MatchString(filepath.Base(artifact.Name)) { - nvr := rpmutils.NVR().FindStringSubmatch(filepath.Base(artifact.Name)) + base := strings.TrimSuffix(filepath.Base(artifact.Name), ".rpm") + if rpmutils.NVRUnusualRelease().MatchString(base) { + nvr := rpmutils.NVRUnusualRelease().FindStringSubmatch(base) + name = nvr[1] + } else if rpmutils.NVR().MatchString(base) { + nvr := rpmutils.NVR().FindStringSubmatch(base) name = nvr[1] } diff --git 
a/peridot/cmd/v1/keykeeper/ci/deploy.jsonnet b/peridot/cmd/v1/keykeeper/ci/deploy.jsonnet index 1332eb3c..67a88d30 100644 --- a/peridot/cmd/v1/keykeeper/ci/deploy.jsonnet +++ b/peridot/cmd/v1/keykeeper/ci/deploy.jsonnet @@ -6,7 +6,7 @@ local utils = import 'ci/utils.jsonnet'; bycdeploy.new({ name: 'keykeeper', - replicas: if kubernetes.prod() then 20 else 3, + replicas: if kubernetes.prod() then 3 else 1, dbname: 'peridot', backend: true, migrate: true, @@ -52,6 +52,15 @@ bycdeploy.new({ health: { port: 46002, }, + volumes(metadata): [ + { + name: 'urandom', + path: '/dev/random', + hostPath: { + path: '/dev/random', + }, + } + ], env: [ { name: 'KEYKEEPER_PRODUCTION', diff --git a/peridot/cmd/v1/peridot/build_rpm_import.go b/peridot/cmd/v1/peridot/build_rpm_import.go index 4eac3ae5..fa2a8047 100644 --- a/peridot/cmd/v1/peridot/build_rpm_import.go +++ b/peridot/cmd/v1/peridot/build_rpm_import.go @@ -32,13 +32,12 @@ package main import ( "encoding/base64" + "github.com/spf13/cobra" "io/ioutil" "log" + "openapi.peridot.resf.org/peridotopenapi" "os" "time" - - "github.com/spf13/cobra" - "openapi.peridot.resf.org/peridotopenapi" ) type LookasideUploadTask struct { diff --git a/peridot/cmd/v1/peridotbuilder/BUILD.bazel b/peridot/cmd/v1/peridotbuilder/BUILD.bazel index a2b008c9..a0dbbfa6 100644 --- a/peridot/cmd/v1/peridotbuilder/BUILD.bazel +++ b/peridot/cmd/v1/peridotbuilder/BUILD.bazel @@ -28,11 +28,18 @@ go_library( ], ) +go_binary( + name = "peridotbuilder", + embed = [":peridotbuilder_lib"], + visibility = ["//visibility:public"], +) + go_binary( name = "peridotbuilder_amd64", embed = [":peridotbuilder_lib"], goarch = "amd64", goos = "linux", + tags = ["manual"], visibility = ["//visibility:public"], ) @@ -41,6 +48,7 @@ go_binary( embed = [":peridotbuilder_lib"], goarch = "arm64", goos = "linux", + tags = ["manual"], visibility = ["//visibility:public"], ) @@ -49,6 +57,7 @@ go_binary( embed = [":peridotbuilder_lib"], goarch = "s390x", goos = "linux", + tags = ["manual"], visibility = ["//visibility:public"], ) @@ -57,5 +66,6 @@ go_binary( embed = [":peridotbuilder_lib"], goarch = "ppc64le", goos = "linux", + tags = ["manual"], visibility = ["//visibility:public"], ) diff --git a/peridot/cmd/v1/peridotserver/ci/deploy.jsonnet b/peridot/cmd/v1/peridotserver/ci/deploy.jsonnet index 0c68806a..6ac96754 100644 --- a/peridot/cmd/v1/peridotserver/ci/deploy.jsonnet +++ b/peridot/cmd/v1/peridotserver/ci/deploy.jsonnet @@ -67,6 +67,14 @@ bycdeploy.new({ name: 'PERIDOT_S3_FORCE_PATH_STYLE', value: 'true' }, + if kubernetes.prod() then { + name: 'PERIDOT_S3_REGION', + value: 'us-east-2', + }, + if kubernetes.prod() then { + name: 'PERIDOT_S3_BUCKET', + value: 'resf-peridot-prod', + }, $.dsn, ] + temporal.kube_env('PERIDOT'), }) diff --git a/peridot/composetools/BUILD.bazel b/peridot/composetools/BUILD.bazel index 1b25df0a..b918b977 100644 --- a/peridot/composetools/BUILD.bazel +++ b/peridot/composetools/BUILD.bazel @@ -9,8 +9,8 @@ go_library( importpath = "peridot.resf.org/peridot/composetools", visibility = ["//visibility:public"], deps = [ + "//apollo/rpmutils", "//peridot/yummeta", - "//secparse/rpmutils", "//utils", "//vendor/github.com/gobwas/glob", ], diff --git a/peridot/composetools/rpm.go b/peridot/composetools/rpm.go index 99b31f4d..2fb80aa9 100644 --- a/peridot/composetools/rpm.go +++ b/peridot/composetools/rpm.go @@ -35,8 +35,8 @@ import ( "fmt" "github.com/gobwas/glob" "path/filepath" + "peridot.resf.org/apollo/rpmutils" "peridot.resf.org/peridot/yummeta" - 
"peridot.resf.org/secparse/rpmutils" "peridot.resf.org/utils" "strings" ) @@ -84,6 +84,11 @@ func IsDebugPackageNvra(nvra string) (bool, error) { return false, ErrInvalidNVR } + if rpmutils.NVRUnusualRelease().MatchString(nvra) { + match := rpmutils.NVRUnusualRelease().FindStringSubmatch(nvra) + return IsDebugPackage(match[1]), nil + } + match := rpmutils.NVR().FindStringSubmatch(nvra) return IsDebugPackage(match[1]), nil } diff --git a/peridot/db/db.go b/peridot/db/db.go index a620ba0a..c72591d8 100644 --- a/peridot/db/db.go +++ b/peridot/db/db.go @@ -45,7 +45,7 @@ type Access interface { CreateProject(project *peridotpb.Project) (*models.Project, error) UpdateProject(id string, project *peridotpb.Project) (*models.Project, error) SetProjectKeys(projectId string, username string, password string) error - SetBuildRootPackages(projectId string, srpmPackages []string, buildPackages []string) error + SetBuildRootPackages(projectId string, srpmPackages []string, buildPackages []string) error CreateBuild(packageId string, packageVersionId string, taskId string, projectId string) (*models.Build, error) GetArtifactsForBuild(buildId string) (models.TaskArtifacts, error) @@ -97,7 +97,7 @@ type Access interface { GetPackageID(name string) (string, error) SetExtraOptionsForPackage(projectId string, packageName string, withFlags pq.StringArray, withoutFlags pq.StringArray) error GetExtraOptionsForPackage(projectId string, packageName string) (*models.ExtraOptions, error) - SetGroupInstallOptionsForPackage(projectId string, packageName string, dependsOn pq.StringArray) error + SetGroupInstallOptionsForPackage(projectId string, packageName string, dependsOn pq.StringArray) error CreateTask(user *utils.ContextUser, arch string, taskType peridotpb.TaskType, projectId *string, parentTaskId *string) (*models.Task, error) SetTaskStatus(id string, status peridotpb.TaskStatus) error @@ -120,6 +120,8 @@ type Access interface { GetPluginsForProject(projectId string) (models.Plugins, error) GetExternalRepositoriesForProject(projectId string) (models.ExternalRepositories, error) + DeleteExternalRepositoryForProject(projectId string, externalRepositoryId string) error + CreateExternalRepositoryForProject(projectId string, repoURL string, priority *int32, moduleHotfixes bool) (*models.ExternalRepository, error) FindRepositoriesForPackage(projectId string, pkg string, internalOnly bool) (models.Repositories, error) FindRepositoriesForProject(projectId string, id *string, internalOnly bool) (models.Repositories, error) GetRepositoryRevision(revisionId string) (*models.RepositoryRevision, error) diff --git a/peridot/db/models/project.go b/peridot/db/models/project.go index fe6c8da7..fd564845 100644 --- a/peridot/db/models/project.go +++ b/peridot/db/models/project.go @@ -71,8 +71,8 @@ type Project struct { VendorMacro sql.NullString `json:"vendorMacro" db:"vendor_macro"` PackagerMacro sql.NullString `json:"packagerMacro" db:"packager_macro"` - SrpmStagePackages pq.StringArray `json:"srpmStagePackages" db:"srpm_stage_packages"` - BuildStagePackages pq.StringArray `json:"buildStagePackages" db:"build_stage_packages"` + SrpmStagePackages pq.StringArray `json:"srpmStagePackages" db:"srpm_stage_packages"` + BuildStagePackages pq.StringArray `json:"buildStagePackages" db:"build_stage_packages"` } type Projects []Project diff --git a/peridot/db/models/repository.go b/peridot/db/models/repository.go index 17f01b24..cdd8b606 100644 --- a/peridot/db/models/repository.go +++ b/peridot/db/models/repository.go @@ -50,8 +50,23 @@ 
type ExternalRepository struct { ModuleHotfixes bool `json:"moduleHotfixes" db:"module_hotfixes"` } +func (e *ExternalRepository) ToProto() *peridotpb.ExternalRepository { + return &peridotpb.ExternalRepository{ + Url: e.Url, + Priority: int32(e.Priority), + } +} + type ExternalRepositories []ExternalRepository +func (e ExternalRepositories) ToProto() []*peridotpb.ExternalRepository { + var result []*peridotpb.ExternalRepository + for _, r := range e { + result = append(result, r.ToProto()) + } + return result +} + type Repository struct { ID uuid.UUID `json:"id" db:"id"` CreatedAt time.Time `json:"createdAt" db:"created_at"` diff --git a/peridot/db/psql/import.go b/peridot/db/psql/import.go index a05da88b..eb5f752a 100644 --- a/peridot/db/psql/import.go +++ b/peridot/db/psql/import.go @@ -98,7 +98,6 @@ func (a *Access) GetLatestImportRevisionsForPackageInProject(packageName string, p.name = $1 and ppv.project_id = $2 and ppv.active = true - and ir.active = true order by ir.created_at desc `, packageName, diff --git a/peridot/db/psql/repository.go b/peridot/db/psql/repository.go index 7b203d42..a3104a54 100644 --- a/peridot/db/psql/repository.go +++ b/peridot/db/psql/repository.go @@ -45,6 +45,32 @@ func (a *Access) GetExternalRepositoriesForProject(projectId string) (ret models return ret, nil } +func (a *Access) DeleteExternalRepositoryForProject(projectId string, id string) error { + _, err := a.query.Exec("delete from external_repositories where project_id = $1 and id = $2", projectId, id) + return err +} + +func (a *Access) CreateExternalRepositoryForProject(projectId string, repoURL string, priority *int32, moduleHotfixes bool) (*models.ExternalRepository, error) { + var ret models.ExternalRepository + err := a.query.Get( + &ret, + ` + insert into external_repositories (project_id, url, priority) + values ($1, $2, $3, $4) + returning id, created_at, project_id, url, priority, module_hotfixes + `, + projectId, + repoURL, + priority, + moduleHotfixes, + ) + if err != nil { + return nil, err + } + + return &ret, nil +} + func (a *Access) FindRepositoriesForPackage(projectId string, pkg string, internalOnly bool) (ret models.Repositories, err error) { err = a.query.Select( &ret, diff --git a/peridot/keykeeper/v1/sign.go b/peridot/keykeeper/v1/sign.go index 7178c43d..234d1e58 100644 --- a/peridot/keykeeper/v1/sign.go +++ b/peridot/keykeeper/v1/sign.go @@ -188,60 +188,93 @@ func (s *Server) SignArtifactActivity(ctx context.Context, artifactId string, ke switch ext { case ".rpm": - var outBuf bytes.Buffer - opts := []string{ - "--define", "_gpg_name " + keyName, - "--define", "_peridot_keykeeper_key " + key.keyUuid.String(), - "--addsign", localPath, - } - cmd := gpgCmdEnv(exec.Command("rpm", opts...)) - cmd.Stdout = &outBuf - cmd.Stderr = &outBuf - err := cmd.Run() - if err != nil { - s.log.Errorf("failed to sign artifact %s: %v", artifact.Name, err) - statusErr := status.New(codes.Internal, "failed to sign artifact") - statusErr, err2 := statusErr.WithDetails(&errdetails.ErrorInfo{ - Reason: "rpmsign-failed", - Domain: "keykeeper.peridot.resf.org", - Metadata: map[string]string{ - "logs": outBuf.String(), - "err": err.Error(), - }, - }) - if err2 != nil { - s.log.Errorf("failed to add error details to status: %v", err2) + rpmSign := func() (*keykeeperpb.SignedArtifact, error) { + var outBuf bytes.Buffer + opts := []string{ + "--define", "_gpg_name " + keyName, + "--define", "_peridot_keykeeper_key " + key.keyUuid.String(), + "--addsign", localPath, + } + cmd := 
gpgCmdEnv(exec.Command("rpm", opts...)) + cmd.Stdout = &outBuf + cmd.Stderr = &outBuf + err := cmd.Run() + if err != nil { + s.log.Errorf("failed to sign artifact %s: %v", artifact.Name, err) + statusErr := status.New(codes.Internal, "failed to sign artifact") + statusErr, err2 := statusErr.WithDetails(&errdetails.ErrorInfo{ + Reason: "rpmsign-failed", + Domain: "keykeeper.peridot.resf.org", + Metadata: map[string]string{ + "logs": outBuf.String(), + "err": err.Error(), + }, + }) + if err2 != nil { + s.log.Errorf("failed to add error details to status: %v", err2) + } + return nil, statusErr.Err() + } + _, err = s.storage.PutObject(newObjectKey, localPath) + if err != nil { + s.log.Errorf("failed to upload artifact %s: %v", newObjectKey, err) + return nil, fmt.Errorf("failed to upload artifact %s: %v", newObjectKey, err) } - return nil, statusErr.Err() - } - _, err = s.storage.PutObject(newObjectKey, localPath) - if err != nil { - s.log.Errorf("failed to upload artifact %s: %v", newObjectKey, err) - return nil, fmt.Errorf("failed to upload artifact %s: %v", newObjectKey, err) - } - f, err := os.Open(localPath) + f, err := os.Open(localPath) + if err != nil { + return nil, err + } + + hasher := sha256.New() + _, err = io.Copy(hasher, f) + if err != nil { + return nil, err + } + hash := hex.EncodeToString(hasher.Sum(nil)) + + err = s.db.CreateTaskArtifactSignature(artifact.ID.String(), key.keyUuid.String(), hash) + if err != nil { + s.log.Errorf("failed to create task artifact signature: %v", err) + return nil, fmt.Errorf("failed to create task artifact signature: %v", err) + } + + return &keykeeperpb.SignedArtifact{ + Path: newObjectKey, + HashSha256: hash, + }, nil + } + verifySig := func() error { + var outBuf bytes.Buffer + opts := []string{ + "--define", "_gpg_name " + keyName, + "--define", "_peridot_keykeeper_key " + key.keyUuid.String(), + "--checksig", localPath, + } + cmd := gpgCmdEnv(exec.Command("rpm", opts...)) + cmd.Stdout = &outBuf + cmd.Stderr = &outBuf + err := cmd.Run() + if err != nil { + s.log.Errorf("failed to verify artifact %s: %v", artifact.Name, err) + return fmt.Errorf("failed to verify artifact %s: %v", artifact.Name, err) + } + if !strings.Contains(outBuf.String(), "digest signatures OK") { + s.log.Errorf("artifact %s not signed(?), retrying", artifact.Name) + return fmt.Errorf("artifact %s not signed(?), retrying", artifact.Name) + } + return nil + } + res, err := rpmSign() + if err != nil { + return nil, err + } + err = verifySig() if err != nil { return nil, err } - hasher := sha256.New() - _, err = io.Copy(hasher, f) - if err != nil { - return nil, err - } - hash := hex.EncodeToString(hasher.Sum(nil)) - - err = s.db.CreateTaskArtifactSignature(artifact.ID.String(), key.keyUuid.String(), hash) - if err != nil { - s.log.Errorf("failed to create task artifact signature: %v", err) - return nil, fmt.Errorf("failed to create task artifact signature: %v", err) - } - - return &keykeeperpb.SignedArtifact{ - Path: newObjectKey, - HashSha256: hash, - }, nil + return res, nil default: s.log.Infof("skipping artifact %s, extension %s not supported", artifact.Name, ext) return nil, ErrUnsupportedExtension diff --git a/peridot/proto/v1/project.proto b/peridot/proto/v1/project.proto index f1b357da..975051d5 100644 --- a/peridot/proto/v1/project.proto +++ b/peridot/proto/v1/project.proto @@ -82,6 +82,18 @@ service ProjectService { body: "*" }; } + + rpc ListExternalRepositories(ListExternalRepositoriesRequest) returns (ListExternalRepositoriesResponse) { + option 
(google.api.http) = { + get: "/v1/projects/{project_id=*}/external_repositories" + }; + } + + rpc DeleteExternalRepository(DeleteExternalRepositoryRequest) returns (DeleteExternalRepositoryResponse) { + option (google.api.http) = { + delete: "/v1/projects/{project_id=*}/external_repositories/{id=*}" + }; + } } // Project is a contained RPM distribution @@ -305,3 +317,31 @@ message LookasideFileUploadRequest { message LookasideFileUploadResponse { string digest = 1; } + +message ExternalRepository { + string url = 1; + int32 priority = 2; + bool module_hotfixes = 3; +} +message ListExternalRepositoriesRequest { + google.protobuf.StringValue project_id = 1 [(validate.rules).message.required = true]; +} +message ListExternalRepositoriesResponse { + repeated ExternalRepository repositories = 1; +} + +message CreateExternalRepositoryRequest { + google.protobuf.StringValue project_id = 1 [(validate.rules).message.required = true]; + google.protobuf.StringValue url = 2 [(validate.rules).message.required = true]; + google.protobuf.Int32Value priority = 3; + bool module_hotfixes = 4; +} +message CreateExternalRepositoryResponse { + ExternalRepository repository = 1; +} + +message DeleteExternalRepositoryRequest { + google.protobuf.StringValue project_id = 1 [(validate.rules).message.required = true]; + google.protobuf.StringValue id = 2 [(validate.rules).message.required = true]; +} +message DeleteExternalRepositoryResponse {} diff --git a/peridot/seed.sql b/peridot/seed.sql index 4e42793f..4422e9c0 100644 --- a/peridot/seed.sql +++ b/peridot/seed.sql @@ -63,8 +63,8 @@ insert into projects (id, name, major_version, target_gitlab_host, target_prefix values (:rocky9Id, 'Rocky Linux 9', 9, 'https://gitlab.com', 'rocky-9-test', 'r', 'https://git.centos.org', '', 'c', - 'el9', '-beta', - array ['x86_64', 'i686']); + 'el9', '', + array ['x86_64', 'aarch64']); insert into projects (id, name, major_version, target_gitlab_host, target_prefix, target_branch_prefix, source_git_host, @@ -75,15 +75,6 @@ values (:rl8Id, 'Rocky Linux 8', 8, 'https://gitlab.com', 'el8', array ['aarch64']); -insert into projects (id, name, major_version, target_gitlab_host, target_prefix, - target_branch_prefix, source_git_host, - source_prefix, source_branch_prefix, cdn_url, dist_tag_override, archs) -values (:supportedPackages8Id, 'SLES-TEST', 8, 'https://gitlab.com', - 'peridot-sles153-buildtest', 'r', - 'https://git.centos.org', '', 'c', 'https://rocky-linux-sources-staging.a1.rockylinux.org', - 'sl15', - array ['x86_64']); - -- the packages here are split into multiple queries -- so they get a unique created_at value insert into packages (id, name, package_type) diff --git a/publisher/cmd/publisher-legacy-errata/BUILD.bazel b/publisher/cmd/publisher-legacy-errata/BUILD.bazel index 2a5001d4..3794bb34 100644 --- a/publisher/cmd/publisher-legacy-errata/BUILD.bazel +++ b/publisher/cmd/publisher-legacy-errata/BUILD.bazel @@ -7,10 +7,9 @@ go_library( importpath = "peridot.resf.org/publisher/cmd/publisher-legacy-errata", visibility = ["//visibility:private"], deps = [ + "//apollo/db/connector", "//publisher/updateinfo/legacy", - "//secparse/db/connector", "//utils", - "//vendor/github.com/go-git/go-billy/v5/osfs", "//vendor/github.com/sirupsen/logrus", "//vendor/github.com/spf13/cobra", ], diff --git a/publisher/cmd/publisher-legacy-errata/main.go b/publisher/cmd/publisher-legacy-errata/main.go index ab25cb8b..551fd80e 100644 --- a/publisher/cmd/publisher-legacy-errata/main.go +++ b/publisher/cmd/publisher-legacy-errata/main.go @@ 
-31,11 +31,11 @@ package main import ( - "github.com/go-git/go-billy/v5/osfs" "github.com/sirupsen/logrus" "github.com/spf13/cobra" + "path/filepath" + apolloconnector "peridot.resf.org/apollo/db/connector" "peridot.resf.org/publisher/updateinfo/legacy" - "peridot.resf.org/secparse/db/connector" "peridot.resf.org/utils" ) @@ -52,11 +52,12 @@ var ( composeName string productName string productShort string - republish bool + productID int64 + scanAndStop bool ) func init() { - dname := "secparse" + dname := "apollo" cnf.DatabaseName = &dname cnf.Name = "publisher" @@ -66,10 +67,12 @@ func init() { pflags.StringVar(&composeName, "compose-name", "", "Compose to use") pflags.StringVar(&productName, "product-name", "", "Product name") pflags.StringVar(&productShort, "product-short", "", "Product name (short)") - pflags.BoolVar(&republish, "republish", false, "Republish (process published advisories as well, should be used if repodata is out of sync)") + pflags.Int64Var(&productID, "product-id", 0, "Product ID") + pflags.BoolVar(&scanAndStop, "scan-and-stop", false, "Scan RPMs and stop, used for debugging purposes") _ = root.MarkPersistentFlagRequired("compose-name") _ = root.MarkPersistentFlagRequired("product-name") _ = root.MarkPersistentFlagRequired("product-short") + _ = root.MarkPersistentFlagRequired("product-id") utils.AddDBFlagsOnly(pflags, cnf) utils.BindOnly(pflags, cnf) @@ -77,12 +80,11 @@ func init() { func mn(_ *cobra.Command, _ []string) { scanner := &legacy.Scanner{ - FS: osfs.New(repoDir), - DB: connector.MustAuto(), + DB: apolloconnector.MustAuto(), } - err := scanner.ScanAndPublish(from, composeName, productName, productShort, republish) + err := scanner.ScanAndPublish(from, filepath.Join(repoDir, composeName), productName, productShort, productID, scanAndStop) if err != nil { - logrus.Fatalf("Could not scan and publish: %v", err) + logrus.Fatalf("could not scan and publish: %v", err) } } diff --git a/publisher/updateinfo/legacy/BUILD.bazel b/publisher/updateinfo/legacy/BUILD.bazel index 729ec0db..74d1f106 100644 --- a/publisher/updateinfo/legacy/BUILD.bazel +++ b/publisher/updateinfo/legacy/BUILD.bazel @@ -6,11 +6,12 @@ go_library( importpath = "peridot.resf.org/publisher/updateinfo/legacy", visibility = ["//visibility:public"], deps = [ + "//apollo/db", + "//apollo/proto/v1:pb", + "//apollo/rpmutils", "//publisher/updateinfo", - "//secparse/db", - "//secparse/proto/v1:proto", - "//secparse/rpmutils", - "//vendor/github.com/go-git/go-billy/v5:go-billy", + "//utils", "//vendor/github.com/sirupsen/logrus", + "@org_golang_google_protobuf//types/known/wrapperspb:go_default_library", ], ) diff --git a/publisher/updateinfo/legacy/legacy.go b/publisher/updateinfo/legacy/legacy.go index b4940470..fbbbad28 100644 --- a/publisher/updateinfo/legacy/legacy.go +++ b/publisher/updateinfo/legacy/legacy.go @@ -39,28 +39,31 @@ import ( "encoding/xml" "errors" "fmt" - "github.com/go-git/go-billy/v5" "github.com/sirupsen/logrus" + "google.golang.org/protobuf/types/known/wrapperspb" "io" + "io/fs" + "io/ioutil" "os" "path/filepath" + apollodb "peridot.resf.org/apollo/db" + apollopb "peridot.resf.org/apollo/pb" + "peridot.resf.org/apollo/rpmutils" "peridot.resf.org/publisher/updateinfo" - "peridot.resf.org/secparse/db" - secparsepb "peridot.resf.org/secparse/proto/v1" - "peridot.resf.org/secparse/rpmutils" + "peridot.resf.org/utils" "strconv" "strings" + "sync" "time" ) type Scanner struct { - DB db.Access - FS billy.Filesystem + DB apollodb.Access } type internalAdvisory struct { - Pb 
*secparsepb.Advisory - Db *db.Advisory + Pb *apollopb.Advisory + Db *apollodb.Advisory } type rpm struct { @@ -68,47 +71,76 @@ type rpm struct { Src string Sha256 string Epoch string + Repo string + Err error Advisory *internalAdvisory } -func (s *Scanner) recursiveRPMScan(rootDir string) (map[string][]*rpm, error) { - infos, err := s.FS.ReadDir(rootDir) - if err != nil { - return nil, err - } +func (s *Scanner) recursiveRPMScan(rootDir string, cache map[string]string) (<-chan rpm, <-chan error) { + res := make(chan rpm) + errc := make(chan error, 1) - ret := map[string][]*rpm{} - - for _, fi := range infos { - if fi.IsDir() { - nRpms, err := s.recursiveRPMScan(filepath.Join(rootDir, fi.Name())) + go func() { + var wg sync.WaitGroup + err := filepath.WalkDir(rootDir, func(path string, d fs.DirEntry, err error) error { if err != nil { - // Ignore paths we can't access - continue + return err + } + if d.IsDir() { + return nil + } + if !strings.HasSuffix(d.Name(), ".rpm") { + return nil + } + if strings.Contains(path, "kickstart/Packages") { + return nil } - for k, v := range nRpms { - if ret[k] == nil { - ret[k] = []*rpm{} + wg.Add(1) + go func() { + k, err := s.findRepoData(filepath.Join(path, "..")) + if err != nil { + logrus.Errorf("could not find repodata for %s: %s", path, err) + k = filepath.Join(path, "..") + } + k = filepath.Join(k, "..") + + var sum string + if s := cache[d.Name()]; s != "" { + sum = s + } else { + f, _ := os.Open(path) + defer f.Close() + hasher := sha256.New() + _, err = io.Copy(hasher, f) + sum = hex.EncodeToString(hasher.Sum(nil)) } - ret[k] = append(ret[k], v...) - } - } else { - if strings.HasSuffix(fi.Name(), ".rpm") { - k := filepath.Join(rootDir, "..") - if ret[k] == nil { - ret[k] = []*rpm{} + select { + case res <- rpm{ + Name: d.Name(), + Sha256: sum, + Repo: k, + Err: err, + }: } - ret[k] = append(ret[k], &rpm{ - Name: fi.Name(), - }) - } - } - } + wg.Done() + }() - return ret, nil + select { + default: + return nil + } + }) + go func() { + wg.Wait() + close(res) + }() + errc <- err + }() + + return res, errc } func (s *Scanner) findRepoData(rootDir string) (string, error) { @@ -117,7 +149,7 @@ func (s *Scanner) findRepoData(rootDir string) (string, error) { } repoDataPath := filepath.Join(rootDir, "repodata") - stat, err := s.FS.Stat(repoDataPath) + stat, err := os.Stat(repoDataPath) if err != nil { if os.IsNotExist(err) { return s.findRepoData(filepath.Join(rootDir, "..")) @@ -133,17 +165,82 @@ func (s *Scanner) findRepoData(rootDir string) (string, error) { } } -func (s *Scanner) ScanAndPublish(from string, composeName string, productName string, productShort string, republish bool) error { - _, err := s.FS.Stat(composeName) +func (s *Scanner) ScanAndPublish(from string, composeName string, productName string, productShort string, productID int64, scanAndStop bool) error { + logrus.Infof("using %s as root directory", composeName) + + realPathCompose, err := filepath.EvalSymlinks(composeName) if err != nil { return err } - rpms, err := s.recursiveRPMScan(composeName) + logrus.Infof("real path is %s", realPathCompose) + + _, err = os.Stat(realPathCompose) if err != nil { + return fmt.Errorf("could not find compose %s: %w", realPathCompose, err) + } + + // Read cache file if exists, so we can skip hashing on known artifacts + cacheFile := filepath.Join(realPathCompose, fmt.Sprintf("apollocache_%d", productID)) + cache := map[string]string{} + if _, err := os.Stat(cacheFile); err == nil { + cacheBts, err := ioutil.ReadFile(cacheFile) + if err != nil { + 
return err + } + cacheLines := strings.Split(string(cacheBts), "\n") + for _, line := range cacheLines { + if line == "" { + continue + } + parts := strings.Split(line, " ") + cache[parts[0]] = parts[1] + } + } + + rpms := map[string][]*rpm{} + rpmsChan, errChan := s.recursiveRPMScan(realPathCompose, cache) + for r := range rpmsChan { + rpmCopy := r + if rpmCopy.Err != nil { + return rpmCopy.Err + } + + if rpms[rpmCopy.Repo] == nil { + rpms[rpmCopy.Repo] = []*rpm{} + } + + rpms[rpmCopy.Repo] = append(rpms[rpmCopy.Repo], &rpmCopy) + } + if err := <-errChan; err != nil { return err } + if len(rpms) == 0 { + return errors.New("no rpms found") + } + + // Cache hashes in {REPO_DIR}/apollocache_{PRODUCT_ID} + var newCacheEntries []string + for _, v := range rpms { + for _, rpm := range v { + entry := fmt.Sprintf("%s %s", rpm.Name, rpm.Sha256) + if !utils.StrContains(entry, newCacheEntries) { + newCacheEntries = append(newCacheEntries, entry) + } + } + } + if err := ioutil.WriteFile(cacheFile, []byte(strings.Join(newCacheEntries, "\n")), 0644); err != nil { + return err + } + + if scanAndStop { + for k := range rpms { + logrus.Infof("repo %s", k) + } + return nil + } + published := map[string][]*rpm{} beginTx, err := s.DB.Begin() @@ -153,16 +250,14 @@ func (s *Scanner) ScanAndPublish(from string, composeName string, productName st tx := s.DB.UseTransaction(beginTx) rollback := false - advisories, err := tx.GetAllAdvisories(false) + advisories, err := tx.GetAllAdvisories(&apollopb.AdvisoryFilters{ + IncludeUnpublished: wrapperspb.Bool(true), + }, 0, -1) if err != nil { return err } for _, advisory := range advisories { - // Skip already published advisories if republish is disabled - if advisory.PublishedAt.Valid && !republish { - continue - } - advisoryPb := db.DTOAdvisoryToPB(advisory) + advisoryPb := apollodb.DTOAdvisoryToPB(advisory) touchedOnce := false for _, artifactWithSrpm := range advisory.BuildArtifacts { @@ -180,34 +275,14 @@ func (s *Scanner) ScanAndPublish(from string, composeName string, productName st for _, repoRpm := range repoRpms { if repoRpm.Name == rpmutils.Epoch().ReplaceAllString(artifact, "") { - hasher := sha256.New() - f, err := s.FS.Open(filepath.Join(repo, "Packages", repoRpm.Name)) - if err != nil { - // If not found, then try sorted directory - f, err = s.FS.Open(filepath.Join(repo, "Packages", strings.ToLower(string(repoRpm.Name[0])), repoRpm.Name)) - if err != nil { - logrus.Errorf("Could not open affected package: %v", err) - rollback = true - break - } - } - _, err = io.Copy(hasher, f) - _ = f.Close() - if err != nil { - logrus.Errorf("Could not hash affected package: %v", err) - rollback = true - break - } - logrus.Infof("Advisory %s affects %s", advisoryPb.Name, artifact) - err = tx.AddAdvisoryRPM(advisory.ID, artifact) + err = tx.AddAdvisoryRPM(advisory.ID, artifact, productID) if err != nil { logrus.Errorf("Could not add advisory RPM: %v", err) rollback = true break } touchedOnce = true - repoRpm.Sha256 = hex.EncodeToString(hasher.Sum(nil)) repoRpm.Epoch = strings.TrimSuffix(rpmutils.Epoch().FindStringSubmatch(artifact)[0], ":") repoRpm.Advisory = &internalAdvisory{ Pb: advisoryPb, @@ -230,7 +305,7 @@ func (s *Scanner) ScanAndPublish(from string, composeName string, productName st advisory.PublishedAt = sql.NullTime{Valid: true, Time: time.Now()} _, err = tx.UpdateAdvisory(advisory) if err != nil { - logrus.Errorf("Could not update advisory %s: %v", advisoryPb.Name, err) + logrus.Errorf("could not update advisory %s: %v", advisoryPb.Name, err) rollback = true 
break } @@ -265,7 +340,7 @@ func (s *Scanner) ScanAndPublish(from string, composeName string, productName st } repoMdPath := filepath.Join(repoDataDir, "repomd.xml") - f, err := s.FS.Open(repoMdPath) + f, err := os.Open(repoMdPath) if err != nil { logrus.Errorf("Could not open repomd.xml: %v", err) rollback = true @@ -289,50 +364,8 @@ func (s *Scanner) ScanAndPublish(from string, composeName string, productName st } } - var updateInfo *updateinfo.UpdatesRoot - if olderUpdateInfo == "" { - updateInfo = &updateinfo.UpdatesRoot{ - Updates: []*updateinfo.Update{}, - } - } else { - if republish { - updateInfo = &updateinfo.UpdatesRoot{ - Updates: []*updateinfo.Update{}, - } - } else { - olderF, err := s.FS.Open(filepath.Join(repoDataDir, "..", olderUpdateInfo)) - if err != nil { - logrus.Errorf("Could not open older updateinfo: %v", err) - rollback = true - break - } - - var decoded bytes.Buffer - r, err := gzip.NewReader(olderF) - if err != nil { - logrus.Errorf("Could not create new gzip reader: %v", err) - rollback = true - break - } - if _, err := io.Copy(&decoded, r); err != nil { - logrus.Errorf("Could not copy gzip data: %v", err) - rollback = true - break - } - _ = r.Close() - - err = xml.NewDecoder(&decoded).Decode(&updateInfo) - if err != nil { - logrus.Errorf("Could not decode older updateinfo: %v", err) - rollback = true - break - } - _ = olderF.Close() - - if updateInfo.Updates == nil { - updateInfo.Updates = []*updateinfo.Update{} - } - } + updateInfo := &updateinfo.UpdatesRoot{ + Updates: []*updateinfo.Update{}, } for advisoryName, publishedRpms := range advisories { @@ -340,16 +373,16 @@ func (s *Scanner) ScanAndPublish(from string, composeName string, productName st updateType := "enhancement" switch advisory.Pb.Type { - case secparsepb.Advisory_BugFix: + case apollopb.Advisory_TYPE_BUGFIX: updateType = "bugfix" break - case secparsepb.Advisory_Security: + case apollopb.Advisory_TYPE_SECURITY: updateType = "security" break } severity := advisory.Pb.Severity.String() - if advisory.Pb.Severity == secparsepb.Advisory_UnknownSeverity { + if advisory.Pb.Severity == apollopb.Advisory_SEVERITY_UNKNOWN { severity = "None" } @@ -366,12 +399,12 @@ func (s *Scanner) ScanAndPublish(from string, composeName string, productName st Updated: &updateinfo.UpdateDate{ Date: advisory.Db.RedHatIssuedAt.Time.Format(updateinfo.TimeFormat), }, - Rights: "Copyright (C) 2021 Rocky Enterprise Software Foundation", + Rights: "Copyright (C) 2022 Rocky Enterprise Software Foundation", Release: productName, PushCount: "1", Severity: severity, Summary: advisory.Pb.Topic, - Description: fmt.Sprintf("For more information visit https://errata.rockylinux.org/%s", advisory.Pb.Name), + Description: advisory.Pb.Description, References: &updateinfo.UpdateReferenceRoot{ References: []*updateinfo.UpdateReference{}, }, @@ -387,10 +420,9 @@ func (s *Scanner) ScanAndPublish(from string, composeName string, productName st } for _, cve := range advisory.Pb.Cves { - sourceCve := strings.Split(cve, ":::") - sourceBy := sourceCve[0] - sourceLink := sourceCve[1] - id := sourceCve[2] + sourceBy := cve.SourceBy + sourceLink := cve.SourceLink + id := cve.Name referenceType := "erratum" if strings.HasPrefix(id, "CVE") { @@ -398,10 +430,10 @@ func (s *Scanner) ScanAndPublish(from string, composeName string, productName st } reference := &updateinfo.UpdateReference{ - Href: sourceLink, + Href: sourceLink.Value, ID: id, Type: referenceType, - Title: fmt.Sprintf("Update information for %s is retrieved from %s", id, sourceBy), + Title: 
fmt.Sprintf("Update information for %s is retrieved from %s", id, sourceBy.Value), } update.References.References = append(update.References.References, reference) @@ -410,7 +442,7 @@ func (s *Scanner) ScanAndPublish(from string, composeName string, productName st for _, publishedRpm := range publishedRpms { nvr := rpmutils.NVR().FindStringSubmatch(publishedRpm.Name) - update.PkgList.Collections[0].Packages = append(update.PkgList.Collections[0].Packages, &updateinfo.UpdatePackage{ + updPkg := &updateinfo.UpdatePackage{ Name: nvr[1], Version: nvr[2], Release: nvr[3], @@ -424,7 +456,11 @@ func (s *Scanner) ScanAndPublish(from string, composeName string, productName st Value: publishedRpm.Sha256, }, }, - }) + } + if advisory.Db.RebootSuggested { + updPkg.RebootSuggested = "True" + } + update.PkgList.Collections[0].Packages = append(update.PkgList.Collections[0].Packages, updPkg) } if rollback { break @@ -506,7 +542,7 @@ func (s *Scanner) ScanAndPublish(from string, composeName string, productName st } } - uif, err := s.FS.OpenFile(updateInfoPath, os.O_TRUNC|os.O_RDWR|os.O_CREATE, 0644) + uif, err := os.OpenFile(updateInfoPath, os.O_TRUNC|os.O_RDWR|os.O_CREATE, 0644) if err != nil { logrus.Errorf("Could not open updateinfo file %s: %v", updateInfoPath, err) rollback = true @@ -525,7 +561,7 @@ func (s *Scanner) ScanAndPublish(from string, composeName string, productName st repomd.Rpm = "" } - updateF, err := s.FS.OpenFile(repoMdPath, os.O_TRUNC|os.O_RDWR|os.O_CREATE, 0644) + updateF, err := os.OpenFile(repoMdPath, os.O_TRUNC|os.O_RDWR|os.O_CREATE, 0644) if err != nil { logrus.Errorf("Could not open repomd file for update: %v", err) rollback = true @@ -543,7 +579,7 @@ func (s *Scanner) ScanAndPublish(from string, composeName string, productName st _ = updateF.Close() if olderUpdateInfo != "" { - _ = s.FS.Remove(filepath.Join(repo, olderUpdateInfo)) + _ = os.Remove(filepath.Join(repo, olderUpdateInfo)) } } diff --git a/publisher/updateinfo/updateinfo.go b/publisher/updateinfo/updateinfo.go index afa874db..c84fbfba 100644 --- a/publisher/updateinfo/updateinfo.go +++ b/publisher/updateinfo/updateinfo.go @@ -62,14 +62,15 @@ type UpdatePackageSum struct { } type UpdatePackage struct { - Name string `xml:"name,attr"` - Version string `xml:"version,attr"` - Release string `xml:"release,attr"` - Epoch string `xml:"epoch,attr"` - Arch string `xml:"arch,attr"` - Src string `xml:"src,attr"` - Filename string `xml:"filename"` - Sum []*UpdatePackageSum `xml:"sum"` + Name string `xml:"name,attr"` + Version string `xml:"version,attr"` + Release string `xml:"release,attr"` + Epoch string `xml:"epoch,attr"` + Arch string `xml:"arch,attr"` + Src string `xml:"src,attr"` + Filename string `xml:"filename"` + RebootSuggested string `xml:"reboot_suggested"` + Sum []*UpdatePackageSum `xml:"sum"` } type UpdateCollection struct { diff --git a/rules_byc/defs.bzl b/rules_byc/defs.bzl index 305e7c8f..f5f320f0 100644 --- a/rules_byc/defs.bzl +++ b/rules_byc/defs.bzl @@ -57,6 +57,7 @@ def peridot_k8s(name, src, tags = [], outs = [], static = False, prod_only = Fal src = src, outs = outs, tags = tags + [ + "manual", "peridot_k8s", ], ext_strs = select({ @@ -86,11 +87,13 @@ def peridot_k8s(name, src, tags = [], outs = [], static = False, prod_only = Fal k8s_apply( name = "%s.apply" % name, srcs = [":%s" % name], + tags = ["manual"], visibility = ["//visibility:public"], ) multirun( - name = "%s.push" % name, - commands = dependent_push + [":%s_container" % name], + name = "%s.push" % name, + commands = dependent_push + 
[":%s_container" % name], + tags = ["manual"], ) multirun( name = "%s.push_apply" % name, @@ -98,6 +101,7 @@ def peridot_k8s(name, src, tags = [], outs = [], static = False, prod_only = Fal ":%s.push" % name, ":%s.apply" % name, ], + tags = ["manual"], ) def byc_frontend(name, tags = [], **kwargs): @@ -105,6 +109,7 @@ def byc_frontend(name, tags = [], **kwargs): name = "{}.bundle".format(name), build = True, tags = tags + [ + "manual", "byc_frontend_bundle", ], **kwargs @@ -114,6 +119,7 @@ def byc_frontend(name, tags = [], **kwargs): name = "{}.server".format(name), build = False, tags = tags + [ + "manual", "byc_frontend_server", "ibazel_notify_changes", "ibazel_live_reload", diff --git a/rules_byc/internal/byc_bundle/tailwind.config.js b/rules_byc/internal/byc_bundle/tailwind.config.js index d63dfb39..8b2642d3 100644 --- a/rules_byc/internal/byc_bundle/tailwind.config.js +++ b/rules_byc/internal/byc_bundle/tailwind.config.js @@ -64,8 +64,6 @@ const fontSize = { }; const rootDir = path.resolve(process.cwd()); -// const projectDir = segments[segments.length - 2].split('.')[0]; -// let projectPath = path.resolve(process.cwd(), projectDir); let projectDir = rootDir; let projectPath = rootDir; diff --git a/rules_byc/internal/container/container.bzl b/rules_byc/internal/container/container.bzl index e30e85e3..adc2ed08 100644 --- a/rules_byc/internal/container/container.bzl +++ b/rules_byc/internal/container/container.bzl @@ -16,6 +16,7 @@ def container(image_name, files, tars_to_layer = [], base = "//bases/bazel/go", name = "%s_bin" % image_name, directory = "/home/app/%s" % "bundle" if frontend else "bundle", files = files, + tags = ["manual"], visibility = [":__subpackages__"], ) @@ -24,10 +25,11 @@ def container(image_name, files, tars_to_layer = [], base = "//bases/bazel/go", layer_name = "%s_tar_layer" % image_name container_layer( name = layer_name, + tags = ["manual"], tars = tars_to_layer, visibility = [":__subpackages__"], ) - extra_layers += [layer_name] + extra_layers.append(layer_name) if not architecture: container_image( @@ -40,6 +42,7 @@ def container(image_name, files, tars_to_layer = [], base = "//bases/bazel/go", }), base = base, layers = [":%s_bin" % image_name] + extra_layers, + tags = ["manual"], visibility = ["//visibility:public"], ) else: @@ -48,6 +51,7 @@ def container(image_name, files, tars_to_layer = [], base = "//bases/bazel/go", architecture = architecture, base = base, layers = [":%s_bin" % image_name] + extra_layers, + tags = ["manual"], visibility = ["//visibility:public"], ) @@ -67,6 +71,7 @@ def container(image_name, files, tars_to_layer = [], base = "//bases/bazel/go", entry_point = server_entrypoint, data = server_files, base = ":%s_image" % image_name, + tags = ["manual"], ) container_push( @@ -86,5 +91,6 @@ def container(image_name, files, tars_to_layer = [], base = "//bases/bazel/go", "//platforms:s390x": "%s_s390x-{STABLE_BUILD_TAG}" % image_name, "//platforms:ppc64le": "%s_ppc64le-{STABLE_BUILD_TAG}" % image_name, }) if should_use_aws_format and not disable_conditional else tag, + tags = ["manual"], visibility = ["//visibility:public"], ) diff --git a/secparse/README.md b/secparse/README.md deleted file mode 100644 index 6b64a4a0..00000000 --- a/secparse/README.md +++ /dev/null @@ -1,32 +0,0 @@ -# secparse -Errata mirroring and publishing platform - -### Testing -`bazel test --test_arg=-test.v --test_output=all $(bazel query 'tests(//secparse/...)')` - -### Development -* Add `127.0.0.1 errata.pdot.localhost` to `/etc/hosts` -* Have a PostgreSQL 
database running with `postgres` user with `postgres` as password -* Create and migrate database `./hack/recreate_with_seed secparse` -* You can then run all components like this: - ``` - bazel run //secparse/cmd/secparse - bazel run //secparse/cmd/secparseadmin - bazel run //secparse/cmd/secparsecron - ibazel run //secparse/ui:secparse.server - ``` - -You can then visit `http://errata.pdot.localhost:9007` - -### Deployment (excluding `publisher`) -* Push all containers and tag with current git hash -``` -STABLE_STAGE=-prod bazel run --platforms @io_bazel_rules_go//go/toolchain:linux_amd64 //secparse/cmd/secparse:secparse-server -STABLE_STAGE=-prod bazel run --platforms @io_bazel_rules_go//go/toolchain:linux_amd64 //secparse/cmd/secparseadmin:secparseadmin-server -STABLE_STAGE=-prod bazel run --platforms @io_bazel_rules_go//go/toolchain:linux_amd64 //secparse/cmd/secparsecron:secparsecron-server -STABLE_STAGE=-prod bazel run --platforms @build_bazel_rules_nodejs//toolchains/node:linux_amd64 //secparse/ui:secparse-frontend -``` -* Clone `git@github.com:rocky-linux/peridot-ansible.git` and cd into `peridot-ansible` -* Change hashes in `roles/local/{name}/defaults/main.yml` -* First run migrate if the database schema has changed `ansible-playbook -i inventories/hosts.ini playbooks/secparse001-migrate.yml` -* Deploy containers `ansible-playbook -i inventories/hosts.ini playbooks/secparse001.yml` \ No newline at end of file diff --git a/secparse/admin/impl/BUILD.bazel b/secparse/admin/impl/BUILD.bazel deleted file mode 100644 index 4153a086..00000000 --- a/secparse/admin/impl/BUILD.bazel +++ /dev/null @@ -1,21 +0,0 @@ -load("@io_bazel_rules_go//go:def.bzl", "go_library") - -go_library( - name = "impl", - srcs = [ - "cve.go", - "server.go", - "short_code.go", - ], - importpath = "peridot.resf.org/secparse/admin/impl", - visibility = ["//visibility:public"], - deps = [ - "//secparse/admin/proto/v1:proto", - "//secparse/db", - "//servicecatalog", - "//utils", - "//vendor/github.com/ory/hydra-client-go/client", - "//vendor/github.com/sirupsen/logrus", - "@org_golang_google_grpc//:go_default_library", - ], -) diff --git a/secparse/admin/proto/v1/BUILD.bazel b/secparse/admin/proto/v1/BUILD.bazel deleted file mode 100644 index 228227c6..00000000 --- a/secparse/admin/proto/v1/BUILD.bazel +++ /dev/null @@ -1,47 +0,0 @@ -load("@io_bazel_rules_go//go:def.bzl", "go_library") -load("@rules_proto//proto:defs.bzl", "proto_library") -load("@io_bazel_rules_go//proto:def.bzl", "go_proto_library") - -proto_library( - name = "secparseadminpb_proto", - srcs = [ - "advisory.proto", - "affected_product.proto", - "cve.proto", - "secparseadmin.proto", - "short_code.proto", - ], - visibility = ["//visibility:public"], - deps = [ - "//proto:commonpb_proto", - "@com_envoyproxy_protoc_gen_validate//validate:validate_proto", - "@com_google_protobuf//:timestamp_proto", - "@com_google_protobuf//:wrappers_proto", - "@go_googleapis//google/api:annotations_proto", - ], -) - -go_proto_library( - name = "secparseadminpb_go_proto", - compilers = [ - "//:go_apiv2", - "//:go_grpc", - "//:go_validate", - "@com_github_grpc_ecosystem_grpc_gateway_v2//protoc-gen-grpc-gateway:go_gen_grpc_gateway", - ], - importpath = "peridot.resf.org/secparse/admin/proto/v1", - proto = ":secparseadminpb_proto", - visibility = ["//visibility:public"], - deps = [ - "//proto:common", - "@com_envoyproxy_protoc_gen_validate//validate:validate_go_proto", - "@go_googleapis//google/api:annotations_go_proto", - ], -) - -go_library( - name = "proto", - embed = 
[":secparseadminpb_go_proto"], - importpath = "peridot.resf.org/secparse/admin/proto/v1", - visibility = ["//visibility:public"], -) diff --git a/secparse/admin/proto/v1/advisory.proto b/secparse/admin/proto/v1/advisory.proto deleted file mode 100644 index 632593c7..00000000 --- a/secparse/admin/proto/v1/advisory.proto +++ /dev/null @@ -1,8 +0,0 @@ -syntax = "proto3"; - -package resf.secparse.admin; - -import "google/protobuf/timestamp.proto"; - -option go_package = "peridot.resf.org/secparse/admin/proto/v1;secparseadminpb"; - diff --git a/secparse/admin/proto/v1/affected_product.proto b/secparse/admin/proto/v1/affected_product.proto deleted file mode 100644 index 6a969281..00000000 --- a/secparse/admin/proto/v1/affected_product.proto +++ /dev/null @@ -1,40 +0,0 @@ -syntax = "proto3"; - -package resf.secparse.admin; - -import "google/protobuf/wrappers.proto"; - -option go_package = "peridot.resf.org/secparse/admin/proto/v1;secparseadminpb"; - -enum AffectedProductState { - UnknownProductState = 0; - // CVE only affects downstream - UnderInvestigationDownstream = 1; - // CVE affecting upstream and a fix still hasn't been issued - UnderInvestigationUpstream = 2; - // CVE has been fixed upstream - FixedUpstream = 3; - // CVE has been fixed downstream - // At this stage the CVE can be included in errata - FixedDownstream = 4; - // CVE will NOT be fixed upstream - WillNotFixUpstream = 5; - // CVE will NOT be fixed downstream - // This will probably never happen with Core, but may happen for SIGs - WillNotFixDownstream = 6; - // CVE is out of support scope - OutOfSupportScope = 7; - // CVE affects product and upstream is working on a fix - AffectedUpstream = 8; - // CVE affects product and a fix is being worked out - AffectedDownstream = 9; -} - -message AffectedProduct { - int64 product_id = 1; - google.protobuf.StringValue cve_id = 2; - string version = 3; - AffectedProductState state = 4; - string package = 5; - google.protobuf.StringValue advisory = 6; -} diff --git a/secparse/admin/proto/v1/cve.proto b/secparse/admin/proto/v1/cve.proto deleted file mode 100644 index 808cd7da..00000000 --- a/secparse/admin/proto/v1/cve.proto +++ /dev/null @@ -1,43 +0,0 @@ -syntax = "proto3"; - -package resf.secparse.admin; - -import "google/protobuf/timestamp.proto"; -import "google/protobuf/wrappers.proto"; - -option go_package = "peridot.resf.org/secparse/admin/proto/v1;secparseadminpb"; - -enum CVEState { - UnknownCVEState = 0; - // New CVE acknowledged by upstream - NewFromUpstream = 1; - // New issue, original (not from upstream) - NewOriginal = 2; - // Upstream has fixed the issue, downstream can now merge - ResolvedUpstream = 3; - // A fix has been merged from upstream or a fix has been pushed for an original package, awaiting release - ResolvedDownstream = 4; - // CVE has been included in an advisory (advisory may be unpublished) - IncludedInAdvisory = 5; - // CVE affects products that's not supported - NoSupportedProducts = 6; - // CVE is resolved but no advisory is required - ResolvedNoAdvisory = 7; -} - -message CVE { - string name = 1; - CVEState state = 2; - google.protobuf.StringValue source_by = 3; - google.protobuf.StringValue source_link = 4; -} - -message ListUnresolvedCVEsRequest {} -message ListUnresolvedCVEsResponse { - repeated CVE cves = 1; -} - -message ListFixedCVEsRequest {} -message ListFixedCVEsResponse { - repeated CVE cves = 1; -} diff --git a/secparse/admin/proto/v1/secparseadmin.proto b/secparse/admin/proto/v1/secparseadmin.proto deleted file mode 100644 index 
ef0ae12e..00000000 --- a/secparse/admin/proto/v1/secparseadmin.proto +++ /dev/null @@ -1,67 +0,0 @@ -syntax = "proto3"; - -package resf.secparse.admin; - -import "google/api/annotations.proto"; -import "proto/common.proto"; -import "secparse/admin/proto/v1/short_code.proto"; -import "secparse/admin/proto/v1/cve.proto"; - -option go_package = "peridot.resf.org/secparse/admin/proto/v1;secparseadminpb"; - -service SecparseAdmin { - // ListShortCodes - // - // List all short codes - rpc ListShortCodes (ListShortCodesRequest) returns (ListShortCodesResponse) { - option (google.api.http) = { - get: "/short_codes" - }; - } - - // GetShortCode - // - // Get short code entry by code - rpc GetShortCode (GetShortCodeRequest) returns (GetShortCodeResponse) { - option (google.api.http) = { - get: "/short_codes/{code=*}" - }; - } - - // CreateShortCode - // - // Create a new short code / prefix to publish errata with - rpc CreateShortCode (CreateShortCodeRequest) returns (CreateShortCodeResponse) { - option (google.api.http) = { - post: "/short_codes" - body: "*" - }; - } - - // ListUnresolvedCVEs - // - // List all unresolved CVEs - rpc ListUnresolvedCVEs (ListUnresolvedCVEsRequest) returns (ListUnresolvedCVEsResponse) { - option (google.api.http) = { - get: "/cves/unresolved" - }; - } - - // ListFixedCVEs - // - // List all CVEs either fixed upstream or downstream - rpc ListFixedCVEs (ListFixedCVEsRequest) returns (ListFixedCVEsResponse) { - option (google.api.http) = { - get: "/cves/fixed" - }; - } - - // HealthCheck - // - // Endpoint to see if the service is in good health - rpc HealthCheck (resf.orgmon.HealthCheckRequest) returns (resf.orgmon.HealthCheckResponse) { - option (google.api.http) = { - get: "/healthz" - }; - } -} diff --git a/secparse/admin/proto/v1/short_code.proto b/secparse/admin/proto/v1/short_code.proto deleted file mode 100644 index b218e20b..00000000 --- a/secparse/admin/proto/v1/short_code.proto +++ /dev/null @@ -1,65 +0,0 @@ -syntax = "proto3"; - -package resf.secparse.admin; - -import "google/protobuf/wrappers.proto"; -import "google/protobuf/timestamp.proto"; -import "validate/validate.proto"; - -option go_package = "peridot.resf.org/secparse/admin/proto/v1;secparseadminpb"; - -enum ShortCodeMode { - UnknownMode = 0; - PublishMode = 1; - MirrorRedHatMode = 2; -} - -message ShortCode { - // Code - // - // Full short code - string code = 1; - - // Mode - // - // Mode for short code - // Currently only publish and mirror is supported - // Mirror only mirrors another advisory upstream and will NOT allow publishing - // from the same short code - ShortCodeMode mode = 2; - - // Archived - // - // Whether the short code is archived or not - // An archived short code CANNOT be used to issue errata - bool archived = 3; -} - -message GetShortCodeRequest { - string code = 1; -} - -message GetShortCodeResponse { - ShortCode short_code = 1; -} - -message ListShortCodesRequest { - string page_token = 1; - int32 page_size = 2; -} - -message ListShortCodesResponse { - repeated ShortCode short_codes = 1; - google.protobuf.StringValue next_page_token = 2; -} - -message CreateShortCodeRequest { - string code = 1 [(validate.rules).string.min_len = 1]; - ShortCodeMode mode = 2 [(validate.rules).enum = {not_in: [0]}]; - google.protobuf.Timestamp mirror_from_date = 3; - int32 redhat_major_version = 4; -} - -message CreateShortCodeResponse { - ShortCode short_code = 1; -} diff --git a/secparse/cmd/secparse/BUILD.bazel b/secparse/cmd/secparse/BUILD.bazel deleted file mode 100644 index 
f351389b..00000000 --- a/secparse/cmd/secparse/BUILD.bazel +++ /dev/null @@ -1,33 +0,0 @@ -load("@io_bazel_rules_go//go:def.bzl", "go_binary", "go_library") -load("//rules_byc:defs.bzl", "container") - -go_library( - name = "secparse_lib", - srcs = ["main.go"], - importpath = "peridot.resf.org/secparse/cmd/secparse", - visibility = ["//visibility:private"], - deps = [ - "//secparse/db/connector", - "//secparse/impl", - "//utils", - "//vendor/github.com/sirupsen/logrus", - "//vendor/github.com/spf13/cobra", - ], -) - -go_binary( - name = "secparse", - embed = [":secparse_lib"], - visibility = ["//visibility:public"], -) - -container( - base = "//bases/bazel/go", - files = [ - ":secparse", - ], - image_name = "secparse", - tars_to_layer = [ - "//secparse/migrate", - ], -) diff --git a/secparse/cmd/secparseadmin/BUILD.bazel b/secparse/cmd/secparseadmin/BUILD.bazel deleted file mode 100644 index c9eca468..00000000 --- a/secparse/cmd/secparseadmin/BUILD.bazel +++ /dev/null @@ -1,30 +0,0 @@ -load("@io_bazel_rules_go//go:def.bzl", "go_binary", "go_library") -load("//rules_byc:defs.bzl", "container") - -go_library( - name = "secparseadmin_lib", - srcs = ["main.go"], - importpath = "peridot.resf.org/secparse/cmd/secparseadmin", - visibility = ["//visibility:private"], - deps = [ - "//secparse/admin/impl", - "//secparse/db/connector", - "//utils", - "//vendor/github.com/sirupsen/logrus", - "//vendor/github.com/spf13/cobra", - ], -) - -go_binary( - name = "secparseadmin", - embed = [":secparseadmin_lib"], - visibility = ["//visibility:public"], -) - -container( - base = "//bases/bazel/go", - files = [ - ":secparseadmin", - ], - image_name = "secparseadmin", -) diff --git a/secparse/cmd/secparsecron/BUILD.bazel b/secparse/cmd/secparsecron/BUILD.bazel deleted file mode 100644 index a5ce6db8..00000000 --- a/secparse/cmd/secparsecron/BUILD.bazel +++ /dev/null @@ -1,30 +0,0 @@ -load("@io_bazel_rules_go//go:def.bzl", "go_binary", "go_library") -load("//rules_byc:defs.bzl", "container") - -go_library( - name = "secparsecron_lib", - srcs = ["main.go"], - importpath = "peridot.resf.org/secparse/cmd/secparsecron", - visibility = ["//visibility:private"], - deps = [ - "//secparse/cron", - "//secparse/db/connector", - "//utils", - "//vendor/github.com/sirupsen/logrus", - "//vendor/github.com/spf13/cobra", - ], -) - -go_binary( - name = "secparsecron", - embed = [":secparsecron_lib"], - visibility = ["//visibility:public"], -) - -container( - base = "//bases/bazel/go", - files = [ - ":secparsecron", - ], - image_name = "secparsecron", -) diff --git a/secparse/cmd/secparsecron/main.go b/secparse/cmd/secparsecron/main.go deleted file mode 100644 index bc4cf634..00000000 --- a/secparse/cmd/secparsecron/main.go +++ /dev/null @@ -1,113 +0,0 @@ -// Copyright (c) All respective contributors to the Peridot Project. All rights reserved. -// Copyright (c) 2021-2022 Rocky Enterprise Software Foundation, Inc. All rights reserved. -// Copyright (c) 2021-2022 Ctrl IQ, Inc. All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are met: -// -// 1. Redistributions of source code must retain the above copyright notice, -// this list of conditions and the following disclaimer. -// -// 2. Redistributions in binary form must reproduce the above copyright notice, -// this list of conditions and the following disclaimer in the documentation -// and/or other materials provided with the distribution. -// -// 3. 
Neither the name of the copyright holder nor the names of its contributors -// may be used to endorse or promote products derived from this software without -// specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" -// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE -// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE -// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR -// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF -// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS -// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN -// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) -// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -// POSSIBILITY OF SUCH DAMAGE. - -package main - -import ( - "github.com/sirupsen/logrus" - "github.com/spf13/cobra" - "peridot.resf.org/secparse/cron" - "peridot.resf.org/secparse/db/connector" - "peridot.resf.org/utils" - "sync" - "time" -) - -var root = &cobra.Command{ - Use: "secparsecron", - Run: mn, -} - -var cnf = utils.NewFlagConfig() - -func init() { - dname := "secparse" - cnf.DatabaseName = &dname - cnf.Name = "secparse" - - pflags := root.PersistentFlags() - pflags.String("koji-endpoint", "https://koji.rockylinux.org/kojihub", "Koji endpoint to check for downstream fix") - pflags.String("koji-compose", "dist-rocky8-compose", "Tag to source compose packages from") - pflags.String("koji-module-compose", "dist-rocky8-module-compose", "Tag to source compose modules from") - - utils.AddFlags(pflags, cnf) -} - -func mn(_ *cobra.Command, _ []string) { - cronInstance, err := cron.New(connector.MustAuto()) - if err != nil { - logrus.Fatal(err) - } - - var wg sync.WaitGroup - wg.Add(4) - - go func() { - // Poll Red Hat for new advisories every two hours - for { - cronInstance.ScanRedHatErrata() - cronInstance.PollRedHatForNewCVEs() - time.Sleep(2 * time.Hour) - } - }() - - go func() { - // Poll unresolved CVE status and update every hour - for { - cronInstance.UpdateCVEState() - time.Sleep(time.Hour) - } - }() - - go func() { - // Auto detect downstream builds when CVEs are fixed upstream (check every 10 minutes) - for { - cronInstance.CheckIfCVEResolvedDownstream() - time.Sleep(10 * time.Minute) - } - }() - - go func() { - // Create advisory for fixed CVEs (check every 10 minutes) - for { - cronInstance.CreateAdvisoryForFixedCVEs() - time.Sleep(10 * time.Minute) - } - }() - - wg.Wait() -} - -func main() { - utils.Main() - if err := root.Execute(); err != nil { - logrus.Fatal(err) - } -} diff --git a/secparse/cron/BUILD.bazel b/secparse/cron/BUILD.bazel deleted file mode 100644 index 28fbc5a2..00000000 --- a/secparse/cron/BUILD.bazel +++ /dev/null @@ -1,50 +0,0 @@ -load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") - -go_library( - name = "cron", - srcs = [ - "autocreate_advisory.go", - "cron.go", - "cve_resolved_downstream.go", - "poll_redhat_for_cves.go", - "scan_redhat_errata.go", - "update_cve_state.go", - ], - importpath = "peridot.resf.org/secparse/cron", - visibility = ["//visibility:public"], - deps = [ - "//koji", - "//secparse/admin/proto/v1:proto", - "//secparse/db", - "//secparse/rherrata", - "//secparse/rhsecurity", - "//secparse/rpmutils", - "//utils", - "//vendor/github.com/gobwas/glob", - 
"//vendor/github.com/sirupsen/logrus", - "//vendor/github.com/spf13/viper", - ], -) - -go_test( - name = "cron_test", - srcs = [ - "cve_resolved_downstream_test.go", - "main_test.go", - "poll_redhat_for_cves_test.go", - "scan_redhat_errata_test.go", - "update_cve_state_test.go", - ], - data = glob(["testdata/**"]), - embed = [":cron"], - deps = [ - "//koji", - "//secparse/admin/proto/v1:proto", - "//secparse/db", - "//secparse/db/mock", - "//secparse/rherrata", - "//secparse/rhsecurity", - "//secparse/rhsecuritymock", - "//vendor/github.com/stretchr/testify/require", - ], -) diff --git a/secparse/cron/cve_resolved_downstream.go b/secparse/cron/cve_resolved_downstream.go deleted file mode 100644 index f6c623a4..00000000 --- a/secparse/cron/cve_resolved_downstream.go +++ /dev/null @@ -1,192 +0,0 @@ -// Copyright (c) All respective contributors to the Peridot Project. All rights reserved. -// Copyright (c) 2021-2022 Rocky Enterprise Software Foundation, Inc. All rights reserved. -// Copyright (c) 2021-2022 Ctrl IQ, Inc. All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are met: -// -// 1. Redistributions of source code must retain the above copyright notice, -// this list of conditions and the following disclaimer. -// -// 2. Redistributions in binary form must reproduce the above copyright notice, -// this list of conditions and the following disclaimer in the documentation -// and/or other materials provided with the distribution. -// -// 3. Neither the name of the copyright holder nor the names of its contributors -// may be used to endorse or promote products derived from this software without -// specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" -// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE -// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE -// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR -// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF -// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS -// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN -// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) -// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -// POSSIBILITY OF SUCH DAMAGE. - -package cron - -import ( - "github.com/sirupsen/logrus" - secparseadminpb "peridot.resf.org/secparse/admin/proto/v1" - "peridot.resf.org/secparse/db" - "strings" -) - -func (i *Instance) CheckIfCVEResolvedDownstream() { - if i.koji == nil { - logrus.Infoln("Automatic build checks are disabled. 
Provide a Koji endpoint using --koji-endpoint") - return - } - - cves, err := i.db.GetAllCVEsWithAllProductsFixed() - if err != nil { - logrus.Errorf("could not get fixed cves: %v", err) - return - } - - productBuffer := map[int64]*db.Product{} - ignoredPackagesBuffer := map[string][]string{} - - for _, cve := range cves { - affectedProducts, err := i.db.GetAllAffectedProductsByCVE(cve.ID) - if err != nil { - logrus.Errorf("could not get all affected products by %s: %v", cve.ID, err) - continue - } - - beginTx, err := i.db.Begin() - if err != nil { - logrus.Errorf("could not begin transaction: %v", err) - continue - } - tx := i.db.UseTransaction(beginTx) - - didSkipProduct := false - willNotFixOnly := true - allFixed := true - - for _, affectedProduct := range affectedProducts { - switch affectedProduct.State { - case - int(secparseadminpb.AffectedProductState_WillNotFixUpstream), - int(secparseadminpb.AffectedProductState_OutOfSupportScope): - continue - case - int(secparseadminpb.AffectedProductState_UnderInvestigationUpstream), - int(secparseadminpb.AffectedProductState_AffectedUpstream): - allFixed = false - willNotFixOnly = false - continue - } - - skipProduct := false - - if productBuffer[affectedProduct.ProductID] == nil { - product, err := i.db.GetProductByID(affectedProduct.ProductID) - if err != nil { - logrus.Errorf("could not get product with id %d: %v", affectedProduct.ProductID, err) - continue - } - productBuffer[affectedProduct.ProductID] = product - } - product := productBuffer[affectedProduct.ProductID] - - if ignoredPackagesBuffer[product.ShortCode] == nil { - ignoredUpstreamPackages, err := i.db.GetAllIgnoredPackagesByShortCode(product.ShortCode) - if err != nil { - logrus.Errorf("could not get ignored packages: %v", err) - continue - } - ignoredPackagesBuffer[product.ShortCode] = ignoredUpstreamPackages - } - ignoredUpstreamPackages := ignoredPackagesBuffer[product.ShortCode] - - nvrOnly := strings.Replace(affectedProduct.Package, ":", "-", 1) - if i.module.MatchString(nvrOnly) { - if !affectedProduct.Advisory.Valid { - skipProduct = true - break - } - - redHatAdvisory, err := i.errata.GetErrata(affectedProduct.Advisory.String) - if err != nil { - logrus.Errorf("Could not get Red Hat Advisory: %v", err) - skipProduct = true - break - } - - for _, arch := range product.Archs { - redHatProductName := affectedProductNameForArchAndVersion(arch, product.RedHatMajorVersion.Int32) - affected := redHatAdvisory.AffectedProducts[redHatProductName] - if affected == nil { - continue - } - srpms := affected.SRPMs - for _, srpm := range srpms { - status := i.checkKojiForBuild(tx, ignoredUpstreamPackages, srpm, affectedProduct, cve) - if status == Skip { - skipProduct = true - break - } else if status == Fixed { - willNotFixOnly = false - } else if status == NotFixed { - allFixed = false - willNotFixOnly = false - } - } - break - } - if skipProduct { - logrus.Errorf("%s has not been fixed for NVR %s", cve.ID, nvrOnly) - break - } - } else { - nvrOnly = i.epoch.ReplaceAllString(affectedProduct.Package, "") - status := i.checkKojiForBuild(tx, ignoredUpstreamPackages, nvrOnly, affectedProduct, cve) - if status == Skip { - skipProduct = true - break - } else if status == Fixed { - willNotFixOnly = false - } else if status == NotFixed { - allFixed = false - willNotFixOnly = false - } - } - - if skipProduct { - didSkipProduct = true - logrus.Infof("%s: Skipping package for now", affectedProduct.Package) - _ = beginTx.Rollback() - break - } - } - - if !didSkipProduct { - newState := 
secparseadminpb.CVEState_ResolvedUpstream - if allFixed { - newState = secparseadminpb.CVEState_ResolvedDownstream - } - if willNotFixOnly { - newState = secparseadminpb.CVEState_ResolvedNoAdvisory - } - err := tx.UpdateCVEState(cve.ID, newState) - if err != nil { - logrus.Errorf("Could not save new CVE state: %v", err) - continue - } - err = beginTx.Commit() - if err != nil { - logrus.Errorf("could not commit transaction: %v", err) - continue - } - - logrus.Infof("%s is now set to %s", cve.ID, newState.String()) - } - } -} diff --git a/secparse/cron/poll_redhat_for_cves.go b/secparse/cron/poll_redhat_for_cves.go deleted file mode 100644 index ae52a711..00000000 --- a/secparse/cron/poll_redhat_for_cves.go +++ /dev/null @@ -1,122 +0,0 @@ -// Copyright (c) All respective contributors to the Peridot Project. All rights reserved. -// Copyright (c) 2021-2022 Rocky Enterprise Software Foundation, Inc. All rights reserved. -// Copyright (c) 2021-2022 Ctrl IQ, Inc. All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are met: -// -// 1. Redistributions of source code must retain the above copyright notice, -// this list of conditions and the following disclaimer. -// -// 2. Redistributions in binary form must reproduce the above copyright notice, -// this list of conditions and the following disclaimer in the documentation -// and/or other materials provided with the distribution. -// -// 3. Neither the name of the copyright holder nor the names of its contributors -// may be used to endorse or promote products derived from this software without -// specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" -// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE -// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE -// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR -// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF -// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS -// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN -// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) -// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -// POSSIBILITY OF SUCH DAMAGE. 
- -package cron - -import ( - "context" - "database/sql" - "github.com/sirupsen/logrus" - secparseadminpb "peridot.resf.org/secparse/admin/proto/v1" - "strings" - "time" -) - -func (i *Instance) PollRedHatForNewCVEs() { - ctx := context.TODO() - - shortCodes, err := i.db.GetAllShortCodes() - if err != nil { - logrus.Errorf("could not get short codes: %v", err) - return - } - for _, shortCode := range shortCodes { - if int32(shortCode.Mode) != int32(secparseadminpb.ShortCodeMode_MirrorRedHatMode) { - // This is not a mirrored short code, continue - continue - } - - allProducts, err := i.db.GetProductsByShortCode(shortCode.Code) - if err != nil { - logrus.Errorf("could not get all products for code %s: %v", shortCode.Code, err) - continue - } - - for _, product := range allProducts { - if !product.RedHatMajorVersion.Valid { - continue - } - if !strings.HasPrefix(product.Name, shortCode.RedHatProductPrefix.String) { - continue - } - - lastSync, err := i.db.GetMirrorStateLastSync(shortCode.Code) - if err != nil { - if err != sql.ErrNoRows { - logrus.Errorf("could not get last sync for code %s: %v", shortCode.Code, err) - continue - } - - now := time.Now() - if shortCode.MirrorFromDate.Valid { - now = shortCode.MirrorFromDate.Time - } - lastSync = &now - } - - req := i.api.GetCves(ctx) - req = req.Product(productName(product.RedHatMajorVersion.Int32)) - if lastSync != nil { - req = req.After(lastSync.Format("2006-01-02")) - } - - cves, _, err := i.api.GetCvesExecute(req) - if err != nil { - logrus.Errorf("could not get cves: %v", err) - return - } - - for _, cve := range cves { - _, err := i.db.GetCVEByID(cve.CVE) - if err == nil { - continue - } - if err != sql.ErrNoRows { - logrus.Errorf("an unknown error occurred: %v", err) - return - } - - sourceBy := "Red Hat" - _, err = i.db.CreateCVE(cve.CVE, secparseadminpb.CVEState_NewFromUpstream, shortCode.Code, &sourceBy, &cve.ResourceUrl) - if err != nil { - logrus.Errorf("could not create cve: %v", err) - return - } - logrus.Infof("Added %s to %s with state NewFromUpstream", cve.CVE, shortCode.Code) - } - - now := time.Now() - err = i.db.UpdateMirrorState(shortCode.Code, &now) - if err != nil { - logrus.Errorf("could not update mirroring state: %v", err) - } - } - } -} diff --git a/secparse/cron/scan_redhat_errata.go b/secparse/cron/scan_redhat_errata.go deleted file mode 100644 index a35f2d4a..00000000 --- a/secparse/cron/scan_redhat_errata.go +++ /dev/null @@ -1,217 +0,0 @@ -// Copyright (c) All respective contributors to the Peridot Project. All rights reserved. -// Copyright (c) 2021-2022 Rocky Enterprise Software Foundation, Inc. All rights reserved. -// Copyright (c) 2021-2022 Ctrl IQ, Inc. All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are met: -// -// 1. Redistributions of source code must retain the above copyright notice, -// this list of conditions and the following disclaimer. -// -// 2. Redistributions in binary form must reproduce the above copyright notice, -// this list of conditions and the following disclaimer in the documentation -// and/or other materials provided with the distribution. -// -// 3. Neither the name of the copyright holder nor the names of its contributors -// may be used to endorse or promote products derived from this software without -// specific prior written permission. 
-// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" -// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE -// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE -// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR -// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF -// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS -// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN -// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) -// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -// POSSIBILITY OF SUCH DAMAGE. - -package cron - -import ( - "database/sql" - "fmt" - "github.com/sirupsen/logrus" - secparseadminpb "peridot.resf.org/secparse/admin/proto/v1" - "peridot.resf.org/utils" - "strconv" - "strings" -) - -func (i *Instance) ScanRedHatErrata() { - shortCodes, err := i.db.GetAllShortCodes() - if err != nil { - logrus.Errorf("could not get short codes: %v", err) - return - } - - ignoredPackagesBuffer := map[string][]string{} - - for _, shortCode := range shortCodes { - if int32(shortCode.Mode) != int32(secparseadminpb.ShortCodeMode_MirrorRedHatMode) { - // This is not a mirrored short code, continue - continue - } - - if ignoredPackagesBuffer[shortCode.Code] == nil { - ignoredUpstreamPackages, err := i.db.GetAllIgnoredPackagesByShortCode(shortCode.Code) - if err != nil { - logrus.Errorf("could not get ignored packages: %v", err) - continue - } - ignoredPackagesBuffer[shortCode.Code] = ignoredUpstreamPackages - } - ignoredUpstreamPackages := ignoredPackagesBuffer[shortCode.Code] - - allProducts, err := i.db.GetProductsByShortCode(shortCode.Code) - if err != nil { - logrus.Errorf("could not get all products for code %s: %v", shortCode.Code, err) - continue - } - - for _, product := range allProducts { - if !product.RedHatMajorVersion.Valid { - continue - } - if !strings.HasPrefix(product.Name, shortCode.RedHatProductPrefix.String) { - continue - } - - advisories, err := i.errata.GetAdvisories(product.CurrentFullVersion) - if err != nil { - logrus.Errorf("Could not get Red Hat Advisories: %v", err) - continue - } - - for _, advisory := range advisories { - advisoryId := i.advisoryIdRegex.FindStringSubmatch(advisory.Name) - if len(advisoryId) < 5 { - logrus.Errorf("Invalid advisory %s", advisory.Name) - continue - } - code := advisoryId[1] - year, err := strconv.Atoi(advisoryId[3]) - if err != nil { - logrus.Errorf("Invalid advisory %s", advisory.Name) - continue - } - num, err := strconv.Atoi(advisoryId[4]) - if err != nil { - logrus.Errorf("Invalid advisory %s", advisory.Name) - continue - } - - beginTx, err := i.db.Begin() - if err != nil { - logrus.Errorf("Could not begin tx: %v", err) - continue - } - tx := i.db.UseTransaction(beginTx) - - _, err = tx.GetAdvisoryByCodeAndYearAndNum(code, year, num) - if err != nil { - if err == sql.ErrNoRows { - // If security then just add CVEs, the rest should be automatic - if strings.HasPrefix(advisory.Name, "RHSA") { - for _, cve := range advisory.CVEs { - _, err := tx.GetCVEByID(cve) - if err == nil { - continue - } - if err != sql.ErrNoRows { - logrus.Errorf("an unknown error occurred: %v", err) - return - } - - sourceBy := "Red Hat" - resourceUrl := fmt.Sprintf("https://access.redhat.com/hydra/rest/securitydata/cve/%s.json", cve) - _, err = tx.CreateCVE(cve, 
secparseadminpb.CVEState_NewFromUpstream, shortCode.Code, &sourceBy, &resourceUrl) - if err != nil { - logrus.Errorf("could not create cve: %v", err) - _ = beginTx.Rollback() - return - } - logrus.Infof("Added %s to %s (%s)", cve, shortCode.Code, advisory.Name) - } - } else if strings.HasPrefix(advisory.Name, "RHBA") || strings.HasPrefix(advisory.Name, "RHEA") { - doRollback := false - _, err := tx.GetAffectedProductByAdvisory(advisory.Name) - if err != nil { - if err == sql.ErrNoRows { - _, err := tx.GetCVEByID(advisory.Name) - if err == nil { - continue - } - if err != sql.ErrNoRows { - logrus.Errorf("an unknown error occurred: %v", err) - return - } - - sourceBy := "Red Hat" - resourceUrl := fmt.Sprintf("https://access.redhat.com/errata/%s", advisory.Name) - _, err = tx.CreateCVE(advisory.Name, secparseadminpb.CVEState_ResolvedUpstream, product.ShortCode, &sourceBy, &resourceUrl) - if err != nil { - logrus.Errorf("Could not create cve: %v", err) - _ = beginTx.Rollback() - continue - } - - for _, srpm := range advisory.AffectedPackages { - if !strings.Contains(srpm, ".src.rpm") { - continue - } - - pkg := strings.Replace(srpm, ".src.rpm", "", 1) - - nvr := i.nvr.FindStringSubmatch(pkg) - var packageName string - if len(nvr) >= 2 { - packageName = nvr[1] - } else { - packageName = pkg - } - if utils.StrContains(packageName, ignoredUpstreamPackages) { - continue - } - dist := fmt.Sprintf("el%d", product.RedHatMajorVersion.Int32) - if !strings.Contains(pkg, dist) { - continue - } - if strings.Contains(pkg, dist+"sat") { - continue - } - _, err := tx.CreateAffectedProduct(product.ID, advisory.Name, int(secparseadminpb.AffectedProductState_FixedUpstream), product.CurrentFullVersion, pkg, &advisory.Name) - if err != nil { - logrus.Errorf("Could not create affected product for srpm: %v", err) - doRollback = true - break - } - } - if doRollback { - _ = beginTx.Rollback() - continue - } - logrus.Infof("Added %s to %s", advisory.Name, shortCode.Code) - } else { - logrus.Errorf("Could not get affected product by advisory: %v", err) - continue - } - } - } - } else { - logrus.Errorf("Could not fetch advisory: %v", err) - continue - } - } - - err = beginTx.Commit() - if err != nil { - logrus.Errorf("Could not commit new advisory tx: %v", err) - continue - } - } - } - } -} diff --git a/secparse/db/psql/psql.go b/secparse/db/psql/psql.go deleted file mode 100644 index 6c07a88a..00000000 --- a/secparse/db/psql/psql.go +++ /dev/null @@ -1,631 +0,0 @@ -// Copyright (c) All respective contributors to the Peridot Project. All rights reserved. -// Copyright (c) 2021-2022 Rocky Enterprise Software Foundation, Inc. All rights reserved. -// Copyright (c) 2021-2022 Ctrl IQ, Inc. All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are met: -// -// 1. Redistributions of source code must retain the above copyright notice, -// this list of conditions and the following disclaimer. -// -// 2. Redistributions in binary form must reproduce the above copyright notice, -// this list of conditions and the following disclaimer in the documentation -// and/or other materials provided with the distribution. -// -// 3. Neither the name of the copyright holder nor the names of its contributors -// may be used to endorse or promote products derived from this software without -// specific prior written permission. 
-// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" -// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE -// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE -// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR -// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF -// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS -// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN -// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) -// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -// POSSIBILITY OF SUCH DAMAGE. - -package psql - -import ( - "database/sql" - "time" - - "github.com/jmoiron/sqlx" - secparseadminpb "peridot.resf.org/secparse/admin/proto/v1" - "peridot.resf.org/secparse/db" - "peridot.resf.org/utils" -) - -type Access struct { - db *sqlx.DB - query utils.SqlQuery -} - -func New() *Access { - pgx := utils.PgInitx() - return &Access{ - db: pgx, - query: pgx, - } -} - -func (a *Access) GetAllShortCodes() ([]*db.ShortCode, error) { - var shortCodes []*db.ShortCode - err := a.query.Select( - &shortCodes, - ` - select - code, - mode, - created_at, - archived_at, - mirror_from_date, - redhat_product_prefix - from short_codes - order by created_at desc - `, - ) - if err != nil { - return nil, err - } - - return shortCodes, nil -} - -func (a *Access) GetShortCodeByCode(code string) (*db.ShortCode, error) { - var shortCode db.ShortCode - err := a.query.Get(&shortCode, "select code, mode, created_at, archived_at, mirror_from_date, redhat_product_prefix from short_codes where code = $1", code) - if err != nil { - return nil, err - } - - return &shortCode, nil -} - -func (a *Access) CreateShortCode(code string, mode secparseadminpb.ShortCodeMode) (*db.ShortCode, error) { - var shortCode db.ShortCode - err := a.query.Get(&shortCode, "insert into short_codes (code, mode) values ($1, $2) returning code, mode, created_at, archived_at, mirror_from_date, redhat_product_prefix", code, int(mode)) - if err != nil { - return nil, err - } - - return &shortCode, nil -} - -func (a *Access) GetAllAdvisories(publishedOnly bool) ([]*db.Advisory, error) { - var advisories []*db.Advisory - err := a.query.Select( - &advisories, - ` - select - a.id, - a.created_at, - a.year, - a.num, - a.synopsis, - a.topic, - a.severity, - a.type, - a.description, - a.solution, - a.redhat_issued_at, - a.short_code_code, - a.published_at, - array_remove(array_agg(distinct p.name), NULL) as affected_products, - array_remove(array_agg(distinct f.ticket), NULL) as fixes, - array_remove(array_agg(distinct c.source_by || ':::' || c.source_link || ':::' || c.id), NULL) as cves, - array_remove(array_agg(distinct r.url), NULL) as references, - array_remove(array_agg(distinct ar.name), NULL) as rpms - from advisories a - left join advisory_fixes adf on adf.advisory_id = a.id - left join fixes f on f.id = adf.fix_id - left join advisory_cves ac on ac.advisory_id = a.id - left join cves c on c.id = ac.cve_id - left join affected_products ap on ap.cve_id = ac.cve_id - left join products p on ap.product_id = p.id - left join advisory_references r on r.advisory_id = a.id - left join advisory_rpms ar on ar.advisory_id = a.id - where - ($1 is false or a.published_at is not null) - group by a.id - order by a.created_at desc - `, - publishedOnly, - ) - if err != 
nil { - return nil, err - } - - return advisories, nil -} - -func (a *Access) GetAdvisoryByCodeAndYearAndNum(code string, year int, num int) (*db.Advisory, error) { - var advisory db.Advisory - err := a.query.Get( - &advisory, - ` - select - a.id, - a.created_at, - a.year, - a.num, - a.synopsis, - a.topic, - a.severity, - a.type, - a.description, - a.solution, - a.redhat_issued_at, - a.short_code_code, - a.published_at, - array_remove(array_agg(distinct p.name), NULL) as affected_products, - array_remove(array_agg(distinct f.ticket), NULL) as fixes, - array_remove(array_agg(distinct c.source_by || ':::' || c.source_link || ':::' || c.id), NULL) as cves, - array_remove(array_agg(distinct r.url), NULL) as references, - array_remove(array_agg(distinct ar.name), NULL) as rpms - from advisories a - left join advisory_fixes adf on adf.advisory_id = a.id - left join fixes f on f.id = adf.fix_id - left join advisory_cves ac on ac.advisory_id = a.id - left join cves c on c.id = ac.cve_id - left join affected_products ap on ap.cve_id = ac.cve_id - left join products p on ap.product_id = p.id - left join advisory_references r on r.advisory_id = a.id - left join advisory_rpms ar on ar.advisory_id = a.id - where - a.year = $1 - and a.num = $2 - and a.short_code_code = $3 - group by a.id - `, - year, - num, - code, - ) - if err != nil { - return nil, err - } - - return &advisory, nil -} - -func (a *Access) CreateAdvisory(advisory *db.Advisory) (*db.Advisory, error) { - var ret db.Advisory - - var redHatIssuedAt *time.Time - var publishedAt *time.Time - - if advisory.RedHatIssuedAt.Valid { - redHatIssuedAt = &advisory.RedHatIssuedAt.Time - } - if advisory.PublishedAt.Valid { - publishedAt = &advisory.PublishedAt.Time - } - - err := a.query.Get( - &ret, - ` - insert into advisories - (year, num, synopsis, topic, severity, type, description, solution, - redhat_issued_at, short_code_code, published_at) - values ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11) - returning - id, - created_at, - year, - num, - synopsis, - topic, - severity, - type, - description, - solution, - redhat_issued_at, - short_code_code, - published_at - `, - advisory.Year, - advisory.Num, - advisory.Synopsis, - advisory.Topic, - advisory.Severity, - advisory.Type, - advisory.Description, - advisory.Solution, - redHatIssuedAt, - advisory.ShortCodeCode, - publishedAt, - ) - if err != nil { - return nil, err - } - - return &ret, nil -} - -func (a *Access) UpdateAdvisory(advisory *db.Advisory) (*db.Advisory, error) { - var ret db.Advisory - - var publishedAt *time.Time - - if advisory.PublishedAt.Valid { - publishedAt = &advisory.PublishedAt.Time - } - - err := a.query.Get( - &ret, - ` - update advisories - set - year = $1, - num = $2, - synopsis = $3, - topic = $4, - severity = $5, - type = $6, - description = $7, - solution = $8, - short_code_code = $9, - published_at = $10 - where - id = $11 - returning - id, - created_at, - year, - num, - synopsis, - topic, - severity, - type, - description, - solution, - redhat_issued_at, - short_code_code, - published_at - `, - advisory.Year, - advisory.Num, - advisory.Synopsis, - advisory.Topic, - advisory.Severity, - advisory.Type, - advisory.Description, - advisory.Solution, - advisory.ShortCodeCode, - publishedAt, - advisory.ID, - ) - if err != nil { - return nil, err - } - - return &ret, nil -} - -func (a *Access) GetAllUnresolvedCVEs() ([]*db.CVE, error) { - var cves []*db.CVE - err := a.query.Select(&cves, "select id, created_at, state, short_code_code, source_by, source_link from cves where 
state in (1, 2, 8, 9)") - if err != nil { - return nil, err - } - - return cves, nil -} - -func (a *Access) GetAllCVEsWithAllProductsFixed() ([]*db.CVE, error) { - var cves []*db.CVE - err := a.query.Select( - &cves, - ` - select - c.id, - c.created_at, - c.state, - c.short_code_code, - c.source_by, - c.source_link - from cves c - where - c.id in (select cve_id from affected_products where state = 3) - and c.state in (1, 2, 3, 4) - `, - ) - if err != nil { - return nil, err - } - - return cves, nil -} - -func (a *Access) GetAllCVEsFixedDownstream() ([]*db.CVE, error) { - var cves []*db.CVE - err := a.query.Select( - &cves, - ` - select - c.id, - c.created_at, - c.state, - c.short_code_code, - c.source_by, - c.source_link - from cves c - where - c.state = 4 - `, - ) - if err != nil { - return nil, err - } - - return cves, nil -} - -func (a *Access) GetCVEByID(id string) (*db.CVE, error) { - var cve db.CVE - err := a.query.Get(&cve, "select id, created_at, state, short_code_code, source_by, source_link from cves where id = $1", id) - if err != nil { - return nil, err - } - - return &cve, nil -} - -func (a *Access) CreateCVE(cveId string, state secparseadminpb.CVEState, shortCode string, sourceBy *string, sourceLink *string) (*db.CVE, error) { - var cve db.CVE - err := a.query.Get(&cve, "insert into cves (id, state, short_code_code, source_by, source_link) values ($1, $2, $3, $4, $5) returning id, created_at, state, short_code_code, source_by, source_link", cveId, int(state), shortCode, sourceBy, sourceLink) - if err != nil { - return nil, err - } - - return &cve, nil -} - -func (a *Access) UpdateCVEState(cve string, state secparseadminpb.CVEState) error { - _, err := a.query.Exec( - ` - update cves - set - state = $1 - where id = $2 - `, - state, - cve, - ) - return err -} - -func (a *Access) GetProductsByShortCode(code string) ([]*db.Product, error) { - var products []*db.Product - err := a.query.Select(&products, "select id, name, current_full_version, redhat_major_version, short_code_code, archs from products where short_code_code = $1 and (eol_at < now() or eol_at is null)", code) - if err != nil { - return nil, err - } - - return products, nil -} - -func (a *Access) GetProductByNameAndShortCode(name string, code string) (*db.Product, error) { - var product db.Product - err := a.query.Get(&product, "select id, name, current_full_version, redhat_major_version, short_code_code, archs from products where name = $1 and short_code_code = $2", name, code) - if err != nil { - return nil, err - } - - return &product, nil -} - -func (a *Access) GetProductByID(id int64) (*db.Product, error) { - var product db.Product - err := a.query.Get(&product, "select id, name, current_full_version, redhat_major_version, short_code_code, archs from products where id = $1", id) - if err != nil { - return nil, err - } - - return &product, nil -} - -func (a *Access) CreateProduct(name string, currentFullVersion string, redHatMajorVersion *int32, code string, archs []string) (*db.Product, error) { - var product db.Product - err := a.query.Get(&product, "insert into products (name, current_full_version, redhat_major_version, short_code_code, archs) values ($1, $2, $3, $4) returning id, name, current_full_version, redhat_major_version, short_code_code, archs", name, currentFullVersion, redHatMajorVersion, code, archs) - if err != nil { - return nil, err - } - - return &product, nil -} - -func (a *Access) GetAllAffectedProductsByCVE(cve string) ([]*db.AffectedProduct, error) { - var affectedProducts 
[]*db.AffectedProduct - err := a.query.Select(&affectedProducts, "select id, product_id, cve_id, state, version, package, advisory from affected_products where cve_id = $1", cve) - if err != nil { - return nil, err - } - - return affectedProducts, nil -} - -func (a *Access) GetAffectedProductByCVEAndPackage(cve string, pkg string) (*db.AffectedProduct, error) { - var affectedProduct db.AffectedProduct - err := a.query.Get(&affectedProduct, "select id, product_id, cve_id, state, version, package, advisory from affected_products where cve_id = $1 and package = $2", cve, pkg) - if err != nil { - return nil, err - } - - return &affectedProduct, nil -} - -func (a *Access) GetAffectedProductByAdvisory(advisory string) (*db.AffectedProduct, error) { - var affectedProduct db.AffectedProduct - err := a.query.Get(&affectedProduct, "select id, product_id, cve_id, state, version, package, advisory from affected_products where advisory = $1", advisory) - if err != nil { - return nil, err - } - - return &affectedProduct, nil -} - -func (a *Access) CreateAffectedProduct(productId int64, cveId string, state int, version string, pkg string, advisory *string) (*db.AffectedProduct, error) { - var affectedProduct db.AffectedProduct - err := a.query.Get(&affectedProduct, "insert into affected_products (product_id, cve_id, state, version, package, advisory) values ($1, $2, $3, $4, $5, $6) returning id, product_id, cve_id, state, version, package, advisory", productId, cveId, state, version, pkg, advisory) - if err != nil { - return nil, err - } - - return &affectedProduct, nil -} - -func (a *Access) UpdateAffectedProductStateAndPackageAndAdvisory(id int64, state int, pkg string, advisory *string) error { - _, err := a.query.Exec( - ` - update affected_products - set - state = $1, - package = $2, - advisory = $3 - where id = $4 - `, - state, - pkg, - advisory, - id, - ) - return err -} - -func (a *Access) DeleteAffectedProduct(id int64) error { - _, err := a.query.Exec( - ` - delete from affected_products - where id = $1 - `, - id, - ) - return err -} - -func (a *Access) CreateFix(ticket string, description string) (int64, error) { - var id int64 - err := a.query.Get(&id, "insert into fixes (ticket, description) values ($1, $2) returning id", ticket, description) - return id, err -} - -func (a *Access) GetMirrorStateLastSync(code string) (*time.Time, error) { - var lastSync time.Time - row := a.query.QueryRowx("select last_sync from mirror_state where short_code_code = $1", code) - if err := row.Err(); err != nil { - if err == sql.ErrNoRows { - return nil, nil - } - - return nil, err - } - - err := row.Scan(&lastSync) - if err != nil { - return nil, err - } - - return &lastSync, nil -} - -func (a *Access) UpdateMirrorState(code string, lastSync *time.Time) error { - _, err := a.query.Exec( - ` - insert into mirror_state (short_code_code, last_sync) - values ($1, $2) - on conflict (short_code_code) do - update - set last_sync = EXCLUDED.last_sync - `, - code, - lastSync, - ) - return err -} - -func (a *Access) CreateBuildReference(affectedProductId int64, rpm string, srcRpm string, cveId string, kojiId string) (*db.BuildReference, error) { - var buildReference db.BuildReference - err := a.query.Get( - &buildReference, - ` - insert into build_references - (affected_product_id, rpm, src_rpm, cve_id, koji_id) - values ($1, $2, $3, $4, $5) - returning id, affected_product_id, rpm, src_rpm, cve_id, koji_id - `, - affectedProductId, - rpm, - srcRpm, - cveId, - kojiId, - ) - if err != nil { - return nil, err - } - - 
return &buildReference, nil -} - -func (a *Access) CreateAdvisoryReference(advisoryId int64, url string) error { - _, err := a.query.Exec("insert into advisory_references (advisory_id, url) values ($1, $2)", advisoryId, url) - return err -} - -func (a *Access) GetAllIgnoredPackagesByShortCode(code string) ([]string, error) { - var packages []string - err := a.query.Select(&packages, "select package from ignored_upstream_packages where short_code_code = $1", code) - if err != nil { - return nil, err - } - - return packages, nil -} - -func (a *Access) AddAdvisoryFix(advisoryId int64, fixId int64) error { - _, err := a.query.Exec("insert into advisory_fixes (advisory_id, fix_id) values ($1, $2) on conflict do nothing", advisoryId, fixId) - if err != nil { - return err - } - - return nil -} - -func (a *Access) AddAdvisoryCVE(advisoryId int64, cveId string) error { - _, err := a.query.Exec("insert into advisory_cves (advisory_id, cve_id) values ($1, $2) on conflict do nothing", advisoryId, cveId) - if err != nil { - return err - } - - return nil -} - -func (a *Access) AddAdvisoryRPM(advisoryId int64, name string) error { - _, err := a.query.Exec("insert into advisory_rpms (advisory_id, name) values ($1, $2) on conflict do nothing", advisoryId, name) - if err != nil { - return err - } - - return nil -} - -func (a *Access) Begin() (utils.Tx, error) { - tx, err := a.db.Beginx() - if err != nil { - return nil, err - } - - return tx, nil -} - -func (a *Access) UseTransaction(tx utils.Tx) db.Access { - newAccess := *a - newAccess.query = tx - - return &newAccess -} diff --git a/secparse/impl/advisory.go b/secparse/impl/advisory.go deleted file mode 100644 index 1c08a923..00000000 --- a/secparse/impl/advisory.go +++ /dev/null @@ -1,82 +0,0 @@ -// Copyright (c) All respective contributors to the Peridot Project. All rights reserved. -// Copyright (c) 2021-2022 Rocky Enterprise Software Foundation, Inc. All rights reserved. -// Copyright (c) 2021-2022 Ctrl IQ, Inc. All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are met: -// -// 1. Redistributions of source code must retain the above copyright notice, -// this list of conditions and the following disclaimer. -// -// 2. Redistributions in binary form must reproduce the above copyright notice, -// this list of conditions and the following disclaimer in the documentation -// and/or other materials provided with the distribution. -// -// 3. Neither the name of the copyright holder nor the names of its contributors -// may be used to endorse or promote products derived from this software without -// specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" -// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE -// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE -// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR -// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF -// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS -// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN -// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) -// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -// POSSIBILITY OF SUCH DAMAGE. 
- -package impl - -import ( - "context" - "github.com/sirupsen/logrus" - "google.golang.org/grpc/codes" - "google.golang.org/grpc/status" - "peridot.resf.org/secparse/db" - secparsepb "peridot.resf.org/secparse/proto/v1" - "peridot.resf.org/utils" - "strconv" -) - -func (s *Server) ListAdvisories(_ context.Context, _ *secparsepb.ListAdvisoriesRequest) (*secparsepb.ListAdvisoriesResponse, error) { - advisories, err := s.db.GetAllAdvisories(true) - if err != nil { - logrus.Error(err) - return nil, utils.CouldNotRetrieveObjects - } - - return &secparsepb.ListAdvisoriesResponse{ - Advisories: db.DTOListAdvisoriesToPB(advisories), - }, nil -} - -func (s *Server) GetAdvisory(_ context.Context, req *secparsepb.GetAdvisoryRequest) (*secparsepb.GetAdvisoryResponse, error) { - if err := req.ValidateAll(); err != nil { - return nil, err - } - advisoryId := s.advisoryIdRegex.FindStringSubmatch(req.Id) - code := advisoryId[1] - year, err := strconv.Atoi(advisoryId[3]) - if err != nil { - return nil, status.Error(codes.InvalidArgument, "invalid year") - } - num, err := strconv.Atoi(advisoryId[4]) - if err != nil { - return nil, status.Error(codes.InvalidArgument, "invalid num") - } - - advisory, err := s.db.GetAdvisoryByCodeAndYearAndNum(code, year, num) - if err != nil { - logrus.Error(err) - } - if err != nil || !advisory.PublishedAt.Valid { - return nil, utils.CouldNotFindObject - } - - return &secparsepb.GetAdvisoryResponse{ - Advisory: db.DTOAdvisoryToPB(advisory), - }, nil -} diff --git a/secparse/proto/v1/secparse.proto b/secparse/proto/v1/secparse.proto deleted file mode 100644 index 61ed330e..00000000 --- a/secparse/proto/v1/secparse.proto +++ /dev/null @@ -1,58 +0,0 @@ -syntax = "proto3"; - -package resf.secparse; - -import "google/api/annotations.proto"; -import "protoc-gen-openapiv2/options/annotations.proto"; - -import "proto/common.proto"; -import "secparse/proto/v1/advisory.proto"; - -option go_package = "peridot.resf.org/secparse/proto/v1;secparsepb"; - -option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_swagger) = { - host: "errata.rockylinux.org" - info: { - title: "Secparse Errata API" - version: "1.0" - contact: { - name: "Mustafa Gezen" - email: "mustafa@ctrliq.com" - } - } - schemes: HTTPS - consumes: "application/json" - produces: "application/json" -}; - -service Secparse { - // ListAdvisories - // - // Return a list of advisories by given filters. - // No filters returns all advisories - // This method is paginated - rpc ListAdvisories (ListAdvisoriesRequest) returns (ListAdvisoriesResponse) { - option (google.api.http) = { - get: "/advisories" - }; - } - - // GetAdvisory - // - // Returns an advisory with given ID if found, else returns NotFound - rpc GetAdvisory (GetAdvisoryRequest) returns (GetAdvisoryResponse) { - option (google.api.http) = { - get: "/advisories/{id=*}" - }; - } - - // HealthCheck - // - // Endpoint to see if the service is in good health - rpc HealthCheck (resf.orgmon.HealthCheckRequest) returns (resf.orgmon.HealthCheckResponse) { - option (google.api.http) = { - get: "/healthz" - }; - } -} - diff --git a/secparse/seed.sql b/secparse/seed.sql deleted file mode 100644 index 77bbcf8e..00000000 --- a/secparse/seed.sql +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Copyright (c) All respective contributors to the Peridot Project. All rights reserved. - * Copyright (c) 2021-2022 Rocky Enterprise Software Foundation, Inc. All rights reserved. - * Copyright (c) 2021-2022 Ctrl IQ, Inc. All rights reserved. 
- * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * 1. Redistributions of source code must retain the above copyright notice, - * this list of conditions and the following disclaimer. - * - * 2. Redistributions in binary form must reproduce the above copyright notice, - * this list of conditions and the following disclaimer in the documentation - * and/or other materials provided with the distribution. - * - * 3. Neither the name of the copyright holder nor the names of its contributors - * may be used to endorse or promote products derived from this software without - * specific prior written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" - * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE - * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE - * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE - * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR - * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF - * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS - * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN - * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) - * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE - * POSSIBILITY OF SUCH DAMAGE. - */ - -insert into short_codes (code, mode, mirror_from_date, redhat_product_prefix) -values ('RL', 2, '2021-06-01'::timestamp, 'Rocky Linux'); -insert into short_codes (code, mode) -values ('RK', 1); -insert into products (name, current_full_version, redhat_major_version, short_code_code, archs) -values ('Rocky Linux 8', '8.4', 8, 'RL', array ['x86_64', 'aarch64']); -insert into ignored_upstream_packages (short_code_code, package) -values - ('RL', 'kernel-rt*'), - ('RL', 'tfm-rubygem-unicode*'), - ('RL', 'katello-host-tools*'), - ('RL', 'openssl-ibmca*'), - ('RL', 'insights-client*'), - ('RL', 'tfm-rubygem-unicode-display_width*'), - ('RL', 'pulp*'), - ('RL', 'satellite*'), - ('RL', 'tfm-rubygem-unf_ext*'), - ('RL', 'foreman*'), - ('RL', 'kpatch*'), - ('RL', 'rhc-worker-playbook*') diff --git a/secparse/ui/src/admin/components/AdminSection.tsx b/secparse/ui/src/admin/components/AdminSection.tsx deleted file mode 100644 index 7caad326..00000000 --- a/secparse/ui/src/admin/components/AdminSection.tsx +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Copyright (c) All respective contributors to the Peridot Project. All rights reserved. - * Copyright (c) 2021-2022 Rocky Enterprise Software Foundation, Inc. All rights reserved. - * Copyright (c) 2021-2022 Ctrl IQ, Inc. All rights reserved. - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * 1. Redistributions of source code must retain the above copyright notice, - * this list of conditions and the following disclaimer. - * - * 2. Redistributions in binary form must reproduce the above copyright notice, - * this list of conditions and the following disclaimer in the documentation - * and/or other materials provided with the distribution. - * - * 3. Neither the name of the copyright holder nor the names of its contributors - * may be used to endorse or promote products derived from this software without - * specific prior written permission. 
- * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" - * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE - * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE - * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE - * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR - * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF - * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS - * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN - * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) - * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE - * POSSIBILITY OF SUCH DAMAGE. - */ - -import React from 'react'; - -export const AdminSection = () => { - return
asd
; -}; diff --git a/secparse/ui/src/components/Root.tsx b/secparse/ui/src/components/Root.tsx deleted file mode 100644 index 36a95a0d..00000000 --- a/secparse/ui/src/components/Root.tsx +++ /dev/null @@ -1,144 +0,0 @@ -/* - * Copyright (c) All respective contributors to the Peridot Project. All rights reserved. - * Copyright (c) 2021-2022 Rocky Enterprise Software Foundation, Inc. All rights reserved. - * Copyright (c) 2021-2022 Ctrl IQ, Inc. All rights reserved. - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * 1. Redistributions of source code must retain the above copyright notice, - * this list of conditions and the following disclaimer. - * - * 2. Redistributions in binary form must reproduce the above copyright notice, - * this list of conditions and the following disclaimer in the documentation - * and/or other materials provided with the distribution. - * - * 3. Neither the name of the copyright holder nor the names of its contributors - * may be used to endorse or promote products derived from this software without - * specific prior written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" - * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE - * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE - * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE - * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR - * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF - * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS - * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN - * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) - * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE - * POSSIBILITY OF SUCH DAMAGE. - */ - -import React from 'react'; - -import { - AppBar, - Toolbar, - Container, - CssBaseline, - Drawer, - Divider, - IconButton, - List, -} from '@material-ui/core'; -import ChevronLeftIcon from '@material-ui/icons/ChevronLeft'; -import MenuIcon from '@material-ui/icons/Menu'; - -import { useStyles } from '../styles'; -import { Switch, Route } from 'react-router'; -import { Overview } from 'secparse/ui/src/components/Overview'; -import { BrowserRouter, Link } from 'react-router-dom'; -import { RockyLogo } from 'common/ui/RockyLogo'; -import classnames from 'classnames'; -import { ShowErrata } from './ShowErrata'; -import { AdminSection } from '../admin/components/AdminSection'; - -export const Root = () => { - const [open, setOpen] = React.useState(false); - const classes = useStyles(); - - const handleDrawerClose = () => { - setOpen(false); - }; - - const handleDrawerOpen = () => { - setOpen(true); - }; - - const inManage = location.pathname.startsWith('/manage'); - - return ( - -
-    {/* elided markup: an AppBar/Toolbar with a menu IconButton (drawer toggle)
-        shown when inManage, the RockyLogo, and the title
-        "Product Errata"{inManage && ' (Admin)'}; a Drawer for the admin area
-        closed via a ChevronLeftIcon IconButton with a Divider and List of
-        admin links; and a Switch of Routes covering Overview, ShowErrata and
-        the AdminSection under /manage. */}
- ); -}; diff --git a/secparse/ui/src/components/ShowErrata.tsx b/secparse/ui/src/components/ShowErrata.tsx deleted file mode 100644 index 3bf3b622..00000000 --- a/secparse/ui/src/components/ShowErrata.tsx +++ /dev/null @@ -1,206 +0,0 @@ -/* - * Copyright (c) All respective contributors to the Peridot Project. All rights reserved. - * Copyright (c) 2021-2022 Rocky Enterprise Software Foundation, Inc. All rights reserved. - * Copyright (c) 2021-2022 Ctrl IQ, Inc. All rights reserved. - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * 1. Redistributions of source code must retain the above copyright notice, - * this list of conditions and the following disclaimer. - * - * 2. Redistributions in binary form must reproduce the above copyright notice, - * this list of conditions and the following disclaimer in the documentation - * and/or other materials provided with the distribution. - * - * 3. Neither the name of the copyright holder nor the names of its contributors - * may be used to endorse or promote products derived from this software without - * specific prior written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" - * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE - * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE - * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE - * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR - * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF - * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS - * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN - * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) - * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE - * POSSIBILITY OF SUCH DAMAGE. - */ - -import React from 'react'; -import { - SecparseAdvisory, - SecparseGetAdvisoryResponse, -} from 'bazel-bin/secparse/proto/v1/client_typescript'; -import { reqap } from 'common/ui/reqap'; -import { api } from '../api'; -import { RouteComponentProps } from 'react-router'; -import { - Card, - CardContent, - Chip, - CircularProgress, - Paper, - Tab, - Tabs, -} from '@material-ui/core'; - -interface ShowErrataParams { - id: string; -} - -export interface ShowErrataProps - extends RouteComponentProps {} - -export const ShowErrata = (props: ShowErrataProps) => { - const [errata, setErrata] = React.useState< - SecparseAdvisory | undefined | null - >(); - const [tabValue, setTabValue] = React.useState(0); - - React.useEffect(() => { - (async () => { - let err, res: void | SecparseGetAdvisoryResponse | undefined; - [err, res] = await reqap(() => - api.getAdvisory({ id: props.match.params.id }) - ); - if (err || !res) { - setErrata(null); - return; - } - - if (res) { - setErrata(res.advisory); - } - })().then(); - }, []); - - const handleTabChange = ({}, val: number) => { - setTabValue(val); - }; - - return ( -
-    {/* elided markup: a loading indicator while errata is undefined and the
-        message "Oh no! Something has gone wrong!" when it is null; otherwise a
-        Card showing {errata.name} with two Tabs.
-        Tab 0: Synopsis, Type, Severity, Topic and Description (topic and
-        description split on '\n' into one line each), Affected products,
-        Fixes, CVEs (each entry split on ':::' into source/link/id and rendered
-        as a link when a source link is present, "No CVEs" as fallback) and
-        References ("No references" as fallback).
-        Tab 1: SRPMs (rpms containing '.src.rpm') and RPMs (the rest). */}
- ); -}; diff --git a/utils/pointer.go b/utils/pointer.go index 237f8670..d25a21d0 100644 --- a/utils/pointer.go +++ b/utils/pointer.go @@ -102,6 +102,17 @@ func NullTimeToTimestamppb(t sql.NullTime) *timestamppb.Timestamp { return timestamppb.New(t.Time) } +func TimestampToNullTime(t *timestamppb.Timestamp) sql.NullTime { + if t == nil { + return sql.NullTime{} + } + + return sql.NullTime{ + Time: t.AsTime(), + Valid: true, + } +} + func NullStringToPointer(s sql.NullString) *string { if !s.Valid { return nil @@ -122,3 +133,14 @@ func Pointer[T any](t T) *T { s := t return &s } + +func Default[T any](t *T) T { + x := struct { + X T + }{} + if t != nil { + return *t + } + + return x.X +} diff --git a/vendor/github.com/gorilla/feeds/.travis.yml b/vendor/github.com/gorilla/feeds/.travis.yml new file mode 100644 index 00000000..7939a218 --- /dev/null +++ b/vendor/github.com/gorilla/feeds/.travis.yml @@ -0,0 +1,16 @@ +language: go +sudo: false +matrix: + include: + - go: 1.8 + - go: 1.9 + - go: "1.10" + - go: 1.x + - go: tip + allow_failures: + - go: tip +script: + - go get -t -v ./... + - diff -u <(echo -n) <(gofmt -d -s .) + - go vet . + - go test -v -race ./... diff --git a/vendor/github.com/gorilla/feeds/AUTHORS b/vendor/github.com/gorilla/feeds/AUTHORS new file mode 100644 index 00000000..2c28cf94 --- /dev/null +++ b/vendor/github.com/gorilla/feeds/AUTHORS @@ -0,0 +1,29 @@ +# This is the official list of gorilla/feeds authors for copyright purposes. +# Please keep the list sorted. + +Dmitry Chestnykh +Eddie Scholtz +Gabriel Simmer +Google LLC (https://opensource.google.com/) +honky +James Gregory +Jason Hall +Jason Moiron +Kamil Kisiel +Kevin Stock +Markus Zimmermann +Matt Silverlock +Matthew Dawson +Milan Aleksic +Milan Aleksić +nlimpid +Paul Petring +Sean Enck +Sue Spence +Supermighty +Toru Fukui +Vabd +Volker +ZhiFeng Hu +weberc2 + diff --git a/vendor/github.com/gorilla/feeds/BUILD.bazel b/vendor/github.com/gorilla/feeds/BUILD.bazel new file mode 100644 index 00000000..de78e1ee --- /dev/null +++ b/vendor/github.com/gorilla/feeds/BUILD.bazel @@ -0,0 +1,16 @@ +load("@io_bazel_rules_go//go:def.bzl", "go_library") + +go_library( + name = "feeds", + srcs = [ + "atom.go", + "doc.go", + "feed.go", + "json.go", + "rss.go", + "uuid.go", + ], + importmap = "peridot.resf.org/vendor/github.com/gorilla/feeds", + importpath = "github.com/gorilla/feeds", + visibility = ["//visibility:public"], +) diff --git a/vendor/github.com/gorilla/feeds/LICENSE b/vendor/github.com/gorilla/feeds/LICENSE new file mode 100644 index 00000000..e24412d5 --- /dev/null +++ b/vendor/github.com/gorilla/feeds/LICENSE @@ -0,0 +1,22 @@ +Copyright (c) 2013-2018 The Gorilla Feeds Authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + + Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + + Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/github.com/gorilla/feeds/README.md b/vendor/github.com/gorilla/feeds/README.md new file mode 100644 index 00000000..4d733cf5 --- /dev/null +++ b/vendor/github.com/gorilla/feeds/README.md @@ -0,0 +1,185 @@ +## gorilla/feeds +[![GoDoc](https://godoc.org/github.com/gorilla/feeds?status.svg)](https://godoc.org/github.com/gorilla/feeds) +[![Build Status](https://travis-ci.org/gorilla/feeds.svg?branch=master)](https://travis-ci.org/gorilla/feeds) + +feeds is a web feed generator library for generating RSS, Atom and JSON feeds from Go +applications. + +### Goals + + * Provide a simple interface to create both Atom & RSS 2.0 feeds + * Full support for [Atom][atom], [RSS 2.0][rss], and [JSON Feed Version 1][jsonfeed] spec elements + * Ability to modify particulars for each spec + +[atom]: https://tools.ietf.org/html/rfc4287 +[rss]: http://www.rssboard.org/rss-specification +[jsonfeed]: https://jsonfeed.org/version/1 + +### Usage + +```go +package main + +import ( + "fmt" + "log" + "time" + "github.com/gorilla/feeds" +) + +func main() { + now := time.Now() + feed := &feeds.Feed{ + Title: "jmoiron.net blog", + Link: &feeds.Link{Href: "http://jmoiron.net/blog"}, + Description: "discussion about tech, footie, photos", + Author: &feeds.Author{Name: "Jason Moiron", Email: "jmoiron@jmoiron.net"}, + Created: now, + } + + feed.Items = []*feeds.Item{ + &feeds.Item{ + Title: "Limiting Concurrency in Go", + Link: &feeds.Link{Href: "http://jmoiron.net/blog/limiting-concurrency-in-go/"}, + Description: "A discussion on controlled parallelism in golang", + Author: &feeds.Author{Name: "Jason Moiron", Email: "jmoiron@jmoiron.net"}, + Created: now, + }, + &feeds.Item{ + Title: "Logic-less Template Redux", + Link: &feeds.Link{Href: "http://jmoiron.net/blog/logicless-template-redux/"}, + Description: "More thoughts on logicless templates", + Created: now, + }, + &feeds.Item{ + Title: "Idiomatic Code Reuse in Go", + Link: &feeds.Link{Href: "http://jmoiron.net/blog/idiomatic-code-reuse-in-go/"}, + Description: "How to use interfaces effectively", + Created: now, + }, + } + + atom, err := feed.ToAtom() + if err != nil { + log.Fatal(err) + } + + rss, err := feed.ToRss() + if err != nil { + log.Fatal(err) + } + + json, err := feed.ToJSON() + if err != nil { + log.Fatal(err) + } + + fmt.Println(atom, "\n", rss, "\n", json) +} +``` + +Outputs: + +```xml + + + jmoiron.net blog + + http://jmoiron.net/blog + 2013-01-16T03:26:01-05:00 + discussion about tech, footie, photos + + Limiting Concurrency in Go + + 2013-01-16T03:26:01-05:00 + tag:jmoiron.net,2013-01-16:/blog/limiting-concurrency-in-go/ + A discussion on controlled parallelism in golang + + Jason Moiron + jmoiron@jmoiron.net + + + + Logic-less Template Redux + + 2013-01-16T03:26:01-05:00 + tag:jmoiron.net,2013-01-16:/blog/logicless-template-redux/ + More thoughts on logicless templates + + + + Idiomatic Code Reuse in Go + + 2013-01-16T03:26:01-05:00 + tag:jmoiron.net,2013-01-16:/blog/idiomatic-code-reuse-in-go/ + How to use interfaces 
<em>effectively</em> + + + + + + + + jmoiron.net blog + http://jmoiron.net/blog + discussion about tech, footie, photos + jmoiron@jmoiron.net (Jason Moiron) + 2013-01-16T03:22:24-05:00 + + Limiting Concurrency in Go + http://jmoiron.net/blog/limiting-concurrency-in-go/ + A discussion on controlled parallelism in golang + 2013-01-16T03:22:24-05:00 + + + Logic-less Template Redux + http://jmoiron.net/blog/logicless-template-redux/ + More thoughts on logicless templates + 2013-01-16T03:22:24-05:00 + + + Idiomatic Code Reuse in Go + http://jmoiron.net/blog/idiomatic-code-reuse-in-go/ + How to use interfaces <em>effectively</em> + 2013-01-16T03:22:24-05:00 + + + + +{ + "version": "https://jsonfeed.org/version/1", + "title": "jmoiron.net blog", + "home_page_url": "http://jmoiron.net/blog", + "description": "discussion about tech, footie, photos", + "author": { + "name": "Jason Moiron" + }, + "items": [ + { + "id": "", + "url": "http://jmoiron.net/blog/limiting-concurrency-in-go/", + "title": "Limiting Concurrency in Go", + "summary": "A discussion on controlled parallelism in golang", + "date_published": "2013-01-16T03:22:24.530817846-05:00", + "author": { + "name": "Jason Moiron" + } + }, + { + "id": "", + "url": "http://jmoiron.net/blog/logicless-template-redux/", + "title": "Logic-less Template Redux", + "summary": "More thoughts on logicless templates", + "date_published": "2013-01-16T03:22:24.530817846-05:00" + }, + { + "id": "", + "url": "http://jmoiron.net/blog/idiomatic-code-reuse-in-go/", + "title": "Idiomatic Code Reuse in Go", + "summary": "How to use interfaces \u003cem\u003eeffectively\u003c/em\u003e", + "date_published": "2013-01-16T03:22:24.530817846-05:00" + } + ] +} +``` + diff --git a/vendor/github.com/gorilla/feeds/atom.go b/vendor/github.com/gorilla/feeds/atom.go new file mode 100644 index 00000000..7196f478 --- /dev/null +++ b/vendor/github.com/gorilla/feeds/atom.go @@ -0,0 +1,169 @@ +package feeds + +import ( + "encoding/xml" + "fmt" + "net/url" + "time" +) + +// Generates Atom feed as XML + +const ns = "http://www.w3.org/2005/Atom" + +type AtomPerson struct { + Name string `xml:"name,omitempty"` + Uri string `xml:"uri,omitempty"` + Email string `xml:"email,omitempty"` +} + +type AtomSummary struct { + XMLName xml.Name `xml:"summary"` + Content string `xml:",chardata"` + Type string `xml:"type,attr"` +} + +type AtomContent struct { + XMLName xml.Name `xml:"content"` + Content string `xml:",chardata"` + Type string `xml:"type,attr"` +} + +type AtomAuthor struct { + XMLName xml.Name `xml:"author"` + AtomPerson +} + +type AtomContributor struct { + XMLName xml.Name `xml:"contributor"` + AtomPerson +} + +type AtomEntry struct { + XMLName xml.Name `xml:"entry"` + Xmlns string `xml:"xmlns,attr,omitempty"` + Title string `xml:"title"` // required + Updated string `xml:"updated"` // required + Id string `xml:"id"` // required + Category string `xml:"category,omitempty"` + Content *AtomContent + Rights string `xml:"rights,omitempty"` + Source string `xml:"source,omitempty"` + Published string `xml:"published,omitempty"` + Contributor *AtomContributor + Links []AtomLink // required if no child 'content' elements + Summary *AtomSummary // required if content has src or content is base64 + Author *AtomAuthor // required if feed lacks an author +} + +// Multiple links with different rel can coexist +type AtomLink struct { + //Atom 1.0 + XMLName xml.Name `xml:"link"` + Href string `xml:"href,attr"` + Rel string `xml:"rel,attr,omitempty"` + Type string `xml:"type,attr,omitempty"` + Length 
string `xml:"length,attr,omitempty"` +} + +type AtomFeed struct { + XMLName xml.Name `xml:"feed"` + Xmlns string `xml:"xmlns,attr"` + Title string `xml:"title"` // required + Id string `xml:"id"` // required + Updated string `xml:"updated"` // required + Category string `xml:"category,omitempty"` + Icon string `xml:"icon,omitempty"` + Logo string `xml:"logo,omitempty"` + Rights string `xml:"rights,omitempty"` // copyright used + Subtitle string `xml:"subtitle,omitempty"` + Link *AtomLink + Author *AtomAuthor `xml:"author,omitempty"` + Contributor *AtomContributor + Entries []*AtomEntry `xml:"entry"` +} + +type Atom struct { + *Feed +} + +func newAtomEntry(i *Item) *AtomEntry { + id := i.Id + // assume the description is html + s := &AtomSummary{Content: i.Description, Type: "html"} + + if len(id) == 0 { + // if there's no id set, try to create one, either from data or just a uuid + if len(i.Link.Href) > 0 && (!i.Created.IsZero() || !i.Updated.IsZero()) { + dateStr := anyTimeFormat("2006-01-02", i.Updated, i.Created) + host, path := i.Link.Href, "/invalid.html" + if url, err := url.Parse(i.Link.Href); err == nil { + host, path = url.Host, url.Path + } + id = fmt.Sprintf("tag:%s,%s:%s", host, dateStr, path) + } else { + id = "urn:uuid:" + NewUUID().String() + } + } + var name, email string + if i.Author != nil { + name, email = i.Author.Name, i.Author.Email + } + + link_rel := i.Link.Rel + if link_rel == "" { + link_rel = "alternate" + } + x := &AtomEntry{ + Title: i.Title, + Links: []AtomLink{{Href: i.Link.Href, Rel: link_rel, Type: i.Link.Type}}, + Id: id, + Updated: anyTimeFormat(time.RFC3339, i.Updated, i.Created), + Summary: s, + } + + // if there's a content, assume it's html + if len(i.Content) > 0 { + x.Content = &AtomContent{Content: i.Content, Type: "html"} + } + + if i.Enclosure != nil && link_rel != "enclosure" { + x.Links = append(x.Links, AtomLink{Href: i.Enclosure.Url, Rel: "enclosure", Type: i.Enclosure.Type, Length: i.Enclosure.Length}) + } + + if len(name) > 0 || len(email) > 0 { + x.Author = &AtomAuthor{AtomPerson: AtomPerson{Name: name, Email: email}} + } + return x +} + +// create a new AtomFeed with a generic Feed struct's data +func (a *Atom) AtomFeed() *AtomFeed { + updated := anyTimeFormat(time.RFC3339, a.Updated, a.Created) + feed := &AtomFeed{ + Xmlns: ns, + Title: a.Title, + Link: &AtomLink{Href: a.Link.Href, Rel: a.Link.Rel}, + Subtitle: a.Description, + Id: a.Link.Href, + Updated: updated, + Rights: a.Copyright, + } + if a.Author != nil { + feed.Author = &AtomAuthor{AtomPerson: AtomPerson{Name: a.Author.Name, Email: a.Author.Email}} + } + for _, e := range a.Items { + feed.Entries = append(feed.Entries, newAtomEntry(e)) + } + return feed +} + +// FeedXml returns an XML-Ready object for an Atom object +func (a *Atom) FeedXml() interface{} { + return a.AtomFeed() +} + +// FeedXml returns an XML-ready object for an AtomFeed object +func (a *AtomFeed) FeedXml() interface{} { + return a +} diff --git a/vendor/github.com/gorilla/feeds/doc.go b/vendor/github.com/gorilla/feeds/doc.go new file mode 100644 index 00000000..4e0759cc --- /dev/null +++ b/vendor/github.com/gorilla/feeds/doc.go @@ -0,0 +1,73 @@ +/* +Syndication (feed) generator library for golang. + +Installing + + go get github.com/gorilla/feeds + +Feeds provides a simple, generic Feed interface with a generic Item object as well as RSS, Atom and JSON Feed specific RssFeed, AtomFeed and JSONFeed objects which allow access to all of each spec's defined elements. 
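The vendored gorilla/feeds package is presumably what will back an RSS/Atom endpoint for published advisories, but this diff does not show that wiring, so the handler below is only a sketch. It sticks to the Feed, Item, Link, Add, Sort and WriteRss APIs defined in the vendored sources; the route, titles and advisory data are made up.

```go
package main

import (
	"log"
	"net/http"
	"time"

	"github.com/gorilla/feeds"
)

// errataFeedHandler is a hypothetical handler: it builds a feed from one
// hard-coded item and streams it as RSS. A real service would fill the items
// from its database instead.
func errataFeedHandler(w http.ResponseWriter, r *http.Request) {
	now := time.Now()
	feed := &feeds.Feed{
		Title:       "Errata feed",
		Link:        &feeds.Link{Href: "https://errata.example.org"},
		Description: "Recently published advisories",
		Created:     now,
	}
	feed.Add(&feeds.Item{
		Title:       "RLSA-2021:0001",
		Link:        &feeds.Link{Href: "https://errata.example.org/RLSA-2021:0001"},
		Description: "Example security advisory",
		Created:     now,
	})

	// Newest first; Sort is provided by feeds.Feed.
	feed.Sort(func(a, b *feeds.Item) bool { return a.Created.After(b.Created) })

	w.Header().Set("Content-Type", "application/rss+xml")
	if err := feed.WriteRss(w); err != nil {
		log.Printf("could not write rss: %v", err)
	}
}

func main() {
	http.HandleFunc("/feed.rss", errataFeedHandler)
	log.Fatal(http.ListenAndServe(":8080", nil))
}
```

Swapping feed.WriteRss for feed.WriteAtom or feed.WriteJSON yields the other two formats the library supports.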
+ +Examples + +Create a Feed and some Items in that feed using the generic interfaces: + + import ( + "time" + . "github.com/gorilla/feeds" + ) + + now = time.Now() + + feed := &Feed{ + Title: "jmoiron.net blog", + Link: &Link{Href: "http://jmoiron.net/blog"}, + Description: "discussion about tech, footie, photos", + Author: &Author{Name: "Jason Moiron", Email: "jmoiron@jmoiron.net"}, + Created: now, + Copyright: "This work is copyright © Benjamin Button", + } + + feed.Items = []*Item{ + &Item{ + Title: "Limiting Concurrency in Go", + Link: &Link{Href: "http://jmoiron.net/blog/limiting-concurrency-in-go/"}, + Description: "A discussion on controlled parallelism in golang", + Author: &Author{Name: "Jason Moiron", Email: "jmoiron@jmoiron.net"}, + Created: now, + }, + &Item{ + Title: "Logic-less Template Redux", + Link: &Link{Href: "http://jmoiron.net/blog/logicless-template-redux/"}, + Description: "More thoughts on logicless templates", + Created: now, + }, + &Item{ + Title: "Idiomatic Code Reuse in Go", + Link: &Link{Href: "http://jmoiron.net/blog/idiomatic-code-reuse-in-go/"}, + Description: "How to use interfaces effectively", + Created: now, + }, + } + +From here, you can output Atom, RSS, or JSON Feed versions of this feed easily + + atom, err := feed.ToAtom() + rss, err := feed.ToRss() + json, err := feed.ToJSON() + +You can also get access to the underlying objects that feeds uses to export its XML + + atomFeed := (&Atom{Feed: feed}).AtomFeed() + rssFeed := (&Rss{Feed: feed}).RssFeed() + jsonFeed := (&JSON{Feed: feed}).JSONFeed() + +From here, you can modify or add each syndication's specific fields before outputting + + atomFeed.Subtitle = "plays the blues" + atom, err := ToXML(atomFeed) + rssFeed.Generator = "gorilla/feeds v1.0 (github.com/gorilla/feeds)" + rss, err := ToXML(rssFeed) + jsonFeed.NextUrl = "https://www.example.com/feed.json?page=2" + json, err := jsonFeed.ToJSON() +*/ +package feeds diff --git a/vendor/github.com/gorilla/feeds/feed.go b/vendor/github.com/gorilla/feeds/feed.go new file mode 100644 index 00000000..790a1b6c --- /dev/null +++ b/vendor/github.com/gorilla/feeds/feed.go @@ -0,0 +1,145 @@ +package feeds + +import ( + "encoding/json" + "encoding/xml" + "io" + "sort" + "time" +) + +type Link struct { + Href, Rel, Type, Length string +} + +type Author struct { + Name, Email string +} + +type Image struct { + Url, Title, Link string + Width, Height int +} + +type Enclosure struct { + Url, Length, Type string +} + +type Item struct { + Title string + Link *Link + Source *Link + Author *Author + Description string // used as description in rss, summary in atom + Id string // used as guid in rss, id in atom + Updated time.Time + Created time.Time + Enclosure *Enclosure + Content string +} + +type Feed struct { + Title string + Link *Link + Description string + Author *Author + Updated time.Time + Created time.Time + Id string + Subtitle string + Items []*Item + Copyright string + Image *Image +} + +// add a new Item to a Feed +func (f *Feed) Add(item *Item) { + f.Items = append(f.Items, item) +} + +// returns the first non-zero time formatted as a string or "" +func anyTimeFormat(format string, times ...time.Time) string { + for _, t := range times { + if !t.IsZero() { + return t.Format(format) + } + } + return "" +} + +// interface used by ToXML to get a object suitable for exporting XML. 
+type XmlFeed interface { + FeedXml() interface{} +} + +// turn a feed object (either a Feed, AtomFeed, or RssFeed) into xml +// returns an error if xml marshaling fails +func ToXML(feed XmlFeed) (string, error) { + x := feed.FeedXml() + data, err := xml.MarshalIndent(x, "", " ") + if err != nil { + return "", err + } + // strip empty line from default xml header + s := xml.Header[:len(xml.Header)-1] + string(data) + return s, nil +} + +// WriteXML writes a feed object (either a Feed, AtomFeed, or RssFeed) as XML into +// the writer. Returns an error if XML marshaling fails. +func WriteXML(feed XmlFeed, w io.Writer) error { + x := feed.FeedXml() + // write default xml header, without the newline + if _, err := w.Write([]byte(xml.Header[:len(xml.Header)-1])); err != nil { + return err + } + e := xml.NewEncoder(w) + e.Indent("", " ") + return e.Encode(x) +} + +// creates an Atom representation of this feed +func (f *Feed) ToAtom() (string, error) { + a := &Atom{f} + return ToXML(a) +} + +// WriteAtom writes an Atom representation of this feed to the writer. +func (f *Feed) WriteAtom(w io.Writer) error { + return WriteXML(&Atom{f}, w) +} + +// creates an Rss representation of this feed +func (f *Feed) ToRss() (string, error) { + r := &Rss{f} + return ToXML(r) +} + +// WriteRss writes an RSS representation of this feed to the writer. +func (f *Feed) WriteRss(w io.Writer) error { + return WriteXML(&Rss{f}, w) +} + +// ToJSON creates a JSON Feed representation of this feed +func (f *Feed) ToJSON() (string, error) { + j := &JSON{f} + return j.ToJSON() +} + +// WriteJSON writes an JSON representation of this feed to the writer. +func (f *Feed) WriteJSON(w io.Writer) error { + j := &JSON{f} + feed := j.JSONFeed() + + e := json.NewEncoder(w) + e.SetIndent("", " ") + return e.Encode(feed) +} + +// Sort sorts the Items in the feed with the given less function. +func (f *Feed) Sort(less func(a, b *Item) bool) { + lessFunc := func(i, j int) bool { + return less(f.Items[i], f.Items[j]) + } + sort.SliceStable(f.Items, lessFunc) +} diff --git a/vendor/github.com/gorilla/feeds/json.go b/vendor/github.com/gorilla/feeds/json.go new file mode 100644 index 00000000..75a82fd6 --- /dev/null +++ b/vendor/github.com/gorilla/feeds/json.go @@ -0,0 +1,183 @@ +package feeds + +import ( + "encoding/json" + "strings" + "time" +) + +const jsonFeedVersion = "https://jsonfeed.org/version/1" + +// JSONAuthor represents the author of the feed or of an individual item +// in the feed +type JSONAuthor struct { + Name string `json:"name,omitempty"` + Url string `json:"url,omitempty"` + Avatar string `json:"avatar,omitempty"` +} + +// JSONAttachment represents a related resource. Podcasts, for instance, would +// include an attachment that’s an audio or video file. +type JSONAttachment struct { + Url string `json:"url,omitempty"` + MIMEType string `json:"mime_type,omitempty"` + Title string `json:"title,omitempty"` + Size int32 `json:"size,omitempty"` + Duration time.Duration `json:"duration_in_seconds,omitempty"` +} + +// MarshalJSON implements the json.Marshaler interface. +// The Duration field is marshaled in seconds, all other fields are marshaled +// based upon the definitions in struct tags. 
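The JSONAttachment marshaller defined just below flattens time.Duration into fractional seconds. A minimal sketch of the resulting JSON, with made-up attachment values:

```go
package main

import (
	"encoding/json"
	"fmt"
	"time"

	"github.com/gorilla/feeds"
)

func main() {
	// Example attachment; the URL and duration are placeholder values.
	a := feeds.JSONAttachment{
		Url:      "https://example.org/episode1.mp3",
		MIMEType: "audio/mpeg",
		Duration: 90 * time.Second,
	}

	out, err := json.Marshal(&a) // pointer receiver, so marshal the address
	if err != nil {
		panic(err)
	}

	// The custom MarshalJSON emits the duration as "duration_in_seconds": 90.
	fmt.Println(string(out))
}
```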
+func (a *JSONAttachment) MarshalJSON() ([]byte, error) { + type EmbeddedJSONAttachment JSONAttachment + return json.Marshal(&struct { + Duration float64 `json:"duration_in_seconds,omitempty"` + *EmbeddedJSONAttachment + }{ + EmbeddedJSONAttachment: (*EmbeddedJSONAttachment)(a), + Duration: a.Duration.Seconds(), + }) +} + +// UnmarshalJSON implements the json.Unmarshaler interface. +// The Duration field is expected to be in seconds, all other field types +// match the struct definition. +func (a *JSONAttachment) UnmarshalJSON(data []byte) error { + type EmbeddedJSONAttachment JSONAttachment + var raw struct { + Duration float64 `json:"duration_in_seconds,omitempty"` + *EmbeddedJSONAttachment + } + raw.EmbeddedJSONAttachment = (*EmbeddedJSONAttachment)(a) + + err := json.Unmarshal(data, &raw) + if err != nil { + return err + } + + if raw.Duration > 0 { + nsec := int64(raw.Duration * float64(time.Second)) + raw.EmbeddedJSONAttachment.Duration = time.Duration(nsec) + } + + return nil +} + +// JSONItem represents a single entry/post for the feed. +type JSONItem struct { + Id string `json:"id"` + Url string `json:"url,omitempty"` + ExternalUrl string `json:"external_url,omitempty"` + Title string `json:"title,omitempty"` + ContentHTML string `json:"content_html,omitempty"` + ContentText string `json:"content_text,omitempty"` + Summary string `json:"summary,omitempty"` + Image string `json:"image,omitempty"` + BannerImage string `json:"banner_,omitempty"` + PublishedDate *time.Time `json:"date_published,omitempty"` + ModifiedDate *time.Time `json:"date_modified,omitempty"` + Author *JSONAuthor `json:"author,omitempty"` + Tags []string `json:"tags,omitempty"` + Attachments []JSONAttachment `json:"attachments,omitempty"` +} + +// JSONHub describes an endpoint that can be used to subscribe to real-time +// notifications from the publisher of this feed. +type JSONHub struct { + Type string `json:"type"` + Url string `json:"url"` +} + +// JSONFeed represents a syndication feed in the JSON Feed Version 1 format. +// Matching the specification found here: https://jsonfeed.org/version/1. +type JSONFeed struct { + Version string `json:"version"` + Title string `json:"title"` + HomePageUrl string `json:"home_page_url,omitempty"` + FeedUrl string `json:"feed_url,omitempty"` + Description string `json:"description,omitempty"` + UserComment string `json:"user_comment,omitempty"` + NextUrl string `json:"next_url,omitempty"` + Icon string `json:"icon,omitempty"` + Favicon string `json:"favicon,omitempty"` + Author *JSONAuthor `json:"author,omitempty"` + Expired *bool `json:"expired,omitempty"` + Hubs []*JSONItem `json:"hubs,omitempty"` + Items []*JSONItem `json:"items,omitempty"` +} + +// JSON is used to convert a generic Feed to a JSONFeed. +type JSON struct { + *Feed +} + +// ToJSON encodes f into a JSON string. Returns an error if marshalling fails. +func (f *JSON) ToJSON() (string, error) { + return f.JSONFeed().ToJSON() +} + +// ToJSON encodes f into a JSON string. Returns an error if marshalling fails. +func (f *JSONFeed) ToJSON() (string, error) { + data, err := json.MarshalIndent(f, "", " ") + if err != nil { + return "", err + } + + return string(data), nil +} + +// JSONFeed creates a new JSONFeed with a generic Feed struct's data. 
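The usual flow, mirroring the package documentation, is to lift a generic Feed into a JSONFeed with the method defined just below, set the JSON-Feed-only fields, and then serialize. A brief sketch; the helper name and URLs are assumptions:

    import "github.com/gorilla/feeds"

    // buildJSONFeed is a hypothetical helper; feed is assumed to be built as in doc.go.
    func buildJSONFeed(feed *feeds.Feed) (string, error) {
        jf := (&feeds.JSON{Feed: feed}).JSONFeed()
        jf.FeedUrl = "https://example.com/feed.json"        // placeholder
        jf.NextUrl = "https://example.com/feed.json?page=2" // placeholder
        jf.Favicon = "https://example.com/favicon.ico"      // placeholder
        return jf.ToJSON()
    }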
+func (f *JSON) JSONFeed() *JSONFeed { + feed := &JSONFeed{ + Version: jsonFeedVersion, + Title: f.Title, + Description: f.Description, + } + + if f.Link != nil { + feed.HomePageUrl = f.Link.Href + } + if f.Author != nil { + feed.Author = &JSONAuthor{ + Name: f.Author.Name, + } + } + for _, e := range f.Items { + feed.Items = append(feed.Items, newJSONItem(e)) + } + return feed +} + +func newJSONItem(i *Item) *JSONItem { + item := &JSONItem{ + Id: i.Id, + Title: i.Title, + Summary: i.Description, + + ContentHTML: i.Content, + } + + if i.Link != nil { + item.Url = i.Link.Href + } + if i.Source != nil { + item.ExternalUrl = i.Source.Href + } + if i.Author != nil { + item.Author = &JSONAuthor{ + Name: i.Author.Name, + } + } + if !i.Created.IsZero() { + item.PublishedDate = &i.Created + } + if !i.Updated.IsZero() { + item.ModifiedDate = &i.Updated + } + if i.Enclosure != nil && strings.HasPrefix(i.Enclosure.Type, "image/") { + item.Image = i.Enclosure.Url + } + + return item +} diff --git a/vendor/github.com/gorilla/feeds/rss.go b/vendor/github.com/gorilla/feeds/rss.go new file mode 100644 index 00000000..09179dfb --- /dev/null +++ b/vendor/github.com/gorilla/feeds/rss.go @@ -0,0 +1,168 @@ +package feeds + +// rss support +// validation done according to spec here: +// http://cyber.law.harvard.edu/rss/rss.html + +import ( + "encoding/xml" + "fmt" + "time" +) + +// private wrapper around the RssFeed which gives us the .. xml +type RssFeedXml struct { + XMLName xml.Name `xml:"rss"` + Version string `xml:"version,attr"` + ContentNamespace string `xml:"xmlns:content,attr"` + Channel *RssFeed +} + +type RssContent struct { + XMLName xml.Name `xml:"content:encoded"` + Content string `xml:",cdata"` +} + +type RssImage struct { + XMLName xml.Name `xml:"image"` + Url string `xml:"url"` + Title string `xml:"title"` + Link string `xml:"link"` + Width int `xml:"width,omitempty"` + Height int `xml:"height,omitempty"` +} + +type RssTextInput struct { + XMLName xml.Name `xml:"textInput"` + Title string `xml:"title"` + Description string `xml:"description"` + Name string `xml:"name"` + Link string `xml:"link"` +} + +type RssFeed struct { + XMLName xml.Name `xml:"channel"` + Title string `xml:"title"` // required + Link string `xml:"link"` // required + Description string `xml:"description"` // required + Language string `xml:"language,omitempty"` + Copyright string `xml:"copyright,omitempty"` + ManagingEditor string `xml:"managingEditor,omitempty"` // Author used + WebMaster string `xml:"webMaster,omitempty"` + PubDate string `xml:"pubDate,omitempty"` // created or updated + LastBuildDate string `xml:"lastBuildDate,omitempty"` // updated used + Category string `xml:"category,omitempty"` + Generator string `xml:"generator,omitempty"` + Docs string `xml:"docs,omitempty"` + Cloud string `xml:"cloud,omitempty"` + Ttl int `xml:"ttl,omitempty"` + Rating string `xml:"rating,omitempty"` + SkipHours string `xml:"skipHours,omitempty"` + SkipDays string `xml:"skipDays,omitempty"` + Image *RssImage + TextInput *RssTextInput + Items []*RssItem `xml:"item"` +} + +type RssItem struct { + XMLName xml.Name `xml:"item"` + Title string `xml:"title"` // required + Link string `xml:"link"` // required + Description string `xml:"description"` // required + Content *RssContent + Author string `xml:"author,omitempty"` + Category string `xml:"category,omitempty"` + Comments string `xml:"comments,omitempty"` + Enclosure *RssEnclosure + Guid string `xml:"guid,omitempty"` // Id used + PubDate string `xml:"pubDate,omitempty"` // created 
or updated + Source string `xml:"source,omitempty"` +} + +type RssEnclosure struct { + //RSS 2.0 + XMLName xml.Name `xml:"enclosure"` + Url string `xml:"url,attr"` + Length string `xml:"length,attr"` + Type string `xml:"type,attr"` +} + +type Rss struct { + *Feed +} + +// create a new RssItem with a generic Item struct's data +func newRssItem(i *Item) *RssItem { + item := &RssItem{ + Title: i.Title, + Link: i.Link.Href, + Description: i.Description, + Guid: i.Id, + PubDate: anyTimeFormat(time.RFC1123Z, i.Created, i.Updated), + } + if len(i.Content) > 0 { + item.Content = &RssContent{Content: i.Content} + } + if i.Source != nil { + item.Source = i.Source.Href + } + + // Define a closure + if i.Enclosure != nil && i.Enclosure.Type != "" && i.Enclosure.Length != "" { + item.Enclosure = &RssEnclosure{Url: i.Enclosure.Url, Type: i.Enclosure.Type, Length: i.Enclosure.Length} + } + + if i.Author != nil { + item.Author = i.Author.Name + } + return item +} + +// create a new RssFeed with a generic Feed struct's data +func (r *Rss) RssFeed() *RssFeed { + pub := anyTimeFormat(time.RFC1123Z, r.Created, r.Updated) + build := anyTimeFormat(time.RFC1123Z, r.Updated) + author := "" + if r.Author != nil { + author = r.Author.Email + if len(r.Author.Name) > 0 { + author = fmt.Sprintf("%s (%s)", r.Author.Email, r.Author.Name) + } + } + + var image *RssImage + if r.Image != nil { + image = &RssImage{Url: r.Image.Url, Title: r.Image.Title, Link: r.Image.Link, Width: r.Image.Width, Height: r.Image.Height} + } + + channel := &RssFeed{ + Title: r.Title, + Link: r.Link.Href, + Description: r.Description, + ManagingEditor: author, + PubDate: pub, + LastBuildDate: build, + Copyright: r.Copyright, + Image: image, + } + for _, i := range r.Items { + channel.Items = append(channel.Items, newRssItem(i)) + } + return channel +} + +// FeedXml returns an XML-Ready object for an Rss object +func (r *Rss) FeedXml() interface{} { + // only generate version 2.0 feeds for now + return r.RssFeed().FeedXml() + +} + +// FeedXml returns an XML-ready object for an RssFeed object +func (r *RssFeed) FeedXml() interface{} { + return &RssFeedXml{ + Version: "2.0", + Channel: r, + ContentNamespace: "http://purl.org/rss/1.0/modules/content/", + } +} diff --git a/vendor/github.com/gorilla/feeds/test.atom b/vendor/github.com/gorilla/feeds/test.atom new file mode 100644 index 00000000..aa152148 --- /dev/null +++ b/vendor/github.com/gorilla/feeds/test.atom @@ -0,0 +1,92 @@ + + + <![CDATA[Lorem ipsum feed for an interval of 1 minutes]]> + + http://example.com/ + RSS for Node + Tue, 30 Oct 2018 23:22:37 GMT + + Tue, 30 Oct 2018 23:22:00 GMT + + 60 + + <![CDATA[Lorem ipsum 2018-10-30T23:22:00+00:00]]> + + http://example.com/test/1540941720 + http://example.com/test/1540941720 + + Tue, 30 Oct 2018 23:22:00 GMT + + + <![CDATA[Lorem ipsum 2018-10-30T23:21:00+00:00]]> + + http://example.com/test/1540941660 + http://example.com/test/1540941660 + + Tue, 30 Oct 2018 23:21:00 GMT + + + <![CDATA[Lorem ipsum 2018-10-30T23:20:00+00:00]]> + + http://example.com/test/1540941600 + http://example.com/test/1540941600 + + Tue, 30 Oct 2018 23:20:00 GMT + + + <![CDATA[Lorem ipsum 2018-10-30T23:19:00+00:00]]> + + http://example.com/test/1540941540 + http://example.com/test/1540941540 + + Tue, 30 Oct 2018 23:19:00 GMT + + + <![CDATA[Lorem ipsum 2018-10-30T23:18:00+00:00]]> + + http://example.com/test/1540941480 + http://example.com/test/1540941480 + + Tue, 30 Oct 2018 23:18:00 GMT + + + <![CDATA[Lorem ipsum 2018-10-30T23:17:00+00:00]]> + + 
http://example.com/test/1540941420 + http://example.com/test/1540941420 + + Tue, 30 Oct 2018 23:17:00 GMT + + + <![CDATA[Lorem ipsum 2018-10-30T23:16:00+00:00]]> + + http://example.com/test/1540941360 + http://example.com/test/1540941360 + + Tue, 30 Oct 2018 23:16:00 GMT + + + <![CDATA[Lorem ipsum 2018-10-30T23:15:00+00:00]]> + + http://example.com/test/1540941300 + http://example.com/test/1540941300 + + Tue, 30 Oct 2018 23:15:00 GMT + + + <![CDATA[Lorem ipsum 2018-10-30T23:14:00+00:00]]> + + http://example.com/test/1540941240 + http://example.com/test/1540941240 + + Tue, 30 Oct 2018 23:14:00 GMT + + + <![CDATA[Lorem ipsum 2018-10-30T23:13:00+00:00]]> + + http://example.com/test/1540941180 + http://example.com/test/1540941180 + + Tue, 30 Oct 2018 23:13:00 GMT + + \ No newline at end of file diff --git a/vendor/github.com/gorilla/feeds/test.rss b/vendor/github.com/gorilla/feeds/test.rss new file mode 100644 index 00000000..8d912aba --- /dev/null +++ b/vendor/github.com/gorilla/feeds/test.rss @@ -0,0 +1,96 @@ + + + + <![CDATA[Lorem ipsum feed for an interval of 1 minutes]]> + + http://example.com/ + RSS for Node + Tue, 30 Oct 2018 23:22:37 GMT + + Tue, 30 Oct 2018 23:22:00 GMT + + 60 + + <![CDATA[Lorem ipsum 2018-10-30T23:22:00+00:00]]> + + http://example.com/test/1540941720 + http://example.com/test/1540941720 + + Tue, 30 Oct 2018 23:22:00 GMT + + + <![CDATA[Lorem ipsum 2018-10-30T23:21:00+00:00]]> + + http://example.com/test/1540941660 + http://example.com/test/1540941660 + + Tue, 30 Oct 2018 23:21:00 GMT + + + <![CDATA[Lorem ipsum 2018-10-30T23:20:00+00:00]]> + + http://example.com/test/1540941600 + http://example.com/test/1540941600 + + Tue, 30 Oct 2018 23:20:00 GMT + + + <![CDATA[Lorem ipsum 2018-10-30T23:19:00+00:00]]> + + http://example.com/test/1540941540 + http://example.com/test/1540941540 + + Tue, 30 Oct 2018 23:19:00 GMT + + + <![CDATA[Lorem ipsum 2018-10-30T23:18:00+00:00]]> + + http://example.com/test/1540941480 + http://example.com/test/1540941480 + + Tue, 30 Oct 2018 23:18:00 GMT + + + <![CDATA[Lorem ipsum 2018-10-30T23:17:00+00:00]]> + + http://example.com/test/1540941420 + http://example.com/test/1540941420 + + Tue, 30 Oct 2018 23:17:00 GMT + + + <![CDATA[Lorem ipsum 2018-10-30T23:16:00+00:00]]> + + http://example.com/test/1540941360 + http://example.com/test/1540941360 + + Tue, 30 Oct 2018 23:16:00 GMT + + + <![CDATA[Lorem ipsum 2018-10-30T23:15:00+00:00]]> + + http://example.com/test/1540941300 + http://example.com/test/1540941300 + + Tue, 30 Oct 2018 23:15:00 GMT + + + <![CDATA[Lorem ipsum 2018-10-30T23:14:00+00:00]]> + + http://example.com/test/1540941240 + http://example.com/test/1540941240 + + Tue, 30 Oct 2018 23:14:00 GMT + + + <![CDATA[Lorem ipsum 2018-10-30T23:13:00+00:00]]> + + http://example.com/test/1540941180 + http://example.com/test/1540941180 + + Tue, 30 Oct 2018 23:13:00 GMT + + + \ No newline at end of file diff --git a/vendor/github.com/gorilla/feeds/to-implement.md b/vendor/github.com/gorilla/feeds/to-implement.md new file mode 100644 index 00000000..45fd1e75 --- /dev/null +++ b/vendor/github.com/gorilla/feeds/to-implement.md @@ -0,0 +1,20 @@ +[Full iTunes list](https://help.apple.com/itc/podcasts_connect/#/itcb54353390) + +[Example of ideal iTunes RSS feed](https://help.apple.com/itc/podcasts_connect/#/itcbaf351599) + +``` + + + + + + + + + + + + + + +``` \ No newline at end of file diff --git a/vendor/github.com/gorilla/feeds/uuid.go b/vendor/github.com/gorilla/feeds/uuid.go new file mode 100644 index 00000000..51bbafe1 --- /dev/null +++ 
b/vendor/github.com/gorilla/feeds/uuid.go @@ -0,0 +1,27 @@ +package feeds + +// relevant bits from https://github.com/abneptis/GoUUID/blob/master/uuid.go + +import ( + "crypto/rand" + "fmt" +) + +type UUID [16]byte + +// create a new uuid v4 +func NewUUID() *UUID { + u := &UUID{} + _, err := rand.Read(u[:16]) + if err != nil { + panic(err) + } + + u[8] = (u[8] | 0x80) & 0xBf + u[6] = (u[6] | 0x40) & 0x4f + return u +} + +func (u *UUID) String() string { + return fmt.Sprintf("%x-%x-%x-%x-%x", u[:4], u[4:6], u[6:8], u[8:10], u[10:]) +} diff --git a/vendor/go.temporal.io/sdk/testsuite/BUILD.bazel b/vendor/go.temporal.io/sdk/testsuite/BUILD.bazel new file mode 100644 index 00000000..80d144d9 --- /dev/null +++ b/vendor/go.temporal.io/sdk/testsuite/BUILD.bazel @@ -0,0 +1,10 @@ +load("@io_bazel_rules_go//go:def.bzl", "go_library") + +go_library( + name = "testsuite", + srcs = ["testsuite.go"], + importmap = "peridot.resf.org/vendor/go.temporal.io/sdk/testsuite", + importpath = "go.temporal.io/sdk/testsuite", + visibility = ["//visibility:public"], + deps = ["//vendor/go.temporal.io/sdk/internal"], +) diff --git a/vendor/go.temporal.io/sdk/testsuite/testsuite.go b/vendor/go.temporal.io/sdk/testsuite/testsuite.go new file mode 100644 index 00000000..b1302334 --- /dev/null +++ b/vendor/go.temporal.io/sdk/testsuite/testsuite.go @@ -0,0 +1,47 @@ +// The MIT License +// +// Copyright (c) 2020 Temporal Technologies Inc. All rights reserved. +// +// Copyright (c) 2020 Uber Technologies, Inc. +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. + +// Package testsuite contains unit testing framework for Temporal workflows and activities. +package testsuite + +import ( + "go.temporal.io/sdk/internal" +) + +type ( + // WorkflowTestSuite is the test suite to run unit tests for workflow/activity. + WorkflowTestSuite = internal.WorkflowTestSuite + + // TestWorkflowEnvironment is the environment that you use to test workflow + TestWorkflowEnvironment = internal.TestWorkflowEnvironment + + // TestActivityEnvironment is the environment that you use to test activity + TestActivityEnvironment = internal.TestActivityEnvironment + + // MockCallWrapper is a wrapper to mock.Call. It offers the ability to wait on workflow's clock instead of wall clock. + MockCallWrapper = internal.MockCallWrapper +) + +// ErrMockStartChildWorkflowFailed is special error used to indicate the mocked child workflow should fail to start. 
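A minimal sketch of how these re-exported types are used in a unit test; the workflow function and expected values are hypothetical, and the environment methods shown (RegisterWorkflow, ExecuteWorkflow, IsWorkflowCompleted, GetWorkflowError, GetWorkflowResult) come from the Temporal SDK itself rather than from this wrapper file:

    package example_test

    import (
        "testing"

        "go.temporal.io/sdk/testsuite"
        "go.temporal.io/sdk/workflow"
    )

    // GreetWorkflow is a stand-in workflow used only for this sketch.
    func GreetWorkflow(ctx workflow.Context, name string) (string, error) {
        return "hello " + name, nil
    }

    func TestGreetWorkflow(t *testing.T) {
        var ts testsuite.WorkflowTestSuite
        env := ts.NewTestWorkflowEnvironment()
        env.RegisterWorkflow(GreetWorkflow)

        env.ExecuteWorkflow(GreetWorkflow, "apollo")

        if !env.IsWorkflowCompleted() {
            t.Fatal("workflow did not complete")
        }
        if err := env.GetWorkflowError(); err != nil {
            t.Fatalf("workflow failed: %v", err)
        }
        var out string
        if err := env.GetWorkflowResult(&out); err != nil {
            t.Fatalf("decode result: %v", err)
        }
        if out != "hello apollo" {
            t.Fatalf("unexpected result %q", out)
        }
    }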
+var ErrMockStartChildWorkflowFailed = internal.ErrMockStartChildWorkflowFailed diff --git a/vendor/modules.txt b/vendor/modules.txt index 27b96862..0c467746 100644 --- a/vendor/modules.txt +++ b/vendor/modules.txt @@ -317,7 +317,11 @@ github.com/googleapis/gnostic/compiler github.com/googleapis/gnostic/extensions github.com/googleapis/gnostic/jsonschema github.com/googleapis/gnostic/openapiv2 +# github.com/gorilla/feeds v1.1.1 +## explicit +github.com/gorilla/feeds # github.com/grpc-ecosystem/go-grpc-middleware v1.3.0 +## explicit github.com/grpc-ecosystem/go-grpc-middleware github.com/grpc-ecosystem/go-grpc-middleware/auth github.com/grpc-ecosystem/go-grpc-middleware/retry @@ -399,6 +403,8 @@ github.com/matttproud/golang_protobuf_extensions/pbutil github.com/mitchellh/go-homedir # github.com/mitchellh/mapstructure v1.4.1 github.com/mitchellh/mapstructure +# github.com/mmcdole/gofeed v1.1.3 +## explicit # github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd github.com/modern-go/concurrent # github.com/modern-go/reflect2 v1.0.2 @@ -422,6 +428,7 @@ github.com/pborman/uuid ## explicit github.com/pelletier/go-toml # github.com/pkg/errors v0.9.1 +## explicit github.com/pkg/errors # github.com/pmezard/go-difflib v1.0.0 github.com/pmezard/go-difflib/difflib @@ -440,6 +447,8 @@ github.com/prometheus/common/model github.com/prometheus/procfs github.com/prometheus/procfs/internal/fs github.com/prometheus/procfs/internal/util +# github.com/quay/goval-parser v0.8.7 +## explicit # github.com/rivo/uniseg v0.2.0 github.com/rivo/uniseg # github.com/robfig/cron v1.2.0 @@ -563,6 +572,7 @@ go.temporal.io/sdk/internal/common/util go.temporal.io/sdk/internal/log go.temporal.io/sdk/log go.temporal.io/sdk/temporal +go.temporal.io/sdk/testsuite go.temporal.io/sdk/worker go.temporal.io/sdk/workflow # go.uber.org/atomic v1.9.0 @@ -1008,6 +1018,9 @@ k8s.io/utils/integer # openapi.peridot.resf.org/peridotopenapi v0.0.0-00010101000000-000000000000 => ./bazel-bin/peridot/proto/v1/client_go ## explicit openapi.peridot.resf.org/peridotopenapi +# peridot.resf.org/apollo/pb v0.0.0-00010101000000-000000000000 => ./bazel-bin/apollo/proto/v1/apollopb_go_proto_/peridot.resf.org/apollo/pb +## explicit +peridot.resf.org/apollo/pb # peridot.resf.org/common v0.0.0-00010101000000-000000000000 => ./bazel-bin/proto/commonpb_go_proto_/peridot.resf.org/common ## explicit peridot.resf.org/common @@ -1023,12 +1036,6 @@ peridot.resf.org/peridot/pb # peridot.resf.org/peridot/yumrepofs/pb v0.0.0-00010101000000-000000000000 => ./bazel-bin/peridot/proto/v1/yumrepofs/yumrepofspb_go_proto_/peridot.resf.org/peridot/yumrepofs/pb ## explicit peridot.resf.org/peridot/yumrepofs/pb -# peridot.resf.org/secparse/admin/proto v0.0.0-00010101000000-000000000000 => ./bazel-bin/secparse/admin/proto/v1/secparseadminpb_go_proto_/peridot.resf.org/secparse/admin/proto -## explicit -peridot.resf.org/secparse/admin/proto/v1 -# peridot.resf.org/secparse/proto v0.0.0-00010101000000-000000000000 => ./bazel-bin/secparse/proto/v1/secparsepb_go_proto_/peridot.resf.org/secparse/proto -## explicit -peridot.resf.org/secparse/proto/v1 # sigs.k8s.io/structured-merge-diff/v4 v4.1.2 sigs.k8s.io/structured-merge-diff/v4/fieldpath sigs.k8s.io/structured-merge-diff/v4/schema @@ -1038,6 +1045,7 @@ sigs.k8s.io/structured-merge-diff/v4/value sigs.k8s.io/yaml # github.com/dgrijalva/jwt-go v3.2.0+incompatible => github.com/golang-jwt/jwt/v4 v4.4.2 # openapi.peridot.resf.org/peridotopenapi => ./bazel-bin/peridot/proto/v1/client_go +# peridot.resf.org/apollo/pb => 
./bazel-bin/apollo/proto/v1/apollopb_go_proto_/peridot.resf.org/apollo/pb # bazel.build/protobuf => ./bazel-bin/build/bazel/protobuf/bazelbuild_go_proto_/bazel.build/protobuf # bazel.build/remote/execution/v2 => ./bazel-bin/build/bazel/remote/execution/v2/remoteexecution_go_proto_/bazel.build/remote/execution/v2 # bazel.build/semver => ./bazel-bin/build/bazel/semver/semver_go_proto_/bazel.build/semver @@ -1046,6 +1054,4 @@ sigs.k8s.io/yaml # peridot.resf.org/peridot/keykeeper/pb => ./bazel-bin/peridot/proto/v1/keykeeper/keykeeperpb_go_proto_/peridot.resf.org/peridot/keykeeper/pb # peridot.resf.org/peridot/yumrepofs/pb => ./bazel-bin/peridot/proto/v1/yumrepofs/yumrepofspb_go_proto_/peridot.resf.org/peridot/yumrepofs/pb # peridot.resf.org/common => ./bazel-bin/proto/commonpb_go_proto_/peridot.resf.org/common -# peridot.resf.org/secparse/admin/proto => ./bazel-bin/secparse/admin/proto/v1/secparseadminpb_go_proto_/peridot.resf.org/secparse/admin/proto -# peridot.resf.org/secparse/proto => ./bazel-bin/secparse/proto/v1/secparsepb_go_proto_/peridot.resf.org/secparse/proto # github.com/envoyproxy/protoc-gen-validate/validate => ./bazel-bin/vendor/github.com/envoyproxy/protoc-gen-validate/validate/go_default_library_/github.com/envoyproxy/protoc-gen-validate/validate diff --git a/vendor/openapi.peridot.resf.org/peridotopenapi/BUILD.bazel b/vendor/openapi.peridot.resf.org/peridotopenapi/BUILD.bazel index 3b43507c..e92d2973 100644 --- a/vendor/openapi.peridot.resf.org/peridotopenapi/BUILD.bazel +++ b/vendor/openapi.peridot.resf.org/peridotopenapi/BUILD.bazel @@ -33,6 +33,7 @@ go_library( "model_v1_build_filters.go", "model_v1_create_project_request.go", "model_v1_create_project_response.go", + "model_v1_external_repository.go", "model_v1_get_build_batch_response.go", "model_v1_get_build_response.go", "model_v1_get_import_batch_response.go", @@ -50,6 +51,7 @@ go_library( "model_v1_import_revision.go", "model_v1_list_build_batches_response.go", "model_v1_list_builds_response.go", + "model_v1_list_external_repositories_response.go", "model_v1_list_import_batches_response.go", "model_v1_list_imports_response.go", "model_v1_list_packages_response.go", diff --git a/vendor/openapi.peridot.resf.org/peridotopenapi/README.md b/vendor/openapi.peridot.resf.org/peridotopenapi/README.md index 646472ec..47927961 100644 --- a/vendor/openapi.peridot.resf.org/peridotopenapi/README.md +++ b/vendor/openapi.peridot.resf.org/peridotopenapi/README.md @@ -97,9 +97,11 @@ Class | Method | HTTP request | Description *PackageServiceApi* | [**ListPackages**](docs/PackageServiceApi.md#listpackages) | **Get** /v1/projects/{projectId}/packages | ListPackages returns all packages with filters applied *ProjectServiceApi* | [**CreateHashedRepositories**](docs/ProjectServiceApi.md#createhashedrepositories) | **Post** /v1/projects/{projectId}/repositories/hashed | *ProjectServiceApi* | [**CreateProject**](docs/ProjectServiceApi.md#createproject) | **Post** /v1/projects | +*ProjectServiceApi* | [**DeleteExternalRepository**](docs/ProjectServiceApi.md#deleteexternalrepository) | **Delete** /v1/projects/{projectId}/external_repositories/{id} | *ProjectServiceApi* | [**GetProject**](docs/ProjectServiceApi.md#getproject) | **Get** /v1/projects/{id} | *ProjectServiceApi* | [**GetProjectCredentials**](docs/ProjectServiceApi.md#getprojectcredentials) | **Get** /v1/projects/{projectId}/credentials | *ProjectServiceApi* | [**GetRepository**](docs/ProjectServiceApi.md#getrepository) | **Get** /v1/projects/{projectId}/repositories/{id} | 
+*ProjectServiceApi* | [**ListExternalRepositories**](docs/ProjectServiceApi.md#listexternalrepositories) | **Get** /v1/projects/{projectId}/external_repositories | *ProjectServiceApi* | [**ListProjects**](docs/ProjectServiceApi.md#listprojects) | **Get** /v1/projects | *ProjectServiceApi* | [**ListRepositories**](docs/ProjectServiceApi.md#listrepositories) | **Get** /v1/projects/{projectId}/repositories | *ProjectServiceApi* | [**LookasideFileUpload**](docs/ProjectServiceApi.md#lookasidefileupload) | **Post** /v1/lookaside | @@ -137,6 +139,7 @@ Class | Method | HTTP request | Description - [V1BuildFilters](docs/V1BuildFilters.md) - [V1CreateProjectRequest](docs/V1CreateProjectRequest.md) - [V1CreateProjectResponse](docs/V1CreateProjectResponse.md) + - [V1ExternalRepository](docs/V1ExternalRepository.md) - [V1GetBuildBatchResponse](docs/V1GetBuildBatchResponse.md) - [V1GetBuildResponse](docs/V1GetBuildResponse.md) - [V1GetImportBatchResponse](docs/V1GetImportBatchResponse.md) @@ -154,6 +157,7 @@ Class | Method | HTTP request | Description - [V1ImportRevision](docs/V1ImportRevision.md) - [V1ListBuildBatchesResponse](docs/V1ListBuildBatchesResponse.md) - [V1ListBuildsResponse](docs/V1ListBuildsResponse.md) + - [V1ListExternalRepositoriesResponse](docs/V1ListExternalRepositoriesResponse.md) - [V1ListImportBatchesResponse](docs/V1ListImportBatchesResponse.md) - [V1ListImportsResponse](docs/V1ListImportsResponse.md) - [V1ListPackagesResponse](docs/V1ListPackagesResponse.md) diff --git a/vendor/openapi.peridot.resf.org/peridotopenapi/api_project_service.go b/vendor/openapi.peridot.resf.org/peridotopenapi/api_project_service.go index 66920696..6e075735 100644 --- a/vendor/openapi.peridot.resf.org/peridotopenapi/api_project_service.go +++ b/vendor/openapi.peridot.resf.org/peridotopenapi/api_project_service.go @@ -53,6 +53,21 @@ type ProjectServiceApi interface { */ CreateProjectExecute(r ApiCreateProjectRequest) (V1CreateProjectResponse, *_nethttp.Response, error) + /* + * DeleteExternalRepository Method for DeleteExternalRepository + * @param ctx _context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). + * @param projectId + * @param id + * @return ApiDeleteExternalRepositoryRequest + */ + DeleteExternalRepository(ctx _context.Context, projectId string, id string) ApiDeleteExternalRepositoryRequest + + /* + * DeleteExternalRepositoryExecute executes the request + * @return map[string]interface{} + */ + DeleteExternalRepositoryExecute(r ApiDeleteExternalRepositoryRequest) (map[string]interface{}, *_nethttp.Response, error) + /* * GetProject Method for GetProject * @param ctx _context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). @@ -96,6 +111,20 @@ type ProjectServiceApi interface { */ GetRepositoryExecute(r ApiGetRepositoryRequest) (V1GetRepositoryResponse, *_nethttp.Response, error) + /* + * ListExternalRepositories Method for ListExternalRepositories + * @param ctx _context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). 
+ * @param projectId + * @return ApiListExternalRepositoriesRequest + */ + ListExternalRepositories(ctx _context.Context, projectId string) ApiListExternalRepositoriesRequest + + /* + * ListExternalRepositoriesExecute executes the request + * @return V1ListExternalRepositoriesResponse + */ + ListExternalRepositoriesExecute(r ApiListExternalRepositoriesRequest) (V1ListExternalRepositoriesResponse, *_nethttp.Response, error) + /* * ListProjects Method for ListProjects * @param ctx _context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). @@ -422,6 +451,122 @@ func (a *ProjectServiceApiService) CreateProjectExecute(r ApiCreateProjectReques return localVarReturnValue, localVarHTTPResponse, nil } +type ApiDeleteExternalRepositoryRequest struct { + ctx _context.Context + ApiService ProjectServiceApi + projectId string + id string +} + + +func (r ApiDeleteExternalRepositoryRequest) Execute() (map[string]interface{}, *_nethttp.Response, error) { + return r.ApiService.DeleteExternalRepositoryExecute(r) +} + +/* + * DeleteExternalRepository Method for DeleteExternalRepository + * @param ctx _context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). + * @param projectId + * @param id + * @return ApiDeleteExternalRepositoryRequest + */ +func (a *ProjectServiceApiService) DeleteExternalRepository(ctx _context.Context, projectId string, id string) ApiDeleteExternalRepositoryRequest { + return ApiDeleteExternalRepositoryRequest{ + ApiService: a, + ctx: ctx, + projectId: projectId, + id: id, + } +} + +/* + * Execute executes the request + * @return map[string]interface{} + */ +func (a *ProjectServiceApiService) DeleteExternalRepositoryExecute(r ApiDeleteExternalRepositoryRequest) (map[string]interface{}, *_nethttp.Response, error) { + var ( + localVarHTTPMethod = _nethttp.MethodDelete + localVarPostBody interface{} + localVarFormFileName string + localVarFileName string + localVarFileBytes []byte + localVarReturnValue map[string]interface{} + ) + + localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "ProjectServiceApiService.DeleteExternalRepository") + if err != nil { + return localVarReturnValue, nil, GenericOpenAPIError{error: err.Error()} + } + + localVarPath := localBasePath + "/v1/projects/{projectId}/external_repositories/{id}" + localVarPath = strings.Replace(localVarPath, "{"+"projectId"+"}", _neturl.PathEscape(parameterToString(r.projectId, "")), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id"+"}", _neturl.PathEscape(parameterToString(r.id, "")), -1) + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := _neturl.Values{} + localVarFormParams := _neturl.Values{} + + // to determine the Content-Type header + localVarHTTPContentTypes := []string{} + + // set Content-Type header + localVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes) + if localVarHTTPContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHTTPContentType + } + + // to determine the Accept header + localVarHTTPHeaderAccepts := []string{"application/json"} + + // set Accept header + localVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts) + if localVarHTTPHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHTTPHeaderAccept + } + req, err := a.client.prepareRequest(r.ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, 
localVarFormParams, localVarFormFileName, localVarFileName, localVarFileBytes) + if err != nil { + return localVarReturnValue, nil, err + } + + localVarHTTPResponse, err := a.client.callAPI(req) + if err != nil || localVarHTTPResponse == nil { + return localVarReturnValue, localVarHTTPResponse, err + } + + localVarBody, err := _ioutil.ReadAll(localVarHTTPResponse.Body) + localVarHTTPResponse.Body.Close() + localVarHTTPResponse.Body = _ioutil.NopCloser(bytes.NewBuffer(localVarBody)) + if err != nil { + return localVarReturnValue, localVarHTTPResponse, err + } + + if localVarHTTPResponse.StatusCode >= 300 { + newErr := GenericOpenAPIError{ + body: localVarBody, + error: localVarHTTPResponse.Status, + } + var v RpcStatus + err = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get("Content-Type")) + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHTTPResponse, newErr + } + newErr.model = v + return localVarReturnValue, localVarHTTPResponse, newErr + } + + err = a.client.decode(&localVarReturnValue, localVarBody, localVarHTTPResponse.Header.Get("Content-Type")) + if err != nil { + newErr := GenericOpenAPIError{ + body: localVarBody, + error: err.Error(), + } + return localVarReturnValue, localVarHTTPResponse, newErr + } + + return localVarReturnValue, localVarHTTPResponse, nil +} + type ApiGetProjectRequest struct { ctx _context.Context ApiService ProjectServiceApi @@ -762,6 +907,118 @@ func (a *ProjectServiceApiService) GetRepositoryExecute(r ApiGetRepositoryReques return localVarReturnValue, localVarHTTPResponse, nil } +type ApiListExternalRepositoriesRequest struct { + ctx _context.Context + ApiService ProjectServiceApi + projectId string +} + + +func (r ApiListExternalRepositoriesRequest) Execute() (V1ListExternalRepositoriesResponse, *_nethttp.Response, error) { + return r.ApiService.ListExternalRepositoriesExecute(r) +} + +/* + * ListExternalRepositories Method for ListExternalRepositories + * @param ctx _context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). 
+ * @param projectId + * @return ApiListExternalRepositoriesRequest + */ +func (a *ProjectServiceApiService) ListExternalRepositories(ctx _context.Context, projectId string) ApiListExternalRepositoriesRequest { + return ApiListExternalRepositoriesRequest{ + ApiService: a, + ctx: ctx, + projectId: projectId, + } +} + +/* + * Execute executes the request + * @return V1ListExternalRepositoriesResponse + */ +func (a *ProjectServiceApiService) ListExternalRepositoriesExecute(r ApiListExternalRepositoriesRequest) (V1ListExternalRepositoriesResponse, *_nethttp.Response, error) { + var ( + localVarHTTPMethod = _nethttp.MethodGet + localVarPostBody interface{} + localVarFormFileName string + localVarFileName string + localVarFileBytes []byte + localVarReturnValue V1ListExternalRepositoriesResponse + ) + + localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "ProjectServiceApiService.ListExternalRepositories") + if err != nil { + return localVarReturnValue, nil, GenericOpenAPIError{error: err.Error()} + } + + localVarPath := localBasePath + "/v1/projects/{projectId}/external_repositories" + localVarPath = strings.Replace(localVarPath, "{"+"projectId"+"}", _neturl.PathEscape(parameterToString(r.projectId, "")), -1) + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := _neturl.Values{} + localVarFormParams := _neturl.Values{} + + // to determine the Content-Type header + localVarHTTPContentTypes := []string{} + + // set Content-Type header + localVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes) + if localVarHTTPContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHTTPContentType + } + + // to determine the Accept header + localVarHTTPHeaderAccepts := []string{"application/json"} + + // set Accept header + localVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts) + if localVarHTTPHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHTTPHeaderAccept + } + req, err := a.client.prepareRequest(r.ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFormFileName, localVarFileName, localVarFileBytes) + if err != nil { + return localVarReturnValue, nil, err + } + + localVarHTTPResponse, err := a.client.callAPI(req) + if err != nil || localVarHTTPResponse == nil { + return localVarReturnValue, localVarHTTPResponse, err + } + + localVarBody, err := _ioutil.ReadAll(localVarHTTPResponse.Body) + localVarHTTPResponse.Body.Close() + localVarHTTPResponse.Body = _ioutil.NopCloser(bytes.NewBuffer(localVarBody)) + if err != nil { + return localVarReturnValue, localVarHTTPResponse, err + } + + if localVarHTTPResponse.StatusCode >= 300 { + newErr := GenericOpenAPIError{ + body: localVarBody, + error: localVarHTTPResponse.Status, + } + var v RpcStatus + err = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get("Content-Type")) + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHTTPResponse, newErr + } + newErr.model = v + return localVarReturnValue, localVarHTTPResponse, newErr + } + + err = a.client.decode(&localVarReturnValue, localVarBody, localVarHTTPResponse.Header.Get("Content-Type")) + if err != nil { + newErr := GenericOpenAPIError{ + body: localVarBody, + error: err.Error(), + } + return localVarReturnValue, localVarHTTPResponse, newErr + } + + return localVarReturnValue, localVarHTTPResponse, nil +} + type ApiListProjectsRequest struct { ctx _context.Context ApiService ProjectServiceApi diff 
--git a/vendor/openapi.peridot.resf.org/peridotopenapi/model_v1_external_repository.go b/vendor/openapi.peridot.resf.org/peridotopenapi/model_v1_external_repository.go new file mode 100644 index 00000000..9d9206ea --- /dev/null +++ b/vendor/openapi.peridot.resf.org/peridotopenapi/model_v1_external_repository.go @@ -0,0 +1,187 @@ +/* + * peridot/proto/v1/batch.proto + * + * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) + * + * API version: version not set + */ + +// Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. + +package peridotopenapi + +import ( + "encoding/json" +) + +// V1ExternalRepository struct for V1ExternalRepository +type V1ExternalRepository struct { + Url *string `json:"url,omitempty"` + Priority *int32 `json:"priority,omitempty"` + ModuleHotfixes *bool `json:"moduleHotfixes,omitempty"` +} + +// NewV1ExternalRepository instantiates a new V1ExternalRepository object +// This constructor will assign default values to properties that have it defined, +// and makes sure properties required by API are set, but the set of arguments +// will change when the set of required properties is changed +func NewV1ExternalRepository() *V1ExternalRepository { + this := V1ExternalRepository{} + return &this +} + +// NewV1ExternalRepositoryWithDefaults instantiates a new V1ExternalRepository object +// This constructor will only assign default values to properties that have it defined, +// but it doesn't guarantee that properties required by API are set +func NewV1ExternalRepositoryWithDefaults() *V1ExternalRepository { + this := V1ExternalRepository{} + return &this +} + +// GetUrl returns the Url field value if set, zero value otherwise. +func (o *V1ExternalRepository) GetUrl() string { + if o == nil || o.Url == nil { + var ret string + return ret + } + return *o.Url +} + +// GetUrlOk returns a tuple with the Url field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *V1ExternalRepository) GetUrlOk() (*string, bool) { + if o == nil || o.Url == nil { + return nil, false + } + return o.Url, true +} + +// HasUrl returns a boolean if a field has been set. +func (o *V1ExternalRepository) HasUrl() bool { + if o != nil && o.Url != nil { + return true + } + + return false +} + +// SetUrl gets a reference to the given string and assigns it to the Url field. +func (o *V1ExternalRepository) SetUrl(v string) { + o.Url = &v +} + +// GetPriority returns the Priority field value if set, zero value otherwise. +func (o *V1ExternalRepository) GetPriority() int32 { + if o == nil || o.Priority == nil { + var ret int32 + return ret + } + return *o.Priority +} + +// GetPriorityOk returns a tuple with the Priority field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *V1ExternalRepository) GetPriorityOk() (*int32, bool) { + if o == nil || o.Priority == nil { + return nil, false + } + return o.Priority, true +} + +// HasPriority returns a boolean if a field has been set. +func (o *V1ExternalRepository) HasPriority() bool { + if o != nil && o.Priority != nil { + return true + } + + return false +} + +// SetPriority gets a reference to the given int32 and assigns it to the Priority field. +func (o *V1ExternalRepository) SetPriority(v int32) { + o.Priority = &v +} + +// GetModuleHotfixes returns the ModuleHotfixes field value if set, zero value otherwise. 
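In calling code, the generated optional-field pattern is driven through the constructor and setters, and MarshalJSON emits only the fields that were explicitly set. A short sketch with made-up values; the helper name is not part of the generated client:

    import "openapi.peridot.resf.org/peridotopenapi"

    // newExampleExternalRepo builds a request body with illustrative values.
    func newExampleExternalRepo() ([]byte, error) {
        repo := peridotopenapi.NewV1ExternalRepository()
        repo.SetUrl("https://example.com/external/os/x86_64") // placeholder URL
        repo.SetPriority(10)
        repo.SetModuleHotfixes(true)
        // Result: {"url":"https://example.com/external/os/x86_64","priority":10,"moduleHotfixes":true}
        return repo.MarshalJSON()
    }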
+func (o *V1ExternalRepository) GetModuleHotfixes() bool { + if o == nil || o.ModuleHotfixes == nil { + var ret bool + return ret + } + return *o.ModuleHotfixes +} + +// GetModuleHotfixesOk returns a tuple with the ModuleHotfixes field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *V1ExternalRepository) GetModuleHotfixesOk() (*bool, bool) { + if o == nil || o.ModuleHotfixes == nil { + return nil, false + } + return o.ModuleHotfixes, true +} + +// HasModuleHotfixes returns a boolean if a field has been set. +func (o *V1ExternalRepository) HasModuleHotfixes() bool { + if o != nil && o.ModuleHotfixes != nil { + return true + } + + return false +} + +// SetModuleHotfixes gets a reference to the given bool and assigns it to the ModuleHotfixes field. +func (o *V1ExternalRepository) SetModuleHotfixes(v bool) { + o.ModuleHotfixes = &v +} + +func (o V1ExternalRepository) MarshalJSON() ([]byte, error) { + toSerialize := map[string]interface{}{} + if o.Url != nil { + toSerialize["url"] = o.Url + } + if o.Priority != nil { + toSerialize["priority"] = o.Priority + } + if o.ModuleHotfixes != nil { + toSerialize["moduleHotfixes"] = o.ModuleHotfixes + } + return json.Marshal(toSerialize) +} + +type NullableV1ExternalRepository struct { + value *V1ExternalRepository + isSet bool +} + +func (v NullableV1ExternalRepository) Get() *V1ExternalRepository { + return v.value +} + +func (v *NullableV1ExternalRepository) Set(val *V1ExternalRepository) { + v.value = val + v.isSet = true +} + +func (v NullableV1ExternalRepository) IsSet() bool { + return v.isSet +} + +func (v *NullableV1ExternalRepository) Unset() { + v.value = nil + v.isSet = false +} + +func NewNullableV1ExternalRepository(val *V1ExternalRepository) *NullableV1ExternalRepository { + return &NullableV1ExternalRepository{value: val, isSet: true} +} + +func (v NullableV1ExternalRepository) MarshalJSON() ([]byte, error) { + return json.Marshal(v.value) +} + +func (v *NullableV1ExternalRepository) UnmarshalJSON(src []byte) error { + v.isSet = true + return json.Unmarshal(src, &v.value) +} + + diff --git a/vendor/openapi.peridot.resf.org/peridotopenapi/model_v1_list_external_repositories_response.go b/vendor/openapi.peridot.resf.org/peridotopenapi/model_v1_list_external_repositories_response.go new file mode 100644 index 00000000..b13e9e28 --- /dev/null +++ b/vendor/openapi.peridot.resf.org/peridotopenapi/model_v1_list_external_repositories_response.go @@ -0,0 +1,115 @@ +/* + * peridot/proto/v1/batch.proto + * + * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) + * + * API version: version not set + */ + +// Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. 
+ +package peridotopenapi + +import ( + "encoding/json" +) + +// V1ListExternalRepositoriesResponse struct for V1ListExternalRepositoriesResponse +type V1ListExternalRepositoriesResponse struct { + Repositories *[]V1ExternalRepository `json:"repositories,omitempty"` +} + +// NewV1ListExternalRepositoriesResponse instantiates a new V1ListExternalRepositoriesResponse object +// This constructor will assign default values to properties that have it defined, +// and makes sure properties required by API are set, but the set of arguments +// will change when the set of required properties is changed +func NewV1ListExternalRepositoriesResponse() *V1ListExternalRepositoriesResponse { + this := V1ListExternalRepositoriesResponse{} + return &this +} + +// NewV1ListExternalRepositoriesResponseWithDefaults instantiates a new V1ListExternalRepositoriesResponse object +// This constructor will only assign default values to properties that have it defined, +// but it doesn't guarantee that properties required by API are set +func NewV1ListExternalRepositoriesResponseWithDefaults() *V1ListExternalRepositoriesResponse { + this := V1ListExternalRepositoriesResponse{} + return &this +} + +// GetRepositories returns the Repositories field value if set, zero value otherwise. +func (o *V1ListExternalRepositoriesResponse) GetRepositories() []V1ExternalRepository { + if o == nil || o.Repositories == nil { + var ret []V1ExternalRepository + return ret + } + return *o.Repositories +} + +// GetRepositoriesOk returns a tuple with the Repositories field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *V1ListExternalRepositoriesResponse) GetRepositoriesOk() (*[]V1ExternalRepository, bool) { + if o == nil || o.Repositories == nil { + return nil, false + } + return o.Repositories, true +} + +// HasRepositories returns a boolean if a field has been set. +func (o *V1ListExternalRepositoriesResponse) HasRepositories() bool { + if o != nil && o.Repositories != nil { + return true + } + + return false +} + +// SetRepositories gets a reference to the given []V1ExternalRepository and assigns it to the Repositories field. 
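Putting the two new ProjectService endpoints together, a caller would list a project's external repositories and delete one by id roughly as follows; the function, its parameters, and the pre-configured api value are assumptions rather than generated code:

    import (
        "context"
        "fmt"

        "openapi.peridot.resf.org/peridotopenapi"
    )

    // pruneExternalRepo prints every external repository attached to a project and
    // then removes one. api is any implementation of the generated ProjectServiceApi
    // interface; client construction is omitted here.
    func pruneExternalRepo(ctx context.Context, api peridotopenapi.ProjectServiceApi, projectID, repoID string) error {
        res, _, err := api.ListExternalRepositories(ctx, projectID).Execute()
        if err != nil {
            return err
        }
        for _, repo := range res.GetRepositories() {
            fmt.Println(repo.GetUrl(), repo.GetPriority(), repo.GetModuleHotfixes())
        }

        _, _, err = api.DeleteExternalRepository(ctx, projectID, repoID).Execute()
        return err
    }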
+func (o *V1ListExternalRepositoriesResponse) SetRepositories(v []V1ExternalRepository) { + o.Repositories = &v +} + +func (o V1ListExternalRepositoriesResponse) MarshalJSON() ([]byte, error) { + toSerialize := map[string]interface{}{} + if o.Repositories != nil { + toSerialize["repositories"] = o.Repositories + } + return json.Marshal(toSerialize) +} + +type NullableV1ListExternalRepositoriesResponse struct { + value *V1ListExternalRepositoriesResponse + isSet bool +} + +func (v NullableV1ListExternalRepositoriesResponse) Get() *V1ListExternalRepositoriesResponse { + return v.value +} + +func (v *NullableV1ListExternalRepositoriesResponse) Set(val *V1ListExternalRepositoriesResponse) { + v.value = val + v.isSet = true +} + +func (v NullableV1ListExternalRepositoriesResponse) IsSet() bool { + return v.isSet +} + +func (v *NullableV1ListExternalRepositoriesResponse) Unset() { + v.value = nil + v.isSet = false +} + +func NewNullableV1ListExternalRepositoriesResponse(val *V1ListExternalRepositoriesResponse) *NullableV1ListExternalRepositoriesResponse { + return &NullableV1ListExternalRepositoriesResponse{value: val, isSet: true} +} + +func (v NullableV1ListExternalRepositoriesResponse) MarshalJSON() ([]byte, error) { + return json.Marshal(v.value) +} + +func (v *NullableV1ListExternalRepositoriesResponse) UnmarshalJSON(src []byte) error { + v.isSet = true + return json.Unmarshal(src, &v.value) +} + + diff --git a/yarn.lock b/yarn.lock index f9520a68..474b6f1d 100644 --- a/yarn.lock +++ b/yarn.lock @@ -997,6 +997,13 @@ dependencies: regenerator-runtime "^0.13.4" +"@babel/runtime@^7.19.0": + version "7.19.0" + resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.19.0.tgz#22b11c037b094d27a8a2504ea4dcff00f50e2259" + integrity sha512-eR8Lo9hnDS7tqkO7NsV+mKvCmv5boaXFSZ70DnfhcgiEne8hv9oCEd36Klw74EtizEqLsy4YnW8UWwpBVolHZA== + dependencies: + regenerator-runtime "^0.13.4" + "@babel/template@^7.18.6": version "7.18.6" resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.18.6.tgz#1283f4993e00b929d6e2d3c72fdc9168a2977a31" @@ -1073,6 +1080,17 @@ source-map "^0.5.7" stylis "4.0.13" +"@emotion/cache@^11.10.3": + version "11.10.3" + resolved "https://registry.yarnpkg.com/@emotion/cache/-/cache-11.10.3.tgz#c4f67904fad10c945fea5165c3a5a0583c164b87" + integrity sha512-Psmp/7ovAa8appWh3g51goxu/z3iVms7JXOreq136D8Bbn6dYraPnmL6mdM8GThEx9vwSn92Fz+mGSjBzN8UPQ== + dependencies: + "@emotion/memoize" "^0.8.0" + "@emotion/sheet" "^1.2.0" + "@emotion/utils" "^1.2.0" + "@emotion/weak-memoize" "^0.3.0" + stylis "4.0.13" + "@emotion/cache@^11.9.3": version "11.9.3" resolved "https://registry.yarnpkg.com/@emotion/cache/-/cache-11.9.3.tgz#96638449f6929fd18062cfe04d79b29b44c0d6cb" @@ -1101,6 +1119,11 @@ resolved "https://registry.yarnpkg.com/@emotion/memoize/-/memoize-0.7.5.tgz#2c40f81449a4e554e9fc6396910ed4843ec2be50" integrity sha512-igX9a37DR2ZPGYtV6suZ6whr8pTFtyHL3K/oLUotxpSVO2ASaprmAe2Dkq7tBo7CRY7MMDrAa9nuQP9/YG8FxQ== +"@emotion/memoize@^0.8.0": + version "0.8.0" + resolved "https://registry.yarnpkg.com/@emotion/memoize/-/memoize-0.8.0.tgz#f580f9beb67176fa57aae70b08ed510e1b18980f" + integrity sha512-G/YwXTkv7Den9mXDO7AhLWkE3q+I92B+VqAE+dYG4NGPaHZGvt3G8Q0p9vmE+sq7rTGphUbAvmQ9YpbfMQGGlA== + "@emotion/react@^11.8.1": version "11.9.3" resolved "https://registry.yarnpkg.com/@emotion/react/-/react-11.9.3.tgz#f4f4f34444f6654a2e550f5dab4f2d360c101df9" @@ -1130,6 +1153,11 @@ resolved "https://registry.yarnpkg.com/@emotion/sheet/-/sheet-1.1.1.tgz#015756e2a9a3c7c5f11d8ec22966a8dbfbfac787" integrity 
sha512-J3YPccVRMiTZxYAY0IOq3kd+hUP8idY8Kz6B/Cyo+JuXq52Ek+zbPbSQUrVQp95aJ+lsAW7DPL1P2Z+U1jGkKA== +"@emotion/sheet@^1.2.0": + version "1.2.0" + resolved "https://registry.yarnpkg.com/@emotion/sheet/-/sheet-1.2.0.tgz#771b1987855839e214fc1741bde43089397f7be5" + integrity sha512-OiTkRgpxescko+M51tZsMq7Puu/KP55wMT8BgpcXVG2hqXc0Vo0mfymJ/Uj24Hp0i083ji/o0aLddh08UEjq8w== + "@emotion/styled@^11.8.1": version "11.9.3" resolved "https://registry.yarnpkg.com/@emotion/styled/-/styled-11.9.3.tgz#47f0c71137fec7c57035bf3659b52fb536792340" @@ -1156,11 +1184,21 @@ resolved "https://registry.yarnpkg.com/@emotion/utils/-/utils-1.1.0.tgz#86b0b297f3f1a0f2bdb08eeac9a2f49afd40d0cf" integrity sha512-iRLa/Y4Rs5H/f2nimczYmS5kFJEbpiVvgN3XVfZ022IYhuNA1IRSHEizcof88LtCTXtl9S2Cxt32KgaXEu72JQ== +"@emotion/utils@^1.2.0": + version "1.2.0" + resolved "https://registry.yarnpkg.com/@emotion/utils/-/utils-1.2.0.tgz#9716eaccbc6b5ded2ea5a90d65562609aab0f561" + integrity sha512-sn3WH53Kzpw8oQ5mgMmIzzyAaH2ZqFEbozVVBSYp538E06OSE6ytOp7pRAjNQR+Q/orwqdQYJSe2m3hCOeznkw== + "@emotion/weak-memoize@^0.2.5": version "0.2.5" resolved "https://registry.yarnpkg.com/@emotion/weak-memoize/-/weak-memoize-0.2.5.tgz#8eed982e2ee6f7f4e44c253e12962980791efd46" integrity sha512-6U71C2Wp7r5XtFtQzYrW5iKFT67OixrSxjI4MptCHzdSVlgabczzqLe0ZSgnub/5Kp4hSbpDB1tMytZY9pwxxA== +"@emotion/weak-memoize@^0.3.0": + version "0.3.0" + resolved "https://registry.yarnpkg.com/@emotion/weak-memoize/-/weak-memoize-0.3.0.tgz#ea89004119dc42db2e1dba0f97d553f7372f6fcb" + integrity sha512-AHPmaAx+RYfZz0eYu6Gviiagpmiyw98ySSlQvCUhVGDRtDFe4DBS0x1bSjdF3gqUDYOczB+yYvBTtEylYSdRhg== + "@eslint/eslintrc@^0.4.3": version "0.4.3" resolved "https://registry.yarnpkg.com/@eslint/eslintrc/-/eslintrc-0.4.3.tgz#9e42981ef035beb3dd49add17acb96e8ff6f394c" @@ -1327,6 +1365,15 @@ react-is "^18.2.0" react-transition-group "^4.4.2" +"@mui/private-theming@^5.10.6": + version "5.10.6" + resolved "https://registry.yarnpkg.com/@mui/private-theming/-/private-theming-5.10.6.tgz#2c6bb2a4b7034cd402a099bd0349f217584e7b25" + integrity sha512-I/W0QyTLRdEx6py3lKAquKO/rNF/7j+nIOM/xCyI9kU0fcotVTcTY08mKMsS6vrzdWpi6pAkD0wP0KwWy5R5VA== + dependencies: + "@babel/runtime" "^7.19.0" + "@mui/utils" "^5.10.6" + prop-types "^15.8.1" + "@mui/private-theming@^5.9.0": version "5.9.0" resolved "https://registry.yarnpkg.com/@mui/private-theming/-/private-theming-5.9.0.tgz#d2437ed95ecfa3bfc9d2ee7c6053c94d4931cb26" @@ -1336,6 +1383,16 @@ "@mui/utils" "^5.9.0" prop-types "^15.8.1" +"@mui/styled-engine@^5.10.6": + version "5.10.6" + resolved "https://registry.yarnpkg.com/@mui/styled-engine/-/styled-engine-5.10.6.tgz#9c6e79b29740e9f494c9fb26ebd4046aa88c1d21" + integrity sha512-OnVw5xnO4l0XzlJFhKif/RlLenBNhyEQQlSTwB9ApSWB05UAU5ZSbjNsRfyEKvgmQ/fPa+MqPD/dzxbIRCwyeg== + dependencies: + "@babel/runtime" "^7.19.0" + "@emotion/cache" "^11.10.3" + csstype "^3.1.0" + prop-types "^15.8.1" + "@mui/styled-engine@^5.8.7": version "5.8.7" resolved "https://registry.yarnpkg.com/@mui/styled-engine/-/styled-engine-5.8.7.tgz#63d0779c07677fe76d4705a02c7ae99f89b50780" @@ -1369,6 +1426,20 @@ jss-plugin-vendor-prefixer "^10.8.2" prop-types "^15.8.1" +"@mui/system@^5.10.6": + version "5.10.6" + resolved "https://registry.yarnpkg.com/@mui/system/-/system-5.10.6.tgz#ddeeb63830e325a06ba8a0cf33ec950c49a06a73" + integrity sha512-HfQVX7e2xpQ3jtdB/WwtkFVtozMOozyN575/63u8ILHkE8wGDhblmCieAsnyJPFbm7WBW5PCMyzmfr4QyKLaYg== + dependencies: + "@babel/runtime" "^7.19.0" + "@mui/private-theming" "^5.10.6" + "@mui/styled-engine" "^5.10.6" + "@mui/types" "^7.2.0" + 
"@mui/utils" "^5.10.6" + clsx "^1.2.1" + csstype "^3.1.0" + prop-types "^15.8.1" + "@mui/system@^5.9.0": version "5.9.0" resolved "https://registry.yarnpkg.com/@mui/system/-/system-5.9.0.tgz#804055bc6fcd557479b8b28dfca7ed5c98fd9bf9" @@ -1388,6 +1459,22 @@ resolved "https://registry.yarnpkg.com/@mui/types/-/types-7.1.4.tgz#4185c05d6df63ec673cda15feab80440abadc764" integrity sha512-uveM3byMbthO+6tXZ1n2zm0W3uJCQYtwt/v5zV5I77v2v18u0ITkb8xwhsDD2i3V2Kye7SaNR6FFJ6lMuY/WqQ== +"@mui/types@^7.2.0": + version "7.2.0" + resolved "https://registry.yarnpkg.com/@mui/types/-/types-7.2.0.tgz#91380c2d42420f51f404120f7a9270eadd6f5c23" + integrity sha512-lGXtFKe5lp3UxTBGqKI1l7G8sE2xBik8qCfrLHD5olwP/YU0/ReWoWT7Lp1//ri32dK39oPMrJN8TgbkCSbsNA== + +"@mui/utils@^5.10.6": + version "5.10.6" + resolved "https://registry.yarnpkg.com/@mui/utils/-/utils-5.10.6.tgz#98d432d2b05544c46efe356cf095cea3a37c2e59" + integrity sha512-g0Qs8xN/MW2M3fLL8197h5J2VB9U+49fLlnKKqC6zy/yus5cZwdT+Gwec+wUMxgwQoxMDn+J8oDWAn28kEOR/Q== + dependencies: + "@babel/runtime" "^7.19.0" + "@types/prop-types" "^15.7.5" + "@types/react-is" "^16.7.1 || ^17.0.0" + prop-types "^15.8.1" + react-is "^18.2.0" + "@mui/utils@^5.4.1", "@mui/utils@^5.9.0": version "5.9.0" resolved "https://registry.yarnpkg.com/@mui/utils/-/utils-5.9.0.tgz#2e1ac58905b767de47412cb32475862875b8e880" @@ -1571,10 +1658,10 @@ dependencies: defer-to-connect "^2.0.0" -"@tailwindcss/forms@^0.4.0": - version "0.4.1" - resolved "https://registry.yarnpkg.com/@tailwindcss/forms/-/forms-0.4.1.tgz#5a47ccd60490cbba84e662f2b9cf3d71a5126d17" - integrity sha512-gS9xjCmJjUBz/eP12QlENPLnf0tCx68oYE3mri0GMP5jdtVwLbGUNSRpjsp6NzLAZzZy3ueOwrcqB78Ax6Z84A== +"@tailwindcss/forms@^0.5.3": + version "0.5.3" + resolved "https://registry.yarnpkg.com/@tailwindcss/forms/-/forms-0.5.3.tgz#e4d7989686cbcaf416c53f1523df5225332a86e7" + integrity sha512-y5mb86JUoiUgBjY/o6FJSFZSEttfb3Q5gllE4xoKjAAD+vBrnIhE4dViwUuow3va8mpH4s9jyUbUbrRGoRdc2Q== dependencies: mini-svg-data-uri "^1.2.3" @@ -4267,7 +4354,7 @@ levn@^0.4.1: prelude-ls "^1.2.1" type-check "~0.4.0" -lilconfig@^2.0.3, lilconfig@^2.0.5: +lilconfig@^2.0.3, lilconfig@^2.0.5, lilconfig@^2.0.6: version "2.0.6" resolved "https://registry.yarnpkg.com/lilconfig/-/lilconfig-2.0.6.tgz#32a384558bd58af3d4c6e077dd1ad1d397bc69d4" integrity sha512-9JROoBW7pobfsx+Sq2JsASvCo6Pfo6WWoUW79HuB1BCoBXD4PLWJPqDF6fNj67pqBYTbAHkE57M1kS/+L1neOg== @@ -6115,10 +6202,10 @@ table@^6.0.9: string-width "^4.2.3" strip-ansi "^6.0.1" -tailwindcss@^3.0.6: - version "3.1.6" - resolved "https://registry.yarnpkg.com/tailwindcss/-/tailwindcss-3.1.6.tgz#bcb719357776c39e6376a8d84e9834b2b19a49f1" - integrity sha512-7skAOY56erZAFQssT1xkpk+kWt2NrO45kORlxFPXUt3CiGsVPhH1smuH5XoDH6sGPXLyBv+zgCKA2HWBsgCytg== +tailwindcss@^3.1.8: + version "3.1.8" + resolved "https://registry.yarnpkg.com/tailwindcss/-/tailwindcss-3.1.8.tgz#4f8520550d67a835d32f2f4021580f9fddb7b741" + integrity sha512-YSneUCZSFDYMwk+TGq8qYFdCA3yfBRdBlS7txSq0LUmzyeqRe3a8fBQzbz9M3WS/iFT4BNf/nmw9mEzrnSaC0g== dependencies: arg "^5.0.2" chokidar "^3.5.3" @@ -6129,7 +6216,7 @@ tailwindcss@^3.0.6: fast-glob "^3.2.11" glob-parent "^6.0.2" is-glob "^4.0.3" - lilconfig "^2.0.5" + lilconfig "^2.0.6" normalize-path "^3.0.0" object-hash "^3.0.0" picocolors "^1.0.0"