Initial commit

Mustafa Gezen 2023-08-21 18:01:10 +02:00
commit 698ab72d9e
Signed by: mustafa
GPG Key ID: DCDF010D946438C1
2050 changed files with 795679 additions and 0 deletions

8
.api-linter.yaml Normal file

@@ -0,0 +1,8 @@
- included_paths:
    - "**/*"
  disabled_rules:
    - "core::0191::proto-package"
    - "core::0123::resource-annotation"
    - "core::0132::request-parent-required"
    - "core::0133::request-parent-required"
    - "core::0131::request-name-reference"

8
.bazelignore Normal file

@@ -0,0 +1,8 @@
.ijwb
.idea
bazel-bin
bazel-genfiles
bazel-out
bazel-testlogs
bazel-peridot
node_modules

205
.bazelrc Normal file

@@ -0,0 +1,205 @@
# Java
test --test_env='LC_ALL=en_US.UTF-8'
test --test_env='LANG=en_US.UTF-8'
test --jvmopt='-Dsun.jnu.encoding=UTF-8'
test --jvmopt='-Dfile.encoding=UTF-8'
build --test_env='LC_ALL=en_US.UTF-8'
build --jvmopt='-Dsun.jnu.encoding=UTF-8'
build --jvmopt='-Dfile.encoding=UTF-8'
build --test_env='LANG=en_US.UTF-8'
test --test_env=PATH
build --java_language_version=17
test --java_language_version=17
# Do not upload locally executed action results to the remote cache.
# This should be the default for local builds so local builds cannot poison the remote cache.
# It should be flipped to `--remote_upload_local_results` on CI
# by using `--bazelrc=.aspect/bazelrc/ci.bazelrc`.
# Docs: https://bazel.build/reference/command-line-reference#flag--remote_upload_local_results
build --noremote_upload_local_results
# Don't allow network access for build actions in the sandbox.
# Ensures that you don't accidentally make non-hermetic actions/tests which depend on remote
# services.
# Developers should tag targets with `tags=["requires-network"]` to opt-out of the enforcement.
# Docs: https://bazel.build/reference/command-line-reference#flag--sandbox_default_allow_network
build --sandbox_default_allow_network=false
# Warn if a test's timeout is significantly longer than the test's actual execution time.
# Bazel's default for test_timeout is medium (5 min), but most tests should instead be short (1 min).
# While a test's timeout should be set such that it is not flaky, a test that has a highly
# over-generous timeout can hide real problems that crop up unexpectedly.
# For instance, a test that normally executes in a minute or two should not have a timeout of
# ETERNAL or LONG as these are much, much too generous.
# Docs: https://bazel.build/docs/user-manual#test-verbose-timeout-warnings
test --test_verbose_timeout_warnings
# Allow the Bazel server to check directory sources for changes. Ensures that the Bazel server
# notices when a directory changes, if you have a directory listed in the srcs of some target.
# Recommended when using
# [copy_directory](https://github.com/aspect-build/bazel-lib/blob/main/docs/copy_directory.md) and
# [rules_js](https://github.com/aspect-build/rules_js), since npm packages are source directories
# used as inputs to copy_directory actions.
# Docs: https://bazel.build/reference/command-line-reference#flag--host_jvm_args
startup --host_jvm_args=-DBAZEL_TRACK_SOURCE_DIRECTORIES=1
# Allow exclusive tests to run in the sandbox. Fixes a bug where Bazel doesn't enable sandboxing for
# tests with `tags=["exclusive"]`.
# Docs: https://bazel.build/reference/command-line-reference#flag--incompatible_exclusive_test_sandboxed
test --incompatible_exclusive_test_sandboxed
# Use a static value for `PATH` and do not inherit `LD_LIBRARY_PATH`. This prevents environment
# variables like `PATH` from sneaking into the build, which can cause massive cache misses when they change.
# Use `--action_env=ENV_VARIABLE` if you want to inherit specific environment variables from the
# client, but note that doing so can prevent cross-user caching if a shared cache is used.
# Docs: https://bazel.build/reference/command-line-reference#flag--incompatible_strict_action_env
build --incompatible_strict_action_env
# Propagate tags from a target declaration to the actions' execution requirements.
# Ensures that tags applied in your BUILD file, like `tags=["no-remote"]`
# get propagated to actions created by the rule.
# Without this option, you rely on rules authors to manually check the tags you passed
# and apply relevant ones to the actions they create.
# See https://github.com/bazelbuild/bazel/issues/8830 for details.
# Docs: https://bazel.build/reference/command-line-reference#flag--experimental_allow_tags_propagation
build --experimental_allow_tags_propagation
fetch --experimental_allow_tags_propagation
query --experimental_allow_tags_propagation
# Do not automatically create `__init__.py` files in the runfiles of Python targets. Fixes the wrong
# default that comes from Google's internal monorepo by using `__init__.py` to delimit a Python
# package. Precisely, when a `py_binary` or `py_test` target has `legacy_create_init` set to `auto` (the
# default), it is treated as false if and only if this flag is set. See
# https://github.com/bazelbuild/bazel/issues/10076.
# Docs: https://bazel.build/reference/command-line-reference#flag--incompatible_default_to_explicit_init_py
build --incompatible_default_to_explicit_init_py
# Attempt to build & test every target whose prerequisites were successfully built.
# Docs: https://bazel.build/docs/user-manual#keep-going
build --keep_going
# Output test errors to stderr so users don't have to `cat` or open test failure log files when tests
# fail. This makes the log noisier in exchange for reducing the time-to-feedback on test failures for
# users.
# Docs: https://bazel.build/docs/user-manual#test-output
test --test_output=errors
# Show the output files created by builds that requested more than one target. This helps users
# locate the build outputs in more cases.
# Docs: https://bazel.build/docs/user-manual#show-result
build --show_result=20
# Bazel picks up host-OS-specific config lines from bazelrc files. For example, if the host OS is
# Linux and you run bazel build, Bazel picks up lines starting with build:linux. Supported OS
# identifiers are `linux`, `macos`, `windows`, `freebsd`, and `openbsd`. Enabling this flag is
# equivalent to using `--config=linux` on Linux, `--config=windows` on Windows, etc.
# Docs: https://bazel.build/reference/command-line-reference#flag--enable_platform_specific_config
common --enable_platform_specific_config
# Output a heap dump if an OOM is thrown during a Bazel invocation
# (including OOMs due to `--experimental_oom_more_eagerly_threshold`).
# The dump will be written to `<output_base>/<invocation_id>.heapdump.hprof`.
# You may need to configure CI to capture this artifact and upload for later use.
# Docs: https://bazel.build/reference/command-line-reference#flag--heap_dump_on_oom
common --heap_dump_on_oom
# Speed up all builds by not checking if output files have been modified. Lets you make changes to
# the output tree without triggering a build for local debugging. For example, you can modify
# [rules_js](https://github.com/aspect-build/rules_js) 3rd party npm packages in the output tree
# when local debugging.
# Docs: https://github.com/bazelbuild/bazel/blob/1af61b21df99edc2fc66939cdf14449c2661f873/src/main/java/com/google/devtools/build/lib/pkgcache/PackageOptions.java#L185
build --noexperimental_check_output_files
fetch --noexperimental_check_output_files
query --noexperimental_check_output_files
# Don't apply `--noremote_upload_local_results` and `--noremote_accept_cached` to the disk cache.
# If you have both `--noremote_upload_local_results` and `--disk_cache`, then this fixes a bug where
# Bazel doesn't write to the local disk cache because it treats it as a remote cache.
# Docs: https://bazel.build/reference/command-line-reference#flag--incompatible_remote_results_ignore_disk
build --incompatible_remote_results_ignore_disk
# Do not build runfiles symlink forests for external repositories under
# `.runfiles/wsname/external/repo` (in addition to `.runfiles/repo`). This reduces runfiles &
# sandbox creation times & prevents accidentally depending on this feature which may flip to off by
# default in the future. Note: some rules may fail under this flag; please file issues with the rule
# author.
# Docs: https://bazel.build/reference/command-line-reference#flag--legacy_external_runfiles
build --nolegacy_external_runfiles
# Some actions are always IO-intensive but require little compute. It's wasteful to put their output
# in the remote cache: it just saturates the network and fills the cache storage, causing earlier
# evictions. It's also not worth sending them for remote execution.
# For actions like PackageTar it's usually faster to just re-run the work locally every time.
# You'll have to look at an execution log to figure out what other action mnemonics you care about.
# In some cases you may need to patch rulesets to add a mnemonic to actions that don't have one.
# https://bazel.build/reference/command-line-reference#flag--modify_execution_info
build --modify_execution_info=PackageTar=+no-remote
############################################################
# Use `bazel test --config=debug` to enable these settings #
############################################################
# Stream stdout/stderr output from each test in real-time.
# Docs: https://bazel.build/docs/user-manual#test-output
test:debug --test_output=streamed
# Run one test at a time.
# Docs: https://bazel.build/reference/command-line-reference#flag--test_strategy
test:debug --test_strategy=exclusive
# Prevent long-running tests from timing out.
# Docs: https://bazel.build/docs/user-manual#test-timeout
test:debug --test_timeout=9999
# Always run tests even if they have cached results.
# Docs: https://bazel.build/docs/user-manual#cache-test-results
test:debug --nocache_test_results
# Aspect recommended Bazel flags when using Aspect's JavaScript rules: https://github.com/aspect-build/rules_js
# Docs for Node.js flags: https://nodejs.org/en/docs/guides/debugging-getting-started/#command-line-options
# Support for debugging Node.js tests. Use bazel run with `--config=debug` to turn on the NodeJS
# inspector agent. The node process will break before user code starts and wait for the debugger to
# connect. Pass the --inspect-brk option to all tests which enables the node inspector agent. See
# https://nodejs.org/de/docs/guides/debugging-getting-started/#command-line-options for more
# details.
# Docs: https://nodejs.org/en/docs/guides/debugging-getting-started/#command-line-options
run:debug -- --node_options=--inspect-brk
# Enable runfiles on all platforms. Runfiles are on by default on Linux and MacOS but off on
# Windows.
#
# In general, rules_js and derivative rule sets assume that runfiles are enabled and do not support the
# no-runfiles case, because it does not scale to teach all Node.js tools to use the runfiles manifest.
#
# If you are developing on Windows, you must either run Bazel with administrator privileges or
# enable developer mode. If you do not, you may hit this error on Windows:
#
# Bazel needs to create symlinks to build the runfiles tree.
# Creating symlinks on Windows requires one of the following:
# 1. Bazel is run with administrator privileges.
# 2. The system version is Windows 10 Creators Update (1703) or later
# and developer mode is enabled.
#
# Docs: https://bazel.build/reference/command-line-reference#flag--enable_runfiles
build --enable_runfiles
# Speed up all builds by not checking if external repository files have been modified.
# Docs: https://github.com/bazelbuild/bazel/blob/1af61b21df99edc2fc66939cdf14449c2661f873/src/main/java/com/google/devtools/build/lib/bazel/repository/RepositoryOptions.java#L244
build --noexperimental_check_external_repository_files
fetch --noexperimental_check_external_repository_files
query --noexperimental_check_external_repository_files
# Directories used by sandboxed non-worker execution may be reused to avoid unnecessary setup costs.
# Saves time on sandbox creation and deletion when many actions of the same kind run during the
# build.
# Docs: https://bazel.build/reference/command-line-reference#flag--reuse_sandbox_directories
build --reuse_sandbox_directories
# Avoid this flag being enabled by remote_download_minimal or remote_download_toplevel
# See https://meroton.com/blog/bazel-6-errors-build-without-the-bytes/
build --noexperimental_action_cache_store_output_metadata
# Import Mac development flags
import %workspace%/macdev.bazelrc

1
.bazelversion Normal file

@@ -0,0 +1 @@
6.3.2

31
.editorconfig Normal file

@@ -0,0 +1,31 @@
root = true
[*]
charset = utf-8
indent_style = space
indent_size = 2
insert_final_newline = true
trim_trailing_whitespace = true
end_of_line = lf
[*.md]
max_line_length = off
trim_trailing_whitespace = false
[*.go]
indent_size = 4
[*.py]
indent_size = 4
[*.bzl]
indent_size = 4
[BUILD]
indent_size = 4
[BUILD.bazel]
indent_size = 4
[WORKSPACE]
indent_size = 4

4
.gitignore vendored Normal file

@@ -0,0 +1,4 @@
.idea
.ijwb
bazel-*
node_modules

5
.prettierrc Normal file

@@ -0,0 +1,5 @@
{
"singleQuote": true,
"jsxSingleQuote": false,
"tabWidth": 2
}

1
AUTHORS Normal file

@@ -0,0 +1 @@
Mustafa Gezen <mustafa@gezen.no>

50
BUILD.bazel Normal file

@@ -0,0 +1,50 @@
load("@aspect_bazel_lib//lib:copy_to_bin.bzl", "copy_to_bin")
load("@io_bazel_rules_go//proto:compiler.bzl", "go_proto_compiler")
load("@bazel_gazelle//:def.bzl", "gazelle")
load("@npm//:defs.bzl", "npm_link_all_packages")
npm_link_all_packages(name = "node_modules")
exports_files(["tsconfig.json"])
# gazelle:prefix go.resf.org/peridot
# gazelle:exclude third_party/googleapis
# gazelle:exclude vendor/go.resf.org/peridot
# gazelle:exclude vendor/google.golang.org
# gazelle:exclude vendor/github.com/golang/protobuf
# gazelle:exclude vendor/golang.org/x/net
# gazelle:exclude vendor.go
# gazelle:go_grpc_compilers @io_bazel_rules_go//proto:go_grpc,//:go_gen_grpc_gateway
# gazelle:resolve proto proto google/api/annotations.proto @googleapis//google/api:annotations_proto
# gazelle:resolve proto go google/api/annotations.proto @org_golang_google_genproto//googleapis/api/annotations
gazelle(name = "gazelle")
gazelle(
name = "gazelle-update-repos",
args = [
"-from_file=go.mod",
"-to_macro=deps.bzl%go_dependencies",
"-prune",
],
command = "update-repos",
)
go_proto_compiler(
name = "go_gen_grpc_gateway",
options = ["logtostderr=true"],
plugin = "//vendor/github.com/grpc-ecosystem/grpc-gateway/v2/protoc-gen-grpc-gateway",
suffix = ".pb.gw.go",
visibility = ["//visibility:public"],
deps = [
"//vendor/github.com/grpc-ecosystem/grpc-gateway/v2/runtime",
"//vendor/github.com/grpc-ecosystem/grpc-gateway/v2/utilities",
"@org_golang_google_grpc//grpclog",
"@org_golang_google_grpc//metadata",
],
)
copy_to_bin(
name = "tsconfig",
srcs = ["tsconfig.json"],
visibility = ["//visibility:public"],
)

202
LICENSE Normal file

@@ -0,0 +1,202 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright 2023 Peridot Authors
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

174
WORKSPACE Normal file

@@ -0,0 +1,174 @@
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
# ------------------------------------------------------------------------------
# bazel_skylib
# ------------------------------------------------------------------------------
http_archive(
name = "bazel_skylib",
sha256 = "66ffd9315665bfaafc96b52278f57c7e2dd09f5ede279ea6d39b2be471e7e3aa",
strip_prefix = "",
# 1.4.2, latest as of 2023-06-08
urls = [
"https://mirror.bazel.build/github.com/bazelbuild/bazel-skylib/releases/download/1.4.2/bazel-skylib-1.4.2.tar.gz",
"https://github.com/bazelbuild/bazel-skylib/releases/download/1.4.2/bazel-skylib-1.4.2.tar.gz",
],
)
# ------------------------------------------------------------------------------
# protobuf
# ------------------------------------------------------------------------------
http_archive(
name = "com_google_protobuf",
sha256 = "d0f5f605d0d656007ce6c8b5a82df3037e1d8fe8b121ed42e536f569dec16113",
strip_prefix = "protobuf-3.14.0",
urls = [
"https://mirror.bazel.build/github.com/protocolbuffers/protobuf/archive/v3.14.0.tar.gz",
"https://github.com/protocolbuffers/protobuf/archive/v3.14.0.tar.gz",
],
)
load("@com_google_protobuf//:protobuf_deps.bzl", "protobuf_deps")
protobuf_deps()
# ------------------------------------------------------------------------------
# googleapis
# ------------------------------------------------------------------------------
http_archive(
name = "googleapis",
sha256 = "9d1a930e767c93c825398b8f8692eca3fe353b9aaadedfbcf1fca2282c85df88",
strip_prefix = "googleapis-64926d52febbf298cb82a8f472ade4a3969ba922",
urls = [
"https://github.com/googleapis/googleapis/archive/64926d52febbf298cb82a8f472ade4a3969ba922.zip",
],
)
load("@googleapis//:repository_rules.bzl", "switched_rules_by_language")
switched_rules_by_language(
name = "com_google_googleapis_imports",
)
# ------------------------------------------------------------------------------
# rules_go
# ------------------------------------------------------------------------------
http_archive(
name = "io_bazel_rules_go",
sha256 = "6dc2da7ab4cf5d7bfc7c949776b1b7c733f05e56edc4bcd9022bb249d2e2a996",
urls = [
"https://mirror.bazel.build/github.com/bazelbuild/rules_go/releases/download/v0.39.1/rules_go-v0.39.1.zip",
"https://github.com/bazelbuild/rules_go/releases/download/v0.39.1/rules_go-v0.39.1.zip",
],
)
http_archive(
name = "bazel_gazelle",
sha256 = "727f3e4edd96ea20c29e8c2ca9e8d2af724d8c7778e7923a854b2c80952bc405",
urls = [
"https://mirror.bazel.build/github.com/bazelbuild/bazel-gazelle/releases/download/v0.30.0/bazel-gazelle-v0.30.0.tar.gz",
"https://github.com/bazelbuild/bazel-gazelle/releases/download/v0.30.0/bazel-gazelle-v0.30.0.tar.gz",
],
)
load("@io_bazel_rules_go//go:deps.bzl", "go_register_toolchains", "go_rules_dependencies")
load("@bazel_gazelle//:deps.bzl", "gazelle_dependencies", "go_repository")
go_rules_dependencies()
go_register_toolchains(version = "1.20.5")
load("//:deps.bzl", "go_dependencies")
# gazelle:repository_macro deps.bzl%go_dependencies
go_dependencies()
gazelle_dependencies()
# ------------------------------------------------------------------------------
# rules_js
# ------------------------------------------------------------------------------
http_archive(
name = "aspect_rules_js",
sha256 = "7b2a4d1d264e105eae49a27e2e78065b23e2e45724df2251eacdd317e95bfdfd",
strip_prefix = "rules_js-1.31.0",
url = "https://github.com/aspect-build/rules_js/releases/download/v1.31.0/rules_js-v1.31.0.tar.gz",
)
load("@aspect_rules_js//js:repositories.bzl", "rules_js_dependencies")
rules_js_dependencies()
load("@rules_nodejs//nodejs:repositories.bzl", "nodejs_register_toolchains")
nodejs_register_toolchains(
name = "nodejs",
node_version = "18.13.0",
)
load("@aspect_rules_js//npm:repositories.bzl", "npm_translate_lock")
npm_translate_lock(
name = "npm",
pnpm_lock = "//:pnpm-lock.yaml",
verify_node_modules_ignored = "//:.bazelignore",
)
load("@npm//:repositories.bzl", "npm_repositories")
npm_repositories()
# ------------------------------------------------------------------------------
# rules_swc
# ------------------------------------------------------------------------------
http_archive(
name = "aspect_rules_swc",
sha256 = "8eb9e42ed166f20cacedfdb22d8d5b31156352eac190fc3347db55603745a2d8",
strip_prefix = "rules_swc-1.1.0",
url = "https://github.com/aspect-build/rules_swc/releases/download/v1.1.0/rules_swc-v1.1.0.tar.gz",
)
# Fetches the rules_swc dependencies.
# If you want to have a different version of some dependency,
# you should fetch it *before* calling this.
# Alternatively, you can skip calling this function, so long as you've
# already fetched all the dependencies.
load("@aspect_rules_swc//swc:dependencies.bzl", "rules_swc_dependencies")
rules_swc_dependencies()
# Fetches a SWC cli from
# https://github.com/swc-project/swc/releases
# If you'd rather compile it from source, you can use rules_rust, fetch the project,
# then register the toolchain yourself. (Note, this is not yet documented)
load("@aspect_rules_swc//swc:repositories.bzl", "swc_register_toolchains")
swc_register_toolchains(
name = "swc",
swc_version_from = "//:package.json",
)
# ------------------------------------------------------------------------------
# rules_esbuild
# ------------------------------------------------------------------------------
http_archive(
name = "aspect_rules_esbuild",
sha256 = "098e38e5ee868c14a6484ba263b79e57d48afacfc361ba30137c757a9c4716d6",
strip_prefix = "rules_esbuild-0.15.0",
url = "https://github.com/aspect-build/rules_esbuild/releases/download/v0.15.0/rules_esbuild-v0.15.0.tar.gz",
)
# Fetches the rules_esbuild dependencies.
# If you want to have a different version of some dependency,
# you should fetch it *before* calling this.
# Alternatively, you can skip calling this function, so long as you've
# already fetched all the dependencies.
load("@aspect_rules_esbuild//esbuild:dependencies.bzl", "rules_esbuild_dependencies")
rules_esbuild_dependencies()
load("@aspect_rules_esbuild//esbuild:repositories.bzl", "esbuild_register_toolchains")
esbuild_register_toolchains(
name = "esbuild",
esbuild_version = "0.17.18",
)

11
base/BUILD.bazel Normal file

@@ -0,0 +1,11 @@
load("@bazel_skylib//rules:common_settings.bzl", "bool_flag")
bool_flag(
name = "minify_esbuild",
build_setting_default = False,
)
config_setting(
name = "minify_esbuild_enabled",
flag_values = {"minify_esbuild": "True"},
)

0
base/ci/BUILD.bazel Normal file

9
base/ci/defaults.star Normal file

@@ -0,0 +1,9 @@
default(
istio = True,
)
flags(
# replace later with a real registry
registry = "registry.resf.org",
version = "",
)

27
base/go/BUILD.bazel Normal file

@@ -0,0 +1,27 @@
load("@io_bazel_rules_go//go:def.bzl", "go_library")
go_library(
name = "go",
srcs = [
"db.go",
"flags.go",
"frontend_server.go",
"grpc.go",
"log.go",
"pointer.go",
"slice.go",
],
importpath = "go.resf.org/peridot/base/go",
visibility = ["//visibility:public"],
deps = [
"//vendor/github.com/grpc-ecosystem/go-grpc-middleware",
"//vendor/github.com/grpc-ecosystem/go-grpc-prometheus",
"//vendor/github.com/grpc-ecosystem/grpc-gateway/v2/runtime",
"//vendor/github.com/prometheus/client_golang/prometheus/promhttp",
"//vendor/github.com/urfave/cli/v2:cli",
"//vendor/github.com/wk8/go-ordered-map/v2:go-ordered-map",
"//vendor/go.ciq.dev/pika",
"@org_golang_google_grpc//:go_default_library",
"@org_golang_google_grpc//credentials/insecure",
],
)

110
base/go/db.go Normal file

@@ -0,0 +1,110 @@
package base
import (
"context"
"fmt"
orderedmap "github.com/wk8/go-ordered-map/v2"
"go.ciq.dev/pika"
)
type Pika[T any] interface {
pika.QuerySet[T]
F(keyval ...any) Pika[T]
Transaction(ctx context.Context) (Pika[T], error)
U(x any) error
}
type DB struct {
*pika.PostgreSQL
}
type innerDB[T any] struct {
pika.QuerySet[T]
*DB
}
type idInterfaceForInt interface {
GetID() int64
}
type idInterfaceForString interface {
GetID() string
}
func NewDB(databaseURL string) (*DB, error) {
db, err := pika.NewPostgreSQL(databaseURL)
if err != nil {
return nil, err
}
return &DB{db}, nil
}
func NewDBArgs(keyval ...any) *orderedmap.OrderedMap[string, any] {
args := pika.NewArgs()
for i := 0; i < len(keyval); i += 2 {
args.Set(keyval[i].(string), keyval[i+1])
}
return args
}
//goland:noinspection GoExportedFuncWithUnexportedType
func Q[T any](db *DB) *innerDB[T] {
return &innerDB[T]{pika.Q[T](db.PostgreSQL), db}
}
func (inner *innerDB[T]) F(keyval ...any) Pika[T] {
var qs pika.QuerySet[T] = inner
args := pika.NewArgs()
for i := 0; i < len(keyval); i += 2 {
args.Set(keyval[i].(string), keyval[i+1])
qs = qs.Filter(fmt.Sprintf("%s=:%s", keyval[i].(string), keyval[i].(string)))
}
inner.QuerySet = qs.Args(args)
return inner
}
func (inner *innerDB[T]) Transaction(ctx context.Context) (Pika[T], error) {
ts := pika.NewPostgreSQLFromDB(inner.DB.DB())
err := ts.Begin(ctx)
if err != nil {
return nil, err
}
return &innerDB[T]{pika.Q[T](ts), inner.DB}, nil
}
func (inner *innerDB[T]) U(x any) error {
y := x.(*T)
// Check if x has GetID() method
var id any
idInterface, ok := x.(idInterfaceForInt)
if ok {
intID := idInterface.GetID()
if intID == 0 {
return fmt.Errorf("id is 0")
}
id = intID
}
idInterface2, ok := x.(idInterfaceForString)
if ok {
stringID := idInterface2.GetID()
if stringID == "" {
return fmt.Errorf("id is empty")
}
id = stringID
}
if id == nil {
// Fallback to normal Update
return inner.Update(y)
}
qs := inner.F("id", id)
return qs.Update(y)
}
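
A minimal usage sketch of the Pika wrapper above. The User model, its fields, and the connection string are illustrative; only NewDB, Q, F, and U come from this file, and pika's column-mapping conventions are assumed to apply.

// Hypothetical usage of base.Q / F / U; the User model is made up for illustration.
package main

import (
	"fmt"

	base "go.resf.org/peridot/base/go"
)

// User is an illustrative model; GetID lets U() update by primary key.
type User struct {
	ID   int64
	Name string
}

func (u *User) GetID() int64 { return u.ID }

func renameUser(db *base.DB) error {
	// U updates by primary key when the model exposes GetID (internally F("id", ...)),
	// and falls back to a plain Update otherwise.
	if err := base.Q[User](db).U(&User{ID: 42, Name: "new-name"}); err != nil {
		return fmt.Errorf("update failed: %w", err)
	}
	// F turns key/value pairs into "name=:name"-style pika filters for further queries.
	_ = base.Q[User](db).F("name", "new-name")
	return nil
}

func main() {
	db, err := base.NewDB("postgres://postgres:postgres@localhost:5432/postgres?sslmode=disable")
	if err != nil {
		base.LogFatalf("connect: %v", err)
	}
	if err := renameUser(db); err != nil {
		base.LogFatalf("%v", err)
	}
}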

142
base/go/flags.go Normal file

@@ -0,0 +1,142 @@
package base
import (
"github.com/urfave/cli/v2"
"os"
)
type EnvVar string
const (
EnvVarGRPCPort EnvVar = "GRPC_PORT"
EnvVarGatewayPort EnvVar = "GATEWAY_PORT"
EnvVarDatabaseURL EnvVar = "DATABASE_URL"
EnvVarFrontendPort EnvVar = "FRONTEND_PORT"
EnvVarFrontendOIDCIssuer EnvVar = "FRONTEND_OIDC_ISSUER"
EnvVarFrontendOIDCClientID EnvVar = "FRONTEND_OIDC_CLIENT_ID"
EnvVarFrontendOIDCClientSecret EnvVar = "FRONTEND_OIDC_CLIENT_SECRET"
EnvVarFrontendRequiredOIDCGroup EnvVar = "FRONTEND_REQUIRED_OIDC_GROUP"
EnvVarFrontendAdminOIDCGroup EnvVar = "FRONTEND_ADMIN_OIDC_GROUP"
)
var defaultCliFlags = []cli.Flag{
&cli.IntFlag{
Name: "grpc-port",
Aliases: []string{"p"},
Usage: "gRPC port",
EnvVars: []string{string(EnvVarGRPCPort)},
Value: 8080,
},
&cli.IntFlag{
Name: "gateway-port",
Aliases: []string{"g"},
Usage: "gRPC gateway port",
EnvVars: []string{string(EnvVarGatewayPort)},
Value: 8081,
},
&cli.StringFlag{
Name: "database-url",
Aliases: []string{"d"},
Usage: "database url",
EnvVars: []string{string(EnvVarDatabaseURL)},
Value: "postgres://postgres:postgres@localhost:5432/postgres?sslmode=disable",
},
}
var defaultFrontendNoAuthCliFlags = []cli.Flag{
&cli.IntFlag{
Name: "port",
Aliases: []string{"p"},
Usage: "frontend port",
EnvVars: []string{string(EnvVarFrontendPort)},
Value: 9111,
},
}
var defaultFrontendCliFlags = append(defaultFrontendNoAuthCliFlags, []cli.Flag{
&cli.StringFlag{
Name: "oidc-issuer",
Usage: "OIDC issuer",
EnvVars: []string{string(EnvVarFrontendOIDCIssuer)},
Value: "https://accounts.rockylinux.org/auth/realms/rocky",
},
&cli.StringFlag{
Name: "oidc-client-id",
Usage: "OIDC client ID",
EnvVars: []string{string(EnvVarFrontendOIDCClientID)},
},
&cli.StringFlag{
Name: "oidc-client-secret",
Usage: "OIDC client secret",
EnvVars: []string{string(EnvVarFrontendOIDCClientSecret)},
},
&cli.StringFlag{
Name: "required-oidc-group",
Usage: "OIDC group that is required to access the frontend",
EnvVars: []string{string(EnvVarFrontendRequiredOIDCGroup)},
},
}...)
var defaultFrontendAdminCliFlags = append(defaultFrontendCliFlags, []cli.Flag{
&cli.StringFlag{
Name: "admin-oidc-group",
Usage: "OIDC group that is allowed to access the admin page",
EnvVars: []string{string(EnvVarFrontendAdminOIDCGroup)},
},
}...)
// WithDefaultCliFlags adds the default cli flags to the app.
func WithDefaultCliFlags(flags ...cli.Flag) []cli.Flag {
return append(defaultCliFlags, flags...)
}
// WithDefaultFrontendNoAuthCliFlags adds the default frontend cli flags to the app.
func WithDefaultFrontendNoAuthCliFlags(flags ...cli.Flag) []cli.Flag {
return append(defaultFrontendNoAuthCliFlags, flags...)
}
// WithDefaultFrontendCliFlags adds the default frontend cli flags to the app.
func WithDefaultFrontendCliFlags(flags ...cli.Flag) []cli.Flag {
return append(defaultFrontendCliFlags, flags...)
}
// WithDefaultFrontendAdminCliFlags adds the default frontend admin cli flags to the app.
func WithDefaultFrontendAdminCliFlags(flags ...cli.Flag) []cli.Flag {
return append(defaultFrontendAdminCliFlags, flags...)
}
// FlagsToGRPCServerOptions converts the cli flags to gRPC server options.
func FlagsToGRPCServerOptions(ctx *cli.Context) []GRPCServerOption {
return []GRPCServerOption{
WithGRPCPort(ctx.Int("grpc-port")),
WithGatewayPort(ctx.Int("gateway-port")),
}
}
// GetDBFromFlags gets the database from the cli flags.
func GetDBFromFlags(ctx *cli.Context) *DB {
db, err := NewDB(ctx.String("database-url"))
if err != nil {
LogFatalf("failed to create database: %v", err)
}
return db
}
// ChangeDefaultForEnvVar changes the default value of a flag based on an environment variable.
func ChangeDefaultForEnvVar(envVar EnvVar, newDefault string) {
// Check if the environment variable is set.
if _, ok := os.LookupEnv(string(envVar)); ok {
return
}
// Change the default value.
if err := os.Setenv(string(envVar), newDefault); err != nil {
LogFatalf("failed to set environment variable %s: %v", envVar, err)
}
}
// ChangeDefaultDatabaseURL changes the default value of the database url based on an environment variable.
func ChangeDefaultDatabaseURL(appName string) {
ChangeDefaultForEnvVar(EnvVarDatabaseURL, "postgres://postgres:postgres@localhost:5432/"+appName+"?sslmode=disable")
}
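
A minimal sketch of wiring these helpers into a urfave/cli app; the app name is a placeholder, and the server helpers come from grpc.go in this same package.

package main

import (
	"os"

	"github.com/urfave/cli/v2"
	base "go.resf.org/peridot/base/go"
)

func main() {
	// Point the default database URL at a per-app database unless DATABASE_URL is already set.
	base.ChangeDefaultDatabaseURL("exampleapp") // "exampleapp" is a placeholder app name

	app := &cli.App{
		Name:  "exampleapp",
		Flags: base.WithDefaultCliFlags(), // grpc-port, gateway-port, database-url
		Action: func(ctx *cli.Context) error {
			db := base.GetDBFromFlags(ctx) // calls LogFatalf on connection failure
			_ = db
			srv, err := base.NewGRPCServer(base.FlagsToGRPCServerOptions(ctx)...)
			if err != nil {
				return err
			}
			return srv.Start()
		},
	}
	if err := app.Run(os.Args); err != nil {
		base.LogFatalf("%v", err)
	}
}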

113
base/go/frontend_server.go Normal file

@@ -0,0 +1,113 @@
package base
import (
"crypto/sha256"
"encoding/hex"
"fmt"
"mime"
"net/http"
"path/filepath"
"strconv"
"strings"
)
type FrontendInfo struct {
// Title to add to the HTML page
Title string
}
var frontendHtmlTemplate = `
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8" />
<meta
name="viewport"
content="width=device-width, initial-scale=1, viewport-fit=cover"
/>
<title>{{.Title}}</title>
<link rel="preconnect" href="https://fonts.googleapis.com" />
<link rel="preconnect" href="https://fonts.gstatic.com" crossorigin />
<link
rel="stylesheet"
href="https://fonts.googleapis.com/css2?family=Roboto:wght@300;400;500;600;700&display=swap"
/>
</head>
<body style="background-color:#f1f5f9;">
<div id="app"></div>
<script src="{{.BundleJS}}"></script>
</body>
</html>
`
func FrontendServer(info *FrontendInfo, kv ...string) {
port := 9111
if info == nil {
info = &FrontendInfo{}
}
newTemplate := frontendHtmlTemplate
// Set the title
if info.Title == "" {
info.Title = "Peridot"
}
newTemplate = strings.ReplaceAll(newTemplate, "{{.Title}}", info.Title)
pathToContent := map[string]string{}
// KV is a list of key-value pairs, where the key is the alias and the value
// is the actual file content.
for i := 0; i < len(kv); i += 2 {
content := kv[i+1]
// Sha256 hash of the content to add to name
hash := sha256.New()
hash.Write([]byte(content))
hashSum := hex.EncodeToString(hash.Sum(nil))
ext := filepath.Ext(kv[i])
noExtName := kv[i][:len(kv[i])-len(ext)]
path := fmt.Sprintf("/_ga/%s.%s%s", noExtName, hashSum[:8], ext)
pathToContent[path] = content
// If name is bundle.js, replace the template
if kv[i] == "bundle.js" {
newTemplate = strings.ReplaceAll(newTemplate, "{{.BundleJS}}", path)
}
}
// Log the paths
LogInfof("frontend server paths:")
for path := range pathToContent {
LogInfof(" %s", path)
}
// Serve the content
http.HandleFunc("/_ga/", func(w http.ResponseWriter, r *http.Request) {
mimeType := mime.TypeByExtension(filepath.Ext(r.URL.Path))
if mimeType == "" {
mimeType = "application/octet-stream"
}
w.Header().Set("Content-Type", mimeType)
_, _ = w.Write([]byte(pathToContent[r.URL.Path]))
})
http.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Content-Type", "text/html")
_, _ = w.Write([]byte(newTemplate))
})
// Handle other _ga meta routes
http.HandleFunc("/_ga/healthz", func(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Content-Type", "text/plain")
_, _ = w.Write([]byte("ok"))
})
LogInfof("starting frontend server on port %d", port)
if err := http.ListenAndServe(":"+strconv.Itoa(port), nil); err != nil {
LogFatalf("failed to start frontend server: %v", err)
}
}
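
A usage sketch for the server above. The embedded file path and title are illustrative; bundle.js is assumed to be produced by the frontend build.

package main

import (
	_ "embed"

	base "go.resf.org/peridot/base/go"
)

// bundle.js is assumed to come from the frontend build and sit next to this file.
//
//go:embed bundle.js
var bundleJS string

func main() {
	// Serves "/" with the HTML shell and the bundle under a content-hashed
	// /_ga/bundle.<sha256-prefix>.js path, then blocks on ListenAndServe.
	base.FrontendServer(&base.FrontendInfo{Title: "Example"}, "bundle.js", bundleJS)
}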

258
base/go/grpc.go Normal file

@@ -0,0 +1,258 @@
package base
import (
"context"
"errors"
grpc_middleware "github.com/grpc-ecosystem/go-grpc-middleware"
grpc_prometheus "github.com/grpc-ecosystem/go-grpc-prometheus"
"github.com/grpc-ecosystem/grpc-gateway/v2/runtime"
"github.com/prometheus/client_golang/prometheus/promhttp"
"google.golang.org/grpc"
"google.golang.org/grpc/credentials/insecure"
"net"
"net/http"
"strconv"
"sync"
"time"
)
type GRPCServer struct {
server *grpc.Server
gatewayMux *runtime.ServeMux
gatewayClientConn *grpc.ClientConn
serverOptions []grpc.ServerOption
dialOptions []grpc.DialOption
muxOptions []runtime.ServeMuxOption
outgoingHeaders []string
incomingHeaders []string
unaryInterceptors []grpc.UnaryServerInterceptor
streamInterceptors []grpc.StreamServerInterceptor
timeout time.Duration
grpcPort int
gatewayPort int
noGrpcGateway bool
}
type GrpcEndpointRegister func(ctx context.Context, mux *runtime.ServeMux, conn *grpc.ClientConn) error
type GRPCServerOption func(*GRPCServer)
// WithServerOptions sets the gRPC server options. (Append)
func WithServerOptions(opts ...grpc.ServerOption) GRPCServerOption {
return func(g *GRPCServer) {
g.serverOptions = append(g.serverOptions, opts...)
}
}
// WithDialOptions sets the gRPC dial options. (Append)
func WithDialOptions(opts ...grpc.DialOption) GRPCServerOption {
return func(g *GRPCServer) {
g.dialOptions = append(g.dialOptions, opts...)
}
}
// WithMuxOptions sets the gRPC-gateway mux options. (Append)
func WithMuxOptions(opts ...runtime.ServeMuxOption) GRPCServerOption {
return func(g *GRPCServer) {
g.muxOptions = append(g.muxOptions, opts...)
}
}
// WithOutgoingHeaders sets the outgoing headers for the gRPC-gateway. (Append)
func WithOutgoingHeaders(headers ...string) GRPCServerOption {
return func(g *GRPCServer) {
g.outgoingHeaders = append(g.outgoingHeaders, headers...)
}
}
// WithIncomingHeaders sets the incoming headers for the gRPC-gateway. (Append)
func WithIncomingHeaders(headers ...string) GRPCServerOption {
return func(g *GRPCServer) {
g.incomingHeaders = append(g.incomingHeaders, headers...)
}
}
// WithUnaryInterceptors sets the gRPC unary interceptors. (Append)
func WithUnaryInterceptors(interceptors ...grpc.UnaryServerInterceptor) GRPCServerOption {
return func(g *GRPCServer) {
g.unaryInterceptors = append(g.unaryInterceptors, interceptors...)
}
}
// WithStreamInterceptors sets the gRPC stream interceptors. (Append)
func WithStreamInterceptors(interceptors ...grpc.StreamServerInterceptor) GRPCServerOption {
return func(g *GRPCServer) {
g.streamInterceptors = append(g.streamInterceptors, interceptors...)
}
}
// WithTimeout sets the timeout for the gRPC server.
func WithTimeout(timeout time.Duration) GRPCServerOption {
return func(g *GRPCServer) {
g.timeout = timeout
}
}
// WithGRPCPort sets the gRPC port for the gRPC server.
func WithGRPCPort(port int) GRPCServerOption {
return func(g *GRPCServer) {
g.grpcPort = port
}
}
// WithGatewayPort sets the gRPC-gateway port for the gRPC server.
func WithGatewayPort(port int) GRPCServerOption {
return func(g *GRPCServer) {
g.gatewayPort = port
}
}
// WithNoGRPCGateway disables the gRPC-gateway for the gRPC server.
func WithNoGRPCGateway() GRPCServerOption {
return func(g *GRPCServer) {
g.noGrpcGateway = true
}
}
// NewGRPCServer creates a new gRPC-server with gRPC-gateway, default interceptors
// and exposed Prometheus metrics.
func NewGRPCServer(opts ...GRPCServerOption) (*GRPCServer, error) {
g := &GRPCServer{
serverOptions: []grpc.ServerOption{},
dialOptions: []grpc.DialOption{},
muxOptions: []runtime.ServeMuxOption{},
outgoingHeaders: []string{},
incomingHeaders: []string{},
unaryInterceptors: []grpc.UnaryServerInterceptor{},
streamInterceptors: []grpc.StreamServerInterceptor{},
}
// Apply options first
for _, opt := range opts {
opt(g)
}
// Set defaults
if g.timeout == 0 {
g.timeout = 10 * time.Second
}
if g.grpcPort == 0 {
g.grpcPort = 8080
}
if g.gatewayPort == 0 {
g.gatewayPort = g.grpcPort + 1
}
// Always prepend the insecure dial option
// RESF deploys with Istio, which handles mTLS
g.dialOptions = append([]grpc.DialOption{grpc.WithTransportCredentials(insecure.NewCredentials())}, g.dialOptions...)
// Set default interceptors
if g.unaryInterceptors == nil {
g.unaryInterceptors = []grpc.UnaryServerInterceptor{}
}
if g.streamInterceptors == nil {
g.streamInterceptors = []grpc.StreamServerInterceptor{}
}
// Always prepend the prometheus interceptor
g.unaryInterceptors = append([]grpc.UnaryServerInterceptor{grpc_prometheus.UnaryServerInterceptor}, g.unaryInterceptors...)
g.streamInterceptors = append([]grpc.StreamServerInterceptor{grpc_prometheus.StreamServerInterceptor}, g.streamInterceptors...)
// Chain the interceptors
g.serverOptions = append(g.serverOptions, grpc.UnaryInterceptor(grpc_middleware.ChainUnaryServer(g.unaryInterceptors...)))
g.serverOptions = append(g.serverOptions, grpc.StreamInterceptor(grpc_middleware.ChainStreamServer(g.streamInterceptors...)))
g.server = grpc.NewServer(g.serverOptions...)
if !g.noGrpcGateway {
g.gatewayMux = runtime.NewServeMux(g.muxOptions...)
// Create gateway client connection
var err error
g.gatewayClientConn, err = grpc.Dial("localhost:"+strconv.Itoa(g.grpcPort), g.dialOptions...)
if err != nil {
return nil, err
}
}
return g, nil
}
func (g *GRPCServer) RegisterService(register func(*grpc.Server)) {
register(g.server)
}
func (g *GRPCServer) GatewayEndpoints(registerEndpoints ...GrpcEndpointRegister) error {
if g.noGrpcGateway {
return errors.New("gRPC-gateway is disabled")
}
for _, register := range registerEndpoints {
if err := register(context.Background(), g.gatewayMux, g.gatewayClientConn); err != nil {
return err
}
}
return nil
}
func (g *GRPCServer) Start() error {
// Create gRPC listener
grpcLis, err := net.Listen("tcp", ":"+strconv.Itoa(g.grpcPort))
if err != nil {
return err
}
var wg sync.WaitGroup
// First start the gRPC server
wg.Add(1)
go func(wg *sync.WaitGroup) {
defer wg.Done()
LogInfof("gRPC server listening on port " + strconv.Itoa(g.grpcPort))
grpc_prometheus.Register(g.server)
err := g.server.Serve(grpcLis)
if err != nil {
LogFatalf("gRPC server failed to serve: %v", err.Error())
}
LogInfof("gRPC server stopped")
}(&wg)
// Then start the gRPC-gateway
if !g.noGrpcGateway {
wg.Add(1)
go func(wg *sync.WaitGroup) {
defer wg.Done()
LogInfof("gRPC-gateway listening on port " + strconv.Itoa(g.gatewayPort))
err := http.ListenAndServe(":"+strconv.Itoa(g.gatewayPort), g.gatewayMux)
if err != nil {
LogFatalf("gRPC-gateway failed to serve: %v", err.Error())
}
LogInfof("gRPC-gateway stopped")
}(&wg)
}
// Serve the Prometheus metrics mux
wg.Add(1)
go func(wg *sync.WaitGroup) {
defer wg.Done()
promMux := http.NewServeMux()
promMux.Handle("/metrics", promhttp.Handler())
err := http.ListenAndServe(":7332", promMux)
if err != nil {
LogFatalf("Prometheus mux failed to serve: %v", err.Error())
}
}(&wg)
wg.Wait()
return nil
}
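
A sketch of the intended wiring. examplepb stands in for a generated protobuf package; its Register* and Unimplemented* symbols are hypothetical, while the base helpers are the ones defined above.

package main

import (
	"google.golang.org/grpc"

	base "go.resf.org/peridot/base/go"
	// examplepb is a stand-in for a generated package providing
	// RegisterExampleServer, RegisterExampleHandler and UnimplementedExampleServer.
	examplepb "go.resf.org/peridot/example/pb"
)

type exampleService struct {
	examplepb.UnimplementedExampleServer
}

func main() {
	srv, err := base.NewGRPCServer(
		base.WithGRPCPort(8080),
		base.WithGatewayPort(8081),
	)
	if err != nil {
		base.LogFatalf("failed to create server: %v", err)
	}
	// Register the gRPC service implementation on the underlying *grpc.Server.
	srv.RegisterService(func(s *grpc.Server) {
		examplepb.RegisterExampleServer(s, &exampleService{})
	})
	// Register the grpc-gateway handler (matches the GrpcEndpointRegister signature).
	if err := srv.GatewayEndpoints(examplepb.RegisterExampleHandler); err != nil {
		base.LogFatalf("failed to register gateway endpoints: %v", err)
	}
	// Blocks: serves gRPC, the gateway, and Prometheus metrics on :7332.
	if err := srv.Start(); err != nil {
		base.LogFatalf("server exited: %v", err)
	}
}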

49
base/go/log.go Normal file

@@ -0,0 +1,49 @@
package base
import (
"fmt"
"log"
"strings"
)
type LogLevel string
const (
LogLevelDebug LogLevel = "DEBUG"
LogLevelInfo LogLevel = "INFO"
LogLevelWarn LogLevel = "WARN"
LogLevelError LogLevel = "ERROR"
LogLevelFatal LogLevel = "FATAL"
)
func Logf(level LogLevel, format string, args ...interface{}) {
if !strings.HasPrefix(format, fmt.Sprintf("[%s]", level)) {
format = fmt.Sprintf("[%s]: %s", level, format)
}
if level == LogLevelFatal {
log.Fatalf(format, args...)
} else {
log.Printf(format, args...)
}
}
func LogErrorf(format string, args ...interface{}) {
Logf(LogLevelError, format, args...)
}
func LogWarnf(format string, args ...interface{}) {
Logf(LogLevelWarn, format, args...)
}
func LogInfof(format string, args ...interface{}) {
Logf(LogLevelInfo, format, args...)
}
func LogDebugf(format string, args ...interface{}) {
Logf(LogLevelDebug, format, args...)
}
func LogFatalf(format string, args ...interface{}) {
Logf(LogLevelFatal, format, args...)
}
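
A minimal sketch of the resulting output (timestamps come from the standard log package):

package main

import base "go.resf.org/peridot/base/go"

func main() {
	// Messages get a "[LEVEL]: " prefix unless they already carry one.
	base.LogInfof("starting %s", "worker")  // -> ... [INFO]: starting worker
	base.LogWarnf("disk usage at %d%%", 91) // -> ... [WARN]: disk usage at 91%
}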

5
base/go/pointer.go Normal file

@@ -0,0 +1,5 @@
package base
func Pointer[T any](v T) *T {
return &v
}

11
base/go/slice.go Normal file

@@ -0,0 +1,11 @@
package base
func Contains[T comparable](s []T, e T) bool {
for _, a := range s {
if a == e {
return true
}
}
return false
}

9
base/ts/BUILD.bazel Normal file

@@ -0,0 +1,9 @@
load("@aspect_rules_swc//swc:defs.bzl", "swc")
swc(
name = "ts",
srcs = glob([
"*.tsx",
"*.ts",
]),
)

23
base/ts/mui/BUILD.bazel Normal file

@@ -0,0 +1,23 @@
load("@aspect_rules_swc//swc:defs.bzl", "swc")
load("@aspect_rules_js//js:defs.bzl", "js_library")
swc(
name = "lib",
srcs = glob([
"*.tsx",
"*.ts",
]),
data = [
"//:node_modules/@mui/material",
"//:node_modules/react",
"//:node_modules/react-dom",
"//:node_modules/tslib",
],
visibility = ["//visibility:private"],
)
js_library(
name = "mui",
srcs = [":lib"],
visibility = ["//visibility:public"],
)

71
base/ts/mui/Drawer.tsx Normal file

@@ -0,0 +1,71 @@
import React from 'react';
import Box from '@mui/material/Box';
import Toolbar from '@mui/material/Toolbar';
import List from '@mui/material/List';
import MuiDrawer from '@mui/material/Drawer';
import ListItem from '@mui/material/ListItem';
import ListItemButton from '@mui/material/ListItemButton';
import ListItemIcon from '@mui/material/ListItemIcon';
import ListItemText from '@mui/material/ListItemText';
import ListSubheader from '@mui/material/ListSubheader';
export interface DrawerLink {
text: string;
href: string;
external?: boolean;
icon?: React.ReactNode;
}
export interface DrawerSection {