Compare commits

..

1 commit

Author SHA1 Message Date
Matthieu cbaa166548 Add .circleci/config.yml 2023-01-17 18:33:56 +04:00
45 changed files with 4564 additions and 945 deletions

View file

@ -1,2 +1,2 @@
[net]
git-fetch-with-cli = true
git-fetch-with-cli = true

26
.circleci/config.yml Normal file
View file

@ -0,0 +1,26 @@
# Use the latest 2.1 version of CircleCI pipeline process engine.
# See: https://circleci.com/docs/2.0/configuration-reference
version: 2.1
# Define a job to be invoked later in a workflow.
# See: https://circleci.com/docs/2.0/configuration-reference/#jobs
jobs:
say-hello:
# Specify the execution environment. You can specify an image from Dockerhub or use one of our Convenience Images from CircleCI's Developer Hub.
# See: https://circleci.com/docs/2.0/configuration-reference/#docker-machine-macos-windows-executor
docker:
- image: cimg/base:stable
# Add steps to the job
# See: https://circleci.com/docs/2.0/configuration-reference/#steps
steps:
- checkout
- run:
name: "Say hello"
command: "echo Hello, World!"
# Invoke jobs via workflows
# See: https://circleci.com/docs/2.0/configuration-reference/#workflows
workflows:
say-hello-workflow:
jobs:
- say-hello

View file

@ -1,3 +1,2 @@
target/
docs/
bin/
docs/

12
.env
View file

@ -1 +1,11 @@
TAG=amd64
GRAFANA_SERVICE_PORT=3000
GRAFANA_SERVICE_HOST=grafana
# Jaeger
JAEGER_SERVICE_PORT=16686
JAEGER_SERVICE_HOST=jaeger
# Prometheus
PROMETHEUS_SERVICE_PORT=9090
PROMETHEUS_SERVICE_HOST=prometheus
PROMETHEUS_ADDR=${PROMETHEUS_SERVICE_HOST}:${PROMETHEUS_SERVICE_PORT}

View file

@ -66,27 +66,16 @@ jobs:
name: macos
path: build/*
build_linux:
name: 'Build for Linux'
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
arch:
- aarch64-unknown-linux-gnu
- aarch64-unknown-linux-musl
- armv7-unknown-linux-gnueabi
- armv7-unknown-linux-gnueabihf
- armv7-unknown-linux-musleabi
- armv7-unknown-linux-musleabihf
- x86_64-unknown-linux-gnu
- x86_64-unknown-linux-musl
- x86_64-pc-windows-gnu
build_windows:
name: 'Build for Windows'
runs-on: windows-latest
defaults:
run:
shell: msys2 {0}
steps:
- uses: actions/checkout@v2
- uses: ATiltedTree/setup-rust@v1
with:
rust-version: stable
- uses: actions/cache@v3
with:
path: |
@ -95,16 +84,76 @@ jobs:
~/.cargo/registry/cache/
~/.cargo/git/db/
target/
key: ${{ runner.os }}-cargo-${{ matrix.arch }}
key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }}
- uses: msys2/setup-msys2@v2
with:
install: git mingw-w64-x86_64-go mingw-w64-x86_64-make mingw-w64-x86_64-protobuf mingw-w64-x86_64-rust mingw-w64-x86_64-gcc mingw-w64-x86_64-dlfcn
- name: Build all
run: |
cargo install cross --force
export CROSS_CONTAINER_ENGINE_NO_BUILDKIT=1
cross build --release --target ${{ matrix.arch }}
mkdir -p ./build
cp target/${{ matrix.arch }}/release/* ./build/ || true
rm ./build/*.{d,rlib}
export CGO_LDFLAGS='-lntdll -lWs2_32 -lcrypt32 -lSecur32 -luserenv -lNcrypt -lbcrypt'
mingw32-make.exe all
- uses: actions/upload-artifact@v3
with:
name: release-${{ matrix.arch }}
name: windows
path: build/*
build_linux:
name: 'Build for Linux'
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/cache@v3
with:
path: |
~/.cargo/bin/
~/.cargo/registry/index/
~/.cargo/registry/cache/
~/.cargo/git/db/
target/
key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }}
- uses: arduino/setup-protoc@v1
with:
repo-token: ${{ secrets.GITHUB_TOKEN }}
- uses: ATiltedTree/setup-rust@v1
with:
rust-version: stable
- uses: actions/setup-go@v3
with:
go-version: '1.18.4'
- name: Build all
run: |
make all
- uses: actions/upload-artifact@v3
with:
name: linux
path: build/*
release:
if: github.event_name == 'push' && contains(github.ref, 'refs/tags/')
runs-on: ubuntu-latest
permissions:
contents: write
steps:
- uses: actions/download-artifact@v3
with:
name: windows
path: windows
- uses: actions/download-artifact@v3
with:
name: linux_glibc
path: linux_glibc
- uses: actions/download-artifact@v3
with:
name: macos
path: macos
- uses: ncipollo/release-action@v1
with:
artifacts: "windows/*,macos/*,linux_glibc/*"
draft: true
generateReleaseNotes: true
skipIfReleaseExists: true

View file

@ -11,13 +11,6 @@ env:
jobs:
bake:
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
platform:
- linux/amd64
- linux/arm64/v8
- linux/arm/v7
steps:
- name: Checkout
uses: actions/checkout@v3
@ -29,7 +22,7 @@ jobs:
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
with:
platforms: linux/amd64,linux/arm64/v8,linux/arm/v7
platforms: linux/amd64,linux/arm64
- name: Log into registry ${{ env.REGISTRY }}
uses: docker/login-action@28218f9b04b4f3f62068d7b6ce6ca5b26e35336c
@ -37,11 +30,6 @@ jobs:
registry: ${{ env.REGISTRY }}
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Create tag
run: |
export TAG="${{ matrix.platform }}"
export TAG=${TAG/linux\//}
echo -n "TAG=${TAG//\//-}" > .env
- name: Build and push
uses: docker/bake-action@v2
with:
@ -49,4 +37,3 @@ jobs:
set: |
*.cache-from=type=gha
*.cache-to=type=gha,mode=max
*.platform=${{ matrix.platform }}

1
.gitignore vendored
View file

@ -8,4 +8,3 @@ config.yml
config/*
build/
*.yml
bin/

1264
Cargo.lock generated

File diff suppressed because it is too large Load diff

View file

@ -1,39 +1,40 @@
[workspace]
members = [
"exes/cache/",
"exes/gateway/",
"exes/rest/",
"exes/webhook/",
"exes/ratelimit/",
"libs/all_in_one/",
"libs/proto/",
"libs/shared/",
"libs/leash/"
]
resolver = "2"
[workspace.dependencies]
shared = { path = "libs/shared" }
proto = { path = "libs/proto" }
leash = { path = "libs/leash" }
tokio = { version = "1", features = ["rt-multi-thread", "signal"] }
serde = { version = "1", features = ["derive"] }
serde_json = "1"
hyper = "0.14.27"
anyhow = "1"
tracing = "0.1"
tracing-futures = "0.2"
tracing-opentelemetry = "0.19"
opentelemetry = { version = "0.19", features = ["rt-tokio"] }
opentelemetry-http = "0.8"
criterion = { version = "0.5", features = ["async_tokio"] }
tokio-test = "0.4.2"
tracing-test = "0.2.4"
tracing-subscriber = "0.3.17"
test-log = { version = "*", features = ["trace"] }
env_logger = "0.10.0"
[workspace]
members = [
"exes/cache/",
"exes/gateway/",
"exes/rest/",
"exes/webhook/",
"exes/ratelimit/",
"libs/all_in_one/",
"libs/proto/",
"libs/shared/",
"libs/leash/"
]
[workspace.dependencies]
shared = { path = "libs/shared" }
proto = { path = "libs/proto" }
leash = { path = "libs/leash" }
tokio = { version = "1", features = ["rt-multi-thread"] }
serde = { version = "1", features = ["derive"] }
serde_json = "1"
hyper = "0.14.23"
anyhow = "1"
tracing = "0.1"
tracing-futures = "0.2"
tracing-opentelemetry = "0.18"
opentelemetry = { version = "0.18", features = ["rt-tokio"] }
opentelemetry-http = "0.7"
tikv-jemallocator = "0.5"
criterion = { version = "0.4", features = ["async_tokio"] }
tokio-test = "0.4.2"
tracing-test = "0.2.3"
tracing-subscriber = "0.3.16"
test-log = { version = "*", features = ["trace"] }
env_logger = "0.10.0"

View file

@ -1,6 +0,0 @@
[build]
pre-build = [
"dpkg --add-architecture $CROSS_DEB_ARCH",
"apt-get update && apt-get --assume-yes install libc6-dev",
]
dockerfile = "Dockerfile.cross"

View file

@ -1,27 +1,33 @@
# syntax=docker/dockerfile:1
FROM --platform=$BUILDPLATFORM tonistiigi/xx:master AS xx
FROM --platform=$BUILDPLATFORM rust:alpine as alpine_rbuild
RUN apk add clang lld protobuf-dev build-base git
# Copy the xx scripts
COPY --from=xx / /
# Copy source code
FROM rust AS chef
USER root
COPY .cargo .cargo
RUN cargo install cargo-chef
RUN apt-get update && apt-get install -y protobuf-compiler
WORKDIR /app
# Planning install
FROM chef AS planner
COPY . .
RUN cargo chef prepare --recipe-path recipe.json
RUN --mount=type=cache,target=/root/.cargo/git/db \
--mount=type=cache,target=/root/.cargo/registry/cache \
--mount=type=cache,target=/root/.cargo/registry/index \
cargo fetch
ARG TARGETPLATFORM
RUN --mount=type=cache,target=/root/.cargo/git/db \
--mount=type=cache,target=/root/.cargo/registry/cache \
--mount=type=cache,target=/root/.cargo/registry/index \
xx-cargo build --release --target-dir ./build
# Building all targets
FROM chef AS builder
COPY --from=planner /app/recipe.json recipe.json
#Copy from the build/<target triple>/release folder to the out folder
RUN mkdir ./out && cp ./build/*/release/* ./out || true
# Notice that we are specifying the --target flag!
RUN cargo chef cook --release --recipe-path recipe.json
COPY . .
RUN cargo build --release
FROM alpine AS runtime
# Base os
FROM debian:latest AS runtime-base
# RUN addgroup -S nova && adduser -S nova -G nova
RUN apt-get update && apt-get install ca-certificates -y
# Final os
FROM runtime-base AS runtime
ARG COMPONENT
ENV COMPONENT=${COMPONENT}
COPY --from=alpine_rbuild /out/${COMPONENT} /usr/local/bin/
COPY --from=builder /app/target/release/${COMPONENT} /usr/local/bin/
# USER nova
ENTRYPOINT /usr/local/bin/${COMPONENT}

View file

@ -1,13 +0,0 @@
ARG CROSS_BASE_IMAGE
FROM debian as db
WORKDIR /dl
RUN apt-get update && apt-get install -y curl zip
RUN curl -OL https://github.com/protocolbuffers/protobuf/releases/download/v21.12/protoc-21.12-linux-x86_64.zip && \
unzip protoc-21.12-linux-x86_64.zip -d protoc3
FROM $CROSS_BASE_IMAGE
COPY --from=db /dl/protoc3 /dl/
RUN mv /dl/bin/* /usr/local/bin/ && \
mv /dl/include/* /usr/local/include/

View file

@ -14,10 +14,14 @@ services:
mock:
image: nginx
cache:
image: ghcr.io/discordnova/nova/cache:${TAG:-latest}
image: ghcr.io/discordnova/nova/cache
restart: always
build:
context: .
x-bake:
platforms:
- linux/amd64
- linux/arm64
args:
- COMPONENT=cache
volumes:
@ -30,12 +34,16 @@ services:
- otelcol
gateway:
image: ghcr.io/discordnova/nova/gateway:${TAG:-latest}
image: ghcr.io/discordnova/nova/gateway
restart: always
build:
context: .
args:
- COMPONENT=gateway
x-bake:
platforms:
- linux/amd64
- linux/arm64
volumes:
- ./config/default.yml:/config/default.yml
environment:
@ -45,12 +53,16 @@ services:
- otelcol
rest:
image: ghcr.io/discordnova/nova/rest:${TAG:-latest}
image: ghcr.io/discordnova/nova/rest
restart: always
build:
context: .
args:
- COMPONENT=rest
x-bake:
platforms:
- linux/amd64
- linux/arm64
volumes:
- ./config/default.yml:/config/default.yml
environment:
@ -63,12 +75,17 @@ services:
- 8090:8090
webhook:
image: ghcr.io/discordnova/nova/webhook:${TAG:-latest}
image: ghcr.io/discordnova/nova/webhook
restart: always
build:
context: .
args:
- RUST_LOG=debug
- COMPONENT=webhook
x-bake:
platforms:
- linux/amd64
- linux/arm64
volumes:
- ./config/default.yml:/config/default.yml
environment:
@ -80,12 +97,16 @@ services:
- 9002:9000
- 8091:8091
ratelimit:
image: ghcr.io/discordnova/nova/ratelimit:${TAG:-latest}
image: ghcr.io/discordnova/nova/ratelimit
restart: always
build:
context: .
args:
- COMPONENT=ratelimit
x-bake:
platforms:
- linux/amd64
- linux/arm64
volumes:
- ./config/default.yml:/config/default.yml
environment:
@ -113,6 +134,7 @@ services:
memory: 275M
restart: always
ports:
- "${JAEGER_SERVICE_PORT}:${JAEGER_SERVICE_PORT}" # Jaeger UI
- "4317" # OTLP gRPC default port
environment:
- COLLECTOR_OTLP_ENABLED=true
@ -126,7 +148,7 @@ services:
- ./otel/grafana/grafana.ini:/etc/grafana/grafana.ini
- ./otel/grafana/provisioning/:/etc/grafana/provisioning/
ports:
- "3000:3000"
- "${GRAFANA_SERVICE_PORT}:${GRAFANA_SERVICE_PORT}"
# OpenTelemetry Collector
otelcol:
@ -162,3 +184,5 @@ services:
- --web.route-prefix=/
volumes:
- ./otel/prometheus/prometheus-config.yaml:/etc/prometheus/prometheus-config.yaml
ports:
- "${PROMETHEUS_SERVICE_PORT}:${PROMETHEUS_SERVICE_PORT}"

42
docs/architecture.md Normal file
View file

@ -0,0 +1,42 @@
# Nova architecture
The nova architecture is composed of multiple components. Each of them is horizontally scale-able.
```
┌──────────────────┐
│ │
┌─────────────┤ Discord API ├──────────────┐
│ │ │ │
│ └────────┬─────────┘ │
│ │ │
│ │ │
│ │ │
┌─────────┴────────┐ ┌────────┴─────────┐ ┌─────────┴────────┐
│ │ │ │ │ │
│ Rest Proxy │ │ Gateway client │ │ Webhook Server │
│ │ │ │ │ │
└─────────┬──┬─────┘ └────────┬─────────┘ └─────────┬────────┘
│ │ │ │
│ │ │ │
│ │ │ │
│ │ │ │
│ │ │ │
│ │ │ │
│ └───────┐ │ │
┌────────────────┐ ┌────────┴───────┐ │ ┌───────┴────────┐ │
│ │ │ │ │ │ ├───────────────┘
│ Redis ├───┤ Ratelimit │ │ │ Nats broker │
│ │ │ │ │ │ ├──────────────────┐
└────────────────┘ └────────────────┘ │ └───────┬────────┘ │
│ │ │
│ │ │
│ ┌───────┴────────┐ ┌──────┴─────┐
│ │ │ │ │
│ │ Cache manager ├───────────┤ User │
│ │ │ │ │
│ └────────────────┘ └──────┬─────┘
└───────────────────────────────────────┘
```
## Rest Proxy

10
docs/build.md Normal file
View file

@ -0,0 +1,10 @@
# Building nova
## Linux
In order to build nova you need a few tools
* Rust & Cargo
* Go
* Protoc
*

2
docs/quickstart.md Normal file
View file

@ -0,0 +1,2 @@
# 5 Minutes quickstart

12
exes/cache/Cargo.toml vendored
View file

@ -14,13 +14,13 @@ shared = { path = "../../libs/shared" }
proto = { path = "../../libs/proto" }
tokio = { version = "1", features = ["rt"] }
tokio-stream = "0.1.14"
tokio-stream = "0.1.11"
serde = { version = "1.0.166", features = ["derive"] }
serde = { version = "1.0.8", features = ["derive"] }
serde_json = { version = "1.0" }
async-nats = "0.29.0"
twilight-model = "0.15.2"
anyhow = "1.0.71"
async-nats = "0.26.0"
twilight-model = "0.14"
anyhow = "1.0.68"
tracing = "0.1.37"
tracing = "0.1.37"

View file

@ -17,8 +17,11 @@ anyhow = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }
tokio-stream = "0.1.14"
twilight-gateway = { default-features = false, features = ["rustls-webpki-roots"], version = "0.15.2" }
twilight-model = "0.15.2"
bytes = "1.4.0"
async-nats = "0.29.0"
tokio-stream = "0.1.11"
twilight-gateway = { version = "0.14" }
twilight-model = "0.14"
bytes = "1.3.0"
async-nats = "0.26.0"
[target.'cfg(not(target_os = "windows"))'.dependencies]
tikv-jemallocator = { workspace = true }

View file

@ -20,8 +20,9 @@ use shared::{
};
use std::{convert::TryFrom, future::Future, pin::Pin, str::FromStr};
use tokio::{select, sync::oneshot};
use tokio_stream::StreamExt;
use tracing_opentelemetry::OpenTelemetrySpanExt;
use twilight_gateway::{Event, Shard, ShardId};
use twilight_gateway::{Event, Shard};
pub mod config;
use tracing::{debug, error, info, info_span, instrument, Instrument};
use twilight_model::gateway::event::DispatchEvent;
@ -46,32 +47,25 @@ impl Component for GatewayServer {
mut stop: oneshot::Receiver<()>,
) -> AnyhowResultFuture<()> {
Box::pin(async move {
let mut shard = Shard::new(
ShardId::new(settings.shard, settings.shard_total),
settings.token.clone(),
settings.intents,
);
let (shard, mut events) = Shard::builder(settings.token.clone(), settings.intents)
.shard(settings.shard, settings.shard_total)?
.build();
let nats = Into::<Pin<Box<dyn Future<Output = anyhow::Result<Client>> + Send>>>::into(
settings.nats,
)
.await?;
shard.start().await?;
loop {
select! {
event = shard.next_event() => {
match event {
Ok(event) => {
let _ = handle_event(event, &nats)
.await
.map_err(|err| error!(error = ?err, "event publish failed"));
},
Err(source) => {
if source.is_fatal() {
break;
}
continue;
}
event = events.next() => {
if let Some(event) = event {
let _ = handle_event(event, &nats)
.await
.map_err(|err| error!(error = ?err, "event publish failed"));
} else {
break
}
},
_ = (&mut stop) => break
@ -79,6 +73,8 @@ impl Component for GatewayServer {
}
info!("stopping shard...");
shard.shutdown();
Ok(())
})
}

View file

@ -1,4 +1,11 @@
use gateway::GatewayServer;
use leash::ignite;
#[cfg(not(target_os = "windows"))]
use tikv_jemallocator::Jemalloc;
#[cfg(not(target_os = "windows"))]
#[global_allocator]
static GLOBAL: Jemalloc = Jemalloc;
ignite!(GatewayServer);

View file

@ -20,10 +20,10 @@ serde = { workspace = true }
serde_json = { workspace = true }
hyper = { workspace = true }
twilight-http-ratelimiting = "0.15.1"
tonic = "0.9.2"
tokio-stream = "0.1.14"
redis = { version = "0.23.0", features = ["cluster", "connection-manager", "tokio-comp"] }
twilight-http-ratelimiting = { git = "https://github.com/MatthieuCoder/twilight.git" }
tonic = "0.8.3"
tokio-stream = "0.1.11"
redis = { version = "0.22.1", features = ["cluster", "connection-manager", "tokio-comp"] }
[dev-dependencies]
criterion = { workspace = true }
@ -33,6 +33,9 @@ tracing-subscriber = { workspace = true }
test-log = { workspace = true }
env_logger = { workspace = true }
[target.'cfg(not(target_os = "windows"))'.dependencies]
tikv-jemallocator = { workspace = true }
[[bench]]
name = "bucket"
harness = false

View file

@ -183,7 +183,7 @@ impl Bucket {
return false;
}
if matches!(self.time_remaining(), TimeRemaining::Finished) {
if let TimeRemaining::Finished = self.time_remaining() {
self.remaining.store(self.limit(), Ordering::Relaxed);
self.last_update.set_millis(0);

View file

@ -1,4 +1,11 @@
use leash::ignite;
use ratelimit::RatelimiterServerComponent;
#[cfg(not(target_os = "windows"))]
use tikv_jemallocator::Jemalloc;
#[cfg(not(target_os = "windows"))]
#[global_allocator]
static GLOBAL: Jemalloc = Jemalloc;
ignite!(RatelimiterServerComponent);

View file

@ -20,13 +20,16 @@ serde = { workspace = true }
serde_json = { workspace = true }
hyper = { workspace = true }
futures-util = "0.3.28"
hyper-rustls = "0.24.1"
futures-util = "0.3.17"
hyper-rustls = "0.23.2"
lazy_static = "1.4.0"
xxhash-rust = { version = "0.8.6", features = ["xxh32"] }
twilight-http-ratelimiting = "0.15.1"
xxhash-rust = { version = "0.8.2", features = ["xxh32"] }
twilight-http-ratelimiting = { git = "https://github.com/MatthieuCoder/twilight.git" }
hashring = "0.3.0"
tonic = "0.9.2"
tokio-stream = "0.1.14"
dns-lookup = "2.0.2"
tonic = "0.8.3"
tokio-stream = "0.1.11"
dns-lookup = "1.0.8"
[target.'cfg(not(target_os = "windows"))'.dependencies]
tikv-jemallocator = { workspace = true }

View file

@ -1,4 +1,5 @@
use anyhow::bail;
use anyhow::{bail, Context};
use futures_util::future::FutureExt;
use hyper::http::{
header::{AUTHORIZATION, CONNECTION, HOST, TRANSFER_ENCODING, UPGRADE},
HeaderValue, Method as HttpMethod, Request, Response, Uri,
@ -18,7 +19,7 @@ use std::{
sync::Arc,
time::SystemTime,
};
use tracing::{debug_span, error, info_span, trace, Instrument};
use tracing::{debug_span, error, info_span, log::trace, Instrument};
use twilight_http_ratelimiting::{Method, Path};
use crate::{config::ReverseProxy, ratelimit_client::RemoteRatelimiter};
@ -50,21 +51,21 @@ lazy_static! {
.with_description("Amount of requests sent to the ratelimiter")
.init()
};
static ref UPSTREAM_TIMES: Histogram<u64> = {
static ref UPSTREAM_TIMES: Histogram<f64> = {
global::meter(&METER_NAME)
.u64_histogram("rest.upstream_http_request_duration_miliseconds")
.f64_histogram("rest.upstream_http_request_duration_seconds")
.with_description("Time took to request discord")
.init()
};
static ref TICKET_TIMES: Histogram<u64> = {
static ref TICKET_TIMES: Histogram<f64> = {
global::meter(&METER_NAME)
.u64_histogram("rest.ticket_http_request_duration_miliseconds")
.f64_histogram("rest.ticket_http_request_duration_seconds")
.with_description("Time took to get a ticket from the ratelimiter")
.init()
};
static ref HEADERS_SUBMIT_TIMES: Histogram<u64> = {
static ref HEADERS_SUBMIT_TIMES: Histogram<f64> = {
global::meter(&METER_NAME)
.u64_histogram("rest.header_submit_http_request_duration_miliseconds")
.f64_histogram("rest.header_submit_http_request_duration_seconds")
.with_description("Time took to get a ticket from the ratelimiter")
.init()
};
@ -88,91 +89,6 @@ fn normalize_path(request_path: &str) -> (&str, &str) {
("/api", request_path)
}
}
const fn path_name(path: &Path) -> &'static str {
match path {
Path::ApplicationCommand(..) => "Application commands",
Path::ApplicationCommandId(..) => "Application command",
Path::ApplicationGuildCommand(..) => "Application commands in guild",
Path::ApplicationGuildCommandId(..) => "Application command in guild",
Path::ChannelsId(..) => "Channel",
Path::ChannelsIdFollowers(..) => "Channel followers",
Path::ChannelsIdInvites(..) => "Channel invite",
Path::ChannelsIdMessages(..) | Path::ChannelsIdMessagesId(..) => "Channel message",
Path::ChannelsIdMessagesBulkDelete(..) => "Bulk delete message",
Path::ChannelsIdMessagesIdCrosspost(..) => "Crosspost message",
Path::ChannelsIdMessagesIdReactions(..) => "Message reaction",
Path::ChannelsIdMessagesIdReactionsUserIdType(..) => "Message reaction for user",
Path::ChannelsIdMessagesIdThreads(..) => "Threads of a specific message",
Path::ChannelsIdPermissionsOverwriteId(..) => "Channel permission override",
Path::ChannelsIdPins(..) => "Channel pins",
Path::ChannelsIdPinsMessageId(..) => "Specific channel pin",
Path::ChannelsIdRecipients(..) => "Channel recipients",
Path::ChannelsIdThreadMembers(..) => "Thread members",
Path::ChannelsIdThreadMembersId(..) => "Thread member",
Path::ChannelsIdThreads(..) => "Channel threads",
Path::ChannelsIdTyping(..) => "Typing indicator",
Path::ChannelsIdWebhooks(..) | Path::WebhooksId(..) | Path::WebhooksIdToken(..) => {
"Webhook"
}
Path::Gateway => "Gateway",
Path::GatewayBot => "Gateway bot info",
Path::Guilds => "Guilds",
Path::GuildsId(..) => "Guild",
Path::GuildsIdAuditLogs(..) => "Guild audit logs",
Path::GuildsIdAutoModerationRules(..) => "Guild automoderation rules",
Path::GuildsIdAutoModerationRulesId(..) => "Guild automoderation rule",
Path::GuildsIdBans(..) => "Guild bans",
Path::GuildsIdBansId(..) => "Specific guild ban",
Path::GuildsIdBansUserId(..) => "Guild ban for user",
Path::GuildsIdChannels(..) => "Guild channel",
Path::GuildsIdEmojis(..) => "Guild emoji",
Path::GuildsIdEmojisId(..) => "Specific guild emoji",
Path::GuildsIdIntegrations(..) => "Guild integrations",
Path::GuildsIdIntegrationsId(..) => "Specific guild integration",
Path::GuildsIdIntegrationsIdSync(..) => "Sync guild integration",
Path::GuildsIdInvites(..) => "Guild invites",
Path::GuildsIdMembers(..) => "Guild members",
Path::GuildsIdMembersId(..) => "Specific guild member",
Path::GuildsIdMembersIdRolesId(..) => "Guild member role",
Path::GuildsIdMembersMeNick(..) => "Modify own nickname",
Path::GuildsIdMembersSearch(..) => "Search guild members",
Path::GuildsIdMfa(..) => "Guild MFA setting",
Path::GuildsIdPreview(..) => "Guild preview",
Path::GuildsIdPrune(..) => "Guild prune",
Path::GuildsIdRegions(..) => "Guild region",
Path::GuildsIdRoles(..) => "Guild roles",
Path::GuildsIdRolesId(..) => "Specific guild role",
Path::GuildsIdScheduledEvents(..) => "Scheduled events in guild",
Path::GuildsIdScheduledEventsId(..) => "Scheduled event in guild",
Path::GuildsIdScheduledEventsIdUsers(..) => "Users of a scheduled event",
Path::GuildsIdStickers(..) => "Guild stickers",
Path::GuildsIdTemplates(..) => "Guild templates",
Path::GuildsIdTemplatesCode(..) | Path::GuildsTemplatesCode(..) => {
"Specific guild template"
}
Path::GuildsIdThreads(..) => "Guild threads",
Path::GuildsIdVanityUrl(..) => "Guild vanity invite",
Path::GuildsIdVoiceStates(..) => "Guild voice states",
Path::GuildsIdWebhooks(..) => "Guild webhooks",
Path::GuildsIdWelcomeScreen(..) => "Guild welcome screen",
Path::GuildsIdWidget(..) => "Guild widget",
Path::InteractionCallback(..) => "Interaction callback",
Path::InvitesCode => "Invite info",
Path::OauthApplicationsMe => "Current application info",
Path::StageInstances => "Stage instances",
Path::StickerPacks => "Sticker packs",
Path::Stickers => "Stickers",
Path::UsersId => "User info",
Path::UsersIdChannels => "User channels",
Path::UsersIdConnections => "User connections",
Path::UsersIdGuilds => "User in guild",
Path::UsersIdGuildsId => "Guild from user",
Path::UsersIdGuildsIdMember => "Member of a guild",
Path::VoiceRegions => "Voice region list",
Path::WebhooksIdTokenMessagesId(..) => "Specific webhook message",
_ => "Unknown path!",
}
}
#[inline]
#[allow(clippy::too_many_lines)]
@ -185,7 +101,7 @@ pub async fn handle_request(
) -> Result<Response<Body>, anyhow::Error> {
let cx = OpenTelemetryContext::current();
let (bucket, uri_string, name) = {
let (bucket, uri_string) = {
let method = match *request.method() {
HttpMethod::DELETE => Method::Delete,
HttpMethod::GET => Method::Get,
@ -213,7 +129,7 @@ pub async fn handle_request(
trace!("full request uri is {uri_string}");
let mut hash = DefaultHasher::new();
let path = match Path::try_from((method, trimmed_path)) {
match Path::try_from((method, trimmed_path)) {
Ok(path) => path,
Err(e) => {
error!(
@ -222,32 +138,33 @@ pub async fn handle_request(
);
bail!("failed to parse");
}
};
path.hash(&mut hash);
}
.hash(&mut hash);
let bucket = hash.finish().to_string();
trace!("Request bucket is {}", bucket);
(bucket, uri_string, path_name(&path))
(bucket, uri_string)
};
REQUESTS.add(&cx, 1, &[KeyValue::new("bucket", name)]);
REQUESTS.add(&cx, 1, &[KeyValue::new("bucket", bucket.clone())]);
let ticket_start = SystemTime::now();
TICKET_CALLS.add(&cx, 1, &[KeyValue::new("bucket", name)]);
TICKET_CALLS.add(&cx, 1, &[KeyValue::new("bucket", bucket.clone())]);
// waits for the request to be authorized
match ratelimiter
.ticket(bucket.clone())
.instrument(debug_span!("ticket validation request"))
.await
{
Ok(_) => {
#[allow(clippy::cast_possible_truncation)]
.then(|v| async {
TICKET_TIMES.record(
&cx,
ticket_start.elapsed()?.as_millis() as u64,
&[KeyValue::new("bucket", name)],
ticket_start.elapsed()?.as_secs_f64(),
&[KeyValue::new("bucket", bucket.clone())],
);
}
v
})
.await
{
Ok(_) => {}
Err(e) => {
error!("Error when requesting the ratelimiter: {:?}", e);
bail!("failed to request the ratelimiter");
@ -291,17 +208,21 @@ pub async fn handle_request(
*request.uri_mut() = uri;
let span = debug_span!("upstream request to discord");
let upstream_start = SystemTime::now();
UPSTREAM_CALLS.add(&cx, 1, &[KeyValue::new("bucket", name)]);
let resp = match client.request(request).instrument(span).await {
Ok(response) => {
#[allow(clippy::cast_possible_truncation)]
UPSTREAM_CALLS.add(&cx, 1, &[KeyValue::new("bucket", bucket.clone())]);
let resp = match client
.request(request)
.instrument(span)
.then(|v| async {
UPSTREAM_TIMES.record(
&cx,
upstream_start.elapsed()?.as_millis() as u64,
&[KeyValue::new("bucket", name)],
upstream_start.elapsed()?.as_secs_f64(),
&[KeyValue::new("bucket", bucket.clone())],
);
response
}
v.context("")
})
.await
{
Ok(response) => response,
Err(e) => {
error!("Error when requesting the Discord API: {:?}", e);
bail!("failed to request the discord api");
@ -321,18 +242,19 @@ pub async fn handle_request(
.map(|f| (f.0, f.1.expect("errors should be filtered")))
.collect();
let headers_start = SystemTime::now();
HEADERS_SUBMIT_CALLS.add(&cx, 1, &[KeyValue::new("bucket", name)]);
ratelimiter
HEADERS_SUBMIT_CALLS.add(&cx, 1, &[KeyValue::new("bucket", bucket.clone())]);
let _submit_headers = ratelimiter
.submit_headers(bucket.clone(), headers)
.instrument(info_span!("submitting headers"))
.await?;
#[allow(clippy::cast_possible_truncation)]
HEADERS_SUBMIT_TIMES.record(
&cx,
headers_start.elapsed()?.as_millis() as u64,
&[KeyValue::new("bucket", name)],
);
.then(|v| async {
HEADERS_SUBMIT_TIMES.record(
&cx,
upstream_start.elapsed()?.as_secs_f64(),
&[KeyValue::new("bucket", bucket.clone())],
);
v
})
.await;
Ok(resp)
}

View file

@ -1,4 +1,11 @@
use leash::ignite;
use rest::ReverseProxyServer;
#[cfg(not(target_os = "windows"))]
use tikv_jemallocator::Jemalloc;
#[cfg(not(target_os = "windows"))]
#[global_allocator]
static GLOBAL: Jemalloc = Jemalloc;
ignite!(ReverseProxyServer);

View file

@ -16,6 +16,9 @@ anyhow = { workspace = true }
hex = "0.4.3"
ed25519-dalek = "1"
twilight-model = "0.15.2"
twilight-model = { version = "0.14" }
async-nats = "0.29.0"
async-nats = "0.26.0"
[target.'cfg(not(target_os = "windows"))'.dependencies]
tikv-jemallocator = "0.5"

View file

@ -1,4 +1,11 @@
use leash::ignite;
use webhook::WebhookServer;
#[cfg(not(target_os = "windows"))]
use tikv_jemallocator::Jemalloc;
#[cfg(not(target_os = "windows"))]
#[global_allocator]
static GLOBAL: Jemalloc = Jemalloc;
ignite!(WebhookServer);

View file

@ -6,7 +6,7 @@ edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
libc = "0.2.147"
libc = "0.2.139"
leash = { path = "../../libs/leash" }
shared = { path = "../../libs/shared" }
@ -15,24 +15,24 @@ gateway = { path = "../../exes/gateway" }
ratelimit = { path = "../../exes/ratelimit" }
rest = { path = "../../exes/rest" }
webhook = { path = "../../exes/webhook" }
ctrlc = "3.4.0"
ctrlc = "3.2.4"
tokio = { version = "1.29.1", features = ["rt"] }
serde = "1.0.166"
serde_json = "1.0.100"
anyhow = { version = "1.0.71", features = ["backtrace"] }
tokio = { version = "1.23.1", features = ["rt"] }
serde = "1.0.152"
serde_json = "1.0.91"
anyhow = { version = "1.0.68", features = ["backtrace"] }
tracing = "0.1.37"
config = "0.13.3"
tracing-subscriber = { version = "0.3.17", features = ["env-filter"] }
tracing-opentelemetry = "0.19.0"
opentelemetry = { version ="0.19.0", features = ["rt-tokio"] }
opentelemetry-otlp = { version = "0.12.0" }
tracing-subscriber = { version = "0.3.16", features = ["env-filter"] }
tracing-opentelemetry = "0.18.0"
opentelemetry = { version ="0.18.0", features = ["rt-tokio"] }
opentelemetry-otlp = { version = "0.11.0" }
[lib]
crate-type = ["staticlib", "rlib"]
[build-dependencies]
cbindgen = "0.24.5"
cbindgen = "0.24.3"

View file

@ -11,7 +11,7 @@ use tokio::{
};
use tracing::{
debug,
error, info,
log::{error, info},
};
/// Represents a all in one instance

View file

@ -15,5 +15,5 @@ tracing-opentelemetry = { workspace = true }
tracing = { workspace = true }
tracing-log = { version = "0.1.3", features = ["env_logger"] }
tracing-subscriber = { version = "0.3.17", features = ["env-filter"] }
opentelemetry-otlp = { version = "0.12.0", features = ["metrics"] }
tracing-subscriber = { version = "0.3.16", features = ["env-filter"] }
opentelemetry-otlp = { version = "0.11.0", features = ["metrics"] }

View file

@ -24,7 +24,8 @@ use std::str::FromStr;
use std::time::Duration;
use std::{future::Future, pin::Pin};
use tokio::sync::oneshot;
use tracing::{info, trace, error};
use tracing::log::error;
use tracing::{info, log::trace};
use tracing_subscriber::filter::Directive;
use tracing_subscriber::{fmt, prelude::*, EnvFilter};
@ -58,7 +59,7 @@ where
{
let meter = opentelemetry_otlp::new_pipeline()
.metrics(
selectors::simple::histogram([0.1, 1.0, 2.0, 5.0, 10.0, 20.0, 50.0]),
selectors::simple::histogram([1.0, 2.0, 5.0, 10.0, 20.0, 50.0]),
stateless_temporality_selector(),
opentelemetry::runtime::Tokio,
)
@ -99,7 +100,6 @@ where
let otel_layer = tracing_opentelemetry::layer().with_tracer(tracer);
tracing_subscriber::registry()
.with(fmt::layer())
.with(otel_layer)
.with(
// Use the info level as default
@ -133,7 +133,7 @@ where
.recv()
.await;
#[cfg(not(unix))]
tokio::signal::ctrl_c().await.unwrap();
return tokio::signal::ctrl_c().await.unwrap();
stop.send(()).unwrap();
shutdown_tracer_provider();

View file

@ -1,12 +1,12 @@
[package]
name = "proto"
version = "0.1.0"
edition = "2021"
edition = "2018"
[dependencies]
tonic = "0.9.2"
prost = "0.11.9"
tonic = "0.8.3"
prost = "0.11.5"
[build-dependencies]
tonic-build = "0.9.2"
glob = "0.3.1"
tonic-build = "0.8.4"
glob = "0.3.0"

View file

@ -11,9 +11,9 @@ tracing = { workspace = true }
tokio = { workspace = true }
config = { version = "0.13", default-features = false, features = ["json", "yaml-rust", "ini"] }
redis = { version = "0.23.0", features = ["cluster", "connection-manager", "tokio-comp"] }
redis = { version = "0.22.1", features = ["cluster", "connection-manager", "tokio-comp"] }
async-nats = "0.29.0"
twilight-model = "0.15.2"
anyhow = "1.0.71"
opentelemetry-otlp = "0.12.0"
async-nats = "0.26.0"
twilight-model = "0.14"
anyhow = "1.0.68"
opentelemetry-otlp = "0.11.0"

2
nats.config Normal file
View file

@ -0,0 +1,2 @@
max_payload: 100000000
max_pending: 1000000000

1170
otel/grafana/grafana.ini Normal file

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,10 @@
apiVersion: 1
providers:
- name: 'OpenTelemetry Demo'
orgId: 1
folder: 'Demo'
type: file
disableDeletion: false
editable: true
options:
path: /etc/grafana/provisioning/dashboards/general

View file

@ -0,0 +1,693 @@
{
"annotations": {
"list": [
{
"builtIn": 1,
"datasource": {
"type": "grafana",
"uid": "-- Grafana --"
},
"enable": true,
"hide": true,
"iconColor": "rgba(0, 211, 255, 1)",
"name": "Annotations & Alerts",
"target": {
"limit": 100,
"matchAny": false,
"tags": [],
"type": "dashboard"
},
"type": "dashboard"
}
]
},
"editable": true,
"fiscalYearStartMonth": 0,
"graphTooltip": 0,
"id": 1,
"links": [],
"liveNow": false,
"panels": [
{
"collapsed": false,
"gridPos": {
"h": 1,
"w": 24,
"x": 0,
"y": 0
},
"id": 14,
"panels": [],
"title": "Metrics",
"type": "row"
},
{
"datasource": {
"type": "prometheus",
"uid": "webstore-metrics"
},
"fieldConfig": {
"defaults": {
"color": {
"mode": "palette-classic"
},
"custom": {
"axisCenteredZero": false,
"axisColorMode": "text",
"axisLabel": "",
"axisPlacement": "auto",
"barAlignment": 0,
"drawStyle": "line",
"fillOpacity": 0,
"gradientMode": "none",
"hideFrom": {
"legend": false,
"tooltip": false,
"viz": false
},
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
"scaleDistribution": {
"type": "linear"
},
"showPoints": "auto",
"spanNulls": false,
"stacking": {
"group": "A",
"mode": "none"
},
"thresholdsStyle": {
"mode": "off"
}
},
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "green",
"value": null
},
{
"color": "red",
"value": 80
}
]
},
"unit": "percent"
},
"overrides": []
},
"gridPos": {
"h": 8,
"w": 12,
"x": 0,
"y": 1
},
"id": 6,
"options": {
"legend": {
"calcs": [],
"displayMode": "list",
"placement": "bottom",
"showLegend": true
},
"tooltip": {
"mode": "single",
"sort": "none"
}
},
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "webstore-metrics"
},
"editorMode": "code",
"expr": "rate(runtime_cpython_cpu_time{type=~\"system\"}[$__interval])*100",
"legendFormat": "__auto",
"range": true,
"refId": "A"
},
{
"datasource": {
"type": "prometheus",
"uid": "webstore-metrics"
},
"editorMode": "code",
"expr": "rate(runtime_cpython_cpu_time{type=~\"user\"}[$__interval])*100",
"hide": false,
"legendFormat": "__auto",
"range": true,
"refId": "B"
}
],
"title": "Recommendation Service (CPU%)",
"type": "timeseries"
},
{
"datasource": {
"type": "prometheus",
"uid": "webstore-metrics"
},
"fieldConfig": {
"defaults": {
"color": {
"mode": "palette-classic"
},
"custom": {
"axisCenteredZero": false,
"axisColorMode": "text",
"axisLabel": "",
"axisPlacement": "auto",
"barAlignment": 0,
"drawStyle": "line",
"fillOpacity": 0,
"gradientMode": "none",
"hideFrom": {
"legend": false,
"tooltip": false,
"viz": false
},
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
"scaleDistribution": {
"type": "linear"
},
"showPoints": "auto",
"spanNulls": false,
"stacking": {
"group": "A",
"mode": "none"
},
"thresholdsStyle": {
"mode": "off"
}
},
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "green",
"value": null
},
{
"color": "red",
"value": 80
}
]
},
"unit": "decmbytes"
},
"overrides": []
},
"gridPos": {
"h": 8,
"w": 12,
"x": 12,
"y": 1
},
"id": 8,
"options": {
"legend": {
"calcs": [],
"displayMode": "list",
"placement": "bottom",
"showLegend": true
},
"tooltip": {
"mode": "single",
"sort": "none"
}
},
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "webstore-metrics"
},
"editorMode": "code",
"expr": "rate(runtime_cpython_memory{type=~\"rss|vms\"}[$__interval])/1024/1024",
"legendFormat": "__auto",
"range": true,
"refId": "A"
}
],
"title": "Recommendation Service (Memory)",
"type": "timeseries"
},
{
"datasource": {
"type": "prometheus",
"uid": "webstore-metrics"
},
"fieldConfig": {
"defaults": {
"color": {
"mode": "palette-classic"
},
"custom": {
"axisCenteredZero": false,
"axisColorMode": "text",
"axisLabel": "",
"axisPlacement": "auto",
"barAlignment": 0,
"drawStyle": "bars",
"fillOpacity": 0,
"gradientMode": "none",
"hideFrom": {
"legend": false,
"tooltip": false,
"viz": false
},
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
"scaleDistribution": {
"type": "linear"
},
"showPoints": "auto",
"spanNulls": false,
"stacking": {
"group": "A",
"mode": "none"
},
"thresholdsStyle": {
"mode": "off"
}
},
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "green",
"value": null
},
{
"color": "red",
"value": 80
}
]
}
},
"overrides": []
},
"gridPos": {
"h": 8,
"w": 12,
"x": 0,
"y": 9
},
"id": 4,
"options": {
"legend": {
"calcs": [],
"displayMode": "list",
"placement": "bottom",
"showLegend": true
},
"tooltip": {
"mode": "single",
"sort": "none"
}
},
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "webstore-metrics"
},
"editorMode": "code",
"expr": "rate(app_recommendations_counter{recommendation_type=\"catalog\"}[$__interval])",
"legendFormat": "__auto",
"range": true,
"refId": "A"
}
],
"title": "Recommendations Count",
"type": "timeseries"
},
{
"datasource": {
"type": "prometheus",
"uid": "webstore-metrics"
},
"fieldConfig": {
"defaults": {
"color": {
"mode": "palette-classic"
},
"custom": {
"axisCenteredZero": false,
"axisColorMode": "text",
"axisLabel": "",
"axisPlacement": "auto",
"barAlignment": 0,
"drawStyle": "line",
"fillOpacity": 0,
"gradientMode": "none",
"hideFrom": {
"legend": false,
"tooltip": false,
"viz": false
},
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
"scaleDistribution": {
"type": "linear"
},
"showPoints": "auto",
"spanNulls": false,
"stacking": {
"group": "A",
"mode": "none"
},
"thresholdsStyle": {
"mode": "off"
}
},
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "green",
"value": null
},
{
"color": "red",
"value": 80
}
]
}
},
"overrides": []
},
"gridPos": {
"h": 8,
"w": 12,
"x": 12,
"y": 9
},
"id": 10,
"options": {
"legend": {
"calcs": [],
"displayMode": "list",
"placement": "bottom",
"showLegend": true
},
"tooltip": {
"mode": "single",
"sort": "none"
}
},
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "webstore-metrics"
},
"editorMode": "code",
"expr": "rate(calls_total{status_code=\"STATUS_CODE_ERROR\"}[$__interval])",
"legendFormat": "__auto",
"range": true,
"refId": "A"
}
],
"title": "Error Rate",
"type": "timeseries"
},
{
"datasource": {
"type": "prometheus",
"uid": "webstore-metrics"
},
"fieldConfig": {
"defaults": {
"color": {
"mode": "palette-classic"
},
"custom": {
"axisCenteredZero": false,
"axisColorMode": "text",
"axisLabel": "",
"axisPlacement": "auto",
"barAlignment": 0,
"drawStyle": "line",
"fillOpacity": 0,
"gradientMode": "none",
"hideFrom": {
"legend": false,
"tooltip": false,
"viz": false
},
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
"scaleDistribution": {
"type": "linear"
},
"showPoints": "auto",
"spanNulls": false,
"stacking": {
"group": "A",
"mode": "none"
},
"thresholdsStyle": {
"mode": "off"
}
},
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "green",
"value": null
},
{
"color": "red",
"value": 80
}
]
},
"unit": "dtdurationms"
},
"overrides": []
},
"gridPos": {
"h": 8,
"w": 12,
"x": 0,
"y": 17
},
"id": 2,
"options": {
"legend": {
"calcs": [],
"displayMode": "list",
"placement": "bottom",
"showLegend": true
},
"tooltip": {
"mode": "single",
"sort": "none"
}
},
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "webstore-metrics"
},
"editorMode": "code",
"expr": "histogram_quantile(0.50, sum(rate(latency_bucket{service_name=\"${service}\"}[$__rate_interval])) by (le))",
"legendFormat": "__auto",
"range": true,
"refId": "A"
},
{
"datasource": {
"type": "prometheus",
"uid": "webstore-metrics"
},
"editorMode": "code",
"expr": "histogram_quantile(0.95, sum(rate(latency_bucket{service_name=\"${service}\"}[$__rate_interval])) by (le))",
"hide": false,
"legendFormat": "__auto",
"range": true,
"refId": "B"
},
{
"datasource": {
"type": "prometheus",
"uid": "webstore-metrics"
},
"editorMode": "code",
"expr": "histogram_quantile(0.99, sum(rate(latency_bucket{service_name=\"${service}\"}[$__rate_interval])) by (le))",
"hide": false,
"legendFormat": "__auto",
"range": true,
"refId": "C"
},
{
"datasource": {
"type": "prometheus",
"uid": "webstore-metrics"
},
"editorMode": "code",
"expr": "histogram_quantile(0.999, sum(rate(latency_bucket{service_name=\"${service}\"}[$__rate_interval])) by (le))",
"hide": false,
"legendFormat": "__auto",
"range": true,
"refId": "D"
}
],
"title": "Service Latency (from SpanMetrics)",
"type": "timeseries"
},
{
"datasource": {
"type": "prometheus",
"uid": "webstore-metrics"
},
"fieldConfig": {
"defaults": {
"color": {
"mode": "palette-classic"
},
"custom": {
"axisCenteredZero": false,
"axisColorMode": "text",
"axisLabel": "",
"axisPlacement": "auto",
"barAlignment": 0,
"drawStyle": "line",
"fillOpacity": 0,
"gradientMode": "none",
"hideFrom": {
"legend": false,
"tooltip": false,
"viz": false
},
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
"scaleDistribution": {
"type": "linear"
},
"showPoints": "auto",
"spanNulls": false,
"stacking": {
"group": "A",
"mode": "none"
},
"thresholdsStyle": {
"mode": "off"
}
},
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "green",
"value": null
},
{
"color": "red",
"value": 80
}
]
},
"unit": "reqps"
},
"overrides": []
},
"gridPos": {
"h": 8,
"w": 12,
"x": 12,
"y": 17
},
"id": 12,
"options": {
"legend": {
"calcs": [],
"displayMode": "list",
"placement": "bottom",
"showLegend": true
},
"tooltip": {
"mode": "single",
"sort": "none"
}
},
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "webstore-metrics"
},
"editorMode": "code",
"expr": "rate(latency_count{service_name=\"${service}\"}[$__rate_interval])",
"legendFormat": "__auto",
"range": true,
"refId": "A"
}
],
"title": "Endpoint Rate by Service",
"type": "timeseries"
}
],
"schemaVersion": 37,
"style": "dark",
"tags": [],
"templating": {
"list": [
{
"allValue": "",
"current": {
"selected": false,
"text": "recommendationservice",
"value": "recommendationservice"
},
"datasource": {
"type": "prometheus",
"uid": "webstore-metrics"
},
"definition": "latency_bucket",
"hide": 0,
"includeAll": false,
"multi": false,
"name": "service",
"options": [],
"query": {
"query": "latency_bucket",
"refId": "StandardVariableQuery"
},
"refresh": 1,
"regex": "/.*service_name=\\\"([^\\\"]+)\\\".*/",
"skipUrlSync": false,
"sort": 1,
"type": "query"
}
]
},
"time": {
"from": "now-15m",
"to": "now"
},
"timepicker": {},
"timezone": "",
"title": "Demo Dashboard",
"uid": "W2gX2zHVk",
"version": 2,
"weekStart": ""
}

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,9 @@
---
# Grafana datasource provisioning: Prometheus metrics backend.
apiVersion: 1

datasources:
  - name: Prometheus
    # Stable uid; the provisioned dashboards reference this datasource
    # as `webstore-metrics` — do not change without updating them.
    uid: webstore-metrics
    type: prometheus
    url: http://prometheus:9090
    editable: true
    isDefault: true

View file

@ -0,0 +1,9 @@
---
# Grafana datasource provisioning: Jaeger tracing backend.
apiVersion: 1

datasources:
  - name: Jaeger
    # Stable uid referenced when linking from metrics to traces.
    uid: webstore-traces
    type: jaeger
    # NOTE(review): the `/jaeger/ui` suffix assumes Jaeger is served
    # under that base path — confirm against the Jaeger deployment.
    url: http://jaeger:16686/jaeger/ui
    editable: true
    isDefault: false

View file

@ -0,0 +1,2 @@
# extra settings to be merged into OpenTelemetry Collector configuration
# do not delete this file

View file

@ -0,0 +1,34 @@
---
# OpenTelemetry Collector configuration.
# Receives OTLP telemetry, derives span metrics, and exports traces to
# Jaeger and metrics to a Prometheus scrape endpoint.
receivers:
  otlp:
    protocols:
      grpc:
      http:
        cors:
          # Allow browser-based instrumentation from any origin to post
          # telemetry over OTLP/HTTP.
          allowed_origins:
            - "http://*"
            - "https://*"

exporters:
  # Forward traces to Jaeger's OTLP gRPC endpoint.
  otlp:
    endpoint: "jaeger:4317"
    tls:
      insecure: true  # plain-text link inside the internal network
  # Log exported telemetry to the collector's stdout (debugging aid).
  logging:
  # Expose received metrics for Prometheus to scrape.
  prometheus:
    endpoint: "otelcol:9464"

processors:
  batch:
  # Generate call/latency metrics from spans; they are published through
  # the `prometheus` exporter named above.
  spanmetrics:
    metrics_exporter: prometheus

service:
  pipelines:
    traces:
      receivers: [otlp]
      processors: [spanmetrics, batch]
      exporters: [logging, otlp]
    metrics:
      receivers: [otlp]
      processors: [batch]
      exporters: [prometheus, logging]

View file

@ -0,0 +1,12 @@
---
# Prometheus configuration for the demo observability stack.
global:
  evaluation_interval: 30s
  # Tight 5s scrape keeps demo dashboards responsive; raise for
  # production-sized deployments.
  scrape_interval: 5s

scrape_configs:
  # Application metrics exposed by the collector's `prometheus` exporter.
  - job_name: otel
    static_configs:
      - targets:
          - 'otelcol:9464'
  # The collector's own internal telemetry endpoint.
  - job_name: otel-collector
    static_configs:
      - targets:
          - 'otelcol:8888'

View file

@ -11,11 +11,11 @@ mkShell {
buildInputs = [
cargo
gcc
clang
go
gnumake
protobuf
rustc
zlib
mdbook
];
}