Merge branch 'main' into max
commit d58cad7d25

Cargo.lock  (generated, 1742 lines changed; diff suppressed because it is too large)

Cargo.toml  (49 lines changed)
@@ -1,7 +1,7 @@
 [package]
 name = "ap-relay"
 description = "A simple activitypub relay"
-version = "0.3.104"
+version = "0.3.106"
 authors = ["asonix <asonix@asonix.dog>"]
 license = "AGPL-3.0"
 readme = "README.md"
@@ -15,44 +15,46 @@ name = "relay"
 path = "src/main.rs"

 [features]
-console = ["console-subscriber"]
+console = ["dep:console-subscriber"]
 default = []

 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

 [dependencies]
 anyhow = "1.0"
-actix-rt = "2.7.0"
 actix-web = { version = "4.4.0", default-features = false, features = ["compress-brotli", "compress-gzip", "rustls-0_21"] }
 actix-webfinger = { version = "0.5.0", default-features = false }
 activitystreams = "0.7.0-alpha.25"
 activitystreams-ext = "0.1.0-alpha.3"
 ammonia = "3.1.0"
+async-cpupool = "0.2.0"
 bcrypt = "0.15"
 base64 = "0.21"
 clap = { version = "4.0.0", features = ["derive"] }
 config = "0.13.0"
-console-subscriber = { version = "0.1", optional = true }
+console-subscriber = { version = "0.2", optional = true }
 dashmap = "5.1.0"
 dotenv = "0.15.0"
 flume = "0.11.0"
-lru = "0.11.0"
+lru = "0.12.0"
-metrics = "0.21.0"
+metrics = "0.22.0"
-metrics-exporter-prometheus = { version = "0.12.0", default-features = false, features = [
+metrics-exporter-prometheus = { version = "0.13.0", default-features = false, features = [
     "http-listener",
 ] }
-metrics-util = "0.15.0"
+metrics-util = "0.16.0"
 mime = "0.3.16"
-minify-html = "0.11.0"
+minify-html = "0.15.0"
-opentelemetry = { version = "0.20", features = ["rt-tokio"] }
-opentelemetry-otlp = "0.13"
+opentelemetry = "0.21"
+opentelemetry_sdk = { version = "0.21", features = ["rt-tokio"] }
+opentelemetry-otlp = "0.14"
 pin-project-lite = "0.2.9"
-quanta = "0.11.0"
+# pinned to metrics-util
+quanta = "0.12.0"
 rand = "0.8"
 reqwest = { version = "0.11", default-features = false, features = ["rustls-tls", "stream"]}
 reqwest-middleware = "0.2"
 reqwest-tracing = "0.4.5"
-ring = "0.16.20"
+ring = "0.17.5"
 rsa = { version = "0.9" }
 rsa-magic-public-key = "0.8.0"
 rustls = "0.21.0"
@@ -69,41 +71,40 @@ thiserror = "1.0"
 time = { version = "0.3.17", features = ["serde"] }
 tracing = "0.1"
 tracing-error = "0.2"
-tracing-futures = "0.2"
-tracing-log = "0.1"
-tracing-opentelemetry = "0.21"
+tracing-log = "0.2"
+tracing-opentelemetry = "0.22"
 tracing-subscriber = { version = "0.3", features = [
     "ansi",
     "env-filter",
     "fmt",
 ] }
-tokio = { version = "1", features = ["macros", "sync"] }
+tokio = { version = "1", features = ["full", "tracing"] }
 uuid = { version = "1", features = ["v4", "serde"] }
-streem = "0.1.0"
+streem = "0.2.0"

 [dependencies.background-jobs]
-version = "0.15.0"
+version = "0.17.0"
 default-features = false
-features = ["background-jobs-actix", "error-logging"]
+features = ["error-logging", "metrics", "tokio"]

 [dependencies.http-signature-normalization-actix]
-version = "0.10.1"
+version = "0.11.0"
 default-features = false
 features = ["server", "ring"]

 [dependencies.http-signature-normalization-reqwest]
-version = "0.10.0"
+version = "0.11.0"
 default-features = false
 features = ["middleware", "ring"]

 [dependencies.tracing-actix-web]
-version = "0.7.6"
+version = "0.7.9"

 [build-dependencies]
 anyhow = "1.0"
 dotenv = "0.15.0"
 ructe = { version = "0.17.0", features = ["sass", "mime03"] }
-toml = "0.7.0"
+toml = "0.8.0"

 [profile.dev.package.rsa]
 opt-level = 3

flake.lock  (generated, 12 lines changed)

@@ -5,11 +5,11 @@
        "systems": "systems"
      },
      "locked": {
-        "lastModified": 1692799911,
-        "narHash": "sha256-3eihraek4qL744EvQXsK1Ha6C3CR7nnT8X2qWap4RNk=",
+        "lastModified": 1701680307,
+        "narHash": "sha256-kAuep2h5ajznlPMD9rnQyffWG8EM/C73lejGofXvdM8=",
        "owner": "numtide",
        "repo": "flake-utils",
-        "rev": "f9e7cf818399d17d347f847525c5a5a8032e4e44",
+        "rev": "4022d587cbbfd70fe950c1e2083a02621806a725",
        "type": "github"
      },
      "original": {
@@ -20,11 +20,11 @@
    },
    "nixpkgs": {
      "locked": {
-        "lastModified": 1693003285,
-        "narHash": "sha256-5nm4yrEHKupjn62MibENtfqlP6pWcRTuSKrMiH9bLkc=",
+        "lastModified": 1705133751,
+        "narHash": "sha256-rCIsyE80jgiOU78gCWN3A0wE0tR2GI5nH6MlS+HaaSQ=",
        "owner": "NixOS",
        "repo": "nixpkgs",
-        "rev": "5690c4271f2998c304a45c91a0aeb8fb69feaea7",
+        "rev": "9b19f5e77dd906cb52dade0b7bd280339d2a1f3d",
        "type": "github"
      },
      "original": {

(unnamed file: Nix package derivation)

@@ -1,17 +1,14 @@
 { lib
 , nixosTests
-, protobuf
 , rustPlatform
 }:

 rustPlatform.buildRustPackage {
   pname = "relay";
-  version = "0.3.104";
+  version = "0.3.106";
   src = ./.;
   cargoLock.lockFile = ./Cargo.lock;

-  PROTOC = "${protobuf}/bin/protoc";
-  PROTOC_INCLUDE = "${protobuf}/include";
   RUSTFLAGS = "--cfg tokio_unstable";

   nativeBuildInputs = [ ];

(unnamed file: in-memory metrics collector)

@@ -1,4 +1,4 @@
-use metrics::{Key, Recorder, SetRecorderError};
+use metrics::{Key, Metadata, Recorder, SetRecorderError};
 use metrics_util::{
     registry::{AtomicStorage, GenerationalStorage, Recency, Registry},
     MetricKindMask, Summary,
@@ -289,7 +289,7 @@ impl Inner {
         }

         let mut d = self.distributions.write().unwrap();
-        let outer_entry = d.entry(name.clone()).or_insert_with(BTreeMap::new);
+        let outer_entry = d.entry(name.clone()).or_default();

         let entry = outer_entry
             .entry(labels)
@@ -360,8 +360,8 @@ impl MemoryCollector {
         d.entry(key.as_str().to_owned()).or_insert(description);
     }

-    pub(crate) fn install(&self) -> Result<(), SetRecorderError> {
-        metrics::set_boxed_recorder(Box::new(self.clone()))
+    pub(crate) fn install(&self) -> Result<(), SetRecorderError<Self>> {
+        metrics::set_global_recorder(self.clone())
     }
 }

@@ -393,19 +393,19 @@ impl Recorder for MemoryCollector {
         self.add_description_if_missing(&key, description)
     }

-    fn register_counter(&self, key: &Key) -> metrics::Counter {
+    fn register_counter(&self, key: &Key, _: &Metadata<'_>) -> metrics::Counter {
         self.inner
             .registry
             .get_or_create_counter(key, |c| c.clone().into())
     }

-    fn register_gauge(&self, key: &Key) -> metrics::Gauge {
+    fn register_gauge(&self, key: &Key, _: &Metadata<'_>) -> metrics::Gauge {
         self.inner
             .registry
             .get_or_create_gauge(key, |c| c.clone().into())
     }

-    fn register_histogram(&self, key: &Key) -> metrics::Histogram {
+    fn register_histogram(&self, key: &Key, _: &Metadata<'_>) -> metrics::Histogram {
         self.inner
             .registry
             .get_or_create_histogram(key, |c| c.clone().into())

(unnamed file: db test helper)

@@ -750,6 +750,11 @@ mod tests {
         {
             let db =
                 Db::build_inner(true, sled::Config::new().temporary(true).open().unwrap()).unwrap();
-            actix_rt::System::new().block_on((f)(db));
+            tokio::runtime::Builder::new_current_thread()
+                .enable_all()
+                .build()
+                .unwrap()
+                .block_on((f)(db));
         }
     }
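
Note: a minimal, self-contained sketch (not taken from this repo) of the runtime pattern used in the new test helper above; enable_all() turns on the IO and time drivers that actix_rt::System previously provided.

    // Hedged sketch: build a single-threaded tokio runtime on demand and block on a future.
    fn block_on_example() {
        tokio::runtime::Builder::new_current_thread()
            .enable_all() // IO + time drivers
            .build()
            .unwrap()
            .block_on(async {
                tokio::time::sleep(std::time::Duration::from_millis(1)).await;
            });
    }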

(unnamed file: error module)

@@ -1,5 +1,4 @@
 use activitystreams::checked::CheckError;
-use actix_rt::task::JoinError;
 use actix_web::{
     error::{BlockingError, ResponseError},
     http::StatusCode,
@@ -7,6 +6,7 @@ use actix_web::{
 };
 use http_signature_normalization_reqwest::SignError;
 use std::{convert::Infallible, fmt::Debug, io};
+use tokio::task::JoinError;
 use tracing_error::SpanTrace;

 pub(crate) struct Error {

(unnamed file: admin token extractor)

@@ -28,7 +28,7 @@ impl AdminConfig {
     }

     fn verify(&self, token: XApiToken) -> Result<bool, Error> {
-        bcrypt::verify(&token.0, &self.hashed_api_token).map_err(Error::bcrypt_verify)
+        bcrypt::verify(token.0, &self.hashed_api_token).map_err(Error::bcrypt_verify)
     }
 }

@@ -200,10 +200,8 @@ impl FromRequest for Admin {
         Box::pin(async move {
             let (db, c, s, t) = res?;
             Self::verify(c, s, t).await?;
-            metrics::histogram!(
-                "relay.admin.verify",
-                now.elapsed().as_micros() as f64 / 1_000_000_f64
-            );
+            metrics::histogram!("relay.admin.verify")
+                .record(now.elapsed().as_micros() as f64 / 1_000_000_f64);
             Ok(Admin { db })
         })
     }
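
Note: the histogram call above follows the metrics 0.22 macro shape that recurs throughout this commit: the macro now returns a handle and the observation is a method call. A small sketch with hypothetical metric names:

    // Hedged sketch of the metrics 0.22 call shape; names and labels are made up.
    fn observe_example(elapsed: std::time::Duration) {
        // metrics 0.21 style was: metrics::histogram!("example.duration", elapsed.as_secs_f64());
        metrics::histogram!("example.duration", "kind" => "demo").record(elapsed.as_secs_f64());
        metrics::counter!("example.count").increment(1);
    }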

(unnamed file: future type aliases)

@@ -1,3 +1,4 @@
 use std::{future::Future, pin::Pin};

 pub(crate) type LocalBoxFuture<'a, T> = Pin<Box<dyn Future<Output = T> + 'a>>;
+pub(crate) type BoxFuture<'a, T> = Pin<Box<dyn Future<Output = T> + Send + 'a>>;
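
Note: the Send-able BoxFuture alias added above is what every ActixJob-to-Job conversion below relies on. A minimal sketch (with a hypothetical job type) of the return shape:

    use std::{future::Future, pin::Pin};

    // Same alias as the one added in this hunk.
    type BoxFuture<'a, T> = Pin<Box<dyn Future<Output = T> + Send + 'a>>;

    // Hypothetical type, only to illustrate producing a BoxFuture from an async block.
    struct ExampleJob;

    impl ExampleJob {
        fn run(self) -> BoxFuture<'static, anyhow::Result<()>> {
            // `+ Send` holds because nothing non-Send is captured, which is what the
            // tokio-based worker pool requires (the old LocalBoxFuture did not need it).
            Box::pin(async move { Ok(()) })
        }
    }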

src/jobs.rs  (37 lines changed)

@@ -19,8 +19,10 @@ use crate::{
     jobs::{process_listeners::Listeners, record_last_online::RecordLastOnline},
 };
 use background_jobs::{
-    memory_storage::{ActixTimer, Storage},
-    Job, QueueHandle, WorkerConfig,
+    memory_storage::{Storage, TokioTimer},
+    metrics::MetricsStorage,
+    tokio::{QueueHandle, WorkerConfig},
+    Job,
 };
 use std::time::Duration;

@@ -43,18 +45,21 @@ pub(crate) fn create_workers(
     actors: ActorCache,
     media: MediaCache,
     config: Config,
-) -> JobServer {
+) -> std::io::Result<JobServer> {
     let deliver_concurrency = config.deliver_concurrency();

-    let queue_handle = WorkerConfig::new(Storage::new(ActixTimer), move |queue_handle| {
-        JobState::new(
-            state.clone(),
-            actors.clone(),
-            JobServer::new(queue_handle),
-            media.clone(),
-            config.clone(),
-        )
-    })
+    let queue_handle = WorkerConfig::new(
+        MetricsStorage::wrap(Storage::new(TokioTimer)),
+        move |queue_handle| {
+            JobState::new(
+                state.clone(),
+                actors.clone(),
+                JobServer::new(queue_handle),
+                media.clone(),
+                config.clone(),
+            )
+        },
+    )
     .register::<Deliver>()
     .register::<DeliverMany>()
     .register::<QueryNodeinfo>()
@@ -70,12 +75,12 @@ pub(crate) fn create_workers(
     .set_worker_count("maintenance", 2)
     .set_worker_count("apub", 2)
     .set_worker_count("deliver", deliver_concurrency)
-    .start();
+    .start()?;

-    queue_handle.every(Duration::from_secs(60 * 5), Listeners);
-    queue_handle.every(Duration::from_secs(60 * 10), RecordLastOnline);
+    queue_handle.every(Duration::from_secs(60 * 5), Listeners)?;
+    queue_handle.every(Duration::from_secs(60 * 10), RecordLastOnline)?;

-    JobServer::new(queue_handle)
+    Ok(JobServer::new(queue_handle))
 }

 #[derive(Clone, Debug)]

(unnamed file: relay::jobs::apub::Announce)

@@ -2,14 +2,14 @@ use crate::{
     config::{Config, UrlKind},
     db::Actor,
     error::Error,
+    future::BoxFuture,
     jobs::{
         apub::{get_inboxes, prepare_activity},
         DeliverMany, JobState,
     },
 };
 use activitystreams::{activity::Announce as AsAnnounce, iri_string::types::IriString};
-use background_jobs::ActixJob;
-use std::{future::Future, pin::Pin};
+use background_jobs::Job;

 #[derive(Clone, serde::Deserialize, serde::Serialize)]
 pub(crate) struct Announce {
@@ -62,9 +62,9 @@ fn generate_announce(
     )
 }

-impl ActixJob for Announce {
+impl Job for Announce {
     type State = JobState;
-    type Future = Pin<Box<dyn Future<Output = Result<(), anyhow::Error>>>>;
+    type Future = BoxFuture<'static, anyhow::Result<()>>;

     const NAME: &'static str = "relay::jobs::apub::Announce";
     const QUEUE: &'static str = "apub";

(unnamed file: relay::jobs::apub::Follow)

@@ -3,6 +3,7 @@ use crate::{
     config::{Config, UrlKind},
     db::Actor,
     error::{Error, ErrorKind},
+    future::BoxFuture,
     jobs::{apub::prepare_activity, Deliver, JobState, QueryInstance, QueryNodeinfo},
 };
 use activitystreams::{
@@ -10,8 +11,7 @@ use activitystreams::{
     iri_string::types::IriString,
     prelude::*,
 };
-use background_jobs::ActixJob;
-use std::{future::Future, pin::Pin};
+use background_jobs::Job;

 #[derive(Clone, serde::Deserialize, serde::Serialize)]
 pub(crate) struct Follow {
@@ -111,9 +111,9 @@ fn generate_accept_follow(
     )
 }

-impl ActixJob for Follow {
+impl Job for Follow {
     type State = JobState;
-    type Future = Pin<Box<dyn Future<Output = Result<(), anyhow::Error>>>>;
+    type Future = BoxFuture<'static, anyhow::Result<()>>;

     const NAME: &'static str = "relay::jobs::apub::Follow";
     const QUEUE: &'static str = "apub";

(unnamed file: relay::jobs::apub::Forward)

@@ -2,11 +2,11 @@ use crate::{
     apub::AcceptedActivities,
     db::Actor,
     error::{Error, ErrorKind},
+    future::BoxFuture,
     jobs::{apub::get_inboxes, DeliverMany, JobState},
 };
 use activitystreams::prelude::*;
-use background_jobs::ActixJob;
-use std::{future::Future, pin::Pin};
+use background_jobs::Job;

 #[derive(Clone, serde::Deserialize, serde::Serialize)]
 pub(crate) struct Forward {
@@ -47,9 +47,9 @@ impl Forward {
     }
 }

-impl ActixJob for Forward {
+impl Job for Forward {
     type State = JobState;
-    type Future = Pin<Box<dyn Future<Output = Result<(), anyhow::Error>>>>;
+    type Future = BoxFuture<'static, anyhow::Result<()>>;

     const NAME: &'static str = "relay::jobs::apub::Forward";
     const QUEUE: &'static str = "apub";

(unnamed file: relay::jobs::apub::Reject)

@@ -2,10 +2,10 @@ use crate::{
     config::UrlKind,
     db::Actor,
     error::Error,
+    future::BoxFuture,
     jobs::{apub::generate_undo_follow, Deliver, JobState},
 };
-use background_jobs::ActixJob;
-use std::{future::Future, pin::Pin};
+use background_jobs::Job;

 #[derive(Clone, serde::Deserialize, serde::Serialize)]
 pub(crate) struct Reject(pub(crate) Actor);
@@ -33,9 +33,9 @@ impl Reject {
     }
 }

-impl ActixJob for Reject {
+impl Job for Reject {
     type State = JobState;
-    type Future = Pin<Box<dyn Future<Output = Result<(), anyhow::Error>>>>;
+    type Future = BoxFuture<'static, anyhow::Result<()>>;

     const NAME: &'static str = "relay::jobs::apub::Reject";
     const QUEUE: &'static str = "apub";

(unnamed file: relay::jobs::apub::Undo)

@@ -3,11 +3,11 @@ use crate::{
     config::UrlKind,
     db::Actor,
     error::Error,
+    future::BoxFuture,
     jobs::{apub::generate_undo_follow, Deliver, JobState},
 };
 use activitystreams::prelude::BaseExt;
-use background_jobs::ActixJob;
-use std::{future::Future, pin::Pin};
+use background_jobs::Job;

 #[derive(Clone, serde::Deserialize, serde::Serialize)]
 pub(crate) struct Undo {
@@ -48,9 +48,9 @@ impl Undo {
     }
 }

-impl ActixJob for Undo {
+impl Job for Undo {
     type State = JobState;
-    type Future = Pin<Box<dyn Future<Output = Result<(), anyhow::Error>>>>;
+    type Future = BoxFuture<'static, anyhow::Result<()>>;

     const NAME: &'static str = "relay::jobs::apub::Undo";
     const QUEUE: &'static str = "apub";

(unnamed file: relay::jobs::QueryContact)

@@ -1,12 +1,12 @@
 use crate::{
     apub::AcceptedActors,
     error::{Error, ErrorKind},
+    future::BoxFuture,
     jobs::JobState,
     requests::BreakerStrategy,
 };
 use activitystreams::{iri_string::types::IriString, object::Image, prelude::*};
-use background_jobs::ActixJob;
-use std::{future::Future, pin::Pin};
+use background_jobs::Job;

 #[derive(Clone, serde::Deserialize, serde::Serialize)]
 pub(crate) struct QueryContact {
@@ -85,9 +85,9 @@ fn to_contact(contact: AcceptedActors) -> Option<(String, String, IriString, Iri
     Some((username, display_name, url, avatar))
 }

-impl ActixJob for QueryContact {
+impl Job for QueryContact {
     type State = JobState;
-    type Future = Pin<Box<dyn Future<Output = Result<(), anyhow::Error>>>>;
+    type Future = BoxFuture<'static, anyhow::Result<()>>;

     const NAME: &'static str = "relay::jobs::QueryContact";
     const QUEUE: &'static str = "maintenance";

(unnamed file: relay::jobs::Deliver)

@@ -1,11 +1,11 @@
 use crate::{
     error::Error,
+    future::BoxFuture,
     jobs::{debug_object, JobState},
     requests::BreakerStrategy,
 };
 use activitystreams::iri_string::types::IriString;
-use background_jobs::{ActixJob, Backoff};
-use std::{future::Future, pin::Pin};
+use background_jobs::{Backoff, Job};

 #[derive(Clone, serde::Deserialize, serde::Serialize)]
 pub(crate) struct Deliver {
@@ -56,9 +56,9 @@ impl Deliver {
     }
 }

-impl ActixJob for Deliver {
+impl Job for Deliver {
     type State = JobState;
-    type Future = Pin<Box<dyn Future<Output = Result<(), anyhow::Error>>>>;
+    type Future = BoxFuture<'static, anyhow::Result<()>>;

     const NAME: &'static str = "relay::jobs::Deliver";
     const QUEUE: &'static str = "deliver";

(unnamed file: relay::jobs::DeliverMany)

@@ -1,10 +1,10 @@
 use crate::{
     error::Error,
-    future::LocalBoxFuture,
+    future::BoxFuture,
     jobs::{debug_object, Deliver, JobState},
 };
 use activitystreams::iri_string::types::IriString;
-use background_jobs::ActixJob;
+use background_jobs::Job;

 #[derive(Clone, serde::Deserialize, serde::Serialize)]
 pub(crate) struct DeliverMany {
@@ -45,9 +45,9 @@ impl DeliverMany {
     }
 }

-impl ActixJob for DeliverMany {
+impl Job for DeliverMany {
     type State = JobState;
-    type Future = LocalBoxFuture<'static, Result<(), anyhow::Error>>;
+    type Future = BoxFuture<'static, anyhow::Result<()>>;

     const NAME: &'static str = "relay::jobs::DeliverMany";
     const QUEUE: &'static str = "deliver";

(unnamed file: relay::jobs::QueryInstance)

@@ -1,12 +1,12 @@
 use crate::{
     config::UrlKind,
     error::{Error, ErrorKind},
+    future::BoxFuture,
     jobs::{Boolish, JobState},
     requests::BreakerStrategy,
 };
 use activitystreams::{iri, iri_string::types::IriString};
-use background_jobs::ActixJob;
-use std::{future::Future, pin::Pin};
+use background_jobs::Job;

 #[derive(Clone, serde::Deserialize, serde::Serialize)]
 pub(crate) struct QueryInstance {
@@ -165,9 +165,9 @@ impl QueryInstance {
     }
 }

-impl ActixJob for QueryInstance {
+impl Job for QueryInstance {
     type State = JobState;
-    type Future = Pin<Box<dyn Future<Output = Result<(), anyhow::Error>>>>;
+    type Future = BoxFuture<'static, anyhow::Result<()>>;

     const NAME: &'static str = "relay::jobs::QueryInstance";
     const QUEUE: &'static str = "maintenance";

(unnamed file: relay::jobs::QueryNodeinfo)

@@ -1,18 +1,18 @@
 use crate::{
     error::{Error, ErrorKind},
+    future::BoxFuture,
     jobs::{Boolish, JobState, QueryContact},
     requests::BreakerStrategy,
 };
 use activitystreams::{iri, iri_string::types::IriString, primitives::OneOrMany};
-use background_jobs::ActixJob;
-use std::{fmt::Debug, future::Future, pin::Pin};
+use background_jobs::Job;

 #[derive(Clone, serde::Deserialize, serde::Serialize)]
 pub(crate) struct QueryNodeinfo {
     actor_id: IriString,
 }

-impl Debug for QueryNodeinfo {
+impl std::fmt::Debug for QueryNodeinfo {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         f.debug_struct("QueryNodeinfo")
             .field("actor_id", &self.actor_id.to_string())
@@ -92,7 +92,7 @@ impl QueryNodeinfo {
             .metadata
             .and_then(|meta| meta.into_iter().next().and_then(|meta| meta.staff_accounts))
         {
-            if let Some(contact_id) = accounts.get(0) {
+            if let Some(contact_id) = accounts.first() {
                 state
                     .job_server
                     .queue(QueryContact::new(self.actor_id, contact_id.clone()))
@@ -104,9 +104,9 @@ impl QueryNodeinfo {
     }
 }

-impl ActixJob for QueryNodeinfo {
+impl Job for QueryNodeinfo {
     type State = JobState;
-    type Future = Pin<Box<dyn Future<Output = Result<(), anyhow::Error>>>>;
+    type Future = BoxFuture<'static, anyhow::Result<()>>;

     const NAME: &'static str = "relay::jobs::QueryNodeinfo";
     const QUEUE: &'static str = "maintenance";

(unnamed file: relay::jobs::Listeners)

@@ -1,9 +1,9 @@
 use crate::{
     error::Error,
+    future::BoxFuture,
     jobs::{instance::QueryInstance, nodeinfo::QueryNodeinfo, JobState},
 };
-use background_jobs::ActixJob;
-use std::{future::Future, pin::Pin};
+use background_jobs::Job;

 #[derive(Clone, Debug, serde::Deserialize, serde::Serialize)]
 pub(crate) struct Listeners;
@@ -23,9 +23,9 @@ impl Listeners {
     }
 }

-impl ActixJob for Listeners {
+impl Job for Listeners {
     type State = JobState;
-    type Future = Pin<Box<dyn Future<Output = Result<(), anyhow::Error>>>>;
+    type Future = BoxFuture<'static, anyhow::Result<()>>;

     const NAME: &'static str = "relay::jobs::Listeners";
     const QUEUE: &'static str = "maintenance";

(unnamed file: relay::jobs::RecordLastOnline)

@@ -1,6 +1,5 @@
-use crate::{error::Error, jobs::JobState};
-use background_jobs::{ActixJob, Backoff};
-use std::{future::Future, pin::Pin};
+use crate::{error::Error, future::BoxFuture, jobs::JobState};
+use background_jobs::{Backoff, Job};

 #[derive(Clone, Debug, serde::Deserialize, serde::Serialize)]
 pub(crate) struct RecordLastOnline;
@@ -14,9 +13,9 @@ impl RecordLastOnline {
     }
 }

-impl ActixJob for RecordLastOnline {
+impl Job for RecordLastOnline {
     type State = JobState;
-    type Future = Pin<Box<dyn Future<Output = Result<(), anyhow::Error>>>>;
+    type Future = BoxFuture<'static, anyhow::Result<()>>;

     const NAME: &'static str = "relay::jobs::RecordLastOnline";
     const QUEUE: &'static str = "maintenance";

src/main.rs  (69 lines changed)

@@ -4,7 +4,6 @@
 use std::time::Duration;

 use activitystreams::iri_string::types::IriString;
-use actix_rt::task::JoinHandle;
 use actix_web::{middleware::Compress, web, App, HttpServer};
 use collector::MemoryCollector;
 #[cfg(feature = "console")]
@@ -13,10 +12,12 @@ use error::Error;
 use http_signature_normalization_actix::middleware::VerifySignature;
 use metrics_exporter_prometheus::PrometheusBuilder;
 use metrics_util::layers::FanoutBuilder;
-use opentelemetry::{sdk::Resource, KeyValue};
+use opentelemetry::KeyValue;
 use opentelemetry_otlp::WithExportConfig;
+use opentelemetry_sdk::Resource;
 use reqwest_middleware::ClientWithMiddleware;
 use rustls::ServerConfig;
+use tokio::task::JoinHandle;
 use tracing_actix_web::TracingLogger;
 use tracing_error::ErrorLayer;
 use tracing_log::LogTracer;
@@ -81,18 +82,19 @@ fn init_subscriber(
         let subscriber = subscriber.with(console_layer);

     if let Some(url) = opentelemetry_url {
-        let tracer =
-            opentelemetry_otlp::new_pipeline()
-                .tracing()
-                .with_trace_config(opentelemetry::sdk::trace::config().with_resource(
-                    Resource::new(vec![KeyValue::new("service.name", software_name)]),
-                ))
-                .with_exporter(
-                    opentelemetry_otlp::new_exporter()
-                        .tonic()
-                        .with_endpoint(url.as_str()),
-                )
-                .install_batch(opentelemetry::runtime::Tokio)?;
+        let tracer = opentelemetry_otlp::new_pipeline()
+            .tracing()
+            .with_trace_config(
+                opentelemetry_sdk::trace::config().with_resource(Resource::new(vec![
+                    KeyValue::new("service.name", software_name),
+                ])),
+            )
+            .with_exporter(
+                opentelemetry_otlp::new_exporter()
+                    .tonic()
+                    .with_endpoint(url.as_str()),
+            )
+            .install_batch(opentelemetry_sdk::runtime::Tokio)?;

         let otel_layer = tracing_opentelemetry::layer()
             .with_tracer(tracer)
@@ -139,7 +141,7 @@ fn build_client(
     Ok(client_with_middleware)
 }

-#[actix_rt::main]
+#[tokio::main]
 async fn main() -> Result<(), anyhow::Error> {
     dotenv::dotenv().ok();

@@ -150,7 +152,8 @@ async fn main() -> Result<(), anyhow::Error> {
     let args = Args::new();

     if args.any() {
-        return client_main(config, args).await?;
+        client_main(config, args).await??;
+        return Ok(());
     }

     let collector = MemoryCollector::new();
@@ -160,12 +163,12 @@ async fn main() -> Result<(), anyhow::Error> {
             .with_http_listener(bind_addr)
             .build()?;

-        actix_rt::spawn(exporter);
+        tokio::spawn(exporter);
         let recorder = FanoutBuilder::default()
             .add_recorder(recorder)
             .add_recorder(collector.clone())
             .build();
-        metrics::set_boxed_recorder(Box::new(recorder))?;
+        metrics::set_global_recorder(recorder).map_err(|e| anyhow::anyhow!("{e}"))?;
     } else {
         collector.install()?;
     }
@@ -177,7 +180,7 @@ async fn main() -> Result<(), anyhow::Error> {
     let actors = ActorCache::new(db.clone());
     let media = MediaCache::new(db.clone());

-    server_main(db, actors, media, collector, config).await??;
+    server_main(db, actors, media, collector, config).await?;

     tracing::warn!("Application exit");

@@ -185,7 +188,7 @@ async fn main() -> Result<(), anyhow::Error> {
 }

 fn client_main(config: Config, args: Args) -> JoinHandle<Result<(), anyhow::Error>> {
-    actix_rt::spawn(do_client_main(config, args))
+    tokio::spawn(do_client_main(config, args))
 }

 async fn do_client_main(config: Config, args: Args) -> Result<(), anyhow::Error> {
@@ -271,19 +274,9 @@ async fn do_client_main(config: Config, args: Args) -> Result<(), anyhow::Error>
     Ok(())
 }

-fn server_main(
-    db: Db,
-    actors: ActorCache,
-    media: MediaCache,
-    collector: MemoryCollector,
-    config: Config,
-) -> JoinHandle<Result<(), anyhow::Error>> {
-    actix_rt::spawn(do_server_main(db, actors, media, collector, config))
-}
-
 const VERIFY_RATIO: usize = 7;

-async fn do_server_main(
+async fn server_main(
     db: Db,
     actors: ActorCache,
     media: MediaCache,
@@ -309,11 +302,11 @@ async fn do_server_main(
         }
     };

-    let verify_spawner = Spawner::build("verify-cpu", verify_threads)?;
-    let sign_spawner = Spawner::build("sign-cpu", signature_threads)?;
+    let verify_spawner = Spawner::build("verify-cpu", verify_threads.try_into()?)?;
+    let sign_spawner = Spawner::build("sign-cpu", signature_threads.try_into()?)?;

     let key_id = config.generate_url(UrlKind::MainKey).to_string();
-    let state = State::build(db.clone(), key_id, sign_spawner, client).await?;
+    let state = State::build(db.clone(), key_id, sign_spawner.clone(), client).await?;

     if let Some((token, admin_handle)) = config.telegram_info() {
         tracing::warn!("Creating telegram handler");
@@ -323,9 +316,12 @@ async fn do_server_main(
     let keys = config.open_keys()?;

     let bind_address = config.bind_address();
+    let sign_spawner2 = sign_spawner.clone();
+    let verify_spawner2 = verify_spawner.clone();
     let server = HttpServer::new(move || {
         let job_server =
-            create_workers(state.clone(), actors.clone(), media.clone(), config.clone());
+            create_workers(state.clone(), actors.clone(), media.clone(), config.clone())
+                .expect("Failed to create job server");

         let app = App::new()
             .app_data(web::Data::new(db.clone()))
@@ -408,6 +404,9 @@ async fn do_server_main(
         server.bind(bind_address)?.run().await?;
     }

+    sign_spawner2.close().await;
+    verify_spawner2.close().await;
+
     tracing::warn!("Server closed");

     Ok(())
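
Note on the client_main(config, args).await?? change above: tokio::spawn returns a JoinHandle, and awaiting it yields Result<T, JoinError>, so one ? covers a panicked or cancelled task and the second unwraps the task's own result. A small self-contained sketch (not relay code):

    async fn join_example() -> anyhow::Result<()> {
        let handle: tokio::task::JoinHandle<anyhow::Result<()>> =
            tokio::spawn(async { Ok(()) });

        // First ?: JoinError (panic/cancellation). Second ?: the task's own error.
        handle.await??;
        Ok(())
    }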

(unnamed file: LogOnDrop request-timing middleware)

@@ -40,7 +40,7 @@ impl Drop for LogOnDrop {
     fn drop(&mut self) {
         if self.arm {
             let duration = self.begin.elapsed();
-            metrics::histogram!("relay.request.complete", duration, "path" => self.path.clone(), "method" => self.method.clone());
+            metrics::histogram!("relay.request.complete", "path" => self.path.clone(), "method" => self.method.clone()).record(duration);
         }
     }
 }

(unnamed file: request Signer)

@@ -432,7 +432,7 @@ struct Signer {

 impl Signer {
     fn sign(&self, signing_string: &str) -> Result<String, Error> {
-        let mut signature = vec![0; self.private_key.public_modulus_len()];
+        let mut signature = vec![0; self.private_key.public().modulus_len()];

         self.private_key
             .sign(

(unnamed file: minify-html configuration)

@@ -14,10 +14,11 @@ const MINIFY_CONFIG: minify_html::Cfg = minify_html::Cfg {
     keep_html_and_head_opening_tags: false,
     keep_spaces_between_attributes: true,
     keep_comments: false,
+    keep_input_type_text_attr: true,
+    keep_ssi_comments: false,
+    preserve_brace_template_syntax: false,
+    preserve_chevron_percent_template_syntax: false,
     minify_css: true,
-    minify_css_level_1: true,
-    minify_css_level_2: false,
-    minify_css_level_3: false,
     minify_js: true,
     remove_bangs: true,
     remove_processing_instructions: true,

src/spawner.rs  (149 lines changed)

@@ -1,107 +1,30 @@
+use async_cpupool::CpuPool;
 use http_signature_normalization_actix::{Canceled, Spawn};
-use std::{
-    panic::AssertUnwindSafe,
-    sync::Arc,
-    thread::JoinHandle,
-    time::{Duration, Instant},
-};
+use std::time::Duration;

-fn spawner_thread(
-    receiver: flume::Receiver<Box<dyn FnOnce() + Send>>,
-    name: &'static str,
-    id: usize,
-) {
-    let guard = MetricsGuard::guard(name, id);
-
-    while let Ok(f) = receiver.recv() {
-        let start = Instant::now();
-        metrics::increment_counter!(format!("relay.{name}.operation.start"), "id" => id.to_string());
-        let res = std::panic::catch_unwind(AssertUnwindSafe(f));
-        metrics::increment_counter!(format!("relay.{name}.operation.end"), "complete" => res.is_ok().to_string(), "id" => id.to_string());
-        metrics::histogram!(format!("relay.{name}.operation.duration"), start.elapsed().as_secs_f64(), "complete" => res.is_ok().to_string(), "id" => id.to_string());
-
-        if let Err(e) = res {
-            tracing::warn!("{name} fn panicked: {e:?}");
-        }
-    }
-
-    guard.disarm();
-}
-
-#[derive(Clone, Debug)]
+#[derive(Clone)]
 pub(crate) struct Spawner {
-    name: &'static str,
-    sender: Option<flume::Sender<Box<dyn FnOnce() + Send>>>,
-    threads: Option<Arc<Vec<JoinHandle<()>>>>,
+    pool: CpuPool,
 }

-struct MetricsGuard {
-    name: &'static str,
-    id: usize,
-    start: Instant,
-    armed: bool,
-}
-
-impl MetricsGuard {
-    fn guard(name: &'static str, id: usize) -> Self {
-        metrics::increment_counter!(format!("relay.{name}.launched"), "id" => id.to_string());
-
-        Self {
-            name,
-            id,
-            start: Instant::now(),
-            armed: true,
-        }
-    }
-
-    fn disarm(mut self) {
-        self.armed = false;
-    }
-}
-
-impl Drop for MetricsGuard {
-    fn drop(&mut self) {
-        metrics::increment_counter!(format!("relay.{}.closed", self.name), "clean" => (!self.armed).to_string(), "id" => self.id.to_string());
-        metrics::histogram!(format!("relay.{}.duration", self.name), self.start.elapsed().as_secs_f64(), "clean" => (!self.armed).to_string(), "id" => self.id.to_string());
-        tracing::warn!("Stopping {} - {}", self.name, self.id);
-    }
-}
-
 impl Spawner {
-    pub(crate) fn build(name: &'static str, threads: usize) -> std::io::Result<Self> {
-        let (sender, receiver) = flume::bounded(8);
-
-        tracing::warn!("Launching {threads} {name}s");
-
-        let threads = (0..threads)
-            .map(|i| {
-                let receiver = receiver.clone();
-                std::thread::Builder::new()
-                    .name(format!("{name}-{i}"))
-                    .spawn(move || {
-                        spawner_thread(receiver, name, i);
-                    })
-            })
-            .collect::<Result<Vec<_>, _>>()?;
-
-        Ok(Spawner {
-            name,
-            sender: Some(sender),
-            threads: Some(Arc::new(threads)),
-        })
+    pub(crate) fn build(name: &'static str, threads: u16) -> anyhow::Result<Self> {
+        let pool = CpuPool::configure()
+            .name(name)
+            .max_threads(threads)
+            .build()?;
+
+        Ok(Spawner { pool })
+    }
+
+    pub(crate) async fn close(self) {
+        self.pool.close().await;
     }
 }

-impl Drop for Spawner {
-    fn drop(&mut self) {
-        self.sender.take();
-
-        if let Some(threads) = self.threads.take().and_then(Arc::into_inner) {
-            tracing::warn!("Joining {}s", self.name);
-            for thread in threads {
-                let _ = thread.join();
-            }
-        }
+impl std::fmt::Debug for Spawner {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        f.debug_struct("Spawner").finish()
     }
 }

@@ -111,9 +34,9 @@ where
 {
     let id = uuid::Uuid::new_v4();

-    metrics::increment_counter!("relay.spawner.wait-timer.start");
+    metrics::counter!("relay.spawner.wait-timer.start").increment(1);

-    let mut interval = actix_rt::time::interval(Duration::from_secs(5));
+    let mut interval = tokio::time::interval(Duration::from_secs(5));

     // pass the first tick (instant)
     interval.tick().await;
@@ -124,12 +47,12 @@ where
     loop {
         tokio::select! {
             out = &mut fut => {
-                metrics::increment_counter!("relay.spawner.wait-timer.end");
+                metrics::counter!("relay.spawner.wait-timer.end").increment(1);
                 return out;
             }
             _ = interval.tick() => {
                 counter += 1;
-                metrics::increment_counter!("relay.spawner.wait-timer.pending");
+                metrics::counter!("relay.spawner.wait-timer.pending").increment(1);
                 tracing::warn!("Blocking operation {id} is taking a long time, {} seconds", counter * 5);
             }
         }
@@ -144,22 +67,9 @@ impl Spawn for Spawner {
         Func: FnOnce() -> Out + Send + 'static,
         Out: Send + 'static,
     {
-        let sender = self.sender.as_ref().expect("Sender exists").clone();
-
-        Box::pin(async move {
-            let (tx, rx) = flume::bounded(1);
-
-            let _ = sender
-                .send_async(Box::new(move || {
-                    if tx.try_send((func)()).is_err() {
-                        tracing::warn!("Requestor hung up");
-                        metrics::increment_counter!("relay.spawner.disconnected");
-                    }
-                }))
-                .await;
-
-            timer(rx.recv_async()).await.map_err(|_| Canceled)
-        })
+        let pool = self.pool.clone();
+
+        Box::pin(async move { timer(pool.spawn(func)).await.map_err(|_| Canceled) })
     }
 }

@@ -171,21 +81,10 @@ impl http_signature_normalization_reqwest::Spawn for Spawner {
         Func: FnOnce() -> Out + Send + 'static,
         Out: Send + 'static,
     {
-        let sender = self.sender.as_ref().expect("Sender exists").clone();
+        let pool = self.pool.clone();

         Box::pin(async move {
-            let (tx, rx) = flume::bounded(1);
-
-            let _ = sender
-                .send_async(Box::new(move || {
-                    if tx.try_send((func)()).is_err() {
-                        tracing::warn!("Requestor hung up");
-                        metrics::increment_counter!("relay.spawner.disconnected");
-                    }
-                }))
-                .await;
-
-            timer(rx.recv_async())
+            timer(pool.spawn(func))
                 .await
                 .map_err(|_| http_signature_normalization_reqwest::Canceled)
         })
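
Note: the Spawner rewrite above swaps the hand-rolled thread pool for async-cpupool. A hedged sketch of the usage pattern, built only from the calls visible in this diff (pool name is hypothetical and error handling is simplified):

    async fn pool_example() -> anyhow::Result<()> {
        let pool = async_cpupool::CpuPool::configure()
            .name("example-cpu")
            .max_threads(2)
            .build()?;

        // spawn() runs the closure on a pool thread; awaiting yields a Result that
        // is an error if the pool shut down before the closure could run.
        if let Ok(answer) = pool.spawn(|| 21 * 2).await {
            assert_eq!(answer, 42);
        }

        // Close the pool explicitly on shutdown, as server_main now does through
        // Spawner::close.
        pool.close().await;
        Ok(())
    }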

(unnamed file: telegram bot handler)

@@ -46,7 +46,7 @@ pub(crate) fn start(admin_handle: String, db: Db, token: &str) {
     let bot = Bot::new(token);
     let admin_handle = Arc::new(admin_handle);

-    actix_rt::spawn(async move {
+    tokio::spawn(async move {
         let command_handler = teloxide::filter_command::<Command, _>().endpoint(
             move |bot: Bot, msg: Message, cmd: Command| {
                 let admin_handle = admin_handle.clone();