Mirror of https://github.com/LemmyNet/lemmy.git, synced 2024-11-14 19:35:23 +00:00
Merge branch 'main' into markdown-link-rule
This commit is contained in: commit 9b40d74cce
Cargo.lock (generated): 5 lines changed
@@ -10,8 +10,9 @@ checksum = "fe438c63458706e03479442743baae6c88256498e6431708f6dfc520a26515d3"
 
 [[package]]
 name = "activitypub_federation"
-version = "0.5.0-beta.3"
-source = "git+https://github.com/LemmyNet/activitypub-federation-rust.git?branch=webfinger-alphabets#071218396b2b1254e12ad061362befe0f17e76c9"
+version = "0.5.0-beta.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9a122cf2c2adf45b164134946bc069659cd93083fab294839a3f1d794b707c17"
 dependencies = [
  "activitystreams-kinds",
  "actix-web",
@@ -41,7 +41,6 @@ console = [
   "reqwest-tracing/opentelemetry_0_16",
 ]
 json-log = ["tracing-subscriber/json"]
-prometheus-metrics = ["prometheus", "actix-web-prom"]
 default = []
 
 [workspace]
@@ -70,7 +69,7 @@ lemmy_routes = { version = "=0.19.0-rc.3", path = "./crates/routes" }
 lemmy_db_views = { version = "=0.19.0-rc.3", path = "./crates/db_views" }
 lemmy_db_views_actor = { version = "=0.19.0-rc.3", path = "./crates/db_views_actor" }
 lemmy_db_views_moderator = { version = "=0.19.0-rc.3", path = "./crates/db_views_moderator" }
-activitypub_federation = { git = "https://github.com/LemmyNet/activitypub-federation-rust.git", branch = "webfinger-alphabets", default-features = false, features = [
+activitypub_federation = { version = "0.5.0-beta.4", default-features = false, features = [
   "actix-web",
 ] }
 diesel = "2.1.3"
@@ -170,8 +169,8 @@ futures-util = { workspace = true }
 tokio-postgres = { workspace = true }
 tokio-postgres-rustls = { workspace = true }
 chrono = { workspace = true }
-prometheus = { version = "0.13.3", features = ["process"], optional = true }
-actix-web-prom = { version = "0.6.0", optional = true }
+prometheus = { version = "0.13.3", features = ["process"] }
+actix-web-prom = { version = "0.6.0" }
 serial_test = { workspace = true }
 clap = { version = "4.4.7", features = ["derive"] }
 actix-web-httpauth = "0.8.1"
@@ -2,4 +2,4 @@
 
 ## Reporting a Vulnerability
 
-Message contact at join-lemmy.org for any security-related issues.
+Use [Github's security advisory issue system](https://github.com/LemmyNet/lemmy/security/advisories/new).
@@ -39,7 +39,7 @@ import {
   loginUser,
 } from "./shared";
 import { PostView } from "lemmy-js-client/dist/types/PostView";
-import { LemmyHttp } from "lemmy-js-client";
+import { LemmyHttp, ResolveObject } from "lemmy-js-client";
 
 let betaCommunity: CommunityView | undefined;
 
@@ -556,3 +556,26 @@ test("Report a post", async () => {
   expect(betaReport.original_post_body).toBe(alphaReport.original_post_body);
   expect(betaReport.reason).toBe(alphaReport.reason);
 });
+
+test("Fetch post via redirect", async () => {
+  let alphaPost = await createPost(alpha, betaCommunity!.community.id);
+  expect(alphaPost.post_view.post).toBeDefined();
+  // Make sure that post is liked on beta
+  const betaPost = await waitForPost(
+    beta,
+    alphaPost.post_view.post,
+    res => res?.counts.score === 1,
+  );
+
+  expect(betaPost).toBeDefined();
+  expect(betaPost.post?.ap_id).toBe(alphaPost.post_view.post.ap_id);
+
+  // Fetch post from url on beta instance instead of ap_id
+  let q = `http://lemmy-beta:8551/post/${betaPost.post.id}`;
+  let form: ResolveObject = {
+    q,
+  };
+  let gammaPost = await gamma.resolveObject(form);
+  expect(gammaPost).toBeDefined();
+  expect(gammaPost.post?.post.ap_id).toBe(alphaPost.post_view.post.ap_id);
+});
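This new test expects the beta instance to answer the plain /post/<id> URL with a redirect to the object's canonical ap_id (see the apub http changes further down) and the fetching side to follow it. A small client-side sketch of that behaviour with reqwest; the URL is a placeholder and this is not the test harness's actual HTTP stack:

use reqwest::redirect::Policy;

#[tokio::main]
async fn main() -> Result<(), reqwest::Error> {
  // reqwest follows up to 10 redirects by default; the policy is made explicit here.
  let client = reqwest::Client::builder()
    .redirect(Policy::limited(10))
    .build()?;

  // Placeholder URL standing in for http://lemmy-beta:8551/post/<id>.
  let res = client.get("http://example.com/post/123").send().await?;

  // After following any permanent redirect, res.url() is the final (canonical) location.
  println!("resolved to {}", res.url());
  Ok(())
}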
@@ -92,4 +92,7 @@
     bind: "127.0.0.1"
     port: 10002
   }
+  # Sets a response Access-Control-Allow-Origin CORS header
+  # https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Access-Control-Allow-Origin
+  cors_origin: "*"
 }
@@ -5,7 +5,7 @@ use lemmy_api_common::{
   comment::{CommentResponse, CreateCommentLike},
   context::LemmyContext,
   send_activity::{ActivityChannel, SendActivityData},
-  utils::{check_community_user_action, check_downvotes_enabled},
+  utils::{check_bot_account, check_community_user_action, check_downvotes_enabled},
 };
 use lemmy_db_schema::{
   newtypes::LocalUserId,
@@ -32,6 +32,7 @@ pub async fn like_comment(
 
   // Don't do a downvote if site has downvotes disabled
   check_downvotes_enabled(data.score, &local_site)?;
+  check_bot_account(&local_user_view.person)?;
 
   let comment_id = data.comment_id;
   let orig_comment = CommentView::read(&mut context.pool(), comment_id, None).await?;
@@ -5,7 +5,12 @@ use lemmy_api_common::{
   context::LemmyContext,
   post::{CreatePostLike, PostResponse},
   send_activity::{ActivityChannel, SendActivityData},
-  utils::{check_community_user_action, check_downvotes_enabled, mark_post_as_read},
+  utils::{
+    check_bot_account,
+    check_community_user_action,
+    check_downvotes_enabled,
+    mark_post_as_read,
+  },
 };
 use lemmy_db_schema::{
   source::{
@@ -29,6 +34,7 @@ pub async fn like_post(
 
   // Don't do a downvote if site has downvotes disabled
   check_downvotes_enabled(data.score, &local_site)?;
+  check_bot_account(&local_user_view.person)?;
 
   // Check for a community ban
   let post_id = data.post_id;
@@ -252,6 +252,16 @@ pub fn check_downvotes_enabled(score: i16, local_site: &LocalSite) -> Result<(),
   }
 }
 
+/// Dont allow bots to do certain actions, like voting
+#[tracing::instrument(skip_all)]
+pub fn check_bot_account(person: &Person) -> Result<(), LemmyError> {
+  if person.bot_account {
+    Err(LemmyErrorType::InvalidBotAction)?
+  } else {
+    Ok(())
+  }
+}
+
 #[tracing::instrument(skip_all)]
 pub fn check_private_instance(
   local_user_view: &Option<LocalUserView>,
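A standalone sketch of the guard pattern introduced above, using placeholder types instead of Lemmy's Person and LemmyError; the flagged account short-circuits with an error before any vote is recorded:

// Illustrative only: mirrors the check_bot_account guard from the hunk above.
#[derive(Debug)]
enum ActionError {
  InvalidBotAction,
}

struct Account {
  bot_account: bool,
}

fn check_bot_account(account: &Account) -> Result<(), ActionError> {
  if account.bot_account {
    // Early return with the error, the same `Err(..)?` shape as the diff above.
    Err(ActionError::InvalidBotAction)?
  } else {
    Ok(())
  }
}

fn main() {
  let bot = Account { bot_account: true };
  // A vote from a bot-flagged account is rejected before any further work happens.
  assert!(check_bot_account(&bot).is_err());
}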
@@ -18,7 +18,7 @@ use activitypub_federation::{
   traits::{ActivityHandler, Actor},
 };
 use anyhow::anyhow;
-use lemmy_api_common::context::LemmyContext;
+use lemmy_api_common::{context::LemmyContext, utils::check_bot_account};
 use lemmy_db_schema::source::local_site::LocalSite;
 use lemmy_utils::error::LemmyError;
 use url::Url;
@@ -74,6 +74,9 @@ impl ActivityHandler for Vote {
   async fn receive(self, context: &Data<LemmyContext>) -> Result<(), LemmyError> {
     let actor = self.actor.dereference(context).await?;
     let object = self.object.dereference(context).await?;
+
+    check_bot_account(&actor.0)?;
+
     match object {
       PostOrComment::Post(p) => vote_post(&self.kind, actor, &p, context).await,
       PostOrComment::Comment(c) => vote_comment(&self.kind, actor, &c, context).await,
@@ -1,5 +1,5 @@
 use crate::{
-  http::{create_apub_response, create_apub_tombstone_response, err_object_not_local},
+  http::{create_apub_response, create_apub_tombstone_response, redirect_remote_object},
   objects::comment::ApubComment,
 };
 use activitypub_federation::{config::Data, traits::Object};
@@ -23,7 +23,7 @@ pub(crate) async fn get_apub_comment(
   let id = CommentId(info.comment_id.parse::<i32>()?);
   let comment: ApubComment = Comment::read(&mut context.pool(), id).await?.into();
   if !comment.local {
-    Err(err_object_not_local())
+    Ok(redirect_remote_object(&comment.ap_id))
   } else if !comment.deleted && !comment.removed {
     create_apub_response(&comment.into_json(&context).await?)
   } else {
@@ -11,10 +11,10 @@ use activitypub_federation::{
   FEDERATION_CONTENT_TYPE,
 };
 use actix_web::{web, web::Bytes, HttpRequest, HttpResponse};
-use http::StatusCode;
+use http::{header::LOCATION, StatusCode};
 use lemmy_api_common::context::LemmyContext;
-use lemmy_db_schema::source::activity::SentActivity;
-use lemmy_utils::error::{LemmyError, LemmyErrorType, LemmyResult};
+use lemmy_db_schema::{newtypes::DbUrl, source::activity::SentActivity};
+use lemmy_utils::error::{LemmyError, LemmyResult};
 use serde::{Deserialize, Serialize};
 use std::ops::Deref;
 use url::Url;
@@ -64,8 +64,10 @@ fn create_apub_tombstone_response<T: Into<Url>>(id: T) -> LemmyResult<HttpRespon
   )
 }
 
-fn err_object_not_local() -> LemmyError {
-  LemmyErrorType::ObjectNotLocal.into()
+fn redirect_remote_object(url: &DbUrl) -> HttpResponse {
+  let mut res = HttpResponse::PermanentRedirect();
+  res.insert_header((LOCATION, url.as_str()));
+  res.finish()
 }
 
 #[derive(Deserialize)]
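The HTTP handlers above now answer requests for non-local objects with a 308 Permanent Redirect to the object's canonical ap_id instead of returning an ObjectNotLocal error. A minimal, self-contained actix-web sketch of that response shape; the route, port, and target URL below are placeholders, not Lemmy's real paths:

use actix_web::{get, http::header::LOCATION, App, HttpResponse, HttpServer};

// Placeholder handler: answer with a permanent redirect to the canonical URL,
// which in Lemmy would be the remote object's ap_id loaded from the database.
#[get("/objects/{id}")]
async fn redirect_remote() -> HttpResponse {
  let canonical = "https://other-instance.example/post/123";
  let mut res = HttpResponse::PermanentRedirect(); // 308, method and body preserved
  res.insert_header((LOCATION, canonical));
  res.finish()
}

#[actix_web::main]
async fn main() -> std::io::Result<()> {
  HttpServer::new(|| App::new().service(redirect_remote))
    .bind(("127.0.0.1", 8080))?
    .run()
    .await
}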
@@ -1,5 +1,5 @@
 use crate::{
-  http::{create_apub_response, create_apub_tombstone_response, err_object_not_local},
+  http::{create_apub_response, create_apub_tombstone_response, redirect_remote_object},
   objects::post::ApubPost,
 };
 use activitypub_federation::{config::Data, traits::Object};
@@ -23,7 +23,7 @@ pub(crate) async fn get_apub_post(
   let id = PostId(info.post_id.parse::<i32>()?);
   let post: ApubPost = Post::read(&mut context.pool(), id).await?.into();
   if !post.local {
-    Err(err_object_not_local())
+    Ok(redirect_remote_object(&post.ap_id))
   } else if !post.deleted && !post.removed {
     create_apub_response(&post.into_json(&context).await?)
   } else {
@@ -74,7 +74,7 @@ impl<Kind: Id + DeserializeOwned + Send> IdOrNestedObject<Kind> {
   pub(crate) async fn object(self, context: &Data<LemmyContext>) -> Result<Kind, LemmyError> {
     match self {
       // TODO: move IdOrNestedObject struct to library and make fetch_object_http private
-      IdOrNestedObject::Id(i) => Ok(fetch_object_http(&i, context).await?),
+      IdOrNestedObject::Id(i) => Ok(fetch_object_http(&i, context).await?.object),
       IdOrNestedObject::NestedObject(o) => Ok(o),
     }
   }
@@ -28,7 +28,7 @@ use diesel_async::{
 };
 use diesel_migrations::EmbeddedMigrations;
 use futures_util::{future::BoxFuture, Future, FutureExt};
-use lemmy_utils::{error::LemmyError, settings::structs::Settings};
+use lemmy_utils::{error::LemmyError, settings::SETTINGS};
 use once_cell::sync::Lazy;
 use regex::Regex;
 use rustls::{
@@ -36,8 +36,6 @@ use rustls::{
   ServerName,
 };
 use std::{
-  env,
-  env::VarError,
   ops::{Deref, DerefMut},
   sync::Arc,
   time::{Duration, SystemTime},
@@ -146,10 +144,6 @@ macro_rules! try_join_with_pool {
   }};
 }
 
-pub fn get_database_url_from_env() -> Result<String, VarError> {
-  env::var("LEMMY_DATABASE_URL")
-}
-
 pub fn fuzzy_search(q: &str) -> String {
   let replaced = q.replace('%', "\\%").replace('_', "\\_").replace(' ', "%");
   format!("%{replaced}%")
@@ -209,36 +203,6 @@ pub fn diesel_option_overwrite(opt: Option<String>) -> Option<Option<String>> {
   }
 }
 
-async fn build_db_pool_settings_opt(
-  settings: Option<&Settings>,
-) -> Result<ActualDbPool, LemmyError> {
-  let db_url = get_database_url(settings);
-  let pool_size = settings.map(|s| s.database.pool_size).unwrap_or(5);
-  // We only support TLS with sslmode=require currently
-  let tls_enabled = db_url.contains("sslmode=require");
-  let manager = if tls_enabled {
-    // diesel-async does not support any TLS connections out of the box, so we need to manually
-    // provide a setup function which handles creating the connection
-    AsyncDieselConnectionManager::<AsyncPgConnection>::new_with_setup(&db_url, establish_connection)
-  } else {
-    AsyncDieselConnectionManager::<AsyncPgConnection>::new(&db_url)
-  };
-  let pool = Pool::builder(manager)
-    .max_size(pool_size)
-    .wait_timeout(POOL_TIMEOUT)
-    .create_timeout(POOL_TIMEOUT)
-    .recycle_timeout(POOL_TIMEOUT)
-    .runtime(Runtime::Tokio1)
-    .build()?;
-
-  // If there's no settings, that means its a unit test, and migrations need to be run
-  if settings.is_none() {
-    run_migrations(&db_url);
-  }
-
-  Ok(pool)
-}
-
 fn establish_connection(config: &str) -> BoxFuture<ConnectionResult<AsyncPgConnection>> {
   let fut = async {
     let rustls_config = rustls::ClientConfig::builder()
|
||||||
|
|
||||||
pub const MIGRATIONS: EmbeddedMigrations = embed_migrations!();
|
pub const MIGRATIONS: EmbeddedMigrations = embed_migrations!();
|
||||||
|
|
||||||
pub fn run_migrations(db_url: &str) {
|
fn run_migrations(db_url: &str) {
|
||||||
// Needs to be a sync connection
|
// Needs to be a sync connection
|
||||||
let mut conn =
|
let mut conn =
|
||||||
PgConnection::establish(db_url).unwrap_or_else(|e| panic!("Error connecting to {db_url}: {e}"));
|
PgConnection::establish(db_url).unwrap_or_else(|e| panic!("Error connecting to {db_url}: {e}"));
|
||||||
|
@@ -290,29 +254,36 @@ pub fn run_migrations(db_url: &str) {
   info!("Database migrations complete.");
 }
 
-pub async fn build_db_pool(settings: &Settings) -> Result<ActualDbPool, LemmyError> {
-  build_db_pool_settings_opt(Some(settings)).await
+pub async fn build_db_pool() -> Result<ActualDbPool, LemmyError> {
+  let db_url = SETTINGS.get_database_url();
+  // We only support TLS with sslmode=require currently
+  let tls_enabled = db_url.contains("sslmode=require");
+  let manager = if tls_enabled {
+    // diesel-async does not support any TLS connections out of the box, so we need to manually
+    // provide a setup function which handles creating the connection
+    AsyncDieselConnectionManager::<AsyncPgConnection>::new_with_setup(&db_url, establish_connection)
+  } else {
+    AsyncDieselConnectionManager::<AsyncPgConnection>::new(&db_url)
+  };
+  let pool = Pool::builder(manager)
+    .max_size(SETTINGS.database.pool_size)
+    .wait_timeout(POOL_TIMEOUT)
+    .create_timeout(POOL_TIMEOUT)
+    .recycle_timeout(POOL_TIMEOUT)
+    .runtime(Runtime::Tokio1)
+    .build()?;
+
+  run_migrations(&db_url);
+
+  Ok(pool)
 }
 
 pub async fn build_db_pool_for_tests() -> ActualDbPool {
-  build_db_pool_settings_opt(None)
-    .await
-    .expect("db pool missing")
-}
-
-pub fn get_database_url(settings: Option<&Settings>) -> String {
-  // The env var should override anything in the settings config
-  match get_database_url_from_env() {
-    Ok(url) => url,
-    Err(e) => match settings {
-      Some(settings) => settings.get_database_url(),
-      None => panic!("Failed to read database URL from env var LEMMY_DATABASE_URL: {e}"),
-    },
-  }
+  build_db_pool().await.expect("db pool missing")
 }
 
 pub fn naive_now() -> DateTime<Utc> {
-  chrono::prelude::Utc::now()
+  Utc::now()
 }
 
 pub fn post_to_comment_sort_type(sort: SortType) -> CommentSortType {
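With the settings parameter gone, pool construction reads everything from the global SETTINGS value and always runs migrations. A rough, self-contained sketch of that "global Lazy settings" pattern with once_cell; the field names and fallback URL here are made up for illustration and are not Lemmy's real config layout:

use once_cell::sync::Lazy;

// Illustrative stand-in for Lemmy's Settings; the real struct is loaded from
// the config file and has many more fields.
struct Settings {
  database_url: String,
  pool_size: usize,
}

static SETTINGS: Lazy<Settings> = Lazy::new(|| Settings {
  // The environment variable wins, otherwise fall back to a placeholder default.
  database_url: std::env::var("LEMMY_DATABASE_URL")
    .unwrap_or_else(|_| "postgres://lemmy:password@localhost:5432/lemmy".into()),
  pool_size: 5,
});

fn main() {
  // Pool construction can read everything it needs from the global value,
  // which is why build_db_pool() no longer takes a settings argument.
  println!(
    "would connect to {} with a pool of {}",
    SETTINGS.database_url, SETTINGS.pool_size
  );
}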
@@ -75,10 +75,7 @@ pub async fn send_email(
   };
 
   // Set the creds if they exist
-  let smtp_password = std::env::var("LEMMY_SMTP_PASSWORD")
-    .ok()
-    .or(email_config.smtp_password);
+  let smtp_password = email_config.smtp_password();
   if let (Some(username), Some(password)) = (email_config.smtp_login, smtp_password) {
     builder = builder.credentials(Credentials::new(username, password));
   }
@@ -226,6 +226,7 @@ pub enum LemmyErrorType {
   CommunityHasNoFollowers,
   BanExpirationInPast,
   InvalidUnixTime,
+  InvalidBotAction,
   Unknown(String),
 }
 
@@ -45,6 +45,9 @@ impl Settings {
   }
 
   pub fn get_database_url(&self) -> String {
+    if let Ok(url) = env::var("LEMMY_DATABASE_URL") {
+      return url;
+    }
     match &self.database.connection {
       DatabaseConnection::Uri { uri } => uri.clone(),
       DatabaseConnection::Parts(parts) => {
@@ -1,6 +1,9 @@
 use doku::Document;
 use serde::{Deserialize, Serialize};
-use std::net::{IpAddr, Ipv4Addr};
+use std::{
+  env,
+  net::{IpAddr, Ipv4Addr},
+};
 use url::Url;
 
 #[derive(Debug, Deserialize, Serialize, Clone, SmartDefault, Document)]
@@ -49,6 +52,19 @@ pub struct Settings {
   #[default(None)]
   #[doku(example = "Some(Default::default())")]
   pub prometheus: Option<PrometheusConfig>,
+  /// Sets a response Access-Control-Allow-Origin CORS header
+  /// https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Access-Control-Allow-Origin
+  #[default(None)]
+  #[doku(example = "*")]
+  cors_origin: Option<String>,
+}
+
+impl Settings {
+  pub fn cors_origin(&self) -> Option<String> {
+    env::var("LEMMY_CORS_ORIGIN")
+      .ok()
+      .or(self.cors_origin.clone())
+  }
 }
 
 #[derive(Debug, Deserialize, Serialize, Clone, SmartDefault, Document)]
@@ -80,7 +96,7 @@ pub struct PictrsConfig {
 #[serde(default)]
 pub struct DatabaseConfig {
   #[serde(flatten, default)]
-  pub connection: DatabaseConnection,
+  pub(crate) connection: DatabaseConnection,
 
   /// Maximum number of active sql connections
   #[default(95)]
@@ -125,10 +141,10 @@ pub struct DatabaseConnectionParts {
   pub(super) user: String,
   /// Password to connect to postgres
   #[default("password")]
-  pub password: String,
+  pub(super) password: String,
   #[default("localhost")]
   /// Host where postgres is running
-  pub host: String,
+  pub(super) host: String,
   /// Port where postgres can be accessed
   #[default(5432)]
   pub(super) port: i32,
@@ -146,7 +162,7 @@ pub struct EmailConfig {
   /// Login name for smtp server
   pub smtp_login: Option<String>,
   /// Password to login to the smtp server
-  pub smtp_password: Option<String>,
+  smtp_password: Option<String>,
   #[doku(example = "noreply@example.com")]
   /// Address to send emails from, eg "noreply@your-instance.com"
   pub smtp_from_address: String,
@@ -156,6 +172,14 @@ pub struct EmailConfig {
   pub tls_type: String,
 }
 
+impl EmailConfig {
+  pub fn smtp_password(&self) -> Option<String> {
+    std::env::var("LEMMY_SMTP_PASSWORD")
+      .ok()
+      .or(self.smtp_password.clone())
+  }
+}
+
 #[derive(Debug, Deserialize, Serialize, Clone, SmartDefault, Document)]
 #[serde(deny_unknown_fields)]
 pub struct SetupConfig {
@@ -178,11 +202,11 @@ pub struct SetupConfig {
 #[serde(deny_unknown_fields)]
 pub struct PrometheusConfig {
   // Address that the Prometheus metrics will be served on.
-  #[default(Some(IpAddr::V4(Ipv4Addr::new(127, 0, 0, 1))))]
+  #[default(IpAddr::V4(Ipv4Addr::new(127, 0, 0, 1)))]
   #[doku(example = "127.0.0.1")]
-  pub bind: Option<IpAddr>,
+  pub bind: IpAddr,
   // Port that the Prometheus metrics will be served on.
-  #[default(Some(10002))]
+  #[default(10002)]
   #[doku(example = "10002")]
-  pub port: Option<i32>,
+  pub port: i32,
 }
@@ -2,7 +2,7 @@ version: "3.7"
 
 x-ui-default: &ui-default
   init: true
-  image: dessalines/lemmy-ui:0.18.4
+  image: dessalines/lemmy-ui:0.19.0-rc.3
   # assuming lemmy-ui is cloned besides lemmy directory
   # build:
   #   context: ../../../lemmy-ui
@@ -8,4 +8,4 @@ for Item in alpha beta gamma delta epsilon ; do
   sudo chown -R 991:991 volumes/pictrs_$Item
 done
 
-sudo docker compose up
+sudo docker compose up --build
src/lib.rs: 32 lines changed
@@ -1,6 +1,5 @@
 pub mod api_routes_http;
 pub mod code_migrations;
-#[cfg(feature = "prometheus-metrics")]
 pub mod prometheus_metrics;
 pub mod root_span_builder;
 pub mod scheduled_tasks;
@@ -40,10 +39,7 @@ use lemmy_apub::{
   VerifyUrlData,
   FEDERATION_HTTP_FETCH_LIMIT,
 };
-use lemmy_db_schema::{
-  source::secret::Secret,
-  utils::{build_db_pool, get_database_url, run_migrations},
-};
+use lemmy_db_schema::{source::secret::Secret, utils::build_db_pool};
 use lemmy_federate::{start_stop_federation_workers_cancellable, Opts};
 use lemmy_routes::{feeds, images, nodeinfo, webfinger};
 use lemmy_utils::{
@@ -52,6 +48,7 @@ use lemmy_utils::{
   response::jsonify_plain_text_errors,
   settings::{structs::Settings, SETTINGS},
 };
+use prometheus_metrics::serve_prometheus;
 use reqwest_middleware::ClientBuilder;
 use reqwest_tracing::TracingMiddleware;
 use serde_json::json;
@@ -63,12 +60,6 @@ use tracing_error::ErrorLayer;
 use tracing_log::LogTracer;
 use tracing_subscriber::{filter::Targets, layer::SubscriberExt, Layer, Registry};
 use url::Url;
-#[cfg(feature = "prometheus-metrics")]
-use {
-  actix_web_prom::PrometheusMetricsBuilder,
-  prometheus::default_registry,
-  prometheus_metrics::serve_prometheus,
-};
 
 #[derive(Parser, Debug)]
 #[command(
@@ -120,12 +111,8 @@ pub async fn start_lemmy_server(args: CmdArgs) -> Result<(), LemmyError> {
     startup_server_handle = Some(create_startup_server()?);
   }
 
-  // Run the DB migrations
-  let db_url = get_database_url(Some(&SETTINGS));
-  run_migrations(&db_url);
-
   // Set up the connection pool
-  let pool = build_db_pool(&SETTINGS).await?;
+  let pool = build_db_pool().await?;
 
   // Run the Code-required migrations
   run_advanced_migrations(&mut (&pool).into(), &SETTINGS).await?;
@@ -173,8 +160,9 @@ pub async fn start_lemmy_server(args: CmdArgs) -> Result<(), LemmyError> {
     let _scheduled_tasks = tokio::task::spawn(scheduled_tasks::setup(context.clone()));
   }
 
-  #[cfg(feature = "prometheus-metrics")]
-  serve_prometheus(SETTINGS.prometheus.as_ref(), context.clone());
+  if let Some(prometheus) = SETTINGS.prometheus.clone() {
+    serve_prometheus(prometheus, context.clone())?;
+  }
 
   let federation_config = FederationConfig::builder()
     .domain(SETTINGS.hostname.clone())
|
||||||
let context: LemmyContext = federation_config.deref().clone();
|
let context: LemmyContext = federation_config.deref().clone();
|
||||||
let rate_limit_cell = federation_config.rate_limit_cell().clone();
|
let rate_limit_cell = federation_config.rate_limit_cell().clone();
|
||||||
let self_origin = settings.get_protocol_and_hostname();
|
let self_origin = settings.get_protocol_and_hostname();
|
||||||
|
let cors_origin_setting = settings.cors_origin();
|
||||||
|
|
||||||
// Pictrs cannot use proxy
|
// Pictrs cannot use proxy
|
||||||
let pictrs_client = ClientBuilder::new(client_builder(&SETTINGS).no_proxy().build()?)
|
let pictrs_client = ClientBuilder::new(client_builder(&SETTINGS).no_proxy().build()?)
|
||||||
|
@ -289,9 +278,8 @@ fn create_http_server(
|
||||||
|
|
||||||
// Create Http server with websocket support
|
// Create Http server with websocket support
|
||||||
let server = HttpServer::new(move || {
|
let server = HttpServer::new(move || {
|
||||||
let cors_origin = env::var("LEMMY_CORS_ORIGIN");
|
let cors_config = match (cors_origin_setting.clone(), cfg!(debug_assertions)) {
|
||||||
let cors_config = match (cors_origin, cfg!(debug_assertions)) {
|
(Some(origin), false) => Cors::default()
|
||||||
(Ok(origin), false) => Cors::default()
|
|
||||||
.allowed_origin(&origin)
|
.allowed_origin(&origin)
|
||||||
.allowed_origin(&self_origin),
|
.allowed_origin(&self_origin),
|
||||||
_ => Cors::default()
|
_ => Cors::default()
|
||||||
|
@ -343,7 +331,7 @@ fn create_http_server(
|
||||||
pub fn init_logging(opentelemetry_url: &Option<Url>) -> Result<(), LemmyError> {
|
pub fn init_logging(opentelemetry_url: &Option<Url>) -> Result<(), LemmyError> {
|
||||||
LogTracer::init()?;
|
LogTracer::init()?;
|
||||||
|
|
||||||
let log_description = std::env::var("RUST_LOG").unwrap_or_else(|_| "info".into());
|
let log_description = env::var("RUST_LOG").unwrap_or_else(|_| "info".into());
|
||||||
|
|
||||||
let targets = log_description
|
let targets = log_description
|
||||||
.trim()
|
.trim()
|
||||||
|
|
|
@@ -1,14 +1,9 @@
-// TODO: should really not unwrap everywhere here....
-#![allow(clippy::unwrap_used)]
-use actix_web::{rt::System, web, App, HttpResponse, HttpServer, Responder};
+use actix_web::{rt::System, web, App, HttpServer};
 use lemmy_api_common::context::LemmyContext;
-use lemmy_utils::settings::structs::PrometheusConfig;
+use lemmy_utils::{error::LemmyResult, settings::structs::PrometheusConfig};
 use prometheus::{default_registry, Encoder, Gauge, Opts, TextEncoder};
-use std::{
-  net::{IpAddr, Ipv4Addr},
-  sync::Arc,
-  thread,
-};
+use std::{sync::Arc, thread};
+use tracing::error;
 
 struct PromContext {
   lemmy: LemmyContext,
|
||||||
available: Gauge,
|
available: Gauge,
|
||||||
}
|
}
|
||||||
|
|
||||||
static DEFAULT_BIND: IpAddr = IpAddr::V4(Ipv4Addr::new(127, 0, 0, 1));
|
pub fn serve_prometheus(config: PrometheusConfig, lemmy_context: LemmyContext) -> LemmyResult<()> {
|
||||||
static DEFAULT_PORT: i32 = 10002;
|
|
||||||
|
|
||||||
pub fn serve_prometheus(config: Option<&PrometheusConfig>, lemmy_context: LemmyContext) {
|
|
||||||
let context = Arc::new(PromContext {
|
let context = Arc::new(PromContext {
|
||||||
lemmy: lemmy_context,
|
lemmy: lemmy_context,
|
||||||
db_pool_metrics: create_db_pool_metrics(),
|
db_pool_metrics: create_db_pool_metrics()?,
|
||||||
});
|
});
|
||||||
|
|
||||||
let (bind, port) = match config {
|
|
||||||
Some(config) => (
|
|
||||||
config.bind.unwrap_or(DEFAULT_BIND),
|
|
||||||
config.port.unwrap_or(DEFAULT_PORT),
|
|
||||||
),
|
|
||||||
None => (DEFAULT_BIND, DEFAULT_PORT),
|
|
||||||
};
|
|
||||||
|
|
||||||
// spawn thread that blocks on handling requests
|
// spawn thread that blocks on handling requests
|
||||||
// only mapping /metrics to a handler
|
// only mapping /metrics to a handler
|
||||||
thread::spawn(move || {
|
thread::spawn(move || {
|
||||||
|
@@ -48,19 +32,20 @@ pub fn serve_prometheus(config: Option<&PrometheusConfig>, lemmyC
           .app_data(web::Data::new(Arc::clone(&context)))
           .route("/metrics", web::get().to(metrics))
       })
-      .bind((bind, port as u16))
-      .unwrap_or_else(|_| panic!("Cannot bind to {}:{}", bind, port))
+      .bind((config.bind, config.port as u16))
+      .unwrap_or_else(|e| panic!("Cannot bind to {}:{}: {e}", config.bind, config.port))
       .run();
 
       if let Err(err) = server.await {
-        eprintln!("Prometheus server error: {}", err);
+        error!("Prometheus server error: {err}");
       }
     })
   });
+  Ok(())
 }
 
 // handler for the /metrics path
-async fn metrics(context: web::Data<Arc<PromContext>>) -> impl Responder {
+async fn metrics(context: web::Data<Arc<PromContext>>) -> LemmyResult<String> {
   // collect metrics
   collect_db_pool_metrics(&context).await;
 
|
||||||
|
|
||||||
// gather metrics from registry and encode in prometheus format
|
// gather metrics from registry and encode in prometheus format
|
||||||
let metric_families = prometheus::gather();
|
let metric_families = prometheus::gather();
|
||||||
encoder.encode(&metric_families, &mut buffer).unwrap();
|
encoder.encode(&metric_families, &mut buffer)?;
|
||||||
let output = String::from_utf8(buffer).unwrap();
|
let output = String::from_utf8(buffer)?;
|
||||||
|
|
||||||
HttpResponse::Ok().body(output)
|
Ok(output)
|
||||||
}
|
}
|
||||||
|
|
||||||
// create lemmy_db_pool_* metrics and register them with the default registry
|
// create lemmy_db_pool_* metrics and register them with the default registry
|
||||||
fn create_db_pool_metrics() -> DbPoolMetrics {
|
fn create_db_pool_metrics() -> LemmyResult<DbPoolMetrics> {
|
||||||
let metrics = DbPoolMetrics {
|
let metrics = DbPoolMetrics {
|
||||||
max_size: Gauge::with_opts(Opts::new(
|
max_size: Gauge::with_opts(Opts::new(
|
||||||
"lemmy_db_pool_max_connections",
|
"lemmy_db_pool_max_connections",
|
||||||
"Maximum number of connections in the pool",
|
"Maximum number of connections in the pool",
|
||||||
))
|
))?,
|
||||||
.unwrap(),
|
|
||||||
size: Gauge::with_opts(Opts::new(
|
size: Gauge::with_opts(Opts::new(
|
||||||
"lemmy_db_pool_connections",
|
"lemmy_db_pool_connections",
|
||||||
"Current number of connections in the pool",
|
"Current number of connections in the pool",
|
||||||
))
|
))?,
|
||||||
.unwrap(),
|
|
||||||
available: Gauge::with_opts(Opts::new(
|
available: Gauge::with_opts(Opts::new(
|
||||||
"lemmy_db_pool_available_connections",
|
"lemmy_db_pool_available_connections",
|
||||||
"Number of available connections in the pool",
|
"Number of available connections in the pool",
|
||||||
))
|
))?,
|
||||||
.unwrap(),
|
|
||||||
};
|
};
|
||||||
|
|
||||||
default_registry()
|
default_registry().register(Box::new(metrics.max_size.clone()))?;
|
||||||
.register(Box::new(metrics.max_size.clone()))
|
default_registry().register(Box::new(metrics.size.clone()))?;
|
||||||
.unwrap();
|
default_registry().register(Box::new(metrics.available.clone()))?;
|
||||||
default_registry()
|
|
||||||
.register(Box::new(metrics.size.clone()))
|
|
||||||
.unwrap();
|
|
||||||
default_registry()
|
|
||||||
.register(Box::new(metrics.available.clone()))
|
|
||||||
.unwrap();
|
|
||||||
|
|
||||||
metrics
|
Ok(metrics)
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn collect_db_pool_metrics(context: &PromContext) {
|
async fn collect_db_pool_metrics(context: &PromContext) {
|
||||||
|
|