Merge remote-tracking branch 'upstream/main' into migration-runner

Dull Bananas 2024-07-03 02:32:33 +00:00
commit de0163554b
17 changed files with 211 additions and 133 deletions

Cargo.lock (generated, 18 lines changed)
View file

@@ -978,9 +978,9 @@ dependencies = [
[[package]]
name = "clap"
-version = "4.5.7"
+version = "4.5.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5db83dced34638ad474f39f250d7fea9598bdd239eaced1bdf45d597da0f433f"
+checksum = "84b3edb18336f4df585bc9aa31dd99c036dfa5dc5e9a2939a722a188f3a8970d"
dependencies = [
"clap_builder",
"clap_derive",
@@ -988,9 +988,9 @@ dependencies = [
[[package]]
name = "clap_builder"
-version = "4.5.7"
+version = "4.5.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f7e204572485eb3fbf28f871612191521df159bc3e15a9f5064c66dba3a8c05f"
+checksum = "c1c09dd5ada6c6c78075d6fd0da3f90d8080651e2d6cc8eb2f1aaa4034ced708"
dependencies = [
"anstream",
"anstyle",
@@ -1000,9 +1000,9 @@ dependencies = [
[[package]]
name = "clap_derive"
-version = "4.5.5"
+version = "4.5.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c780290ccf4fb26629baa7a1081e68ced113f1d3ec302fa5948f1c381ebf06c6"
+checksum = "2bac35c6dafb060fd4d275d9a4ffae97917c13a6327903a8be2153cd964f7085"
dependencies = [
"heck 0.5.0",
"proc-macro2",
@@ -3159,7 +3159,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0c2a198fb6b0eada2a8df47933734e6d35d350665a33a3593d7164fa52c75c19"
dependencies = [
"cfg-if",
-"windows-targets 0.52.5",
+"windows-targets 0.48.5",
]
[[package]]
@@ -5078,9 +5078,9 @@ dependencies = [
[[package]]
name = "serde_json"
-version = "1.0.117"
+version = "1.0.120"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "455182ea6142b14f93f4bc5320a2b31c1f266b66a4a5c858b013302a5d8cbfc3"
+checksum = "4e0d21c9a8cae1235ad58a00c11cb40d4b1e5c784f1ef2c537876ed6ffd8b7c5"
dependencies = [
"indexmap 2.2.6",
"itoa",

View file

@@ -8,20 +8,18 @@ use lemmy_api_common::{
utils::{
check_community_user_action,
check_post_deleted_or_removed,
-generate_local_apub_endpoint,
get_url_blocklist,
is_mod_or_admin,
local_site_to_slur_regex,
process_markdown,
update_read_comments,
-EndpointType,
},
};
use lemmy_db_schema::{
impls::actor_language::default_post_language,
source::{
actor_language::CommunityLanguage,
-comment::{Comment, CommentInsertForm, CommentLike, CommentLikeForm, CommentUpdateForm},
+comment::{Comment, CommentInsertForm, CommentLike, CommentLikeForm},
comment_reply::{CommentReply, CommentReplyUpdateForm},
local_site::LocalSite,
person_mention::{PersonMention, PersonMentionUpdateForm},
@@ -126,25 +124,7 @@ pub async fn create_comment(
.await
.with_lemmy_type(LemmyErrorType::CouldntCreateComment)?;
-// Necessary to update the ap_id
let inserted_comment_id = inserted_comment.id;
-let protocol_and_hostname = context.settings().get_protocol_and_hostname();
-let apub_id = generate_local_apub_endpoint(
-EndpointType::Comment,
-&inserted_comment_id.to_string(),
-&protocol_and_hostname,
-)?;
-let updated_comment = Comment::update(
-&mut context.pool(),
-inserted_comment_id,
-&CommentUpdateForm {
-ap_id: Some(apub_id),
-..Default::default()
-},
-)
-.await
-.with_lemmy_type(LemmyErrorType::CouldntCreateComment)?;
// Scan the comment for user mentions, add those rows
let mentions = scrape_text_for_mentions(&content);
@@ -170,7 +150,7 @@ pub async fn create_comment(
.with_lemmy_type(LemmyErrorType::CouldntLikeComment)?;
ActivityChannel::submit_activity(
-SendActivityData::CreateComment(updated_comment.clone()),
+SendActivityData::CreateComment(inserted_comment.clone()),
&context,
)
.await?;

View file

@@ -8,13 +8,11 @@ use lemmy_api_common::{
send_activity::SendActivityData,
utils::{
check_community_user_action,
-generate_local_apub_endpoint,
get_url_blocklist,
honeypot_check,
local_site_to_slur_regex,
mark_post_as_read,
process_markdown_opt,
-EndpointType,
},
};
use lemmy_db_schema::{
@@ -23,7 +21,7 @@ use lemmy_db_schema::{
actor_language::CommunityLanguage,
community::Community,
local_site::LocalSite,
-post::{Post, PostInsertForm, PostLike, PostLikeForm, PostUpdateForm},
+post::{Post, PostInsertForm, PostLike, PostLikeForm},
},
traits::{Crud, Likeable},
utils::diesel_url_create,
@@ -147,26 +145,8 @@ pub async fn create_post(
.await
.with_lemmy_type(LemmyErrorType::CouldntCreatePost)?;
-let inserted_post_id = inserted_post.id;
-let protocol_and_hostname = context.settings().get_protocol_and_hostname();
-let apub_id = generate_local_apub_endpoint(
-EndpointType::Post,
-&inserted_post_id.to_string(),
-&protocol_and_hostname,
-)?;
-let updated_post = Post::update(
-&mut context.pool(),
-inserted_post_id,
-&PostUpdateForm {
-ap_id: Some(apub_id),
-..Default::default()
-},
-)
-.await
-.with_lemmy_type(LemmyErrorType::CouldntCreatePost)?;
generate_post_link_metadata(
-updated_post.clone(),
+inserted_post.clone(),
custom_thumbnail.map(Into::into),
|post| Some(SendActivityData::CreatePost(post)),
Some(local_site),
@@ -189,11 +169,11 @@ pub async fn create_post(
mark_post_as_read(person_id, post_id, &mut context.pool()).await?;
-if let Some(url) = updated_post.url.clone() {
+if let Some(url) = inserted_post.url.clone() {
if community.visibility == CommunityVisibility::Public {
spawn_try_task(async move {
let mut webmention =
-Webmention::new::<Url>(updated_post.ap_id.clone().into(), url.clone().into())?;
+Webmention::new::<Url>(inserted_post.ap_id.clone().into(), url.clone().into())?;
webmention.set_checked(true);
match webmention
.send()

View file

@@ -6,19 +6,17 @@ use lemmy_api_common::{
send_activity::{ActivityChannel, SendActivityData},
utils::{
check_person_block,
-generate_local_apub_endpoint,
get_interface_language,
get_url_blocklist,
local_site_to_slur_regex,
process_markdown,
send_email_to_user,
-EndpointType,
},
};
use lemmy_db_schema::{
source::{
local_site::LocalSite,
-private_message::{PrivateMessage, PrivateMessageInsertForm, PrivateMessageUpdateForm},
+private_message::{PrivateMessage, PrivateMessageInsertForm},
},
traits::Crud,
};
@@ -58,24 +56,6 @@ pub async fn create_private_message(
.await
.with_lemmy_type(LemmyErrorType::CouldntCreatePrivateMessage)?;
-let inserted_private_message_id = inserted_private_message.id;
-let protocol_and_hostname = context.settings().get_protocol_and_hostname();
-let apub_id = generate_local_apub_endpoint(
-EndpointType::PrivateMessage,
-&inserted_private_message_id.to_string(),
-&protocol_and_hostname,
-)?;
-PrivateMessage::update(
-&mut context.pool(),
-inserted_private_message.id,
-&PrivateMessageUpdateForm {
-ap_id: Some(apub_id),
-..Default::default()
-},
-)
-.await
-.with_lemmy_type(LemmyErrorType::CouldntCreatePrivateMessage)?;
let view = PrivateMessageView::read(&mut context.pool(), inserted_private_message.id)
.await?
.ok_or(LemmyErrorType::CouldntFindPrivateMessage)?;

View file

@@ -564,6 +564,10 @@ BEGIN
IF NOT (NEW.path ~ ('*.' || id)::lquery) THEN
NEW.path = NEW.path || id;
END IF;
+-- Set local ap_id
+IF NEW.local THEN
+NEW.ap_id = coalesce(NEW.ap_id, r.local_url ('/comment/' || id));
+END IF;
RETURN NEW;
END
$$;
@@ -573,3 +577,39 @@ CREATE TRIGGER change_values
FOR EACH ROW
EXECUTE FUNCTION r.comment_change_values ();
+CREATE FUNCTION r.post_change_values ()
+RETURNS TRIGGER
+LANGUAGE plpgsql
+AS $$
+BEGIN
+-- Set local ap_id
+IF NEW.local THEN
+NEW.ap_id = coalesce(NEW.ap_id, r.local_url ('/post/' || NEW.id::text));
+END IF;
+RETURN NEW;
+END
+$$;
+CREATE TRIGGER change_values
+BEFORE INSERT ON post
+FOR EACH ROW
+EXECUTE FUNCTION r.post_change_values ();
+CREATE FUNCTION r.private_message_change_values ()
+RETURNS TRIGGER
+LANGUAGE plpgsql
+AS $$
+BEGIN
+-- Set local ap_id
+IF NEW.local THEN
+NEW.ap_id = coalesce(NEW.ap_id, r.local_url ('/private_message/' || NEW.id::text));
+END IF;
+RETURN NEW;
+END
+$$;
+CREATE TRIGGER change_values
+BEFORE INSERT ON private_message
+FOR EACH ROW
+EXECUTE FUNCTION r.private_message_change_values ();
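Taken together with the Rust changes above, these BEFORE INSERT triggers replace the old insert-then-update round trip: a locally created row can be inserted without an ap_id, and the trigger derives one from the generated id. A minimal sketch of the intended effect, with made-up ids and hostname, assuming the ap_id column default has been dropped (see the migrations below) so the coalesce sees NULL, and that lemmy.protocol_and_hostname is set on the session as done in the connection setup:

-- Illustrative only: ids, the column subset shown, and the hostname are hypothetical.
SET lemmy.protocol_and_hostname = 'https://lemmy-alpha';

INSERT INTO post (name, creator_id, community_id, local)
    VALUES ('hello world', 1, 1, TRUE)
RETURNING
    ap_id;
-- => https://lemmy-alpha/post/<generated id>, filled in by r.post_change_values ()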

View file

@@ -8,7 +8,7 @@ CREATE FUNCTION r.controversy_rank (upvotes numeric, downvotes numeric)
0
ELSE
(
-upvotes + downvotes) * CASE WHEN upvotes > downvotes THEN
+upvotes + downvotes) ^ CASE WHEN upvotes > downvotes THEN
downvotes::float / upvotes::float
ELSE
upvotes::float / downvotes::float
@@ -57,6 +57,13 @@ BEGIN
END;
$$;
+CREATE FUNCTION r.local_url (url_path text)
+RETURNS text
+LANGUAGE sql
+STABLE PARALLEL SAFE RETURN (
+current_setting('lemmy.protocol_and_hostname') || url_path
+);
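For reference, a quick sketch of how this helper resolves once the session variable is in place (the hostname value is illustrative; in practice it is set via set_config during connection setup, as shown in the db_schema utils changes further down):

SET lemmy.protocol_and_hostname = 'https://lemmy-alpha';

SELECT
    r.local_url ('/comment/42');
-- => 'https://lemmy-alpha/comment/42'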
-- This function creates statement-level triggers for all operation types. It's designed this way
-- because of these limitations:
-- * A trigger that uses transition tables can only handle 1 operation type.

View file

@@ -223,6 +223,7 @@ mod tests {
use diesel_ltree::Ltree;
use pretty_assertions::assert_eq;
use serial_test::serial;
+use url::Url;
#[tokio::test]
#[serial]
@@ -273,7 +274,12 @@ mod tests {
path: Ltree(format!("0.{}", inserted_comment.id)),
published: inserted_comment.published,
updated: None,
-ap_id: inserted_comment.ap_id.clone(),
+ap_id: Url::parse(&format!(
+"https://lemmy-alpha/comment/{}",
+inserted_comment.id
+))
+.unwrap()
+.into(),
distinguished: false,
local: true,
language_id: LanguageId::default(),

View file

@@ -390,6 +390,7 @@ mod tests {
use pretty_assertions::assert_eq;
use serial_test::serial;
use std::collections::HashSet;
+use url::Url;
#[tokio::test]
#[serial]
@@ -447,7 +448,9 @@ mod tests {
embed_description: None,
embed_video_url: None,
thumbnail_url: None,
-ap_id: inserted_post.ap_id.clone(),
+ap_id: Url::parse(&format!("https://lemmy-alpha/post/{}", inserted_post.id))
+.unwrap()
+.into(),
local: true,
language_id: Default::default(),
featured_community: false,

View file

@@ -100,6 +100,7 @@ mod tests {
};
use pretty_assertions::assert_eq;
use serial_test::serial;
+use url::Url;
#[tokio::test]
#[serial]
@@ -138,7 +139,12 @@ mod tests {
read: false,
updated: None,
published: inserted_private_message.published,
-ap_id: inserted_private_message.ap_id.clone(),
+ap_id: Url::parse(&format!(
+"https://lemmy-alpha/private_message/{}",
+inserted_private_message.id
+))
+.unwrap()
+.into(),
local: true,
};

View file

@@ -85,12 +85,6 @@ impl fmt::Display for PrivateMessageId {
/// The person mention id.
pub struct PersonMentionId(i32);
-#[derive(Debug, Copy, Clone, Hash, Eq, PartialEq, Serialize, Deserialize, Default)]
-#[cfg_attr(feature = "full", derive(DieselNewType, TS))]
-#[cfg_attr(feature = "full", ts(export))]
-/// The person block id.
-pub struct PersonBlockId(i32);
#[derive(Debug, Copy, Clone, Hash, Eq, PartialEq, Serialize, Deserialize, Default)]
#[cfg_attr(feature = "full", derive(DieselNewType, TS))]
#[cfg_attr(feature = "full", ts(export))]

View file

@@ -31,7 +31,8 @@ use diesel_async::{
AsyncDieselConnectionManager,
ManagerConfig,
},
-SimpleAsyncConnection,
+AsyncConnection,
+RunQueryDsl,
};
use futures_util::{future::BoxFuture, Future, FutureExt};
use i_love_jesus::CursorKey;
@@ -333,6 +334,8 @@ pub fn diesel_url_create(opt: Option<&str>) -> LemmyResult<Option<DbUrl>> {
fn establish_connection(config: &str) -> BoxFuture<ConnectionResult<AsyncPgConnection>> {
let fut = async {
+// We only support TLS with sslmode=require currently
+let mut conn = if config.contains("sslmode=require") {
rustls::crypto::ring::default_provider()
.install_default()
.expect("Failed to install rustls crypto provider");
@@ -352,13 +355,27 @@ fn establish_connection(config: &str) -> BoxFuture<ConnectionResult<AsyncPgConne
error!("Database connection failed: {e}");
}
});
-let mut conn = AsyncPgConnection::try_from(client).await?;
-// * Change geqo_threshold back to default value if it was changed, so it's higher than the
-// collapse limits
-// * Change collapse limits from 8 to 11 so the query planner can find a better table join order
-// for more complicated queries
-conn
-.batch_execute("SET geqo_threshold=12;SET from_collapse_limit=11;SET join_collapse_limit=11;")
+AsyncPgConnection::try_from(client).await?
+} else {
+AsyncPgConnection::establish(config).await?
+};
+diesel::select((
+// Change geqo_threshold back to default value if it was changed, so it's higher than the
+// collapse limits
+functions::set_config("geqo_threshold", "12", false),
+// Change collapse limits from 8 to 11 so the query planner can find a better table join
+// order for more complicated queries
+functions::set_config("from_collapse_limit", "11", false),
+functions::set_config("join_collapse_limit", "11", false),
+// Set `lemmy.protocol_and_hostname` so triggers can use it
+functions::set_config(
+"lemmy.protocol_and_hostname",
+SETTINGS.get_protocol_and_hostname(),
+false,
+),
+))
+.execute(&mut conn)
.await
.map_err(ConnectionError::CouldntSetupConfiguration)?;
Ok(conn)
@@ -419,17 +436,11 @@ impl ServerCertVerifier for NoCertVerifier {
pub async fn build_db_pool() -> LemmyResult<ActualDbPool> {
let db_url = SETTINGS.get_database_url();
-// We only support TLS with sslmode=require currently
-let tls_enabled = db_url.contains("sslmode=require");
-let manager = if tls_enabled {
// diesel-async does not support any TLS connections out of the box, so we need to manually
// provide a setup function which handles creating the connection
let mut config = ManagerConfig::default();
config.custom_setup = Box::new(establish_connection);
-AsyncDieselConnectionManager::<AsyncPgConnection>::new_with_config(&db_url, config)
-} else {
-AsyncDieselConnectionManager::<AsyncPgConnection>::new(&db_url)
-};
+let manager = AsyncDieselConnectionManager::<AsyncPgConnection>::new_with_config(&db_url, config);
let pool = Pool::builder(manager)
.max_size(SETTINGS.database.pool_size)
.runtime(Runtime::Tokio1)
@@ -486,7 +497,7 @@ static EMAIL_REGEX: Lazy<Regex> = Lazy::new(|| {
});
pub mod functions {
-use diesel::sql_types::{BigInt, Text, Timestamptz};
+use diesel::sql_types::{BigInt, Bool, Text, Timestamptz};
sql_function! {
#[sql_name = "r.hot_rank"]
@@ -509,6 +520,8 @@ pub mod functions {
// really this function is variadic, this just adds the two-argument version
sql_function!(fn coalesce<T: diesel::sql_types::SqlType + diesel::sql_types::SingleValue>(x: diesel::sql_types::Nullable<T>, y: T) -> T);
+sql_function!(fn set_config(setting_name: Text, new_value: Text, is_local: Bool) -> Text);
}
pub const DELETED_REPLACEMENT_TEXT: &str = "*Permanently Deleted*";
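The set_config wrapper declared above binds to the standard Postgres set_config(setting_name text, new_value text, is_local boolean) function, so each entry in the diesel::select tuple is equivalent to a plain session-level call (the value shown is illustrative):

SELECT
    set_config ('lemmy.protocol_and_hostname', 'https://lemmy-alpha', FALSE);
-- is_local = FALSE keeps the setting for the whole session rather than only the current
-- transaction, which is what r.local_url () reads back via current_setting() in the triggers.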

View file

@@ -0,0 +1,9 @@
+ALTER TABLE comment
+ALTER COLUMN ap_id SET DEFAULT generate_unique_changeme ();
+ALTER TABLE post
+ALTER COLUMN ap_id SET DEFAULT generate_unique_changeme ();
+ALTER TABLE private_message
+ALTER COLUMN ap_id SET DEFAULT generate_unique_changeme ();

View file

@@ -0,0 +1,9 @@
+ALTER TABLE comment
+ALTER COLUMN ap_id DROP DEFAULT;
+ALTER TABLE post
+ALTER COLUMN ap_id DROP DEFAULT;
+ALTER TABLE private_message
+ALTER COLUMN ap_id DROP DEFAULT;

View file

@@ -0,0 +1,17 @@
+UPDATE
+post_aggregates
+SET
+controversy_rank = CASE WHEN downvotes <= 0
+OR upvotes <= 0 THEN
+0
+ELSE
+(upvotes + downvotes) * CASE WHEN upvotes > downvotes THEN
+downvotes::float / upvotes::float
+ELSE
+upvotes::float / downvotes::float
+END
+END
+WHERE
+upvotes > 0
+AND downvotes > 0;

View file

@@ -0,0 +1,17 @@
+UPDATE
+post_aggregates
+SET
+controversy_rank = CASE WHEN downvotes <= 0
+OR upvotes <= 0 THEN
+0
+ELSE
+(upvotes + downvotes) ^ CASE WHEN upvotes > downvotes THEN
+downvotes::float / upvotes::float
+ELSE
+upvotes::float / downvotes::float
+END
+END
+WHERE
+upvotes > 0
+AND downvotes > 0;
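These paired migrations recompute the stored controversy_rank values to match the operator fix in r.controversy_rank above (multiplication replaced by exponentiation). A rough sanity check with made-up vote counts shows why the operator matters:

-- Illustrative comparison only: 100 upvotes, 50 downvotes.
SELECT
    (100 + 50) * (50::float / 100::float) AS multiplied, -- 75, grows linearly with vote volume
    (100 + 50) ^ (50::float / 100::float) AS exponentiated; -- ~= 12.25, the corrected form used above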

View file

@@ -12,6 +12,6 @@ cargo +nightly fmt
taplo format
# Format sql files
-find migrations -type f -name '*.sql' -exec pg_format -i {} +
+find migrations crates/db_schema/replaceable_schema -type f -name '*.sql' -exec pg_format -i {} +
cargo clippy --workspace --fix --allow-staged --allow-dirty --tests --all-targets --all-features -- -D warnings

View file

@@ -55,7 +55,7 @@ use prometheus_metrics::serve_prometheus;
use reqwest_middleware::ClientBuilder;
use reqwest_tracing::TracingMiddleware;
use serde_json::json;
-use std::{env, ops::Deref};
+use std::{env, ops::Deref, time::Duration};
use tokio::signal::unix::SignalKind;
use tracing::subscriber::set_global_default;
use tracing_actix_web::TracingLogger;
@@ -64,6 +64,13 @@ use tracing_log::LogTracer;
use tracing_subscriber::{filter::Targets, layer::SubscriberExt, Layer, Registry};
use url::Url;
+/// Timeout for HTTP requests while sending activities. A longer timeout provides better
+/// compatibility with other ActivityPub software that might allocate more time for synchronous
+/// processing of incoming activities. This timeout should be slightly longer than the time we
+/// expect a remote server to wait before aborting processing on its own to account for delays from
+/// establishing the HTTP connection and sending the request itself.
+const ACTIVITY_SENDING_TIMEOUT: Duration = Duration::from_secs(125);
#[derive(Parser, Debug)]
#[command(
version,
@@ -216,8 +223,8 @@ pub async fn start_lemmy_server(args: CmdArgs) -> LemmyResult<()> {
serve_prometheus(prometheus, context.clone())?;
}
-let mut federation_config = FederationConfig::builder();
-federation_config
+let mut federation_config_builder = FederationConfig::builder();
+federation_config_builder
.domain(SETTINGS.hostname.clone())
.app_data(context.clone())
.client(client.clone())
@@ -227,9 +234,9 @@ pub async fn start_lemmy_server(args: CmdArgs) -> LemmyResult<()> {
.url_verifier(Box::new(VerifyUrlData(context.inner_pool().clone())));
if local_site.federation_signed_fetch {
let site: ApubSite = site_view.site.into();
-federation_config.signed_fetch_actor(&site);
+federation_config_builder.signed_fetch_actor(&site);
}
-let federation_config = federation_config.build().await?;
+let federation_config = federation_config_builder.build().await?;
MATCH_OUTGOING_ACTIVITIES
.set(Box::new(move |d, c| {
@@ -252,13 +259,23 @@ pub async fn start_lemmy_server(args: CmdArgs) -> LemmyResult<()> {
} else {
None
};
-let federate = (!args.disable_activity_sending).then(|| {
+// This FederationConfig instance is exclusively used to send activities, so we can safely
+// increase the timeout without affecting timeouts for resolving objects anywhere.
+let federation_sender_config = if !args.disable_activity_sending {
+let mut federation_sender_config = federation_config_builder.clone();
+federation_sender_config.request_timeout(ACTIVITY_SENDING_TIMEOUT);
+Some(federation_sender_config.build().await?)
+} else {
+None
+};
+let federate = federation_sender_config.map(|cfg| {
SendManager::run(
Opts {
process_index: args.federate_process_index,
process_count: args.federate_process_count,
},
-federation_config,
+cfg,
)
});
let mut interrupt = tokio::signal::unix::signal(SignalKind::interrupt())?;