Mirror of https://github.com/LemmyNet/lemmy.git, synced 2024-11-22 12:21:18 +00:00
Persistent, performant, reliable federation queue (#3605)
* persistent activity queue
* fixes
* fixes
* make federation workers function callable from outside
* log federation instances
* dead instance detection not needed here
* taplo fmt
* split federate bin/lib
* minor fix
* better logging
* log
* create struct to hold cancellable task for readability
* use boxfuture for readability
* reset submodule
* fix
* fix lint
* swap
* remove json column, use separate array columns instead
* some review comments
* make worker a struct for readability
* minor readability
* add local filter to community follower view
* remove separate lemmy_federate entry point
* fix remaining duration
* address review comments mostly
* fix lint
* upgrade activitypub-fed to simpler interface
* fix sql format
* increase delays a bit
* fixes after merge
* remove selectable
* fix instance selectable
* add comment
* start federation based on latest id at the time
* rename federate process args
* dead instances in one query
* filter follow+report activities by local
* remove synchronous federation, remove activity sender queue
* lint
* fix federation tests by waiting for results to change
* fix fed test
* fix comment report
* wait some more
* Apply suggestions from code review
  Co-authored-by: SorteKanin <sortekanin@gmail.com>
* fix most remaining tests
* wait until private messages
* fix community tests
* fix community tests
* move arg parse
* use instance_id instead of domain in federation_queue_state table

---------

Co-authored-by: Dessalines <dessalines@users.noreply.github.com>
Co-authored-by: SorteKanin <sortekanin@gmail.com>
parent 3b67642ec2
commit 375d9a2a3c

61 changed files with 1878 additions and 377 deletions
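The commit message above replaces the old in-memory activity sender queue with a persistent, per-instance federation queue whose progress lives in a federation_queue_state table keyed by instance_id. The implementation is the new Rust lemmy_federate crate added below; the block that follows is only a rough TypeScript model of the per-instance state such a queue has to persist, written in TypeScript because that is the language of the hand-written code in this diff. Apart from instance_id, which the commit message names, every field and function in the sketch is an assumption, not the real schema or algorithm.

```typescript
// Illustrative model only; the real state lives in the Rust lemmy_federate crate
// and the federation_queue_state table, and very likely differs in detail.
interface FederationQueueState {
  instance_id: number;          // the commit switches this key from domain to instance_id
  last_successful_id: number;   // assumed: highest activity id confirmed delivered
  fail_count: number;           // assumed: consecutive delivery failures, drives backoff
  last_retry: Date;             // assumed: when the last attempt was made
}

// Assumed shape of one delivery round: try everything newer than the persisted
// cursor, then persist the new cursor so a restart resumes where it left off.
async function deliverPending(
  state: FederationQueueState,
  fetchNewerThan: (id: number) => Promise<number[]>,
  send: (activityId: number) => Promise<void>,
): Promise<FederationQueueState> {
  for (const id of await fetchNewerThan(state.last_successful_id)) {
    try {
      await send(id);
      state = { ...state, last_successful_id: id, fail_count: 0 };
    } catch {
      return { ...state, fail_count: state.fail_count + 1, last_retry: new Date() };
    }
  }
  return state;
}
```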
Cargo.lock (generated), 190 lines changed
@@ -10,9 +10,9 @@ checksum = "fe438c63458706e03479442743baae6c88256498e6431708f6dfc520a26515d3"
 
 [[package]]
 name = "activitypub_federation"
-version = "0.5.0-beta.2"
+version = "0.5.0-beta.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8210e0ac4675753f9288c1102fb4940b22e5868308383c286b07eb63f3ff4c65"
+checksum = "509cbafa1b42e01b7ca76c26298814a6638825df4fd67aef2f4c9d36a39c2b6d"
 dependencies = [
  "activitystreams-kinds",
  "actix-web",
@@ -24,12 +24,14 @@ dependencies = [
  "derive_builder",
  "dyn-clone",
  "enum_delegate",
+ "futures",
  "futures-core",
  "http",
  "http-signature-normalization",
  "http-signature-normalization-reqwest",
  "httpdate",
  "itertools 0.10.5",
+ "moka",
  "once_cell",
  "openssl",
  "pin-project-lite",
@@ -401,6 +403,54 @@ dependencies = [
  "libc",
 ]
 
+[[package]]
+name = "anstream"
+version = "0.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b1f58811cfac344940f1a400b6e6231ce35171f614f26439e80f8c1465c5cc0c"
+dependencies = [
+ "anstyle",
+ "anstyle-parse",
+ "anstyle-query",
+ "anstyle-wincon",
+ "colorchoice",
+ "utf8parse",
+]
+
+[[package]]
+name = "anstyle"
+version = "1.0.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "15c4c2c83f81532e5845a733998b6971faca23490340a418e9b72a3ec9de12ea"
+
+[[package]]
+name = "anstyle-parse"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "938874ff5980b03a87c5524b3ae5b59cf99b1d6bc836848df7bc5ada9643c333"
+dependencies = [
+ "utf8parse",
+]
+
+[[package]]
+name = "anstyle-query"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5ca11d4be1bab0c8bc8734a9aa7bf4ee8316d462a08c6ac5052f888fef5b494b"
+dependencies = [
+ "windows-sys 0.48.0",
+]
+
+[[package]]
+name = "anstyle-wincon"
+version = "2.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "58f54d10c6dfa51283a066ceab3ec1ab78d13fae00aa49243a45e4571fb79dfd"
+dependencies = [
+ "anstyle",
+ "windows-sys 0.48.0",
+]
+
 [[package]]
 name = "anyhow"
 version = "1.0.71"
@@ -898,40 +948,44 @@ dependencies = [
 
 [[package]]
 name = "clap"
-version = "4.0.32"
+version = "4.4.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a7db700bc935f9e43e88d00b0850dae18a63773cfbec6d8e070fccf7fef89a39"
+checksum = "1d5f1946157a96594eb2d2c10eb7ad9a2b27518cb3000209dec700c35df9197d"
 dependencies = [
- "bitflags 1.3.2",
+ "clap_builder",
  "clap_derive",
- "clap_lex",
- "is-terminal",
  "once_cell",
+]
+
+[[package]]
+name = "clap_builder"
+version = "4.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "78116e32a042dd73c2901f0dc30790d20ff3447f3e3472fad359e8c3d282bcd6"
+dependencies = [
+ "anstream",
+ "anstyle",
+ "clap_lex",
  "strsim",
- "termcolor",
 ]
 
 [[package]]
 name = "clap_derive"
-version = "4.0.21"
+version = "4.4.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0177313f9f02afc995627906bbd8967e2be069f5261954222dac78290c2b9014"
+checksum = "c9fd1a5729c4548118d7d70ff234a44868d00489a4b6597b0b020918a0e91a1a"
 dependencies = [
  "heck",
- "proc-macro-error",
  "proc-macro2",
  "quote",
- "syn 1.0.103",
+ "syn 2.0.31",
 ]
 
 [[package]]
 name = "clap_lex"
-version = "0.3.0"
+version = "0.5.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0d4198f73e42b4936b35b5bb248d81d2b595ecb170da0bac7655c54eedfa8da8"
+checksum = "cd7cc57abe963c6d3b9d8be5b06ba7c8957a930305ca90304f24ef040aa6f961"
-dependencies = [
- "os_str_bytes",
-]
 
 [[package]]
 name = "clokwerk"
@@ -985,6 +1039,12 @@ version = "1.1.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "3d7b894f5411737b7867f4827955924d7c254fc9f4d91a6aad6b097804b1018b"
 
+[[package]]
+name = "colorchoice"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "acbf1af155f9b9ef647e42cdc158db4b64a1b61f743629225fde6f3e0be2a7c7"
+
 [[package]]
 name = "combine"
 version = "4.6.6"
@@ -2111,15 +2171,6 @@ dependencies = [
  "libc",
 ]
 
-[[package]]
-name = "hermit-abi"
-version = "0.2.6"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ee512640fe35acbfb4bb779db6f0d80704c2cacfa2e39b601ef3e3f47d1ae4c7"
-dependencies = [
- "libc",
-]
-
 [[package]]
 name = "hermit-abi"
 version = "0.3.2"
@@ -2454,18 +2505,6 @@ version = "2.5.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "879d54834c8c76457ef4293a689b2a8c59b076067ad77b15efafbb05f92a592b"
 
-[[package]]
-name = "is-terminal"
-version = "0.4.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "28dfb6c8100ccc63462345b67d1bbc3679177c75ee4bf59bf29c8b1d110b8189"
-dependencies = [
- "hermit-abi 0.2.6",
- "io-lifetimes",
- "rustix 0.36.5",
- "windows-sys 0.42.0",
-]
-
 [[package]]
 name = "itertools"
 version = "0.10.5"
@@ -2742,6 +2781,7 @@ dependencies = [
 name = "lemmy_db_views_actor"
 version = "0.18.1"
 dependencies = [
+ "chrono",
  "diesel",
  "diesel-async",
  "lemmy_db_schema",
@@ -2762,6 +2802,38 @@ dependencies = [
  "ts-rs",
 ]
 
+[[package]]
+name = "lemmy_federate"
+version = "0.18.1"
+dependencies = [
+ "activitypub_federation",
+ "anyhow",
+ "async-trait",
+ "bytes",
+ "chrono",
+ "diesel",
+ "diesel-async",
+ "enum_delegate",
+ "futures",
+ "lemmy_api_common",
+ "lemmy_apub",
+ "lemmy_db_schema",
+ "lemmy_db_views_actor",
+ "lemmy_utils",
+ "moka",
+ "once_cell",
+ "openssl",
+ "reqwest",
+ "reqwest-middleware",
+ "reqwest-tracing",
+ "serde",
+ "serde_json",
+ "tokio",
+ "tokio-util",
+ "tracing",
+ "tracing-subscriber",
+]
+
 [[package]]
 name = "lemmy_routes"
 version = "0.18.1"
@@ -2796,6 +2868,7 @@ dependencies = [
  "actix-web",
  "actix-web-prom",
  "chrono",
+ "clap",
  "clokwerk",
  "console-subscriber",
  "diesel",
@@ -2807,6 +2880,7 @@ dependencies = [
  "lemmy_api_crud",
  "lemmy_apub",
  "lemmy_db_schema",
+ "lemmy_federate",
  "lemmy_routes",
  "lemmy_utils",
  "opentelemetry 0.19.0",
@@ -3498,12 +3572,6 @@ dependencies = [
  "hashbrown 0.12.3",
 ]
 
-[[package]]
-name = "os_str_bytes"
-version = "6.4.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9b7820b9daea5457c9f21c69448905d723fbd21136ccf521748f23fd49e723ee"
-
 [[package]]
 name = "overload"
 version = "0.1.1"
@@ -3881,30 +3949,6 @@ version = "0.1.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "925383efa346730478fb4838dbe9137d2a47675ad789c546d150a6e1dd4ab31c"
 
-[[package]]
-name = "proc-macro-error"
-version = "1.0.4"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c"
-dependencies = [
- "proc-macro-error-attr",
- "proc-macro2",
- "quote",
- "syn 1.0.103",
- "version_check",
-]
-
-[[package]]
-name = "proc-macro-error-attr"
-version = "1.0.4"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869"
-dependencies = [
- "proc-macro2",
- "quote",
- "version_check",
-]
-
 [[package]]
 name = "proc-macro2"
 version = "1.0.64"
@@ -5222,9 +5266,9 @@ dependencies = [
 
 [[package]]
 name = "tokio-util"
-version = "0.7.4"
+version = "0.7.8"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0bb2e075f03b3d66d8d8785356224ba688d2906a371015e225beeb65ca92c740"
+checksum = "806fe8c2c87eccc8b3267cbae29ed3ab2d0bd37fca70ab622e46aaa9375ddb7d"
 dependencies = [
  "bytes",
  "futures-core",
@@ -5710,6 +5754,12 @@ version = "0.1.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "5190c9442dcdaf0ddd50f37420417d219ae5261bbf5db120d0f9bab996c9cba1"
 
+[[package]]
+name = "utf8parse"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a"
+
 [[package]]
 name = "uuid"
 version = "1.4.0"
@@ -54,6 +54,7 @@ members = [
   "crates/db_views_actor",
   "crates/db_views_actor",
   "crates/routes",
+  "crates/federate",
 ]
 
 [workspace.dependencies]
@@ -67,7 +68,7 @@ lemmy_routes = { version = "=0.18.1", path = "./crates/routes" }
 lemmy_db_views = { version = "=0.18.1", path = "./crates/db_views" }
 lemmy_db_views_actor = { version = "=0.18.1", path = "./crates/db_views_actor" }
 lemmy_db_views_moderator = { version = "=0.18.1", path = "./crates/db_views_moderator" }
-activitypub_federation = { version = "0.5.0-beta.2", default-features = false, features = [
+activitypub_federation = { version = "0.5.0-beta.3", default-features = false, features = [
   "actix-web",
 ] }
 diesel = "2.1.0"
@@ -88,7 +89,6 @@ tracing-error = "0.2.0"
 tracing-log = "0.1.3"
 tracing-subscriber = { version = "0.3.17", features = ["env-filter"] }
 url = { version = "2.4.0", features = ["serde"] }
-url_serde = "0.2.0"
 reqwest = { version = "0.11.18", features = ["json", "blocking", "gzip"] }
 reqwest-middleware = "0.2.2"
 reqwest-tracing = "0.4.5"
@@ -119,7 +119,6 @@ futures = "0.3.28"
 http = "0.2.9"
 percent-encoding = "2.3.0"
 rosetta-i18n = "0.1.3"
-rand = "0.8.5"
 opentelemetry = { version = "0.19.0", features = ["rt-tokio"] }
 tracing-opentelemetry = { version = "0.19.0" }
 ts-rs = { version = "7.0.0", features = ["serde-compat", "chrono-impl"] }
@@ -167,3 +166,5 @@ tokio-postgres-rustls = { workspace = true }
 chrono = { workspace = true }
 prometheus = { version = "0.13.3", features = ["process"], optional = true }
 actix-web-prom = { version = "0.6.0", optional = true }
+clap = { version = "4.3.19", features = ["derive"] }
+lemmy_federate = { version = "0.18.1", path = "crates/federate" }
@@ -4,7 +4,7 @@
 set -e
 
 export RUST_BACKTRACE=1
-export RUST_LOG="warn,lemmy_server=debug,lemmy_api=debug,lemmy_api_common=debug,lemmy_api_crud=debug,lemmy_apub=debug,lemmy_db_schema=debug,lemmy_db_views=debug,lemmy_db_views_actor=debug,lemmy_db_views_moderator=debug,lemmy_routes=debug,lemmy_utils=debug,lemmy_websocket=debug"
+export RUST_LOG="warn,lemmy_server=debug,lemmy_federate=debug,lemmy_api=debug,lemmy_api_common=debug,lemmy_api_crud=debug,lemmy_apub=debug,lemmy_db_schema=debug,lemmy_db_views=debug,lemmy_db_views_actor=debug,lemmy_db_views_moderator=debug,lemmy_routes=debug,lemmy_utils=debug,lemmy_websocket=debug"
 
 for INSTANCE in lemmy_alpha lemmy_beta lemmy_gamma lemmy_delta lemmy_epsilon; do
   echo "DB URL: ${LEMMY_DATABASE_URL} INSTANCE: $INSTANCE"
@@ -2,7 +2,6 @@
 set -e
 
 export LEMMY_DATABASE_URL=postgres://lemmy:password@localhost:5432
-export LEMMY_SYNCHRONOUS_FEDERATION=1 # currently this is true in debug by default, but still.
 pushd ..
 cargo build
 rm target/lemmy_server || true
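The second script change above drops LEMMY_SYNCHRONOUS_FEDERATION, so the API tests can no longer assume an activity has already arrived on the remote instance when the sending call returns. The test diffs that follow therefore import waitUntil and delay from the tests' shared module ("./shared") and poll until a predicate holds. That helper's implementation is not part of this diff; the block below is only a minimal sketch of a polling helper of the same shape, with the retry count and interval chosen arbitrarily.

```typescript
// Minimal sketch of a polling helper like the waitUntil used in these tests.
// The real implementation in the shared test module may differ in names and limits.
export function delay(millis = 500): Promise<void> {
  return new Promise(resolve => setTimeout(resolve, millis));
}

export async function waitUntil<T>(
  fetcher: () => Promise<T>,
  checker: (t: T) => boolean | undefined,
  retries = 60,      // assumption: bound the wait so a broken federation run fails loudly
  delayMs = 500,
): Promise<T> {
  let result = await fetcher();
  for (let i = 0; i < retries && !checker(result); i++) {
    await delay(delayMs);
    result = await fetcher();
  }
  if (!checker(result)) {
    throw new Error("waitUntil: condition not reached before timeout");
  }
  return result;
}
```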
@@ -32,6 +32,8 @@ import {
   getPersonDetails,
   getReplies,
   getUnreadCount,
+  waitUntil,
+  delay,
 } from "./shared";
 import { CommentView } from "lemmy-js-client/dist/types/CommentView";
 
@@ -42,6 +44,8 @@ beforeAll(async () => {
   await unfollows();
   await followBeta(alpha);
   await followBeta(gamma);
+  // wait for FOLLOW_ADDITIONS_RECHECK_DELAY
+  await delay(2000);
   let betaCommunity = (await resolveBetaCommunity(alpha)).community;
   if (betaCommunity) {
     postOnAlphaRes = await createPost(alpha, betaCommunity.community.id);
@@ -75,7 +79,10 @@ test("Create a comment", async () => {
 
   // Make sure that comment is liked on beta
   let betaComment = (
-    await resolveComment(beta, commentRes.comment_view.comment)
+    await waitUntil(
+      () => resolveComment(beta, commentRes.comment_view.comment),
+      c => c.comment?.counts.score === 1,
+    )
   ).comment;
   expect(betaComment).toBeDefined();
   expect(betaComment?.community.local).toBe(true);
@@ -108,7 +115,11 @@ test("Update a comment", async () => {
 
   // Make sure that post is updated on beta
   let betaCommentUpdated = (
-    await resolveComment(beta, commentRes.comment_view.comment)
+    await waitUntil(
+      () => resolveComment(beta, commentRes.comment_view.comment),
+      c =>
+        c.comment?.comment.content === "A jest test federated comment update",
+    )
   ).comment;
   assertCommentFederation(betaCommentUpdated, updateCommentRes.comment_view);
 });
@@ -121,16 +132,18 @@ test("Delete a comment", async () => {
   let betaComment = (
     await resolveComment(beta, commentRes.comment_view.comment)
   ).comment;
 
   if (!betaComment) {
     throw "Missing beta comment before delete";
   }
 
   // Find the comment on remote instance gamma
   let gammaComment = (
-    await resolveComment(gamma, commentRes.comment_view.comment)
+    await waitUntil(
+      () =>
+        resolveComment(gamma, commentRes.comment_view.comment).catch(e => e),
+      r => r !== "couldnt_find_object",
+    )
   ).comment;
 
   if (!gammaComment) {
     throw "Missing gamma comment (remote-home-remote replication) before delete";
   }
@@ -143,14 +156,16 @@ test("Delete a comment", async () => {
   expect(deleteCommentRes.comment_view.comment.deleted).toBe(true);
 
   // Make sure that comment is undefined on beta
-  await expect(
-    resolveComment(beta, commentRes.comment_view.comment),
-  ).rejects.toBe("couldnt_find_object");
+  await waitUntil(
+    () => resolveComment(beta, commentRes.comment_view.comment).catch(e => e),
+    e => e === "couldnt_find_object",
+  );
 
   // Make sure that comment is undefined on gamma after delete
-  await expect(
-    resolveComment(gamma, commentRes.comment_view.comment),
-  ).rejects.toBe("couldnt_find_object");
+  await waitUntil(
+    () => resolveComment(gamma, commentRes.comment_view.comment).catch(e => e),
+    e => e === "couldnt_find_object",
+  );
 
   // Test undeleting the comment
   let undeleteCommentRes = await deleteComment(
@@ -162,7 +177,10 @@ test("Delete a comment", async () => {
 
   // Make sure that comment is undeleted on beta
   let betaComment2 = (
-    await resolveComment(beta, commentRes.comment_view.comment)
+    await waitUntil(
+      () => resolveComment(beta, commentRes.comment_view.comment).catch(e => e),
+      e => e !== "couldnt_find_object",
+    )
   ).comment;
   expect(betaComment2?.comment.deleted).toBe(false);
   assertCommentFederation(betaComment2, undeleteCommentRes.comment_view);
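The delete and undelete hunks above wait for an object to disappear from (or reappear on) a remote instance, where the resolve call rejects with "couldnt_find_object" instead of returning a value. Wrapping the call in .catch(e => e) turns that rejection into an ordinary value the predicate can inspect. A small self-contained sketch of both directions of that pattern, using stand-in declarations for the real helpers:

```typescript
// The shapes and helpers here are stand-ins; the real resolveComment and the
// instance handles come from the tests' shared module ("./shared").
declare function resolveComment(instance: unknown, comment: unknown): Promise<unknown>;
declare function waitUntil<T>(f: () => Promise<T>, check: (t: T) => boolean): Promise<T>;

// Wait until the remote copy has been deleted: the "couldnt_find_object" rejection,
// converted to a value by .catch(e => e), is the success condition.
async function waitUntilGone(instance: unknown, comment: unknown): Promise<void> {
  await waitUntil(
    () => resolveComment(instance, comment).catch(e => e),
    e => e === "couldnt_find_object",
  );
}

// The inverse direction: keep polling while the remote still answers "not found".
async function waitUntilReplicated(instance: unknown, comment: unknown): Promise<unknown> {
  return waitUntil(
    () => resolveComment(instance, comment).catch(e => e),
    r => r !== "couldnt_find_object",
  );
}
```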
@@ -257,8 +275,12 @@ test("Unlike a comment", async () => {
   // Lemmy automatically creates 1 like (vote) by author of comment.
   // Make sure that comment is liked (voted up) on gamma, downstream peer
   // This is testing replication from remote-home-remote (alpha-beta-gamma)
 
   let gammaComment1 = (
-    await resolveComment(gamma, commentRes.comment_view.comment)
+    await waitUntil(
+      () => resolveComment(gamma, commentRes.comment_view.comment),
+      c => c.comment?.counts.score === 1,
+    )
   ).comment;
   expect(gammaComment1).toBeDefined();
   expect(gammaComment1?.community.local).toBe(false);
@@ -270,7 +292,10 @@ test("Unlike a comment", async () => {
 
   // Make sure that comment is unliked on beta
   let betaComment = (
-    await resolveComment(beta, commentRes.comment_view.comment)
+    await waitUntil(
+      () => resolveComment(beta, commentRes.comment_view.comment),
+      c => c.comment?.counts.score === 0,
+    )
   ).comment;
   expect(betaComment).toBeDefined();
   expect(betaComment?.community.local).toBe(true);
@@ -280,7 +305,10 @@ test("Unlike a comment", async () => {
   // Make sure that comment is unliked on gamma, downstream peer
   // This is testing replication from remote-home-remote (alpha-beta-gamma)
   let gammaComment = (
-    await resolveComment(gamma, commentRes.comment_view.comment)
+    await waitUntil(
+      () => resolveComment(gamma, commentRes.comment_view.comment),
+      c => c.comment?.counts.score === 0,
+    )
   ).comment;
   expect(gammaComment).toBeDefined();
   expect(gammaComment?.community.local).toBe(false);
@@ -290,7 +318,10 @@ test("Unlike a comment", async () => {
 
 test("Federated comment like", async () => {
   let commentRes = await createComment(alpha, postOnAlphaRes.post_view.post.id);
+  await waitUntil(
+    () => resolveComment(beta, commentRes.comment_view.comment),
+    c => c.comment?.counts.score === 1,
+  );
   // Find the comment on beta
   let betaComment = (
     await resolveComment(beta, commentRes.comment_view.comment)
@@ -304,11 +335,20 @@ test("Federated comment like", async () => {
   expect(like.comment_view.counts.score).toBe(2);
 
   // Get the post from alpha, check the likes
-  let postComments = await getComments(alpha, postOnAlphaRes.post_view.post.id);
+  let postComments = await waitUntil(
+    () => getComments(alpha, postOnAlphaRes.post_view.post.id),
+    c => c.comments[0].counts.score === 2,
+  );
   expect(postComments.comments[0].counts.score).toBe(2);
 });
 
 test("Reply to a comment from another instance, get notification", async () => {
+  let betaCommunity = (await resolveBetaCommunity(alpha)).community;
+  if (!betaCommunity) {
+    throw "Missing beta community";
+  }
+  const postOnAlphaRes = await createPost(alpha, betaCommunity.community.id);
+
   // Create a root-level trunk-branch comment on alpha
   let commentRes = await createComment(alpha, postOnAlphaRes.post_view.post.id);
   // find that comment id on beta
@@ -338,11 +378,15 @@ test("Reply to a comment from another instance, get notification", async () => {
   // TODO not sure why, but a searchComment back to alpha, for the ap_id of betas
   // comment, isn't working.
   // let searchAlpha = await searchComment(alpha, replyRes.comment);
-  let postComments = await getComments(alpha, postOnAlphaRes.post_view.post.id);
-  // Note: in Lemmy 0.18.3 pre-release this is coming up 7
+  let postComments = await waitUntil(
+    () => getComments(alpha, postOnAlphaRes.post_view.post.id),
+    pc => pc.comments.length >= 2,
+  );
+  // Note: this test fails when run twice and this count will differ
   expect(postComments.comments.length).toBeGreaterThanOrEqual(2);
   let alphaComment = postComments.comments[0];
   expect(alphaComment.comment.content).toBeDefined();
 
   expect(getCommentParentId(alphaComment.comment)).toBe(
     postComments.comments[1].comment.id,
   );
@@ -352,7 +396,10 @@ test("Reply to a comment from another instance, get notification", async () => {
   assertCommentFederation(alphaComment, replyRes.comment_view);
 
   // Did alpha get notified of the reply from beta?
-  let alphaUnreadCountRes = await getUnreadCount(alpha);
+  let alphaUnreadCountRes = await waitUntil(
+    () => getUnreadCount(alpha),
+    e => e.replies >= 1,
+  );
   expect(alphaUnreadCountRes.replies).toBe(1);
 
   // check inbox of replies on alpha, fetching read/unread both
@@ -394,7 +441,10 @@ test("Mention beta from alpha", async () => {
   expect(betaPost.post.name).toBe(postOnAlphaRes.post_view.post.name);
 
   // Make sure that both new comments are seen on beta and have parent/child relationship
-  let betaPostComments = await getComments(beta, betaPost.post.id);
+  let betaPostComments = await waitUntil(
+    () => getComments(beta, betaPost!.post.id),
+    c => c.comments[1].counts.score === 1,
+  );
   expect(betaPostComments.comments.length).toBeGreaterThanOrEqual(2);
   // the trunk-branch root comment will be older than the mention reply comment, so index 1
   let betaRootComment = betaPostComments.comments[1];
@@ -462,9 +512,9 @@ test("A and G subscribe to B (center) A posts, G mentions B, it gets announced t
   expect(commentRes.comment_view.counts.score).toBe(1);
 
   // Make sure alpha sees it
-  let alphaPostComments2 = await getComments(
-    alpha,
-    alphaPost.post_view.post.id,
+  let alphaPostComments2 = await waitUntil(
+    () => getComments(alpha, alphaPost.post_view.post.id),
+    e => !!e.comments[0],
   );
   expect(alphaPostComments2.comments[0].comment.content).toBe(commentContent);
   expect(alphaPostComments2.comments[0].community.local).toBe(true);
@@ -476,10 +526,19 @@ test("A and G subscribe to B (center) A posts, G mentions B, it gets announced t
   );
 
   // Make sure beta has mentions
-  let mentionsRes = await getMentions(beta);
-  expect(mentionsRes.mentions[0].comment.content).toBe(commentContent);
-  expect(mentionsRes.mentions[0].community.local).toBe(false);
-  expect(mentionsRes.mentions[0].creator.local).toBe(false);
+  let relevantMention = await waitUntil(
+    () =>
+      getMentions(beta).then(m =>
+        m.mentions.find(
+          m => m.comment.ap_id === commentRes.comment_view.comment.ap_id,
+        ),
+      ),
+    e => !!e,
+  );
+  if (!relevantMention) throw Error("could not find mention");
+  expect(relevantMention.comment.content).toBe(commentContent);
+  expect(relevantMention.community.local).toBe(false);
+  expect(relevantMention.creator.local).toBe(false);
   // TODO this is failing because fetchInReplyTos aren't getting score
   // expect(mentionsRes.mentions[0].score).toBe(1);
 });
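The mention assertions above no longer trust mentions[0] to be the mention that was just created; they poll getMentions and search for the entry whose comment.ap_id matches the new comment, which keeps the test independent of ordering and of leftovers from earlier runs. A sketch of that find-by-ap_id pattern, with stand-in types for the real helpers:

```typescript
// Stand-in types; the real getMentions helper and mention view shape come from
// the shared test module and lemmy-js-client respectively.
interface MentionView {
  comment: { ap_id: string; content: string };
}
declare function getMentions(instance: unknown): Promise<{ mentions: MentionView[] }>;
declare function waitUntil<T>(f: () => Promise<T>, check: (t: T) => boolean): Promise<T>;

async function findMentionByApId(instance: unknown, apId: string): Promise<MentionView> {
  // Poll until the specific mention shows up, then return it for further assertions.
  const mention = await waitUntil(
    () => getMentions(instance).then(m => m.mentions.find(x => x.comment.ap_id === apId)),
    found => !!found,
  );
  if (!mention) throw Error("could not find mention");
  return mention;
}
```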
@@ -493,6 +552,16 @@ test("Check that activity from another instance is sent to third instance", asyn
   let gammaFollow = await followBeta(gamma);
   expect(gammaFollow.community_view.community.local).toBe(false);
   expect(gammaFollow.community_view.community.name).toBe("main");
+  await waitUntil(
+    () => resolveBetaCommunity(alpha),
+    c => c.community?.subscribed === "Subscribed",
+  );
+  await waitUntil(
+    () => resolveBetaCommunity(gamma),
+    c => c.community?.subscribed === "Subscribed",
+  );
+  // FOLLOW_ADDITIONS_RECHECK_DELAY
+  await delay(2000);
 
   // Create a post on beta
   let betaPost = await createPost(beta, 2);
@@ -525,7 +594,10 @@ test("Check that activity from another instance is sent to third instance", asyn
   expect(commentRes.comment_view.counts.score).toBe(1);
 
   // Make sure alpha sees it
-  let alphaPostComments2 = await getComments(alpha, alphaPost.post.id);
+  let alphaPostComments2 = await waitUntil(
+    () => getComments(alpha, alphaPost!.post.id),
+    e => !!e.comments[0],
+  );
   expect(alphaPostComments2.comments[0].comment.content).toBe(commentContent);
   expect(alphaPostComments2.comments[0].community.local).toBe(false);
   expect(alphaPostComments2.comments[0].creator.local).toBe(false);
@@ -595,7 +667,12 @@ test("Fetch in_reply_tos: A is unsubbed from B, B makes a post, and some embedde
   }
 
   let alphaPost = await getPost(alpha, alphaPostB.post.id);
-  let alphaPostComments = await getComments(alpha, alphaPostB.post.id);
+  let alphaPostComments = await waitUntil(
+    () => getComments(alpha, alphaPostB!.post.id),
+    c =>
+      c.comments[1]?.comment.content ===
+      parentCommentRes.comment_view.comment.content,
+  );
   expect(alphaPost.post_view.post.name).toBeDefined();
   assertCommentFederation(
     alphaPostComments.comments[1],
@@ -632,8 +709,12 @@ test("Report a comment", async () => {
     await reportComment(alpha, alphaComment.id, randomString(10))
   ).comment_report_view.comment_report;
 
-  let betaReport = (await listCommentReports(beta)).comment_reports[0]
-    .comment_report;
+  let betaReport = (
+    await waitUntil(
+      () => listCommentReports(beta),
+      e => !!e.comment_reports[0],
+    )
+  ).comment_reports[0].comment_report;
   expect(betaReport).toBeDefined();
   expect(betaReport.resolved).toBe(false);
   expect(betaReport.original_comment_text).toBe(
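The third-instance test above follows the beta community, waits until both alpha and gamma report the subscription as "Subscribed", and then still sleeps two seconds for FOLLOW_ADDITIONS_RECHECK_DELAY, presumably because the new federation workers only pick up new followers on a periodic recheck that the API does not expose. The helper below bundles those steps purely for illustration; it does not exist in the repository, and only followBeta, resolveBetaCommunity, waitUntil and delay correspond to names actually used in the diff.

```typescript
// Hypothetical convenience wrapper, not something this commit adds.
declare function followBeta(instance: unknown): Promise<unknown>;
declare function resolveBetaCommunity(
  instance: unknown,
): Promise<{ community?: { subscribed: string } }>;
declare function waitUntil<T>(f: () => Promise<T>, check: (t: T) => boolean): Promise<T>;
declare function delay(ms: number): Promise<void>;

async function followBetaAndSettle(instance: unknown, recheckDelayMs = 2000): Promise<void> {
  await followBeta(instance);
  // First wait for the follow itself to be confirmed on the remote side...
  await waitUntil(
    () => resolveBetaCommunity(instance),
    c => c.community?.subscribed === "Subscribed",
  );
  // ...then allow for FOLLOW_ADDITIONS_RECHECK_DELAY, mirroring the delay(2000)
  // calls in the tests above; there is currently no API to wait for this directly.
  await delay(recheckDelayMs);
}
```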
@@ -24,6 +24,8 @@ import {
   getComments,
   createComment,
   getCommunityByName,
+  waitUntil,
+  delay,
 } from "./shared";
 
 beforeAll(async () => {
@@ -85,6 +87,12 @@ test("Delete community", async () => {
   // Make sure the follow response went through
   expect(follow.community_view.community.local).toBe(false);
 
+  await waitUntil(
+    () => resolveCommunity(alpha, searchShort),
+    g => g.community?.subscribed === "Subscribed",
+  );
+  // wait FOLLOW_ADDITIONS_RECHECK_DELAY
+  await delay(2000);
   let deleteCommunityRes = await deleteCommunity(
     beta,
     true,
@@ -96,9 +104,9 @@ test("Delete community", async () => {
   );
 
   // Make sure it got deleted on A
-  let communityOnAlphaDeleted = await getCommunity(
-    alpha,
-    alphaCommunity.community.id,
+  let communityOnAlphaDeleted = await waitUntil(
+    () => getCommunity(alpha, alphaCommunity!.community.id),
+    g => g.community_view.community.deleted,
   );
   expect(communityOnAlphaDeleted.community_view.community.deleted).toBe(true);
 
@@ -111,9 +119,9 @@ test("Delete community", async () => {
   expect(undeleteCommunityRes.community_view.community.deleted).toBe(false);
 
   // Make sure it got undeleted on A
-  let communityOnAlphaUnDeleted = await getCommunity(
-    alpha,
-    alphaCommunity.community.id,
+  let communityOnAlphaUnDeleted = await waitUntil(
+    () => getCommunity(alpha, alphaCommunity!.community.id),
+    g => !g.community_view.community.deleted,
   );
   expect(communityOnAlphaUnDeleted.community_view.community.deleted).toBe(
     false,
@@ -137,6 +145,10 @@ test("Remove community", async () => {
   // Make sure the follow response went through
   expect(follow.community_view.community.local).toBe(false);
 
+  await waitUntil(
+    () => resolveCommunity(alpha, searchShort),
+    g => g.community?.subscribed === "Subscribed",
+  );
   let removeCommunityRes = await removeCommunity(
     beta,
     true,
@@ -148,9 +160,9 @@ test("Remove community", async () => {
   );
 
   // Make sure it got Removed on A
-  let communityOnAlphaRemoved = await getCommunity(
-    alpha,
-    alphaCommunity.community.id,
+  let communityOnAlphaRemoved = await waitUntil(
+    () => getCommunity(alpha, alphaCommunity!.community.id),
+    g => g.community_view.community.removed,
   );
   expect(communityOnAlphaRemoved.community_view.community.removed).toBe(true);
 
@@ -163,9 +175,9 @@ test("Remove community", async () => {
   expect(unremoveCommunityRes.community_view.community.removed).toBe(false);
 
   // Make sure it got undeleted on A
-  let communityOnAlphaUnRemoved = await getCommunity(
-    alpha,
-    alphaCommunity.community.id,
+  let communityOnAlphaUnRemoved = await waitUntil(
+    () => getCommunity(alpha, alphaCommunity!.community.id),
+    g => !g.community_view.community.removed,
   );
   expect(communityOnAlphaUnRemoved.community_view.community.removed).toBe(
     false,
@@ -195,7 +207,10 @@ test("Admin actions in remote community are not federated to origin", async () =
   }
   await followCommunity(gamma, true, gammaCommunity.community.id);
   gammaCommunity = (
-    await resolveCommunity(gamma, communityRes.community.actor_id)
+    await waitUntil(
+      () => resolveCommunity(gamma, communityRes.community.actor_id),
+      g => g.community?.subscribed === "Subscribed",
+    )
   ).community;
   if (!gammaCommunity) {
     throw "Missing gamma community";
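The community tests above poll getCommunity until a moderation flag has replicated, and use a negated predicate for the undelete and unremove directions instead of a fixed sleep. A compact sketch of that flag-polling pattern, with stand-in types for the real helpers:

```typescript
// Stand-ins for getCommunity and its response shape from the shared test helpers.
interface CommunityResponse {
  community_view: { community: { deleted: boolean; removed: boolean } };
}
declare function getCommunity(instance: unknown, id: number): Promise<CommunityResponse>;
declare function waitUntil<T>(f: () => Promise<T>, check: (t: T) => boolean): Promise<T>;

// Wait for a replicated moderation flag to reach the expected value; calling this
// with expected = false covers the undelete/unremove direction.
function waitForDeletedFlag(instance: unknown, id: number, expected: boolean) {
  return waitUntil(
    () => getCommunity(instance, id),
    g => g.community_view.community.deleted === expected,
  );
}
```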
@@ -7,6 +7,7 @@ import {
   followCommunity,
   unfollowRemotes,
   getSite,
+  waitUntil,
 } from "./shared";
 
 beforeAll(async () => {
@@ -23,7 +24,12 @@ test("Follow federated community", async () => {
     throw "Missing beta community";
   }
   await followCommunity(alpha, true, betaCommunity.community.id);
-  betaCommunity = (await resolveBetaCommunity(alpha)).community;
+  betaCommunity = (
+    await waitUntil(
+      () => resolveBetaCommunity(alpha),
+      c => c.community?.subscribed === "Subscribed",
+    )
+  ).community;
 
   // Make sure the follow response went through
   expect(betaCommunity?.community.local).toBe(false);
@@ -34,6 +34,8 @@ import {
   getSite,
   unfollows,
   resolveCommunity,
+  waitUntil,
+  delay,
 } from "./shared";
 import { PostView } from "lemmy-js-client/dist/types/PostView";
 import { CreatePost } from "lemmy-js-client/dist/types/CreatePost";
@@ -80,7 +82,11 @@ test("Create a post", async () => {
   expect(postRes.post_view.counts.score).toBe(1);
 
   // Make sure that post is liked on beta
-  let betaPost = (await resolvePost(beta, postRes.post_view.post)).post;
+  const res = await waitUntil(
+    () => resolvePost(beta, postRes.post_view.post),
+    res => res.post?.counts.score === 1,
+  );
+  let betaPost = res.post;
 
   expect(betaPost).toBeDefined();
   expect(betaPost?.community.local).toBe(true);
@@ -116,7 +122,12 @@ test("Unlike a post", async () => {
   expect(unlike2.post_view.counts.score).toBe(0);
 
   // Make sure that post is unliked on beta
-  let betaPost = (await resolvePost(beta, postRes.post_view.post)).post;
+  const betaPost = (
+    await waitUntil(
+      () => resolvePost(beta, postRes.post_view.post),
+      b => b.post?.counts.score === 0,
+    )
+  ).post;
   expect(betaPost).toBeDefined();
   expect(betaPost?.community.local).toBe(true);
   expect(betaPost?.creator.local).toBe(false);
@@ -129,9 +140,17 @@ test("Update a post", async () => {
     throw "Missing beta community";
   }
   let postRes = await createPost(alpha, betaCommunity.community.id);
+  await waitUntil(
+    () => resolvePost(beta, postRes.post_view.post),
+    res => !!res.post,
+  );
 
   let updatedName = "A jest test federated post, updated";
   let updatedPost = await editPost(alpha, postRes.post_view.post);
+  await waitUntil(
+    () => resolvePost(beta, postRes.post_view.post),
+    res => res.post?.post.name === updatedName,
+  );
   expect(updatedPost.post_view.post.name).toBe(updatedName);
   expect(updatedPost.post_view.community.local).toBe(false);
   expect(updatedPost.post_view.creator.local).toBe(true);
@@ -197,8 +216,19 @@ test("Lock a post", async () => {
     throw "Missing beta community";
   }
   await followCommunity(alpha, true, betaCommunity.community.id);
-  let postRes = await createPost(alpha, betaCommunity.community.id);
+  await waitUntil(
+    () => resolveBetaCommunity(alpha),
+    c => c.community?.subscribed === "Subscribed",
+  );
+  // wait FOLLOW_ADDITIONS_RECHECK_DELAY (there's no API to wait for this currently)
+  await delay(2_000);
+
+  let postRes = await createPost(alpha, betaCommunity.community.id);
+  // wait for federation
+  await waitUntil(
+    () => searchPostLocal(beta, postRes.post_view.post),
+    res => !!res.posts[0],
+  );
   // Lock the post
   let betaPost1 = (await resolvePost(beta, postRes.post_view.post)).post;
   if (!betaPost1) {
@@ -208,7 +238,10 @@ test("Lock a post", async () => {
   expect(lockedPostRes.post_view.post.locked).toBe(true);
 
   // Make sure that post is locked on alpha
-  let searchAlpha = await searchPostLocal(alpha, postRes.post_view.post);
+  let searchAlpha = await waitUntil(
+    () => searchPostLocal(alpha, postRes.post_view.post),
+    res => res.posts[0]?.post.locked,
+  );
   let alphaPost1 = searchAlpha.posts[0];
   expect(alphaPost1.post.locked).toBe(true);
 
@@ -220,7 +253,10 @@ test("Lock a post", async () => {
   expect(unlockedPost.post_view.post.locked).toBe(false);
 
   // Make sure that post is unlocked on alpha
-  let searchAlpha2 = await searchPostLocal(alpha, postRes.post_view.post);
+  let searchAlpha2 = await waitUntil(
+    () => searchPostLocal(alpha, postRes.post_view.post),
+    res => !res.posts[0]?.post.locked,
+  );
   let alphaPost2 = searchAlpha2.posts[0];
   expect(alphaPost2.community.local).toBe(false);
   expect(alphaPost2.creator.local).toBe(true);
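The post tests above follow a create, wait, edit, wait shape: first poll until the post exists remotely, then edit or lock it locally, then poll again until the changed field is visible on the other instance before asserting on it. A sketch of that second wait, with stand-in types for the real helpers:

```typescript
// Stand-ins for resolvePost and its response shape from the shared test helpers.
interface PostResolveResponse {
  post?: { post: { name: string; locked: boolean } };
}
declare function resolvePost(instance: unknown, post: unknown): Promise<PostResolveResponse>;
declare function waitUntil<T>(
  f: () => Promise<T>,
  check: (t: T) => boolean | undefined,
): Promise<T>;

// After editing on one instance, poll the other instance until the edited name is
// actually visible there, instead of asserting immediately.
function waitForRemotePostName(instance: unknown, post: unknown, expected: string) {
  return waitUntil(
    () => resolvePost(instance, post),
    res => res.post?.post.name === expected,
  );
}
```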
@@ -312,9 +348,11 @@ test("Remove a post from admin and community on same instance", async () => {
   await followBeta(alpha);
   let postRes = await createPost(alpha, betaCommunity.community.id);
   expect(postRes.post_view.post).toBeDefined();
 
   // Get the id for beta
-  let searchBeta = await searchPostLocal(beta, postRes.post_view.post);
+  let searchBeta = await waitUntil(
+    () => searchPostLocal(beta, postRes.post_view.post),
+    res => !!res.posts[0],
+  );
   let betaPost = searchBeta.posts[0];
   expect(betaPost).toBeDefined();
 
@@ -361,7 +399,7 @@ test("Enforce site ban for federated user", async () => {
     client: alpha.client,
     auth: alphaUserJwt.jwt ?? "",
   };
-  let alphaUserActorId = (await getSite(alpha_user)).my_user?.local_user_view
+  const alphaUserActorId = (await getSite(alpha_user)).my_user?.local_user_view
     .person.actor_id;
   if (!alphaUserActorId) {
     throw "Missing alpha user actor id";
@@ -375,7 +413,10 @@ test("Enforce site ban for federated user", async () => {
 
   // alpha makes post in beta community, it federates to beta instance
   let postRes1 = await createPost(alpha_user, betaCommunity.community.id);
-  let searchBeta1 = await searchPostLocal(beta, postRes1.post_view.post);
+  let searchBeta1 = await waitUntil(
+    () => searchPostLocal(beta, postRes1.post_view.post),
+    res => !!res.posts[0],
+  );
   expect(searchBeta1.posts[0]).toBeDefined();
 
   // ban alpha from its instance
@@ -388,7 +429,10 @@ test("Enforce site ban for federated user", async () => {
   expect(banAlpha.banned).toBe(true);
 
   // alpha ban should be federated to beta
-  let alphaUserOnBeta1 = await resolvePerson(beta, alphaUserActorId);
+  let alphaUserOnBeta1 = await waitUntil(
+    () => resolvePerson(beta, alphaUserActorId),
+    res => res.person?.person.banned ?? false,
+  );
   expect(alphaUserOnBeta1.person?.person.banned).toBe(true);
 
   // existing alpha post should be removed on beta
@@ -406,7 +450,10 @@ test("Enforce site ban for federated user", async () => {
 
   // alpha makes new post in beta community, it federates
   let postRes2 = await createPost(alpha_user, betaCommunity.community.id);
-  let searchBeta3 = await searchPostLocal(beta, postRes2.post_view.post);
+  let searchBeta3 = await waitUntil(
+    () => searchPostLocal(beta, postRes2.post_view.post),
+    e => !!e.posts[0],
+  );
   expect(searchBeta3.posts[0]).toBeDefined();
 
   let alphaUserOnBeta2 = await resolvePerson(beta, alphaUserActorId);
@@ -497,7 +544,12 @@ test("Report a post", async () => {
     await reportPost(alpha, alphaPost.post.id, randomString(10))
   ).post_report_view.post_report;
 
-  let betaReport = (await listPostReports(beta)).post_reports[0].post_report;
+  let betaReport = (
+    await waitUntil(
+      () => listPostReports(beta),
+      res => !!res.post_reports[0],
+    )
+  ).post_reports[0].post_report;
   expect(betaReport).toBeDefined();
   expect(betaReport.resolved).toBe(false);
   expect(betaReport.original_post_name).toBe(alphaReport.original_post_name);
@@ -9,6 +9,7 @@ import {
   listPrivateMessages,
   deletePrivateMessage,
   unfollowRemotes,
+  waitUntil,
 } from "./shared";

 let recipient_id: number;
@@ -30,7 +31,10 @@ test("Create a private message", async () => {
   expect(pmRes.private_message_view.creator.local).toBe(true);
   expect(pmRes.private_message_view.recipient.local).toBe(false);

-  let betaPms = await listPrivateMessages(beta);
+  let betaPms = await waitUntil(
+    () => listPrivateMessages(beta),
+    e => !!e.private_messages[0],
+  );
   expect(betaPms.private_messages[0].private_message.content).toBeDefined();
   expect(betaPms.private_messages[0].private_message.local).toBe(false);
   expect(betaPms.private_messages[0].creator.local).toBe(false);
@@ -49,7 +53,10 @@ test("Update a private message", async () => {
     updatedContent,
   );

-  let betaPms = await listPrivateMessages(beta);
+  let betaPms = await waitUntil(
+    () => listPrivateMessages(beta),
+    p => p.private_messages[0].private_message.content === updatedContent,
+  );
   expect(betaPms.private_messages[0].private_message.content).toBe(
     updatedContent,
   );
@@ -57,7 +64,15 @@ test("Update a private message", async () => {

 test("Delete a private message", async () => {
   let pmRes = await createPrivateMessage(alpha, recipient_id);
-  let betaPms1 = await listPrivateMessages(beta);
+  let betaPms1 = await waitUntil(
+    () => listPrivateMessages(beta),
+    m =>
+      !!m.private_messages.find(
+        e =>
+          e.private_message.ap_id ===
+          pmRes.private_message_view.private_message.ap_id,
+      ),
+  );
   let deletedPmRes = await deletePrivateMessage(
     alpha,
     true,
@@ -68,7 +83,10 @@ test("Delete a private message", async () => {
   // The GetPrivateMessages filters out deleted,
   // even though they are in the actual database.
   // no reason to show them
-  let betaPms2 = await listPrivateMessages(beta);
+  let betaPms2 = await waitUntil(
+    () => listPrivateMessages(beta),
+    p => p.private_messages.length === betaPms1.private_messages.length - 1,
+  );
   expect(betaPms2.private_messages.length).toBe(
     betaPms1.private_messages.length - 1,
   );
@@ -83,7 +101,10 @@ test("Delete a private message", async () => {
     false,
   );

-  let betaPms3 = await listPrivateMessages(beta);
+  let betaPms3 = await waitUntil(
+    () => listPrivateMessages(beta),
+    p => p.private_messages.length === betaPms1.private_messages.length,
+  );
   expect(betaPms3.private_messages.length).toBe(
     betaPms1.private_messages.length,
   );
@@ -201,6 +201,11 @@ export async function setupLogins() {
   try {
     await createCommunity(alpha, "main");
     await createCommunity(beta, "main");
+    // wait for > INSTANCES_RECHECK_DELAY to ensure federation is initialized
+    // otherwise the first few federated events may be missed
+    // (because last_successful_id is set to current id when federation to an instance is first started)
+    // only needed the first time so do in this try
+    await delay(6_000);
   } catch (_) {
     console.log("Communities already exist");
   }
@@ -212,7 +217,9 @@ export async function createPost(
 ): Promise<PostResponse> {
   let name = randomString(5);
   let body = randomString(10);
-  let url = "https://google.com/";
+  // switch from google.com to example.com for consistent title (embed_title and embed_description)
+  // google switches description when a google doodle appears
+  let url = "https://example.com/";
   let form: CreatePost = {
     name,
     url,
@@ -851,3 +858,20 @@ export function getCommentParentId(comment: Comment): number | undefined {
     return undefined;
   }
 }
+
+export async function waitUntil<T>(
+  fetcher: () => Promise<T>,
+  checker: (t: T) => boolean,
+  retries = 10,
+  delaySeconds = 2,
+) {
+  let retry = 0;
+  while (retry++ < retries) {
+    const result = await fetcher();
+    if (checker(result)) return result;
+    await delay(delaySeconds * 1000);
+  }
+  throw Error(
+    `Failed "${fetcher}": "${checker}" did not return true after ${retries} retries (delayed ${delaySeconds}s each)`,
+  );
+}
@@ -6,7 +6,7 @@
     "noImplicitAny": true,
     "lib": ["es2017", "es7", "es6", "dom"],
     "outDir": "./dist",
-    "target": "ES2015",
+    "target": "ES2020",
     "strictNullChecks": true,
     "moduleResolution": "Node"
   },
@@ -17,22 +17,14 @@ use lemmy_db_schema::{
   },
 };
 use lemmy_db_views::structs::PrivateMessageView;
-use lemmy_utils::{error::LemmyResult, SYNCHRONOUS_FEDERATION};
-use once_cell::sync::{Lazy, OnceCell};
-use tokio::{
-  sync::{
-    mpsc,
-    mpsc::{UnboundedReceiver, UnboundedSender, WeakUnboundedSender},
-    Mutex,
-  },
-  task::JoinHandle,
-};
+use lemmy_utils::error::LemmyResult;
+use once_cell::sync::OnceCell;
 use url::Url;

 type MatchOutgoingActivitiesBoxed =
   Box<for<'a> fn(SendActivityData, &'a Data<LemmyContext>) -> BoxFuture<'a, LemmyResult<()>>>;

-/// This static is necessary so that activities can be sent out synchronously for tests.
+/// This static is necessary so that the api_common crates don't need to depend on lemmy_apub
 pub static MATCH_OUTGOING_ACTIVITIES: OnceCell<MatchOutgoingActivitiesBoxed> = OnceCell::new();

 #[derive(Debug)]
@@ -62,51 +54,16 @@ pub enum SendActivityData {
   CreateReport(Url, Person, Community, String),
 }

-// TODO: instead of static, move this into LemmyContext. make sure that stopping the process with
-// ctrl+c still works.
-static ACTIVITY_CHANNEL: Lazy<ActivityChannel> = Lazy::new(|| {
-  let (sender, receiver) = mpsc::unbounded_channel();
-  let weak_sender = sender.downgrade();
-  ActivityChannel {
-    weak_sender,
-    receiver: Mutex::new(receiver),
-    keepalive_sender: Mutex::new(Some(sender)),
-  }
-});
-
-pub struct ActivityChannel {
-  weak_sender: WeakUnboundedSender<SendActivityData>,
-  receiver: Mutex<UnboundedReceiver<SendActivityData>>,
-  keepalive_sender: Mutex<Option<UnboundedSender<SendActivityData>>>,
-}
+pub struct ActivityChannel;

 impl ActivityChannel {
-  pub async fn retrieve_activity() -> Option<SendActivityData> {
-    let mut lock = ACTIVITY_CHANNEL.receiver.lock().await;
-    lock.recv().await
-  }
-
   pub async fn submit_activity(
     data: SendActivityData,
     context: &Data<LemmyContext>,
   ) -> LemmyResult<()> {
-    if *SYNCHRONOUS_FEDERATION {
-      MATCH_OUTGOING_ACTIVITIES
-        .get()
-        .expect("retrieve function pointer")(data, context)
-      .await?;
-    }
-    // could do `ACTIVITY_CHANNEL.keepalive_sender.lock()` instead and get rid of weak_sender,
-    // not sure which way is more efficient
-    else if let Some(sender) = ACTIVITY_CHANNEL.weak_sender.upgrade() {
-      sender.send(data)?;
-    }
-    Ok(())
-  }
-
-  pub async fn close(outgoing_activities_task: JoinHandle<LemmyResult<()>>) -> LemmyResult<()> {
-    ACTIVITY_CHANNEL.keepalive_sender.lock().await.take();
-    outgoing_activities_task.await??;
-    Ok(())
+    MATCH_OUTGOING_ACTIVITIES
+      .get()
+      .expect("retrieve function pointer")(data, context)
+    .await
   }
 }
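After this change, ActivityChannel is only a thin namespace around the registered MATCH_OUTGOING_ACTIVITIES function pointer; the in-memory sender channel and keepalive logic are gone. A minimal caller-side sketch of how an API handler would still hand off an outgoing activity, assuming a SendActivityData value and a Data<LemmyContext> are already in scope (the wrapper function itself is hypothetical):

use activitypub_federation::config::Data;
use lemmy_api_common::{
  context::LemmyContext,
  send_activity::{ActivityChannel, SendActivityData},
};
use lemmy_utils::error::LemmyResult;

// Hypothetical helper: the SendActivityData variant would come from the API layer.
async fn notify_federation(data: SendActivityData, context: &Data<LemmyContext>) -> LemmyResult<()> {
  // submit_activity now resolves MATCH_OUTGOING_ACTIVITIES directly and awaits it,
  // which ends up persisting the activity into the sent_activity table for the
  // federation workers instead of pushing it onto an in-process queue.
  ActivityChannel::submit_activity(data, context).await
}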
@@ -37,7 +37,6 @@ use lemmy_utils::{
     slurs::{check_slurs, check_slurs_opt},
     validation::{check_url_scheme, clean_url_params, is_valid_body_field, is_valid_post_title},
   },
-  SYNCHRONOUS_FEDERATION,
 };
 use tracing::Instrument;
 use url::Url;
@@ -190,11 +189,7 @@ pub async fn create_post(
       Err(e) => Err(e).with_lemmy_type(LemmyErrorType::CouldntSendWebmention),
     }
   };
-    if *SYNCHRONOUS_FEDERATION {
-      task.await?;
-    } else {
-      spawn_try_task(task);
-    }
+    spawn_try_task(task);
   };

   build_post_response(&context, community_id, person_id, post_id).await
@@ -10,7 +10,7 @@ use crate::{
   },
   activity_lists::AnnouncableActivities,
   insert_received_activity,
-  objects::{instance::remote_instance_inboxes, person::ApubPerson},
+  objects::person::ApubPerson,
   protocol::activities::block::block_user::BlockUser,
 };
 use activitypub_federation::{
@@ -27,6 +27,7 @@ use lemmy_api_common::{
 };
 use lemmy_db_schema::{
   source::{
+    activity::ActivitySendTargets,
     community::{
       CommunityFollower,
       CommunityFollowerForm,
@@ -97,12 +98,12 @@ impl BlockUser {

     match target {
       SiteOrCommunity::Site(_) => {
-        let inboxes = remote_instance_inboxes(&mut context.pool()).await?;
+        let inboxes = ActivitySendTargets::to_all_instances();
         send_lemmy_activity(context, block, mod_, inboxes, false).await
       }
       SiteOrCommunity::Community(c) => {
         let activity = AnnouncableActivities::BlockUser(block);
-        let inboxes = vec![user.shared_inbox_or_inbox()];
+        let inboxes = ActivitySendTargets::to_inbox(user.shared_inbox_or_inbox());
         send_activity_in_community(activity, mod_, c, inboxes, true, context).await
       }
     }
@@ -8,7 +8,7 @@ use crate::{
   },
   activity_lists::AnnouncableActivities,
   insert_received_activity,
-  objects::{instance::remote_instance_inboxes, person::ApubPerson},
+  objects::person::ApubPerson,
   protocol::activities::block::{block_user::BlockUser, undo_block_user::UndoBlockUser},
 };
 use activitypub_federation::{
@@ -20,6 +20,7 @@ use activitypub_federation::{
 use lemmy_api_common::{context::LemmyContext, utils::sanitize_html_federation_opt};
 use lemmy_db_schema::{
   source::{
+    activity::ActivitySendTargets,
     community::{CommunityPersonBan, CommunityPersonBanForm},
     moderator::{ModBan, ModBanForm, ModBanFromCommunity, ModBanFromCommunityForm},
     person::{Person, PersonUpdateForm},
@@ -59,10 +60,10 @@ impl UndoBlockUser {
       audience,
     };

-    let mut inboxes = vec![user.shared_inbox_or_inbox()];
+    let mut inboxes = ActivitySendTargets::to_inbox(user.shared_inbox_or_inbox());
     match target {
       SiteOrCommunity::Site(_) => {
-        inboxes.append(&mut remote_instance_inboxes(&mut context.pool()).await?);
+        inboxes.set_all_instances();
         send_lemmy_activity(context, undo, mod_, inboxes, false).await
       }
       SiteOrCommunity::Community(c) => {
@@ -21,6 +21,7 @@ use activitypub_federation::{
   traits::{ActivityHandler, Actor},
 };
 use lemmy_api_common::context::LemmyContext;
+use lemmy_db_schema::source::activity::ActivitySendTargets;
 use lemmy_utils::error::{LemmyError, LemmyErrorType};
 use serde_json::Value;
 use url::Url;
@@ -94,7 +95,7 @@ impl AnnounceActivity {
     context: &Data<LemmyContext>,
   ) -> Result<(), LemmyError> {
     let announce = AnnounceActivity::new(object.clone(), community, context)?;
-    let inboxes = community.get_follower_inboxes(context).await?;
+    let inboxes = ActivitySendTargets::to_local_community_followers(community.id);
     send_lemmy_activity(context, announce, community, inboxes.clone(), false).await?;

     // Pleroma and Mastodon can't handle activities like Announce/Create/Page. So for
@@ -28,6 +28,7 @@ use lemmy_db_schema::{
   impls::community::CollectionType,
   newtypes::{CommunityId, PersonId},
   source::{
+    activity::ActivitySendTargets,
     community::{Community, CommunityModerator, CommunityModeratorForm},
     moderator::{ModAddCommunity, ModAddCommunityForm},
     person::Person,
@@ -62,7 +63,7 @@ impl CollectionAdd {
     };

     let activity = AnnouncableActivities::CollectionAdd(add);
-    let inboxes = vec![added_mod.shared_inbox_or_inbox()];
+    let inboxes = ActivitySendTargets::to_inbox(added_mod.shared_inbox_or_inbox());
     send_activity_in_community(activity, actor, community, inboxes, true, context).await
   }

@@ -87,7 +88,15 @@ impl CollectionAdd {
       audience: Some(community.id().into()),
     };
     let activity = AnnouncableActivities::CollectionAdd(add);
-    send_activity_in_community(activity, actor, community, vec![], true, context).await
+    send_activity_in_community(
+      activity,
+      actor,
+      community,
+      ActivitySendTargets::empty(),
+      true,
+      context,
+    )
+    .await
   }
 }

@@ -24,6 +24,7 @@ use lemmy_api_common::{
 use lemmy_db_schema::{
   impls::community::CollectionType,
   source::{
+    activity::ActivitySendTargets,
     community::{Community, CommunityModerator, CommunityModeratorForm},
     moderator::{ModAddCommunity, ModAddCommunityForm},
     post::{Post, PostUpdateForm},
@@ -57,7 +58,7 @@ impl CollectionRemove {
     };

     let activity = AnnouncableActivities::CollectionRemove(remove);
-    let inboxes = vec![removed_mod.shared_inbox_or_inbox()];
+    let inboxes = ActivitySendTargets::to_inbox(removed_mod.shared_inbox_or_inbox());
     send_activity_in_community(activity, actor, community, inboxes, true, context).await
   }

@@ -82,7 +83,15 @@ impl CollectionRemove {
       audience: Some(community.id().into()),
     };
     let activity = AnnouncableActivities::CollectionRemove(remove);
-    send_activity_in_community(activity, actor, community, vec![], true, context).await
+    send_activity_in_community(
+      activity,
+      actor,
+      community,
+      ActivitySendTargets::empty(),
+      true,
+      context,
+    )
+    .await
   }
 }

@@ -24,6 +24,7 @@ use activitypub_federation::{
 use lemmy_api_common::context::LemmyContext;
 use lemmy_db_schema::{
   source::{
+    activity::ActivitySendTargets,
     community::Community,
     person::Person,
     post::{Post, PostUpdateForm},
@@ -147,6 +148,14 @@ pub(crate) async fn send_lock_post(
     };
     AnnouncableActivities::UndoLockPost(undo)
   };
-  send_activity_in_community(activity, &actor.into(), &community, vec![], true, &context).await?;
+  send_activity_in_community(
+    activity,
+    &actor.into(),
+    &community,
+    ActivitySendTargets::empty(),
+    true,
+    &context,
+  )
+  .await?;
   Ok(())
 }
@@ -6,9 +6,8 @@ use crate::{
 };
 use activitypub_federation::{config::Data, traits::Actor};
 use lemmy_api_common::context::LemmyContext;
-use lemmy_db_schema::source::person::PersonFollower;
+use lemmy_db_schema::source::{activity::ActivitySendTargets, person::PersonFollower};
 use lemmy_utils::error::LemmyError;
-use url::Url;

 pub mod announce;
 pub mod collection_add;
@@ -34,7 +33,7 @@ pub(crate) async fn send_activity_in_community(
   activity: AnnouncableActivities,
   actor: &ApubPerson,
   community: &ApubCommunity,
-  extra_inboxes: Vec<Url>,
+  extra_inboxes: ActivitySendTargets,
   is_mod_action: bool,
   context: &Data<LemmyContext>,
 ) -> Result<(), LemmyError> {
@@ -43,8 +42,8 @@ pub(crate) async fn send_activity_in_community(

   // send to user followers
   if !is_mod_action {
-    inboxes.extend(
-      &mut PersonFollower::list_followers(&mut context.pool(), actor.id)
+    inboxes.add_inboxes(
+      PersonFollower::list_followers(&mut context.pool(), actor.id)
         .await?
         .into_iter()
         .map(|p| ApubPerson(p).shared_inbox_or_inbox()),
@@ -56,7 +55,7 @@ pub(crate) async fn send_activity_in_community(
     AnnounceActivity::send(activity.clone().try_into()?, community, context).await?;
   } else {
     // send to the community, which will then forward to followers
-    inboxes.push(community.shared_inbox_or_inbox());
+    inboxes.add_inbox(community.shared_inbox_or_inbox());
   }

   send_lemmy_activity(context, activity.clone(), actor, inboxes, false).await?;
@@ -14,6 +14,7 @@ use activitypub_federation::{
 use lemmy_api_common::{context::LemmyContext, utils::sanitize_html_federation};
 use lemmy_db_schema::{
   source::{
+    activity::ActivitySendTargets,
     comment_report::{CommentReport, CommentReportForm},
     community::Community,
     person::Person,
@@ -49,8 +50,11 @@ impl Report {
       id: id.clone(),
       audience: Some(community.id().into()),
     };
-    let inbox = vec![community.shared_inbox_or_inbox()];
+    let inbox = if community.local {
+      ActivitySendTargets::empty()
+    } else {
+      ActivitySendTargets::to_inbox(community.shared_inbox_or_inbox())
+    };
     send_lemmy_activity(&context, report, &actor, inbox, false).await
   }
 }
@@ -18,7 +18,7 @@ use activitypub_federation::{
 };
 use lemmy_api_common::context::LemmyContext;
 use lemmy_db_schema::{
-  source::{community::Community, person::Person},
+  source::{activity::ActivitySendTargets, community::Community, person::Person},
   traits::Crud,
 };
 use lemmy_utils::error::LemmyError;
@@ -46,7 +46,15 @@ pub(crate) async fn send_update_community(
   };

   let activity = AnnouncableActivities::UpdateCommunity(update);
-  send_activity_in_community(activity, &actor, &community, vec![], true, &context).await
+  send_activity_in_community(
+    activity,
+    &actor,
+    &community,
+    ActivitySendTargets::empty(),
+    true,
+    &context,
+  )
+  .await
 }

 #[async_trait::async_trait]
@@ -31,6 +31,7 @@ use lemmy_db_schema::{
   aggregates::structs::CommentAggregates,
   newtypes::PersonId,
   source::{
+    activity::ActivitySendTargets,
     comment::{Comment, CommentLike, CommentLikeForm},
     community::Community,
     person::Person,
@@ -88,10 +89,10 @@ impl CreateOrUpdateNote {
       .map(|t| t.href.clone())
       .map(ObjectId::from)
       .collect();
-    let mut inboxes = vec![];
+    let mut inboxes = ActivitySendTargets::empty();
     for t in tagged_users {
       let person = t.dereference(&context).await?;
-      inboxes.push(person.shared_inbox_or_inbox());
+      inboxes.add_inbox(person.shared_inbox_or_inbox());
     }

     let activity = AnnouncableActivities::CreateOrUpdateComment(create_or_update);
@@ -26,6 +26,7 @@ use lemmy_db_schema::{
   aggregates::structs::PostAggregates,
   newtypes::PersonId,
   source::{
+    activity::ActivitySendTargets,
     community::Community,
     person::Person,
     post::{Post, PostLike, PostLikeForm},
@@ -80,7 +81,7 @@ impl CreateOrUpdatePage {
       activity,
       &person,
       &community,
-      vec![],
+      ActivitySendTargets::empty(),
       is_mod_action,
       &context,
     )
@@ -13,6 +13,7 @@ use activitypub_federation::{
   traits::{ActivityHandler, Actor, Object},
 };
 use lemmy_api_common::context::LemmyContext;
+use lemmy_db_schema::source::activity::ActivitySendTargets;
 use lemmy_db_views::structs::PrivateMessageView;
 use lemmy_utils::error::LemmyError;
 use url::Url;
@@ -38,7 +39,7 @@ pub(crate) async fn send_create_or_update_pm(
     .await?,
     kind,
   };
-  let inbox = vec![recipient.shared_inbox_or_inbox()];
+  let inbox = ActivitySendTargets::to_inbox(recipient.shared_inbox_or_inbox());
   send_lemmy_activity(&context, create_or_update, &actor, inbox, true).await
 }

@@ -1,7 +1,7 @@
 use crate::{
   activities::{generate_activity_id, send_lemmy_activity, verify_is_public, verify_person},
   insert_received_activity,
-  objects::{instance::remote_instance_inboxes, person::ApubPerson},
+  objects::person::ApubPerson,
   protocol::activities::deletion::delete_user::DeleteUser,
 };
 use activitypub_federation::{
@@ -11,7 +11,7 @@ use activitypub_federation::{
   traits::{ActivityHandler, Actor},
 };
 use lemmy_api_common::{context::LemmyContext, utils::purge_user_account};
-use lemmy_db_schema::source::person::Person;
+use lemmy_db_schema::source::{activity::ActivitySendTargets, person::Person};
 use lemmy_utils::error::LemmyError;
 use url::Url;

@@ -36,7 +36,8 @@ pub async fn delete_user(
     remove_data: Some(delete_content),
   };

-  let inboxes = remote_instance_inboxes(&mut context.pool()).await?;
+  let inboxes = ActivitySendTargets::to_all_instances();
+
   send_lemmy_activity(&context, delete, &actor, inboxes, true).await?;
   Ok(())
 }
@@ -31,6 +31,7 @@ use lemmy_api_common::context::LemmyContext;
 use lemmy_db_schema::{
   newtypes::CommunityId,
   source::{
+    activity::ActivitySendTargets,
     comment::{Comment, CommentUpdateForm},
     community::{Community, CommunityUpdateForm},
     person::Person,
@@ -71,7 +72,7 @@ pub(crate) async fn send_apub_delete_in_community(
     activity,
     &actor,
     &community.into(),
-    vec![],
+    ActivitySendTargets::empty(),
     is_mod_action,
     context,
   )
@@ -103,7 +104,7 @@ pub(crate) async fn send_apub_delete_in_community_new(
     activity,
     &actor,
     &community.into(),
-    vec![],
+    ActivitySendTargets::empty(),
     is_mod_action,
     &context,
   )
@@ -123,9 +124,9 @@ pub(crate) async fn send_apub_delete_private_message(
     .into();

   let deletable = DeletableObjects::PrivateMessage(pm.into());
-  let inbox = vec![recipient.shared_inbox_or_inbox()];
+  let inbox = ActivitySendTargets::to_inbox(recipient.shared_inbox_or_inbox());
   if deleted {
-    let delete = Delete::new(actor, deletable, recipient.id(), None, None, &context)?;
+    let delete: Delete = Delete::new(actor, deletable, recipient.id(), None, None, &context)?;
     send_lemmy_activity(&context, delete, actor, inbox, true).await?;
   } else {
     let undo = UndoDelete::new(actor, deletable, recipient.id(), None, None, &context)?;
@@ -10,7 +10,10 @@ use activitypub_federation::{
   traits::{ActivityHandler, Actor},
 };
 use lemmy_api_common::context::LemmyContext;
-use lemmy_db_schema::{source::community::CommunityFollower, traits::Followable};
+use lemmy_db_schema::{
+  source::{activity::ActivitySendTargets, community::CommunityFollower},
+  traits::Followable,
+};
 use lemmy_utils::error::LemmyError;
 use url::Url;

@@ -29,7 +32,7 @@ impl AcceptFollow {
         &context.settings().get_protocol_and_hostname(),
       )?,
     };
-    let inbox = vec![person.shared_inbox_or_inbox()];
+    let inbox = ActivitySendTargets::to_inbox(person.shared_inbox_or_inbox());
     send_lemmy_activity(context, accept, &user_or_community, inbox, true).await
   }
 }
@@ -19,6 +19,7 @@ use activitypub_federation::{
 use lemmy_api_common::context::LemmyContext;
 use lemmy_db_schema::{
   source::{
+    activity::ActivitySendTargets,
     community::{CommunityFollower, CommunityFollowerForm},
     person::{PersonFollower, PersonFollowerForm},
   },
@@ -61,7 +62,11 @@ impl Follow {
       .ok();

     let follow = Follow::new(actor, community, context)?;
-    let inbox = vec![community.shared_inbox_or_inbox()];
+    let inbox = if community.local {
+      ActivitySendTargets::empty()
+    } else {
+      ActivitySendTargets::to_inbox(community.shared_inbox_or_inbox())
+    };
     send_lemmy_activity(context, follow, actor, inbox, true).await
   }
 }
@@ -14,6 +14,7 @@ use activitypub_federation::{
 use lemmy_api_common::context::LemmyContext;
 use lemmy_db_schema::{
   source::{
+    activity::ActivitySendTargets,
     community::{CommunityFollower, CommunityFollowerForm},
     person::{PersonFollower, PersonFollowerForm},
   },
@@ -40,7 +41,11 @@ impl UndoFollow {
         &context.settings().get_protocol_and_hostname(),
       )?,
     };
-    let inbox = vec![community.shared_inbox_or_inbox()];
+    let inbox = if community.local {
+      ActivitySendTargets::empty()
+    } else {
+      ActivitySendTargets::to_inbox(community.shared_inbox_or_inbox())
+    };
     send_lemmy_activity(context, undo, actor, inbox, true).await
   }
 }
@@ -26,7 +26,6 @@ use crate::{
   CONTEXT,
 };
 use activitypub_federation::{
-  activity_queue::send_activity,
   config::Data,
   fetch::object_id::ObjectId,
   kinds::public,
@@ -34,28 +33,21 @@ use activitypub_federation::{
   traits::{ActivityHandler, Actor},
 };
 use anyhow::anyhow;
-use lemmy_api_common::{
-  context::LemmyContext,
-  send_activity::{ActivityChannel, SendActivityData},
-};
+use lemmy_api_common::{context::LemmyContext, send_activity::SendActivityData};
 use lemmy_db_schema::{
   newtypes::CommunityId,
   source::{
-    activity::{SentActivity, SentActivityForm},
+    activity::{ActivitySendTargets, ActorType, SentActivity, SentActivityForm},
     community::Community,
-    instance::Instance,
   },
 };
 use lemmy_db_views_actor::structs::{CommunityPersonBanView, CommunityView};
 use lemmy_utils::{
   error::{LemmyError, LemmyErrorExt, LemmyErrorType, LemmyResult},
   spawn_try_task,
-  SYNCHRONOUS_FEDERATION,
 };
-use moka::future::Cache;
-use once_cell::sync::Lazy;
 use serde::Serialize;
-use std::{ops::Deref, sync::Arc, time::Duration};
+use std::{ops::Deref, time::Duration};
 use tracing::info;
 use url::{ParseError, Url};
 use uuid::Uuid;
@@ -189,35 +181,23 @@ where
   Url::parse(&id)
 }

+pub(crate) trait GetActorType {
+  fn actor_type(&self) -> ActorType;
+}
+
 #[tracing::instrument(skip_all)]
 async fn send_lemmy_activity<Activity, ActorT>(
   data: &Data<LemmyContext>,
   activity: Activity,
   actor: &ActorT,
-  mut inbox: Vec<Url>,
+  send_targets: ActivitySendTargets,
   sensitive: bool,
 ) -> Result<(), LemmyError>
 where
   Activity: ActivityHandler + Serialize + Send + Sync + Clone,
-  ActorT: Actor,
+  ActorT: Actor + GetActorType,
   Activity: ActivityHandler<Error = LemmyError>,
 {
-  static CACHE: Lazy<Cache<(), Arc<Vec<String>>>> = Lazy::new(|| {
-    Cache::builder()
-      .max_capacity(1)
-      .time_to_live(DEAD_INSTANCE_LIST_CACHE_DURATION)
-      .build()
-  });
-  let dead_instances = CACHE
-    .try_get_with((), async {
-      Ok::<_, diesel::result::Error>(Arc::new(Instance::dead_instances(&mut data.pool()).await?))
-    })
-    .await?;
-
-  inbox.retain(|i| {
-    let domain = i.domain().expect("has domain").to_string();
-    !dead_instances.contains(&domain)
-  });
   info!("Sending activity {}", activity.id().to_string());
   let activity = WithContext::new(activity, CONTEXT.deref().clone());

@@ -225,20 +205,21 @@ where
     ap_id: activity.id().clone().into(),
     data: serde_json::to_value(activity.clone())?,
     sensitive,
+    send_inboxes: send_targets
+      .inboxes
+      .into_iter()
+      .map(|e| Some(e.into()))
+      .collect(),
+    send_all_instances: send_targets.all_instances,
+    send_community_followers_of: send_targets.community_followers_of.map(|e| e.0),
+    actor_type: actor.actor_type(),
+    actor_apub_id: actor.id().into(),
   };
   SentActivity::create(&mut data.pool(), form).await?;
-  send_activity(activity, actor, inbox, data).await?;

   Ok(())
 }

-pub async fn handle_outgoing_activities(context: Data<LemmyContext>) -> LemmyResult<()> {
-  while let Some(data) = ActivityChannel::retrieve_activity().await {
-    match_outgoing_activities(data, &context.reset_request_count()).await?
-  }
-  Ok(())
-}
-
 pub async fn match_outgoing_activities(
   data: SendActivityData,
   context: &Data<LemmyContext>,
@@ -343,10 +324,6 @@ pub async fn match_outgoing_activities(
       }
     }
   };
-  if *SYNCHRONOUS_FEDERATION {
-    fed_task.await?;
-  } else {
-    spawn_try_task(fed_task);
-  }
+  spawn_try_task(fed_task);
   Ok(())
 }
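The diff uses ActivitySendTargets throughout, but its definition is not part of this excerpt. A rough sketch of what the type must roughly look like, inferred only from the constructors called above and the fields read in send_lemmy_activity (field types and container choices are assumptions; the real definition lives in lemmy_db_schema::source::activity and may differ):

use lemmy_db_schema::newtypes::CommunityId;
use url::Url;

// Sketch of the send-target type implied by the calls above, not the actual definition.
#[derive(Clone, Debug, Default)]
pub struct ActivitySendTargets {
  // Inboxes of specific remote actors to deliver to.
  pub inboxes: Vec<Url>,
  // Deliver to every known remote instance (site bans, user deletions).
  pub all_instances: bool,
  // Deliver to the followers of this local community (announces).
  pub community_followers_of: Option<CommunityId>,
}

impl ActivitySendTargets {
  pub fn empty() -> Self {
    Self::default()
  }
  pub fn to_inbox(inbox: Url) -> Self {
    let mut res = Self::empty();
    res.inboxes.push(inbox);
    res
  }
  pub fn to_all_instances() -> Self {
    let mut res = Self::empty();
    res.all_instances = true;
    res
  }
  pub fn to_local_community_followers(id: CommunityId) -> Self {
    let mut res = Self::empty();
    res.community_followers_of = Some(id);
    res
  }
  pub fn add_inbox(&mut self, inbox: Url) {
    self.inboxes.push(inbox);
  }
  pub fn add_inboxes(&mut self, inboxes: impl Iterator<Item = Url>) {
    self.inboxes.extend(inboxes);
  }
  pub fn set_all_instances(&mut self) {
    self.all_instances = true;
  }
}

The point of the change is visible in send_lemmy_activity: instead of resolving inboxes and filtering dead instances at send time, the routing intent is stored verbatim on the sent_activity row, and the persistent federation queue resolves it later per target instance.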
@@ -13,6 +13,7 @@ use lemmy_api_common::context::LemmyContext;
 use lemmy_db_schema::{
   newtypes::DbUrl,
   source::{
+    activity::ActivitySendTargets,
     comment::{CommentLike, CommentLikeForm},
     community::Community,
     person::Person,
@@ -36,17 +37,18 @@ pub(crate) async fn send_like_activity(
   let actor: ApubPerson = actor.into();
   let community: ApubCommunity = community.into();

+  let empty = ActivitySendTargets::empty();
   // score of 1 means upvote, -1 downvote, 0 undo a previous vote
   if score != 0 {
     let vote = Vote::new(object_id, &actor, &community, score.try_into()?, &context)?;
     let activity = AnnouncableActivities::Vote(vote);
-    send_activity_in_community(activity, &actor, &community, vec![], false, &context).await
+    send_activity_in_community(activity, &actor, &community, empty, false, &context).await
   } else {
     // Lemmy API doesnt distinguish between Undo/Like and Undo/Dislike, so we hardcode it here.
     let vote = Vote::new(object_id, &actor, &community, VoteType::Like, &context)?;
     let undo_vote = UndoVote::new(vote, &actor, &community, &context)?;
     let activity = AnnouncableActivities::UndoVote(undo_vote);
-    send_activity_in_community(activity, &actor, &community, vec![], false, &context).await
+    send_activity_in_community(activity, &actor, &community, empty, false, &context).await
   }
 }

@@ -12,6 +12,7 @@ use lemmy_utils::error::LemmyError;

 pub mod post_or_comment;
 pub mod search;
+pub mod site_or_community_or_user;
 pub mod user_or_community;

 /// Resolve actor identifier like `!news@example.com` to user or community object.

crates/apub/src/fetcher/site_or_community_or_user.rs (new file, 108 lines)
@@ -0,0 +1,108 @@
+use crate::{
+  fetcher::user_or_community::{PersonOrGroup, UserOrCommunity},
+  objects::instance::ApubSite,
+  protocol::objects::instance::Instance,
+};
+use activitypub_federation::{
+  config::Data,
+  traits::{Actor, Object},
+};
+use chrono::{DateTime, Utc};
+use lemmy_api_common::context::LemmyContext;
+use lemmy_utils::error::LemmyError;
+use reqwest::Url;
+use serde::{Deserialize, Serialize};
+
+// todo: maybe this enum should be somewhere else?
+#[derive(Debug)]
+pub enum SiteOrCommunityOrUser {
+  Site(ApubSite),
+  UserOrCommunity(UserOrCommunity),
+}
+
+#[derive(Serialize, Deserialize, Clone, Debug)]
+#[serde(untagged)]
+pub enum SiteOrPersonOrGroup {
+  Instance(Instance),
+  PersonOrGroup(PersonOrGroup),
+}
+
+#[async_trait::async_trait]
+impl Object for SiteOrCommunityOrUser {
+  type DataType = LemmyContext;
+  type Kind = SiteOrPersonOrGroup;
+  type Error = LemmyError;
+
+  fn last_refreshed_at(&self) -> Option<DateTime<Utc>> {
+    Some(match self {
+      SiteOrCommunityOrUser::Site(p) => p.last_refreshed_at,
+      SiteOrCommunityOrUser::UserOrCommunity(p) => p.last_refreshed_at()?,
+    })
+  }
+
+  #[tracing::instrument(skip_all)]
+  async fn read_from_id(
+    _object_id: Url,
+    _data: &Data<Self::DataType>,
+  ) -> Result<Option<Self>, LemmyError> {
+    unimplemented!();
+  }
+
+  #[tracing::instrument(skip_all)]
+  async fn delete(self, data: &Data<Self::DataType>) -> Result<(), LemmyError> {
+    match self {
+      SiteOrCommunityOrUser::Site(p) => p.delete(data).await,
+      SiteOrCommunityOrUser::UserOrCommunity(p) => p.delete(data).await,
+    }
+  }
+
+  async fn into_json(self, _data: &Data<Self::DataType>) -> Result<Self::Kind, LemmyError> {
+    unimplemented!()
+  }
+
+  #[tracing::instrument(skip_all)]
+  async fn verify(
+    apub: &Self::Kind,
+    expected_domain: &Url,
+    data: &Data<Self::DataType>,
+  ) -> Result<(), LemmyError> {
+    match apub {
+      SiteOrPersonOrGroup::Instance(a) => ApubSite::verify(a, expected_domain, data).await,
+      SiteOrPersonOrGroup::PersonOrGroup(a) => {
+        UserOrCommunity::verify(a, expected_domain, data).await
+      }
+    }
+  }
+
+  #[tracing::instrument(skip_all)]
+  async fn from_json(_apub: Self::Kind, _data: &Data<Self::DataType>) -> Result<Self, LemmyError> {
+    unimplemented!();
+  }
+}
+
+impl Actor for SiteOrCommunityOrUser {
+  fn id(&self) -> Url {
+    match self {
+      SiteOrCommunityOrUser::Site(u) => u.id(),
+      SiteOrCommunityOrUser::UserOrCommunity(c) => c.id(),
+    }
+  }
+
+  fn public_key_pem(&self) -> &str {
+    match self {
+      SiteOrCommunityOrUser::Site(p) => p.public_key_pem(),
+      SiteOrCommunityOrUser::UserOrCommunity(p) => p.public_key_pem(),
+    }
+  }
+
+  fn private_key_pem(&self) -> Option<String> {
+    match self {
+      SiteOrCommunityOrUser::Site(p) => p.private_key_pem(),
+      SiteOrCommunityOrUser::UserOrCommunity(p) => p.private_key_pem(),
+    }
+  }
+
+  fn inbox(&self) -> Url {
+    unimplemented!()
+  }
+}
@@ -1,4 +1,5 @@
 use crate::{
+  activities::GetActorType,
   objects::{community::ApubCommunity, person::ApubPerson},
   protocol::objects::{group::Group, person::Person},
 };
@@ -8,6 +9,7 @@ use activitypub_federation::{
 };
 use chrono::{DateTime, Utc};
 use lemmy_api_common::context::LemmyContext;
+use lemmy_db_schema::source::activity::ActorType;
 use lemmy_utils::error::LemmyError;
 use serde::{Deserialize, Serialize};
 use url::Url;
@@ -119,3 +121,12 @@ impl Actor for UserOrCommunity {
     unimplemented!()
   }
 }
+
+impl GetActorType for UserOrCommunity {
+  fn actor_type(&self) -> ActorType {
+    match self {
+      UserOrCommunity::User(p) => p.actor_type(),
+      UserOrCommunity::Community(p) => p.actor_type(),
+    }
+  }
+}
@@ -14,7 +14,7 @@ use std::{sync::Arc, time::Duration};
 use url::Url;

 pub mod activities;
-pub(crate) mod activity_lists;
+pub mod activity_lists;
 pub mod api;
 pub(crate) mod collections;
 pub mod fetcher;
@@ -1,4 +1,5 @@
 use crate::{
+  activities::GetActorType,
   check_apub_id_valid,
   local_site_data_cached,
   objects::instance::fetch_instance_actor_for_object,
@@ -20,6 +21,7 @@ use lemmy_api_common::{
 };
 use lemmy_db_schema::{
   source::{
+    activity::ActorType,
     actor_language::CommunityLanguage,
     community::{Community, CommunityUpdateForm},
   },
@@ -181,6 +183,12 @@ impl Actor for ApubCommunity {
   }
 }

+impl GetActorType for ApubCommunity {
+  fn actor_type(&self) -> ActorType {
+    ActorType::Community
+  }
+}
+
 impl ApubCommunity {
   /// For a given community, returns the inboxes of all followers.
   #[tracing::instrument(skip_all)]
@@ -1,4 +1,5 @@
 use crate::{
+  activities::GetActorType,
   check_apub_id_valid_with_strictness,
   local_site_data_cached,
   objects::read_from_string_or_source_opt,
@@ -23,12 +24,13 @@ use lemmy_api_common::{
 use lemmy_db_schema::{
   newtypes::InstanceId,
   source::{
+    activity::ActorType,
     actor_language::SiteLanguage,
     instance::Instance as DbInstance,
     site::{Site, SiteInsertForm},
   },
   traits::Crud,
-  utils::{naive_now, DbPool},
+  utils::naive_now,
 };
 use lemmy_utils::{
   error::LemmyError,
@@ -175,6 +177,11 @@ impl Actor for ApubSite {
     self.inbox_url.clone().into()
   }
 }
+impl GetActorType for ApubSite {
+  fn actor_type(&self) -> ActorType {
+    ActorType::Site
+  }
+}

 /// Try to fetch the instance actor (to make things like instance rules available).
 pub(in crate::objects) async fn fetch_instance_actor_for_object<T: Into<Url> + Clone>(
@@ -201,16 +208,6 @@ pub(in crate::objects) async fn fetch_instance_actor_for_object<T: Into<Url> + C
   }
 }

-pub(crate) async fn remote_instance_inboxes(pool: &mut DbPool<'_>) -> Result<Vec<Url>, LemmyError> {
-  Ok(
-    Site::read_remote_sites(pool)
-      .await?
-      .into_iter()
-      .map(|s| ApubSite::from(s).shared_inbox_or_inbox())
-      .collect(),
-  )
-}
-
 #[cfg(test)]
 pub(crate) mod tests {
   #![allow(clippy::unwrap_used)]
@@ -1,4 +1,5 @@
 use crate::{
+  activities::GetActorType,
   check_apub_id_valid_with_strictness,
   local_site_data_cached,
   objects::{instance::fetch_instance_actor_for_object, read_from_string_or_source_opt},
@@ -27,7 +28,10 @@ use lemmy_api_common::{
   },
 };
 use lemmy_db_schema::{
-  source::person::{Person as DbPerson, PersonInsertForm, PersonUpdateForm},
+  source::{
+    activity::ActorType,
+    person::{Person as DbPerson, PersonInsertForm, PersonUpdateForm},
+  },
   traits::{ApubActor, Crud},
   utils::naive_now,
 };
@@ -205,6 +209,12 @@ impl Actor for ApubPerson {
   }
 }

+impl GetActorType for ApubPerson {
+  fn actor_type(&self) -> ActorType {
+    ActorType::Person
+  }
+}
+
 #[cfg(test)]
 pub(crate) mod tests {
   #![allow(clippy::unwrap_used)]
@@ -30,6 +30,11 @@ impl SentActivity {
       .first::<Self>(conn)
       .await
   }
+  pub async fn read(pool: &mut DbPool<'_>, object_id: i64) -> Result<Self, Error> {
+    use crate::schema::sent_activity::dsl::sent_activity;
+    let conn = &mut get_conn(pool).await?;
+    sent_activity.find(object_id).first::<Self>(conn).await
+  }
 }

 impl ReceivedActivity {
@@ -62,7 +67,7 @@ mod tests {
   #![allow(clippy::indexing_slicing)]

   use super::*;
-  use crate::utils::build_db_pool_for_tests;
+  use crate::{source::activity::ActorType, utils::build_db_pool_for_tests};
   use serde_json::json;
   use serial_test::serial;
   use url::Url;
@@ -102,6 +107,13 @@ mod tests {
       ap_id: ap_id.clone(),
       data: data.clone(),
       sensitive,
+      actor_apub_id: Url::parse("http://example.com/u/exampleuser")
+        .unwrap()
+        .into(),
+      actor_type: ActorType::Person,
+      send_all_instances: false,
+      send_community_followers_of: None,
+      send_inboxes: vec![],
     };

     SentActivity::create(pool, form).await.unwrap();
@ -6,11 +6,13 @@ use crate::{
  utils::{functions::lower, get_conn, naive_now, now, DbPool},
};
use diesel::{
  dsl::insert_into,
  dsl::{count_star, insert_into},
  result::Error,
  sql_types::{Nullable, Timestamptz},
  ExpressionMethods,
  NullableExpressionMethods,
  QueryDsl,
  SelectableHelper,
};
use diesel_async::RunQueryDsl;

@ -62,15 +64,6 @@ impl Instance {
      .await
  }

  pub async fn dead_instances(pool: &mut DbPool<'_>) -> Result<Vec<String>, Error> {
    let conn = &mut get_conn(pool).await?;
    instance::table
      .select(instance::domain)
      .filter(coalesce(instance::updated, instance::published).lt(now() - 3.days()))
      .get_results(conn)
      .await
  }

  #[cfg(test)]
  pub async fn delete_all(pool: &mut DbPool<'_>) -> Result<usize, Error> {
    let conn = &mut get_conn(pool).await?;

@ -94,6 +87,44 @@ impl Instance {
      .await
  }

  /// returns a list of all instances, each with a flag of whether the instance is allowed or not and dead or not
  /// ordered by id
  pub async fn read_all_with_blocked_and_dead(
    pool: &mut DbPool<'_>,
  ) -> Result<Vec<(Self, bool, bool)>, Error> {
    let conn = &mut get_conn(pool).await?;
    let is_dead_expr = coalesce(instance::updated, instance::published).lt(now() - 3.days());
    // this needs to be done in two steps because the meaning of the "blocked" column depends on the existence
    // of any value at all in the allowlist. (so a normal join wouldn't work)
    let use_allowlist = federation_allowlist::table
      .select(count_star().gt(0))
      .get_result::<bool>(conn)
      .await?;
    if use_allowlist {
      instance::table
        .left_join(federation_allowlist::table)
        .select((
          Self::as_select(),
          federation_allowlist::id.nullable().is_not_null(),
          is_dead_expr,
        ))
        .order_by(instance::id)
        .get_results::<(Self, bool, bool)>(conn)
        .await
    } else {
      instance::table
        .left_join(federation_blocklist::table)
        .select((
          Self::as_select(),
          federation_blocklist::id.nullable().is_null(),
          is_dead_expr,
        ))
        .order_by(instance::id)
        .get_results::<(Self, bool, bool)>(conn)
        .await
    }
  }

  pub async fn linked(pool: &mut DbPool<'_>) -> Result<Vec<Self>, Error> {
    let conn = &mut get_conn(pool).await?;
    instance::table
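Illustrative sketch (not part of the commit): one way a caller can consume the (instance, allowed, dead) triples returned by read_all_with_blocked_and_dead above. The helper name count_federatable is hypothetical; the types are the ones from this diff.

use lemmy_db_schema::{source::instance::Instance, utils::DbPool};

/// Hypothetical helper: count instances eligible for federation,
/// i.e. allowed by the allow/block lists and not dead (seen within 3 days).
async fn count_federatable(pool: &mut DbPool<'_>) -> Result<usize, diesel::result::Error> {
  let instances = Instance::read_all_with_blocked_and_dead(pool).await?;
  Ok(
    instances
      .iter()
      .filter(|(_, allowed, dead)| *allowed && !dead)
      .count(),
  )
}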
@ -1,6 +1,6 @@
use crate::{
  newtypes::{DbUrl, SiteId},
  newtypes::{DbUrl, InstanceId, SiteId},
  schema::site::dsl::{actor_id, id, site},
  schema::site::dsl::{actor_id, id, instance_id, site},
  source::{
    actor_language::SiteLanguage,
    site::{Site, SiteInsertForm, SiteUpdateForm},

@ -8,7 +8,7 @@ use crate::{
  traits::Crud,
  utils::{get_conn, DbPool},
};
use diesel::{dsl::insert_into, result::Error, ExpressionMethods, QueryDsl};
use diesel::{dsl::insert_into, result::Error, ExpressionMethods, OptionalExtension, QueryDsl};
use diesel_async::RunQueryDsl;
use url::Url;

@ -61,19 +61,29 @@ impl Crud for Site {
  }
}

impl Site {
  pub async fn read_from_instance_id(
    pool: &mut DbPool<'_>,
    _instance_id: InstanceId,
  ) -> Result<Option<Self>, Error> {
    let conn = &mut get_conn(pool).await?;
    site
      .filter(instance_id.eq(_instance_id))
      .get_result(conn)
      .await
      .optional()
  }
  pub async fn read_from_apub_id(
    pool: &mut DbPool<'_>,
    object_id: &DbUrl,
  ) -> Result<Option<Self>, Error> {
    let conn = &mut get_conn(pool).await?;
    Ok(
    site
      .filter(actor_id.eq(object_id))
      .first::<Site>(conn)
      .await
      .ok()
      .optional()
      .map(Into::into),
      .map(Into::into)
    )
  }

  pub async fn read_remote_sites(pool: &mut DbPool<'_>) -> Result<Vec<Self>, Error> {
@ -168,7 +168,7 @@ pub struct CustomEmojiId(i32);
pub struct LtreeDef(pub String);

#[repr(transparent)]
#[derive(Clone, PartialEq, Eq, Serialize, Deserialize, Debug)]
#[derive(Clone, PartialEq, Eq, Serialize, Deserialize, Debug, Hash)]
#[cfg_attr(feature = "full", derive(AsExpression, FromSqlRow))]
#[cfg_attr(feature = "full", diesel(sql_type = diesel::sql_types::Text))]
pub struct DbUrl(pub(crate) Box<Url>);

@ -255,3 +255,9 @@ impl TS for DbUrl {
    true
  }
}

impl InstanceId {
  pub fn inner(self) -> i32 {
    self.0
  }
}
@ -1,6 +1,10 @@
// @generated automatically by Diesel CLI.

pub mod sql_types {
  #[derive(diesel::sql_types::SqlType)]
  #[diesel(postgres_type(name = "actor_type_enum"))]
  pub struct ActorTypeEnum;

  #[derive(diesel::sql_types::SqlType)]
  #[diesel(postgres_type(name = "listing_type_enum"))]
  pub struct ListingTypeEnum;

@ -299,6 +303,16 @@ diesel::table! {
  }
}

diesel::table! {
  federation_queue_state (id) {
    id -> Int4,
    instance_id -> Int4,
    last_successful_id -> Int8,
    fail_count -> Int4,
    last_retry -> Timestamptz,
  }
}

diesel::table! {
  image_upload (id) {
    id -> Int4,

@ -804,12 +818,20 @@ diesel::table! {
}

diesel::table! {
  use diesel::sql_types::*;
  use super::sql_types::ActorTypeEnum;

  sent_activity (id) {
    id -> Int8,
    ap_id -> Text,
    data -> Json,
    sensitive -> Bool,
    published -> Timestamptz,
    send_inboxes -> Array<Nullable<Text>>,
    send_community_followers_of -> Nullable<Int4>,
    send_all_instances -> Bool,
    actor_type -> ActorTypeEnum,
    actor_apub_id -> Nullable<Text>,
  }
}

@ -904,6 +926,7 @@ diesel::joinable!(custom_emoji_keyword -> custom_emoji (custom_emoji_id));
diesel::joinable!(email_verification -> local_user (local_user_id));
diesel::joinable!(federation_allowlist -> instance (instance_id));
diesel::joinable!(federation_blocklist -> instance (instance_id));
diesel::joinable!(federation_queue_state -> instance (instance_id));
diesel::joinable!(image_upload -> local_user (local_user_id));
diesel::joinable!(local_site -> site (site_id));
diesel::joinable!(local_site_rate_limit -> local_site (local_site_id));

@ -979,6 +1002,7 @@ diesel::allow_tables_to_appear_in_same_query!(
  email_verification,
  federation_allowlist,
  federation_blocklist,
  federation_queue_state,
  image_upload,
  instance,
  language,
@ -1,7 +1,55 @@
use crate::{newtypes::DbUrl, schema::sent_activity};
use crate::{
  newtypes::{CommunityId, DbUrl},
  schema::sent_activity,
};
use chrono::{DateTime, Utc};
use diesel::{sql_types::Nullable, Queryable};
use serde_json::Value;
use std::fmt::Debug;
use std::{collections::HashSet, fmt::Debug};
use url::Url;

#[derive(FromSqlRow, PartialEq, Eq, Debug, Default, Clone)]
/// describes where an activity should be sent
pub struct ActivitySendTargets {
  /// send to these inboxes explicitly
  pub inboxes: HashSet<Url>,
  /// send to all followers of these local communities
  pub community_followers_of: Option<CommunityId>,
  /// send to all remote instances
  pub all_instances: bool,
}

// todo: in different file?
impl ActivitySendTargets {
  pub fn empty() -> ActivitySendTargets {
    ActivitySendTargets::default()
  }
  pub fn to_inbox(url: Url) -> ActivitySendTargets {
    let mut a = ActivitySendTargets::empty();
    a.inboxes.insert(url);
    a
  }
  pub fn to_local_community_followers(id: CommunityId) -> ActivitySendTargets {
    let mut a = ActivitySendTargets::empty();
    a.community_followers_of = Some(id);
    a
  }
  pub fn to_all_instances() -> ActivitySendTargets {
    let mut a = ActivitySendTargets::empty();
    a.all_instances = true;
    a
  }
  pub fn set_all_instances(&mut self) {
    self.all_instances = true;
  }

  pub fn add_inbox(&mut self, inbox: Url) {
    self.inboxes.insert(inbox);
  }
  pub fn add_inboxes(&mut self, inboxes: impl Iterator<Item = Url>) {
    self.inboxes.extend(inboxes);
  }
}

#[derive(PartialEq, Eq, Debug, Queryable)]
#[diesel(table_name = sent_activity)]

@ -11,13 +59,32 @@ pub struct SentActivity {
  pub data: Value,
  pub sensitive: bool,
  pub published: DateTime<Utc>,
  pub send_inboxes: Vec<Option<DbUrl>>,
  pub send_community_followers_of: Option<CommunityId>,
  pub send_all_instances: bool,
  pub actor_type: ActorType,
  pub actor_apub_id: Option<DbUrl>,
}

#[derive(Insertable)]
#[diesel(table_name = sent_activity)]
pub struct SentActivityForm {
  pub ap_id: DbUrl,
  pub data: Value,
  pub sensitive: bool,
  pub send_inboxes: Vec<Option<DbUrl>>,
  pub send_community_followers_of: Option<i32>,
  pub send_all_instances: bool,
  pub actor_type: ActorType,
  pub actor_apub_id: DbUrl,
}

#[derive(Clone, Copy, Debug, diesel_derive_enum::DbEnum, PartialEq, Eq)]
#[ExistingTypePath = "crate::schema::sql_types::ActorTypeEnum"]
pub enum ActorType {
  Site,
  Community,
  Person,
}

#[derive(PartialEq, Eq, Debug, Queryable)]
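Illustrative sketch (not part of the commit): how the ActivitySendTargets constructors and mutators above are meant to be combined. The function name and the extra inbox URL are made up for illustration.

use lemmy_db_schema::{newtypes::CommunityId, source::activity::ActivitySendTargets};
use url::Url;

// Hypothetical: deliver to the followers of one local community plus one explicit inbox.
fn example_targets(community_id: CommunityId, extra_inbox: Url) -> ActivitySendTargets {
  let mut targets = ActivitySendTargets::to_local_community_followers(community_id);
  targets.add_inbox(extra_inbox);
  targets
}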
@ -11,7 +11,7 @@ use typed_builder::TypedBuilder;

#[skip_serializing_none]
#[derive(Clone, PartialEq, Eq, Debug, Serialize, Deserialize)]
#[cfg_attr(feature = "full", derive(Queryable, Identifiable, TS))]
#[cfg_attr(feature = "full", derive(Queryable, Selectable, Identifiable, TS))]
#[cfg_attr(feature = "full", diesel(table_name = instance))]
#[cfg_attr(feature = "full", ts(export))]
/// A federated instance / site.
@ -396,6 +396,9 @@ pub mod functions {
  }

  sql_function!(fn lower(x: Text) -> Text);

  // really this function is variadic, this just adds the two-argument version
  sql_function!(fn coalesce<T: diesel::sql_types::SqlType + diesel::sql_types::SingleValue>(x: diesel::sql_types::Nullable<T>, y: T) -> T);
}

pub const DELETED_REPLACEMENT_TEXT: &str = "*Permanently Deleted*";
@ -28,3 +28,4 @@ diesel-async = { workspace = true, features = [
serde = { workspace = true }
serde_with = { workspace = true }
ts-rs = { workspace = true, optional = true }
chrono.workspace = true
@ -1,21 +1,47 @@
use crate::structs::CommunityFollowerView;
use chrono::Utc;
use diesel::{
  dsl::{count_star, not},
  result::Error,
  sql_function,
  ExpressionMethods,
  QueryDsl,
};
use diesel_async::RunQueryDsl;
use lemmy_db_schema::{
  newtypes::{CommunityId, DbUrl, PersonId},
  newtypes::{CommunityId, DbUrl, InstanceId, PersonId},
  schema::{community, community_follower, person},
  utils::{get_conn, DbPool},
  utils::{functions::coalesce, get_conn, DbPool},
};

sql_function!(fn coalesce(x: diesel::sql_types::Nullable<diesel::sql_types::Text>, y: diesel::sql_types::Text) -> diesel::sql_types::Text);

impl CommunityFollowerView {
  /// return a list of local community ids and remote inboxes that at least one user of the given instance has followed
  pub async fn get_instance_followed_community_inboxes(
    pool: &mut DbPool<'_>,
    instance_id: InstanceId,
    published_since: chrono::DateTime<Utc>,
  ) -> Result<Vec<(CommunityId, DbUrl)>, Error> {
    let conn = &mut get_conn(pool).await?;
    // In most cases this will fetch the same url many times (the shared inbox url)
    // PG will only send a single copy to rust, but it has to scan through all follower rows (same as it was before).
    // So on the PG side it would be possible to optimize this further by adding e.g. a new table community_followed_instances (community_id, instance_id)
    // that would work for all instances that support fully shared inboxes.
    // It would be a bit more complicated though to keep it in sync.

    community_follower::table
      .inner_join(community::table)
      .inner_join(person::table)
      .filter(person::instance_id.eq(instance_id))
      .filter(community::local) // this should be a no-op since community_followers table only has local-person+remote-community or remote-person+local-community
      .filter(not(person::local))
      .filter(community_follower::published.gt(published_since.naive_utc()))
      .select((
        community::id,
        coalesce(person::shared_inbox_url, person::inbox_url),
      ))
      .distinct() // only need each community_id, inbox combination once
      .load::<(CommunityId, DbUrl)>(conn)
      .await
  }
  pub async fn get_community_follower_inboxes(
    pool: &mut DbPool<'_>,
    community_id: CommunityId,
41
crates/federate/Cargo.toml
Normal file

@ -0,0 +1,41 @@
[package]
name = "lemmy_federate"
version.workspace = true
edition.workspace = true
description.workspace = true
license.workspace = true
homepage.workspace = true
documentation.workspace = true
repository.workspace = true

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
lemmy_api_common.workspace = true
lemmy_apub.workspace = true
lemmy_db_schema = { workspace = true, features = ["full"] }
lemmy_db_views_actor.workspace = true
lemmy_utils.workspace = true

activitypub_federation.workspace = true
anyhow.workspace = true
futures.workspace = true
chrono.workspace = true
diesel = { workspace = true, features = ["postgres", "chrono", "serde_json"] }
diesel-async = { workspace = true, features = ["deadpool", "postgres"] }
once_cell.workspace = true
reqwest.workspace = true
serde_json.workspace = true
serde.workspace = true
tokio = { workspace = true, features = ["full"] }
tracing.workspace = true

async-trait = "0.1.71"
bytes = "1.4.0"
enum_delegate = "0.2.0"
moka = { version = "0.11.2", features = ["future"] }
openssl = "0.10.55"
reqwest-middleware = "0.2.2"
reqwest-tracing = "0.4.5"
tokio-util = "0.7.8"
tracing-subscriber = "0.3.17"

63
crates/federate/src/federation_queue_state.rs
Normal file

@ -0,0 +1,63 @@
use crate::util::ActivityId;
use anyhow::Result;
use chrono::{DateTime, TimeZone, Utc};
use diesel::prelude::*;
use diesel_async::RunQueryDsl;
use lemmy_db_schema::{
  newtypes::InstanceId,
  utils::{get_conn, DbPool},
};

#[derive(Queryable, Selectable, Insertable, AsChangeset, Clone)]
#[diesel(table_name = lemmy_db_schema::schema::federation_queue_state)]
#[diesel(check_for_backend(diesel::pg::Pg))]
pub struct FederationQueueState {
  pub instance_id: InstanceId,
  pub last_successful_id: ActivityId, // todo: i64
  pub fail_count: i32,
  pub last_retry: DateTime<Utc>,
}

impl FederationQueueState {
  /// load state or return a default empty value
  pub async fn load(
    pool: &mut DbPool<'_>,
    instance_id_: InstanceId,
  ) -> Result<FederationQueueState> {
    use lemmy_db_schema::schema::federation_queue_state::dsl::{
      federation_queue_state,
      instance_id,
    };
    let conn = &mut get_conn(pool).await?;
    Ok(
      federation_queue_state
        .filter(instance_id.eq(&instance_id_))
        .select(FederationQueueState::as_select())
        .get_result(conn)
        .await
        .optional()?
        .unwrap_or(FederationQueueState {
          instance_id: instance_id_,
          fail_count: 0,
          last_retry: Utc.timestamp_nanos(0),
          last_successful_id: -1, // this value is set to the most current id for new instances
        }),
    )
  }
  pub async fn upsert(pool: &mut DbPool<'_>, state: &FederationQueueState) -> Result<()> {
    use lemmy_db_schema::schema::federation_queue_state::dsl::{
      federation_queue_state,
      instance_id,
    };
    let conn = &mut get_conn(pool).await?;

    state
      .insert_into(federation_queue_state)
      .on_conflict(instance_id)
      .do_update()
      .set(state)
      .execute(conn)
      .await?;
    Ok(())
  }
}
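Illustrative sketch (not part of the commit): assuming the load/upsert API above and a connection pool, recording a failed delivery attempt for an instance could look roughly like this (crate-internal, since the module is private).

use crate::federation_queue_state::FederationQueueState;
use chrono::Utc;
use lemmy_db_schema::{newtypes::InstanceId, utils::DbPool};

async fn record_failure(pool: &mut DbPool<'_>, instance_id: InstanceId) -> anyhow::Result<()> {
  // load returns a default row (fail_count 0, last_successful_id -1) for unknown instances
  let mut state = FederationQueueState::load(pool, instance_id).await?;
  state.fail_count += 1;
  state.last_retry = Utc::now();
  // insert-or-update keyed on the unique instance_id column
  FederationQueueState::upsert(pool, &state).await?;
  Ok(())
}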
207
crates/federate/src/lib.rs
Normal file

@ -0,0 +1,207 @@
use crate::{
  util::{retry_sleep_duration, CancellableTask},
  worker::InstanceWorker,
};
use activitypub_federation::config::FederationConfig;
use chrono::{Local, Timelike};
use federation_queue_state::FederationQueueState;
use lemmy_api_common::context::LemmyContext;
use lemmy_db_schema::{
  newtypes::InstanceId,
  source::instance::Instance,
  utils::{ActualDbPool, DbPool},
};
use std::{collections::HashMap, time::Duration};
use tokio::{
  sync::mpsc::{unbounded_channel, UnboundedReceiver},
  time::sleep,
};
use tokio_util::sync::CancellationToken;

mod federation_queue_state;
mod util;
mod worker;

static WORKER_EXIT_TIMEOUT: Duration = Duration::from_secs(30);
#[cfg(debug_assertions)]
static INSTANCES_RECHECK_DELAY: Duration = Duration::from_secs(5);
#[cfg(not(debug_assertions))]
static INSTANCES_RECHECK_DELAY: Duration = Duration::from_secs(60);

pub struct Opts {
  /// how many processes you are starting in total
  pub process_count: i32,
  /// the index of this process (1-based: 1 - process_count)
  pub process_index: i32,
}

async fn start_stop_federation_workers(
  opts: Opts,
  pool: ActualDbPool,
  federation_config: FederationConfig<LemmyContext>,
  cancel: CancellationToken,
) -> anyhow::Result<()> {
  let mut workers = HashMap::<InstanceId, CancellableTask<_>>::new();

  let (stats_sender, stats_receiver) = unbounded_channel();
  let exit_print = tokio::spawn(receive_print_stats(pool.clone(), stats_receiver));
  let pool2 = &mut DbPool::Pool(&pool);
  let process_index = opts.process_index - 1;
  let local_domain = federation_config.settings().get_hostname_without_port()?;
  loop {
    let mut total_count = 0;
    let mut dead_count = 0;
    let mut disallowed_count = 0;
    for (instance, allowed, is_dead) in Instance::read_all_with_blocked_and_dead(pool2).await? {
      if instance.domain == local_domain {
        continue;
      }
      if instance.id.inner() % opts.process_count != process_index {
        continue;
      }
      total_count += 1;
      if !allowed {
        disallowed_count += 1;
      }
      if is_dead {
        dead_count += 1;
      }
      let should_federate = allowed && !is_dead;
      if should_federate {
        if workers.contains_key(&instance.id) {
          if workers
            .get(&instance.id)
            .map(util::CancellableTask::has_ended)
            .unwrap_or(false)
          {
            // task must have errored out, remove and recreate it
            let worker = workers
              .remove(&instance.id)
              .expect("just checked contains_key");
            tracing::error!(
              "worker for {} has stopped, recreating: {:?}",
              instance.domain,
              worker.cancel().await
            );
          } else {
            continue;
          }
        }
        // create new worker
        let stats_sender = stats_sender.clone();
        let context = federation_config.to_request_data();
        let pool = pool.clone();
        workers.insert(
          instance.id,
          CancellableTask::spawn(WORKER_EXIT_TIMEOUT, |stop| async move {
            InstanceWorker::init_and_loop(
              instance,
              context,
              &mut DbPool::Pool(&pool),
              stop,
              stats_sender,
            )
            .await?;
            Ok(())
          }),
        );
      } else if !should_federate {
        if let Some(worker) = workers.remove(&instance.id) {
          if let Err(e) = worker.cancel().await {
            tracing::error!("error stopping worker: {e}");
          }
        }
      }
    }
    let worker_count = workers.len();
    tracing::info!("Federating to {worker_count}/{total_count} instances ({dead_count} dead, {disallowed_count} disallowed)");
    tokio::select! {
      () = sleep(INSTANCES_RECHECK_DELAY) => {},
      _ = cancel.cancelled() => { break; }
    }
  }
  drop(stats_sender);
  tracing::warn!(
    "Waiting for {} workers ({:.2?} max)",
    workers.len(),
    WORKER_EXIT_TIMEOUT
  );
  // the cancel futures need to be awaited concurrently for the shutdown processes to be triggered concurrently
  futures::future::join_all(workers.into_values().map(util::CancellableTask::cancel)).await;
  exit_print.await?;
  Ok(())
}

/// starts and stops federation workers depending on which instances are in the db
/// await the returned future to stop/cancel all workers gracefully
pub fn start_stop_federation_workers_cancellable(
  opts: Opts,
  pool: ActualDbPool,
  config: FederationConfig<LemmyContext>,
) -> CancellableTask<()> {
  CancellableTask::spawn(WORKER_EXIT_TIMEOUT, move |c| {
    start_stop_federation_workers(opts, pool, config, c)
  })
}

/// every 60s, print the state for every instance. exits if the receiver is done (all senders dropped)
async fn receive_print_stats(
  pool: ActualDbPool,
  mut receiver: UnboundedReceiver<(String, FederationQueueState)>,
) {
  let pool = &mut DbPool::Pool(&pool);
  let mut printerval = tokio::time::interval(Duration::from_secs(60));
  printerval.tick().await; // skip first
  let mut stats = HashMap::new();
  loop {
    tokio::select! {
      ele = receiver.recv() => {
        let Some((domain, ele)) = ele else {
          tracing::info!("done. quitting");
          print_stats(pool, &stats).await;
          return;
        };
        stats.insert(domain, ele);
      },
      _ = printerval.tick() => {
        print_stats(pool, &stats).await;
      }
    }
  }
}

async fn print_stats(pool: &mut DbPool<'_>, stats: &HashMap<String, FederationQueueState>) {
  let last_id = crate::util::get_latest_activity_id(pool).await;
  let Ok(last_id) = last_id else {
    tracing::error!("could not get last id");
    return;
  };
  // it's expected that the values are a bit out of date, everything < SAVE_STATE_EVERY should be considered up to date
  tracing::info!(
    "Federation state as of {}:",
    Local::now()
      .with_nanosecond(0)
      .expect("0 is valid nanos")
      .to_rfc3339()
  );
  // todo: less noisy output (only output failing instances and summary for successful)
  // todo: more stats (act/sec, avg http req duration)
  let mut ok_count = 0;
  for (domain, stat) in stats {
    let behind = last_id - stat.last_successful_id;
    if stat.fail_count > 0 {
      tracing::info!(
        "{}: Warning. {} behind, {} consecutive fails, current retry delay {:.2?}",
        domain,
        behind,
        stat.fail_count,
        retry_sleep_duration(stat.fail_count)
      );
    } else if behind > 0 {
      tracing::info!("{}: Ok. {} behind", domain, behind);
    } else {
      ok_count += 1;
    }
  }
  tracing::info!("{ok_count} others up to date");
}
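Illustrative sketch (not part of the commit): the process_count/process_index split used in start_stop_federation_workers, reduced to a small self-contained check of the sharding rule (instance.id.inner() % process_count compared against the zero-based index). The ids are made up.

// Mirrors the check in start_stop_federation_workers: true if this process owns the instance.
// process_index is 1-based as in Opts, so it is converted to 0-based here.
fn is_responsible(instance_id: i32, process_count: i32, process_index: i32) -> bool {
  instance_id % process_count == process_index - 1
}

#[test]
fn sharding_gives_each_instance_exactly_one_owner() {
  // with 3 processes, every instance id maps to exactly one process
  for id in 1..=6 {
    let owners = (1..=3).filter(|p| is_responsible(id, 3, *p)).count();
    assert_eq!(owners, 1);
  }
}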
198
crates/federate/src/util.rs
Normal file

@ -0,0 +1,198 @@
use anyhow::{anyhow, Context, Result};
use diesel::{
  prelude::*,
  sql_types::{Bool, Int8},
};
use diesel_async::RunQueryDsl;
use lemmy_apub::{
  activity_lists::SharedInboxActivities,
  fetcher::{site_or_community_or_user::SiteOrCommunityOrUser, user_or_community::UserOrCommunity},
};
use lemmy_db_schema::{
  source::{
    activity::{ActorType, SentActivity},
    community::Community,
    person::Person,
    site::Site,
  },
  traits::ApubActor,
  utils::{get_conn, DbPool},
};
use moka::future::Cache;
use once_cell::sync::Lazy;
use reqwest::Url;
use serde_json::Value;
use std::{
  future::Future,
  pin::Pin,
  sync::{Arc, RwLock},
  time::Duration,
};
use tokio::{task::JoinHandle, time::sleep};
use tokio_util::sync::CancellationToken;

pub struct CancellableTask<R: Send + 'static> {
  f: Pin<Box<dyn Future<Output = Result<R, anyhow::Error>> + Send + 'static>>,
  ended: Arc<RwLock<bool>>,
}

impl<R: Send + 'static> CancellableTask<R> {
  /// spawn a task but with graceful shutdown
  pub fn spawn<F>(
    timeout: Duration,
    task: impl FnOnce(CancellationToken) -> F,
  ) -> CancellableTask<R>
  where
    F: Future<Output = Result<R>> + Send + 'static,
  {
    let stop = CancellationToken::new();
    let task = task(stop.clone());
    let ended = Arc::new(RwLock::new(false));
    let ended_write = ended.clone();
    let task: JoinHandle<Result<R>> = tokio::spawn(async move {
      match task.await {
        Ok(o) => Ok(o),
        Err(e) => {
          *ended_write.write().expect("poisoned") = true;
          Err(e)
        }
      }
    });
    let abort = task.abort_handle();
    CancellableTask {
      ended,
      f: Box::pin(async move {
        stop.cancel();
        tokio::select! {
          r = task => {
            Ok(r.context("could not join")??)
          },
          _ = sleep(timeout) => {
            abort.abort();
            tracing::warn!("Graceful shutdown timed out, aborting task");
            Err(anyhow!("task aborted due to timeout"))
          }
        }
      }),
    }
  }

  /// cancel the cancel signal, wait for timeout for the task to stop gracefully, otherwise abort it
  pub async fn cancel(self) -> Result<R, anyhow::Error> {
    self.f.await
  }
  pub fn has_ended(&self) -> bool {
    *self.ended.read().expect("poisoned")
  }
}

/// assuming apub priv key and ids are immutable, then we don't need to have TTL
/// TODO: capacity should be configurable maybe based on memory use
pub(crate) async fn get_actor_cached(
  pool: &mut DbPool<'_>,
  actor_type: ActorType,
  actor_apub_id: &Url,
) -> Result<Arc<SiteOrCommunityOrUser>> {
  static CACHE: Lazy<Cache<Url, Arc<SiteOrCommunityOrUser>>> =
    Lazy::new(|| Cache::builder().max_capacity(10000).build());
  CACHE
    .try_get_with(actor_apub_id.clone(), async {
      let url = actor_apub_id.clone().into();
      let person = match actor_type {
        ActorType::Site => SiteOrCommunityOrUser::Site(
          Site::read_from_apub_id(pool, &url)
            .await?
            .context("apub site not found")?
            .into(),
        ),
        ActorType::Community => SiteOrCommunityOrUser::UserOrCommunity(UserOrCommunity::Community(
          Community::read_from_apub_id(pool, &url)
            .await?
            .context("apub community not found")?
            .into(),
        )),
        ActorType::Person => SiteOrCommunityOrUser::UserOrCommunity(UserOrCommunity::User(
          Person::read_from_apub_id(pool, &url)
            .await?
            .context("apub person not found")?
            .into(),
        )),
      };
      Result::<_, anyhow::Error>::Ok(Arc::new(person))
    })
    .await
    .map_err(|e| anyhow::anyhow!("err getting actor {actor_type:?} {actor_apub_id}: {e:?}"))
}

/// this should maybe be a newtype like all the other PersonId CommunityId etc.
pub(crate) type ActivityId = i64;

type CachedActivityInfo = Option<Arc<(SentActivity, SharedInboxActivities)>>;
/// activities are immutable so cache does not need to have TTL
/// May return None if the corresponding id does not exist or is a received activity.
/// Holes in serials are expected behaviour in postgresql
/// todo: cache size should probably be configurable / dependent on desired memory usage
pub(crate) async fn get_activity_cached(
  pool: &mut DbPool<'_>,
  activity_id: ActivityId,
) -> Result<CachedActivityInfo> {
  static ACTIVITIES: Lazy<Cache<ActivityId, CachedActivityInfo>> =
    Lazy::new(|| Cache::builder().max_capacity(10000).build());
  ACTIVITIES
    .try_get_with(activity_id, async {
      let row = SentActivity::read(pool, activity_id)
        .await
        .optional()
        .context("could not read activity")?;
      let Some(mut row) = row else {
        return anyhow::Result::<_, anyhow::Error>::Ok(None);
      };
      // swap to avoid cloning
      let mut data = Value::Null;
      std::mem::swap(&mut row.data, &mut data);
      let activity_actual: SharedInboxActivities = serde_json::from_value(data)?;

      Ok(Some(Arc::new((row, activity_actual))))
    })
    .await
    .map_err(|e| anyhow::anyhow!("err getting activity: {e:?}"))
}

/// return the most current activity id (with 1 second cache)
pub(crate) async fn get_latest_activity_id(pool: &mut DbPool<'_>) -> Result<ActivityId> {
  static CACHE: Lazy<Cache<(), ActivityId>> = Lazy::new(|| {
    Cache::builder()
      .time_to_live(Duration::from_secs(1))
      .build()
  });
  CACHE
    .try_get_with((), async {
      let conn = &mut get_conn(pool).await?;
      let seq: Sequence =
        diesel::sql_query("select last_value, is_called from sent_activity_id_seq")
          .get_result(conn)
          .await?;
      let latest_id = if seq.is_called {
        seq.last_value as ActivityId
      } else {
        // if a PG sequence has never been used, last_value will actually be next_value
        (seq.last_value - 1) as ActivityId
      };
      anyhow::Result::<_, anyhow::Error>::Ok(latest_id as ActivityId)
    })
    .await
    .map_err(|e| anyhow::anyhow!("err getting id: {e:?}"))
}

/// how long to sleep based on how many retries have already happened
pub(crate) fn retry_sleep_duration(retry_count: i32) -> Duration {
  Duration::from_secs_f64(10.0 * 2.0_f64.powf(f64::from(retry_count)))
}

#[derive(QueryableByName)]
struct Sequence {
  #[diesel(sql_type = Int8)]
  last_value: i64, // this value is bigint for some reason even if sequence is int4
  #[diesel(sql_type = Bool)]
  is_called: bool,
}
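Illustrative sketch (not part of the commit): how CancellableTask::spawn/cancel above are intended to be used for a background loop with graceful shutdown. The loop body is made up; only the spawn/cancel calls reflect the API introduced here.

use std::time::Duration;
use tokio_util::sync::CancellationToken;

async fn example() -> anyhow::Result<()> {
  // spawn a loop that exits when the token is cancelled
  let task = crate::util::CancellableTask::spawn(Duration::from_secs(30), |stop: CancellationToken| async move {
    while !stop.is_cancelled() {
      tokio::time::sleep(Duration::from_secs(1)).await;
    }
    anyhow::Ok(())
  });
  // later: request shutdown and wait up to the timeout before the task is aborted
  task.cancel().await?;
  Ok(())
}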
312
crates/federate/src/worker.rs
Normal file

@ -0,0 +1,312 @@
use crate::{
  federation_queue_state::FederationQueueState,
  util::{get_activity_cached, get_actor_cached, get_latest_activity_id, retry_sleep_duration},
};
use activitypub_federation::{activity_sending::SendActivityTask, config::Data};
use anyhow::{Context, Result};
use chrono::{DateTime, TimeZone, Utc};
use lemmy_api_common::context::LemmyContext;
use lemmy_apub::activity_lists::SharedInboxActivities;
use lemmy_db_schema::{
  newtypes::{CommunityId, InstanceId},
  source::{activity::SentActivity, instance::Instance, site::Site},
  utils::DbPool,
};
use lemmy_db_views_actor::structs::CommunityFollowerView;
use lemmy_utils::error::LemmyErrorExt2;
use once_cell::sync::Lazy;
use reqwest::Url;
use std::{
  collections::{HashMap, HashSet},
  time::Duration,
};
use tokio::{sync::mpsc::UnboundedSender, time::sleep};
use tokio_util::sync::CancellationToken;
/// save state to db every n sends if there's no failures (otherwise state is saved after every attempt)
static CHECK_SAVE_STATE_EVERY_IT: i64 = 100;
static SAVE_STATE_EVERY_TIME: Duration = Duration::from_secs(60);
/// recheck for new federation work every n seconds
#[cfg(debug_assertions)]
static WORK_FINISHED_RECHECK_DELAY: Duration = Duration::from_secs(1);
#[cfg(not(debug_assertions))]
static WORK_FINISHED_RECHECK_DELAY: Duration = Duration::from_secs(30);
#[cfg(debug_assertions)]
static FOLLOW_ADDITIONS_RECHECK_DELAY: Lazy<chrono::Duration> =
  Lazy::new(|| chrono::Duration::seconds(1));
#[cfg(not(debug_assertions))]
static FOLLOW_ADDITIONS_RECHECK_DELAY: Lazy<chrono::Duration> =
  Lazy::new(|| chrono::Duration::minutes(1));
static FOLLOW_REMOVALS_RECHECK_DELAY: Lazy<chrono::Duration> =
  Lazy::new(|| chrono::Duration::hours(1));
pub(crate) struct InstanceWorker {
  instance: Instance,
  // load site lazily because if an instance is first seen due to being on allowlist,
  // the corresponding row in `site` may not exist yet since that is only added once
  // `fetch_instance_actor_for_object` is called.
  // (this should be unlikely to be relevant outside of the federation tests)
  site_loaded: bool,
  site: Option<Site>,
  followed_communities: HashMap<CommunityId, HashSet<Url>>,
  stop: CancellationToken,
  context: Data<LemmyContext>,
  stats_sender: UnboundedSender<(String, FederationQueueState)>,
  last_full_communities_fetch: DateTime<Utc>,
  last_incremental_communities_fetch: DateTime<Utc>,
  state: FederationQueueState,
  last_state_insert: DateTime<Utc>,
}

impl InstanceWorker {
  pub(crate) async fn init_and_loop(
    instance: Instance,
    context: Data<LemmyContext>,
    pool: &mut DbPool<'_>, // in theory there's a ref to the pool in context, but i couldn't get that to work wrt lifetimes
    stop: CancellationToken,
    stats_sender: UnboundedSender<(String, FederationQueueState)>,
  ) -> Result<(), anyhow::Error> {
    let state = FederationQueueState::load(pool, instance.id).await?;
    let mut worker = InstanceWorker {
      instance,
      site_loaded: false,
      site: None,
      followed_communities: HashMap::new(),
      stop,
      context,
      stats_sender,
      last_full_communities_fetch: Utc.timestamp_nanos(0),
      last_incremental_communities_fetch: Utc.timestamp_nanos(0),
      state,
      last_state_insert: Utc.timestamp_nanos(0),
    };
    worker.loop_until_stopped(pool).await
  }
  /// in a loop, fetch new activities from the db and send them to the inboxes of the given instance
  /// this worker only returns if (a) there is an internal error or (b) the cancellation token is cancelled (graceful exit)
  pub(crate) async fn loop_until_stopped(
    &mut self,
    pool: &mut DbPool<'_>,
  ) -> Result<(), anyhow::Error> {
    let save_state_every = chrono::Duration::from_std(SAVE_STATE_EVERY_TIME).expect("not negative");

    self.update_communities(pool).await?;
    self.initial_fail_sleep().await?;
    while !self.stop.is_cancelled() {
      self.loop_batch(pool).await?;
      if self.stop.is_cancelled() {
        break;
      }
      if (Utc::now() - self.last_state_insert) > save_state_every {
        self.save_and_send_state(pool).await?;
      }
      self.update_communities(pool).await?;
    }
    // final update of state in db
    self.save_and_send_state(pool).await?;
    Ok(())
  }

  async fn initial_fail_sleep(&mut self) -> Result<()> {
    // before starting queue, sleep remaining duration if last request failed
    if self.state.fail_count > 0 {
      let elapsed = (Utc::now() - self.state.last_retry).to_std()?;
      let required = retry_sleep_duration(self.state.fail_count);
      if elapsed >= required {
        return Ok(());
      }
      let remaining = required - elapsed;
      tokio::select! {
        () = sleep(remaining) => {},
        () = self.stop.cancelled() => {}
      }
    }
    Ok(())
  }
  async fn loop_batch(&mut self, pool: &mut DbPool<'_>) -> Result<()> {
    let latest_id = get_latest_activity_id(pool).await?;
    if self.state.last_successful_id == -1 {
      // this is the initial creation (instance first seen) of the federation queue for this instance
      // skip all past activities:
      self.state.last_successful_id = latest_id;
      // save here to ensure it's not read as 0 again later if no activities have happened
      self.save_and_send_state(pool).await?;
    }
    let mut id = self.state.last_successful_id;
    if id == latest_id {
      // no more work to be done, wait before rechecking
      tokio::select! {
        () = sleep(WORK_FINISHED_RECHECK_DELAY) => {},
        () = self.stop.cancelled() => {}
      }
      return Ok(());
    }
    let mut processed_activities = 0;
    while id < latest_id
      && processed_activities < CHECK_SAVE_STATE_EVERY_IT
      && !self.stop.is_cancelled()
    {
      id += 1;
      processed_activities += 1;
      let Some(ele) = get_activity_cached(pool, id)
        .await
        .context("failed reading activity from db")?
      else {
        self.state.last_successful_id = id;
        continue;
      };
      if let Err(e) = self.send_retry_loop(pool, &ele.0, &ele.1).await {
        tracing::warn!(
          "sending {} errored internally, skipping activity: {:?}",
          ele.0.ap_id,
          e
        );
      }
      if self.stop.is_cancelled() {
        return Ok(());
      }
      // send success!
      self.state.last_successful_id = id;
      self.state.fail_count = 0;
    }
    Ok(())
  }

  // this function will return successfully when (a) send succeeded or (b) worker cancelled
  // and will return an error if an internal error occurred (send errors cause an infinite loop)
  async fn send_retry_loop(
    &mut self,
    pool: &mut DbPool<'_>,
    activity: &SentActivity,
    object: &SharedInboxActivities,
  ) -> Result<()> {
    let inbox_urls = self
      .get_inbox_urls(pool, activity)
      .await
      .context("failed figuring out inbox urls")?;
    if inbox_urls.is_empty() {
      self.state.last_successful_id = activity.id;
      return Ok(());
    }
    let Some(actor_apub_id) = &activity.actor_apub_id else {
      return Ok(()); // activity was inserted before persistent queue was activated
    };
    let actor = get_actor_cached(pool, activity.actor_type, actor_apub_id)
      .await
      .context("failed getting actor instance (was it marked deleted / removed?)")?;

    let inbox_urls = inbox_urls.into_iter().collect();
    let requests = SendActivityTask::prepare(object, actor.as_ref(), inbox_urls, &self.context)
      .await
      .into_anyhow()?;
    for task in requests {
      // usually only one due to shared inbox
      tracing::info!("sending out {}", task);
      while let Err(e) = task.sign_and_send(&self.context).await {
        self.state.fail_count += 1;
        self.state.last_retry = Utc::now();
        let retry_delay: Duration = retry_sleep_duration(self.state.fail_count);
        tracing::info!(
          "{}: retrying {} attempt {} with delay {retry_delay:.2?}. ({e})",
          self.instance.domain,
          activity.id,
          self.state.fail_count
        );
        self.save_and_send_state(pool).await?;
        tokio::select! {
          () = sleep(retry_delay) => {},
          () = self.stop.cancelled() => {
            // save state to db and exit
            return Ok(());
          }
        }
      }
    }
    Ok(())
  }

  /// get inbox urls for sending the given activity to the given instance
  /// most often this will return 0 values (if instance doesn't care about the activity)
  /// or 1 value (the shared inbox)
  /// > 1 values only happens for non-lemmy software
  async fn get_inbox_urls(
    &mut self,
    pool: &mut DbPool<'_>,
    activity: &SentActivity,
  ) -> Result<HashSet<Url>> {
    let mut inbox_urls: HashSet<Url> = HashSet::new();

    if activity.send_all_instances {
      if !self.site_loaded {
        self.site = Site::read_from_instance_id(pool, self.instance.id).await?;
        self.site_loaded = true;
      }
      if let Some(site) = &self.site {
        // Nutomic: Most non-lemmy software won't have a site row. That means it can't handle these activities. So handling it like this is fine.
        inbox_urls.insert(site.inbox_url.inner().clone());
      }
    }
    if let Some(t) = &activity.send_community_followers_of {
      if let Some(urls) = self.followed_communities.get(t) {
        inbox_urls.extend(urls.iter().map(std::clone::Clone::clone));
      }
    }
    inbox_urls.extend(
      activity
        .send_inboxes
        .iter()
        .filter_map(std::option::Option::as_ref)
        .filter_map(|u| (u.domain() == Some(&self.instance.domain)).then(|| u.inner().clone())),
    );
    Ok(inbox_urls)
  }

  async fn update_communities(&mut self, pool: &mut DbPool<'_>) -> Result<()> {
    if (Utc::now() - self.last_full_communities_fetch) > *FOLLOW_REMOVALS_RECHECK_DELAY {
      // process removals every hour
      (self.followed_communities, self.last_full_communities_fetch) = self
        .get_communities(pool, self.instance.id, self.last_full_communities_fetch)
        .await?;
      self.last_incremental_communities_fetch = self.last_full_communities_fetch;
    }
    if (Utc::now() - self.last_incremental_communities_fetch) > *FOLLOW_ADDITIONS_RECHECK_DELAY {
      // process additions every minute
      let (news, time) = self
        .get_communities(
          pool,
          self.instance.id,
          self.last_incremental_communities_fetch,
        )
        .await?;
      self.followed_communities.extend(news);
      self.last_incremental_communities_fetch = time;
    }
    Ok(())
  }

  /// get a list of local communities with the remote inboxes on the given instance that care about them
  async fn get_communities(
    &mut self,
    pool: &mut DbPool<'_>,
    instance_id: InstanceId,
    last_fetch: DateTime<Utc>,
  ) -> Result<(HashMap<CommunityId, HashSet<Url>>, DateTime<Utc>)> {
    let new_last_fetch = Utc::now(); // update to time before fetch to ensure overlap
    Ok((
      CommunityFollowerView::get_instance_followed_community_inboxes(pool, instance_id, last_fetch)
        .await?
        .into_iter()
        .fold(HashMap::new(), |mut map, (c, u)| {
          map.entry(c).or_insert_with(HashSet::new).insert(u.into());
          map
        }),
      new_last_fetch,
    ))
  }
  async fn save_and_send_state(&mut self, pool: &mut DbPool<'_>) -> Result<()> {
    self.last_state_insert = Utc::now();
    FederationQueueState::upsert(pool, &self.state).await?;
    self
      .stats_sender
      .send((self.instance.domain.clone(), self.state.clone()))?;
    Ok(())
  }
}
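Illustrative sketch (not part of the commit): a feel for the retry schedule produced by retry_sleep_duration (10 * 2^fail_count seconds), which both the worker's send loop and initial_fail_sleep rely on. The test below just spells out a few values of the same formula.

use std::time::Duration;

// same formula as retry_sleep_duration in crates/federate/src/util.rs
fn retry_sleep_duration(retry_count: i32) -> Duration {
  Duration::from_secs_f64(10.0 * 2.0_f64.powf(f64::from(retry_count)))
}

#[test]
fn backoff_grows_exponentially() {
  assert_eq!(retry_sleep_duration(1).as_secs(), 20);
  assert_eq!(retry_sleep_duration(3).as_secs(), 80);
  // after ten consecutive failures the delay is already ~2.8 hours
  assert_eq!(retry_sleep_duration(10).as_secs(), 10_240);
}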
@ -239,6 +239,7 @@ impl<T, E: Into<anyhow::Error>> LemmyErrorExt<T, E> for Result<T, E> {
}
pub trait LemmyErrorExt2<T> {
  fn with_lemmy_type(self, error_type: LemmyErrorType) -> Result<T, LemmyError>;
  fn into_anyhow(self) -> Result<T, anyhow::Error>;
}

impl<T> LemmyErrorExt2<T> for Result<T, LemmyError> {

@ -248,6 +249,10 @@ impl<T> LemmyErrorExt2<T> for Result<T, LemmyError> {
      e
    })
  }
  // this function can't be an impl From or similar because it would conflict with one of the other broad Into<> implementations
  fn into_anyhow(self) -> Result<T, anyhow::Error> {
    self.map_err(|e| e.inner)
  }
}

#[cfg(test)]
@ -18,7 +18,6 @@ pub mod version;

use error::LemmyError;
use futures::Future;
use once_cell::sync::Lazy;
use std::time::Duration;
use tracing::Instrument;

@ -38,16 +37,6 @@ macro_rules! location_info {
  };
}

/// if true, all federation should happen synchronously. useful for debugging and testing.
/// defaults to true in debug mode, false in release mode
/// override to true by setting env LEMMY_SYNCHRONOUS_FEDERATION=1
/// override to false by setting env LEMMY_SYNCHRONOUS_FEDERATION=""
pub static SYNCHRONOUS_FEDERATION: Lazy<bool> = Lazy::new(|| {
  std::env::var("LEMMY_SYNCHRONOUS_FEDERATION")
    .map(|s| !s.is_empty())
    .unwrap_or(cfg!(debug_assertions))
});

/// tokio::spawn, but accepts a future that may fail and also
/// * logs errors
/// * attaches the spawned task to the tracing span of the caller for better logging
@ -0,0 +1,13 @@
ALTER TABLE sent_activity
  DROP COLUMN send_inboxes,
  DROP COLUMN send_community_followers_of,
  DROP COLUMN send_all_instances,
  DROP COLUMN actor_apub_id,
  DROP COLUMN actor_type;

DROP TYPE actor_type_enum;

DROP TABLE federation_queue_state;

DROP INDEX idx_community_follower_published;
@ -0,0 +1,32 @@
CREATE TYPE actor_type_enum AS enum (
  'site',
  'community',
  'person'
);

-- actor_apub_id only null for old entries before this migration
ALTER TABLE sent_activity
  ADD COLUMN send_inboxes text[] NOT NULL DEFAULT '{}', -- list of specific inbox urls
  ADD COLUMN send_community_followers_of integer DEFAULT NULL,
  ADD COLUMN send_all_instances boolean NOT NULL DEFAULT FALSE,
  ADD COLUMN actor_type actor_type_enum NOT NULL DEFAULT 'person',
  ADD COLUMN actor_apub_id text DEFAULT NULL;

ALTER TABLE sent_activity
  ALTER COLUMN send_inboxes DROP DEFAULT,
  ALTER COLUMN send_community_followers_of DROP DEFAULT,
  ALTER COLUMN send_all_instances DROP DEFAULT,
  ALTER COLUMN actor_type DROP DEFAULT,
  ALTER COLUMN actor_apub_id DROP DEFAULT;

CREATE TABLE federation_queue_state (
  id serial PRIMARY KEY,
  instance_id integer NOT NULL UNIQUE REFERENCES instance (id),
  last_successful_id bigint NOT NULL,
  fail_count integer NOT NULL,
  last_retry timestamptz NOT NULL
);

-- for incremental fetches of followers
CREATE INDEX idx_community_follower_published ON community_follower (published);
158
src/lib.rs
158
src/lib.rs
|
@@ -16,24 +16,26 @@ use crate::{
 use activitypub_federation::config::{FederationConfig, FederationMiddleware};
 use actix_cors::Cors;
 use actix_web::{
+  dev::ServerHandle,
   middleware::{self, ErrorHandlers},
   web::Data,
   App,
   HttpServer,
   Result,
 };
+use clap::{ArgAction, Parser};
 use lemmy_api_common::{
   context::LemmyContext,
   lemmy_db_views::structs::SiteView,
   request::build_user_agent,
-  send_activity::{ActivityChannel, MATCH_OUTGOING_ACTIVITIES},
+  send_activity::MATCH_OUTGOING_ACTIVITIES,
   utils::{
     check_private_instance_and_federation_enabled,
     local_site_rate_limit_to_rate_limit_config,
   },
 };
 use lemmy_apub::{
-  activities::{handle_outgoing_activities, match_outgoing_activities},
+  activities::match_outgoing_activities,
   VerifyUrlData,
   FEDERATION_HTTP_FETCH_LIMIT,
 };
@@ -41,18 +43,19 @@ use lemmy_db_schema::{
   source::secret::Secret,
   utils::{build_db_pool, get_database_url, run_migrations},
 };
+use lemmy_federate::{start_stop_federation_workers_cancellable, Opts};
 use lemmy_routes::{feeds, images, nodeinfo, webfinger};
 use lemmy_utils::{
   error::LemmyError,
   rate_limit::RateLimitCell,
   response::jsonify_plain_text_errors,
-  settings::SETTINGS,
-  SYNCHRONOUS_FEDERATION,
+  settings::{structs::Settings, SETTINGS},
 };
 use reqwest::Client;
-use reqwest_middleware::ClientBuilder;
+use reqwest_middleware::{ClientBuilder, ClientWithMiddleware};
 use reqwest_tracing::TracingMiddleware;
-use std::{env, thread, time::Duration};
+use std::{env, ops::Deref, thread, time::Duration};
+use tokio::signal::unix::SignalKind;
 use tracing::subscriber::set_global_default;
 use tracing_actix_web::TracingLogger;
 use tracing_error::ErrorLayer;
@@ -66,15 +69,53 @@ use {
   prometheus_metrics::serve_prometheus,
 };
 
+#[derive(Parser, Debug)]
+#[command(
+  version,
+  about = "A link aggregator for the fediverse",
+  long_about = "A link aggregator for the fediverse.\n\nThis is the Lemmy backend API server. This will connect to a PostgreSQL database, run any pending migrations and start accepting API requests."
+)]
+pub struct CmdArgs {
+  #[arg(long, default_value_t = false)]
+  /// Disables running scheduled tasks.
+  ///
+  /// If you are running multiple Lemmy server processes,
+  /// you probably want to disable scheduled tasks on all but one of the processes,
+  /// to avoid running the tasks more often than intended.
+  disable_scheduled_tasks: bool,
+  /// Whether or not to run the HTTP server.
+  ///
+  /// This can be used to run a Lemmy server process that only runs scheduled tasks.
+  #[arg(long, default_value_t = true, action=ArgAction::Set)]
+  http_server: bool,
+  /// Whether or not to emit outgoing ActivityPub messages.
+  ///
+  /// Set to true for a simple setup. Only set to false for horizontally scaled setups.
+  /// See https://join-lemmy.org/docs/administration/horizontal_scaling.html for details.
+  #[arg(long, default_value_t = true, action=ArgAction::Set)]
+  federate_activities: bool,
+  /// The index of this outgoing federation process.
+  ///
+  /// Defaults to 1/1. If you want to split the federation workload onto n servers, run each server 1≤i≤n with these args:
+  /// --federate-process-index i --federate-process-count n
+  ///
+  /// Make sure you have exactly one server running with each `i`, otherwise federation will randomly send duplicates or nothing.
+  ///
+  /// See https://join-lemmy.org/docs/administration/horizontal_scaling.html for more details.
+  #[arg(long, default_value_t = 1)]
+  federate_process_index: i32,
+  /// How many outgoing federation processes you are starting in total.
+  ///
+  /// If set, make sure to set --federate-process-index differently for each.
+  #[arg(long, default_value_t = 1)]
+  federate_process_count: i32,
+}
 /// Max timeout for http requests
 pub(crate) const REQWEST_TIMEOUT: Duration = Duration::from_secs(10);
 
 /// Placing the main function in lib.rs allows other crates to import it and embed Lemmy
-pub async fn start_lemmy_server() -> Result<(), LemmyError> {
-  let args: Vec<String> = env::args().collect();
-
-  let scheduled_tasks_enabled = args.get(1) != Some(&"--disable-scheduled-tasks".to_string());
-
+pub async fn start_lemmy_server(args: CmdArgs) -> Result<(), LemmyError> {
+  let scheduled_tasks_enabled = !args.disable_scheduled_tasks;
   let settings = SETTINGS.to_owned();
 
   // Run the DB migrations
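
To make the new flags concrete, here is a hedged sketch of a unit test that could sit next to CmdArgs in lib.rs, using clap's standard Parser::parse_from; the invocation values are made up for illustration and are not part of this commit:

#[cfg(test)]
mod cmd_args_example {
  use super::CmdArgs;
  use clap::Parser;

  #[test]
  fn parses_split_federation_flags() {
    // Hypothetical second process in a 3-way horizontally scaled setup:
    // no HTTP server, no scheduled tasks, only outgoing federation shard 2 of 3.
    let args = CmdArgs::parse_from([
      "lemmy_server",
      "--http-server", "false",
      "--disable-scheduled-tasks",
      "--federate-process-index", "2",
      "--federate-process-count", "3",
    ]);
    assert!(!args.http_server);
    assert!(args.disable_scheduled_tasks);
    assert_eq!(args.federate_process_index, 2);
    assert_eq!(args.federate_process_count, 3);
  }
}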
@@ -152,21 +193,73 @@ pub async fn start_lemmy_server() -> Result<(), LemmyError> {
   #[cfg(feature = "prometheus-metrics")]
   serve_prometheus(settings.prometheus.as_ref(), context.clone());
 
-  let settings_bind = settings.clone();
-
   let federation_config = FederationConfig::builder()
     .domain(settings.hostname.clone())
     .app_data(context.clone())
     .client(client.clone())
     .http_fetch_limit(FEDERATION_HTTP_FETCH_LIMIT)
-    .worker_count(settings.worker_count)
-    .retry_count(settings.retry_count)
-    .debug(*SYNCHRONOUS_FEDERATION)
+    .debug(cfg!(debug_assertions))
     .http_signature_compat(true)
     .url_verifier(Box::new(VerifyUrlData(context.inner_pool().clone())))
     .build()
     .await?;
 
+  MATCH_OUTGOING_ACTIVITIES
+    .set(Box::new(move |d, c| {
+      Box::pin(match_outgoing_activities(d, c))
+    }))
+    .expect("set function pointer");
+
+  let server = if args.http_server {
+    Some(create_http_server(
+      federation_config.clone(),
+      settings.clone(),
+      federation_enabled,
+      pictrs_client,
+    )?)
+  } else {
+    None
+  };
+  let federate = args.federate_activities.then(|| {
+    start_stop_federation_workers_cancellable(
+      Opts {
+        process_index: args.federate_process_index,
+        process_count: args.federate_process_count,
+      },
+      pool.clone(),
+      federation_config.clone(),
+    )
+  });
+  let mut interrupt = tokio::signal::unix::signal(SignalKind::interrupt())?;
+  let mut terminate = tokio::signal::unix::signal(SignalKind::terminate())?;
+
+  tokio::select! {
+    _ = tokio::signal::ctrl_c() => {
+      tracing::warn!("Received ctrl-c, shutting down gracefully...");
+    }
+    _ = interrupt.recv() => {
+      tracing::warn!("Received interrupt, shutting down gracefully...");
+    }
+    _ = terminate.recv() => {
+      tracing::warn!("Received terminate, shutting down gracefully...");
+    }
+  }
+  if let Some(server) = server {
+    server.stop(true).await;
+  }
+  if let Some(federate) = federate {
+    federate.cancel().await?;
+  }
+
+  Ok(())
+}
+
+fn create_http_server(
+  federation_config: FederationConfig<LemmyContext>,
+  settings: Settings,
+  federation_enabled: bool,
+  pictrs_client: ClientWithMiddleware,
+) -> Result<ServerHandle, LemmyError> {
   // this must come before the HttpServer creation
   // creates a middleware that populates http metrics for each path, method, and status code
   #[cfg(feature = "prometheus-metrics")]
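
For intuition about the Opts { process_index, process_count } split, a hedged sketch of one way instances could be partitioned across federation processes (the modulo rule here is an assumption for illustration; the actual assignment lives in the lemmy_federate crate):

// Illustrative only: map every remote instance to exactly one of `process_count`
// federation processes, with 1-based `process_index` as used by the CLI flags above.
fn handled_by_this_process(instance_id: i32, process_index: i32, process_count: i32) -> bool {
  // Each instance id lands on exactly one index, so activities are neither duplicated
  // nor dropped as long as every index 1..=process_count runs exactly once.
  instance_id.rem_euclid(process_count) + 1 == process_index
}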
@@ -175,21 +268,16 @@ pub async fn start_lemmy_server() -> Result<(), LemmyError> {
     .build()
     .expect("Should always be buildable");
 
-  MATCH_OUTGOING_ACTIVITIES
-    .set(Box::new(move |d, c| {
-      Box::pin(match_outgoing_activities(d, c))
-    }))
-    .expect("set function pointer");
-  let request_data = federation_config.to_request_data();
-  let outgoing_activities_task = tokio::task::spawn(handle_outgoing_activities(request_data));
-
+  let context: LemmyContext = federation_config.deref().clone();
+  let rate_limit_cell = federation_config.settings_updated_channel().clone();
+  let self_origin = settings.get_protocol_and_hostname();
   // Create Http server with websocket support
-  HttpServer::new(move || {
+  let server = HttpServer::new(move || {
     let cors_origin = env::var("LEMMY_CORS_ORIGIN");
     let cors_config = match (cors_origin, cfg!(debug_assertions)) {
       (Ok(origin), false) => Cors::default()
         .allowed_origin(&origin)
-        .allowed_origin(&settings.get_protocol_and_hostname()),
+        .allowed_origin(&self_origin),
       _ => Cors::default()
         .allow_any_origin()
         .allow_any_method()
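
The MATCH_OUTGOING_ACTIVITIES cell that moved out of this function is a set-once function pointer used to break a crate dependency cycle; below is a self-contained sketch of that general pattern (the types and names here are hypothetical, not Lemmy's actual ones):

use once_cell::sync::OnceCell;
use std::{future::Future, pin::Pin};

// A boxed async callback that is installed exactly once at startup and called from elsewhere.
type Callback = Box<dyn Fn(i32) -> Pin<Box<dyn Future<Output = ()> + Send>> + Send + Sync>;
static HANDLER: OnceCell<Callback> = OnceCell::new();

async fn handle(x: i32) {
  println!("handling {x}");
}

#[tokio::main]
async fn main() {
  HANDLER
    .set(Box::new(|x| Box::pin(handle(x))))
    .map_err(|_| "already set")
    .expect("set function pointer");
  (HANDLER.get().expect("installed"))(42).await;
}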
@@ -217,7 +305,7 @@ pub async fn start_lemmy_server() -> Result<(), LemmyError> {
 
     // The routes
     app
-      .configure(|cfg| api_routes_http::config(cfg, rate_limit_cell))
+      .configure(|cfg| api_routes_http::config(cfg, &rate_limit_cell))
       .configure(|cfg| {
         if federation_enabled {
           lemmy_apub::http::routes::config(cfg);
@@ -225,17 +313,15 @@ pub async fn start_lemmy_server() -> Result<(), LemmyError> {
       }
     })
     .configure(feeds::config)
-    .configure(|cfg| images::config(cfg, pictrs_client.clone(), rate_limit_cell))
+    .configure(|cfg| images::config(cfg, pictrs_client.clone(), &rate_limit_cell))
     .configure(nodeinfo::config)
   })
-  .bind((settings_bind.bind, settings_bind.port))?
-  .run()
-  .await?;
-
-  // Wait for outgoing apub sends to complete
-  ActivityChannel::close(outgoing_activities_task).await?;
-
-  Ok(())
+  .disable_signals()
+  .bind((settings.bind, settings.port))?
+  .run();
+  let handle = server.handle();
+  tokio::task::spawn(server);
+  Ok(handle)
 }
 
 pub fn init_logging(opentelemetry_url: &Option<Url>) -> Result<(), LemmyError> {
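
The disable_signals / handle / spawn changes above hand shutdown control to the caller (the tokio::select! in start_lemmy_server); here is a standalone sketch of the same actix-web idiom, with a made-up /ping route purely for illustration:

use actix_web::{dev::ServerHandle, web, App, HttpServer};

async fn run() -> std::io::Result<()> {
  // Build the server without its own signal handlers, so shutdown is driven
  // entirely by the handle returned below.
  let server = HttpServer::new(|| App::new().route("/ping", web::get().to(|| async { "pong" })))
    .disable_signals()
    .bind(("127.0.0.1", 8080))?
    .run();
  let handle: ServerHandle = server.handle();
  tokio::task::spawn(server);

  // ...later, e.g. after waiting on shutdown signals:
  handle.stop(true).await; // true = graceful, let in-flight requests finish
  Ok(())
}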
@@ -1,11 +1,14 @@
-use lemmy_server::{init_logging, start_lemmy_server};
+use clap::Parser;
+use lemmy_server::{init_logging, start_lemmy_server, CmdArgs};
 use lemmy_utils::{error::LemmyError, settings::SETTINGS};
 
 #[tokio::main]
 pub async fn main() -> Result<(), LemmyError> {
   init_logging(&SETTINGS.opentelemetry_url)?;
+  let args = CmdArgs::parse();
+
   #[cfg(not(feature = "embed-pictrs"))]
-  start_lemmy_server().await?;
+  start_lemmy_server(args).await?;
   #[cfg(feature = "embed-pictrs")]
   {
     let pictrs_port = &SETTINGS

@@ -33,7 +36,7 @@ pub async fn main() -> Result<(), LemmyError> {
     }))
     .init::<&str>(None)
     .expect("initialize pictrs config");
-    let (lemmy, pictrs) = tokio::join!(start_lemmy_server(), pict_rs::run());
+    let (lemmy, pictrs) = tokio::join!(start_lemmy_server(args), pict_rs::run());
     lemmy?;
     pictrs.expect("run pictrs");
   }
@@ -66,7 +66,6 @@ fn handle_error(span: Span, status_code: StatusCode, response_error: &dyn Respon
 
   // pre-formatting errors is a workaround for https://github.com/tokio-rs/tracing/issues/1565
   let display_error = format!("{response_error}");
-  let debug_error = format!("{response_error:?}");
 
   tracing::info_span!(
     parent: None,

@@ -74,12 +73,11 @@ fn handle_error(span: Span, status_code: StatusCode, response_error: &dyn Respon
   )
   .in_scope(|| {
     if status_code.is_client_error() {
-      tracing::warn!("{}\n{}", display_error, debug_error);
+      tracing::warn!("{}", display_error);
     } else {
-      tracing::error!("{}\n{}", display_error, debug_error);
+      tracing::error!("{}", display_error);
     }
   });
 
   span.record("exception.message", &tracing::field::display(display_error));
-  span.record("exception.details", &tracing::field::display(debug_error));
 }