Mirror of https://github.com/LemmyNet/lemmy.git, synced 2024-11-23 04:41:19 +00:00

Commit 2c479314ce — merge
Parent: a8adf2dfb7
90 changed files with 2162 additions and 398 deletions
.gitattributes (vendored, new file, 2 changed lines)

@@ -0,0 +1,2 @@
+# Normalize EOL for all files that Git considers text files.
+* text=auto eol=lf
@@ -60,6 +60,9 @@ pipeline:
       -D clippy::unused_self
       -A clippy::uninlined_format_args
       -D clippy::get_first
+      -D clippy::explicit_into_iter_loop
+      -D clippy::explicit_iter_loop
+      -D clippy::needless_collect
     - cargo clippy --workspace --features console --
       -D clippy::unwrap_used
      -D clippy::indexing_slicing
Cargo.lock (generated, 209 changed lines)

@@ -14,9 +14,9 @@ dependencies = [

 [[package]]
 name = "activitypub_federation"
-version = "0.4.4"
+version = "0.4.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "27540f6c4b72c91176610ed5279061a021387f972c7c6f42c41032b78a808267"
+checksum = "4ab3ac148d9c0b4163a6d41040c17de7558a42224b9ecbd4e8f033aef6c254d9"
 dependencies = [
  "activitystreams-kinds",
  "actix-web",
@@ -122,10 +122,12 @@ dependencies = [
  "ahash 0.8.3",
  "base64 0.21.2",
  "bitflags 1.3.2",
+ "brotli",
  "bytes",
  "bytestring",
  "derive_more",
  "encoding_rs",
+ "flate2",
  "futures-core",
  "h2",
  "http",
@@ -143,6 +145,7 @@ dependencies = [
  "tokio",
  "tokio-util 0.7.4",
  "tracing",
+ "zstd",
 ]

 [[package]]
@@ -215,7 +218,7 @@ dependencies = [
  "futures-util",
  "mio",
  "num_cpus",
- "socket2",
+ "socket2 0.4.9",
  "tokio",
  "tracing",
 ]
@@ -245,7 +248,7 @@ dependencies = [
  "http",
  "log",
  "pin-project-lite",
- "tokio-rustls",
+ "tokio-rustls 0.23.4",
  "tokio-util 0.7.4",
  "webpki-roots",
 ]
@@ -297,7 +300,7 @@ dependencies = [
  "serde_json",
  "serde_urlencoded",
  "smallvec",
- "socket2",
+ "socket2 0.4.9",
  "time 0.3.15",
  "url",
 ]
@@ -361,6 +364,21 @@ dependencies = [
  "memchr",
 ]

+[[package]]
+name = "alloc-no-stdlib"
+version = "2.0.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cc7bb162ec39d46ab1ca8c77bf72e890535becd1751bb45f64c597edb4c8c6b3"
+
+[[package]]
+name = "alloc-stdlib"
+version = "0.2.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "94fb8275041c72129eb51b7d0322c29b8387a0386127718b096429201a5d6ece"
+dependencies = [
+ "alloc-no-stdlib",
+]
+
 [[package]]
 name = "android-tzdata"
 version = "0.1.1"
@@ -496,7 +514,7 @@ dependencies = [
  "percent-encoding",
  "pin-project-lite",
  "rand 0.8.5",
- "rustls",
+ "rustls 0.20.7",
  "serde",
  "serde_json",
  "serde_urlencoded",
@@ -714,6 +732,27 @@ dependencies = [
  "cipher",
 ]

+[[package]]
+name = "brotli"
+version = "3.3.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a1a0b1dbcc8ae29329621f8d4f0d835787c1c38bb1401979b49d13b0b305ff68"
+dependencies = [
+ "alloc-no-stdlib",
+ "alloc-stdlib",
+ "brotli-decompressor",
+]
+
+[[package]]
+name = "brotli-decompressor"
+version = "2.3.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4b6561fd3f895a11e8f72af2cb7d22e08366bebc2b6b57f7744c4bda27034744"
+dependencies = [
+ "alloc-no-stdlib",
+ "alloc-stdlib",
+]
+
 [[package]]
 name = "bumpalo"
 version = "3.11.1"
@@ -766,6 +805,9 @@ name = "cc"
 version = "1.0.73"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "2fff2a6927b3bb87f9595d67196a70493f627687a71d87a0d692242c33f58c11"
+dependencies = [
+ "jobserver",
+]

 [[package]]
 name = "cesu8"
@@ -1328,6 +1370,7 @@ dependencies = [
  "itoa",
  "pq-sys",
  "serde_json",
+ "uuid",
 ]

 [[package]]
@@ -2262,7 +2305,7 @@ dependencies = [
  "httpdate",
  "itoa",
  "pin-project-lite",
- "socket2",
+ "socket2 0.4.9",
  "tokio",
  "tower-service",
  "tracing",
@@ -2470,6 +2513,15 @@ version = "0.3.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "8eaf4bc02d17cbdd7ff4c7438cafcdf7fb9a4613313ad11b4f8fefe7d3fa0130"

+[[package]]
+name = "jobserver"
+version = "0.1.26"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "936cfd212a0155903bcbc060e316fb6cc7cbf2e1907329391ebadc1fe0ce77c2"
+dependencies = [
+ "libc",
+]
+
 [[package]]
 name = "js-sys"
 version = "0.3.60"
@@ -2518,7 +2570,7 @@ checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"

 [[package]]
 name = "lemmy_api"
-version = "0.18.0"
+version = "0.18.1-rc.4"
 dependencies = [
  "actix-web",
  "anyhow",
@@ -2526,6 +2578,7 @@ dependencies = [
  "base64 0.13.1",
  "bcrypt",
  "captcha",
+ "chrono",
  "lemmy_api_common",
  "lemmy_db_schema",
  "lemmy_db_views",
@@ -2541,9 +2594,8 @@ dependencies = [

 [[package]]
 name = "lemmy_api_common"
-version = "0.18.0"
+version = "0.18.1-rc.4"
 dependencies = [
- "actix-rt",
  "actix-web",
  "anyhow",
  "chrono",
@@ -2561,6 +2613,7 @@ dependencies = [
  "rosetta-i18n",
  "serde",
  "serde_with",
+ "tokio",
  "tracing",
  "ts-rs",
  "url",
@@ -2570,12 +2623,13 @@ dependencies = [

 [[package]]
 name = "lemmy_api_crud"
-version = "0.18.0"
+version = "0.18.1-rc.4"
 dependencies = [
  "activitypub_federation",
  "actix-web",
  "async-trait",
  "bcrypt",
+ "chrono",
  "lemmy_api_common",
  "lemmy_db_schema",
  "lemmy_db_views",
@@ -2584,15 +2638,15 @@ dependencies = [
  "serde",
  "tracing",
  "url",
+ "uuid",
  "webmention",
 ]

 [[package]]
 name = "lemmy_apub"
-version = "0.18.0"
+version = "0.18.1-rc.4"
 dependencies = [
  "activitypub_federation",
- "actix-rt",
  "actix-web",
  "anyhow",
  "assert-json-diff",
@@ -2620,6 +2674,7 @@ dependencies = [
  "sha2",
  "strum_macros",
  "task-local-extensions",
+ "tokio",
  "tracing",
  "url",
  "uuid",
@@ -2627,7 +2682,7 @@ dependencies = [

 [[package]]
 name = "lemmy_db_schema"
-version = "0.18.0"
+version = "0.18.1-rc.4"
 dependencies = [
  "activitypub_federation",
  "async-trait",
@@ -2640,9 +2695,11 @@ dependencies = [
  "diesel-derive-newtype",
  "diesel_ltree",
  "diesel_migrations",
+ "futures-util",
  "lemmy_utils",
  "once_cell",
  "regex",
+ "rustls 0.21.2",
  "serde",
  "serde_json",
  "serde_with",
@@ -2651,15 +2708,18 @@ dependencies = [
  "strum",
  "strum_macros",
  "tokio",
+ "tokio-postgres",
+ "tokio-postgres-rustls",
  "tracing",
  "ts-rs",
  "typed-builder",
  "url",
+ "uuid",
 ]

 [[package]]
 name = "lemmy_db_views"
-version = "0.18.0"
+version = "0.18.1-rc.4"
 dependencies = [
  "diesel",
  "diesel-async",
@@ -2676,7 +2736,7 @@ dependencies = [

 [[package]]
 name = "lemmy_db_views_actor"
-version = "0.18.0"
+version = "0.18.1-rc.4"
 dependencies = [
  "diesel",
  "diesel-async",
@@ -2689,7 +2749,7 @@ dependencies = [

 [[package]]
 name = "lemmy_db_views_moderator"
-version = "0.18.0"
+version = "0.18.1-rc.4"
 dependencies = [
  "diesel",
  "diesel-async",
@@ -2701,7 +2761,7 @@ dependencies = [

 [[package]]
 name = "lemmy_routes"
-version = "0.18.0"
+version = "0.18.1-rc.4"
 dependencies = [
  "activitypub_federation",
  "actix-web",
@@ -2726,16 +2786,18 @@ dependencies = [

 [[package]]
 name = "lemmy_server"
-version = "0.18.0"
+version = "0.18.1-rc.4"
 dependencies = [
  "activitypub_federation",
  "actix-cors",
  "actix-web",
+ "chrono",
  "clokwerk",
  "console-subscriber",
  "diesel",
  "diesel-async",
  "doku",
+ "futures-util",
  "lemmy_api",
  "lemmy_api_common",
  "lemmy_api_crud",
@@ -2749,9 +2811,12 @@ dependencies = [
  "reqwest",
  "reqwest-middleware",
  "reqwest-tracing",
+ "rustls 0.21.2",
  "serde",
  "serde_json",
  "tokio",
+ "tokio-postgres",
+ "tokio-postgres-rustls",
  "tracing",
  "tracing-actix-web 0.6.2",
  "tracing-error",
@@ -2763,7 +2828,7 @@ dependencies = [

 [[package]]
 name = "lemmy_utils"
-version = "0.18.0"
+version = "0.18.1-rc.4"
 dependencies = [
  "actix-web",
  "anyhow",
@@ -2820,7 +2885,7 @@ dependencies = [
  "nom 7.1.1",
  "once_cell",
  "quoted_printable",
- "socket2",
+ "socket2 0.4.9",
 ]

 [[package]]
@@ -3932,11 +3997,11 @@ dependencies = [

 [[package]]
 name = "postgres-protocol"
-version = "0.6.4"
+version = "0.6.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "878c6cbf956e03af9aa8204b407b9cbf47c072164800aa918c516cd4b056c50c"
+checksum = "78b7fa9f396f51dffd61546fd8573ee20592287996568e6175ceb0f8699ad75d"
 dependencies = [
- "base64 0.13.1",
+ "base64 0.21.2",
  "byteorder",
  "bytes",
  "fallible-iterator",
@@ -4495,6 +4560,28 @@ dependencies = [
  "webpki",
 ]

+[[package]]
+name = "rustls"
+version = "0.21.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e32ca28af694bc1bbf399c33a516dbdf1c90090b8ab23c2bc24f834aa2247f5f"
+dependencies = [
+ "log",
+ "ring",
+ "rustls-webpki",
+ "sct",
+]
+
+[[package]]
+name = "rustls-webpki"
+version = "0.100.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d6207cd5ed3d8dca7816f8f3725513a34609c0c765bf652b8c3cb4cfd87db46b"
+dependencies = [
+ "ring",
+ "untrusted",
+]
+
 [[package]]
 name = "rustversion"
 version = "1.0.9"
@@ -4859,6 +4946,16 @@ dependencies = [
  "winapi",
 ]

+[[package]]
+name = "socket2"
+version = "0.5.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2538b18701741680e0322a2302176d3253a35388e2e62f172f64f4f16605f877"
+dependencies = [
+ "libc",
+ "windows-sys 0.48.0",
+]
+
 [[package]]
 name = "spin"
 version = "0.5.2"
@@ -5304,7 +5401,7 @@ dependencies = [
  "parking_lot 0.12.1",
  "pin-project-lite",
  "signal-hook-registry",
- "socket2",
+ "socket2 0.4.9",
  "tokio-macros",
  "tracing",
  "windows-sys 0.48.0",
@@ -5343,9 +5440,9 @@ dependencies = [

 [[package]]
 name = "tokio-postgres"
-version = "0.7.7"
+version = "0.7.8"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "29a12c1b3e0704ae7dfc25562629798b29c72e6b1d0a681b6f29ab4ae5e7f7bf"
+checksum = "6e89f6234aa8fd43779746012fcf53603cdb91fdd8399aa0de868c2d56b6dde1"
 dependencies = [
  "async-trait",
  "byteorder",
@@ -5360,22 +5457,46 @@ dependencies = [
  "pin-project-lite",
  "postgres-protocol",
  "postgres-types",
- "socket2",
+ "socket2 0.5.3",
  "tokio",
  "tokio-util 0.7.4",
 ]

+[[package]]
+name = "tokio-postgres-rustls"
+version = "0.10.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "dd5831152cb0d3f79ef5523b357319ba154795d64c7078b2daa95a803b54057f"
+dependencies = [
+ "futures",
+ "ring",
+ "rustls 0.21.2",
+ "tokio",
+ "tokio-postgres",
+ "tokio-rustls 0.24.1",
+]
+
 [[package]]
 name = "tokio-rustls"
 version = "0.23.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "c43ee83903113e03984cb9e5cebe6c04a5116269e900e3ddba8f068a62adda59"
 dependencies = [
- "rustls",
+ "rustls 0.20.7",
  "tokio",
  "webpki",
 ]

+[[package]]
+name = "tokio-rustls"
+version = "0.24.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c28327cf380ac148141087fbfb9de9d7bd4e84ab5d2c28fbc911d753de8a7081"
+dependencies = [
+ "rustls 0.21.2",
+ "tokio",
+]
+
 [[package]]
 name = "tokio-stream"
 version = "0.1.11"
@@ -6460,3 +6581,33 @@ name = "zeroize"
 version = "1.5.7"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "c394b5bd0c6f669e7275d9c20aa90ae064cb22e75a1cad54e1b34088034b149f"
+
+[[package]]
+name = "zstd"
+version = "0.12.3+zstd.1.5.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "76eea132fb024e0e13fd9c2f5d5d595d8a967aa72382ac2f9d39fcc95afd0806"
+dependencies = [
+ "zstd-safe",
+]
+
+[[package]]
+name = "zstd-safe"
+version = "6.0.5+zstd.1.5.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d56d9e60b4b1758206c238a10165fbcae3ca37b01744e394c463463f6529d23b"
+dependencies = [
+ "libc",
+ "zstd-sys",
+]
+
+[[package]]
+name = "zstd-sys"
+version = "2.0.8+zstd.1.5.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5556e6ee25d32df2586c098bbfa278803692a20d0ab9565e049480d52707ec8c"
+dependencies = [
+ "cc",
+ "libc",
+ "pkg-config",
+]
Cargo.toml (36 changed lines)

@@ -1,5 +1,5 @@
 [workspace.package]
-version = "0.18.0"
+version = "0.18.1-rc.4"
 edition = "2021"
 description = "A link aggregator for the fediverse"
 license = "AGPL-3.0"
@@ -49,23 +49,23 @@ members = [
 ]

 [workspace.dependencies]
-lemmy_api = { version = "=0.18.0", path = "./crates/api" }
-lemmy_api_crud = { version = "=0.18.0", path = "./crates/api_crud" }
-lemmy_apub = { version = "=0.18.0", path = "./crates/apub" }
-lemmy_utils = { version = "=0.18.0", path = "./crates/utils" }
-lemmy_db_schema = { version = "=0.18.0", path = "./crates/db_schema" }
-lemmy_api_common = { version = "=0.18.0", path = "./crates/api_common" }
-lemmy_routes = { version = "=0.18.0", path = "./crates/routes" }
-lemmy_db_views = { version = "=0.18.0", path = "./crates/db_views" }
-lemmy_db_views_actor = { version = "=0.18.0", path = "./crates/db_views_actor" }
-lemmy_db_views_moderator = { version = "=0.18.0", path = "./crates/db_views_moderator" }
+lemmy_api = { version = "=0.18.1-rc.4", path = "./crates/api" }
+lemmy_api_crud = { version = "=0.18.1-rc.4", path = "./crates/api_crud" }
+lemmy_apub = { version = "=0.18.1-rc.4", path = "./crates/apub" }
+lemmy_utils = { version = "=0.18.1-rc.4", path = "./crates/utils" }
+lemmy_db_schema = { version = "=0.18.1-rc.4", path = "./crates/db_schema" }
+lemmy_api_common = { version = "=0.18.1-rc.4", path = "./crates/api_common" }
+lemmy_routes = { version = "=0.18.1-rc.4", path = "./crates/routes" }
+lemmy_db_views = { version = "=0.18.1-rc.4", path = "./crates/db_views" }
+lemmy_db_views_actor = { version = "=0.18.1-rc.4", path = "./crates/db_views_actor" }
+lemmy_db_views_moderator = { version = "=0.18.1-rc.4", path = "./crates/db_views_moderator" }
 activitypub_federation = { version = "0.4.4", default-features = false, features = ["actix-web"] }
 diesel = "2.1.0"
 diesel_migrations = "2.1.0"
 diesel-async = "0.3.1"
 serde = { version = "1.0.164", features = ["derive"] }
 serde_with = "1.14.0"
-actix-web = { version = "4.3.1", default-features = false, features = ["macros", "rustls"] }
+actix-web = { version = "4.3.1", default-features = false, features = ["macros", "rustls", "compress-brotli", "compress-gzip", "compress-zstd"] }
 tracing = "0.1.37"
 tracing-actix-web = { version = "0.6.2", default-features = false }
 tracing-error = "0.2.0"
@@ -89,7 +89,7 @@ anyhow = "1.0.71"
 diesel_ltree = "0.3.0"
 typed-builder = "0.10.0"
 serial_test = "0.9.0"
-tokio = "1.28.2"
+tokio = { version = "1.28.2", features = ["full"] }
 sha2 = "0.10.6"
 regex = "1.8.4"
 once_cell = "1.18.0"
@@ -100,13 +100,16 @@ strum_macros = "0.24.3"
 itertools = "0.10.5"
 futures = "0.3.28"
 http = "0.2.9"
-actix-rt = { version = "2.8.0", default-features = false }
 percent-encoding = "2.3.0"
 rosetta-i18n = "0.1.2"
 rand = "0.8.5"
 opentelemetry = { version = "0.17.0", features = ["rt-tokio"] }
 tracing-opentelemetry = { version = "0.17.4" }
 ts-rs = { version = "6.2", features = ["serde-compat", "format", "chrono-impl"] }
+rustls = { version = "0.21.2", features = ["dangerous_configuration"] }
+futures-util = "0.3.28"
+tokio-postgres = "0.7.8"
+tokio-postgres-rustls = "0.10.0"

 [dependencies]
 lemmy_api = { workspace = true }
@@ -140,3 +143,8 @@ opentelemetry-otlp = { version = "0.10.0", optional = true }
 pict-rs = { version = "0.4.0-rc.3", optional = true }
 tokio.workspace = true
 actix-cors = "0.6.4"
+rustls = { workspace = true }
+futures-util = { workspace = true }
+tokio-postgres = { workspace = true }
+tokio-postgres-rustls = { workspace = true }
+chrono = { workspace = true }
@@ -123,6 +123,7 @@ Each Lemmy server can set its own moderation policy; appointing site-wide admins
 - [Jerboa - A native Android app made by Lemmy's developers](https://github.com/dessalines/jerboa)
 - [Mlem - A Lemmy client for iOS](https://github.com/buresdv/Mlem)
 - [Lemoa - A Gtk client for Lemmy on Linux](https://github.com/lemmy-gtk/lemoa)
+- [Liftoff - A Lemmy client for Windows, Linux and Android](https://github.com/liftoff-app/liftoff)

 ### Libraries

@@ -76,4 +76,8 @@
   port: 8536
   # Whether the site is available over TLS. Needs to be true for federation to work.
   tls_enabled: true
+  # The number of activitypub federation workers that can be in-flight concurrently
+  worker_count: 0
+  # The number of activitypub federation retry workers that can be in-flight concurrently
+  retry_count: 0
 }
@@ -29,6 +29,7 @@ async-trait = { workspace = true }
 captcha = { workspace = true }
 anyhow = { workspace = true }
 tracing = { workspace = true }
+chrono = { workspace = true }

 [dev-dependencies]
 serial_test = { workspace = true }
@@ -47,7 +47,7 @@ impl Perform for BanFromCommunity {
       community_id,
     )
     .await?;
-    is_valid_body_field(&data.reason)?;
+    is_valid_body_field(&data.reason, false)?;

     let community_user_ban_form = CommunityPersonBanForm {
       community_id: data.community_id,
@@ -1,4 +1,5 @@
 use actix_web::web::Data;
+use captcha::Captcha;
 use lemmy_api_common::{context::LemmyContext, utils::local_site_to_slur_regex};
 use lemmy_db_schema::source::local_site::LocalSite;
 use lemmy_utils::{error::LemmyError, utils::slurs::check_slurs};
@@ -20,6 +21,21 @@ pub trait Perform {
   async fn perform(&self, context: &Data<LemmyContext>) -> Result<Self::Response, LemmyError>;
 }

+/// Converts the captcha to a base64 encoded wav audio file
+pub(crate) fn captcha_as_wav_base64(captcha: &Captcha) -> String {
+  let letters = captcha.as_wav();
+
+  let mut concat_letters: Vec<u8> = Vec::new();
+
+  for letter in letters {
+    let bytes = letter.unwrap_or_default();
+    concat_letters.extend(bytes);
+  }
+
+  // Convert to base64
+  base64::encode(concat_letters)
+}
+
 /// Check size of report and remove whitespace
 pub(crate) fn check_report_reason(reason: &str, local_site: &LocalSite) -> Result<(), LemmyError> {
   let slur_regex = &local_site_to_slur_regex(local_site);
@@ -30,7 +30,7 @@ impl Perform for BanPerson {
     // Make sure user is an admin
     is_admin(&local_user_view)?;

-    is_valid_body_field(&data.reason)?;
+    is_valid_body_field(&data.reason, false)?;

     let ban = data.ban;
     let banned_person_id = data.person_id;
crates/api/src/local_user/get_captcha.rs (new file, 50 lines)

@@ -0,0 +1,50 @@
+use crate::{captcha_as_wav_base64, Perform};
+use actix_web::web::Data;
+use captcha::{gen, Difficulty};
+use lemmy_api_common::{
+  context::LemmyContext,
+  person::{CaptchaResponse, GetCaptcha, GetCaptchaResponse},
+};
+use lemmy_db_schema::source::{
+  captcha_answer::{CaptchaAnswer, CaptchaAnswerForm},
+  local_site::LocalSite,
+};
+use lemmy_utils::error::LemmyError;
+
+#[async_trait::async_trait(?Send)]
+impl Perform for GetCaptcha {
+  type Response = GetCaptchaResponse;
+
+  #[tracing::instrument(skip(context))]
+  async fn perform(&self, context: &Data<LemmyContext>) -> Result<Self::Response, LemmyError> {
+    let local_site = LocalSite::read(context.pool()).await?;
+
+    if !local_site.captcha_enabled {
+      return Ok(GetCaptchaResponse { ok: None });
+    }
+
+    let captcha = gen(match local_site.captcha_difficulty.as_str() {
+      "easy" => Difficulty::Easy,
+      "hard" => Difficulty::Hard,
+      _ => Difficulty::Medium,
+    });
+
+    let answer = captcha.chars_as_string();
+
+    let png = captcha.as_base64().expect("failed to generate captcha");
+
+    let wav = captcha_as_wav_base64(&captcha);
+
+    let captcha_form: CaptchaAnswerForm = CaptchaAnswerForm { answer };
+    // Stores the captcha item in the db
+    let captcha = CaptchaAnswer::insert(context.pool(), &captcha_form).await?;
+
+    Ok(GetCaptchaResponse {
+      ok: Some(CaptchaResponse {
+        png,
+        wav,
+        uuid: captcha.uuid.to_string(),
+      }),
+    })
+  }
+}
@@ -3,6 +3,7 @@ mod ban_person;
 mod block;
 mod change_password;
 mod change_password_after_reset;
+mod get_captcha;
 mod list_banned;
 mod login;
 mod notifications;
@@ -5,6 +5,7 @@ use lemmy_api_common::{
   person::{PasswordReset, PasswordResetResponse},
   utils::send_password_reset_email,
 };
+use lemmy_db_schema::source::password_reset_request::PasswordResetRequest;
 use lemmy_db_views::structs::LocalUserView;
 use lemmy_utils::error::LemmyError;

@@ -25,6 +26,16 @@ impl Perform for PasswordReset {
       .await
       .map_err(|e| LemmyError::from_error_message(e, "couldnt_find_that_username_or_email"))?;

+    // Check for too many attempts (to limit potential abuse)
+    let recent_resets_count = PasswordResetRequest::get_recent_password_resets_count(
+      context.pool(),
+      local_user_view.local_user.id,
+    )
+    .await?;
+    if recent_resets_count >= 3 {
+      return Err(LemmyError::from_message("password_reset_limit_reached"));
+    }
+
     // Email the pure token to the user.
     send_password_reset_email(
       &local_user_view,
@@ -3,7 +3,7 @@ use actix_web::web::Data;
 use lemmy_api_common::{
   context::LemmyContext,
   site::{PurgeComment, PurgeItemResponse},
-  utils::{is_top_admin, local_user_view_from_jwt},
+  utils::{is_admin, local_user_view_from_jwt},
 };
 use lemmy_db_schema::{
   source::{
@@ -23,8 +23,8 @@ impl Perform for PurgeComment {
     let data: &Self = self;
     let local_user_view = local_user_view_from_jwt(&data.auth, context).await?;

-    // Only let the top admin purge an item
-    is_top_admin(&mut *context.conn().await?, local_user_view.person.id).await?;
+    // Only let admin purge an item
+    is_admin(&local_user_view)?;

     let comment_id = data.comment_id;

@@ -4,7 +4,7 @@ use lemmy_api_common::{
   context::LemmyContext,
   request::purge_image_from_pictrs,
   site::{PurgeCommunity, PurgeItemResponse},
-  utils::{is_top_admin, local_user_view_from_jwt, purge_image_posts_for_community},
+  utils::{is_admin, local_user_view_from_jwt, purge_image_posts_for_community},
 };
 use lemmy_db_schema::{
   source::{
@@ -24,8 +24,8 @@ impl Perform for PurgeCommunity {
     let data: &Self = self;
     let local_user_view = local_user_view_from_jwt(&data.auth, context).await?;

-    // Only let the top admin purge an item
-    is_top_admin(&mut *context.conn().await?, local_user_view.person.id).await?;
+    // Only let admin purge an item
+    is_admin(&local_user_view)?;

     let community_id = data.community_id;

@@ -4,7 +4,7 @@ use lemmy_api_common::{
   context::LemmyContext,
   request::purge_image_from_pictrs,
   site::{PurgeItemResponse, PurgePerson},
-  utils::{is_top_admin, local_user_view_from_jwt, purge_image_posts_for_person},
+  utils::{is_admin, local_user_view_from_jwt, purge_image_posts_for_person},
 };
 use lemmy_db_schema::{
   source::{
@@ -24,8 +24,8 @@ impl Perform for PurgePerson {
     let data: &Self = self;
     let local_user_view = local_user_view_from_jwt(&data.auth, context).await?;

-    // Only let the top admin purge an item
-    is_top_admin(&mut *context.conn().await?, local_user_view.person.id).await?;
+    // Only let admin purge an item
+    is_admin(&local_user_view)?;

     // Read the person to get their images
     let person_id = data.person_id;
@@ -4,7 +4,7 @@ use lemmy_api_common::{
   context::LemmyContext,
   request::purge_image_from_pictrs,
   site::{PurgeItemResponse, PurgePost},
-  utils::{is_top_admin, local_user_view_from_jwt},
+  utils::{is_admin, local_user_view_from_jwt},
 };
 use lemmy_db_schema::{
   source::{
@@ -24,8 +24,8 @@ impl Perform for PurgePost {
     let data: &Self = self;
     let local_user_view = local_user_view_from_jwt(&data.auth, context).await?;

-    // Only let the top admin purge an item
-    is_top_admin(&mut *context.conn().await?, local_user_view.person.id).await?;
+    // Only let admin purge an item
+    is_admin(&local_user_view)?;

     let post_id = data.post_id;

@@ -33,12 +33,12 @@ reqwest-middleware = { workspace = true, optional = true }
 regex = { workspace = true }
 rosetta-i18n = { workspace = true, optional = true }
 percent-encoding = { workspace = true, optional = true }
-webpage = { version = "1.6.0", default-features = false, features = ["serde"], optional = true }
+webpage = { version = "1.6", default-features = false, features = ["serde"], optional = true }
 encoding = { version = "0.2.33", optional = true }
 anyhow = { workspace = true }
 futures = { workspace = true }
 uuid = { workspace = true }
-actix-rt = { workspace = true }
+tokio = { workspace = true }
 reqwest = { workspace = true }
 ts-rs = { workspace = true, optional = true }
 actix-web = { workspace = true }
@@ -24,3 +24,10 @@ As you can see, each API endpoint needs a parameter type ( GetPosts), path (/pos
 For a real example of a Lemmy API client, look at [lemmyBB](https://github.com/LemmyNet/lemmyBB/tree/main/src/api).

 Lemmy also provides a websocket API. You can find the full websocket code in [this file](https://github.com/LemmyNet/lemmy/blob/main/src/api_routes_websocket.rs).
+
+## Generate TypeScript bindings
+
+TypeScript bindings (API types) can be generated by running `cargo test --features full`.
+The ts files will be generated into a `bindings` folder.
+
+This crate uses the [`ts_rs`](https://docs.rs/ts-rs/6.2.1/ts_rs/#traits) macros `derive(TS)` and `ts(export)` to mark the types that bindings are generated for.
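For readers unfamiliar with `ts_rs`, here is a minimal sketch of the export pattern described in the documentation hunk above. The fields mirror the `ListCommunities` struct elsewhere in this diff, but the attribute set is simplified and `String` stands in for Lemmy's own enum types, so treat it as illustrative rather than as Lemmy's exact code:

```rust
// Illustrative sketch: derive TS and mark the type for export.
// Running `cargo test` then writes ListCommunities.ts into ./bindings/.
use serde::{Deserialize, Serialize};
use ts_rs::TS;

#[derive(Debug, Serialize, Deserialize, TS)]
#[ts(export)]
pub struct ListCommunities {
  pub type_: Option<String>, // ListingType in Lemmy itself
  pub sort: Option<String>,  // SortType in Lemmy itself
  pub show_nsfw: Option<bool>,
  pub page: Option<i64>,
  pub limit: Option<i64>,
}
```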
@@ -103,7 +103,6 @@ pub async fn send_local_notifs(
   for mention in mentions
     .iter()
     .filter(|m| m.is_local(&context.settings().hostname) && m.name.ne(&person.name))
-    .collect::<Vec<&MentionData>>()
   {
     let mention_name = mention.name.clone();
     let user_view = LocalUserView::read_from_name(&mut *context.conn().await?, &mention_name).await;
@@ -76,6 +76,7 @@ pub struct CommunityResponse {
 pub struct ListCommunities {
   pub type_: Option<ListingType>,
   pub sort: Option<SortType>,
+  pub show_nsfw: Option<bool>,
   pub page: Option<i64>,
   pub limit: Option<i64>,
   pub auth: Option<Sensitive<String>>,
@@ -27,12 +27,12 @@ pub async fn fetch_site_metadata(
   // https://github.com/LemmyNet/lemmy/issues/1964
   let html_bytes = response.bytes().await.map_err(LemmyError::from)?.to_vec();

-  let tags = html_to_site_metadata(&html_bytes)?;
+  let tags = html_to_site_metadata(&html_bytes, url)?;

   Ok(tags)
 }

-fn html_to_site_metadata(html_bytes: &[u8]) -> Result<SiteMetadata, LemmyError> {
+fn html_to_site_metadata(html_bytes: &[u8], url: &Url) -> Result<SiteMetadata, LemmyError> {
   let html = String::from_utf8_lossy(html_bytes);

   // Make sure the first line is doctype html
@@ -81,12 +81,14 @@ fn html_to_site_metadata(html_bytes: &[u8]) -> Result<SiteMetadata, LemmyError>
     .opengraph
     .images
     .first()
-    .and_then(|ogo| Url::parse(&ogo.url).ok());
+    // join also works if the target URL is absolute
+    .and_then(|ogo| url.join(&ogo.url).ok());
   let og_embed_url = page
     .opengraph
     .videos
     .first()
-    .and_then(|v| Url::parse(&v.url).ok());
+    // join also works if the target URL is absolute
+    .and_then(|v| url.join(&v.url).ok());

   Ok(SiteMetadata {
     title: og_title.or(page_title),
@@ -266,12 +268,17 @@ pub fn build_user_agent(settings: &Settings) -> String {

 #[cfg(test)]
 mod tests {
-  use crate::request::{build_user_agent, fetch_site_metadata, SiteMetadata};
+  use crate::request::{
+    build_user_agent,
+    fetch_site_metadata,
+    html_to_site_metadata,
+    SiteMetadata,
+  };
   use lemmy_utils::settings::SETTINGS;
   use url::Url;

   // These helped with testing
-  #[actix_rt::test]
+  #[tokio::test]
   async fn test_site_metadata() {
     let settings = &SETTINGS.clone();
     let client = reqwest::Client::builder()
@@ -305,4 +312,46 @@ mod tests {
   // let res_other = fetch_pictshare("https://upload.wikimedia.org/wikipedia/en/2/27/The_Mandalorian_logo.jpgaoeu");
   // assert!(res_other.is_err());
   // }
+
+  #[test]
+  fn test_resolve_image_url() {
+    // url that lists the opengraph fields
+    let url = Url::parse("https://example.com/one/two.html").unwrap();
+
+    // root relative url
+    let html_bytes = b"<!DOCTYPE html><html><head><meta property='og:image' content='/image.jpg'></head><body></body></html>";
+    let metadata = html_to_site_metadata(html_bytes, &url).expect("Unable to parse metadata");
+    assert_eq!(
+      metadata.image,
+      Some(Url::parse("https://example.com/image.jpg").unwrap().into())
+    );
+
+    // base relative url
+    let html_bytes = b"<!DOCTYPE html><html><head><meta property='og:image' content='image.jpg'></head><body></body></html>";
+    let metadata = html_to_site_metadata(html_bytes, &url).expect("Unable to parse metadata");
+    assert_eq!(
+      metadata.image,
+      Some(
+        Url::parse("https://example.com/one/image.jpg")
+          .unwrap()
+          .into()
+      )
+    );
+
+    // absolute url
+    let html_bytes = b"<!DOCTYPE html><html><head><meta property='og:image' content='https://cdn.host.com/image.jpg'></head><body></body></html>";
+    let metadata = html_to_site_metadata(html_bytes, &url).expect("Unable to parse metadata");
+    assert_eq!(
+      metadata.image,
+      Some(Url::parse("https://cdn.host.com/image.jpg").unwrap().into())
+    );
+
+    // protocol relative url
+    let html_bytes = b"<!DOCTYPE html><html><head><meta property='og:image' content='//example.com/image.jpg'></head><body></body></html>";
+    let metadata = html_to_site_metadata(html_bytes, &url).expect("Unable to parse metadata");
+    assert_eq!(
+      metadata.image,
+      Some(Url::parse("https://example.com/image.jpg").unwrap().into())
+    );
+  }
 }
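The switch from `Url::parse` to `url.join` in the hunk above is what lets relative OpenGraph image URLs resolve against the page URL. A small standalone sketch of the `url` crate behavior the new tests exercise (the example URLs are illustrative):

```rust
use url::Url;

fn main() -> Result<(), url::ParseError> {
  let page = Url::parse("https://example.com/one/two.html")?;

  // A root-relative reference resolves against the host.
  assert_eq!(
    page.join("/image.jpg")?.as_str(),
    "https://example.com/image.jpg"
  );

  // A relative reference resolves against the page's directory.
  assert_eq!(
    page.join("image.jpg")?.as_str(),
    "https://example.com/one/image.jpg"
  );

  // An absolute URL passes through unchanged, which is why join also
  // covers the case Url::parse used to handle.
  assert_eq!(
    page.join("https://cdn.host.com/image.jpg")?.as_str(),
    "https://cdn.host.com/image.jpg"
  );

  // A protocol-relative reference inherits the base scheme.
  assert_eq!(
    page.join("//example.com/image.jpg")?.as_str(),
    "https://example.com/image.jpg"
  );

  Ok(())
}
```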
@@ -177,7 +177,6 @@ pub struct CreateSite {
   pub rate_limit_search_per_second: Option<i32>,
   pub federation_enabled: Option<bool>,
   pub federation_debug: Option<bool>,
-  pub federation_worker_count: Option<i32>,
   pub captcha_enabled: Option<bool>,
   pub captcha_difficulty: Option<String>,
   pub allowed_instances: Option<Vec<String>>,
@@ -250,8 +249,6 @@ pub struct EditSite {
   pub federation_enabled: Option<bool>,
   /// Enables federation debugging.
   pub federation_debug: Option<bool>,
-  /// The number of federation workers.
-  pub federation_worker_count: Option<i32>,
   /// Whether to enable captchas for signups.
   pub captcha_enabled: Option<bool>,
   /// The captcha difficulty. Can be easy, medium, or hard
@@ -32,7 +32,6 @@ use lemmy_db_views_actor::structs::{
   CommunityModeratorView,
   CommunityPersonBanView,
   CommunityView,
-  PersonView,
 };
 use lemmy_utils::{
   claims::Claims,
@@ -79,18 +78,6 @@ pub async fn is_mod_or_admin_opt(
   }
 }

-pub async fn is_top_admin(conn: &mut DbConn, person_id: PersonId) -> Result<(), LemmyError> {
-  let admins = PersonView::admins(conn).await?;
-  let top_admin = admins
-    .first()
-    .ok_or_else(|| LemmyError::from_message("no admins"))?;
-
-  if top_admin.person.id != person_id {
-    return Err(LemmyError::from_message("not_top_admin"));
-  }
-  Ok(())
-}
-
 pub fn is_admin(local_user_view: &LocalUserView) -> Result<(), LemmyError> {
   if !local_user_view.person.admin {
     return Err(LemmyError::from_message("not_an_admin"));
@@ -316,15 +303,6 @@ pub fn password_length_check(pass: &str) -> Result<(), LemmyError> {
   }
 }

-/// Checks the site description length
-pub fn site_description_length_check(description: &str) -> Result<(), LemmyError> {
-  if description.len() > 150 {
-    Err(LemmyError::from_message("site_description_length_overflow"))
-  } else {
-    Ok(())
-  }
-}
-
 /// Checks for a honeypot. If this field is filled, fail the rest of the function
 pub fn honeypot_check(honeypot: &Option<String>) -> Result<(), LemmyError> {
   if honeypot.is_some() && honeypot != &Some(String::new()) {
@@ -22,3 +22,5 @@ tracing = { workspace = true }
 url = { workspace = true }
 async-trait = { workspace = true }
 webmention = "0.4.0"
+chrono = { workspace = true }
+uuid = { workspace = true }
@@ -49,7 +49,7 @@ impl PerformCrud for CreateComment {
       &data.content.clone(),
       &local_site_to_slur_regex(&local_site),
     );
-    is_valid_body_field(&Some(content_slurs_removed.clone()))?;
+    is_valid_body_field(&Some(content_slurs_removed.clone()), false)?;

     // Check for a community ban
     let post_id = data.post_id;
@@ -207,7 +207,7 @@ impl PerformCrud for CreateComment {

 pub fn check_comment_depth(comment: &Comment) -> Result<(), LemmyError> {
   let path = &comment.path.0;
-  let length = path.split('.').collect::<Vec<&str>>().len();
+  let length = path.split('.').count();
   if length > MAX_COMMENT_DEPTH_LIMIT {
     Err(LemmyError::from_message("max_comment_depth_reached"))
   } else {
@@ -64,7 +64,7 @@ impl PerformCrud for EditComment {
       .as_ref()
       .map(|c| remove_slurs(c, &local_site_to_slur_regex(&local_site)));

-    is_valid_body_field(&content_slurs_removed)?;
+    is_valid_body_field(&content_slurs_removed, false)?;

     let comment_id = data.comment_id;
     let form = CommentUpdateForm::builder()
@@ -67,7 +67,7 @@ impl PerformCrud for CreateCommunity {
     check_slurs_opt(&data.description, &slur_regex)?;

     is_valid_actor_name(&data.name, local_site.actor_name_max_length as usize)?;
-    is_valid_body_field(&data.description)?;
+    is_valid_body_field(&data.description, false)?;

     // Double check for duplicate community actor_ids
     let community_actor_id = generate_local_apub_endpoint(
@@ -27,6 +27,7 @@ impl PerformCrud for ListCommunities {

     let sort = data.sort;
     let listing_type = data.type_;
+    let show_nsfw = data.show_nsfw;
     let page = data.page;
     let limit = data.limit;
     let local_user = local_user_view.map(|l| l.local_user);
@@ -34,6 +35,7 @@ impl PerformCrud for ListCommunities {
     let communities = CommunityQuery::builder()
       .conn(&mut conn)
       .listing_type(listing_type)
+      .show_nsfw(show_nsfw)
       .sort(sort)
       .local_user(local_user.as_ref())
       .page(page)
@@ -39,7 +39,7 @@ impl PerformCrud for EditCommunity {
     let slur_regex = local_site_to_slur_regex(&local_site);
     check_slurs_opt(&data.title, &slur_regex)?;
     check_slurs_opt(&data.description, &slur_regex)?;
-    is_valid_body_field(&data.description)?;
+    is_valid_body_field(&data.description, false)?;

     // Verify its a mod (only mods can edit it)
     let community_id = data.community_id;
@@ -57,7 +57,7 @@ impl PerformCrud for CreatePost {
     let url = data_url.map(clean_url_params).map(Into::into); // TODO no good way to handle a "clear"

     is_valid_post_title(&data.name)?;
-    is_valid_body_field(&data.body)?;
+    is_valid_body_field(&data.body, true)?;

     check_community_ban(
       local_user_view.person.id,
@@ -49,7 +49,7 @@ impl PerformCrud for EditPost {
       is_valid_post_title(name)?;
     }

-    is_valid_body_field(&data.body)?;
+    is_valid_body_field(&data.body, true)?;

     let post_id = data.post_id;
     let orig_post = Post::read(&mut *context.conn().await?, post_id).await?;
@ -43,7 +43,7 @@ impl PerformCrud for CreatePrivateMessage {
       &data.content.clone(),
       &local_site_to_slur_regex(&local_site),
     );
-    is_valid_body_field(&Some(content_slurs_removed.clone()))?;
+    is_valid_body_field(&Some(content_slurs_removed.clone()), false)?;

     check_person_block(
       local_user_view.person.id,
@ -42,7 +42,7 @@ impl PerformCrud for EditPrivateMessage {

     // Doing the update
     let content_slurs_removed = remove_slurs(&data.content, &local_site_to_slur_regex(&local_site));
-    is_valid_body_field(&Some(content_slurs_removed.clone()))?;
+    is_valid_body_field(&Some(content_slurs_removed.clone()), false)?;

     let private_message_id = data.private_message_id;
     PrivateMessage::update(
@ -1,4 +1,7 @@
-use crate::{site::check_application_question, PerformCrud};
+use crate::{
+  site::{application_question_check, site_default_post_listing_type_check},
+  PerformCrud,
+};
 use activitypub_federation::http_signatures::generate_actor_keypair;
 use actix_web::web::Data;
 use lemmy_api_common::{
@ -8,9 +11,7 @@ use lemmy_api_common::{
     generate_site_inbox_url,
     is_admin,
     local_site_rate_limit_to_rate_limit_config,
-    local_site_to_slur_regex,
     local_user_view_from_jwt,
-    site_description_length_check,
   },
 };
 use lemmy_db_schema::{
@ -26,10 +27,16 @@ use lemmy_db_schema::{
 };
 use lemmy_db_views::structs::SiteView;
 use lemmy_utils::{
-  error::LemmyError,
+  error::{LemmyError, LemmyResult},
   utils::{
     slurs::{check_slurs, check_slurs_opt},
-    validation::{check_site_visibility_valid, is_valid_body_field},
+    validation::{
+      build_and_check_regex,
+      check_site_visibility_valid,
+      is_valid_body_field,
+      site_description_length_check,
+      site_name_length_check,
+    },
   },
 };
 use url::Url;
@ -42,56 +49,23 @@ impl PerformCrud for CreateSite {
   async fn perform(&self, context: &Data<LemmyContext>) -> Result<SiteResponse, LemmyError> {
     let data: &CreateSite = self;
+    let local_user_view = local_user_view_from_jwt(&data.auth, context).await?;
     let local_site = LocalSite::read(&mut *context.conn().await?).await?;

-    if local_site.site_setup {
-      return Err(LemmyError::from_message("site_already_exists"));
-    };
-
-    let local_user_view = local_user_view_from_jwt(&data.auth, context).await?;
-
-    // Make sure user is an admin
+    // Make sure user is an admin; other types of users should not create site data...
     is_admin(&local_user_view)?;

-    check_site_visibility_valid(
-      local_site.private_instance,
-      local_site.federation_enabled,
-      &data.private_instance,
-      &data.federation_enabled,
-    )?;
-
-    let sidebar = diesel_option_overwrite(&data.sidebar);
-    let description = diesel_option_overwrite(&data.description);
-    let icon = diesel_option_overwrite_to_url(&data.icon)?;
-    let banner = diesel_option_overwrite_to_url(&data.banner)?;
-
-    let slur_regex = local_site_to_slur_regex(&local_site);
-    check_slurs(&data.name, &slur_regex)?;
-    check_slurs_opt(&data.description, &slur_regex)?;
-
-    if let Some(Some(desc)) = &description {
-      site_description_length_check(desc)?;
-    }
-
-    is_valid_body_field(&data.sidebar)?;
-
-    let application_question = diesel_option_overwrite(&data.application_question);
-    check_application_question(
-      &application_question,
-      data
-        .registration_mode
-        .unwrap_or(local_site.registration_mode),
-    )?;
+    validate_create_payload(&local_site, data)?;

     let actor_id: DbUrl = Url::parse(&context.settings().get_protocol_and_hostname())?.into();
     let inbox_url = Some(generate_site_inbox_url(&actor_id)?);
     let keypair = generate_actor_keypair()?;
     let site_form = SiteUpdateForm::builder()
       .name(Some(data.name.clone()))
-      .sidebar(sidebar)
-      .description(description)
-      .icon(icon)
-      .banner(banner)
+      .sidebar(diesel_option_overwrite(&data.sidebar))
+      .description(diesel_option_overwrite(&data.description))
+      .icon(diesel_option_overwrite_to_url(&data.icon)?)
+      .banner(diesel_option_overwrite_to_url(&data.banner)?)
       .actor_id(Some(actor_id))
       .last_refreshed_at(Some(naive_now()))
       .inbox_url(inbox_url)
@ -111,7 +85,7 @@ impl PerformCrud for CreateSite {
       .enable_nsfw(data.enable_nsfw)
       .community_creation_admin_only(data.community_creation_admin_only)
       .require_email_verification(data.require_email_verification)
-      .application_question(application_question)
+      .application_question(diesel_option_overwrite(&data.application_question))
       .private_instance(data.private_instance)
       .default_theme(data.default_theme.clone())
       .default_post_listing_type(data.default_post_listing_type)
@ -122,7 +96,6 @@ impl PerformCrud for CreateSite {
       .slur_filter_regex(diesel_option_overwrite(&data.slur_filter_regex))
       .actor_name_max_length(data.actor_name_max_length)
       .federation_enabled(data.federation_enabled)
-      .federation_worker_count(data.federation_worker_count)
       .captcha_enabled(data.captcha_enabled)
       .captcha_difficulty(data.captcha_difficulty.clone())
       .build();
@ -165,3 +138,449 @@ impl PerformCrud for CreateSite {
     })
   }
 }
+
+fn validate_create_payload(local_site: &LocalSite, create_site: &CreateSite) -> LemmyResult<()> {
+  // Make sure the site hasn't already been set up...
+  if local_site.site_setup {
+    return Err(LemmyError::from_message("site_already_exists"));
+  };
+
+  // Check that the slur regex compiles, and returns the regex if valid...
+  // Prioritize using new slur regex from the request; if not provided, use the existing regex.
+  let slur_regex = build_and_check_regex(
+    &create_site
+      .slur_filter_regex
+      .as_deref()
+      .or(local_site.slur_filter_regex.as_deref()),
+  )?;
+
+  site_name_length_check(&create_site.name)?;
+  check_slurs(&create_site.name, &slur_regex)?;
+
+  if let Some(desc) = &create_site.description {
+    site_description_length_check(desc)?;
+    check_slurs_opt(&create_site.description, &slur_regex)?;
+  }
+
+  site_default_post_listing_type_check(&create_site.default_post_listing_type)?;
+
+  check_site_visibility_valid(
+    local_site.private_instance,
+    local_site.federation_enabled,
+    &create_site.private_instance,
+    &create_site.federation_enabled,
+  )?;
+
+  // Ensure that the sidebar has fewer than the max num characters...
+  is_valid_body_field(&create_site.sidebar, false)?;
+
+  application_question_check(
+    &local_site.application_question,
+    &create_site.application_question,
+    create_site
+      .registration_mode
+      .unwrap_or(local_site.registration_mode),
+  )
+}
+
+#[cfg(test)]
+mod tests {
+  use crate::site::create::validate_create_payload;
+  use lemmy_api_common::site::CreateSite;
+  use lemmy_db_schema::{source::local_site::LocalSite, ListingType, RegistrationMode};
+
+  #[test]
+  fn test_validate_invalid_create_payload() {
+    let invalid_payloads = [
+      (
+        "CreateSite attempted on set up LocalSite",
+        "site_already_exists",
+        &generate_local_site(true, None::<String>, true, false, None::<String>, RegistrationMode::Open),
+        &generate_create_site(String::from("site_name"), None::<String>, None::<String>, None::<ListingType>,
+          None::<String>, None::<bool>, None::<bool>, None::<String>, None::<RegistrationMode>),
+      ),
+      (
+        "CreateSite name matches LocalSite slur filter",
+        "slurs",
+        &generate_local_site(false, Some(String::from("(foo|bar)")), true, false, None::<String>, RegistrationMode::Open),
+        &generate_create_site(String::from("foo site_name"), None::<String>, None::<String>, None::<ListingType>,
+          None::<String>, None::<bool>, None::<bool>, None::<String>, None::<RegistrationMode>),
+      ),
+      (
+        "CreateSite name matches new slur filter",
+        "slurs",
+        &generate_local_site(false, Some(String::from("(foo|bar)")), true, false, None::<String>, RegistrationMode::Open),
+        &generate_create_site(String::from("zeta site_name"), None::<String>, None::<String>, None::<ListingType>,
+          Some(String::from("(zeta|alpha)")), None::<bool>, None::<bool>, None::<String>, None::<RegistrationMode>),
+      ),
+      (
+        "CreateSite listing type is Subscribed, which is invalid",
+        "invalid_default_post_listing_type",
+        &generate_local_site(false, None::<String>, true, false, None::<String>, RegistrationMode::Open),
+        &generate_create_site(String::from("site_name"), None::<String>, None::<String>, Some(ListingType::Subscribed),
+          None::<String>, None::<bool>, None::<bool>, None::<String>, None::<RegistrationMode>),
+      ),
+      (
+        "CreateSite is both private and federated",
+        "cant_enable_private_instance_and_federation_together",
+        &generate_local_site(false, None::<String>, true, false, None::<String>, RegistrationMode::Open),
+        &generate_create_site(String::from("site_name"), None::<String>, None::<String>, None::<ListingType>,
+          None::<String>, Some(true), Some(true), None::<String>, None::<RegistrationMode>),
+      ),
+      (
+        "LocalSite is private, but CreateSite also makes it federated",
+        "cant_enable_private_instance_and_federation_together",
+        &generate_local_site(false, None::<String>, true, false, None::<String>, RegistrationMode::Open),
+        &generate_create_site(String::from("site_name"), None::<String>, None::<String>, None::<ListingType>,
+          None::<String>, None::<bool>, Some(true), None::<String>, None::<RegistrationMode>),
+      ),
+      (
+        "CreateSite requires application, but neither it nor LocalSite has an application question",
+        "application_question_required",
+        &generate_local_site(false, None::<String>, true, false, None::<String>, RegistrationMode::Open),
+        &generate_create_site(String::from("site_name"), None::<String>, None::<String>, None::<ListingType>,
+          None::<String>, None::<bool>, None::<bool>, None::<String>, Some(RegistrationMode::RequireApplication)),
+      ),
+    ];
+
+    invalid_payloads.iter().enumerate().for_each(
+      |(idx, &(reason, expected_err, local_site, create_site))| {
+        match validate_create_payload(local_site, create_site) {
+          Ok(_) => {
+            panic!(
+              "Got Ok, but validation should have failed with error: {} for reason: {}. invalid_payloads.nth({})",
+              expected_err, reason, idx
+            )
+          }
+          Err(error) => {
+            assert!(
+              error.message.eq(&Some(String::from(expected_err))),
+              "Got Err {:?}, but should have failed with message: {} for reason: {}. invalid_payloads.nth({})",
+              error.message, expected_err, reason, idx
+            )
+          }
+        }
+      },
+    );
+  }
+
+  #[test]
+  fn test_validate_valid_create_payload() {
+    let valid_payloads = [
+      (
+        "No changes between LocalSite and CreateSite",
+        &generate_local_site(false, None::<String>, true, false, None::<String>, RegistrationMode::Open),
+        &generate_create_site(String::from("site_name"), None::<String>, None::<String>, None::<ListingType>,
+          None::<String>, None::<bool>, None::<bool>, None::<String>, None::<RegistrationMode>),
+      ),
+      (
+        "CreateSite allows clearing and changing values",
+        &generate_local_site(false, None::<String>, true, false, None::<String>, RegistrationMode::Open),
+        &generate_create_site(String::from("site_name"), Some(String::new()), Some(String::new()), Some(ListingType::All),
+          Some(String::new()), Some(false), Some(true), Some(String::new()), Some(RegistrationMode::Open)),
+      ),
+      (
+        "CreateSite clears existing slur filter regex",
+        &generate_local_site(false, Some(String::from("(foo|bar)")), true, false, None::<String>, RegistrationMode::Open),
+        &generate_create_site(String::from("foo site_name"), None::<String>, None::<String>, None::<ListingType>,
+          Some(String::new()), None::<bool>, None::<bool>, None::<String>, None::<RegistrationMode>),
+      ),
+      (
+        "LocalSite has application question and CreateSite now requires applications,",
+        &generate_local_site(false, None::<String>, true, false, Some(String::from("question")), RegistrationMode::Open),
+        &generate_create_site(String::from("site_name"), None::<String>, None::<String>, None::<ListingType>,
+          None::<String>, None::<bool>, None::<bool>, None::<String>, Some(RegistrationMode::RequireApplication)),
+      ),
+    ];
+
+    valid_payloads
+      .iter()
+      .enumerate()
+      .for_each(|(idx, &(reason, local_site, edit_site))| {
+        assert!(
+          validate_create_payload(local_site, edit_site).is_ok(),
+          "Got Err, but should have got Ok for reason: {}. valid_payloads.nth({})",
+          reason, idx
+        );
+      })
+  }
+
+  fn generate_local_site(
+    site_setup: bool, site_slur_filter_regex: Option<String>, site_is_private: bool,
+    site_is_federated: bool, site_application_question: Option<String>,
+    site_registration_mode: RegistrationMode,
+  ) -> LocalSite {
+    LocalSite {
+      id: Default::default(), site_id: Default::default(), site_setup,
+      enable_downvotes: false, enable_nsfw: false, community_creation_admin_only: false,
+      require_email_verification: false, application_question: site_application_question,
+      private_instance: site_is_private, default_theme: String::new(),
+      default_post_listing_type: ListingType::All, legal_information: None,
+      hide_modlog_mod_names: false, application_email_admins: false,
+      slur_filter_regex: site_slur_filter_regex, actor_name_max_length: 0,
+      federation_enabled: site_is_federated, captcha_enabled: false,
+      captcha_difficulty: String::new(), published: Default::default(), updated: None,
+      registration_mode: site_registration_mode, reports_email_admins: false,
+    }
+  }
+
+  // Allow the test helper function to have too many arguments.
+  // It's either this or generate the entire struct each time for testing.
+  #[allow(clippy::too_many_arguments)]
+  fn generate_create_site(
+    site_name: String, site_description: Option<String>, site_sidebar: Option<String>,
+    site_listing_type: Option<ListingType>, site_slur_filter_regex: Option<String>,
+    site_is_private: Option<bool>, site_is_federated: Option<bool>,
+    site_application_question: Option<String>, site_registration_mode: Option<RegistrationMode>,
+  ) -> CreateSite {
+    CreateSite {
+      name: site_name, sidebar: site_sidebar, description: site_description,
+      icon: None, banner: None, enable_downvotes: None, enable_nsfw: None,
+      community_creation_admin_only: None, require_email_verification: None,
+      application_question: site_application_question, private_instance: site_is_private,
+      default_theme: None, default_post_listing_type: site_listing_type,
+      legal_information: None, application_email_admins: None, hide_modlog_mod_names: None,
+      discussion_languages: None, slur_filter_regex: site_slur_filter_regex,
+      actor_name_max_length: None,
+      rate_limit_message: None, rate_limit_message_per_second: None,
+      rate_limit_post: None, rate_limit_post_per_second: None,
+      rate_limit_register: None, rate_limit_register_per_second: None,
+      rate_limit_image: None, rate_limit_image_per_second: None,
+      rate_limit_comment: None, rate_limit_comment_per_second: None,
+      rate_limit_search: None, rate_limit_search_per_second: None,
+      federation_enabled: site_is_federated, federation_debug: None,
+      captcha_enabled: None, captcha_difficulty: None,
+      allowed_instances: None, blocked_instances: None, taglines: None,
+      registration_mode: site_registration_mode, auth: Default::default(),
+    }
+  }
+}
@ -1,19 +1,95 @@
-use lemmy_db_schema::RegistrationMode;
-use lemmy_utils::error::LemmyError;
+use lemmy_db_schema::{ListingType, RegistrationMode};
+use lemmy_utils::error::{LemmyError, LemmyResult};

 mod create;
 mod read;
 mod update;

-pub fn check_application_question(
-  application_question: &Option<Option<String>>,
+/// Checks whether the default post listing type is valid for a site.
+pub fn site_default_post_listing_type_check(
+  default_post_listing_type: &Option<ListingType>,
+) -> LemmyResult<()> {
+  if let Some(listing_type) = default_post_listing_type {
+    // Only allow all or local as default listing types...
+    if listing_type != &ListingType::All && listing_type != &ListingType::Local {
+      Err(LemmyError::from_message(
+        "invalid_default_post_listing_type",
+      ))
+    } else {
+      Ok(())
+    }
+  } else {
+    Ok(())
+  }
+}
+
+/// Checks whether the application question and registration mode align.
+pub fn application_question_check(
+  current_application_question: &Option<String>,
+  new_application_question: &Option<String>,
   registration_mode: RegistrationMode,
-) -> Result<(), LemmyError> {
+) -> LemmyResult<()> {
+  let has_no_question: bool =
+    current_application_question.is_none() && new_application_question.is_none();
+  let is_nullifying_question: bool = new_application_question == &Some(String::new());
+
   if registration_mode == RegistrationMode::RequireApplication
-    && application_question.as_ref().unwrap_or(&None).is_none()
+    && (has_no_question || is_nullifying_question)
   {
     Err(LemmyError::from_message("application_question_required"))
   } else {
     Ok(())
   }
 }
+
+#[cfg(test)]
+mod tests {
+  use crate::site::{application_question_check, site_default_post_listing_type_check};
+  use lemmy_db_schema::{ListingType, RegistrationMode};
+
+  #[test]
+  fn test_site_default_post_listing_type_check() {
+    assert!(site_default_post_listing_type_check(&None::<ListingType>).is_ok());
+    assert!(site_default_post_listing_type_check(&Some(ListingType::All)).is_ok());
+    assert!(site_default_post_listing_type_check(&Some(ListingType::Local)).is_ok());
+    assert!(site_default_post_listing_type_check(&Some(ListingType::Subscribed)).is_err());
+  }
+
+  #[test]
+  fn test_application_question_check() {
+    assert!(
+      application_question_check(&Some(String::from("q")), &Some(String::new()), RegistrationMode::RequireApplication).is_err(),
+      "Expected application to be invalid because an application is required, current question: {:?}, new question: {:?}",
+      "q",
+      String::new(),
+    );
+    assert!(
+      application_question_check(&None, &None, RegistrationMode::RequireApplication).is_err(),
+      "Expected application to be invalid because an application is required, current question: {:?}, new question: {:?}",
+      None::<String>,
+      None::<String>
+    );
+
+    assert!(
+      application_question_check(&None, &None, RegistrationMode::Open).is_ok(),
+      "Expected application to be valid because no application required, current question: {:?}, new question: {:?}, mode: {:?}",
+      None::<String>,
+      None::<String>,
+      RegistrationMode::Open
+    );
+    assert!(
+      application_question_check(&None, &Some(String::from("q")), RegistrationMode::RequireApplication).is_ok(),
+      "Expected application to be valid because new application provided, current question: {:?}, new question: {:?}, mode: {:?}",
+      None::<String>,
+      Some(String::from("q")),
+      RegistrationMode::RequireApplication
+    );
+    assert!(
+      application_question_check(&Some(String::from("q")), &None, RegistrationMode::RequireApplication).is_ok(),
+      "Expected application to be valid because application existed, current question: {:?}, new question: {:?}, mode: {:?}",
+      Some(String::from("q")),
+      None::<String>,
+      RegistrationMode::RequireApplication
+    );
+  }
+}
@ -1,15 +1,12 @@
-use crate::{site::check_application_question, PerformCrud};
+use crate::{
+  site::{application_question_check, site_default_post_listing_type_check},
+  PerformCrud,
+};
 use actix_web::web::Data;
 use lemmy_api_common::{
   context::LemmyContext,
   site::{EditSite, SiteResponse},
-  utils::{
-    is_admin,
-    local_site_rate_limit_to_rate_limit_config,
-    local_site_to_slur_regex,
-    local_user_view_from_jwt,
-    site_description_length_check,
-  },
+  utils::{is_admin, local_site_rate_limit_to_rate_limit_config, local_user_view_from_jwt},
 };
 use lemmy_db_schema::{
   source::{
@ -24,15 +21,20 @@ use lemmy_db_schema::{
   },
   traits::Crud,
   utils::{diesel_option_overwrite, diesel_option_overwrite_to_url, naive_now},
-  ListingType,
   RegistrationMode,
 };
 use lemmy_db_views::structs::SiteView;
 use lemmy_utils::{
-  error::LemmyError,
+  error::{LemmyError, LemmyResult},
   utils::{
     slurs::check_slurs_opt,
-    validation::{check_site_visibility_valid, is_valid_body_field},
+    validation::{
+      build_and_check_regex,
+      check_site_visibility_valid,
+      is_valid_body_field,
+      site_description_length_check,
+      site_name_length_check,
+    },
   },
 };
@ -48,43 +50,10 @@ impl PerformCrud for EditSite {
     let local_site = site_view.local_site;
     let site = site_view.site;

-    // Make sure user is an admin
+    // Make sure user is an admin; other types of users should not update site data...
     is_admin(&local_user_view)?;

-    check_site_visibility_valid(
-      local_site.private_instance,
-      local_site.federation_enabled,
-      &data.private_instance,
-      &data.federation_enabled,
-    )?;
-
-    let slur_regex = local_site_to_slur_regex(&local_site);
-
-    check_slurs_opt(&data.name, &slur_regex)?;
-    check_slurs_opt(&data.description, &slur_regex)?;
-
-    if let Some(desc) = &data.description {
-      site_description_length_check(desc)?;
-    }
-
-    is_valid_body_field(&data.sidebar)?;
-
-    let application_question = diesel_option_overwrite(&data.application_question);
-    check_application_question(
-      &application_question,
-      data
-        .registration_mode
-        .unwrap_or(local_site.registration_mode),
-    )?;
-
-    if let Some(listing_type) = &data.default_post_listing_type {
-      // only allow all or local as default listing types
-      if listing_type != &ListingType::All && listing_type != &ListingType::Local {
-        return Err(LemmyError::from_message(
-          "invalid_default_post_listing_type",
-        ));
-      }
-    }
+    validate_update_payload(&local_site, data)?;

     if let Some(discussion_languages) = data.discussion_languages.clone() {
       SiteLanguage::update(
@ -95,9 +64,8 @@ impl PerformCrud for EditSite {
       .await?;
     }

-    let name = data.name.clone();
     let site_form = SiteUpdateForm::builder()
-      .name(name)
+      .name(data.name.clone())
       .sidebar(diesel_option_overwrite(&data.sidebar))
       .description(diesel_option_overwrite(&data.description))
       .icon(diesel_option_overwrite_to_url(&data.icon)?)
@ -117,7 +85,7 @@ impl PerformCrud for EditSite {
       .enable_nsfw(data.enable_nsfw)
       .community_creation_admin_only(data.community_creation_admin_only)
      .require_email_verification(data.require_email_verification)
-      .application_question(application_question)
+      .application_question(diesel_option_overwrite(&data.application_question))
       .private_instance(data.private_instance)
       .default_theme(data.default_theme.clone())
       .default_post_listing_type(data.default_post_listing_type)
@ -128,7 +96,6 @@ impl PerformCrud for EditSite {
       .slur_filter_regex(diesel_option_overwrite(&data.slur_filter_regex))
       .actor_name_max_length(data.actor_name_max_length)
       .federation_enabled(data.federation_enabled)
-      .federation_worker_count(data.federation_worker_count)
       .captcha_enabled(data.captcha_enabled)
       .captcha_difficulty(data.captcha_difficulty.clone())
       .reports_email_admins(data.reports_email_admins)
@ -211,3 +178,411 @@ impl PerformCrud for EditSite {
     Ok(res)
   }
 }
+
+fn validate_update_payload(local_site: &LocalSite, edit_site: &EditSite) -> LemmyResult<()> {
+  // Check that the slur regex compiles, and return the regex if valid...
+  // Prioritize using new slur regex from the request; if not provided, use the existing regex.
+  let slur_regex = build_and_check_regex(
+    &edit_site
+      .slur_filter_regex
+      .as_deref()
+      .or(local_site.slur_filter_regex.as_deref()),
+  )?;
+
+  if let Some(name) = &edit_site.name {
+    // The name doesn't need to be updated, but if provided it cannot be blanked out...
+    site_name_length_check(name)?;
+    check_slurs_opt(&edit_site.name, &slur_regex)?;
+  }
+
+  if let Some(desc) = &edit_site.description {
+    site_description_length_check(desc)?;
+    check_slurs_opt(&edit_site.description, &slur_regex)?;
+  }
+
+  site_default_post_listing_type_check(&edit_site.default_post_listing_type)?;
+
+  check_site_visibility_valid(
+    local_site.private_instance,
+    local_site.federation_enabled,
+    &edit_site.private_instance,
+    &edit_site.federation_enabled,
+  )?;
+
+  // Ensure that the sidebar has fewer than the max num characters...
+  is_valid_body_field(&edit_site.sidebar, false)?;
+
+  application_question_check(
+    &local_site.application_question,
+    &edit_site.application_question,
+    edit_site
+      .registration_mode
+      .unwrap_or(local_site.registration_mode),
+  )
+}
+
+#[cfg(test)]
+mod tests {
+  use crate::site::update::validate_update_payload;
+  use lemmy_api_common::site::EditSite;
+  use lemmy_db_schema::{source::local_site::LocalSite, ListingType, RegistrationMode};
+
+  #[test]
+  fn test_validate_invalid_update_payload() {
+    let invalid_payloads = [
+      (
+        "EditSite name matches LocalSite slur filter",
+        "slurs",
+        &generate_local_site(Some(String::from("(foo|bar)")), true, false, None::<String>, RegistrationMode::Open),
+        &generate_edit_site(Some(String::from("foo site_name")), None::<String>, None::<String>, None::<ListingType>,
+          None::<String>, None::<bool>, None::<bool>, None::<String>, None::<RegistrationMode>),
+      ),
+      (
+        "EditSite name matches new slur filter",
+        "slurs",
+        &generate_local_site(Some(String::from("(foo|bar)")), true, false, None::<String>, RegistrationMode::Open),
+        &generate_edit_site(Some(String::from("zeta site_name")), None::<String>, None::<String>, None::<ListingType>,
+          Some(String::from("(zeta|alpha)")), None::<bool>, None::<bool>, None::<String>, None::<RegistrationMode>),
+      ),
+      (
+        "EditSite listing type is Subscribed, which is invalid",
+        "invalid_default_post_listing_type",
+        &generate_local_site(None::<String>, true, false, None::<String>, RegistrationMode::Open),
+        &generate_edit_site(Some(String::from("site_name")), None::<String>, None::<String>, Some(ListingType::Subscribed),
+          None::<String>, None::<bool>, None::<bool>, None::<String>, None::<RegistrationMode>),
+      ),
+      (
+        "EditSite is both private and federated",
+        "cant_enable_private_instance_and_federation_together",
+        &generate_local_site(None::<String>, true, false, None::<String>, RegistrationMode::Open),
+        &generate_edit_site(Some(String::from("site_name")), None::<String>, None::<String>, None::<ListingType>,
+          None::<String>, Some(true), Some(true), None::<String>, None::<RegistrationMode>),
+      ),
+      (
+        "LocalSite is private, but EditSite also makes it federated",
+        "cant_enable_private_instance_and_federation_together",
+        &generate_local_site(None::<String>, true, false, None::<String>, RegistrationMode::Open),
+        &generate_edit_site(Some(String::from("site_name")), None::<String>, None::<String>, None::<ListingType>,
+          None::<String>, None::<bool>, Some(true), None::<String>, None::<RegistrationMode>),
+      ),
+      (
+        "EditSite requires application, but neither it nor LocalSite has an application question",
+        "application_question_required",
+        &generate_local_site(None::<String>, true, false, None::<String>, RegistrationMode::Open),
+        &generate_edit_site(Some(String::from("site_name")), None::<String>, None::<String>, None::<ListingType>,
+          None::<String>, None::<bool>, None::<bool>, None::<String>, Some(RegistrationMode::RequireApplication)),
+      ),
+    ];
+
+    invalid_payloads.iter().enumerate().for_each(
+      |(idx, &(reason, expected_err, local_site, edit_site))| {
+        match validate_update_payload(local_site, edit_site) {
+          Ok(_) => {
+            panic!(
+              "Got Ok, but validation should have failed with error: {} for reason: {}. invalid_payloads.nth({})",
+              expected_err, reason, idx
+            )
+          }
+          Err(error) => {
+            assert!(
+              error.message.eq(&Some(String::from(expected_err))),
+              "Got Err {:?}, but should have failed with message: {} for reason: {}. invalid_payloads.nth({})",
+              error.message, expected_err, reason, idx
+            )
+          }
+        }
+      },
+    );
+  }
+
+  #[test]
+  fn test_validate_valid_update_payload() {
+    let valid_payloads = [
+      (
+        "No changes between LocalSite and EditSite",
+        &generate_local_site(None::<String>, true, false, None::<String>, RegistrationMode::Open),
+        &generate_edit_site(None::<String>, None::<String>, None::<String>, None::<ListingType>,
+          None::<String>, None::<bool>, None::<bool>, None::<String>, None::<RegistrationMode>),
+      ),
+      (
+        "EditSite allows clearing and changing values",
+        &generate_local_site(None::<String>, true, false, None::<String>, RegistrationMode::Open),
+        &generate_edit_site(Some(String::from("site_name")), Some(String::new()), Some(String::new()), Some(ListingType::All),
+          Some(String::new()), Some(false), Some(true), Some(String::new()), Some(RegistrationMode::Open)),
+      ),
+      (
+        "EditSite name passes slur filter regex",
+        &generate_local_site(Some(String::from("(foo|bar)")), true, false, None::<String>, RegistrationMode::Open),
+        &generate_edit_site(Some(String::from("foo site_name")), None::<String>, None::<String>, None::<ListingType>,
+          Some(String::new()), None::<bool>, None::<bool>, None::<String>, None::<RegistrationMode>),
+      ),
+      (
+        "LocalSite has application question and EditSite now requires applications,",
+        &generate_local_site(None::<String>, true, false, Some(String::from("question")), RegistrationMode::Open),
+        &generate_edit_site(Some(String::from("site_name")), None::<String>, None::<String>, None::<ListingType>,
+          None::<String>, None::<bool>, None::<bool>, None::<String>, Some(RegistrationMode::RequireApplication)),
+      ),
+    ];
+
+    valid_payloads
+      .iter()
+      .enumerate()
+      .for_each(|(idx, &(reason, local_site, edit_site))| {
+        assert!(
+          validate_update_payload(local_site, edit_site).is_ok(),
+          "Got Err, but should have got Ok for reason: {}. valid_payloads.nth({})",
+          reason, idx
+        );
+      })
+  }
+
+  fn generate_local_site(
+    site_slur_filter_regex: Option<String>, site_is_private: bool, site_is_federated: bool,
+    site_application_question: Option<String>, site_registration_mode: RegistrationMode,
+  ) -> LocalSite {
+    LocalSite {
+      id: Default::default(), site_id: Default::default(), site_setup: true,
+      enable_downvotes: false, enable_nsfw: false, community_creation_admin_only: false,
+      require_email_verification: false, application_question: site_application_question,
+      private_instance: site_is_private, default_theme: String::new(),
+      default_post_listing_type: ListingType::All, legal_information: None,
+      hide_modlog_mod_names: false, application_email_admins: false,
+      slur_filter_regex: site_slur_filter_regex, actor_name_max_length: 0,
+      federation_enabled: site_is_federated, captcha_enabled: false,
+      captcha_difficulty: String::new(), published: Default::default(), updated: None,
+      registration_mode: site_registration_mode, reports_email_admins: false,
+    }
+  }
+
+  // Allow the test helper function to have too many arguments.
+  // It's either this or generate the entire struct each time for testing.
+  #[allow(clippy::too_many_arguments)]
+  fn generate_edit_site(
+    site_name: Option<String>, site_description: Option<String>, site_sidebar: Option<String>,
+    site_listing_type: Option<ListingType>, site_slur_filter_regex: Option<String>,
+    site_is_private: Option<bool>, site_is_federated: Option<bool>,
+    site_application_question: Option<String>, site_registration_mode: Option<RegistrationMode>,
+  ) -> EditSite {
+    EditSite {
+      name: site_name, sidebar: site_sidebar, description: site_description,
+      icon: None, banner: None, enable_downvotes: None, enable_nsfw: None,
+      community_creation_admin_only: None, require_email_verification: None,
+      application_question: site_application_question, private_instance: site_is_private,
+      default_theme: None, default_post_listing_type: site_listing_type,
+      legal_information: None, application_email_admins: None, hide_modlog_mod_names: None,
+      discussion_languages: None, slur_filter_regex: site_slur_filter_regex,
+      actor_name_max_length: None,
+      rate_limit_message: None, rate_limit_message_per_second: None,
+      rate_limit_post: None, rate_limit_post_per_second: None,
+      rate_limit_register: None, rate_limit_register_per_second: None,
+      rate_limit_image: None, rate_limit_image_per_second: None,
+      rate_limit_comment: None, rate_limit_comment_per_second: None,
+      rate_limit_search: None, rate_limit_search_per_second: None,
+      federation_enabled: site_is_federated, federation_debug: None,
+      captcha_enabled: None, captcha_difficulty: None,
+      allowed_instances: None, blocked_instances: None, taglines: None,
+      registration_mode: site_registration_mode, reports_email_admins: None,
+      auth: Default::default(),
+    }
+  }
+}
@ -19,6 +19,7 @@ use lemmy_api_common::{
 use lemmy_db_schema::{
   aggregates::structs::PersonAggregates,
   source::{
+    captcha_answer::{CaptchaAnswer, CheckCaptchaAnswer},
     local_user::{LocalUser, LocalUserInsertForm},
     person::{Person, PersonInsertForm},
     registration_application::{RegistrationApplication, RegistrationApplicationInsertForm},
@ -71,6 +72,25 @@ impl PerformCrud for Register {
       return Err(LemmyError::from_message("passwords_dont_match"));
     }

+    if local_site.site_setup && local_site.captcha_enabled {
+      if let Some(captcha_uuid) = &data.captcha_uuid {
+        let uuid = uuid::Uuid::parse_str(captcha_uuid)?;
+        let check = CaptchaAnswer::check_captcha(
+          context.pool(),
+          CheckCaptchaAnswer {
+            uuid,
+            answer: data.captcha_answer.clone().unwrap_or_default(),
+          },
+        )
+        .await?;
+        if !check {
+          return Err(LemmyError::from_message("captcha_incorrect"));
+        }
+      } else {
+        return Err(LemmyError::from_message("captcha_incorrect"));
+      }
+    }
+
     let slur_regex = local_site_to_slur_regex(&local_site);
     check_slurs(&data.username, &slur_regex)?;
     check_slurs_opt(&data.answer, &slur_regex)?;
@ -25,7 +25,7 @@ chrono = { workspace = true }
 serde_json = { workspace = true }
 serde = { workspace = true }
 actix-web = { workspace = true }
-actix-rt = { workspace = true }
+tokio = {workspace = true}
 tracing = { workspace = true }
 strum_macros = { workspace = true }
 url = { workspace = true }
@ -43,12 +43,11 @@ pub(crate) async fn send_activity_in_community(

   // send to user followers
   if !is_mod_action {
-    inboxes.append(
+    inboxes.extend(
       &mut PersonFollower::list_followers(&mut *context.conn().await?, actor.id)
         .await?
         .into_iter()
-        .map(|p| ApubPerson(p).shared_inbox_or_inbox())
-        .collect(),
+        .map(|p| ApubPerson(p).shared_inbox_or_inbox()),
     );
   }
@ -121,7 +121,7 @@ mod tests {
   };
   use serial_test::serial;

-  #[actix_rt::test]
+  #[tokio::test]
   #[serial]
   async fn test_parse_lemmy_community_moderators() {
     let context = init_context().await;
@ -48,7 +48,7 @@ where
   Ok(
     HttpResponse::Ok()
       .content_type(FEDERATION_CONTENT_TYPE)
-      .content_type("application/json")
+      .content_type("application/activity+json")
       .body(json),
   )
 }
@ -61,7 +61,7 @@ fn create_apub_tombstone_response<T: Into<Url>>(id: T) -> LemmyResult<HttpRespon
     HttpResponse::Gone()
       .content_type(FEDERATION_CONTENT_TYPE)
       .status(StatusCode::GONE)
-      .content_type("application/json")
+      .content_type("application/activity+json")
       .body(json),
   )
 }
@ -11,10 +11,7 @@ use lemmy_db_schema::{
   traits::Crud,
   utils::DbConn,
 };
-use lemmy_utils::{
-  error::LemmyError,
-  utils::mention::{scrape_text_for_mentions, MentionData},
-};
+use lemmy_utils::{error::LemmyError, utils::mention::scrape_text_for_mentions};
 use serde::{Deserialize, Serialize};
 use serde_json::Value;
 use url::Url;
@ -67,10 +64,9 @@ pub async fn collect_non_local_mentions(
   let mentions = scrape_text_for_mentions(&comment.content)
     .into_iter()
     // Filter only the non-local ones
-    .filter(|m| !m.is_local(&context.settings().hostname))
-    .collect::<Vec<MentionData>>();
+    .filter(|m| !m.is_local(&context.settings().hostname));

-  for mention in &mentions {
+  for mention in mentions {
     let identifier = format!("{}@{}", mention.name, mention.domain);
     let person = webfinger_resolve_actor::<LemmyContext, ApubPerson>(&identifier, context).await;
     if let Ok(person) = person {
@ -239,7 +239,7 @@ pub(crate) mod tests {
       .unwrap();
   }

-  #[actix_rt::test]
+  #[tokio::test]
   #[serial]
   pub(crate) async fn test_parse_lemmy_comment() {
     let context = init_context().await;
@ -267,7 +267,7 @@ pub(crate) mod tests {
     cleanup(data, &context).await;
   }

-  #[actix_rt::test]
+  #[tokio::test]
   #[serial]
   async fn test_parse_pleroma_comment() {
     let context = init_context().await;
@ -299,7 +299,7 @@ pub(crate) mod tests {
     cleanup(data, &context).await;
   }

-  #[actix_rt::test]
+  #[tokio::test]
   #[serial]
   async fn test_html_to_markdown_sanitize() {
     let parsed = parse_html("<script></script><b>hello</b>");
@ -141,19 +141,16 @@ impl Object for ApubCommunity {

     // Fetching mods and outbox is not necessary for Lemmy to work, so ignore errors. Besides,
     // we need to ignore these errors so that tests can work entirely offline.
-    group
-      .outbox
-      .dereference(&community, context)
-      .await
-      .map_err(|e| debug!("{}", e))
-      .ok();
+    let fetch_outbox = group.outbox.dereference(&community, context);

     if let Some(moderators) = group.attributed_to {
-      moderators
-        .dereference(&community, context)
-        .await
-        .map_err(|e| debug!("{}", e))
-        .ok();
+      let fetch_moderators = moderators.dereference(&community, context);
+      // Fetch mods and outbox in parallel
+      let res = tokio::join!(fetch_outbox, fetch_moderators);
+      res.0.map_err(|e| debug!("{}", e)).ok();
+      res.1.map_err(|e| debug!("{}", e)).ok();
+    } else {
+      fetch_outbox.await.map_err(|e| debug!("{}", e)).ok();
     }

     Ok(community)
@ -243,7 +240,7 @@ pub(crate) mod tests {
     community
   }

-  #[actix_rt::test]
+  #[tokio::test]
   #[serial]
   async fn test_parse_lemmy_community() {
     let context = init_context().await;
@ -222,7 +222,7 @@ pub(crate) mod tests {
     site
   }

-  #[actix_rt::test]
+  #[tokio::test]
   #[serial]
   async fn test_parse_lemmy_instance() {
     let context = init_context().await;
@ -223,7 +223,7 @@ pub(crate) mod tests {
     (person, site)
   }

-  #[actix_rt::test]
+  #[tokio::test]
   #[serial]
   async fn test_parse_lemmy_person() {
     let context = init_context().await;
@ -236,7 +236,7 @@ pub(crate) mod tests {
     cleanup((person, site), &context).await;
   }

-  #[actix_rt::test]
+  #[tokio::test]
   #[serial]
   async fn test_parse_pleroma_person() {
     let context = init_context().await;
@ -282,7 +282,7 @@ mod tests {
   use lemmy_db_schema::source::site::Site;
   use serial_test::serial;

-  #[actix_rt::test]
+  #[tokio::test]
   #[serial]
   async fn test_parse_lemmy_post() {
     let context = init_context().await;
@@ -193,7 +193,7 @@ mod tests {
     .unwrap();
   }

-  #[actix_rt::test]
+  #[tokio::test]
   #[serial]
   async fn test_parse_lemmy_pm() {
     let context = init_context().await;
@@ -221,7 +221,7 @@ mod tests {
     cleanup(data, &context).await;
   }

-  #[actix_rt::test]
+  #[tokio::test]
   #[serial]
   async fn test_parse_pleroma_pm() {
     let context = init_context().await;
@@ -29,7 +29,7 @@ serde_json = { workspace = true, optional = true }
 activitypub_federation = { workspace = true, optional = true }
 lemmy_utils = { workspace = true, optional = true }
 bcrypt = { workspace = true, optional = true }
-diesel = { workspace = true, features = ["postgres","chrono", "serde_json"], optional = true }
+diesel = { workspace = true, features = ["postgres","chrono", "serde_json", "uuid"], optional = true }
 diesel-derive-newtype = { workspace = true, optional = true }
 diesel-derive-enum = { workspace = true, optional = true }
 diesel_migrations = { workspace = true, optional = true }
@@ -44,7 +44,11 @@ tokio = { workspace = true }
 tracing = { workspace = true }
 deadpool = { version = "0.9.5", features = ["rt_tokio_1"], optional = true }
 ts-rs = { workspace = true, optional = true }
+rustls = { workspace = true }
+futures-util = { workspace = true }
+tokio-postgres = { workspace = true }
+tokio-postgres-rustls = { workspace = true }
+uuid = { workspace = true, features = ["v4"] }

 [dev-dependencies]
 serial_test = { workspace = true }
@@ -19,8 +19,8 @@ index 255c6422..f2ccf5e2 100644
 
  #[derive(diesel::sql_types::SqlType)]
  #[diesel(postgres_type(name = "sort_type_enum"))]
-@@ -67,13 +63,13 @@ diesel::table! {
+@@ -76,13 +76,13 @@ diesel::table! {
-        when_ -> Timestamp,
+        published -> Timestamp,
     }
 }
 
crates/db_schema/src/impls/captcha_answer.rs (new file, 118 lines)
@@ -0,0 +1,118 @@
+use crate::{
+  schema::captcha_answer::dsl::{answer, captcha_answer, uuid},
+  source::captcha_answer::{CaptchaAnswer, CaptchaAnswerForm, CheckCaptchaAnswer},
+  utils::{functions::lower, get_conn, DbPool},
+};
+use diesel::{
+  delete,
+  dsl::exists,
+  insert_into,
+  result::Error,
+  select,
+  ExpressionMethods,
+  QueryDsl,
+};
+use diesel_async::RunQueryDsl;
+
+impl CaptchaAnswer {
+  pub async fn insert(pool: &DbPool, captcha: &CaptchaAnswerForm) -> Result<Self, Error> {
+    let conn = &mut get_conn(pool).await?;
+
+    insert_into(captcha_answer)
+      .values(captcha)
+      .get_result::<Self>(conn)
+      .await
+  }
+
+  pub async fn check_captcha(pool: &DbPool, to_check: CheckCaptchaAnswer) -> Result<bool, Error> {
+    let conn = &mut get_conn(pool).await?;
+
+    // fetch requested captcha
+    let captcha_exists = select(exists(
+      captcha_answer
+        .filter((uuid).eq(to_check.uuid))
+        .filter(lower(answer).eq(to_check.answer.to_lowercase().clone())),
+    ))
+    .get_result::<bool>(conn)
+    .await?;
+
+    // delete checked captcha
+    delete(captcha_answer.filter(uuid.eq(to_check.uuid)))
+      .execute(conn)
+      .await?;
+
+    Ok(captcha_exists)
+  }
+}
+
+#[cfg(test)]
+mod tests {
+  use crate::{
+    source::captcha_answer::{CaptchaAnswer, CaptchaAnswerForm, CheckCaptchaAnswer},
+    utils::build_db_pool_for_tests,
+  };
+  use serial_test::serial;
+
+  #[tokio::test]
+  #[serial]
+  async fn test_captcha_happy_path() {
+    let pool = &build_db_pool_for_tests().await;
+
+    let inserted = CaptchaAnswer::insert(
+      pool,
+      &CaptchaAnswerForm {
+        answer: "XYZ".to_string(),
+      },
+    )
+    .await
+    .expect("should not fail to insert captcha");
+
+    let result = CaptchaAnswer::check_captcha(
+      pool,
+      CheckCaptchaAnswer {
+        uuid: inserted.uuid,
+        answer: "xyz".to_string(),
+      },
+    )
+    .await;
+
+    assert!(result.is_ok());
+    assert!(result.unwrap());
+  }
+
+  #[tokio::test]
+  #[serial]
+  async fn test_captcha_repeat_answer_fails() {
+    let pool = &build_db_pool_for_tests().await;
+
+    let inserted = CaptchaAnswer::insert(
+      pool,
+      &CaptchaAnswerForm {
+        answer: "XYZ".to_string(),
+      },
+    )
+    .await
+    .expect("should not fail to insert captcha");
+
+    let _result = CaptchaAnswer::check_captcha(
+      pool,
+      CheckCaptchaAnswer {
+        uuid: inserted.uuid,
+        answer: "xyz".to_string(),
+      },
+    )
+    .await;
+
+    let result_repeat = CaptchaAnswer::check_captcha(
+      pool,
+      CheckCaptchaAnswer {
+        uuid: inserted.uuid,
+        answer: "xyz".to_string(),
+      },
+    )
+    .await;
+
+    assert!(result_repeat.is_ok());
+    assert!(!result_repeat.unwrap());
+  }
+}
@@ -11,7 +11,7 @@ use crate::{
     CommentUpdateForm,
   },
   traits::{Crud, Likeable, Saveable},
-  utils::{naive_now, DbConn},
+  utils::{get_conn, naive_now, DbConn, DELETED_REPLACEMENT_TEXT},
 };
 use diesel::{
   dsl::{insert_into, sql_query},
@@ -30,7 +30,7 @@ impl Comment {
   ) -> Result<Vec<Self>, Error> {
     diesel::update(comment.filter(creator_id.eq(for_creator_id)))
       .set((
-        content.eq("*Permananently Deleted*"),
+        content.eq(DELETED_REPLACEMENT_TEXT),
         deleted.eq(true),
         updated.eq(naive_now()),
       ))
@@ -94,8 +94,7 @@ impl Comment {
     // left join comment c2 on c2.path <@ c.path and c2.path != c.path
     // group by c.id

-    let path_split = parent_path.0.split('.').collect::<Vec<&str>>();
-    let parent_id = path_split.get(1);
+    let parent_id = parent_path.0.split('.').nth(1);

     if let Some(parent_id) = parent_id {
       let top_parent = format!("0.{}", parent_id);
@@ -1,5 +1,6 @@
 pub mod activity;
 pub mod actor_language;
+pub mod captcha_answer;
 pub mod comment;
 pub mod comment_reply;
 pub mod comment_report;
@@ -1,6 +1,11 @@
 use crate::{
   newtypes::LocalUserId,
-  schema::password_reset_request::dsl::{password_reset_request, published, token_encrypted},
+  schema::password_reset_request::dsl::{
+    local_user_id,
+    password_reset_request,
+    published,
+    token_encrypted,
+  },
   source::password_reset_request::{PasswordResetRequest, PasswordResetRequestForm},
   traits::Crud,
   utils::DbConn,
@@ -73,6 +78,19 @@ impl PasswordResetRequest {
     .first::<Self>(conn)
     .await
   }
+
+  pub async fn get_recent_password_resets_count(
+    pool: &DbPool,
+    user_id: LocalUserId,
+  ) -> Result<i64, Error> {
+    let conn = &mut get_conn(pool).await?;
+    password_reset_request
+      .filter(local_user_id.eq(user_id))
+      .filter(published.gt(now - 1.days()))
+      .count()
+      .get_result(conn)
+      .await
+  }
 }

 fn bytes_to_hex(bytes: Vec<u8>) -> String {
@@ -27,7 +27,7 @@ use crate::{
     PostUpdateForm,
   },
   traits::{Crud, Likeable, Readable, Saveable},
-  utils::{naive_now, DbConn, FETCH_LIMIT_MAX},
+  utils::{get_conn, naive_now, DbConn, DbPool, DELETED_REPLACEMENT_TEXT, FETCH_LIMIT_MAX},
 };
 use ::url::Url;
 use diesel::{dsl::insert_into, result::Error, ExpressionMethods, QueryDsl, TextExpressionMethods};
@@ -108,9 +108,9 @@ impl Post {

     diesel::update(post.filter(creator_id.eq(for_creator_id)))
       .set((
-        name.eq(perma_deleted),
-        url.eq(perma_deleted_url),
-        body.eq(perma_deleted),
+        name.eq(DELETED_REPLACEMENT_TEXT),
+        url.eq(Option::<&str>::None),
+        body.eq(DELETED_REPLACEMENT_TEXT),
         deleted.eq(true),
         updated.eq(naive_now()),
       ))
@@ -26,6 +26,7 @@ pub mod impls;
 pub mod newtypes;
 #[cfg(feature = "full")]
 #[rustfmt::skip]
+#[allow(clippy::wildcard_imports)]
 pub mod schema;
 pub mod source;
 #[cfg(feature = "full")]
@@ -62,6 +63,9 @@ pub enum SortType {
   TopHour,
   TopSixHour,
   TopTwelveHour,
+  TopThreeMonths,
+  TopSixMonths,
+  TopNineMonths,
 }

 #[derive(EnumString, Display, Debug, Serialize, Deserialize, Clone, Copy)]
@@ -65,7 +65,16 @@ diesel::table! {
 }

 diesel::table! {
-  use diesel::sql_types::{Bool, Int4, Nullable, Text, Timestamp, Varchar};
+  captcha_answer (id) {
+    id -> Int4,
+    uuid -> Uuid,
+    answer -> Text,
+    published -> Timestamp,
+  }
+}
+
+diesel::table! {
+  use diesel::sql_types::*;
   use diesel_ltree::sql_types::Ltree;

   comment (id) {
@@ -317,7 +326,7 @@ diesel::table! {
 }

 diesel::table! {
-  use diesel::sql_types::{Bool, Int4, Nullable, Text, Timestamp, Varchar};
+  use diesel::sql_types::*;
   use super::sql_types::ListingTypeEnum;
   use super::sql_types::RegistrationModeEnum;

@@ -339,7 +348,6 @@ diesel::table! {
     slur_filter_regex -> Nullable<Text>,
     actor_name_max_length -> Int4,
     federation_enabled -> Bool,
-    federation_worker_count -> Int4,
     captcha_enabled -> Bool,
     #[max_length = 255]
     captcha_difficulty -> Varchar,
@@ -372,7 +380,7 @@ diesel::table! {
 }

 diesel::table! {
-  use diesel::sql_types::{Bool, Int4, Nullable, Text, Timestamp, Varchar};
+  use diesel::sql_types::*;
   use super::sql_types::SortTypeEnum;
   use super::sql_types::ListingTypeEnum;

@@ -382,8 +390,7 @@ diesel::table! {
     password_encrypted -> Text,
     email -> Nullable<Text>,
     show_nsfw -> Bool,
-    #[max_length = 20]
-    theme -> Varchar,
+    theme -> Text,
     default_sort_type -> SortTypeEnum,
     default_listing_type -> ListingTypeEnum,
     #[max_length = 20]
@@ -916,6 +923,7 @@ diesel::allow_tables_to_appear_in_same_query!(
   admin_purge_community,
   admin_purge_person,
   admin_purge_post,
+  captcha_answer,
   comment,
   comment_aggregates,
   comment_like,
crates/db_schema/src/source/captcha_answer.rs (new file, 33 lines)
@@ -0,0 +1,33 @@
+#[cfg(feature = "full")]
+use crate::schema::captcha_answer;
+use serde::{Deserialize, Serialize};
+use serde_with::skip_serializing_none;
+use uuid::Uuid;
+
+#[skip_serializing_none]
+#[derive(Clone, PartialEq, Eq, Debug, Serialize, Deserialize)]
+#[cfg_attr(feature = "full", derive(Queryable))]
+#[cfg_attr(feature = "full", diesel(table_name = captcha_answer))]
+pub struct CaptchaAnswer {
+  pub id: i32,
+  pub uuid: Uuid,
+  pub answer: String,
+  pub published: chrono::NaiveDateTime,
+}
+
+#[skip_serializing_none]
+#[derive(Clone, PartialEq, Eq, Debug, Serialize, Deserialize)]
+#[cfg_attr(feature = "full", derive(Queryable))]
+#[cfg_attr(feature = "full", diesel(table_name = captcha_answer))]
+pub struct CheckCaptchaAnswer {
+  pub uuid: Uuid,
+  pub answer: String,
+}
+
+#[skip_serializing_none]
+#[derive(Clone, PartialEq, Eq, Debug, Serialize, Deserialize)]
+#[cfg_attr(feature = "full", derive(Insertable, AsChangeset))]
+#[cfg_attr(feature = "full", diesel(table_name = captcha_answer))]
+pub struct CaptchaAnswerForm {
+  pub answer: String,
+}
@@ -50,8 +50,6 @@ pub struct LocalSite {
   pub actor_name_max_length: i32,
   /// Whether federation is enabled.
   pub federation_enabled: bool,
-  /// The number of concurrent federation http workers.
-  pub federation_worker_count: i32,
   /// Whether captcha is enabled.
   pub captcha_enabled: bool,
   /// The captcha difficulty.
@@ -85,7 +83,6 @@ pub struct LocalSiteInsertForm {
   pub slur_filter_regex: Option<String>,
   pub actor_name_max_length: Option<i32>,
   pub federation_enabled: Option<bool>,
-  pub federation_worker_count: Option<i32>,
   pub captcha_enabled: Option<bool>,
   pub captcha_difficulty: Option<String>,
   pub registration_mode: Option<RegistrationMode>,
@@ -112,7 +109,6 @@ pub struct LocalSiteUpdateForm {
   pub slur_filter_regex: Option<Option<String>>,
   pub actor_name_max_length: Option<i32>,
   pub federation_enabled: Option<bool>,
-  pub federation_worker_count: Option<i32>,
   pub captcha_enabled: Option<bool>,
   pub captcha_difficulty: Option<String>,
   pub registration_mode: Option<RegistrationMode>,
@@ -1,6 +1,7 @@
 #[cfg(feature = "full")]
 pub mod activity;
 pub mod actor_language;
+pub mod captcha_answer;
 pub mod comment;
 pub mod comment_reply;
 pub mod comment_report;
@@ -12,7 +12,7 @@ use diesel::{
   backend::Backend,
   deserialize::FromSql,
   pg::Pg,
-  result::{Error as DieselError, Error::QueryBuilderError},
+  result::{ConnectionError, ConnectionResult, Error as DieselError, Error::QueryBuilderError},
   serialize::{Output, ToSql},
   sql_types::Text,
   PgConnection,
@@ -25,11 +25,21 @@ use diesel_async::{
   },
 };
 use diesel_migrations::EmbeddedMigrations;
+use futures_util::{future::BoxFuture, FutureExt};
 use lemmy_utils::{error::LemmyError, settings::structs::Settings};
 use once_cell::sync::Lazy;
 use regex::Regex;
-use std::{env, env::VarError, time::Duration};
-use tracing::info;
+use rustls::{
+  client::{ServerCertVerified, ServerCertVerifier},
+  ServerName,
+};
+use std::{
+  env,
+  env::VarError,
+  sync::Arc,
+  time::{Duration, SystemTime},
+};
+use tracing::{error, info};
 use url::Url;

 const FETCH_LIMIT_DEFAULT: i64 = 10;
@@ -138,7 +148,15 @@ pub fn diesel_option_overwrite_to_url_create(
 async fn build_db_pool_settings_opt(settings: Option<&Settings>) -> Result<DbPool, LemmyError> {
   let db_url = get_database_url(settings);
   let pool_size = settings.map(|s| s.database.pool_size).unwrap_or(5);
-  let manager = AsyncDieselConnectionManager::<AsyncPgConnection>::new(&db_url);
+  // We only support TLS with sslmode=require currently
+  let tls_enabled = db_url.contains("sslmode=require");
+  let manager = if tls_enabled {
+    // diesel-async does not support any TLS connections out of the box, so we need to manually
+    // provide a setup function which handles creating the connection
+    AsyncDieselConnectionManager::<AsyncPgConnection>::new_with_setup(&db_url, establish_connection)
+  } else {
+    AsyncDieselConnectionManager::<AsyncPgConnection>::new(&db_url)
+  };
   let pool = Pool::builder(manager)
     .max_size(pool_size)
     .wait_timeout(POOL_TIMEOUT)
@@ -155,6 +173,44 @@ async fn build_db_pool_settings_opt(settings: Option<&Settings>) -> Result<DbPool, LemmyError> {
   Ok(pool)
 }

+fn establish_connection(config: &str) -> BoxFuture<ConnectionResult<AsyncPgConnection>> {
+  let fut = async {
+    let rustls_config = rustls::ClientConfig::builder()
+      .with_safe_defaults()
+      .with_custom_certificate_verifier(Arc::new(NoCertVerifier {}))
+      .with_no_client_auth();
+
+    let tls = tokio_postgres_rustls::MakeRustlsConnect::new(rustls_config);
+    let (client, conn) = tokio_postgres::connect(config, tls)
+      .await
+      .map_err(|e| ConnectionError::BadConnection(e.to_string()))?;
+    tokio::spawn(async move {
+      if let Err(e) = conn.await {
+        error!("Database connection failed: {e}");
+      }
+    });
+    AsyncPgConnection::try_from(client).await
+  };
+  fut.boxed()
+}
+
+struct NoCertVerifier {}
+
+impl ServerCertVerifier for NoCertVerifier {
+  fn verify_server_cert(
+    &self,
+    _end_entity: &rustls::Certificate,
+    _intermediates: &[rustls::Certificate],
+    _server_name: &ServerName,
+    _scts: &mut dyn Iterator<Item = &[u8]>,
+    _ocsp_response: &[u8],
+    _now: SystemTime,
+  ) -> Result<ServerCertVerified, rustls::Error> {
+    // Will verify all (even invalid) certs without any checks (sslmode=require)
+    Ok(ServerCertVerified::assertion())
+  }
+}
+
 pub const MIGRATIONS: EmbeddedMigrations = embed_migrations!();

 pub fn run_migrations(db_url: &str) {
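For reference, a minimal sketch (not part of the diff) of how the pool setup above picks between the plain and TLS code paths; the connection string is a made-up example value, and only the "sslmode=require" substring matters:

/// Mirrors the check in build_db_pool_settings_opt(): only sslmode=require is recognized.
fn wants_tls(db_url: &str) -> bool {
  db_url.contains("sslmode=require")
}

fn main() {
  // Hypothetical URLs, not taken from the diff.
  let tls_url = "postgres://lemmy:password@db.example.com:5432/lemmy?sslmode=require";
  let plain_url = "postgres://lemmy:password@localhost:5432/lemmy";
  assert!(wants_tls(tls_url));   // would use new_with_setup(..., establish_connection)
  assert!(!wants_tls(plain_url)); // would use the default connection manager
}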
@@ -211,7 +267,10 @@ pub fn post_to_comment_sort_type(sort: SortType) -> CommentSortType {
     | SortType::TopAll
     | SortType::TopWeek
     | SortType::TopYear
-    | SortType::TopMonth => CommentSortType::Top,
+    | SortType::TopMonth
+    | SortType::TopThreeMonths
+    | SortType::TopSixMonths
+    | SortType::TopNineMonths => CommentSortType::Top,
   }
 }

@@ -230,6 +289,8 @@ pub mod functions {
   sql_function!(fn lower(x: Text) -> Text);
 }

+pub const DELETED_REPLACEMENT_TEXT: &str = "*Permanently Deleted*";
+
 impl ToSql<Text, Pg> for DbUrl {
   fn to_sql(&self, out: &mut Output<Pg>) -> diesel::serialize::Result {
     <std::string::String as ToSql<Text, Pg>>::to_sql(&self.0.to_string(), &mut out.reborrow())
@@ -428,6 +428,18 @@ impl<'a> PostQuery<'a> {
         .filter(post_aggregates::published.gt(now - 12.hours()))
         .then_order_by(post_aggregates::score.desc())
         .then_order_by(post_aggregates::published.desc()),
+      SortType::TopThreeMonths => query
+        .filter(post_aggregates::published.gt(now - 3.months()))
+        .then_order_by(post_aggregates::score.desc())
+        .then_order_by(post_aggregates::published.desc()),
+      SortType::TopSixMonths => query
+        .filter(post_aggregates::published.gt(now - 6.months()))
+        .then_order_by(post_aggregates::score.desc())
+        .then_order_by(post_aggregates::published.desc()),
+      SortType::TopNineMonths => query
+        .filter(post_aggregates::published.gt(now - 9.months()))
+        .then_order_by(post_aggregates::score.desc())
+        .then_order_by(post_aggregates::published.desc()),
     };

     let (limit, offset) = limit_and_offset(self.page, self.limit)?;
@@ -125,6 +125,7 @@ pub struct CommunityQuery<'a> {
   local_user: Option<&'a LocalUser>,
   search_term: Option<String>,
   is_mod_or_admin: Option<bool>,
+  show_nsfw: Option<bool>,
   page: Option<i64>,
   limit: Option<i64>,
 }
@@ -202,8 +203,8 @@ impl<'a> CommunityQuery<'a> {
       query = query.filter(community_block::person_id.is_null());
       query = query.filter(community::nsfw.eq(false).or(local_user::show_nsfw.eq(true)));
     } else {
-      // No person in request, only show nsfw communities if show_nsfw passed into request
-      if !self.local_user.map(|l| l.show_nsfw).unwrap_or(false) {
+      // No person in request, only show nsfw communities if show_nsfw is passed into request
+      if !self.show_nsfw.unwrap_or(false) {
         query = query.filter(community::nsfw.eq(false));
       }
     }
@@ -119,6 +119,15 @@ impl<'a> PersonQuery<'a> {
       SortType::TopTwelveHour => query
        .filter(person::published.gt(now - 12.hours()))
         .order_by(person_aggregates::comment_score.desc()),
+      SortType::TopThreeMonths => query
+        .filter(person::published.gt(now - 3.months()))
+        .order_by(person_aggregates::comment_score.desc()),
+      SortType::TopSixMonths => query
+        .filter(person::published.gt(now - 6.months()))
+        .order_by(person_aggregates::comment_score.desc()),
+      SortType::TopNineMonths => query
+        .filter(person::published.gt(now - 9.months()))
+        .order_by(person_aggregates::comment_score.desc()),
     };

     let (limit, offset) = limit_and_offset(self.page, self.limit)?;
@@ -482,7 +482,6 @@ fn create_post_items(
     i.pub_date(dt.to_rfc2822());

     let post_url = format!("{}/post/{}", protocol_and_hostname, p.post.id);
-    i.link(post_url.clone());
     i.comments(post_url.clone());
     let guid = GuidBuilder::default()
       .permalink(true)
@@ -506,6 +505,9 @@ fn create_post_items(
     if let Some(url) = p.post.url {
       let link_html = format!("<br><a href=\"{url}\">{url}</a>");
       description.push_str(&link_html);
+      i.link(url.to_string());
+    } else {
+      i.link(post_url.clone());
     }

     if let Some(body) = p.post.body {
@@ -49,7 +49,6 @@ enum-map = "2.5"

 [dev-dependencies]
 reqwest = { workspace = true }
-tokio = { workspace = true, features = ["macros"] }

 [build-dependencies]
 rosetta-build = "0.1.2"
crates/utils/src/main.rs (new file, 16 lines)
@@ -0,0 +1,16 @@
+use doku::json::{AutoComments, CommentsStyle, Formatting, ObjectsStyle};
+use lemmy_utils::settings::structs::Settings;
+fn main() {
+  let fmt = Formatting {
+    auto_comments: AutoComments::none(),
+    comments_style: CommentsStyle {
+      separator: "#".to_owned(),
+    },
+    objects_style: ObjectsStyle {
+      surround_keys_with_quotes: false,
+      use_comma_as_separator: false,
+    },
+    ..Default::default()
+  };
+  println!("{}", doku::to_json_fmt_val(&fmt, &Settings::default()));
+}
@@ -39,6 +39,12 @@ pub struct Settings {
   #[default(None)]
   #[doku(skip)]
   pub opentelemetry_url: Option<Url>,
+  /// The number of activitypub federation workers that can be in-flight concurrently
+  #[default(0)]
+  pub worker_count: usize,
+  /// The number of activitypub federation retry workers that can be in-flight concurrently
+  #[default(0)]
+  pub retry_count: usize,
 }

 #[derive(Debug, Deserialize, Serialize, Clone, SmartDefault, Document)]
@@ -1,7 +1,7 @@
 use crate::error::{LemmyError, LemmyResult};
 use itertools::Itertools;
 use once_cell::sync::Lazy;
-use regex::Regex;
+use regex::{Regex, RegexBuilder};
 use totp_rs::{Secret, TOTP};
 use url::Url;

@@ -17,8 +17,13 @@ static CLEAN_URL_PARAMS_REGEX: Lazy<Regex> = Lazy::new(|| {
   Regex::new(r"^utm_source|utm_medium|utm_campaign|utm_term|utm_content|gclid|gclsrc|dclid|fbclid$")
     .expect("compile regex")
 });

 const BODY_MAX_LENGTH: usize = 10000;
+const POST_BODY_MAX_LENGTH: usize = 50000;
 const BIO_MAX_LENGTH: usize = 300;
+const SITE_NAME_MAX_LENGTH: usize = 20;
+const SITE_NAME_MIN_LENGTH: usize = 1;
+const SITE_DESCRIPTION_MAX_LENGTH: usize = 150;

 fn has_newline(name: &str) -> bool {
   name.contains('\n')
@@ -68,9 +73,14 @@ pub fn is_valid_post_title(title: &str) -> LemmyResult<()> {
 }

 /// This could be post bodies, comments, or any description field
-pub fn is_valid_body_field(body: &Option<String>) -> LemmyResult<()> {
+pub fn is_valid_body_field(body: &Option<String>, post: bool) -> LemmyResult<()> {
   if let Some(body) = body {
-    let check = body.chars().count() <= BODY_MAX_LENGTH;
+    let check = if post {
+      body.chars().count() <= POST_BODY_MAX_LENGTH
+    } else {
+      body.chars().count() <= BODY_MAX_LENGTH
+    };

     if !check {
       Err(LemmyError::from_message("invalid_body_field"))
     } else {
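The new post flag selects which limit applies, so call sites now state whether they are validating a post body (POST_BODY_MAX_LENGTH) or any other body/description field (BODY_MAX_LENGTH). A hypothetical call-site sketch, not taken from the diff, assuming is_valid_body_field and LemmyResult are in scope:

fn validate_submission(comment: &Option<String>, post_body: &Option<String>) -> LemmyResult<()> {
  // Comments and other description fields keep the smaller BODY_MAX_LENGTH limit.
  is_valid_body_field(comment, false)?;
  // Post bodies get the larger POST_BODY_MAX_LENGTH limit.
  is_valid_body_field(post_body, true)?;
  Ok(())
}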
@@ -82,14 +92,83 @@ pub fn is_valid_body_field(body: &Option<String>) -> LemmyResult<()> {
 }

 pub fn is_valid_bio_field(bio: &str) -> LemmyResult<()> {
-  let check = bio.chars().count() <= BIO_MAX_LENGTH;
-  if !check {
-    Err(LemmyError::from_message("bio_length_overflow"))
+  max_length_check(bio, BIO_MAX_LENGTH, String::from("bio_length_overflow"))
+}
+
+/// Checks the site name length, the limit as defined in the DB.
+pub fn site_name_length_check(name: &str) -> LemmyResult<()> {
+  min_max_length_check(
+    name,
+    SITE_NAME_MIN_LENGTH,
+    SITE_NAME_MAX_LENGTH,
+    String::from("site_name_required"),
+    String::from("site_name_length_overflow"),
+  )
+}
+
+/// Checks the site description length, the limit as defined in the DB.
+pub fn site_description_length_check(description: &str) -> LemmyResult<()> {
+  max_length_check(
+    description,
+    SITE_DESCRIPTION_MAX_LENGTH,
+    String::from("site_description_length_overflow"),
+  )
+}
+
+fn max_length_check(item: &str, max_length: usize, msg: String) -> LemmyResult<()> {
+  if item.len() > max_length {
+    Err(LemmyError::from_message(&msg))
   } else {
     Ok(())
   }
 }
+
+fn min_max_length_check(
+  item: &str,
+  min_length: usize,
+  max_length: usize,
+  min_msg: String,
+  max_msg: String,
+) -> LemmyResult<()> {
+  if item.len() > max_length {
+    Err(LemmyError::from_message(&max_msg))
+  } else if item.len() < min_length {
+    Err(LemmyError::from_message(&min_msg))
+  } else {
+    Ok(())
+  }
+}
+
+/// Attempts to build a regex and check it for common errors before inserting into the DB.
+pub fn build_and_check_regex(regex_str_opt: &Option<&str>) -> LemmyResult<Option<Regex>> {
+  regex_str_opt.map_or_else(
+    || Ok(None::<Regex>),
+    |regex_str| {
+      if regex_str.is_empty() {
+        // If the proposed regex is empty, return as having no regex at all; this is the same
+        // behavior that happens downstream before the write to the database.
+        return Ok(None::<Regex>);
+      }
+
+      RegexBuilder::new(regex_str)
+        .case_insensitive(true)
+        .build()
+        .map_err(|e| LemmyError::from_error_message(e, "invalid_regex"))
+        .and_then(|regex| {
+          // NOTE: It is difficult to know, in the universe of user-crafted regex, which ones
+          // may match against any string text. To keep it simple, we'll match the regex
+          // against an innocuous string - a single number - which should help catch a regex
+          // that accidentally matches against all strings.
+          if regex.is_match("1") {
+            return Err(LemmyError::from_message("permissive_regex"));
+          }
+
+          Ok(Some(regex))
+        })
+    },
+  )
+}

 pub fn clean_url_params(url: &Url) -> Url {
   let mut url_out = url.clone();
   if url.query().is_some() {
@@ -171,13 +250,20 @@ pub fn check_site_visibility_valid(
 mod tests {
   use super::build_totp_2fa;
   use crate::utils::validation::{
+    build_and_check_regex,
     check_site_visibility_valid,
     clean_url_params,
     generate_totp_2fa_secret,
     is_valid_actor_name,
+    is_valid_bio_field,
     is_valid_display_name,
     is_valid_matrix_id,
     is_valid_post_title,
+    site_description_length_check,
+    site_name_length_check,
+    BIO_MAX_LENGTH,
+    SITE_DESCRIPTION_MAX_LENGTH,
+    SITE_NAME_MAX_LENGTH,
   };
   use url::Url;

@@ -246,6 +332,126 @@ mod tests {
     assert!(totp.is_ok());
   }

+  #[test]
+  fn test_valid_site_name() {
+    let valid_names = [
+      (0..SITE_NAME_MAX_LENGTH).map(|_| 'A').collect::<String>(),
+      String::from("A"),
+    ];
+    let invalid_names = [
+      (
+        &(0..SITE_NAME_MAX_LENGTH + 1)
+          .map(|_| 'A')
+          .collect::<String>(),
+        "site_name_length_overflow",
+      ),
+      (&String::new(), "site_name_required"),
+    ];
+
+    valid_names.iter().for_each(|valid_name| {
+      assert!(
+        site_name_length_check(valid_name).is_ok(),
+        "Expected {} of length {} to be Ok.",
+        valid_name,
+        valid_name.len()
+      )
+    });
+
+    invalid_names
+      .iter()
+      .for_each(|&(invalid_name, expected_err)| {
+        let result = site_name_length_check(invalid_name);
+
+        assert!(result.is_err());
+        assert!(
+          result
+            .unwrap_err()
+            .message
+            .eq(&Some(String::from(expected_err))),
+          "Testing {}, expected error {}",
+          invalid_name,
+          expected_err
+        );
+      });
+  }
+
+  #[test]
+  fn test_valid_bio() {
+    assert!(is_valid_bio_field(&(0..BIO_MAX_LENGTH).map(|_| 'A').collect::<String>()).is_ok());
+
+    let invalid_result =
+      is_valid_bio_field(&(0..BIO_MAX_LENGTH + 1).map(|_| 'A').collect::<String>());
+
+    assert!(
+      invalid_result.is_err()
+        && invalid_result
+          .unwrap_err()
+          .message
+          .eq(&Some(String::from("bio_length_overflow")))
+    );
+  }
+
+  #[test]
+  fn test_valid_site_description() {
+    assert!(site_description_length_check(
+      &(0..SITE_DESCRIPTION_MAX_LENGTH)
+        .map(|_| 'A')
+        .collect::<String>()
+    )
+    .is_ok());
+
+    let invalid_result = site_description_length_check(
+      &(0..SITE_DESCRIPTION_MAX_LENGTH + 1)
+        .map(|_| 'A')
+        .collect::<String>(),
+    );
+
+    assert!(
+      invalid_result.is_err()
+        && invalid_result
+          .unwrap_err()
+          .message
+          .eq(&Some(String::from("site_description_length_overflow")))
+    );
+  }
+
+  #[test]
+  fn test_valid_slur_regex() {
+    let valid_regexes = [&None, &Some(""), &Some("(foo|bar)")];
+
+    valid_regexes.iter().for_each(|regex| {
+      let result = build_and_check_regex(regex);
+
+      assert!(result.is_ok(), "Testing regex: {:?}", regex);
+    });
+  }
+
+  #[test]
+  fn test_too_permissive_slur_regex() {
+    let match_everything_regexes = [
+      (&Some("["), "invalid_regex"),
+      (&Some("(foo|bar|)"), "permissive_regex"),
+      (&Some(".*"), "permissive_regex"),
+    ];
+
+    match_everything_regexes
+      .iter()
+      .for_each(|&(regex_str, expected_err)| {
+        let result = build_and_check_regex(regex_str);
+
+        assert!(result.is_err());
+        assert!(
+          result
+            .unwrap_err()
+            .message
+            .eq(&Some(String::from(expected_err))),
+          "Testing regex {:?}, expected error {}",
+          regex_str,
+          expected_err
+        );
+      });
+  }
+
   #[test]
   fn test_check_site_visibility_valid() {
     assert!(check_site_visibility_valid(true, true, &None, &None).is_err());
@@ -1 +1 @@
-Subproject commit 7fc71d0860bbe5c6d620ec27112350ffe5b9229c
+Subproject commit 5a9d44656e2658ab7cb2dbec3fd1bfaf57654533
@@ -29,7 +29,7 @@ RUN \
 FROM alpine:3 as lemmy

 # Install libpq for postgres
-RUN apk add libpq
+RUN apk add --no-cache libpq

 # Copy resources
 COPY --from=builder /app/lemmy_server /app/lemmy
@@ -6,20 +6,9 @@ x-logging: &default-logging
     max-size: "50m"
     max-file: 4

-networks:
-  # communication to web and clients
-  lemmyexternalproxy:
-  # communication between lemmy services
-  lemmyinternal:
-    driver: bridge
-    internal: true
-
 services:
   proxy:
     image: nginx:1-alpine
-    networks:
-      - lemmyinternal
-      - lemmyexternalproxy
     ports:
       # actual and only port facing any connection from outside
      # Note, change the left number if port 1236 is already in use on your system
@@ -45,9 +34,6 @@ services:
      # RUST_RELEASE_MODE: release
     # this hostname is used in nginx reverse proxy and also for lemmy ui to connect to the backend, do not change
     hostname: lemmy
-    networks:
-      - lemmyinternal
-      - lemmyexternalproxy
     restart: always
     environment:
       - RUST_LOG="warn,lemmy_server=debug,lemmy_api=debug,lemmy_api_common=debug,lemmy_api_crud=debug,lemmy_apub=debug,lemmy_db_schema=debug,lemmy_db_views=debug,lemmy_db_views_actor=debug,lemmy_db_views_moderator=debug,lemmy_routes=debug,lemmy_utils=debug,lemmy_websocket=debug"
@@ -67,14 +53,12 @@ services:
     # build:
     #   context: ../../lemmy-ui
     #   dockerfile: dev.dockerfile
-    networks:
-      - lemmyinternal
     environment:
       # this needs to match the hostname defined in the lemmy service
       - LEMMY_UI_LEMMY_INTERNAL_HOST=lemmy:8536
       # set the outside hostname here
       - LEMMY_UI_LEMMY_EXTERNAL_HOST=localhost:1236
-      - LEMMY_HTTPS=false
+      - LEMMY_UI_HTTPS=false
       - LEMMY_UI_DEBUG=true
     depends_on:
       - lemmy
@@ -88,8 +72,6 @@ services:
     hostname: pictrs
     # we can set options to pictrs like this, here we set max. image size and forced format for conversion
     # entrypoint: /sbin/tini -- /usr/local/bin/pict-rs -p /mnt -m 4 --image-format webp
-    networks:
-      - lemmyinternal
     environment:
       - PICTRS_OPENTELEMETRY_URL=http://otel:4137
       - PICTRS__API_KEY=API_KEY
@@ -126,10 +108,6 @@ services:
         "-c",
         "track_activity_query_size=1048576",
       ]
-    networks:
-      - lemmyinternal
-      # adding the external facing network to allow direct db access for devs
-      - lemmyexternalproxy
     ports:
       # use a different port so it doesnt conflict with potential postgres db running on the host
       - "5433:5432"
@@ -0,0 +1 @@
+alter table local_site add column federation_worker_count int default 64 not null;

@@ -0,0 +1 @@
+alter table local_site drop column federation_worker_count;

@@ -0,0 +1,14 @@
+-- update the default sort type
+update local_user set default_sort_type = 'TopDay' where default_sort_type in ('TopThreeMonths', 'TopSixMonths', 'TopNineMonths');
+
+-- rename the old enum
+alter type sort_type_enum rename to sort_type_enum__;
+-- create the new enum
+CREATE TYPE sort_type_enum AS ENUM ('Active', 'Hot', 'New', 'Old', 'TopDay', 'TopWeek', 'TopMonth', 'TopYear', 'TopAll', 'MostComments', 'NewComments', 'TopHour', 'TopSixHour', 'TopTwelveHour');
+
+-- alter all you enum columns
+alter table local_user
+  alter column default_sort_type type sort_type_enum using default_sort_type::text::sort_type_enum;
+
+-- drop the old enum
+drop type sort_type_enum__;

@@ -0,0 +1,4 @@
+-- Update the enums
+ALTER TYPE sort_type_enum ADD VALUE 'TopThreeMonths';
+ALTER TYPE sort_type_enum ADD VALUE 'TopSixMonths';
+ALTER TYPE sort_type_enum ADD VALUE 'TopNineMonths';

migrations/2023-06-21-153242_add_captcha/down.sql (new file, 1 line)
@@ -0,0 +1 @@
+drop table captcha_answer;

migrations/2023-06-21-153242_add_captcha/up.sql (new file, 6 lines)
@@ -0,0 +1,6 @@
+create table captcha_answer (
+  id serial primary key,
+  uuid uuid not null unique default gen_random_uuid(),
+  answer text not null,
+  published timestamp not null default now()
+);

@@ -0,0 +1,2 @@
+alter table only local_user alter column theme TYPE character varying(20);
+alter table only local_user alter column theme set default 'browser'::character varying;

@@ -0,0 +1,2 @@
+alter table only local_user alter column theme type text;
+alter table only local_user alter column theme set default 'browser'::text;

@@ -0,0 +1,2 @@
+drop index idx_comment_aggregates_published;
+drop index idx_community_aggregates_published;

@@ -0,0 +1,4 @@
+-- Add indexes on published column (needed for hot_rank updates)
+
+create index idx_community_aggregates_published on community_aggregates (published desc);
+create index idx_comment_aggregates_published on comment_aggregates (published desc);
@@ -14,7 +14,10 @@ cargo clippy --workspace --fix --allow-staged --allow-dirty --tests --all-target
   -D clippy::manual_string_new -D clippy::redundant_closure_for_method_calls \
   -D clippy::unused_self \
   -A clippy::uninlined_format_args \
-  -D clippy::get_first
+  -D clippy::get_first \
+  -D clippy::explicit_into_iter_loop \
+  -D clippy::explicit_iter_loop \
+  -D clippy::needless_collect

 cargo clippy --workspace --features console -- \
   -D clippy::unwrap_used \
@@ -6,19 +6,23 @@ set -e
 new_tag="$1"
 third_semver=$(echo $new_tag | cut -d "." -f 3)

+# Goto the upper route
+CWD="$(cd -P -- "$(dirname -- "${BASH_SOURCE[0]}")" && pwd -P)"
+cd $CWD/../
+
 # The ansible and docker installs should only update for non release-candidates
 # IE, when the third semver is a number, not '2-rc'
 if [ ! -z "${third_semver##*[!0-9]*}" ]; then
-  pushd ../docker
-  sed -i "s/dessalines\/lemmy:.*/dessalines\/lemmy:$new_tag/" ../docker-compose.yml
-  sed -i "s/dessalines\/lemmy-ui:.*/dessalines\/lemmy-ui:$new_tag/" ../docker-compose.yml
-  sed -i "s/dessalines\/lemmy-ui:.*/dessalines\/lemmy-ui:$new_tag/" ../federation/docker-compose.yml
-  git add ../docker-compose.yml
-  git add ../federation/docker-compose.yml
+  pushd docker
+  sed -i "s/dessalines\/lemmy:.*/dessalines\/lemmy:$new_tag/" docker-compose.yml
+  sed -i "s/dessalines\/lemmy-ui:.*/dessalines\/lemmy-ui:$new_tag/" docker-compose.yml
+  sed -i "s/dessalines\/lemmy-ui:.*/dessalines\/lemmy-ui:$new_tag/" federation/docker-compose.yml
+  git add docker-compose.yml
+  git add federation/docker-compose.yml
   popd

   # Setting the version for Ansible
-  pushd ../../../lemmy-ansible
+  pushd ../lemmy-ansible
   echo $new_tag > "VERSION"
   git add "VERSION"
   git commit -m"Updating VERSION"
@@ -29,14 +33,16 @@ if [ ! -z "${third_semver##*[!0-9]*}" ]; then
 fi

 # Update crate versions
-pushd ..
 old_tag=$(grep version Cargo.toml | head -1 | cut -d'"' -f 2)
 sed -i "s/{ version = \"=$old_tag\", path/{ version = \"=$new_tag\", path/g" Cargo.toml
 sed -i "s/version = \"$old_tag\"/version = \"$new_tag\"/g" Cargo.toml
 git add Cargo.toml
 cargo check
 git add Cargo.lock
-popd
+
+# Update the submodules
+git submodule update --remote
+git add crates/utils/translations

 # The commit
 git commit -m"Version $new_tag"
@@ -3,4 +3,4 @@ set -e

 dest=${1-config/defaults.hjson}

-cargo run -- --print-config-docs > "$dest"
+cargo run --manifest-path crates/utils/Cargo.toml > "$dest"
@@ -38,6 +38,7 @@ use lemmy_api_common::{
     ChangePassword,
     DeleteAccount,
     GetBannedPersons,
+    GetCaptcha,
     GetPersonDetails,
     GetPersonMentions,
     GetReplies,
@@ -272,6 +273,12 @@ pub fn config(cfg: &mut web::ServiceConfig, rate_limit: &RateLimitCell) {
         .wrap(rate_limit.register())
         .route(web::post().to(route_post_crud::<Register>)),
     )
+    .service(
+      // Handle captcha separately
+      web::resource("/user/get_captcha")
+        .wrap(rate_limit.post())
+        .route(web::get().to(route_get::<GetCaptcha>)),
+    )
     // User actions
     .service(
       web::scope("/user")
36
src/lib.rs
36
src/lib.rs
|
@ -9,7 +9,6 @@ use crate::{code_migrations::run_advanced_migrations, root_span_builder::Quieter
|
||||||
use activitypub_federation::config::{FederationConfig, FederationMiddleware};
|
use activitypub_federation::config::{FederationConfig, FederationMiddleware};
|
||||||
use actix_cors::Cors;
|
use actix_cors::Cors;
|
||||||
use actix_web::{middleware, web::Data, App, HttpServer, Result};
|
use actix_web::{middleware, web::Data, App, HttpServer, Result};
|
||||||
use doku::json::{AutoComments, CommentsStyle, Formatting, ObjectsStyle};
|
|
||||||
use lemmy_api_common::{
|
use lemmy_api_common::{
|
||||||
context::LemmyContext,
|
context::LemmyContext,
|
||||||
lemmy_db_views::structs::SiteView,
|
lemmy_db_views::structs::SiteView,
|
||||||
|
@ -25,11 +24,7 @@ use lemmy_db_schema::{
|
||||||
utils::{build_db_pool, get_conn, get_database_url, run_migrations},
|
utils::{build_db_pool, get_conn, get_database_url, run_migrations},
|
||||||
};
|
};
|
||||||
use lemmy_routes::{feeds, images, nodeinfo, webfinger};
|
use lemmy_routes::{feeds, images, nodeinfo, webfinger};
|
||||||
use lemmy_utils::{
|
use lemmy_utils::{error::LemmyError, rate_limit::RateLimitCell, settings::SETTINGS};
|
||||||
error::LemmyError,
|
|
||||||
rate_limit::RateLimitCell,
|
|
||||||
settings::{structs::Settings, SETTINGS},
|
|
||||||
};
|
|
||||||
use reqwest::Client;
|
use reqwest::Client;
|
||||||
use reqwest_middleware::ClientBuilder;
|
use reqwest_middleware::ClientBuilder;
|
||||||
use reqwest_tracing::TracingMiddleware;
|
use reqwest_tracing::TracingMiddleware;
|
||||||
|
@@ -47,21 +42,6 @@ pub(crate) const REQWEST_TIMEOUT: Duration = Duration::from_secs(10);
 /// Placing the main function in lib.rs allows other crates to import it and embed Lemmy
 pub async fn start_lemmy_server() -> Result<(), LemmyError> {
   let args: Vec<String> = env::args().collect();
-  if args.get(1) == Some(&"--print-config-docs".to_string()) {
-    let fmt = Formatting {
-      auto_comments: AutoComments::none(),
-      comments_style: CommentsStyle {
-        separator: "#".to_owned(),
-      },
-      objects_style: ObjectsStyle {
-        surround_keys_with_quotes: false,
-        use_comma_as_separator: false,
-      },
-      ..Default::default()
-    };
-    println!("{}", doku::to_json_fmt_val(&fmt, &Settings::default()));
-    return Ok(());
-  }
-
   let scheduled_tasks_enabled = args.get(1) != Some(&"--disable-scheduled-tasks".to_string());

@@ -140,24 +120,23 @@ pub async fn start_lemmy_server() -> Result<(), LemmyError> {
     });
   }

+  let settings_bind = settings.clone();
+
   let federation_config = FederationConfig::builder()
     .domain(settings.hostname.clone())
     .app_data(context.clone())
     .client(client.clone())
     .http_fetch_limit(FEDERATION_HTTP_FETCH_LIMIT)
-    .worker_count(local_site.federation_worker_count as usize)
+    .worker_count(settings.worker_count)
+    .retry_count(settings.retry_count)
     .debug(cfg!(debug_assertions))
     .http_signature_compat(true)
     .url_verifier(Box::new(VerifyUrlData(context.pool().clone())))
     .build()
-    .await
-    .expect("configure federation");
+    .await?;

   // Create Http server with websocket support
-  let settings_bind = settings.clone();
   HttpServer::new(move || {
-    let context = context.clone();
-
     let cors_config = if cfg!(debug_assertions) {
       Cors::permissive()
     } else {
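The federation config builder now fails with `?` instead of panicking through expect, so a bad configuration surfaces as an error from start_lemmy_server rather than an abort. A tiny standalone sketch of the difference, using stand-in types rather than Lemmy's:

#[derive(Debug)]
struct BuildError;

fn build_config(fail: bool) -> Result<&'static str, BuildError> {
  if fail { Err(BuildError) } else { Ok("config") }
}

fn start(fail: bool) -> Result<(), BuildError> {
  // Before: build_config(fail).expect("configure federation") would abort the whole process.
  // After: `?` hands the error back to the caller, which can log it and exit cleanly.
  let _config = build_config(fail)?;
  Ok(())
}

fn main() {
  assert!(start(false).is_ok());
  assert!(start(true).is_err());
}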
@@ -172,9 +151,10 @@ pub async fn start_lemmy_server() -> Result<(), LemmyError> {
         // This is the default log format save for the usage of %{r}a over %a to guarantee to record the client's (forwarded) IP and not the last peer address, since the latter is frequently just a reverse proxy
         "%{r}a '%r' %s %b '%{Referer}i' '%{User-Agent}i' %T",
       ))
+      .wrap(middleware::Compress::default())
       .wrap(cors_config)
       .wrap(TracingLogger::<QuieterRootSpanBuilder>::new())
-      .app_data(Data::new(context))
+      .app_data(Data::new(context.clone()))
       .app_data(Data::new(rate_limit_cell.clone()))
       .wrap(FederationMiddleware::new(federation_config.clone()))
       // The routes
@@ -1,7 +1,7 @@
 use lemmy_server::{init_logging, start_lemmy_server};
 use lemmy_utils::{error::LemmyError, settings::SETTINGS};

-#[actix_web::main]
+#[tokio::main]
 pub async fn main() -> Result<(), LemmyError> {
   init_logging(&SETTINGS.opentelemetry_url)?;
   #[cfg(not(feature = "embed-pictrs"))]
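Swapping #[actix_web::main] for #[tokio::main] changes which macro wraps main in an async runtime; by default #[tokio::main] builds a multi-threaded Tokio runtime. Roughly (and only roughly) what the attribute expands to, as a sketch with a generic error type rather than LemmyError:

fn main() -> Result<(), Box<dyn std::error::Error>> {
  tokio::runtime::Builder::new_multi_thread()
    .enable_all()
    .build()?
    .block_on(async {
      // the body of the original async main() runs here
      Ok(())
    })
}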
@@ -1,25 +1,21 @@
+use chrono::NaiveDateTime;
 use clokwerk::{Scheduler, TimeUnits as CTimeUnits};
 use diesel::{
   dsl::{now, IntervalDsl},
+  sql_types::{Integer, Timestamp},
   Connection,
   ExpressionMethods,
+  NullableExpressionMethods,
   QueryDsl,
+  QueryableByName,
 };
 // Import week days and WeekDay
 use diesel::{sql_query, PgConnection, RunQueryDsl};
 use lemmy_api_common::context::LemmyContext;
 use lemmy_db_schema::{
-  schema::{
-    activity,
-    comment_aggregates,
-    community_aggregates,
-    community_person_ban,
-    instance,
-    person,
-    post_aggregates,
-  },
+  schema::{activity, captcha_answer, comment, community_person_ban, instance, person, post},
   source::instance::{Instance, InstanceForm},
-  utils::{functions::hot_rank, naive_now},
+  utils::{naive_now, DELETED_REPLACEMENT_TEXT},
 };
 use lemmy_routes::nodeinfo::NodeInfo;
 use lemmy_utils::{error::LemmyError, REQWEST_TIMEOUT};
@@ -46,13 +42,20 @@ pub fn setup(
     update_banned_when_expired(&mut conn);
   });

-  // Update hot ranks every 5 minutes
+  // Update hot ranks every 15 minutes
   let url = db_url.clone();
-  scheduler.every(CTimeUnits::minutes(5)).run(move || {
+  scheduler.every(CTimeUnits::minutes(15)).run(move || {
     let mut conn = PgConnection::establish(&url).expect("could not establish connection");
     update_hot_ranks(&mut conn, true);
   });

+  // Delete any captcha answers older than ten minutes, every ten minutes
+  let url = db_url.clone();
+  scheduler.every(CTimeUnits::minutes(10)).run(move || {
+    let mut conn = PgConnection::establish(&url).expect("could not establish connection");
+    delete_expired_captcha_answers(&mut conn);
+  });
+
   // Clear old activities every week
   let url = db_url.clone();
   scheduler.every(CTimeUnits::weeks(1)).run(move || {
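For context on how these jobs fire: clokwerk only computes the schedule; something still has to poll it. A minimal, self-contained usage sketch (the interval and job body are illustrative, and the polling loop that drives Lemmy's scheduler is not shown in these hunks):

use clokwerk::{Scheduler, TimeUnits};
use std::{thread, time::Duration};

fn main() {
  let mut scheduler = Scheduler::new();
  // Register a job; the closure runs each time run_pending() notices the interval has elapsed.
  scheduler.every(10.minutes()).run(|| {
    println!("deleting expired captcha answers (stub)");
  });
  // Poll the scheduler; the sleep only bounds how late a job can start.
  loop {
    scheduler.run_pending();
    thread::sleep(Duration::from_millis(500));
  }
}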
@@ -66,6 +69,13 @@ pub fn setup(
     context_1.settings_updated_channel().remove_older_than(hour);
   });

+  // Overwrite deleted & removed posts and comments every day
+  let url = db_url.clone();
+  scheduler.every(CTimeUnits::days(1)).run(move || {
+    let mut conn = PgConnection::establish(&url).expect("could not establish connection");
+    overwrite_deleted_posts_and_comments(&mut conn);
+  });
+
   // Update the Instance Software
   scheduler.every(CTimeUnits::days(1)).run(move || {
     let mut conn = PgConnection::establish(&db_url).expect("could not establish connection");
@@ -86,67 +96,109 @@ fn startup_jobs(db_url: &str) {
   update_hot_ranks(&mut conn, false);
   update_banned_when_expired(&mut conn);
   clear_old_activities(&mut conn);
+  overwrite_deleted_posts_and_comments(&mut conn);
 }

 /// Update the hot_rank columns for the aggregates tables
+/// Runs in batches until all necessary rows are updated once
 fn update_hot_ranks(conn: &mut PgConnection, last_week_only: bool) {
-  let mut post_update = diesel::update(post_aggregates::table).into_boxed();
-  let mut comment_update = diesel::update(comment_aggregates::table).into_boxed();
-  let mut community_update = diesel::update(community_aggregates::table).into_boxed();
-
-  // Only update for the last week of content
-  if last_week_only {
+  let process_start_time = if last_week_only {
     info!("Updating hot ranks for last week...");
-    let last_week = now - diesel::dsl::IntervalDsl::weeks(1);
-
-    post_update = post_update.filter(post_aggregates::published.gt(last_week));
-    comment_update = comment_update.filter(comment_aggregates::published.gt(last_week));
-    community_update = community_update.filter(community_aggregates::published.gt(last_week));
+    naive_now() - chrono::Duration::days(7)
   } else {
     info!("Updating hot ranks for all history...");
+    NaiveDateTime::from_timestamp_opt(0, 0).expect("0 timestamp creation")
+  };
+
+  process_hot_ranks_in_batches(
+    conn,
+    "post_aggregates",
+    "SET hot_rank = hot_rank(a.score, a.published),
+        hot_rank_active = hot_rank(a.score, a.newest_comment_time_necro)",
+    process_start_time,
+  );
+
+  process_hot_ranks_in_batches(
+    conn,
+    "comment_aggregates",
+    "SET hot_rank = hot_rank(a.score, a.published)",
+    process_start_time,
+  );
+
+  process_hot_ranks_in_batches(
+    conn,
+    "community_aggregates",
+    "SET hot_rank = hot_rank(a.subscribers, a.published)",
+    process_start_time,
+  );
+
+  info!("Finished hot ranks update!");
 }

-  match post_update
-    .set((
-      post_aggregates::hot_rank.eq(hot_rank(post_aggregates::score, post_aggregates::published)),
-      post_aggregates::hot_rank_active.eq(hot_rank(
-        post_aggregates::score,
-        post_aggregates::newest_comment_time_necro,
-      )),
+#[derive(QueryableByName)]
+struct HotRanksUpdateResult {
+  #[diesel(sql_type = Timestamp)]
+  published: NaiveDateTime,
+}
+
+/// Runs the hot rank update query in batches until all rows after `process_start_time` have been
+/// processed.
+/// In `set_clause`, "a" will refer to the current aggregates table.
+/// Locked rows are skipped in order to prevent deadlocks (they will likely get updated on the next
+/// run)
+fn process_hot_ranks_in_batches(
+  conn: &mut PgConnection,
+  table_name: &str,
+  set_clause: &str,
+  process_start_time: NaiveDateTime,
+) {
+  let update_batch_size = 1000; // Bigger batches than this tend to cause seq scans
+  let mut previous_batch_result = Some(process_start_time);
+  while let Some(previous_batch_last_published) = previous_batch_result {
+    // Raw `sql_query` is used as a performance optimization - Diesel does not support doing this
+    // in a single query (neither as a CTE, nor using a subquery)
+    let result = sql_query(format!(
+      r#"WITH batch AS (SELECT a.id
+                        FROM {aggregates_table} a
+                        WHERE a.published > $1
+                        ORDER BY a.published
+                        LIMIT $2
+                        FOR UPDATE SKIP LOCKED)
+         UPDATE {aggregates_table} a {set_clause}
+             FROM batch WHERE a.id = batch.id RETURNING a.published;
+      "#,
+      aggregates_table = table_name,
+      set_clause = set_clause
     ))
-    .execute(conn)
-  {
-    Ok(_) => {}
+    .bind::<Timestamp, _>(previous_batch_last_published)
+    .bind::<Integer, _>(update_batch_size)
+    .get_results::<HotRanksUpdateResult>(conn);
+
+    match result {
+      Ok(updated_rows) => previous_batch_result = updated_rows.last().map(|row| row.published),
       Err(e) => {
-      error!("Failed to update post_aggregates hot_ranks: {}", e)
+        error!("Failed to update {} hot_ranks: {}", table_name, e);
+        break;
       }
     }
+  }
+  info!(
+    "Finished process_hot_ranks_in_batches execution for {}",
+    table_name
+  );
+}

-  match comment_update
-    .set(comment_aggregates::hot_rank.eq(hot_rank(
-      comment_aggregates::score,
-      comment_aggregates::published,
-    )))
-    .execute(conn)
-  {
-    Ok(_) => {}
-    Err(e) => {
-      error!("Failed to update comment_aggregates hot_ranks: {}", e)
-    }
-  }
-
-  match community_update
-    .set(community_aggregates::hot_rank.eq(hot_rank(
-      community_aggregates::subscribers,
-      community_aggregates::published,
-    )))
+fn delete_expired_captcha_answers(conn: &mut PgConnection) {
+  match diesel::delete(
+    captcha_answer::table.filter(captcha_answer::published.lt(now - IntervalDsl::minutes(10))),
+  )
   .execute(conn)
   {
     Ok(_) => {
       info!("Done.");
     }
     Err(e) => {
-      error!("Failed to update community_aggregates hot_ranks: {}", e)
+      error!("Failed to clear old captcha answers: {}", e)
     }
   }
 }
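The new process_hot_ranks_in_batches is a keyset-pagination loop: each round touches at most update_batch_size rows ordered by published, restarts from the largest published value it just returned, and stops when a batch comes back empty. A stripped-down sketch of that loop shape, independent of Diesel and Postgres (all names here are hypothetical):

// `fetch_and_update` stands in for the batched UPDATE ... RETURNING query; it must return
// the processed keys in ascending order.
fn process_in_batches(mut fetch_and_update: impl FnMut(i64, usize) -> Result<Vec<i64>, String>) {
  let batch_size = 1000;
  let mut cursor = Some(0i64); // process keys strictly greater than the cursor
  while let Some(start) = cursor {
    match fetch_and_update(start, batch_size) {
      // The last key of the batch becomes the next lower bound; an empty batch ends the loop.
      Ok(keys) => cursor = keys.last().copied(),
      Err(e) => {
        eprintln!("batch failed: {e}");
        break;
      }
    }
  }
}

fn main() {
  // Tiny usage example: pretend keys 1..=2500 need processing.
  let mut remaining: Vec<i64> = (1..=2500).collect();
  process_in_batches(|start, size| {
    let batch: Vec<i64> = remaining.iter().copied().filter(|k| *k > start).take(size).collect();
    remaining.retain(|k| !batch.contains(k));
    Ok(batch)
  });
}

Skipping locked rows (FOR UPDATE SKIP LOCKED) trades completeness for liveness: a row held by another transaction is simply picked up on a later run instead of risking a deadlock.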
@@ -166,6 +218,48 @@ fn clear_old_activities(conn: &mut PgConnection) {
     }
   }
 }

+/// overwrite posts and comments 30d after deletion
+fn overwrite_deleted_posts_and_comments(conn: &mut PgConnection) {
+  info!("Overwriting deleted posts...");
+  match diesel::update(
+    post::table
+      .filter(post::deleted.eq(true))
+      .filter(post::updated.lt(now.nullable() - 1.months()))
+      .filter(post::body.ne(DELETED_REPLACEMENT_TEXT)),
+  )
+  .set((
+    post::body.eq(DELETED_REPLACEMENT_TEXT),
+    post::name.eq(DELETED_REPLACEMENT_TEXT),
+  ))
+  .execute(conn)
+  {
+    Ok(_) => {
+      info!("Done.");
+    }
+    Err(e) => {
+      error!("Failed to overwrite deleted posts: {}", e)
+    }
+  }
+
+  info!("Overwriting deleted comments...");
+  match diesel::update(
+    comment::table
+      .filter(comment::deleted.eq(true))
+      .filter(comment::updated.lt(now.nullable() - 1.months()))
+      .filter(comment::content.ne(DELETED_REPLACEMENT_TEXT)),
+  )
+  .set(comment::content.eq(DELETED_REPLACEMENT_TEXT))
+  .execute(conn)
+  {
+    Ok(_) => {
+      info!("Done.");
+    }
+    Err(e) => {
+      error!("Failed to overwrite deleted comments: {}", e)
+    }
+  }
+}
+
 /// Re-calculate the site and community active counts every 12 hours
 fn active_counts(conn: &mut PgConnection) {
   info!("Updating active site and community aggregates ...");