Mirror of https://github.com/LemmyNet/lemmy.git (synced 2024-12-23 19:31:33 +00:00)

Commit ecb700ec6e: Merge remote-tracking branch 'upstream/main' into migration-runner

74 changed files with 1458 additions and 766 deletions
Woodpecker CI config:

@@ -2,9 +2,14 @@
  # See https://github.com/woodpecker-ci/woodpecker/issues/1677

  variables:
- - &rust_image "rust:1.78"
+ - &rust_image "rust:1.80"
  - &rust_nightly_image "rustlang/rust:nightly"
  - &install_pnpm "corepack enable pnpm"
+ - &install_binstall "wget -O- https://github.com/cargo-bins/cargo-binstall/releases/latest/download/cargo-binstall-x86_64-unknown-linux-musl.tgz | tar -xvz -C /usr/local/cargo/bin"
+ - install_diesel_cli: &install_diesel_cli
+ - apt-get update && apt-get install -y postgresql-client
+ - cargo install diesel_cli --no-default-features --features postgres
+ - export PATH="$CARGO_HOME/bin:$PATH"
  - &slow_check_paths
  - event: pull_request
  path:

@@ -25,17 +30,6 @@ variables:
  "diesel.toml",
  ".gitmodules",
  ]
- - install_binstall: &install_binstall
- - wget https://github.com/cargo-bins/cargo-binstall/releases/latest/download/cargo-binstall-x86_64-unknown-linux-musl.tgz
- - tar -xvf cargo-binstall-x86_64-unknown-linux-musl.tgz
- - cp cargo-binstall /usr/local/cargo/bin
- - install_diesel_cli: &install_diesel_cli
- - apt update && apt install -y lsb-release build-essential
- - sh -c 'echo "deb https://apt.postgresql.org/pub/repos/apt $(lsb_release -cs)-pgdg main" > /etc/apt/sources.list.d/pgdg.list'
- - wget --quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc | apt-key add -
- - apt update && apt install -y postgresql-client-16
- - cargo install diesel_cli --no-default-features --features postgres
- - export PATH="$CARGO_HOME/bin:$PATH"

  steps:
  prepare_repo:

@@ -82,7 +76,7 @@ steps:
  cargo_machete:
  image: *rust_nightly_image
  commands:
- - <<: *install_binstall
+ - *install_binstall
  - cargo binstall -y cargo-machete
  - cargo machete
  when:

@@ -214,7 +208,7 @@ steps:
  DO_WRITE_HOSTS_FILE: "1"
  commands:
  - *install_pnpm
- - apt update && apt install -y bash curl postgresql-client
+ - apt-get update && apt-get install -y bash curl postgresql-client
  - bash api_tests/prepare-drone-federation-test.sh
  - cd api_tests/
  - pnpm i

@@ -261,7 +255,7 @@ steps:
  publish_to_crates_io:
  image: *rust_image
  commands:
- - <<: *install_binstall
+ - *install_binstall
  # Install cargo-workspaces
  - cargo binstall -y cargo-workspaces
  - cp -r migrations crates/db_schema/

@@ -289,7 +283,8 @@ steps:

  services:
  database:
- image: pgautoupgrade/pgautoupgrade:16-alpine
+ # 15-alpine image necessary because of diesel tests
+ image: pgautoupgrade/pgautoupgrade:15-alpine
  environment:
  POSTGRES_DB: lemmy
  POSTGRES_USER: postgres
Cargo.lock (generated): 464 lines changed; diff too large to show.
Cargo.toml (58 lines changed):

@@ -1,5 +1,5 @@
  [workspace.package]
- version = "0.19.5"
+ version = "0.19.6-beta.6"
  edition = "2021"
  description = "A link aggregator for the fediverse"
  license = "AGPL-3.0"

@@ -86,28 +86,29 @@ suspicious = { level = "deny", priority = -1 }
  uninlined_format_args = "allow"
  unused_self = "deny"
  unwrap_used = "deny"
+ unimplemented = "deny"

  [workspace.dependencies]
- lemmy_api = { version = "=0.19.5", path = "./crates/api" }
+ lemmy_api = { version = "=0.19.6-beta.6", path = "./crates/api" }
- lemmy_api_crud = { version = "=0.19.5", path = "./crates/api_crud" }
+ lemmy_api_crud = { version = "=0.19.6-beta.6", path = "./crates/api_crud" }
- lemmy_apub = { version = "=0.19.5", path = "./crates/apub" }
+ lemmy_apub = { version = "=0.19.6-beta.6", path = "./crates/apub" }
- lemmy_utils = { version = "=0.19.5", path = "./crates/utils", default-features = false }
+ lemmy_utils = { version = "=0.19.6-beta.6", path = "./crates/utils", default-features = false }
- lemmy_db_schema = { version = "=0.19.5", path = "./crates/db_schema" }
+ lemmy_db_schema = { version = "=0.19.6-beta.6", path = "./crates/db_schema" }
- lemmy_api_common = { version = "=0.19.5", path = "./crates/api_common" }
+ lemmy_api_common = { version = "=0.19.6-beta.6", path = "./crates/api_common" }
- lemmy_routes = { version = "=0.19.5", path = "./crates/routes" }
+ lemmy_routes = { version = "=0.19.6-beta.6", path = "./crates/routes" }
- lemmy_db_views = { version = "=0.19.5", path = "./crates/db_views" }
+ lemmy_db_views = { version = "=0.19.6-beta.6", path = "./crates/db_views" }
- lemmy_db_views_actor = { version = "=0.19.5", path = "./crates/db_views_actor" }
+ lemmy_db_views_actor = { version = "=0.19.6-beta.6", path = "./crates/db_views_actor" }
- lemmy_db_views_moderator = { version = "=0.19.5", path = "./crates/db_views_moderator" }
+ lemmy_db_views_moderator = { version = "=0.19.6-beta.6", path = "./crates/db_views_moderator" }
- lemmy_federate = { version = "=0.19.5", path = "./crates/federate" }
+ lemmy_federate = { version = "=0.19.6-beta.6", path = "./crates/federate" }
  activitypub_federation = { version = "0.5.8", default-features = false, features = [
  "actix-web",
  ] }
  diesel = "2.1.6"
  diesel_migrations = "2.1.0"
  diesel-async = "0.4.1"
- serde = { version = "1.0.203", features = ["derive"] }
+ serde = { version = "1.0.204", features = ["derive"] }
- serde_with = "3.8.1"
+ serde_with = "3.9.0"
- actix-web = { version = "4.6.0", default-features = false, features = [
+ actix-web = { version = "4.8.0", default-features = false, features = [
  "macros",
  "rustls-0_23",
  "compress-brotli",

@@ -120,7 +121,7 @@ tracing-actix-web = { version = "0.7.11", default-features = false }
  tracing-error = "0.2.0"
  tracing-log = "0.2.0"
  tracing-subscriber = { version = "0.3.18", features = ["env-filter"] }
- url = { version = "2.5.0", features = ["serde"] }
+ url = { version = "2.5.2", features = ["serde"] }
  reqwest = { version = "0.11.27", default-features = false, features = [
  "json",
  "blocking",

@@ -133,20 +134,19 @@ clokwerk = "0.4.0"
  doku = { version = "0.21.1", features = ["url-2"] }
  bcrypt = "0.15.1"
  chrono = { version = "0.4.38", features = ["serde"], default-features = false }
- serde_json = { version = "1.0.117", features = ["preserve_order"] }
+ serde_json = { version = "1.0.121", features = ["preserve_order"] }
  base64 = "0.22.1"
- uuid = { version = "1.8.0", features = ["serde", "v4"] }
+ uuid = { version = "1.10.0", features = ["serde", "v4"] }
- async-trait = "0.1.80"
+ async-trait = "0.1.81"
  captcha = "0.0.9"
  anyhow = { version = "1.0.86", features = [
  "backtrace",
  ] } # backtrace is on by default on nightly, but not stable rust
  diesel_ltree = "0.3.1"
- typed-builder = "0.18.2"
+ typed-builder = "0.19.1"
  serial_test = "3.1.1"
- tokio = { version = "1.38.0", features = ["full"] }
+ tokio = { version = "1.39.2", features = ["full"] }
- regex = "1.10.4"
+ regex = "1.10.5"
- once_cell = "1.19.0"
  diesel-derive-newtype = "2.1.2"
  diesel-derive-enum = { version = "2.1.0", features = ["postgres"] }
  strum = { version = "0.26.3", features = ["derive"] }

@@ -161,15 +161,15 @@ ts-rs = { version = "7.1.1", features = [
  "chrono-impl",
  "no-serde-warnings",
  ] }
- rustls = { version = "0.23.9", features = ["ring"] }
+ rustls = { version = "0.23.12", features = ["ring"] }
  futures-util = "0.3.30"
- tokio-postgres = "0.7.10"
+ tokio-postgres = "0.7.11"
  tokio-postgres-rustls = "0.12.0"
  urlencoding = "2.1.3"
  enum-map = "2.7"
- moka = { version = "0.12.7", features = ["future"] }
+ moka = { version = "0.12.8", features = ["future"] }
  i-love-jesus = { version = "0.1.0" }
- clap = { version = "4.5.6", features = ["derive", "env"] }
+ clap = { version = "4.5.13", features = ["derive", "env"] }
  pretty_assertions = "1.4.0"
  derive-new = "0.6.0"

@@ -199,9 +199,9 @@ clokwerk = { workspace = true }
  serde_json = { workspace = true }
  tracing-opentelemetry = { workspace = true, optional = true }
  opentelemetry = { workspace = true, optional = true }
- console-subscriber = { version = "0.3.0", optional = true }
+ console-subscriber = { version = "0.4.0", optional = true }
  opentelemetry-otlp = { version = "0.12.0", optional = true }
- pict-rs = { version = "0.5.15", optional = true }
+ pict-rs = { version = "0.5.16", optional = true }
  rustls = { workspace = true }
  tokio.workspace = true
  actix-cors = "0.7.0"
package.json:

@@ -6,7 +6,7 @@
  "repository": "https://github.com/LemmyNet/lemmy",
  "author": "Dessalines",
  "license": "AGPL-3.0",
- "packageManager": "pnpm@9.4.0",
+ "packageManager": "pnpm@9.6.0",
  "scripts": {
  "lint": "tsc --noEmit && eslint --report-unused-disable-directives && prettier --check 'src/**/*.ts'",
  "fix": "prettier --write src && eslint --fix src",

@@ -21,17 +21,16 @@
  },
  "devDependencies": {
  "@types/jest": "^29.5.12",
- "@types/node": "^20.12.4",
+ "@types/node": "^22.0.2",
- "@typescript-eslint/eslint-plugin": "^7.5.0",
+ "@typescript-eslint/eslint-plugin": "^8.0.0",
- "@typescript-eslint/parser": "^7.5.0",
+ "@typescript-eslint/parser": "^8.0.0",
- "download-file-sync": "^1.0.4",
- "eslint": "^9.0.0",
+ "eslint": "^9.8.0",
  "eslint-plugin-prettier": "^5.1.3",
  "jest": "^29.5.0",
  "lemmy-js-client": "0.19.5-alpha.1",
  "prettier": "^3.2.5",
  "ts-jest": "^29.1.0",
- "typescript": "^5.4.4",
+ "typescript": "^5.5.4",
- "typescript-eslint": "^7.13.0"
+ "typescript-eslint": "^8.0.0"
  }
  }

(Another file's diff suppressed because it is too large.)
API tests, community spec:

@@ -1,5 +1,6 @@
  jest.setTimeout(120000);

+ import { AddModToCommunity } from "lemmy-js-client/dist/types/AddModToCommunity";
  import { CommunityView } from "lemmy-js-client/dist/types/CommunityView";
  import {
  alpha,

@@ -9,6 +10,7 @@ import {
  resolveCommunity,
  createCommunity,
  deleteCommunity,
+ delay,
  removeCommunity,
  getCommunity,
  followCommunity,

@@ -533,3 +535,41 @@ test("Content in local-only community doesn't federate", async () => {
  Error("couldnt_find_object"),
  );
  });
+
+ test("Remote mods can edit communities", async () => {
+ let communityRes = await createCommunity(alpha);
+
+ let betaCommunity = await resolveCommunity(
+ beta,
+ communityRes.community_view.community.actor_id,
+ );
+ if (!betaCommunity.community) {
+ throw "Missing beta community";
+ }
+ let betaOnAlpha = await resolvePerson(alpha, "lemmy_beta@lemmy-beta:8551");
+
+ let form: AddModToCommunity = {
+ community_id: communityRes.community_view.community.id,
+ person_id: betaOnAlpha.person?.person.id as number,
+ added: true,
+ };
+ alpha.addModToCommunity(form);
+
+ let form2: EditCommunity = {
+ community_id: betaCommunity.community?.community.id as number,
+ description: "Example description",
+ };
+
+ await editCommunity(beta, form2);
+ // give alpha time to get and process the edit
+ await delay(1000);
+
+ let alphaCommunity = await getCommunity(
+ alpha,
+ communityRes.community_view.community.id,
+ );
+
+ await expect(alphaCommunity.community_view.community.description).toBe(
+ "Example description",
+ );
+ });
API tests, image spec:

@@ -33,7 +33,6 @@ import {
  sampleImage,
  sampleSite,
  } from "./shared";
- const downloadFileSync = require("download-file-sync");

  beforeAll(setupLogins);

@@ -57,7 +56,8 @@ test("Upload image and delete it", async () => {
  expect(upload.delete_url).toBeDefined();

  // ensure that image download is working. theres probably a better way to do this
- const content = downloadFileSync(upload.url);
+ const response = await fetch(upload.url ?? "");
+ const content = await response.text();
  expect(content.length).toBeGreaterThan(0);

  // Ensure that it comes back with the list_media endpoint

@@ -92,7 +92,8 @@ test("Upload image and delete it", async () => {
  expect(delete_).toBe(true);

  // ensure that image is deleted
- const content2 = downloadFileSync(upload.url);
+ const response2 = await fetch(upload.url ?? "");
+ const content2 = await response2.text();
  expect(content2).toBe("");

  // Ensure that it shows the image is deleted

@@ -120,7 +121,8 @@ test("Purge user, uploaded image removed", async () => {
  expect(upload.delete_url).toBeDefined();

  // ensure that image download is working. theres probably a better way to do this
- const content = downloadFileSync(upload.url);
+ const response = await fetch(upload.url ?? "");
+ const content = await response.text();
  expect(content.length).toBeGreaterThan(0);

  // purge user

@@ -132,7 +134,8 @@ test("Purge user, uploaded image removed", async () => {
  expect(delete_.success).toBe(true);

  // ensure that image is deleted
- const content2 = downloadFileSync(upload.url);
+ const response2 = await fetch(upload.url ?? "");
+ const content2 = await response2.text();
  expect(content2).toBe("");
  });

@@ -150,7 +153,8 @@ test("Purge post, linked image removed", async () => {
  expect(upload.delete_url).toBeDefined();

  // ensure that image download is working. theres probably a better way to do this
- const content = downloadFileSync(upload.url);
+ const response = await fetch(upload.url ?? "");
+ const content = await response.text();
  expect(content.length).toBeGreaterThan(0);

  let community = await resolveBetaCommunity(user);

@@ -170,7 +174,8 @@ test("Purge post, linked image removed", async () => {
  expect(delete_.success).toBe(true);

  // ensure that image is deleted
- const content2 = downloadFileSync(upload.url);
+ const response2 = await fetch(upload.url ?? "");
+ const content2 = await response2.text();
  expect(content2).toBe("");
  });
API tests, shared helpers:

@@ -197,7 +197,7 @@ export async function setupLogins() {
  // (because last_successful_id is set to current id when federation to an instance is first started)
  // only needed the first time so do in this try
  await delay(10_000);
- } catch (_) {
+ } catch {
  console.log("Communities already exist");
  }
  }

@@ -899,7 +899,6 @@ export async function deleteAllImages(api: LemmyHttp) {
  const imagesRes = await api.listAllMedia({
  limit: imageFetchLimit,
  });
- imagesRes.images;
  Promise.all(
  imagesRes.images
  .map(image => {
Crate Cargo.toml:

@@ -35,11 +35,12 @@ chrono = { workspace = true }
  url = { workspace = true }
  hound = "3.5.1"
  sitemap-rs = "0.2.1"
- totp-rs = { version = "5.5.1", features = ["gen_secret", "otpauth"] }
+ totp-rs = { version = "5.6.0", features = ["gen_secret", "otpauth"] }
- actix-web-httpauth = "0.8.1"
+ actix-web-httpauth = "0.8.2"

  [dev-dependencies]
  serial_test = { workspace = true }
  tokio = { workspace = true }
  elementtree = "1.2.3"
  pretty_assertions = { workspace = true }
+ lemmy_api_crud = { workspace = true }
verify_email handler:

@@ -5,12 +5,9 @@ use lemmy_api_common::{
  utils::send_new_applicant_email_to_admins,
  SuccessResponse,
  };
- use lemmy_db_schema::{
- source::{
+ use lemmy_db_schema::source::{
  email_verification::EmailVerification,
  local_user::{LocalUser, LocalUserUpdateForm},
- },
- RegistrationMode,
  };
  use lemmy_db_views::structs::{LocalUserView, SiteView};
  use lemmy_utils::error::{LemmyErrorType, LemmyResult};

@@ -41,9 +38,7 @@ pub async fn verify_email(
  EmailVerification::delete_old_tokens_for_local_user(&mut context.pool(), local_user_id).await?;

  // send out notification about registration application to admins if enabled
- if site_view.local_site.registration_mode == RegistrationMode::RequireApplication
- && site_view.local_site.application_email_admins
- {
+ if site_view.local_site.application_email_admins {
  let local_user = LocalUserView::read(&mut context.pool(), local_user_id)
  .await?
  .ok_or(LemmyErrorType::CouldntFindPerson)?;
get_link_metadata handler:

@@ -4,6 +4,7 @@ use lemmy_api_common::{
  post::{GetSiteMetadata, GetSiteMetadataResponse},
  request::fetch_link_metadata,
  };
+ use lemmy_db_views::structs::LocalUserView;
  use lemmy_utils::{
  error::{LemmyErrorExt, LemmyResult},
  LemmyErrorType,

@@ -14,6 +15,8 @@ use url::Url;
  pub async fn get_link_metadata(
  data: Query<GetSiteMetadata>,
  context: Data<LemmyContext>,
+ // Require an account for this API
+ _local_user_view: LocalUserView,
  ) -> LemmyResult<Json<GetSiteMetadataResponse>> {
  let url = Url::parse(&data.url).with_lemmy_type(LemmyErrorType::InvalidUrl)?;
  let metadata = fetch_link_metadata(&url, &context).await?;
Registration application approve handler:

@@ -1,4 +1,5 @@
- use actix_web::web::{Data, Json};
+ use activitypub_federation::config::Data;
+ use actix_web::web::Json;
  use lemmy_api_common::{
  context::LemmyContext,
  site::{ApproveRegistrationApplication, RegistrationApplicationResponse},

@@ -10,10 +11,13 @@ use lemmy_db_schema::{
  registration_application::{RegistrationApplication, RegistrationApplicationUpdateForm},
  },
  traits::Crud,
- utils::diesel_string_update,
+ utils::{diesel_string_update, get_conn},
  };
  use lemmy_db_views::structs::{LocalUserView, RegistrationApplicationView};
- use lemmy_utils::{error::LemmyResult, LemmyErrorType};
+ use lemmy_utils::{
+ error::{LemmyError, LemmyResult},
+ LemmyErrorType,
+ };

  pub async fn approve_registration_application(
  data: Json<ApproveRegistrationApplication>,

@@ -25,34 +29,46 @@ pub async fn approve_registration_application(
  // Only let admins do this
  is_admin(&local_user_view)?;

+ let pool = &mut context.pool();
+ let conn = &mut get_conn(pool).await?;
+ let tx_data = data.clone();
+ let approved_user_id = conn
+ .build_transaction()
+ .run(|conn| {
+ Box::pin(async move {
  // Update the registration with reason, admin_id
- let deny_reason = diesel_string_update(data.deny_reason.as_deref());
+ let deny_reason = diesel_string_update(tx_data.deny_reason.as_deref());
  let app_form = RegistrationApplicationUpdateForm {
  admin_id: Some(Some(local_user_view.person.id)),
  deny_reason,
  };

  let registration_application =
- RegistrationApplication::update(&mut context.pool(), app_id, &app_form).await?;
+ RegistrationApplication::update(&mut conn.into(), app_id, &app_form).await?;

  // Update the local_user row
  let local_user_form = LocalUserUpdateForm {
- accepted_application: Some(data.approve),
+ accepted_application: Some(tx_data.approve),
  ..Default::default()
  };

  let approved_user_id = registration_application.local_user_id;
- LocalUser::update(&mut context.pool(), approved_user_id, &local_user_form).await?;
+ LocalUser::update(&mut conn.into(), approved_user_id, &local_user_form).await?;

+ Ok::<_, LemmyError>(approved_user_id)
+ }) as _
+ })
+ .await?;
+
  if data.approve {
  let approved_local_user_view = LocalUserView::read(&mut context.pool(), approved_user_id)
  .await?
  .ok_or(LemmyErrorType::CouldntFindLocalUser)?;

  if approved_local_user_view.local_user.email.is_some() {
+ // Email sending may fail, but this won't revert the application approval
  send_application_approved_email(&approved_local_user_view, context.settings()).await?;
  }
- }
+ };

  // Read the view
  let registration_application = RegistrationApplicationView::read(&mut context.pool(), app_id)
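The hunk above moves the two row updates (the registration application and its local_user) into a single database transaction, so they either both land or both roll back. A minimal sketch of that pattern, using plain tokio-postgres instead of Lemmy's diesel-async helpers for brevity; the connection string and the table/column names are illustrative assumptions, not taken from the diff:

use tokio_postgres::NoTls;

async fn approve_application(
  client: &mut tokio_postgres::Client,
  app_id: i32,
  admin_id: i32,
  approve: bool,
) -> Result<i32, tokio_postgres::Error> {
  // Start a transaction; dropping `tx` without commit() rolls everything back.
  let tx = client.transaction().await?;
  // Hypothetical schema: both updates succeed together or not at all.
  let row = tx
    .query_one(
      "UPDATE registration_application SET admin_id = $1 WHERE id = $2 RETURNING local_user_id",
      &[&admin_id, &app_id],
    )
    .await?;
  let local_user_id: i32 = row.get(0);
  tx.execute(
    "UPDATE local_user SET accepted_application = $1 WHERE id = $2",
    &[&approve, &local_user_id],
  )
  .await?;
  tx.commit().await?;
  Ok(local_user_id)
}

#[tokio::main]
async fn main() -> Result<(), tokio_postgres::Error> {
  // Assumed local test database; adjust as needed.
  let (mut client, connection) =
    tokio_postgres::connect("host=localhost user=postgres dbname=lemmy", NoTls).await?;
  tokio::spawn(connection);
  let user_id = approve_application(&mut client, 1, 1, true).await?;
  println!("approved local_user {user_id}");
  Ok(())
}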
Registration application list handler:

@@ -1,4 +1,5 @@
- use actix_web::web::{Data, Json, Query};
+ use activitypub_federation::config::Data;
+ use actix_web::web::{Json, Query};
  use lemmy_api_common::{
  context::LemmyContext,
  site::{ListRegistrationApplications, ListRegistrationApplicationsResponse},
Registration applications module:

@@ -1,4 +1,6 @@
  pub mod approve;
  pub mod get;
  pub mod list;
+ #[cfg(test)]
+ mod tests;
  pub mod unread_count;
crates/api/src/site/registration_applications/tests.rs (new file, 428 lines added):

use crate::site::registration_applications::{
approve::approve_registration_application,
list::list_registration_applications,
unread_count::get_unread_registration_application_count,
};
use activitypub_federation::config::Data;
use actix_web::web::{Json, Query};
use lemmy_api_common::{
context::LemmyContext,
site::{
ApproveRegistrationApplication,
EditSite,
GetUnreadRegistrationApplicationCountResponse,
ListRegistrationApplicationsResponse,
},
};
use lemmy_api_crud::site::update::update_site;
use lemmy_db_schema::{
newtypes::InstanceId,
source::{
instance::Instance,
local_site::{LocalSite, LocalSiteInsertForm},
local_site_rate_limit::{LocalSiteRateLimit, LocalSiteRateLimitInsertForm},
local_user::{LocalUser, LocalUserInsertForm, LocalUserUpdateForm},
person::{Person, PersonInsertForm},
registration_application::{RegistrationApplication, RegistrationApplicationInsertForm},
site::{Site, SiteInsertForm},
},
traits::Crud,
utils::DbPool,
RegistrationMode,
};
use lemmy_db_views::structs::LocalUserView;
use lemmy_utils::{error::LemmyResult, LemmyErrorType, CACHE_DURATION_API};
use serial_test::serial;

#[allow(clippy::unwrap_used)]
async fn create_test_site(context: &Data<LemmyContext>) -> LemmyResult<(Instance, LocalUserView)> {
let pool = &mut context.pool();

let inserted_instance = Instance::read_or_create(pool, "my_domain.tld".to_string())
.await
.expect("Create test instance");

let admin_person = Person::create(
pool,
&PersonInsertForm::test_form(inserted_instance.id, "admin"),
)
.await?;
LocalUser::create(
pool,
&LocalUserInsertForm::test_form_admin(admin_person.id),
vec![],
)
.await?;

let admin_local_user_view = LocalUserView::read_person(pool, admin_person.id)
.await?
.unwrap();

let site_form = SiteInsertForm::builder()
.name("test site".to_string())
.instance_id(inserted_instance.id)
.build();
let site = Site::create(pool, &site_form).await.unwrap();

// Create a local site, since this is necessary for determining if email verification is
// required
let local_site_form = LocalSiteInsertForm::builder()
.site_id(site.id)
.require_email_verification(Some(true))
.application_question(Some(".".to_string()))
.registration_mode(Some(RegistrationMode::RequireApplication))
.site_setup(Some(true))
.build();
let local_site = LocalSite::create(pool, &local_site_form).await.unwrap();

// Required to have a working local SiteView when updating the site to change email verification
// requirement or registration mode
let rate_limit_form = LocalSiteRateLimitInsertForm::builder()
.local_site_id(local_site.id)
.build();
LocalSiteRateLimit::create(pool, &rate_limit_form)
.await
.unwrap();

Ok((inserted_instance, admin_local_user_view))
}

async fn signup(
pool: &mut DbPool<'_>,
instance_id: InstanceId,
name: &str,
email: Option<&str>,
) -> LemmyResult<(LocalUser, RegistrationApplication)> {
let person_insert_form = PersonInsertForm::test_form(instance_id, name);
let person = Person::create(pool, &person_insert_form).await?;

let local_user_insert_form = match email {
Some(email) => LocalUserInsertForm {
email: Some(email.to_string()),
email_verified: Some(false),
..LocalUserInsertForm::test_form(person.id)
},
None => LocalUserInsertForm::test_form(person.id),
};

let local_user = LocalUser::create(pool, &local_user_insert_form, vec![]).await?;

let application_insert_form = RegistrationApplicationInsertForm {
local_user_id: local_user.id,
answer: "x".to_string(),
};
let application = RegistrationApplication::create(pool, &application_insert_form).await?;

Ok((local_user, application))
}

#[allow(clippy::unwrap_used)]
async fn get_application_statuses(
context: &Data<LemmyContext>,
admin: LocalUserView,
) -> LemmyResult<(
Json<GetUnreadRegistrationApplicationCountResponse>,
Json<ListRegistrationApplicationsResponse>,
Json<ListRegistrationApplicationsResponse>,
)> {
let application_count =
get_unread_registration_application_count(context.reset_request_count(), admin.clone()).await?;

let unread_applications = list_registration_applications(
Query::from_query("unread_only=true").unwrap(),
context.reset_request_count(),
admin.clone(),
)
.await?;

let all_applications = list_registration_applications(
Query::from_query("unread_only=false").unwrap(),
context.reset_request_count(),
admin,
)
.await?;

Ok((application_count, unread_applications, all_applications))
}

#[allow(clippy::indexing_slicing)]
#[allow(clippy::unwrap_used)]
#[tokio::test]
#[serial]
async fn test_application_approval() -> LemmyResult<()> {
let context = LemmyContext::init_test_context().await;
let pool = &mut context.pool();

let (instance, admin_local_user_view) = create_test_site(&context).await?;

// Non-unread counts unfortunately are duplicated due to different types (i64 vs usize)
let mut expected_total_applications = 0;
let mut expected_unread_applications = 0u8;

let (local_user_with_email, app_with_email) =
signup(pool, instance.id, "user_w_email", Some("lemmy@localhost")).await?;

let (application_count, unread_applications, all_applications) =
get_application_statuses(&context, admin_local_user_view.clone()).await?;

// When email verification is required and the email is not verified the application should not
// be visible to admins
assert_eq!(
application_count.registration_applications,
i64::from(expected_unread_applications),
);
assert_eq!(
unread_applications.registration_applications.len(),
usize::from(expected_unread_applications),
);
assert_eq!(
all_applications.registration_applications.len(),
expected_total_applications,
);

LocalUser::update(
pool,
local_user_with_email.id,
&LocalUserUpdateForm {
email_verified: Some(true),
..Default::default()
},
)
.await?;

expected_total_applications += 1;
expected_unread_applications += 1;

let (application_count, unread_applications, all_applications) =
get_application_statuses(&context, admin_local_user_view.clone()).await?;

// When email verification is required and the email is verified the application should be
// visible to admins
assert_eq!(
application_count.registration_applications,
i64::from(expected_unread_applications),
);
assert_eq!(
unread_applications.registration_applications.len(),
usize::from(expected_unread_applications),
);
assert!(
!unread_applications.registration_applications[0]
.creator_local_user
.accepted_application
);
assert_eq!(
all_applications.registration_applications.len(),
expected_total_applications,
);

let approval = approve_registration_application(
Json(ApproveRegistrationApplication {
id: app_with_email.id,
approve: true,
deny_reason: None,
}),
context.reset_request_count(),
admin_local_user_view.clone(),
)
.await;
// Approval should be processed up until email sending is attempted
assert!(approval.is_err_and(|e| e.error_type == LemmyErrorType::NoEmailSetup));

expected_unread_applications -= 1;

let (application_count, unread_applications, all_applications) =
get_application_statuses(&context, admin_local_user_view.clone()).await?;

// When the application is approved it should only be returned for unread queries
assert_eq!(
application_count.registration_applications,
i64::from(expected_unread_applications),
);
assert_eq!(
unread_applications.registration_applications.len(),
usize::from(expected_unread_applications),
);
assert_eq!(
all_applications.registration_applications.len(),
expected_total_applications,
);
assert!(
all_applications.registration_applications[0]
.creator_local_user
.accepted_application
);

let (_local_user, app_with_email_2) = signup(
pool,
instance.id,
"user_w_email_2",
Some("lemmy2@localhost"),
)
.await?;
let (application_count, unread_applications, all_applications) =
get_application_statuses(&context, admin_local_user_view.clone()).await?;

// Email not verified, so application still not visible
assert_eq!(
application_count.registration_applications,
i64::from(expected_unread_applications),
);
assert_eq!(
unread_applications.registration_applications.len(),
usize::from(expected_unread_applications),
);
assert_eq!(
all_applications.registration_applications.len(),
expected_total_applications,
);

update_site(
Json(EditSite {
require_email_verification: Some(false),
..Default::default()
}),
context.reset_request_count(),
admin_local_user_view.clone(),
)
.await?;

// TODO: There is probably a better way to ensure cache invalidation
tokio::time::sleep(CACHE_DURATION_API).await;

expected_total_applications += 1;
expected_unread_applications += 1;

let (application_count, unread_applications, all_applications) =
get_application_statuses(&context, admin_local_user_view.clone()).await?;

// After disabling email verification the application should now be visible
assert_eq!(
application_count.registration_applications,
i64::from(expected_unread_applications),
);
assert_eq!(
unread_applications.registration_applications.len(),
usize::from(expected_unread_applications),
);
assert_eq!(
all_applications.registration_applications.len(),
expected_total_applications,
);

approve_registration_application(
Json(ApproveRegistrationApplication {
id: app_with_email_2.id,
approve: false,
deny_reason: None,
}),
context.reset_request_count(),
admin_local_user_view.clone(),
)
.await?;

expected_unread_applications -= 1;

let (application_count, unread_applications, all_applications) =
get_application_statuses(&context, admin_local_user_view.clone()).await?;

// Denied applications should not be marked as unread
assert_eq!(
application_count.registration_applications,
i64::from(expected_unread_applications),
);
assert_eq!(
unread_applications.registration_applications.len(),
usize::from(expected_unread_applications),
);
assert_eq!(
all_applications.registration_applications.len(),
expected_total_applications,
);

signup(pool, instance.id, "user_wo_email", None).await?;

expected_total_applications += 1;
expected_unread_applications += 1;

let (application_count, unread_applications, all_applications) =
get_application_statuses(&context, admin_local_user_view.clone()).await?;

// New user without email should immediately be visible
assert_eq!(
application_count.registration_applications,
i64::from(expected_unread_applications),
);
assert_eq!(
unread_applications.registration_applications.len(),
usize::from(expected_unread_applications),
);
assert_eq!(
all_applications.registration_applications.len(),
expected_total_applications,
);

signup(pool, instance.id, "user_w_email_3", None).await?;

expected_total_applications += 1;
expected_unread_applications += 1;

let (application_count, unread_applications, all_applications) =
get_application_statuses(&context, admin_local_user_view.clone()).await?;

// New user with email should immediately be visible
assert_eq!(
application_count.registration_applications,
i64::from(expected_unread_applications),
);
assert_eq!(
unread_applications.registration_applications.len(),
usize::from(expected_unread_applications),
);
assert_eq!(
all_applications.registration_applications.len(),
expected_total_applications,
);

update_site(
Json(EditSite {
registration_mode: Some(RegistrationMode::Open),
..Default::default()
}),
context.reset_request_count(),
admin_local_user_view.clone(),
)
.await?;

// TODO: There is probably a better way to ensure cache invalidation
tokio::time::sleep(CACHE_DURATION_API).await;

let (application_count, unread_applications, all_applications) =
get_application_statuses(&context, admin_local_user_view.clone()).await?;

// TODO: At this time applications do not get approved when switching to open registration, so the
// numbers will not change. See https://github.com/LemmyNet/lemmy/issues/4969
// expected_application_count = 0;
// expected_unread_applications_len = 0;

// When applications are not required all previous applications should become approved but still
// visible
assert_eq!(
application_count.registration_applications,
i64::from(expected_unread_applications),
);
assert_eq!(
unread_applications.registration_applications.len(),
usize::from(expected_unread_applications),
);
assert_eq!(
all_applications.registration_applications.len(),
expected_total_applications,
);

LocalSite::delete(pool).await?;
// Instance deletion cascades cleanup of all created persons
Instance::delete(pool, instance.id).await?;

Ok(())
}
Registration application unread_count handler:

@@ -1,4 +1,5 @@
- use actix_web::web::{Data, Json};
+ use activitypub_federation::config::Data;
+ use actix_web::web::Json;
  use lemmy_api_common::{
  context::LemmyContext,
  site::GetUnreadRegistrationApplicationCountResponse,
Crate Cargo.toml:

@@ -34,7 +34,6 @@ full = [
  "reqwest",
  "actix-web",
  "futures",
- "once_cell",
  "jsonwebtoken",
  "mime",
  ]

@@ -61,7 +60,6 @@ reqwest = { workspace = true, optional = true }
  ts-rs = { workspace = true, optional = true }
  moka.workspace = true
  anyhow.workspace = true
- once_cell = { workspace = true, optional = true }
  actix-web = { workspace = true, optional = true }
  enum-map = { workspace = true }
  urlencoding = { workspace = true }
send_local_notifs:

@@ -100,13 +100,18 @@ pub async fn send_local_notifs(
  person: &Person,
  do_send_email: bool,
  context: &LemmyContext,
+ local_user_view: Option<&LocalUserView>,
  ) -> LemmyResult<Vec<LocalUserId>> {
  let mut recipient_ids = Vec::new();
  let inbox_link = format!("{}/inbox", context.settings().get_protocol_and_hostname());

  // let person = my_local_user.person;
  // Read the comment view to get extra info
- let comment_view = CommentView::read(&mut context.pool(), comment_id, None)
+ let comment_view = CommentView::read(
+ &mut context.pool(),
+ comment_id,
+ local_user_view.map(|view| &view.local_user),
+ )
  .await?
  .ok_or(LemmyErrorType::CouldntFindComment)?;
  let comment = comment_view.comment;
fetch_link_metadata / request helpers:

@@ -8,6 +8,7 @@ use crate::{
  use activitypub_federation::config::Data;
  use chrono::{DateTime, Utc};
  use encoding_rs::{Encoding, UTF_8};
+ use futures::StreamExt;
  use lemmy_db_schema::{
  newtypes::DbUrl,
  source::{

@@ -23,7 +24,12 @@ use lemmy_utils::{
  VERSION,
  };
  use mime::Mime;
- use reqwest::{header::CONTENT_TYPE, Client, ClientBuilder};
+ use reqwest::{
+ header::{CONTENT_TYPE, RANGE},
+ Client,
+ ClientBuilder,
+ Response,
+ };
  use reqwest_middleware::ClientWithMiddleware;
  use serde::{Deserialize, Serialize};
  use tracing::info;

@@ -44,7 +50,17 @@ pub fn client_builder(settings: &Settings) -> ClientBuilder {
  #[tracing::instrument(skip_all)]
  pub async fn fetch_link_metadata(url: &Url, context: &LemmyContext) -> LemmyResult<LinkMetadata> {
  info!("Fetching site metadata for url: {}", url);
- let response = context.client().get(url.as_str()).send().await?;
+ // We only fetch the first 64kB of data in order to not waste bandwidth especially for large
+ // binary files
+ let bytes_to_fetch = 64 * 1024;
+ let response = context
+ .client()
+ .get(url.as_str())
+ // we only need the first chunk of data. Note that we do not check for Accept-Range so the
+ // server may ignore this and still respond with the full response
+ .header(RANGE, format!("bytes=0-{}", bytes_to_fetch - 1)) /* -1 because inclusive */
+ .send()
+ .await?;

  let content_type: Option<Mime> = response
  .headers()

@@ -52,19 +68,57 @@ pub async fn fetch_link_metadata(url: &Url, context: &LemmyContext) -> LemmyResult<LinkMetadata> {
  .and_then(|h| h.to_str().ok())
  .and_then(|h| h.parse().ok());

+ let opengraph_data = {
+ // if the content type is not text/html, we don't need to parse it
+ let is_html = content_type
+ .as_ref()
+ .map(|c| {
+ (c.type_() == mime::TEXT && c.subtype() == mime::HTML)
+ ||
+ // application/xhtml+xml is a subset of HTML
+ (c.type_() == mime::APPLICATION && c.subtype() == "xhtml")
+ })
+ .unwrap_or(false);
+ if !is_html {
+ Default::default()
+ } else {
  // Can't use .text() here, because it only checks the content header, not the actual bytes
  // https://github.com/LemmyNet/lemmy/issues/1964
- let html_bytes = response.bytes().await.map_err(LemmyError::from)?.to_vec();
- let opengraph_data = extract_opengraph_data(&html_bytes, url)
+ // So we want to do deep inspection of the actually returned bytes but need to be careful not
+ // spend too much time parsing binary data as HTML
+
+ // only take first bytes regardless of how many bytes the server returns
+ let html_bytes = collect_bytes_until_limit(response, bytes_to_fetch).await?;
+ extract_opengraph_data(&html_bytes, url)
  .map_err(|e| info!("{e}"))
- .unwrap_or_default();
+ .unwrap_or_default()
+ }
+ };
  Ok(LinkMetadata {
  opengraph_data,
  content_type: content_type.map(|c| c.to_string()),
  })
  }

+ async fn collect_bytes_until_limit(
+ response: Response,
+ requested_bytes: usize,
+ ) -> Result<Vec<u8>, LemmyError> {
+ let mut stream = response.bytes_stream();
+ let mut bytes = Vec::with_capacity(requested_bytes);
+ while let Some(chunk) = stream.next().await {
+ let chunk = chunk.map_err(LemmyError::from)?;
+ // we may go over the requested size here but the important part is we don't keep aggregating
+ // more chunks than needed
+ bytes.extend_from_slice(&chunk);
+ if bytes.len() >= requested_bytes {
+ bytes.truncate(requested_bytes);
+ break;
+ }
+ }
+ Ok(bytes)
+ }

  /// Generates and saves a post thumbnail and metadata.
  ///
  /// Takes a callback to generate a send activity task, so that post can be federated with metadata.
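The new collect_bytes_until_limit helper caps how much of a response body gets buffered. A minimal, self-contained sketch of the same cap-and-truncate pattern, driven here by an in-memory stream of chunks instead of a real reqwest::Response; the function name and chunk sizes are illustrative, not taken from the diff:

use futures::{stream, Stream, StreamExt};

// Aggregate chunks from a stream until `limit` bytes are collected, then truncate.
async fn collect_until_limit(
  mut chunks: impl Stream<Item = Vec<u8>> + Unpin,
  limit: usize,
) -> Vec<u8> {
  let mut bytes = Vec::with_capacity(limit);
  while let Some(chunk) = chunks.next().await {
    // A single chunk may overshoot the limit, but no further chunks are pulled afterwards.
    bytes.extend_from_slice(&chunk);
    if bytes.len() >= limit {
      bytes.truncate(limit);
      break;
    }
  }
  bytes
}

#[tokio::main]
async fn main() {
  // Three 40 kB chunks "arrive", but only the first 64 kB are kept.
  let chunks = stream::iter(vec![vec![0u8; 40_000], vec![1u8; 40_000], vec![2u8; 40_000]]);
  let bytes = collect_until_limit(chunks, 64 * 1024).await;
  assert_eq!(bytes.len(), 64 * 1024);
}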
@@ -13,7 +13,7 @@ use lemmy_db_schema::{
 };
 use lemmy_db_views::structs::PrivateMessageView;
 use lemmy_utils::error::LemmyResult;
-use once_cell::sync::{Lazy, OnceCell};
+use std::sync::{LazyLock, OnceLock};
 use tokio::{
   sync::{
     mpsc,

@@ -28,7 +28,7 @@ type MatchOutgoingActivitiesBoxed =
   Box<for<'a> fn(SendActivityData, &'a Data<LemmyContext>) -> BoxFuture<'a, LemmyResult<()>>>;

 /// This static is necessary so that the api_common crates don't need to depend on lemmy_apub
-pub static MATCH_OUTGOING_ACTIVITIES: OnceCell<MatchOutgoingActivitiesBoxed> = OnceCell::new();
+pub static MATCH_OUTGOING_ACTIVITIES: OnceLock<MatchOutgoingActivitiesBoxed> = OnceLock::new();

 #[derive(Debug)]
 pub enum SendActivityData {

@@ -101,7 +101,7 @@ pub enum SendActivityData {

 // TODO: instead of static, move this into LemmyContext. make sure that stopping the process with
 // ctrl+c still works.
-static ACTIVITY_CHANNEL: Lazy<ActivityChannel> = Lazy::new(|| {
+static ACTIVITY_CHANNEL: LazyLock<ActivityChannel> = LazyLock::new(|| {
   let (sender, receiver) = mpsc::unbounded_channel();
   let weak_sender = sender.downgrade();
   ActivityChannel {
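
The hunks above (and many below) swap once_cell::sync::{Lazy, OnceCell} for the standard library's std::sync::{LazyLock, OnceLock}, which were stabilized in Rust 1.80 and make the once_cell dependency unnecessary. A minimal sketch of the two std types, with nothing Lemmy-specific:

use std::sync::{LazyLock, OnceLock};

// LazyLock: initialized on first access, like once_cell::sync::Lazy.
static GREETING: LazyLock<String> = LazyLock::new(|| format!("hello from {}", "LazyLock"));

// OnceLock: set explicitly once, cheap to read afterwards, like once_cell::sync::OnceCell.
static CONFIG: OnceLock<u32> = OnceLock::new();

fn main() {
  println!("{}", *GREETING);
  CONFIG.set(42).expect("CONFIG was already set");
  assert_eq!(CONFIG.get(), Some(&42));
}
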
@@ -53,10 +53,9 @@ use lemmy_utils::{
   CACHE_DURATION_FEDERATION,
 };
 use moka::future::Cache;
-use once_cell::sync::Lazy;
 use regex::{escape, Regex, RegexSet};
 use rosetta_i18n::{Language, LanguageId};
-use std::collections::HashSet;
+use std::{collections::HashSet, sync::LazyLock};
 use tracing::warn;
 use url::{ParseError, Url};
 use urlencoding::encode;

@@ -546,7 +545,7 @@ pub fn local_site_opt_to_sensitive(local_site: &Option<LocalSite>) -> bool {
 }

 pub async fn get_url_blocklist(context: &LemmyContext) -> LemmyResult<RegexSet> {
-  static URL_BLOCKLIST: Lazy<Cache<(), RegexSet>> = Lazy::new(|| {
+  static URL_BLOCKLIST: LazyLock<Cache<(), RegexSet>> = LazyLock::new(|| {
     Cache::builder()
       .max_capacity(1)
       .time_to_live(CACHE_DURATION_FEDERATION)
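
The URL blocklist above is kept in a single-entry moka cache with a time-to-live, so the regex set is rebuilt at most once per cache period. A hedged sketch of that pattern, assuming the moka and tokio crates; the loader and the 60-second TTL are placeholders:

use moka::future::Cache;
use std::time::Duration;

async fn load_blocklist() -> Vec<String> {
  // Stand-in for an expensive lookup (e.g. a database query).
  vec!["example.invalid".to_string()]
}

#[tokio::main]
async fn main() {
  // max_capacity(1) plus a unit key () means there is exactly one cached value,
  // refreshed lazily after the time-to-live expires.
  let cache: Cache<(), Vec<String>> = Cache::builder()
    .max_capacity(1)
    .time_to_live(Duration::from_secs(60))
    .build();

  let blocklist = cache.get_with((), async { load_blocklist().await }).await;
  println!("{} blocked domains", blocklist.len());
}
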
@@ -26,7 +26,6 @@ url = { workspace = true }
 futures.workspace = true
 uuid = { workspace = true }
 moka.workspace = true
-once_cell.workspace = true
 anyhow.workspace = true
 webmention = "0.5.0"
 accept-language = "3.1.0"

@@ -134,6 +134,7 @@ pub async fn create_comment(
     &local_user_view.person,
     true,
     &context,
+    Some(&local_user_view),
   )
   .await?;

@@ -59,8 +59,15 @@ pub async fn delete_comment(
     .await
     .with_lemmy_type(LemmyErrorType::CouldntUpdateComment)?;

-  let recipient_ids =
-    send_local_notifs(vec![], comment_id, &local_user_view.person, false, &context).await?;
+  let recipient_ids = send_local_notifs(
+    vec![],
+    comment_id,
+    &local_user_view.person,
+    false,
+    &context,
+    Some(&local_user_view),
+  )
+  .await?;
   let updated_comment_id = updated_comment.id;

   ActivityChannel::submit_activity(

@@ -81,8 +81,15 @@ pub async fn remove_comment(
   };
   ModRemoveComment::create(&mut context.pool(), &form).await?;

-  let recipient_ids =
-    send_local_notifs(vec![], comment_id, &local_user_view.person, false, &context).await?;
+  let recipient_ids = send_local_notifs(
+    vec![],
+    comment_id,
+    &local_user_view.person,
+    false,
+    &context,
+    Some(&local_user_view),
+  )
+  .await?;
   let updated_comment_id = updated_comment.id;

   ActivityChannel::submit_activity(

@@ -91,6 +91,7 @@ pub async fn update_comment(
     &local_user_view.person,
     false,
     &context,
+    Some(&local_user_view),
   )
   .await?;

@@ -35,11 +35,11 @@ use lemmy_utils::{
   utils::{
     slurs::check_slurs,
     validation::{
-      check_url_scheme,
       is_url_blocked,
       is_valid_alt_text_field,
       is_valid_body_field,
       is_valid_post_title,
+      is_valid_url,
     },
   },
 };

@@ -69,11 +69,11 @@ pub async fn create_post(

   if let Some(url) = &url {
     is_url_blocked(url, &url_blocklist)?;
-    check_url_scheme(url)?;
+    is_valid_url(url)?;
   }

   if let Some(custom_thumbnail) = &custom_thumbnail {
-    check_url_scheme(custom_thumbnail)?;
+    is_valid_url(custom_thumbnail)?;
   }

   if let Some(alt_text) = &data.alt_text {

@@ -28,11 +28,11 @@ use lemmy_utils::{
   utils::{
     slurs::check_slurs,
     validation::{
-      check_url_scheme,
       is_url_blocked,
       is_valid_alt_text_field,
       is_valid_body_field,
       is_valid_post_title,
+      is_valid_url,
     },
   },
 };

@@ -77,11 +77,11 @@ pub async fn update_post(

   if let Some(Some(url)) = &url {
     is_url_blocked(url, &url_blocklist)?;
-    check_url_scheme(url)?;
+    is_valid_url(url)?;
   }

   if let Some(Some(custom_thumbnail)) = &custom_thumbnail {
-    check_url_scheme(custom_thumbnail)?;
+    is_valid_url(custom_thumbnail)?;
   }

   let post_id = data.post_id;
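
The rename from check_url_scheme to is_valid_url reflects that the validator now checks more than the scheme: a later hunk in validation.rs also enforces a maximum URL length. A hedged sketch of such a combined check with the url crate; the allow-list, the 2000-character limit and the error type below are placeholders, not Lemmy's definitions.

use url::Url;

const ALLOWED_SCHEMES: [&str; 3] = ["http", "https", "magnet"];
const URL_MAX_LENGTH: usize = 2000;

#[derive(Debug)]
enum UrlError {
  InvalidScheme,
  TooLong,
}

fn validate_url(url: &Url) -> Result<(), UrlError> {
  // Reject anything that is not an allowed scheme (e.g. javascript: or ftp:).
  if !ALLOWED_SCHEMES.contains(&url.scheme()) {
    return Err(UrlError::InvalidScheme);
  }
  // Reject URLs longer than the storage column allows.
  if url.as_str().len() > URL_MAX_LENGTH {
    return Err(UrlError::TooLong);
  }
  Ok(())
}

fn main() {
  let ok = Url::parse("https://example.com").unwrap();
  let bad = Url::parse("javascript:void(0)").unwrap();
  assert!(validate_url(&ok).is_ok());
  assert!(matches!(validate_url(&bad), Err(UrlError::InvalidScheme)));
  println!("url validation sketch passed");
}
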
@@ -1,6 +1,6 @@
 use crate::site::{application_question_check, site_default_post_listing_type_check};
-use activitypub_federation::http_signatures::generate_actor_keypair;
-use actix_web::web::{Data, Json};
+use activitypub_federation::{config::Data, http_signatures::generate_actor_keypair};
+use actix_web::web::Json;
 use lemmy_api_common::{
   context::LemmyContext,
   site::{CreateSite, SiteResponse},

@@ -24,14 +24,14 @@ use lemmy_utils::{
   VERSION,
 };
 use moka::future::Cache;
-use once_cell::sync::Lazy;
+use std::sync::LazyLock;

 #[tracing::instrument(skip(context))]
 pub async fn get_site(
   local_user_view: Option<LocalUserView>,
   context: Data<LemmyContext>,
 ) -> LemmyResult<Json<GetSiteResponse>> {
-  static CACHE: Lazy<Cache<(), GetSiteResponse>> = Lazy::new(|| {
+  static CACHE: LazyLock<Cache<(), GetSiteResponse>> = LazyLock::new(|| {
     Cache::builder()
       .max_capacity(1)
       .time_to_live(CACHE_DURATION_API)

@@ -40,7 +40,6 @@ uuid = { workspace = true }
 async-trait = { workspace = true }
 anyhow = { workspace = true }
 reqwest = { workspace = true }
-once_cell = { workspace = true }
 moka.workspace = true
 serde_with.workspace = true
 html2md = "0.2.14"

@@ -38,7 +38,6 @@ pub enum SiteOrCommunity {
   Site(ApubSite),
   Community(ApubCommunity),
 }

 #[derive(Deserialize)]
 #[serde(untagged)]
 pub enum InstanceOrGroup {

@@ -74,12 +73,18 @@ impl Object for SiteOrCommunity {
     })
   }

-  async fn delete(self, _data: &Data<Self::DataType>) -> LemmyResult<()> {
-    unimplemented!()
+  async fn delete(self, data: &Data<Self::DataType>) -> LemmyResult<()> {
+    match self {
+      SiteOrCommunity::Site(i) => i.delete(data).await,
+      SiteOrCommunity::Community(c) => c.delete(data).await,
+    }
   }

-  async fn into_json(self, _data: &Data<Self::DataType>) -> LemmyResult<Self::Kind> {
-    unimplemented!()
+  async fn into_json(self, data: &Data<Self::DataType>) -> LemmyResult<Self::Kind> {
+    Ok(match self {
+      SiteOrCommunity::Site(i) => InstanceOrGroup::Instance(i.into_json(data).await?),
+      SiteOrCommunity::Community(c) => InstanceOrGroup::Group(c.into_json(data).await?),
+    })
   }

   #[tracing::instrument(skip_all)]
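
This hunk and several that follow replace unimplemented!() stubs on wrapper enums (SiteOrCommunity, PostOrComment, UserOrCommunity and friends) with match-based delegation to the wrapped variants. A minimal sketch of that delegation pattern; the trait and types here are invented for illustration only.

trait IntoJson {
  fn into_json(self) -> String;
}

struct Site(String);
struct Community(String);

impl IntoJson for Site {
  fn into_json(self) -> String {
    format!("{{\"instance\":\"{}\"}}", self.0)
  }
}

impl IntoJson for Community {
  fn into_json(self) -> String {
    format!("{{\"group\":\"{}\"}}", self.0)
  }
}

enum SiteOrCommunity {
  Site(Site),
  Community(Community),
}

impl IntoJson for SiteOrCommunity {
  // Each variant simply forwards to its inner type, so callers never hit a panic path.
  fn into_json(self) -> String {
    match self {
      SiteOrCommunity::Site(s) => s.into_json(),
      SiteOrCommunity::Community(c) => c.into_json(),
    }
  }
}

fn main() {
  let wrapped = SiteOrCommunity::Community(Community("rust".into()));
  println!("{}", wrapped.into_json());
}
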
@@ -179,7 +179,7 @@ impl ActivityHandler for CreateOrUpdateNote {
     // TODO: for compatibility with other projects, it would be much better to read this from cc or
     // tags
     let mentions = scrape_text_for_mentions(&comment.content);
-    send_local_notifs(mentions, comment.id, &actor, do_send_email, context).await?;
+    send_local_notifs(mentions, comment.id, &actor, do_send_email, context, None).await?;
     Ok(())
   }
 }

@@ -175,8 +175,9 @@ pub(in crate::activities) async fn receive_remove_action(
       )
       .await?;
     }
-    DeletableObjects::PrivateMessage(_) => unimplemented!(),
-    DeletableObjects::Person { .. } => unimplemented!(),
+    // TODO these need to be implemented yet, for now, return errors
+    DeletableObjects::PrivateMessage(_) => Err(LemmyErrorType::CouldntFindPrivateMessage)?,
+    DeletableObjects::Person(_) => Err(LemmyErrorType::CouldntFindPerson)?,
   }
   Ok(())
 }

@@ -155,8 +155,9 @@ impl UndoDelete {
       )
       .await?;
     }
-    DeletableObjects::PrivateMessage(_) => unimplemented!(),
-    DeletableObjects::Person { .. } => unimplemented!(),
+    // TODO these need to be implemented yet, for now, return errors
+    DeletableObjects::PrivateMessage(_) => Err(LemmyErrorType::CouldntFindPrivateMessage)?,
+    DeletableObjects::Person(_) => Err(LemmyErrorType::CouldntFindPerson)?,
   }
   Ok(())
 }
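
Swapping unimplemented!() for typed LemmyErrorType returns turns a panic path that remote activities could trigger into a recoverable error. A small sketch of the difference; the enum and error type below are stand-ins.

#[derive(Debug)]
enum DeletableObject {
  Post(u64),
  PrivateMessage(u64),
}

#[derive(Debug)]
enum AppError {
  PrivateMessageNotFound,
}

fn handle_remove(object: DeletableObject) -> Result<(), AppError> {
  match object {
    DeletableObject::Post(id) => {
      println!("removing post {id}");
      Ok(())
    }
    // Returning an error keeps a federation peer from crashing this worker,
    // whereas unimplemented!() would panic the task.
    DeletableObject::PrivateMessage(_) => Err(AppError::PrivateMessageNotFound),
  }
}

fn main() {
  assert!(handle_remove(DeletableObject::Post(1)).is_ok());
  assert!(handle_remove(DeletableObject::PrivateMessage(2)).is_err());
}
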
@@ -26,7 +26,7 @@ use crate::{
 };
 use activitypub_federation::{config::Data, traits::ActivityHandler};
 use lemmy_api_common::context::LemmyContext;
-use lemmy_utils::error::LemmyResult;
+use lemmy_utils::{error::LemmyResult, LemmyErrorType};
 use serde::{Deserialize, Serialize};
 use url::Url;

@@ -117,7 +117,7 @@ impl InCommunity for AnnouncableActivities {
       CollectionRemove(a) => a.community(context).await,
       LockPost(a) => a.community(context).await,
       UndoLockPost(a) => a.community(context).await,
-      Page(_) => unimplemented!(),
+      Page(_) => Err(LemmyErrorType::CouldntFindPost.into()),
     }
   }
 }

@@ -61,8 +61,11 @@ impl Object for PostOrComment {
     }
   }

-  async fn into_json(self, _data: &Data<Self::DataType>) -> LemmyResult<Self::Kind> {
-    unimplemented!()
+  async fn into_json(self, data: &Data<Self::DataType>) -> LemmyResult<Self::Kind> {
+    Ok(match self {
+      PostOrComment::Post(p) => PageOrNote::Page(Box::new(p.into_json(data).await?)),
+      PostOrComment::Comment(c) => PageOrNote::Note(c.into_json(data).await?),
+    })
   }

   #[tracing::instrument(skip_all)]

@@ -118,8 +118,17 @@ impl Object for SearchableObjects {
     }
   }

-  async fn into_json(self, _data: &Data<Self::DataType>) -> LemmyResult<Self::Kind> {
-    unimplemented!()
+  async fn into_json(self, data: &Data<Self::DataType>) -> LemmyResult<Self::Kind> {
+    Ok(match self {
+      SearchableObjects::Post(p) => SearchableKinds::Page(Box::new(p.into_json(data).await?)),
+      SearchableObjects::Comment(c) => SearchableKinds::Note(c.into_json(data).await?),
+      SearchableObjects::PersonOrCommunity(pc) => {
+        SearchableKinds::PersonOrGroup(Box::new(match *pc {
+          UserOrCommunity::User(p) => PersonOrGroup::Person(p.into_json(data).await?),
+          UserOrCommunity::Community(c) => PersonOrGroup::Group(c.into_json(data).await?),
+        }))
+      }
+    })
   }

   #[tracing::instrument(skip_all)]

@@ -1,6 +1,6 @@
 use crate::{
   fetcher::user_or_community::{PersonOrGroup, UserOrCommunity},
-  objects::instance::ApubSite,
+  objects::{community::ApubCommunity, instance::ApubSite, person::ApubPerson},
   protocol::objects::instance::Instance,
 };
 use activitypub_federation::{

@@ -41,11 +41,14 @@ impl Object for SiteOrCommunityOrUser {
   }

   #[tracing::instrument(skip_all)]
-  async fn read_from_id(
-    _object_id: Url,
-    _data: &Data<Self::DataType>,
-  ) -> LemmyResult<Option<Self>> {
-    unimplemented!();
+  async fn read_from_id(object_id: Url, data: &Data<Self::DataType>) -> LemmyResult<Option<Self>> {
+    let site = ApubSite::read_from_id(object_id.clone(), data).await?;
+    Ok(match site {
+      Some(o) => Some(SiteOrCommunityOrUser::Site(o)),
+      None => UserOrCommunity::read_from_id(object_id, data)
+        .await?
+        .map(SiteOrCommunityOrUser::UserOrCommunity),
+    })
   }

   #[tracing::instrument(skip_all)]

@@ -56,8 +59,13 @@ impl Object for SiteOrCommunityOrUser {
     }
   }

-  async fn into_json(self, _data: &Data<Self::DataType>) -> LemmyResult<Self::Kind> {
-    unimplemented!()
+  async fn into_json(self, data: &Data<Self::DataType>) -> LemmyResult<Self::Kind> {
+    Ok(match self {
+      SiteOrCommunityOrUser::Site(p) => SiteOrPersonOrGroup::Instance(p.into_json(data).await?),
+      SiteOrCommunityOrUser::UserOrCommunity(p) => {
+        SiteOrPersonOrGroup::PersonOrGroup(p.into_json(data).await?)
+      }
+    })
   }

   #[tracing::instrument(skip_all)]

@@ -75,8 +83,18 @@ impl Object for SiteOrCommunityOrUser {
   }

   #[tracing::instrument(skip_all)]
-  async fn from_json(_apub: Self::Kind, _data: &Data<Self::DataType>) -> LemmyResult<Self> {
-    unimplemented!();
+  async fn from_json(apub: Self::Kind, data: &Data<Self::DataType>) -> LemmyResult<Self> {
+    Ok(match apub {
+      SiteOrPersonOrGroup::Instance(a) => {
+        SiteOrCommunityOrUser::Site(ApubSite::from_json(a, data).await?)
+      }
+      SiteOrPersonOrGroup::PersonOrGroup(a) => SiteOrCommunityOrUser::UserOrCommunity(match a {
+        PersonOrGroup::Person(p) => UserOrCommunity::User(ApubPerson::from_json(p, data).await?),
+        PersonOrGroup::Group(g) => {
+          UserOrCommunity::Community(ApubCommunity::from_json(g, data).await?)
+        }
+      }),
+    })
   }
 }
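
read_from_id above resolves an ID by trying the site object first and falling back to a user-or-community lookup only on a miss. A small sketch of that "try A, then B" async fallback shape; the two lookups here are placeholders rather than real Lemmy functions.

#[derive(Debug)]
enum Resolved {
  Site(String),
  User(String),
}

async fn find_site(id: &str) -> Option<String> {
  (id == "site").then(|| "the-site".to_string())
}

async fn find_user(id: &str) -> Option<String> {
  (id == "user").then(|| "the-user".to_string())
}

async fn resolve(id: &str) -> Option<Resolved> {
  // First lookup wins; only on a miss do we pay for the second query.
  match find_site(id).await {
    Some(site) => Some(Resolved::Site(site)),
    None => find_user(id).await.map(Resolved::User),
  }
}

#[tokio::main]
async fn main() {
  println!("{:?}", resolve("site").await);
  println!("{:?}", resolve("user").await);
  println!("{:?}", resolve("missing").await);
}
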
|
@ -103,6 +121,9 @@ impl Actor for SiteOrCommunityOrUser {
|
||||||
}
|
}
|
||||||
|
|
||||||
fn inbox(&self) -> Url {
|
fn inbox(&self) -> Url {
|
||||||
unimplemented!()
|
match self {
|
||||||
|
SiteOrCommunityOrUser::Site(u) => u.inbox(),
|
||||||
|
SiteOrCommunityOrUser::UserOrCommunity(c) => c.inbox(),
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -65,8 +65,11 @@ impl Object for UserOrCommunity {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn into_json(self, _data: &Data<Self::DataType>) -> LemmyResult<Self::Kind> {
|
async fn into_json(self, data: &Data<Self::DataType>) -> LemmyResult<Self::Kind> {
|
||||||
unimplemented!()
|
Ok(match self {
|
||||||
|
UserOrCommunity::User(p) => PersonOrGroup::Person(p.into_json(data).await?),
|
||||||
|
UserOrCommunity::Community(p) => PersonOrGroup::Group(p.into_json(data).await?),
|
||||||
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
#[tracing::instrument(skip_all)]
|
#[tracing::instrument(skip_all)]
|
||||||
|
@ -115,7 +118,10 @@ impl Actor for UserOrCommunity {
|
||||||
}
|
}
|
||||||
|
|
||||||
fn inbox(&self) -> Url {
|
fn inbox(&self) -> Url {
|
||||||
unimplemented!()
|
match self {
|
||||||
|
UserOrCommunity::User(p) => p.inbox(),
|
||||||
|
UserOrCommunity::Community(p) => p.inbox(),
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -14,9 +14,8 @@ use lemmy_utils::{
|
||||||
CACHE_DURATION_FEDERATION,
|
CACHE_DURATION_FEDERATION,
|
||||||
};
|
};
|
||||||
use moka::future::Cache;
|
use moka::future::Cache;
|
||||||
use once_cell::sync::Lazy;
|
|
||||||
use serde_json::Value;
|
use serde_json::Value;
|
||||||
use std::sync::Arc;
|
use std::sync::{Arc, LazyLock};
|
||||||
use url::Url;
|
use url::Url;
|
||||||
|
|
||||||
pub mod activities;
|
pub mod activities;
|
||||||
|
@ -36,7 +35,7 @@ pub const FEDERATION_HTTP_FETCH_LIMIT: u32 = 100;
|
||||||
/// Only include a basic context to save space and bandwidth. The main context is hosted statically
|
/// Only include a basic context to save space and bandwidth. The main context is hosted statically
|
||||||
/// on join-lemmy.org. Include activitystreams explicitly for better compat, but this could
|
/// on join-lemmy.org. Include activitystreams explicitly for better compat, but this could
|
||||||
/// theoretically also be moved.
|
/// theoretically also be moved.
|
||||||
pub static FEDERATION_CONTEXT: Lazy<Value> = Lazy::new(|| {
|
pub static FEDERATION_CONTEXT: LazyLock<Value> = LazyLock::new(|| {
|
||||||
Value::Array(vec![
|
Value::Array(vec![
|
||||||
Value::String("https://join-lemmy.org/context.json".to_string()),
|
Value::String("https://join-lemmy.org/context.json".to_string()),
|
||||||
Value::String("https://www.w3.org/ns/activitystreams".to_string()),
|
Value::String("https://www.w3.org/ns/activitystreams".to_string()),
|
||||||
|
@ -129,7 +128,7 @@ pub(crate) async fn local_site_data_cached(
|
||||||
// multiple times. This causes a huge number of database reads if we hit the db directly. So we
|
// multiple times. This causes a huge number of database reads if we hit the db directly. So we
|
||||||
// cache these values for a short time, which will already make a huge difference and ensures that
|
// cache these values for a short time, which will already make a huge difference and ensures that
|
||||||
// changes take effect quickly.
|
// changes take effect quickly.
|
||||||
static CACHE: Lazy<Cache<(), Arc<LocalSiteData>>> = Lazy::new(|| {
|
static CACHE: LazyLock<Cache<(), Arc<LocalSiteData>>> = LazyLock::new(|| {
|
||||||
Cache::builder()
|
Cache::builder()
|
||||||
.max_capacity(1)
|
.max_capacity(1)
|
||||||
.time_to_live(CACHE_DURATION_FEDERATION)
|
.time_to_live(CACHE_DURATION_FEDERATION)
|
||||||
|
|
|
@ -88,7 +88,7 @@ impl Object for ApubSite {
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn delete(self, _data: &Data<Self::DataType>) -> LemmyResult<()> {
|
async fn delete(self, _data: &Data<Self::DataType>) -> LemmyResult<()> {
|
||||||
unimplemented!()
|
Err(LemmyErrorType::CantDeleteSite.into())
|
||||||
}
|
}
|
||||||
|
|
||||||
#[tracing::instrument(skip_all)]
|
#[tracing::instrument(skip_all)]
|
||||||
|
|
|
@ -41,7 +41,11 @@ use lemmy_db_views_actor::structs::CommunityModeratorView;
|
||||||
use lemmy_utils::{
|
use lemmy_utils::{
|
||||||
error::{LemmyError, LemmyErrorType, LemmyResult},
|
error::{LemmyError, LemmyErrorType, LemmyResult},
|
||||||
spawn_try_task,
|
spawn_try_task,
|
||||||
utils::{markdown::markdown_to_html, slurs::check_slurs_opt, validation::check_url_scheme},
|
utils::{
|
||||||
|
markdown::markdown_to_html,
|
||||||
|
slurs::check_slurs_opt,
|
||||||
|
validation::{is_url_blocked, is_valid_url},
|
||||||
|
},
|
||||||
};
|
};
|
||||||
use std::ops::Deref;
|
use std::ops::Deref;
|
||||||
use stringreader::StringReader;
|
use stringreader::StringReader;
|
||||||
|
@ -180,8 +184,15 @@ impl Object for ApubPost {
|
||||||
let creator = page.creator()?.dereference(context).await?;
|
let creator = page.creator()?.dereference(context).await?;
|
||||||
let community = page.community(context).await?;
|
let community = page.community(context).await?;
|
||||||
if community.posting_restricted_to_mods {
|
if community.posting_restricted_to_mods {
|
||||||
CommunityModeratorView::is_community_moderator(&mut context.pool(), community.id, creator.id)
|
let is_mod = CommunityModeratorView::is_community_moderator(
|
||||||
|
&mut context.pool(),
|
||||||
|
community.id,
|
||||||
|
creator.id,
|
||||||
|
)
|
||||||
.await?;
|
.await?;
|
||||||
|
if !is_mod {
|
||||||
|
Err(LemmyErrorType::OnlyModsCanPostInCommunity)?
|
||||||
|
}
|
||||||
}
|
}
|
||||||
let mut name = page
|
let mut name = page
|
||||||
.name
|
.name
|
||||||
|
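
The hunk above makes the moderator check explicit: the boolean result of is_community_moderator is bound to a variable and a descriptive OnlyModsCanPostInCommunity error is returned when it is false. A hedged sketch of that guard shape with stand-in types; the real check queries the database through CommunityModeratorView.

#[derive(Debug)]
enum PostError {
  OnlyModsCanPost,
}

struct Community {
  posting_restricted_to_mods: bool,
  moderators: Vec<u64>,
}

fn check_can_post(community: &Community, person_id: u64) -> Result<(), PostError> {
  if community.posting_restricted_to_mods {
    // Resolve the membership question to a plain bool, then map "false" to a
    // descriptive error instead of letting the lookup itself fail.
    let is_mod = community.moderators.contains(&person_id);
    if !is_mod {
      return Err(PostError::OnlyModsCanPost);
    }
  }
  Ok(())
}

fn main() {
  let community = Community {
    posting_restricted_to_mods: true,
    moderators: vec![1],
  };
  assert!(check_can_post(&community, 1).is_ok());
  assert!(check_can_post(&community, 2).is_err());
}
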
@@ -220,14 +231,16 @@ impl Object for ApubPost {
       None
     };

+    let url_blocklist = get_url_blocklist(context).await?;
+
     if let Some(url) = &url {
-      check_url_scheme(url)?;
+      is_url_blocked(url, &url_blocklist)?;
+      is_valid_url(url)?;
     }

     let alt_text = first_attachment.cloned().and_then(Attachment::alt_text);

     let slur_regex = &local_site_opt_to_slur_regex(&local_site);
-    let url_blocklist = get_url_blocklist(context).await?;

     let body = read_from_string_or_source_opt(&page.content, &page.media_type, &page.source);
     let body = process_markdown_opt(&body, slur_regex, &url_blocklist, context).await?;
@@ -73,7 +73,7 @@ impl Object for ApubPrivateMessage {

   async fn delete(self, _context: &Data<Self::DataType>) -> LemmyResult<()> {
     // do nothing, because pm can't be fetched over http
-    unimplemented!()
+    Err(LemmyErrorType::CouldntFindPrivateMessage.into())
   }

   #[tracing::instrument(skip_all)]

@@ -7,7 +7,7 @@ use crate::{
     community_outbox::ApubCommunityOutbox,
   },
   local_site_data_cached,
-  objects::{community::ApubCommunity, read_from_string_or_source_opt, verify_is_remote_object},
+  objects::{community::ApubCommunity, read_from_string_or_source_opt},
   protocol::{
     objects::{Endpoints, LanguageTag},
     ImageObject,

@@ -80,7 +80,6 @@ impl Group {
   ) -> LemmyResult<()> {
     check_apub_id_valid_with_strictness(self.id.inner(), true, context).await?;
     verify_domains_match(expected_domain, self.id.inner())?;
-    verify_is_remote_object(&self.id, context)?;

     let local_site_data = local_site_data_cached(&mut context.pool()).await?;
     let slur_regex = &local_site_opt_to_slur_regex(&local_site_data.local_site);

@@ -193,10 +193,12 @@ impl ActivityHandler for Page {
   type DataType = LemmyContext;
   type Error = LemmyError;
   fn id(&self) -> &Url {
-    unimplemented!()
+    self.id.inner()
   }

   fn actor(&self) -> &Url {
-    unimplemented!()
+    debug_assert!(false);
+    self.id.inner()
   }
   async fn verify(&self, data: &Data<Self::DataType>) -> LemmyResult<()> {
     ApubPost::verify(self, self.id.inner(), data).await

@@ -27,7 +27,6 @@ full = [
   "lemmy_utils",
   "activitypub_federation",
   "regex",
-  "once_cell",
   "serde_json",
   "diesel_ltree",
   "diesel-async",

@@ -64,7 +63,6 @@ diesel-async = { workspace = true, features = [
   "deadpool",
 ], optional = true }
 regex = { workspace = true, optional = true }
-once_cell = { workspace = true, optional = true }
 diesel_ltree = { workspace = true, optional = true }
 typed-builder = { workspace = true }
 async-trait = { workspace = true }
@@ -117,7 +117,7 @@ impl Crud for Comment {
   type UpdateForm = CommentUpdateForm;
   type IdType = CommentId;

-  /// This is unimplemented, use [[Comment::create]]
+  /// Use [[Comment::create]]
   async fn create(pool: &mut DbPool<'_>, comment_form: &Self::InsertForm) -> Result<Self, Error> {
     debug_assert!(false);
     Comment::create(pool, comment_form, None).await

@@ -7,7 +7,7 @@ use diesel::{dsl::insert_into, result::Error};
 use diesel_async::RunQueryDsl;
 use lemmy_utils::{error::LemmyResult, CACHE_DURATION_API};
 use moka::future::Cache;
-use once_cell::sync::Lazy;
+use std::sync::LazyLock;

 impl LocalSite {
   pub async fn create(pool: &mut DbPool<'_>, form: &LocalSiteInsertForm) -> Result<Self, Error> {

@@ -18,7 +18,7 @@ impl LocalSite {
     .await
   }
   pub async fn read(pool: &mut DbPool<'_>) -> LemmyResult<Self> {
-    static CACHE: Lazy<Cache<(), LocalSite>> = Lazy::new(|| {
+    static CACHE: LazyLock<Cache<(), LocalSite>> = LazyLock::new(|| {
       Cache::builder()
         .max_capacity(1)
         .time_to_live(CACHE_DURATION_API)

@@ -115,11 +115,11 @@ impl LocalUser {
     let conn = &mut get_conn(pool).await?;

     // Make sure:
-    // - The deny reason exists
+    // - An admin has interacted with the application
     // - The app is older than a week
     // - The accepted_application is false
     let old_denied_registrations = registration_application::table
-      .filter(registration_application::deny_reason.is_not_null())
+      .filter(registration_application::admin_id.is_not_null())
      .filter(registration_application::published.lt(now() - 1.week()))
       .select(registration_application::local_user_id);

@@ -191,9 +191,12 @@ impl Followable for PersonFollower {
       .get_result::<Self>(conn)
       .await
   }

+  /// Currently no user following
   async fn follow_accepted(_: &mut DbPool<'_>, _: CommunityId, _: PersonId) -> Result<Self, Error> {
-    unimplemented!()
+    Err(Error::NotFound)
   }

   async fn unfollow(pool: &mut DbPool<'_>, form: &PersonFollowerForm) -> Result<usize, Error> {
     let conn = &mut get_conn(pool).await?;
     diesel::delete(person_follower::table.find((form.follower_id, form.person_id)))

@@ -52,7 +52,7 @@ impl Reportable for PrivateMessageReport {
     _pm_id_: PrivateMessageId,
     _by_resolver_id: PersonId,
   ) -> Result<usize, Error> {
-    unimplemented!()
+    Err(Error::NotFound)
   }

   async fn unresolve(

@@ -20,7 +20,7 @@ impl Crud for Site {

   /// Use SiteView::read_local, or Site::read_from_apub_id instead
   async fn read(_pool: &mut DbPool<'_>, _site_id: SiteId) -> Result<Option<Self>, Error> {
-    unimplemented!()
+    Err(Error::NotFound)
   }

   async fn create(pool: &mut DbPool<'_>, form: &Self::InsertForm) -> Result<Self, Error> {

@@ -711,7 +711,7 @@ diesel::table! {
     id -> Int4,
     #[max_length = 200]
     name -> Varchar,
-    #[max_length = 512]
+    #[max_length = 2000]
     url -> Nullable<Varchar>,
     body -> Nullable<Text>,
     creator_id -> Int4,

@@ -32,7 +32,6 @@ use lemmy_utils::{
   settings::SETTINGS,
   utils::validation::clean_url_params,
 };
-use once_cell::sync::Lazy;
 use regex::Regex;
 use rustls::{
   client::danger::{

@@ -49,7 +48,7 @@ use rustls::{
 };
 use std::{
   ops::{Deref, DerefMut},
-  sync::Arc,
+  sync::{Arc, LazyLock},
   time::Duration,
 };
 use tracing::error;

@@ -478,7 +477,7 @@ pub fn post_to_comment_sort_type(sort: SortType) -> CommentSortType {
   }
 }

-static EMAIL_REGEX: Lazy<Regex> = Lazy::new(|| {
+static EMAIL_REGEX: LazyLock<Regex> = LazyLock::new(|| {
   Regex::new(r"^[a-zA-Z0-9.!#$%&’*+/=?^_`{|}~-]+@[a-zA-Z0-9-]+(?:\.[a-zA-Z0-9-]+)*$")
     .expect("compile email regex")
 });

@@ -27,7 +27,6 @@ futures.workspace = true
 chrono.workspace = true
 diesel = { workspace = true, features = ["postgres", "chrono", "serde_json"] }
 diesel-async = { workspace = true, features = ["deadpool", "postgres"] }
-once_cell.workspace = true
 reqwest.workspace = true
 serde_json.workspace = true
 tokio = { workspace = true, features = ["full"] }

@@ -43,4 +42,4 @@ actix-web.workspace = true
 tracing-test = "0.2.5"
 uuid.workspace = true
 test-context = "0.3.0"
-mockall = "0.12.1"
+mockall = "0.13.0"
@@ -8,9 +8,11 @@ use lemmy_db_schema::{
   utils::{ActualDbPool, DbPool},
 };
 use lemmy_db_views_actor::structs::CommunityFollowerView;
-use once_cell::sync::Lazy;
 use reqwest::Url;
-use std::collections::{HashMap, HashSet};
+use std::{
+  collections::{HashMap, HashSet},
+  sync::LazyLock,
+};

 /// interval with which new additions to community_followers are queried.
 ///

@@ -21,7 +23,7 @@ use std::collections::{HashMap, HashSet};
 /// currently fairly high because of the current structure of storing inboxes for every person, not
 /// having a separate list of shared_inboxes, and the architecture of having every instance queue be
 /// fully separate. (see https://github.com/LemmyNet/lemmy/issues/3958)
-static FOLLOW_ADDITIONS_RECHECK_DELAY: Lazy<chrono::TimeDelta> = Lazy::new(|| {
+static FOLLOW_ADDITIONS_RECHECK_DELAY: LazyLock<chrono::TimeDelta> = LazyLock::new(|| {
   if *LEMMY_TEST_FAST_FEDERATION {
     chrono::TimeDelta::try_seconds(1).expect("TimeDelta out of bounds")
   } else {

@@ -31,8 +33,8 @@ static FOLLOW_ADDITIONS_RECHECK_DELAY: Lazy<chrono::TimeDelta> = Lazy::new(|| {
 /// The same as FOLLOW_ADDITIONS_RECHECK_DELAY, but triggering when the last person on an instance
 /// unfollows a specific remote community. This is expected to happen pretty rarely and updating it
 /// in a timely manner is not too important.
-static FOLLOW_REMOVALS_RECHECK_DELAY: Lazy<chrono::TimeDelta> =
-  Lazy::new(|| chrono::TimeDelta::try_hours(1).expect("TimeDelta out of bounds"));
+static FOLLOW_REMOVALS_RECHECK_DELAY: LazyLock<chrono::TimeDelta> =
+  LazyLock::new(|| chrono::TimeDelta::try_hours(1).expect("TimeDelta out of bounds"));

 #[async_trait]
 pub trait DataSource: Send + Sync {

@@ -18,10 +18,15 @@ use lemmy_db_schema::{
   utils::{get_conn, DbPool},
 };
 use moka::future::Cache;
-use once_cell::sync::Lazy;
 use reqwest::Url;
 use serde_json::Value;
-use std::{fmt::Debug, future::Future, pin::Pin, sync::Arc, time::Duration};
+use std::{
+  fmt::Debug,
+  future::Future,
+  pin::Pin,
+  sync::{Arc, LazyLock},
+  time::Duration,
+};
 use tokio::{task::JoinHandle, time::sleep};
 use tokio_util::sync::CancellationToken;

@@ -29,7 +34,7 @@ use tokio_util::sync::CancellationToken;
 /// Should only be used for federation tests since it significantly increases CPU and DB load of the
 /// federation queue. This is intentionally a separate flag from other flags like debug_assertions,
 /// since this is a invasive change we only need rarely.
-pub(crate) static LEMMY_TEST_FAST_FEDERATION: Lazy<bool> = Lazy::new(|| {
+pub(crate) static LEMMY_TEST_FAST_FEDERATION: LazyLock<bool> = LazyLock::new(|| {
   std::env::var("LEMMY_TEST_FAST_FEDERATION")
     .map(|s| !s.is_empty())
     .unwrap_or(false)

@@ -49,7 +54,7 @@ pub(crate) static LEMMY_TEST_FAST_FEDERATION: Lazy<bool> = Lazy::new(|| {
 /// If the delay is too short, the workers (one per federated instance) will wake up too
 /// often and consume a lot of CPU. If the delay is long, then activities on low-traffic instances
 /// will on average take delay/2 seconds to federate.
-pub(crate) static WORK_FINISHED_RECHECK_DELAY: Lazy<Duration> = Lazy::new(|| {
+pub(crate) static WORK_FINISHED_RECHECK_DELAY: LazyLock<Duration> = LazyLock::new(|| {
   if *LEMMY_TEST_FAST_FEDERATION {
     Duration::from_millis(100)
   } else {

@@ -61,7 +66,7 @@ pub(crate) static WORK_FINISHED_RECHECK_DELAY: Lazy<Duration> = Lazy::new(|| {
 ///
 /// This cache is common to all the instance workers and prevents there from being more than one
 /// call per N seconds between each DB query to find max(activity_id).
-pub(crate) static CACHE_DURATION_LATEST_ID: Lazy<Duration> = Lazy::new(|| {
+pub(crate) static CACHE_DURATION_LATEST_ID: LazyLock<Duration> = LazyLock::new(|| {
   if *LEMMY_TEST_FAST_FEDERATION {
     // in test mode, we use the same cache duration as the recheck delay so when recheck happens
     // data is fresh, accelerating the time the tests take.

@@ -132,8 +137,8 @@ pub(crate) async fn get_actor_cached(
   actor_type: ActorType,
   actor_apub_id: &Url,
 ) -> Result<Arc<SiteOrCommunityOrUser>> {
-  static CACHE: Lazy<Cache<Url, Arc<SiteOrCommunityOrUser>>> =
-    Lazy::new(|| Cache::builder().max_capacity(10000).build());
+  static CACHE: LazyLock<Cache<Url, Arc<SiteOrCommunityOrUser>>> =
+    LazyLock::new(|| Cache::builder().max_capacity(10000).build());
   CACHE
     .try_get_with(actor_apub_id.clone(), async {
       let url = actor_apub_id.clone().into();

@@ -172,8 +177,8 @@ pub(crate) async fn get_activity_cached(
   pool: &mut DbPool<'_>,
   activity_id: ActivityId,
 ) -> Result<CachedActivityInfo> {
-  static ACTIVITIES: Lazy<Cache<ActivityId, CachedActivityInfo>> =
-    Lazy::new(|| Cache::builder().max_capacity(10000).build());
+  static ACTIVITIES: LazyLock<Cache<ActivityId, CachedActivityInfo>> =
+    LazyLock::new(|| Cache::builder().max_capacity(10000).build());
   ACTIVITIES
     .try_get_with(activity_id, async {
       let row = SentActivity::read(pool, activity_id)

@@ -195,7 +200,7 @@ pub(crate) async fn get_activity_cached(

 /// return the most current activity id (with 1 second cache)
 pub(crate) async fn get_latest_activity_id(pool: &mut DbPool<'_>) -> Result<ActivityId> {
-  static CACHE: Lazy<Cache<(), ActivityId>> = Lazy::new(|| {
+  static CACHE: LazyLock<Cache<(), ActivityId>> = LazyLock::new(|| {
     Cache::builder()
       .time_to_live(*CACHE_DURATION_LATEST_ID)
       .build()
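
get_actor_cached and get_activity_cached above memoize fallible async lookups with moka's try_get_with, which runs the loader on a miss and, per moka's documented behavior, stores only successful results. A hedged sketch of that pattern; the HashMap here is a stand-in for the database read.

use moka::future::Cache;
use std::{collections::HashMap, sync::Arc};

async fn load_actor(db: &HashMap<u64, String>, id: u64) -> Result<String, String> {
  db.get(&id).cloned().ok_or_else(|| format!("actor {id} not found"))
}

#[tokio::main]
async fn main() {
  let db = HashMap::from([(1, "alice".to_string())]);
  let cache: Cache<u64, String> = Cache::builder().max_capacity(10_000).build();

  // First call runs the loader and caches the value; second call is a pure hit.
  let hit = cache.try_get_with(1, load_actor(&db, 1)).await;
  assert_eq!(hit.as_deref(), Ok("alice"));

  // Errors come back wrapped in Arc and are not cached, so a later retry
  // will run the loader again.
  let miss: Result<String, Arc<String>> = cache.try_get_with(2, load_actor(&db, 2)).await;
  assert!(miss.is_err());
}
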
@@ -30,7 +30,6 @@ reqwest = { workspace = true, features = ["stream"] }
 reqwest-middleware = { workspace = true }
 serde = { workspace = true }
 url = { workspace = true }
-once_cell = { workspace = true }
 tracing = { workspace = true }
 tokio = { workspace = true }
 urlencoding = { workspace = true }

@@ -25,7 +25,6 @@ use lemmy_utils::{
   error::{LemmyError, LemmyErrorType, LemmyResult},
   utils::markdown::{markdown_to_html, sanitize_html},
 };
-use once_cell::sync::Lazy;
 use rss::{
   extension::{dublincore::DublinCoreExtension, ExtensionBuilder, ExtensionMap},
   Channel,

@@ -34,7 +33,7 @@ use rss::{
   Item,
 };
 use serde::Deserialize;
-use std::{collections::BTreeMap, str::FromStr};
+use std::{collections::BTreeMap, str::FromStr, sync::LazyLock};

 const RSS_FETCH_LIMIT: i64 = 20;

@@ -80,7 +79,7 @@ pub fn config(cfg: &mut web::ServiceConfig) {
   );
 }

-static RSS_NAMESPACE: Lazy<BTreeMap<String, String>> = Lazy::new(|| {
+static RSS_NAMESPACE: LazyLock<BTreeMap<String, String>> = LazyLock::new(|| {
   let mut h = BTreeMap::new();
   h.insert(
     "dc".to_string(),

@@ -39,7 +39,6 @@ full = [
   "dep:urlencoding",
   "dep:doku",
   "dep:url",
-  "dep:once_cell",
   "dep:smart-default",
   "dep:enum-map",
   "dep:futures",

@@ -58,7 +57,6 @@ tracing-error = { workspace = true, optional = true }
 itertools = { workspace = true, optional = true }
 serde = { workspace = true }
 serde_json = { workspace = true, optional = true }
-once_cell = { workspace = true, optional = true }
 url = { workspace = true, optional = true }
 actix-web = { workspace = true, optional = true }
 anyhow = { workspace = true, optional = true }

@@ -81,7 +79,7 @@ lettre = { version = "0.11.7", default-features = false, features = [
   "tokio1-rustls-tls",
   "smtp-transport",
 ], optional = true }
-markdown-it = { version = "0.6.0", optional = true }
+markdown-it = { version = "0.6.1", optional = true }
 ts-rs = { workspace = true, optional = true }
 enum-map = { workspace = true, optional = true }
 cfg-if = "1"

@@ -179,6 +179,8 @@ pub enum LemmyErrorType {
   UrlWithoutDomain,
   InboxTimeout,
   Unknown(String),
+  CantDeleteSite,
+  UrlLengthOverflow,
 }

 cfg_if! {

@@ -1,15 +1,15 @@
 use enum_map::EnumMap;
-use once_cell::sync::Lazy;
 use std::{
   collections::HashMap,
   hash::Hash,
   net::{IpAddr, Ipv4Addr, Ipv6Addr},
+  sync::LazyLock,
   time::Instant,
 };
 use strum::{AsRefStr, Display};
 use tracing::debug;

-static START_TIME: Lazy<Instant> = Lazy::new(Instant::now);
+static START_TIME: LazyLock<Instant> = LazyLock::new(Instant::now);

 /// Smaller than `std::time::Instant` because it uses a smaller integer for seconds and doesn't
 /// store nanoseconds

@@ -1,9 +1,8 @@
 use crate::{error::LemmyResult, location_info};
 use anyhow::{anyhow, Context};
 use deser_hjson::from_str;
-use once_cell::sync::Lazy;
 use regex::Regex;
-use std::{env, fs, io::Error};
+use std::{env, fs, io::Error, sync::LazyLock};
 use urlencoding::encode;

 pub mod structs;

@@ -12,7 +11,7 @@ use structs::{DatabaseConnection, PictrsConfig, PictrsImageMode, Settings};

 static DEFAULT_CONFIG_FILE: &str = "config/config.hjson";

-pub static SETTINGS: Lazy<Settings> = Lazy::new(|| {
+pub static SETTINGS: LazyLock<Settings> = LazyLock::new(|| {
   if env::var("LEMMY_INITIALIZE_WITH_DEFAULT_SETTINGS").is_ok() {
     println!(
       "LEMMY_INITIALIZE_WITH_DEFAULT_SETTINGS was set, any configuration file has been ignored."

@@ -24,7 +23,7 @@ pub static SETTINGS: Lazy<Settings> = Lazy::new(|| {
   }
 });

-static WEBFINGER_REGEX: Lazy<Regex> = Lazy::new(|| {
+static WEBFINGER_REGEX: LazyLock<Regex> = LazyLock::new(|| {
   Regex::new(&format!(
     "^acct:([a-zA-Z0-9_]{{3,}})@{}$",
     SETTINGS.hostname

@@ -1,14 +1,14 @@
 use crate::{error::LemmyResult, settings::SETTINGS, LemmyErrorType};
 use markdown_it::{plugins::cmark::inline::image::Image, MarkdownIt};
-use once_cell::sync::Lazy;
 use regex::RegexSet;
+use std::sync::LazyLock;
 use url::Url;
 use urlencoding::encode;

 mod link_rule;
 mod spoiler_rule;

-static MARKDOWN_PARSER: Lazy<MarkdownIt> = Lazy::new(|| {
+static MARKDOWN_PARSER: LazyLock<MarkdownIt> = LazyLock::new(|| {
   let mut parser = MarkdownIt::new();
   markdown_it::plugins::cmark::add(&mut parser);
   markdown_it::plugins::extra::add(&mut parser);

@@ -34,8 +34,8 @@ use markdown_it::{
   NodeValue,
   Renderer,
 };
-use once_cell::sync::Lazy;
 use regex::Regex;
+use std::sync::LazyLock;

 #[derive(Debug)]
 struct SpoilerBlock {

@@ -46,8 +46,8 @@ const SPOILER_PREFIX: &str = "::: spoiler ";
 const SPOILER_SUFFIX: &str = ":::";
 const SPOILER_SUFFIX_NEWLINE: &str = ":::\n";

-static SPOILER_REGEX: Lazy<Regex> =
-  Lazy::new(|| Regex::new(r"^::: spoiler .*$").expect("compile spoiler markdown regex."));
+static SPOILER_REGEX: LazyLock<Regex> =
+  LazyLock::new(|| Regex::new(r"^::: spoiler .*$").expect("compile spoiler markdown regex."));

 impl NodeValue for SpoilerBlock {
   // Formats any node marked as a 'SpoilerBlock' into HTML.

@@ -1,8 +1,8 @@
 use itertools::Itertools;
-use once_cell::sync::Lazy;
 use regex::Regex;
+use std::sync::LazyLock;

-static MENTIONS_REGEX: Lazy<Regex> = Lazy::new(|| {
+static MENTIONS_REGEX: LazyLock<Regex> = LazyLock::new(|| {
   Regex::new(r"@(?P<name>[\w.]+)@(?P<domain>[a-zA-Z0-9._:-]+)").expect("compile regex")
 });
 // TODO nothing is done with community / group webfingers yet, so just ignore those for now
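
The mentions regex above captures fediverse-style handles through named groups. A short standalone usage sketch that only assumes the regex crate; the sample text is invented.

use regex::Regex;

fn main() {
  let mentions =
    Regex::new(r"@(?P<name>[\w.]+)@(?P<domain>[a-zA-Z0-9._:-]+)").expect("compile regex");

  let text = "cc @alice@example.org and @bob.smith@social.example";
  for caps in mentions.captures_iter(text) {
    // Named groups make the extraction self-documenting.
    println!("name={} domain={}", &caps["name"], &caps["domain"]);
  }
}
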
@ -1,16 +1,16 @@
|
||||||
use crate::error::{LemmyErrorExt, LemmyErrorType, LemmyResult};
|
use crate::error::{LemmyErrorExt, LemmyErrorType, LemmyResult};
|
||||||
use itertools::Itertools;
|
use itertools::Itertools;
|
||||||
use once_cell::sync::Lazy;
|
|
||||||
use regex::{Regex, RegexBuilder, RegexSet};
|
use regex::{Regex, RegexBuilder, RegexSet};
|
||||||
|
use std::sync::LazyLock;
|
||||||
use url::{ParseError, Url};
|
use url::{ParseError, Url};
|
||||||
|
|
||||||
// From here: https://github.com/vector-im/element-android/blob/develop/matrix-sdk-android/src/main/java/org/matrix/android/sdk/api/MatrixPatterns.kt#L35
|
// From here: https://github.com/vector-im/element-android/blob/develop/matrix-sdk-android/src/main/java/org/matrix/android/sdk/api/MatrixPatterns.kt#L35
|
||||||
static VALID_MATRIX_ID_REGEX: Lazy<Regex> = Lazy::new(|| {
|
static VALID_MATRIX_ID_REGEX: LazyLock<Regex> = LazyLock::new(|| {
|
||||||
Regex::new(r"^@[A-Za-z0-9\x21-\x39\x3B-\x7F]+:[A-Za-z0-9.-]+(:[0-9]{2,5})?$")
|
Regex::new(r"^@[A-Za-z0-9\x21-\x39\x3B-\x7F]+:[A-Za-z0-9.-]+(:[0-9]{2,5})?$")
|
||||||
.expect("compile regex")
|
.expect("compile regex")
|
||||||
});
|
});
|
||||||
// taken from https://en.wikipedia.org/wiki/UTM_parameters
|
// taken from https://en.wikipedia.org/wiki/UTM_parameters
|
||||||
static CLEAN_URL_PARAMS_REGEX: Lazy<Regex> = Lazy::new(|| {
|
static CLEAN_URL_PARAMS_REGEX: LazyLock<Regex> = LazyLock::new(|| {
|
||||||
Regex::new(
|
Regex::new(
|
||||||
r"^(utm_source|utm_medium|utm_campaign|utm_term|utm_content|gclid|gclsrc|dclid|fbclid)=",
|
r"^(utm_source|utm_medium|utm_campaign|utm_term|utm_content|gclid|gclsrc|dclid|fbclid)=",
|
||||||
)
|
)
|
||||||
|
@ -21,6 +21,7 @@ const ALLOWED_POST_URL_SCHEMES: [&str; 3] = ["http", "https", "magnet"];
|
||||||
const BODY_MAX_LENGTH: usize = 10000;
|
const BODY_MAX_LENGTH: usize = 10000;
|
||||||
const POST_BODY_MAX_LENGTH: usize = 50000;
|
const POST_BODY_MAX_LENGTH: usize = 50000;
|
||||||
const BIO_MAX_LENGTH: usize = 300;
|
const BIO_MAX_LENGTH: usize = 300;
|
||||||
|
const URL_MAX_LENGTH: usize = 2000;
|
||||||
const ALT_TEXT_MAX_LENGTH: usize = 1500;
|
const ALT_TEXT_MAX_LENGTH: usize = 1500;
|
||||||
const SITE_NAME_MAX_LENGTH: usize = 20;
|
const SITE_NAME_MAX_LENGTH: usize = 20;
|
||||||
const SITE_NAME_MIN_LENGTH: usize = 1;
|
const SITE_NAME_MIN_LENGTH: usize = 1;
|
||||||
|
@@ -87,12 +88,12 @@ fn has_newline(name: &str) -> bool {
 }

 pub fn is_valid_actor_name(name: &str, actor_name_max_length: usize) -> LemmyResult<()> {
-  static VALID_ACTOR_NAME_REGEX_EN: Lazy<Regex> =
-    Lazy::new(|| Regex::new(r"^[a-zA-Z0-9_]{3,}$").expect("compile regex"));
-  static VALID_ACTOR_NAME_REGEX_AR: Lazy<Regex> =
-    Lazy::new(|| Regex::new(r"^[\p{Arabic}0-9_]{3,}$").expect("compile regex"));
-  static VALID_ACTOR_NAME_REGEX_RU: Lazy<Regex> =
-    Lazy::new(|| Regex::new(r"^[\p{Cyrillic}0-9_]{3,}$").expect("compile regex"));
+  static VALID_ACTOR_NAME_REGEX_EN: LazyLock<Regex> =
+    LazyLock::new(|| Regex::new(r"^[a-zA-Z0-9_]{3,}$").expect("compile regex"));
+  static VALID_ACTOR_NAME_REGEX_AR: LazyLock<Regex> =
+    LazyLock::new(|| Regex::new(r"^[\p{Arabic}0-9_]{3,}$").expect("compile regex"));
+  static VALID_ACTOR_NAME_REGEX_RU: LazyLock<Regex> =
+    LazyLock::new(|| Regex::new(r"^[\p{Cyrillic}0-9_]{3,}$").expect("compile regex"));

   let check = name.chars().count() <= actor_name_max_length && !has_newline(name);

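Note: this hunk shows only the per-script regex statics and the length/newline check; how the three patterns are combined is outside the diff context. A rough standalone sketch, assuming a name passes if any one script-specific pattern matches, with plain String errors standing in for LemmyResult and the regexes compiled per call rather than cached in LazyLock statics as in the real code:

    use regex::Regex;

    // Hypothetical stand-in for Lemmy's error handling; the "any pattern matches"
    // rule is an assumption, not taken from this diff.
    fn is_valid_actor_name(name: &str, max_length: usize) -> Result<(), String> {
        let patterns = [
            r"^[a-zA-Z0-9_]{3,}$",       // ASCII letters, digits, underscore
            r"^[\p{Arabic}0-9_]{3,}$",   // Arabic script
            r"^[\p{Cyrillic}0-9_]{3,}$", // Cyrillic script
        ];
        let length_ok = name.chars().count() <= max_length && !name.contains('\n');
        let script_ok = patterns
            .iter()
            .any(|p| Regex::new(p).expect("compile regex").is_match(name));
        if length_ok && script_ok {
            Ok(())
        } else {
            Err("InvalidName".to_string())
        }
    }

    fn main() {
        assert!(is_valid_actor_name("lemmy_user", 20).is_ok());
        assert!(is_valid_actor_name("bad name", 20).is_err());
    }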
@@ -284,11 +285,17 @@ pub fn check_site_visibility_valid(
   }
 }

-pub fn check_url_scheme(url: &Url) -> LemmyResult<()> {
+pub fn is_valid_url(url: &Url) -> LemmyResult<()> {
   if !ALLOWED_POST_URL_SCHEMES.contains(&url.scheme()) {
     Err(LemmyErrorType::InvalidUrlScheme)?
   }

+  max_length_check(
+    url.as_str(),
+    URL_MAX_LENGTH,
+    LemmyErrorType::UrlLengthOverflow,
+  )?;
+
   Ok(())
 }

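Note: the renamed is_valid_url now enforces both the scheme allow-list and a URL_MAX_LENGTH cap via max_length_check, whose implementation is not part of this diff. A self-contained approximation, with plain String errors in place of LemmyErrorType and a character count standing in for max_length_check:

    use url::Url;

    const ALLOWED_POST_URL_SCHEMES: [&str; 3] = ["http", "https", "magnet"];
    const URL_MAX_LENGTH: usize = 2000;

    // Approximation only: the length check here counts characters, which may
    // differ from Lemmy's max_length_check helper.
    fn is_valid_url(url: &Url) -> Result<(), String> {
        if !ALLOWED_POST_URL_SCHEMES.contains(&url.scheme()) {
            return Err("InvalidUrlScheme".to_string());
        }
        if url.as_str().chars().count() > URL_MAX_LENGTH {
            return Err("UrlLengthOverflow".to_string());
        }
        Ok(())
    }

    fn main() {
        assert!(is_valid_url(&Url::parse("https://example.com").unwrap()).is_ok());
        assert!(is_valid_url(&Url::parse("ftp://example.com").unwrap()).is_err());
    }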
@@ -349,7 +356,6 @@ mod tests {
     utils::validation::{
       build_and_check_regex,
       check_site_visibility_valid,
-      check_url_scheme,
       check_urls_are_valid,
       clean_url_params,
       is_url_blocked,
@@ -358,11 +364,13 @@ mod tests {
       is_valid_display_name,
       is_valid_matrix_id,
       is_valid_post_title,
+      is_valid_url,
       site_description_length_check,
       site_name_length_check,
       BIO_MAX_LENGTH,
       SITE_DESCRIPTION_MAX_LENGTH,
       SITE_NAME_MAX_LENGTH,
+      URL_MAX_LENGTH,
     },
   };
   use pretty_assertions::assert_eq;
@@ -580,15 +588,27 @@ mod tests {
   }

   #[test]
-  fn test_check_url_scheme() -> LemmyResult<()> {
-    assert!(check_url_scheme(&Url::parse("http://example.com")?).is_ok());
-    assert!(check_url_scheme(&Url::parse("https://example.com")?).is_ok());
-    assert!(check_url_scheme(&Url::parse("https://example.com")?).is_ok());
-    assert!(check_url_scheme(&Url::parse("ftp://example.com")?).is_err());
-    assert!(check_url_scheme(&Url::parse("javascript:void")?).is_err());
+  fn test_check_url_valid() -> LemmyResult<()> {
+    assert!(is_valid_url(&Url::parse("http://example.com")?).is_ok());
+    assert!(is_valid_url(&Url::parse("https://example.com")?).is_ok());
+    assert!(is_valid_url(&Url::parse("https://example.com")?).is_ok());
+    assert!(is_valid_url(&Url::parse("ftp://example.com")?)
+      .is_err_and(|e| e.error_type.eq(&LemmyErrorType::InvalidUrlScheme)));
+    assert!(is_valid_url(&Url::parse("javascript:void")?)
+      .is_err_and(|e| e.error_type.eq(&LemmyErrorType::InvalidUrlScheme)));

     let magnet_link="magnet:?xt=urn:btih:4b390af3891e323778959d5abfff4b726510f14c&dn=Ravel%20Complete%20Piano%20Sheet%20Music%20-%20Public%20Domain&tr=udp%3A%2F%2Fopen.tracker.cl%3A1337%2Fannounce";
-    assert!(check_url_scheme(&Url::parse(magnet_link)?).is_ok());
+    assert!(is_valid_url(&Url::parse(magnet_link)?).is_ok());
+
+    // Also make sure the length overflow hits an error
+    let mut long_str = "http://example.com/test=".to_string();
+    for _ in 1..URL_MAX_LENGTH {
+      long_str.push('X');
+    }
+    let long_url = Url::parse(&long_str)?;
+    assert!(
+      is_valid_url(&long_url).is_err_and(|e| e.error_type.eq(&LemmyErrorType::UrlLengthOverflow))
+    );

     Ok(())
   }
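Note: the rewritten test asserts which error variant is returned rather than just is_err(), using Result::is_err_and (stable since Rust 1.70). A minimal illustration of that assertion style outside Lemmy's error types; the enum and check_scheme function below are hypothetical:

    #[derive(Debug, PartialEq)]
    enum ValidationError {
        InvalidUrlScheme,
        UrlLengthOverflow,
    }

    // Hypothetical validator used only to demonstrate the assertion pattern.
    fn check_scheme(scheme: &str) -> Result<(), ValidationError> {
        match scheme {
            "http" | "https" | "magnet" => Ok(()),
            _ => Err(ValidationError::InvalidUrlScheme),
        }
    }

    fn main() {
        // is_err_and checks both that the result is an error and which error it is,
        // mirroring the .is_err_and(|e| e.error_type.eq(&...)) assertions above.
        assert!(check_scheme("ftp").is_err_and(|e| e == ValidationError::InvalidUrlScheme));
        assert!(!check_scheme("https").is_err_and(|e| e == ValidationError::UrlLengthOverflow));
    }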
@@ -1 +1 @@
-Subproject commit ee2cffac809ad466644f061ad79ac577b6c2e4fd
+Subproject commit bc9b5305769900c5a59d8f139f110e004085f92b
@@ -1,11 +1,11 @@
-# syntax=docker/dockerfile:1.8
-ARG RUST_VERSION=1.78
+# syntax=docker/dockerfile:1.9
+ARG RUST_VERSION=1.80
 ARG CARGO_BUILD_FEATURES=default
 ARG RUST_RELEASE_MODE=debug

 ARG AMD_BUILDER_IMAGE=rust:${RUST_VERSION}
 # Repo: https://github.com/raskyld/lemmy-cross-toolchains
-ARG ARM_BUILDER_IMAGE="ghcr.io/raskyld/aarch64-lemmy-linux-gnu:v0.3.0"
+ARG ARM_BUILDER_IMAGE="ghcr.io/raskyld/aarch64-lemmy-linux-gnu:v0.4.0"

 ARG AMD_RUNNER_IMAGE=debian:bookworm-slim
 ARG ARM_RUNNER_IMAGE=debian:bookworm-slim
@@ -20,7 +20,7 @@ x-lemmy-default: &lemmy-default
   restart: always

 x-postgres-default: &postgres-default
-  image: pgautoupgrade/pgautoupgrade:16-alpine
+  image: pgautoupgrade/pgautoupgrade:15-alpine
   environment:
     - POSTGRES_USER=lemmy
     - POSTGRES_PASSWORD=password
@@ -0,0 +1,3 @@
+ALTER TABLE post
+ALTER COLUMN url TYPE varchar(512);
@@ -0,0 +1,5 @@
+-- Change the post url max limit to 2000
+-- From here: https://stackoverflow.com/questions/417142/what-is-the-maximum-length-of-a-url-in-different-browsers#417184
+ALTER TABLE post
+ALTER COLUMN url TYPE varchar(2000);
@@ -1,5 +1,6 @@
 {
   "$schema": "https://docs.renovatebot.com/renovate-schema.json",
   "extends": ["config:recommended"],
-  "schedule": ["before 4am on the first day of the month"]
+  "schedule": ["before 4am on the first day of the month"],
+  "automerge": true
 }