Merge remote-tracking branch 'origin/main' into thumbnail_sizes
Commit 34399ad741
152 changed files with 4080 additions and 3316 deletions
@@ -3,3 +3,5 @@ edition = "2021"
imports_layout = "HorizontalVertical"
imports_granularity = "Crate"
group_imports = "One"
wrap_comments = true
comment_width = 100
@@ -2,7 +2,8 @@
# See https://github.com/woodpecker-ci/woodpecker/issues/1677

variables:
- &rust_image "rust:1.77"
- &rust_image "rust:1.78"
- &rust_nightly_image "rustlang/rust:nightly"
- &install_pnpm "corepack enable pnpm"
- &slow_check_paths
- event: pull_request

@@ -24,15 +25,17 @@ variables:
"diesel.toml",
".gitmodules",
]

# Broken for cron jobs currently, see
# https://github.com/woodpecker-ci/woodpecker/issues/1716
# clone:
# git:
# image: woodpeckerci/plugin-git
# settings:
# recursive: true
# submodule_update_remote: true
- install_binstall: &install_binstall
- wget https://github.com/cargo-bins/cargo-binstall/releases/latest/download/cargo-binstall-x86_64-unknown-linux-musl.tgz
- tar -xvf cargo-binstall-x86_64-unknown-linux-musl.tgz
- cp cargo-binstall /usr/local/cargo/bin
- install_diesel_cli: &install_diesel_cli
- apt update && apt install -y lsb-release build-essential
- sh -c 'echo "deb https://apt.postgresql.org/pub/repos/apt $(lsb_release -cs)-pgdg main" > /etc/apt/sources.list.d/pgdg.list'
- wget --quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc | apt-key add -
- apt update && apt install -y postgresql-client-16
- cargo install diesel_cli --no-default-features --features postgres
- export PATH="$CARGO_HOME/bin:$PATH"

steps:
prepare_repo:

@@ -66,7 +69,7 @@
- event: pull_request

cargo_fmt:
image: rustlang/rust:nightly
image: *rust_nightly_image
environment:
# store cargo data in repo folder so that it gets cached between steps
CARGO_HOME: .cargo_home

@@ -77,11 +80,9 @@
- event: pull_request

cargo_machete:
image: rustlang/rust:nightly
image: *rust_nightly_image
commands:
- wget https://github.com/cargo-bins/cargo-binstall/releases/latest/download/cargo-binstall-x86_64-unknown-linux-musl.tgz
- tar -xvf cargo-binstall-x86_64-unknown-linux-musl.tgz
- cp cargo-binstall /usr/local/cargo/bin
- <<: *install_binstall
- cargo binstall -y cargo-machete
- cargo machete
when:

@@ -133,11 +134,12 @@
when: *slow_check_paths

check_diesel_schema:
image: willsquire/diesel-cli
image: *rust_image
environment:
CARGO_HOME: .cargo_home
DATABASE_URL: postgres://lemmy:password@database:5432/lemmy
commands:
- <<: *install_diesel_cli
- diesel migration run
- diesel print-schema --config-file=diesel.toml > tmp.schema
- diff tmp.schema crates/db_schema/src/schema.rs

@@ -197,8 +199,8 @@
PGHOST: database
PGDATABASE: lemmy
commands:
- cargo install diesel_cli
- export PATH="$CARGO_HOME/bin:$PATH"
# Install diesel_cli
- <<: *install_diesel_cli
# Run all migrations
- diesel migration run
# Dump schema to before.sqldump (PostgreSQL apt repo is used to prevent pg_dump version mismatch error)

@@ -276,7 +278,9 @@
publish_to_crates_io:
image: *rust_image
commands:
- cargo install cargo-workspaces
- <<: *install_binstall
# Install cargo-workspaces
- cargo binstall -y cargo-workspaces
- cp -r migrations crates/db_schema/
- cargo workspaces publish --token "$CARGO_API_TOKEN" --from-git --allow-dirty --no-verify --allow-branch "${CI_COMMIT_TAG}" --yes custom "${CI_COMMIT_TAG}"
secrets: [cargo_api_token]
Cargo.lock (generated, 2311 lines changed): diff too large to display.

Cargo.toml (71 lines changed):
@@ -1,5 +1,5 @@
[workspace.package]
version = "0.19.4-beta.7"
version = "0.19.4"
edition = "2021"
description = "A link aggregator for the fediverse"
license = "AGPL-3.0"

@@ -67,8 +67,8 @@ members = [

[workspace.lints.clippy]
cast_lossless = "deny"
complexity = "deny"
correctness = "deny"
complexity = { level = "deny", priority = -1 }
correctness = { level = "deny", priority = -1 }
dbg_macro = "deny"
explicit_into_iter_loop = "deny"
explicit_iter_loop = "deny"

@@ -79,44 +79,44 @@ inefficient_to_string = "deny"
items-after-statements = "deny"
manual_string_new = "deny"
needless_collect = "deny"
perf = "deny"
perf = { level = "deny", priority = -1 }
redundant_closure_for_method_calls = "deny"
style = "deny"
suspicious = "deny"
style = { level = "deny", priority = -1 }
suspicious = { level = "deny", priority = -1 }
uninlined_format_args = "allow"
unused_self = "deny"
unwrap_used = "deny"

[workspace.dependencies]
lemmy_api = { version = "=0.19.4-beta.7", path = "./crates/api" }
lemmy_api_crud = { version = "=0.19.4-beta.7", path = "./crates/api_crud" }
lemmy_apub = { version = "=0.19.4-beta.7", path = "./crates/apub" }
lemmy_utils = { version = "=0.19.4-beta.7", path = "./crates/utils", default-features = false }
lemmy_db_schema = { version = "=0.19.4-beta.7", path = "./crates/db_schema" }
lemmy_api_common = { version = "=0.19.4-beta.7", path = "./crates/api_common" }
lemmy_routes = { version = "=0.19.4-beta.7", path = "./crates/routes" }
lemmy_db_views = { version = "=0.19.4-beta.7", path = "./crates/db_views" }
lemmy_db_views_actor = { version = "=0.19.4-beta.7", path = "./crates/db_views_actor" }
lemmy_db_views_moderator = { version = "=0.19.4-beta.7", path = "./crates/db_views_moderator" }
lemmy_federate = { version = "=0.19.4-beta.7", path = "./crates/federate" }
lemmy_api = { version = "=0.19.4", path = "./crates/api" }
lemmy_api_crud = { version = "=0.19.4", path = "./crates/api_crud" }
lemmy_apub = { version = "=0.19.4", path = "./crates/apub" }
lemmy_utils = { version = "=0.19.4", path = "./crates/utils", default-features = false }
lemmy_db_schema = { version = "=0.19.4", path = "./crates/db_schema" }
lemmy_api_common = { version = "=0.19.4", path = "./crates/api_common" }
lemmy_routes = { version = "=0.19.4", path = "./crates/routes" }
lemmy_db_views = { version = "=0.19.4", path = "./crates/db_views" }
lemmy_db_views_actor = { version = "=0.19.4", path = "./crates/db_views_actor" }
lemmy_db_views_moderator = { version = "=0.19.4", path = "./crates/db_views_moderator" }
lemmy_federate = { version = "=0.19.4", path = "./crates/federate" }
activitypub_federation = { version = "0.5.6", default-features = false, features = [
"actix-web",
] }
diesel = "2.1.6"
diesel_migrations = "2.1.0"
diesel-async = "0.4.1"
serde = { version = "1.0.199", features = ["derive"] }
serde = { version = "1.0.203", features = ["derive"] }
serde_with = "3.8.1"
actix-web = { version = "4.5.1", default-features = false, features = [
actix-web = { version = "4.6.0", default-features = false, features = [
"macros",
"rustls",
"rustls-0_23",
"compress-brotli",
"compress-gzip",
"compress-zstd",
"cookies",
] }
tracing = "0.1.40"
tracing-actix-web = { version = "0.7.10", default-features = false }
tracing-actix-web = { version = "0.7.11", default-features = false }
tracing-error = "0.2.0"
tracing-log = "0.2.0"
tracing-subscriber = { version = "0.3.18", features = ["env-filter"] }

@@ -128,25 +128,25 @@ clokwerk = "0.4.0"
doku = { version = "0.21.1", features = ["url-2"] }
bcrypt = "0.15.1"
chrono = { version = "0.4.38", features = ["serde"], default-features = false }
serde_json = { version = "1.0.116", features = ["preserve_order"] }
serde_json = { version = "1.0.117", features = ["preserve_order"] }
base64 = "0.22.1"
uuid = { version = "1.8.0", features = ["serde", "v4"] }
async-trait = "0.1.80"
captcha = "0.0.9"
anyhow = { version = "1.0.82", features = [
anyhow = { version = "1.0.86", features = [
"backtrace",
] } # backtrace is on by default on nightly, but not stable rust
diesel_ltree = "0.3.1"
typed-builder = "0.18.2"
serial_test = "2.0.0"
tokio = { version = "1.37.0", features = ["full"] }
serial_test = "3.1.1"
tokio = { version = "1.38.0", features = ["full"] }
regex = "1.10.4"
once_cell = "1.19.0"
diesel-derive-newtype = "2.1.2"
diesel-derive-enum = { version = "2.1.0", features = ["postgres"] }
strum = "0.25.0"
strum_macros = "0.25.3"
itertools = "0.12.1"
strum = "0.26.2"
strum_macros = "0.26.4"
itertools = "0.13.0"
futures = "0.3.30"
http = "0.2.12"
rosetta-i18n = "0.1.3"

@@ -157,7 +157,7 @@ ts-rs = { version = "7.1.1", features = [
"chrono-impl",
"no-serde-warnings",
] }
rustls = { version = "0.23.5", features = ["ring"] }
rustls = { version = "0.23.9", features = ["ring"] }
futures-util = "0.3.30"
tokio-postgres = "0.7.10"
tokio-postgres-rustls = "0.12.0"

@@ -165,8 +165,9 @@ urlencoding = "2.1.3"
enum-map = "2.7"
moka = { version = "0.12.7", features = ["future"] }
i-love-jesus = { version = "0.1.0" }
clap = { version = "4.5.4", features = ["derive"] }
clap = { version = "4.5.6", features = ["derive", "env"] }
pretty_assertions = "1.4.0"
derive-new = "0.6.0"

[dependencies]
lemmy_api = { workspace = true }

@@ -194,17 +195,17 @@ clokwerk = { workspace = true }
serde_json = { workspace = true }
tracing-opentelemetry = { workspace = true, optional = true }
opentelemetry = { workspace = true, optional = true }
console-subscriber = { version = "0.1.10", optional = true }
console-subscriber = { version = "0.3.0", optional = true }
opentelemetry-otlp = { version = "0.12.0", optional = true }
pict-rs = { version = "0.5.13", optional = true }
pict-rs = { version = "0.5.15", optional = true }
tokio.workspace = true
actix-cors = "0.6.5"
actix-cors = "0.7.0"
futures-util = { workspace = true }
chrono = { workspace = true }
prometheus = { version = "0.13.3", features = ["process"] }
prometheus = { version = "0.13.4", features = ["process"] }
serial_test = { workspace = true }
clap = { workspace = true }
actix-web-prom = "0.7.0"
actix-web-prom = "0.8.0"

[dev-dependencies]
pretty_assertions = { workspace = true }
@@ -1,42 +0,0 @@
{
"root": true,
"env": {
"browser": true
},
"plugins": ["@typescript-eslint"],
"extends": ["eslint:recommended", "plugin:@typescript-eslint/recommended"],
"parser": "@typescript-eslint/parser",
"parserOptions": {
"project": "./tsconfig.json",
"warnOnUnsupportedTypeScriptVersion": false
},
"rules": {
"@typescript-eslint/ban-ts-comment": 0,
"@typescript-eslint/no-explicit-any": 0,
"@typescript-eslint/explicit-module-boundary-types": 0,
"@typescript-eslint/no-var-requires": 0,
"arrow-body-style": 0,
"curly": 0,
"eol-last": 0,
"eqeqeq": 0,
"func-style": 0,
"import/no-duplicates": 0,
"max-statements": 0,
"max-params": 0,
"new-cap": 0,
"no-console": 0,
"no-duplicate-imports": 0,
"no-extra-parens": 0,
"no-return-assign": 0,
"no-throw-literal": 0,
"no-trailing-spaces": 0,
"no-unused-expressions": 0,
"no-useless-constructor": 0,
"no-useless-escape": 0,
"no-var": 0,
"prefer-const": 0,
"prefer-rest-params": 0,
"quote-props": 0,
"unicorn/filename-case": 0
}
}
api_tests/eslint.config.mjs (new file, 50 lines):
@@ -0,0 +1,50 @@
import pluginJs from "@eslint/js";
import tseslint from "typescript-eslint";

export default [
pluginJs.configs.recommended,
...tseslint.configs.recommended,
{
languageOptions: {
parser: tseslint.parser,
},
},
// For some reason this has to be in its own block
{
ignores: ["putTypesInIndex.js", "dist/*", "docs/*", ".yalc", "jest.config.js"],
},
{
files: ["src/**/*"],
rules: {
"@typescript-eslint/no-empty-interface": 0,
"@typescript-eslint/no-empty-function": 0,
"@typescript-eslint/ban-ts-comment": 0,
"@typescript-eslint/no-explicit-any": 0,
"@typescript-eslint/explicit-module-boundary-types": 0,
"@typescript-eslint/no-var-requires": 0,
"arrow-body-style": 0,
curly: 0,
"eol-last": 0,
eqeqeq: 0,
"func-style": 0,
"import/no-duplicates": 0,
"max-statements": 0,
"max-params": 0,
"new-cap": 0,
"no-console": 0,
"no-duplicate-imports": 0,
"no-extra-parens": 0,
"no-return-assign": 0,
"no-throw-literal": 0,
"no-trailing-spaces": 0,
"no-unused-expressions": 0,
"no-useless-constructor": 0,
"no-useless-escape": 0,
"no-var": 0,
"prefer-const": 0,
"prefer-rest-params": 0,
"quote-props": 0,
"unicorn/filename-case": 0,
},
},
];
@@ -6,9 +6,9 @@
"repository": "https://github.com/LemmyNet/lemmy",
"author": "Dessalines",
"license": "AGPL-3.0",
"packageManager": "pnpm@9.1.1+sha256.9551e803dcb7a1839fdf5416153a844060c7bce013218ce823410532504ac10b",
"packageManager": "pnpm@9.3.0",
"scripts": {
"lint": "tsc --noEmit && eslint --report-unused-disable-directives --ext .js,.ts,.tsx src && prettier --check 'src/**/*.ts'",
"lint": "tsc --noEmit && eslint --report-unused-disable-directives && prettier --check 'src/**/*.ts'",
"fix": "prettier --write src && eslint --fix src",
"api-test": "jest -i follow.spec.ts && jest -i image.spec.ts && jest -i user.spec.ts && jest -i private_message.spec.ts && jest -i community.spec.ts && jest -i post.spec.ts && jest -i comment.spec.ts ",
"api-test-follow": "jest -i follow.spec.ts",

@@ -25,12 +25,13 @@
"@typescript-eslint/eslint-plugin": "^7.5.0",
"@typescript-eslint/parser": "^7.5.0",
"download-file-sync": "^1.0.4",
"eslint": "^8.57.0",
"eslint": "^9.0.0",
"eslint-plugin-prettier": "^5.1.3",
"jest": "^29.5.0",
"lemmy-js-client": "0.19.4-alpha.18",
"lemmy-js-client": "0.19.5-alpha.1",
"prettier": "^3.2.5",
"ts-jest": "^29.1.0",
"typescript": "^5.4.4"
"typescript": "^5.4.4",
"typescript-eslint": "^7.13.0"
}
}

File diff suppressed because it is too large.
@@ -3,19 +3,19 @@
# it is expected that this script is called by run-federation-test.sh script.
set -e

if [ -n "$LEMMY_LOG_LEVEL" ];
if [ -z "$LEMMY_LOG_LEVEL" ];
then
LEMMY_LOG_LEVEL=warn
LEMMY_LOG_LEVEL=info
fi

export RUST_BACKTRACE=1
#export RUST_LOG="warn,lemmy_server=$LEMMY_LOG_LEVEL,lemmy_federate=$LEMMY_LOG_LEVEL,lemmy_api=$LEMMY_LOG_LEVEL,lemmy_api_common=$LEMMY_LOG_LEVEL,lemmy_api_crud=$LEMMY_LOG_LEVEL,lemmy_apub=$LEMMY_LOG_LEVEL,lemmy_db_schema=$LEMMY_LOG_LEVEL,lemmy_db_views=$LEMMY_LOG_LEVEL,lemmy_db_views_actor=$LEMMY_LOG_LEVEL,lemmy_db_views_moderator=$LEMMY_LOG_LEVEL,lemmy_routes=$LEMMY_LOG_LEVEL,lemmy_utils=$LEMMY_LOG_LEVEL,lemmy_websocket=$LEMMY_LOG_LEVEL"
export RUST_LOG="warn,lemmy_server=$LEMMY_LOG_LEVEL,lemmy_federate=$LEMMY_LOG_LEVEL,lemmy_api=$LEMMY_LOG_LEVEL,lemmy_api_common=$LEMMY_LOG_LEVEL,lemmy_api_crud=$LEMMY_LOG_LEVEL,lemmy_apub=$LEMMY_LOG_LEVEL,lemmy_db_schema=$LEMMY_LOG_LEVEL,lemmy_db_views=$LEMMY_LOG_LEVEL,lemmy_db_views_actor=$LEMMY_LOG_LEVEL,lemmy_db_views_moderator=$LEMMY_LOG_LEVEL,lemmy_routes=$LEMMY_LOG_LEVEL,lemmy_utils=$LEMMY_LOG_LEVEL,lemmy_websocket=$LEMMY_LOG_LEVEL"

export LEMMY_TEST_FAST_FEDERATION=1 # by default, the persistent federation queue has delays in the scale of 30s-5min

# pictrs setup
if [ ! -f "api_tests/pict-rs" ]; then
curl "https://git.asonix.dog/asonix/pict-rs/releases/download/v0.5.13/pict-rs-linux-amd64" -o api_tests/pict-rs
curl "https://git.asonix.dog/asonix/pict-rs/releases/download/v0.5.16/pict-rs-linux-amd64" -o api_tests/pict-rs
chmod +x api_tests/pict-rs
fi
./api_tests/pict-rs \
@@ -37,8 +37,9 @@ import {
followCommunity,
blockCommunity,
delay,
saveUserSettings,
} from "./shared";
import { CommentView, CommunityView } from "lemmy-js-client";
import { CommentView, CommunityView, SaveUserSettings } from "lemmy-js-client";

let betaCommunity: CommunityView | undefined;
let postOnAlphaRes: PostResponse;

@@ -443,6 +444,59 @@ test("Reply to a comment from another instance, get notification", async () => {
assertCommentFederation(alphaReply, replyRes.comment_view);
});

test("Bot reply notifications are filtered when bots are hidden", async () => {
const newAlphaBot = await registerUser(alpha, alphaUrl);
let form: SaveUserSettings = {
bot_account: true,
};
await saveUserSettings(newAlphaBot, form);

const alphaCommunity = (
await resolveCommunity(alpha, "!main@lemmy-alpha:8541")
).community;

if (!alphaCommunity) {
throw "Missing alpha community";
}

await alpha.markAllAsRead();
form = {
show_bot_accounts: false,
};
await saveUserSettings(alpha, form);
const postOnAlphaRes = await createPost(alpha, alphaCommunity.community.id);

// Bot reply to alpha's post
let commentRes = await createComment(
newAlphaBot,
postOnAlphaRes.post_view.post.id,
);
expect(commentRes).toBeDefined();

let alphaUnreadCountRes = await getUnreadCount(alpha);
expect(alphaUnreadCountRes.replies).toBe(0);

let alphaUnreadRepliesRes = await getReplies(alpha, true);
expect(alphaUnreadRepliesRes.replies.length).toBe(0);

// This both restores the original state that may be expected by other tests
// implicitly and is used by the next steps to ensure replies are still
// returned when a user later decides to show bot accounts again.
form = {
show_bot_accounts: true,
};
await saveUserSettings(alpha, form);

alphaUnreadCountRes = await getUnreadCount(alpha);
expect(alphaUnreadCountRes.replies).toBe(1);

alphaUnreadRepliesRes = await getReplies(alpha, true);
expect(alphaUnreadRepliesRes.replies.length).toBe(1);
expect(alphaUnreadRepliesRes.replies[0].comment.id).toBe(
commentRes.comment_view.comment.id,
);
});

test("Mention beta from alpha", async () => {
if (!betaCommunity) throw Error("no community");
const postOnAlphaRes = await createPost(alpha, betaCommunity.community.id);
@@ -31,14 +31,14 @@ import {
waitUntil,
createPostWithThumbnail,
sampleImage,
sampleSite,
} from "./shared";
const downloadFileSync = require("download-file-sync");

beforeAll(setupLogins);

afterAll(async () => {
await unfollows();
await deleteAllImages(alpha);
await Promise.all([unfollows(), deleteAllImages(alpha)]);
});

test("Upload image and delete it", async () => {

@@ -160,6 +160,7 @@ test("Purge post, linked image removed", async () => {
upload.url,
);
expect(post.post_view.post.url).toBe(upload.url);
expect(post.post_view.image_details).toBeDefined();

// purge post
const purgeForm: PurgePost = {

@@ -173,54 +174,97 @@ test("Purge post, linked image removed", async () => {
expect(content2).toBe("");
});

test("Images in remote post are proxied if setting enabled", async () => {
let user = await registerUser(beta, betaUrl);
test("Images in remote image post are proxied if setting enabled", async () => {
let community = await createCommunity(gamma);

const upload_form: UploadImage = {
image: Buffer.from("test"),
};
const upload = await user.uploadImage(upload_form);
let post = await createPost(
let postRes = await createPost(
gamma,
community.community_view.community.id,
upload.url,
sampleImage,
`![](${sampleImage})`,
);
expect(post.post_view.post).toBeDefined();
const post = postRes.post_view.post;
expect(post).toBeDefined();

// Make sure it fetched the image details
expect(postRes.post_view.image_details).toBeDefined();

// remote image gets proxied after upload
expect(
post.post_view.post.thumbnail_url?.startsWith(
post.thumbnail_url?.startsWith(
"http://lemmy-gamma:8561/api/v3/image_proxy?url",
),
).toBeTruthy();
expect(
post.post_view.post.body?.startsWith(
"![](http://lemmy-gamma:8561/api/v3/image_proxy?url",
),
post.body?.startsWith("![](http://lemmy-gamma:8561/api/v3/image_proxy?url"),
).toBeTruthy();

let epsilonPost = await resolvePost(epsilon, post.post_view.post);
expect(epsilonPost.post).toBeDefined();
// Make sure that it ends with jpg, to be sure its an image
expect(post.thumbnail_url?.endsWith(".jpg")).toBeTruthy();

let epsilonPostRes = await resolvePost(epsilon, postRes.post_view.post);
expect(epsilonPostRes.post).toBeDefined();

// Fetch the post again, the metadata should be backgrounded now
// Wait for the metadata to get fetched, since this is backgrounded now
let epsilonPost2 = await waitUntil(
() => getPost(epsilon, epsilonPost.post!.post.id),
let epsilonPostRes2 = await waitUntil(
() => getPost(epsilon, epsilonPostRes.post!.post.id),
p => p.post_view.post.thumbnail_url != undefined,
);
const epsilonPost = epsilonPostRes2.post_view.post;

expect(
epsilonPost2.post_view.post.thumbnail_url?.startsWith(
epsilonPost.thumbnail_url?.startsWith(
"http://lemmy-epsilon:8581/api/v3/image_proxy?url",
),
).toBeTruthy();
expect(
epsilonPost2.post_view.post.body?.startsWith(
epsilonPost.body?.startsWith(
"![](http://lemmy-epsilon:8581/api/v3/image_proxy?url",
),
).toBeTruthy();

// Make sure that it ends with jpg, to be sure its an image
expect(epsilonPost.thumbnail_url?.endsWith(".jpg")).toBeTruthy();
});
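For context, the `waitUntil` helper imported from shared.ts polls a fetcher until a predicate passes, which is why the test above tolerates the now-backgrounded metadata fetch. A minimal sketch of a helper with the same call shape, written as an illustration only (the retry count and delay are assumptions, not the real values in shared.ts):

// Illustrative sketch: polls `fetcher` until `checker` accepts the result.
// Shape inferred from the calls above; the real helper lives in api_tests/src/shared.ts.
async function waitUntilSketch<T>(
  fetcher: () => Promise<T>,
  checker: (t: T) => boolean,
  retries = 10, // assumption, not the real default
  delayMs = 1000, // assumption, not the real default
): Promise<T> {
  let result = await fetcher();
  for (let i = 0; i < retries && !checker(result); i++) {
    await new Promise(resolve => setTimeout(resolve, delayMs));
    result = await fetcher();
  }
  return result;
}

// Usage mirroring the test above:
// const res = await waitUntilSketch(
//   () => getPost(epsilon, id),
//   p => p.post_view.post.thumbnail_url != undefined,
// );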

test("Thumbnail of remote image link is proxied if setting enabled", async () => {
let community = await createCommunity(gamma);
let postRes = await createPost(
gamma,
community.community_view.community.id,
// The sample site metadata thumbnail ends in png
sampleSite,
);
const post = postRes.post_view.post;
expect(post).toBeDefined();

// remote image gets proxied after upload
expect(
post.thumbnail_url?.startsWith(
"http://lemmy-gamma:8561/api/v3/image_proxy?url",
),
).toBeTruthy();

// Make sure that it ends with png, to be sure its an image
expect(post.thumbnail_url?.endsWith(".png")).toBeTruthy();

let epsilonPostRes = await resolvePost(epsilon, postRes.post_view.post);
expect(epsilonPostRes.post).toBeDefined();

let epsilonPostRes2 = await waitUntil(
() => getPost(epsilon, epsilonPostRes.post!.post.id),
p => p.post_view.post.thumbnail_url != undefined,
);
const epsilonPost = epsilonPostRes2.post_view.post;

expect(
epsilonPost.thumbnail_url?.startsWith(
"http://lemmy-epsilon:8581/api/v3/image_proxy?url",
),
).toBeTruthy();

// Make sure that it ends with png, to be sure its an image
expect(epsilonPost.thumbnail_url?.endsWith(".png")).toBeTruthy();
});

test("No image proxying if setting is disabled", async () => {
@@ -82,7 +82,7 @@ async function assertPostFederation(postOne: PostView, postTwo: PostView) {

test("Create a post", async () => {
// Setup some allowlists and blocklists
let editSiteForm: EditSite = {};
const editSiteForm: EditSite = {};

editSiteForm.allowed_instances = [];
editSiteForm.blocked_instances = ["lemmy-alpha"];

@@ -502,7 +502,7 @@ test("Enforce site ban federation for local user", async () => {
}
let newAlphaUserJwt = await loginUser(alpha, alphaUserPerson.name);
alphaUserHttp.setHeaders({
Authorization: "Bearer " + newAlphaUserJwt.jwt ?? "",
Authorization: "Bearer " + newAlphaUserJwt.jwt,
});
// alpha makes new post in beta community, it federates
let postRes2 = await createPost(alphaUserHttp, betaCommunity!.community.id);
@@ -83,21 +83,22 @@ export const fetchFunction = fetch;
export const imageFetchLimit = 50;
export const sampleImage =
"https://i.pinimg.com/originals/df/5f/5b/df5f5b1b174a2b4b6026cc6c8f9395c1.jpg";
export const sampleSite = "https://yahoo.com";

export let alphaUrl = "http://127.0.0.1:8541";
export let betaUrl = "http://127.0.0.1:8551";
export let gammaUrl = "http://127.0.0.1:8561";
export let deltaUrl = "http://127.0.0.1:8571";
export let epsilonUrl = "http://127.0.0.1:8581";
export const alphaUrl = "http://127.0.0.1:8541";
export const betaUrl = "http://127.0.0.1:8551";
export const gammaUrl = "http://127.0.0.1:8561";
export const deltaUrl = "http://127.0.0.1:8571";
export const epsilonUrl = "http://127.0.0.1:8581";

export let alpha = new LemmyHttp(alphaUrl, { fetchFunction });
export let alphaImage = new LemmyHttp(alphaUrl);
export let beta = new LemmyHttp(betaUrl, { fetchFunction });
export let gamma = new LemmyHttp(gammaUrl, { fetchFunction });
export let delta = new LemmyHttp(deltaUrl, { fetchFunction });
export let epsilon = new LemmyHttp(epsilonUrl, { fetchFunction });
export const alpha = new LemmyHttp(alphaUrl, { fetchFunction });
export const alphaImage = new LemmyHttp(alphaUrl);
export const beta = new LemmyHttp(betaUrl, { fetchFunction });
export const gamma = new LemmyHttp(gammaUrl, { fetchFunction });
export const delta = new LemmyHttp(deltaUrl, { fetchFunction });
export const epsilon = new LemmyHttp(epsilonUrl, { fetchFunction });

export let betaAllowedInstances = [
export const betaAllowedInstances = [
"lemmy-alpha",
"lemmy-gamma",
"lemmy-delta",

@@ -363,10 +364,13 @@ export async function getUnreadCount(
return api.getUnreadCount();
}

export async function getReplies(api: LemmyHttp): Promise<GetRepliesResponse> {
export async function getReplies(
api: LemmyHttp,
unread_only: boolean = false,
): Promise<GetRepliesResponse> {
let form: GetReplies = {
sort: "New",
unread_only: false,
unread_only,
};
return api.getReplies(form);
}

@@ -896,14 +900,17 @@ export async function deleteAllImages(api: LemmyHttp) {
limit: imageFetchLimit,
});
imagesRes.images;

for (const image of imagesRes.images) {
const form: DeleteImage = {
token: image.local_image.pictrs_delete_token,
filename: image.local_image.pictrs_alias,
};
await api.deleteImage(form);
}
Promise.all(
imagesRes.images
.map(image => {
const form: DeleteImage = {
token: image.local_image.pictrs_delete_token,
filename: image.local_image.pictrs_alias,
};
return form;
})
.map(form => api.deleteImage(form)),
);
}

export async function unfollows() {

@@ -914,21 +921,20 @@ export async function unfollows() {
unfollowRemotes(delta),
unfollowRemotes(epsilon),
]);
await purgeAllPosts(alpha);
await purgeAllPosts(beta);
await purgeAllPosts(gamma);
await purgeAllPosts(delta);
await purgeAllPosts(epsilon);
await Promise.all([
purgeAllPosts(alpha),
purgeAllPosts(beta),
purgeAllPosts(gamma),
purgeAllPosts(delta),
purgeAllPosts(epsilon),
]);
}

export async function purgeAllPosts(api: LemmyHttp) {
// The best way to get all federated items, is to find the posts
let res = await api.getPosts({ type_: "All", limit: 50 });
await Promise.all(
res.posts
.map(p => p.post.id)
// Unique
.filter((v, i, a) => a.indexOf(v) == i)
Array.from(new Set(res.posts.map(p => p.post.id)))
.map(post_id => api.purgePost({ post_id }))
// Ignore errors
.map(p => p.catch(e => e)),
@@ -21,6 +21,7 @@ import {
fetchFunction,
alphaImage,
unfollows,
saveUserSettingsBio,
} from "./shared";
import { LemmyHttp, SaveUserSettings, UploadImage } from "lemmy-js-client";
import { GetPosts } from "lemmy-js-client/dist/types/GetPosts";

@@ -186,10 +187,26 @@ test("Set a new avatar, old avatar is deleted", async () => {
expect(upload2.url).toBeDefined();

let form2 = {
avatar: upload1.url,
avatar: upload2.url,
};
await saveUserSettings(alpha, form2);
// make sure only the new avatar is kept
const listMediaRes2 = await alphaImage.listMedia();
expect(listMediaRes2.images.length).toBe(1);

// Upload that same form2 avatar, make sure it isn't replaced / deleted
await saveUserSettings(alpha, form2);
// make sure only the new avatar is kept
const listMediaRes3 = await alphaImage.listMedia();
expect(listMediaRes3.images.length).toBe(1);

// Now try to save a user settings, with the icon missing,
// and make sure it doesn't clear the data, or delete the image
await saveUserSettingsBio(alpha);
let site = await getSite(alpha);
expect(site.my_user?.local_user_view.person.avatar).toBe(upload2.url);

// make sure only the new avatar is kept
const listMediaRes4 = await alphaImage.listMedia();
expect(listMediaRes4.images.length).toBe(1);
});
@@ -47,7 +47,8 @@
#
# To be removed in 0.20
cache_external_link_previews: true
# Specifies how to handle remote images, so that users don't have to connect directly to remote servers.
# Specifies how to handle remote images, so that users don't have to connect directly to remote
# servers.
image_mode:
# Leave images unchanged, don't generate any local thumbnails for post urls. Instead the
# Opengraph image is directly returned as thumbnail

@@ -64,10 +65,11 @@

# or

# If enabled, all images from remote domains are rewritten to pass through `/api/v3/image_proxy`,
# including embedded images in markdown. Images are stored temporarily in pict-rs for caching.
# This improves privacy as users don't expose their IP to untrusted servers, and decreases load
# on other servers. However it increases bandwidth use for the local server.
# If enabled, all images from remote domains are rewritten to pass through
# `/api/v3/image_proxy`, including embedded images in markdown. Images are stored temporarily
# in pict-rs for caching. This improves privacy as users don't expose their IP to untrusted
# servers, and decreases load on other servers. However it increases bandwidth use for the
# local server.
#
# Requires pict-rs 0.5
"ProxyAllImages"
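The ProxyAllImages mode described above is what the api_tests assert against: remote image URLs are rewritten to the local `/api/v3/image_proxy?url=...` endpoint. A hedged sketch of how a test could build the URL it expects after rewriting; whether the original URL is percent-encoded in the query string is an assumption here, since the tests only check the URL prefix:

// Illustrative only: construct the proxied form of a remote image URL.
// `localInstance` would be e.g. "http://lemmy-gamma:8561" in the federation tests.
function expectedProxiedUrl(localInstance: string, remoteImage: string): string {
  return `${localInstance}/api/v3/image_proxy?url=${encodeURIComponent(remoteImage)}`;
}

// expectedProxiedUrl("http://lemmy-gamma:8561", sampleImage) starts with
// "http://lemmy-gamma:8561/api/v3/image_proxy?url", matching the test assertions above.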
@@ -33,7 +33,7 @@ anyhow = { workspace = true }
tracing = { workspace = true }
chrono = { workspace = true }
url = { workspace = true }
wav = "1.0.0"
hound = "3.5.1"
sitemap-rs = "0.2.1"
totp-rs = { version = "5.5.1", features = ["gen_secret", "otpauth"] }
actix-web-httpauth = "0.8.1"
@@ -43,7 +43,10 @@ pub async fn ban_from_community(
&mut context.pool(),
)
.await?;
is_valid_body_field(&data.reason, false)?;

if let Some(reason) = &data.reason {
is_valid_body_field(reason, false)?;
}

let community_user_ban_form = CommunityPersonBanForm {
community_id: data.community_id,
@@ -49,27 +49,33 @@ pub(crate) fn captcha_as_wav_base64(captcha: &Captcha) -> LemmyResult<String> {

// Decode each wav file, concatenate the samples
let mut concat_samples: Vec<i16> = Vec::new();
let mut any_header: Option<wav::Header> = None;
let mut any_header: Option<hound::WavSpec> = None;
for letter in letters {
let mut cursor = Cursor::new(letter.unwrap_or_default());
let (header, samples) = wav::read(&mut cursor)?;
any_header = Some(header);
if let Some(samples16) = samples.as_sixteen() {
concat_samples.extend(samples16);
} else {
Err(LemmyErrorType::CouldntCreateAudioCaptcha)?
}
let reader = hound::WavReader::new(&mut cursor)?;
any_header = Some(reader.spec());
let samples16 = reader
.into_samples::<i16>()
.collect::<Result<Vec<_>, _>>()
.with_lemmy_type(LemmyErrorType::CouldntCreateAudioCaptcha)?;
concat_samples.extend(samples16);
}

// Encode the concatenated result as a wav file
let mut output_buffer = Cursor::new(vec![]);
if let Some(header) = any_header {
wav::write(
header,
&wav::BitDepth::Sixteen(concat_samples),
&mut output_buffer,
)
.with_lemmy_type(LemmyErrorType::CouldntCreateAudioCaptcha)?;
let mut writer = hound::WavWriter::new(&mut output_buffer, header)
.with_lemmy_type(LemmyErrorType::CouldntCreateAudioCaptcha)?;
let mut writer16 = writer.get_i16_writer(concat_samples.len() as u32);
for sample in concat_samples {
writer16.write_sample(sample);
}
writer16
.flush()
.with_lemmy_type(LemmyErrorType::CouldntCreateAudioCaptcha)?;
writer
.finalize()
.with_lemmy_type(LemmyErrorType::CouldntCreateAudioCaptcha)?;

Ok(base64.encode(output_buffer.into_inner()))
} else {
@@ -31,7 +31,9 @@ pub async fn ban_from_site(
// Make sure user is an admin
is_admin(&local_user_view)?;

is_valid_body_field(&data.reason, false)?;
if let Some(reason) = &data.reason {
is_valid_body_field(reason, false)?;
}

let expires = check_expire_time(data.expires)?;
@@ -19,7 +19,7 @@ pub async fn change_password_after_reset(
) -> LemmyResult<Json<SuccessResponse>> {
// Fetch the user_id from the token
let token = data.token.clone();
let local_user_id = PasswordResetRequest::read_from_token(&mut context.pool(), &token)
let local_user_id = PasswordResetRequest::read_and_delete(&mut context.pool(), &token)
.await?
.ok_or(LemmyErrorType::TokenNotFound)?
.local_user_id;
@@ -1,11 +1,7 @@
use crate::{build_totp_2fa, generate_totp_2fa_secret};
use activitypub_federation::config::Data;
use actix_web::web::Json;
use lemmy_api_common::{
context::LemmyContext,
person::GenerateTotpSecretResponse,
sensitive::Sensitive,
};
use lemmy_api_common::{context::LemmyContext, person::GenerateTotpSecretResponse};
use lemmy_db_schema::source::local_user::{LocalUser, LocalUserUpdateForm};
use lemmy_db_views::structs::{LocalUserView, SiteView};
use lemmy_utils::error::{LemmyErrorType, LemmyResult};

@@ -41,6 +37,6 @@ pub async fn generate_totp_secret(
.await?;

Ok(Json(GenerateTotpSecretResponse {
totp_secret_url: Sensitive::new(secret_url),
totp_secret_url: secret_url.into(),
}))
}
@@ -11,9 +11,12 @@ pub async fn unread_count(
) -> LemmyResult<Json<GetUnreadCountResponse>> {
let person_id = local_user_view.person.id;

let replies = CommentReplyView::get_unread_replies(&mut context.pool(), person_id).await?;
let replies =
CommentReplyView::get_unread_replies(&mut context.pool(), &local_user_view.local_user).await?;

let mentions = PersonMentionView::get_unread_mentions(&mut context.pool(), person_id).await?;
let mentions =
PersonMentionView::get_unread_mentions(&mut context.pool(), &local_user_view.local_user)
.await?;

let private_messages =
PrivateMessageView::get_unread_messages(&mut context.pool(), person_id).await?;
@@ -6,7 +6,6 @@ use lemmy_api_common::{
utils::send_password_reset_email,
SuccessResponse,
};
use lemmy_db_schema::source::password_reset_request::PasswordResetRequest;
use lemmy_db_views::structs::{LocalUserView, SiteView};
use lemmy_utils::error::{LemmyErrorType, LemmyResult};

@@ -21,15 +20,6 @@
.await?
.ok_or(LemmyErrorType::IncorrectLogin)?;

// Check for too many attempts (to limit potential abuse)
let recent_resets_count = PasswordResetRequest::get_recent_password_resets_count(
&mut context.pool(),
local_user_view.local_user.id,
)
.await?;
if recent_resets_count >= 3 {
Err(LemmyErrorType::PasswordResetLimitReached)?
}
let site_view = SiteView::read_local(&mut context.pool())
.await?
.ok_or(LemmyErrorType::LocalSiteNotSetup)?;
@@ -21,13 +21,14 @@ use lemmy_db_schema::{
person::{Person, PersonUpdateForm},
},
traits::Crud,
utils::diesel_option_overwrite,
utils::{diesel_string_update, diesel_url_update},
};
use lemmy_db_views::structs::{LocalUserView, SiteView};
use lemmy_utils::{
error::{LemmyErrorType, LemmyResult},
utils::validation::{is_valid_bio_field, is_valid_display_name, is_valid_matrix_id},
};
use std::ops::Deref;

#[tracing::instrument(skip(context))]
pub async fn save_user_settings(

@@ -41,23 +42,29 @@ pub async fn save_user_settings(

let slur_regex = local_site_to_slur_regex(&site_view.local_site);
let url_blocklist = get_url_blocklist(&context).await?;
let bio = diesel_option_overwrite(
process_markdown_opt(&data.bio, &slur_regex, &url_blocklist, &context).await?,
let bio = diesel_string_update(
process_markdown_opt(&data.bio, &slur_regex, &url_blocklist, &context)
.await?
.as_deref(),
);
replace_image(&data.avatar, &local_user_view.person.avatar, &context).await?;
replace_image(&data.banner, &local_user_view.person.banner, &context).await?;

let avatar = proxy_image_link_opt_api(&data.avatar, &context).await?;
let banner = proxy_image_link_opt_api(&data.banner, &context).await?;
let display_name = diesel_option_overwrite(data.display_name.clone());
let matrix_user_id = diesel_option_overwrite(data.matrix_user_id.clone());
let avatar = diesel_url_update(data.avatar.as_deref())?;
replace_image(&avatar, &local_user_view.person.avatar, &context).await?;
let avatar = proxy_image_link_opt_api(avatar, &context).await?;

let banner = diesel_url_update(data.banner.as_deref())?;
replace_image(&banner, &local_user_view.person.banner, &context).await?;
let banner = proxy_image_link_opt_api(banner, &context).await?;

let display_name = diesel_string_update(data.display_name.as_deref());
let matrix_user_id = diesel_string_update(data.matrix_user_id.as_deref());
let email_deref = data.email.as_deref().map(str::to_lowercase);
let email = diesel_option_overwrite(email_deref.clone());
let email = diesel_string_update(email_deref.as_deref());

if let Some(Some(email)) = &email {
let previous_email = local_user_view.local_user.email.clone().unwrap_or_default();
// if email was changed, check that it is not taken and send verification mail
if &previous_email != email {
if previous_email.deref() != email {
if LocalUser::is_email_taken(&mut context.pool(), email).await? {
return Err(LemmyErrorType::EmailAlreadyExists)?;
}

@@ -71,7 +78,8 @@ pub async fn save_user_settings(
}
}

// When the site requires email, make sure email is not Some(None). IE, an overwrite to a None value
// When the site requires email, make sure email is not Some(None). IE, an overwrite to a None
// value
if let Some(email) = &email {
if email.is_none() && site_view.local_site.require_email_verification {
Err(LemmyErrorType::EmailRequired)?
@@ -4,14 +4,19 @@ use lemmy_api_common::{
post::{GetSiteMetadata, GetSiteMetadataResponse},
request::fetch_link_metadata,
};
use lemmy_utils::error::LemmyResult;
use lemmy_utils::{
error::{LemmyErrorExt, LemmyResult},
LemmyErrorType,
};
use url::Url;

#[tracing::instrument(skip(context))]
pub async fn get_link_metadata(
data: Query<GetSiteMetadata>,
context: Data<LemmyContext>,
) -> LemmyResult<Json<GetSiteMetadataResponse>> {
let metadata = fetch_link_metadata(&data.url, &context).await?;
let url = Url::parse(&data.url).with_lemmy_type(LemmyErrorType::InvalidUrl)?;
let metadata = fetch_link_metadata(&url, &context).await?;

Ok(Json(GetSiteMetadataResponse { metadata }))
}
@@ -68,7 +68,6 @@ pub async fn like_post(
.with_lemmy_type(LemmyErrorType::CouldntLikePost)?;
}

// Mark the post as read
mark_post_as_read(person_id, post_id, &mut context.pool()).await?;

let community = Community::read(&mut context.pool(), post.community_id)

@@ -38,7 +38,6 @@ pub async fn save_post(
.await?
.ok_or(LemmyErrorType::CouldntFindPost)?;

// Mark the post as read
mark_post_as_read(person_id, post_id, &mut context.pool()).await?;

Ok(Json(PostResponse { post_view }))
@@ -10,7 +10,7 @@ use lemmy_db_schema::{
registration_application::{RegistrationApplication, RegistrationApplicationUpdateForm},
},
traits::Crud,
utils::diesel_option_overwrite,
utils::diesel_string_update,
};
use lemmy_db_views::structs::{LocalUserView, RegistrationApplicationView};
use lemmy_utils::{error::LemmyResult, LemmyErrorType};

@@ -26,7 +26,7 @@ pub async fn approve_registration_application(
is_admin(&local_user_view)?;

// Update the registration with reason, admin_id
let deny_reason = diesel_option_overwrite(data.deny_reason.clone());
let deny_reason = diesel_string_update(data.deny_reason.as_deref());
let app_form = RegistrationApplicationUpdateForm {
admin_id: Some(Some(local_user_view.person.id)),
deny_reason,
@@ -66,13 +66,13 @@ actix-web = { workspace = true, optional = true }
enum-map = { workspace = true }
urlencoding = { workspace = true }
mime = { version = "0.3.17", optional = true }
webpage = { version = "1.6", default-features = false, features = [
webpage = { version = "2.0", default-features = false, features = [
"serde",
], optional = true }
encoding_rs = { version = "0.8.34", optional = true }
jsonwebtoken = { version = "8.3.0", optional = true }
jsonwebtoken = { version = "9.3.0", optional = true }
# necessary for wasmt compilation
getrandom = { version = "0.2.14", features = ["js"] }
getrandom = { version = "0.2.15", features = ["js"] }

[package.metadata.cargo-machete]
ignored = ["getrandom"]
@@ -121,7 +121,8 @@ pub async fn send_local_notifs(
if let Ok(Some(mention_user_view)) = user_view {
// TODO
// At some point, make it so you can't tag the parent creator either
// Potential duplication of notifications, one for reply and the other for mention, is handled below by checking recipient ids
// Potential duplication of notifications, one for reply and the other for mention, is handled
// below by checking recipient ids
recipient_ids.push(mention_user_view.local_user.id);

let user_mention_form = PersonMentionInsertForm {
@ -1,9 +1,10 @@
|
|||
use crate::{context::LemmyContext, sensitive::Sensitive};
|
||||
use crate::context::LemmyContext;
|
||||
use actix_web::{http::header::USER_AGENT, HttpRequest};
|
||||
use chrono::Utc;
|
||||
use jsonwebtoken::{decode, encode, DecodingKey, EncodingKey, Header, Validation};
|
||||
use lemmy_db_schema::{
|
||||
newtypes::LocalUserId,
|
||||
sensitive::SensitiveString,
|
||||
source::login_token::{LoginToken, LoginTokenCreateForm},
|
||||
};
|
||||
use lemmy_utils::error::{LemmyErrorExt, LemmyErrorType, LemmyResult};
|
||||
|
@ -40,7 +41,7 @@ impl Claims {
|
|||
user_id: LocalUserId,
|
||||
req: HttpRequest,
|
||||
context: &LemmyContext,
|
||||
) -> LemmyResult<Sensitive<String>> {
|
||||
) -> LemmyResult<SensitiveString> {
|
||||
let hostname = context.settings().hostname.clone();
|
||||
let my_claims = Claims {
|
||||
sub: user_id.0.to_string(),
|
||||
|
@ -50,7 +51,7 @@ impl Claims {
|
|||
|
||||
let secret = &context.secret().jwt_secret;
|
||||
let key = EncodingKey::from_secret(secret.as_ref());
|
||||
let token = encode(&Header::default(), &my_claims, &key)?;
|
||||
let token: SensitiveString = encode(&Header::default(), &my_claims, &key)?.into();
|
||||
let ip = req
|
||||
.connection_info()
|
||||
.realip_remote_addr()
|
||||
|
@ -67,7 +68,7 @@ impl Claims {
|
|||
user_agent,
|
||||
};
|
||||
LoginToken::create(&mut context.pool(), form).await?;
|
||||
Ok(Sensitive::new(token))
|
||||
Ok(token)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -111,11 +112,7 @@ mod tests {
|
|||
.await
|
||||
.unwrap();
|
||||
|
||||
let new_person = PersonInsertForm::builder()
|
||||
.name("Gerry9812".into())
|
||||
.public_key("pubkey".to_string())
|
||||
.instance_id(inserted_instance.id)
|
||||
.build();
|
||||
let new_person = PersonInsertForm::test_form(inserted_instance.id, "Gerry9812");
|
||||
|
||||
let inserted_person = Person::create(pool, &new_person).await.unwrap();
|
||||
|
||||
|
|
|
@@ -64,7 +64,7 @@ impl LemmyContext {
let client = ClientBuilder::new(client).build();
let secret = Secret {
id: 0,
jwt_secret: String::new(),
jwt_secret: String::new().into(),
};

let rate_limit_cell = RateLimitCell::with_test_config();
@@ -14,7 +14,6 @@ pub mod private_message;
pub mod request;
#[cfg(feature = "full")]
pub mod send_activity;
pub mod sensitive;
pub mod site;
#[cfg(feature = "full")]
pub mod utils;
@ -1,6 +1,6 @@
|
|||
use crate::sensitive::Sensitive;
|
||||
use lemmy_db_schema::{
|
||||
newtypes::{CommentReplyId, CommunityId, LanguageId, PersonId, PersonMentionId},
|
||||
sensitive::SensitiveString,
|
||||
source::site::Site,
|
||||
CommentSortType,
|
||||
ListingType,
|
||||
|
@ -25,8 +25,8 @@ use ts_rs::TS;
|
|||
#[cfg_attr(feature = "full", ts(export))]
|
||||
/// Logging into lemmy.
|
||||
pub struct Login {
|
||||
pub username_or_email: Sensitive<String>,
|
||||
pub password: Sensitive<String>,
|
||||
pub username_or_email: SensitiveString,
|
||||
pub password: SensitiveString,
|
||||
/// May be required, if totp is enabled for their account.
|
||||
pub totp_2fa_token: Option<String>,
|
||||
}
|
||||
|
@ -38,11 +38,11 @@ pub struct Login {
|
|||
/// Register / Sign up to lemmy.
|
||||
pub struct Register {
|
||||
pub username: String,
|
||||
pub password: Sensitive<String>,
|
||||
pub password_verify: Sensitive<String>,
|
||||
pub password: SensitiveString,
|
||||
pub password_verify: SensitiveString,
|
||||
pub show_nsfw: Option<bool>,
|
||||
/// email is mandatory if email verification is enabled on the server
|
||||
pub email: Option<Sensitive<String>>,
|
||||
pub email: Option<SensitiveString>,
|
||||
/// The UUID of the captcha item.
|
||||
pub captcha_uuid: Option<String>,
|
||||
/// Your captcha answer.
|
||||
|
@ -99,7 +99,7 @@ pub struct SaveUserSettings {
|
|||
/// Your display name, which can contain strange characters, and does not need to be unique.
|
||||
pub display_name: Option<String>,
|
||||
/// Your email.
|
||||
pub email: Option<Sensitive<String>>,
|
||||
pub email: Option<SensitiveString>,
|
||||
/// Your bio / info, in markdown.
|
||||
pub bio: Option<String>,
|
||||
/// Your matrix user id. Ex: @my_user:matrix.org
|
||||
|
@ -124,7 +124,8 @@ pub struct SaveUserSettings {
|
|||
pub post_listing_mode: Option<PostListingMode>,
|
||||
/// Whether to allow keyboard navigation (for browsing and interacting with posts and comments).
|
||||
pub enable_keyboard_navigation: Option<bool>,
|
||||
/// Whether user avatars or inline images in the UI that are gifs should be allowed to play or should be paused
|
||||
/// Whether user avatars or inline images in the UI that are gifs should be allowed to play or
|
||||
/// should be paused
|
||||
pub enable_animated_images: Option<bool>,
|
||||
/// Whether to auto-collapse bot comments.
|
||||
pub collapse_bot_comments: Option<bool>,
|
||||
|
@ -140,9 +141,9 @@ pub struct SaveUserSettings {
|
|||
#[cfg_attr(feature = "full", ts(export))]
|
||||
/// Changes your account password.
|
||||
pub struct ChangePassword {
|
||||
pub new_password: Sensitive<String>,
|
||||
pub new_password_verify: Sensitive<String>,
|
||||
pub old_password: Sensitive<String>,
|
||||
pub new_password: SensitiveString,
|
||||
pub new_password_verify: SensitiveString,
|
||||
pub old_password: SensitiveString,
|
||||
}
|
||||
|
||||
#[skip_serializing_none]
|
||||
|
@ -151,8 +152,9 @@ pub struct ChangePassword {
|
|||
#[cfg_attr(feature = "full", ts(export))]
|
||||
/// A response for your login.
|
||||
pub struct LoginResponse {
|
||||
/// This is None in response to `Register` if email verification is enabled, or the server requires registration applications.
|
||||
pub jwt: Option<Sensitive<String>>,
|
||||
/// This is None in response to `Register` if email verification is enabled, or the server
|
||||
/// requires registration applications.
|
||||
pub jwt: Option<SensitiveString>,
|
||||
/// If registration applications are required, this will return true for a signup response.
|
||||
pub registration_created: bool,
|
||||
/// If email verifications are required, this will return true for a signup response.
|
||||
|
@ -340,7 +342,7 @@ pub struct CommentReplyResponse {
|
|||
#[cfg_attr(feature = "full", ts(export))]
|
||||
/// Delete your account.
|
||||
pub struct DeleteAccount {
|
||||
pub password: Sensitive<String>,
|
||||
pub password: SensitiveString,
|
||||
pub delete_content: bool,
|
||||
}
|
||||
|
||||
|
@ -349,7 +351,7 @@ pub struct DeleteAccount {
|
|||
#[cfg_attr(feature = "full", ts(export))]
|
||||
/// Reset your password via email.
|
||||
pub struct PasswordReset {
|
||||
pub email: Sensitive<String>,
|
||||
pub email: SensitiveString,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize, Clone, Default, PartialEq, Eq, Hash)]
|
||||
|
@ -357,9 +359,9 @@ pub struct PasswordReset {
|
|||
#[cfg_attr(feature = "full", ts(export))]
|
||||
/// Change your password after receiving a reset request.
|
||||
pub struct PasswordChangeAfterReset {
|
||||
pub token: Sensitive<String>,
|
||||
pub password: Sensitive<String>,
|
||||
pub password_verify: Sensitive<String>,
|
||||
pub token: SensitiveString,
|
||||
pub password: SensitiveString,
|
||||
pub password_verify: SensitiveString,
|
||||
}
|
||||
|
||||
#[skip_serializing_none]
|
||||
|
@ -405,7 +407,7 @@ pub struct VerifyEmail {
|
|||
#[cfg_attr(feature = "full", derive(TS))]
|
||||
#[cfg_attr(feature = "full", ts(export))]
|
||||
pub struct GenerateTotpSecretResponse {
|
||||
pub totp_secret_url: Sensitive<String>,
|
||||
pub totp_secret_url: SensitiveString,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq, Hash)]
|
||||
|
|
|
@@ -10,7 +10,6 @@ use serde::{Deserialize, Serialize};
use serde_with::skip_serializing_none;
#[cfg(feature = "full")]
use ts_rs::TS;
use url::Url;

#[skip_serializing_none]
#[derive(Debug, Serialize, Deserialize, Clone, Default, PartialEq, Eq, Hash)]

@@ -20,8 +19,7 @@ use url::Url;
pub struct CreatePost {
pub name: String,
pub community_id: CommunityId,
#[cfg_attr(feature = "full", ts(type = "string"))]
pub url: Option<Url>,
pub url: Option<String>,
/// An optional body for the post in markdown.
pub body: Option<String>,
/// An optional alt_text, usable for image posts.

@@ -30,9 +28,8 @@ pub struct CreatePost {
pub honeypot: Option<String>,
pub nsfw: Option<bool>,
pub language_id: Option<LanguageId>,
#[cfg_attr(feature = "full", ts(type = "string"))]
/// Instead of fetching a thumbnail, use a custom one.
pub custom_thumbnail: Option<Url>,
pub custom_thumbnail: Option<String>,
}

#[derive(Debug, Serialize, Deserialize, Clone)]

@@ -114,17 +111,15 @@ pub struct CreatePostLike {
pub struct EditPost {
pub post_id: PostId,
pub name: Option<String>,
#[cfg_attr(feature = "full", ts(type = "string"))]
pub url: Option<Url>,
pub url: Option<String>,
/// An optional body for the post in markdown.
pub body: Option<String>,
/// An optional alt_text, usable for image posts.
pub alt_text: Option<String>,
pub nsfw: Option<bool>,
pub language_id: Option<LanguageId>,
#[cfg_attr(feature = "full", ts(type = "string"))]
/// Instead of fetching a thumbnail, use a custom one.
pub custom_thumbnail: Option<Url>,
pub custom_thumbnail: Option<String>,
}

#[derive(Debug, Serialize, Deserialize, Clone, Copy, Default, PartialEq, Eq, Hash)]

@@ -249,8 +244,7 @@ pub struct ListPostReportsResponse {
#[cfg_attr(feature = "full", ts(export))]
/// Get metadata for a given site.
pub struct GetSiteMetadata {
#[cfg_attr(feature = "full", ts(type = "string"))]
pub url: Url,
pub url: String,
}

#[derive(Debug, Serialize, Deserialize, Clone)]
@ -93,35 +93,21 @@ pub async fn generate_post_link_metadata(
let allow_sensitive = local_site_opt_to_sensitive(&local_site);
let allow_generate_thumbnail = allow_sensitive || !post.nsfw;

let thumbnail_url = if is_image_post {
if allow_generate_thumbnail {
match post.url {
Some(url) => generate_pictrs_thumbnail(&url, &context)
.await
.ok()
.map(Into::into),
None => None,
}
} else {
None
}
let image_url = if is_image_post {
post.url
} else {
// Use custom thumbnail if available and its not an image post
if let Some(custom_thumbnail) = custom_thumbnail {
proxy_image_link(custom_thumbnail, &context).await.ok()
} else if allow_generate_thumbnail {
match metadata.opengraph_data.image {
Some(url) => generate_pictrs_thumbnail(&url, &context)
.await
.ok()
.map(Into::into),
None => None,
}
}
// Otherwise use opengraph preview image directly
else {
metadata.opengraph_data.image
}
metadata.opengraph_data.image.clone()
};

let thumbnail_url = if let (false, Some(url)) = (is_image_post, custom_thumbnail) {
proxy_image_link(url, &context).await.ok()
} else if let (true, Some(url)) = (allow_generate_thumbnail, image_url) {
generate_pictrs_thumbnail(&url, &context)
.await
.ok()
.map(Into::into)
} else {
metadata.opengraph_data.image.clone()
};

let form = PostUpdateForm {

@ -212,9 +198,12 @@ impl PictrsFile {
}
}

/// Stores extra details about a Pictrs image.
#[derive(Deserialize, Serialize, Debug)]
pub struct PictrsFileDetails {
/// In pixels
pub width: u16,
/// In pixels
pub height: u16,
pub content_type: String,
pub created_at: DateTime<Utc>,

@ -315,34 +304,36 @@ async fn generate_pictrs_thumbnail(image_url: &Url, context: &LemmyContext) -> L
encode(image_url.as_str())
);

let res: PictrsResponse = context
let res = context
.client()
.get(&fetch_url)
.timeout(REQWEST_TIMEOUT)
.send()
.await?
.json()
.json::<PictrsResponse>()
.await?;

if let Some(image) = res.files.unwrap_or_default().first() {
let form = LocalImageForm {
// This is none because its an internal request.
// IE, a local user shouldn't get to delete the thumbnails for their link posts
local_user_id: None,
pictrs_alias: image.file.clone(),
pictrs_delete_token: image.delete_token.clone(),
};
let protocol_and_hostname = context.settings().get_protocol_and_hostname();
let thumbnail_url = image.thumbnail_url(&protocol_and_hostname)?;
let files = res.files.unwrap_or_default();

// Also store the details for the image
let details_form = image.details.build_image_details_form(&thumbnail_url);
LocalImage::create(&mut context.pool(), &form, &details_form).await?;
let image = files
.first()
.ok_or(LemmyErrorType::PictrsResponseError(res.msg))?;

Ok(thumbnail_url)
} else {
Err(LemmyErrorType::PictrsResponseError(res.msg))?
}
let form = LocalImageForm {
// This is none because its an internal request.
// IE, a local user shouldn't get to delete the thumbnails for their link posts
local_user_id: None,
pictrs_alias: image.file.clone(),
pictrs_delete_token: image.delete_token.clone(),
};
let protocol_and_hostname = context.settings().get_protocol_and_hostname();
let thumbnail_url = image.thumbnail_url(&protocol_and_hostname)?;

// Also store the details for the image
let details_form = image.details.build_image_details_form(&thumbnail_url);
LocalImage::create(&mut context.pool(), &form, &details_form).await?;

Ok(thumbnail_url)
}

/// Fetches the image details for pictrs proxied images

@ -353,25 +344,24 @@ pub async fn fetch_pictrs_proxied_image_details(
image_url: &Url,
context: &LemmyContext,
) -> LemmyResult<PictrsFileDetails> {
let pictrs_config = context.settings().pictrs_config()?;
let pictrs_url = context.settings().pictrs_config()?.url;
let encoded_image_url = encode(image_url.as_str());

// Pictrs needs you to fetch the proxied image before you can fetch the details
let proxy_url = format!(
"{}image/original?proxy={}",
context.settings().pictrs_config()?.url,
encode(image_url.as_str())
);
let proxy_url = format!("{pictrs_url}image/original?proxy={encoded_image_url}");

let res = context.client().get(&proxy_url).send().await?.status();
let res = context
.client()
.get(&proxy_url)
.timeout(REQWEST_TIMEOUT)
.send()
.await?
.status();
if !res.is_success() {
Err(LemmyErrorType::NotAnImageType)?
}

let details_url = format!(
"{}image/details/original?proxy={}",
pictrs_config.url,
encode(image_url.as_str())
);
let details_url = format!("{pictrs_url}image/details/original?proxy={encoded_image_url}");

let res = context
.client()

@ -402,16 +392,19 @@ async fn is_image_content_type(client: &ClientWithMiddleware, url: &Url) -> Lemm
}
}

/// When adding a new avatar or similar image, delete the old one.
/// When adding a new avatar, banner or similar image, delete the old one.
pub async fn replace_image(
new_image: &Option<String>,
new_image: &Option<Option<DbUrl>>,
old_image: &Option<DbUrl>,
context: &Data<LemmyContext>,
) -> LemmyResult<()> {
if new_image.is_some() {
// Ignore errors because image may be stored externally.
if let Some(avatar) = &old_image {
let image = LocalImage::delete_by_url(&mut context.pool(), avatar)
if let (Some(Some(new_image)), Some(old_image)) = (new_image, old_image) {
// Note: Oftentimes front ends will include the current image in the form.
// In this case, deleting `old_image` would also be deletion of `new_image`,
// so the deletion must be skipped for the image to be kept.
if new_image != old_image {
// Ignore errors because image may be stored externally.
let image = LocalImage::delete_by_url(&mut context.pool(), old_image)
.await
.ok();
if let Some(image) = image {

@ -1,116 +0,0 @@
use serde::{Deserialize, Serialize};
use std::{
borrow::Borrow,
ops::{Deref, DerefMut},
};
#[cfg(feature = "full")]
use ts_rs::TS;

#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Deserialize, Serialize, Default)]
#[serde(transparent)]
pub struct Sensitive<T>(T);

impl<T> Sensitive<T> {
pub fn new(item: T) -> Self {
Sensitive(item)
}
pub fn into_inner(self) -> T {
self.0
}
}

impl<T> std::fmt::Debug for Sensitive<T> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_struct("Sensitive").finish()
}
}

impl<T> AsRef<T> for Sensitive<T> {
fn as_ref(&self) -> &T {
&self.0
}
}

impl AsRef<str> for Sensitive<String> {
fn as_ref(&self) -> &str {
&self.0
}
}

impl AsRef<[u8]> for Sensitive<String> {
fn as_ref(&self) -> &[u8] {
self.0.as_ref()
}
}

impl AsRef<[u8]> for Sensitive<Vec<u8>> {
fn as_ref(&self) -> &[u8] {
self.0.as_ref()
}
}

impl<T> AsMut<T> for Sensitive<T> {
fn as_mut(&mut self) -> &mut T {
&mut self.0
}
}

impl AsMut<str> for Sensitive<String> {
fn as_mut(&mut self) -> &mut str {
&mut self.0
}
}

impl Deref for Sensitive<String> {
type Target = str;

fn deref(&self) -> &Self::Target {
&self.0
}
}

impl DerefMut for Sensitive<String> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}

impl<T> From<T> for Sensitive<T> {
fn from(t: T) -> Self {
Sensitive(t)
}
}

impl From<&str> for Sensitive<String> {
fn from(s: &str) -> Self {
Sensitive(s.into())
}
}

impl<T> Borrow<T> for Sensitive<T> {
fn borrow(&self) -> &T {
&self.0
}
}

impl Borrow<str> for Sensitive<String> {
fn borrow(&self) -> &str {
&self.0
}
}

#[cfg(feature = "full")]
impl TS for Sensitive<String> {
fn name() -> String {
"string".to_string()
}
fn name_with_type_args(_args: Vec<String>) -> String {
"string".to_string()
}
fn dependencies() -> Vec<ts_rs::Dependency> {
Vec::new()
}
fn transparent() -> bool {
true
}
}

@ -375,7 +375,8 @@ impl From<FederationQueueState> for ReadableFederationState {
pub struct InstanceWithFederationState {
#[serde(flatten)]
pub instance: Instance,
/// if federation to this instance is or was active, show state of outgoing federation to this instance
/// if federation to this instance is or was active, show state of outgoing federation to this
/// instance
pub federation_state: Option<ReadableFederationState>,
}

@ -10,6 +10,7 @@ use crate::{
use chrono::{DateTime, Days, Local, TimeZone, Utc};
use enum_map::{enum_map, EnumMap};
use lemmy_db_schema::{
aggregates::structs::{PersonPostAggregates, PersonPostAggregatesForm},
newtypes::{CommunityId, DbUrl, InstanceId, PersonId, PostId},
source::{
comment::{Comment, CommentUpdateForm},

@ -143,13 +144,7 @@ pub fn is_top_mod(
}
}

#[tracing::instrument(skip_all)]
pub async fn get_post(post_id: PostId, pool: &mut DbPool<'_>) -> LemmyResult<Post> {
Post::read(pool, post_id)
.await?
.ok_or(LemmyErrorType::CouldntFindPost.into())
}

/// Marks a post as read for a given person.
#[tracing::instrument(skip_all)]
pub async fn mark_post_as_read(
person_id: PersonId,

@ -162,6 +157,28 @@ pub async fn mark_post_as_read(
Ok(())
}

/// Updates the read comment count for a post. Usually done when reading or creating a new comment.
#[tracing::instrument(skip_all)]
pub async fn update_read_comments(
person_id: PersonId,
post_id: PostId,
read_comments: i64,
pool: &mut DbPool<'_>,
) -> LemmyResult<()> {
let person_post_agg_form = PersonPostAggregatesForm {
person_id,
post_id,
read_comments,
..PersonPostAggregatesForm::default()
};

PersonPostAggregates::upsert(pool, &person_post_agg_form)
.await
.with_lemmy_type(LemmyErrorType::CouldntFindPost)?;

Ok(())
}

pub fn check_user_valid(person: &Person) -> LemmyResult<()> {
// Check for a site ban
if person.banned {

@ -360,7 +377,8 @@ pub async fn build_federated_instances(
federation_state: federation_state.map(std::convert::Into::into),
};
if is_blocked {
// blocked instances will only have an entry here if they had been federated with in the past.
// blocked instances will only have an entry here if they had been federated with in the
// past.
blocked.push(i);
} else if is_allowed {
allowed.push(i.clone());

@ -444,7 +462,7 @@ pub async fn send_password_reset_email(
// Insert the row after successful send, to avoid using daily reset limit while
// email sending is broken.
let local_user_id = user.local_user.id;
PasswordResetRequest::create_token(pool, local_user_id, token.clone()).await?;
PasswordResetRequest::create(pool, local_user_id, token.clone()).await?;
Ok(())
}

@ -969,8 +987,8 @@ pub async fn process_markdown_opt(

/// A wrapper for `proxy_image_link` for use in tests.
///
/// The parameter `force_image_proxy` is the config value of `pictrs.image_proxy`. Its necessary to pass
/// as separate parameter so it can be changed in tests.
/// The parameter `force_image_proxy` is the config value of `pictrs.image_proxy`. Its necessary to
/// pass as separate parameter so it can be changed in tests.
async fn proxy_image_link_internal(
link: Url,
image_mode: PictrsImageMode,

@ -981,14 +999,13 @@ async fn proxy_image_link_internal(
Ok(link.into())
} else if image_mode == PictrsImageMode::ProxyAllImages {
let proxied = build_proxied_image_url(&link, &context.settings().get_protocol_and_hostname())?;

// This should fail softly, since pictrs might not even be running
let details_res = fetch_pictrs_proxied_image_details(&link, context).await;

if let Ok(details) = details_res {
let details_form = details.build_image_details_form(&proxied);
RemoteImage::create(&mut context.pool(), &details_form).await?;
}
};

Ok(proxied.into())
} else {

@ -1008,26 +1025,25 @@ pub(crate) async fn proxy_image_link(link: Url, context: &LemmyContext) -> Lemmy
}

pub async fn proxy_image_link_opt_api(
link: &Option<String>,
link: Option<Option<DbUrl>>,
context: &LemmyContext,
) -> LemmyResult<Option<Option<DbUrl>>> {
proxy_image_link_api(link, context).await.map(Some)
if let Some(Some(link)) = link {
proxy_image_link(link.into(), context)
.await
.map(Some)
.map(Some)
} else {
Ok(link)
}
}

pub async fn proxy_image_link_api(
link: &Option<String>,
link: Option<DbUrl>,
context: &LemmyContext,
) -> LemmyResult<Option<DbUrl>> {
let link: Option<DbUrl> = match link.as_ref().map(String::as_str) {
// An empty string is an erase
Some("") => None,
Some(str_url) => Url::parse(str_url)
.map(|u| Some(u.into()))
.with_lemmy_type(LemmyErrorType::InvalidUrl)?,
None => None,
};
if let Some(l) = link {
proxy_image_link(l.into(), context).await.map(Some)
if let Some(link) = link {
proxy_image_link(link.into(), context).await.map(Some)
} else {
Ok(link)
}

@ -1137,29 +1153,4 @@ mod tests {
.is_err()
);
}

#[tokio::test]
#[serial]
async fn test_diesel_option_overwrite_to_url() {
let context = LemmyContext::init_test_context().await;

assert!(matches!(
proxy_image_link_api(&None, &context).await,
Ok(None)
));
assert!(matches!(
proxy_image_link_opt_api(&Some(String::new()), &context).await,
Ok(Some(None))
));
assert!(
proxy_image_link_opt_api(&Some("invalid_url".to_string()), &context)
.await
.is_err()
);
let example_url = "https://lemmy-alpha/image.png";
assert!(matches!(
proxy_image_link_opt_api(&Some(example_url.to_string()), &context).await,
Ok(Some(Some(url))) if url == Url::parse(example_url).unwrap().into()
));
}
}

@ -9,11 +9,11 @@ use lemmy_api_common::{
check_community_user_action,
check_post_deleted_or_removed,
generate_local_apub_endpoint,
get_post,
get_url_blocklist,
is_mod_or_admin,
local_site_to_slur_regex,
process_markdown,
update_read_comments,
EndpointType,
},
};

@ -28,7 +28,7 @@ use lemmy_db_schema::{
},
traits::{Crud, Likeable},
};
use lemmy_db_views::structs::LocalUserView;
use lemmy_db_views::structs::{LocalUserView, PostView};
use lemmy_utils::{
error::{LemmyErrorExt, LemmyErrorType, LemmyResult},
utils::{mention::scrape_text_for_mentions, validation::is_valid_body_field},

@ -47,12 +47,23 @@ pub async fn create_comment(
let slur_regex = local_site_to_slur_regex(&local_site);
let url_blocklist = get_url_blocklist(&context).await?;
let content = process_markdown(&data.content, &slur_regex, &url_blocklist, &context).await?;
is_valid_body_field(&Some(content.clone()), false)?;
is_valid_body_field(&content, false)?;

// Check for a community ban
let post_id = data.post_id;
let post = get_post(post_id, &mut context.pool()).await?;
let community_id = post.community_id;

// Read the full post view in order to get the comments count.
let post_view = PostView::read(
&mut context.pool(),
post_id,
Some(local_user_view.person.id),
true,
)
.await?
.ok_or(LemmyErrorType::CouldntFindPost)?;

let post = post_view.post;
let community_id = post_view.community.id;

check_community_user_action(&local_user_view.person, community_id, &mut context.pool()).await?;
check_post_deleted_or_removed(&post)?;

@ -164,6 +175,15 @@ pub async fn create_comment(
)
.await?;

// Update the read comments, so your own new comment doesn't appear as a +1 unread
update_read_comments(
local_user_view.person.id,
post_id,
post_view.counts.comments + 1,
&mut context.pool(),
)
.await?;

// If we're responding to a comment where we're the recipient,
// (ie we're the grandparent, or the recipient of the parent comment_reply),
// then mark the parent as read.

@ -37,6 +37,12 @@ pub async fn remove_comment(
)
.await?;

// Don't allow removing or restoring comment which was deleted by user, as it would reveal
// the comment text in mod log.
if orig_comment.comment.deleted {
return Err(LemmyErrorType::CouldntUpdateComment.into());
}

// Do the remove
let removed = data.removed;
let updated_comment = Comment::update(

@ -63,7 +63,9 @@ pub async fn update_comment(
let slur_regex = local_site_to_slur_regex(&local_site);
let url_blocklist = get_url_blocklist(&context).await?;
let content = process_markdown_opt(&data.content, &slur_regex, &url_blocklist, &context).await?;
is_valid_body_field(&content, false)?;
if let Some(content) = &content {
is_valid_body_field(content, false)?;
}

let comment_id = data.comment_id;
let form = CommentUpdateForm {

@ -30,6 +30,7 @@ use lemmy_db_schema::{
},
},
traits::{ApubActor, Crud, Followable, Joinable},
utils::diesel_url_create,
};
use lemmy_db_views::structs::{LocalUserView, SiteView};
use lemmy_utils::{

@ -61,11 +62,18 @@ pub async fn create_community(
check_slurs(&data.title, &slur_regex)?;
let description =
process_markdown_opt(&data.description, &slur_regex, &url_blocklist, &context).await?;
let icon = proxy_image_link_api(&data.icon, &context).await?;
let banner = proxy_image_link_api(&data.banner, &context).await?;

let icon = diesel_url_create(data.icon.as_deref())?;
let icon = proxy_image_link_api(icon, &context).await?;

let banner = diesel_url_create(data.banner.as_deref())?;
let banner = proxy_image_link_api(banner, &context).await?;

is_valid_actor_name(&data.name, local_site.actor_name_max_length as usize)?;
is_valid_body_field(&data.description, false)?;

if let Some(desc) = &data.description {
is_valid_body_field(desc, false)?;
}

// Double check for duplicate community actor_ids
let community_actor_id = generate_local_apub_endpoint(

@ -21,7 +21,7 @@ use lemmy_db_schema::{
local_site::LocalSite,
},
traits::Crud,
utils::{diesel_option_overwrite, naive_now},
utils::{diesel_string_update, diesel_url_update, naive_now},
};
use lemmy_db_views::structs::LocalUserView;
use lemmy_utils::{

@ -40,18 +40,28 @@ pub async fn update_community(
let slur_regex = local_site_to_slur_regex(&local_site);
let url_blocklist = get_url_blocklist(&context).await?;
check_slurs_opt(&data.title, &slur_regex)?;
let description =
process_markdown_opt(&data.description, &slur_regex, &url_blocklist, &context).await?;
is_valid_body_field(&data.description, false)?;

let description = diesel_string_update(
process_markdown_opt(&data.description, &slur_regex, &url_blocklist, &context)
.await?
.as_deref(),
);

if let Some(Some(desc)) = &description {
is_valid_body_field(desc, false)?;
}

let old_community = Community::read(&mut context.pool(), data.community_id)
.await?
.ok_or(LemmyErrorType::CouldntFindCommunity)?;
replace_image(&data.icon, &old_community.icon, &context).await?;
replace_image(&data.banner, &old_community.banner, &context).await?;

let description = diesel_option_overwrite(description);
let icon = proxy_image_link_opt_api(&data.icon, &context).await?;
let banner = proxy_image_link_opt_api(&data.banner, &context).await?;
let icon = diesel_url_update(data.icon.as_deref())?;
replace_image(&icon, &old_community.icon, &context).await?;
let icon = proxy_image_link_opt_api(icon, &context).await?;

let banner = diesel_url_update(data.banner.as_deref())?;
replace_image(&banner, &old_community.banner, &context).await?;
let banner = proxy_image_link_opt_api(banner, &context).await?;

// Verify its a mod (only mods can edit it)
check_community_mod_action(

@ -26,6 +26,7 @@ use lemmy_db_schema::{
post::{Post, PostInsertForm, PostLike, PostLikeForm, PostUpdateForm},
},
traits::{Crud, Likeable},
utils::diesel_url_create,
CommunityVisibility,
};
use lemmy_db_views::structs::LocalUserView;

@ -37,7 +38,6 @@ use lemmy_utils::{
slurs::check_slurs,
validation::{
check_url_scheme,
clean_url_params,
is_url_blocked,
is_valid_alt_text_field,
is_valid_body_field,

@ -64,16 +64,27 @@ pub async fn create_post(
let url_blocklist = get_url_blocklist(&context).await?;

let body = process_markdown_opt(&data.body, &slur_regex, &url_blocklist, &context).await?;
let data_url = data.url.as_ref();
let url = data_url.map(clean_url_params); // TODO no good way to handle a "clear"
let custom_thumbnail = data.custom_thumbnail.as_ref().map(clean_url_params);
let url = diesel_url_create(data.url.as_deref())?;
let custom_thumbnail = diesel_url_create(data.custom_thumbnail.as_deref())?;

is_valid_post_title(&data.name)?;
is_valid_body_field(&body, true)?;
is_valid_alt_text_field(&data.alt_text)?;
is_url_blocked(&url, &url_blocklist)?;
check_url_scheme(&url)?;
check_url_scheme(&custom_thumbnail)?;

if let Some(url) = &url {
is_url_blocked(url, &url_blocklist)?;
check_url_scheme(url)?;
}

if let Some(custom_thumbnail) = &custom_thumbnail {
check_url_scheme(custom_thumbnail)?;
}

if let Some(alt_text) = &data.alt_text {
is_valid_alt_text_field(alt_text)?;
}

if let Some(body) = &body {
is_valid_body_field(body, true)?;
}

check_community_user_action(
&local_user_view.person,

@ -156,7 +167,7 @@ pub async fn create_post(

generate_post_link_metadata(
updated_post.clone(),
custom_thumbnail,
custom_thumbnail.map(Into::into),
|post| Some(SendActivityData::CreatePost(post)),
Some(local_site),
context.reset_request_count(),

@ -176,7 +187,6 @@ pub async fn create_post(
.await
.with_lemmy_type(LemmyErrorType::CouldntLikePost)?;

// Mark the post as read
mark_post_as_read(person_id, post_id, &mut context.pool()).await?;

if let Some(url) = updated_post.url.clone() {

@ -2,10 +2,9 @@ use actix_web::web::{Data, Json, Query};
use lemmy_api_common::{
context::LemmyContext,
post::{GetPost, GetPostResponse},
utils::{check_private_instance, is_mod_or_admin_opt, mark_post_as_read},
utils::{check_private_instance, is_mod_or_admin_opt, mark_post_as_read, update_read_comments},
};
use lemmy_db_schema::{
aggregates::structs::{PersonPostAggregates, PersonPostAggregatesForm},
source::{comment::Comment, post::Post},
traits::Crud,
};

@ -14,7 +13,7 @@ use lemmy_db_views::{
structs::{LocalUserView, PostView, SiteView},
};
use lemmy_db_views_actor::structs::{CommunityModeratorView, CommunityView};
use lemmy_utils::error::{LemmyErrorExt, LemmyErrorType, LemmyResult};
use lemmy_utils::error::{LemmyErrorType, LemmyResult};

#[tracing::instrument(skip(context))]
pub async fn get_post(

@ -60,10 +59,17 @@ pub async fn get_post(
.await?
.ok_or(LemmyErrorType::CouldntFindPost)?;

// Mark the post as read
let post_id = post_view.post.id;
if let Some(person_id) = person_id {
mark_post_as_read(person_id, post_id, &mut context.pool()).await?;

update_read_comments(
person_id,
post_id,
post_view.counts.comments,
&mut context.pool(),
)
.await?;
}

// Necessary for the sidebar subscribed

@ -76,27 +82,14 @@ pub async fn get_post(
.await?
.ok_or(LemmyErrorType::CouldntFindCommunity)?;

// Insert into PersonPostAggregates
// to update the read_comments count
if let Some(person_id) = person_id {
let read_comments = post_view.counts.comments;
let person_post_agg_form = PersonPostAggregatesForm {
person_id,
post_id,
read_comments,
..PersonPostAggregatesForm::default()
};
PersonPostAggregates::upsert(&mut context.pool(), &person_post_agg_form)
.await
.with_lemmy_type(LemmyErrorType::CouldntFindPost)?;
}

let moderators = CommunityModeratorView::for_community(&mut context.pool(), community_id).await?;
let local_user = local_user_view.as_ref().map(|u| &u.local_user);

// Fetch the cross_posts
let cross_posts = if let Some(url) = &post_view.post.url {
let mut x_posts = PostQuery {
url_search: Some(url.inner().as_str().into()),
local_user,
..Default::default()
}
.list(&local_site.site, &mut context.pool())

@ -20,16 +20,15 @@ use lemmy_db_schema::{
post::{Post, PostUpdateForm},
},
traits::Crud,
utils::{diesel_option_overwrite, naive_now},
utils::{diesel_string_update, diesel_url_update, naive_now},
};
use lemmy_db_views::structs::LocalUserView;
use lemmy_utils::{
error::{LemmyErrorExt, LemmyErrorType, LemmyResult},
utils::{
slurs::check_slurs_opt,
slurs::check_slurs,
validation::{
check_url_scheme,
clean_url_params,
is_url_blocked,
is_valid_alt_text_field,
is_valid_body_field,

@ -47,26 +46,43 @@ pub async fn update_post(
) -> LemmyResult<Json<PostResponse>> {
let local_site = LocalSite::read(&mut context.pool()).await?;

// TODO No good way to handle a clear.
// Issue link: https://github.com/LemmyNet/lemmy/issues/2287
let url = data.url.as_ref().map(clean_url_params);
let custom_thumbnail = data.custom_thumbnail.as_ref().map(clean_url_params);
let url = diesel_url_update(data.url.as_deref())?;

let custom_thumbnail = diesel_url_update(data.custom_thumbnail.as_deref())?;

let url_blocklist = get_url_blocklist(&context).await?;

let slur_regex = local_site_to_slur_regex(&local_site);
check_slurs_opt(&data.name, &slur_regex)?;
let body = process_markdown_opt(&data.body, &slur_regex, &url_blocklist, &context).await?;

let body = diesel_string_update(
process_markdown_opt(&data.body, &slur_regex, &url_blocklist, &context)
.await?
.as_deref(),
);

let alt_text = diesel_string_update(data.alt_text.as_deref());

if let Some(name) = &data.name {
is_valid_post_title(name)?;
check_slurs(name, &slur_regex)?;
}

is_valid_body_field(&body, true)?;
is_valid_alt_text_field(&data.alt_text)?;
is_url_blocked(&url, &url_blocklist)?;
check_url_scheme(&url)?;
check_url_scheme(&custom_thumbnail)?;
if let Some(Some(body)) = &body {
is_valid_body_field(body, true)?;
}

if let Some(Some(alt_text)) = &alt_text {
is_valid_alt_text_field(alt_text)?;
}

if let Some(Some(url)) = &url {
is_url_blocked(url, &url_blocklist)?;
check_url_scheme(url)?;
}

if let Some(Some(custom_thumbnail)) = &custom_thumbnail {
check_url_scheme(custom_thumbnail)?;
}

let post_id = data.post_id;
let orig_post = Post::read(&mut context.pool(), post_id)

@ -95,9 +111,9 @@ pub async fn update_post(

let post_form = PostUpdateForm {
name: data.name.clone(),
url: Some(url.map(Into::into)),
body: diesel_option_overwrite(body),
alt_text: diesel_option_overwrite(data.alt_text.clone()),
url,
body,
alt_text,
nsfw: data.nsfw,
language_id: data.language_id,
updated: Some(Some(naive_now())),

@ -111,7 +127,7 @@ pub async fn update_post(

generate_post_link_metadata(
updated_post.clone(),
custom_thumbnail,
custom_thumbnail.flatten().map(Into::into),
|post| Some(SendActivityData::UpdatePost(post)),
Some(local_site),
context.reset_request_count(),

@ -39,7 +39,7 @@ pub async fn create_private_message(
let slur_regex = local_site_to_slur_regex(&local_site);
let url_blocklist = get_url_blocklist(&context).await?;
let content = process_markdown(&data.content, &slur_regex, &url_blocklist, &context).await?;
is_valid_body_field(&Some(content.clone()), false)?;
is_valid_body_field(&content, false)?;

check_person_block(
local_user_view.person.id,

@ -41,7 +41,7 @@ pub async fn update_private_message(
let slur_regex = local_site_to_slur_regex(&local_site);
let url_blocklist = get_url_blocklist(&context).await?;
let content = process_markdown(&data.content, &slur_regex, &url_blocklist, &context).await?;
is_valid_body_field(&Some(content.clone()), false)?;
is_valid_body_field(&content, false)?;

let private_message_id = data.private_message_id;
PrivateMessage::update(

@ -11,7 +11,7 @@ use lemmy_api_common::{
local_site_rate_limit_to_rate_limit_config,
local_site_to_slur_regex,
process_markdown_opt,
proxy_image_link_opt_api,
proxy_image_link_api,
},
};
use lemmy_db_schema::{

@ -23,7 +23,7 @@ use lemmy_db_schema::{
tagline::Tagline,
},
traits::Crud,
utils::{diesel_option_overwrite, naive_now},
utils::{diesel_string_update, diesel_url_create, naive_now},
};
use lemmy_db_views::structs::{LocalUserView, SiteView};
use lemmy_utils::{

@ -61,21 +61,25 @@ pub async fn create_site(
let slur_regex = local_site_to_slur_regex(&local_site);
let url_blocklist = get_url_blocklist(&context).await?;
let sidebar = process_markdown_opt(&data.sidebar, &slur_regex, &url_blocklist, &context).await?;
let icon = proxy_image_link_opt_api(&data.icon, &context).await?;
let banner = proxy_image_link_opt_api(&data.banner, &context).await?;

let icon = diesel_url_create(data.icon.as_deref())?;
let icon = proxy_image_link_api(icon, &context).await?;

let banner = diesel_url_create(data.banner.as_deref())?;
let banner = proxy_image_link_api(banner, &context).await?;

let site_form = SiteUpdateForm {
name: Some(data.name.clone()),
sidebar: diesel_option_overwrite(sidebar),
description: diesel_option_overwrite(data.description.clone()),
icon,
banner,
sidebar: diesel_string_update(sidebar.as_deref()),
description: diesel_string_update(data.description.as_deref()),
icon: Some(icon),
banner: Some(banner),
actor_id: Some(actor_id),
last_refreshed_at: Some(naive_now()),
inbox_url,
private_key: Some(Some(keypair.private_key)),
public_key: Some(keypair.public_key),
content_warning: diesel_option_overwrite(data.content_warning.clone()),
content_warning: diesel_string_update(data.content_warning.as_deref()),
..Default::default()
};

@ -91,16 +95,16 @@ pub async fn create_site(
enable_nsfw: data.enable_nsfw,
community_creation_admin_only: data.community_creation_admin_only,
require_email_verification: data.require_email_verification,
application_question: diesel_option_overwrite(data.application_question.clone()),
application_question: diesel_string_update(data.application_question.as_deref()),
private_instance: data.private_instance,
default_theme: data.default_theme.clone(),
default_post_listing_type: data.default_post_listing_type,
default_sort_type: data.default_sort_type,
legal_information: diesel_option_overwrite(data.legal_information.clone()),
legal_information: diesel_string_update(data.legal_information.as_deref()),
application_email_admins: data.application_email_admins,
hide_modlog_mod_names: data.hide_modlog_mod_names,
updated: Some(Some(naive_now())),
slur_filter_regex: diesel_option_overwrite(data.slur_filter_regex.clone()),
slur_filter_regex: diesel_string_update(data.slur_filter_regex.as_deref()),
actor_name_max_length: data.actor_name_max_length,
federation_enabled: data.federation_enabled,
captcha_enabled: data.captcha_enabled,

@ -179,7 +183,9 @@ fn validate_create_payload(local_site: &LocalSite, create_site: &CreateSite) ->
)?;

// Ensure that the sidebar has fewer than the max num characters...
is_valid_body_field(&create_site.sidebar, false)?;
if let Some(body) = &create_site.sidebar {
is_valid_body_field(body, false)?;
}

application_question_check(
&local_site.application_question,

@ -27,7 +27,7 @@ use lemmy_db_schema::{
tagline::Tagline,
},
traits::Crud,
utils::{diesel_option_overwrite, naive_now},
utils::{diesel_string_update, diesel_url_update, naive_now},
RegistrationMode,
};
use lemmy_db_views::structs::{LocalUserView, SiteView};

@ -67,22 +67,29 @@ pub async fn update_site(
SiteLanguage::update(&mut context.pool(), discussion_languages.clone(), &site).await?;
}

replace_image(&data.icon, &site.icon, &context).await?;
replace_image(&data.banner, &site.banner, &context).await?;

let slur_regex = local_site_to_slur_regex(&local_site);
let url_blocklist = get_url_blocklist(&context).await?;
let sidebar = process_markdown_opt(&data.sidebar, &slur_regex, &url_blocklist, &context).await?;
let icon = proxy_image_link_opt_api(&data.icon, &context).await?;
let banner = proxy_image_link_opt_api(&data.banner, &context).await?;
let sidebar = diesel_string_update(
process_markdown_opt(&data.sidebar, &slur_regex, &url_blocklist, &context)
.await?
.as_deref(),
);

let icon = diesel_url_update(data.icon.as_deref())?;
replace_image(&icon, &site.icon, &context).await?;
let icon = proxy_image_link_opt_api(icon, &context).await?;

let banner = diesel_url_update(data.banner.as_deref())?;
replace_image(&banner, &site.banner, &context).await?;
let banner = proxy_image_link_opt_api(banner, &context).await?;

let site_form = SiteUpdateForm {
name: data.name.clone(),
sidebar: diesel_option_overwrite(sidebar),
description: diesel_option_overwrite(data.description.clone()),
sidebar,
description: diesel_string_update(data.description.as_deref()),
icon,
banner,
content_warning: diesel_option_overwrite(data.content_warning.clone()),
content_warning: diesel_string_update(data.content_warning.as_deref()),
updated: Some(Some(naive_now())),
..Default::default()
};

@ -99,16 +106,16 @@ pub async fn update_site(
enable_nsfw: data.enable_nsfw,
community_creation_admin_only: data.community_creation_admin_only,
require_email_verification: data.require_email_verification,
application_question: diesel_option_overwrite(data.application_question.clone()),
application_question: diesel_string_update(data.application_question.as_deref()),
private_instance: data.private_instance,
default_theme: data.default_theme.clone(),
default_post_listing_type: data.default_post_listing_type,
default_sort_type: data.default_sort_type,
legal_information: diesel_option_overwrite(data.legal_information.clone()),
legal_information: diesel_string_update(data.legal_information.as_deref()),
application_email_admins: data.application_email_admins,
hide_modlog_mod_names: data.hide_modlog_mod_names,
updated: Some(Some(naive_now())),
slur_filter_regex: diesel_option_overwrite(data.slur_filter_regex.clone()),
slur_filter_regex: diesel_string_update(data.slur_filter_regex.as_deref()),
actor_name_max_length: data.actor_name_max_length,
federation_enabled: data.federation_enabled,
captcha_enabled: data.captcha_enabled,

@ -156,7 +163,8 @@ pub async fn update_site(
// TODO can't think of a better way to do this.
// If the server suddenly requires email verification, or required applications, no old users
// will be able to log in. It really only wants this to be a requirement for NEW signups.
// So if it was set from false, to true, you need to update all current users columns to be verified.
// So if it was set from false, to true, you need to update all current users columns to be
// verified.

let old_require_application =
local_site.registration_mode == RegistrationMode::RequireApplication;

@ -228,7 +236,9 @@ fn validate_update_payload(local_site: &LocalSite, edit_site: &EditSite) -> Lemm
)?;

// Ensure that the sidebar has fewer than the max num characters...
is_valid_body_field(&edit_site.sidebar, false)?;
if let Some(body) = &edit_site.sidebar {
is_valid_body_field(body, false)?;
}

application_question_check(
&local_site.application_question,

@ -112,15 +112,17 @@ pub async fn register(
// We have to create both a person, and local_user

// Register the new person
let person_form = PersonInsertForm::builder()
.name(data.username.clone())
.actor_id(Some(actor_id.clone()))
.private_key(Some(actor_keypair.private_key))
.public_key(actor_keypair.public_key)
.inbox_url(Some(generate_inbox_url(&actor_id)?))
.shared_inbox_url(Some(generate_shared_inbox_url(context.settings())?))
.instance_id(site_view.site.instance_id)
.build();
let person_form = PersonInsertForm {
actor_id: Some(actor_id.clone()),
inbox_url: Some(generate_inbox_url(&actor_id)?),
shared_inbox_url: Some(generate_shared_inbox_url(context.settings())?),
private_key: Some(actor_keypair.private_key),
..PersonInsertForm::new(
data.username.clone(),
actor_keypair.public_key,
site_view.site.instance_id,
)
};

// insert the person
let inserted_person = Person::create(&mut context.pool(), &person_form)

@ -197,7 +199,8 @@ pub async fn register(
verify_email_sent: false,
};

// Log the user in directly if the site is not setup, or email verification and application aren't required
// Log the user in directly if the site is not setup, or email verification and application aren't
// required
if !local_site.site_setup
|| (!require_registration_application && !local_site.require_email_verification)
{

@ -44,7 +44,7 @@ once_cell = { workspace = true }
moka.workspace = true
serde_with.workspace = true
html2md = "0.2.14"
html2text = "0.6.0"
html2text = "0.12.5"
stringreader = "0.1.1"
enum_delegate = "0.2.0"

@ -138,8 +138,8 @@ impl ActivityHandler for CollectionAdd {
.dereference(context)
.await?;

// If we had to refetch the community while parsing the activity, then the new mod has already
// been added. Skip it here as it would result in a duplicate key error.
// If we had to refetch the community while parsing the activity, then the new mod has
// already been added. Skip it here as it would result in a duplicate key error.
let new_mod_id = new_mod.id;
let moderated_communities =
CommunityModerator::get_person_moderated_communities(&mut context.pool(), new_mod_id)

@ -24,13 +24,14 @@ pub mod update;
///
/// Activities are sent to the community itself if it lives on another instance. If the community
/// is local, the activity is directly wrapped into Announce and sent to community followers.
/// Activities are also sent to those who follow the actor (with exception of moderation activities).
/// Activities are also sent to those who follow the actor (with exception of moderation
/// activities).
///
/// * `activity` - The activity which is being sent
/// * `actor` - The user who is sending the activity
/// * `community` - Community inside which the activity is sent
/// * `inboxes` - Any additional inboxes the activity should be sent to (for example,
/// to the user who is being promoted to moderator)
/// * `inboxes` - Any additional inboxes the activity should be sent to (for example, to the user
/// who is being promoted to moderator)
/// * `is_mod_activity` - True for things like Add/Mod, these are not sent to user followers
pub(crate) async fn send_activity_in_community(
activity: AnnouncableActivities,

@ -176,7 +176,8 @@ impl ActivityHandler for CreateOrUpdateNote {
// Although mentions could be gotten from the post tags (they are included there), or the ccs,
// Its much easier to scrape them from the comment body, since the API has to do that
// anyway.
// TODO: for compatibility with other projects, it would be much better to read this from cc or tags
// TODO: for compatibility with other projects, it would be much better to read this from cc or
// tags
let mentions = scrape_text_for_mentions(&comment.content);
send_local_notifs(mentions, comment.id, &actor, do_send_email, context).await?;
Ok(())

@ -70,6 +70,8 @@ pub async fn list_comments(

let parent_path_cloned = parent_path.clone();
let post_id = data.post_id;
let local_user = local_user_view.as_ref().map(|l| &l.local_user);

let comments = CommentQuery {
listing_type,
sort,

@ -80,7 +82,7 @@ pub async fn list_comments(
community_id,
parent_path: parent_path_cloned,
post_id,
local_user: local_user_view.as_ref(),
local_user,
page,
limit,
..Default::default()

@ -49,17 +49,17 @@ pub async fn list_posts(
return Err(LemmyError::from(LemmyErrorType::ContradictingFilters));
}

let local_user_ref = local_user_view.as_ref().map(|u| &u.local_user);
let local_user = local_user_view.as_ref().map(|u| &u.local_user);
let listing_type = Some(listing_type_with_default(
data.type_,
local_user_ref,
local_user,
&local_site.local_site,
community_id,
));

let sort = Some(sort_type_with_default(
data.sort,
local_user_ref,
local_user,
&local_site.local_site,
));

@ -71,7 +71,7 @@ pub async fn list_posts(
};

let posts = PostQuery {
local_user: local_user_view.as_ref(),
local_user,
listing_type,
sort,
community_id,

@ -65,10 +65,12 @@ pub async fn read_person(
None
};

let local_user = local_user_view.as_ref().map(|l| &l.local_user);

let posts = PostQuery {
sort,
saved_only,
local_user: local_user_view.as_ref(),
local_user,
community_id,
page,
limit,

@ -79,7 +81,7 @@ pub async fn read_person(
.await?;

let comments = CommentQuery {
local_user: local_user_view.as_ref(),
local_user,
sort: sort.map(post_to_comment_sort_type),
saved_only,
community_id,

@ -55,7 +55,7 @@ pub async fn search(
data.community_id
};
let creator_id = data.creator_id;
let local_user = local_user_view.as_ref().map(|luv| &luv.local_user);
let local_user = local_user_view.as_ref().map(|l| &l.local_user);

match search_type {
SearchType::Posts => {

@ -64,7 +64,7 @@ pub async fn search(
listing_type: (listing_type),
community_id: (community_id),
creator_id: (creator_id),
local_user: (local_user_view.as_ref()),
local_user,
search_term: (Some(q)),
page: (page),
limit: (limit),

@ -80,7 +80,7 @@ pub async fn search(
search_term: (Some(q)),
community_id: (community_id),
creator_id: (creator_id),
local_user: (local_user_view.as_ref()),
local_user,
page: (page),
limit: (limit),
..Default::default()

@ -125,7 +125,7 @@ pub async fn search(
listing_type: (listing_type),
community_id: (community_id),
creator_id: (creator_id),
local_user: (local_user_view.as_ref()),
local_user,
search_term: (Some(q)),
page: (page),
limit: (limit),

@ -142,7 +142,7 @@ pub async fn search(
search_term: (Some(q)),
community_id: (community_id),
creator_id: (creator_id),
local_user: (local_user_view.as_ref()),
local_user,
page: (page),
limit: (limit),
..Default::default()

@ -192,6 +192,7 @@ pub async fn search(
community_id: (community_id),
creator_id: (creator_id),
url_search: (Some(q)),
local_user,
page: (page),
limit: (limit),
..Default::default()

@ -338,13 +338,11 @@ mod tests {
context: &Data<LemmyContext>,
) -> LemmyResult<LocalUserView> {
let instance = Instance::read_or_create(&mut context.pool(), "example.com".to_string()).await?;
let person_form = PersonInsertForm::builder()
.name(name.clone())
.display_name(Some(name.clone()))
.bio(bio)
.public_key("asd".to_string())
.instance_id(instance.id)
.build();
let person_form = PersonInsertForm {
display_name: Some(name.clone()),
bio,
..PersonInsertForm::test_form(instance.id, &name)
};
let person = Person::create(&mut context.pool(), &person_form).await?;

let user_form = LocalUserInsertForm::builder()

@ -72,7 +72,8 @@ impl Collection for ApubCommunityFeatured {
.to_vec();
}

// process items in parallel, to avoid long delay from fetch_site_metadata() and other processing
// process items in parallel, to avoid long delay from fetch_site_metadata() and other
// processing
let stickied_posts: Vec<Post> = join_all(pages.into_iter().map(|page| {
async {
// use separate request counter for each item, otherwise there will be problems with

@ -129,11 +129,7 @@ mod tests {
let inserted_instance =
Instance::read_or_create(&mut context.pool(), "my_domain.tld".to_string()).await?;

let old_mod = PersonInsertForm::builder()
.name("holly".into())
.public_key("pubkey".to_string())
.instance_id(inserted_instance.id)
.build();
let old_mod = PersonInsertForm::test_form(inserted_instance.id, "holly");

let old_mod = Person::create(&mut context.pool(), &old_mod).await?;
let community_moderator_form = CommunityModeratorForm {

@ -102,7 +102,8 @@ impl Collection for ApubCommunityOutbox {
// We intentionally ignore errors here. This is because the outbox might contain posts from old
// Lemmy versions, or from other software which we cant parse. In that case, we simply skip the
// item and only parse the ones that work.
// process items in parallel, to avoid long delay from fetch_site_metadata() and other processing
// process items in parallel, to avoid long delay from fetch_site_metadata() and other
// processing
join_all(outbox_activities.into_iter().map(|activity| {
async {
// Receiving announce requires at least one local community follower for anti spam purposes.

@ -28,6 +28,7 @@ use lemmy_api_common::{
},
};
use lemmy_db_schema::{
sensitive::SensitiveString,
source::{
activity::ActorType,
actor_language::CommunityLanguage,

@ -213,7 +214,7 @@ impl Actor for ApubCommunity {
}

fn private_key_pem(&self) -> Option<String> {
self.private_key.clone()
self.private_key.clone().map(SensitiveString::into_inner)
}

fn inbox(&self) -> Url {

@ -29,6 +29,7 @@ use lemmy_api_common::{
};
use lemmy_db_schema::{
newtypes::InstanceId,
sensitive::SensitiveString,
source::{
activity::ActorType,
actor_language::SiteLanguage,

@ -187,7 +188,7 @@ impl Actor for ApubSite {
}

fn private_key_pem(&self) -> Option<String> {
self.private_key.clone()
self.private_key.clone().map(SensitiveString::into_inner)
}

fn inbox(&self) -> Url {

@ -30,6 +30,7 @@ use lemmy_api_common::{
},
};
use lemmy_db_schema::{
sensitive::SensitiveString,
source::{
activity::ActorType,
local_site::LocalSite,

@ -200,7 +201,7 @@ impl Actor for ApubPerson {
}

fn private_key_pem(&self) -> Option<String> {
self.private_key.clone()
self.private_key.clone().map(SensitiveString::into_inner)
}

fn inbox(&self) -> Url {

@ -25,12 +25,7 @@ use html2text::{from_read_with_decorator, render::text_renderer::TrivialDecorato
use lemmy_api_common::{
context::LemmyContext,
request::generate_post_link_metadata,
utils::{
get_url_blocklist,
local_site_opt_to_slur_regex,
process_markdown_opt,
proxy_image_link_opt_apub,
},
utils::{get_url_blocklist, local_site_opt_to_slur_regex, process_markdown_opt},
};
use lemmy_db_schema::{
source::{

@ -224,12 +219,13 @@ impl Object for ApubPost {
} else {
None
};
check_url_scheme(&url)?;

if let Some(url) = &url {
check_url_scheme(url)?;
}

let alt_text = first_attachment.cloned().and_then(Attachment::alt_text);

let url = proxy_image_link_opt_apub(url, context).await?;

let slur_regex = &local_site_opt_to_slur_regex(&local_site);
let url_blocklist = get_url_blocklist(context).await?;

@ -259,7 +255,8 @@ impl Object for ApubPost {
let post_ = post.clone();
let context_ = context.reset_request_count();

// Generates a post thumbnail in background task, because some sites can be very slow to respond.
// Generates a post thumbnail in background task, because some sites can be very slow to
// respond.
spawn_try_task(async move {
generate_post_link_metadata(post_, None, |_| None, local_site, context_).await
});

@ -23,6 +23,7 @@ pub struct DeleteUser {
#[serde(deserialize_with = "deserialize_one_or_many", default)]
#[serde(skip_serializing_if = "Vec::is_empty")]
pub(crate) cc: Vec<Url>,
/// Nonstandard field. If present, all content from the user should be deleted along with the account
/// Nonstandard field. If present, all content from the user should be deleted along with the
/// account
pub(crate) remove_data: Option<bool>,
}

@ -72,11 +72,7 @@ async fn try_main() -> LemmyResult<()> {
println!("🫃 creating {} people", args.people);
let mut person_ids = vec![];
for i in 0..args.people.get() {
let form = PersonInsertForm::builder()
.name(format!("p{i}"))
.public_key("pubkey".to_owned())
.instance_id(instance.id)
.build();
let form = PersonInsertForm::test_form(instance.id, &format!("p{i}"));
person_ids.push(Person::create(&mut conn.into(), &form).await?.id);
}

@ -132,7 +128,8 @@ async fn try_main() -> LemmyResult<()> {
// Make sure the println above shows the correct amount
assert_eq!(num_inserted_posts, num_posts as usize);

// Manually trigger and wait for a statistics update to ensure consistent and high amount of accuracy in the statistics used for query planning
// Manually trigger and wait for a statistics update to ensure consistent and high amount of
// accuracy in the statistics used for query planning
println!("🧮 updating database statistics");
conn.batch_execute("ANALYZE;").await?;

@ -14,10 +14,12 @@ use diesel::{
|
|||
|
||||
/// Gererates a series of rows for insertion.
|
||||
///
|
||||
/// An inclusive range is created from `start` and `stop`. A row for each number is generated using `selection`, which can be a tuple.
|
||||
/// [`current_value`] is an expression that gets the current value.
|
||||
/// An inclusive range is created from `start` and `stop`. A row for each number is generated using
|
||||
/// `selection`, which can be a tuple. [`current_value`] is an expression that gets the current
|
||||
/// value.
|
||||
///
|
||||
/// For example, if there's a `numbers` table with a `number` column, this inserts all numbers from 1 to 10 in a single statement:
|
||||
/// For example, if there's a `numbers` table with a `number` column, this inserts all numbers from
|
||||
/// 1 to 10 in a single statement:
|
||||
///
|
||||
/// ```
|
||||
/// dsl::insert_into(numbers::table)
|
||||
|
|
|
@@ -70,7 +70,7 @@ diesel_ltree = { workspace = true, optional = true }
typed-builder = { workspace = true }
async-trait = { workspace = true }
tracing = { workspace = true }
deadpool = { version = "0.10.0", features = ["rt_tokio_1"], optional = true }
deadpool = { version = "0.12.1", features = ["rt_tokio_1"], optional = true }
ts-rs = { workspace = true, optional = true }
futures-util = { workspace = true }
tokio = { workspace = true, optional = true }

@@ -81,6 +81,7 @@ uuid = { workspace = true, features = ["v4"] }
i-love-jesus = { workspace = true, optional = true }
anyhow = { workspace = true }
moka.workspace = true
derive-new.workspace = true

[dev-dependencies]
serial_test = { workspace = true }
@@ -5,12 +5,17 @@
-- (even if only other columns are updated) because triggers can run after the deletion of referenced rows and
-- before the automatic deletion of the row that references it. This is not a problem for insert or delete.
--
-- After a row update begins, a concurrent update on the same row can't begin until the whole
-- transaction that contains the first update is finished. To reduce this locking, statements in
-- triggers should be ordered based on the likelihood of concurrent writers. For example, updating
-- site_aggregates should be done last because the same row is updated for all local stuff. If
-- it were not last, then the locking period for concurrent writers would extend to include the
-- time consumed by statements that come after.
-- Triggers that update multiple tables should use this order: person_aggregates, comment_aggregates,
-- post_aggregates, community_aggregates, site_aggregates
-- * The order matters because the updated rows are locked until the end of the transaction, and statements
-- in a trigger don't use separate transactions. This means that updates closer to the beginning cause
-- longer locks because the duration of each update extends the durations of the locks caused by previous
-- updates. Long locks are worse on rows that have more concurrent transactions trying to update them. The
-- listed order starts with tables that are less likely to have such rows.
-- https://www.postgresql.org/docs/16/transaction-iso.html#XACT-READ-COMMITTED
-- * Using the same order in every trigger matters because a deadlock is possible if multiple transactions
-- update the same rows in a different order.
-- https://www.postgresql.org/docs/current/explicit-locking.html#LOCKING-DEADLOCKS
--
--
-- Create triggers for both post and comments
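The fixed lock-ordering rule described in these migration comments is the same discipline used to avoid deadlocks with ordinary mutexes: every writer acquires the shared resources in one global order. A small illustrative Rust sketch (an analogy, not part of the migration), using only the standard library:

```rust
use std::sync::{Arc, Mutex};
use std::thread;

fn main() {
  // Stand-ins for two aggregate tables that triggers update.
  let person_agg = Arc::new(Mutex::new(0));
  let site_agg = Arc::new(Mutex::new(0));

  let handles: Vec<_> = (0..2)
    .map(|_| {
      let a = Arc::clone(&person_agg);
      let b = Arc::clone(&site_agg);
      thread::spawn(move || {
        // Both threads lock in the same order: person_agg first, site_agg
        // last, mirroring "update site_aggregates last". If one thread
        // locked in the opposite order, a deadlock would be possible.
        let mut p = a.lock().unwrap();
        let mut s = b.lock().unwrap();
        *p += 1;
        *s += 1;
      })
    })
    .collect();

  for h in handles {
    h.join().unwrap();
  }
}
```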
@@ -64,19 +64,11 @@ mod tests {
.await
.unwrap();
let new_person = PersonInsertForm::builder()
.name("thommy_comment_agg".into())
.public_key("pubkey".into())
.instance_id(inserted_instance.id)
.build();
let new_person = PersonInsertForm::test_form(inserted_instance.id, "thommy_comment_agg");
let inserted_person = Person::create(pool, &new_person).await.unwrap();
let another_person = PersonInsertForm::builder()
.name("jerry_comment_agg".into())
.public_key("pubkey".into())
.instance_id(inserted_instance.id)
.build();
let another_person = PersonInsertForm::test_form(inserted_instance.id, "jerry_comment_agg");
let another_inserted_person = Person::create(pool, &another_person).await.unwrap();
@@ -65,19 +65,11 @@ mod tests {
.await
.unwrap();
let new_person = PersonInsertForm::builder()
.name("thommy_community_agg".into())
.public_key("pubkey".to_string())
.instance_id(inserted_instance.id)
.build();
let new_person = PersonInsertForm::test_form(inserted_instance.id, "thommy_community_agg");
let inserted_person = Person::create(pool, &new_person).await.unwrap();
let another_person = PersonInsertForm::builder()
.name("jerry_community_agg".into())
.public_key("pubkey".to_string())
.instance_id(inserted_instance.id)
.build();
let another_person = PersonInsertForm::test_form(inserted_instance.id, "jerry_community_agg");
let another_inserted_person = Person::create(pool, &another_person).await.unwrap();
@@ -49,19 +49,11 @@ mod tests {
.await
.unwrap();
let new_person = PersonInsertForm::builder()
.name("thommy_user_agg".into())
.public_key("pubkey".to_string())
.instance_id(inserted_instance.id)
.build();
let new_person = PersonInsertForm::test_form(inserted_instance.id, "thommy_user_agg");
let inserted_person = Person::create(pool, &new_person).await.unwrap();
let another_person = PersonInsertForm::builder()
.name("jerry_user_agg".into())
.public_key("pubkey".to_string())
.instance_id(inserted_instance.id)
.build();
let another_person = PersonInsertForm::test_form(inserted_instance.id, "jerry_user_agg");
let another_inserted_person = Person::create(pool, &another_person).await.unwrap();
@@ -83,19 +83,11 @@ mod tests {
.await
.unwrap();
let new_person = PersonInsertForm::builder()
.name("thommy_community_agg".into())
.public_key("pubkey".to_string())
.instance_id(inserted_instance.id)
.build();
let new_person = PersonInsertForm::test_form(inserted_instance.id, "thommy_community_agg");
let inserted_person = Person::create(pool, &new_person).await.unwrap();
let another_person = PersonInsertForm::builder()
.name("jerry_community_agg".into())
.public_key("pubkey".to_string())
.instance_id(inserted_instance.id)
.build();
let another_person = PersonInsertForm::test_form(inserted_instance.id, "jerry_community_agg");
let another_inserted_person = Person::create(pool, &another_person).await.unwrap();

@@ -229,11 +221,7 @@ mod tests {
.await
.unwrap();
let new_person = PersonInsertForm::builder()
.name("thommy_community_agg".into())
.public_key("pubkey".to_string())
.instance_id(inserted_instance.id)
.build();
let new_person = PersonInsertForm::test_form(inserted_instance.id, "thommy_community_agg");
let inserted_person = Person::create(pool, &new_person).await.unwrap();
@@ -42,11 +42,7 @@ mod tests {
.await
.unwrap();
let new_person = PersonInsertForm::builder()
.name("thommy_site_agg".into())
.public_key("pubkey".to_string())
.instance_id(inserted_instance.id)
.build();
let new_person = PersonInsertForm::test_form(inserted_instance.id, "thommy_site_agg");
let inserted_person = Person::create(pool, &new_person).await.unwrap();
@@ -531,11 +531,7 @@ mod tests {
let (site, instance) = create_test_site(pool).await;
let person_form = PersonInsertForm::builder()
.name("my test person".to_string())
.public_key("pubkey".to_string())
.instance_id(instance.id)
.build();
let person_form = PersonInsertForm::test_form(instance.id, "my test person");
let person = Person::create(pool, &person_form).await.unwrap();
let local_user_form = LocalUserInsertForm::builder()
.person_id(person.id)

@@ -647,11 +643,7 @@ mod tests {
.await
.unwrap();
let person_form = PersonInsertForm::builder()
.name("my test person".to_string())
.public_key("pubkey".to_string())
.instance_id(instance.id)
.build();
let person_form = PersonInsertForm::test_form(instance.id, "my test person");
let person = Person::create(pool, &person_form).await.unwrap();
let local_user_form = LocalUserInsertForm::builder()
.person_id(person.id)
@@ -118,8 +118,9 @@ impl Crud for Comment {
type IdType = CommentId;
/// This is unimplemented, use [[Comment::create]]
async fn create(_pool: &mut DbPool<'_>, _comment_form: &Self::InsertForm) -> Result<Self, Error> {
unimplemented!();
async fn create(pool: &mut DbPool<'_>, comment_form: &Self::InsertForm) -> Result<Self, Error> {
debug_assert!(false);
Comment::create(pool, comment_form, None).await
}
async fn update(
@@ -233,11 +234,7 @@ mod tests {
.await
.unwrap();
let new_person = PersonInsertForm::builder()
.name("terry".into())
.public_key("pubkey".to_string())
.instance_id(inserted_instance.id)
.build();
let new_person = PersonInsertForm::test_form(inserted_instance.id, "terry");
let inserted_person = Person::create(pool, &new_person).await.unwrap();
@@ -87,117 +87,3 @@ impl CommentReply {
.optional()
}
}
#[cfg(test)]
#[allow(clippy::unwrap_used)]
#[allow(clippy::indexing_slicing)]
mod tests {
use crate::{
source::{
comment::{Comment, CommentInsertForm},
comment_reply::{CommentReply, CommentReplyInsertForm, CommentReplyUpdateForm},
community::{Community, CommunityInsertForm},
instance::Instance,
person::{Person, PersonInsertForm},
post::{Post, PostInsertForm},
},
traits::Crud,
utils::build_db_pool_for_tests,
};
use pretty_assertions::assert_eq;
use serial_test::serial;
#[tokio::test]
#[serial]
async fn test_crud() {
let pool = &build_db_pool_for_tests().await;
let pool = &mut pool.into();
let inserted_instance = Instance::read_or_create(pool, "my_domain.tld".to_string())
.await
.unwrap();
let new_person = PersonInsertForm::builder()
.name("terrylake".into())
.public_key("pubkey".to_string())
.instance_id(inserted_instance.id)
.build();
let inserted_person = Person::create(pool, &new_person).await.unwrap();
let recipient_form = PersonInsertForm::builder()
.name("terrylakes recipient".into())
.public_key("pubkey".to_string())
.instance_id(inserted_instance.id)
.build();
let inserted_recipient = Person::create(pool, &recipient_form).await.unwrap();
let new_community = CommunityInsertForm::builder()
.name("test community lake".to_string())
.title("nada".to_owned())
.public_key("pubkey".to_string())
.instance_id(inserted_instance.id)
.build();
let inserted_community = Community::create(pool, &new_community).await.unwrap();
let new_post = PostInsertForm::builder()
.name("A test post".into())
.creator_id(inserted_person.id)
.community_id(inserted_community.id)
.build();
let inserted_post = Post::create(pool, &new_post).await.unwrap();
let comment_form = CommentInsertForm::builder()
.content("A test comment".into())
.creator_id(inserted_person.id)
.post_id(inserted_post.id)
.build();
let inserted_comment = Comment::create(pool, &comment_form, None).await.unwrap();
let comment_reply_form = CommentReplyInsertForm {
recipient_id: inserted_recipient.id,
comment_id: inserted_comment.id,
read: None,
};
let inserted_reply = CommentReply::create(pool, &comment_reply_form)
.await
.unwrap();
let expected_reply = CommentReply {
id: inserted_reply.id,
recipient_id: inserted_reply.recipient_id,
comment_id: inserted_reply.comment_id,
read: false,
published: inserted_reply.published,
};
let read_reply = CommentReply::read(pool, inserted_reply.id)
.await
.unwrap()
.unwrap();
let comment_reply_update_form = CommentReplyUpdateForm { read: Some(false) };
let updated_reply = CommentReply::update(pool, inserted_reply.id, &comment_reply_update_form)
.await
.unwrap();
Comment::delete(pool, inserted_comment.id).await.unwrap();
Post::delete(pool, inserted_post.id).await.unwrap();
Community::delete(pool, inserted_community.id)
.await
.unwrap();
Person::delete(pool, inserted_person.id).await.unwrap();
Person::delete(pool, inserted_recipient.id).await.unwrap();
Instance::delete(pool, inserted_instance.id).await.unwrap();
assert_eq!(expected_reply, read_reply);
assert_eq!(expected_reply, inserted_reply);
assert_eq!(expected_reply, updated_reply);
}
}
@@ -141,7 +141,8 @@ impl Community {
Ok(community_)
}
/// Get the community which has a given moderators or featured url, also return the collection type
/// Get the community which has a given moderators or featured url, also return the collection
/// type
pub async fn get_by_collection_url(
pool: &mut DbPool<'_>,
url: &DbUrl,

@@ -433,11 +434,7 @@ mod tests {
.await
.unwrap();
let new_person = PersonInsertForm::builder()
.name("bobbee".into())
.public_key("pubkey".to_string())
.instance_id(inserted_instance.id)
.build();
let new_person = PersonInsertForm::test_form(inserted_instance.id, "bobbee");
let inserted_person = Person::create(pool, &new_person).await.unwrap();
@@ -103,11 +103,11 @@ impl ImageDetails {
pub(crate) async fn create(
conn: &mut AsyncPgConnection,
form: &ImageDetailsForm,
) -> Result<Self, Error> {
) -> Result<usize, Error> {
insert_into(image_details::table)
.values(form)
.on_conflict_do_nothing()
.get_result::<ImageDetails>(conn)
.execute(conn)
.await
}
}
@@ -94,11 +94,15 @@ impl Instance {
.await
}
#[cfg(test)]
/// Only for use in tests
pub async fn delete_all(pool: &mut DbPool<'_>) -> Result<usize, Error> {
let conn = &mut get_conn(pool).await?;
diesel::delete(federation_queue_state::table)
.execute(conn)
.await?;
diesel::delete(instance::table).execute(conn).await
}
pub async fn allowlist(pool: &mut DbPool<'_>) -> Result<Vec<Self>, Error> {
let conn = &mut get_conn(pool).await?;
instance::table

@@ -117,15 +121,15 @@ impl Instance {
.await
}
/// returns a list of all instances, each with a flag of whether the instance is allowed or not and dead or not
/// ordered by id
/// returns a list of all instances, each with a flag of whether the instance is allowed or not
/// and dead or not ordered by id
pub async fn read_federated_with_blocked_and_dead(
pool: &mut DbPool<'_>,
) -> Result<Vec<(Self, bool, bool)>, Error> {
let conn = &mut get_conn(pool).await?;
let is_dead_expr = coalesce(instance::updated, instance::published).lt(now() - 3.days());
// this needs to be done in two steps because the meaning of the "blocked" column depends on the existence
// of any value at all in the allowlist. (so a normal join wouldn't work)
// this needs to be done in two steps because the meaning of the "blocked" column depends on the
// existence of any value at all in the allowlist. (so a normal join wouldn't work)
let use_allowlist = federation_allowlist::table
.select(count_star().gt(0))
.get_result::<bool>(conn)
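The two-step allowlist check described in the comment above can be illustrated with a small standalone sketch (an analogy, not the actual diesel query): whether an instance counts as blocked depends on whether an allowlist exists at all.

```rust
// Illustrative only: plain slices stand in for the allowlist/blocklist tables.
fn is_blocked(allowlist: &[&str], blocklist: &[&str], domain: &str) -> bool {
  if !allowlist.is_empty() {
    // An allowlist is in use: everything not on it is treated as blocked.
    !allowlist.contains(&domain)
  } else {
    // No allowlist: only explicitly blocked domains are blocked.
    blocklist.contains(&domain)
  }
}

fn main() {
  assert!(is_blocked(&["lemmy.ml"], &[], "other.tld"));
  assert!(!is_blocked(&[], &["bad.tld"], "other.tld"));
}
```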
@@ -5,6 +5,7 @@ use crate::{
actor_language::LocalUserLanguage,
local_user::{LocalUser, LocalUserInsertForm, LocalUserUpdateForm},
local_user_vote_display_mode::{LocalUserVoteDisplayMode, LocalUserVoteDisplayModeInsertForm},
site::Site,
},
utils::{
functions::{coalesce, lower},

@@ -216,6 +217,44 @@ impl LocalUser {
}
}
/// Adds some helper functions for an optional LocalUser
pub trait LocalUserOptionHelper {
fn person_id(&self) -> Option<PersonId>;
fn local_user_id(&self) -> Option<LocalUserId>;
fn show_bot_accounts(&self) -> bool;
fn show_read_posts(&self) -> bool;
fn is_admin(&self) -> bool;
fn show_nsfw(&self, site: &Site) -> bool;
}
impl LocalUserOptionHelper for Option<&LocalUser> {
fn person_id(&self) -> Option<PersonId> {
self.map(|l| l.person_id)
}
fn local_user_id(&self) -> Option<LocalUserId> {
self.map(|l| l.id)
}
fn show_bot_accounts(&self) -> bool {
self.map(|l| l.show_bot_accounts).unwrap_or(true)
}
fn show_read_posts(&self) -> bool {
self.map(|l| l.show_read_posts).unwrap_or(true)
}
fn is_admin(&self) -> bool {
self.map(|l| l.admin).unwrap_or(false)
}
fn show_nsfw(&self, site: &Site) -> bool {
self
.map(|l| l.show_nsfw)
.unwrap_or(site.content_warning.is_some())
}
}
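A self-contained sketch of the pattern behind `LocalUserOptionHelper`: an extension trait on `Option<&T>` that falls back to sensible defaults when no local user is present. The types below are illustrative stand-ins, not the real `LocalUser`.

```rust
struct User {
  admin: bool,
  show_bot_accounts: bool,
}

trait UserOptionExt {
  fn is_admin(&self) -> bool;
  fn show_bot_accounts(&self) -> bool;
}

impl UserOptionExt for Option<&User> {
  fn is_admin(&self) -> bool {
    // Anonymous viewers are never admins.
    self.map(|u| u.admin).unwrap_or(false)
  }
  fn show_bot_accounts(&self) -> bool {
    // Anonymous viewers see bot accounts by default, mirroring the diff.
    self.map(|u| u.show_bot_accounts).unwrap_or(true)
  }
}

fn main() {
  let anonymous: Option<&User> = None;
  assert!(!anonymous.is_admin());
  assert!(anonymous.show_bot_accounts());
}
```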
impl LocalUserInsertForm {
pub fn test_form(person_id: PersonId) -> Self {
Self::builder()

@@ -513,19 +513,11 @@ mod tests {
.await
.unwrap();
let new_mod = PersonInsertForm::builder()
.name("the mod".into())
.public_key("pubkey".to_string())
.instance_id(inserted_instance.id)
.build();
let new_mod = PersonInsertForm::test_form(inserted_instance.id, "the mod");
let inserted_mod = Person::create(pool, &new_mod).await.unwrap();
let new_person = PersonInsertForm::builder()
.name("jim2".into())
.public_key("pubkey".to_string())
.instance_id(inserted_instance.id)
.build();
let new_person = PersonInsertForm::test_form(inserted_instance.id, "jim2");
let inserted_person = Person::create(pool, &new_person).await.unwrap();
@@ -1,81 +1,45 @@
use crate::{
diesel::OptionalExtension,
newtypes::LocalUserId,
schema::password_reset_request::dsl::{local_user_id, password_reset_request, published, token},
schema::password_reset_request::dsl::{password_reset_request, published, token},
source::password_reset_request::{PasswordResetRequest, PasswordResetRequestForm},
traits::Crud,
utils::{get_conn, DbPool},
};
use diesel::{
delete,
dsl::{insert_into, now, IntervalDsl},
result::Error,
sql_types::Timestamptz,
ExpressionMethods,
IntoSql,
QueryDsl,
};
use diesel_async::RunQueryDsl;
#[async_trait]
impl Crud for PasswordResetRequest {
type InsertForm = PasswordResetRequestForm;
type UpdateForm = PasswordResetRequestForm;
type IdType = i32;
async fn create(pool: &mut DbPool<'_>, form: &PasswordResetRequestForm) -> Result<Self, Error> {
let conn = &mut get_conn(pool).await?;
insert_into(password_reset_request)
.values(form)
.get_result::<Self>(conn)
.await
}
async fn update(
pool: &mut DbPool<'_>,
password_reset_request_id: i32,
form: &PasswordResetRequestForm,
) -> Result<Self, Error> {
let conn = &mut get_conn(pool).await?;
diesel::update(password_reset_request.find(password_reset_request_id))
.set(form)
.get_result::<Self>(conn)
.await
}
}
impl PasswordResetRequest {
pub async fn create_token(
pub async fn create(
pool: &mut DbPool<'_>,
from_local_user_id: LocalUserId,
token_: String,
) -> Result<PasswordResetRequest, Error> {
let form = PasswordResetRequestForm {
local_user_id: from_local_user_id,
token: token_,
token: token_.into(),
};
Self::create(pool, &form).await
}
pub async fn read_from_token(pool: &mut DbPool<'_>, token_: &str) -> Result<Option<Self>, Error> {
let conn = &mut get_conn(pool).await?;
password_reset_request
insert_into(password_reset_request)
.values(form)
.get_result::<Self>(conn)
.await
}
pub async fn read_and_delete(pool: &mut DbPool<'_>, token_: &str) -> Result<Option<Self>, Error> {
let conn = &mut get_conn(pool).await?;
delete(password_reset_request)
.filter(token.eq(token_))
.filter(published.gt(now.into_sql::<Timestamptz>() - 1.days()))
.first(conn)
.await
.optional()
}
pub async fn get_recent_password_resets_count(
pool: &mut DbPool<'_>,
user_id: LocalUserId,
) -> Result<i64, Error> {
let conn = &mut get_conn(pool).await?;
password_reset_request
.filter(local_user_id.eq(user_id))
.filter(published.gt(now.into_sql::<Timestamptz>() - 1.days()))
.count()
.get_result(conn)
.await
.optional()
}
}
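A small illustrative sketch (not the real diesel code) of the single-use semantics that `read_and_delete` gives reset tokens: fetching a token also removes it, so it cannot be redeemed twice.

```rust
use std::collections::HashMap;

// Hypothetical in-memory stand-in for the password_reset_request table.
struct TokenStore {
  tokens: HashMap<String, i32>, // token -> local_user_id
}

impl TokenStore {
  fn read_and_delete(&mut self, token: &str) -> Option<i32> {
    // Removing the entry is what makes the token single-use.
    self.tokens.remove(token)
  }
}

fn main() {
  let mut store = TokenStore {
    tokens: HashMap::from([("nope".to_string(), 42)]),
  };
  assert_eq!(store.read_and_delete("nope"), Some(42));
  // A second redemption of the same token fails, as the new test below checks.
  assert_eq!(store.read_and_delete("nope"), None);
}
```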
@@ -94,62 +58,60 @@ mod tests {
traits::Crud,
utils::build_db_pool_for_tests,
};
use lemmy_utils::error::LemmyResult;
use pretty_assertions::assert_eq;
use serial_test::serial;
#[tokio::test]
#[serial]
async fn test_crud() {
async fn test_password_reset() -> LemmyResult<()> {
let pool = &build_db_pool_for_tests().await;
let pool = &mut pool.into();
let inserted_instance = Instance::read_or_create(pool, "my_domain.tld".to_string())
.await
.unwrap();
let new_person = PersonInsertForm::builder()
.name("thommy prw".into())
.public_key("pubkey".to_string())
.instance_id(inserted_instance.id)
.build();
let inserted_person = Person::create(pool, &new_person).await.unwrap();
// Setup
let inserted_instance = Instance::read_or_create(pool, "my_domain.tld".to_string()).await?;
let new_person = PersonInsertForm::test_form(inserted_instance.id, "thommy prw");
let inserted_person = Person::create(pool, &new_person).await?;
let new_local_user = LocalUserInsertForm::builder()
.person_id(inserted_person.id)
.password_encrypted("pass".to_string())
.build();
let inserted_local_user = LocalUser::create(pool, &new_local_user, vec![]).await?;
let inserted_local_user = LocalUser::create(pool, &new_local_user, vec![])
.await
.unwrap();
// Create password reset token
let token = "nope";
let inserted_password_reset_request =
PasswordResetRequest::create_token(pool, inserted_local_user.id, token.to_string())
.await
.unwrap();
PasswordResetRequest::create(pool, inserted_local_user.id, token.to_string()).await?;
let expected_password_reset_request = PasswordResetRequest {
id: inserted_password_reset_request.id,
local_user_id: inserted_local_user.id,
token: token.to_string(),
published: inserted_password_reset_request.published,
};
let read_password_reset_request = PasswordResetRequest::read_from_token(pool, token)
.await
.unwrap()
// Read it and verify
let read_password_reset_request = PasswordResetRequest::read_and_delete(pool, token)
.await?
.unwrap();
let num_deleted = Person::delete(pool, inserted_person.id).await.unwrap();
Instance::delete(pool, inserted_instance.id).await.unwrap();
assert_eq!(expected_password_reset_request, read_password_reset_request);
assert_eq!(
expected_password_reset_request,
inserted_password_reset_request
inserted_password_reset_request.id,
read_password_reset_request.id
);
assert_eq!(
inserted_password_reset_request.local_user_id,
read_password_reset_request.local_user_id
);
assert_eq!(
inserted_password_reset_request.token,
read_password_reset_request.token
);
assert_eq!(
inserted_password_reset_request.published,
read_password_reset_request.published
);
// Cannot reuse same token again
let read_password_reset_request = PasswordResetRequest::read_and_delete(pool, token).await?;
assert!(read_password_reset_request.is_none());
// Cleanup
let num_deleted = Person::delete(pool, inserted_person.id).await?;
Instance::delete(pool, inserted_instance.id).await?;
assert_eq!(1, num_deleted);
Ok(())
}
}
@@ -12,7 +12,14 @@ use crate::{
traits::{ApubActor, Crud, Followable},
utils::{functions::lower, get_conn, naive_now, DbPool},
};
use diesel::{dsl::insert_into, result::Error, CombineDsl, ExpressionMethods, JoinOnDsl, QueryDsl};
use diesel::{
dsl::{insert_into, not},
result::Error,
CombineDsl,
ExpressionMethods,
JoinOnDsl,
QueryDsl,
};
use diesel_async::RunQueryDsl;
#[async_trait]

@@ -55,7 +62,8 @@ impl Crud for Person {
impl Person {
/// Update or insert the person.
///
/// This is necessary for federation, because Activitypub doesn't distinguish between these actions.
/// This is necessary for federation, because Activitypub doesn't distinguish between these
/// actions.
pub async fn upsert(pool: &mut DbPool<'_>, form: &PersonInsertForm) -> Result<Self, Error> {
let conn = &mut get_conn(pool).await?;
insert_into(person::table)

@@ -99,6 +107,8 @@ impl Person {
.inner_join(post::table)
.inner_join(community::table.on(post::community_id.eq(community::id)))
.filter(community::local.eq(true))
.filter(not(community::deleted))
.filter(not(community::removed))
.filter(comment::creator_id.eq(for_creator_id))
.select(community::id)
.union(

@@ -115,11 +125,7 @@ impl Person {
impl PersonInsertForm {
pub fn test_form(instance_id: InstanceId, name: &str) -> Self {
Self::builder()
.name(name.to_owned())
.public_key("pubkey".to_string())
.instance_id(instance_id)
.build()
Self::new(name.to_owned(), "pubkey".to_string(), instance_id)
}
}

@@ -239,11 +245,7 @@ mod tests {
.await
.unwrap();
let new_person = PersonInsertForm::builder()
.name("holly".into())
.public_key("nada".to_owned())
.instance_id(inserted_instance.id)
.build();
let new_person = PersonInsertForm::test_form(inserted_instance.id, "holly");
let inserted_person = Person::create(pool, &new_person).await.unwrap();

@@ -262,7 +264,7 @@ mod tests {
local: true,
bot_account: false,
private_key: None,
public_key: "nada".to_owned(),
public_key: "pubkey".to_owned(),
last_refreshed_at: inserted_person.published,
inbox_url: inserted_person.inbox_url.clone(),
shared_inbox_url: None,

@@ -302,17 +304,9 @@ mod tests {
.await
.unwrap();
let person_form_1 = PersonInsertForm::builder()
.name("erich".into())
.public_key("pubkey".to_string())
.instance_id(inserted_instance.id)
.build();
let person_form_1 = PersonInsertForm::test_form(inserted_instance.id, "erich");
let person_1 = Person::create(pool, &person_form_1).await.unwrap();
let person_form_2 = PersonInsertForm::builder()
.name("michele".into())
.public_key("pubkey".to_string())
.instance_id(inserted_instance.id)
.build();
let person_form_2 = PersonInsertForm::test_form(inserted_instance.id, "michele");
let person_2 = Person::create(pool, &person_form_2).await.unwrap();
let follow_form = PersonFollowerForm {
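The `PersonInsertForm::test_form` change above swaps the builder for `Self::new(...)`, which lines up with the `derive-new.workspace = true` dependency added earlier in this diff. A hedged, self-contained sketch of that pattern, assuming the `derive_new` crate and illustrative field types:

```rust
use derive_new::new;

// #[derive(new)] generates a positional constructor in field order.
#[derive(new, Debug)]
struct PersonInsertForm {
  name: String,
  public_key: String,
  instance_id: i32, // stand-in for InstanceId
}

// A test helper can then wrap the generated constructor with defaults.
fn test_form(instance_id: i32, name: &str) -> PersonInsertForm {
  PersonInsertForm::new(name.to_owned(), "pubkey".to_string(), instance_id)
}

fn main() {
  println!("{:?}", test_form(1, "example"));
}
```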
@@ -74,117 +74,3 @@ impl PersonMention {
.optional()
}
}
#[cfg(test)]
#[allow(clippy::unwrap_used)]
#[allow(clippy::indexing_slicing)]
mod tests {
use crate::{
source::{
comment::{Comment, CommentInsertForm},
community::{Community, CommunityInsertForm},
instance::Instance,
person::{Person, PersonInsertForm},
person_mention::{PersonMention, PersonMentionInsertForm, PersonMentionUpdateForm},
post::{Post, PostInsertForm},
},
traits::Crud,
utils::build_db_pool_for_tests,
};
use pretty_assertions::assert_eq;
use serial_test::serial;
#[tokio::test]
#[serial]
async fn test_crud() {
let pool = &build_db_pool_for_tests().await;
let pool = &mut pool.into();
let inserted_instance = Instance::read_or_create(pool, "my_domain.tld".to_string())
.await
.unwrap();
let new_person = PersonInsertForm::builder()
.name("terrylake".into())
.public_key("pubkey".to_string())
.instance_id(inserted_instance.id)
.build();
let inserted_person = Person::create(pool, &new_person).await.unwrap();
let recipient_form = PersonInsertForm::builder()
.name("terrylakes recipient".into())
.public_key("pubkey".to_string())
.instance_id(inserted_instance.id)
.build();
let inserted_recipient = Person::create(pool, &recipient_form).await.unwrap();
let new_community = CommunityInsertForm::builder()
.name("test community lake".to_string())
.title("nada".to_owned())
.public_key("pubkey".to_string())
.instance_id(inserted_instance.id)
.build();
let inserted_community = Community::create(pool, &new_community).await.unwrap();
let new_post = PostInsertForm::builder()
.name("A test post".into())
.creator_id(inserted_person.id)
.community_id(inserted_community.id)
.build();
let inserted_post = Post::create(pool, &new_post).await.unwrap();
let comment_form = CommentInsertForm::builder()
.content("A test comment".into())
.creator_id(inserted_person.id)
.post_id(inserted_post.id)
.build();
let inserted_comment = Comment::create(pool, &comment_form, None).await.unwrap();
let person_mention_form = PersonMentionInsertForm {
recipient_id: inserted_recipient.id,
comment_id: inserted_comment.id,
read: None,
};
let inserted_mention = PersonMention::create(pool, &person_mention_form)
.await
.unwrap();
let expected_mention = PersonMention {
id: inserted_mention.id,
recipient_id: inserted_mention.recipient_id,
comment_id: inserted_mention.comment_id,
read: false,
published: inserted_mention.published,
};
let read_mention = PersonMention::read(pool, inserted_mention.id)
.await
.unwrap()
.unwrap();
let person_mention_update_form = PersonMentionUpdateForm { read: Some(false) };
let updated_mention =
PersonMention::update(pool, inserted_mention.id, &person_mention_update_form)
.await
.unwrap();
Comment::delete(pool, inserted_comment.id).await.unwrap();
Post::delete(pool, inserted_post.id).await.unwrap();
Community::delete(pool, inserted_community.id)
.await
.unwrap();
Person::delete(pool, inserted_person.id).await.unwrap();
Person::delete(pool, inserted_recipient.id).await.unwrap();
Instance::delete(pool, inserted_instance.id).await.unwrap();
assert_eq!(expected_mention, read_mention);
assert_eq!(expected_mention, inserted_mention);
assert_eq!(expected_mention, updated_mention);
}
}
@@ -401,11 +401,7 @@ mod tests {
.await
.unwrap();
let new_person = PersonInsertForm::builder()
.name("jim".into())
.public_key("pubkey".to_string())
.instance_id(inserted_instance.id)
.build();
let new_person = PersonInsertForm::test_form(inserted_instance.id, "jim");
let inserted_person = Person::create(pool, &new_person).await.unwrap();

@@ -101,11 +101,7 @@ mod tests {
let inserted_instance = Instance::read_or_create(pool, "my_domain.tld".to_string())
.await
.unwrap();
let person_form = PersonInsertForm::builder()
.name("jim".into())
.public_key("pubkey".to_string())
.instance_id(inserted_instance.id)
.build();
let person_form = PersonInsertForm::test_form(inserted_instance.id, "jim");
let person = Person::create(pool, &person_form).await.unwrap();
let community_form = CommunityInsertForm::builder()

@@ -111,19 +111,11 @@ mod tests {
.await
.unwrap();
let creator_form = PersonInsertForm::builder()
.name("creator_pm".into())
.public_key("pubkey".to_string())
.instance_id(inserted_instance.id)
.build();
let creator_form = PersonInsertForm::test_form(inserted_instance.id, "creator_pm");
let inserted_creator = Person::create(pool, &creator_form).await.unwrap();
let recipient_form = PersonInsertForm::builder()
.name("recipient_pm".into())
.public_key("pubkey".to_string())
.instance_id(inserted_instance.id)
.build();
let recipient_form = PersonInsertForm::test_form(inserted_instance.id, "recipient_pm");
let inserted_recipient = Person::create(pool, &recipient_form).await.unwrap();
@@ -24,6 +24,7 @@ pub mod aggregates;
#[cfg(feature = "full")]
pub mod impls;
pub mod newtypes;
pub mod sensitive;
#[cfg(feature = "full")]
#[rustfmt::skip]
#[allow(clippy::wildcard_imports)]

@@ -127,11 +127,13 @@ pub struct LanguageId(pub i32);
/// The comment reply id.
pub struct CommentReplyId(i32);
#[derive(Debug, Copy, Clone, Hash, Eq, PartialEq, Serialize, Deserialize, Default)]
#[derive(
Debug, Copy, Clone, Hash, Eq, PartialEq, Serialize, Deserialize, Default, Ord, PartialOrd,
)]
#[cfg_attr(feature = "full", derive(DieselNewType, TS))]
#[cfg_attr(feature = "full", ts(export))]
/// The instance id.
pub struct InstanceId(i32);
pub struct InstanceId(pub i32);
#[derive(
Debug, Copy, Clone, Hash, Eq, PartialEq, Serialize, Deserialize, Default, PartialOrd, Ord,
@@ -6,18 +6,19 @@ use tracing::info;
const MIGRATIONS: EmbeddedMigrations = embed_migrations!();
/// This SQL code sets up the `r` schema, which contains things that can be safely dropped and replaced
/// instead of being changed using migrations. It may not create or modify things outside of the `r` schema
/// (indicated by `r.` before the name), unless a comment says otherwise.
/// This SQL code sets up the `r` schema, which contains things that can be safely dropped and
/// replaced instead of being changed using migrations. It may not create or modify things outside
/// of the `r` schema (indicated by `r.` before the name), unless a comment says otherwise.
///
/// Currently, this code is only run after the server starts and there's at least 1 pending migration
/// to run. This means every time you change something here, you must also create a migration (a blank
/// up.sql file works fine). This behavior will be removed when we implement a better way to avoid
/// useless schema updates and locks.
/// Currently, this code is only run after the server starts and there's at least 1 pending
/// migration to run. This means every time you change something here, you must also create a
/// migration (a blank up.sql file works fine). This behavior will be removed when we implement a
/// better way to avoid useless schema updates and locks.
///
/// If you add something that depends on something (such as a table) created in a new migration, then down.sql
/// must use `CASCADE` when dropping it. This doesn't need to be fixed in old migrations because the
/// "replaceable-schema" migration runs `DROP SCHEMA IF EXISTS r CASCADE` in down.sql.
/// If you add something that depends on something (such as a table) created in a new migration,
/// then down.sql must use `CASCADE` when dropping it. This doesn't need to be fixed in old
/// migrations because the "replaceable-schema" migration runs `DROP SCHEMA IF EXISTS r CASCADE` in
/// down.sql.
const REPLACEABLE_SCHEMA: &[&str] = &[
"DROP SCHEMA IF EXISTS r CASCADE;",
"CREATE SCHEMA r;",

@@ -29,9 +30,10 @@ pub fn run(db_url: &str) -> Result<(), LemmyError> {
// Migrations don't support async connection
let mut conn = PgConnection::establish(db_url).with_context(|| "Error connecting to database")?;
// Run all pending migrations except for the newest one, then run the newest one in the same transaction
// as `REPLACEABLE_SCHEMA`. This code will be becone less hacky when the conditional setup of things in
// `REPLACEABLE_SCHEMA` is done without using the number of pending migrations.
// Run all pending migrations except for the newest one, then run the newest one in the same
// transaction as `REPLACEABLE_SCHEMA`. This code will be becone less hacky when the conditional
// setup of things in `REPLACEABLE_SCHEMA` is done without using the number of pending
// migrations.
info!("Running Database migrations (This may take a long time)...");
let migrations = conn
.pending_migrations(MIGRATIONS)
crates/db_schema/src/sensitive.rs (new file, 57 lines)

@@ -0,0 +1,57 @@
use serde::{Deserialize, Serialize};
use std::{fmt::Debug, ops::Deref};
#[cfg(feature = "full")]
use ts_rs::TS;
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Deserialize, Serialize, Default)]
#[cfg_attr(feature = "full", derive(DieselNewType))]
#[serde(transparent)]
pub struct SensitiveString(String);
impl SensitiveString {
pub fn into_inner(self) -> String {
self.0
}
}
impl Debug for SensitiveString {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_struct("Sensitive").finish()
}
}
impl AsRef<[u8]> for SensitiveString {
fn as_ref(&self) -> &[u8] {
self.0.as_ref()
}
}
impl Deref for SensitiveString {
type Target = str;
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl From<String> for SensitiveString {
fn from(t: String) -> Self {
SensitiveString(t)
}
}
#[cfg(feature = "full")]
impl TS for SensitiveString {
fn name() -> String {
"string".to_string()
}
fn name_with_type_args(_args: Vec<String>) -> String {
"string".to_string()
}
fn dependencies() -> Vec<ts_rs::Dependency> {
Vec::new()
}
fn transparent() -> bool {
true
}
}
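A minimal standalone sketch of the newtype pattern that `SensitiveString` introduces: the `Debug` impl redacts the wrapped value so secrets cannot leak into logs, while `Deref` keeps read access. Names below are illustrative, not part of the diff.

```rust
use std::fmt;
use std::ops::Deref;

struct Redacted(String);

impl fmt::Debug for Redacted {
  fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
    // Never print the inner value.
    f.debug_struct("Redacted").finish()
  }
}

impl Deref for Redacted {
  type Target = str;
  fn deref(&self) -> &Self::Target {
    &self.0
  }
}

fn main() {
  let token = Redacted("super-secret".to_string());
  println!("{:?}", token); // prints `Redacted`, not the secret
  assert!(token.starts_with("super")); // Deref still allows read access
}
```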
@@ -41,7 +41,8 @@ pub struct Comment {
#[cfg(feature = "full")]
#[cfg_attr(feature = "full", serde(with = "LtreeDef"))]
#[cfg_attr(feature = "full", ts(type = "string"))]
/// The path / tree location of a comment, separated by dots, ending with the comment's id. Ex: 0.24.27
/// The path / tree location of a comment, separated by dots, ending with the comment's id. Ex:
/// 0.24.27
pub path: Ltree,
#[cfg(not(feature = "full"))]
pub path: String,
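A tiny illustrative sketch (not from this commit) of how a materialized comment path such as `0.24.27` from the doc comment above can be split into ids, with the last segment being the comment's own id:

```rust
// Split a dotted ltree-style path into numeric ids.
fn path_ids(path: &str) -> Vec<i32> {
  path.split('.').filter_map(|s| s.parse().ok()).collect()
}

fn main() {
  let ids = path_ids("0.24.27");
  assert_eq!(ids, vec![0, 24, 27]);
  assert_eq!(ids.last(), Some(&27)); // the comment's id
}
```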
@@ -2,6 +2,7 @@
use crate::schema::{community, community_follower, community_moderator, community_person_ban};
use crate::{
newtypes::{CommunityId, DbUrl, InstanceId, PersonId},
sensitive::SensitiveString,
source::placeholder_apub_url,
CommunityVisibility,
};

@@ -39,7 +40,7 @@ pub struct Community {
/// Whether the community is local.
pub local: bool,
#[serde(skip)]
pub private_key: Option<String>,
pub private_key: Option<SensitiveString>,
#[serde(skip)]
pub public_key: String,
#[serde(skip)]

@@ -2,6 +2,7 @@
use crate::schema::local_user;
use crate::{
newtypes::{LocalUserId, PersonId},
sensitive::SensitiveString,
ListingType,
PostListingMode,
SortType,

@@ -24,8 +25,8 @@ pub struct LocalUser {
/// The person_id for the local user.
pub person_id: PersonId,
#[serde(skip)]
pub password_encrypted: String,
pub email: Option<String>,
pub password_encrypted: SensitiveString,
pub email: Option<SensitiveString>,
/// Whether to show NSFW content.
pub show_nsfw: bool,
pub theme: String,

@@ -47,7 +48,7 @@ pub struct LocalUser {
/// Whether their registration application has been accepted.
pub accepted_application: bool,
#[serde(skip)]
pub totp_2fa_secret: Option<String>,
pub totp_2fa_secret: Option<SensitiveString>,
/// Open links in a new tab.
pub open_links_in_new_tab: bool,
pub blur_nsfw: bool,

@@ -61,7 +62,8 @@ pub struct LocalUser {
pub totp_2fa_enabled: bool,
/// Whether to allow keyboard navigation (for browsing and interacting with posts and comments).
pub enable_keyboard_navigation: bool,
/// Whether user avatars and inline images in the UI that are gifs should be allowed to play or should be paused
/// Whether user avatars and inline images in the UI that are gifs should be allowed to play or
/// should be paused
pub enable_animated_images: bool,
/// Whether to auto-collapse bot comments.
pub collapse_bot_comments: bool,

@@ -1,6 +1,6 @@
use crate::newtypes::LocalUserId;
#[cfg(feature = "full")]
use crate::schema::login_token;
use crate::{newtypes::LocalUserId, sensitive::SensitiveString};
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use serde_with::skip_serializing_none;

@@ -18,7 +18,7 @@ use ts_rs::TS;
pub struct LoginToken {
/// Jwt token for this login
#[serde(skip)]
pub token: String,
pub token: SensitiveString,
pub user_id: LocalUserId,
/// Time of login
pub published: DateTime<Utc>,

@@ -31,7 +31,7 @@ pub struct LoginToken {
#[cfg_attr(feature = "full", derive(Insertable, AsChangeset))]
#[cfg_attr(feature = "full", diesel(table_name = login_token))]
pub struct LoginTokenCreateForm {
pub token: String,
pub token: SensitiveString,
pub user_id: LocalUserId,
pub ip: Option<String>,
pub user_agent: Option<String>,
Some files were not shown because too many files have changed in this diff.