diff --git a/.cargo/config b/.cargo/config deleted file mode 100644 index bff29e6e1..000000000 --- a/.cargo/config +++ /dev/null @@ -1,2 +0,0 @@ -[build] -rustflags = ["--cfg", "tokio_unstable"] diff --git a/.dockerignore b/.dockerignore index 5982307c0..43381f7b7 100644 --- a/.dockerignore +++ b/.dockerignore @@ -5,4 +5,4 @@ api_tests ansible tests *.sh -pictrs \ No newline at end of file +pictrs diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 3d1bd7c72..06a92b09b 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -1,3 +1,3 @@ -* @Nutomic @dessalines @phiresky +* @Nutomic @dessalines @phiresky @dullbananas @SleeplessOne1917 crates/apub/ @Nutomic -migrations/ @dessalines @phiresky +migrations/ @dessalines @phiresky @dullbananas diff --git a/.github/ISSUE_TEMPLATE/BUG_REPORT.yml b/.github/ISSUE_TEMPLATE/BUG_REPORT.yml index a4028afd0..3d3caa261 100644 --- a/.github/ISSUE_TEMPLATE/BUG_REPORT.yml +++ b/.github/ISSUE_TEMPLATE/BUG_REPORT.yml @@ -20,6 +20,8 @@ body: required: true - label: Is this only a single bug? Do not put multiple bugs in one issue. required: true + - label: Do you agree to follow the rules in our [Code of Conduct](https://join-lemmy.org/docs/code_of_conduct.html)? + required: true - label: Is this a backend issue? Use the [lemmy-ui](https://github.com/LemmyNet/lemmy-ui) repo for UI / frontend issues. required: true - type: textarea diff --git a/.github/ISSUE_TEMPLATE/FEATURE_REQUEST.yml b/.github/ISSUE_TEMPLATE/FEATURE_REQUEST.yml index 40ef2caf3..f50a93ff2 100644 --- a/.github/ISSUE_TEMPLATE/FEATURE_REQUEST.yml +++ b/.github/ISSUE_TEMPLATE/FEATURE_REQUEST.yml @@ -20,6 +20,8 @@ body: required: true - label: Is this a backend issue? Use the [lemmy-ui](https://github.com/LemmyNet/lemmy-ui) repo for UI / frontend issues. required: true + - label: Do you agree to follow the rules in our [Code of Conduct](https://join-lemmy.org/docs/code_of_conduct.html)? 
+ required: true - type: textarea id: problem attributes: diff --git a/.gitignore b/.gitignore index 186713e1f..07a838201 100644 --- a/.gitignore +++ b/.gitignore @@ -20,6 +20,7 @@ query_testing/**/reports/*.json api_tests/node_modules api_tests/.yalc api_tests/yalc.lock +api_tests/pict-rs # pictrs data pictrs/ diff --git a/.rustfmt.toml b/.rustfmt.toml index 80c01a69b..dfeeb3bfd 100644 --- a/.rustfmt.toml +++ b/.rustfmt.toml @@ -3,3 +3,5 @@ edition = "2021" imports_layout = "HorizontalVertical" imports_granularity = "Crate" group_imports = "One" +wrap_comments = true +comment_width = 100 diff --git a/.woodpecker.yml b/.woodpecker.yml index ff4685ac2..885796cac 100644 --- a/.woodpecker.yml +++ b/.woodpecker.yml @@ -2,32 +2,34 @@ # See https://github.com/woodpecker-ci/woodpecker/issues/1677 variables: - - &rust_image "rust:1.72.1" + - &rust_image "rust:1.81" + - &rust_nightly_image "rustlang/rust:nightly" + - &install_pnpm "corepack enable pnpm" + - &install_binstall "wget -O- https://github.com/cargo-bins/cargo-binstall/releases/latest/download/cargo-binstall-x86_64-unknown-linux-musl.tgz | tar -xvz -C /usr/local/cargo/bin" + - install_diesel_cli: &install_diesel_cli + - apt-get update && apt-get install -y postgresql-client + - cargo install diesel_cli --no-default-features --features postgres + - export PATH="$CARGO_HOME/bin:$PATH" - &slow_check_paths - - path: - # rust source code - - "**/*.rs" - - "**/Cargo.toml" - - "Cargo.lock" - # database migrations - - "migrations/**" - # typescript tests - - "api_tests/**" - # config files and scripts used by ci - - ".woodpecker.yml" - - ".rustfmt.toml" - - "scripts/update_config_defaults.sh" - - "diesel.toml" - - ".gitmodules" - -# Broken for cron jobs currently, see -# https://github.com/woodpecker-ci/woodpecker/issues/1716 -# clone: -# git: -# image: woodpeckerci/plugin-git -# settings: -# recursive: true -# submodule_update_remote: true + - event: pull_request + path: + include: [ + # rust source code + "crates/**", + "src/**", + "**/Cargo.toml", + "Cargo.lock", + # database migrations + "migrations/**", + # typescript tests + "api_tests/**", + # config files and scripts used by ci + ".woodpecker.yml", + ".rustfmt.toml", + "scripts/update_config_defaults.sh", + "diesel.toml", + ".gitmodules", + ] steps: prepare_repo: @@ -36,64 +38,64 @@ steps: - apk add git - git submodule init - git submodule update + when: + - event: [pull_request, tag] prettier_check: - group: format - image: tmknom/prettier:3.0.0 + image: tmknom/prettier:3.2.5 commands: - - prettier -c . '!**/volumes' '!**/dist' '!target' '!**/translations' + - prettier -c . 
'!**/volumes' '!**/dist' '!target' '!**/translations' '!api_tests/pnpm-lock.yaml' + when: + - event: pull_request toml_fmt: - group: format - image: tamasfe/taplo:0.8.1 + image: tamasfe/taplo:0.9.3 commands: - taplo format --check + when: + - event: pull_request sql_fmt: - group: format - image: backplane/pgformatter:latest + image: backplane/pgformatter commands: - ./scripts/sql_format_check.sh + when: + - event: pull_request cargo_fmt: - group: format - image: rustlang/rust:nightly + image: *rust_nightly_image environment: # store cargo data in repo folder so that it gets cached between steps - CARGO_HOME: .cargo + CARGO_HOME: .cargo_home commands: - # need make existing toolchain available + - rustup component add rustfmt - cargo +nightly fmt -- --check + when: + - event: pull_request - restore-cache: - image: meltwater/drone-cache:v1 - pull: true - settings: - restore: true - endpoint: - from_secret: MINIO_ENDPOINT - access-key: - from_secret: MINIO_WRITE_USER - secret-key: - from_secret: MINIO_WRITE_PASSWORD - bucket: - from_secret: MINIO_BUCKET - region: us-east-1 - cache_key: "rust-cache" - path-style: true - mount: - - ".cargo" - - "target" - - "api_tests/node_modules" - secrets: - [MINIO_ENDPOINT, MINIO_WRITE_USER, MINIO_WRITE_PASSWORD, MINIO_BUCKET] - when: *slow_check_paths + cargo_shear: + image: *rust_nightly_image + commands: + - *install_binstall + - cargo binstall -y cargo-shear + - cargo shear + when: + - event: pull_request + + ignored_files: + image: alpine:3 + commands: + - apk add git + - IGNORED=$(git ls-files --cached -i --exclude-standard) + - if [[ "$IGNORED" ]]; then echo "Ignored files present:\n$IGNORED\n"; exit 1; fi + when: + - event: pull_request # make sure api builds with default features (used by other crates relying on lemmy api) check_api_common_default_features: image: *rust_image environment: - CARGO_HOME: .cargo + CARGO_HOME: .cargo_home commands: - cargo check --package lemmy_api_common when: *slow_check_paths @@ -101,7 +103,7 @@ steps: lemmy_api_common_doesnt_depend_on_diesel: image: *rust_image environment: - CARGO_HOME: .cargo + CARGO_HOME: .cargo_home commands: - "! 
cargo tree -p lemmy_api_common --no-default-features -i diesel" when: *slow_check_paths @@ -109,7 +111,7 @@ steps: lemmy_api_common_works_with_wasm: image: *rust_image environment: - CARGO_HOME: .cargo + CARGO_HOME: .cargo_home commands: - "rustup target add wasm32-unknown-unknown" - "cargo check --target wasm32-unknown-unknown -p lemmy_api_common" @@ -118,7 +120,7 @@ steps: check_defaults_hjson_updated: image: *rust_image environment: - CARGO_HOME: .cargo + CARGO_HOME: .cargo_home commands: - export LEMMY_CONFIG_LOCATION=./config/config.hjson - ./scripts/update_config_defaults.sh config/defaults_current.hjson @@ -126,138 +128,160 @@ steps: when: *slow_check_paths check_diesel_schema: - image: willsquire/diesel-cli + image: *rust_image environment: - CARGO_HOME: .cargo + CARGO_HOME: .cargo_home DATABASE_URL: postgres://lemmy:password@database:5432/lemmy commands: + - <<: *install_diesel_cli - diesel migration run - diesel print-schema --config-file=diesel.toml > tmp.schema - diff tmp.schema crates/db_schema/src/schema.rs when: *slow_check_paths - check_diesel_migration_revertable: - image: willsquire/diesel-cli + check_db_perf_tool: + image: *rust_image environment: - CARGO_HOME: .cargo - DATABASE_URL: postgres://lemmy:password@database:5432/lemmy + LEMMY_DATABASE_URL: postgres://lemmy:password@database:5432/lemmy + RUST_BACKTRACE: "1" + CARGO_HOME: .cargo_home commands: - - diesel migration run - - diesel migration redo + # same as scripts/db_perf.sh but without creating a new database server + - export LEMMY_CONFIG_LOCATION=config/config.hjson + - cargo run --package lemmy_db_perf -- --posts 10 --read-post-pages 1 when: *slow_check_paths cargo_clippy: image: *rust_image environment: - CARGO_HOME: .cargo + CARGO_HOME: .cargo_home commands: - # when adding new clippy lints, make sure to also add them in scripts/lint.sh - rustup component add clippy - - cargo clippy --workspace --tests --all-targets --features console -- - -D warnings -D deprecated -D clippy::perf -D clippy::complexity - -D clippy::style -D clippy::correctness -D clippy::suspicious - -D clippy::dbg_macro -D clippy::inefficient_to_string - -D clippy::items-after-statements -D clippy::implicit_clone - -D clippy::cast_lossless -D clippy::manual_string_new - -D clippy::redundant_closure_for_method_calls - -D clippy::unused_self - -A clippy::uninlined_format_args - -D clippy::get_first - -D clippy::explicit_into_iter_loop - -D clippy::explicit_iter_loop - -D clippy::needless_collect - -D clippy::unwrap_used - -D clippy::indexing_slicing + - cargo clippy --workspace --tests --all-targets -- -D warnings when: *slow_check_paths cargo_build: image: *rust_image environment: - CARGO_HOME: .cargo + CARGO_HOME: .cargo_home commands: - cargo build - mv target/debug/lemmy_server target/lemmy_server when: *slow_check_paths cargo_test: - group: tests image: *rust_image environment: LEMMY_DATABASE_URL: postgres://lemmy:password@database:5432/lemmy RUST_BACKTRACE: "1" - CARGO_HOME: .cargo + CARGO_HOME: .cargo_home + LEMMY_TEST_FAST_FEDERATION: "1" commands: - export LEMMY_CONFIG_LOCATION=../../config/config.hjson - cargo test --workspace --no-fail-fast when: *slow_check_paths + check_diesel_migration: + # TODO: use willsquire/diesel-cli image when shared libraries become optional in lemmy_server + image: *rust_image + environment: + LEMMY_DATABASE_URL: postgres://lemmy:password@database:5432/lemmy + RUST_BACKTRACE: "1" + CARGO_HOME: .cargo_home + DATABASE_URL: postgres://lemmy:password@database:5432/lemmy + PGUSER: lemmy + PGPASSWORD: 
password + PGHOST: database + PGDATABASE: lemmy + commands: + # Install diesel_cli + - <<: *install_diesel_cli + # Run all migrations + - diesel migration run + - psql -c "DROP SCHEMA IF EXISTS r CASCADE;" + - pg_dump --no-owner --no-privileges --no-table-access-method --schema-only --no-sync -f before.sqldump + # Make sure that the newest migration is revertable without the `r` schema + - diesel migration redo + # Run schema setup twice, which fails on the 2nd time if `DROP SCHEMA IF EXISTS r CASCADE` drops the wrong things + - alias lemmy_schema_setup="target/lemmy_server --disable-scheduled-tasks --disable-http-server --disable-activity-sending" + - lemmy_schema_setup + - lemmy_schema_setup + # Make sure that the newest migration is revertable with the `r` schema + - diesel migration redo + # Check for changes in the schema, which would be caused by an incorrect migration + - psql -c "DROP SCHEMA IF EXISTS r CASCADE;" + - pg_dump --no-owner --no-privileges --no-table-access-method --schema-only --no-sync -f after.sqldump + - diff before.sqldump after.sqldump + when: *slow_check_paths + run_federation_tests: - group: tests - image: node:20-bookworm-slim + image: node:22-bookworm-slim environment: LEMMY_DATABASE_URL: postgres://lemmy:password@database:5432 DO_WRITE_HOSTS_FILE: "1" commands: - - apt update && apt install -y bash curl postgresql-client + - *install_pnpm + - apt-get update && apt-get install -y bash curl postgresql-client - bash api_tests/prepare-drone-federation-test.sh - cd api_tests/ - - yarn - - yarn api-test + - pnpm i + - pnpm api-test when: *slow_check_paths - rebuild-cache: - image: meltwater/drone-cache:v1 - pull: true - settings: - rebuild: true - endpoint: - from_secret: MINIO_ENDPOINT - access-key: - from_secret: MINIO_WRITE_USER - secret-key: - from_secret: MINIO_WRITE_PASSWORD - bucket: - from_secret: MINIO_BUCKET - cache_key: "rust-cache" - region: us-east-1 - path-style: true - mount: - - ".cargo" - - "target" - - "api_tests/node_modules" - secrets: - [MINIO_ENDPOINT, MINIO_WRITE_USER, MINIO_WRITE_PASSWORD, MINIO_BUCKET] + federation_tests_server_output: + image: alpine:3 + commands: + # `|| true` prevents this step from appearing to fail if the server output files don't exist + - cat target/log/lemmy_*.out || true + - "# If you can't see all output, then use the download button" when: - - event: push - branch: main + - event: pull_request + status: failure publish_release_docker: image: woodpeckerci/plugin-docker-buildx - secrets: [docker_username, docker_password] settings: repo: dessalines/lemmy dockerfile: docker/Dockerfile - # TODO fix arm build: see: https://woodpecker.join-lemmy.org/repos/129/pipeline/2888/20 - # platforms: linux/amd64,linux/arm64 - platforms: linux/amd64 + username: + from_secret: docker_username + password: + from_secret: docker_password + platforms: linux/amd64, linux/arm64 build_args: - RUST_RELEASE_MODE=release tag: ${CI_COMMIT_TAG} when: - event: tag + - event: tag nightly_build: image: woodpeckerci/plugin-docker-buildx - secrets: [docker_username, docker_password] settings: repo: dessalines/lemmy dockerfile: docker/Dockerfile + username: + from_secret: docker_username + password: + from_secret: docker_password platforms: linux/amd64,linux/arm64 build_args: - RUST_RELEASE_MODE=release tag: dev when: - event: cron + - event: cron + + # using https://github.com/pksunkara/cargo-workspaces + publish_to_crates_io: + image: *rust_image + commands: + - *install_binstall + # Install cargo-workspaces + - cargo binstall -y cargo-workspaces 
+ - cp -r migrations crates/db_schema/ + - cargo workspaces publish --token "$CARGO_API_TOKEN" --from-git --allow-dirty --no-verify --allow-branch "${CI_COMMIT_TAG}" --yes custom "${CI_COMMIT_TAG}" + secrets: [cargo_api_token] + when: + - event: tag notify_on_failure: image: alpine:3 @@ -265,7 +289,8 @@ steps: - apk add curl - "curl -d'Lemmy CI build failed: ${CI_PIPELINE_URL}' ntfy.sh/lemmy_drone_ci" when: - status: [failure] + - event: [pull_request, tag] + status: failure notify_on_tag_deploy: image: alpine:3 @@ -273,11 +298,12 @@ steps: - apk add curl - "curl -d'lemmy:${CI_COMMIT_TAG} deployed' ntfy.sh/lemmy_drone_ci" when: - event: tag + - event: tag services: database: - image: postgres:15.2-alpine + # 15-alpine image necessary because of diesel tests + image: pgautoupgrade/pgautoupgrade:15-alpine environment: POSTGRES_USER: lemmy POSTGRES_PASSWORD: password diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md deleted file mode 100644 index 75015d8f5..000000000 --- a/CONTRIBUTING.md +++ /dev/null @@ -1,3 +0,0 @@ -# Contributing - -See [here](https://join-lemmy.org/docs/en/contributors/01-overview.html) for contributing Instructions. diff --git a/Cargo.lock b/Cargo.lock index 378f6cb2e..491b7cc94 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -9,16 +9,21 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fe438c63458706e03479442743baae6c88256498e6431708f6dfc520a26515d3" [[package]] -name = "activitypub_federation" -version = "0.5.0-beta.3" +name = "accept-language" +version = "3.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "509cbafa1b42e01b7ca76c26298814a6638825df4fd67aef2f4c9d36a39c2b6d" +checksum = "8f27d075294830fcab6f66e320dab524bc6d048f4a151698e153205559113772" + +[[package]] +name = "activitypub_federation" +version = "0.6.0-alpha2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4877d467ddf2fac85e9ee33aba6f2560df14125b8bfa864f85ab40e9b87753a9" dependencies = [ "activitystreams-kinds", "actix-web", - "anyhow", "async-trait", - "base64 0.21.2", + "base64 0.22.1", "bytes", "chrono", "derive_builder", @@ -26,18 +31,20 @@ dependencies = [ "enum_delegate", "futures", "futures-core", - "http", + "http 0.2.12", + "http 1.1.0", "http-signature-normalization", "http-signature-normalization-reqwest", "httpdate", - "itertools 0.10.5", + "itertools 0.13.0", "moka", "once_cell", - "openssl", "pin-project-lite", + "rand", "regex", - "reqwest", + "reqwest 0.12.8", "reqwest-middleware", + "rsa", "serde", "serde_json", "sha2", @@ -59,26 +66,26 @@ dependencies = [ [[package]] name = "actix-codec" -version = "0.5.0" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57a7559404a7f3573127aab53c08ce37a6c6a315c374a31070f3c91cd1b4a7fe" +checksum = "5f7b0a21988c1bf877cf4759ef5ddaac04c1c9fe808c9142ecb78ba97d97a28a" dependencies = [ - "bitflags 1.3.2", + "bitflags 2.6.0", "bytes", "futures-core", "futures-sink", - "log", "memchr", "pin-project-lite", "tokio", "tokio-util", + "tracing", ] [[package]] name = "actix-cors" -version = "0.6.4" +version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b340e9cfa5b08690aae90fb61beb44e9b06f44fe3d0f93781aaa58cfba86245e" +checksum = "f9e772b3bcafe335042b5db010ab7c09013dad6eac4915c91d8d50902769f331" dependencies = [ "actix-utils", "actix-web", @@ -89,36 +96,20 @@ dependencies = [ "smallvec", ] -[[package]] -name = "actix-form-data" -version = "0.7.0-beta.2" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "9f2c32091d556a5064062087511db68526848dfa3de83c5262156431d27df7a4" -dependencies = [ - "actix-multipart", - "actix-rt", - "actix-web", - "futures-util", - "mime", - "thiserror", - "tokio", - "tracing", -] - [[package]] name = "actix-http" -version = "3.3.1" +version = "3.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c2079246596c18b4a33e274ae10c0e50613f4d32a4198e09c7b93771013fed74" +checksum = "d48f96fc3003717aeb9856ca3d02a8c7de502667ad76eeacd830b48d2e91fac4" dependencies = [ "actix-codec", "actix-rt", "actix-service", "actix-tls", "actix-utils", - "ahash 0.8.3", - "base64 0.21.2", - "bitflags 1.3.2", + "ahash", + "base64 0.22.1", + "bitflags 2.6.0", "brotli", "bytes", "bytestring", @@ -127,7 +118,7 @@ dependencies = [ "flate2", "futures-core", "h2", - "http", + "http 0.2.12", "httparse", "httpdate", "itoa", @@ -147,66 +138,43 @@ dependencies = [ [[package]] name = "actix-macros" -version = "0.2.3" +version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "465a6172cf69b960917811022d8f29bc0b7fa1398bc4f78b3c466673db1213b6" +checksum = "e01ed3140b2f8d422c68afa1ed2e85d996ea619c988ac834d255db32138655cb" dependencies = [ "quote", - "syn 1.0.103", -] - -[[package]] -name = "actix-multipart" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dee489e3c01eae4d1c35b03c4493f71cb40d93f66b14558feb1b1a807671cc4e" -dependencies = [ - "actix-utils", - "actix-web", - "bytes", - "derive_more", - "futures-core", - "futures-util", - "httparse", - "local-waker", - "log", - "memchr", - "mime", - "serde", - "serde_json", - "serde_plain", - "tokio", + "syn 2.0.77", ] [[package]] name = "actix-router" -version = "0.5.1" +version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d66ff4d247d2b160861fa2866457e85706833527840e4133f8f49aa423a38799" +checksum = "13d324164c51f63867b57e73ba5936ea151b8a41a1d23d1031eeb9f70d0236f8" dependencies = [ "bytestring", - "http", - "regex", + "cfg-if", + "http 0.2.12", + "regex-lite", "serde", "tracing", ] [[package]] name = "actix-rt" -version = "2.8.0" +version = "2.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "15265b6b8e2347670eb363c47fc8c75208b4a4994b27192f345fcbe707804f3e" +checksum = "24eda4e2a6e042aa4e55ac438a2ae052d3b5da0ecf83d7411e1a368946925208" dependencies = [ - "actix-macros", "futures-core", "tokio", ] [[package]] name = "actix-server" -version = "2.1.1" +version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0da34f8e659ea1b077bb4637948b815cd3768ad5a188fdcd74ff4d84240cd824" +checksum = "7ca2549781d8dd6d75c40cf6b6051260a2cc2f3c62343d761a969a0640646894" dependencies = [ "actix-rt", "actix-service", @@ -214,8 +182,7 @@ dependencies = [ "futures-core", "futures-util", "mio", - "num_cpus", - "socket2 0.4.9", + "socket2", "tokio", "tracing", ] @@ -233,21 +200,21 @@ dependencies = [ [[package]] name = "actix-tls" -version = "3.0.3" +version = "3.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9fde0cf292f7cdc7f070803cb9a0d45c018441321a78b1042ffbbb81ec333297" +checksum = "ac453898d866cdbecdbc2334fe1738c747b4eba14a677261f2b768ba05329389" dependencies = [ - "actix-codec", "actix-rt", "actix-service", "actix-utils", "futures-core", - "http", - "log", + "impl-more", "pin-project-lite", - "tokio-rustls 0.23.4", + "rustls-pki-types", + "tokio", + 
"tokio-rustls 0.26.0", "tokio-util", - "webpki-roots", + "tracing", ] [[package]] @@ -262,9 +229,9 @@ dependencies = [ [[package]] name = "actix-web" -version = "4.3.1" +version = "4.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cd3cb42f9566ab176e1ef0b8b3a896529062b4efc6be0123046095914c4c1c96" +checksum = "9180d76e5cc7ccbc4d60a506f2c727730b154010262df5b910eb17dbe4b8cb38" dependencies = [ "actix-codec", "actix-http", @@ -276,7 +243,7 @@ dependencies = [ "actix-tls", "actix-utils", "actix-web-codegen", - "ahash 0.7.6", + "ahash", "bytes", "bytestring", "cfg-if", @@ -285,44 +252,44 @@ dependencies = [ "encoding_rs", "futures-core", "futures-util", - "http", + "impl-more", "itoa", "language-tags", "log", "mime", "once_cell", "pin-project-lite", - "regex", + "regex-lite", "serde", "serde_json", "serde_urlencoded", "smallvec", - "socket2 0.4.9", - "time 0.3.15", + "socket2", + "time", "url", ] [[package]] name = "actix-web-codegen" -version = "4.2.0" +version = "4.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2262160a7ae29e3415554a3f1fc04c764b1540c116aa524683208078b7a75bc9" +checksum = "f591380e2e68490b5dfaf1dd1aa0ebe78d84ba7067078512b4ea6e4492d622b8" dependencies = [ "actix-router", "proc-macro2", "quote", - "syn 1.0.103", + "syn 2.0.77", ] [[package]] name = "actix-web-httpauth" -version = "0.8.1" +version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d613edf08a42ccc6864c941d30fe14e1b676a77d16f1dbadc1174d065a0a775" +checksum = "456348ed9dcd72a13a1f4a660449fafdecee9ac8205552e286809eb5b0b29bd3" dependencies = [ "actix-utils", "actix-web", - "base64 0.21.2", + "base64 0.22.1", "futures-core", "futures-util", "log", @@ -331,21 +298,24 @@ dependencies = [ [[package]] name = "actix-web-prom" -version = "0.6.0" +version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9df3127d20a5d01c9fc9aceb969a38d31a6767e1b48a54d55a8f56c769a84923" +checksum = "56a34f1825c3ae06567a9d632466809bbf34963c86002e8921b64f32d48d289d" dependencies = [ "actix-web", "futures-core", + "log", "pin-project-lite", "prometheus", + "regex", + "strfmt", ] [[package]] name = "addr2line" -version = "0.19.0" +version = "0.24.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a76fd60b23679b7d19bd066031410fb7e458ccc5e958eb5c325888ce4baedc97" +checksum = "f5fb1d8e4442bd405fdfd1dacb42792696b0cf9cb15882e5d097b742a676d375" dependencies = [ "gimli", ] @@ -357,33 +327,29 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" [[package]] -name = "ahash" -version = "0.7.6" +name = "adler2" +version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fcb51a0695d8f838b1ee009b3fbf66bda078cd64590202a864a8f3e8c4315c47" -dependencies = [ - "getrandom", - "once_cell", - "version_check", -] +checksum = "512761e0bb2578dd7380c6baaa0f4ce03e84f95e960231d1dec8bf4d7d6e2627" [[package]] name = "ahash" -version = "0.8.3" +version = "0.8.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2c99f64d1e06488f620f932677e24bc6e2897582980441ae90a671415bd7ec2f" +checksum = "e89da841a80418a9b391ebaea17f5c112ffaaa96f621d2c285b5174da76b9011" dependencies = [ "cfg-if", "getrandom", "once_cell", "version_check", + "zerocopy", ] [[package]] name = "aho-corasick" -version = "1.0.2" +version = "1.1.3" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "43f6cb1bf222025340178f382c426f13757b2960e89779dfcb319c32542a5a41" +checksum = "8e60d3430d3a69478ad0993f19238d2df97c507009a52b3c10addcd7f6bcb916" dependencies = [ "memchr", ] @@ -403,6 +369,12 @@ dependencies = [ "alloc-no-stdlib", ] +[[package]] +name = "allocator-api2" +version = "0.2.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c6cb57a04249c6480766f7f7cef5467412af1490f8d1e243141daddada3264f" + [[package]] name = "android-tzdata" version = "0.1.1" @@ -420,57 +392,58 @@ dependencies = [ [[package]] name = "anstream" -version = "0.5.0" +version = "0.6.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b1f58811cfac344940f1a400b6e6231ce35171f614f26439e80f8c1465c5cc0c" +checksum = "64e15c1ab1f89faffbf04a634d5e1962e9074f2741eef6d97f3c4e322426d526" dependencies = [ "anstyle", "anstyle-parse", "anstyle-query", "anstyle-wincon", "colorchoice", + "is_terminal_polyfill", "utf8parse", ] [[package]] name = "anstyle" -version = "1.0.2" +version = "1.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "15c4c2c83f81532e5845a733998b6971faca23490340a418e9b72a3ec9de12ea" +checksum = "1bec1de6f59aedf83baf9ff929c98f2ad654b97c9510f4e70cf6f661d49fd5b1" [[package]] name = "anstyle-parse" -version = "0.2.1" +version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "938874ff5980b03a87c5524b3ae5b59cf99b1d6bc836848df7bc5ada9643c333" +checksum = "eb47de1e80c2b463c735db5b217a0ddc39d612e7ac9e2e96a5aed1f57616c1cb" dependencies = [ "utf8parse", ] [[package]] name = "anstyle-query" -version = "1.0.0" +version = "1.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ca11d4be1bab0c8bc8734a9aa7bf4ee8316d462a08c6ac5052f888fef5b494b" +checksum = "6d36fc52c7f6c869915e99412912f22093507da8d9e942ceaf66fe4b7c14422a" dependencies = [ - "windows-sys 0.48.0", + "windows-sys 0.52.0", ] [[package]] name = "anstyle-wincon" -version = "2.1.0" +version = "3.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "58f54d10c6dfa51283a066ceab3ec1ab78d13fae00aa49243a45e4571fb79dfd" +checksum = "5bf74e1b6e971609db8ca7a9ce79fd5768ab6ae46441c572e46cf596f59e57f8" dependencies = [ "anstyle", - "windows-sys 0.48.0", + "windows-sys 0.52.0", ] [[package]] name = "anyhow" -version = "1.0.71" +version = "1.0.89" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c7d0618f0e0b7e8ff11427422b64564d5fb0be1940354bfe2e0529b18a9d9b8" +checksum = "86fdf8605db99b54d3cd748a44c6d04df638eb5dafb219b135d0149bd0db01f6" dependencies = [ "backtrace", ] @@ -493,9 +466,9 @@ dependencies = [ [[package]] name = "async-compression" -version = "0.4.1" +version = "0.4.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62b74f44609f0f91493e3082d3734d98497e094777144380ea4db9f9905dd5b6" +checksum = "fec134f64e2bc57411226dfc4e52dec859ddfc7e711fc5e07b612584f000e4aa" dependencies = [ "flate2", "futures-core", @@ -504,223 +477,87 @@ dependencies = [ "tokio", ] -[[package]] -name = "async-io" -version = "1.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0fc5b45d93ef0529756f812ca52e44c221b35341892d3dcc34132ac02f3dd2af" -dependencies = [ - "async-lock", - "autocfg", - "cfg-if", - "concurrent-queue", - "futures-lite", - "log", - "parking", - "polling", - "rustix 0.37.22", - "slab", - "socket2 0.4.9", - "waker-fn", -] - [[package]] name 
= "async-lock" -version = "2.7.0" +version = "3.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa24f727524730b077666307f2734b4a1a1c57acb79193127dcc8914d5242dd7" +checksum = "ff6e472cdea888a4bd64f342f09b3f50e1886d32afe8df3d663c01140b811b18" dependencies = [ "event-listener", -] - -[[package]] -name = "async-stream" -version = "0.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dad5c83079eae9969be7fadefe640a1c566901f05ff91ab221de4b6f68d9507e" -dependencies = [ - "async-stream-impl", - "futures-core", -] - -[[package]] -name = "async-stream-impl" -version = "0.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "10f203db73a71dfa2fb6dd22763990fa26f3d2625a6da2da900d23b87d26be27" -dependencies = [ - "proc-macro2", - "quote", - "syn 1.0.103", + "event-listener-strategy", + "pin-project-lite", ] [[package]] name = "async-trait" -version = "0.1.71" +version = "0.1.82" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a564d521dd56509c4c47480d00b80ee55f7e385ae48db5744c67ad50c92d2ebf" +checksum = "a27b8a3a6e1a44fa4c8baf1f653e4172e81486d4941f2237e20dc2d0cf4ddff1" dependencies = [ "proc-macro2", "quote", - "syn 2.0.31", + "syn 2.0.77", ] [[package]] name = "atom_syndication" -version = "0.12.1" +version = "0.12.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca96cb38e3d8236f1573a84bbc55e130bd1ae07df770e36d0cf221ea7a50e36c" +checksum = "2a3a5ed3201df5658d1aa45060c5a57dc9dba8a8ada20d696d67cb0c479ee043" dependencies = [ "chrono", "derive_builder", "diligent-date-parser", "never", - "quick-xml 0.28.2", + "quick-xml 0.36.1", ] [[package]] name = "autocfg" -version = "1.1.0" +version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" +checksum = "0c4b4d0bd25bd0b74681c0ad21497610ce1b7c91b1022cd21c80c6fbdd9476b0" [[package]] -name = "awc" -version = "3.0.1" +name = "aws-lc-rs" +version = "1.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "80ca7ff88063086d2e2c70b9f3b29b2fcd999bac68ac21731e66781970d68519" +checksum = "2f95446d919226d587817a7d21379e6eb099b97b45110a7f272a444ca5c54070" dependencies = [ - "actix-codec", - "actix-http", - "actix-rt", - "actix-service", - "actix-tls", - "actix-utils", - "ahash 0.7.6", - "base64 0.13.1", - "bytes", - "cfg-if", - "derive_more", - "futures-core", - "futures-util", - "h2", - "http", - "itoa", - "log", - "mime", - "percent-encoding", - "pin-project-lite", - "rand", - "rustls 0.20.7", - "serde", - "serde_json", - "serde_urlencoded", - "tokio", + "aws-lc-sys", + "mirai-annotations", + "paste", + "zeroize", ] [[package]] -name = "axum" -version = "0.5.17" +name = "aws-lc-sys" +version = "0.21.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "acee9fd5073ab6b045a275b3e709c163dd36c90685219cb21804a147b58dba43" +checksum = "234314bd569802ec87011d653d6815c6d7b9ffb969e9fee5b8b20ef860e8dce9" dependencies = [ - "async-trait", - "axum-core 0.2.9", - "bitflags 1.3.2", - "bytes", - "futures-util", - "http", - "http-body", - "hyper", - "itoa", - "matchit 0.5.0", - "memchr", - "mime", - "percent-encoding", - "pin-project-lite", - "serde", - "sync_wrapper", - "tokio", - "tower", - "tower-http", - "tower-layer", - "tower-service", -] - -[[package]] -name = "axum" -version = "0.6.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"f8175979259124331c1d7bf6586ee7e0da434155e4b2d48ec2c8386281d8df39" -dependencies = [ - "async-trait", - "axum-core 0.3.4", - "bitflags 1.3.2", - "bytes", - "futures-util", - "http", - "http-body", - "hyper", - "itoa", - "matchit 0.7.0", - "memchr", - "mime", - "percent-encoding", - "pin-project-lite", - "rustversion", - "serde", - "sync_wrapper", - "tower", - "tower-layer", - "tower-service", -] - -[[package]] -name = "axum-core" -version = "0.2.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "37e5939e02c56fecd5c017c37df4238c0a839fa76b7f97acdd7efb804fd181cc" -dependencies = [ - "async-trait", - "bytes", - "futures-util", - "http", - "http-body", - "mime", - "tower-layer", - "tower-service", -] - -[[package]] -name = "axum-core" -version = "0.3.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "759fa577a247914fd3f7f76d62972792636412fbfd634cd452f6a385a74d2d2c" -dependencies = [ - "async-trait", - "bytes", - "futures-util", - "http", - "http-body", - "mime", - "rustversion", - "tower-layer", - "tower-service", + "bindgen", + "cc", + "cmake", + "dunce", + "fs_extra", + "libc", + "paste", ] [[package]] name = "backtrace" -version = "0.3.67" +version = "0.3.74" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "233d376d6d185f2a3093e58f283f60f880315b6c60075b01f36b3b85154564ca" +checksum = "8d82cb332cdfaed17ae235a638438ac4d4839913cc2af585c3c6746e8f8bee1a" dependencies = [ "addr2line", - "cc", "cfg-if", "libc", - "miniz_oxide 0.6.2", + "miniz_oxide 0.8.0", "object", "rustc-demangle", + "windows-targets 0.52.6", ] [[package]] @@ -737,17 +574,39 @@ checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8" [[package]] name = "base64" -version = "0.21.2" +version = "0.21.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "604178f6c5c21f02dc555784810edfb88d34ac2c73b2eae109655649ee73ce3d" +checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567" + +[[package]] +name = "base64" +version = "0.22.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" + +[[package]] +name = "base64ct" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8c3c1a368f70d6cf7302d78f8f7093da241fb8e8807c05cc9e51a125895a6d5b" + +[[package]] +name = "bcder" +version = "0.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c627747a6774aab38beb35990d88309481378558875a41da1a4b2e373c906ef0" +dependencies = [ + "bytes", + "smallvec", +] [[package]] name = "bcrypt" -version = "0.15.0" +version = "0.15.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "28d1c9c15093eb224f0baa400f38fcd713fc1391a6f1c389d886beef146d60a3" +checksum = "e65938ed058ef47d92cf8b346cc76ef48984572ade631927e9937b5ffc7662c7" dependencies = [ - "base64 0.21.2", + "base64 0.22.1", "blowfish", "getrandom", "subtle", @@ -763,6 +622,29 @@ dependencies = [ "serde", ] +[[package]] +name = "bindgen" +version = "0.69.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a00dc851838a2120612785d195287475a3ac45514741da670b735818822129a0" +dependencies = [ + "bitflags 2.6.0", + "cexpr", + "clang-sys", + "itertools 0.12.1", + "lazy_static", + "lazycell", + "log", + "prettyplease", + "proc-macro2", + "quote", + "regex", + "rustc-hash 1.1.0", + "shlex", + "syn 2.0.77", + "which", +] + [[package]] name = 
"bit-set" version = "0.5.3" @@ -786,15 +668,15 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" [[package]] name = "bitflags" -version = "2.3.1" +version = "2.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6776fc96284a0bb647b615056fc496d1fe1644a7ab01829818a6d91cae888b84" +checksum = "b048fb63fd8b5923fc5aa7b340d8e156aec7ec02f0c78fa8a6ddc2613f6f71de" [[package]] name = "block-buffer" -version = "0.10.3" +version = "0.10.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "69cce20737498f97b993470a6e536b8523f0af7892a4f928cceb1ac5e52ebe7e" +checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71" dependencies = [ "generic-array", ] @@ -811,9 +693,9 @@ dependencies = [ [[package]] name = "brotli" -version = "3.3.4" +version = "6.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1a0b1dbcc8ae29329621f8d4f0d835787c1c38bb1401979b49d13b0b305ff68" +checksum = "74f7971dbd9326d58187408ab83117d8ac1bb9c17b085fdacd1cf2f598719b6b" dependencies = [ "alloc-no-stdlib", "alloc-stdlib", @@ -822,9 +704,9 @@ dependencies = [ [[package]] name = "brotli-decompressor" -version = "2.3.4" +version = "4.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4b6561fd3f895a11e8f72af2cb7d22e08366bebc2b6b57f7744c4bda27034744" +checksum = "9a45bd2e4095a8b518033b128020dd4a55aab1c0a381ba4404a472630f4bc362" dependencies = [ "alloc-no-stdlib", "alloc-stdlib", @@ -832,52 +714,37 @@ dependencies = [ [[package]] name = "bumpalo" -version = "3.11.1" +version = "3.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "572f695136211188308f16ad2ca5c851a712c464060ae6974944458eb83880ba" - -[[package]] -name = "bytecount" -version = "0.6.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2c676a478f63e9fa2dd5368a42f28bba0d6c560b775f38583c8bbaa7fcd67c9c" +checksum = "79296716171880943b8470b5f8d03aa55eb2e645a4874bdbb28adb49162e012c" [[package]] name = "bytemuck" -version = "1.12.1" +version = "1.18.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2f5715e491b5a1598fc2bef5a606847b5dc1d48ea625bd3c02c00de8285591da" +checksum = "94bbb0ad554ad961ddc5da507a12a29b14e4ae5bda06b19f575a3e6079d2e2ae" [[package]] name = "byteorder" -version = "1.4.3" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610" +checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" [[package]] name = "bytes" -version = "1.4.0" +version = "1.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "89b2fd2a0dcf38d7971e2194b6b6eebab45ae01067456a7fd93d5547a61b70be" +checksum = "8318a53db07bb3f8dca91a600466bdb3f2eaadeedfdbcf02e1accbad9271ba50" [[package]] name = "bytestring" -version = "1.1.0" +version = "1.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "86b6a75fd3048808ef06af5cd79712be8111960adaf89d90250974b38fc3928a" +checksum = "74d80203ea6b29df88012294f62733de21cfeab47f17b41af3a38bc30a03ee72" dependencies = [ "bytes", ] -[[package]] -name = "camino" -version = "1.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c530edf18f37068ac2d977409ed5cd50d53d73bc653c7647b48eb78976ac9ae2" -dependencies = [ - "serde", -] - [[package]] name = "captcha" version = "0.0.9" @@ -892,35 +759,15 @@ dependencies 
= [ "serde_json", ] -[[package]] -name = "cargo-platform" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cbdb825da8a5df079a43676dbe042702f1707b1109f713a01420fbb4cc71fa27" -dependencies = [ - "serde", -] - -[[package]] -name = "cargo_metadata" -version = "0.14.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4acbb09d9ee8e23699b9634375c72795d095bf268439da88562cf9b501f181fa" -dependencies = [ - "camino", - "cargo-platform", - "semver", - "serde", - "serde_json", -] - [[package]] name = "cc" -version = "1.0.73" +version = "1.1.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2fff2a6927b3bb87f9595d67196a70493f627687a71d87a0d692242c33f58c11" +checksum = "2d74707dde2ba56f86ae90effb3b43ddd369504387e718014de010cec7959800" dependencies = [ "jobserver", + "libc", + "shlex", ] [[package]] @@ -929,6 +776,15 @@ version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6d43a04d8753f35258c91f8ec639f792891f748a1edbd759cf1dcea3382ad83c" +[[package]] +name = "cexpr" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6fac387a98bb7c37292057cffc56d62ecb629900026402633ae9160df93a8766" +dependencies = [ + "nom", +] + [[package]] name = "cfg-if" version = "1.0.0" @@ -937,70 +793,103 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" [[package]] name = "chrono" -version = "0.4.26" +version = "0.4.38" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec837a71355b28f6556dbd569b37b3f363091c0bd4b2e735674521b4c5fd9bc5" +checksum = "a21f936df1771bf62b77f047b726c4625ff2e8aa607c01ec06e5a05bd8463401" dependencies = [ "android-tzdata", "iana-time-zone", "js-sys", "num-traits", "serde", - "time 0.1.44", "wasm-bindgen", - "winapi", + "windows-targets 0.52.6", +] + +[[package]] +name = "chumsky" +version = "0.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8eebd66744a15ded14960ab4ccdbfb51ad3b81f51f3f04a80adac98c985396c9" +dependencies = [ + "hashbrown 0.14.5", + "stacker", ] [[package]] name = "cipher" -version = "0.4.3" +version = "0.4.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d1873270f8f7942c191139cb8a40fd228da6c3fd2fc376d7e92d47aa14aeb59e" +checksum = "773f3b9af64447d2ce9850330c473515014aa235e6a783b02db81ff39e4a3dad" dependencies = [ "crypto-common", "inout", ] [[package]] -name = "clap" -version = "4.4.0" +name = "clang-sys" +version = "1.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d5f1946157a96594eb2d2c10eb7ad9a2b27518cb3000209dec700c35df9197d" +checksum = "0b023947811758c97c59bf9d1c188fd619ad4718dcaa767947df1cadb14f39f4" +dependencies = [ + "glob", + "libc", + "libloading", +] + +[[package]] +name = "clap" +version = "4.5.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7be5744db7978a28d9df86a214130d106a89ce49644cbc4e3f0c22c3fba30615" dependencies = [ "clap_builder", "clap_derive", - "once_cell", ] [[package]] name = "clap_builder" -version = "4.4.0" +version = "4.5.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "78116e32a042dd73c2901f0dc30790d20ff3447f3e3472fad359e8c3d282bcd6" +checksum = "a5fbc17d3ef8278f55b282b2a2e75ae6f6c7d4bb70ed3d0382375104bfafdb4b" dependencies = [ "anstream", "anstyle", "clap_lex", - "strsim", + "strsim 0.11.1", ] [[package]] name = "clap_derive" -version = 
"4.4.0" +version = "4.5.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c9fd1a5729c4548118d7d70ff234a44868d00489a4b6597b0b020918a0e91a1a" +checksum = "4ac6a0c7b1a9e9a5186361f67dfa1b88213572f427fb9ab038efb2bd8c582dab" dependencies = [ - "heck", + "heck 0.5.0", "proc-macro2", "quote", - "syn 2.0.31", + "syn 2.0.77", ] [[package]] name = "clap_lex" -version = "0.5.1" +version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cd7cc57abe963c6d3b9d8be5b06ba7c8957a930305ca90304f24ef040aa6f961" +checksum = "1462739cb27611015575c0c11df5df7601141071f07518d56fcc1be504cbec97" + +[[package]] +name = "clearurls" +version = "0.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e291c00af89ac0a5b400d9ba46a682e38015ae3cd8926dbbe85b3b864d550be3" +dependencies = [ + "linkify", + "percent-encoding", + "regex", + "serde", + "serde_json", + "url", +] [[package]] name = "clokwerk" @@ -1012,40 +901,12 @@ dependencies = [ ] [[package]] -name = "codespan-reporting" -version = "0.11.1" +name = "cmake" +version = "0.1.51" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3538270d33cc669650c4b093848450d380def10c331d38c768e34cac80576e6e" +checksum = "fb1e43aa7fd152b1f968787f7dbcdeb306d1867ff373c69955211876c053f91a" dependencies = [ - "termcolor", - "unicode-width", -] - -[[package]] -name = "color-eyre" -version = "0.6.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a667583cca8c4f8436db8de46ea8233c42a7d9ae424a82d338f2e4675229204" -dependencies = [ - "backtrace", - "color-spantrace", - "eyre", - "indenter", - "once_cell", - "owo-colors", - "tracing-error", -] - -[[package]] -name = "color-spantrace" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1ba75b3d9449ecdccb27ecbc479fdc0b87fa2dd43d2f8298f9bf0e59aacc8dce" -dependencies = [ - "once_cell", - "owo-colors", - "tracing-core", - "tracing-error", + "cc", ] [[package]] @@ -1056,15 +917,15 @@ checksum = "3d7b894f5411737b7867f4827955924d7c254fc9f4d91a6aad6b097804b1018b" [[package]] name = "colorchoice" -version = "1.0.0" +version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "acbf1af155f9b9ef647e42cdc158db4b64a1b61f743629225fde6f3e0be2a7c7" +checksum = "d3fd119d74b830634cea2a0f58bbd0d54540518a14397557951e79340abc28c0" [[package]] name = "combine" -version = "4.6.6" +version = "4.6.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "35ed6e9d84f0b51a7f52daf1c7d71dd136fd7a3f41a8462b8cdb8c78d920fad4" +checksum = "ba5a308b75df32fe02788e748662718f03fde005016435c444eea572398219fd" dependencies = [ "bytes", "memchr", @@ -1072,82 +933,33 @@ dependencies = [ [[package]] name = "concurrent-queue" -version = "2.2.0" +version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62ec6771ecfa0762d24683ee5a32ad78487a3d3afdc0fb8cae19d2c5deb50b7c" +checksum = "4ca0197aee26d1ae37445ee532fefce43251d24cc7c166799f4d46817f1d3973" dependencies = [ "crossbeam-utils", ] [[package]] -name = "config" -version = "0.13.3" +name = "const-oid" +version = "0.9.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d379af7f68bfc21714c6c7dea883544201741d2ce8274bb12fa54f89507f52a7" -dependencies = [ - "async-trait", - "json5", - "lazy_static", - "nom", - "pathdiff", - "ron", - "rust-ini", - "serde", - "serde_json", - "toml 0.5.9", - "yaml-rust", -] - -[[package]] 
-name = "console-api" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c2895653b4d9f1538a83970077cb01dfc77a4810524e51a110944688e916b18e" -dependencies = [ - "prost", - "prost-types", - "tonic 0.9.2", - "tracing-core", -] - -[[package]] -name = "console-subscriber" -version = "0.1.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d4cf42660ac07fcebed809cfe561dd8730bcd35b075215e6479c516bcd0d11cb" -dependencies = [ - "console-api", - "crossbeam-channel", - "crossbeam-utils", - "futures", - "hdrhistogram", - "humantime", - "prost-types", - "serde", - "serde_json", - "thread_local", - "tokio", - "tokio-stream", - "tonic 0.9.2", - "tracing", - "tracing-core", - "tracing-subscriber", -] +checksum = "c2459377285ad874054d797f3ccebf984978aa39129f6eafde5cdc8315b612f8" [[package]] name = "const_format" -version = "0.2.31" +version = "0.2.33" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c990efc7a285731f9a4378d81aff2f0e85a2c8781a05ef0f8baa8dac54d0ff48" +checksum = "50c655d81ff1114fb0dcdea9225ea9f0cc712a6f8d189378e82bdf62a473a64b" dependencies = [ "const_format_proc_macros", ] [[package]] name = "const_format_proc_macros" -version = "0.2.31" +version = "0.2.33" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e026b6ce194a874cb9cf32cd5772d1ef9767cc8fcb5765948d74f37a9d8b2bf6" +checksum = "eff1a44b93f47b1bac19a27932f5c591e43d1ba357ee4f61526c8a25603f0eb1" dependencies = [ "proc-macro2", "quote", @@ -1156,9 +968,9 @@ dependencies = [ [[package]] name = "constant_time_eq" -version = "0.2.4" +version = "0.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f3ad85c1f65dc7b37604eb0e89748faf0b9653065f2a8ef69f96a687ec1e9279" +checksum = "21a53c0a4d288377e7415b53dcfc3c04da5cdc2cc95c8d5ac178b58f0b861ad6" [[package]] name = "convert_case" @@ -1173,15 +985,15 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e859cd57d0710d9e06c381b550c06e76992472a8c6d527aecd2fc673dcc231fb" dependencies = [ "percent-encoding", - "time 0.3.15", + "time", "version_check", ] [[package]] name = "core-foundation" -version = "0.9.3" +version = "0.9.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "194a7a9e6de53fa55116934067c844d9d749312f75c6f6d0980e8c252f8c2146" +checksum = "91e195e091a93c46f7102ec7818a2aa394e1e1771c3ab4825963fa03e45afb8f" dependencies = [ "core-foundation-sys", "libc", @@ -1189,59 +1001,51 @@ dependencies = [ [[package]] name = "core-foundation-sys" -version = "0.8.3" +version = "0.8.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5827cebf4670468b8772dd191856768aedcb1b0278a04f989f7766351917b9dc" +checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b" [[package]] name = "cpufeatures" -version = "0.2.5" +version = "0.2.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "28d997bd5e24a5928dd43e46dc529867e207907fe0b239c3477d924f7f2ca320" +checksum = "608697df725056feaccfa42cffdaeeec3fccc4ffc38358ecd19b243e716a78e0" dependencies = [ "libc", ] [[package]] name = "crc32fast" -version = "1.3.2" +version = "1.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b540bd8bc810d3885c6ea91e2018302f68baba2129ab3e88f32389ee9370880d" +checksum = "a97769d94ddab943e4510d138150169a2758b5ef3eb191a9ee688de3e23ef7b3" dependencies = [ "cfg-if", ] [[package]] name = "crossbeam-channel" -version = "0.5.8" 
+version = "0.5.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a33c2bf77f2df06183c3aa30d1e96c0695a313d4f9c453cc3762a6db39f99200" +checksum = "33480d6946193aa8033910124896ca395333cae7e2d1113d1fef6c3272217df2" dependencies = [ - "cfg-if", "crossbeam-utils", ] [[package]] name = "crossbeam-epoch" -version = "0.9.13" +version = "0.9.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "01a9af1f4c2ef74bb8aa1f7e19706bc72d03598c8a570bb5de72243c7a9d9d5a" +checksum = "5b82ac4a3c2ca9c3460964f020e1402edd5753411d7737aa39c3714ad1b5420e" dependencies = [ - "autocfg", - "cfg-if", "crossbeam-utils", - "memoffset", - "scopeguard", ] [[package]] name = "crossbeam-utils" -version = "0.8.12" +version = "0.8.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "edbafec5fa1f196ca66527c1b12c2ec4745ca14b50f1ad8f9f6f720b55d11fac" -dependencies = [ - "cfg-if", -] +checksum = "22ec99545bb0ed0ea7bb9b8e1e9122ea386ff8a48c0922e43f36d45ab09e0e80" [[package]] name = "crypto-common" @@ -1253,50 +1057,6 @@ dependencies = [ "typenum", ] -[[package]] -name = "cxx" -version = "1.0.80" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6b7d4e43b25d3c994662706a1d4fcfc32aaa6afd287502c111b237093bb23f3a" -dependencies = [ - "cc", - "cxxbridge-flags", - "cxxbridge-macro", - "link-cplusplus", -] - -[[package]] -name = "cxx-build" -version = "1.0.80" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "84f8829ddc213e2c1368e51a2564c552b65a8cb6a28f31e576270ac81d5e5827" -dependencies = [ - "cc", - "codespan-reporting", - "once_cell", - "proc-macro2", - "quote", - "scratch", - "syn 1.0.103", -] - -[[package]] -name = "cxxbridge-flags" -version = "1.0.80" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e72537424b474af1460806647c41d4b6d35d09ef7fe031c5c2fa5766047cc56a" - -[[package]] -name = "cxxbridge-macro" -version = "1.0.80" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "309e4fb93eed90e1e14bea0da16b209f81813ba9fc7830c20ed151dd7bc0a4d7" -dependencies = [ - "proc-macro2", - "quote", - "syn 1.0.103", -] - [[package]] name = "darling" version = "0.13.4" @@ -1309,22 +1069,12 @@ dependencies = [ [[package]] name = "darling" -version = "0.14.1" +version = "0.20.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4529658bdda7fd6769b8614be250cdcfc3aeb0ee72fe66f9e41e5e5eb73eac02" +checksum = "6f63b86c8a8826a49b8c21f08a2d07338eec8d900540f8630dc76284be802989" dependencies = [ - "darling_core 0.14.1", - "darling_macro 0.14.1", -] - -[[package]] -name = "darling" -version = "0.20.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0558d22a7b463ed0241e993f76f09f30b126687447751a8638587b864e4b3944" -dependencies = [ - "darling_core 0.20.1", - "darling_macro 0.20.1", + "darling_core 0.20.10", + "darling_macro 0.20.10", ] [[package]] @@ -1337,36 +1087,22 @@ dependencies = [ "ident_case", "proc-macro2", "quote", - "strsim", - "syn 1.0.103", + "strsim 0.10.0", + "syn 1.0.109", ] [[package]] name = "darling_core" -version = "0.14.1" +version = "0.20.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "649c91bc01e8b1eac09fb91e8dbc7d517684ca6be8ebc75bb9cafc894f9fdb6f" +checksum = "95133861a8032aaea082871032f5815eb9e98cef03fa916ab4500513994df9e5" dependencies = [ "fnv", "ident_case", "proc-macro2", "quote", - "strsim", - "syn 1.0.103", -] - -[[package]] -name = 
"darling_core" -version = "0.20.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ab8bfa2e259f8ee1ce5e97824a3c55ec4404a0d772ca7fa96bf19f0752a046eb" -dependencies = [ - "fnv", - "ident_case", - "proc-macro2", - "quote", - "strsim", - "syn 2.0.31", + "strsim 0.11.1", + "syn 2.0.77", ] [[package]] @@ -1377,42 +1113,18 @@ checksum = "9c972679f83bdf9c42bd905396b6c3588a843a17f0f16dfcfa3e2c5d57441835" dependencies = [ "darling_core 0.13.4", "quote", - "syn 1.0.103", + "syn 1.0.109", ] [[package]] name = "darling_macro" -version = "0.14.1" +version = "0.20.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ddfc69c5bfcbd2fc09a0f38451d2daf0e372e367986a83906d1b0dbc88134fb5" +checksum = "d336a2a514f6ccccaa3e09b02d41d35330c07ddf03a62165fcec10bb561c7806" dependencies = [ - "darling_core 0.14.1", + "darling_core 0.20.10", "quote", - "syn 1.0.103", -] - -[[package]] -name = "darling_macro" -version = "0.20.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "29a358ff9f12ec09c3e61fef9b5a9902623a695a46a917b07f269bff1445611a" -dependencies = [ - "darling_core 0.20.1", - "quote", - "syn 2.0.31", -] - -[[package]] -name = "dashmap" -version = "5.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "907076dfda823b0b36d2a1bb5f90c96660a5bbcd7729e10727f07858f22c4edc" -dependencies = [ - "cfg-if", - "hashbrown 0.12.3", - "lock_api", - "once_cell", - "parking_lot_core 0.9.4", + "syn 2.0.77", ] [[package]] @@ -1429,14 +1141,46 @@ dependencies = [ ] [[package]] -name = "deadpool-runtime" -version = "0.1.2" +name = "deadpool" +version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eaa37046cc0f6c3cc6090fbdbf73ef0b8ef4cfcc37f6befc0020f63e8cf121e1" +checksum = "6541a3916932fe57768d4be0b1ffb5ec7cbf74ca8c903fdfd5c0fe8aa958f0ed" +dependencies = [ + "deadpool-runtime", + "num_cpus", + "tokio", +] + +[[package]] +name = "deadpool-runtime" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "092966b41edc516079bdf31ec78a2e0588d1d0c08f78b91d8307215928642b2b" dependencies = [ "tokio", ] +[[package]] +name = "der" +version = "0.7.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f55bf8e7b65898637379c1b74eb1551107c8294ed26d855ceb9fd1a09cfc9bc0" +dependencies = [ + "const-oid", + "pem-rfc7468", + "zeroize", +] + +[[package]] +name = "deranged" +version = "0.3.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b42b6fa04a440b495c8b04d0e71b707c585f83cb9cb28cf8cd0d976c315e31b4" +dependencies = [ + "powerfmt", + "serde", +] + [[package]] name = "derivative" version = "2.2.0" @@ -1445,69 +1189,80 @@ checksum = "fcc3dd5e9e9c0b295d6e1e4d811fb6f157d5ffd784b8d202fc62eac8035a770b" dependencies = [ "proc-macro2", "quote", - "syn 1.0.103", + "syn 1.0.109", +] + +[[package]] +name = "derive-new" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2cdc8d50f426189eef89dac62fabfa0abb27d5cc008f25bf4156a0203325becc" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.77", ] [[package]] name = "derive_builder" -version = "0.12.0" +version = "0.20.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8d67778784b508018359cbc8696edb3db78160bab2c2a28ba7f56ef6932997f8" +checksum = "cd33f37ee6a119146a1781d3356a7c26028f83d779b2e04ecd45fdc75c76877b" dependencies = [ "derive_builder_macro", ] [[package]] name = 
"derive_builder_core" -version = "0.12.0" +version = "0.20.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c11bdc11a0c47bc7d37d582b5285da6849c96681023680b906673c5707af7b0f" +checksum = "7431fa049613920234f22c47fdc33e6cf3ee83067091ea4277a3f8c4587aae38" dependencies = [ - "darling 0.14.1", + "darling 0.20.10", "proc-macro2", "quote", - "syn 1.0.103", + "syn 2.0.77", ] [[package]] name = "derive_builder_macro" -version = "0.12.0" +version = "0.20.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ebcda35c7a396850a55ffeac740804b40ffec779b98fffbb1738f4033f0ee79e" +checksum = "4abae7035bf79b9877b779505d8cf3749285b80c43941eda66604841889451dc" dependencies = [ "derive_builder_core", - "syn 1.0.103", + "syn 2.0.77", ] [[package]] name = "derive_more" -version = "0.99.17" +version = "0.99.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4fb810d30a7c1953f91334de7244731fc3f3c10d7fe163338a35b9f640960321" +checksum = "5f33878137e4dafd7fa914ad4e259e18a4e8e532b9617a2d0150262bf53abfce" dependencies = [ "convert_case", "proc-macro2", "quote", "rustc_version", - "syn 1.0.103", + "syn 2.0.77", ] [[package]] name = "deser-hjson" -version = "1.2.0" +version = "2.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "30e1ab99fef4d11b2de312a0650bbf312fb48aa11a00084f35b27bf8c57d4cad" +checksum = "7d94aac4095c08ded7e4b9ba7fc2b2929f11b94bb96897ca188b0f64e01688e1" dependencies = [ "serde", ] [[package]] name = "diesel" -version = "2.1.0" +version = "2.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f7a532c1f99a0f596f6960a60d1e119e91582b24b39e2d83a190e61262c3ef0c" +checksum = "ff236accb9a5069572099f0b350a92e9560e8e63a9b8d546162f4a5e03026bb2" dependencies = [ - "bitflags 2.3.1", + "bitflags 2.6.0", "byteorder", "chrono", "diesel_derives", @@ -1519,12 +1274,12 @@ dependencies = [ [[package]] name = "diesel-async" -version = "0.3.1" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a40df24b390b2437af8b934b39acd277c246a08004afb91b8ccbe3137ffd4edc" +checksum = "acada1517534c92d3f382217b485db8a8638f111b0e3f2a2a8e26165050f77be" dependencies = [ "async-trait", - "deadpool", + "deadpool 0.9.5", "diesel", "futures-util", "scoped-futures", @@ -1538,40 +1293,40 @@ version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "81c5131a2895ef64741dad1d483f358c2a229a3a2d1b256778cdc5e146db64d4" dependencies = [ - "heck", + "heck 0.4.1", "proc-macro2", "quote", - "syn 2.0.31", + "syn 2.0.77", ] [[package]] name = "diesel-derive-newtype" -version = "2.1.0" +version = "2.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c7267437d5b12df60ae29bd97f8d120f1c3a6272d6f213551afa56bbb2ecfbb7" +checksum = "d5adf688c584fe33726ce0e2898f608a2a92578ac94a4a92fcecf73214fe0716" dependencies = [ "proc-macro2", "quote", - "syn 2.0.31", + "syn 2.0.77", ] [[package]] name = "diesel_derives" -version = "2.1.0" +version = "2.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "74398b79d81e52e130d991afeed9c86034bb1b7735f46d2f5bf7deb261d80303" +checksum = "14701062d6bed917b5c7103bdffaee1e4609279e240488ad24e7bd979ca6866c" dependencies = [ "diesel_table_macro_syntax", "proc-macro2", "quote", - "syn 2.0.31", + "syn 2.0.77", ] [[package]] name = "diesel_ltree" -version = "0.3.0" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "92d9f0b872d6c87b68a71f105802b941a7262788bf69d1bcd05654669cdbd55d" +checksum = "9f5884ffa287a93dce7bd7e5263241c4db5ba7418863fe754d6b731c7e5e06f2" dependencies = [ "byteorder", "diesel", @@ -1594,9 +1349,15 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fc5557efc453706fed5e4fa85006fe9817c224c3f480a34c7e5959fd700921c5" dependencies = [ - "syn 2.0.31", + "syn 2.0.77", ] +[[package]] +name = "diff" +version = "0.1.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56254986775e3233ffa9c4d7d3faaf6d36a2c09d30b20687e9f88bc8bafc16c8" + [[package]] name = "digest" version = "0.10.7" @@ -1604,24 +1365,30 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" dependencies = [ "block-buffer", + "const-oid", "crypto-common", "subtle", ] [[package]] name = "diligent-date-parser" -version = "0.1.3" +version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c2d0fd95c7c02e2d6c588c6c5628466fff9bdde4b8c6196465e087b08e792720" +checksum = "f6cf7fe294274a222363f84bcb63cdea762979a0443b4cf1f4f8fd17c86b1182" dependencies = [ "chrono", ] [[package]] -name = "dlv-list" -version = "0.3.0" +name = "displaydoc" +version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0688c2a7f92e427f44895cd63841bff7b29f8d7a1648b9e7e07a4a365b2e1257" +checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.77", +] [[package]] name = "doku" @@ -1644,26 +1411,38 @@ dependencies = [ "darling 0.13.4", "proc-macro2", "quote", - "syn 1.0.103", + "syn 1.0.109", ] [[package]] -name = "downcast-rs" -version = "1.2.0" +name = "downcast" +version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ea835d29036a4087793836fa931b08837ad5e957da9e23886b29586fb9b6650" +checksum = "1435fa1053d8b2fbbe9be7e97eca7f33d37b28409959813daefc1446a14247f1" + +[[package]] +name = "downcast-rs" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75b325c5dbd37f80359721ad39aca5a29fb04c89279657cffdda8736d0c0b9d2" + +[[package]] +name = "dunce" +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "92773504d58c093f6de2459af4af33faa518c13451eb8f2b5698ed3d36e7c813" [[package]] name = "dyn-clone" -version = "1.0.11" +version = "1.0.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "68b0cf012f1230e43cd00ebb729c6bb58707ecfa8ad08b52ef3a4ccd2697fc30" +checksum = "0d6ef0072f8a535281e4876be788938b528e9a1d43900b82c2569af7da799125" [[package]] name = "either" -version = "1.8.0" +version = "1.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "90e5c1c8368803113bf0c9584fc495a58b86dc8a29edbf8fe877d21d9507e797" +checksum = "60b1af1c220855b6ceac025d3f6ecdd2b7c4894bfe9cd9bda4fbb4bc7c0d4cf0" [[package]] name = "elementtree" @@ -1676,89 +1455,25 @@ dependencies = [ [[package]] name = "email-encoding" -version = "0.2.0" +version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dbfb21b9878cf7a348dcb8559109aabc0ec40d69924bd706fa5149846c4fef75" +checksum = "60d1d33cdaede7e24091f039632eb5d3c7469fe5b066a985281a34fc70fa317f" dependencies = [ - "base64 0.21.2", + "base64 0.22.1", "memchr", ] [[package]] name = "email_address" -version = 
"0.2.3" +version = "0.2.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b1b32a7a2580c4473f10f66b512c34bdd7d33c5e3473227ca833abdb5afe4809" - -[[package]] -name = "encoding" -version = "0.2.33" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6b0d943856b990d12d3b55b359144ff341533e516d94098b1d3fc1ac666d36ec" -dependencies = [ - "encoding-index-japanese", - "encoding-index-korean", - "encoding-index-simpchinese", - "encoding-index-singlebyte", - "encoding-index-tradchinese", -] - -[[package]] -name = "encoding-index-japanese" -version = "1.20141219.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "04e8b2ff42e9a05335dbf8b5c6f7567e5591d0d916ccef4e0b1710d32a0d0c91" -dependencies = [ - "encoding_index_tests", -] - -[[package]] -name = "encoding-index-korean" -version = "1.20141219.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4dc33fb8e6bcba213fe2f14275f0963fd16f0a02c878e3095ecfdf5bee529d81" -dependencies = [ - "encoding_index_tests", -] - -[[package]] -name = "encoding-index-simpchinese" -version = "1.20141219.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d87a7194909b9118fc707194baa434a4e3b0fb6a5a757c73c3adb07aa25031f7" -dependencies = [ - "encoding_index_tests", -] - -[[package]] -name = "encoding-index-singlebyte" -version = "1.20141219.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3351d5acffb224af9ca265f435b859c7c01537c0849754d3db3fdf2bfe2ae84a" -dependencies = [ - "encoding_index_tests", -] - -[[package]] -name = "encoding-index-tradchinese" -version = "1.20141219.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fd0e20d5688ce3cab59eb3ef3a2083a5c77bf496cb798dc6fcdb75f323890c18" -dependencies = [ - "encoding_index_tests", -] - -[[package]] -name = "encoding_index_tests" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a246d82be1c9d791c5dfde9a2bd045fc3cbba3fa2b11ad558f27d01712f00569" +checksum = "e079f19b08ca6239f47f8ba8509c11cf3ea30095831f7fed61441475edd8c449" [[package]] name = "encoding_rs" -version = "0.8.31" +version = "0.8.34" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9852635589dc9f9ea1b6fe9f05b50ef208c85c834a562f0c6abb1c475736ec2b" +checksum = "b45de904aa0b010bce2ab45264d0631681847fa7b6f2eaa7dab7619943bc4f59" dependencies = [ "cfg-if", ] @@ -1771,22 +1486,22 @@ checksum = "b5320ae4c3782150d900b79807611a59a99fc9a1d61d686faafc24b93fc8d7ca" [[package]] name = "enum-map" -version = "2.6.0" +version = "2.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "017b207acb4cc917f4c31758ed95c0bc63ddb0f358b22eb38f80a2b2a43f6b1f" +checksum = "6866f3bfdf8207509a033af1a75a7b08abda06bbaaeae6669323fd5a097df2e9" dependencies = [ "enum-map-derive", ] [[package]] name = "enum-map-derive" -version = "0.12.0" +version = "0.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8560b409800a72d2d7860f8e5f4e0b0bd22bea6a352ea2a9ce30ccdef7f16d2f" +checksum = "f282cfdfe92516eb26c2af8589c274c7c17681f5ecc03c18255fe741c6aa64eb" dependencies = [ "proc-macro2", "quote", - "syn 2.0.31", + "syn 2.0.77", ] [[package]] @@ -1798,7 +1513,7 @@ dependencies = [ "enum_delegate_lib", "proc-macro2", "quote", - "syn 1.0.103", + "syn 1.0.109", ] [[package]] @@ -1810,70 +1525,44 @@ dependencies = [ "proc-macro2", "quote", "rand", - "syn 1.0.103", + "syn 1.0.109", 
] [[package]] name = "equivalent" -version = "1.0.0" +version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "88bffebc5d80432c9b140ee17875ff173a8ab62faad5b257da912bd2f6c1c0a1" +checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" [[package]] name = "errno" -version = "0.2.8" +version = "0.3.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f639046355ee4f37944e44f60642c6f3a7efa3cf6b78c78a0d989a8ce6c396a1" +checksum = "534c5cf6194dfab3db3242765c03bbe257cf92f22b38f6bc0c58d59108a820ba" dependencies = [ - "errno-dragonfly", "libc", - "winapi", -] - -[[package]] -name = "errno" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4bcfec3a70f97c962c307b2d2c56e358cf1d00b558d74262b5f929ee8cc7e73a" -dependencies = [ - "errno-dragonfly", - "libc", - "windows-sys 0.48.0", -] - -[[package]] -name = "errno-dragonfly" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aa68f1b12764fab894d2755d2518754e71b4fd80ecfb822714a1206c2aab39bf" -dependencies = [ - "cc", - "libc", -] - -[[package]] -name = "error-chain" -version = "0.12.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2d2f06b9cac1506ece98fe3231e3cc9c4410ec3d5b1f24ae1c8946f0742cdefc" -dependencies = [ - "version_check", + "windows-sys 0.52.0", ] [[package]] name = "event-listener" -version = "2.5.3" +version = "5.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0206175f82b8d6bf6652ff7d71a1e27fd2e4efde587fd368662814d6ec1d9ce0" +checksum = "6032be9bd27023a771701cc49f9f053c751055f71efb2e0ae5c15809093675ba" +dependencies = [ + "concurrent-queue", + "parking", + "pin-project-lite", +] [[package]] -name = "eyre" -version = "0.6.8" +name = "event-listener-strategy" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c2b6b5a29c02cdc822728b7d7b8ae1bab3e3b05d44522770ddd49722eeac7eb" +checksum = "0f214dc438f977e6d4e3500aaa277f5ad94ca83fbbd9b1a15713ce2344ccc5a1" dependencies = [ - "indenter", - "once_cell", + "event-listener", + "pin-project-lite", ] [[package]] @@ -1882,20 +1571,11 @@ version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4443176a9f2c162692bd3d352d745ef9413eec5782a80d8fd6f8a1ac692a07f7" -[[package]] -name = "fallible_collections" -version = "0.4.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c195cf4b2285d3c993eb887b4dc56b0d5728bbe1d0f9a99c0ac6bec2da3e4d85" -dependencies = [ - "hashbrown 0.12.3", -] - [[package]] name = "fancy-regex" -version = "0.7.1" +version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d6b8560a05112eb52f04b00e5d3790c0dd75d9d980eb8a122fb23b92a623ccf" +checksum = "b95f7c0680e4142284cf8b22c14a476e87d61b004a3a0861872b32ef7ead40a2" dependencies = [ "bit-set", "regex", @@ -1903,21 +1583,27 @@ dependencies = [ [[package]] name = "fastrand" -version = "1.8.0" +version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a7a407cfaa3385c4ae6b23e84623d48c2798d06e3e6a1878f7f59f17b3f86499" +checksum = "e8c02a5121d4ea3eb16a80748c74f5549a5665e4c21333c6098f283870fbdea6" + +[[package]] +name = "fdeflate" +version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4f9bfee30e4dedf0ab8b422f03af778d9612b63f502710fc500a334ebe2de645" dependencies = [ - "instant", + 
"simd-adler32", ] [[package]] name = "flate2" -version = "1.0.24" +version = "1.0.33" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f82b0f4c27ad9f8bfd1f3208d882da2b09c301bc1c828fd3a00d0216d2fbbff6" +checksum = "324a1be68054ef05ad64b861cc9eaf1d623d2d8cb25b4bf2cb9cdd902b4bf253" dependencies = [ "crc32fast", - "miniz_oxide 0.5.4", + "miniz_oxide 0.8.0", ] [[package]] @@ -1926,39 +1612,26 @@ version = "1.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" -[[package]] -name = "foreign-types" -version = "0.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1" -dependencies = [ - "foreign-types-shared", -] - -[[package]] -name = "foreign-types-shared" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" - [[package]] name = "form_urlencoded" -version = "1.2.0" +version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a62bc1cf6f830c2ec14a513a9fb124d0a213a629668a4186f329db21fe045652" +checksum = "e13624c2627564efccf4934284bdd98cbaa14e79b0b5a141218e507b3a823456" dependencies = [ "percent-encoding", ] [[package]] -name = "fs2" -version = "0.4.3" +name = "fragile" +version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9564fc758e15025b46aa6643b1b77d047d1a56a1aea6e01002ac0c7026876213" -dependencies = [ - "libc", - "winapi", -] +checksum = "6c2141d6d6c8512188a7891b4b01590a45f6dac67afb4f255c4124dbb86d4eaa" + +[[package]] +name = "fs_extra" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42703706b716c37f96a77aea830392ad231f44c9e9a67872fa5548707e11b11c" [[package]] name = "futf" @@ -1972,9 +1645,9 @@ dependencies = [ [[package]] name = "futures" -version = "0.3.28" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "23342abe12aba583913b2e62f22225ff9c950774065e4bfb61a19cd9770fec40" +checksum = "65bc07b1a8bc7c85c5f2e110c476c7389b4554ba72af57d8445ea63a576b0876" dependencies = [ "futures-channel", "futures-core", @@ -1987,9 +1660,9 @@ dependencies = [ [[package]] name = "futures-channel" -version = "0.3.28" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "955518d47e09b25bbebc7a18df10b81f0c766eaf4c4f1cccef2fca5f2a4fb5f2" +checksum = "2dff15bf788c671c1934e366d07e30c1814a8ef514e1af724a602e8a2fbe1b10" dependencies = [ "futures-core", "futures-sink", @@ -1997,15 +1670,15 @@ dependencies = [ [[package]] name = "futures-core" -version = "0.3.28" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4bca583b7e26f571124fe5b7561d49cb2868d79116cfa0eefce955557c6fee8c" +checksum = "05f29059c0c2090612e8d742178b0580d2dc940c837851ad723096f87af6663e" [[package]] name = "futures-executor" -version = "0.3.28" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ccecee823288125bd88b4d7f565c9e58e41858e47ab72e8ea2d64e93624386e0" +checksum = "1e28d1d997f585e54aebc3f97d39e72338912123a67330d723fdbb564d646c9f" dependencies = [ "futures-core", "futures-task", @@ -2014,53 +1687,38 @@ dependencies = [ [[package]] name = "futures-io" -version = "0.3.28" +version = "0.3.31" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "4fff74096e71ed47f8e023204cfd0aa1289cd54ae5430a9523be060cdb849964" - -[[package]] -name = "futures-lite" -version = "1.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49a9d51ce47660b1e808d3c990b4709f2f415d928835a17dfd16991515c46bce" -dependencies = [ - "fastrand", - "futures-core", - "futures-io", - "memchr", - "parking", - "pin-project-lite", - "waker-fn", -] +checksum = "9e5c1b78ca4aae1ac06c48a526a655760685149f0d465d21f37abfe57ce075c6" [[package]] name = "futures-macro" -version = "0.3.28" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "89ca545a94061b6365f2c7355b4b32bd20df3ff95f02da9329b34ccc3bd6ee72" +checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650" dependencies = [ "proc-macro2", "quote", - "syn 2.0.31", + "syn 2.0.77", ] [[package]] name = "futures-sink" -version = "0.3.28" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f43be4fe21a13b9781a69afa4985b0f6ee0e1afab2c6f454a8cf30e2b2237b6e" +checksum = "e575fab7d1e0dcb8d0c7bcf9a63ee213816ab51902e6d244a95819acacf1d4f7" [[package]] name = "futures-task" -version = "0.3.28" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "76d3d132be6c0e6aa1534069c705a74a5997a356c0dc2f86a47765e5617c5b65" +checksum = "f90f7dce0722e95104fcb095585910c0977252f286e354b5e3bd38902cd99988" [[package]] name = "futures-util" -version = "0.3.28" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26b01e40b772d54cf6c6d721c1d1abd0647a0106a12ecaa1c186273392a69533" +checksum = "9fa08315bb612088cc391249efdc3bc77536f16c91f6cf495e6fbe85b20a4a81" dependencies = [ "futures-channel", "futures-core", @@ -2074,20 +1732,11 @@ dependencies = [ "slab", ] -[[package]] -name = "fxhash" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c31b6d751ae2c7f11320402d34e41349dd1016f8d5d45e48c4312bc8625af50c" -dependencies = [ - "byteorder", -] - [[package]] name = "generic-array" -version = "0.14.6" +version = "0.14.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bff49e947297f3312447abdca79f45f4738097cc82b06e72054d2223f601f1b9" +checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" dependencies = [ "typenum", "version_check", @@ -2095,22 +1744,22 @@ dependencies = [ [[package]] name = "getrandom" -version = "0.2.10" +version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "be4136b2a15dd319360be1c07d9933517ccf0be8f16bf62a3bee4f0d618df427" +checksum = "c4567c8db10ae91089c99af84c68c38da3ec2f087c3f82960bcdbf3656b6f4d7" dependencies = [ "cfg-if", "js-sys", "libc", - "wasi 0.11.0+wasi-snapshot-preview1", + "wasi", "wasm-bindgen", ] [[package]] name = "gimli" -version = "0.27.0" +version = "0.31.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dec7af912d60cdbd3677c1af9352ebae6fb8394d165568a2234df0fa00f87793" +checksum = "32085ea23f3234fc7846555e85283ba4de91e21016dc0455a16286d87a292d64" [[package]] name = "glob" @@ -2120,55 +1769,37 @@ checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b" [[package]] name = "h2" -version = "0.3.14" +version = "0.3.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"5ca32592cf21ac7ccab1825cd87f6c9b3d9022c44d086172ed0966bec8af30be" +checksum = "81fe527a889e1532da5c525686d96d4c2e74cdd345badf8dfef9f6b39dd5f5e8" dependencies = [ "bytes", "fnv", "futures-core", "futures-sink", "futures-util", - "http", - "indexmap 1.9.1", + "http 0.2.12", + "indexmap 2.5.0", "slab", "tokio", "tokio-util", "tracing", ] -[[package]] -name = "half" -version = "1.8.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eabb4a44450da02c90444cf74558da904edde8fb4e9035a9a6a4e15445af0bd7" - [[package]] name = "hashbrown" version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" -dependencies = [ - "ahash 0.7.6", -] [[package]] name = "hashbrown" -version = "0.14.0" +version = "0.14.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2c6201b9ff9fd90a5a3bac2e56a830d0caa509576f0e503818ee82c181b3437a" - -[[package]] -name = "hdrhistogram" -version = "7.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f19b9f54f7c7f55e31401bb647626ce0cf0f67b0004982ce815b3ee72a02aa8" +checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1" dependencies = [ - "base64 0.13.1", - "byteorder", - "flate2", - "nom", - "num-traits", + "ahash", + "allocator-api2", ] [[package]] @@ -2178,19 +1809,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" [[package]] -name = "hermit-abi" -version = "0.1.19" +name = "heck" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33" -dependencies = [ - "libc", -] +checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" [[package]] name = "hermit-abi" -version = "0.3.2" +version = "0.3.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "443144c8cdadd93ebf52ddb4056d257f5b52c04d3c804e657d19eb73fc33668b" +checksum = "d231dfb89cfffdbc30e7fc41579ed6066ad03abda9e567ccafae602b97ec5024" [[package]] name = "hex" @@ -2208,21 +1836,19 @@ dependencies = [ ] [[package]] -name = "hostname" -version = "0.3.1" +name = "home" +version = "0.5.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c731c3e10504cc8ed35cfe2f1db4c9274c3d35fa486e3b31df46f068ef3e867" +checksum = "e3d1354bf6b7235cb4a0576c2619fd4ed18183f689b12b006a0ee7329eeff9a5" dependencies = [ - "libc", - "match_cfg", - "winapi", + "windows-sys 0.52.0", ] [[package]] name = "hound" -version = "3.5.0" +version = "3.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4d13cdbd5dbb29f9c88095bbdc2590c9cba0d0a1269b983fef6b2cdd7e9f4db1" +checksum = "62adaabb884c94955b19907d60019f4e145d091c75345379e70d1ee696f7854f" [[package]] name = "html-escape" @@ -2239,25 +1865,25 @@ version = "0.2.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "be92446e11d68f5d71367d571c229d09ced1f24ab6d08ea0bff329d5f6c0b2a3" dependencies = [ - "html5ever", + "html5ever 0.26.0", "jni", "lazy_static", - "markup5ever_rcdom", + "markup5ever_rcdom 0.2.0", "percent-encoding", "regex", ] [[package]] name = "html2text" -version = "0.6.0" +version = "0.12.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "74cda84f06c1cc83476f79ae8e2e892b626bdadafcb227baec54c918cadc18a0" +checksum = 
"042a9677c258ac2952dd026bb0cd21972f00f644a5a38f5a215cb22cdaf6834e" dependencies = [ - "html5ever", - "markup5ever", + "html5ever 0.27.0", + "markup5ever 0.12.1", "tendril", + "thiserror", "unicode-width", - "xml5ever", ] [[package]] @@ -2268,17 +1894,42 @@ checksum = "bea68cab48b8459f17cf1c944c67ddc572d272d9f2b274140f223ecb1da4a3b7" dependencies = [ "log", "mac", - "markup5ever", + "markup5ever 0.11.0", "proc-macro2", "quote", - "syn 1.0.103", + "syn 1.0.109", +] + +[[package]] +name = "html5ever" +version = "0.27.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c13771afe0e6e846f1e67d038d4cb29998a6779f93c809212e4e9c32efd244d4" +dependencies = [ + "log", + "mac", + "markup5ever 0.12.1", + "proc-macro2", + "quote", + "syn 2.0.77", ] [[package]] name = "http" -version = "0.2.9" +version = "0.2.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd6effc99afb63425aff9b05836f029929e345a6148a14b7ecd5ab67af944482" +checksum = "601cbb57e577e2f5ef5be8e7b83f0f63994f25aa94d673e54a92d5c516d101f1" +dependencies = [ + "bytes", + "fnv", + "itoa", +] + +[[package]] +name = "http" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "21b9ddb458710bc376481b842f5da65cdf31522de232c1ca8146abce2a358258" dependencies = [ "bytes", "fnv", @@ -2287,20 +1938,37 @@ dependencies = [ [[package]] name = "http-body" -version = "0.4.5" +version = "0.4.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d5f38f16d184e36f2408a55281cd658ecbd3ca05cce6d6510a176eca393e26d1" +checksum = "7ceab25649e9960c0311ea418d17bee82c0dcec1bd053b5f9a66e265a693bed2" dependencies = [ "bytes", - "http", + "http 0.2.12", "pin-project-lite", ] [[package]] -name = "http-range-header" -version = "0.3.0" +name = "http-body" +version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0bfe8eed0a9285ef776bb792479ea3834e8b94e13d615c2f66d03dd50a435a29" +checksum = "1efedce1fb8e6913f23e0c92de8e62cd5b772a67e7b3946df930a62566c93184" +dependencies = [ + "bytes", + "http 1.1.0", +] + +[[package]] +name = "http-body-util" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "793429d76616a256bcb62c2a2ec2bed781c8307e797e2598c50010f2bee2544f" +dependencies = [ + "bytes", + "futures-util", + "http 1.1.0", + "http-body 1.0.1", + "pin-project-lite", +] [[package]] name = "http-signature-normalization" @@ -2313,14 +1981,15 @@ dependencies = [ [[package]] name = "http-signature-normalization-reqwest" -version = "0.8.0" +version = "0.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c382c69a07b21accae86298d520579403af6479b1cd1c389e3ee11f01d48627" +checksum = "b8822f7eab343cae1ce3bd3b6d0b9b58c72adaf3463627cfe150f8f5406f27aa" dependencies = [ - "base64 0.13.1", + "async-trait", + "base64 0.22.1", "http-signature-normalization", "httpdate", - "reqwest", + "reqwest 0.12.8", "reqwest-middleware", "sha2", "thiserror", @@ -2329,40 +1998,34 @@ dependencies = [ [[package]] name = "httparse" -version = "1.8.0" +version = "1.9.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d897f394bad6a705d5f4104762e116a75639e470d80901eed05a860a95cb1904" +checksum = "0fcc0b4a115bf80b728eb8ea024ad5bd707b615bfed49e0665b6e0f86fd082d9" [[package]] name = "httpdate" -version = "1.0.2" +version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"c4a1e36c821dbe04574f602848a19f742f4fb3c98d40449f11bcad18d6b17421" - -[[package]] -name = "humantime" -version = "2.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4" +checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9" [[package]] name = "hyper" -version = "0.14.25" +version = "0.14.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cc5e554ff619822309ffd57d8734d77cd5ce6238bc956f037ea06c58238c9899" +checksum = "a152ddd61dfaec7273fe8419ab357f33aee0d914c5f4efbf0d96fa749eea5ec9" dependencies = [ "bytes", "futures-channel", "futures-core", "futures-util", "h2", - "http", - "http-body", + "http 0.2.12", + "http-body 0.4.6", "httparse", "httpdate", "itoa", "pin-project-lite", - "socket2 0.4.9", + "socket2", "tokio", "tower-service", "tracing", @@ -2370,52 +2033,235 @@ dependencies = [ ] [[package]] -name = "hyper-timeout" -version = "0.4.1" +name = "hyper" +version = "1.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bbb958482e8c7be4bc3cf272a766a2b0bf1a6755e7a6ae777f017a31d11b13b1" +checksum = "50dfd22e0e76d0f662d429a5f80fcaf3855009297eab6a0a9f8543834744ba05" dependencies = [ - "hyper", + "bytes", + "futures-channel", + "futures-util", + "http 1.1.0", + "http-body 1.0.1", + "httparse", + "itoa", "pin-project-lite", + "smallvec", "tokio", - "tokio-io-timeout", + "want", ] [[package]] -name = "hyper-tls" -version = "0.5.0" +name = "hyper-rustls" +version = "0.24.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d6183ddfa99b85da61a140bea0efc93fdf56ceaa041b37d553518030827f9905" +checksum = "ec3efd23720e2049821a693cbc7e65ea87c72f1c58ff2f9522ff332b1491e590" +dependencies = [ + "futures-util", + "http 0.2.12", + "hyper 0.14.30", + "rustls 0.21.12", + "tokio", + "tokio-rustls 0.24.1", +] + +[[package]] +name = "hyper-rustls" +version = "0.27.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08afdbb5c31130e3034af566421053ab03787c640246a446327f550d11bcb333" +dependencies = [ + "futures-util", + "http 1.1.0", + "hyper 1.4.1", + "hyper-util", + "rustls 0.23.14", + "rustls-pki-types", + "tokio", + "tokio-rustls 0.26.0", + "tower-service", + "webpki-roots 0.26.5", +] + +[[package]] +name = "hyper-util" +version = "0.1.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da62f120a8a37763efb0cf8fdf264b884c7b8b9ac8660b900c8661030c00e6ba" dependencies = [ "bytes", - "hyper", - "native-tls", + "futures-channel", + "futures-util", + "http 1.1.0", + "http-body 1.0.1", + "hyper 1.4.1", + "pin-project-lite", + "socket2", "tokio", - "tokio-native-tls", + "tower", + "tower-service", + "tracing", +] + +[[package]] +name = "i-love-jesus" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "39fa60e3281e1529cc56d96cca925215f51f9b39a96bc677982fbfdf2663cc84" +dependencies = [ + "diesel", + "i-love-jesus-macros", +] + +[[package]] +name = "i-love-jesus-macros" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8215279f83f9b829403812f845aa2d0dd5966332aa2fd0334a375256f3dd0322" +dependencies = [ + "quote", + "syn 2.0.77", ] [[package]] name = "iana-time-zone" -version = "0.1.51" +version = "0.1.61" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f5a6ef98976b22b3b7f2f3a806f858cb862044cfa66805aa3ad84cb3d3b785ed" +checksum = 
"235e081f3925a06703c2d0117ea8b91f042756fd6e7a6e5d901e8ca1a996b220" dependencies = [ "android_system_properties", "core-foundation-sys", "iana-time-zone-haiku", "js-sys", "wasm-bindgen", - "winapi", + "windows-core", ] [[package]] name = "iana-time-zone-haiku" -version = "0.1.1" +version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0703ae284fc167426161c2e3f1da3ea71d94b21bedbcc9494e92b28e334e3dca" +checksum = "f31827a206f56af32e590ba56d5d2d085f558508192593743f16b2306495269f" dependencies = [ - "cxx", - "cxx-build", + "cc", +] + +[[package]] +name = "icu_collections" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "db2fa452206ebee18c4b5c2274dbf1de17008e874b4dc4f0aea9d01ca79e4526" +dependencies = [ + "displaydoc", + "yoke", + "zerofrom", + "zerovec", +] + +[[package]] +name = "icu_locid" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13acbb8371917fc971be86fc8057c41a64b521c184808a698c02acc242dbf637" +dependencies = [ + "displaydoc", + "litemap", + "tinystr", + "writeable", + "zerovec", +] + +[[package]] +name = "icu_locid_transform" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "01d11ac35de8e40fdeda00d9e1e9d92525f3f9d887cdd7aa81d727596788b54e" +dependencies = [ + "displaydoc", + "icu_locid", + "icu_locid_transform_data", + "icu_provider", + "tinystr", + "zerovec", +] + +[[package]] +name = "icu_locid_transform_data" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fdc8ff3388f852bede6b579ad4e978ab004f139284d7b28715f773507b946f6e" + +[[package]] +name = "icu_normalizer" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "19ce3e0da2ec68599d193c93d088142efd7f9c5d6fc9b803774855747dc6a84f" +dependencies = [ + "displaydoc", + "icu_collections", + "icu_normalizer_data", + "icu_properties", + "icu_provider", + "smallvec", + "utf16_iter", + "utf8_iter", + "write16", + "zerovec", +] + +[[package]] +name = "icu_normalizer_data" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8cafbf7aa791e9b22bec55a167906f9e1215fd475cd22adfcf660e03e989516" + +[[package]] +name = "icu_properties" +version = "1.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93d6020766cfc6302c15dbbc9c8778c37e62c14427cb7f6e601d849e092aeef5" +dependencies = [ + "displaydoc", + "icu_collections", + "icu_locid_transform", + "icu_properties_data", + "icu_provider", + "tinystr", + "zerovec", +] + +[[package]] +name = "icu_properties_data" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "67a8effbc3dd3e4ba1afa8ad918d5684b8868b3b26500753effea8d2eed19569" + +[[package]] +name = "icu_provider" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ed421c8a8ef78d3e2dbc98a973be2f3770cb42b606e3ab18d6237c4dfde68d9" +dependencies = [ + "displaydoc", + "icu_locid", + "icu_provider_macros", + "stable_deref_trait", + "tinystr", + "writeable", + "yoke", + "zerofrom", + "zerovec", +] + +[[package]] +name = "icu_provider_macros" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1ec89e9337638ecdc08744df490b221a7399bf8d164eb52a665454e60e075ad6" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.77", ] [[package]] @@ -2436,39 +2282,50 @@ dependencies = [ 
[[package]] name = "idna" -version = "0.4.0" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7d20d6b07bfbc108882d88ed8e37d39636dcc260e15e30c45e6ba089610b917c" +checksum = "634d9b1461af396cad843f47fdba5597a4f9e6ddd4bfb6ff5d85028c25cb12f6" dependencies = [ "unicode-bidi", "unicode-normalization", ] [[package]] -name = "image" -version = "0.24.4" +name = "idna" +version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd8e4fb07cf672b1642304e731ef8a6a4c7891d67bb4fd4f5ce58cd6ed86803c" +checksum = "bd69211b9b519e98303c015e21a007e293db403b6c85b9b124e133d25e242cdd" +dependencies = [ + "icu_normalizer", + "icu_properties", + "smallvec", + "utf8_iter", +] + +[[package]] +name = "image" +version = "0.24.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5690139d2f55868e080017335e4b94cb7414274c74f1669c84fb5feba2c9f69d" dependencies = [ "bytemuck", "byteorder", "color_quant", - "num-rational", "num-traits", "png", ] [[package]] -name = "indenter" -version = "0.3.3" +name = "impl-more" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ce23b50ad8242c51a442f3ff322d56b02f08852c77e4c0b4d3fd684abc89c683" +checksum = "206ca75c9c03ba3d4ace2460e57b189f39f43de612c2f85836e65c929701bb2d" [[package]] name = "indexmap" -version = "1.9.1" +version = "1.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "10a35a97730320ffe8e2d410b5d3b69279b98d2c14bdb8b70ea89ecf7888d41e" +checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99" dependencies = [ "autocfg", "hashbrown 0.12.3", @@ -2477,12 +2334,13 @@ dependencies = [ [[package]] name = "indexmap" -version = "2.0.0" +version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d5477fe2230a79769d8dc68e0eabf5437907c0457a5614a9e8dddb67f65eb65d" +checksum = "68b900aa2f7301e21c36462b170ee99994de34dff39a4a6a528e80e7376d07e5" dependencies = [ "equivalent", - "hashbrown 0.14.0", + "hashbrown 0.14.5", + "serde", ] [[package]] @@ -2494,55 +2352,41 @@ dependencies = [ "generic-array", ] -[[package]] -name = "instant" -version = "0.1.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a5bbe824c507c5da5956355e86a746d82e0e1464f65d862cc5e71da70e94b2c" -dependencies = [ - "cfg-if", -] - -[[package]] -name = "io-lifetimes" -version = "1.0.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eae7b9aee968036d54dce06cebaefd919e4472e753296daccd6d344e3e2df0c2" -dependencies = [ - "hermit-abi 0.3.2", - "libc", - "windows-sys 0.48.0", -] - [[package]] name = "ipnet" -version = "2.5.0" +version = "2.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "879d54834c8c76457ef4293a689b2a8c59b076067ad77b15efafbb05f92a592b" +checksum = "187674a687eed5fe42285b40c6291f9a01517d415fad1c3cbc6a9f778af7fcd4" + +[[package]] +name = "is_terminal_polyfill" +version = "1.70.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7943c866cc5cd64cbc25b2e01621d07fa8eb2a1a23160ee81ce38704e97b8ecf" [[package]] name = "itertools" -version = "0.10.5" +version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473" +checksum = "ba291022dbbd398a455acf126c1e341954079855bc60dfdda641363bd6922569" dependencies = [ "either", ] [[package]] name = "itertools" -version = 
"0.11.0" +version = "0.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b1c173a5686ce8bfa551b3563d0c2170bf24ca44da99c7ca4bfdab5418c3fe57" +checksum = "413ee7dfc52ee1a4949ceeb7dbc8a33f2d6c088194d9f922fb8318faf1f01186" dependencies = [ "either", ] [[package]] name = "itoa" -version = "1.0.6" +version = "1.0.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "453ad9f582a441959e5f0d088b02ce04cfe8d51a8eaf077f12ac6d3e94164ca6" +checksum = "49f1f14873335454500d59611f1cf4a4b0f786f9ac11f4312a78e4cf2566695b" [[package]] name = "jni" @@ -2566,40 +2410,30 @@ checksum = "8eaf4bc02d17cbdd7ff4c7438cafcdf7fb9a4613313ad11b4f8fefe7d3fa0130" [[package]] name = "jobserver" -version = "0.1.26" +version = "0.1.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "936cfd212a0155903bcbc060e316fb6cc7cbf2e1907329391ebadc1fe0ce77c2" +checksum = "48d1dbcbbeb6a7fec7e059840aa538bd62aaccf972c7346c4d9d2059312853d0" dependencies = [ "libc", ] [[package]] name = "js-sys" -version = "0.3.60" +version = "0.3.70" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49409df3e3bf0856b916e2ceaca09ee28e6871cf7d9ce97a692cacfdb2a25a47" +checksum = "1868808506b929d7b0cfa8f75951347aa71bb21144b7791bae35d9bccfcfe37a" dependencies = [ "wasm-bindgen", ] -[[package]] -name = "json5" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "96b0db21af676c1ce64250b5f40f3ce2cf27e4e47cb91ed91eb6fe9350b430c1" -dependencies = [ - "pest", - "pest_derive", - "serde", -] - [[package]] name = "jsonwebtoken" -version = "8.3.0" +version = "9.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6971da4d9c3aa03c3d8f3ff0f4155b534aad021292003895a469716b2a230378" +checksum = "b9ae10193d25051e74945f1ea2d0b42e03cc3b890f7e4cc5faa44997d808193f" dependencies = [ - "base64 0.21.2", + "base64 0.21.7", + "js-sys", "pem", "ring", "serde", @@ -2615,50 +2449,58 @@ checksum = "d4345964bb142484797b161f473a503a434de77149dd8c7427788c6e13379388" [[package]] name = "lazy_static" -version = "1.4.0" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" +checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" +dependencies = [ + "spin", +] + +[[package]] +name = "lazycell" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55" [[package]] name = "lemmy_api" -version = "0.19.0-rc.3" +version = "0.19.6-beta.7" dependencies = [ "activitypub_federation", "actix-web", "actix-web-httpauth", "anyhow", - "async-trait", - "base64 0.21.2", + "base64 0.22.1", "bcrypt", "captcha", "chrono", "elementtree", + "hound", "lemmy_api_common", + "lemmy_api_crud", "lemmy_db_schema", "lemmy_db_views", "lemmy_db_views_actor", "lemmy_db_views_moderator", "lemmy_utils", - "serde", + "pretty_assertions", "serial_test", "sitemap-rs", "tokio", "totp-rs", "tracing", "url", - "uuid", - "wav", ] [[package]] name = "lemmy_api_common" -version = "0.19.0-rc.3" +version = "0.19.6-beta.7" dependencies = [ "activitypub_federation", "actix-web", "anyhow", "chrono", - "encoding", + "encoding_rs", "enum-map", "futures", "getrandom", @@ -2668,10 +2510,11 @@ dependencies = [ "lemmy_db_views_actor", "lemmy_db_views_moderator", "lemmy_utils", - "once_cell", - "percent-encoding", + "mime", + "moka", 
+ "pretty_assertions", "regex", - "reqwest", + "reqwest 0.12.8", "reqwest-middleware", "rosetta-i18n", "serde", @@ -2681,25 +2524,31 @@ dependencies = [ "tracing", "ts-rs", "url", + "urlencoding", "uuid", "webpage", ] [[package]] name = "lemmy_api_crud" -version = "0.19.0-rc.3" +version = "0.19.6-beta.7" dependencies = [ + "accept-language", "activitypub_federation", "actix-web", - "async-trait", + "anyhow", "bcrypt", "chrono", + "futures", "lemmy_api_common", "lemmy_db_schema", "lemmy_db_views", "lemmy_db_views_actor", "lemmy_utils", + "moka", "serde", + "serde_json", + "serde_with", "tracing", "url", "uuid", @@ -2708,7 +2557,7 @@ dependencies = [ [[package]] name = "lemmy_apub" -version = "0.19.0-rc.3" +version = "0.19.6-beta.7" dependencies = [ "activitypub_federation", "actix-web", @@ -2721,39 +2570,53 @@ dependencies = [ "futures", "html2md", "html2text", - "http", - "itertools 0.11.0", + "itertools 0.13.0", "lemmy_api_common", "lemmy_db_schema", "lemmy_db_views", "lemmy_db_views_actor", "lemmy_utils", "moka", - "once_cell", - "reqwest", - "reqwest-middleware", + "pretty_assertions", + "reqwest 0.12.8", "serde", "serde_json", "serde_with", "serial_test", "stringreader", - "strum_macros", - "task-local-extensions", + "strum", "tokio", "tracing", "url", "uuid", ] +[[package]] +name = "lemmy_db_perf" +version = "0.19.6-beta.7" +dependencies = [ + "anyhow", + "clap", + "diesel", + "diesel-async", + "lemmy_db_schema", + "lemmy_db_views", + "lemmy_utils", + "tokio", + "url", +] + [[package]] name = "lemmy_db_schema" -version = "0.19.0-rc.3" +version = "0.19.6-beta.7" dependencies = [ "activitypub_federation", + "anyhow", "async-trait", "bcrypt", "chrono", - "deadpool", + "deadpool 0.12.1", + "derive-new", "diesel", "diesel-async", "diesel-derive-enum", @@ -2761,64 +2624,71 @@ dependencies = [ "diesel_ltree", "diesel_migrations", "futures-util", + "i-love-jesus", "lemmy_utils", - "once_cell", + "moka", + "pretty_assertions", "regex", - "rustls 0.21.3", + "rustls 0.23.14", "serde", "serde_json", "serde_with", "serial_test", "strum", - "strum_macros", "tokio", "tokio-postgres", "tokio-postgres-rustls", "tracing", "ts-rs", - "typed-builder", "url", "uuid", ] [[package]] name = "lemmy_db_views" -version = "0.19.0-rc.3" +version = "0.19.6-beta.7" dependencies = [ "actix-web", + "chrono", "diesel", "diesel-async", "diesel_ltree", + "i-love-jesus", "lemmy_db_schema", "lemmy_utils", + "pretty_assertions", "serde", "serde_with", "serial_test", "tokio", "tracing", "ts-rs", + "url", ] [[package]] name = "lemmy_db_views_actor" -version = "0.19.0-rc.3" +version = "0.19.6-beta.7" dependencies = [ "chrono", "diesel", "diesel-async", "lemmy_db_schema", + "lemmy_db_views", + "lemmy_utils", + "pretty_assertions", "serde", "serde_with", "serial_test", "strum", - "strum_macros", "tokio", "ts-rs", + "url", ] [[package]] name = "lemmy_db_views_moderator" -version = "0.19.0-rc.3" +version = "0.19.6-beta.7" dependencies = [ "diesel", "diesel-async", @@ -2830,56 +2700,54 @@ dependencies = [ [[package]] name = "lemmy_federate" -version = "0.19.0-rc.3" +version = "0.19.6-beta.7" dependencies = [ "activitypub_federation", + "actix-web", "anyhow", "async-trait", - "bytes", "chrono", "diesel", "diesel-async", - "enum_delegate", "futures", "lemmy_api_common", "lemmy_apub", "lemmy_db_schema", "lemmy_db_views_actor", "lemmy_utils", + "mockall", "moka", - "once_cell", - "openssl", - "reqwest", - "reqwest-middleware", - "reqwest-tracing", - "serde", + "reqwest 0.12.8", "serde_json", + "serial_test", + "test-context", "tokio", 
"tokio-util", "tracing", - "tracing-subscriber", + "tracing-test", + "url", + "uuid", ] [[package]] name = "lemmy_routes" -version = "0.19.0-rc.3" +version = "0.19.6-beta.7" dependencies = [ "activitypub_federation", "actix-web", "anyhow", "chrono", "futures", + "http 1.1.0", "lemmy_api_common", "lemmy_db_schema", "lemmy_db_views", "lemmy_db_views_actor", "lemmy_utils", - "once_cell", - "reqwest", + "reqwest 0.12.8", "reqwest-middleware", "rss", "serde", - "strum", "tokio", "tracing", "url", @@ -2887,20 +2755,17 @@ dependencies = [ [[package]] name = "lemmy_server" -version = "0.19.0-rc.3" +version = "0.19.6-beta.7" dependencies = [ "activitypub_federation", "actix-cors", "actix-web", - "actix-web-httpauth", "actix-web-prom", "chrono", "clap", "clokwerk", - "console-subscriber", "diesel", "diesel-async", - "doku", "futures-util", "lemmy_api", "lemmy_api_common", @@ -2910,51 +2775,45 @@ dependencies = [ "lemmy_federate", "lemmy_routes", "lemmy_utils", - "opentelemetry 0.19.0", - "opentelemetry-otlp", - "pict-rs", + "pretty_assertions", "prometheus", - "reqwest", + "reqwest 0.12.8", "reqwest-middleware", "reqwest-tracing", - "rustls 0.21.3", - "serde", + "rustls 0.23.14", "serde_json", "serial_test", "tokio", - "tokio-postgres", - "tokio-postgres-rustls", "tracing", "tracing-actix-web", - "tracing-error", - "tracing-log", - "tracing-opentelemetry 0.19.0", "tracing-subscriber", "url", ] [[package]] name = "lemmy_utils" -version = "0.19.0-rc.3" +version = "0.19.6-beta.7" dependencies = [ "actix-web", "anyhow", - "chrono", + "cfg-if", + "clearurls", "deser-hjson", "diesel", "doku", "enum-map", "futures", "html2text", - "http", - "itertools 0.11.0", + "http 1.1.0", + "itertools 0.13.0", "lettre", "markdown-it", - "once_cell", - "openssl", - "percent-encoding", + "markdown-it-block-spoiler", + "markdown-it-ruby", + "markdown-it-sub", + "markdown-it-sup", + "pretty_assertions", "regex", - "reqwest", "reqwest-middleware", "rosetta-build", "rosetta-i18n", @@ -2962,65 +2821,65 @@ dependencies = [ "serde_json", "smart-default", "strum", - "strum_macros", "tokio", "tracing", - "tracing-error", "ts-rs", - "typed-builder", "url", + "urlencoding", "uuid", ] [[package]] name = "lettre" -version = "0.10.4" +version = "0.11.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "76bd09637ae3ec7bd605b8e135e757980b3968430ff2b1a4a94fb7769e50166d" +checksum = "69f204773bab09b150320ea1c83db41dc6ee606a4bc36dc1f43005fe7b58ce06" dependencies = [ "async-trait", - "base64 0.21.2", + "base64 0.22.1", + "chumsky", "email-encoding", "email_address", "fastrand", "futures-io", "futures-util", - "hostname", "httpdate", - "idna 0.3.0", + "idna 1.0.2", "mime", - "native-tls", "nom", - "once_cell", + "percent-encoding", "quoted_printable", - "socket2 0.4.9", + "rustls 0.23.14", + "rustls-pemfile 2.1.3", + "rustls-pki-types", + "socket2", "tokio", - "tokio-native-tls", + "tokio-rustls 0.26.0", + "url", + "webpki-roots 0.26.5", ] [[package]] name = "libc" -version = "0.2.146" +version = "0.2.158" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f92be4933c13fd498862a9e02a3055f8a8d9c039ce33db97306fd5a6caa7f29b" +checksum = "d8adc4bb1803a324070e64a98ae98f38934d91957a99cfb3a43dcbc01bc56439" [[package]] -name = "line-wrap" -version = "0.1.1" +name = "libloading" +version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f30344350a2a51da54c1d53be93fade8a237e545dbcc4bdbe635413f2117cab9" +checksum = 
"4979f22fdb869068da03c9f7528f8297c6fd2606bc3a4affe42e6a823fdb8da4" dependencies = [ - "safemem", + "cfg-if", + "windows-targets 0.52.6", ] [[package]] -name = "link-cplusplus" -version = "1.0.7" +name = "libm" +version = "0.2.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9272ab7b96c9046fbc5bc56c06c117cb639fe2d509df0c421cad82d2915cf369" -dependencies = [ - "cc", -] +checksum = "4ec2a862134d2a7d32d7983ddcdd1c4923530833c9f2ea1a44fc5fa473989058" [[package]] name = "linked-hash-map" @@ -3030,48 +2889,47 @@ checksum = "0717cef1bc8b636c6e1c1bbdefc09e6322da8a9321966e8928ef80d20f7f770f" [[package]] name = "linkify" -version = "0.9.0" +version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "96dd5884008358112bc66093362197c7248ece00d46624e2cf71e50029f8cff5" +checksum = "f1dfa36d52c581e9ec783a7ce2a5e0143da6237be5811a0b3153fedfdbe9f780" dependencies = [ "memchr", ] [[package]] name = "linux-raw-sys" -version = "0.1.4" +version = "0.4.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f051f77a7c8e6957c0696eac88f26b0117e54f52d3fc682ab19397a8812846a4" +checksum = "78b3ae25bc7c8c38cec158d1f2757ee79e9b3740fbc7ccf0e59e4b08d793fa89" [[package]] -name = "linux-raw-sys" -version = "0.3.8" +name = "litemap" +version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ef53942eb7bf7ff43a617b3e2c1c4a5ecf5944a7c1bc12d7ee39bbb15e5c1519" +checksum = "643cb0b8d4fcc284004d5fd0d67ccf61dfffadb7f75e1e71bc420f4688a3a704" [[package]] name = "local-channel" -version = "0.1.3" +version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f303ec0e94c6c54447f84f3b0ef7af769858a9c4ef56ef2a986d3dcd4c3fc9c" +checksum = "b6cbc85e69b8df4b8bb8b89ec634e7189099cea8927a276b7384ce5488e53ec8" dependencies = [ "futures-core", "futures-sink", - "futures-util", "local-waker", ] [[package]] name = "local-waker" -version = "0.1.3" +version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e34f76eb3611940e0e7d53a9aaa4e6a3151f69541a282fd0dad5571420c53ff1" +checksum = "4d873d7c67ce09b42110d801813efbc9364414e356be9935700d368351657487" [[package]] name = "lock_api" -version = "0.4.9" +version = "0.4.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "435011366fe56583b16cf956f9df0095b405b82d76425bc8981c0e22e60ec4df" +checksum = "07af8b9cdd281b7915f413fa73f29ebd5d55d0d3f0155584dade1ff18cea1b17" dependencies = [ "autocfg", "scopeguard", @@ -3079,12 +2937,11 @@ dependencies = [ [[package]] name = "lodepng" -version = "3.7.2" +version = "3.10.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f0ad39f75bbaa4b10bb6f2316543632a8046a5bcf9c785488d79720b21f044f8" +checksum = "7b2dea7cda68e381418c985fd8f32a9c279a21ae8c715f2376adb20c27a0fad3" dependencies = [ "crc32fast", - "fallible_collections", "flate2", "libc", "rgb", @@ -3092,12 +2949,9 @@ dependencies = [ [[package]] name = "log" -version = "0.4.17" +version = "0.4.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "abb12e687cfb44aa40f41fc3978ef76448f9b6038cad6aef4259d3c095a2382e" -dependencies = [ - "cfg-if", -] +checksum = "a7a70ba024b9dc04c27ea2f0c0548feb474ec5c54bba33a7f72f873a39d07b24" [[package]] name = "mac" @@ -3105,20 +2959,11 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c41e0c4fef86961ac6d6f8a82609f55f31b05e4fce149ac5710e439df7619ba4" 
-[[package]] -name = "mach2" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6d0d1830bcd151a6fc4aea1369af235b36c1528fe976b8ff678683c9995eade8" -dependencies = [ - "libc", -] - [[package]] name = "markdown-it" -version = "0.5.1" +version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c44ffb00018b76ef3c6eff5e17d34b44f0bbded0b70291940564c527cba07ad8" +checksum = "f99c010929c8217b2dc0940954267a2e15a15f17cb309cd1f299e21933f84fac" dependencies = [ "argparse", "const_format", @@ -3137,6 +2982,44 @@ dependencies = [ "unicode-general-category", ] +[[package]] +name = "markdown-it-block-spoiler" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "008a8e4184fd08b5dca0f2b5b2ef8f126c1e83ca797c44ee41f8d7765951360c" +dependencies = [ + "itertools 0.13.0", + "markdown-it", +] + +[[package]] +name = "markdown-it-ruby" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a3505f4ada7c372e7f5eb4b07850bf5921193bc0bd43cb18991233999c9134d4" +dependencies = [ + "itertools 0.13.0", + "markdown-it", +] + +[[package]] +name = "markdown-it-sub" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8abe3aa8927af2314644b3aae37393241a229e869ff9c95ac640749e08357d2a" +dependencies = [ + "markdown-it", +] + +[[package]] +name = "markdown-it-sup" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5ae949e78c7a615f88a47019d51b65962bfc5c4cbc65fa81eae8b9b2506d1cb1" +dependencies = [ + "markdown-it", +] + [[package]] name = "markup5ever" version = "0.11.0" @@ -3145,7 +3028,21 @@ checksum = "7a2629bb1404f3d34c2e921f21fd34ba00b206124c81f65c50b43b6aaefeb016" dependencies = [ "log", "phf 0.10.1", - "phf_codegen", + "phf_codegen 0.10.0", + "string_cache", + "string_cache_codegen", + "tendril", +] + +[[package]] +name = "markup5ever" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "16ce3abbeba692c8b8441d036ef91aea6df8da2c6b6e21c7e14d3c18e526be45" +dependencies = [ + "log", + "phf 0.11.2", + "phf_codegen 0.11.2", "string_cache", "string_cache_codegen", "tendril", @@ -3157,17 +3054,23 @@ version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b9521dd6750f8e80ee6c53d65e2e4656d7de37064f3a7a5d2d11d05df93839c2" dependencies = [ - "html5ever", - "markup5ever", + "html5ever 0.26.0", + "markup5ever 0.11.0", "tendril", - "xml5ever", + "xml5ever 0.17.0", ] [[package]] -name = "match_cfg" -version = "0.1.0" +name = "markup5ever_rcdom" +version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ffbee8634e0d45d258acb448e7eaab3fce7a0a467395d4d9f228e3c1f01fb2e4" +checksum = "edaa21ab3701bfee5099ade5f7e1f84553fd19228cf332f13cd6e964bf59be18" +dependencies = [ + "html5ever 0.27.0", + "markup5ever 0.12.1", + "tendril", + "xml5ever 0.18.1", +] [[package]] name = "matchers" @@ -3180,22 +3083,17 @@ dependencies = [ [[package]] name = "matchit" -version = "0.5.0" +version = "0.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "73cbba799671b762df5a175adf59ce145165747bb891505c43d09aefbbf38beb" - -[[package]] -name = "matchit" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b87248edafb776e59e6ee64a79086f65890d3510f2c656c000bf2a7e8a0aea40" +checksum = 
"47e1ffaa40ddd1f3ed91f717a33c8c0ee23fff369e3aa8772b9605cc1d22f4c3" [[package]] name = "md-5" -version = "0.10.5" +version = "0.10.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6365506850d44bff6e2fbcb5176cf63650e48bd45ef2fe2665ae1570e0f4b9ca" +checksum = "d89e7ee0cfbedfc4da3340218492196241d89eefb6dab27de5df917a6d2e78cf" dependencies = [ + "cfg-if", "digest", ] @@ -3212,18 +3110,9 @@ dependencies = [ [[package]] name = "memchr" -version = "2.5.0" +version = "2.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d" - -[[package]] -name = "memoffset" -version = "0.7.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5de893c32cde5f383baa4c04c5d6dbdd735cfd4a794b0debdb2bb1b421da5ff4" -dependencies = [ - "autocfg", -] +checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3" [[package]] name = "migrations_internals" @@ -3232,7 +3121,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0f23f71580015254b020e856feac3df5878c2c7a8812297edd6c0a485ac9dada" dependencies = [ "serde", - "toml 0.7.4", + "toml", ] [[package]] @@ -3248,19 +3137,9 @@ dependencies = [ [[package]] name = "mime" -version = "0.3.16" +version = "0.3.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2a60c7ce501c71e03a9c9c0d35b861413ae925bd979cc7a4e30d060069aaac8d" - -[[package]] -name = "mime_guess" -version = "2.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4192263c238a5f0d0c6bfd21f336a313a4ce1c450542449ca191bb657b4642ef" -dependencies = [ - "mime", - "unicase", -] +checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" [[package]] name = "minimal-lexical" @@ -3270,52 +3149,85 @@ checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" [[package]] name = "miniz_oxide" -version = "0.5.4" +version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "96590ba8f175222643a85693f33d26e9c8a015f599c216509b1a6894af675d34" +checksum = "b8a240ddb74feaf34a79a7add65a741f3167852fba007066dcac1ca548d89c08" dependencies = [ "adler", + "simd-adler32", ] [[package]] name = "miniz_oxide" -version = "0.6.2" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b275950c28b37e794e8c55d88aeb5e139d0ce23fdbbeda68f8d7174abdf9e8fa" +checksum = "e2d80299ef12ff69b16a84bb182e3b9df68b5a91574d3d4fa6e41b65deec4df1" dependencies = [ - "adler", + "adler2", ] [[package]] name = "mio" -version = "0.8.8" +version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "927a765cd3fc26206e66b296465fa9d3e5ab003e651c1b3c060e7956d96b19d2" +checksum = "80e04d1dcff3aae0704555fe5fee3bcfaf3d1fdf8a7e521d5b9d2b42acb52cec" dependencies = [ + "hermit-abi", "libc", "log", - "wasi 0.11.0+wasi-snapshot-preview1", - "windows-sys 0.48.0", + "wasi", + "windows-sys 0.52.0", +] + +[[package]] +name = "mirai-annotations" +version = "1.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c9be0862c1b3f26a88803c4a49de6889c10e608b3ee9344e6ef5b45fb37ad3d1" + +[[package]] +name = "mockall" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d4c28b3fb6d753d28c20e826cd46ee611fda1cf3cde03a443a974043247c065a" +dependencies = [ + "cfg-if", + "downcast", + "fragile", + "mockall_derive", + "predicates", 
+ "predicates-tree", +] + +[[package]] +name = "mockall_derive" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "341014e7f530314e9a1fdbc7400b244efea7122662c96bfa248c31da5bfb2020" +dependencies = [ + "cfg-if", + "proc-macro2", + "quote", + "syn 2.0.77", ] [[package]] name = "moka" -version = "0.11.2" +version = "0.12.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "206bf83f415b0579fd885fe0804eb828e727636657dc1bf73d80d2f1218e14a1" +checksum = "32cf62eb4dd975d2dde76432fb1075c49e3ee2331cf36f1f8fd4b66550d32b6f" dependencies = [ - "async-io", "async-lock", + "async-trait", "crossbeam-channel", "crossbeam-epoch", "crossbeam-utils", + "event-listener", "futures-util", "once_cell", - "parking_lot 0.12.1", + "parking_lot", "quanta", "rustc_version", - "scheduled-thread-pool", - "skeptic", "smallvec", "tagptr", "thiserror", @@ -3324,22 +3236,10 @@ dependencies = [ ] [[package]] -name = "native-tls" -version = "0.2.10" +name = "mutually_exclusive_features" +version = "0.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fd7e2f3618557f980e0b17e8856252eee3c97fa12c54dff0ca290fb6266ca4a9" -dependencies = [ - "lazy_static", - "libc", - "log", - "openssl", - "openssl-probe", - "openssl-sys", - "schannel", - "security-framework", - "security-framework-sys", - "tempfile", -] +checksum = "6d02c0b00610773bb7fc61d85e13d86c7858cbdf00e1a120bfc41bc055dbaa0e" [[package]] name = "never" @@ -3349,15 +3249,15 @@ checksum = "c96aba5aa877601bb3f6dd6a63a969e1f82e60646e81e71b14496995e9853c91" [[package]] name = "new_debug_unreachable" -version = "1.0.4" +version = "1.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e4a24736216ec316047a1fc4252e27dabb04218aa4a3f37c6e7ddbf1f9782b54" +checksum = "650eef8c711430f1a879fdd01d4745a7deea475becfb90269c06775983bbf086" [[package]] name = "nom" -version = "7.1.1" +version = "7.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a8903e5a29a317527874d0402f867152a3d21c908bb0b933e416c65e301d4c36" +checksum = "d273983c5a657a70a3e8f2a01329822f3b8c8172b73826411a55751e404a0a4a" dependencies = [ "memchr", "minimal-lexical", @@ -3375,30 +3275,51 @@ dependencies = [ [[package]] name = "num-bigint" -version = "0.4.4" +version = "0.4.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "608e7659b5c3d7cba262d894801b9ec9d00de989e8a82bd4bef91d08da45cdc0" +checksum = "a5e44f723f1133c9deac646763579fdb3ac745e418f2a7af9cd0c431da1f20b9" dependencies = [ - "autocfg", "num-integer", "num-traits", ] [[package]] -name = "num-integer" -version = "0.1.45" +name = "num-bigint-dig" +version = "0.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "225d3389fb3509a24c93f5c29eb6bde2586b98d9f016636dff58d7c6f7569cd9" +checksum = "dc84195820f291c7697304f3cbdadd1cb7199c0efc917ff5eafd71225c136151" +dependencies = [ + "byteorder", + "lazy_static", + "libm", + "num-integer", + "num-iter", + "num-traits", + "rand", + "smallvec", + "zeroize", +] + +[[package]] +name = "num-conv" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "51d515d32fb182ee37cda2ccdcb92950d6a3c2893aa280e540671c2cd0f3b1d9" + +[[package]] +name = "num-integer" +version = "0.1.46" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7969661fd2958a5cb096e56c8e1ad0444ac2bbcd0061bd28660485a44879858f" dependencies = [ - "autocfg", "num-traits", ] [[package]] 
-name = "num-rational" -version = "0.4.1" +name = "num-iter" +version = "0.1.45" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0638a1c9d0a3c0914158145bc76cff373a75a627e6ecbfb71cbe6f453a5a19b0" +checksum = "1429034a0490724d0075ebb2bc9e875d6503c3cf69e235a8941aa757d83ef5bf" dependencies = [ "autocfg", "num-integer", @@ -3407,196 +3328,38 @@ dependencies = [ [[package]] name = "num-traits" -version = "0.2.16" +version = "0.2.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f30b0abd723be7e2ffca1272140fac1a2f084c77ec3e123c192b66af1ee9e6c2" +checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841" dependencies = [ "autocfg", + "libm", ] [[package]] name = "num_cpus" -version = "1.13.1" +version = "1.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "19e64526ebdee182341572e50e9ad03965aa510cd94427a4549448f285e957a1" -dependencies = [ - "hermit-abi 0.1.19", - "libc", -] - -[[package]] -name = "num_threads" -version = "0.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2819ce041d2ee131036f4fc9d6ae7ae125a3a40e97ba64d04fe799ad9dabbb44" +checksum = "4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43" dependencies = [ + "hermit-abi", "libc", ] [[package]] name = "object" -version = "0.30.0" +version = "0.36.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "239da7f290cfa979f43f85a8efeee9a8a76d0827c356d37f9d3d7254d6b537fb" +checksum = "084f1a5821ac4c651660a94a7153d27ac9d8a53736203f58b31945ded098070a" dependencies = [ "memchr", ] [[package]] name = "once_cell" -version = "1.18.0" +version = "1.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dd8b5dd2ae5ed71462c540258bedcb51965123ad7e7ccf4b9a8cafaa4a63576d" - -[[package]] -name = "openssl" -version = "0.10.55" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "345df152bc43501c5eb9e4654ff05f794effb78d4efe3d53abc158baddc0703d" -dependencies = [ - "bitflags 1.3.2", - "cfg-if", - "foreign-types", - "libc", - "once_cell", - "openssl-macros", - "openssl-sys", -] - -[[package]] -name = "openssl-macros" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b501e44f11665960c7e7fcf062c7d96a14ade4aa98116c004b2e37b5be7d736c" -dependencies = [ - "proc-macro2", - "quote", - "syn 1.0.103", -] - -[[package]] -name = "openssl-probe" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" - -[[package]] -name = "openssl-sys" -version = "0.9.90" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "374533b0e45f3a7ced10fcaeccca020e66656bc03dac384f852e4e5a7a8104a6" -dependencies = [ - "cc", - "libc", - "pkg-config", - "vcpkg", -] - -[[package]] -name = "opentelemetry" -version = "0.16.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e1cf9b1c4e9a6c4de793c632496fa490bdc0e1eea73f0c91394f7b6990935d22" -dependencies = [ - "async-trait", - "crossbeam-channel", - "futures", - "js-sys", - "lazy_static", - "percent-encoding", - "pin-project", - "rand", - "thiserror", -] - -[[package]] -name = "opentelemetry" -version = "0.19.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f4b8347cc26099d3aeee044065ecc3ae11469796b4d65d065a23a584ed92a6f" -dependencies = [ - "opentelemetry_api", - 
"opentelemetry_sdk", -] - -[[package]] -name = "opentelemetry-otlp" -version = "0.12.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8af72d59a4484654ea8eb183fea5ae4eb6a41d7ac3e3bae5f4d2a282a3a7d3ca" -dependencies = [ - "async-trait", - "futures", - "futures-util", - "http", - "opentelemetry 0.19.0", - "opentelemetry-proto", - "prost", - "thiserror", - "tokio", - "tonic 0.8.2", -] - -[[package]] -name = "opentelemetry-proto" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "045f8eea8c0fa19f7d48e7bc3128a39c2e5c533d5c61298c548dfefc1064474c" -dependencies = [ - "futures", - "futures-util", - "opentelemetry 0.19.0", - "prost", - "tonic 0.8.2", -] - -[[package]] -name = "opentelemetry_api" -version = "0.19.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed41783a5bf567688eb38372f2b7a8530f5a607a4b49d38dd7573236c23ca7e2" -dependencies = [ - "fnv", - "futures-channel", - "futures-util", - "indexmap 1.9.1", - "once_cell", - "pin-project-lite", - "thiserror", - "urlencoding", -] - -[[package]] -name = "opentelemetry_sdk" -version = "0.19.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b3a2a91fdbfdd4d212c0dcc2ab540de2c2bcbbd90be17de7a7daf8822d010c1" -dependencies = [ - "async-trait", - "crossbeam-channel", - "dashmap", - "fnv", - "futures-channel", - "futures-executor", - "futures-util", - "once_cell", - "opentelemetry_api", - "percent-encoding", - "rand", - "thiserror", - "tokio", - "tokio-stream", -] - -[[package]] -name = "ordered-multimap" -version = "0.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ccd746e37177e1711c20dd619a1620f34f5c8b569c53590a72dedd5344d8924a" -dependencies = [ - "dlv-list", - "hashbrown 0.12.3", -] +checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92" [[package]] name = "overload" @@ -3604,136 +3367,65 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39" -[[package]] -name = "owo-colors" -version = "3.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c1b04fb49957986fdce4d6ee7a65027d55d4b6d2265e5848bbb507b58ccfdb6f" - [[package]] name = "parking" -version = "2.1.0" +version = "2.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "14f2252c834a40ed9bb5422029649578e63aa341ac401f74e719dd1afda8394e" +checksum = "f38d5652c16fde515bb1ecef450ab0f6a219d619a7274976324d5e377f7dceba" [[package]] name = "parking_lot" -version = "0.11.2" +version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7d17b78036a60663b797adeaee46f5c9dfebb86948d1255007a1d6be0271ff99" -dependencies = [ - "instant", - "lock_api", - "parking_lot_core 0.8.6", -] - -[[package]] -name = "parking_lot" -version = "0.12.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f" +checksum = "f1bf18183cf54e8d6059647fc3063646a1801cf30896933ec2311622cc4b9a27" dependencies = [ "lock_api", - "parking_lot_core 0.9.4", + "parking_lot_core", ] [[package]] name = "parking_lot_core" -version = "0.8.6" +version = "0.9.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "60a2cfe6f0ad2bfc16aefa463b497d5c7a5ecd44a23efa72aa342d90177356dc" -dependencies = [ - "cfg-if", - "instant", - "libc", - 
"redox_syscall", - "smallvec", - "winapi", -] - -[[package]] -name = "parking_lot_core" -version = "0.9.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4dc9e0dc2adc1c69d09143aff38d3d30c5c3f0df0dad82e6d25547af174ebec0" +checksum = "1e401f977ab385c9e4e3ab30627d6f26d00e2c73eef317493c4ec6d468726cf8" dependencies = [ "cfg-if", "libc", "redox_syscall", "smallvec", - "windows-sys 0.42.0", + "windows-targets 0.52.6", ] [[package]] name = "paste" -version = "1.0.9" +version = "1.0.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b1de2e551fb905ac83f73f7aedf2f0cb4a0da7e35efa24a202a936269f1f18e1" - -[[package]] -name = "pathdiff" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8835116a5c179084a830efb3adc117ab007512b535bc1a21c991d3b32a6b44dd" +checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a" [[package]] name = "pem" -version = "1.1.1" +version = "3.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a8835c273a76a90455d7344889b0964598e3316e2a79ede8e36f16bdcf2228b8" +checksum = "8e459365e590736a54c3fa561947c84837534b8e9af6fc5bf781307e82658fae" dependencies = [ - "base64 0.13.1", + "base64 0.22.1", + "serde", +] + +[[package]] +name = "pem-rfc7468" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "88b39c9bfcfc231068454382784bb460aae594343fb030d46e9f50a645418412" +dependencies = [ + "base64ct", ] [[package]] name = "percent-encoding" -version = "2.3.0" +version = "2.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b2a4787296e9989611394c33f193f676704af1686e70b8f8033ab5ba9a35a94" - -[[package]] -name = "pest" -version = "2.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dbc7bc69c062e492337d74d59b120c274fd3d261b6bf6d3207d499b4b379c41a" -dependencies = [ - "thiserror", - "ucd-trie", -] - -[[package]] -name = "pest_derive" -version = "2.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "60b75706b9642ebcb34dab3bc7750f811609a0eb1dd8b88c2d15bf628c1c65b2" -dependencies = [ - "pest", - "pest_generator", -] - -[[package]] -name = "pest_generator" -version = "2.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f4f9272122f5979a6511a749af9db9bfc810393f63119970d7085fed1c4ea0db" -dependencies = [ - "pest", - "pest_meta", - "proc-macro2", - "quote", - "syn 1.0.103", -] - -[[package]] -name = "pest_meta" -version = "2.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c8717927f9b79515e565a64fe46c38b8cd0427e64c40680b14a7365ab09ac8d" -dependencies = [ - "once_cell", - "pest", - "sha1", -] +checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" [[package]] name = "phf" @@ -3746,11 +3438,11 @@ dependencies = [ [[package]] name = "phf" -version = "0.11.1" +version = "0.11.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "928c6535de93548188ef63bb7c4036bd415cd8f36ad25af44b9789b2ee72a48c" +checksum = "ade2d8b8f33c7333b51bcf0428d37e217e9f32192ae4772156f65063b8ce03dc" dependencies = [ - "phf_shared 0.11.1", + "phf_shared 0.11.2", ] [[package]] @@ -3759,10 +3451,20 @@ version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4fb1c3a8bc4dd4e5cfce29b44ffc14bedd2ee294559a294e2a4d4c9e9a6a13cd" dependencies = [ - "phf_generator", + "phf_generator 0.10.0", 
"phf_shared 0.10.0", ] +[[package]] +name = "phf_codegen" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e8d39688d359e6b34654d328e262234662d16cc0f60ec8dcbe5e718709342a5a" +dependencies = [ + "phf_generator 0.11.2", + "phf_shared 0.11.2", +] + [[package]] name = "phf_generator" version = "0.10.0" @@ -3773,6 +3475,16 @@ dependencies = [ "rand", ] +[[package]] +name = "phf_generator" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "48e4cc64c2ad9ebe670cb8fd69dd50ae301650392e81c05f9bfcb2d5bdbc24b0" +dependencies = [ + "phf_shared 0.11.2", + "rand", +] + [[package]] name = "phf_shared" version = "0.10.0" @@ -3784,92 +3496,38 @@ dependencies = [ [[package]] name = "phf_shared" -version = "0.11.1" +version = "0.11.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e1fb5f6f826b772a8d4c0394209441e7d37cbbb967ae9c7e0e8134365c9ee676" +checksum = "90fcb95eef784c2ac79119d1dd819e162b5da872ce6f3c3abe1e8ca1c082f72b" dependencies = [ "siphasher", ] -[[package]] -name = "pict-rs" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "01bddf6e5b03b80b35dfcbd0d52905a7f3c60723f2f079f3a4d4442ac8ef09a8" -dependencies = [ - "actix-form-data", - "actix-rt", - "actix-server", - "actix-web", - "anyhow", - "async-trait", - "awc", - "base64 0.21.2", - "clap", - "color-eyre", - "config", - "console-subscriber", - "dashmap", - "futures-util", - "hex", - "md-5", - "mime", - "num_cpus", - "once_cell", - "opentelemetry 0.19.0", - "opentelemetry-otlp", - "pin-project-lite", - "quick-xml 0.29.0", - "rusty-s3", - "serde", - "serde_cbor", - "serde_json", - "serde_urlencoded", - "sha2", - "sled", - "storage-path-generator", - "thiserror", - "time 0.3.15", - "tokio", - "tokio-util", - "toml 0.7.4", - "tracing", - "tracing-actix-web", - "tracing-awc", - "tracing-error", - "tracing-futures", - "tracing-log", - "tracing-opentelemetry 0.19.0", - "tracing-subscriber", - "url", - "uuid", -] - [[package]] name = "pin-project" -version = "1.0.12" +version = "1.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ad29a609b6bcd67fee905812e544992d216af9d755757c05ed2d0e15a74c6ecc" +checksum = "b6bf43b791c5b9e34c3d182969b4abb522f9343702850a2e57f460d00d09b4b3" dependencies = [ "pin-project-internal", ] [[package]] name = "pin-project-internal" -version = "1.0.12" +version = "1.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "069bdb1e05adc7a8990dce9cc75370895fbe4e3d58b9b73bf1aee56359344a55" +checksum = "2f38a4412a78282e09a2cf38d195ea5420d15ba0602cb375210efbc877243965" dependencies = [ "proc-macro2", "quote", - "syn 1.0.103", + "syn 2.0.77", ] [[package]] name = "pin-project-lite" -version = "0.2.9" +version = "0.2.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e0a7ae3ac2f1173085d398531c705756c94a4c56843785df85a60c1a0afac116" +checksum = "bda66fc9667c18cb2758a2ac84d1167245054bcf85d5d1aaa6923f45801bdd02" [[package]] name = "pin-utils" @@ -3878,60 +3536,65 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" [[package]] -name = "pkg-config" -version = "0.3.25" +name = "pkcs1" +version = "0.7.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1df8c4ec4b0627e53bdf214615ad287367e482558cf84b109250b37464dc03ae" +checksum = 
"c8ffb9f10fa047879315e6625af03c164b16962a5368d724ed16323b68ace47f" +dependencies = [ + "der", + "pkcs8", + "spki", +] + +[[package]] +name = "pkcs8" +version = "0.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f950b2377845cebe5cf8b5165cb3cc1a5e0fa5cfa3e1f7f55707d8fd82e0a7b7" +dependencies = [ + "der", + "spki", +] + +[[package]] +name = "pkg-config" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d231b230927b5e4ad203db57bbcbee2802f6bce620b1e4a9024a07d94e2907ec" [[package]] name = "plist" -version = "1.4.3" +version = "1.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9bd9647b268a3d3e14ff09c23201133a62589c658db02bb7388c7246aafe0590" +checksum = "42cf17e9a1800f5f396bc67d193dc9411b59012a5876445ef450d449881e1016" dependencies = [ - "base64 0.21.2", - "indexmap 1.9.1", - "line-wrap", - "quick-xml 0.28.2", + "base64 0.22.1", + "indexmap 2.5.0", + "quick-xml 0.32.0", "serde", - "time 0.3.15", + "time", ] [[package]] name = "png" -version = "0.17.6" +version = "0.17.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f0e7f4c94ec26ff209cee506314212639d6c91b80afb82984819fafce9df01c" +checksum = "06e4b0d3d1312775e782c86c91a111aa1f910cbb65e1337f9975b5f9a554b5e1" dependencies = [ "bitflags 1.3.2", "crc32fast", + "fdeflate", "flate2", - "miniz_oxide 0.5.4", -] - -[[package]] -name = "polling" -version = "2.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4b2d323e8ca7996b3e23126511a523f7e62924d93ecd5ae73b333815b0eb3dce" -dependencies = [ - "autocfg", - "bitflags 1.3.2", - "cfg-if", - "concurrent-queue", - "libc", - "log", - "pin-project-lite", - "windows-sys 0.48.0", + "miniz_oxide 0.7.4", ] [[package]] name = "postgres-protocol" -version = "0.6.5" +version = "0.6.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "78b7fa9f396f51dffd61546fd8573ee20592287996568e6175ceb0f8699ad75d" +checksum = "acda0ebdebc28befa84bee35e651e4c5f09073d668c7aed4cf7e23c3cda84b23" dependencies = [ - "base64 0.21.2", + "base64 0.22.1", "byteorder", "bytes", "fallible-iterator", @@ -3945,9 +3608,9 @@ dependencies = [ [[package]] name = "postgres-types" -version = "0.2.4" +version = "0.2.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "73d946ec7d256b04dfadc4e6a3292324e6f417124750fc5c0950f981b703a0f1" +checksum = "f66ea23a2d0e5734297357705193335e0a957696f34bed2f2faefacb2fec336f" dependencies = [ "bytes", "fallible-iterator", @@ -3955,16 +3618,25 @@ dependencies = [ ] [[package]] -name = "ppv-lite86" -version = "0.2.16" +name = "powerfmt" +version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eb9f9e6e233e5c4a35559a617bf40a4ec447db2e84c20b55a6f83167b7e57872" +checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391" + +[[package]] +name = "ppv-lite86" +version = "0.2.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77957b295656769bb8ad2b6a6b09d897d94f05c41b069aede1fcdaa675eaea04" +dependencies = [ + "zerocopy", +] [[package]] name = "pq-sys" -version = "0.4.7" +version = "0.4.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3b845d6d8ec554f972a2c5298aad68953fd64e7441e846075450b44656a016d1" +checksum = "31c0052426df997c0cbd30789eb44ca097e3541717a7b8fa36b1c464ee7edebd" dependencies = [ "vcpkg", ] @@ -3976,77 +3648,100 @@ source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "925383efa346730478fb4838dbe9137d2a47675ad789c546d150a6e1dd4ab31c" [[package]] -name = "proc-macro2" -version = "1.0.64" +name = "predicates" +version = "3.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "78803b62cbf1f46fde80d7c0e803111524b9877184cfe7c3033659490ac7a7da" +checksum = "7e9086cc7640c29a356d1a29fd134380bee9d8f79a17410aa76e7ad295f42c97" +dependencies = [ + "anstyle", + "predicates-core", +] + +[[package]] +name = "predicates-core" +version = "1.0.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ae8177bee8e75d6846599c6b9ff679ed51e882816914eec639944d7c9aa11931" + +[[package]] +name = "predicates-tree" +version = "1.0.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41b740d195ed3166cd147c8047ec98db0e22ec019eb8eeb76d343b795304fb13" +dependencies = [ + "predicates-core", + "termtree", +] + +[[package]] +name = "pretty_assertions" +version = "1.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3ae130e2f271fbc2ac3a40fb1d07180839cdbbe443c7a27e1e3c13c5cac0116d" +dependencies = [ + "diff", + "yansi", +] + +[[package]] +name = "prettyplease" +version = "0.2.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "479cf940fbbb3426c32c5d5176f62ad57549a0bb84773423ba8be9d089f5faba" +dependencies = [ + "proc-macro2", + "syn 2.0.77", +] + +[[package]] +name = "proc-macro2" +version = "1.0.86" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5e719e8df665df0d1c8fbfd238015744736151d4445ec0836b8e628aae103b77" dependencies = [ "unicode-ident", ] [[package]] name = "procfs" -version = "0.14.2" +version = "0.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b1de8dacb0873f77e6aefc6d71e044761fcc68060290f5b1089fcdf84626bb69" +checksum = "731e0d9356b0c25f16f33b5be79b1c57b562f141ebfcdb0ad8ac2c13a24293b4" dependencies = [ - "bitflags 1.3.2", - "byteorder", + "bitflags 2.6.0", "hex", "lazy_static", - "rustix 0.36.5", + "procfs-core", + "rustix", +] + +[[package]] +name = "procfs-core" +version = "0.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2d3554923a69f4ce04c4a754260c338f505ce22642d3830e049a399fc2059a29" +dependencies = [ + "bitflags 2.6.0", + "hex", ] [[package]] name = "prometheus" -version = "0.13.3" +version = "0.13.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "449811d15fbdf5ceb5c1144416066429cf82316e2ec8ce0c1f6f8a02e7bbcf8c" +checksum = "3d33c28a30771f7f96db69893f78b857f7450d7e0237e9c8fc6427a81bae7ed1" dependencies = [ "cfg-if", "fnv", "lazy_static", "libc", "memchr", - "parking_lot 0.12.1", + "parking_lot", "procfs", "protobuf", "thiserror", ] -[[package]] -name = "prost" -version = "0.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "399c3c31cdec40583bb68f0b18403400d01ec4289c383aa047560439952c4dd7" -dependencies = [ - "bytes", - "prost-derive", -] - -[[package]] -name = "prost-derive" -version = "0.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7345d5f0e08c0536d7ac7229952590239e77abf0a0100a1b1d890add6ea96364" -dependencies = [ - "anyhow", - "itertools 0.10.5", - "proc-macro2", - "quote", - "syn 1.0.103", -] - -[[package]] -name = "prost-types" -version = "0.11.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"4dfaa718ad76a44b3415e6c4d53b17c8f99160dcb3a99b10470fce8ad43f6e3e" -dependencies = [ - "bytes", - "prost", -] - [[package]] name = "protobuf" version = "2.28.0" @@ -4055,84 +3750,109 @@ checksum = "106dd99e98437432fed6519dedecfade6a06a73bb7b2a1e019fdd2bee5778d94" [[package]] name = "psm" -version = "0.1.21" +version = "0.1.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5787f7cda34e3033a72192c018bc5883100330f362ef279a8cbccfce8bb4e874" +checksum = "aa37f80ca58604976033fae9515a8a2989fc13797d953f7c04fb8fa36a11f205" dependencies = [ "cc", ] -[[package]] -name = "pulldown-cmark" -version = "0.9.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77a1a2f1f0a7ecff9c31abbe177637be0e97a0aef46cf8738ece09327985d998" -dependencies = [ - "bitflags 1.3.2", - "memchr", - "unicase", -] - [[package]] name = "quanta" -version = "0.11.1" +version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a17e662a7a8291a865152364c20c7abc5e60486ab2001e8ec10b24862de0b9ab" +checksum = "8e5167a477619228a0b284fac2674e3c388cba90631d7b7de620e6f1fcd08da5" dependencies = [ "crossbeam-utils", "libc", - "mach2", "once_cell", "raw-cpuid", - "wasi 0.11.0+wasi-snapshot-preview1", + "wasi", "web-sys", "winapi", ] [[package]] name = "quick-xml" -version = "0.27.1" +version = "0.32.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ffc053f057dd768a56f62cd7e434c42c831d296968997e9ac1f76ea7c2d14c41" +checksum = "1d3a6e5838b60e0e8fa7a43f22ade549a37d61f8bdbe636d0d7816191de969c2" dependencies = [ "memchr", - "serde", ] [[package]] name = "quick-xml" -version = "0.28.2" +version = "0.36.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0ce5e73202a820a31f8a0ee32ada5e21029c81fd9e3ebf668a40832e4219d9d1" +checksum = "96a05e2e8efddfa51a84ca47cec303fac86c8541b686d37cac5efc0e094417bc" dependencies = [ "encoding_rs", "memchr", ] [[package]] -name = "quick-xml" -version = "0.29.0" +name = "quinn" +version = "0.11.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "81b9228215d82c7b61490fec1de287136b5de6f5700f6e58ea9ad61a7964ca51" +checksum = "8c7c5fdde3cdae7203427dc4f0a68fe0ed09833edc525a03456b153b79828684" dependencies = [ - "memchr", - "serde", + "bytes", + "pin-project-lite", + "quinn-proto", + "quinn-udp", + "rustc-hash 2.0.0", + "rustls 0.23.14", + "socket2", + "thiserror", + "tokio", + "tracing", +] + +[[package]] +name = "quinn-proto" +version = "0.11.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fadfaed2cd7f389d0161bb73eeb07b7b78f8691047a6f3e73caaeae55310a4a6" +dependencies = [ + "bytes", + "rand", + "ring", + "rustc-hash 2.0.0", + "rustls 0.23.14", + "slab", + "thiserror", + "tinyvec", + "tracing", +] + +[[package]] +name = "quinn-udp" +version = "0.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4fe68c2e9e1a1234e218683dbdf9f9dfcb094113c5ac2b938dfcb9bab4c4140b" +dependencies = [ + "libc", + "once_cell", + "socket2", + "tracing", + "windows-sys 0.59.0", ] [[package]] name = "quote" -version = "1.0.29" +version = "1.0.37" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "573015e8ab27661678357f27dc26460738fd2b6c86e46f386fde94cb5d913105" +checksum = "b5b9d34b8991d19d98081b46eacdd8eb58c6f2b201139f7c5f643cc155a633af" dependencies = [ "proc-macro2", ] [[package]] name = "quoted_printable" -version = "0.4.8" +version = "0.5.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a3866219251662ec3b26fc217e3e05bf9c4f84325234dfb96bf0bf840889e49" +checksum = "640c9bd8497b02465aeef5375144c26062e0dcd5939dfcbb0f5db76cb8c17c73" [[package]] name = "rand" @@ -4166,43 +3886,43 @@ dependencies = [ [[package]] name = "raw-cpuid" -version = "10.7.0" +version = "11.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c297679cb867470fa8c9f67dbba74a78d78e3e98d7cf2b08d6d71540f797332" +checksum = "cb9ee317cfe3fbd54b36a511efc1edd42e216903c9cd575e686dd68a2ba90d8d" dependencies = [ - "bitflags 1.3.2", + "bitflags 2.6.0", ] [[package]] name = "readonly" -version = "0.2.8" +version = "0.2.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eb656d27c22b5c47154452686cae5e096f12e124daacb36a0bfcb32dbebb39e3" +checksum = "a25d631e41bfb5fdcde1d4e2215f62f7f0afa3ff11e26563765bd6ea1d229aeb" dependencies = [ "proc-macro2", "quote", - "syn 2.0.31", + "syn 2.0.77", ] [[package]] name = "redox_syscall" -version = "0.2.16" +version = "0.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fb5a58c1855b4b6819d59012155603f0b22ad30cad752600aadfcb695265519a" +checksum = "0884ad60e090bf1345b93da0a5de8923c93884cd03f40dfcfddd3b4bee661853" dependencies = [ - "bitflags 1.3.2", + "bitflags 2.6.0", ] [[package]] name = "regex" -version = "1.9.1" +version = "1.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b2eae68fc220f7cf2532e4494aded17545fce192d59cd996e0fe7887f4ceb575" +checksum = "38200e5ee88914975b69f657f0801b6f6dccafd44fd9326302a4aaeecfacb1d8" dependencies = [ "aho-corasick", "memchr", - "regex-automata 0.3.2", - "regex-syntax 0.7.3", + "regex-automata 0.4.8", + "regex-syntax 0.8.5", ] [[package]] @@ -4211,72 +3931,114 @@ version = "0.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132" dependencies = [ - "regex-syntax 0.6.27", + "regex-syntax 0.6.29", ] [[package]] name = "regex-automata" -version = "0.3.2" +version = "0.4.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "83d3daa6976cffb758ec878f108ba0e062a45b2d6ca3a2cca965338855476caf" +checksum = "368758f23274712b504848e9d5a6f010445cc8b87a7cdb4d7cbee666c1288da3" dependencies = [ "aho-corasick", "memchr", - "regex-syntax 0.7.3", + "regex-syntax 0.8.5", ] [[package]] -name = "regex-syntax" -version = "0.6.27" +name = "regex-lite" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a3f87b73ce11b1619a3c6332f45341e0047173771e8b8b73f87bfeefb7b56244" +checksum = "53a49587ad06b26609c52e423de037e7f57f20d53535d66e08c695f347df952a" [[package]] name = "regex-syntax" -version = "0.7.3" +version = "0.6.29" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2ab07dc67230e4a4718e70fd5c20055a4334b121f1f9db8fe63ef39ce9b8c846" +checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1" [[package]] -name = "remove_dir_all" -version = "0.5.3" +name = "regex-syntax" +version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3acd125665422973a33ac9d3dd2df85edad0f4ae9b00dafb1a05e43a9f5ef8e7" -dependencies = [ - "winapi", -] +checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c" [[package]] name = "reqwest" -version = "0.11.18" +version = "0.11.27" source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "cde824a14b7c14f85caff81225f411faacc04a2013f41670f41443742b1c1c55" +checksum = "dd67538700a17451e7cba03ac727fb961abb7607553461627b97de0b89cf4a62" dependencies = [ - "async-compression", - "base64 0.21.2", + "base64 0.21.7", "bytes", "encoding_rs", "futures-core", "futures-util", "h2", - "http", - "http-body", - "hyper", - "hyper-tls", + "http 0.2.12", + "http-body 0.4.6", + "hyper 0.14.30", + "hyper-rustls 0.24.2", "ipnet", "js-sys", "log", "mime", - "mime_guess", - "native-tls", "once_cell", "percent-encoding", "pin-project-lite", + "rustls 0.21.12", + "rustls-pemfile 1.0.4", "serde", "serde_json", "serde_urlencoded", + "sync_wrapper 0.1.2", + "system-configuration", "tokio", - "tokio-native-tls", + "tokio-rustls 0.24.1", + "tower-service", + "url", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", + "webpki-roots 0.25.4", + "winreg", +] + +[[package]] +name = "reqwest" +version = "0.12.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f713147fbe92361e52392c73b8c9e48c04c6625bce969ef54dc901e58e042a7b" +dependencies = [ + "async-compression", + "base64 0.22.1", + "bytes", + "futures-channel", + "futures-core", + "futures-util", + "http 1.1.0", + "http-body 1.0.1", + "http-body-util", + "hyper 1.4.1", + "hyper-rustls 0.27.3", + "hyper-util", + "ipnet", + "js-sys", + "log", + "mime", + "once_cell", + "percent-encoding", + "pin-project-lite", + "quinn", + "rustls 0.23.14", + "rustls-pemfile 2.1.3", + "rustls-pki-types", + "serde", + "serde_json", + "serde_urlencoded", + "sync_wrapper 1.0.1", + "tokio", + "tokio-rustls 0.26.0", "tokio-util", "tower-service", "url", @@ -4284,40 +4046,39 @@ dependencies = [ "wasm-bindgen-futures", "wasm-streams", "web-sys", - "winreg", + "webpki-roots 0.26.5", + "windows-registry", ] [[package]] name = "reqwest-middleware" -version = "0.2.2" +version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4531c89d50effe1fac90d095c8b133c20c5c714204feee0bfc3fd158e784209d" +checksum = "562ceb5a604d3f7c885a792d42c199fd8af239d0a51b2fa6a78aafa092452b04" dependencies = [ "anyhow", "async-trait", - "http", - "reqwest", + "http 1.1.0", + "reqwest 0.12.8", "serde", - "task-local-extensions", "thiserror", + "tower-service", ] [[package]] name = "reqwest-tracing" -version = "0.4.5" +version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b97ad83c2fc18113346b7158d79732242002427c30f620fa817c1f32901e0a8" +checksum = "bfdd9bfa64c72233d8dd99ab7883efcdefe9e16d46488ecb9228b71a2e2ceb45" dependencies = [ "anyhow", "async-trait", "getrandom", - "matchit 0.7.0", - "opentelemetry 0.16.0", - "reqwest", + "http 1.1.0", + "matchit", + "reqwest 0.12.8", "reqwest-middleware", - "task-local-extensions", "tracing", - "tracing-opentelemetry 0.16.0", ] [[package]] @@ -4328,43 +4089,26 @@ checksum = "4389f1d5789befaf6029ebd9f7dac4af7f7e3d61b69d4f30e2ac02b57e7712b0" [[package]] name = "rgb" -version = "0.8.34" +version = "0.8.50" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3603b7d71ca82644f79b5a06d1220e9a58ede60bd32255f698cb1af8838b8db3" +checksum = "57397d16646700483b67d2dd6511d79318f9d057fdbd21a4066aeac8b41d310a" dependencies = [ "bytemuck", ] -[[package]] -name = "riff" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b9b1a3d5f46d53f4a3478e2be4a5a5ce5108ea58b100dcd139830eae7f79a3a1" - [[package]] name = "ring" -version = "0.16.20" +version = "0.17.8" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "3053cf52e236a3ed746dfc745aa9cacf1b791d846bdaf412f60a8d7d6e17c8fc" +checksum = "c17fa4cb658e3583423e915b9f3acc01cceaee1860e33d59ebae66adc3a2dc0d" dependencies = [ "cc", + "cfg-if", + "getrandom", "libc", - "once_cell", "spin", "untrusted", - "web-sys", - "winapi", -] - -[[package]] -name = "ron" -version = "0.7.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "88073939a61e5b7680558e6be56b419e208420c2adb92be54921fa6b72283f1a" -dependencies = [ - "base64 0.13.1", - "bitflags 1.3.2", - "serde", + "windows-sys 0.52.0", ] [[package]] @@ -4388,140 +4132,163 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2f8c01b9158de3aa5a7ac041a41c0e854d7adc3e473e7d7e2143eb5432bc5ba2" [[package]] -name = "rss" -version = "2.0.4" +name = "rsa" +version = "0.9.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9acf62e0f3f4b52f61d3a12d6279e3f0b90d4811b0ae888eabdf61a2e7c03a95" +checksum = "5d0e5124fcb30e76a7e79bfee683a2746db83784b86289f6251b54b7950a0dfc" +dependencies = [ + "const-oid", + "digest", + "num-bigint-dig", + "num-integer", + "num-traits", + "pkcs1", + "pkcs8", + "rand_core", + "signature", + "spki", + "subtle", + "zeroize", +] + +[[package]] +name = "rss" +version = "2.0.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "27e92048f840d98c6d6dd870af9101610ea9ff413f11f1bcebf4f4c31d96d957" dependencies = [ "atom_syndication", "derive_builder", "never", - "quick-xml 0.28.2", -] - -[[package]] -name = "rust-ini" -version = "0.18.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f6d5f2436026b4f6e79dc829837d467cc7e9a55ee40e750d716713540715a2df" -dependencies = [ - "cfg-if", - "ordered-multimap", + "quick-xml 0.36.1", ] [[package]] name = "rustc-demangle" -version = "0.1.21" +version = "0.1.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7ef03e0a2b150c7a90d01faf6254c9c48a41e95fb2a8c2ac1c6f0d2b9aefc342" +checksum = "719b953e2095829ee67db738b3bfa9fa368c94900df327b3f07fe6e794d2fe1f" + +[[package]] +name = "rustc-hash" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2" + +[[package]] +name = "rustc-hash" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "583034fd73374156e66797ed8e5b0d5690409c9226b22d87cb7f19821c05d152" [[package]] name = "rustc_version" -version = "0.4.0" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366" +checksum = "cfcb3a22ef46e85b45de6ee7e79d063319ebb6594faafcf1c225ea92ab6e9b92" dependencies = [ "semver", ] [[package]] name = "rustix" -version = "0.36.5" +version = "0.38.37" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a3807b5d10909833d3e9acd1eb5fb988f79376ff10fce42937de71a449c4c588" +checksum = "8acb788b847c24f28525660c4d7758620a7210875711f79e7f663cc152726811" dependencies = [ - "bitflags 1.3.2", - "errno 0.2.8", - "io-lifetimes", + "bitflags 2.6.0", + "errno", "libc", - "linux-raw-sys 0.1.4", - "windows-sys 0.42.0", -] - -[[package]] -name = "rustix" -version = "0.37.22" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8818fa822adcc98b18fedbb3632a6a33213c070556b5aa7c4c8cc21cff565c4c" -dependencies = [ - 
"bitflags 1.3.2", - "errno 0.3.1", - "io-lifetimes", - "libc", - "linux-raw-sys 0.3.8", - "windows-sys 0.48.0", + "linux-raw-sys", + "windows-sys 0.52.0", ] [[package]] name = "rustls" -version = "0.20.7" +version = "0.21.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "539a2bfe908f471bfa933876bd1eb6a19cf2176d375f82ef7f99530a40e48c2c" +checksum = "3f56a14d1f48b391359b22f731fd4bd7e43c97f3c50eee276f3aa09c94784d3e" dependencies = [ "log", "ring", + "rustls-webpki 0.101.7", "sct", - "webpki", ] [[package]] name = "rustls" -version = "0.21.3" +version = "0.23.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b19faa85ecb5197342b54f987b142fb3e30d0c90da40f80ef4fa9a726e6676ed" +checksum = "415d9944693cb90382053259f89fbb077ea730ad7273047ec63b19bc9b160ba8" dependencies = [ + "aws-lc-rs", "log", + "once_cell", "ring", - "rustls-webpki", - "sct", + "rustls-pki-types", + "rustls-webpki 0.102.8", + "subtle", + "zeroize", ] +[[package]] +name = "rustls-pemfile" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1c74cae0a4cf6ccbbf5f359f08efdf8ee7e1dc532573bf0db71968cb56b1448c" +dependencies = [ + "base64 0.21.7", +] + +[[package]] +name = "rustls-pemfile" +version = "2.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "196fe16b00e106300d3e45ecfcb764fa292a535d7326a29a5875c579c7417425" +dependencies = [ + "base64 0.22.1", + "rustls-pki-types", +] + +[[package]] +name = "rustls-pki-types" +version = "1.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0e696e35370c65c9c541198af4543ccd580cf17fc25d8e05c5a242b202488c55" + [[package]] name = "rustls-webpki" -version = "0.101.1" +version = "0.101.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "15f36a6828982f422756984e47912a7a51dcbc2a197aa791158f8ca61cd8204e" +checksum = "8b6275d1ee7a1cd780b64aca7726599a1dbc893b1e64144529e55c3c2f745765" dependencies = [ "ring", "untrusted", ] [[package]] -name = "rustversion" -version = "1.0.9" +name = "rustls-webpki" +version = "0.102.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97477e48b4cf8603ad5f7aaf897467cf42ab4218a38ef76fb14c2d6773a6d6a8" - -[[package]] -name = "rusty-s3" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c92776b0db0fea6d7cc3abb9d0d745814c71124471a3bfd84b1265a28c06130" +checksum = "64ca1bc8749bd4cf37b5ce386cc146580777b4e8572c7b97baf22c83f444bee9" dependencies = [ - "base64 0.21.2", - "hmac", - "md-5", - "percent-encoding", - "quick-xml 0.27.1", - "serde", - "serde_json", - "sha2", - "time 0.3.15", - "url", - "zeroize", + "aws-lc-rs", + "ring", + "rustls-pki-types", + "untrusted", ] [[package]] -name = "ryu" -version = "1.0.11" +name = "rustversion" +version = "1.0.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4501abdff3ae82a1c1b477a17252eb69cee9e66eb915c1abaa4f44d873df9f09" +checksum = "955d28af4278de8121b7ebeb796b6a45735dc01436d898801014aced2773a3d6" [[package]] -name = "safemem" -version = "0.3.3" +name = "ryu" +version = "1.0.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ef703b7cb59335eae2eb93ceb664c0eb7ea6bf567079d843e09420219668e072" +checksum = "f3cb5ba0dc43242ce17de99c180e96db90b235b8a9fdc9543c96d2209116bd9f" [[package]] name = "same-file" @@ -4533,22 +4300,12 @@ dependencies = [ ] [[package]] -name = "schannel" -version = "0.1.20" +name = 
"scc" +version = "2.1.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "88d6731146462ea25d9244b2ed5fd1d716d25c52e4d54aa4fb0f3c4e9854dbe2" +checksum = "0c947adb109a8afce5fc9c7bf951f87f146e9147b3a6a58413105628fb1d1e66" dependencies = [ - "lazy_static", - "windows-sys 0.36.1", -] - -[[package]] -name = "scheduled-thread-pool" -version = "0.2.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3cbc66816425a074528352f5789333ecff06ca41b36b0b0efdfbb29edc391a19" -dependencies = [ - "parking_lot 0.12.1", + "sdd", ] [[package]] @@ -4563,48 +4320,25 @@ dependencies = [ [[package]] name = "scopeguard" -version = "1.1.0" +version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd" - -[[package]] -name = "scratch" -version = "1.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c8132065adcfd6e02db789d9285a0deb2f3fcb04002865ab67d5fb103533898" +checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" [[package]] name = "sct" -version = "0.7.0" +version = "0.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d53dcdb7c9f8158937a7981b48accfd39a43af418591a5d008c7b22b5e1b7ca4" +checksum = "da046153aa2352493d6cb7da4b6e5c0c057d8a1d0a9aa8560baffdd945acd414" dependencies = [ "ring", "untrusted", ] [[package]] -name = "security-framework" -version = "2.7.0" +name = "sdd" +version = "3.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2bc1bb97804af6631813c55739f771071e0f2ed33ee20b68c86ec505d906356c" -dependencies = [ - "bitflags 1.3.2", - "core-foundation", - "core-foundation-sys", - "libc", - "security-framework-sys", -] - -[[package]] -name = "security-framework-sys" -version = "2.6.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0160a13a177a45bfb43ce71c01580998474f556ad854dcbca936dd2841a5c556" -dependencies = [ - "core-foundation-sys", - "libc", -] +checksum = "60a7b59a5d9b0099720b417b6325d91a52cbf5b3dcb5041d864be53eefa58abc" [[package]] name = "select" @@ -4613,75 +4347,54 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6f9da09dc3f4dfdb6374cbffff7a2cffcec316874d4429899eefdc97b3b94dcd" dependencies = [ "bit-set", - "html5ever", - "markup5ever_rcdom", + "html5ever 0.26.0", + "markup5ever_rcdom 0.2.0", ] [[package]] name = "semver" -version = "1.0.14" +version = "1.0.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e25dfac463d778e353db5be2449d1cce89bd6fd23c9f1ea21310ce6e5a1b29c4" -dependencies = [ - "serde", -] +checksum = "61697e0a1c7e512e84a621326239844a24d8207b4669b41bc18b32ea5cbf988b" [[package]] name = "serde" -version = "1.0.171" +version = "1.0.210" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "30e27d1e4fd7659406c492fd6cfaf2066ba8773de45ca75e855590f856dc34a9" +checksum = "c8e3592472072e6e22e0a54d5904d9febf8508f65fb8552499a1abc7d1078c3a" dependencies = [ "serde_derive", ] -[[package]] -name = "serde_cbor" -version = "0.11.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2bef2ebfde456fb76bbcf9f59315333decc4fda0b2b44b420243c11e0f5ec1f5" -dependencies = [ - "half", - "serde", -] - [[package]] name = "serde_derive" -version = "1.0.171" +version = "1.0.210" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"389894603bd18c46fa56231694f8d827779c0951a667087194cf9de94ed24682" +checksum = "243902eda00fad750862fc144cea25caca5e20d615af0a81bee94ca738f1df1f" dependencies = [ "proc-macro2", "quote", - "syn 2.0.31", + "syn 2.0.77", ] [[package]] name = "serde_json" -version = "1.0.100" +version = "1.0.128" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0f1e14e89be7aa4c4b78bdbdc9eb5bf8517829a600ae8eaa39a6e1d960b5185c" +checksum = "6ff5456707a1de34e7e37f2a6fd3d3f808c318259cbd01ab6377795054b483d8" dependencies = [ - "indexmap 2.0.0", + "indexmap 2.5.0", "itoa", + "memchr", "ryu", "serde", ] -[[package]] -name = "serde_plain" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d6018081315db179d0ce57b1fe4b62a12a0028c9cf9bbef868c9cf477b3c34ae" -dependencies = [ - "serde", -] - [[package]] name = "serde_spanned" -version = "0.6.2" +version = "0.6.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "93107647184f6027e3b7dcb2e11034cf95ffa1e3a682c67951963ac69c1c007d" +checksum = "eb5b1b31579f3811bf615c144393417496f152e12ac8b7663bf664f4a815306d" dependencies = [ "serde", ] @@ -4700,62 +4413,64 @@ dependencies = [ [[package]] name = "serde_with" -version = "3.0.0" +version = "3.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9f02d8aa6e3c385bf084924f660ce2a3a6bd333ba55b35e8590b321f35d88513" +checksum = "8e28bdad6db2b8340e449f7108f020b3b092e8583a9e3fb82713e1d4e71fe817" dependencies = [ - "base64 0.21.2", + "base64 0.22.1", "chrono", "hex", - "indexmap 1.9.1", + "indexmap 1.9.3", + "indexmap 2.5.0", "serde", + "serde_derive", "serde_json", "serde_with_macros", - "time 0.3.15", + "time", ] [[package]] name = "serde_with_macros" -version = "3.0.0" +version = "3.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "edc7d5d3932fb12ce722ee5e64dd38c504efba37567f0c402f6ca728c3b8b070" +checksum = "9d846214a9854ef724f3da161b426242d8de7c1fc7de2f89bb1efcb154dca79d" dependencies = [ - "darling 0.20.1", + "darling 0.20.10", "proc-macro2", "quote", - "syn 2.0.31", + "syn 2.0.77", ] [[package]] name = "serial_test" -version = "2.0.0" +version = "3.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0e56dd856803e253c8f298af3f4d7eb0ae5e23a737252cd90bb4f3b435033b2d" +checksum = "4b4b487fe2acf240a021cf57c6b2b4903b1e78ca0ecd862a71b71d2a51fed77d" dependencies = [ - "dashmap", "futures", - "lazy_static", "log", - "parking_lot 0.12.1", + "once_cell", + "parking_lot", + "scc", "serial_test_derive", ] [[package]] name = "serial_test_derive" -version = "2.0.0" +version = "3.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "91d129178576168c589c9ec973feedf7d3126c01ac2bf08795109aa35b69fb8f" +checksum = "82fe9db325bcef1fbcde82e078a5cc4efdf787e96b3b9cf45b50b529f2083d67" dependencies = [ "proc-macro2", "quote", - "syn 2.0.31", + "syn 2.0.77", ] [[package]] name = "sha1" -version = "0.10.5" +version = "0.10.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f04293dc80c3993519f2d7f6f511707ee7094fe0c6d3406feb330cdb3540eba3" +checksum = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba" dependencies = [ "cfg-if", "cpufeatures", @@ -4764,9 +4479,9 @@ dependencies = [ [[package]] name = "sha2" -version = "0.10.7" +version = "0.10.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "479fb9d862239e610720565ca91403019f2f00410f1864c5aa7479b950a76ed8" 
+checksum = "793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8" dependencies = [ "cfg-if", "cpufeatures", @@ -4775,22 +4490,44 @@ dependencies = [ [[package]] name = "sharded-slab" -version = "0.1.4" +version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "900fba806f70c630b0a382d0d825e17a0f19fcd059a2ade1ff237bcddf446b31" +checksum = "f40ca3c46823713e0d4209592e8d6e826aa57e928f09752619fc696c499637f6" dependencies = [ "lazy_static", ] [[package]] -name = "signal-hook-registry" -version = "1.4.0" +name = "shlex" +version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e51e73328dc4ac0c7ccbda3a494dfa03df1de2f46018127f60c693f2648455b0" +checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" + +[[package]] +name = "signal-hook-registry" +version = "1.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a9e9e0b4211b72e7b8b6e85c807d36c212bdb33ea8587f7569562a84df5465b1" dependencies = [ "libc", ] +[[package]] +name = "signature" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77549399552de45a898a580c1b41d445bf730df867cc44e6c0233bbc4b8329de" +dependencies = [ + "digest", + "rand_core", +] + +[[package]] +name = "simd-adler32" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d66dc143e6b11c1eddc06d5c423cfc97062865baf299914ab64caa38182078fe" + [[package]] name = "simple_asn1" version = "0.6.2" @@ -4800,70 +4537,39 @@ dependencies = [ "num-bigint", "num-traits", "thiserror", - "time 0.3.15", + "time", ] [[package]] name = "siphasher" -version = "0.3.10" +version = "0.3.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7bd3e3206899af3f8b12af284fafc038cc1dc2b41d1b89dd17297221c5d225de" +checksum = "38b58827f4464d87d377d175e90bf58eb00fd8716ff0a62f80356b5e61555d0d" [[package]] name = "sitemap-rs" -version = "0.2.0" +version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "95b58125f0ab4317b5ba3cdc1f60696e47958760e356874c759334fa56ae1596" +checksum = "88cc73a9aac975541c9054e74ceae8d8ee85edc89a322404c275c1d100fffa51" dependencies = [ "chrono", "xml-builder", ] -[[package]] -name = "skeptic" -version = "0.13.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "16d23b015676c90a0f01c197bfdc786c20342c73a0afdda9025adb0bc42940a8" -dependencies = [ - "bytecount", - "cargo_metadata", - "error-chain", - "glob", - "pulldown-cmark", - "tempfile", - "walkdir", -] - [[package]] name = "slab" -version = "0.4.7" +version = "0.4.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4614a76b2a8be0058caa9dbbaf66d988527d86d003c11a94fbd335d7661edcef" +checksum = "8f92a496fb766b417c996b9c5e57daf2f7ad3b0bebe1ccfca4856390e3d3bb67" dependencies = [ "autocfg", ] -[[package]] -name = "sled" -version = "0.34.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f96b4737c2ce5987354855aed3797279def4ebf734436c6aa4552cf8e169935" -dependencies = [ - "crc32fast", - "crossbeam-epoch", - "crossbeam-utils", - "fs2", - "fxhash", - "libc", - "log", - "parking_lot 0.11.2", -] - [[package]] name = "smallvec" -version = "1.10.0" +version = "1.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a507befe795404456341dfab10cef66ead4c041f62b8b11bbb92bffe5d0953e0" +checksum = 
"3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67" [[package]] name = "smart-default" @@ -4873,63 +4579,69 @@ checksum = "0eb01866308440fc64d6c44d9e86c5cc17adfe33c4d6eed55da9145044d0ffc1" dependencies = [ "proc-macro2", "quote", - "syn 2.0.31", + "syn 2.0.77", ] [[package]] name = "socket2" -version = "0.4.9" +version = "0.5.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "64a4a911eed85daf18834cfaa86a79b7d266ff93ff5ba14005426219480ed662" +checksum = "ce305eb0b4296696835b71df73eb912e0f1ffd2556a501fcede6e0c50349191c" dependencies = [ "libc", - "winapi", -] - -[[package]] -name = "socket2" -version = "0.5.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2538b18701741680e0322a2302176d3253a35388e2e62f172f64f4f16605f877" -dependencies = [ - "libc", - "windows-sys 0.48.0", + "windows-sys 0.52.0", ] [[package]] name = "spin" -version = "0.5.2" +version = "0.9.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e63cff320ae2c57904679ba7cb63280a3dc4613885beafb148ee7bf9aa9042d" +checksum = "6980e8d7511241f8acf4aebddbb1ff938df5eebe98691418c4468d0b72a96a67" + +[[package]] +name = "spki" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d91ed6c858b01f942cd56b37a94b3e0a1798290327d1236e4d9cf4eaca44d29d" +dependencies = [ + "base64ct", + "der", +] + +[[package]] +name = "stable_deref_trait" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" [[package]] name = "stacker" -version = "0.1.15" +version = "0.1.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c886bd4480155fd3ef527d45e9ac8dd7118a898a46530b7b94c3e21866259fce" +checksum = "799c883d55abdb5e98af1a7b3f23b9b6de8ecada0ecac058672d7635eb48ca7b" dependencies = [ "cc", "cfg-if", "libc", "psm", - "winapi", + "windows-sys 0.59.0", ] [[package]] -name = "storage-path-generator" -version = "0.1.1" +name = "strfmt" +version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f11d35dae9818c4313649da4a97c8329e29357a7fe584526c1d78f5b63ef836" +checksum = "7a8348af2d9fc3258c8733b8d9d8db2e56f54b2363a4b5b81585c7875ed65e65" [[package]] name = "string_cache" -version = "0.8.4" +version = "0.8.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "213494b7a2b503146286049378ce02b482200519accc31872ee8be91fa820a08" +checksum = "f91138e76242f575eb1d3b38b4f1362f10d3a43f47d182a5b359af488a02293b" dependencies = [ "new_debug_unreachable", "once_cell", - "parking_lot 0.12.1", + "parking_lot", "phf_shared 0.10.0", "precomputed-hash", "serde", @@ -4941,7 +4653,7 @@ version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6bb30289b722be4ff74a408c3cc27edeaad656e06cb1fe8fa9231fa59c728988" dependencies = [ - "phf_generator", + "phf_generator 0.10.0", "phf_shared 0.10.0", "proc-macro2", "quote", @@ -4949,12 +4661,13 @@ dependencies = [ [[package]] name = "stringprep" -version = "0.1.2" +version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ee348cb74b87454fff4b551cbf727025810a004f88aeacae7f85b87f4e9a1c1" +checksum = "7b4df3d392d81bd458a8a621b8bffbd2302a12ffe288a9d931670948749463b1" dependencies = [ "unicode-bidi", "unicode-normalization", + "unicode-properties", ] [[package]] @@ -4970,35 +4683,44 @@ source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623" [[package]] -name = "strum" -version = "0.25.0" +name = "strsim" +version = "0.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "290d54ea6f91c969195bdbcd7442c8c2a2ba87da8bf60a7ee86a235d4bc1e125" +checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" + +[[package]] +name = "strum" +version = "0.26.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8fec0f0aef304996cf250b31b5a10dee7980c85da9d759361292b8bca5a18f06" +dependencies = [ + "strum_macros", +] [[package]] name = "strum_macros" -version = "0.25.1" +version = "0.26.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6069ca09d878a33f883cc06aaa9718ede171841d3832450354410b718b097232" +checksum = "4c6bee85a5a24955dc440386795aa378cd9cf82acd5f764469152d2270e581be" dependencies = [ - "heck", + "heck 0.5.0", "proc-macro2", "quote", "rustversion", - "syn 2.0.31", + "syn 2.0.77", ] [[package]] name = "subtle" -version = "2.4.1" +version = "2.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6bdef32e8150c2a081110b42772ffe7d7c9032b606bc226c8260fd97e0976601" +checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" [[package]] name = "syn" -version = "1.0.103" +version = "1.0.109" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a864042229133ada95abf3b54fdc62ef5ccabe9515b64717bcb9a1919e59445d" +checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" dependencies = [ "proc-macro2", "quote", @@ -5007,9 +4729,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.31" +version = "2.0.77" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "718fa2415bcb8d8bd775917a1bf12a7931b6dfa890753378538118181e0cb398" +checksum = "9f35bcdf61fd8e7be6caf75f429fdca8beb3ed76584befb503b1569faee373ed" dependencies = [ "proc-macro2", "quote", @@ -5018,25 +4740,44 @@ dependencies = [ [[package]] name = "sync_wrapper" -version = "0.1.1" +version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "20518fe4a4c9acf048008599e464deb21beeae3d3578418951a189c235a7a9a8" +checksum = "2047c6ded9c721764247e62cd3b03c09ffc529b2ba5b10ec482ae507a4a70160" + +[[package]] +name = "sync_wrapper" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a7065abeca94b6a8a577f9bd45aa0867a2238b74e8eb67cf10d492bc39351394" +dependencies = [ + "futures-core", +] + +[[package]] +name = "synstructure" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8af7666ab7b6390ab78131fb5b0fce11d6b7a6951602017c35fa82800708971" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.77", +] [[package]] name = "syntect" -version = "5.0.0" +version = "5.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c6c454c27d9d7d9a84c7803aaa3c50cd088d2906fe3c6e42da3209aa623576a8" +checksum = "874dcfa363995604333cf947ae9f751ca3af4522c60886774c4963943b4746b1" dependencies = [ "bincode", "bitflags 1.3.2", "fancy-regex", "flate2", "fnv", - "lazy_static", "once_cell", "plist", - "regex-syntax 0.6.27", + "regex-syntax 0.8.5", "serde", "serde_derive", "serde_json", @@ -5045,35 +4786,33 @@ dependencies = [ "yaml-rust", ] +[[package]] +name = "system-configuration" +version = "0.5.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba3a3adc5c275d719af8cb4272ea1c4a6d668a777f37e115f6d11ddbc1c8e0e7" +dependencies = [ + "bitflags 1.3.2", + "core-foundation", + "system-configuration-sys", +] + +[[package]] +name = "system-configuration-sys" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a75fb188eb626b924683e3b95e3a48e63551fcfb51949de2f06a9d91dbee93c9" +dependencies = [ + "core-foundation-sys", + "libc", +] + [[package]] name = "tagptr" version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7b2093cf4c8eb1e67749a6762251bc9cd836b6fc171623bd0a9d324d37af2417" -[[package]] -name = "task-local-extensions" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba323866e5d033818e3240feeb9f7db2c4296674e4d9e16b97b7bf8f490434e8" -dependencies = [ - "pin-utils", -] - -[[package]] -name = "tempfile" -version = "3.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5cdb1ef4eaeeaddc8fbd371e5017057064af0911902ef36b39801f67cc6d79e4" -dependencies = [ - "cfg-if", - "fastrand", - "libc", - "redox_syscall", - "remove_dir_all", - "winapi", -] - [[package]] name = "tendril" version = "0.4.3" @@ -5087,150 +4826,166 @@ dependencies = [ [[package]] name = "termcolor" -version = "1.1.3" +version = "1.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bab24d30b911b2376f3a13cc2cd443142f0c81dda04c118693e35b3835757755" +checksum = "06794f8f6c5c898b3275aebefa6b8a1cb24cd2c6c79397ab15774837a0bc5755" dependencies = [ "winapi-util", ] [[package]] -name = "thiserror" -version = "1.0.40" +name = "termtree" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "978c9a314bd8dc99be594bc3c175faaa9794be04a5a5e153caba6915336cebac" +checksum = "3369f5ac52d5eb6ab48c6b4ffdc8efbcad6b89c765749064ba298f2c68a16a76" + +[[package]] +name = "test-context" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6676ab8513edfd2601a108621103fdb45cac9098305ca25ec93f7023b06b05d9" +dependencies = [ + "futures", + "test-context-macros", +] + +[[package]] +name = "test-context-macros" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "78ea17a2dc368aeca6f554343ced1b1e31f76d63683fa8016e5844bd7a5144a1" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.77", +] + +[[package]] +name = "thiserror" +version = "1.0.63" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c0342370b38b6a11b6cc11d6a805569958d54cfa061a29969c3b5ce2ea405724" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.40" +version = "1.0.63" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f9456a42c5b0d803c8cd86e73dd7cc9edd429499f37a3550d286d5e86720569f" +checksum = "a4558b58466b9ad7ca0f102865eccc95938dca1a74a856f2b57b6629050da261" dependencies = [ "proc-macro2", "quote", - "syn 2.0.31", + "syn 2.0.77", ] [[package]] name = "thread_local" -version = "1.1.4" +version = "1.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5516c27b78311c50bf42c071425c560ac799b11c30b31f87e3081965fe5e0180" +checksum = "8b9ef9bad013ada3808854ceac7b46812a6465ba368859a37e2100283d2d719c" dependencies = [ + "cfg-if", "once_cell", ] [[package]] name = "time" -version = "0.1.44" +version = "0.3.36" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "6db9e6914ab8b1ae1c260a4ae7a49b6c5611b40328a735b21862567685e73255" -dependencies = [ - "libc", - "wasi 0.10.0+wasi-snapshot-preview1", - "winapi", -] - -[[package]] -name = "time" -version = "0.3.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d634a985c4d4238ec39cacaed2e7ae552fbd3c476b552c1deac3021b7d7eaf0c" +checksum = "5dfd88e563464686c916c7e46e623e520ddc6d79fa6641390f2e3fa86e83e885" dependencies = [ + "deranged", "itoa", - "libc", - "num_threads", + "num-conv", + "powerfmt", "serde", + "time-core", "time-macros", ] [[package]] -name = "time-macros" -version = "0.2.4" +name = "time-core" +version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "42657b1a6f4d817cda8e7a0ace261fe0cc946cf3a80314390b22cc61ae080792" +checksum = "ef927ca75afb808a4d64dd374f00a2adf8d0fcff8e7b184af886c3c87ec4a3f3" + +[[package]] +name = "time-macros" +version = "0.2.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f252a68540fde3a3877aeea552b832b40ab9a69e318efd078774a01ddee1ccf" +dependencies = [ + "num-conv", + "time-core", +] [[package]] name = "tinyjson" -version = "2.5.0" +version = "2.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b4b2f023483085707f4f0c1238a8a7baf8b502a1d427b9ed2a243884d3d687bb" +checksum = "9ab95735ea2c8fd51154d01e39cf13912a78071c2d89abc49a7ef102a7dd725a" + +[[package]] +name = "tinystr" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9117f5d4db391c1cf6927e7bea3db74b9a1c1add8f7eda9ffd5364f40f57b82f" +dependencies = [ + "displaydoc", + "zerovec", +] [[package]] name = "tinyvec" -version = "1.6.0" +version = "1.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87cc5ceb3875bb20c2890005a4e226a4651264a5c75edb2421b52861a0a0cb50" +checksum = "445e881f4f6d382d5f27c034e25eb92edd7c784ceab92a0937db7f2e9471b938" dependencies = [ "tinyvec_macros", ] [[package]] name = "tinyvec_macros" -version = "0.1.0" +version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cda74da7e1a664f795bb1f8a87ec406fb89a02522cf6e50620d016add6dbbf5c" +checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "tokio" -version = "1.29.1" +version = "1.40.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "532826ff75199d5833b9d2c5fe410f29235e25704ee5f0ef599fb51c21f4a4da" +checksum = "e2b070231665d27ad9ec9b8df639893f46727666c6767db40317fbe920a5d998" dependencies = [ - "autocfg", "backtrace", "bytes", "libc", "mio", - "num_cpus", - "parking_lot 0.12.1", + "parking_lot", "pin-project-lite", "signal-hook-registry", - "socket2 0.4.9", + "socket2", "tokio-macros", - "tracing", - "windows-sys 0.48.0", -] - -[[package]] -name = "tokio-io-timeout" -version = "1.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "30b74022ada614a1b4834de765f9bb43877f910cc8ce4be40e89042c9223a8bf" -dependencies = [ - "pin-project-lite", - "tokio", + "windows-sys 0.52.0", ] [[package]] name = "tokio-macros" -version = "2.1.0" +version = "2.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "630bdcf245f78637c13ec01ffae6187cca34625e8c63150d424b59e55af2675e" +checksum = "693d596312e88961bc67d7f1f97af8a70227d9f90c31bba5806eec004978d752" dependencies = [ "proc-macro2", "quote", - "syn 2.0.31", -] - -[[package]] 
-name = "tokio-native-tls" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f7d995660bd2b7f8c1568414c1126076c13fbb725c40112dc0120b78eb9b717b" -dependencies = [ - "native-tls", - "tokio", + "syn 2.0.77", ] [[package]] name = "tokio-postgres" -version = "0.7.8" +version = "0.7.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e89f6234aa8fd43779746012fcf53603cdb91fdd8399aa0de868c2d56b6dde1" +checksum = "3b5d3742945bc7d7f210693b0c58ae542c6fd47b17adbbda0885f3dcb34a6bdb" dependencies = [ "async-trait", "byteorder", @@ -5239,40 +4994,31 @@ dependencies = [ "futures-channel", "futures-util", "log", - "parking_lot 0.12.1", + "parking_lot", "percent-encoding", - "phf 0.11.1", + "phf 0.11.2", "pin-project-lite", "postgres-protocol", "postgres-types", - "socket2 0.5.3", + "rand", + "socket2", "tokio", "tokio-util", + "whoami", ] [[package]] name = "tokio-postgres-rustls" -version = "0.10.0" +version = "0.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dd5831152cb0d3f79ef5523b357319ba154795d64c7078b2daa95a803b54057f" +checksum = "04fb792ccd6bbcd4bba408eb8a292f70fc4a3589e5d793626f45190e6454b6ab" dependencies = [ - "futures", "ring", - "rustls 0.21.3", + "rustls 0.23.14", "tokio", "tokio-postgres", - "tokio-rustls 0.24.1", -] - -[[package]] -name = "tokio-rustls" -version = "0.23.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c43ee83903113e03984cb9e5cebe6c04a5116269e900e3ddba8f068a62adda59" -dependencies = [ - "rustls 0.20.7", - "tokio", - "webpki", + "tokio-rustls 0.26.0", + "x509-certificate", ] [[package]] @@ -5281,49 +5027,39 @@ version = "0.24.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c28327cf380ac148141087fbfb9de9d7bd4e84ab5d2c28fbc911d753de8a7081" dependencies = [ - "rustls 0.21.3", + "rustls 0.21.12", "tokio", ] [[package]] -name = "tokio-stream" -version = "0.1.11" +name = "tokio-rustls" +version = "0.26.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d660770404473ccd7bc9f8b28494a811bc18542b915c0855c51e8f419d5223ce" +checksum = "0c7bc40d0e5a97695bb96e27995cd3a08538541b0a846f65bba7a359f36700d4" dependencies = [ - "futures-core", - "pin-project-lite", + "rustls 0.23.14", + "rustls-pki-types", "tokio", ] [[package]] name = "tokio-util" -version = "0.7.8" +version = "0.7.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "806fe8c2c87eccc8b3267cbae29ed3ab2d0bd37fca70ab622e46aaa9375ddb7d" +checksum = "61e7c3654c13bcd040d4a03abee2c75b1d14a37b423cf5a813ceae1cc903ec6a" dependencies = [ "bytes", "futures-core", "futures-sink", "pin-project-lite", "tokio", - "tracing", ] [[package]] name = "toml" -version = "0.5.9" +version = "0.7.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8d82e1a7758622a465f8cee077614c73484dac5b836c02ff6a40d5d1010324d7" -dependencies = [ - "serde", -] - -[[package]] -name = "toml" -version = "0.7.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d6135d499e69981f9ff0ef2167955a5333c35e36f6937d382974566b3d5b94ec" +checksum = "dd79e69d3b627db300ff956027cc6c3798cef26d22526befdfcd12feeb6d2257" dependencies = [ "serde", "serde_spanned", @@ -5333,91 +5069,31 @@ dependencies = [ [[package]] name = "toml_datetime" -version = "0.6.2" +version = "0.6.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"5a76a9312f5ba4c2dec6b9161fdf25d87ad8a09256ccea5a556fef03c706a10f" +checksum = "0dd7358ecb8fc2f8d014bf86f6f638ce72ba252a2c3a2572f2a795f1d23efb41" dependencies = [ "serde", ] [[package]] name = "toml_edit" -version = "0.19.10" +version = "0.19.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2380d56e8670370eee6566b0bfd4265f65b3f432e8c6d85623f728d4fa31f739" +checksum = "1b5bb770da30e5cbfde35a2d7b9b8a2c4b8ef89548a7a6aeab5c9a576e3e7421" dependencies = [ - "indexmap 1.9.1", + "indexmap 2.5.0", "serde", "serde_spanned", "toml_datetime", "winnow", ] -[[package]] -name = "tonic" -version = "0.8.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "55b9af819e54b8f33d453655bef9b9acc171568fb49523078d0cc4e7484200ec" -dependencies = [ - "async-stream", - "async-trait", - "axum 0.5.17", - "base64 0.13.1", - "bytes", - "futures-core", - "futures-util", - "h2", - "http", - "http-body", - "hyper", - "hyper-timeout", - "percent-encoding", - "pin-project", - "prost", - "prost-derive", - "tokio", - "tokio-stream", - "tokio-util", - "tower", - "tower-layer", - "tower-service", - "tracing", - "tracing-futures", -] - -[[package]] -name = "tonic" -version = "0.9.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3082666a3a6433f7f511c7192923fa1fe07c69332d3c6a2e6bb040b569199d5a" -dependencies = [ - "async-trait", - "axum 0.6.18", - "base64 0.21.2", - "bytes", - "futures-core", - "futures-util", - "h2", - "http", - "http-body", - "hyper", - "hyper-timeout", - "percent-encoding", - "pin-project", - "prost", - "tokio", - "tokio-stream", - "tower", - "tower-layer", - "tower-service", - "tracing", -] - [[package]] name = "totp-rs" -version = "5.0.2" +version = "5.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0ad5e73765ff14ae797c1a61ee0c7beaf21b4e4a0047844300e332c6c24df1fc" +checksum = "17b2f27dad992486c26b4e7455f38aa487e838d6d61b57e72906ee2b8c287a90" dependencies = [ "base32", "constant_time_eq", @@ -5437,56 +5113,31 @@ checksum = "b8fa9be0de6cf49e536ce1851f987bd21a43b771b09473c3549a6c853db37c1c" dependencies = [ "futures-core", "futures-util", - "indexmap 1.9.1", "pin-project", "pin-project-lite", - "rand", - "slab", "tokio", - "tokio-util", - "tower-layer", - "tower-service", - "tracing", -] - -[[package]] -name = "tower-http" -version = "0.3.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c530c8675c1dbf98facee631536fa116b5fb6382d7dd6dc1b118d970eafe3ba" -dependencies = [ - "bitflags 1.3.2", - "bytes", - "futures-core", - "futures-util", - "http", - "http-body", - "http-range-header", - "pin-project-lite", - "tower", "tower-layer", "tower-service", ] [[package]] name = "tower-layer" -version = "0.3.2" +version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c20c8dbed6283a09604c3e69b4b7eeb54e298b8a600d4d5ecb5ad39de609f1d0" +checksum = "121c2a6cda46980bb0fcd1647ffaf6cd3fc79a013de288782836f6df9c48780e" [[package]] name = "tower-service" -version = "0.3.2" +version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6bc1c9ce2b5135ac7f93c72918fc37feb872bdc6a5533a8b85eb4b86bfdae52" +checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3" [[package]] name = "tracing" -version = "0.1.37" +version = "0.1.40" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ce8c33a8d48bd45d624a6e523445fd21ec13d3653cd51f681abf67418f54eb8" +checksum = 
"c3523ab5a71916ccf420eebdf5521fcef02141234bbc0b8a49f2fdc4544364ef" dependencies = [ - "cfg-if", "log", "pin-project-lite", "tracing-attributes", @@ -5495,112 +5146,47 @@ dependencies = [ [[package]] name = "tracing-actix-web" -version = "0.7.5" +version = "0.7.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ce52ffaf2d544e317d3bef63f49a6a22022866505fa4840a4339b1756834a2a9" +checksum = "284586dc201db407be8c9d721abad1b3a6dacbbce5cccecd4fd15a37db95ab0d" dependencies = [ "actix-web", - "opentelemetry 0.19.0", + "mutually_exclusive_features", "pin-project", "tracing", - "tracing-opentelemetry 0.19.0", "uuid", ] [[package]] name = "tracing-attributes" -version = "0.1.23" +version = "0.1.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4017f8f45139870ca7e672686113917c71c7a6e02d4924eda67186083c03081a" +checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" dependencies = [ "proc-macro2", "quote", - "syn 1.0.103", -] - -[[package]] -name = "tracing-awc" -version = "0.1.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eaa1a68fce4d1a7fad459f81ddcafbdd7c6f6bcda5c7e07d5f42db637931fac7" -dependencies = [ - "actix-http", - "actix-service", - "awc", - "bytes", - "futures-core", - "opentelemetry 0.19.0", - "pin-project-lite", - "tracing", - "tracing-opentelemetry 0.19.0", + "syn 2.0.77", ] [[package]] name = "tracing-core" -version = "0.1.30" +version = "0.1.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "24eb03ba0eab1fd845050058ce5e616558e8f8d8fca633e6b163fe25c797213a" +checksum = "c06d3da6113f116aaee68e4d601191614c9053067f9ab7f6edbcb161237daa54" dependencies = [ "once_cell", "valuable", ] [[package]] -name = "tracing-error" +name = "tracing-log" version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d686ec1c0f384b1277f097b2f279a2ecc11afe8c133c1aabf036a27cb4cd206e" +checksum = "ee855f1f400bd0e5c02d150ae5de3840039a3f54b025156404e34c23c03f47c3" dependencies = [ - "tracing", - "tracing-subscriber", -] - -[[package]] -name = "tracing-futures" -version = "0.2.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97d095ae15e245a057c8e8451bab9b3ee1e1f68e9ba2b4fbc18d0ac5237835f2" -dependencies = [ - "pin-project", - "tracing", -] - -[[package]] -name = "tracing-log" -version = "0.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "78ddad33d2d10b1ed7eb9d1f518a5674713876e97e5bb9b7345a7984fbb4f922" -dependencies = [ - "lazy_static", "log", - "tracing-core", -] - -[[package]] -name = "tracing-opentelemetry" -version = "0.16.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3ffbf13a0f8b054a4e59df3a173b818e9c6177c02789871f2073977fd0062076" -dependencies = [ - "opentelemetry 0.16.0", - "tracing", - "tracing-core", - "tracing-log", - "tracing-subscriber", -] - -[[package]] -name = "tracing-opentelemetry" -version = "0.19.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "00a39dcf9bfc1742fa4d6215253b33a6e474be78275884c216fc2a06267b3600" -dependencies = [ "once_cell", - "opentelemetry 0.19.0", - "tracing", "tracing-core", - "tracing-log", - "tracing-subscriber", ] [[package]] @@ -5615,9 +5201,9 @@ dependencies = [ [[package]] name = "tracing-subscriber" -version = "0.3.17" +version = "0.3.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"30a651bc37f915e81f087d86e62a18eec5f79550c7faff886f7090b4ea757c77" +checksum = "ad0f048c97dbd9faa9b7df56362b8ebcaa52adb06b498c050d2f4e32f90a7a8b" dependencies = [ "matchers", "nu-ansi-term", @@ -5635,22 +5221,43 @@ dependencies = [ ] [[package]] -name = "triomphe" -version = "0.1.8" +name = "tracing-test" +version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f1ee9bd9239c339d714d657fac840c6d2a4f9c45f4f9ec7b0975113458be78db" +checksum = "557b891436fe0d5e0e363427fc7f217abf9ccd510d5136549847bdcbcd011d68" +dependencies = [ + "tracing-core", + "tracing-subscriber", + "tracing-test-macro", +] + +[[package]] +name = "tracing-test-macro" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "04659ddb06c87d233c566112c1c9c5b9e98256d9af50ec3bc9c8327f873a7568" +dependencies = [ + "quote", + "syn 2.0.77", +] + +[[package]] +name = "triomphe" +version = "0.1.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "859eb650cfee7434994602c3a68b25d77ad9e68c8a6cd491616ef86661382eb3" [[package]] name = "try-lock" -version = "0.2.3" +version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "59547bce71d9c38b83d9c0e92b6066c4253371f15005def0c30d9657f50c7642" +checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" [[package]] name = "ts-rs" -version = "7.0.0" +version = "7.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e1ff1f8c90369bc172200013ac17ae86e7b5def580687df4e6127883454ff2b0" +checksum = "fc2cae1fc5d05d47aa24b64f9a4f7cba24cdc9187a2084dd97ac57bef5eccae6" dependencies = [ "chrono", "thiserror", @@ -5659,63 +5266,28 @@ dependencies = [ [[package]] name = "ts-rs-macros" -version = "7.0.0" +version = "7.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a6f41cc0aeb7a4a55730188e147d3795a7349b501f8334697fd37629b896cdc2" +checksum = "73f7f9b821696963053a89a7bd8b292dc34420aea8294d7b225274d488f3ec92" dependencies = [ "Inflector", "proc-macro2", "quote", - "syn 2.0.31", + "syn 2.0.77", "termcolor", ] -[[package]] -name = "typed-builder" -version = "0.15.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4d2135600ca28125d27c63643ed7789b9f467a316e3a8ad98a9abeeb3eec4a83" -dependencies = [ - "typed-builder-macro", -] - -[[package]] -name = "typed-builder-macro" -version = "0.15.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "952108e5d54c3c3f6552e8c5cdb3600adf49c22a4ea82066dea90d2f5c71c526" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.31", -] - [[package]] name = "typenum" -version = "1.15.0" +version = "1.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dcf81ac59edc17cc8697ff311e8f5ef2d99fcbd9817b34cec66f90b6c3dfd987" - -[[package]] -name = "ucd-trie" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e79c4d996edb816c91e4308506774452e55e95c3c9de07b6729e17e15a5ef81" - -[[package]] -name = "unicase" -version = "2.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "50f37be617794602aabbeee0be4f259dc1778fabe05e2d67ee8f79326d5cb4f6" -dependencies = [ - "version_check", -] +checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825" [[package]] name = "unicode-bidi" -version = "0.3.13" +version = "0.3.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"92888ba5573ff080736b3648696b70cafad7d250551175acbaa4e0385b3e1460" +checksum = "08f95100a766bf4f8f28f90d77e0a5461bbdb219042e7679bebe79004fed8d75" [[package]] name = "unicode-general-category" @@ -5725,54 +5297,60 @@ checksum = "2281c8c1d221438e373249e065ca4989c4c36952c211ff21a0ee91c44a3869e7" [[package]] name = "unicode-ident" -version = "1.0.5" +version = "1.0.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6ceab39d59e4c9499d4e5a8ee0e2735b891bb7308ac83dfb4e80cad195c9f6f3" +checksum = "e91b56cd4cadaeb79bbf1a5645f6b4f8dc5bde8834ad5894a8db35fda9efa1fe" [[package]] name = "unicode-normalization" -version = "0.1.22" +version = "0.1.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c5713f0fc4b5db668a2ac63cdb7bb4469d8c9fed047b1d0292cc7b0ce2ba921" +checksum = "a56d1686db2308d901306f92a263857ef59ea39678a5458e7cb17f01415101f5" dependencies = [ "tinyvec", ] [[package]] -name = "unicode-width" -version = "0.1.10" +name = "unicode-properties" +version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c0edd1e5b14653f783770bce4a4dabb4a5108a5370a5f5d8cfe8710c361f6c8b" +checksum = "52ea75f83c0137a9b98608359a5f1af8144876eb67bcb1ce837368e906a9f524" + +[[package]] +name = "unicode-width" +version = "0.1.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0336d538f7abc86d282a4189614dfaa90810dfc2c6f6427eaf88e16311dd225d" [[package]] name = "unicode-xid" -version = "0.2.4" +version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f962df74c8c05a667b5ee8bcf162993134c104e96440b663c8daa176dc772d8c" +checksum = "229730647fbc343e3a80e463c1db7f78f3855d3f3739bee0dda773c9a037c90a" [[package]] name = "untrusted" -version = "0.7.1" +version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a" +checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" [[package]] name = "url" -version = "2.4.0" +version = "2.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "50bff7831e19200a85b17131d085c25d7811bc4e186efdaf54bbd132994a88cb" +checksum = "22784dbdf76fdde8af1aeda5622b546b422b6fc585325248a2bf9f5e41e94d6c" dependencies = [ "form_urlencoded", - "idna 0.4.0", + "idna 0.5.0", "percent-encoding", "serde", ] [[package]] name = "urlencoding" -version = "2.1.2" +version = "2.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e8db7427f936968176eaa7cdf81b7f98b980b18495ec28f1b5791ac3bfe3eea9" +checksum = "daf8dba3b7eb870caf1ddeed7bc9d2a049f3cfdfae7cb521b087cc33ae4c49da" [[package]] name = "utf-8" @@ -5781,22 +5359,34 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "09cc8ee72d2a9becf2f2febe0205bbed8fc6615b7cb429ad062dc7b7ddd036a9" [[package]] -name = "utf8-width" -version = "0.1.6" +name = "utf16_iter" +version = "1.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5190c9442dcdaf0ddd50f37420417d219ae5261bbf5db120d0f9bab996c9cba1" +checksum = "c8232dd3cdaed5356e0f716d285e4b40b932ac434100fe9b7e0e8e935b9e6246" + +[[package]] +name = "utf8-width" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "86bd8d4e895da8537e5315b8254664e6b769c4ff3db18321b297a1e7004392e3" + +[[package]] +name = "utf8_iter" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be" [[package]] name = "utf8parse" -version = "0.2.1" +version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a" +checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" [[package]] name = "uuid" -version = "1.4.0" +version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d023da39d1fde5a8a3fe1f3e01ca9632ada0a63e9797de55a879d6e2236277be" +checksum = "81dfa00651efa65069b0b6b651f4aaa31ba9e3c3ce0137aaad053604ee7e0314" dependencies = [ "getrandom", "serde", @@ -5816,43 +5406,29 @@ checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" [[package]] name = "version_check" -version = "0.9.4" +version = "0.9.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" - -[[package]] -name = "waker-fn" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d5b2c62b4012a3e1eca5a7e077d13b3bf498c4073e33ccd58626607748ceeca" +checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" [[package]] name = "walkdir" -version = "2.3.2" +version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "808cf2735cd4b6866113f648b791c6adc5714537bc222d9347bb203386ffda56" +checksum = "29790946404f91d9c5d06f9874efddea1dc06c5efe94541a7d6863108e3a5e4b" dependencies = [ "same-file", - "winapi", "winapi-util", ] [[package]] name = "want" -version = "0.3.0" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1ce8a968cb1cd110d136ff8b819a556d6fb6d919363c61534f6860c7eb172ba0" +checksum = "bfa7760aed19e106de2c7c0b581b509f2f25d3dacaf737cb82ac61bc6d760b0e" dependencies = [ - "log", "try-lock", ] -[[package]] -name = "wasi" -version = "0.10.0+wasi-snapshot-preview1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a143597ca7c7793eff794def352d41792a93c481eb1042423ff7ff72ba2c31f" - [[package]] name = "wasi" version = "0.11.0+wasi-snapshot-preview1" @@ -5860,35 +5436,42 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" [[package]] -name = "wasm-bindgen" -version = "0.2.83" +name = "wasite" +version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eaf9f5aceeec8be17c128b2e93e031fb8a4d469bb9c4ae2d7dc1888b26887268" +checksum = "b8dad83b4f25e74f184f64c43b150b91efe7647395b42289f38e50566d82855b" + +[[package]] +name = "wasm-bindgen" +version = "0.2.93" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a82edfc16a6c469f5f44dc7b571814045d60404b55a0ee849f9bcfa2e63dd9b5" dependencies = [ "cfg-if", + "once_cell", "wasm-bindgen-macro", ] [[package]] name = "wasm-bindgen-backend" -version = "0.2.83" +version = "0.2.93" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c8ffb332579b0557b52d268b91feab8df3615f265d5270fec2a8c95b17c1142" +checksum = "9de396da306523044d3302746f1208fa71d7532227f15e347e2d93e4145dd77b" dependencies = [ "bumpalo", "log", "once_cell", "proc-macro2", "quote", - "syn 1.0.103", + "syn 2.0.77", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-futures" -version = "0.4.33" +version = "0.4.43" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "23639446165ca5a5de86ae1d8896b737ae80319560fbaa4c2887b7da6e7ebd7d" +checksum = "61e9300f63a621e96ed275155c108eb6f843b6a26d053f122ab69724559dc8ed" dependencies = [ "cfg-if", "js-sys", @@ -5898,9 +5481,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro" -version = "0.2.83" +version = "0.2.93" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "052be0f94026e6cbc75cdefc9bae13fd6052cdcaf532fa6c45e7ae33a1e6c810" +checksum = "585c4c91a46b072c92e908d99cb1dcdf95c5218eeb6f3bf1efa991ee7a68cccf" dependencies = [ "quote", "wasm-bindgen-macro-support", @@ -5908,28 +5491,28 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.83" +version = "0.2.93" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "07bc0c051dc5f23e307b13285f9d75df86bfdf816c5721e573dec1f9b8aa193c" +checksum = "afc340c74d9005395cf9dd098506f7f44e38f2b4a21c6aaacf9a105ea5e1e836" dependencies = [ "proc-macro2", "quote", - "syn 1.0.103", + "syn 2.0.77", "wasm-bindgen-backend", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-shared" -version = "0.2.83" +version = "0.2.93" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1c38c045535d93ec4f0b4defec448e4291638ee608530863b1e2ba115d4fff7f" +checksum = "c62a0a307cb4a311d3a07867860911ca130c3494e8c2719593806c08bc5d0484" [[package]] name = "wasm-streams" -version = "0.2.3" +version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6bbae3363c08332cadccd13b67db371814cd214c2524020932f0804b8cf7c078" +checksum = "b65dc4c90b63b118468cf747d8bf3566c1913ef60be765b5730ead9e0a3ba129" dependencies = [ "futures-util", "js-sys", @@ -5938,20 +5521,11 @@ dependencies = [ "web-sys", ] -[[package]] -name = "wav" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a65e199c799848b4f997072aa4d673c034f80f40191f97fe2f0a23f410be1609" -dependencies = [ - "riff", -] - [[package]] name = "web-sys" -version = "0.3.60" +version = "0.3.70" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bcda906d8be16e728fd5adc5b729afad4e444e106ab28cd1c7256e54fa61510f" +checksum = "26fdeaafd9bd129f65e7c031593c24d62186301e0c72c8978fa1678be7d532c0" dependencies = [ "js-sys", "wasm-bindgen", @@ -5959,13 +5533,13 @@ dependencies = [ [[package]] name = "webmention" -version = "0.5.0" +version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8d07b90492f7b6fe35f5298fcd01c663d3c453e8c302dc86c7292c6681b8117d" +checksum = "c2c1a8d1f70dd7b5b5e2bf5fca4dd97fa5ed4e8adcf0b0ee4c6ebe1ebac7a2fe" dependencies = [ "anyhow", "nom", - "reqwest", + "reqwest 0.11.27", "select", "serde", "thiserror", @@ -5974,33 +5548,53 @@ dependencies = [ [[package]] name = "webpage" -version = "1.6.0" +version = "2.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8598785beeb5af95abe95e7bb20c7e747d1188347080d6811d5a56d2b9a5f368" +checksum = "70862efc041d46e6bbaa82bb9c34ae0596d090e86cbd14bd9e93b36ee6802eac" dependencies = [ - "html5ever", - "markup5ever_rcdom", + "html5ever 0.27.0", + "markup5ever_rcdom 0.3.0", "serde", "serde_json", -] - -[[package]] -name = "webpki" -version = "0.22.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f095d78192e208183081cc07bc5515ef55216397af48b873e5edcd72637fa1bd" -dependencies = [ - "ring", - "untrusted", + "url", ] [[package]] name = 
"webpki-roots" -version = "0.22.5" +version = "0.25.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "368bfe657969fb01238bb756d351dcade285e0f6fcbd36dcb23359a5169975be" +checksum = "5f20c57d8d7db6d3b86154206ae5d8fba62dd39573114de97c2cb0578251f8e1" + +[[package]] +name = "webpki-roots" +version = "0.26.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0bd24728e5af82c6c4ec1b66ac4844bdf8156257fccda846ec58b42cd0cdbe6a" dependencies = [ - "webpki", + "rustls-pki-types", +] + +[[package]] +name = "which" +version = "4.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87ba24419a2078cd2b0f2ede2691b6c66d8e47836da3b6db8265ebad47afbfc7" +dependencies = [ + "either", + "home", + "once_cell", + "rustix", +] + +[[package]] +name = "whoami" +version = "1.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "372d5b87f58ec45c384ba03563b03544dc5fadc3983e434b286913f5b4a9bb6d" +dependencies = [ + "redox_syscall", + "wasite", + "web-sys", ] [[package]] @@ -6021,11 +5615,11 @@ checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" [[package]] name = "winapi-util" -version = "0.1.5" +version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178" +checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb" dependencies = [ - "winapi", + "windows-sys 0.59.0", ] [[package]] @@ -6035,31 +5629,42 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" [[package]] -name = "windows-sys" -version = "0.36.1" +name = "windows-core" +version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ea04155a16a59f9eab786fe12a4a450e75cdb175f9e0d80da1e17db09f55b8d2" +checksum = "33ab640c8d7e35bf8ba19b884ba838ceb4fba93a4e8c65a9059d08afcfc683d9" dependencies = [ - "windows_aarch64_msvc 0.36.1", - "windows_i686_gnu 0.36.1", - "windows_i686_msvc 0.36.1", - "windows_x86_64_gnu 0.36.1", - "windows_x86_64_msvc 0.36.1", + "windows-targets 0.52.6", ] [[package]] -name = "windows-sys" -version = "0.42.0" +name = "windows-registry" +version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a3e1820f08b8513f676f7ab6c1f99ff312fb97b553d30ff4dd86f9f15728aa7" +checksum = "e400001bb720a623c1c69032f8e3e4cf09984deec740f007dd2b03ec864804b0" dependencies = [ - "windows_aarch64_gnullvm 0.42.2", - "windows_aarch64_msvc 0.42.2", - "windows_i686_gnu 0.42.2", - "windows_i686_msvc 0.42.2", - "windows_x86_64_gnu 0.42.2", - "windows_x86_64_gnullvm 0.42.2", - "windows_x86_64_msvc 0.42.2", + "windows-result", + "windows-strings", + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-result" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d1043d8214f791817bab27572aaa8af63732e11bf84aa21a45a78d6c317ae0e" +dependencies = [ + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-strings" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4cd9b125c486025df0eabcb585e62173c6c9eddcec5d117d3b6e8c30e2ee4d10" +dependencies = [ + "windows-result", + "windows-targets 0.52.6", ] [[package]] @@ -6068,161 +5673,203 @@ version = "0.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" dependencies = [ - "windows-targets", + "windows-targets 0.48.5", +] + +[[package]] +name = "windows-sys" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" +dependencies = [ + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-sys" +version = "0.59.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" +dependencies = [ + "windows-targets 0.52.6", ] [[package]] name = "windows-targets" -version = "0.48.0" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b1eb6f0cd7c80c79759c929114ef071b87354ce476d9d94271031c0497adfd5" +checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c" dependencies = [ - "windows_aarch64_gnullvm 0.48.0", - "windows_aarch64_msvc 0.48.0", - "windows_i686_gnu 0.48.0", - "windows_i686_msvc 0.48.0", - "windows_x86_64_gnu 0.48.0", - "windows_x86_64_gnullvm 0.48.0", - "windows_x86_64_msvc 0.48.0", + "windows_aarch64_gnullvm 0.48.5", + "windows_aarch64_msvc 0.48.5", + "windows_i686_gnu 0.48.5", + "windows_i686_msvc 0.48.5", + "windows_x86_64_gnu 0.48.5", + "windows_x86_64_gnullvm 0.48.5", + "windows_x86_64_msvc 0.48.5", +] + +[[package]] +name = "windows-targets" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" +dependencies = [ + "windows_aarch64_gnullvm 0.52.6", + "windows_aarch64_msvc 0.52.6", + "windows_i686_gnu 0.52.6", + "windows_i686_gnullvm", + "windows_i686_msvc 0.52.6", + "windows_x86_64_gnu 0.52.6", + "windows_x86_64_gnullvm 0.52.6", + "windows_x86_64_msvc 0.52.6", ] [[package]] name = "windows_aarch64_gnullvm" -version = "0.42.2" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "597a5118570b68bc08d8d59125332c54f1ba9d9adeedeef5b99b02ba2b0698f8" +checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" [[package]] name = "windows_aarch64_gnullvm" -version = "0.48.0" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "91ae572e1b79dba883e0d315474df7305d12f569b400fcf90581b06062f7e1bc" +checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" [[package]] name = "windows_aarch64_msvc" -version = "0.36.1" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9bb8c3fd39ade2d67e9874ac4f3db21f0d710bee00fe7cab16949ec184eeaa47" +checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" [[package]] name = "windows_aarch64_msvc" -version = "0.42.2" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e08e8864a60f06ef0d0ff4ba04124db8b0fb3be5776a5cd47641e942e58c4d43" - -[[package]] -name = "windows_aarch64_msvc" -version = "0.48.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b2ef27e0d7bdfcfc7b868b317c1d32c641a6fe4629c171b8928c7b08d98d7cf3" +checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" [[package]] name = "windows_i686_gnu" -version = "0.36.1" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "180e6ccf01daf4c426b846dfc66db1fc518f074baa793aa7d9b9aaeffad6a3b6" 
+checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" [[package]] name = "windows_i686_gnu" -version = "0.42.2" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c61d927d8da41da96a81f029489353e68739737d3beca43145c8afec9a31a84f" +checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" [[package]] -name = "windows_i686_gnu" -version = "0.48.0" +name = "windows_i686_gnullvm" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "622a1962a7db830d6fd0a69683c80a18fda201879f0f447f065a3b7467daa241" +checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" [[package]] name = "windows_i686_msvc" -version = "0.36.1" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2e7917148b2812d1eeafaeb22a97e4813dfa60a3f8f78ebe204bcc88f12f024" +checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" [[package]] name = "windows_i686_msvc" -version = "0.42.2" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "44d840b6ec649f480a41c8d80f9c65108b92d89345dd94027bfe06ac444d1060" - -[[package]] -name = "windows_i686_msvc" -version = "0.48.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4542c6e364ce21bf45d69fdd2a8e455fa38d316158cfd43b3ac1c5b1b19f8e00" +checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" [[package]] name = "windows_x86_64_gnu" -version = "0.36.1" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4dcd171b8776c41b97521e5da127a2d86ad280114807d0b2ab1e462bc764d9e1" +checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" [[package]] name = "windows_x86_64_gnu" -version = "0.42.2" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8de912b8b8feb55c064867cf047dda097f92d51efad5b491dfb98f6bbb70cb36" - -[[package]] -name = "windows_x86_64_gnu" -version = "0.48.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca2b8a661f7628cbd23440e50b05d705db3686f894fc9580820623656af974b1" +checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" [[package]] name = "windows_x86_64_gnullvm" -version = "0.42.2" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26d41b46a36d453748aedef1486d5c7a85db22e56aff34643984ea85514e94a3" +checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" [[package]] name = "windows_x86_64_gnullvm" -version = "0.48.0" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7896dbc1f41e08872e9d5e8f8baa8fdd2677f29468c4e156210174edc7f7b953" +checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" [[package]] name = "windows_x86_64_msvc" -version = "0.36.1" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c811ca4a8c853ef420abd8592ba53ddbbac90410fab6903b3e79972a631f7680" +checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" [[package]] name = "windows_x86_64_msvc" -version = "0.42.2" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9aec5da331524158c6d1a4ac0ab1541149c0b9505fde06423b02f5ef0106b9f0" - -[[package]] -name = "windows_x86_64_msvc" -version = "0.48.0" 
-source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a515f5799fe4961cb532f983ce2b23082366b898e52ffbce459c86f67c8378a" +checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" [[package]] name = "winnow" -version = "0.4.6" +version = "0.5.40" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "61de7bac303dc551fe038e2b3cef0f571087a47571ea6e79a87692ac99b99699" +checksum = "f593a95398737aeed53e489c785df13f3618e41dbcd6718c6addbf1395aa6876" dependencies = [ "memchr", ] [[package]] name = "winreg" -version = "0.10.1" +version = "0.50.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "80d0f4e272c85def139476380b12f9ac60926689dd2e01d4923222f40580869d" +checksum = "524e57b2c537c0f9b1e69f1965311ec12182b4122e45035b1508cd24d2adadb1" dependencies = [ - "winapi", + "cfg-if", + "windows-sys 0.48.0", +] + +[[package]] +name = "write16" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d1890f4022759daae28ed4fe62859b1236caebfc61ede2f63ed4e695f3f6d936" + +[[package]] +name = "writeable" +version = "0.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e9df38ee2d2c3c5948ea468a8406ff0db0b29ae1ffde1bcf20ef305bcc95c51" + +[[package]] +name = "x509-certificate" +version = "0.23.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "66534846dec7a11d7c50a74b7cdb208b9a581cad890b7866430d438455847c85" +dependencies = [ + "bcder", + "bytes", + "chrono", + "der", + "hex", + "pem", + "ring", + "signature", + "spki", + "thiserror", + "zeroize", ] [[package]] name = "xml-builder" -version = "0.5.2" +version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "efc4f1a86af7800dfc4056c7833648ea4515ae21502060b5c98114d828f5333b" +checksum = "5ef5f40cd674b9d9814545203f175ac29ffdcb6e006727f4d95797d7badd72e2" [[package]] name = "xml5ever" @@ -6232,7 +5879,18 @@ checksum = "4034e1d05af98b51ad7214527730626f019682d797ba38b51689212118d8e650" dependencies = [ "log", "mac", - "markup5ever", + "markup5ever 0.11.0", +] + +[[package]] +name = "xml5ever" +version = "0.18.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9bbb26405d8e919bc1547a5aa9abc95cbfa438f04844f5fdd9dc7596b748bf69" +dependencies = [ + "log", + "mac", + "markup5ever 0.12.1", ] [[package]] @@ -6245,37 +5903,143 @@ dependencies = [ ] [[package]] -name = "zeroize" -version = "1.5.7" +name = "yansi" +version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c394b5bd0c6f669e7275d9c20aa90ae064cb22e75a1cad54e1b34088034b149f" +checksum = "cfe53a6657fd280eaa890a3bc59152892ffa3e30101319d168b781ed6529b049" + +[[package]] +name = "yoke" +version = "0.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c5b1314b079b0930c31e3af543d8ee1757b1951ae1e1565ec704403a7240ca5" +dependencies = [ + "serde", + "stable_deref_trait", + "yoke-derive", + "zerofrom", +] + +[[package]] +name = "yoke-derive" +version = "0.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "28cc31741b18cb6f1d5ff12f5b7523e3d6eb0852bbbad19d73905511d9849b95" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.77", + "synstructure", +] + +[[package]] +name = "zerocopy" +version = "0.7.35" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1b9b4fd18abc82b8136838da5d50bae7bdea537c574d8dc1a34ed098d6c166f0" +dependencies = [ 
+ "byteorder", + "zerocopy-derive", +] + +[[package]] +name = "zerocopy-derive" +version = "0.7.35" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.77", +] + +[[package]] +name = "zerofrom" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91ec111ce797d0e0784a1116d0ddcdbea84322cd79e5d5ad173daeba4f93ab55" +dependencies = [ + "zerofrom-derive", +] + +[[package]] +name = "zerofrom-derive" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ea7b4a3637ea8669cedf0f1fd5c286a17f3de97b8dd5a70a6c167a1730e63a5" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.77", + "synstructure", +] + +[[package]] +name = "zeroize" +version = "1.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ced3678a2879b30306d323f4542626697a464a97c0a07c9aebf7ebca65cd4dde" +dependencies = [ + "zeroize_derive", +] + +[[package]] +name = "zeroize_derive" +version = "1.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.77", +] + +[[package]] +name = "zerovec" +version = "0.10.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aa2b893d79df23bfb12d5461018d408ea19dfafe76c2c7ef6d4eba614f8ff079" +dependencies = [ + "yoke", + "zerofrom", + "zerovec-derive", +] + +[[package]] +name = "zerovec-derive" +version = "0.10.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6eafa6dfb17584ea3e2bd6e76e0cc15ad7af12b09abdd1ca55961bed9b1063c6" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.77", +] [[package]] name = "zstd" -version = "0.12.3+zstd.1.5.2" +version = "0.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "76eea132fb024e0e13fd9c2f5d5d595d8a967aa72382ac2f9d39fcc95afd0806" +checksum = "fcf2b778a664581e31e389454a7072dab1647606d44f7feea22cd5abb9c9f3f9" dependencies = [ "zstd-safe", ] [[package]] name = "zstd-safe" -version = "6.0.5+zstd.1.5.4" +version = "7.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d56d9e60b4b1758206c238a10165fbcae3ca37b01744e394c463463f6529d23b" +checksum = "54a3ab4db68cea366acc5c897c7b4d4d1b8994a9cd6e6f841f8964566a419059" dependencies = [ - "libc", "zstd-sys", ] [[package]] name = "zstd-sys" -version = "2.0.8+zstd.1.5.5" +version = "2.0.13+zstd.1.5.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5556e6ee25d32df2586c098bbfa278803692a20d0ab9565e049480d52707ec8c" +checksum = "38ff0f21cfee8f97d94cef41359e0c89aa6113028ab0291aa8ca0038995a95aa" dependencies = [ "cc", - "libc", "pkg-config", ] diff --git a/Cargo.toml b/Cargo.toml index a01cc687b..5523dcfd6 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,5 +1,5 @@ [workspace.package] -version = "0.19.0-rc.3" +version = "0.19.6-beta.7" edition = "2021" description = "A link aggregator for the fediverse" license = "AGPL-3.0" @@ -16,15 +16,20 @@ license.workspace = true homepage.workspace = true documentation.workspace = true repository.workspace = true +publish = false [lib] doctest = false +[lints] +workspace = true + [profile.release] debug = 0 -lto = "thin" -strip = true # Automatically strip symbols from the binary. -opt-level = "z" # Optimize for size. 
+lto = "fat" +strip = true # Automatically strip symbols from the binary. +opt-level = 3 # Optimize for speed, not size. +codegen-units = 1 # Reduce parallel code generation. # This profile significantly speeds up build time. If debug info is needed you can comment the line # out temporarily, but make sure to leave this in the main branch. @@ -32,16 +37,7 @@ opt-level = "z" # Optimize for size. debug = 0 [features] -embed-pictrs = ["pict-rs"] -console = [ - "console-subscriber", - "opentelemetry", - "opentelemetry-otlp", - "tracing-opentelemetry", - "reqwest-tracing/opentelemetry_0_16", -] json-log = ["tracing-subscriber/json"] -prometheus-metrics = ["prometheus", "actix-web-prom"] default = [] [workspace] @@ -51,6 +47,7 @@ members = [ "crates/api_common", "crates/apub", "crates/utils", + "crates/db_perf", "crates/db_schema", "crates/db_views", "crates/db_views_actor", @@ -59,77 +56,108 @@ members = [ "crates/federate", ] +[workspace.lints.clippy] +cast_lossless = "deny" +complexity = { level = "deny", priority = -1 } +correctness = { level = "deny", priority = -1 } +dbg_macro = "deny" +explicit_into_iter_loop = "deny" +explicit_iter_loop = "deny" +get_first = "deny" +implicit_clone = "deny" +indexing_slicing = "deny" +inefficient_to_string = "deny" +items-after-statements = "deny" +manual_string_new = "deny" +needless_collect = "deny" +perf = { level = "deny", priority = -1 } +redundant_closure_for_method_calls = "deny" +style = { level = "deny", priority = -1 } +suspicious = { level = "deny", priority = -1 } +uninlined_format_args = "allow" +unused_self = "deny" +unwrap_used = "deny" +unimplemented = "deny" + [workspace.dependencies] -lemmy_api = { version = "=0.19.0-rc.3", path = "./crates/api" } -lemmy_api_crud = { version = "=0.19.0-rc.3", path = "./crates/api_crud" } -lemmy_apub = { version = "=0.19.0-rc.3", path = "./crates/apub" } -lemmy_utils = { version = "=0.19.0-rc.3", path = "./crates/utils" } -lemmy_db_schema = { version = "=0.19.0-rc.3", path = "./crates/db_schema" } -lemmy_api_common = { version = "=0.19.0-rc.3", path = "./crates/api_common" } -lemmy_routes = { version = "=0.19.0-rc.3", path = "./crates/routes" } -lemmy_db_views = { version = "=0.19.0-rc.3", path = "./crates/db_views" } -lemmy_db_views_actor = { version = "=0.19.0-rc.3", path = "./crates/db_views_actor" } -lemmy_db_views_moderator = { version = "=0.19.0-rc.3", path = "./crates/db_views_moderator" } -activitypub_federation = { version = "0.5.0-beta.3", default-features = false, features = [ +lemmy_api = { version = "=0.19.6-beta.7", path = "./crates/api" } +lemmy_api_crud = { version = "=0.19.6-beta.7", path = "./crates/api_crud" } +lemmy_apub = { version = "=0.19.6-beta.7", path = "./crates/apub" } +lemmy_utils = { version = "=0.19.6-beta.7", path = "./crates/utils", default-features = false } +lemmy_db_schema = { version = "=0.19.6-beta.7", path = "./crates/db_schema" } +lemmy_api_common = { version = "=0.19.6-beta.7", path = "./crates/api_common" } +lemmy_routes = { version = "=0.19.6-beta.7", path = "./crates/routes" } +lemmy_db_views = { version = "=0.19.6-beta.7", path = "./crates/db_views" } +lemmy_db_views_actor = { version = "=0.19.6-beta.7", path = "./crates/db_views_actor" } +lemmy_db_views_moderator = { version = "=0.19.6-beta.7", path = "./crates/db_views_moderator" } +lemmy_federate = { version = "=0.19.6-beta.7", path = "./crates/federate" } +activitypub_federation = { version = "0.6.0-alpha2", default-features = false, features = [ "actix-web", ] } -diesel = "2.1.0" +diesel = "2.1.6" 
diesel_migrations = "2.1.0" -diesel-async = "0.3.1" -serde = { version = "1.0.167", features = ["derive"] } -serde_with = "3.0.0" -actix-web = { version = "4.3.1", default-features = false, features = [ +diesel-async = "0.4.1" +serde = { version = "1.0.204", features = ["derive"] } +serde_with = "3.9.0" +actix-web = { version = "4.9.0", default-features = false, features = [ "macros", - "rustls", + "rustls-0_23", "compress-brotli", "compress-gzip", "compress-zstd", "cookies", ] } -tracing = "0.1.37" -tracing-actix-web = { version = "0.7.5", default-features = false } -tracing-error = "0.2.0" -tracing-log = "0.1.3" -tracing-subscriber = { version = "0.3.17", features = ["env-filter"] } -url = { version = "2.4.0", features = ["serde"] } -reqwest = { version = "0.11.18", features = ["json", "blocking", "gzip"] } -reqwest-middleware = "0.2.2" -reqwest-tracing = "0.4.5" +tracing = "0.1.40" +tracing-actix-web = { version = "0.7.10", default-features = false } +tracing-subscriber = { version = "0.3.18", features = ["env-filter"] } +url = { version = "2.5.2", features = ["serde"] } +reqwest = { version = "0.12.7", default-features = false, features = [ + "json", + "blocking", + "gzip", + "rustls-tls", +] } +reqwest-middleware = "0.3.3" +reqwest-tracing = "0.5.3" clokwerk = "0.4.0" doku = { version = "0.21.1", features = ["url-2"] } -bcrypt = "0.15.0" -chrono = { version = "0.4.26", features = ["serde"], default-features = false } -serde_json = { version = "1.0.100", features = ["preserve_order"] } -base64 = "0.21.2" -uuid = { version = "1.4.0", features = ["serde", "v4"] } -async-trait = "0.1.71" +bcrypt = "0.15.1" +chrono = { version = "0.4.38", features = ["serde"], default-features = false } +serde_json = { version = "1.0.121", features = ["preserve_order"] } +base64 = "0.22.1" +uuid = { version = "1.10.0", features = ["serde", "v4"] } +async-trait = "0.1.81" captcha = "0.0.9" -anyhow = { version = "1.0.71", features = [ +anyhow = { version = "1.0.86", features = [ "backtrace", ] } # backtrace is on by default on nightly, but not stable rust -diesel_ltree = "0.3.0" -typed-builder = "0.15.0" -serial_test = "2.0.0" -tokio = { version = "1.29.1", features = ["full"] } -regex = "1.9.0" -once_cell = "1.18.0" -diesel-derive-newtype = "2.1.0" +diesel_ltree = "0.3.1" +serial_test = "3.1.1" +tokio = { version = "1.39.2", features = ["full"] } +regex = "1.10.5" +diesel-derive-newtype = "2.1.2" diesel-derive-enum = { version = "2.1.0", features = ["postgres"] } -strum = "0.25.0" -strum_macros = "0.25.1" -itertools = "0.11.0" -futures = "0.3.28" -http = "0.2.9" -percent-encoding = "2.3.0" +strum = { version = "0.26.3", features = ["derive"] } +itertools = "0.13.0" +futures = "0.3.30" +http = "1.1" rosetta-i18n = "0.1.3" -opentelemetry = { version = "0.19.0", features = ["rt-tokio"] } -tracing-opentelemetry = { version = "0.19.0" } -ts-rs = { version = "7.0.0", features = ["serde-compat", "chrono-impl"] } -rustls = { version = "0.21.3", features = ["dangerous_configuration"] } -futures-util = "0.3.28" -tokio-postgres = "0.7.8" -tokio-postgres-rustls = "0.10.0" -enum-map = "2.6" +ts-rs = { version = "7.1.1", features = [ + "serde-compat", + "chrono-impl", + "no-serde-warnings", +] } +rustls = { version = "0.23.12", features = ["ring"] } +futures-util = "0.3.30" +tokio-postgres = "0.7.11" +tokio-postgres-rustls = "0.12.0" +urlencoding = "2.1.3" +enum-map = "2.7" +moka = { version = "0.12.8", features = ["future"] } +i-love-jesus = { version = "0.1.0" } +clap = { version = "4.5.13", features = ["derive", 
"env"] } +pretty_assertions = "1.4.0" +derive-new = "0.7.0" [dependencies] lemmy_api = { workspace = true } @@ -139,38 +167,29 @@ lemmy_utils = { workspace = true } lemmy_db_schema = { workspace = true } lemmy_api_common = { workspace = true } lemmy_routes = { workspace = true } -lemmy_federate = { version = "0.19.0-rc.3", path = "crates/federate" } +lemmy_federate = { workspace = true } activitypub_federation = { workspace = true } diesel = { workspace = true } diesel-async = { workspace = true } -serde = { workspace = true } actix-web = { workspace = true } tracing = { workspace = true } tracing-actix-web = { workspace = true } -tracing-error = { workspace = true } -tracing-log = { workspace = true } tracing-subscriber = { workspace = true } url = { workspace = true } reqwest = { workspace = true } reqwest-middleware = { workspace = true } reqwest-tracing = { workspace = true } clokwerk = { workspace = true } -doku = { workspace = true } serde_json = { workspace = true } -tracing-opentelemetry = { workspace = true, optional = true } -opentelemetry = { workspace = true, optional = true } -console-subscriber = { version = "0.1.10", optional = true } -opentelemetry-otlp = { version = "0.12.0", optional = true } -pict-rs = { version = "0.4.0-rc.12", optional = true } -tokio.workspace = true -actix-cors = "0.6.4" rustls = { workspace = true } +tokio.workspace = true +actix-cors = "0.7.0" futures-util = { workspace = true } -tokio-postgres = { workspace = true } -tokio-postgres-rustls = { workspace = true } chrono = { workspace = true } -prometheus = { version = "0.13.3", features = ["process"], optional = true } -actix-web-prom = { version = "0.6.0", optional = true } +prometheus = { version = "0.13.4", features = ["process"] } serial_test = { workspace = true } -clap = { version = "4.3.19", features = ["derive"] } -actix-web-httpauth = "0.8.1" +clap = { workspace = true } +actix-web-prom = "0.9.0" + +[dev-dependencies] +pretty_assertions = { workspace = true } diff --git a/README.md b/README.md index bb77a1cd8..6c8398cd9 100644 --- a/README.md +++ b/README.md @@ -7,7 +7,7 @@ [![Translation status](http://weblate.join-lemmy.org/widgets/lemmy/-/lemmy/svg-badge.svg)](http://weblate.join-lemmy.org/engage/lemmy/) [![License](https://img.shields.io/github/license/LemmyNet/lemmy.svg)](LICENSE) ![GitHub stars](https://img.shields.io/github/stars/LemmyNet/lemmy?style=social) -[![Delightful Humane Tech](https://codeberg.org/teaserbot-labs/delightful-humane-design/raw/branch/main/humane-tech-badge.svg)](https://codeberg.org/teaserbot-labs/delightful-humane-design) + @@ -47,9 +47,9 @@ ## About The Project -| Desktop | Mobile | -| ---------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------- | -| ![desktop](https://raw.githubusercontent.com/LemmyNet/joinlemmy-site/main/src/assets/images/main_img.webp) | ![mobile](https://raw.githubusercontent.com/LemmyNet/joinlemmy-site/main/src/assets/images/mobile_pic.webp) | +| Desktop | Mobile | +| --------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------- | +| ![desktop](https://raw.githubusercontent.com/LemmyNet/joinlemmy-site/main/src/assets/images/main_screen_2.webp) | 
![mobile](https://raw.githubusercontent.com/LemmyNet/joinlemmy-site/main/src/assets/images/mobile_pic.webp) | [Lemmy](https://github.com/LemmyNet/lemmy) is similar to sites like [Reddit](https://reddit.com), [Lobste.rs](https://lobste.rs), or [Hacker News](https://news.ycombinator.com/): you subscribe to forums you're interested in, post links and discussions, then vote, and comment on them. Behind the scenes, it is very different; anyone can easily run a server, and all these servers are federated (think email), and connected to the same universe, called the [Fediverse](https://en.wikipedia.org/wiki/Fediverse). @@ -107,7 +107,6 @@ Each Lemmy server can set its own moderation policy; appointing site-wide admins - NSFW post / community support. - High performance. - Server is written in rust. - - Front end is `~80kB` gzipped. - Supports arm64 / Raspberry Pi. ## Installation @@ -122,6 +121,8 @@ Each Lemmy server can set its own moderation policy; appointing site-wide admins Lemmy is free, open-source software, meaning no advertising, monetizing, or venture capital, ever. Your donations directly support full-time development of the project. +Lemmy is made possible by a generous grant from the [NLnet foundation](https://nlnet.nl/). + - [Support on Liberapay](https://liberapay.com/Lemmy). - [Support on Patreon](https://www.patreon.com/dessalines). - [Support on OpenCollective](https://opencollective.com/lemmy). @@ -132,21 +133,25 @@ Lemmy is free, open-source software, meaning no advertising, monetizing, or vent - bitcoin: `1Hefs7miXS5ff5Ck5xvmjKjXf5242KzRtK` - ethereum: `0x400c96c96acbC6E7B3B43B1dc1BB446540a88A01` - monero: `41taVyY6e1xApqKyMVDRVxJ76sPkfZhALLTjRvVKpaAh2pBd4wv9RgYj1tSPrx8wc6iE1uWUfjtQdTmTy2FGMeChGVKPQuV` -- cardano: `addr1q858t89l2ym6xmrugjs0af9cslfwvnvsh2xxp6x4dcez7pf5tushkp4wl7zxfhm2djp6gq60dk4cmc7seaza5p3slx0sakjutm` ## Contributing +Read the following documentation to setup the development environment and start coding: + - [Contributing instructions](https://join-lemmy.org/docs/contributors/01-overview.html) - [Docker Development](https://join-lemmy.org/docs/contributors/03-docker-development.html) - [Local Development](https://join-lemmy.org/docs/contributors/02-local-development.html) +When working on an issue or pull request, you can comment with any questions you may have so that maintainers can answer them. You can also join the [Matrix Development Chat](https://matrix.to/#/#lemmydev:matrix.org) for general assistance. + ### Translations - If you want to help with translating, take a look at [Weblate](https://weblate.join-lemmy.org/projects/lemmy/). You can also help by [translating the documentation](https://github.com/LemmyNet/lemmy-docs#adding-a-new-language). -## Contact +## Community -- [Mastodon](https://mastodon.social/@LemmyDev) +- [Matrix Space](https://matrix.to/#/#lemmy-space:matrix.org) +- [Lemmy Forum](https://lemmy.ml/c/lemmy) - [Lemmy Support Forum](https://lemmy.ml/c/lemmy_support) ## Code Mirrors diff --git a/SECURITY.md b/SECURITY.md index 0bb85174e..1e1750489 100644 --- a/SECURITY.md +++ b/SECURITY.md @@ -2,4 +2,4 @@ ## Reporting a Vulnerability -Message contact at join-lemmy.org for any security-related issues. +Use [Github's security advisory issue system](https://github.com/LemmyNet/lemmy/security/advisories/new). 
diff --git a/api_tests/.eslintrc.json b/api_tests/.eslintrc.json deleted file mode 100644 index c48d71f94..000000000 --- a/api_tests/.eslintrc.json +++ /dev/null @@ -1,41 +0,0 @@ -{ - "root": true, - "env": { - "browser": true - }, - "plugins": ["@typescript-eslint"], - "extends": ["eslint:recommended", "plugin:@typescript-eslint/recommended"], - "parser": "@typescript-eslint/parser", - "parserOptions": { - "project": "./tsconfig.json", - "warnOnUnsupportedTypeScriptVersion": false - }, - "rules": { - "@typescript-eslint/ban-ts-comment": 0, - "@typescript-eslint/no-explicit-any": 0, - "@typescript-eslint/explicit-module-boundary-types": 0, - "arrow-body-style": 0, - "curly": 0, - "eol-last": 0, - "eqeqeq": 0, - "func-style": 0, - "import/no-duplicates": 0, - "max-statements": 0, - "max-params": 0, - "new-cap": 0, - "no-console": 0, - "no-duplicate-imports": 0, - "no-extra-parens": 0, - "no-return-assign": 0, - "no-throw-literal": 0, - "no-trailing-spaces": 0, - "no-unused-expressions": 0, - "no-useless-constructor": 0, - "no-useless-escape": 0, - "no-var": 0, - "prefer-const": 0, - "prefer-rest-params": 0, - "quote-props": 0, - "unicorn/filename-case": 0 - } -} diff --git a/api_tests/.npmrc b/api_tests/.npmrc new file mode 100644 index 000000000..e941d13c2 --- /dev/null +++ b/api_tests/.npmrc @@ -0,0 +1 @@ +package-manager-strict=false diff --git a/api_tests/eslint.config.mjs b/api_tests/eslint.config.mjs new file mode 100644 index 000000000..cf2c426d0 --- /dev/null +++ b/api_tests/eslint.config.mjs @@ -0,0 +1,56 @@ +import pluginJs from "@eslint/js"; +import tseslint from "typescript-eslint"; + +export default [ + pluginJs.configs.recommended, + ...tseslint.configs.recommended, + { + languageOptions: { + parser: tseslint.parser, + }, + }, + // For some reason this has to be in its own block + { + ignores: [ + "putTypesInIndex.js", + "dist/*", + "docs/*", + ".yalc", + "jest.config.js", + ], + }, + { + files: ["src/**/*"], + rules: { + "@typescript-eslint/no-empty-interface": 0, + "@typescript-eslint/no-empty-function": 0, + "@typescript-eslint/ban-ts-comment": 0, + "@typescript-eslint/no-explicit-any": 0, + "@typescript-eslint/explicit-module-boundary-types": 0, + "@typescript-eslint/no-var-requires": 0, + "arrow-body-style": 0, + curly: 0, + "eol-last": 0, + eqeqeq: 0, + "func-style": 0, + "import/no-duplicates": 0, + "max-statements": 0, + "max-params": 0, + "new-cap": 0, + "no-console": 0, + "no-duplicate-imports": 0, + "no-extra-parens": 0, + "no-return-assign": 0, + "no-throw-literal": 0, + "no-trailing-spaces": 0, + "no-unused-expressions": 0, + "no-useless-constructor": 0, + "no-useless-escape": 0, + "no-var": 0, + "prefer-const": 0, + "prefer-rest-params": 0, + "quote-props": 0, + "unicorn/filename-case": 0, + }, + }, +]; diff --git a/api_tests/package.json b/api_tests/package.json index 92e00b81b..81e518ea4 100644 --- a/api_tests/package.json +++ b/api_tests/package.json @@ -6,22 +6,31 @@ "repository": "https://github.com/LemmyNet/lemmy", "author": "Dessalines", "license": "AGPL-3.0", + "packageManager": "pnpm@9.12.3", "scripts": { - "lint": "tsc --noEmit && eslint --report-unused-disable-directives --ext .js,.ts,.tsx src && prettier --check 'src/**/*.ts'", + "lint": "tsc --noEmit && eslint --report-unused-disable-directives && prettier --check 'src/**/*.ts'", "fix": "prettier --write src && eslint --fix src", - "api-test": "jest -i follow.spec.ts && jest -i post.spec.ts && jest -i comment.spec.ts && jest -i private_message.spec.ts && jest -i user.spec.ts && jest -i 
community.spec.ts" + "api-test": "jest -i follow.spec.ts && jest -i image.spec.ts && jest -i user.spec.ts && jest -i private_message.spec.ts && jest -i community.spec.ts && jest -i post.spec.ts && jest -i comment.spec.ts ", + "api-test-follow": "jest -i follow.spec.ts", + "api-test-comment": "jest -i comment.spec.ts", + "api-test-post": "jest -i post.spec.ts", + "api-test-user": "jest -i user.spec.ts", + "api-test-community": "jest -i community.spec.ts", + "api-test-private-message": "jest -i private_message.spec.ts", + "api-test-image": "jest -i image.spec.ts" }, "devDependencies": { - "@types/jest": "^29.5.6", - "@types/node": "^20.8.7", - "@typescript-eslint/eslint-plugin": "^6.8.0", - "@typescript-eslint/parser": "^6.8.0", - "eslint": "^8.52.0", - "eslint-plugin-prettier": "^5.0.1", + "@types/jest": "^29.5.12", + "@types/node": "^22.3.0", + "@typescript-eslint/eslint-plugin": "^8.1.0", + "@typescript-eslint/parser": "^8.1.0", + "eslint": "^9.9.0", + "eslint-plugin-prettier": "^5.1.3", "jest": "^29.5.0", - "lemmy-js-client": "0.19.0-rc.12", - "prettier": "^3.0.0", + "lemmy-js-client": "0.20.0-alpha.11", + "prettier": "^3.2.5", "ts-jest": "^29.1.0", - "typescript": "^5.0.4" + "typescript": "^5.5.4", + "typescript-eslint": "^8.1.0" } } diff --git a/api_tests/pnpm-lock.yaml b/api_tests/pnpm-lock.yaml new file mode 100644 index 000000000..dd357d248 --- /dev/null +++ b/api_tests/pnpm-lock.yaml @@ -0,0 +1,3444 @@ +lockfileVersion: '9.0' + +settings: + autoInstallPeers: true + excludeLinksFromLockfile: false + +importers: + + .: + devDependencies: + '@types/jest': + specifier: ^29.5.12 + version: 29.5.14 + '@types/node': + specifier: ^22.3.0 + version: 22.8.6 + '@typescript-eslint/eslint-plugin': + specifier: ^8.1.0 + version: 8.12.2(@typescript-eslint/parser@8.12.2(eslint@9.13.0)(typescript@5.6.3))(eslint@9.13.0)(typescript@5.6.3) + '@typescript-eslint/parser': + specifier: ^8.1.0 + version: 8.12.2(eslint@9.13.0)(typescript@5.6.3) + eslint: + specifier: ^9.9.0 + version: 9.13.0 + eslint-plugin-prettier: + specifier: ^5.1.3 + version: 5.2.1(eslint@9.13.0)(prettier@3.3.3) + jest: + specifier: ^29.5.0 + version: 29.7.0(@types/node@22.8.6) + lemmy-js-client: + specifier: 0.20.0-alpha.11 + version: 0.20.0-alpha.11 + prettier: + specifier: ^3.2.5 + version: 3.3.3 + ts-jest: + specifier: ^29.1.0 + version: 29.2.5(@babel/core@7.23.9)(@jest/transform@29.7.0)(@jest/types@29.6.3)(babel-jest@29.7.0(@babel/core@7.23.9))(jest@29.7.0(@types/node@22.8.6))(typescript@5.6.3) + typescript: + specifier: ^5.5.4 + version: 5.6.3 + typescript-eslint: + specifier: ^8.1.0 + version: 8.12.2(eslint@9.13.0)(typescript@5.6.3) + +packages: + + '@ampproject/remapping@2.2.1': + resolution: {integrity: sha512-lFMjJTrFL3j7L9yBxwYfCq2k6qqwHyzuUl/XBnif78PWTJYyL/dfowQHWE3sp6U6ZzqWiiIZnpTMO96zhkjwtg==} + engines: {node: '>=6.0.0'} + + '@babel/code-frame@7.26.2': + resolution: {integrity: sha512-RJlIHRueQgwWitWgF8OdFYGZX328Ax5BCemNGlqHfplnRT9ESi8JkFlvaVYbS+UubVY6dpv87Fs2u5M29iNFVQ==} + engines: {node: '>=6.9.0'} + + '@babel/compat-data@7.23.5': + resolution: {integrity: sha512-uU27kfDRlhfKl+w1U6vp16IuvSLtjAxdArVXPa9BvLkrr7CYIsxH5adpHObeAGY/41+syctUWOZ140a2Rvkgjw==} + engines: {node: '>=6.9.0'} + + '@babel/core@7.23.9': + resolution: {integrity: sha512-5q0175NOjddqpvvzU+kDiSOAk4PfdO6FvwCWoQ6RO7rTzEe8vlo+4HVfcnAREhD4npMs0e9uZypjTwzZPCf/cw==} + engines: {node: '>=6.9.0'} + + '@babel/generator@7.23.6': + resolution: {integrity: sha512-qrSfCYxYQB5owCmGLbl8XRpX1ytXlpueOb0N0UmQwA073KZxejgQTzAmJezxvpwQD9uGtK2shHdi55QT+MbjIw==} + engines: 
{node: '>=6.9.0'} + + '@babel/helper-compilation-targets@7.23.6': + resolution: {integrity: sha512-9JB548GZoQVmzrFgp8o7KxdgkTGm6xs9DW0o/Pim72UDjzr5ObUQ6ZzYPqA+g9OTS2bBQoctLJrky0RDCAWRgQ==} + engines: {node: '>=6.9.0'} + + '@babel/helper-environment-visitor@7.22.20': + resolution: {integrity: sha512-zfedSIzFhat/gFhWfHtgWvlec0nqB9YEIVrpuwjruLlXfUSnA8cJB0miHKwqDnQ7d32aKo2xt88/xZptwxbfhA==} + engines: {node: '>=6.9.0'} + + '@babel/helper-function-name@7.23.0': + resolution: {integrity: sha512-OErEqsrxjZTJciZ4Oo+eoZqeW9UIiOcuYKRJA4ZAgV9myA+pOXhhmpfNCKjEH/auVfEYVFJ6y1Tc4r0eIApqiw==} + engines: {node: '>=6.9.0'} + + '@babel/helper-hoist-variables@7.22.5': + resolution: {integrity: sha512-wGjk9QZVzvknA6yKIUURb8zY3grXCcOZt+/7Wcy8O2uctxhplmUPkOdlgoNhmdVee2c92JXbf1xpMtVNbfoxRw==} + engines: {node: '>=6.9.0'} + + '@babel/helper-module-imports@7.22.15': + resolution: {integrity: sha512-0pYVBnDKZO2fnSPCrgM/6WMc7eS20Fbok+0r88fp+YtWVLZrp4CkafFGIp+W0VKw4a22sgebPT99y+FDNMdP4w==} + engines: {node: '>=6.9.0'} + + '@babel/helper-module-transforms@7.23.3': + resolution: {integrity: sha512-7bBs4ED9OmswdfDzpz4MpWgSrV7FXlc3zIagvLFjS5H+Mk7Snr21vQ6QwrsoCGMfNC4e4LQPdoULEt4ykz0SRQ==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0 + + '@babel/helper-plugin-utils@7.22.5': + resolution: {integrity: sha512-uLls06UVKgFG9QD4OeFYLEGteMIAa5kpTPcFL28yuCIIzsf6ZyKZMllKVOCZFhiZ5ptnwX4mtKdWCBE/uT4amg==} + engines: {node: '>=6.9.0'} + + '@babel/helper-simple-access@7.22.5': + resolution: {integrity: sha512-n0H99E/K+Bika3++WNL17POvo4rKWZ7lZEp1Q+fStVbUi8nxPQEBOlTmCOxW/0JsS56SKKQ+ojAe2pHKJHN35w==} + engines: {node: '>=6.9.0'} + + '@babel/helper-split-export-declaration@7.22.6': + resolution: {integrity: sha512-AsUnxuLhRYsisFiaJwvp1QF+I3KjD5FOxut14q/GzovUe6orHLesW2C7d754kRm53h5gqrz6sFl6sxc4BVtE/g==} + engines: {node: '>=6.9.0'} + + '@babel/helper-string-parser@7.23.4': + resolution: {integrity: sha512-803gmbQdqwdf4olxrX4AJyFBV/RTr3rSmOj0rKwesmzlfhYNDEs+/iOcznzpNWlJlIlTJC2QfPFcHB6DlzdVLQ==} + engines: {node: '>=6.9.0'} + + '@babel/helper-validator-identifier@7.22.20': + resolution: {integrity: sha512-Y4OZ+ytlatR8AI+8KZfKuL5urKp7qey08ha31L8b3BwewJAoJamTzyvxPR/5D+KkdJCGPq/+8TukHBlY10FX9A==} + engines: {node: '>=6.9.0'} + + '@babel/helper-validator-identifier@7.25.9': + resolution: {integrity: sha512-Ed61U6XJc3CVRfkERJWDz4dJwKe7iLmmJsbOGu9wSloNSFttHV0I8g6UAgb7qnK5ly5bGLPd4oXZlxCdANBOWQ==} + engines: {node: '>=6.9.0'} + + '@babel/helper-validator-option@7.23.5': + resolution: {integrity: sha512-85ttAOMLsr53VgXkTbkx8oA6YTfT4q7/HzXSLEYmjcSTJPMPQtvq1BD79Byep5xMUYbGRzEpDsjUf3dyp54IKw==} + engines: {node: '>=6.9.0'} + + '@babel/helpers@7.23.9': + resolution: {integrity: sha512-87ICKgU5t5SzOT7sBMfCOZQ2rHjRU+Pcb9BoILMYz600W6DkVRLFBPwQ18gwUVvggqXivaUakpnxWQGbpywbBQ==} + engines: {node: '>=6.9.0'} + + '@babel/parser@7.23.9': + resolution: {integrity: sha512-9tcKgqKbs3xGJ+NtKF2ndOBBLVwPjl1SHxPQkd36r3Dlirw3xWUeGaTbqr7uGZcTaxkVNwc+03SVP7aCdWrTlA==} + engines: {node: '>=6.0.0'} + hasBin: true + + '@babel/plugin-syntax-async-generators@7.8.4': + resolution: {integrity: sha512-tycmZxkGfZaxhMRbXlPXuVFpdWlXpir2W4AMhSJgRKzk/eDlIXOhb2LHWoLpDF7TEHylV5zNhykX6KAgHJmTNw==} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/plugin-syntax-bigint@7.8.3': + resolution: {integrity: sha512-wnTnFlG+YxQm3vDxpGE57Pj0srRU4sHE/mDkt1qv2YJJSeUAec2ma4WLUnUPeKjyrfntVwe/N6dCXpU+zL3Npg==} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/plugin-syntax-class-properties@7.12.13': + resolution: {integrity: 
sha512-fm4idjKla0YahUNgFNLCB0qySdsoPiZP3iQE3rky0mBUtMZ23yDJ9SJdg6dXTSDnulOVqiF3Hgr9nbXvXTQZYA==} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/plugin-syntax-import-meta@7.10.4': + resolution: {integrity: sha512-Yqfm+XDx0+Prh3VSeEQCPU81yC+JWZ2pDPFSS4ZdpfZhp4MkFMaDC1UqseovEKwSUpnIL7+vK+Clp7bfh0iD7g==} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/plugin-syntax-json-strings@7.8.3': + resolution: {integrity: sha512-lY6kdGpWHvjoe2vk4WrAapEuBR69EMxZl+RoGRhrFGNYVK8mOPAW8VfbT/ZgrFbXlDNiiaxQnAtgVCZ6jv30EA==} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/plugin-syntax-jsx@7.23.3': + resolution: {integrity: sha512-EB2MELswq55OHUoRZLGg/zC7QWUKfNLpE57m/S2yr1uEneIgsTgrSzXP3NXEsMkVn76OlaVVnzN+ugObuYGwhg==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/plugin-syntax-logical-assignment-operators@7.10.4': + resolution: {integrity: sha512-d8waShlpFDinQ5MtvGU9xDAOzKH47+FFoney2baFIoMr952hKOLp1HR7VszoZvOsV/4+RRszNY7D17ba0te0ig==} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/plugin-syntax-nullish-coalescing-operator@7.8.3': + resolution: {integrity: sha512-aSff4zPII1u2QD7y+F8oDsz19ew4IGEJg9SVW+bqwpwtfFleiQDMdzA/R+UlWDzfnHFCxxleFT0PMIrR36XLNQ==} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/plugin-syntax-numeric-separator@7.10.4': + resolution: {integrity: sha512-9H6YdfkcK/uOnY/K7/aA2xpzaAgkQn37yzWUMRK7OaPOqOpGS1+n0H5hxT9AUw9EsSjPW8SVyMJwYRtWs3X3ug==} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/plugin-syntax-object-rest-spread@7.8.3': + resolution: {integrity: sha512-XoqMijGZb9y3y2XskN+P1wUGiVwWZ5JmoDRwx5+3GmEplNyVM2s2Dg8ILFQm8rWM48orGy5YpI5Bl8U1y7ydlA==} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/plugin-syntax-optional-catch-binding@7.8.3': + resolution: {integrity: sha512-6VPD0Pc1lpTqw0aKoeRTMiB+kWhAoT24PA+ksWSBrFtl5SIRVpZlwN3NNPQjehA2E/91FV3RjLWoVTglWcSV3Q==} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/plugin-syntax-optional-chaining@7.8.3': + resolution: {integrity: sha512-KoK9ErH1MBlCPxV0VANkXW2/dw4vlbGDrFgz8bmUsBGYkFRcbRwMh6cIJubdPrkxRwuGdtCk0v/wPTKbQgBjkg==} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/plugin-syntax-top-level-await@7.14.5': + resolution: {integrity: sha512-hx++upLv5U1rgYfwe1xBQUhRmU41NEvpUvrp8jkrSCdvGSnM5/qdRMtylJ6PG5OFkBaHkbTAKTnd3/YyESRHFw==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/plugin-syntax-typescript@7.23.3': + resolution: {integrity: sha512-9EiNjVJOMwCO+43TqoTrgQ8jMwcAd0sWyXi9RPfIsLTj4R2MADDDQXELhffaUx/uJv2AYcxBgPwH6j4TIA4ytQ==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/template@7.23.9': + resolution: {integrity: sha512-+xrD2BWLpvHKNmX2QbpdpsBaWnRxahMwJjO+KZk2JOElj5nSmKezyS1B4u+QbHMTX69t4ukm6hh9lsYQ7GHCKA==} + engines: {node: '>=6.9.0'} + + '@babel/traverse@7.23.9': + resolution: {integrity: sha512-I/4UJ9vs90OkBtY6iiiTORVMyIhJ4kAVmsKo9KFc8UOxMeUfi2hvtIBsET5u9GizXE6/GFSuKCTNfgCswuEjRg==} + engines: {node: '>=6.9.0'} + + '@babel/types@7.23.9': + resolution: {integrity: sha512-dQjSq/7HaSjRM43FFGnv5keM2HsxpmyV1PfaSVm0nzzjwwTmjOe6J4bC8e3+pTEIgHaHj+1ZlLThRJ2auc/w1Q==} + engines: {node: '>=6.9.0'} + + '@bcoe/v8-coverage@0.2.3': + resolution: {integrity: sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw==} + + '@eslint-community/eslint-utils@4.4.1': + resolution: {integrity: sha512-s3O3waFUrMV8P/XaF/+ZTp1X9XBZW1a4B97ZnjQF2KYWaFD2A8KyFBsrsfSjEmjn3RGWAIuvlneuZm3CUK3jbA==} + engines: {node: ^12.22.0 || 
^14.17.0 || >=16.0.0} + peerDependencies: + eslint: ^6.0.0 || ^7.0.0 || >=8.0.0 + + '@eslint-community/regexpp@4.12.1': + resolution: {integrity: sha512-CCZCDJuduB9OUkFkY2IgppNZMi2lBQgD2qzwXkEia16cge2pijY/aXi96CJMquDMn3nJdlPV1A5KrJEXwfLNzQ==} + engines: {node: ^12.0.0 || ^14.0.0 || >=16.0.0} + + '@eslint/config-array@0.18.0': + resolution: {integrity: sha512-fTxvnS1sRMu3+JjXwJG0j/i4RT9u4qJ+lqS/yCGap4lH4zZGzQ7tu+xZqQmcMZq5OBZDL4QRxQzRjkWcGt8IVw==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + + '@eslint/core@0.7.0': + resolution: {integrity: sha512-xp5Jirz5DyPYlPiKat8jaq0EmYvDXKKpzTbxXMpT9eqlRJkRKIz9AGMdlvYjih+im+QlhWrpvVjl8IPC/lHlUw==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + + '@eslint/eslintrc@3.1.0': + resolution: {integrity: sha512-4Bfj15dVJdoy3RfZmmo86RK1Fwzn6SstsvK9JS+BaVKqC6QQQQyXekNaC+g+LKNgkQ+2VhGAzm6hO40AhMR3zQ==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + + '@eslint/js@9.13.0': + resolution: {integrity: sha512-IFLyoY4d72Z5y/6o/BazFBezupzI/taV8sGumxTAVw3lXG9A6md1Dc34T9s1FoD/an9pJH8RHbAxsaEbBed9lA==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + + '@eslint/object-schema@2.1.4': + resolution: {integrity: sha512-BsWiH1yFGjXXS2yvrf5LyuoSIIbPrGUWob917o+BTKuZ7qJdxX8aJLRxs1fS9n6r7vESrq1OUqb68dANcFXuQQ==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + + '@eslint/plugin-kit@0.2.2': + resolution: {integrity: sha512-CXtq5nR4Su+2I47WPOlWud98Y5Lv8Kyxp2ukhgFx/eW6Blm18VXJO5WuQylPugRo8nbluoi6GvvxBLqHcvqUUw==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + + '@humanfs/core@0.19.1': + resolution: {integrity: sha512-5DyQ4+1JEUzejeK1JGICcideyfUbGixgS9jNgex5nqkW+cY7WZhxBigmieN5Qnw9ZosSNVC9KQKyb+GUaGyKUA==} + engines: {node: '>=18.18.0'} + + '@humanfs/node@0.16.6': + resolution: {integrity: sha512-YuI2ZHQL78Q5HbhDiBA1X4LmYdXCKCMQIfw0pw7piHJwyREFebJUvrQN4cMssyES6x+vfUbx1CIpaQUKYdQZOw==} + engines: {node: '>=18.18.0'} + + '@humanwhocodes/module-importer@1.0.1': + resolution: {integrity: sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA==} + engines: {node: '>=12.22'} + + '@humanwhocodes/retry@0.3.1': + resolution: {integrity: sha512-JBxkERygn7Bv/GbN5Rv8Ul6LVknS+5Bp6RgDC/O8gEBU/yeH5Ui5C/OlWrTb6qct7LjjfT6Re2NxB0ln0yYybA==} + engines: {node: '>=18.18'} + + '@istanbuljs/load-nyc-config@1.1.0': + resolution: {integrity: sha512-VjeHSlIzpv/NyD3N0YuHfXOPDIixcA1q2ZV98wsMqcYlPmv2n3Yb2lYP9XMElnaFVXg5A7YLTeLu6V84uQDjmQ==} + engines: {node: '>=8'} + + '@istanbuljs/schema@0.1.3': + resolution: {integrity: sha512-ZXRY4jNvVgSVQ8DL3LTcakaAtXwTVUxE81hslsyD2AtoXW/wVob10HkOJ1X/pAlcI7D+2YoZKg5do8G/w6RYgA==} + engines: {node: '>=8'} + + '@jest/console@29.7.0': + resolution: {integrity: sha512-5Ni4CU7XHQi32IJ398EEP4RrB8eV09sXP2ROqD4bksHrnTree52PsxvX8tpL8LvTZ3pFzXyPbNQReSN41CAhOg==} + engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} + + '@jest/core@29.7.0': + resolution: {integrity: sha512-n7aeXWKMnGtDA48y8TLWJPJmLmmZ642Ceo78cYWEpiD7FzDgmNDV/GCVRorPABdXLJZ/9wzzgZAlHjXjxDHGsg==} + engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} + peerDependencies: + node-notifier: ^8.0.1 || ^9.0.0 || ^10.0.0 + peerDependenciesMeta: + node-notifier: + optional: true + + '@jest/environment@29.7.0': + resolution: {integrity: sha512-aQIfHDq33ExsN4jP1NWGXhxgQ/wixs60gDiKO+XVMd8Mn0NWPWgc34ZQDTb2jKaUWQ7MuwoitXAsN2XVXNMpAw==} + engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} + + '@jest/expect-utils@29.7.0': + resolution: {integrity: sha512-GlsNBWiFQFCVi9QVSx7f5AgMeLxe9YCCs5PuP2O2LdjDAA8Jh9eX7lA1Jq/xdXw3Wb3hyvlFNfZIfcRetSzYcA==} + engines: {node: ^14.15.0 || 
^16.10.0 || >=18.0.0} + + '@jest/expect@29.7.0': + resolution: {integrity: sha512-8uMeAMycttpva3P1lBHB8VciS9V0XAr3GymPpipdyQXbBcuhkLQOSe8E/p92RyAdToS6ZD1tFkX+CkhoECE0dQ==} + engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} + + '@jest/fake-timers@29.7.0': + resolution: {integrity: sha512-q4DH1Ha4TTFPdxLsqDXK1d3+ioSL7yL5oCMJZgDYm6i+6CygW5E5xVr/D1HdsGxjt1ZWSfUAs9OxSB/BNelWrQ==} + engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} + + '@jest/globals@29.7.0': + resolution: {integrity: sha512-mpiz3dutLbkW2MNFubUGUEVLkTGiqW6yLVTA+JbP6fI6J5iL9Y0Nlg8k95pcF8ctKwCS7WVxteBs29hhfAotzQ==} + engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} + + '@jest/reporters@29.7.0': + resolution: {integrity: sha512-DApq0KJbJOEzAFYjHADNNxAE3KbhxQB1y5Kplb5Waqw6zVbuWatSnMjE5gs8FUgEPmNsnZA3NCWl9NG0ia04Pg==} + engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} + peerDependencies: + node-notifier: ^8.0.1 || ^9.0.0 || ^10.0.0 + peerDependenciesMeta: + node-notifier: + optional: true + + '@jest/schemas@29.6.3': + resolution: {integrity: sha512-mo5j5X+jIZmJQveBKeS/clAueipV7KgiX1vMgCxam1RNYiqE1w62n0/tJJnHtjW8ZHcQco5gY85jA3mi0L+nSA==} + engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} + + '@jest/source-map@29.6.3': + resolution: {integrity: sha512-MHjT95QuipcPrpLM+8JMSzFx6eHp5Bm+4XeFDJlwsvVBjmKNiIAvasGK2fxz2WbGRlnvqehFbh07MMa7n3YJnw==} + engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} + + '@jest/test-result@29.7.0': + resolution: {integrity: sha512-Fdx+tv6x1zlkJPcWXmMDAG2HBnaR9XPSd5aDWQVsfrZmLVT3lU1cwyxLgRmXR9yrq4NBoEm9BMsfgFzTQAbJYA==} + engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} + + '@jest/test-sequencer@29.7.0': + resolution: {integrity: sha512-GQwJ5WZVrKnOJuiYiAF52UNUJXgTZx1NHjFSEB0qEMmSZKAkdMoIzw/Cj6x6NF4AvV23AUqDpFzQkN/eYCYTxw==} + engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} + + '@jest/transform@29.7.0': + resolution: {integrity: sha512-ok/BTPFzFKVMwO5eOHRrvnBVHdRy9IrsrW1GpMaQ9MCnilNLXQKmAX8s1YXDFaai9xJpac2ySzV0YeRRECr2Vw==} + engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} + + '@jest/types@29.6.3': + resolution: {integrity: sha512-u3UPsIilWKOM3F9CXtrG8LEJmNxwoCQC/XVj4IKYXvvpx7QIi/Kg1LI5uDmDpKlac62NUtX7eLjRh+jVZcLOzw==} + engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} + + '@jridgewell/gen-mapping@0.3.3': + resolution: {integrity: sha512-HLhSWOLRi875zjjMG/r+Nv0oCW8umGb0BgEhyX3dDX3egwZtB8PqLnjz3yedt8R5StBrzcg4aBpnh8UA9D1BoQ==} + engines: {node: '>=6.0.0'} + + '@jridgewell/resolve-uri@3.1.1': + resolution: {integrity: sha512-dSYZh7HhCDtCKm4QakX0xFpsRDqjjtZf/kjI/v3T3Nwt5r8/qz/M19F9ySyOqU94SXBmeG9ttTul+YnR4LOxFA==} + engines: {node: '>=6.0.0'} + + '@jridgewell/set-array@1.1.2': + resolution: {integrity: sha512-xnkseuNADM0gt2bs+BvhO0p78Mk762YnZdsuzFV018NoG1Sj1SCQvpSqa7XUaTam5vAGasABV9qXASMKnFMwMw==} + engines: {node: '>=6.0.0'} + + '@jridgewell/sourcemap-codec@1.4.15': + resolution: {integrity: sha512-eF2rxCRulEKXHTRiDrDy6erMYWqNw4LPdQ8UQA4huuxaQsVeRPFl2oM8oDGxMFhJUWZf9McpLtJasDDZb/Bpeg==} + + '@jridgewell/trace-mapping@0.3.22': + resolution: {integrity: sha512-Wf963MzWtA2sjrNt+g18IAln9lKnlRp+K2eH4jjIoF1wYeq3aMREpG09xhlhdzS0EjwU7qmUJYangWa+151vZw==} + + '@nodelib/fs.scandir@2.1.5': + resolution: {integrity: sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==} + engines: {node: '>= 8'} + + '@nodelib/fs.stat@2.0.5': + resolution: {integrity: sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==} + engines: {node: '>= 8'} + + '@nodelib/fs.walk@1.2.8': + resolution: {integrity: 
sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==} + engines: {node: '>= 8'} + + '@pkgr/core@0.1.1': + resolution: {integrity: sha512-cq8o4cWH0ibXh9VGi5P20Tu9XF/0fFXl9EUinr9QfTM7a7p0oTA4iJRCQWppXR1Pg8dSM0UCItCkPwsk9qWWYA==} + engines: {node: ^12.20.0 || ^14.18.0 || >=16.0.0} + + '@sinclair/typebox@0.27.8': + resolution: {integrity: sha512-+Fj43pSMwJs4KRrH/938Uf+uAELIgVBmQzg/q1YG10djyfA3TnrU8N8XzqCh/okZdszqBQTZf96idMfE5lnwTA==} + + '@sinonjs/commons@3.0.1': + resolution: {integrity: sha512-K3mCHKQ9sVh8o1C9cxkwxaOmXoAMlDxC1mYyHrjqOWEcBjYr76t96zL2zlj5dUGZ3HSw240X1qgH3Mjf1yJWpQ==} + + '@sinonjs/fake-timers@10.3.0': + resolution: {integrity: sha512-V4BG07kuYSUkTCSBHG8G8TNhM+F19jXFWnQtzj+we8DrkpSBCee9Z3Ms8yiGer/dlmhe35/Xdgyo3/0rQKg7YA==} + + '@types/babel__core@7.20.5': + resolution: {integrity: sha512-qoQprZvz5wQFJwMDqeseRXWv3rqMvhgpbXFfVyWhbx9X47POIA6i/+dXefEmZKoAgOaTdaIgNSMqMIU61yRyzA==} + + '@types/babel__generator@7.6.8': + resolution: {integrity: sha512-ASsj+tpEDsEiFr1arWrlN6V3mdfjRMZt6LtK/Vp/kreFLnr5QH5+DhvD5nINYZXzwJvXeGq+05iUXcAzVrqWtw==} + + '@types/babel__template@7.4.4': + resolution: {integrity: sha512-h/NUaSyG5EyxBIp8YRxo4RMe2/qQgvyowRwVMzhYhBCONbW8PUsg4lkFMrhgZhUe5z3L3MiLDuvyJ/CaPa2A8A==} + + '@types/babel__traverse@7.20.5': + resolution: {integrity: sha512-WXCyOcRtH37HAUkpXhUduaxdm82b4GSlyTqajXviN4EfiuPgNYR109xMCKvpl6zPIpua0DGlMEDCq+g8EdoheQ==} + + '@types/estree@1.0.6': + resolution: {integrity: sha512-AYnb1nQyY49te+VRAVgmzfcgjYS91mY5P0TKUDCLEM+gNnA+3T6rWITXRLYCpahpqSQbN5cE+gHpnPyXjHWxcw==} + + '@types/graceful-fs@4.1.9': + resolution: {integrity: sha512-olP3sd1qOEe5dXTSaFvQG+02VdRXcdytWLAZsAq1PecU8uqQAhkrnbli7DagjtXKW/Bl7YJbUsa8MPcuc8LHEQ==} + + '@types/istanbul-lib-coverage@2.0.6': + resolution: {integrity: sha512-2QF/t/auWm0lsy8XtKVPG19v3sSOQlJe/YHZgfjb/KBBHOGSV+J2q/S671rcq9uTBrLAXmZpqJiaQbMT+zNU1w==} + + '@types/istanbul-lib-report@3.0.3': + resolution: {integrity: sha512-NQn7AHQnk/RSLOxrBbGyJM/aVQ+pjj5HCgasFxc0K/KhoATfQ/47AyUl15I2yBUpihjmas+a+VJBOqecrFH+uA==} + + '@types/istanbul-reports@3.0.4': + resolution: {integrity: sha512-pk2B1NWalF9toCRu6gjBzR69syFjP4Od8WRAX+0mmf9lAjCRicLOWc+ZrxZHx/0XRjotgkF9t6iaMJ+aXcOdZQ==} + + '@types/jest@29.5.14': + resolution: {integrity: sha512-ZN+4sdnLUbo8EVvVc2ao0GFW6oVrQRPn4K2lglySj7APvSrgzxHiNNK99us4WDMi57xxA2yggblIAMNhXOotLQ==} + + '@types/json-schema@7.0.15': + resolution: {integrity: sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA==} + + '@types/node@22.8.6': + resolution: {integrity: sha512-tosuJYKrIqjQIlVCM4PEGxOmyg3FCPa/fViuJChnGeEIhjA46oy8FMVoF9su1/v8PNs2a8Q0iFNyOx0uOF91nw==} + + '@types/stack-utils@2.0.3': + resolution: {integrity: sha512-9aEbYZ3TbYMznPdcdr3SmIrLXwC/AKZXQeCf9Pgao5CKb8CyHuEX5jzWPTkvregvhRJHcpRO6BFoGW9ycaOkYw==} + + '@types/yargs-parser@21.0.3': + resolution: {integrity: sha512-I4q9QU9MQv4oEOz4tAHJtNz1cwuLxn2F3xcc2iV5WdqLPpUnj30aUuxt1mAxYTG+oe8CZMV/+6rU4S4gRDzqtQ==} + + '@types/yargs@17.0.32': + resolution: {integrity: sha512-xQ67Yc/laOG5uMfX/093MRlGGCIBzZMarVa+gfNKJxWAIgykYpVGkBdbqEzGDDfCrVUj6Hiff4mTZ5BA6TmAog==} + + '@typescript-eslint/eslint-plugin@8.12.2': + resolution: {integrity: sha512-gQxbxM8mcxBwaEmWdtLCIGLfixBMHhQjBqR8sVWNTPpcj45WlYL2IObS/DNMLH1DBP0n8qz+aiiLTGfopPEebw==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + peerDependencies: + '@typescript-eslint/parser': ^8.0.0 || ^8.0.0-alpha.0 + eslint: ^8.57.0 || ^9.0.0 + typescript: '*' + peerDependenciesMeta: + typescript: + optional: true + + 
'@typescript-eslint/parser@8.12.2': + resolution: {integrity: sha512-MrvlXNfGPLH3Z+r7Tk+Z5moZAc0dzdVjTgUgwsdGweH7lydysQsnSww3nAmsq8blFuRD5VRlAr9YdEFw3e6PBw==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + peerDependencies: + eslint: ^8.57.0 || ^9.0.0 + typescript: '*' + peerDependenciesMeta: + typescript: + optional: true + + '@typescript-eslint/scope-manager@8.12.2': + resolution: {integrity: sha512-gPLpLtrj9aMHOvxJkSbDBmbRuYdtiEbnvO25bCMza3DhMjTQw0u7Y1M+YR5JPbMsXXnSPuCf5hfq0nEkQDL/JQ==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + + '@typescript-eslint/type-utils@8.12.2': + resolution: {integrity: sha512-bwuU4TAogPI+1q/IJSKuD4shBLc/d2vGcRT588q+jzayQyjVK2X6v/fbR4InY2U2sgf8MEvVCqEWUzYzgBNcGQ==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + peerDependencies: + typescript: '*' + peerDependenciesMeta: + typescript: + optional: true + + '@typescript-eslint/types@8.12.2': + resolution: {integrity: sha512-VwDwMF1SZ7wPBUZwmMdnDJ6sIFk4K4s+ALKLP6aIQsISkPv8jhiw65sAK6SuWODN/ix+m+HgbYDkH+zLjrzvOA==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + + '@typescript-eslint/typescript-estree@8.12.2': + resolution: {integrity: sha512-mME5MDwGe30Pq9zKPvyduyU86PH7aixwqYR2grTglAdB+AN8xXQ1vFGpYaUSJ5o5P/5znsSBeNcs5g5/2aQwow==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + peerDependencies: + typescript: '*' + peerDependenciesMeta: + typescript: + optional: true + + '@typescript-eslint/utils@8.12.2': + resolution: {integrity: sha512-UTTuDIX3fkfAz6iSVa5rTuSfWIYZ6ATtEocQ/umkRSyC9O919lbZ8dcH7mysshrCdrAM03skJOEYaBugxN+M6A==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + peerDependencies: + eslint: ^8.57.0 || ^9.0.0 + + '@typescript-eslint/visitor-keys@8.12.2': + resolution: {integrity: sha512-PChz8UaKQAVNHghsHcPyx1OMHoFRUEA7rJSK/mDhdq85bk+PLsUHUBqTQTFt18VJZbmxBovM65fezlheQRsSDA==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + + acorn-jsx@5.3.2: + resolution: {integrity: sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==} + peerDependencies: + acorn: ^6.0.0 || ^7.0.0 || ^8.0.0 + + acorn@8.14.0: + resolution: {integrity: sha512-cl669nCJTZBsL97OF4kUQm5g5hC2uihk0NxY3WENAC0TYdILVkAyHymAntgxGkl7K+t0cXIrH5siy5S4XkFycA==} + engines: {node: '>=0.4.0'} + hasBin: true + + ajv@6.12.6: + resolution: {integrity: sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==} + + ansi-escapes@4.3.2: + resolution: {integrity: sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ==} + engines: {node: '>=8'} + + ansi-regex@5.0.1: + resolution: {integrity: sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==} + engines: {node: '>=8'} + + ansi-styles@4.3.0: + resolution: {integrity: sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==} + engines: {node: '>=8'} + + ansi-styles@5.2.0: + resolution: {integrity: sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==} + engines: {node: '>=10'} + + anymatch@3.1.3: + resolution: {integrity: sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==} + engines: {node: '>= 8'} + + argparse@1.0.10: + resolution: {integrity: sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==} + + argparse@2.0.1: + resolution: {integrity: sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==} + + 
async@3.2.6: + resolution: {integrity: sha512-htCUDlxyyCLMgaM3xXg0C0LW2xqfuQ6p05pCEIsXuyQ+a1koYKTuBMzRNwmybfLgvJDMd0r1LTn4+E0Ti6C2AA==} + + babel-jest@29.7.0: + resolution: {integrity: sha512-BrvGY3xZSwEcCzKvKsCi2GgHqDqsYkOP4/by5xCgIwGXQxIEh+8ew3gmrE1y7XRR6LHZIj6yLYnUi/mm2KXKBg==} + engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} + peerDependencies: + '@babel/core': ^7.8.0 + + babel-plugin-istanbul@6.1.1: + resolution: {integrity: sha512-Y1IQok9821cC9onCx5otgFfRm7Lm+I+wwxOx738M/WLPZ9Q42m4IG5W0FNX8WLL2gYMZo3JkuXIH2DOpWM+qwA==} + engines: {node: '>=8'} + + babel-plugin-jest-hoist@29.6.3: + resolution: {integrity: sha512-ESAc/RJvGTFEzRwOTT4+lNDk/GNHMkKbNzsvT0qKRfDyyYTskxB5rnU2njIDYVxXCBHHEI1c0YwHob3WaYujOg==} + engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} + + babel-preset-current-node-syntax@1.0.1: + resolution: {integrity: sha512-M7LQ0bxarkxQoN+vz5aJPsLBn77n8QgTFmo8WK0/44auK2xlCXrYcUxHFxgU7qW5Yzw/CjmLRK2uJzaCd7LvqQ==} + peerDependencies: + '@babel/core': ^7.0.0 + + babel-preset-jest@29.6.3: + resolution: {integrity: sha512-0B3bhxR6snWXJZtR/RliHTDPRgn1sNHOR0yVtq/IiQFyuOVjFS+wuio/R4gSNkyYmKmJB4wGZv2NZanmKmTnNA==} + engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} + peerDependencies: + '@babel/core': ^7.0.0 + + balanced-match@1.0.2: + resolution: {integrity: sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==} + + brace-expansion@1.1.11: + resolution: {integrity: sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==} + + brace-expansion@2.0.1: + resolution: {integrity: sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==} + + braces@3.0.2: + resolution: {integrity: sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==} + engines: {node: '>=8'} + + braces@3.0.3: + resolution: {integrity: sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==} + engines: {node: '>=8'} + + browserslist@4.22.3: + resolution: {integrity: sha512-UAp55yfwNv0klWNapjs/ktHoguxuQNGnOzxYmfnXIS+8AsRDZkSDxg7R1AX3GKzn078SBI5dzwzj/Yx0Or0e3A==} + engines: {node: ^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7} + hasBin: true + + bs-logger@0.2.6: + resolution: {integrity: sha512-pd8DCoxmbgc7hyPKOvxtqNcjYoOsABPQdcCUjGp3d42VR2CX1ORhk2A87oqqu5R1kk+76nsxZupkmyd+MVtCog==} + engines: {node: '>= 6'} + + bser@2.1.1: + resolution: {integrity: sha512-gQxTNE/GAfIIrmHLUE3oJyp5FO6HRBfhjnw4/wMmA63ZGDJnWBmgY/lyQBpnDUkGmAhbSe39tx2d/iTOAfglwQ==} + + buffer-from@1.1.2: + resolution: {integrity: sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==} + + callsites@3.1.0: + resolution: {integrity: sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==} + engines: {node: '>=6'} + + camelcase@5.3.1: + resolution: {integrity: sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==} + engines: {node: '>=6'} + + camelcase@6.3.0: + resolution: {integrity: sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA==} + engines: {node: '>=10'} + + caniuse-lite@1.0.30001581: + resolution: {integrity: sha512-whlTkwhqV2tUmP3oYhtNfaWGYHDdS3JYFQBKXxcUR9qqPWsRhFHhoISO2Xnl/g0xyKzht9mI1LZpiNWfMzHixQ==} + + chalk@4.1.2: + resolution: {integrity: sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==} + engines: {node: '>=10'} + + char-regex@1.0.2: + 
resolution: {integrity: sha512-kWWXztvZ5SBQV+eRgKFeh8q5sLuZY2+8WUIzlxWVTg+oGwY14qylx1KbKzHd8P6ZYkAg0xyIDU9JMHhyJMZ1jw==} + engines: {node: '>=10'} + + ci-info@3.9.0: + resolution: {integrity: sha512-NIxF55hv4nSqQswkAeiOi1r83xy8JldOFDTWiug55KBu9Jnblncd2U6ViHmYgHf01TPZS77NJBhBMKdWj9HQMQ==} + engines: {node: '>=8'} + + cjs-module-lexer@1.2.3: + resolution: {integrity: sha512-0TNiGstbQmCFwt4akjjBg5pLRTSyj/PkWQ1ZoO2zntmg9yLqSRxwEa4iCfQLGjqhiqBfOJa7W/E8wfGrTDmlZQ==} + + cliui@8.0.1: + resolution: {integrity: sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==} + engines: {node: '>=12'} + + co@4.6.0: + resolution: {integrity: sha512-QVb0dM5HvG+uaxitm8wONl7jltx8dqhfU33DcqtOZcLSVIKSDDLDi7+0LbAKiyI8hD9u42m2YxXSkMGWThaecQ==} + engines: {iojs: '>= 1.0.0', node: '>= 0.12.0'} + + collect-v8-coverage@1.0.2: + resolution: {integrity: sha512-lHl4d5/ONEbLlJvaJNtsF/Lz+WvB07u2ycqTYbdrq7UypDXailES4valYb2eWiJFxZlVmpGekfqoxQhzyFdT4Q==} + + color-convert@2.0.1: + resolution: {integrity: sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==} + engines: {node: '>=7.0.0'} + + color-name@1.1.4: + resolution: {integrity: sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==} + + concat-map@0.0.1: + resolution: {integrity: sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==} + + convert-source-map@2.0.0: + resolution: {integrity: sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==} + + create-jest@29.7.0: + resolution: {integrity: sha512-Adz2bdH0Vq3F53KEMJOoftQFutWCukm6J24wbPWRO4k1kMY7gS7ds/uoJkNuV8wDCtWWnuwGcJwpWcih+zEW1Q==} + engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} + hasBin: true + + cross-spawn@7.0.3: + resolution: {integrity: sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==} + engines: {node: '>= 8'} + + debug@4.3.7: + resolution: {integrity: sha512-Er2nc/H7RrMXZBFCEim6TCmMk02Z8vLC2Rbi1KEBggpo0fS6l0S1nnapwmIi3yW/+GOJap1Krg4w0Hg80oCqgQ==} + engines: {node: '>=6.0'} + peerDependencies: + supports-color: '*' + peerDependenciesMeta: + supports-color: + optional: true + + dedent@1.5.1: + resolution: {integrity: sha512-+LxW+KLWxu3HW3M2w2ympwtqPrqYRzU8fqi6Fhd18fBALe15blJPI/I4+UHveMVG6lJqB4JNd4UG0S5cnVHwIg==} + peerDependencies: + babel-plugin-macros: ^3.1.0 + peerDependenciesMeta: + babel-plugin-macros: + optional: true + + deep-is@0.1.4: + resolution: {integrity: sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==} + + deepmerge@4.3.1: + resolution: {integrity: sha512-3sUqbMEc77XqpdNO7FRyRog+eW3ph+GYCbj+rK+uYyRMuwsVy0rMiVtPn+QJlKFvWP/1PYpapqYn0Me2knFn+A==} + engines: {node: '>=0.10.0'} + + detect-newline@3.1.0: + resolution: {integrity: sha512-TLz+x/vEXm/Y7P7wn1EJFNLxYpUD4TgMosxY6fAVJUnJMbupHBOncxyWUG9OpTaH9EBD7uFI5LfEgmMOc54DsA==} + engines: {node: '>=8'} + + diff-sequences@29.6.3: + resolution: {integrity: sha512-EjePK1srD3P08o2j4f0ExnylqRs5B9tJjcp9t1krH2qRi8CCdsYfwe9JgSLurFBWwq4uOlipzfk5fHNvwFKr8Q==} + engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} + + ejs@3.1.10: + resolution: {integrity: sha512-UeJmFfOrAQS8OJWPZ4qtgHyWExa088/MtK5UEyoJGFH67cDEXkZSviOiKRCZ4Xij0zxI3JECgYs3oKx+AizQBA==} + engines: {node: '>=0.10.0'} + hasBin: true + + electron-to-chromium@1.4.648: + resolution: {integrity: sha512-EmFMarXeqJp9cUKu/QEciEApn0S/xRcpZWuAm32U7NgoZCimjsilKXHRO9saeEW55eHZagIDg6XTUOv32w9pjg==} + + emittery@0.13.1: 
+ resolution: {integrity: sha512-DeWwawk6r5yR9jFgnDKYt4sLS0LmHJJi3ZOnb5/JdbYwj3nW+FxQnHIjhBKz8YLC7oRNPVM9NQ47I3CVx34eqQ==} + engines: {node: '>=12'} + + emoji-regex@8.0.0: + resolution: {integrity: sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==} + + error-ex@1.3.2: + resolution: {integrity: sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==} + + escalade@3.1.1: + resolution: {integrity: sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==} + engines: {node: '>=6'} + + escape-string-regexp@2.0.0: + resolution: {integrity: sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w==} + engines: {node: '>=8'} + + escape-string-regexp@4.0.0: + resolution: {integrity: sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==} + engines: {node: '>=10'} + + eslint-plugin-prettier@5.2.1: + resolution: {integrity: sha512-gH3iR3g4JfF+yYPaJYkN7jEl9QbweL/YfkoRlNnuIEHEz1vHVlCmWOS+eGGiRuzHQXdJFCOTxRgvju9b8VUmrw==} + engines: {node: ^14.18.0 || >=16.0.0} + peerDependencies: + '@types/eslint': '>=8.0.0' + eslint: '>=8.0.0' + eslint-config-prettier: '*' + prettier: '>=3.0.0' + peerDependenciesMeta: + '@types/eslint': + optional: true + eslint-config-prettier: + optional: true + + eslint-scope@8.2.0: + resolution: {integrity: sha512-PHlWUfG6lvPc3yvP5A4PNyBL1W8fkDUccmI21JUu/+GKZBoH/W5u6usENXUrWFRsyoW5ACUjFGgAFQp5gUlb/A==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + + eslint-visitor-keys@3.4.3: + resolution: {integrity: sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag==} + engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} + + eslint-visitor-keys@4.2.0: + resolution: {integrity: sha512-UyLnSehNt62FFhSwjZlHmeokpRK59rcz29j+F1/aDgbkbRTk7wIc9XzdoasMUbRNKDM0qQt/+BJ4BrpFeABemw==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + + eslint@9.13.0: + resolution: {integrity: sha512-EYZK6SX6zjFHST/HRytOdA/zE72Cq/bfw45LSyuwrdvcclb/gqV8RRQxywOBEWO2+WDpva6UZa4CcDeJKzUCFA==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + hasBin: true + peerDependencies: + jiti: '*' + peerDependenciesMeta: + jiti: + optional: true + + espree@10.3.0: + resolution: {integrity: sha512-0QYC8b24HWY8zjRnDTL6RiHfDbAWn63qb4LMj1Z4b076A4une81+z03Kg7l7mn/48PUTqoLptSXez8oknU8Clg==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + + esprima@4.0.1: + resolution: {integrity: sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==} + engines: {node: '>=4'} + hasBin: true + + esquery@1.6.0: + resolution: {integrity: sha512-ca9pw9fomFcKPvFLXhBKUK90ZvGibiGOvRJNbjljY7s7uq/5YO4BOzcYtJqExdx99rF6aAcnRxHmcUHcz6sQsg==} + engines: {node: '>=0.10'} + + esrecurse@4.3.0: + resolution: {integrity: sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==} + engines: {node: '>=4.0'} + + estraverse@5.3.0: + resolution: {integrity: sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==} + engines: {node: '>=4.0'} + + esutils@2.0.3: + resolution: {integrity: sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==} + engines: {node: '>=0.10.0'} + + execa@5.1.1: + resolution: {integrity: sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==} + engines: {node: '>=10'} + + exit@0.1.2: + resolution: {integrity: 
sha512-Zk/eNKV2zbjpKzrsQ+n1G6poVbErQxJ0LBOJXaKZ1EViLzH+hrLu9cdXI4zw9dBQJslwBEpbQ2P1oS7nDxs6jQ==} + engines: {node: '>= 0.8.0'} + + expect@29.7.0: + resolution: {integrity: sha512-2Zks0hf1VLFYI1kbh0I5jP3KHHyCHpkfyHBzsSXRFgl/Bg9mWYfMW8oD+PdMPlEwy5HNsR9JutYy6pMeOh61nw==} + engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} + + fast-deep-equal@3.1.3: + resolution: {integrity: sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==} + + fast-diff@1.3.0: + resolution: {integrity: sha512-VxPP4NqbUjj6MaAOafWeUn2cXWLcCtljklUtZf0Ind4XQ+QPtmA0b18zZy0jIQx+ExRVCR/ZQpBmik5lXshNsw==} + + fast-glob@3.3.2: + resolution: {integrity: sha512-oX2ruAFQwf/Orj8m737Y5adxDQO0LAB7/S5MnxCdTNDd4p6BsyIVsv9JQsATbTSq8KHRpLwIHbVlUNatxd+1Ow==} + engines: {node: '>=8.6.0'} + + fast-json-stable-stringify@2.1.0: + resolution: {integrity: sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==} + + fast-levenshtein@2.0.6: + resolution: {integrity: sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==} + + fastq@1.17.1: + resolution: {integrity: sha512-sRVD3lWVIXWg6By68ZN7vho9a1pQcN/WBFaAAsDDFzlJjvoGx0P8z7V1t72grFJfJhu3YPZBuu25f7Kaw2jN1w==} + + fb-watchman@2.0.2: + resolution: {integrity: sha512-p5161BqbuCaSnB8jIbzQHOlpgsPmK5rJVDfDKO91Axs5NC1uu3HRQm6wt9cd9/+GtQQIO53JdGXXoyDpTAsgYA==} + + file-entry-cache@8.0.0: + resolution: {integrity: sha512-XXTUwCvisa5oacNGRP9SfNtYBNAMi+RPwBFmblZEF7N7swHYQS6/Zfk7SRwx4D5j3CH211YNRco1DEMNVfZCnQ==} + engines: {node: '>=16.0.0'} + + filelist@1.0.4: + resolution: {integrity: sha512-w1cEuf3S+DrLCQL7ET6kz+gmlJdbq9J7yXCSjK/OZCPA+qEN1WyF4ZAf0YYJa4/shHJra2t/d/r8SV4Ji+x+8Q==} + + fill-range@7.0.1: + resolution: {integrity: sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==} + engines: {node: '>=8'} + + fill-range@7.1.1: + resolution: {integrity: sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==} + engines: {node: '>=8'} + + find-up@4.1.0: + resolution: {integrity: sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==} + engines: {node: '>=8'} + + find-up@5.0.0: + resolution: {integrity: sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==} + engines: {node: '>=10'} + + flat-cache@4.0.1: + resolution: {integrity: sha512-f7ccFPK3SXFHpx15UIGyRJ/FJQctuKZ0zVuN3frBo4HnK3cay9VEW0R6yPYFHC0AgqhukPzKjq22t5DmAyqGyw==} + engines: {node: '>=16'} + + flatted@3.3.1: + resolution: {integrity: sha512-X8cqMLLie7KsNUDSdzeN8FYK9rEt4Dt67OsG/DNGnYTSDBG4uFAJFBnUeiV+zCVAvwFy56IjM9sH51jVaEhNxw==} + + fs.realpath@1.0.0: + resolution: {integrity: sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==} + + fsevents@2.3.3: + resolution: {integrity: sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==} + engines: {node: ^8.16.0 || ^10.6.0 || >=11.0.0} + os: [darwin] + + function-bind@1.1.2: + resolution: {integrity: sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==} + + gensync@1.0.0-beta.2: + resolution: {integrity: sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==} + engines: {node: '>=6.9.0'} + + get-caller-file@2.0.5: + resolution: {integrity: sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==} + engines: {node: 6.* || 
8.* || >= 10.*} + + get-package-type@0.1.0: + resolution: {integrity: sha512-pjzuKtY64GYfWizNAJ0fr9VqttZkNiK2iS430LtIHzjBEr6bX8Am2zm4sW4Ro5wjWW5cAlRL1qAMTcXbjNAO2Q==} + engines: {node: '>=8.0.0'} + + get-stream@6.0.1: + resolution: {integrity: sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==} + engines: {node: '>=10'} + + glob-parent@5.1.2: + resolution: {integrity: sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==} + engines: {node: '>= 6'} + + glob-parent@6.0.2: + resolution: {integrity: sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==} + engines: {node: '>=10.13.0'} + + glob@7.2.3: + resolution: {integrity: sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==} + deprecated: Glob versions prior to v9 are no longer supported + + globals@11.12.0: + resolution: {integrity: sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==} + engines: {node: '>=4'} + + globals@14.0.0: + resolution: {integrity: sha512-oahGvuMGQlPw/ivIYBjVSrWAfWLBeku5tpPE2fOPLi+WHffIWbuh2tCjhyQhTBPMf5E9jDEH4FOmTYgYwbKwtQ==} + engines: {node: '>=18'} + + graceful-fs@4.2.11: + resolution: {integrity: sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==} + + graphemer@1.4.0: + resolution: {integrity: sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag==} + + has-flag@4.0.0: + resolution: {integrity: sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==} + engines: {node: '>=8'} + + hasown@2.0.0: + resolution: {integrity: sha512-vUptKVTpIJhcczKBbgnS+RtcuYMB8+oNzPK2/Hp3hanz8JmpATdmmgLgSaadVREkDm+e2giHwY3ZRkyjSIDDFA==} + engines: {node: '>= 0.4'} + + html-escaper@2.0.2: + resolution: {integrity: sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg==} + + human-signals@2.1.0: + resolution: {integrity: sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==} + engines: {node: '>=10.17.0'} + + ignore@5.3.2: + resolution: {integrity: sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g==} + engines: {node: '>= 4'} + + import-fresh@3.3.0: + resolution: {integrity: sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw==} + engines: {node: '>=6'} + + import-local@3.1.0: + resolution: {integrity: sha512-ASB07uLtnDs1o6EHjKpX34BKYDSqnFerfTOJL2HvMqF70LnxpjkzDB8J44oT9pu4AMPkQwf8jl6szgvNd2tRIg==} + engines: {node: '>=8'} + hasBin: true + + imurmurhash@0.1.4: + resolution: {integrity: sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==} + engines: {node: '>=0.8.19'} + + inflight@1.0.6: + resolution: {integrity: sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==} + deprecated: This module is not supported, and leaks memory. Do not use it. Check out lru-cache if you want a good and tested way to coalesce async requests by a key value, which is much more comprehensive and powerful. 
+ + inherits@2.0.4: + resolution: {integrity: sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==} + + is-arrayish@0.2.1: + resolution: {integrity: sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg==} + + is-core-module@2.13.1: + resolution: {integrity: sha512-hHrIjvZsftOsvKSn2TRYl63zvxsgE0K+0mYMoH6gD4omR5IWB2KynivBQczo3+wF1cCkjzvptnI9Q0sPU66ilw==} + + is-extglob@2.1.1: + resolution: {integrity: sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==} + engines: {node: '>=0.10.0'} + + is-fullwidth-code-point@3.0.0: + resolution: {integrity: sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==} + engines: {node: '>=8'} + + is-generator-fn@2.1.0: + resolution: {integrity: sha512-cTIB4yPYL/Grw0EaSzASzg6bBy9gqCofvWN8okThAYIxKJZC+udlRAmGbM0XLeniEJSs8uEgHPGuHSe1XsOLSQ==} + engines: {node: '>=6'} + + is-glob@4.0.3: + resolution: {integrity: sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==} + engines: {node: '>=0.10.0'} + + is-number@7.0.0: + resolution: {integrity: sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==} + engines: {node: '>=0.12.0'} + + is-stream@2.0.1: + resolution: {integrity: sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==} + engines: {node: '>=8'} + + isexe@2.0.0: + resolution: {integrity: sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==} + + istanbul-lib-coverage@3.2.2: + resolution: {integrity: sha512-O8dpsF+r0WV/8MNRKfnmrtCWhuKjxrq2w+jpzBL5UZKTi2LeVWnWOmWRxFlesJONmc+wLAGvKQZEOanko0LFTg==} + engines: {node: '>=8'} + + istanbul-lib-instrument@5.2.1: + resolution: {integrity: sha512-pzqtp31nLv/XFOzXGuvhCb8qhjmTVo5vjVk19XE4CRlSWz0KoeJ3bw9XsA7nOp9YBf4qHjwBxkDzKcME/J29Yg==} + engines: {node: '>=8'} + + istanbul-lib-instrument@6.0.1: + resolution: {integrity: sha512-EAMEJBsYuyyztxMxW3g7ugGPkrZsV57v0Hmv3mm1uQsmB+QnZuepg731CRaIgeUVSdmsTngOkSnauNF8p7FIhA==} + engines: {node: '>=10'} + + istanbul-lib-report@3.0.1: + resolution: {integrity: sha512-GCfE1mtsHGOELCU8e/Z7YWzpmybrx/+dSTfLrvY8qRmaY6zXTKWn6WQIjaAFw069icm6GVMNkgu0NzI4iPZUNw==} + engines: {node: '>=10'} + + istanbul-lib-source-maps@4.0.1: + resolution: {integrity: sha512-n3s8EwkdFIJCG3BPKBYvskgXGoy88ARzvegkitk60NxRdwltLOTaH7CUiMRXvwYorl0Q712iEjcWB+fK/MrWVw==} + engines: {node: '>=10'} + + istanbul-reports@3.1.6: + resolution: {integrity: sha512-TLgnMkKg3iTDsQ9PbPTdpfAK2DzjF9mqUG7RMgcQl8oFjad8ob4laGxv5XV5U9MAfx8D6tSJiUyuAwzLicaxlg==} + engines: {node: '>=8'} + + jake@10.9.2: + resolution: {integrity: sha512-2P4SQ0HrLQ+fw6llpLnOaGAvN2Zu6778SJMrCUwns4fOoG9ayrTiZk3VV8sCPkVZF8ab0zksVpS8FDY5pRCNBA==} + engines: {node: '>=10'} + hasBin: true + + jest-changed-files@29.7.0: + resolution: {integrity: sha512-fEArFiwf1BpQ+4bXSprcDc3/x4HSzL4al2tozwVpDFpsxALjLYdyiIK4e5Vz66GQJIbXJ82+35PtysofptNX2w==} + engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} + + jest-circus@29.7.0: + resolution: {integrity: sha512-3E1nCMgipcTkCocFwM90XXQab9bS+GMsjdpmPrlelaxwD93Ad8iVEjX/vvHPdLPnFf+L40u+5+iutRdA1N9myw==} + engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} + + jest-cli@29.7.0: + resolution: {integrity: sha512-OVVobw2IubN/GSYsxETi+gOe7Ka59EFMR/twOU3Jb2GnKKeMGJB5SGUUrEz3SFVmJASUdZUzy83sLNNQ2gZslg==} + engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} + hasBin: true + peerDependencies: + node-notifier: 
^8.0.1 || ^9.0.0 || ^10.0.0 + peerDependenciesMeta: + node-notifier: + optional: true + + jest-config@29.7.0: + resolution: {integrity: sha512-uXbpfeQ7R6TZBqI3/TxCU4q4ttk3u0PJeC+E0zbfSoSjq6bJ7buBPxzQPL0ifrkY4DNu4JUdk0ImlBUYi840eQ==} + engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} + peerDependencies: + '@types/node': '*' + ts-node: '>=9.0.0' + peerDependenciesMeta: + '@types/node': + optional: true + ts-node: + optional: true + + jest-diff@29.7.0: + resolution: {integrity: sha512-LMIgiIrhigmPrs03JHpxUh2yISK3vLFPkAodPeo0+BuF7wA2FoQbkEg1u8gBYBThncu7e1oEDUfIXVuTqLRUjw==} + engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} + + jest-docblock@29.7.0: + resolution: {integrity: sha512-q617Auw3A612guyaFgsbFeYpNP5t2aoUNLwBUbc/0kD1R4t9ixDbyFTHd1nok4epoVFpr7PmeWHrhvuV3XaJ4g==} + engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} + + jest-each@29.7.0: + resolution: {integrity: sha512-gns+Er14+ZrEoC5fhOfYCY1LOHHr0TI+rQUHZS8Ttw2l7gl+80eHc/gFf2Ktkw0+SIACDTeWvpFcv3B04VembQ==} + engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} + + jest-environment-node@29.7.0: + resolution: {integrity: sha512-DOSwCRqXirTOyheM+4d5YZOrWcdu0LNZ87ewUoywbcb2XR4wKgqiG8vNeYwhjFMbEkfju7wx2GYH0P2gevGvFw==} + engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} + + jest-get-type@29.6.3: + resolution: {integrity: sha512-zrteXnqYxfQh7l5FHyL38jL39di8H8rHoecLH3JNxH3BwOrBsNeabdap5e0I23lD4HHI8W5VFBZqG4Eaq5LNcw==} + engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} + + jest-haste-map@29.7.0: + resolution: {integrity: sha512-fP8u2pyfqx0K1rGn1R9pyE0/KTn+G7PxktWidOBTqFPLYX0b9ksaMFkhK5vrS3DVun09pckLdlx90QthlW7AmA==} + engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} + + jest-leak-detector@29.7.0: + resolution: {integrity: sha512-kYA8IJcSYtST2BY9I+SMC32nDpBT3J2NvWJx8+JCuCdl/CR1I4EKUJROiP8XtCcxqgTTBGJNdbB1A8XRKbTetw==} + engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} + + jest-matcher-utils@29.7.0: + resolution: {integrity: sha512-sBkD+Xi9DtcChsI3L3u0+N0opgPYnCRPtGcQYrgXmR+hmt/fYfWAL0xRXYU8eWOdfuLgBe0YCW3AFtnRLagq/g==} + engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} + + jest-message-util@29.7.0: + resolution: {integrity: sha512-GBEV4GRADeP+qtB2+6u61stea8mGcOT4mCtrYISZwfu9/ISHFJ/5zOMXYbpBE9RsS5+Gb63DW4FgmnKJ79Kf6w==} + engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} + + jest-mock@29.7.0: + resolution: {integrity: sha512-ITOMZn+UkYS4ZFh83xYAOzWStloNzJFO2s8DWrE4lhtGD+AorgnbkiKERe4wQVBydIGPx059g6riW5Btp6Llnw==} + engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} + + jest-pnp-resolver@1.2.3: + resolution: {integrity: sha512-+3NpwQEnRoIBtx4fyhblQDPgJI0H1IEIkX7ShLUjPGA7TtUTvI1oiKi3SR4oBR0hQhQR80l4WAe5RrXBwWMA8w==} + engines: {node: '>=6'} + peerDependencies: + jest-resolve: '*' + peerDependenciesMeta: + jest-resolve: + optional: true + + jest-regex-util@29.6.3: + resolution: {integrity: sha512-KJJBsRCyyLNWCNBOvZyRDnAIfUiRJ8v+hOBQYGn8gDyF3UegwiP4gwRR3/SDa42g1YbVycTidUF3rKjyLFDWbg==} + engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} + + jest-resolve-dependencies@29.7.0: + resolution: {integrity: sha512-un0zD/6qxJ+S0et7WxeI3H5XSe9lTBBR7bOHCHXkKR6luG5mwDDlIzVQ0V5cZCuoTgEdcdwzTghYkTWfubi+nA==} + engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} + + jest-resolve@29.7.0: + resolution: {integrity: sha512-IOVhZSrg+UvVAshDSDtHyFCCBUl/Q3AAJv8iZ6ZjnZ74xzvwuzLXid9IIIPgTnY62SJjfuupMKZsZQRsCvxEgA==} + engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} + + jest-runner@29.7.0: + resolution: {integrity: sha512-fsc4N6cPCAahybGBfTRcq5wFR6fpLznMg47sY5aDpsoejOcVYFb07AHuSnR0liMcPTgBsA3ZJL6kFOjPdoNipQ==} + engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} + + 
jest-runtime@29.7.0: + resolution: {integrity: sha512-gUnLjgwdGqW7B4LvOIkbKs9WGbn+QLqRQQ9juC6HndeDiezIwhDP+mhMwHWCEcfQ5RUXa6OPnFF8BJh5xegwwQ==} + engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} + + jest-snapshot@29.7.0: + resolution: {integrity: sha512-Rm0BMWtxBcioHr1/OX5YCP8Uov4riHvKPknOGs804Zg9JGZgmIBkbtlxJC/7Z4msKYVbIJtfU+tKb8xlYNfdkw==} + engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} + + jest-util@29.7.0: + resolution: {integrity: sha512-z6EbKajIpqGKU56y5KBUgy1dt1ihhQJgWzUlZHArA/+X2ad7Cb5iF+AK1EWVL/Bo7Rz9uurpqw6SiBCefUbCGA==} + engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} + + jest-validate@29.7.0: + resolution: {integrity: sha512-ZB7wHqaRGVw/9hST/OuFUReG7M8vKeq0/J2egIGLdvjHCmYqGARhzXmtgi+gVeZ5uXFF219aOc3Ls2yLg27tkw==} + engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} + + jest-watcher@29.7.0: + resolution: {integrity: sha512-49Fg7WXkU3Vl2h6LbLtMQ/HyB6rXSIX7SqvBLQmssRBGN9I0PNvPmAmCWSOY6SOvrjhI/F7/bGAv9RtnsPA03g==} + engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} + + jest-worker@29.7.0: + resolution: {integrity: sha512-eIz2msL/EzL9UFTFFx7jBTkeZfku0yUAyZZZmJ93H2TYEiroIx2PQjEXcwYtYl8zXCxb+PAmA2hLIt/6ZEkPHw==} + engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} + + jest@29.7.0: + resolution: {integrity: sha512-NIy3oAFp9shda19hy4HK0HRTWKtPJmGdnvywu01nOqNC2vZg+Z+fvJDxpMQA88eb2I9EcafcdjYgsDthnYTvGw==} + engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} + hasBin: true + peerDependencies: + node-notifier: ^8.0.1 || ^9.0.0 || ^10.0.0 + peerDependenciesMeta: + node-notifier: + optional: true + + js-tokens@4.0.0: + resolution: {integrity: sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==} + + js-yaml@3.14.1: + resolution: {integrity: sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==} + hasBin: true + + js-yaml@4.1.0: + resolution: {integrity: sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==} + hasBin: true + + jsesc@2.5.2: + resolution: {integrity: sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA==} + engines: {node: '>=4'} + hasBin: true + + json-buffer@3.0.1: + resolution: {integrity: sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ==} + + json-parse-even-better-errors@2.3.1: + resolution: {integrity: sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==} + + json-schema-traverse@0.4.1: + resolution: {integrity: sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==} + + json-stable-stringify-without-jsonify@1.0.1: + resolution: {integrity: sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==} + + json5@2.2.3: + resolution: {integrity: sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==} + engines: {node: '>=6'} + hasBin: true + + keyv@4.5.4: + resolution: {integrity: sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw==} + + kleur@3.0.3: + resolution: {integrity: sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w==} + engines: {node: '>=6'} + + lemmy-js-client@0.20.0-alpha.11: + resolution: {integrity: sha512-iRSG4xHMjPDIreQqVIoJ5JrMY71uk07G0Zbgyf068xKbib22J3+i1x/XgCTs6tiHlqTnw1Ig/KRq7p7qJoA4uw==} + + leven@3.1.0: + resolution: {integrity: 
sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A==} + engines: {node: '>=6'} + + levn@0.4.1: + resolution: {integrity: sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==} + engines: {node: '>= 0.8.0'} + + lines-and-columns@1.2.4: + resolution: {integrity: sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==} + + locate-path@5.0.0: + resolution: {integrity: sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==} + engines: {node: '>=8'} + + locate-path@6.0.0: + resolution: {integrity: sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==} + engines: {node: '>=10'} + + lodash.memoize@4.1.2: + resolution: {integrity: sha512-t7j+NzmgnQzTAYXcsHYLgimltOV1MXHtlOWf6GjL9Kj8GK5FInw5JotxvbOs+IvV1/Dzo04/fCGfLVs7aXb4Ag==} + + lodash.merge@4.6.2: + resolution: {integrity: sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==} + + lru-cache@5.1.1: + resolution: {integrity: sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==} + + make-dir@4.0.0: + resolution: {integrity: sha512-hXdUTZYIVOt1Ex//jAQi+wTZZpUpwBj/0QsOzqegb3rGMMeJiSEu5xLHnYfBrRV4RH2+OCSOO95Is/7x1WJ4bw==} + engines: {node: '>=10'} + + make-error@1.3.6: + resolution: {integrity: sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==} + + makeerror@1.0.12: + resolution: {integrity: sha512-JmqCvUhmt43madlpFzG4BQzG2Z3m6tvQDNKdClZnO3VbIudJYmxsT0FNJMeiB2+JTSlTQTSbU8QdesVmwJcmLg==} + + merge-stream@2.0.0: + resolution: {integrity: sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==} + + merge2@1.4.1: + resolution: {integrity: sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==} + engines: {node: '>= 8'} + + micromatch@4.0.5: + resolution: {integrity: sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA==} + engines: {node: '>=8.6'} + + micromatch@4.0.8: + resolution: {integrity: sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==} + engines: {node: '>=8.6'} + + mimic-fn@2.1.0: + resolution: {integrity: sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==} + engines: {node: '>=6'} + + minimatch@3.1.2: + resolution: {integrity: sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==} + + minimatch@5.1.6: + resolution: {integrity: sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g==} + engines: {node: '>=10'} + + minimatch@9.0.5: + resolution: {integrity: sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==} + engines: {node: '>=16 || 14 >=14.17'} + + ms@2.1.3: + resolution: {integrity: sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==} + + natural-compare@1.4.0: + resolution: {integrity: sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==} + + node-int64@0.4.0: + resolution: {integrity: sha512-O5lz91xSOeoXP6DulyHfllpq+Eg00MWitZIbtPfoSEvqIHdl5gfcY6hYzDWnj0qD5tz52PI08u9qUvSVeUBeHw==} + + node-releases@2.0.14: + resolution: {integrity: 
sha512-y10wOWt8yZpqXmOgRo77WaHEmhYQYGNA6y421PKsKYWEK8aW+cqAphborZDhqfyKrbZEN92CN1X2KbafY2s7Yw==} + + normalize-path@3.0.0: + resolution: {integrity: sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==} + engines: {node: '>=0.10.0'} + + npm-run-path@4.0.1: + resolution: {integrity: sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==} + engines: {node: '>=8'} + + once@1.4.0: + resolution: {integrity: sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==} + + onetime@5.1.2: + resolution: {integrity: sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==} + engines: {node: '>=6'} + + optionator@0.9.4: + resolution: {integrity: sha512-6IpQ7mKUxRcZNLIObR0hz7lxsapSSIYNZJwXPGeF0mTVqGKFIXj1DQcMoT22S3ROcLyY/rz0PWaWZ9ayWmad9g==} + engines: {node: '>= 0.8.0'} + + p-limit@2.3.0: + resolution: {integrity: sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==} + engines: {node: '>=6'} + + p-limit@3.1.0: + resolution: {integrity: sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==} + engines: {node: '>=10'} + + p-locate@4.1.0: + resolution: {integrity: sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==} + engines: {node: '>=8'} + + p-locate@5.0.0: + resolution: {integrity: sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==} + engines: {node: '>=10'} + + p-try@2.2.0: + resolution: {integrity: sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==} + engines: {node: '>=6'} + + parent-module@1.0.1: + resolution: {integrity: sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==} + engines: {node: '>=6'} + + parse-json@5.2.0: + resolution: {integrity: sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==} + engines: {node: '>=8'} + + path-exists@4.0.0: + resolution: {integrity: sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==} + engines: {node: '>=8'} + + path-is-absolute@1.0.1: + resolution: {integrity: sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==} + engines: {node: '>=0.10.0'} + + path-key@3.1.1: + resolution: {integrity: sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==} + engines: {node: '>=8'} + + path-parse@1.0.7: + resolution: {integrity: sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==} + + picocolors@1.1.1: + resolution: {integrity: sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==} + + picomatch@2.3.1: + resolution: {integrity: sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==} + engines: {node: '>=8.6'} + + pirates@4.0.6: + resolution: {integrity: sha512-saLsH7WeYYPiD25LDuLRRY/i+6HaPYr6G1OUlN39otzkSTxKnubR9RTxS3/Kk50s1g2JTgFwWQDQyplC5/SHZg==} + engines: {node: '>= 6'} + + pkg-dir@4.2.0: + resolution: {integrity: sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ==} + engines: {node: '>=8'} + + prelude-ls@1.2.1: + resolution: {integrity: 
sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==} + engines: {node: '>= 0.8.0'} + + prettier-linter-helpers@1.0.0: + resolution: {integrity: sha512-GbK2cP9nraSSUF9N2XwUwqfzlAFlMNYYl+ShE/V+H8a9uNl/oUqB1w2EL54Jh0OlyRSd8RfWYJ3coVS4TROP2w==} + engines: {node: '>=6.0.0'} + + prettier@3.3.3: + resolution: {integrity: sha512-i2tDNA0O5IrMO757lfrdQZCc2jPNDVntV0m/+4whiDfWaTKfMNgR7Qz0NAeGz/nRqF4m5/6CLzbP4/liHt12Ew==} + engines: {node: '>=14'} + hasBin: true + + pretty-format@29.7.0: + resolution: {integrity: sha512-Pdlw/oPxN+aXdmM9R00JVC9WVFoCLTKJvDVLgmJ+qAffBMxsV85l/Lu7sNx4zSzPyoL2euImuEwHhOXdEgNFZQ==} + engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} + + prompts@2.4.2: + resolution: {integrity: sha512-NxNv/kLguCA7p3jE8oL2aEBsrJWgAakBpgmgK6lpPWV+WuOmY6r2/zbAVnP+T8bQlA0nzHXSJSJW0Hq7ylaD2Q==} + engines: {node: '>= 6'} + + punycode@2.3.1: + resolution: {integrity: sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==} + engines: {node: '>=6'} + + pure-rand@6.0.4: + resolution: {integrity: sha512-LA0Y9kxMYv47GIPJy6MI84fqTd2HmYZI83W/kM/SkKfDlajnZYfmXFTxkbY+xSBPkLJxltMa9hIkmdc29eguMA==} + + queue-microtask@1.2.3: + resolution: {integrity: sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==} + + react-is@18.3.1: + resolution: {integrity: sha512-/LLMVyas0ljjAtoYiPqYiL8VWXzUUdThrmU5+n20DZv+a+ClRoevUzw5JxU+Ieh5/c87ytoTBV9G1FiKfNJdmg==} + + require-directory@2.1.1: + resolution: {integrity: sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==} + engines: {node: '>=0.10.0'} + + resolve-cwd@3.0.0: + resolution: {integrity: sha512-OrZaX2Mb+rJCpH/6CpSqt9xFVpN++x01XnN2ie9g6P5/3xelLAkXWVADpdz1IHD/KFfEXyE6V0U01OQ3UO2rEg==} + engines: {node: '>=8'} + + resolve-from@4.0.0: + resolution: {integrity: sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==} + engines: {node: '>=4'} + + resolve-from@5.0.0: + resolution: {integrity: sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==} + engines: {node: '>=8'} + + resolve.exports@2.0.2: + resolution: {integrity: sha512-X2UW6Nw3n/aMgDVy+0rSqgHlv39WZAlZrXCdnbyEiKm17DSqHX4MmQMaST3FbeWR5FTuRcUwYAziZajji0Y7mg==} + engines: {node: '>=10'} + + resolve@1.22.8: + resolution: {integrity: sha512-oKWePCxqpd6FlLvGV1VU0x7bkPmmCNolxzjMf4NczoDnQcIWrAF+cPtZn5i6n+RfD2d9i0tzpKnG6Yk168yIyw==} + hasBin: true + + reusify@1.0.4: + resolution: {integrity: sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==} + engines: {iojs: '>=1.0.0', node: '>=0.10.0'} + + run-parallel@1.2.0: + resolution: {integrity: sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==} + + semver@6.3.1: + resolution: {integrity: sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==} + hasBin: true + + semver@7.6.2: + resolution: {integrity: sha512-FNAIBWCx9qcRhoHcgcJ0gvU7SN1lYU2ZXuSfl04bSC5OpvDHFyJCjdNHomPXxjQlCBU67YW64PzY7/VIEH7F2w==} + engines: {node: '>=10'} + hasBin: true + + semver@7.6.3: + resolution: {integrity: sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==} + engines: {node: '>=10'} + hasBin: true + + shebang-command@2.0.0: + resolution: {integrity: sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==} + engines: {node: '>=8'} + + 
shebang-regex@3.0.0: + resolution: {integrity: sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==} + engines: {node: '>=8'} + + signal-exit@3.0.7: + resolution: {integrity: sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==} + + sisteransi@1.0.5: + resolution: {integrity: sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg==} + + slash@3.0.0: + resolution: {integrity: sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==} + engines: {node: '>=8'} + + source-map-support@0.5.13: + resolution: {integrity: sha512-SHSKFHadjVA5oR4PPqhtAVdcBWwRYVd6g6cAXnIbRiIwc2EhPrTuKUBdSLvlEKyIP3GCf89fltvcZiP9MMFA1w==} + + source-map@0.6.1: + resolution: {integrity: sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==} + engines: {node: '>=0.10.0'} + + sprintf-js@1.0.3: + resolution: {integrity: sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==} + + stack-utils@2.0.6: + resolution: {integrity: sha512-XlkWvfIm6RmsWtNJx+uqtKLS8eqFbxUg0ZzLXqY0caEy9l7hruX8IpiDnjsLavoBgqCCR71TqWO8MaXYheJ3RQ==} + engines: {node: '>=10'} + + string-length@4.0.2: + resolution: {integrity: sha512-+l6rNN5fYHNhZZy41RXsYptCjA2Igmq4EG7kZAYFQI1E1VTXarr6ZPXBg6eq7Y6eK4FEhY6AJlyuFIb/v/S0VQ==} + engines: {node: '>=10'} + + string-width@4.2.3: + resolution: {integrity: sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==} + engines: {node: '>=8'} + + strip-ansi@6.0.1: + resolution: {integrity: sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==} + engines: {node: '>=8'} + + strip-bom@4.0.0: + resolution: {integrity: sha512-3xurFv5tEgii33Zi8Jtp55wEIILR9eh34FAW00PZf+JnSsTmV/ioewSgQl97JHvgjoRGwPShsWm+IdrxB35d0w==} + engines: {node: '>=8'} + + strip-final-newline@2.0.0: + resolution: {integrity: sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==} + engines: {node: '>=6'} + + strip-json-comments@3.1.1: + resolution: {integrity: sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==} + engines: {node: '>=8'} + + supports-color@7.2.0: + resolution: {integrity: sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==} + engines: {node: '>=8'} + + supports-color@8.1.1: + resolution: {integrity: sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==} + engines: {node: '>=10'} + + supports-preserve-symlinks-flag@1.0.0: + resolution: {integrity: sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==} + engines: {node: '>= 0.4'} + + synckit@0.9.1: + resolution: {integrity: sha512-7gr8p9TQP6RAHusBOSLs46F4564ZrjV8xFmw5zCmgmhGUcw2hxsShhJ6CEiHQMgPDwAQ1fWHPM0ypc4RMAig4A==} + engines: {node: ^14.18.0 || >=16.0.0} + + test-exclude@6.0.0: + resolution: {integrity: sha512-cAGWPIyOHU6zlmg88jwm7VRyXnMN7iV68OGAbYDk/Mh/xC/pzVPlQtY6ngoIH/5/tciuhGfvESU8GrHrcxD56w==} + engines: {node: '>=8'} + + text-table@0.2.0: + resolution: {integrity: sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw==} + + tmpl@1.0.5: + resolution: {integrity: sha512-3f0uOEAQwIqGuWW2MVzYg8fV/QNnc/IpuJNG837rLuczAaLVHslWHZQj4IGiEl5Hs3kkbhwL9Ab7Hrsmuj+Smw==} + + to-fast-properties@2.0.0: + resolution: {integrity: 
sha512-/OaKK0xYrs3DmxRYqL/yDc+FxFUVYhDlXMhRmv3z915w2HF1tnN1omB354j8VUGO/hbRzyD6Y3sA7v7GS/ceog==} + engines: {node: '>=4'} + + to-regex-range@5.0.1: + resolution: {integrity: sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==} + engines: {node: '>=8.0'} + + ts-api-utils@1.4.0: + resolution: {integrity: sha512-032cPxaEKwM+GT3vA5JXNzIaizx388rhsSW79vGRNGXfRRAdEAn2mvk36PvK5HnOchyWZ7afLEXqYCvPCrzuzQ==} + engines: {node: '>=16'} + peerDependencies: + typescript: '>=4.2.0' + + ts-jest@29.2.5: + resolution: {integrity: sha512-KD8zB2aAZrcKIdGk4OwpJggeLcH1FgrICqDSROWqlnJXGCXK4Mn6FcdK2B6670Xr73lHMG1kHw8R87A0ecZ+vA==} + engines: {node: ^14.15.0 || ^16.10.0 || ^18.0.0 || >=20.0.0} + hasBin: true + peerDependencies: + '@babel/core': '>=7.0.0-beta.0 <8' + '@jest/transform': ^29.0.0 + '@jest/types': ^29.0.0 + babel-jest: ^29.0.0 + esbuild: '*' + jest: ^29.0.0 + typescript: '>=4.3 <6' + peerDependenciesMeta: + '@babel/core': + optional: true + '@jest/transform': + optional: true + '@jest/types': + optional: true + babel-jest: + optional: true + esbuild: + optional: true + + tslib@2.6.3: + resolution: {integrity: sha512-xNvxJEOUiWPGhUuUdQgAJPKOOJfGnIyKySOc09XkKsgdUV/3E2zvwZYdejjmRgPCgcym1juLH3226yA7sEFJKQ==} + + type-check@0.4.0: + resolution: {integrity: sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==} + engines: {node: '>= 0.8.0'} + + type-detect@4.0.8: + resolution: {integrity: sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g==} + engines: {node: '>=4'} + + type-fest@0.21.3: + resolution: {integrity: sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w==} + engines: {node: '>=10'} + + typescript-eslint@8.12.2: + resolution: {integrity: sha512-UbuVUWSrHVR03q9CWx+JDHeO6B/Hr9p4U5lRH++5tq/EbFq1faYZe50ZSBePptgfIKLEti0aPQ3hFgnPVcd8ZQ==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + peerDependencies: + typescript: '*' + peerDependenciesMeta: + typescript: + optional: true + + typescript@5.6.3: + resolution: {integrity: sha512-hjcS1mhfuyi4WW8IWtjP7brDrG2cuDZukyrYrSauoXGNgx0S7zceP07adYkJycEr56BOUTNPzbInooiN3fn1qw==} + engines: {node: '>=14.17'} + hasBin: true + + undici-types@6.19.8: + resolution: {integrity: sha512-ve2KP6f/JnbPBFyobGHuerC9g1FYGn/F8n1LWTwNxCEzd6IfqTwUQcNXgEtmmQ6DlRrC1hrSrBnCZPokRrDHjw==} + + update-browserslist-db@1.0.13: + resolution: {integrity: sha512-xebP81SNcPuNpPP3uzeW1NYXxI3rxyJzF3pD6sH4jE7o/IX+WtSpwnVU+qIsDPyk0d3hmFQ7mjqc6AtV604hbg==} + hasBin: true + peerDependencies: + browserslist: '>= 4.21.0' + + uri-js@4.4.1: + resolution: {integrity: sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==} + + v8-to-istanbul@9.2.0: + resolution: {integrity: sha512-/EH/sDgxU2eGxajKdwLCDmQ4FWq+kpi3uCmBGpw1xJtnAxEjlD8j8PEiGWpCIMIs3ciNAgH0d3TTJiUkYzyZjA==} + engines: {node: '>=10.12.0'} + + walker@1.0.8: + resolution: {integrity: sha512-ts/8E8l5b7kY0vlWLewOkDXMmPdLcVV4GmOQLyxuSswIJsweeFZtAsMF7k1Nszz+TYBQrlYRmzOnr398y1JemQ==} + + which@2.0.2: + resolution: {integrity: sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==} + engines: {node: '>= 8'} + hasBin: true + + word-wrap@1.2.5: + resolution: {integrity: sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA==} + engines: {node: '>=0.10.0'} + + wrap-ansi@7.0.0: + resolution: {integrity: 
sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==} + engines: {node: '>=10'} + + wrappy@1.0.2: + resolution: {integrity: sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==} + + write-file-atomic@4.0.2: + resolution: {integrity: sha512-7KxauUdBmSdWnmpaGFg+ppNjKF8uNLry8LyzjauQDOVONfFLNKrKvQOxZ/VuTIcS/gge/YNahf5RIIQWTSarlg==} + engines: {node: ^12.13.0 || ^14.15.0 || >=16.0.0} + + y18n@5.0.8: + resolution: {integrity: sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==} + engines: {node: '>=10'} + + yallist@3.1.1: + resolution: {integrity: sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==} + + yargs-parser@21.1.1: + resolution: {integrity: sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==} + engines: {node: '>=12'} + + yargs@17.7.2: + resolution: {integrity: sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==} + engines: {node: '>=12'} + + yocto-queue@0.1.0: + resolution: {integrity: sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==} + engines: {node: '>=10'} + +snapshots: + + '@ampproject/remapping@2.2.1': + dependencies: + '@jridgewell/gen-mapping': 0.3.3 + '@jridgewell/trace-mapping': 0.3.22 + + '@babel/code-frame@7.26.2': + dependencies: + '@babel/helper-validator-identifier': 7.25.9 + js-tokens: 4.0.0 + picocolors: 1.1.1 + + '@babel/compat-data@7.23.5': {} + + '@babel/core@7.23.9': + dependencies: + '@ampproject/remapping': 2.2.1 + '@babel/code-frame': 7.26.2 + '@babel/generator': 7.23.6 + '@babel/helper-compilation-targets': 7.23.6 + '@babel/helper-module-transforms': 7.23.3(@babel/core@7.23.9) + '@babel/helpers': 7.23.9 + '@babel/parser': 7.23.9 + '@babel/template': 7.23.9 + '@babel/traverse': 7.23.9 + '@babel/types': 7.23.9 + convert-source-map: 2.0.0 + debug: 4.3.7 + gensync: 1.0.0-beta.2 + json5: 2.2.3 + semver: 6.3.1 + transitivePeerDependencies: + - supports-color + + '@babel/generator@7.23.6': + dependencies: + '@babel/types': 7.23.9 + '@jridgewell/gen-mapping': 0.3.3 + '@jridgewell/trace-mapping': 0.3.22 + jsesc: 2.5.2 + + '@babel/helper-compilation-targets@7.23.6': + dependencies: + '@babel/compat-data': 7.23.5 + '@babel/helper-validator-option': 7.23.5 + browserslist: 4.22.3 + lru-cache: 5.1.1 + semver: 6.3.1 + + '@babel/helper-environment-visitor@7.22.20': {} + + '@babel/helper-function-name@7.23.0': + dependencies: + '@babel/template': 7.23.9 + '@babel/types': 7.23.9 + + '@babel/helper-hoist-variables@7.22.5': + dependencies: + '@babel/types': 7.23.9 + + '@babel/helper-module-imports@7.22.15': + dependencies: + '@babel/types': 7.23.9 + + '@babel/helper-module-transforms@7.23.3(@babel/core@7.23.9)': + dependencies: + '@babel/core': 7.23.9 + '@babel/helper-environment-visitor': 7.22.20 + '@babel/helper-module-imports': 7.22.15 + '@babel/helper-simple-access': 7.22.5 + '@babel/helper-split-export-declaration': 7.22.6 + '@babel/helper-validator-identifier': 7.25.9 + + '@babel/helper-plugin-utils@7.22.5': {} + + '@babel/helper-simple-access@7.22.5': + dependencies: + '@babel/types': 7.23.9 + + '@babel/helper-split-export-declaration@7.22.6': + dependencies: + '@babel/types': 7.23.9 + + '@babel/helper-string-parser@7.23.4': {} + + '@babel/helper-validator-identifier@7.22.20': {} + + '@babel/helper-validator-identifier@7.25.9': {} + + 
'@babel/helper-validator-option@7.23.5': {} + + '@babel/helpers@7.23.9': + dependencies: + '@babel/template': 7.23.9 + '@babel/traverse': 7.23.9 + '@babel/types': 7.23.9 + transitivePeerDependencies: + - supports-color + + '@babel/parser@7.23.9': + dependencies: + '@babel/types': 7.23.9 + + '@babel/plugin-syntax-async-generators@7.8.4(@babel/core@7.23.9)': + dependencies: + '@babel/core': 7.23.9 + '@babel/helper-plugin-utils': 7.22.5 + + '@babel/plugin-syntax-bigint@7.8.3(@babel/core@7.23.9)': + dependencies: + '@babel/core': 7.23.9 + '@babel/helper-plugin-utils': 7.22.5 + + '@babel/plugin-syntax-class-properties@7.12.13(@babel/core@7.23.9)': + dependencies: + '@babel/core': 7.23.9 + '@babel/helper-plugin-utils': 7.22.5 + + '@babel/plugin-syntax-import-meta@7.10.4(@babel/core@7.23.9)': + dependencies: + '@babel/core': 7.23.9 + '@babel/helper-plugin-utils': 7.22.5 + + '@babel/plugin-syntax-json-strings@7.8.3(@babel/core@7.23.9)': + dependencies: + '@babel/core': 7.23.9 + '@babel/helper-plugin-utils': 7.22.5 + + '@babel/plugin-syntax-jsx@7.23.3(@babel/core@7.23.9)': + dependencies: + '@babel/core': 7.23.9 + '@babel/helper-plugin-utils': 7.22.5 + + '@babel/plugin-syntax-logical-assignment-operators@7.10.4(@babel/core@7.23.9)': + dependencies: + '@babel/core': 7.23.9 + '@babel/helper-plugin-utils': 7.22.5 + + '@babel/plugin-syntax-nullish-coalescing-operator@7.8.3(@babel/core@7.23.9)': + dependencies: + '@babel/core': 7.23.9 + '@babel/helper-plugin-utils': 7.22.5 + + '@babel/plugin-syntax-numeric-separator@7.10.4(@babel/core@7.23.9)': + dependencies: + '@babel/core': 7.23.9 + '@babel/helper-plugin-utils': 7.22.5 + + '@babel/plugin-syntax-object-rest-spread@7.8.3(@babel/core@7.23.9)': + dependencies: + '@babel/core': 7.23.9 + '@babel/helper-plugin-utils': 7.22.5 + + '@babel/plugin-syntax-optional-catch-binding@7.8.3(@babel/core@7.23.9)': + dependencies: + '@babel/core': 7.23.9 + '@babel/helper-plugin-utils': 7.22.5 + + '@babel/plugin-syntax-optional-chaining@7.8.3(@babel/core@7.23.9)': + dependencies: + '@babel/core': 7.23.9 + '@babel/helper-plugin-utils': 7.22.5 + + '@babel/plugin-syntax-top-level-await@7.14.5(@babel/core@7.23.9)': + dependencies: + '@babel/core': 7.23.9 + '@babel/helper-plugin-utils': 7.22.5 + + '@babel/plugin-syntax-typescript@7.23.3(@babel/core@7.23.9)': + dependencies: + '@babel/core': 7.23.9 + '@babel/helper-plugin-utils': 7.22.5 + + '@babel/template@7.23.9': + dependencies: + '@babel/code-frame': 7.26.2 + '@babel/parser': 7.23.9 + '@babel/types': 7.23.9 + + '@babel/traverse@7.23.9': + dependencies: + '@babel/code-frame': 7.26.2 + '@babel/generator': 7.23.6 + '@babel/helper-environment-visitor': 7.22.20 + '@babel/helper-function-name': 7.23.0 + '@babel/helper-hoist-variables': 7.22.5 + '@babel/helper-split-export-declaration': 7.22.6 + '@babel/parser': 7.23.9 + '@babel/types': 7.23.9 + debug: 4.3.7 + globals: 11.12.0 + transitivePeerDependencies: + - supports-color + + '@babel/types@7.23.9': + dependencies: + '@babel/helper-string-parser': 7.23.4 + '@babel/helper-validator-identifier': 7.22.20 + to-fast-properties: 2.0.0 + + '@bcoe/v8-coverage@0.2.3': {} + + '@eslint-community/eslint-utils@4.4.1(eslint@9.13.0)': + dependencies: + eslint: 9.13.0 + eslint-visitor-keys: 3.4.3 + + '@eslint-community/regexpp@4.12.1': {} + + '@eslint/config-array@0.18.0': + dependencies: + '@eslint/object-schema': 2.1.4 + debug: 4.3.7 + minimatch: 3.1.2 + transitivePeerDependencies: + - supports-color + + '@eslint/core@0.7.0': {} + + '@eslint/eslintrc@3.1.0': + dependencies: + ajv: 6.12.6 + 
debug: 4.3.7 + espree: 10.3.0 + globals: 14.0.0 + ignore: 5.3.2 + import-fresh: 3.3.0 + js-yaml: 4.1.0 + minimatch: 3.1.2 + strip-json-comments: 3.1.1 + transitivePeerDependencies: + - supports-color + + '@eslint/js@9.13.0': {} + + '@eslint/object-schema@2.1.4': {} + + '@eslint/plugin-kit@0.2.2': + dependencies: + levn: 0.4.1 + + '@humanfs/core@0.19.1': {} + + '@humanfs/node@0.16.6': + dependencies: + '@humanfs/core': 0.19.1 + '@humanwhocodes/retry': 0.3.1 + + '@humanwhocodes/module-importer@1.0.1': {} + + '@humanwhocodes/retry@0.3.1': {} + + '@istanbuljs/load-nyc-config@1.1.0': + dependencies: + camelcase: 5.3.1 + find-up: 4.1.0 + get-package-type: 0.1.0 + js-yaml: 3.14.1 + resolve-from: 5.0.0 + + '@istanbuljs/schema@0.1.3': {} + + '@jest/console@29.7.0': + dependencies: + '@jest/types': 29.6.3 + '@types/node': 22.8.6 + chalk: 4.1.2 + jest-message-util: 29.7.0 + jest-util: 29.7.0 + slash: 3.0.0 + + '@jest/core@29.7.0': + dependencies: + '@jest/console': 29.7.0 + '@jest/reporters': 29.7.0 + '@jest/test-result': 29.7.0 + '@jest/transform': 29.7.0 + '@jest/types': 29.6.3 + '@types/node': 22.8.6 + ansi-escapes: 4.3.2 + chalk: 4.1.2 + ci-info: 3.9.0 + exit: 0.1.2 + graceful-fs: 4.2.11 + jest-changed-files: 29.7.0 + jest-config: 29.7.0(@types/node@22.8.6) + jest-haste-map: 29.7.0 + jest-message-util: 29.7.0 + jest-regex-util: 29.6.3 + jest-resolve: 29.7.0 + jest-resolve-dependencies: 29.7.0 + jest-runner: 29.7.0 + jest-runtime: 29.7.0 + jest-snapshot: 29.7.0 + jest-util: 29.7.0 + jest-validate: 29.7.0 + jest-watcher: 29.7.0 + micromatch: 4.0.5 + pretty-format: 29.7.0 + slash: 3.0.0 + strip-ansi: 6.0.1 + transitivePeerDependencies: + - babel-plugin-macros + - supports-color + - ts-node + + '@jest/environment@29.7.0': + dependencies: + '@jest/fake-timers': 29.7.0 + '@jest/types': 29.6.3 + '@types/node': 22.8.6 + jest-mock: 29.7.0 + + '@jest/expect-utils@29.7.0': + dependencies: + jest-get-type: 29.6.3 + + '@jest/expect@29.7.0': + dependencies: + expect: 29.7.0 + jest-snapshot: 29.7.0 + transitivePeerDependencies: + - supports-color + + '@jest/fake-timers@29.7.0': + dependencies: + '@jest/types': 29.6.3 + '@sinonjs/fake-timers': 10.3.0 + '@types/node': 22.8.6 + jest-message-util: 29.7.0 + jest-mock: 29.7.0 + jest-util: 29.7.0 + + '@jest/globals@29.7.0': + dependencies: + '@jest/environment': 29.7.0 + '@jest/expect': 29.7.0 + '@jest/types': 29.6.3 + jest-mock: 29.7.0 + transitivePeerDependencies: + - supports-color + + '@jest/reporters@29.7.0': + dependencies: + '@bcoe/v8-coverage': 0.2.3 + '@jest/console': 29.7.0 + '@jest/test-result': 29.7.0 + '@jest/transform': 29.7.0 + '@jest/types': 29.6.3 + '@jridgewell/trace-mapping': 0.3.22 + '@types/node': 22.8.6 + chalk: 4.1.2 + collect-v8-coverage: 1.0.2 + exit: 0.1.2 + glob: 7.2.3 + graceful-fs: 4.2.11 + istanbul-lib-coverage: 3.2.2 + istanbul-lib-instrument: 6.0.1 + istanbul-lib-report: 3.0.1 + istanbul-lib-source-maps: 4.0.1 + istanbul-reports: 3.1.6 + jest-message-util: 29.7.0 + jest-util: 29.7.0 + jest-worker: 29.7.0 + slash: 3.0.0 + string-length: 4.0.2 + strip-ansi: 6.0.1 + v8-to-istanbul: 9.2.0 + transitivePeerDependencies: + - supports-color + + '@jest/schemas@29.6.3': + dependencies: + '@sinclair/typebox': 0.27.8 + + '@jest/source-map@29.6.3': + dependencies: + '@jridgewell/trace-mapping': 0.3.22 + callsites: 3.1.0 + graceful-fs: 4.2.11 + + '@jest/test-result@29.7.0': + dependencies: + '@jest/console': 29.7.0 + '@jest/types': 29.6.3 + '@types/istanbul-lib-coverage': 2.0.6 + collect-v8-coverage: 1.0.2 + + '@jest/test-sequencer@29.7.0': + 
dependencies: + '@jest/test-result': 29.7.0 + graceful-fs: 4.2.11 + jest-haste-map: 29.7.0 + slash: 3.0.0 + + '@jest/transform@29.7.0': + dependencies: + '@babel/core': 7.23.9 + '@jest/types': 29.6.3 + '@jridgewell/trace-mapping': 0.3.22 + babel-plugin-istanbul: 6.1.1 + chalk: 4.1.2 + convert-source-map: 2.0.0 + fast-json-stable-stringify: 2.1.0 + graceful-fs: 4.2.11 + jest-haste-map: 29.7.0 + jest-regex-util: 29.6.3 + jest-util: 29.7.0 + micromatch: 4.0.5 + pirates: 4.0.6 + slash: 3.0.0 + write-file-atomic: 4.0.2 + transitivePeerDependencies: + - supports-color + + '@jest/types@29.6.3': + dependencies: + '@jest/schemas': 29.6.3 + '@types/istanbul-lib-coverage': 2.0.6 + '@types/istanbul-reports': 3.0.4 + '@types/node': 22.8.6 + '@types/yargs': 17.0.32 + chalk: 4.1.2 + + '@jridgewell/gen-mapping@0.3.3': + dependencies: + '@jridgewell/set-array': 1.1.2 + '@jridgewell/sourcemap-codec': 1.4.15 + '@jridgewell/trace-mapping': 0.3.22 + + '@jridgewell/resolve-uri@3.1.1': {} + + '@jridgewell/set-array@1.1.2': {} + + '@jridgewell/sourcemap-codec@1.4.15': {} + + '@jridgewell/trace-mapping@0.3.22': + dependencies: + '@jridgewell/resolve-uri': 3.1.1 + '@jridgewell/sourcemap-codec': 1.4.15 + + '@nodelib/fs.scandir@2.1.5': + dependencies: + '@nodelib/fs.stat': 2.0.5 + run-parallel: 1.2.0 + + '@nodelib/fs.stat@2.0.5': {} + + '@nodelib/fs.walk@1.2.8': + dependencies: + '@nodelib/fs.scandir': 2.1.5 + fastq: 1.17.1 + + '@pkgr/core@0.1.1': {} + + '@sinclair/typebox@0.27.8': {} + + '@sinonjs/commons@3.0.1': + dependencies: + type-detect: 4.0.8 + + '@sinonjs/fake-timers@10.3.0': + dependencies: + '@sinonjs/commons': 3.0.1 + + '@types/babel__core@7.20.5': + dependencies: + '@babel/parser': 7.23.9 + '@babel/types': 7.23.9 + '@types/babel__generator': 7.6.8 + '@types/babel__template': 7.4.4 + '@types/babel__traverse': 7.20.5 + + '@types/babel__generator@7.6.8': + dependencies: + '@babel/types': 7.23.9 + + '@types/babel__template@7.4.4': + dependencies: + '@babel/parser': 7.23.9 + '@babel/types': 7.23.9 + + '@types/babel__traverse@7.20.5': + dependencies: + '@babel/types': 7.23.9 + + '@types/estree@1.0.6': {} + + '@types/graceful-fs@4.1.9': + dependencies: + '@types/node': 22.8.6 + + '@types/istanbul-lib-coverage@2.0.6': {} + + '@types/istanbul-lib-report@3.0.3': + dependencies: + '@types/istanbul-lib-coverage': 2.0.6 + + '@types/istanbul-reports@3.0.4': + dependencies: + '@types/istanbul-lib-report': 3.0.3 + + '@types/jest@29.5.14': + dependencies: + expect: 29.7.0 + pretty-format: 29.7.0 + + '@types/json-schema@7.0.15': {} + + '@types/node@22.8.6': + dependencies: + undici-types: 6.19.8 + + '@types/stack-utils@2.0.3': {} + + '@types/yargs-parser@21.0.3': {} + + '@types/yargs@17.0.32': + dependencies: + '@types/yargs-parser': 21.0.3 + + '@typescript-eslint/eslint-plugin@8.12.2(@typescript-eslint/parser@8.12.2(eslint@9.13.0)(typescript@5.6.3))(eslint@9.13.0)(typescript@5.6.3)': + dependencies: + '@eslint-community/regexpp': 4.12.1 + '@typescript-eslint/parser': 8.12.2(eslint@9.13.0)(typescript@5.6.3) + '@typescript-eslint/scope-manager': 8.12.2 + '@typescript-eslint/type-utils': 8.12.2(eslint@9.13.0)(typescript@5.6.3) + '@typescript-eslint/utils': 8.12.2(eslint@9.13.0)(typescript@5.6.3) + '@typescript-eslint/visitor-keys': 8.12.2 + eslint: 9.13.0 + graphemer: 1.4.0 + ignore: 5.3.2 + natural-compare: 1.4.0 + ts-api-utils: 1.4.0(typescript@5.6.3) + optionalDependencies: + typescript: 5.6.3 + transitivePeerDependencies: + - supports-color + + '@typescript-eslint/parser@8.12.2(eslint@9.13.0)(typescript@5.6.3)': + 
dependencies: + '@typescript-eslint/scope-manager': 8.12.2 + '@typescript-eslint/types': 8.12.2 + '@typescript-eslint/typescript-estree': 8.12.2(typescript@5.6.3) + '@typescript-eslint/visitor-keys': 8.12.2 + debug: 4.3.7 + eslint: 9.13.0 + optionalDependencies: + typescript: 5.6.3 + transitivePeerDependencies: + - supports-color + + '@typescript-eslint/scope-manager@8.12.2': + dependencies: + '@typescript-eslint/types': 8.12.2 + '@typescript-eslint/visitor-keys': 8.12.2 + + '@typescript-eslint/type-utils@8.12.2(eslint@9.13.0)(typescript@5.6.3)': + dependencies: + '@typescript-eslint/typescript-estree': 8.12.2(typescript@5.6.3) + '@typescript-eslint/utils': 8.12.2(eslint@9.13.0)(typescript@5.6.3) + debug: 4.3.7 + ts-api-utils: 1.4.0(typescript@5.6.3) + optionalDependencies: + typescript: 5.6.3 + transitivePeerDependencies: + - eslint + - supports-color + + '@typescript-eslint/types@8.12.2': {} + + '@typescript-eslint/typescript-estree@8.12.2(typescript@5.6.3)': + dependencies: + '@typescript-eslint/types': 8.12.2 + '@typescript-eslint/visitor-keys': 8.12.2 + debug: 4.3.7 + fast-glob: 3.3.2 + is-glob: 4.0.3 + minimatch: 9.0.5 + semver: 7.6.3 + ts-api-utils: 1.4.0(typescript@5.6.3) + optionalDependencies: + typescript: 5.6.3 + transitivePeerDependencies: + - supports-color + + '@typescript-eslint/utils@8.12.2(eslint@9.13.0)(typescript@5.6.3)': + dependencies: + '@eslint-community/eslint-utils': 4.4.1(eslint@9.13.0) + '@typescript-eslint/scope-manager': 8.12.2 + '@typescript-eslint/types': 8.12.2 + '@typescript-eslint/typescript-estree': 8.12.2(typescript@5.6.3) + eslint: 9.13.0 + transitivePeerDependencies: + - supports-color + - typescript + + '@typescript-eslint/visitor-keys@8.12.2': + dependencies: + '@typescript-eslint/types': 8.12.2 + eslint-visitor-keys: 3.4.3 + + acorn-jsx@5.3.2(acorn@8.14.0): + dependencies: + acorn: 8.14.0 + + acorn@8.14.0: {} + + ajv@6.12.6: + dependencies: + fast-deep-equal: 3.1.3 + fast-json-stable-stringify: 2.1.0 + json-schema-traverse: 0.4.1 + uri-js: 4.4.1 + + ansi-escapes@4.3.2: + dependencies: + type-fest: 0.21.3 + + ansi-regex@5.0.1: {} + + ansi-styles@4.3.0: + dependencies: + color-convert: 2.0.1 + + ansi-styles@5.2.0: {} + + anymatch@3.1.3: + dependencies: + normalize-path: 3.0.0 + picomatch: 2.3.1 + + argparse@1.0.10: + dependencies: + sprintf-js: 1.0.3 + + argparse@2.0.1: {} + + async@3.2.6: {} + + babel-jest@29.7.0(@babel/core@7.23.9): + dependencies: + '@babel/core': 7.23.9 + '@jest/transform': 29.7.0 + '@types/babel__core': 7.20.5 + babel-plugin-istanbul: 6.1.1 + babel-preset-jest: 29.6.3(@babel/core@7.23.9) + chalk: 4.1.2 + graceful-fs: 4.2.11 + slash: 3.0.0 + transitivePeerDependencies: + - supports-color + + babel-plugin-istanbul@6.1.1: + dependencies: + '@babel/helper-plugin-utils': 7.22.5 + '@istanbuljs/load-nyc-config': 1.1.0 + '@istanbuljs/schema': 0.1.3 + istanbul-lib-instrument: 5.2.1 + test-exclude: 6.0.0 + transitivePeerDependencies: + - supports-color + + babel-plugin-jest-hoist@29.6.3: + dependencies: + '@babel/template': 7.23.9 + '@babel/types': 7.23.9 + '@types/babel__core': 7.20.5 + '@types/babel__traverse': 7.20.5 + + babel-preset-current-node-syntax@1.0.1(@babel/core@7.23.9): + dependencies: + '@babel/core': 7.23.9 + '@babel/plugin-syntax-async-generators': 7.8.4(@babel/core@7.23.9) + '@babel/plugin-syntax-bigint': 7.8.3(@babel/core@7.23.9) + '@babel/plugin-syntax-class-properties': 7.12.13(@babel/core@7.23.9) + '@babel/plugin-syntax-import-meta': 7.10.4(@babel/core@7.23.9) + '@babel/plugin-syntax-json-strings': 
7.8.3(@babel/core@7.23.9) + '@babel/plugin-syntax-logical-assignment-operators': 7.10.4(@babel/core@7.23.9) + '@babel/plugin-syntax-nullish-coalescing-operator': 7.8.3(@babel/core@7.23.9) + '@babel/plugin-syntax-numeric-separator': 7.10.4(@babel/core@7.23.9) + '@babel/plugin-syntax-object-rest-spread': 7.8.3(@babel/core@7.23.9) + '@babel/plugin-syntax-optional-catch-binding': 7.8.3(@babel/core@7.23.9) + '@babel/plugin-syntax-optional-chaining': 7.8.3(@babel/core@7.23.9) + '@babel/plugin-syntax-top-level-await': 7.14.5(@babel/core@7.23.9) + + babel-preset-jest@29.6.3(@babel/core@7.23.9): + dependencies: + '@babel/core': 7.23.9 + babel-plugin-jest-hoist: 29.6.3 + babel-preset-current-node-syntax: 1.0.1(@babel/core@7.23.9) + + balanced-match@1.0.2: {} + + brace-expansion@1.1.11: + dependencies: + balanced-match: 1.0.2 + concat-map: 0.0.1 + + brace-expansion@2.0.1: + dependencies: + balanced-match: 1.0.2 + + braces@3.0.2: + dependencies: + fill-range: 7.0.1 + + braces@3.0.3: + dependencies: + fill-range: 7.1.1 + + browserslist@4.22.3: + dependencies: + caniuse-lite: 1.0.30001581 + electron-to-chromium: 1.4.648 + node-releases: 2.0.14 + update-browserslist-db: 1.0.13(browserslist@4.22.3) + + bs-logger@0.2.6: + dependencies: + fast-json-stable-stringify: 2.1.0 + + bser@2.1.1: + dependencies: + node-int64: 0.4.0 + + buffer-from@1.1.2: {} + + callsites@3.1.0: {} + + camelcase@5.3.1: {} + + camelcase@6.3.0: {} + + caniuse-lite@1.0.30001581: {} + + chalk@4.1.2: + dependencies: + ansi-styles: 4.3.0 + supports-color: 7.2.0 + + char-regex@1.0.2: {} + + ci-info@3.9.0: {} + + cjs-module-lexer@1.2.3: {} + + cliui@8.0.1: + dependencies: + string-width: 4.2.3 + strip-ansi: 6.0.1 + wrap-ansi: 7.0.0 + + co@4.6.0: {} + + collect-v8-coverage@1.0.2: {} + + color-convert@2.0.1: + dependencies: + color-name: 1.1.4 + + color-name@1.1.4: {} + + concat-map@0.0.1: {} + + convert-source-map@2.0.0: {} + + create-jest@29.7.0(@types/node@22.8.6): + dependencies: + '@jest/types': 29.6.3 + chalk: 4.1.2 + exit: 0.1.2 + graceful-fs: 4.2.11 + jest-config: 29.7.0(@types/node@22.8.6) + jest-util: 29.7.0 + prompts: 2.4.2 + transitivePeerDependencies: + - '@types/node' + - babel-plugin-macros + - supports-color + - ts-node + + cross-spawn@7.0.3: + dependencies: + path-key: 3.1.1 + shebang-command: 2.0.0 + which: 2.0.2 + + debug@4.3.7: + dependencies: + ms: 2.1.3 + + dedent@1.5.1: {} + + deep-is@0.1.4: {} + + deepmerge@4.3.1: {} + + detect-newline@3.1.0: {} + + diff-sequences@29.6.3: {} + + ejs@3.1.10: + dependencies: + jake: 10.9.2 + + electron-to-chromium@1.4.648: {} + + emittery@0.13.1: {} + + emoji-regex@8.0.0: {} + + error-ex@1.3.2: + dependencies: + is-arrayish: 0.2.1 + + escalade@3.1.1: {} + + escape-string-regexp@2.0.0: {} + + escape-string-regexp@4.0.0: {} + + eslint-plugin-prettier@5.2.1(eslint@9.13.0)(prettier@3.3.3): + dependencies: + eslint: 9.13.0 + prettier: 3.3.3 + prettier-linter-helpers: 1.0.0 + synckit: 0.9.1 + + eslint-scope@8.2.0: + dependencies: + esrecurse: 4.3.0 + estraverse: 5.3.0 + + eslint-visitor-keys@3.4.3: {} + + eslint-visitor-keys@4.2.0: {} + + eslint@9.13.0: + dependencies: + '@eslint-community/eslint-utils': 4.4.1(eslint@9.13.0) + '@eslint-community/regexpp': 4.12.1 + '@eslint/config-array': 0.18.0 + '@eslint/core': 0.7.0 + '@eslint/eslintrc': 3.1.0 + '@eslint/js': 9.13.0 + '@eslint/plugin-kit': 0.2.2 + '@humanfs/node': 0.16.6 + '@humanwhocodes/module-importer': 1.0.1 + '@humanwhocodes/retry': 0.3.1 + '@types/estree': 1.0.6 + '@types/json-schema': 7.0.15 + ajv: 6.12.6 + chalk: 4.1.2 + cross-spawn: 
7.0.3 + debug: 4.3.7 + escape-string-regexp: 4.0.0 + eslint-scope: 8.2.0 + eslint-visitor-keys: 4.2.0 + espree: 10.3.0 + esquery: 1.6.0 + esutils: 2.0.3 + fast-deep-equal: 3.1.3 + file-entry-cache: 8.0.0 + find-up: 5.0.0 + glob-parent: 6.0.2 + ignore: 5.3.2 + imurmurhash: 0.1.4 + is-glob: 4.0.3 + json-stable-stringify-without-jsonify: 1.0.1 + lodash.merge: 4.6.2 + minimatch: 3.1.2 + natural-compare: 1.4.0 + optionator: 0.9.4 + text-table: 0.2.0 + transitivePeerDependencies: + - supports-color + + espree@10.3.0: + dependencies: + acorn: 8.14.0 + acorn-jsx: 5.3.2(acorn@8.14.0) + eslint-visitor-keys: 4.2.0 + + esprima@4.0.1: {} + + esquery@1.6.0: + dependencies: + estraverse: 5.3.0 + + esrecurse@4.3.0: + dependencies: + estraverse: 5.3.0 + + estraverse@5.3.0: {} + + esutils@2.0.3: {} + + execa@5.1.1: + dependencies: + cross-spawn: 7.0.3 + get-stream: 6.0.1 + human-signals: 2.1.0 + is-stream: 2.0.1 + merge-stream: 2.0.0 + npm-run-path: 4.0.1 + onetime: 5.1.2 + signal-exit: 3.0.7 + strip-final-newline: 2.0.0 + + exit@0.1.2: {} + + expect@29.7.0: + dependencies: + '@jest/expect-utils': 29.7.0 + jest-get-type: 29.6.3 + jest-matcher-utils: 29.7.0 + jest-message-util: 29.7.0 + jest-util: 29.7.0 + + fast-deep-equal@3.1.3: {} + + fast-diff@1.3.0: {} + + fast-glob@3.3.2: + dependencies: + '@nodelib/fs.stat': 2.0.5 + '@nodelib/fs.walk': 1.2.8 + glob-parent: 5.1.2 + merge2: 1.4.1 + micromatch: 4.0.8 + + fast-json-stable-stringify@2.1.0: {} + + fast-levenshtein@2.0.6: {} + + fastq@1.17.1: + dependencies: + reusify: 1.0.4 + + fb-watchman@2.0.2: + dependencies: + bser: 2.1.1 + + file-entry-cache@8.0.0: + dependencies: + flat-cache: 4.0.1 + + filelist@1.0.4: + dependencies: + minimatch: 5.1.6 + + fill-range@7.0.1: + dependencies: + to-regex-range: 5.0.1 + + fill-range@7.1.1: + dependencies: + to-regex-range: 5.0.1 + + find-up@4.1.0: + dependencies: + locate-path: 5.0.0 + path-exists: 4.0.0 + + find-up@5.0.0: + dependencies: + locate-path: 6.0.0 + path-exists: 4.0.0 + + flat-cache@4.0.1: + dependencies: + flatted: 3.3.1 + keyv: 4.5.4 + + flatted@3.3.1: {} + + fs.realpath@1.0.0: {} + + fsevents@2.3.3: + optional: true + + function-bind@1.1.2: {} + + gensync@1.0.0-beta.2: {} + + get-caller-file@2.0.5: {} + + get-package-type@0.1.0: {} + + get-stream@6.0.1: {} + + glob-parent@5.1.2: + dependencies: + is-glob: 4.0.3 + + glob-parent@6.0.2: + dependencies: + is-glob: 4.0.3 + + glob@7.2.3: + dependencies: + fs.realpath: 1.0.0 + inflight: 1.0.6 + inherits: 2.0.4 + minimatch: 3.1.2 + once: 1.4.0 + path-is-absolute: 1.0.1 + + globals@11.12.0: {} + + globals@14.0.0: {} + + graceful-fs@4.2.11: {} + + graphemer@1.4.0: {} + + has-flag@4.0.0: {} + + hasown@2.0.0: + dependencies: + function-bind: 1.1.2 + + html-escaper@2.0.2: {} + + human-signals@2.1.0: {} + + ignore@5.3.2: {} + + import-fresh@3.3.0: + dependencies: + parent-module: 1.0.1 + resolve-from: 4.0.0 + + import-local@3.1.0: + dependencies: + pkg-dir: 4.2.0 + resolve-cwd: 3.0.0 + + imurmurhash@0.1.4: {} + + inflight@1.0.6: + dependencies: + once: 1.4.0 + wrappy: 1.0.2 + + inherits@2.0.4: {} + + is-arrayish@0.2.1: {} + + is-core-module@2.13.1: + dependencies: + hasown: 2.0.0 + + is-extglob@2.1.1: {} + + is-fullwidth-code-point@3.0.0: {} + + is-generator-fn@2.1.0: {} + + is-glob@4.0.3: + dependencies: + is-extglob: 2.1.1 + + is-number@7.0.0: {} + + is-stream@2.0.1: {} + + isexe@2.0.0: {} + + istanbul-lib-coverage@3.2.2: {} + + istanbul-lib-instrument@5.2.1: + dependencies: + '@babel/core': 7.23.9 + '@babel/parser': 7.23.9 + '@istanbuljs/schema': 0.1.3 + 
istanbul-lib-coverage: 3.2.2 + semver: 6.3.1 + transitivePeerDependencies: + - supports-color + + istanbul-lib-instrument@6.0.1: + dependencies: + '@babel/core': 7.23.9 + '@babel/parser': 7.23.9 + '@istanbuljs/schema': 0.1.3 + istanbul-lib-coverage: 3.2.2 + semver: 7.6.3 + transitivePeerDependencies: + - supports-color + + istanbul-lib-report@3.0.1: + dependencies: + istanbul-lib-coverage: 3.2.2 + make-dir: 4.0.0 + supports-color: 7.2.0 + + istanbul-lib-source-maps@4.0.1: + dependencies: + debug: 4.3.7 + istanbul-lib-coverage: 3.2.2 + source-map: 0.6.1 + transitivePeerDependencies: + - supports-color + + istanbul-reports@3.1.6: + dependencies: + html-escaper: 2.0.2 + istanbul-lib-report: 3.0.1 + + jake@10.9.2: + dependencies: + async: 3.2.6 + chalk: 4.1.2 + filelist: 1.0.4 + minimatch: 3.1.2 + + jest-changed-files@29.7.0: + dependencies: + execa: 5.1.1 + jest-util: 29.7.0 + p-limit: 3.1.0 + + jest-circus@29.7.0: + dependencies: + '@jest/environment': 29.7.0 + '@jest/expect': 29.7.0 + '@jest/test-result': 29.7.0 + '@jest/types': 29.6.3 + '@types/node': 22.8.6 + chalk: 4.1.2 + co: 4.6.0 + dedent: 1.5.1 + is-generator-fn: 2.1.0 + jest-each: 29.7.0 + jest-matcher-utils: 29.7.0 + jest-message-util: 29.7.0 + jest-runtime: 29.7.0 + jest-snapshot: 29.7.0 + jest-util: 29.7.0 + p-limit: 3.1.0 + pretty-format: 29.7.0 + pure-rand: 6.0.4 + slash: 3.0.0 + stack-utils: 2.0.6 + transitivePeerDependencies: + - babel-plugin-macros + - supports-color + + jest-cli@29.7.0(@types/node@22.8.6): + dependencies: + '@jest/core': 29.7.0 + '@jest/test-result': 29.7.0 + '@jest/types': 29.6.3 + chalk: 4.1.2 + create-jest: 29.7.0(@types/node@22.8.6) + exit: 0.1.2 + import-local: 3.1.0 + jest-config: 29.7.0(@types/node@22.8.6) + jest-util: 29.7.0 + jest-validate: 29.7.0 + yargs: 17.7.2 + transitivePeerDependencies: + - '@types/node' + - babel-plugin-macros + - supports-color + - ts-node + + jest-config@29.7.0(@types/node@22.8.6): + dependencies: + '@babel/core': 7.23.9 + '@jest/test-sequencer': 29.7.0 + '@jest/types': 29.6.3 + babel-jest: 29.7.0(@babel/core@7.23.9) + chalk: 4.1.2 + ci-info: 3.9.0 + deepmerge: 4.3.1 + glob: 7.2.3 + graceful-fs: 4.2.11 + jest-circus: 29.7.0 + jest-environment-node: 29.7.0 + jest-get-type: 29.6.3 + jest-regex-util: 29.6.3 + jest-resolve: 29.7.0 + jest-runner: 29.7.0 + jest-util: 29.7.0 + jest-validate: 29.7.0 + micromatch: 4.0.5 + parse-json: 5.2.0 + pretty-format: 29.7.0 + slash: 3.0.0 + strip-json-comments: 3.1.1 + optionalDependencies: + '@types/node': 22.8.6 + transitivePeerDependencies: + - babel-plugin-macros + - supports-color + + jest-diff@29.7.0: + dependencies: + chalk: 4.1.2 + diff-sequences: 29.6.3 + jest-get-type: 29.6.3 + pretty-format: 29.7.0 + + jest-docblock@29.7.0: + dependencies: + detect-newline: 3.1.0 + + jest-each@29.7.0: + dependencies: + '@jest/types': 29.6.3 + chalk: 4.1.2 + jest-get-type: 29.6.3 + jest-util: 29.7.0 + pretty-format: 29.7.0 + + jest-environment-node@29.7.0: + dependencies: + '@jest/environment': 29.7.0 + '@jest/fake-timers': 29.7.0 + '@jest/types': 29.6.3 + '@types/node': 22.8.6 + jest-mock: 29.7.0 + jest-util: 29.7.0 + + jest-get-type@29.6.3: {} + + jest-haste-map@29.7.0: + dependencies: + '@jest/types': 29.6.3 + '@types/graceful-fs': 4.1.9 + '@types/node': 22.8.6 + anymatch: 3.1.3 + fb-watchman: 2.0.2 + graceful-fs: 4.2.11 + jest-regex-util: 29.6.3 + jest-util: 29.7.0 + jest-worker: 29.7.0 + micromatch: 4.0.5 + walker: 1.0.8 + optionalDependencies: + fsevents: 2.3.3 + + jest-leak-detector@29.7.0: + dependencies: + jest-get-type: 29.6.3 + 
pretty-format: 29.7.0 + + jest-matcher-utils@29.7.0: + dependencies: + chalk: 4.1.2 + jest-diff: 29.7.0 + jest-get-type: 29.6.3 + pretty-format: 29.7.0 + + jest-message-util@29.7.0: + dependencies: + '@babel/code-frame': 7.26.2 + '@jest/types': 29.6.3 + '@types/stack-utils': 2.0.3 + chalk: 4.1.2 + graceful-fs: 4.2.11 + micromatch: 4.0.8 + pretty-format: 29.7.0 + slash: 3.0.0 + stack-utils: 2.0.6 + + jest-mock@29.7.0: + dependencies: + '@jest/types': 29.6.3 + '@types/node': 22.8.6 + jest-util: 29.7.0 + + jest-pnp-resolver@1.2.3(jest-resolve@29.7.0): + optionalDependencies: + jest-resolve: 29.7.0 + + jest-regex-util@29.6.3: {} + + jest-resolve-dependencies@29.7.0: + dependencies: + jest-regex-util: 29.6.3 + jest-snapshot: 29.7.0 + transitivePeerDependencies: + - supports-color + + jest-resolve@29.7.0: + dependencies: + chalk: 4.1.2 + graceful-fs: 4.2.11 + jest-haste-map: 29.7.0 + jest-pnp-resolver: 1.2.3(jest-resolve@29.7.0) + jest-util: 29.7.0 + jest-validate: 29.7.0 + resolve: 1.22.8 + resolve.exports: 2.0.2 + slash: 3.0.0 + + jest-runner@29.7.0: + dependencies: + '@jest/console': 29.7.0 + '@jest/environment': 29.7.0 + '@jest/test-result': 29.7.0 + '@jest/transform': 29.7.0 + '@jest/types': 29.6.3 + '@types/node': 22.8.6 + chalk: 4.1.2 + emittery: 0.13.1 + graceful-fs: 4.2.11 + jest-docblock: 29.7.0 + jest-environment-node: 29.7.0 + jest-haste-map: 29.7.0 + jest-leak-detector: 29.7.0 + jest-message-util: 29.7.0 + jest-resolve: 29.7.0 + jest-runtime: 29.7.0 + jest-util: 29.7.0 + jest-watcher: 29.7.0 + jest-worker: 29.7.0 + p-limit: 3.1.0 + source-map-support: 0.5.13 + transitivePeerDependencies: + - supports-color + + jest-runtime@29.7.0: + dependencies: + '@jest/environment': 29.7.0 + '@jest/fake-timers': 29.7.0 + '@jest/globals': 29.7.0 + '@jest/source-map': 29.6.3 + '@jest/test-result': 29.7.0 + '@jest/transform': 29.7.0 + '@jest/types': 29.6.3 + '@types/node': 22.8.6 + chalk: 4.1.2 + cjs-module-lexer: 1.2.3 + collect-v8-coverage: 1.0.2 + glob: 7.2.3 + graceful-fs: 4.2.11 + jest-haste-map: 29.7.0 + jest-message-util: 29.7.0 + jest-mock: 29.7.0 + jest-regex-util: 29.6.3 + jest-resolve: 29.7.0 + jest-snapshot: 29.7.0 + jest-util: 29.7.0 + slash: 3.0.0 + strip-bom: 4.0.0 + transitivePeerDependencies: + - supports-color + + jest-snapshot@29.7.0: + dependencies: + '@babel/core': 7.23.9 + '@babel/generator': 7.23.6 + '@babel/plugin-syntax-jsx': 7.23.3(@babel/core@7.23.9) + '@babel/plugin-syntax-typescript': 7.23.3(@babel/core@7.23.9) + '@babel/types': 7.23.9 + '@jest/expect-utils': 29.7.0 + '@jest/transform': 29.7.0 + '@jest/types': 29.6.3 + babel-preset-current-node-syntax: 1.0.1(@babel/core@7.23.9) + chalk: 4.1.2 + expect: 29.7.0 + graceful-fs: 4.2.11 + jest-diff: 29.7.0 + jest-get-type: 29.6.3 + jest-matcher-utils: 29.7.0 + jest-message-util: 29.7.0 + jest-util: 29.7.0 + natural-compare: 1.4.0 + pretty-format: 29.7.0 + semver: 7.6.2 + transitivePeerDependencies: + - supports-color + + jest-util@29.7.0: + dependencies: + '@jest/types': 29.6.3 + '@types/node': 22.8.6 + chalk: 4.1.2 + ci-info: 3.9.0 + graceful-fs: 4.2.11 + picomatch: 2.3.1 + + jest-validate@29.7.0: + dependencies: + '@jest/types': 29.6.3 + camelcase: 6.3.0 + chalk: 4.1.2 + jest-get-type: 29.6.3 + leven: 3.1.0 + pretty-format: 29.7.0 + + jest-watcher@29.7.0: + dependencies: + '@jest/test-result': 29.7.0 + '@jest/types': 29.6.3 + '@types/node': 22.8.6 + ansi-escapes: 4.3.2 + chalk: 4.1.2 + emittery: 0.13.1 + jest-util: 29.7.0 + string-length: 4.0.2 + + jest-worker@29.7.0: + dependencies: + '@types/node': 22.8.6 + jest-util: 
29.7.0 + merge-stream: 2.0.0 + supports-color: 8.1.1 + + jest@29.7.0(@types/node@22.8.6): + dependencies: + '@jest/core': 29.7.0 + '@jest/types': 29.6.3 + import-local: 3.1.0 + jest-cli: 29.7.0(@types/node@22.8.6) + transitivePeerDependencies: + - '@types/node' + - babel-plugin-macros + - supports-color + - ts-node + + js-tokens@4.0.0: {} + + js-yaml@3.14.1: + dependencies: + argparse: 1.0.10 + esprima: 4.0.1 + + js-yaml@4.1.0: + dependencies: + argparse: 2.0.1 + + jsesc@2.5.2: {} + + json-buffer@3.0.1: {} + + json-parse-even-better-errors@2.3.1: {} + + json-schema-traverse@0.4.1: {} + + json-stable-stringify-without-jsonify@1.0.1: {} + + json5@2.2.3: {} + + keyv@4.5.4: + dependencies: + json-buffer: 3.0.1 + + kleur@3.0.3: {} + + lemmy-js-client@0.20.0-alpha.11: {} + + leven@3.1.0: {} + + levn@0.4.1: + dependencies: + prelude-ls: 1.2.1 + type-check: 0.4.0 + + lines-and-columns@1.2.4: {} + + locate-path@5.0.0: + dependencies: + p-locate: 4.1.0 + + locate-path@6.0.0: + dependencies: + p-locate: 5.0.0 + + lodash.memoize@4.1.2: {} + + lodash.merge@4.6.2: {} + + lru-cache@5.1.1: + dependencies: + yallist: 3.1.1 + + make-dir@4.0.0: + dependencies: + semver: 7.6.3 + + make-error@1.3.6: {} + + makeerror@1.0.12: + dependencies: + tmpl: 1.0.5 + + merge-stream@2.0.0: {} + + merge2@1.4.1: {} + + micromatch@4.0.5: + dependencies: + braces: 3.0.2 + picomatch: 2.3.1 + + micromatch@4.0.8: + dependencies: + braces: 3.0.3 + picomatch: 2.3.1 + + mimic-fn@2.1.0: {} + + minimatch@3.1.2: + dependencies: + brace-expansion: 1.1.11 + + minimatch@5.1.6: + dependencies: + brace-expansion: 2.0.1 + + minimatch@9.0.5: + dependencies: + brace-expansion: 2.0.1 + + ms@2.1.3: {} + + natural-compare@1.4.0: {} + + node-int64@0.4.0: {} + + node-releases@2.0.14: {} + + normalize-path@3.0.0: {} + + npm-run-path@4.0.1: + dependencies: + path-key: 3.1.1 + + once@1.4.0: + dependencies: + wrappy: 1.0.2 + + onetime@5.1.2: + dependencies: + mimic-fn: 2.1.0 + + optionator@0.9.4: + dependencies: + deep-is: 0.1.4 + fast-levenshtein: 2.0.6 + levn: 0.4.1 + prelude-ls: 1.2.1 + type-check: 0.4.0 + word-wrap: 1.2.5 + + p-limit@2.3.0: + dependencies: + p-try: 2.2.0 + + p-limit@3.1.0: + dependencies: + yocto-queue: 0.1.0 + + p-locate@4.1.0: + dependencies: + p-limit: 2.3.0 + + p-locate@5.0.0: + dependencies: + p-limit: 3.1.0 + + p-try@2.2.0: {} + + parent-module@1.0.1: + dependencies: + callsites: 3.1.0 + + parse-json@5.2.0: + dependencies: + '@babel/code-frame': 7.26.2 + error-ex: 1.3.2 + json-parse-even-better-errors: 2.3.1 + lines-and-columns: 1.2.4 + + path-exists@4.0.0: {} + + path-is-absolute@1.0.1: {} + + path-key@3.1.1: {} + + path-parse@1.0.7: {} + + picocolors@1.1.1: {} + + picomatch@2.3.1: {} + + pirates@4.0.6: {} + + pkg-dir@4.2.0: + dependencies: + find-up: 4.1.0 + + prelude-ls@1.2.1: {} + + prettier-linter-helpers@1.0.0: + dependencies: + fast-diff: 1.3.0 + + prettier@3.3.3: {} + + pretty-format@29.7.0: + dependencies: + '@jest/schemas': 29.6.3 + ansi-styles: 5.2.0 + react-is: 18.3.1 + + prompts@2.4.2: + dependencies: + kleur: 3.0.3 + sisteransi: 1.0.5 + + punycode@2.3.1: {} + + pure-rand@6.0.4: {} + + queue-microtask@1.2.3: {} + + react-is@18.3.1: {} + + require-directory@2.1.1: {} + + resolve-cwd@3.0.0: + dependencies: + resolve-from: 5.0.0 + + resolve-from@4.0.0: {} + + resolve-from@5.0.0: {} + + resolve.exports@2.0.2: {} + + resolve@1.22.8: + dependencies: + is-core-module: 2.13.1 + path-parse: 1.0.7 + supports-preserve-symlinks-flag: 1.0.0 + + reusify@1.0.4: {} + + run-parallel@1.2.0: + dependencies: + queue-microtask: 1.2.3 
+ + semver@6.3.1: {} + + semver@7.6.2: {} + + semver@7.6.3: {} + + shebang-command@2.0.0: + dependencies: + shebang-regex: 3.0.0 + + shebang-regex@3.0.0: {} + + signal-exit@3.0.7: {} + + sisteransi@1.0.5: {} + + slash@3.0.0: {} + + source-map-support@0.5.13: + dependencies: + buffer-from: 1.1.2 + source-map: 0.6.1 + + source-map@0.6.1: {} + + sprintf-js@1.0.3: {} + + stack-utils@2.0.6: + dependencies: + escape-string-regexp: 2.0.0 + + string-length@4.0.2: + dependencies: + char-regex: 1.0.2 + strip-ansi: 6.0.1 + + string-width@4.2.3: + dependencies: + emoji-regex: 8.0.0 + is-fullwidth-code-point: 3.0.0 + strip-ansi: 6.0.1 + + strip-ansi@6.0.1: + dependencies: + ansi-regex: 5.0.1 + + strip-bom@4.0.0: {} + + strip-final-newline@2.0.0: {} + + strip-json-comments@3.1.1: {} + + supports-color@7.2.0: + dependencies: + has-flag: 4.0.0 + + supports-color@8.1.1: + dependencies: + has-flag: 4.0.0 + + supports-preserve-symlinks-flag@1.0.0: {} + + synckit@0.9.1: + dependencies: + '@pkgr/core': 0.1.1 + tslib: 2.6.3 + + test-exclude@6.0.0: + dependencies: + '@istanbuljs/schema': 0.1.3 + glob: 7.2.3 + minimatch: 3.1.2 + + text-table@0.2.0: {} + + tmpl@1.0.5: {} + + to-fast-properties@2.0.0: {} + + to-regex-range@5.0.1: + dependencies: + is-number: 7.0.0 + + ts-api-utils@1.4.0(typescript@5.6.3): + dependencies: + typescript: 5.6.3 + + ts-jest@29.2.5(@babel/core@7.23.9)(@jest/transform@29.7.0)(@jest/types@29.6.3)(babel-jest@29.7.0(@babel/core@7.23.9))(jest@29.7.0(@types/node@22.8.6))(typescript@5.6.3): + dependencies: + bs-logger: 0.2.6 + ejs: 3.1.10 + fast-json-stable-stringify: 2.1.0 + jest: 29.7.0(@types/node@22.8.6) + jest-util: 29.7.0 + json5: 2.2.3 + lodash.memoize: 4.1.2 + make-error: 1.3.6 + semver: 7.6.3 + typescript: 5.6.3 + yargs-parser: 21.1.1 + optionalDependencies: + '@babel/core': 7.23.9 + '@jest/transform': 29.7.0 + '@jest/types': 29.6.3 + babel-jest: 29.7.0(@babel/core@7.23.9) + + tslib@2.6.3: {} + + type-check@0.4.0: + dependencies: + prelude-ls: 1.2.1 + + type-detect@4.0.8: {} + + type-fest@0.21.3: {} + + typescript-eslint@8.12.2(eslint@9.13.0)(typescript@5.6.3): + dependencies: + '@typescript-eslint/eslint-plugin': 8.12.2(@typescript-eslint/parser@8.12.2(eslint@9.13.0)(typescript@5.6.3))(eslint@9.13.0)(typescript@5.6.3) + '@typescript-eslint/parser': 8.12.2(eslint@9.13.0)(typescript@5.6.3) + '@typescript-eslint/utils': 8.12.2(eslint@9.13.0)(typescript@5.6.3) + optionalDependencies: + typescript: 5.6.3 + transitivePeerDependencies: + - eslint + - supports-color + + typescript@5.6.3: {} + + undici-types@6.19.8: {} + + update-browserslist-db@1.0.13(browserslist@4.22.3): + dependencies: + browserslist: 4.22.3 + escalade: 3.1.1 + picocolors: 1.1.1 + + uri-js@4.4.1: + dependencies: + punycode: 2.3.1 + + v8-to-istanbul@9.2.0: + dependencies: + '@jridgewell/trace-mapping': 0.3.22 + '@types/istanbul-lib-coverage': 2.0.6 + convert-source-map: 2.0.0 + + walker@1.0.8: + dependencies: + makeerror: 1.0.12 + + which@2.0.2: + dependencies: + isexe: 2.0.0 + + word-wrap@1.2.5: {} + + wrap-ansi@7.0.0: + dependencies: + ansi-styles: 4.3.0 + string-width: 4.2.3 + strip-ansi: 6.0.1 + + wrappy@1.0.2: {} + + write-file-atomic@4.0.2: + dependencies: + imurmurhash: 0.1.4 + signal-exit: 3.0.7 + + y18n@5.0.8: {} + + yallist@3.1.1: {} + + yargs-parser@21.1.1: {} + + yargs@17.7.2: + dependencies: + cliui: 8.0.1 + escalade: 3.1.1 + get-caller-file: 2.0.5 + require-directory: 2.1.1 + string-width: 4.2.3 + y18n: 5.0.8 + yargs-parser: 21.1.1 + + yocto-queue@0.1.0: {} diff --git 
a/api_tests/prepare-drone-federation-test.sh b/api_tests/prepare-drone-federation-test.sh index 4044ba0dd..65c4827d9 100755 --- a/api_tests/prepare-drone-federation-test.sh +++ b/api_tests/prepare-drone-federation-test.sh @@ -3,11 +3,28 @@ # it is expected that this script is called by run-federation-test.sh script. set -e +if [ -z "$LEMMY_LOG_LEVEL" ]; +then + LEMMY_LOG_LEVEL=info +fi + export RUST_BACKTRACE=1 -export RUST_LOG="warn,lemmy_server=debug,lemmy_federate=debug,lemmy_api=debug,lemmy_api_common=debug,lemmy_api_crud=debug,lemmy_apub=debug,lemmy_db_schema=debug,lemmy_db_views=debug,lemmy_db_views_actor=debug,lemmy_db_views_moderator=debug,lemmy_routes=debug,lemmy_utils=debug,lemmy_websocket=debug" +export RUST_LOG="warn,lemmy_server=$LEMMY_LOG_LEVEL,lemmy_federate=$LEMMY_LOG_LEVEL,lemmy_api=$LEMMY_LOG_LEVEL,lemmy_api_common=$LEMMY_LOG_LEVEL,lemmy_api_crud=$LEMMY_LOG_LEVEL,lemmy_apub=$LEMMY_LOG_LEVEL,lemmy_db_schema=$LEMMY_LOG_LEVEL,lemmy_db_views=$LEMMY_LOG_LEVEL,lemmy_db_views_actor=$LEMMY_LOG_LEVEL,lemmy_db_views_moderator=$LEMMY_LOG_LEVEL,lemmy_routes=$LEMMY_LOG_LEVEL,lemmy_utils=$LEMMY_LOG_LEVEL,lemmy_websocket=$LEMMY_LOG_LEVEL" export LEMMY_TEST_FAST_FEDERATION=1 # by default, the persistent federation queue has delays in the scale of 30s-5min +# pictrs setup +if [ ! -f "api_tests/pict-rs" ]; then + curl "https://git.asonix.dog/asonix/pict-rs/releases/download/v0.5.16/pict-rs-linux-amd64" -o api_tests/pict-rs + chmod +x api_tests/pict-rs +fi +./api_tests/pict-rs \ + run -a 0.0.0.0:8080 \ + --danger-dummy-mode \ + --api-key "my-pictrs-key" \ + filesystem -p /tmp/pictrs/files \ + sled -p /tmp/pictrs/sled-repo 2>&1 & + for INSTANCE in lemmy_alpha lemmy_beta lemmy_gamma lemmy_delta lemmy_epsilon; do echo "DB URL: ${LEMMY_DATABASE_URL} INSTANCE: $INSTANCE" psql "${LEMMY_DATABASE_URL}/lemmy" -c "DROP DATABASE IF EXISTS $INSTANCE" @@ -34,32 +51,35 @@ fi echo "$PWD" +LOG_DIR=target/log +mkdir -p $LOG_DIR + echo "start alpha" LEMMY_CONFIG_LOCATION=./docker/federation/lemmy_alpha.hjson \ LEMMY_DATABASE_URL="${LEMMY_DATABASE_URL}/lemmy_alpha" \ - target/lemmy_server >/tmp/lemmy_alpha.out 2>&1 & + target/lemmy_server >$LOG_DIR/lemmy_alpha.out 2>&1 & echo "start beta" LEMMY_CONFIG_LOCATION=./docker/federation/lemmy_beta.hjson \ LEMMY_DATABASE_URL="${LEMMY_DATABASE_URL}/lemmy_beta" \ - target/lemmy_server >/tmp/lemmy_beta.out 2>&1 & + target/lemmy_server >$LOG_DIR/lemmy_beta.out 2>&1 & echo "start gamma" LEMMY_CONFIG_LOCATION=./docker/federation/lemmy_gamma.hjson \ LEMMY_DATABASE_URL="${LEMMY_DATABASE_URL}/lemmy_gamma" \ - target/lemmy_server >/tmp/lemmy_gamma.out 2>&1 & + target/lemmy_server >$LOG_DIR/lemmy_gamma.out 2>&1 & echo "start delta" # An instance with only an allowlist for beta LEMMY_CONFIG_LOCATION=./docker/federation/lemmy_delta.hjson \ LEMMY_DATABASE_URL="${LEMMY_DATABASE_URL}/lemmy_delta" \ - target/lemmy_server >/tmp/lemmy_delta.out 2>&1 & + target/lemmy_server >$LOG_DIR/lemmy_delta.out 2>&1 & echo "start epsilon" # An instance who has a blocklist, with lemmy-alpha blocked LEMMY_CONFIG_LOCATION=./docker/federation/lemmy_epsilon.hjson \ LEMMY_DATABASE_URL="${LEMMY_DATABASE_URL}/lemmy_epsilon" \ - target/lemmy_server >/tmp/lemmy_epsilon.out 2>&1 & + target/lemmy_server >$LOG_DIR/lemmy_epsilon.out 2>&1 & echo "wait for all instances to start" while [[ "$(curl -s -o /dev/null -w '%{http_code}' 'lemmy-alpha:8541/api/v3/site')" != "200" ]]; do sleep 1; done diff --git a/api_tests/run-federation-test.sh b/api_tests/run-federation-test.sh index 3042fd344..969a95b3e 100755 --- 
a/api_tests/run-federation-test.sh +++ b/api_tests/run-federation-test.sh @@ -10,10 +10,12 @@ killall -s1 lemmy_server || true ./api_tests/prepare-drone-federation-test.sh popd -yarn -yarn api-test || true +pnpm i +pnpm api-test || true killall -s1 lemmy_server || true +killall -s1 pict-rs || true for INSTANCE in lemmy_alpha lemmy_beta lemmy_gamma lemmy_delta lemmy_epsilon; do psql "$LEMMY_DATABASE_URL" -c "DROP DATABASE $INSTANCE" done +rm -r /tmp/pictrs diff --git a/api_tests/src/comment.spec.ts b/api_tests/src/comment.spec.ts index 6522a472e..c3f4b3efe 100644 --- a/api_tests/src/comment.spec.ts +++ b/api_tests/src/comment.spec.ts @@ -35,17 +35,17 @@ import { waitForPost, alphaUrl, followCommunity, + blockCommunity, + delay, + saveUserSettings, } from "./shared"; -import { CommentView } from "lemmy-js-client/dist/types/CommentView"; -import { CommunityView } from "lemmy-js-client"; -import { LemmyHttp } from "lemmy-js-client"; +import { CommentView, CommunityView, SaveUserSettings } from "lemmy-js-client"; let betaCommunity: CommunityView | undefined; let postOnAlphaRes: PostResponse; beforeAll(async () => { await setupLogins(); - await unfollows(); await Promise.all([followBeta(alpha), followBeta(gamma)]); betaCommunity = (await resolveBetaCommunity(alpha)).community; if (betaCommunity) { @@ -53,9 +53,7 @@ beforeAll(async () => { } }); -afterAll(async () => { - await unfollows(); -}); +afterAll(unfollows); function assertCommentFederation( commentOne?: CommentView, @@ -93,7 +91,9 @@ test("Create a comment", async () => { }); test("Create a comment in a non-existent post", async () => { - await expect(createComment(alpha, -1)).rejects.toBe("couldnt_find_post"); + await expect(createComment(alpha, -1)).rejects.toStrictEqual( + Error("not_found"), + ); }); test("Update a comment", async () => { @@ -126,8 +126,9 @@ test("Update a comment", async () => { }); test("Delete a comment", async () => { + let post = await createPost(alpha, betaCommunity!.community.id); // creating a comment on alpha (remote from home of community) - let commentRes = await createComment(alpha, postOnAlphaRes.post_view.post.id); + let commentRes = await createComment(alpha, post.post_view.post.id); // Find the comment on beta (home of community) let betaComment = ( @@ -142,7 +143,7 @@ test("Delete a comment", async () => { await waitUntil( () => resolveComment(gamma, commentRes.comment_view.comment).catch(e => e), - r => r !== "couldnt_find_object", + r => r.message !== "not_found", ) ).comment; if (!gammaComment) { @@ -155,17 +156,18 @@ test("Delete a comment", async () => { commentRes.comment_view.comment.id, ); expect(deleteCommentRes.comment_view.comment.deleted).toBe(true); + expect(deleteCommentRes.comment_view.comment.content).toBe(""); - // Make sure that comment is undefined on beta + // Make sure that comment is deleted on beta await waitUntil( - () => resolveComment(beta, commentRes.comment_view.comment).catch(e => e), - e => e === "couldnt_find_object", + () => resolveComment(beta, commentRes.comment_view.comment), + c => c.comment?.comment.deleted === true, ); - // Make sure that comment is undefined on gamma after delete + // Make sure that comment is deleted on gamma after delete await waitUntil( - () => resolveComment(gamma, commentRes.comment_view.comment).catch(e => e), - e => e === "couldnt_find_object", + () => resolveComment(gamma, commentRes.comment_view.comment), + c => c.comment?.comment.deleted === true, ); // Test undeleting the comment @@ -179,11 +181,10 @@ test("Delete a comment", async () 
=> { // Make sure that comment is undeleted on beta let betaComment2 = ( await waitUntil( - () => resolveComment(beta, commentRes.comment_view.comment).catch(e => e), - e => e !== "couldnt_find_object", + () => resolveComment(beta, commentRes.comment_view.comment), + c => c.comment?.comment.deleted === false, ) ).comment; - expect(betaComment2?.comment.deleted).toBe(false); assertCommentFederation(betaComment2, undeleteCommentRes.comment_view); }); @@ -227,10 +228,7 @@ test.skip("Remove a comment from admin and community on the same instance", asyn }); test("Remove a comment from admin and community on different instance", async () => { - let alpha_user = await registerUser(alpha); - let newAlphaApi = new LemmyHttp(alphaUrl, { - headers: { Authorization: `Bearer ${alpha_user.jwt ?? ""}` }, - }); + let newAlphaApi = await registerUser(alpha, alphaUrl); // New alpha user creates a community, post, and comment. let newCommunity = await createCommunity(newAlphaApi); @@ -256,6 +254,16 @@ test("Remove a comment from admin and community on different instance", async () betaComment.comment.id, ); expect(removeCommentRes.comment_view.comment.removed).toBe(true); + expect(removeCommentRes.comment_view.comment.content).toBe(""); + + // Comment text is also hidden from list + let listComments = await getComments( + beta, + removeCommentRes.comment_view.post.id, + ); + expect(listComments.comments.length).toBe(1); + expect(listComments.comments[0].comment.removed).toBe(true); + expect(listComments.comments[0].comment.content).toBe(""); // Make sure its not removed on alpha let refetchedPostComments = await getComments( @@ -345,17 +353,26 @@ test("Federated comment like", async () => { test("Reply to a comment from another instance, get notification", async () => { await alpha.markAllAsRead(); - let betaCommunity = (await resolveBetaCommunity(alpha)).community; + let betaCommunity = ( + await waitUntil( + () => resolveBetaCommunity(alpha), + c => !!c.community?.community.instance_id, + ) + ).community; if (!betaCommunity) { throw "Missing beta community"; } + const postOnAlphaRes = await createPost(alpha, betaCommunity.community.id); // Create a root-level trunk-branch comment on alpha let commentRes = await createComment(alpha, postOnAlphaRes.post_view.post.id); // find that comment id on beta let betaComment = ( - await resolveComment(beta, commentRes.comment_view.comment) + await waitUntil( + () => resolveComment(beta, commentRes.comment_view.comment), + c => c.comment?.counts.score === 1, + ) ).comment; if (!betaComment) { @@ -406,7 +423,10 @@ test("Reply to a comment from another instance, get notification", async () => { expect(alphaUnreadCountRes.replies).toBeGreaterThanOrEqual(1); // check inbox of replies on alpha, fetching read/unread both - let alphaRepliesRes = await getReplies(alpha); + let alphaRepliesRes = await waitUntil( + () => getReplies(alpha), + r => r.replies.length > 0, + ); const alphaReply = alphaRepliesRes.replies.find( r => r.comment.id === alphaComment.comment.id, ); @@ -423,6 +443,59 @@ test("Reply to a comment from another instance, get notification", async () => { assertCommentFederation(alphaReply, replyRes.comment_view); }); +test("Bot reply notifications are filtered when bots are hidden", async () => { + const newAlphaBot = await registerUser(alpha, alphaUrl); + let form: SaveUserSettings = { + bot_account: true, + }; + await saveUserSettings(newAlphaBot, form); + + const alphaCommunity = ( + await resolveCommunity(alpha, "!main@lemmy-alpha:8541") + ).community; + + if 
(!alphaCommunity) { + throw "Missing alpha community"; + } + + await alpha.markAllAsRead(); + form = { + show_bot_accounts: false, + }; + await saveUserSettings(alpha, form); + const postOnAlphaRes = await createPost(alpha, alphaCommunity.community.id); + + // Bot reply to alpha's post + let commentRes = await createComment( + newAlphaBot, + postOnAlphaRes.post_view.post.id, + ); + expect(commentRes).toBeDefined(); + + let alphaUnreadCountRes = await getUnreadCount(alpha); + expect(alphaUnreadCountRes.replies).toBe(0); + + let alphaUnreadRepliesRes = await getReplies(alpha, true); + expect(alphaUnreadRepliesRes.replies.length).toBe(0); + + // This both restores the original state that may be expected by other tests + // implicitly and is used by the next steps to ensure replies are still + // returned when a user later decides to show bot accounts again. + form = { + show_bot_accounts: true, + }; + await saveUserSettings(alpha, form); + + alphaUnreadCountRes = await getUnreadCount(alpha); + expect(alphaUnreadCountRes.replies).toBe(1); + + alphaUnreadRepliesRes = await getReplies(alpha, true); + expect(alphaUnreadRepliesRes.replies.length).toBe(1); + expect(alphaUnreadRepliesRes.replies[0].comment.id).toBe( + commentRes.comment_view.comment.id, + ); +}); + test("Mention beta from alpha", async () => { if (!betaCommunity) throw Error("no community"); const postOnAlphaRes = await createPost(alpha, betaCommunity.community.id); @@ -740,3 +813,70 @@ test("Report a comment", async () => { ); expect(betaReport.reason).toBe(alphaReport.reason); }); + +test("Dont send a comment reply to a blocked community", async () => { + let newCommunity = await createCommunity(beta); + let newCommunityId = newCommunity.community_view.community.id; + + // Create a post on beta + let betaPost = await createPost(beta, newCommunityId); + + let alphaPost = (await resolvePost(alpha, betaPost.post_view.post))!.post; + if (!alphaPost) { + throw "unable to locate post on alpha"; + } + + // Check beta's inbox count + let unreadCount = await getUnreadCount(beta); + expect(unreadCount.replies).toBe(1); + + // Beta blocks the new beta community + let blockRes = await blockCommunity(beta, newCommunityId, true); + expect(blockRes.blocked).toBe(true); + delay(); + + // Alpha creates a comment + let commentRes = await createComment(alpha, alphaPost.post.id); + expect(commentRes.comment_view.comment.content).toBeDefined(); + let alphaComment = await resolveComment( + beta, + commentRes.comment_view.comment, + ); + if (!alphaComment) { + throw "Missing alpha comment before block"; + } + + // Check beta's inbox count, make sure it stays the same + unreadCount = await getUnreadCount(beta); + expect(unreadCount.replies).toBe(1); + + let replies = await getReplies(beta); + expect(replies.replies.length).toBe(1); + + // Unblock the community + blockRes = await blockCommunity(beta, newCommunityId, false); + expect(blockRes.blocked).toBe(false); +}); + +/// Fetching a deeply nested comment can lead to stack overflow as all parent comments are also +/// fetched recursively. Ensure that it works properly. 
+test.skip("Fetch a deeply nested comment", async () => { + let lastComment; + for (let i = 0; i < 50; i++) { + let commentRes = await createComment( + alpha, + postOnAlphaRes.post_view.post.id, + lastComment?.comment_view.comment.id, + ); + expect(commentRes.comment_view.comment).toBeDefined(); + lastComment = commentRes; + } + + let betaComment = await resolveComment( + beta, + lastComment!.comment_view.comment, + ); + + expect(betaComment!.comment!.comment).toBeDefined(); + expect(betaComment?.comment?.post).toBeDefined(); +}); diff --git a/api_tests/src/community.spec.ts b/api_tests/src/community.spec.ts index 2f3a410f6..77b68e2fc 100644 --- a/api_tests/src/community.spec.ts +++ b/api_tests/src/community.spec.ts @@ -1,5 +1,6 @@ jest.setTimeout(120000); +import { AddModToCommunity } from "lemmy-js-client/dist/types/AddModToCommunity"; import { CommunityView } from "lemmy-js-client/dist/types/CommunityView"; import { alpha, @@ -9,6 +10,7 @@ import { resolveCommunity, createCommunity, deleteCommunity, + delay, removeCommunity, getCommunity, followCommunity, @@ -29,14 +31,14 @@ import { delta, betaAllowedInstances, searchPostLocal, - resolveBetaCommunity, longDelay, + editCommunity, + unfollows, } from "./shared"; -import { EditSite, LemmyHttp } from "lemmy-js-client"; +import { EditCommunity, EditSite } from "lemmy-js-client"; -beforeAll(async () => { - await setupLogins(); -}); +beforeAll(setupLogins); +afterAll(unfollows); function assertCommunityFederation( communityOne?: CommunityView, @@ -66,8 +68,8 @@ test("Create community", async () => { // A dupe check let prevName = communityRes.community_view.community.name; - await expect(createCommunity(alpha, prevName)).rejects.toBe( - "community_already_exists", + await expect(createCommunity(alpha, prevName)).rejects.toStrictEqual( + Error("community_already_exists"), ); // Cache the community on beta, make sure it has the other fields @@ -242,7 +244,7 @@ test("Admin actions in remote community are not federated to origin", async () = ); expect(banRes.banned).toBe(true); - // ban doesnt federate to community's origin instance alpha + // ban doesn't federate to community's origin instance alpha let alphaPost = (await resolvePost(alpha, gammaPost.post)).post; expect(alphaPost?.creator_banned_from_community).toBe(false); @@ -253,10 +255,7 @@ test("Admin actions in remote community are not federated to origin", async () = test("moderator view", async () => { // register a new user with their own community on alpha and post to it - let registerUserRes = await registerUser(alpha); - let otherUser = new LemmyHttp(alphaUrl, { - headers: { Authorization: `Bearer ${registerUserRes.jwt ?? 
""}` }, - }); + let otherUser = await registerUser(alpha, alphaUrl); let otherCommunity = (await createCommunity(otherUser)).community_view; expect(otherCommunity.community.name).toBeDefined(); @@ -333,8 +332,8 @@ test("Get community for different casing on domain", async () => { // A dupe check let prevName = communityRes.community_view.community.name; - await expect(createCommunity(alpha, prevName)).rejects.toBe( - "community_already_exists", + await expect(createCommunity(alpha, prevName)).rejects.toStrictEqual( + Error("community_already_exists"), ); // Cache the community on beta, make sure it has the other fields @@ -382,7 +381,9 @@ test("User blocks instance, communities are hidden", async () => { test("Community follower count is federated", async () => { // Follow the beta community from alpha - let resolved = await resolveBetaCommunity(alpha); + let community = await createCommunity(beta); + let communityActorId = community.community_view.community.actor_id; + let resolved = await resolveCommunity(alpha, communityActorId); if (!resolved.community) { throw "Missing beta community"; } @@ -390,7 +391,7 @@ test("Community follower count is federated", async () => { await followCommunity(alpha, true, resolved.community.community.id); let followed = ( await waitUntil( - () => resolveBetaCommunity(alpha), + () => resolveCommunity(alpha, communityActorId), c => c.community?.subscribed === "Subscribed", ) ).community; @@ -399,7 +400,7 @@ test("Community follower count is federated", async () => { expect(followed?.counts.subscribers).toBe(1); // Follow the community from gamma - resolved = await resolveBetaCommunity(gamma); + resolved = await resolveCommunity(gamma, communityActorId); if (!resolved.community) { throw "Missing beta community"; } @@ -407,7 +408,7 @@ test("Community follower count is federated", async () => { await followCommunity(gamma, true, resolved.community.community.id); followed = ( await waitUntil( - () => resolveBetaCommunity(gamma), + () => resolveCommunity(gamma, communityActorId), c => c.community?.subscribed === "Subscribed", ) ).community; @@ -416,7 +417,7 @@ test("Community follower count is federated", async () => { expect(followed?.counts?.subscribers).toBe(2); // Follow the community from delta - resolved = await resolveBetaCommunity(delta); + resolved = await resolveCommunity(delta, communityActorId); if (!resolved.community) { throw "Missing beta community"; } @@ -424,7 +425,7 @@ test("Community follower count is federated", async () => { await followCommunity(delta, true, resolved.community.community.id); followed = ( await waitUntil( - () => resolveBetaCommunity(delta), + () => resolveCommunity(delta, communityActorId), c => c.community?.subscribed === "Subscribed", ) ).community; @@ -453,7 +454,7 @@ test("Dont receive community activities after unsubscribe", async () => { ); expect(communityRes1.community_view.counts.subscribers).toBe(2); - // temporarily block alpha, so that it doesnt know about unfollow + // temporarily block alpha, so that it doesn't know about unfollow let editSiteForm: EditSite = {}; editSiteForm.allowed_instances = ["lemmy-epsilon"]; await beta.editSite(editSiteForm); @@ -485,3 +486,90 @@ test("Dont receive community activities after unsubscribe", async () => { let postResBeta = searchPostLocal(beta, postRes.post_view.post); expect((await postResBeta).posts.length).toBe(0); }); + +test("Fetch community, includes posts", async () => { + let communityRes = await createCommunity(alpha); + 
expect(communityRes.community_view.community.name).toBeDefined(); + expect(communityRes.community_view.counts.subscribers).toBe(1); + + let postRes = await createPost( + alpha, + communityRes.community_view.community.id, + ); + expect(postRes.post_view.post).toBeDefined(); + + let resolvedCommunity = await waitUntil( + () => + resolveCommunity(beta, communityRes.community_view.community.actor_id), + c => c.community?.community.id != undefined, + ); + let betaCommunity = resolvedCommunity.community; + expect(betaCommunity?.community.actor_id).toBe( + communityRes.community_view.community.actor_id, + ); + + await longDelay(); + + let post_listing = await getPosts(beta, "All", betaCommunity?.community.id); + expect(post_listing.posts.length).toBe(1); + expect(post_listing.posts[0].post.ap_id).toBe(postRes.post_view.post.ap_id); +}); + +test("Content in local-only community doesn't federate", async () => { + // create a community and set it local-only + let communityRes = (await createCommunity(alpha)).community_view.community; + let form: EditCommunity = { + community_id: communityRes.id, + visibility: "LocalOnly", + }; + await editCommunity(alpha, form); + + // cant resolve the community from another instance + await expect( + resolveCommunity(beta, communityRes.actor_id), + ).rejects.toStrictEqual(Error("not_found")); + + // create a post, also cant resolve it + let postRes = await createPost(alpha, communityRes.id); + await expect(resolvePost(beta, postRes.post_view.post)).rejects.toStrictEqual( + Error("not_found"), + ); +}); + +test("Remote mods can edit communities", async () => { + let communityRes = await createCommunity(alpha); + + let betaCommunity = await resolveCommunity( + beta, + communityRes.community_view.community.actor_id, + ); + if (!betaCommunity.community) { + throw "Missing beta community"; + } + let betaOnAlpha = await resolvePerson(alpha, "lemmy_beta@lemmy-beta:8551"); + + let form: AddModToCommunity = { + community_id: communityRes.community_view.community.id, + person_id: betaOnAlpha.person?.person.id as number, + added: true, + }; + alpha.addModToCommunity(form); + + let form2: EditCommunity = { + community_id: betaCommunity.community?.community.id as number, + description: "Example description", + }; + + await editCommunity(beta, form2); + // give alpha time to get and process the edit + await delay(1000); + + let alphaCommunity = await getCommunity( + alpha, + communityRes.community_view.community.id, + ); + + await expect(alphaCommunity.community_view.community.description).toBe( + "Example description", + ); +}); diff --git a/api_tests/src/follow.spec.ts b/api_tests/src/follow.spec.ts index a0b43989b..22fdfa305 100644 --- a/api_tests/src/follow.spec.ts +++ b/api_tests/src/follow.spec.ts @@ -5,26 +5,65 @@ import { setupLogins, resolveBetaCommunity, followCommunity, - unfollowRemotes, getSite, waitUntil, + beta, + betaUrl, + registerUser, + unfollows, + delay, } from "./shared"; -beforeAll(async () => { - await setupLogins(); -}); +beforeAll(setupLogins); -afterAll(async () => { - await unfollowRemotes(alpha); +afterAll(unfollows); + +test("Follow local community", async () => { + let user = await registerUser(beta, betaUrl); + + let community = (await resolveBetaCommunity(user)).community!; + let follow = await followCommunity(user, true, community.community.id); + + // Make sure the follow response went through + expect(follow.community_view.community.local).toBe(true); + expect(follow.community_view.subscribed).toBe("Subscribed"); + 
expect(follow.community_view.counts.subscribers).toBe( + community.counts.subscribers + 1, + ); + expect(follow.community_view.counts.subscribers_local).toBe( + community.counts.subscribers_local + 1, + ); + + // Test an unfollow + let unfollow = await followCommunity(user, false, community.community.id); + expect(unfollow.community_view.subscribed).toBe("NotSubscribed"); + expect(unfollow.community_view.counts.subscribers).toBe( + community.counts.subscribers, + ); + expect(unfollow.community_view.counts.subscribers_local).toBe( + community.counts.subscribers_local, + ); }); test("Follow federated community", async () => { - let betaCommunity = (await resolveBetaCommunity(alpha)).community; - if (!betaCommunity) { + // It takes about 1 second for the community aggregates to federate + await delay(2000); // if this is the second test run, we don't have a way to wait for the correct number of subscribers + const betaCommunityInitial = ( + await waitUntil( + () => resolveBetaCommunity(alpha), + c => !!c.community && c.community?.counts.subscribers >= 1, + ) + ).community; + if (!betaCommunityInitial) { throw "Missing beta community"; } - await followCommunity(alpha, true, betaCommunity.community.id); - betaCommunity = ( + let follow = await followCommunity( + alpha, + true, + betaCommunityInitial.community.id, + ); + expect(follow.community_view.subscribed).toBe("Pending"); + const betaCommunity = ( await waitUntil( () => resolveBetaCommunity(alpha), c => c.community?.subscribed === "Subscribed", @@ -35,14 +74,24 @@ test("Follow federated community", async () => { expect(betaCommunity?.community.local).toBe(false); expect(betaCommunity?.community.name).toBe("main"); expect(betaCommunity?.subscribed).toBe("Subscribed"); + expect(betaCommunity?.counts.subscribers_local).toBe( + betaCommunityInitial.counts.subscribers_local + 1, + ); + + // check that unfollow was federated + let communityOnBeta1 = await resolveBetaCommunity(beta); + expect(communityOnBeta1.community?.counts.subscribers).toBe( + betaCommunityInitial.counts.subscribers + 1, + ); // Check it from local let site = await getSite(alpha); let remoteCommunityId = site.my_user?.follows.find( - c => c.community.local == false, + c => + c.community.local == false && + c.community.id === betaCommunityInitial.community.id, )?.community.id; expect(remoteCommunityId).toBeDefined(); - expect(site.my_user?.follows.length).toBe(2); if (!remoteCommunityId) { throw "Missing remote community id"; @@ -54,5 +103,21 @@ test("Follow federated community", async () => { // Make sure you are unsubbed locally let siteUnfollowCheck = await getSite(alpha); - expect(siteUnfollowCheck.my_user?.follows.length).toBe(1); + expect( + siteUnfollowCheck.my_user?.follows.find( + c => c.community.id === betaCommunityInitial.community.id, + ), + ).toBe(undefined); + + // check that unfollow was federated + let communityOnBeta2 = await waitUntil( + () => resolveBetaCommunity(beta), + c => + c.community?.counts.subscribers === + betaCommunityInitial.counts.subscribers, + ); + expect(communityOnBeta2.community?.counts.subscribers).toBe( + betaCommunityInitial.counts.subscribers, + ); + expect(communityOnBeta2.community?.counts.subscribers_local).toBe(1); }); diff --git a/api_tests/src/image.spec.ts b/api_tests/src/image.spec.ts new file mode 100644 index 000000000..ed96451a2 --- /dev/null +++ b/api_tests/src/image.spec.ts @@ -0,0 +1,372 @@ +jest.setTimeout(120000); + +import { + UploadImage, + DeleteImage, + PurgePerson, + PurgePost, +} from "lemmy-js-client"; +import { 
+ alpha, + alphaImage, + alphaUrl, + beta, + betaUrl, + createCommunity, + createPost, + deleteAllImages, + epsilon, + followCommunity, + gamma, + getSite, + imageFetchLimit, + registerUser, + resolveBetaCommunity, + resolveCommunity, + resolvePost, + setupLogins, + waitForPost, + unfollows, + getPost, + waitUntil, + createPostWithThumbnail, + sampleImage, + sampleSite, +} from "./shared"; + +beforeAll(setupLogins); + +afterAll(async () => { + await Promise.all([unfollows(), deleteAllImages(alpha)]); +}); + +test("Upload image and delete it", async () => { + // Before running this test, you need to delete all previous images in the DB + await deleteAllImages(alpha); + + // Upload test image. We use a simple string buffer as pictrs doesn't require an actual image + // in testing mode. + const upload_form: UploadImage = { + image: Buffer.from("test"), + }; + const upload = await alphaImage.uploadImage(upload_form); + expect(upload.files![0].file).toBeDefined(); + expect(upload.files![0].delete_token).toBeDefined(); + expect(upload.url).toBeDefined(); + expect(upload.delete_url).toBeDefined(); + + // ensure that image download is working. theres probably a better way to do this + const response = await fetch(upload.url ?? ""); + const content = await response.text(); + expect(content.length).toBeGreaterThan(0); + + // Ensure that it comes back with the list_media endpoint + const listMediaRes = await alphaImage.listMedia(); + expect(listMediaRes.images.length).toBe(1); + + // Ensure that it also comes back with the admin all images + const listAllMediaRes = await alphaImage.listAllMedia({ + limit: imageFetchLimit, + }); + + // This number comes from all the previous thumbnails fetched in other tests. + const previousThumbnails = 1; + expect(listAllMediaRes.images.length).toBe(previousThumbnails); + + // The deleteUrl is a combination of the endpoint, delete token, and alias + let firstImage = listMediaRes.images[0]; + let deleteUrl = `${alphaUrl}/pictrs/image/delete/${firstImage.local_image.pictrs_delete_token}/${firstImage.local_image.pictrs_alias}`; + expect(deleteUrl).toBe(upload.delete_url); + + // Make sure the uploader is correct + expect(firstImage.person.actor_id).toBe( + `http://lemmy-alpha:8541/u/lemmy_alpha`, + ); + + // delete image + const delete_form: DeleteImage = { + token: upload.files![0].delete_token, + filename: upload.files![0].file, + }; + const delete_ = await alphaImage.deleteImage(delete_form); + expect(delete_).toBe(true); + + // ensure that image is deleted + const response2 = await fetch(upload.url ?? ""); + const content2 = await response2.text(); + expect(content2).toBe(""); + + // Ensure that it shows the image is deleted + const deletedListMediaRes = await alphaImage.listMedia(); + expect(deletedListMediaRes.images.length).toBe(0); + + // Ensure that the admin shows its deleted + const deletedListAllMediaRes = await alphaImage.listAllMedia({ + limit: imageFetchLimit, + }); + expect(deletedListAllMediaRes.images.length).toBe(previousThumbnails - 1); +}); + +test("Purge user, uploaded image removed", async () => { + let user = await registerUser(alphaImage, alphaUrl); + + // upload test image + const upload_form: UploadImage = { + image: Buffer.from("test"), + }; + const upload = await user.uploadImage(upload_form); + expect(upload.files![0].file).toBeDefined(); + expect(upload.files![0].delete_token).toBeDefined(); + expect(upload.url).toBeDefined(); + expect(upload.delete_url).toBeDefined(); + + // ensure that image download is working. 
theres probably a better way to do this + const response = await fetch(upload.url ?? ""); + const content = await response.text(); + expect(content.length).toBeGreaterThan(0); + + // purge user + let site = await getSite(user); + const purgeForm: PurgePerson = { + person_id: site.my_user!.local_user_view.person.id, + }; + const delete_ = await alphaImage.purgePerson(purgeForm); + expect(delete_.success).toBe(true); + + // ensure that image is deleted + const response2 = await fetch(upload.url ?? ""); + const content2 = await response2.text(); + expect(content2).toBe(""); +}); + +test("Purge post, linked image removed", async () => { + let user = await registerUser(beta, betaUrl); + + // upload test image + const upload_form: UploadImage = { + image: Buffer.from("test"), + }; + const upload = await user.uploadImage(upload_form); + expect(upload.files![0].file).toBeDefined(); + expect(upload.files![0].delete_token).toBeDefined(); + expect(upload.url).toBeDefined(); + expect(upload.delete_url).toBeDefined(); + + // ensure that image download is working. theres probably a better way to do this + const response = await fetch(upload.url ?? ""); + const content = await response.text(); + expect(content.length).toBeGreaterThan(0); + + let community = await resolveBetaCommunity(user); + let post = await createPost( + user, + community.community!.community.id, + upload.url, + ); + expect(post.post_view.post.url).toBe(upload.url); + expect(post.post_view.image_details).toBeDefined(); + + // purge post + const purgeForm: PurgePost = { + post_id: post.post_view.post.id, + }; + const delete_ = await beta.purgePost(purgeForm); + expect(delete_.success).toBe(true); + + // ensure that image is deleted + const response2 = await fetch(upload.url ?? ""); + const content2 = await response2.text(); + expect(content2).toBe(""); +}); + +test("Images in remote image post are proxied if setting enabled", async () => { + let community = await createCommunity(gamma); + let postRes = await createPost( + gamma, + community.community_view.community.id, + sampleImage, + `![](${sampleImage})`, + ); + const post = postRes.post_view.post; + expect(post).toBeDefined(); + + // Make sure it fetched the image details + expect(postRes.post_view.image_details).toBeDefined(); + + // remote image gets proxied after upload + expect( + post.thumbnail_url?.startsWith( + "http://lemmy-gamma:8561/api/v3/image_proxy?url", + ), + ).toBeTruthy(); + expect( + post.body?.startsWith("![](http://lemmy-gamma:8561/api/v3/image_proxy?url"), + ).toBeTruthy(); + + // Make sure that it ends with jpg, to be sure its an image + expect(post.thumbnail_url?.endsWith(".jpg")).toBeTruthy(); + + let epsilonPostRes = await resolvePost(epsilon, postRes.post_view.post); + expect(epsilonPostRes.post).toBeDefined(); + + // Fetch the post again, the metadata should be backgrounded now + // Wait for the metadata to get fetched, since this is backgrounded now + let epsilonPostRes2 = await waitUntil( + () => getPost(epsilon, epsilonPostRes.post!.post.id), + p => p.post_view.post.thumbnail_url != undefined, + ); + const epsilonPost = epsilonPostRes2.post_view.post; + + expect( + epsilonPost.thumbnail_url?.startsWith( + "http://lemmy-epsilon:8581/api/v3/image_proxy?url", + ), + ).toBeTruthy(); + expect( + epsilonPost.body?.startsWith( + "![](http://lemmy-epsilon:8581/api/v3/image_proxy?url", + ), + ).toBeTruthy(); + + // Make sure that it ends with jpg, to be sure its an image + expect(epsilonPost.thumbnail_url?.endsWith(".jpg")).toBeTruthy(); +}); + +test("Thumbnail 
of remote image link is proxied if setting enabled", async () => { + let community = await createCommunity(gamma); + let postRes = await createPost( + gamma, + community.community_view.community.id, + // The sample site metadata thumbnail ends in png + sampleSite, + ); + const post = postRes.post_view.post; + expect(post).toBeDefined(); + + // remote image gets proxied after upload + expect( + post.thumbnail_url?.startsWith( + "http://lemmy-gamma:8561/api/v3/image_proxy?url", + ), + ).toBeTruthy(); + + // Make sure that it ends with png, to be sure its an image + expect(post.thumbnail_url?.endsWith(".png")).toBeTruthy(); + + let epsilonPostRes = await resolvePost(epsilon, postRes.post_view.post); + expect(epsilonPostRes.post).toBeDefined(); + + let epsilonPostRes2 = await waitUntil( + () => getPost(epsilon, epsilonPostRes.post!.post.id), + p => p.post_view.post.thumbnail_url != undefined, + ); + const epsilonPost = epsilonPostRes2.post_view.post; + + expect( + epsilonPost.thumbnail_url?.startsWith( + "http://lemmy-epsilon:8581/api/v3/image_proxy?url", + ), + ).toBeTruthy(); + + // Make sure that it ends with png, to be sure its an image + expect(epsilonPost.thumbnail_url?.endsWith(".png")).toBeTruthy(); +}); + +test("No image proxying if setting is disabled", async () => { + let user = await registerUser(beta, betaUrl); + let community = await createCommunity(alpha); + let betaCommunity = await resolveCommunity( + beta, + community.community_view.community.actor_id, + ); + await followCommunity(beta, true, betaCommunity.community!.community.id); + + const upload_form: UploadImage = { + image: Buffer.from("test"), + }; + const upload = await user.uploadImage(upload_form); + let post = await createPost( + alpha, + community.community_view.community.id, + upload.url, + `![](${sampleImage})`, + ); + expect(post.post_view.post).toBeDefined(); + + // remote image doesn't get proxied after upload + expect( + post.post_view.post.url?.startsWith("http://127.0.0.1:8551/pictrs/image/"), + ).toBeTruthy(); + expect(post.post_view.post.body).toBe(`![](${sampleImage})`); + + let betaPost = await waitForPost( + beta, + post.post_view.post, + res => res?.post.alt_text != null, + ); + expect(betaPost.post).toBeDefined(); + + // remote image doesn't get proxied after federation + expect( + betaPost.post.url?.startsWith("http://127.0.0.1:8551/pictrs/image/"), + ).toBeTruthy(); + expect(betaPost.post.body).toBe(`![](${sampleImage})`); + // Make sure the alt text got federated + expect(post.post_view.post.alt_text).toBe(betaPost.post.alt_text); +}); + +test("Make regular post, and give it a custom thumbnail", async () => { + const uploadForm1: UploadImage = { + image: Buffer.from("testRegular1"), + }; + const upload1 = await alphaImage.uploadImage(uploadForm1); + + const community = await createCommunity(alphaImage); + + // Use wikipedia since it has an opengraph image + const wikipediaUrl = "https://wikipedia.org/"; + + let post = await createPostWithThumbnail( + alphaImage, + community.community_view.community.id, + wikipediaUrl, + upload1.url!, + ); + + // Wait for the metadata to get fetched, since this is backgrounded now + post = await waitUntil( + () => getPost(alphaImage, post.post_view.post.id), + p => p.post_view.post.thumbnail_url != undefined, + ); + expect(post.post_view.post.url).toBe(wikipediaUrl); + // Make sure it uses custom thumbnail + expect(post.post_view.post.thumbnail_url).toBe(upload1.url); +}); + +test("Create an image post, and make sure a custom thumbnail doesn't overwrite it", async 
() => { + const uploadForm1: UploadImage = { + image: Buffer.from("test1"), + }; + const upload1 = await alphaImage.uploadImage(uploadForm1); + + const uploadForm2: UploadImage = { + image: Buffer.from("test2"), + }; + const upload2 = await alphaImage.uploadImage(uploadForm2); + + const community = await createCommunity(alphaImage); + + let post = await createPostWithThumbnail( + alphaImage, + community.community_view.community.id, + upload1.url!, + upload2.url!, + ); + post = await waitUntil( + () => getPost(alphaImage, post.post_view.post.id), + p => p.post_view.post.thumbnail_url != undefined, + ); + expect(post.post_view.post.url).toBe(upload1.url); + // Make sure the custom thumbnail is ignored + expect(post.post_view.post.thumbnail_url == upload2.url).toBe(false); +}); diff --git a/api_tests/src/post.spec.ts b/api_tests/src/post.spec.ts index 8c1f22226..59e3d774e 100644 --- a/api_tests/src/post.spec.ts +++ b/api_tests/src/post.spec.ts @@ -18,12 +18,12 @@ import { resolveBetaCommunity, createComment, deletePost, + delay, removePost, getPost, unfollowRemotes, resolvePerson, banPersonFromSite, - searchPostLocal, followCommunity, banPersonFromCommunity, reportPost, @@ -37,9 +37,10 @@ import { waitForPost, alphaUrl, loginUser, + createCommunity, } from "./shared"; import { PostView } from "lemmy-js-client/dist/types/PostView"; -import { LemmyHttp } from "lemmy-js-client"; +import { EditSite, ResolveObject } from "lemmy-js-client"; let betaCommunity: CommunityView | undefined; @@ -47,14 +48,28 @@ beforeAll(async () => { await setupLogins(); betaCommunity = (await resolveBetaCommunity(alpha)).community; expect(betaCommunity).toBeDefined(); - await unfollows(); }); -afterAll(async () => { - await unfollows(); -}); +afterAll(unfollows); + +async function assertPostFederation( + postOne: PostView, + postTwo: PostView, + waitForMeta = true, +) { + // Link metadata is generated in background task and may not be ready yet at this time, + // so wait for it explicitly. For removed posts we cant refetch anything. 
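// Illustrative sketch: the `waitForPost` / `waitUntil` helpers used throughout these specs come
// from shared.ts and are not part of this hunk. Assuming they simply poll a fetcher until a
// predicate passes (which is why the background-metadata comment above calls for waiting
// explicitly), a minimal version could look like the following; the name, retry count and delay
// here are illustrative assumptions, not the real shared.ts values.
async function waitUntilSketch<T>(
  fetcher: () => Promise<T>,
  checker: (res: T) => boolean,
  retries = 30,
  delayMs = 1000,
): Promise<T> {
  let result = await fetcher();
  // Keep re-fetching until the predicate passes or the retry budget runs out.
  for (let i = 0; i < retries && !checker(result); i++) {
    await new Promise(resolve => setTimeout(resolve, delayMs));
    result = await fetcher();
  }
  return result;
}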
+ if (waitForMeta) { + postOne = await waitForPost(beta, postOne.post, res => { + return res === null || !!res?.post.embed_title; + }); + postTwo = await waitForPost( + beta, + postTwo.post, + res => res === null || !!res?.post.embed_title, + ); + } -function assertPostFederation(postOne?: PostView, postTwo?: PostView) { expect(postOne?.post.ap_id).toBe(postTwo?.post.ap_id); expect(postOne?.post.name).toBe(postTwo?.post.name); expect(postOne?.post.body).toBe(postTwo?.post.body); @@ -72,11 +87,23 @@ function assertPostFederation(postOne?: PostView, postTwo?: PostView) { } test("Create a post", async () => { + // Setup some allowlists and blocklists + const editSiteForm: EditSite = {}; + + editSiteForm.allowed_instances = []; + editSiteForm.blocked_instances = ["lemmy-alpha"]; + await epsilon.editSite(editSiteForm); + if (!betaCommunity) { throw "Missing beta community"; } - let postRes = await createPost(alpha, betaCommunity.community.id); + let postRes = await createPost( + alpha, + betaCommunity.community.id, + "https://example.com/", + "აშშ ითხოვს ირანს დაუყოვნებლივ გაანთავისუფლოს დაკავებული ნავთობის ტანკერი", + ); expect(postRes.post_view.post).toBeDefined(); expect(postRes.post_view.community.local).toBe(false); expect(postRes.post_view.creator.local).toBe(true); @@ -93,21 +120,27 @@ test("Create a post", async () => { expect(betaPost?.community.local).toBe(true); expect(betaPost?.creator.local).toBe(false); expect(betaPost?.counts.score).toBe(1); - assertPostFederation(betaPost, postRes.post_view); + await assertPostFederation(betaPost, postRes.post_view); // Delta only follows beta, so it should not see an alpha ap_id - await expect(resolvePost(delta, postRes.post_view.post)).rejects.toBe( - "couldnt_find_object", - ); + await expect( + resolvePost(delta, postRes.post_view.post), + ).rejects.toStrictEqual(Error("not_found")); // Epsilon has alpha blocked, it should not see the alpha post - await expect(resolvePost(epsilon, postRes.post_view.post)).rejects.toBe( - "couldnt_find_object", - ); + await expect( + resolvePost(epsilon, postRes.post_view.post), + ).rejects.toStrictEqual(Error("not_found")); + + // remove added allow/blocklists + editSiteForm.allowed_instances = []; + editSiteForm.blocked_instances = []; + await delta.editSite(editSiteForm); + await epsilon.editSite(editSiteForm); }); test("Create a post in a non-existent community", async () => { - await expect(createPost(alpha, -2)).rejects.toBe("couldnt_find_community"); + await expect(createPost(alpha, -2)).rejects.toStrictEqual(Error("not_found")); }); test("Unlike a post", async () => { @@ -133,7 +166,7 @@ test("Unlike a post", async () => { expect(betaPost?.community.local).toBe(true); expect(betaPost?.creator.local).toBe(false); expect(betaPost?.counts.score).toBe(0); - assertPostFederation(betaPost, postRes.post_view); + await assertPostFederation(betaPost, postRes.post_view); }); test("Update a post", async () => { @@ -154,11 +187,11 @@ test("Update a post", async () => { expect(betaPost.community.local).toBe(true); expect(betaPost.creator.local).toBe(false); expect(betaPost.post.name).toBe(updatedName); - assertPostFederation(betaPost, updatedPost.post_view); + await assertPostFederation(betaPost, updatedPost.post_view); // Make sure lemmy beta cannot update the post - await expect(editPost(beta, betaPost.post)).rejects.toBe( - "no_post_edit_allowed", + await expect(editPost(beta, betaPost.post)).rejects.toStrictEqual( + Error("no_post_edit_allowed"), ); }); @@ -196,12 +229,35 @@ test("Sticky a post", async () => { 
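// The "Create a post" test above flips epsilon's instance blocklist and then has to remember to
// clear it again so later tests see a clean federation state. A try/finally wrapper is one
// possible way to make that reset unconditional; `withBlockedInstances` is a hypothetical helper
// sketched under that assumption, not something defined in shared.ts (LemmyHttp and EditSite are
// the lemmy-js-client types already used in this file).
async function withBlockedInstances(
  api: LemmyHttp,
  blocked: string[],
  run: () => Promise<void>,
): Promise<void> {
  const form: EditSite = { allowed_instances: [], blocked_instances: blocked };
  await api.editSite(form);
  try {
    await run();
  } finally {
    // Always restore an empty allow/blocklist, even if the wrapped assertions throw.
    await api.editSite({ allowed_instances: [], blocked_instances: [] });
  }
}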
if (!gammaPost) { throw "Missing gamma post"; } - let gammaTrySticky = await featurePost(gamma, true, gammaPost.post); + // This has been failing occasionally + await featurePost(gamma, true, gammaPost.post); let betaPost3 = (await resolvePost(beta, postRes.post_view.post)).post; - expect(gammaTrySticky.post_view.post.featured_community).toBe(true); + // expect(gammaTrySticky.post_view.post.featured_community).toBe(true); expect(betaPost3?.post.featured_community).toBe(false); }); +test("Collection of featured posts gets federated", async () => { + // create a new community and feature a post + let community = await createCommunity(alpha); + let post = await createPost(alpha, community.community_view.community.id); + let featuredPost = await featurePost(alpha, true, post.post_view.post); + expect(featuredPost.post_view.post.featured_community).toBe(true); + + // fetch the community, ensure that post is also fetched and marked as featured + let betaCommunity = await resolveCommunity( + beta, + community.community_view.community.actor_id, + ); + expect(betaCommunity).toBeDefined(); + + const betaPost = await waitForPost( + beta, + post.post_view.post, + post => post?.post.featured_community === true, + ); + expect(betaPost).toBeDefined(); +}); + test("Lock a post", async () => { if (!betaCommunity) { throw "Missing beta community"; @@ -225,8 +281,12 @@ test("Lock a post", async () => { post => !!post && post.post.locked, ); - // Try to make a new comment there, on alpha - await expect(createComment(alpha, alphaPost1.post.id)).rejects.toBe("locked"); + // Try to make a new comment there, on alpha. For this we need to create a normal + // user account because admins/mods can comment in locked posts. + let user = await registerUser(alpha, alphaUrl); + await expect(createComment(user, alphaPost1.post.id)).rejects.toStrictEqual( + Error("locked"), + ); // Unlock a post let unlockedPost = await lockPost(beta, false, betaPost1.post); @@ -243,7 +303,7 @@ test("Lock a post", async () => { expect(alphaPost2.post.locked).toBe(false); // Try to create a new comment, on alpha - let commentAlpha = await createComment(alpha, alphaPost1.post.id); + let commentAlpha = await createComment(user, alphaPost1.post.id); expect(commentAlpha).toBeDefined(); }); @@ -278,11 +338,11 @@ test("Delete a post", async () => { throw "Missing beta post 2"; } expect(betaPost2.post.deleted).toBe(false); - assertPostFederation(betaPost2, undeletedPost.post_view); + await assertPostFederation(betaPost2, undeletedPost.post_view); // Make sure lemmy beta cannot delete the post - await expect(deletePost(beta, true, betaPost2.post)).rejects.toBe( - "no_post_edit_allowed", + await expect(deletePost(beta, true, betaPost2.post)).rejects.toStrictEqual( + Error("no_post_edit_allowed"), ); }); @@ -321,7 +381,7 @@ test("Remove a post from admin and community on different instance", async () => // Make sure lemmy beta sees post is undeleted let betaPost2 = (await resolvePost(beta, postRes.post_view.post)).post; expect(betaPost2?.post.removed).toBe(false); - assertPostFederation(betaPost2, undeletedPost.post_view); + await assertPostFederation(betaPost2!, undeletedPost.post_view); }); test("Remove a post from admin and community on same instance", async () => { @@ -352,7 +412,11 @@ test("Remove a post from admin and community on same instance", async () => { p => p?.post_view.post.removed ?? 
false, ); expect(alphaPost?.post_view.post.removed).toBe(true); - assertPostFederation(alphaPost.post_view, removePostRes.post_view); + await assertPostFederation( + alphaPost.post_view, + removePostRes.post_view, + false, + ); // Undelete let undeletedPost = await removePost(beta, false, betaPost.post); @@ -365,7 +429,7 @@ test("Remove a post from admin and community on same instance", async () => { p => !!p && !p.post.removed, ); expect(alphaPost2.post.removed).toBe(false); - assertPostFederation(alphaPost2, undeletedPost.post_view); + await assertPostFederation(alphaPost2, undeletedPost.post_view); await unfollowRemotes(alpha); }); @@ -381,34 +445,34 @@ test("Search for a post", async () => { expect(betaPost?.post.name).toBeDefined(); }); -test("Enforce site ban for federated user", async () => { +test("Enforce site ban federation for local user", async () => { if (!betaCommunity) { throw "Missing beta community"; } + // create a test user - let alphaUserJwt = await registerUser(alpha); - expect(alphaUserJwt).toBeDefined(); - let alpha_user = new LemmyHttp(alphaUrl, { - headers: { Authorization: `Bearer ${alphaUserJwt.jwt ?? ""}` }, - }); - let alphaUserPerson = (await getSite(alpha_user)).my_user?.local_user_view + let alphaUserHttp = await registerUser(alpha, alphaUrl); + let alphaUserPerson = (await getSite(alphaUserHttp)).my_user?.local_user_view .person; let alphaUserActorId = alphaUserPerson?.actor_id; if (!alphaUserActorId) { throw "Missing alpha user actor id"; } expect(alphaUserActorId).toBeDefined(); - let alphaPerson = (await resolvePerson(alpha_user, alphaUserActorId!)).person; + await followBeta(alphaUserHttp); + + let alphaPerson = (await resolvePerson(alphaUserHttp, alphaUserActorId!)) + .person; if (!alphaPerson) { throw "Missing alpha person"; } expect(alphaPerson).toBeDefined(); // alpha makes post in beta community, it federates to beta instance - let postRes1 = await createPost(alpha_user, betaCommunity.community.id); + let postRes1 = await createPost(alphaUserHttp, betaCommunity.community.id); let searchBeta1 = await waitForPost(beta, postRes1.post_view.post); - // ban alpha from its instance + // ban alpha from its own instance let banAlpha = await banPersonFromSite( alpha, alphaPerson.person.id, @@ -425,40 +489,111 @@ test("Enforce site ban for federated user", async () => { expect(alphaUserOnBeta1.person?.person.banned).toBe(true); // existing alpha post should be removed on beta - await waitUntil( + let betaBanRes = await waitUntil( () => getPost(beta, searchBeta1.post.id), s => s.post_view.post.removed, ); + expect(betaBanRes.post_view.post.removed).toBe(true); // Unban alpha let unBanAlpha = await banPersonFromSite( alpha, alphaPerson.person.id, false, - false, + true, ); expect(unBanAlpha.banned).toBe(false); + // existing alpha post should be restored on beta + betaBanRes = await waitUntil( + () => getPost(beta, searchBeta1.post.id), + s => !s.post_view.post.removed, + ); + expect(betaBanRes.post_view.post.removed).toBe(false); + // Login gets invalidated by ban, need to login again if (!alphaUserPerson) { throw "Missing alpha person"; } let newAlphaUserJwt = await loginUser(alpha, alphaUserPerson.name); - alpha_user.setHeaders({ - Authorization: "Bearer " + newAlphaUserJwt.jwt ?? 
"", + alphaUserHttp.setHeaders({ + Authorization: "Bearer " + newAlphaUserJwt.jwt, }); // alpha makes new post in beta community, it federates - let postRes2 = await createPost(alpha_user, betaCommunity!.community.id); + let postRes2 = await createPost(alphaUserHttp, betaCommunity!.community.id); await waitForPost(beta, postRes2.post_view.post); - let alphaUserOnBeta2 = await resolvePerson(beta, alphaUserActorId!); - expect(alphaUserOnBeta2.person?.person.banned).toBe(false); + await unfollowRemotes(alpha); }); -test.skip("Enforce community ban for federated user", async () => { +test("Enforce site ban federation for federated user", async () => { if (!betaCommunity) { throw "Missing beta community"; } + + // create a test user + let alphaUserHttp = await registerUser(alpha, alphaUrl); + let alphaUserPerson = (await getSite(alphaUserHttp)).my_user?.local_user_view + .person; + let alphaUserActorId = alphaUserPerson?.actor_id; + if (!alphaUserActorId) { + throw "Missing alpha user actor id"; + } + expect(alphaUserActorId).toBeDefined(); + await followBeta(alphaUserHttp); + + let alphaUserOnBeta2 = await resolvePerson(beta, alphaUserActorId!); + expect(alphaUserOnBeta2.person?.person.banned).toBe(false); + + if (!alphaUserOnBeta2.person) { + throw "Missing alpha person"; + } + + // alpha makes post in beta community, it federates to beta instance + let postRes1 = await createPost(alphaUserHttp, betaCommunity.community.id); + let searchBeta1 = await waitForPost(beta, postRes1.post_view.post); + expect(searchBeta1.post).toBeDefined(); + + // Now ban and remove their data from beta + let banAlphaOnBeta = await banPersonFromSite( + beta, + alphaUserOnBeta2.person.person.id, + true, + true, + ); + expect(banAlphaOnBeta.banned).toBe(true); + + // The beta site ban should NOT be federated to alpha + let alphaPerson2 = (await getSite(alphaUserHttp)).my_user!.local_user_view + .person; + expect(alphaPerson2.banned).toBe(false); + + // existing alpha post should be removed on beta + let betaBanRes = await waitUntil( + () => getPost(beta, searchBeta1.post.id), + s => s.post_view.post.removed, + ); + expect(betaBanRes.post_view.post.removed).toBe(true); + + // existing alpha's post to the beta community should be removed on alpha + let alphaPostAfterRemoveOnBeta = await waitUntil( + () => getPost(alpha, postRes1.post_view.post.id), + s => s.post_view.post.removed, + ); + expect(betaBanRes.post_view.post.removed).toBe(true); + expect(alphaPostAfterRemoveOnBeta.post_view.post.removed).toBe(true); + expect( + alphaPostAfterRemoveOnBeta.post_view.creator_banned_from_community, + ).toBe(true); + + await unfollowRemotes(alpha); +}); + +test("Enforce community ban for federated user", async () => { + if (!betaCommunity) { + throw "Missing beta community"; + } + await followBeta(alpha); let alphaShortname = `@lemmy_alpha@lemmy-alpha:8541`; let alphaPerson = (await resolvePerson(beta, alphaShortname)).person; if (!alphaPerson) { @@ -468,38 +603,46 @@ test.skip("Enforce community ban for federated user", async () => { // make a post in beta, it goes through let postRes1 = await createPost(alpha, betaCommunity.community.id); - let searchBeta1 = await searchPostLocal(beta, postRes1.post_view.post); - expect(searchBeta1.posts[0]).toBeDefined(); + let searchBeta1 = await waitForPost(beta, postRes1.post_view.post); + expect(searchBeta1.post).toBeDefined(); // ban alpha from beta community let banAlpha = await banPersonFromCommunity( beta, alphaPerson.person.id, - 2, + searchBeta1.community.id, true, true, ); 
expect(banAlpha.banned).toBe(true); // ensure that the post by alpha got removed - await expect(getPost(alpha, searchBeta1.posts[0].post.id)).rejects.toBe( - "unknown", + let removePostRes = await waitUntil( + () => getPost(alpha, postRes1.post_view.post.id), + s => s.post_view.post.removed, ); + expect(removePostRes.post_view.post.removed).toBe(true); + expect(removePostRes.post_view.creator_banned_from_community).toBe(true); + expect(removePostRes.community_view.banned_from_community).toBe(true); // Alpha tries to make post on beta, but it fails because of ban - await expect(createPost(alpha, betaCommunity.community.id)).rejects.toBe( - "banned_from_community", - ); + await expect( + createPost(alpha, betaCommunity.community.id), + ).rejects.toStrictEqual(Error("person_is_banned_from_community")); // Unban alpha let unBanAlpha = await banPersonFromCommunity( beta, alphaPerson.person.id, - 2, + searchBeta1.community.id, false, false, ); expect(unBanAlpha.banned).toBe(false); + + // Need to re-follow the community + await followBeta(alpha); + let postRes3 = await createPost(alpha, betaCommunity.community.id); expect(postRes3.post_view.post).toBeDefined(); expect(postRes3.post_view.community.local).toBe(false); @@ -507,52 +650,173 @@ test.skip("Enforce community ban for federated user", async () => { expect(postRes3.post_view.counts.score).toBe(1); // Make sure that post makes it to beta community - let searchBeta2 = await searchPostLocal(beta, postRes3.post_view.post); - expect(searchBeta2.posts[0]).toBeDefined(); + let postRes4 = await waitForPost(beta, postRes3.post_view.post); + expect(postRes4.post).toBeDefined(); + expect(postRes4.creator_banned_from_community).toBe(false); + + await unfollowRemotes(alpha); }); test("A and G subscribe to B (center) A posts, it gets announced to G", async () => { if (!betaCommunity) { throw "Missing beta community"; } + await followBeta(alpha); + let postRes = await createPost(alpha, betaCommunity.community.id); expect(postRes.post_view.post).toBeDefined(); let betaPost = (await resolvePost(gamma, postRes.post_view.post)).post; expect(betaPost?.post.name).toBeDefined(); + await unfollowRemotes(alpha); }); test("Report a post", async () => { - // Note, this is a different one from the setup - let betaCommunity = (await resolveBetaCommunity(beta)).community; - if (!betaCommunity) { - throw "Missing beta community"; - } - let postRes = await createPost(beta, betaCommunity.community.id); + // Create post from alpha + let alphaCommunity = (await resolveBetaCommunity(alpha)).community!; + await followBeta(alpha); + let postRes = await createPost(alpha, alphaCommunity.community.id); expect(postRes.post_view.post).toBeDefined(); let alphaPost = (await resolvePost(alpha, postRes.post_view.post)).post; if (!alphaPost) { throw "Missing alpha post"; } - let alphaReport = ( - await reportPost(alpha, alphaPost.post.id, randomString(10)) - ).post_report_view.post_report; + // Send report from gamma + let gammaPost = (await resolvePost(gamma, alphaPost.post)).post!; + let gammaReport = ( + await reportPost(gamma, gammaPost.post.id, randomString(10)) + ).post_report_view.post_report; + expect(gammaReport).toBeDefined(); + + // Report was federated to community instance let betaReport = (await waitUntil( () => listPostReports(beta).then(p => p.post_reports.find( r => - r.post_report.original_post_name === alphaReport.original_post_name, + r.post_report.original_post_name === gammaReport.original_post_name, ), ), res => !!res, ))!.post_report; 
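// `reportPost` above is another shared.ts wrapper whose body is not shown in this diff. If it
// simply forwards to the client's createPostReport endpoint, its shape would be roughly:
async function reportPostSketch(
  api: LemmyHttp,
  post_id: number,
  reason: string,
): Promise<PostReportResponse> {
  const form: CreatePostReport = { post_id, reason };
  return api.createPostReport(form);
}
// The point of the reworked test below is that a report filed on gamma then shows up both on
// beta (the community's home instance) and on alpha (the poster's home instance).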
expect(betaReport).toBeDefined(); expect(betaReport.resolved).toBe(false); - expect(betaReport.original_post_name).toBe(alphaReport.original_post_name); - expect(betaReport.original_post_url).toBe(alphaReport.original_post_url); - expect(betaReport.original_post_body).toBe(alphaReport.original_post_body); - expect(betaReport.reason).toBe(alphaReport.reason); + expect(betaReport.original_post_name).toBe(gammaReport.original_post_name); + //expect(betaReport.original_post_url).toBe(gammaReport.original_post_url); + expect(betaReport.original_post_body).toBe(gammaReport.original_post_body); + expect(betaReport.reason).toBe(gammaReport.reason); + await unfollowRemotes(alpha); + + // Report was federated to poster's instance + let alphaReport = (await waitUntil( + () => + listPostReports(alpha).then(p => + p.post_reports.find( + r => + r.post_report.original_post_name === gammaReport.original_post_name, + ), + ), + res => !!res, + ))!.post_report; + expect(alphaReport).toBeDefined(); + expect(alphaReport.resolved).toBe(false); + expect(alphaReport.original_post_name).toBe(gammaReport.original_post_name); + //expect(alphaReport.original_post_url).toBe(gammaReport.original_post_url); + expect(alphaReport.original_post_body).toBe(gammaReport.original_post_body); + expect(alphaReport.reason).toBe(gammaReport.reason); +}); + +test("Fetch post via redirect", async () => { + await followBeta(alpha); + let alphaPost = await createPost(alpha, betaCommunity!.community.id); + expect(alphaPost.post_view.post).toBeDefined(); + // Make sure that post is liked on beta + const betaPost = await waitForPost( + beta, + alphaPost.post_view.post, + res => res?.counts.score === 1, + ); + + expect(betaPost).toBeDefined(); + expect(betaPost.post?.ap_id).toBe(alphaPost.post_view.post.ap_id); + + // Fetch post from url on beta instance instead of ap_id + let q = `http://lemmy-beta:8551/post/${betaPost.post.id}`; + let form: ResolveObject = { + q, + }; + let gammaPost = await gamma.resolveObject(form); + expect(gammaPost).toBeDefined(); + expect(gammaPost.post?.post.ap_id).toBe(alphaPost.post_view.post.ap_id); + await unfollowRemotes(alpha); +}); + +test("Block post that contains banned URL", async () => { + let editSiteForm: EditSite = { + blocked_urls: ["https://evil.com/"], + }; + + await epsilon.editSite(editSiteForm); + + await delay(); + + if (!betaCommunity) { + throw "Missing beta community"; + } + + expect( + createPost(epsilon, betaCommunity.community.id, "https://evil.com"), + ).rejects.toStrictEqual(Error("blocked_url")); + + // Later tests need this to be empty + editSiteForm.blocked_urls = []; + await epsilon.editSite(editSiteForm); +}); + +test("Fetch post with redirect", async () => { + let alphaPost = await createPost(alpha, betaCommunity!.community.id); + expect(alphaPost.post_view.post).toBeDefined(); + + // beta fetches from alpha as usual + let betaPost = await resolvePost(beta, alphaPost.post_view.post); + expect(betaPost.post).toBeDefined(); + + // gamma fetches from beta, and gets redirected to alpha + let gammaPost = await resolvePost(gamma, betaPost.post!.post); + expect(gammaPost.post).toBeDefined(); + + // fetch remote object from local url, which redirects to the original url + let form: ResolveObject = { + q: `http://lemmy-gamma:8561/post/${gammaPost.post!.post.id}`, + }; + let gammaPost2 = await gamma.resolveObject(form); + expect(gammaPost2.post).toBeDefined(); +}); + +test("Rewrite markdown links", async () => { + const community = (await resolveBetaCommunity(beta)).community!; + + // 
create a post + let postRes1 = await createPost(beta, community.community.id); + + // link to this post in markdown + let postRes2 = await createPost( + beta, + community.community.id, + "https://example.com/", + `[link](${postRes1.post_view.post.ap_id})`, + ); + console.log(postRes2.post_view.post.body); + expect(postRes2.post_view.post).toBeDefined(); + + // fetch both posts from another instance + const alphaPost1 = await resolvePost(alpha, postRes1.post_view.post); + const alphaPost2 = await resolvePost(alpha, postRes2.post_view.post); + + // remote markdown link is replaced with local link + expect(alphaPost2.post?.post.body).toBe( + `[link](http://lemmy-alpha:8541/post/${alphaPost1.post?.post.id})`, + ); }); diff --git a/api_tests/src/private_message.spec.ts b/api_tests/src/private_message.spec.ts index 081bb8d8d..8fd683ff0 100644 --- a/api_tests/src/private_message.spec.ts +++ b/api_tests/src/private_message.spec.ts @@ -8,8 +8,9 @@ import { editPrivateMessage, listPrivateMessages, deletePrivateMessage, - unfollowRemotes, waitUntil, + reportPrivateMessage, + unfollows, } from "./shared"; let recipient_id: number; @@ -20,9 +21,7 @@ beforeAll(async () => { recipient_id = 3; }); -afterAll(async () => { - await unfollowRemotes(alpha); -}); +afterAll(unfollows); test("Create a private message", async () => { let pmRes = await createPrivateMessage(alpha, recipient_id); @@ -109,3 +108,42 @@ test("Delete a private message", async () => { betaPms1.private_messages.length, ); }); + +test("Create a private message report", async () => { + let pmRes = await createPrivateMessage(alpha, recipient_id); + let betaPms1 = await waitUntil( + () => listPrivateMessages(beta), + m => + !!m.private_messages.find( + e => + e.private_message.ap_id === + pmRes.private_message_view.private_message.ap_id, + ), + ); + let betaPm = betaPms1.private_messages[0]; + expect(betaPm).toBeDefined(); + + // Make sure that only the recipient can report it, so this should fail + await expect( + reportPrivateMessage( + alpha, + pmRes.private_message_view.private_message.id, + "a reason", + ), + ).rejects.toStrictEqual(Error("couldnt_create_report")); + + // This one should pass + let reason = "another reason"; + let report = await reportPrivateMessage( + beta, + betaPm.private_message.id, + reason, + ); + + expect(report.private_message_report_view.private_message.id).toBe( + betaPm.private_message.id, + ); + expect(report.private_message_report_view.private_message_report.reason).toBe( + reason, + ); +}); diff --git a/api_tests/src/shared.ts b/api_tests/src/shared.ts index e4dabb1d4..8ec4b29ed 100644 --- a/api_tests/src/shared.ts +++ b/api_tests/src/shared.ts @@ -1,12 +1,20 @@ import { + BlockCommunity, + BlockCommunityResponse, BlockInstance, BlockInstanceResponse, + CommunityId, + CreatePrivateMessageReport, + DeleteImage, + EditCommunity, GetReplies, GetRepliesResponse, GetUnreadCountResponse, InstanceId, LemmyHttp, PostView, + PrivateMessageReportResponse, + SuccessResponse, } from "lemmy-js-client"; import { CreatePost } from "lemmy-js-client/dist/types/CreatePost"; import { DeletePost } from "lemmy-js-client/dist/types/DeletePost"; @@ -55,7 +63,6 @@ import { Register } from "lemmy-js-client/dist/types/Register"; import { SaveUserSettings } from "lemmy-js-client/dist/types/SaveUserSettings"; import { DeleteAccount } from "lemmy-js-client/dist/types/DeleteAccount"; import { GetSiteResponse } from "lemmy-js-client/dist/types/GetSiteResponse"; -import { DeleteAccountResponse } from 
"lemmy-js-client/dist/types/DeleteAccountResponse"; import { PrivateMessagesResponse } from "lemmy-js-client/dist/types/PrivateMessagesResponse"; import { GetPrivateMessages } from "lemmy-js-client/dist/types/GetPrivateMessages"; import { PostReportResponse } from "lemmy-js-client/dist/types/PostReportResponse"; @@ -72,19 +79,26 @@ import { GetPersonDetailsResponse } from "lemmy-js-client/dist/types/GetPersonDe import { GetPersonDetails } from "lemmy-js-client/dist/types/GetPersonDetails"; import { ListingType } from "lemmy-js-client/dist/types/ListingType"; -export let alphaUrl = "http://127.0.0.1:8541"; -export let betaUrl = "http://127.0.0.1:8551"; -export let gammaUrl = "http://127.0.0.1:8561"; -export let deltaUrl = "http://127.0.0.1:8571"; -export let epsilonUrl = "http://127.0.0.1:8581"; +export const fetchFunction = fetch; +export const imageFetchLimit = 50; +export const sampleImage = + "https://i.pinimg.com/originals/df/5f/5b/df5f5b1b174a2b4b6026cc6c8f9395c1.jpg"; +export const sampleSite = "https://yahoo.com"; -export let alpha = new LemmyHttp(alphaUrl); -export let beta = new LemmyHttp(betaUrl); -export let gamma = new LemmyHttp(gammaUrl); -export let delta = new LemmyHttp(deltaUrl); -export let epsilon = new LemmyHttp(epsilonUrl); +export const alphaUrl = "http://127.0.0.1:8541"; +export const betaUrl = "http://127.0.0.1:8551"; +export const gammaUrl = "http://127.0.0.1:8561"; +export const deltaUrl = "http://127.0.0.1:8571"; +export const epsilonUrl = "http://127.0.0.1:8581"; -export let betaAllowedInstances = [ +export const alpha = new LemmyHttp(alphaUrl, { fetchFunction }); +export const alphaImage = new LemmyHttp(alphaUrl); +export const beta = new LemmyHttp(betaUrl, { fetchFunction }); +export const gamma = new LemmyHttp(gammaUrl, { fetchFunction }); +export const delta = new LemmyHttp(deltaUrl, { fetchFunction }); +export const epsilon = new LemmyHttp(epsilonUrl, { fetchFunction }); + +export const betaAllowedInstances = [ "lemmy-alpha", "lemmy-gamma", "lemmy-delta", @@ -132,6 +146,7 @@ export async function setupLogins() { resEpsilon, ]); alpha.setHeaders({ Authorization: `Bearer ${res[0].jwt ?? ""}` }); + alphaImage.setHeaders({ Authorization: `Bearer ${res[0].jwt ?? ""}` }); beta.setHeaders({ Authorization: `Bearer ${res[1].jwt ?? ""}` }); gamma.setHeaders({ Authorization: `Bearer ${res[2].jwt ?? ""}` }); delta.setHeaders({ Authorization: `Bearer ${res[3].jwt ?? 
""}` }); @@ -168,13 +183,10 @@ export async function setupLogins() { ]; await gamma.editSite(editSiteForm); + // Setup delta allowed instance editSiteForm.allowed_instances = ["lemmy-beta"]; await delta.editSite(editSiteForm); - editSiteForm.allowed_instances = []; - editSiteForm.blocked_instances = ["lemmy-alpha"]; - await epsilon.editSite(editSiteForm); - // Create the main alpha/beta communities // Ignore thrown errors of duplicates try { @@ -185,7 +197,7 @@ export async function setupLogins() { // (because last_successful_id is set to current id when federation to an instance is first started) // only needed the first time so do in this try await delay(10_000); - } catch (_) { + } catch { console.log("Communities already exist"); } } @@ -193,17 +205,20 @@ export async function setupLogins() { export async function createPost( api: LemmyHttp, community_id: number, + url: string = "https://example.com/", + body = randomString(10), + // use example.com for consistent title and embed description + name: string = randomString(5), + alt_text = randomString(10), + custom_thumbnail: string | undefined = undefined, ): Promise { - let name = randomString(5); - let body = randomString(10); - // switch from google.com to example.com for consistent title (embed_title and embed_description) - // google switches description when a google doodle appears - let url = "https://example.com/"; let form: CreatePost = { name, url, body, + alt_text, community_id, + custom_thumbnail, }; return api.createPost(form); } @@ -220,6 +235,21 @@ export async function editPost( return api.editPost(form); } +export async function createPostWithThumbnail( + api: LemmyHttp, + community_id: number, + url: string, + custom_thumbnail: string, +): Promise { + let form: CreatePost = { + name: randomString(10), + url, + community_id, + custom_thumbnail, + }; + return api.createPost(form); +} + export async function deletePost( api: LemmyHttp, deleted: boolean, @@ -287,6 +317,7 @@ export async function searchPostLocal( q: post.name, type_: "Posts", sort: "TopAll", + listing_type: "All", }; return api.search(form); } @@ -322,6 +353,7 @@ export async function getComments( post_id: post_id, type_: listingType, sort: "New", + limit: 50, }; return api.getComments(form); } @@ -332,10 +364,13 @@ export async function getUnreadCount( return api.getUnreadCount(); } -export async function getReplies(api: LemmyHttp): Promise { +export async function getReplies( + api: LemmyHttp, + unread_only: boolean = false, +): Promise { let form: GetReplies = { sort: "New", - unread_only: false, + unread_only, }; return api.getReplies(form); } @@ -384,13 +419,13 @@ export async function banPersonFromSite( api: LemmyHttp, person_id: number, ban: boolean, - remove_data: boolean, + remove_or_restore_data: boolean, ): Promise { // Make sure lemmy-beta/c/main is cached on lemmy_alpha let form: BanPerson = { person_id, ban, - remove_data: remove_data, + remove_or_restore_data, }; return api.banPerson(form); } @@ -399,13 +434,13 @@ export async function banPersonFromCommunity( api: LemmyHttp, person_id: number, community_id: number, - remove_data: boolean, + remove_or_restore_data: boolean, ban: boolean, ): Promise { let form: BanFromCommunity = { person_id, community_id, - remove_data: remove_data, + remove_or_restore_data, ban, }; return api.banFromCommunity(form); @@ -422,8 +457,9 @@ export async function followCommunity( }; const res = await api.followCommunity(form); await waitUntil( - () => resolveCommunity(api, res.community_view.community.actor_id), - 
g => g.community?.subscribed === (follow ? "Subscribed" : "NotSubscribed"), + () => getCommunity(api, res.community_view.community.id), + g => + g.community_view.subscribed === (follow ? "Subscribed" : "NotSubscribed"), ); // wait FOLLOW_ADDITIONS_RECHECK_DELAY (there's no API to wait for this currently) await delay(2000); @@ -517,7 +553,7 @@ export async function likeComment( export async function createCommunity( api: LemmyHttp, - name_: string = randomString(5), + name_: string = randomString(10), ): Promise { let description = "a sample description"; let form: CreateCommunity = { @@ -528,6 +564,13 @@ export async function createCommunity( return api.createCommunity(form); } +export async function editCommunity( + api: LemmyHttp, + form: EditCommunity, +): Promise { + return api.editCommunity(form); +} + export async function getCommunity( api: LemmyHttp, id: number, @@ -610,15 +653,22 @@ export async function deletePrivateMessage( export async function registerUser( api: LemmyHttp, + url: string, username: string = randomString(5), -): Promise { +): Promise { let form: Register = { username, password, password_verify: password, show_nsfw: true, }; - return api.register(form); + let login_response = await api.register(form); + + expect(login_response.jwt).toBeDefined(); + let lemmy_http = new LemmyHttp(url, { + headers: { Authorization: `Bearer ${login_response.jwt ?? ""}` }, + }); + return lemmy_http; } export async function loginUser( @@ -634,13 +684,13 @@ export async function loginUser( export async function saveUserSettingsBio( api: LemmyHttp, -): Promise { +): Promise { let form: SaveUserSettings = { show_nsfw: true, blur_nsfw: false, auto_expand: true, theme: "darkly", - default_sort_type: "Active", + default_post_sort_type: "Active", default_listing_type: "All", interface_language: "en", show_avatars: true, @@ -652,15 +702,15 @@ export async function saveUserSettingsBio( export async function saveUserSettingsFederated( api: LemmyHttp, -): Promise { - let avatar = "https://image.flaticon.com/icons/png/512/35/35896.png"; - let banner = "https://image.flaticon.com/icons/png/512/36/35896.png"; +): Promise { + let avatar = sampleImage; + let banner = sampleImage; let bio = "a changed bio"; let form: SaveUserSettings = { show_nsfw: false, blur_nsfw: true, auto_expand: false, - default_sort_type: "Hot", + default_post_sort_type: "Hot", default_listing_type: "All", interface_language: "", avatar, @@ -676,7 +726,7 @@ export async function saveUserSettingsFederated( export async function saveUserSettings( api: LemmyHttp, form: SaveUserSettings, -): Promise { +): Promise { return api.saveUserSettings(form); } export async function getPersonDetails( @@ -689,9 +739,7 @@ export async function getPersonDetails( return api.getPersonDetails(form); } -export async function deleteUser( - api: LemmyHttp, -): Promise { +export async function deleteUser(api: LemmyHttp): Promise { let form: DeleteAccount = { delete_content: true, password, @@ -722,6 +770,7 @@ export async function unfollowRemotes( await Promise.all( remoteFollowed.map(cu => followCommunity(api, false, cu.community.id)), ); + let siteRes = await getSite(api); return siteRes; } @@ -767,6 +816,18 @@ export async function reportComment( return api.createCommentReport(form); } +export async function reportPrivateMessage( + api: LemmyHttp, + private_message_id: number, + reason: string, +): Promise { + let form: CreatePrivateMessageReport = { + private_message_id, + reason, + }; + return api.createPrivateMessageReport(form); +} + export 
async function listCommentReports( api: LemmyHttp, ): Promise { @@ -777,9 +838,12 @@ export async function listCommentReports( export function getPosts( api: LemmyHttp, listingType?: ListingType, + community_id?: number, ): Promise { let form: GetPosts = { type_: listingType, + limit: 50, + community_id, }; return api.getPosts(form); } @@ -796,6 +860,18 @@ export function blockInstance( return api.blockInstance(form); } +export function blockCommunity( + api: LemmyHttp, + community_id: CommunityId, + block: boolean, +): Promise { + let form: BlockCommunity = { + community_id, + block, + }; + return api.blockCommunity(form); +} + export function delay(millis = 500) { return new Promise(resolve => setTimeout(resolve, millis)); } @@ -819,13 +895,49 @@ export function randomString(length: number): string { return result; } +export async function deleteAllImages(api: LemmyHttp) { + const imagesRes = await api.listAllMedia({ + limit: imageFetchLimit, + }); + Promise.all( + imagesRes.images + .map(image => { + const form: DeleteImage = { + token: image.local_image.pictrs_delete_token, + filename: image.local_image.pictrs_alias, + }; + return form; + }) + .map(form => api.deleteImage(form)), + ); +} + export async function unfollows() { await Promise.all([ unfollowRemotes(alpha), + unfollowRemotes(beta), unfollowRemotes(gamma), unfollowRemotes(delta), unfollowRemotes(epsilon), ]); + await Promise.all([ + purgeAllPosts(alpha), + purgeAllPosts(beta), + purgeAllPosts(gamma), + purgeAllPosts(delta), + purgeAllPosts(epsilon), + ]); +} + +export async function purgeAllPosts(api: LemmyHttp) { + // The best way to get all federated items, is to find the posts + let res = await api.getPosts({ type_: "All", limit: 50 }); + await Promise.all( + Array.from(new Set(res.posts.map(p => p.post.id))) + .map(post_id => api.purgePost({ post_id })) + // Ignore errors + .map(p => p.catch(e => e)), + ); } export function getCommentParentId(comment: Comment): number | undefined { @@ -836,6 +948,7 @@ export function getCommentParentId(comment: Comment): number | undefined { if (split.length > 1) { return Number(split[split.length - 2]); } else { + console.log(`Failed to extract comment parent id from ${comment.path}`); return undefined; } } diff --git a/api_tests/src/user.spec.ts b/api_tests/src/user.spec.ts index eddf568b8..2edcf54ea 100644 --- a/api_tests/src/user.spec.ts +++ b/api_tests/src/user.spec.ts @@ -12,19 +12,22 @@ import { createComment, resolveBetaCommunity, deleteUser, - resolvePost, - resolveComment, saveUserSettingsFederated, setupLogins, alphaUrl, saveUserSettings, + getPost, + getComments, + fetchFunction, + alphaImage, + unfollows, + saveUserSettingsBio, } from "./shared"; -import { LemmyHttp, SaveUserSettings } from "lemmy-js-client"; +import { LemmyHttp, SaveUserSettings, UploadImage } from "lemmy-js-client"; import { GetPosts } from "lemmy-js-client/dist/types/GetPosts"; -beforeAll(async () => { - await setupLogins(); -}); +beforeAll(setupLogins); +afterAll(unfollows); let apShortname: string; @@ -39,18 +42,14 @@ function assertUserFederation(userOne?: PersonView, userTwo?: PersonView) { } test("Create user", async () => { - let userRes = await registerUser(alpha); - expect(userRes.jwt).toBeDefined(); - let user = new LemmyHttp(alphaUrl, { - headers: { Authorization: `Bearer ${userRes.jwt ?? 
""}` }, - }); + let user = await registerUser(alpha, alphaUrl); let site = await getSite(user); expect(site.my_user).toBeDefined(); if (!site.my_user) { throw "Missing site user"; } - apShortname = `@${site.my_user.local_user_view.person.name}@lemmy-alpha:8541`; + apShortname = `${site.my_user.local_user_view.person.name}@lemmy-alpha:8541`; }); test("Set some user settings, check that they are federated", async () => { @@ -70,14 +69,10 @@ test("Set some user settings, check that they are federated", async () => { }); test("Delete user", async () => { - let userRes = await registerUser(alpha); - expect(userRes.jwt).toBeDefined(); - let user = new LemmyHttp(alphaUrl, { - headers: { Authorization: `Bearer ${userRes.jwt ?? ""}` }, - }); + let user = await registerUser(alpha, alphaUrl); // make a local post and comment - let alphaCommunity = (await resolveCommunity(user, "!main@lemmy-alpha:8541")) + let alphaCommunity = (await resolveCommunity(user, "main@lemmy-alpha:8541")) .community; if (!alphaCommunity) { throw "Missing alpha community"; @@ -103,23 +98,28 @@ test("Delete user", async () => { await deleteUser(user); - await expect(resolvePost(alpha, localPost)).rejects.toBe( - "couldnt_find_object", + // check that posts and comments are marked as deleted on other instances. + // use get methods to avoid refetching from origin instance + expect((await getPost(alpha, localPost.id)).post_view.post.deleted).toBe( + true, ); - await expect(resolveComment(alpha, localComment)).rejects.toBe( - "couldnt_find_object", - ); - await expect(resolvePost(alpha, remotePost)).rejects.toBe( - "couldnt_find_object", - ); - await expect(resolveComment(alpha, remoteComment)).rejects.toBe( - "couldnt_find_object", + expect((await getPost(alpha, remotePost.id)).post_view.post.deleted).toBe( + true, ); + expect( + (await getComments(alpha, localComment.post_id)).comments[0].comment + .deleted, + ).toBe(true); + expect( + (await getComments(alpha, remoteComment.post_id)).comments[0].comment + .deleted, + ).toBe(true); }); test("Requests with invalid auth should be treated as unauthenticated", async () => { let invalid_auth = new LemmyHttp(alphaUrl, { headers: { Authorization: "Bearer foobar" }, + fetchFunction, }); let site = await getSite(invalid_auth); expect(site.my_user).toBeUndefined(); @@ -129,3 +129,88 @@ test("Requests with invalid auth should be treated as unauthenticated", async () let posts = invalid_auth.getPosts(form); expect((await posts).posts).toBeDefined(); }); + +test("Create user with Arabic name", async () => { + let user = await registerUser( + alpha, + alphaUrl, + "تجريب" + Math.random().toString().slice(2, 10), // less than actor_name_max_length + ); + + let site = await getSite(user); + expect(site.my_user).toBeDefined(); + if (!site.my_user) { + throw "Missing site user"; + } + apShortname = `${site.my_user.local_user_view.person.name}@lemmy-alpha:8541`; + + let alphaPerson = (await resolvePerson(alpha, apShortname)).person; + expect(alphaPerson).toBeDefined(); +}); + +test("Create user with accept-language", async () => { + let lemmy_http = new LemmyHttp(alphaUrl, { + // https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Accept-Language#syntax + headers: { "Accept-Language": "fr-CH, en;q=0.8, de;q=0.7, *;q=0.5" }, + }); + let user = await registerUser(lemmy_http, alphaUrl); + + let site = await getSite(user); + expect(site.my_user).toBeDefined(); + expect(site.my_user?.local_user_view.local_user.interface_language).toBe( + "fr", + ); + let langs = site.all_languages + .filter(a => 
site.my_user?.discussion_languages.includes(a.id)) + .map(l => l.code); + // should have languages from accept header, as well as "undetermined" + // which is automatically enabled by backend + expect(langs).toStrictEqual(["und", "de", "en", "fr"]); +}); + +test("Set a new avatar, old avatar is deleted", async () => { + const listMediaRes = await alphaImage.listMedia(); + expect(listMediaRes.images.length).toBe(0); + const upload_form1: UploadImage = { + image: Buffer.from("test1"), + }; + const upload1 = await alphaImage.uploadImage(upload_form1); + expect(upload1.url).toBeDefined(); + + let form1 = { + avatar: upload1.url, + }; + await saveUserSettings(alpha, form1); + const listMediaRes1 = await alphaImage.listMedia(); + expect(listMediaRes1.images.length).toBe(1); + + const upload_form2: UploadImage = { + image: Buffer.from("test2"), + }; + const upload2 = await alphaImage.uploadImage(upload_form2); + expect(upload2.url).toBeDefined(); + + let form2 = { + avatar: upload2.url, + }; + await saveUserSettings(alpha, form2); + // make sure only the new avatar is kept + const listMediaRes2 = await alphaImage.listMedia(); + expect(listMediaRes2.images.length).toBe(1); + + // Upload that same form2 avatar, make sure it isn't replaced / deleted + await saveUserSettings(alpha, form2); + // make sure only the new avatar is kept + const listMediaRes3 = await alphaImage.listMedia(); + expect(listMediaRes3.images.length).toBe(1); + + // Now try to save a user settings, with the icon missing, + // and make sure it doesn't clear the data, or delete the image + await saveUserSettingsBio(alpha); + let site = await getSite(alpha); + expect(site.my_user?.local_user_view.person.avatar).toBe(upload2.url); + + // make sure only the new avatar is kept + const listMediaRes4 = await alphaImage.listMedia(); + expect(listMediaRes4.images.length).toBe(1); +}); diff --git a/api_tests/yarn.lock b/api_tests/yarn.lock deleted file mode 100644 index ca73cf9fc..000000000 --- a/api_tests/yarn.lock +++ /dev/null @@ -1,3074 +0,0 @@ -# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. 
"@babel/plugin-syntax-object-rest-spread" "^7.8.3" - "@babel/plugin-syntax-optional-catch-binding" "^7.8.3" - "@babel/plugin-syntax-optional-chaining" "^7.8.3" - "@babel/plugin-syntax-top-level-await" "^7.8.3" - -babel-preset-jest@^29.6.3: - version "29.6.3" - resolved "https://registry.yarnpkg.com/babel-preset-jest/-/babel-preset-jest-29.6.3.tgz#fa05fa510e7d493896d7b0dd2033601c840f171c" - integrity sha512-0B3bhxR6snWXJZtR/RliHTDPRgn1sNHOR0yVtq/IiQFyuOVjFS+wuio/R4gSNkyYmKmJB4wGZv2NZanmKmTnNA== - dependencies: - babel-plugin-jest-hoist "^29.6.3" - babel-preset-current-node-syntax "^1.0.0" - -balanced-match@^1.0.0: - version "1.0.2" - resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.2.tgz#e83e3a7e3f300b34cb9d87f615fa0cbf357690ee" - integrity sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw== - -big-integer@^1.6.44: - version "1.6.51" - resolved "https://registry.yarnpkg.com/big-integer/-/big-integer-1.6.51.tgz#0df92a5d9880560d3ff2d5fd20245c889d130686" - integrity sha512-GPEid2Y9QU1Exl1rpO9B2IPJGHPSupF5GnVIP0blYvNOMer2bTvSWs1jGOUg04hTmu67nmLsQ9TBo1puaotBHg== - -bplist-parser@^0.2.0: - version "0.2.0" - resolved "https://registry.yarnpkg.com/bplist-parser/-/bplist-parser-0.2.0.tgz#43a9d183e5bf9d545200ceac3e712f79ebbe8d0e" - integrity sha512-z0M+byMThzQmD9NILRniCUXYsYpjwnlO8N5uCFaCqIOpqRsJCrQL9NK3JsD67CN5a08nF5oIL2bD6loTdHOuKw== - dependencies: - big-integer "^1.6.44" - -brace-expansion@^1.1.7: - version "1.1.11" - resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.11.tgz#3c7fcbf529d87226f3d2f52b966ff5271eb441dd" - integrity sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA== - dependencies: - balanced-match "^1.0.0" - concat-map "0.0.1" - -braces@^3.0.2: - version "3.0.2" - resolved "https://registry.yarnpkg.com/braces/-/braces-3.0.2.tgz#3454e1a462ee8d599e236df336cd9ea4f8afe107" - integrity sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A== - dependencies: - fill-range "^7.0.1" - -browserslist@^4.21.9: - version "4.22.1" - resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-4.22.1.tgz#ba91958d1a59b87dab6fed8dfbcb3da5e2e9c619" - integrity sha512-FEVc202+2iuClEhZhrWy6ZiAcRLvNMyYcxZ8raemul1DYVOVdFsbqckWLdsixQZCpJlwe77Z3UTalE7jsjnKfQ== - dependencies: - caniuse-lite "^1.0.30001541" - electron-to-chromium "^1.4.535" - node-releases "^2.0.13" - update-browserslist-db "^1.0.13" - -bs-logger@0.x: - version "0.2.6" - resolved "https://registry.yarnpkg.com/bs-logger/-/bs-logger-0.2.6.tgz#eb7d365307a72cf974cc6cda76b68354ad336bd8" - integrity sha512-pd8DCoxmbgc7hyPKOvxtqNcjYoOsABPQdcCUjGp3d42VR2CX1ORhk2A87oqqu5R1kk+76nsxZupkmyd+MVtCog== - dependencies: - fast-json-stable-stringify "2.x" - -bser@2.1.1: - version "2.1.1" - resolved "https://registry.yarnpkg.com/bser/-/bser-2.1.1.tgz#e6787da20ece9d07998533cfd9de6f5c38f4bc05" - integrity sha512-gQxTNE/GAfIIrmHLUE3oJyp5FO6HRBfhjnw4/wMmA63ZGDJnWBmgY/lyQBpnDUkGmAhbSe39tx2d/iTOAfglwQ== - dependencies: - node-int64 "^0.4.0" - -buffer-from@^1.0.0: - version "1.1.2" - resolved "https://registry.yarnpkg.com/buffer-from/-/buffer-from-1.1.2.tgz#2b146a6fd72e80b4f55d255f35ed59a3a9a41bd5" - integrity sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ== - -bundle-name@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/bundle-name/-/bundle-name-3.0.0.tgz#ba59bcc9ac785fb67ccdbf104a2bf60c099f0e1a" - integrity 
sha512-PKA4BeSvBpQKQ8iPOGCSiell+N8P+Tf1DlwqmYhpe2gAhKPHn8EYOxVT+ShuGmhg8lN8XiSlS80yiExKXrURlw== - dependencies: - run-applescript "^5.0.0" - -callsites@^3.0.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/callsites/-/callsites-3.1.0.tgz#b3630abd8943432f54b3f0519238e33cd7df2f73" - integrity sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ== - -camelcase@^5.3.1: - version "5.3.1" - resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-5.3.1.tgz#e3c9b31569e106811df242f715725a1f4c494320" - integrity sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg== - -camelcase@^6.2.0: - version "6.3.0" - resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-6.3.0.tgz#5685b95eb209ac9c0c177467778c9c84df58ba9a" - integrity sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA== - -caniuse-lite@^1.0.30001541: - version "1.0.30001542" - resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001542.tgz#823ddb5aed0a70d5e2bfb49126478e84e9514b85" - integrity sha512-UrtAXVcj1mvPBFQ4sKd38daP8dEcXXr5sQe6QNNinaPd0iA/cxg9/l3VrSdL73jgw5sKyuQ6jNgiKO12W3SsVA== - -chalk@^2.4.2: - version "2.4.2" - resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424" - integrity sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ== - dependencies: - ansi-styles "^3.2.1" - escape-string-regexp "^1.0.5" - supports-color "^5.3.0" - -chalk@^4.0.0: - version "4.1.2" - resolved "https://registry.yarnpkg.com/chalk/-/chalk-4.1.2.tgz#aac4e2b7734a740867aeb16bf02aad556a1e7a01" - integrity sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA== - dependencies: - ansi-styles "^4.1.0" - supports-color "^7.1.0" - -char-regex@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/char-regex/-/char-regex-1.0.2.tgz#d744358226217f981ed58f479b1d6bcc29545dcf" - integrity sha512-kWWXztvZ5SBQV+eRgKFeh8q5sLuZY2+8WUIzlxWVTg+oGwY14qylx1KbKzHd8P6ZYkAg0xyIDU9JMHhyJMZ1jw== - -ci-info@^3.2.0: - version "3.8.0" - resolved "https://registry.yarnpkg.com/ci-info/-/ci-info-3.8.0.tgz#81408265a5380c929f0bc665d62256628ce9ef91" - integrity sha512-eXTggHWSooYhq49F2opQhuHWgzucfF2YgODK4e1566GQs5BIfP30B0oenwBJHfWxAs2fyPB1s7Mg949zLf61Yw== - -cjs-module-lexer@^1.0.0: - version "1.2.3" - resolved "https://registry.yarnpkg.com/cjs-module-lexer/-/cjs-module-lexer-1.2.3.tgz#6c370ab19f8a3394e318fe682686ec0ac684d107" - integrity sha512-0TNiGstbQmCFwt4akjjBg5pLRTSyj/PkWQ1ZoO2zntmg9yLqSRxwEa4iCfQLGjqhiqBfOJa7W/E8wfGrTDmlZQ== - -cliui@^8.0.1: - version "8.0.1" - resolved "https://registry.yarnpkg.com/cliui/-/cliui-8.0.1.tgz#0c04b075db02cbfe60dc8e6cf2f5486b1a3608aa" - integrity sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ== - dependencies: - string-width "^4.2.0" - strip-ansi "^6.0.1" - wrap-ansi "^7.0.0" - -co@^4.6.0: - version "4.6.0" - resolved "https://registry.yarnpkg.com/co/-/co-4.6.0.tgz#6ea6bdf3d853ae54ccb8e47bfa0bf3f9031fb184" - integrity sha512-QVb0dM5HvG+uaxitm8wONl7jltx8dqhfU33DcqtOZcLSVIKSDDLDi7+0LbAKiyI8hD9u42m2YxXSkMGWThaecQ== - -collect-v8-coverage@^1.0.0: - version "1.0.2" - resolved "https://registry.yarnpkg.com/collect-v8-coverage/-/collect-v8-coverage-1.0.2.tgz#c0b29bcd33bcd0779a1344c2136051e6afd3d9e9" - integrity sha512-lHl4d5/ONEbLlJvaJNtsF/Lz+WvB07u2ycqTYbdrq7UypDXailES4valYb2eWiJFxZlVmpGekfqoxQhzyFdT4Q== - 
-color-convert@^1.9.0: - version "1.9.3" - resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-1.9.3.tgz#bb71850690e1f136567de629d2d5471deda4c1e8" - integrity sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg== - dependencies: - color-name "1.1.3" - -color-convert@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-2.0.1.tgz#72d3a68d598c9bdb3af2ad1e84f21d896abd4de3" - integrity sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ== - dependencies: - color-name "~1.1.4" - -color-name@1.1.3: - version "1.1.3" - resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.3.tgz#a7d0558bd89c42f795dd42328f740831ca53bc25" - integrity sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw== - -color-name@~1.1.4: - version "1.1.4" - resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.4.tgz#c2a09a87acbde69543de6f63fa3995c826c536a2" - integrity sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA== - -combined-stream@^1.0.8: - version "1.0.8" - resolved "https://registry.yarnpkg.com/combined-stream/-/combined-stream-1.0.8.tgz#c3d45a8b34fd730631a110a8a2520682b31d5a7f" - integrity sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg== - dependencies: - delayed-stream "~1.0.0" - -concat-map@0.0.1: - version "0.0.1" - resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b" - integrity sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg== - -convert-source-map@^1.6.0: - version "1.9.0" - resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-1.9.0.tgz#7faae62353fb4213366d0ca98358d22e8368b05f" - integrity sha512-ASFBup0Mz1uyiIjANan1jzLQami9z1PoYSZCiiYW2FczPbenXc45FZdBZLzOT+r6+iciuEModtmCti+hjaAk0A== - -convert-source-map@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-2.0.0.tgz#4b560f649fc4e918dd0ab75cf4961e8bc882d82a" - integrity sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg== - -create-jest@^29.7.0: - version "29.7.0" - resolved "https://registry.yarnpkg.com/create-jest/-/create-jest-29.7.0.tgz#a355c5b3cb1e1af02ba177fe7afd7feee49a5320" - integrity sha512-Adz2bdH0Vq3F53KEMJOoftQFutWCukm6J24wbPWRO4k1kMY7gS7ds/uoJkNuV8wDCtWWnuwGcJwpWcih+zEW1Q== - dependencies: - "@jest/types" "^29.6.3" - chalk "^4.0.0" - exit "^0.1.2" - graceful-fs "^4.2.9" - jest-config "^29.7.0" - jest-util "^29.7.0" - prompts "^2.0.1" - -cross-fetch@^3.1.5: - version "3.1.8" - resolved "https://registry.yarnpkg.com/cross-fetch/-/cross-fetch-3.1.8.tgz#0327eba65fd68a7d119f8fb2bf9334a1a7956f82" - integrity sha512-cvA+JwZoU0Xq+h6WkMvAUqPEYy92Obet6UdKLfW60qn99ftItKjB5T+BkyWOFWe2pUyfQ+IJHmpOTznqk1M6Kg== - dependencies: - node-fetch "^2.6.12" - -cross-spawn@^7.0.2, cross-spawn@^7.0.3: - version "7.0.3" - resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-7.0.3.tgz#f73a85b9d5d41d045551c177e2882d4ac85728a6" - integrity sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w== - dependencies: - path-key "^3.1.0" - shebang-command "^2.0.0" - which "^2.0.1" - -debug@^4.1.0, debug@^4.1.1, debug@^4.3.2, debug@^4.3.4: - version "4.3.4" - resolved 
"https://registry.yarnpkg.com/debug/-/debug-4.3.4.tgz#1319f6579357f2338d3337d2cdd4914bb5dcc865" - integrity sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ== - dependencies: - ms "2.1.2" - -dedent@^1.0.0: - version "1.5.1" - resolved "https://registry.yarnpkg.com/dedent/-/dedent-1.5.1.tgz#4f3fc94c8b711e9bb2800d185cd6ad20f2a90aff" - integrity sha512-+LxW+KLWxu3HW3M2w2ympwtqPrqYRzU8fqi6Fhd18fBALe15blJPI/I4+UHveMVG6lJqB4JNd4UG0S5cnVHwIg== - -deep-is@^0.1.3: - version "0.1.4" - resolved "https://registry.yarnpkg.com/deep-is/-/deep-is-0.1.4.tgz#a6f2dce612fadd2ef1f519b73551f17e85199831" - integrity sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ== - -deepmerge@^4.2.2: - version "4.3.1" - resolved "https://registry.yarnpkg.com/deepmerge/-/deepmerge-4.3.1.tgz#44b5f2147cd3b00d4b56137685966f26fd25dd4a" - integrity sha512-3sUqbMEc77XqpdNO7FRyRog+eW3ph+GYCbj+rK+uYyRMuwsVy0rMiVtPn+QJlKFvWP/1PYpapqYn0Me2knFn+A== - -default-browser-id@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/default-browser-id/-/default-browser-id-3.0.0.tgz#bee7bbbef1f4e75d31f98f4d3f1556a14cea790c" - integrity sha512-OZ1y3y0SqSICtE8DE4S8YOE9UZOJ8wO16fKWVP5J1Qz42kV9jcnMVFrEE/noXb/ss3Q4pZIH79kxofzyNNtUNA== - dependencies: - bplist-parser "^0.2.0" - untildify "^4.0.0" - -default-browser@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/default-browser/-/default-browser-4.0.0.tgz#53c9894f8810bf86696de117a6ce9085a3cbc7da" - integrity sha512-wX5pXO1+BrhMkSbROFsyxUm0i/cJEScyNhA4PPxc41ICuv05ZZB/MX28s8aZx6xjmatvebIapF6hLEKEcpneUA== - dependencies: - bundle-name "^3.0.0" - default-browser-id "^3.0.0" - execa "^7.1.1" - titleize "^3.0.0" - -define-lazy-prop@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/define-lazy-prop/-/define-lazy-prop-3.0.0.tgz#dbb19adfb746d7fc6d734a06b72f4a00d021255f" - integrity sha512-N+MeXYoqr3pOgn8xfyRPREN7gHakLYjhsHhWGT3fWAiL4IkAt0iDw14QiiEm2bE30c5XX5q0FtAA3CK5f9/BUg== - -delayed-stream@~1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/delayed-stream/-/delayed-stream-1.0.0.tgz#df3ae199acadfb7d440aaae0b29e2272b24ec619" - integrity sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ== - -detect-newline@^3.0.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/detect-newline/-/detect-newline-3.1.0.tgz#576f5dfc63ae1a192ff192d8ad3af6308991b651" - integrity sha512-TLz+x/vEXm/Y7P7wn1EJFNLxYpUD4TgMosxY6fAVJUnJMbupHBOncxyWUG9OpTaH9EBD7uFI5LfEgmMOc54DsA== - -diff-sequences@^29.6.3: - version "29.6.3" - resolved "https://registry.yarnpkg.com/diff-sequences/-/diff-sequences-29.6.3.tgz#4deaf894d11407c51efc8418012f9e70b84ea921" - integrity sha512-EjePK1srD3P08o2j4f0ExnylqRs5B9tJjcp9t1krH2qRi8CCdsYfwe9JgSLurFBWwq4uOlipzfk5fHNvwFKr8Q== - -dir-glob@^3.0.1: - version "3.0.1" - resolved "https://registry.yarnpkg.com/dir-glob/-/dir-glob-3.0.1.tgz#56dbf73d992a4a93ba1584f4534063fd2e41717f" - integrity sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA== - dependencies: - path-type "^4.0.0" - -doctrine@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/doctrine/-/doctrine-3.0.0.tgz#addebead72a6574db783639dc87a121773973961" - integrity sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w== - dependencies: - esutils "^2.0.2" - -electron-to-chromium@^1.4.535: - version "1.4.537" - resolved 
"https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.4.537.tgz#aac4101db53066be1e49baedd000a26bc754adc9" - integrity sha512-W1+g9qs9hviII0HAwOdehGYkr+zt7KKdmCcJcjH0mYg6oL8+ioT3Skjmt7BLoAQqXhjf40AXd+HlR4oAWMlXjA== - -emittery@^0.13.1: - version "0.13.1" - resolved "https://registry.yarnpkg.com/emittery/-/emittery-0.13.1.tgz#c04b8c3457490e0847ae51fced3af52d338e3dad" - integrity sha512-DeWwawk6r5yR9jFgnDKYt4sLS0LmHJJi3ZOnb5/JdbYwj3nW+FxQnHIjhBKz8YLC7oRNPVM9NQ47I3CVx34eqQ== - -emoji-regex@^8.0.0: - version "8.0.0" - resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-8.0.0.tgz#e818fd69ce5ccfcb404594f842963bf53164cc37" - integrity sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A== - -error-ex@^1.3.1: - version "1.3.2" - resolved "https://registry.yarnpkg.com/error-ex/-/error-ex-1.3.2.tgz#b4ac40648107fdcdcfae242f428bea8a14d4f1bf" - integrity sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g== - dependencies: - is-arrayish "^0.2.1" - -escalade@^3.1.1: - version "3.1.1" - resolved "https://registry.yarnpkg.com/escalade/-/escalade-3.1.1.tgz#d8cfdc7000965c5a0174b4a82eaa5c0552742e40" - integrity sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw== - -escape-string-regexp@^1.0.5: - version "1.0.5" - resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4" - integrity sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg== - -escape-string-regexp@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-2.0.0.tgz#a30304e99daa32e23b2fd20f51babd07cffca344" - integrity sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w== - -escape-string-regexp@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz#14ba83a5d373e3d311e5afca29cf5bfad965bf34" - integrity sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA== - -eslint-plugin-prettier@^5.0.1: - version "5.0.1" - resolved "https://registry.yarnpkg.com/eslint-plugin-prettier/-/eslint-plugin-prettier-5.0.1.tgz#a3b399f04378f79f066379f544e42d6b73f11515" - integrity sha512-m3u5RnR56asrwV/lDC4GHorlW75DsFfmUcjfCYylTUs85dBRnB7VM6xG8eCMJdeDRnppzmxZVf1GEPJvl1JmNg== - dependencies: - prettier-linter-helpers "^1.0.0" - synckit "^0.8.5" - -eslint-scope@^7.2.2: - version "7.2.2" - resolved "https://registry.yarnpkg.com/eslint-scope/-/eslint-scope-7.2.2.tgz#deb4f92563390f32006894af62a22dba1c46423f" - integrity sha512-dOt21O7lTMhDM+X9mB4GX+DZrZtCUJPL/wlcTqxyrx5IvO0IYtILdtrQGQp+8n5S0gwSVmOf9NQrjMOgfQZlIg== - dependencies: - esrecurse "^4.3.0" - estraverse "^5.2.0" - -eslint-visitor-keys@^3.3.0, eslint-visitor-keys@^3.4.1, eslint-visitor-keys@^3.4.3: - version "3.4.3" - resolved "https://registry.yarnpkg.com/eslint-visitor-keys/-/eslint-visitor-keys-3.4.3.tgz#0cd72fe8550e3c2eae156a96a4dddcd1c8ac5800" - integrity sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag== - -eslint@^8.52.0: - version "8.52.0" - resolved "https://registry.yarnpkg.com/eslint/-/eslint-8.52.0.tgz#d0cd4a1fac06427a61ef9242b9353f36ea7062fc" - integrity sha512-zh/JHnaixqHZsolRB/w9/02akBk9EPrOs9JwcTP2ek7yL5bVvXuRariiaAjjoJ5DvuwQ1WAE/HsMz+w17YgBCg== - dependencies: - 
"@eslint-community/eslint-utils" "^4.2.0" - "@eslint-community/regexpp" "^4.6.1" - "@eslint/eslintrc" "^2.1.2" - "@eslint/js" "8.52.0" - "@humanwhocodes/config-array" "^0.11.13" - "@humanwhocodes/module-importer" "^1.0.1" - "@nodelib/fs.walk" "^1.2.8" - "@ungap/structured-clone" "^1.2.0" - ajv "^6.12.4" - chalk "^4.0.0" - cross-spawn "^7.0.2" - debug "^4.3.2" - doctrine "^3.0.0" - escape-string-regexp "^4.0.0" - eslint-scope "^7.2.2" - eslint-visitor-keys "^3.4.3" - espree "^9.6.1" - esquery "^1.4.2" - esutils "^2.0.2" - fast-deep-equal "^3.1.3" - file-entry-cache "^6.0.1" - find-up "^5.0.0" - glob-parent "^6.0.2" - globals "^13.19.0" - graphemer "^1.4.0" - ignore "^5.2.0" - imurmurhash "^0.1.4" - is-glob "^4.0.0" - is-path-inside "^3.0.3" - js-yaml "^4.1.0" - json-stable-stringify-without-jsonify "^1.0.1" - levn "^0.4.1" - lodash.merge "^4.6.2" - minimatch "^3.1.2" - natural-compare "^1.4.0" - optionator "^0.9.3" - strip-ansi "^6.0.1" - text-table "^0.2.0" - -espree@^9.6.0, espree@^9.6.1: - version "9.6.1" - resolved "https://registry.yarnpkg.com/espree/-/espree-9.6.1.tgz#a2a17b8e434690a5432f2f8018ce71d331a48c6f" - integrity sha512-oruZaFkjorTpF32kDSI5/75ViwGeZginGGy2NoOSg3Q9bnwlnmDm4HLnkl0RE3n+njDXR037aY1+x58Z/zFdwQ== - dependencies: - acorn "^8.9.0" - acorn-jsx "^5.3.2" - eslint-visitor-keys "^3.4.1" - -esprima@^4.0.0: - version "4.0.1" - resolved "https://registry.yarnpkg.com/esprima/-/esprima-4.0.1.tgz#13b04cdb3e6c5d19df91ab6987a8695619b0aa71" - integrity sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A== - -esquery@^1.4.2: - version "1.5.0" - resolved "https://registry.yarnpkg.com/esquery/-/esquery-1.5.0.tgz#6ce17738de8577694edd7361c57182ac8cb0db0b" - integrity sha512-YQLXUplAwJgCydQ78IMJywZCceoqk1oH01OERdSAJc/7U2AylwjhSCLDEtqwg811idIS/9fIU5GjG73IgjKMVg== - dependencies: - estraverse "^5.1.0" - -esrecurse@^4.3.0: - version "4.3.0" - resolved "https://registry.yarnpkg.com/esrecurse/-/esrecurse-4.3.0.tgz#7ad7964d679abb28bee72cec63758b1c5d2c9921" - integrity sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag== - dependencies: - estraverse "^5.2.0" - -estraverse@^5.1.0, estraverse@^5.2.0: - version "5.3.0" - resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-5.3.0.tgz#2eea5290702f26ab8fe5370370ff86c965d21123" - integrity sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA== - -esutils@^2.0.2: - version "2.0.3" - resolved "https://registry.yarnpkg.com/esutils/-/esutils-2.0.3.tgz#74d2eb4de0b8da1293711910d50775b9b710ef64" - integrity sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g== - -execa@^5.0.0: - version "5.1.1" - resolved "https://registry.yarnpkg.com/execa/-/execa-5.1.1.tgz#f80ad9cbf4298f7bd1d4c9555c21e93741c411dd" - integrity sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg== - dependencies: - cross-spawn "^7.0.3" - get-stream "^6.0.0" - human-signals "^2.1.0" - is-stream "^2.0.0" - merge-stream "^2.0.0" - npm-run-path "^4.0.1" - onetime "^5.1.2" - signal-exit "^3.0.3" - strip-final-newline "^2.0.0" - -execa@^7.1.1: - version "7.2.0" - resolved "https://registry.yarnpkg.com/execa/-/execa-7.2.0.tgz#657e75ba984f42a70f38928cedc87d6f2d4fe4e9" - integrity sha512-UduyVP7TLB5IcAQl+OzLyLcS/l32W/GLg+AhHJ+ow40FOk2U3SAllPwR44v4vmdFwIWqpdwxxpQbF1n5ta9seA== - dependencies: - cross-spawn "^7.0.3" - get-stream "^6.0.1" - human-signals "^4.3.0" - is-stream "^3.0.0" - 
merge-stream "^2.0.0" - npm-run-path "^5.1.0" - onetime "^6.0.0" - signal-exit "^3.0.7" - strip-final-newline "^3.0.0" - -exit@^0.1.2: - version "0.1.2" - resolved "https://registry.yarnpkg.com/exit/-/exit-0.1.2.tgz#0632638f8d877cc82107d30a0fff1a17cba1cd0c" - integrity sha512-Zk/eNKV2zbjpKzrsQ+n1G6poVbErQxJ0LBOJXaKZ1EViLzH+hrLu9cdXI4zw9dBQJslwBEpbQ2P1oS7nDxs6jQ== - -expect@^29.0.0, expect@^29.7.0: - version "29.7.0" - resolved "https://registry.yarnpkg.com/expect/-/expect-29.7.0.tgz#578874590dcb3214514084c08115d8aee61e11bc" - integrity sha512-2Zks0hf1VLFYI1kbh0I5jP3KHHyCHpkfyHBzsSXRFgl/Bg9mWYfMW8oD+PdMPlEwy5HNsR9JutYy6pMeOh61nw== - dependencies: - "@jest/expect-utils" "^29.7.0" - jest-get-type "^29.6.3" - jest-matcher-utils "^29.7.0" - jest-message-util "^29.7.0" - jest-util "^29.7.0" - -fast-deep-equal@^3.1.1, fast-deep-equal@^3.1.3: - version "3.1.3" - resolved "https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz#3a7d56b559d6cbc3eb512325244e619a65c6c525" - integrity sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q== - -fast-diff@^1.1.2: - version "1.3.0" - resolved "https://registry.yarnpkg.com/fast-diff/-/fast-diff-1.3.0.tgz#ece407fa550a64d638536cd727e129c61616e0f0" - integrity sha512-VxPP4NqbUjj6MaAOafWeUn2cXWLcCtljklUtZf0Ind4XQ+QPtmA0b18zZy0jIQx+ExRVCR/ZQpBmik5lXshNsw== - -fast-glob@^3.2.9, fast-glob@^3.3.0: - version "3.3.1" - resolved "https://registry.yarnpkg.com/fast-glob/-/fast-glob-3.3.1.tgz#784b4e897340f3dbbef17413b3f11acf03c874c4" - integrity sha512-kNFPyjhh5cKjrUltxs+wFx+ZkbRaxxmZ+X0ZU31SOsxCEtP9VPgtq2teZw1DebupL5GmDaNQ6yKMMVcM41iqDg== - dependencies: - "@nodelib/fs.stat" "^2.0.2" - "@nodelib/fs.walk" "^1.2.3" - glob-parent "^5.1.2" - merge2 "^1.3.0" - micromatch "^4.0.4" - -fast-json-stable-stringify@2.x, fast-json-stable-stringify@^2.0.0, fast-json-stable-stringify@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz#874bf69c6f404c2b5d99c481341399fd55892633" - integrity sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw== - -fast-levenshtein@^2.0.6: - version "2.0.6" - resolved "https://registry.yarnpkg.com/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz#3d8a5c66883a16a30ca8643e851f19baa7797917" - integrity sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw== - -fastq@^1.6.0: - version "1.15.0" - resolved "https://registry.yarnpkg.com/fastq/-/fastq-1.15.0.tgz#d04d07c6a2a68fe4599fea8d2e103a937fae6b3a" - integrity sha512-wBrocU2LCXXa+lWBt8RoIRD89Fi8OdABODa/kEnyeyjS5aZO5/GNvI5sEINADqP/h8M29UHTHUb53sUu5Ihqdw== - dependencies: - reusify "^1.0.4" - -fb-watchman@^2.0.0: - version "2.0.2" - resolved "https://registry.yarnpkg.com/fb-watchman/-/fb-watchman-2.0.2.tgz#e9524ee6b5c77e9e5001af0f85f3adbb8623255c" - integrity sha512-p5161BqbuCaSnB8jIbzQHOlpgsPmK5rJVDfDKO91Axs5NC1uu3HRQm6wt9cd9/+GtQQIO53JdGXXoyDpTAsgYA== - dependencies: - bser "2.1.1" - -file-entry-cache@^6.0.1: - version "6.0.1" - resolved "https://registry.yarnpkg.com/file-entry-cache/-/file-entry-cache-6.0.1.tgz#211b2dd9659cb0394b073e7323ac3c933d522027" - integrity sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg== - dependencies: - flat-cache "^3.0.4" - -fill-range@^7.0.1: - version "7.0.1" - resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-7.0.1.tgz#1919a6a7c75fe38b2c7c77e5198535da9acdda40" - integrity 
sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ== - dependencies: - to-regex-range "^5.0.1" - -find-up@^4.0.0, find-up@^4.1.0: - version "4.1.0" - resolved "https://registry.yarnpkg.com/find-up/-/find-up-4.1.0.tgz#97afe7d6cdc0bc5928584b7c8d7b16e8a9aa5d19" - integrity sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw== - dependencies: - locate-path "^5.0.0" - path-exists "^4.0.0" - -find-up@^5.0.0: - version "5.0.0" - resolved "https://registry.yarnpkg.com/find-up/-/find-up-5.0.0.tgz#4c92819ecb7083561e4f4a240a86be5198f536fc" - integrity sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng== - dependencies: - locate-path "^6.0.0" - path-exists "^4.0.0" - -flat-cache@^3.0.4: - version "3.1.0" - resolved "https://registry.yarnpkg.com/flat-cache/-/flat-cache-3.1.0.tgz#0e54ab4a1a60fe87e2946b6b00657f1c99e1af3f" - integrity sha512-OHx4Qwrrt0E4jEIcI5/Xb+f+QmJYNj2rrK8wiIdQOIrB9WrrJL8cjZvXdXuBTkkEwEqLycb5BeZDV1o2i9bTew== - dependencies: - flatted "^3.2.7" - keyv "^4.5.3" - rimraf "^3.0.2" - -flatted@^3.2.7: - version "3.2.9" - resolved "https://registry.yarnpkg.com/flatted/-/flatted-3.2.9.tgz#7eb4c67ca1ba34232ca9d2d93e9886e611ad7daf" - integrity sha512-36yxDn5H7OFZQla0/jFJmbIKTdZAQHngCedGxiMmpNfEZM0sdEeT+WczLQrjK6D7o2aiyLYDnkw0R3JK0Qv1RQ== - -form-data@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/form-data/-/form-data-4.0.0.tgz#93919daeaf361ee529584b9b31664dc12c9fa452" - integrity sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww== - dependencies: - asynckit "^0.4.0" - combined-stream "^1.0.8" - mime-types "^2.1.12" - -fs.realpath@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f" - integrity sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw== - -fsevents@^2.3.2: - version "2.3.3" - resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-2.3.3.tgz#cac6407785d03675a2a5e1a5305c697b347d90d6" - integrity sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw== - -function-bind@^1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.1.tgz#a56899d3ea3c9bab874bb9773b7c5ede92f4895d" - integrity sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A== - -gensync@^1.0.0-beta.2: - version "1.0.0-beta.2" - resolved "https://registry.yarnpkg.com/gensync/-/gensync-1.0.0-beta.2.tgz#32a6ee76c3d7f52d46b2b1ae5d93fea8580a25e0" - integrity sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg== - -get-caller-file@^2.0.5: - version "2.0.5" - resolved "https://registry.yarnpkg.com/get-caller-file/-/get-caller-file-2.0.5.tgz#4f94412a82db32f36e3b0b9741f8a97feb031f7e" - integrity sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg== - -get-package-type@^0.1.0: - version "0.1.0" - resolved "https://registry.yarnpkg.com/get-package-type/-/get-package-type-0.1.0.tgz#8de2d803cff44df3bc6c456e6668b36c3926e11a" - integrity sha512-pjzuKtY64GYfWizNAJ0fr9VqttZkNiK2iS430LtIHzjBEr6bX8Am2zm4sW4Ro5wjWW5cAlRL1qAMTcXbjNAO2Q== - -get-stream@^6.0.0, get-stream@^6.0.1: - version "6.0.1" - resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-6.0.1.tgz#a262d8eef67aced57c2852ad6167526a43cbf7b7" - 
integrity sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg== - -glob-parent@^5.1.2: - version "5.1.2" - resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-5.1.2.tgz#869832c58034fe68a4093c17dc15e8340d8401c4" - integrity sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow== - dependencies: - is-glob "^4.0.1" - -glob-parent@^6.0.2: - version "6.0.2" - resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-6.0.2.tgz#6d237d99083950c79290f24c7642a3de9a28f9e3" - integrity sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A== - dependencies: - is-glob "^4.0.3" - -glob@^7.1.3, glob@^7.1.4: - version "7.2.3" - resolved "https://registry.yarnpkg.com/glob/-/glob-7.2.3.tgz#b8df0fb802bbfa8e89bd1d938b4e16578ed44f2b" - integrity sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q== - dependencies: - fs.realpath "^1.0.0" - inflight "^1.0.4" - inherits "2" - minimatch "^3.1.1" - once "^1.3.0" - path-is-absolute "^1.0.0" - -globals@^11.1.0: - version "11.12.0" - resolved "https://registry.yarnpkg.com/globals/-/globals-11.12.0.tgz#ab8795338868a0babd8525758018c2a7eb95c42e" - integrity sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA== - -globals@^13.19.0: - version "13.22.0" - resolved "https://registry.yarnpkg.com/globals/-/globals-13.22.0.tgz#0c9fcb9c48a2494fbb5edbfee644285543eba9d8" - integrity sha512-H1Ddc/PbZHTDVJSnj8kWptIRSD6AM3pK+mKytuIVF4uoBV7rshFlhhvA58ceJ5wp3Er58w6zj7bykMpYXt3ETw== - dependencies: - type-fest "^0.20.2" - -globby@^11.1.0: - version "11.1.0" - resolved "https://registry.yarnpkg.com/globby/-/globby-11.1.0.tgz#bd4be98bb042f83d796f7e3811991fbe82a0d34b" - integrity sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g== - dependencies: - array-union "^2.1.0" - dir-glob "^3.0.1" - fast-glob "^3.2.9" - ignore "^5.2.0" - merge2 "^1.4.1" - slash "^3.0.0" - -graceful-fs@^4.2.9: - version "4.2.11" - resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.11.tgz#4183e4e8bf08bb6e05bbb2f7d2e0c8f712ca40e3" - integrity sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ== - -graphemer@^1.4.0: - version "1.4.0" - resolved "https://registry.yarnpkg.com/graphemer/-/graphemer-1.4.0.tgz#fb2f1d55e0e3a1849aeffc90c4fa0dd53a0e66c6" - integrity sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag== - -has-flag@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-3.0.0.tgz#b5d454dc2199ae225699f3467e5a07f3b955bafd" - integrity sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw== - -has-flag@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-4.0.0.tgz#944771fd9c81c81265c4d6941860da06bb59479b" - integrity sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ== - -has@^1.0.3: - version "1.0.3" - resolved "https://registry.yarnpkg.com/has/-/has-1.0.3.tgz#722d7cbfc1f6aa8241f16dd814e011e1f41e8796" - integrity sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw== - dependencies: - function-bind "^1.1.1" - -html-escaper@^2.0.0: - version "2.0.2" - resolved 
"https://registry.yarnpkg.com/html-escaper/-/html-escaper-2.0.2.tgz#dfd60027da36a36dfcbe236262c00a5822681453" - integrity sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg== - -human-signals@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/human-signals/-/human-signals-2.1.0.tgz#dc91fcba42e4d06e4abaed33b3e7a3c02f514ea0" - integrity sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw== - -human-signals@^4.3.0: - version "4.3.1" - resolved "https://registry.yarnpkg.com/human-signals/-/human-signals-4.3.1.tgz#ab7f811e851fca97ffbd2c1fe9a958964de321b2" - integrity sha512-nZXjEF2nbo7lIw3mgYjItAfgQXog3OjJogSbKa2CQIIvSGWcKgeJnQlNXip6NglNzYH45nSRiEVimMvYL8DDqQ== - -ignore@^5.2.0, ignore@^5.2.4: - version "5.2.4" - resolved "https://registry.yarnpkg.com/ignore/-/ignore-5.2.4.tgz#a291c0c6178ff1b960befe47fcdec301674a6324" - integrity sha512-MAb38BcSbH0eHNBxn7ql2NH/kX33OkB3lZ1BNdh7ENeRChHTYsTvWrMubiIAMNS2llXEEgZ1MUOBtXChP3kaFQ== - -import-fresh@^3.2.1: - version "3.3.0" - resolved "https://registry.yarnpkg.com/import-fresh/-/import-fresh-3.3.0.tgz#37162c25fcb9ebaa2e6e53d5b4d88ce17d9e0c2b" - integrity sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw== - dependencies: - parent-module "^1.0.0" - resolve-from "^4.0.0" - -import-local@^3.0.2: - version "3.1.0" - resolved "https://registry.yarnpkg.com/import-local/-/import-local-3.1.0.tgz#b4479df8a5fd44f6cdce24070675676063c95cb4" - integrity sha512-ASB07uLtnDs1o6EHjKpX34BKYDSqnFerfTOJL2HvMqF70LnxpjkzDB8J44oT9pu4AMPkQwf8jl6szgvNd2tRIg== - dependencies: - pkg-dir "^4.2.0" - resolve-cwd "^3.0.0" - -imurmurhash@^0.1.4: - version "0.1.4" - resolved "https://registry.yarnpkg.com/imurmurhash/-/imurmurhash-0.1.4.tgz#9218b9b2b928a238b13dc4fb6b6d576f231453ea" - integrity sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA== - -inflight@^1.0.4: - version "1.0.6" - resolved "https://registry.yarnpkg.com/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9" - integrity sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA== - dependencies: - once "^1.3.0" - wrappy "1" - -inherits@2: - version "2.0.4" - resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c" - integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== - -is-arrayish@^0.2.1: - version "0.2.1" - resolved "https://registry.yarnpkg.com/is-arrayish/-/is-arrayish-0.2.1.tgz#77c99840527aa8ecb1a8ba697b80645a7a926a9d" - integrity sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg== - -is-core-module@^2.13.0: - version "2.13.0" - resolved "https://registry.yarnpkg.com/is-core-module/-/is-core-module-2.13.0.tgz#bb52aa6e2cbd49a30c2ba68c42bf3435ba6072db" - integrity sha512-Z7dk6Qo8pOCp3l4tsX2C5ZVas4V+UxwQodwZhLopL91TX8UyyHEXafPcyoeeWuLrwzHcr3igO78wNLwHJHsMCQ== - dependencies: - has "^1.0.3" - -is-docker@^2.0.0: - version "2.2.1" - resolved "https://registry.yarnpkg.com/is-docker/-/is-docker-2.2.1.tgz#33eeabe23cfe86f14bde4408a02c0cfb853acdaa" - integrity sha512-F+i2BKsFrH66iaUFc0woD8sLy8getkwTwtOBjvs56Cx4CgJDeKQeqfz8wAYiSb8JOprWhHH5p77PbmYCvvUuXQ== - -is-docker@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/is-docker/-/is-docker-3.0.0.tgz#90093aa3106277d8a77a5910dbae71747e15a200" - integrity 
sha512-eljcgEDlEns/7AXFosB5K/2nCM4P7FQPkGc/DWLy5rmFEWvZayGrik1d9/QIY5nJ4f9YsVvBkA6kJpHn9rISdQ== - -is-extglob@^2.1.1: - version "2.1.1" - resolved "https://registry.yarnpkg.com/is-extglob/-/is-extglob-2.1.1.tgz#a88c02535791f02ed37c76a1b9ea9773c833f8c2" - integrity sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ== - -is-fullwidth-code-point@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz#f116f8064fe90b3f7844a38997c0b75051269f1d" - integrity sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg== - -is-generator-fn@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/is-generator-fn/-/is-generator-fn-2.1.0.tgz#7d140adc389aaf3011a8f2a2a4cfa6faadffb118" - integrity sha512-cTIB4yPYL/Grw0EaSzASzg6bBy9gqCofvWN8okThAYIxKJZC+udlRAmGbM0XLeniEJSs8uEgHPGuHSe1XsOLSQ== - -is-glob@^4.0.0, is-glob@^4.0.1, is-glob@^4.0.3: - version "4.0.3" - resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-4.0.3.tgz#64f61e42cbbb2eec2071a9dac0b28ba1e65d5084" - integrity sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg== - dependencies: - is-extglob "^2.1.1" - -is-inside-container@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/is-inside-container/-/is-inside-container-1.0.0.tgz#e81fba699662eb31dbdaf26766a61d4814717ea4" - integrity sha512-KIYLCCJghfHZxqjYBE7rEy0OBuTd5xCHS7tHVgvCLkx7StIoaxwNW3hCALgEUjFfeRk+MG/Qxmp/vtETEF3tRA== - dependencies: - is-docker "^3.0.0" - -is-number@^7.0.0: - version "7.0.0" - resolved "https://registry.yarnpkg.com/is-number/-/is-number-7.0.0.tgz#7535345b896734d5f80c4d06c50955527a14f12b" - integrity sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng== - -is-path-inside@^3.0.3: - version "3.0.3" - resolved "https://registry.yarnpkg.com/is-path-inside/-/is-path-inside-3.0.3.tgz#d231362e53a07ff2b0e0ea7fed049161ffd16283" - integrity sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ== - -is-stream@^2.0.0: - version "2.0.1" - resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-2.0.1.tgz#fac1e3d53b97ad5a9d0ae9cef2389f5810a5c077" - integrity sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg== - -is-stream@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-3.0.0.tgz#e6bfd7aa6bef69f4f472ce9bb681e3e57b4319ac" - integrity sha512-LnQR4bZ9IADDRSkvpqMGvt/tEJWclzklNgSw48V5EAaAeDd6qGvN8ei6k5p0tvxSR171VmGyHuTiAOfxAbr8kA== - -is-wsl@^2.2.0: - version "2.2.0" - resolved "https://registry.yarnpkg.com/is-wsl/-/is-wsl-2.2.0.tgz#74a4c76e77ca9fd3f932f290c17ea326cd157271" - integrity sha512-fKzAra0rGJUUBwGBgNkHZuToZcn+TtXHpeCgmkMJMMYx1sQDYaCSyjJBSCa2nH1DGm7s3n1oBnohoVTBaN7Lww== - dependencies: - is-docker "^2.0.0" - -isexe@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10" - integrity sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw== - -istanbul-lib-coverage@^3.0.0, istanbul-lib-coverage@^3.2.0: - version "3.2.0" - resolved "https://registry.yarnpkg.com/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.0.tgz#189e7909d0a39fa5a3dfad5b03f71947770191d3" - integrity sha512-eOeJ5BHCmHYvQK7xt9GkdHuzuCGS1Y6g9Gvnx3Ym33fz/HpLRYxiS0wHNr+m/MBC8B647Xt608vCDEvhl9c6Mw== - 
-istanbul-lib-instrument@^5.0.4: - version "5.2.1" - resolved "https://registry.yarnpkg.com/istanbul-lib-instrument/-/istanbul-lib-instrument-5.2.1.tgz#d10c8885c2125574e1c231cacadf955675e1ce3d" - integrity sha512-pzqtp31nLv/XFOzXGuvhCb8qhjmTVo5vjVk19XE4CRlSWz0KoeJ3bw9XsA7nOp9YBf4qHjwBxkDzKcME/J29Yg== - dependencies: - "@babel/core" "^7.12.3" - "@babel/parser" "^7.14.7" - "@istanbuljs/schema" "^0.1.2" - istanbul-lib-coverage "^3.2.0" - semver "^6.3.0" - -istanbul-lib-instrument@^6.0.0: - version "6.0.0" - resolved "https://registry.yarnpkg.com/istanbul-lib-instrument/-/istanbul-lib-instrument-6.0.0.tgz#7a8af094cbfff1d5bb280f62ce043695ae8dd5b8" - integrity sha512-x58orMzEVfzPUKqlbLd1hXCnySCxKdDKa6Rjg97CwuLLRI4g3FHTdnExu1OqffVFay6zeMW+T6/DowFLndWnIw== - dependencies: - "@babel/core" "^7.12.3" - "@babel/parser" "^7.14.7" - "@istanbuljs/schema" "^0.1.2" - istanbul-lib-coverage "^3.2.0" - semver "^7.5.4" - -istanbul-lib-report@^3.0.0: - version "3.0.1" - resolved "https://registry.yarnpkg.com/istanbul-lib-report/-/istanbul-lib-report-3.0.1.tgz#908305bac9a5bd175ac6a74489eafd0fc2445a7d" - integrity sha512-GCfE1mtsHGOELCU8e/Z7YWzpmybrx/+dSTfLrvY8qRmaY6zXTKWn6WQIjaAFw069icm6GVMNkgu0NzI4iPZUNw== - dependencies: - istanbul-lib-coverage "^3.0.0" - make-dir "^4.0.0" - supports-color "^7.1.0" - -istanbul-lib-source-maps@^4.0.0: - version "4.0.1" - resolved "https://registry.yarnpkg.com/istanbul-lib-source-maps/-/istanbul-lib-source-maps-4.0.1.tgz#895f3a709fcfba34c6de5a42939022f3e4358551" - integrity sha512-n3s8EwkdFIJCG3BPKBYvskgXGoy88ARzvegkitk60NxRdwltLOTaH7CUiMRXvwYorl0Q712iEjcWB+fK/MrWVw== - dependencies: - debug "^4.1.1" - istanbul-lib-coverage "^3.0.0" - source-map "^0.6.1" - -istanbul-reports@^3.1.3: - version "3.1.6" - resolved "https://registry.yarnpkg.com/istanbul-reports/-/istanbul-reports-3.1.6.tgz#2544bcab4768154281a2f0870471902704ccaa1a" - integrity sha512-TLgnMkKg3iTDsQ9PbPTdpfAK2DzjF9mqUG7RMgcQl8oFjad8ob4laGxv5XV5U9MAfx8D6tSJiUyuAwzLicaxlg== - dependencies: - html-escaper "^2.0.0" - istanbul-lib-report "^3.0.0" - -jest-changed-files@^29.7.0: - version "29.7.0" - resolved "https://registry.yarnpkg.com/jest-changed-files/-/jest-changed-files-29.7.0.tgz#1c06d07e77c78e1585d020424dedc10d6e17ac3a" - integrity sha512-fEArFiwf1BpQ+4bXSprcDc3/x4HSzL4al2tozwVpDFpsxALjLYdyiIK4e5Vz66GQJIbXJ82+35PtysofptNX2w== - dependencies: - execa "^5.0.0" - jest-util "^29.7.0" - p-limit "^3.1.0" - -jest-circus@^29.7.0: - version "29.7.0" - resolved "https://registry.yarnpkg.com/jest-circus/-/jest-circus-29.7.0.tgz#b6817a45fcc835d8b16d5962d0c026473ee3668a" - integrity sha512-3E1nCMgipcTkCocFwM90XXQab9bS+GMsjdpmPrlelaxwD93Ad8iVEjX/vvHPdLPnFf+L40u+5+iutRdA1N9myw== - dependencies: - "@jest/environment" "^29.7.0" - "@jest/expect" "^29.7.0" - "@jest/test-result" "^29.7.0" - "@jest/types" "^29.6.3" - "@types/node" "*" - chalk "^4.0.0" - co "^4.6.0" - dedent "^1.0.0" - is-generator-fn "^2.0.0" - jest-each "^29.7.0" - jest-matcher-utils "^29.7.0" - jest-message-util "^29.7.0" - jest-runtime "^29.7.0" - jest-snapshot "^29.7.0" - jest-util "^29.7.0" - p-limit "^3.1.0" - pretty-format "^29.7.0" - pure-rand "^6.0.0" - slash "^3.0.0" - stack-utils "^2.0.3" - -jest-cli@^29.7.0: - version "29.7.0" - resolved "https://registry.yarnpkg.com/jest-cli/-/jest-cli-29.7.0.tgz#5592c940798e0cae677eec169264f2d839a37995" - integrity sha512-OVVobw2IubN/GSYsxETi+gOe7Ka59EFMR/twOU3Jb2GnKKeMGJB5SGUUrEz3SFVmJASUdZUzy83sLNNQ2gZslg== - dependencies: - "@jest/core" "^29.7.0" - "@jest/test-result" "^29.7.0" - "@jest/types" "^29.6.3" - chalk "^4.0.0" 
- create-jest "^29.7.0" - exit "^0.1.2" - import-local "^3.0.2" - jest-config "^29.7.0" - jest-util "^29.7.0" - jest-validate "^29.7.0" - yargs "^17.3.1" - -jest-config@^29.7.0: - version "29.7.0" - resolved "https://registry.yarnpkg.com/jest-config/-/jest-config-29.7.0.tgz#bcbda8806dbcc01b1e316a46bb74085a84b0245f" - integrity sha512-uXbpfeQ7R6TZBqI3/TxCU4q4ttk3u0PJeC+E0zbfSoSjq6bJ7buBPxzQPL0ifrkY4DNu4JUdk0ImlBUYi840eQ== - dependencies: - "@babel/core" "^7.11.6" - "@jest/test-sequencer" "^29.7.0" - "@jest/types" "^29.6.3" - babel-jest "^29.7.0" - chalk "^4.0.0" - ci-info "^3.2.0" - deepmerge "^4.2.2" - glob "^7.1.3" - graceful-fs "^4.2.9" - jest-circus "^29.7.0" - jest-environment-node "^29.7.0" - jest-get-type "^29.6.3" - jest-regex-util "^29.6.3" - jest-resolve "^29.7.0" - jest-runner "^29.7.0" - jest-util "^29.7.0" - jest-validate "^29.7.0" - micromatch "^4.0.4" - parse-json "^5.2.0" - pretty-format "^29.7.0" - slash "^3.0.0" - strip-json-comments "^3.1.1" - -jest-diff@^29.7.0: - version "29.7.0" - resolved "https://registry.yarnpkg.com/jest-diff/-/jest-diff-29.7.0.tgz#017934a66ebb7ecf6f205e84699be10afd70458a" - integrity sha512-LMIgiIrhigmPrs03JHpxUh2yISK3vLFPkAodPeo0+BuF7wA2FoQbkEg1u8gBYBThncu7e1oEDUfIXVuTqLRUjw== - dependencies: - chalk "^4.0.0" - diff-sequences "^29.6.3" - jest-get-type "^29.6.3" - pretty-format "^29.7.0" - -jest-docblock@^29.7.0: - version "29.7.0" - resolved "https://registry.yarnpkg.com/jest-docblock/-/jest-docblock-29.7.0.tgz#8fddb6adc3cdc955c93e2a87f61cfd350d5d119a" - integrity sha512-q617Auw3A612guyaFgsbFeYpNP5t2aoUNLwBUbc/0kD1R4t9ixDbyFTHd1nok4epoVFpr7PmeWHrhvuV3XaJ4g== - dependencies: - detect-newline "^3.0.0" - -jest-each@^29.7.0: - version "29.7.0" - resolved "https://registry.yarnpkg.com/jest-each/-/jest-each-29.7.0.tgz#162a9b3f2328bdd991beaabffbb74745e56577d1" - integrity sha512-gns+Er14+ZrEoC5fhOfYCY1LOHHr0TI+rQUHZS8Ttw2l7gl+80eHc/gFf2Ktkw0+SIACDTeWvpFcv3B04VembQ== - dependencies: - "@jest/types" "^29.6.3" - chalk "^4.0.0" - jest-get-type "^29.6.3" - jest-util "^29.7.0" - pretty-format "^29.7.0" - -jest-environment-node@^29.7.0: - version "29.7.0" - resolved "https://registry.yarnpkg.com/jest-environment-node/-/jest-environment-node-29.7.0.tgz#0b93e111dda8ec120bc8300e6d1fb9576e164376" - integrity sha512-DOSwCRqXirTOyheM+4d5YZOrWcdu0LNZ87ewUoywbcb2XR4wKgqiG8vNeYwhjFMbEkfju7wx2GYH0P2gevGvFw== - dependencies: - "@jest/environment" "^29.7.0" - "@jest/fake-timers" "^29.7.0" - "@jest/types" "^29.6.3" - "@types/node" "*" - jest-mock "^29.7.0" - jest-util "^29.7.0" - -jest-get-type@^29.6.3: - version "29.6.3" - resolved "https://registry.yarnpkg.com/jest-get-type/-/jest-get-type-29.6.3.tgz#36f499fdcea197c1045a127319c0481723908fd1" - integrity sha512-zrteXnqYxfQh7l5FHyL38jL39di8H8rHoecLH3JNxH3BwOrBsNeabdap5e0I23lD4HHI8W5VFBZqG4Eaq5LNcw== - -jest-haste-map@^29.7.0: - version "29.7.0" - resolved "https://registry.yarnpkg.com/jest-haste-map/-/jest-haste-map-29.7.0.tgz#3c2396524482f5a0506376e6c858c3bbcc17b104" - integrity sha512-fP8u2pyfqx0K1rGn1R9pyE0/KTn+G7PxktWidOBTqFPLYX0b9ksaMFkhK5vrS3DVun09pckLdlx90QthlW7AmA== - dependencies: - "@jest/types" "^29.6.3" - "@types/graceful-fs" "^4.1.3" - "@types/node" "*" - anymatch "^3.0.3" - fb-watchman "^2.0.0" - graceful-fs "^4.2.9" - jest-regex-util "^29.6.3" - jest-util "^29.7.0" - jest-worker "^29.7.0" - micromatch "^4.0.4" - walker "^1.0.8" - optionalDependencies: - fsevents "^2.3.2" - -jest-leak-detector@^29.7.0: - version "29.7.0" - resolved 
"https://registry.yarnpkg.com/jest-leak-detector/-/jest-leak-detector-29.7.0.tgz#5b7ec0dadfdfec0ca383dc9aa016d36b5ea4c728" - integrity sha512-kYA8IJcSYtST2BY9I+SMC32nDpBT3J2NvWJx8+JCuCdl/CR1I4EKUJROiP8XtCcxqgTTBGJNdbB1A8XRKbTetw== - dependencies: - jest-get-type "^29.6.3" - pretty-format "^29.7.0" - -jest-matcher-utils@^29.7.0: - version "29.7.0" - resolved "https://registry.yarnpkg.com/jest-matcher-utils/-/jest-matcher-utils-29.7.0.tgz#ae8fec79ff249fd592ce80e3ee474e83a6c44f12" - integrity sha512-sBkD+Xi9DtcChsI3L3u0+N0opgPYnCRPtGcQYrgXmR+hmt/fYfWAL0xRXYU8eWOdfuLgBe0YCW3AFtnRLagq/g== - dependencies: - chalk "^4.0.0" - jest-diff "^29.7.0" - jest-get-type "^29.6.3" - pretty-format "^29.7.0" - -jest-message-util@^29.7.0: - version "29.7.0" - resolved "https://registry.yarnpkg.com/jest-message-util/-/jest-message-util-29.7.0.tgz#8bc392e204e95dfe7564abbe72a404e28e51f7f3" - integrity sha512-GBEV4GRADeP+qtB2+6u61stea8mGcOT4mCtrYISZwfu9/ISHFJ/5zOMXYbpBE9RsS5+Gb63DW4FgmnKJ79Kf6w== - dependencies: - "@babel/code-frame" "^7.12.13" - "@jest/types" "^29.6.3" - "@types/stack-utils" "^2.0.0" - chalk "^4.0.0" - graceful-fs "^4.2.9" - micromatch "^4.0.4" - pretty-format "^29.7.0" - slash "^3.0.0" - stack-utils "^2.0.3" - -jest-mock@^29.7.0: - version "29.7.0" - resolved "https://registry.yarnpkg.com/jest-mock/-/jest-mock-29.7.0.tgz#4e836cf60e99c6fcfabe9f99d017f3fdd50a6347" - integrity sha512-ITOMZn+UkYS4ZFh83xYAOzWStloNzJFO2s8DWrE4lhtGD+AorgnbkiKERe4wQVBydIGPx059g6riW5Btp6Llnw== - dependencies: - "@jest/types" "^29.6.3" - "@types/node" "*" - jest-util "^29.7.0" - -jest-pnp-resolver@^1.2.2: - version "1.2.3" - resolved "https://registry.yarnpkg.com/jest-pnp-resolver/-/jest-pnp-resolver-1.2.3.tgz#930b1546164d4ad5937d5540e711d4d38d4cad2e" - integrity sha512-+3NpwQEnRoIBtx4fyhblQDPgJI0H1IEIkX7ShLUjPGA7TtUTvI1oiKi3SR4oBR0hQhQR80l4WAe5RrXBwWMA8w== - -jest-regex-util@^29.6.3: - version "29.6.3" - resolved "https://registry.yarnpkg.com/jest-regex-util/-/jest-regex-util-29.6.3.tgz#4a556d9c776af68e1c5f48194f4d0327d24e8a52" - integrity sha512-KJJBsRCyyLNWCNBOvZyRDnAIfUiRJ8v+hOBQYGn8gDyF3UegwiP4gwRR3/SDa42g1YbVycTidUF3rKjyLFDWbg== - -jest-resolve-dependencies@^29.7.0: - version "29.7.0" - resolved "https://registry.yarnpkg.com/jest-resolve-dependencies/-/jest-resolve-dependencies-29.7.0.tgz#1b04f2c095f37fc776ff40803dc92921b1e88428" - integrity sha512-un0zD/6qxJ+S0et7WxeI3H5XSe9lTBBR7bOHCHXkKR6luG5mwDDlIzVQ0V5cZCuoTgEdcdwzTghYkTWfubi+nA== - dependencies: - jest-regex-util "^29.6.3" - jest-snapshot "^29.7.0" - -jest-resolve@^29.7.0: - version "29.7.0" - resolved "https://registry.yarnpkg.com/jest-resolve/-/jest-resolve-29.7.0.tgz#64d6a8992dd26f635ab0c01e5eef4399c6bcbc30" - integrity sha512-IOVhZSrg+UvVAshDSDtHyFCCBUl/Q3AAJv8iZ6ZjnZ74xzvwuzLXid9IIIPgTnY62SJjfuupMKZsZQRsCvxEgA== - dependencies: - chalk "^4.0.0" - graceful-fs "^4.2.9" - jest-haste-map "^29.7.0" - jest-pnp-resolver "^1.2.2" - jest-util "^29.7.0" - jest-validate "^29.7.0" - resolve "^1.20.0" - resolve.exports "^2.0.0" - slash "^3.0.0" - -jest-runner@^29.7.0: - version "29.7.0" - resolved "https://registry.yarnpkg.com/jest-runner/-/jest-runner-29.7.0.tgz#809af072d408a53dcfd2e849a4c976d3132f718e" - integrity sha512-fsc4N6cPCAahybGBfTRcq5wFR6fpLznMg47sY5aDpsoejOcVYFb07AHuSnR0liMcPTgBsA3ZJL6kFOjPdoNipQ== - dependencies: - "@jest/console" "^29.7.0" - "@jest/environment" "^29.7.0" - "@jest/test-result" "^29.7.0" - "@jest/transform" "^29.7.0" - "@jest/types" "^29.6.3" - "@types/node" "*" - chalk "^4.0.0" - emittery "^0.13.1" - graceful-fs "^4.2.9" - jest-docblock 
"^29.7.0" - jest-environment-node "^29.7.0" - jest-haste-map "^29.7.0" - jest-leak-detector "^29.7.0" - jest-message-util "^29.7.0" - jest-resolve "^29.7.0" - jest-runtime "^29.7.0" - jest-util "^29.7.0" - jest-watcher "^29.7.0" - jest-worker "^29.7.0" - p-limit "^3.1.0" - source-map-support "0.5.13" - -jest-runtime@^29.7.0: - version "29.7.0" - resolved "https://registry.yarnpkg.com/jest-runtime/-/jest-runtime-29.7.0.tgz#efecb3141cf7d3767a3a0cc8f7c9990587d3d817" - integrity sha512-gUnLjgwdGqW7B4LvOIkbKs9WGbn+QLqRQQ9juC6HndeDiezIwhDP+mhMwHWCEcfQ5RUXa6OPnFF8BJh5xegwwQ== - dependencies: - "@jest/environment" "^29.7.0" - "@jest/fake-timers" "^29.7.0" - "@jest/globals" "^29.7.0" - "@jest/source-map" "^29.6.3" - "@jest/test-result" "^29.7.0" - "@jest/transform" "^29.7.0" - "@jest/types" "^29.6.3" - "@types/node" "*" - chalk "^4.0.0" - cjs-module-lexer "^1.0.0" - collect-v8-coverage "^1.0.0" - glob "^7.1.3" - graceful-fs "^4.2.9" - jest-haste-map "^29.7.0" - jest-message-util "^29.7.0" - jest-mock "^29.7.0" - jest-regex-util "^29.6.3" - jest-resolve "^29.7.0" - jest-snapshot "^29.7.0" - jest-util "^29.7.0" - slash "^3.0.0" - strip-bom "^4.0.0" - -jest-snapshot@^29.7.0: - version "29.7.0" - resolved "https://registry.yarnpkg.com/jest-snapshot/-/jest-snapshot-29.7.0.tgz#c2c574c3f51865da1bb329036778a69bf88a6be5" - integrity sha512-Rm0BMWtxBcioHr1/OX5YCP8Uov4riHvKPknOGs804Zg9JGZgmIBkbtlxJC/7Z4msKYVbIJtfU+tKb8xlYNfdkw== - dependencies: - "@babel/core" "^7.11.6" - "@babel/generator" "^7.7.2" - "@babel/plugin-syntax-jsx" "^7.7.2" - "@babel/plugin-syntax-typescript" "^7.7.2" - "@babel/types" "^7.3.3" - "@jest/expect-utils" "^29.7.0" - "@jest/transform" "^29.7.0" - "@jest/types" "^29.6.3" - babel-preset-current-node-syntax "^1.0.0" - chalk "^4.0.0" - expect "^29.7.0" - graceful-fs "^4.2.9" - jest-diff "^29.7.0" - jest-get-type "^29.6.3" - jest-matcher-utils "^29.7.0" - jest-message-util "^29.7.0" - jest-util "^29.7.0" - natural-compare "^1.4.0" - pretty-format "^29.7.0" - semver "^7.5.3" - -jest-util@^29.0.0, jest-util@^29.7.0: - version "29.7.0" - resolved "https://registry.yarnpkg.com/jest-util/-/jest-util-29.7.0.tgz#23c2b62bfb22be82b44de98055802ff3710fc0bc" - integrity sha512-z6EbKajIpqGKU56y5KBUgy1dt1ihhQJgWzUlZHArA/+X2ad7Cb5iF+AK1EWVL/Bo7Rz9uurpqw6SiBCefUbCGA== - dependencies: - "@jest/types" "^29.6.3" - "@types/node" "*" - chalk "^4.0.0" - ci-info "^3.2.0" - graceful-fs "^4.2.9" - picomatch "^2.2.3" - -jest-validate@^29.7.0: - version "29.7.0" - resolved "https://registry.yarnpkg.com/jest-validate/-/jest-validate-29.7.0.tgz#7bf705511c64da591d46b15fce41400d52147d9c" - integrity sha512-ZB7wHqaRGVw/9hST/OuFUReG7M8vKeq0/J2egIGLdvjHCmYqGARhzXmtgi+gVeZ5uXFF219aOc3Ls2yLg27tkw== - dependencies: - "@jest/types" "^29.6.3" - camelcase "^6.2.0" - chalk "^4.0.0" - jest-get-type "^29.6.3" - leven "^3.1.0" - pretty-format "^29.7.0" - -jest-watcher@^29.7.0: - version "29.7.0" - resolved "https://registry.yarnpkg.com/jest-watcher/-/jest-watcher-29.7.0.tgz#7810d30d619c3a62093223ce6bb359ca1b28a2f2" - integrity sha512-49Fg7WXkU3Vl2h6LbLtMQ/HyB6rXSIX7SqvBLQmssRBGN9I0PNvPmAmCWSOY6SOvrjhI/F7/bGAv9RtnsPA03g== - dependencies: - "@jest/test-result" "^29.7.0" - "@jest/types" "^29.6.3" - "@types/node" "*" - ansi-escapes "^4.2.1" - chalk "^4.0.0" - emittery "^0.13.1" - jest-util "^29.7.0" - string-length "^4.0.1" - -jest-worker@^29.7.0: - version "29.7.0" - resolved "https://registry.yarnpkg.com/jest-worker/-/jest-worker-29.7.0.tgz#acad073acbbaeb7262bd5389e1bcf43e10058d4a" - integrity 
sha512-eIz2msL/EzL9UFTFFx7jBTkeZfku0yUAyZZZmJ93H2TYEiroIx2PQjEXcwYtYl8zXCxb+PAmA2hLIt/6ZEkPHw== - dependencies: - "@types/node" "*" - jest-util "^29.7.0" - merge-stream "^2.0.0" - supports-color "^8.0.0" - -jest@^29.5.0: - version "29.7.0" - resolved "https://registry.yarnpkg.com/jest/-/jest-29.7.0.tgz#994676fc24177f088f1c5e3737f5697204ff2613" - integrity sha512-NIy3oAFp9shda19hy4HK0HRTWKtPJmGdnvywu01nOqNC2vZg+Z+fvJDxpMQA88eb2I9EcafcdjYgsDthnYTvGw== - dependencies: - "@jest/core" "^29.7.0" - "@jest/types" "^29.6.3" - import-local "^3.0.2" - jest-cli "^29.7.0" - -js-tokens@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-4.0.0.tgz#19203fb59991df98e3a287050d4647cdeaf32499" - integrity sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ== - -js-yaml@^3.13.1: - version "3.14.1" - resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.14.1.tgz#dae812fdb3825fa306609a8717383c50c36a0537" - integrity sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g== - dependencies: - argparse "^1.0.7" - esprima "^4.0.0" - -js-yaml@^4.1.0: - version "4.1.0" - resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-4.1.0.tgz#c1fb65f8f5017901cdd2c951864ba18458a10602" - integrity sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA== - dependencies: - argparse "^2.0.1" - -jsesc@^2.5.1: - version "2.5.2" - resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-2.5.2.tgz#80564d2e483dacf6e8ef209650a67df3f0c283a4" - integrity sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA== - -json-buffer@3.0.1: - version "3.0.1" - resolved "https://registry.yarnpkg.com/json-buffer/-/json-buffer-3.0.1.tgz#9338802a30d3b6605fbe0613e094008ca8c05a13" - integrity sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ== - -json-parse-even-better-errors@^2.3.0: - version "2.3.1" - resolved "https://registry.yarnpkg.com/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz#7c47805a94319928e05777405dc12e1f7a4ee02d" - integrity sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w== - -json-schema-traverse@^0.4.1: - version "0.4.1" - resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz#69f6a87d9513ab8bb8fe63bdb0979c448e684660" - integrity sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg== - -json-stable-stringify-without-jsonify@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz#9db7b59496ad3f3cfef30a75142d2d930ad72651" - integrity sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw== - -json5@^2.2.3: - version "2.2.3" - resolved "https://registry.yarnpkg.com/json5/-/json5-2.2.3.tgz#78cd6f1a19bdc12b73db5ad0c61efd66c1e29283" - integrity sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg== - -keyv@^4.5.3: - version "4.5.3" - resolved "https://registry.yarnpkg.com/keyv/-/keyv-4.5.3.tgz#00873d2b046df737963157bd04f294ca818c9c25" - integrity sha512-QCiSav9WaX1PgETJ+SpNnx2PRRapJ/oRSXM4VO5OGYGSjrxbKPVFVhB3l2OCbLCk329N8qyAtsJjSjvVBWzEug== - dependencies: - json-buffer "3.0.1" - -kleur@^3.0.3: - version "3.0.3" - resolved 
"https://registry.yarnpkg.com/kleur/-/kleur-3.0.3.tgz#a79c9ecc86ee1ce3fa6206d1216c501f147fc07e" - integrity sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w== - -lemmy-js-client@0.19.0-rc.12: - version "0.19.0-rc.12" - resolved "https://registry.yarnpkg.com/lemmy-js-client/-/lemmy-js-client-0.19.0-rc.12.tgz#e3bd4e21b1966d583ab790ef70ece8394b012b48" - integrity sha512-1iu2fW9vlb3TrI+QR/ODP3+5pWZB0rUqL1wH09IzomDXohCqoQvfmXpwArmgF4Eq8GZgjkcfeMDC2gMrfw/i7Q== - dependencies: - cross-fetch "^3.1.5" - form-data "^4.0.0" - -leven@^3.1.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/leven/-/leven-3.1.0.tgz#77891de834064cccba82ae7842bb6b14a13ed7f2" - integrity sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A== - -levn@^0.4.1: - version "0.4.1" - resolved "https://registry.yarnpkg.com/levn/-/levn-0.4.1.tgz#ae4562c007473b932a6200d403268dd2fffc6ade" - integrity sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ== - dependencies: - prelude-ls "^1.2.1" - type-check "~0.4.0" - -lines-and-columns@^1.1.6: - version "1.2.4" - resolved "https://registry.yarnpkg.com/lines-and-columns/-/lines-and-columns-1.2.4.tgz#eca284f75d2965079309dc0ad9255abb2ebc1632" - integrity sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg== - -locate-path@^5.0.0: - version "5.0.0" - resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-5.0.0.tgz#1afba396afd676a6d42504d0a67a3a7eb9f62aa0" - integrity sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g== - dependencies: - p-locate "^4.1.0" - -locate-path@^6.0.0: - version "6.0.0" - resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-6.0.0.tgz#55321eb309febbc59c4801d931a72452a681d286" - integrity sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw== - dependencies: - p-locate "^5.0.0" - -lodash.memoize@4.x: - version "4.1.2" - resolved "https://registry.yarnpkg.com/lodash.memoize/-/lodash.memoize-4.1.2.tgz#bcc6c49a42a2840ed997f323eada5ecd182e0bfe" - integrity sha512-t7j+NzmgnQzTAYXcsHYLgimltOV1MXHtlOWf6GjL9Kj8GK5FInw5JotxvbOs+IvV1/Dzo04/fCGfLVs7aXb4Ag== - -lodash.merge@^4.6.2: - version "4.6.2" - resolved "https://registry.yarnpkg.com/lodash.merge/-/lodash.merge-4.6.2.tgz#558aa53b43b661e1925a0afdfa36a9a1085fe57a" - integrity sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ== - -lru-cache@^5.1.1: - version "5.1.1" - resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-5.1.1.tgz#1da27e6710271947695daf6848e847f01d84b920" - integrity sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w== - dependencies: - yallist "^3.0.2" - -lru-cache@^6.0.0: - version "6.0.0" - resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-6.0.0.tgz#6d6fe6570ebd96aaf90fcad1dafa3b2566db3a94" - integrity sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA== - dependencies: - yallist "^4.0.0" - -make-dir@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/make-dir/-/make-dir-4.0.0.tgz#c3c2307a771277cd9638305f915c29ae741b614e" - integrity sha512-hXdUTZYIVOt1Ex//jAQi+wTZZpUpwBj/0QsOzqegb3rGMMeJiSEu5xLHnYfBrRV4RH2+OCSOO95Is/7x1WJ4bw== - dependencies: - semver "^7.5.3" - -make-error@1.x: - version "1.3.6" - resolved 
"https://registry.yarnpkg.com/make-error/-/make-error-1.3.6.tgz#2eb2e37ea9b67c4891f684a1394799af484cf7a2" - integrity sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw== - -makeerror@1.0.12: - version "1.0.12" - resolved "https://registry.yarnpkg.com/makeerror/-/makeerror-1.0.12.tgz#3e5dd2079a82e812e983cc6610c4a2cb0eaa801a" - integrity sha512-JmqCvUhmt43madlpFzG4BQzG2Z3m6tvQDNKdClZnO3VbIudJYmxsT0FNJMeiB2+JTSlTQTSbU8QdesVmwJcmLg== - dependencies: - tmpl "1.0.5" - -merge-stream@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/merge-stream/-/merge-stream-2.0.0.tgz#52823629a14dd00c9770fb6ad47dc6310f2c1f60" - integrity sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w== - -merge2@^1.3.0, merge2@^1.4.1: - version "1.4.1" - resolved "https://registry.yarnpkg.com/merge2/-/merge2-1.4.1.tgz#4368892f885e907455a6fd7dc55c0c9d404990ae" - integrity sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg== - -micromatch@^4.0.4: - version "4.0.5" - resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-4.0.5.tgz#bc8999a7cbbf77cdc89f132f6e467051b49090c6" - integrity sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA== - dependencies: - braces "^3.0.2" - picomatch "^2.3.1" - -mime-db@1.52.0: - version "1.52.0" - resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.52.0.tgz#bbabcdc02859f4987301c856e3387ce5ec43bf70" - integrity sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg== - -mime-types@^2.1.12: - version "2.1.35" - resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.35.tgz#381a871b62a734450660ae3deee44813f70d959a" - integrity sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw== - dependencies: - mime-db "1.52.0" - -mimic-fn@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/mimic-fn/-/mimic-fn-2.1.0.tgz#7ed2c2ccccaf84d3ffcb7a69b57711fc2083401b" - integrity sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg== - -mimic-fn@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/mimic-fn/-/mimic-fn-4.0.0.tgz#60a90550d5cb0b239cca65d893b1a53b29871ecc" - integrity sha512-vqiC06CuhBTUdZH+RYl8sFrL096vA45Ok5ISO6sE/Mr1jRbGH4Csnhi8f3wKVl7x8mO4Au7Ir9D3Oyv1VYMFJw== - -minimatch@^3.0.4, minimatch@^3.0.5, minimatch@^3.1.1, minimatch@^3.1.2: - version "3.1.2" - resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.1.2.tgz#19cd194bfd3e428f049a70817c038d89ab4be35b" - integrity sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw== - dependencies: - brace-expansion "^1.1.7" - -ms@2.1.2: - version "2.1.2" - resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009" - integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w== - -natural-compare@^1.4.0: - version "1.4.0" - resolved "https://registry.yarnpkg.com/natural-compare/-/natural-compare-1.4.0.tgz#4abebfeed7541f2c27acfb29bdbbd15c8d5ba4f7" - integrity sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw== - -node-fetch@^2.6.12: - version "2.7.0" - resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.7.0.tgz#d0f0fa6e3e2dc1d27efcd8ad99d550bda94d187d" - integrity 
sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A== - dependencies: - whatwg-url "^5.0.0" - -node-int64@^0.4.0: - version "0.4.0" - resolved "https://registry.yarnpkg.com/node-int64/-/node-int64-0.4.0.tgz#87a9065cdb355d3182d8f94ce11188b825c68a3b" - integrity sha512-O5lz91xSOeoXP6DulyHfllpq+Eg00MWitZIbtPfoSEvqIHdl5gfcY6hYzDWnj0qD5tz52PI08u9qUvSVeUBeHw== - -node-releases@^2.0.13: - version "2.0.13" - resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-2.0.13.tgz#d5ed1627c23e3461e819b02e57b75e4899b1c81d" - integrity sha512-uYr7J37ae/ORWdZeQ1xxMJe3NtdmqMC/JZK+geofDrkLUApKRHPd18/TxtBOJ4A0/+uUIliorNrfYV6s1b02eQ== - -normalize-path@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-3.0.0.tgz#0dcd69ff23a1c9b11fd0978316644a0388216a65" - integrity sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA== - -npm-run-path@^4.0.1: - version "4.0.1" - resolved "https://registry.yarnpkg.com/npm-run-path/-/npm-run-path-4.0.1.tgz#b7ecd1e5ed53da8e37a55e1c2269e0b97ed748ea" - integrity sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw== - dependencies: - path-key "^3.0.0" - -npm-run-path@^5.1.0: - version "5.1.0" - resolved "https://registry.yarnpkg.com/npm-run-path/-/npm-run-path-5.1.0.tgz#bc62f7f3f6952d9894bd08944ba011a6ee7b7e00" - integrity sha512-sJOdmRGrY2sjNTRMbSvluQqg+8X7ZK61yvzBEIDhz4f8z1TZFYABsqjjCBd/0PUNE9M6QDgHJXQkGUEm7Q+l9Q== - dependencies: - path-key "^4.0.0" - -once@^1.3.0: - version "1.4.0" - resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1" - integrity sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w== - dependencies: - wrappy "1" - -onetime@^5.1.2: - version "5.1.2" - resolved "https://registry.yarnpkg.com/onetime/-/onetime-5.1.2.tgz#d0e96ebb56b07476df1dd9c4806e5237985ca45e" - integrity sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg== - dependencies: - mimic-fn "^2.1.0" - -onetime@^6.0.0: - version "6.0.0" - resolved "https://registry.yarnpkg.com/onetime/-/onetime-6.0.0.tgz#7c24c18ed1fd2e9bca4bd26806a33613c77d34b4" - integrity sha512-1FlR+gjXK7X+AsAHso35MnyN5KqGwJRi/31ft6x0M194ht7S+rWAvd7PHss9xSKMzE0asv1pyIHaJYq+BbacAQ== - dependencies: - mimic-fn "^4.0.0" - -open@^9.1.0: - version "9.1.0" - resolved "https://registry.yarnpkg.com/open/-/open-9.1.0.tgz#684934359c90ad25742f5a26151970ff8c6c80b6" - integrity sha512-OS+QTnw1/4vrf+9hh1jc1jnYjzSG4ttTBB8UxOwAnInG3Uo4ssetzC1ihqaIHjLJnA5GGlRl6QlZXOTQhRBUvg== - dependencies: - default-browser "^4.0.0" - define-lazy-prop "^3.0.0" - is-inside-container "^1.0.0" - is-wsl "^2.2.0" - -optionator@^0.9.3: - version "0.9.3" - resolved "https://registry.yarnpkg.com/optionator/-/optionator-0.9.3.tgz#007397d44ed1872fdc6ed31360190f81814e2c64" - integrity sha512-JjCoypp+jKn1ttEFExxhetCKeJt9zhAgAve5FXHixTvFDW/5aEktX9bufBKLRRMdU7bNtpLfcGu94B3cdEJgjg== - dependencies: - "@aashutoshrathi/word-wrap" "^1.2.3" - deep-is "^0.1.3" - fast-levenshtein "^2.0.6" - levn "^0.4.1" - prelude-ls "^1.2.1" - type-check "^0.4.0" - -p-limit@^2.2.0: - version "2.3.0" - resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-2.3.0.tgz#3dd33c647a214fdfffd835933eb086da0dc21db1" - integrity sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w== - dependencies: - p-try "^2.0.0" - -p-limit@^3.0.2, p-limit@^3.1.0: - version 
"3.1.0" - resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-3.1.0.tgz#e1daccbe78d0d1388ca18c64fea38e3e57e3706b" - integrity sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ== - dependencies: - yocto-queue "^0.1.0" - -p-locate@^4.1.0: - version "4.1.0" - resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-4.1.0.tgz#a3428bb7088b3a60292f66919278b7c297ad4f07" - integrity sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A== - dependencies: - p-limit "^2.2.0" - -p-locate@^5.0.0: - version "5.0.0" - resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-5.0.0.tgz#83c8315c6785005e3bd021839411c9e110e6d834" - integrity sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw== - dependencies: - p-limit "^3.0.2" - -p-try@^2.0.0: - version "2.2.0" - resolved "https://registry.yarnpkg.com/p-try/-/p-try-2.2.0.tgz#cb2868540e313d61de58fafbe35ce9004d5540e6" - integrity sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ== - -parent-module@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/parent-module/-/parent-module-1.0.1.tgz#691d2709e78c79fae3a156622452d00762caaaa2" - integrity sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g== - dependencies: - callsites "^3.0.0" - -parse-json@^5.2.0: - version "5.2.0" - resolved "https://registry.yarnpkg.com/parse-json/-/parse-json-5.2.0.tgz#c76fc66dee54231c962b22bcc8a72cf2f99753cd" - integrity sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg== - dependencies: - "@babel/code-frame" "^7.0.0" - error-ex "^1.3.1" - json-parse-even-better-errors "^2.3.0" - lines-and-columns "^1.1.6" - -path-exists@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-4.0.0.tgz#513bdbe2d3b95d7762e8c1137efa195c6c61b5b3" - integrity sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w== - -path-is-absolute@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f" - integrity sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg== - -path-key@^3.0.0, path-key@^3.1.0: - version "3.1.1" - resolved "https://registry.yarnpkg.com/path-key/-/path-key-3.1.1.tgz#581f6ade658cbba65a0d3380de7753295054f375" - integrity sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q== - -path-key@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/path-key/-/path-key-4.0.0.tgz#295588dc3aee64154f877adb9d780b81c554bf18" - integrity sha512-haREypq7xkM7ErfgIyA0z+Bj4AGKlMSdlQE2jvJo6huWD1EdkKYV+G/T4nq0YEF2vgTT8kqMFKo1uHn950r4SQ== - -path-parse@^1.0.7: - version "1.0.7" - resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.7.tgz#fbc114b60ca42b30d9daf5858e4bd68bbedb6735" - integrity sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw== - -path-type@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/path-type/-/path-type-4.0.0.tgz#84ed01c0a7ba380afe09d90a8c180dcd9d03043b" - integrity sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw== - -picocolors@^1.0.0: - version "1.0.0" - resolved 
"https://registry.yarnpkg.com/picocolors/-/picocolors-1.0.0.tgz#cb5bdc74ff3f51892236eaf79d68bc44564ab81c" - integrity sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ== - -picomatch@^2.0.4, picomatch@^2.2.3, picomatch@^2.3.1: - version "2.3.1" - resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-2.3.1.tgz#3ba3833733646d9d3e4995946c1365a67fb07a42" - integrity sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA== - -pirates@^4.0.4: - version "4.0.6" - resolved "https://registry.yarnpkg.com/pirates/-/pirates-4.0.6.tgz#3018ae32ecfcff6c29ba2267cbf21166ac1f36b9" - integrity sha512-saLsH7WeYYPiD25LDuLRRY/i+6HaPYr6G1OUlN39otzkSTxKnubR9RTxS3/Kk50s1g2JTgFwWQDQyplC5/SHZg== - -pkg-dir@^4.2.0: - version "4.2.0" - resolved "https://registry.yarnpkg.com/pkg-dir/-/pkg-dir-4.2.0.tgz#f099133df7ede422e81d1d8448270eeb3e4261f3" - integrity sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ== - dependencies: - find-up "^4.0.0" - -prelude-ls@^1.2.1: - version "1.2.1" - resolved "https://registry.yarnpkg.com/prelude-ls/-/prelude-ls-1.2.1.tgz#debc6489d7a6e6b0e7611888cec880337d316396" - integrity sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g== - -prettier-linter-helpers@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/prettier-linter-helpers/-/prettier-linter-helpers-1.0.0.tgz#d23d41fe1375646de2d0104d3454a3008802cf7b" - integrity sha512-GbK2cP9nraSSUF9N2XwUwqfzlAFlMNYYl+ShE/V+H8a9uNl/oUqB1w2EL54Jh0OlyRSd8RfWYJ3coVS4TROP2w== - dependencies: - fast-diff "^1.1.2" - -prettier@^3.0.0: - version "3.0.3" - resolved "https://registry.yarnpkg.com/prettier/-/prettier-3.0.3.tgz#432a51f7ba422d1469096c0fdc28e235db8f9643" - integrity sha512-L/4pUDMxcNa8R/EthV08Zt42WBO4h1rarVtK0K+QJG0X187OLo7l699jWw0GKuwzkPQ//jMFA/8Xm6Fh3J/DAg== - -pretty-format@^29.0.0, pretty-format@^29.7.0: - version "29.7.0" - resolved "https://registry.yarnpkg.com/pretty-format/-/pretty-format-29.7.0.tgz#ca42c758310f365bfa71a0bda0a807160b776812" - integrity sha512-Pdlw/oPxN+aXdmM9R00JVC9WVFoCLTKJvDVLgmJ+qAffBMxsV85l/Lu7sNx4zSzPyoL2euImuEwHhOXdEgNFZQ== - dependencies: - "@jest/schemas" "^29.6.3" - ansi-styles "^5.0.0" - react-is "^18.0.0" - -prompts@^2.0.1: - version "2.4.2" - resolved "https://registry.yarnpkg.com/prompts/-/prompts-2.4.2.tgz#7b57e73b3a48029ad10ebd44f74b01722a4cb069" - integrity sha512-NxNv/kLguCA7p3jE8oL2aEBsrJWgAakBpgmgK6lpPWV+WuOmY6r2/zbAVnP+T8bQlA0nzHXSJSJW0Hq7ylaD2Q== - dependencies: - kleur "^3.0.3" - sisteransi "^1.0.5" - -punycode@^2.1.0: - version "2.3.0" - resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.3.0.tgz#f67fa67c94da8f4d0cfff981aee4118064199b8f" - integrity sha512-rRV+zQD8tVFys26lAGR9WUuS4iUAngJScM+ZRSKtvl5tKeZ2t5bvdNFdNHBW9FWR4guGHlgmsZ1G7BSm2wTbuA== - -pure-rand@^6.0.0: - version "6.0.4" - resolved "https://registry.yarnpkg.com/pure-rand/-/pure-rand-6.0.4.tgz#50b737f6a925468679bff00ad20eade53f37d5c7" - integrity sha512-LA0Y9kxMYv47GIPJy6MI84fqTd2HmYZI83W/kM/SkKfDlajnZYfmXFTxkbY+xSBPkLJxltMa9hIkmdc29eguMA== - -queue-microtask@^1.2.2: - version "1.2.3" - resolved "https://registry.yarnpkg.com/queue-microtask/-/queue-microtask-1.2.3.tgz#4929228bbc724dfac43e0efb058caf7b6cfb6243" - integrity sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A== - -react-is@^18.0.0: - version "18.2.0" - resolved 
"https://registry.yarnpkg.com/react-is/-/react-is-18.2.0.tgz#199431eeaaa2e09f86427efbb4f1473edb47609b" - integrity sha512-xWGDIW6x921xtzPkhiULtthJHoJvBbF3q26fzloPCK0hsvxtPVelvftw3zjbHWSkR2km9Z+4uxbDDK/6Zw9B8w== - -require-directory@^2.1.1: - version "2.1.1" - resolved "https://registry.yarnpkg.com/require-directory/-/require-directory-2.1.1.tgz#8c64ad5fd30dab1c976e2344ffe7f792a6a6df42" - integrity sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q== - -resolve-cwd@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/resolve-cwd/-/resolve-cwd-3.0.0.tgz#0f0075f1bb2544766cf73ba6a6e2adfebcb13f2d" - integrity sha512-OrZaX2Mb+rJCpH/6CpSqt9xFVpN++x01XnN2ie9g6P5/3xelLAkXWVADpdz1IHD/KFfEXyE6V0U01OQ3UO2rEg== - dependencies: - resolve-from "^5.0.0" - -resolve-from@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-4.0.0.tgz#4abcd852ad32dd7baabfe9b40e00a36db5f392e6" - integrity sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g== - -resolve-from@^5.0.0: - version "5.0.0" - resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-5.0.0.tgz#c35225843df8f776df21c57557bc087e9dfdfc69" - integrity sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw== - -resolve.exports@^2.0.0: - version "2.0.2" - resolved "https://registry.yarnpkg.com/resolve.exports/-/resolve.exports-2.0.2.tgz#f8c934b8e6a13f539e38b7098e2e36134f01e800" - integrity sha512-X2UW6Nw3n/aMgDVy+0rSqgHlv39WZAlZrXCdnbyEiKm17DSqHX4MmQMaST3FbeWR5FTuRcUwYAziZajji0Y7mg== - -resolve@^1.20.0: - version "1.22.6" - resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.22.6.tgz#dd209739eca3aef739c626fea1b4f3c506195362" - integrity sha512-njhxM7mV12JfufShqGy3Rz8j11RPdLy4xi15UurGJeoHLfJpVXKdh3ueuOqbYUcDZnffr6X739JBo5LzyahEsw== - dependencies: - is-core-module "^2.13.0" - path-parse "^1.0.7" - supports-preserve-symlinks-flag "^1.0.0" - -reusify@^1.0.4: - version "1.0.4" - resolved "https://registry.yarnpkg.com/reusify/-/reusify-1.0.4.tgz#90da382b1e126efc02146e90845a88db12925d76" - integrity sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw== - -rimraf@^3.0.2: - version "3.0.2" - resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-3.0.2.tgz#f1a5402ba6220ad52cc1282bac1ae3aa49fd061a" - integrity sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA== - dependencies: - glob "^7.1.3" - -run-applescript@^5.0.0: - version "5.0.0" - resolved "https://registry.yarnpkg.com/run-applescript/-/run-applescript-5.0.0.tgz#e11e1c932e055d5c6b40d98374e0268d9b11899c" - integrity sha512-XcT5rBksx1QdIhlFOCtgZkB99ZEouFZ1E2Kc2LHqNW13U3/74YGdkQRmThTwxy4QIyookibDKYZOPqX//6BlAg== - dependencies: - execa "^5.0.0" - -run-parallel@^1.1.9: - version "1.2.0" - resolved "https://registry.yarnpkg.com/run-parallel/-/run-parallel-1.2.0.tgz#66d1368da7bdf921eb9d95bd1a9229e7f21a43ee" - integrity sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA== - dependencies: - queue-microtask "^1.2.2" - -semver@^6.3.0, semver@^6.3.1: - version "6.3.1" - resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.1.tgz#556d2ef8689146e46dcea4bfdd095f3434dffcb4" - integrity sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA== - -semver@^7.5.3, semver@^7.5.4: - version "7.5.4" - resolved 
"https://registry.yarnpkg.com/semver/-/semver-7.5.4.tgz#483986ec4ed38e1c6c48c34894a9182dbff68a6e" - integrity sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA== - dependencies: - lru-cache "^6.0.0" - -shebang-command@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/shebang-command/-/shebang-command-2.0.0.tgz#ccd0af4f8835fbdc265b82461aaf0c36663f34ea" - integrity sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA== - dependencies: - shebang-regex "^3.0.0" - -shebang-regex@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-3.0.0.tgz#ae16f1644d873ecad843b0307b143362d4c42172" - integrity sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A== - -signal-exit@^3.0.3, signal-exit@^3.0.7: - version "3.0.7" - resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.7.tgz#a9a1767f8af84155114eaabd73f99273c8f59ad9" - integrity sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ== - -sisteransi@^1.0.5: - version "1.0.5" - resolved "https://registry.yarnpkg.com/sisteransi/-/sisteransi-1.0.5.tgz#134d681297756437cc05ca01370d3a7a571075ed" - integrity sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg== - -slash@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/slash/-/slash-3.0.0.tgz#6539be870c165adbd5240220dbe361f1bc4d4634" - integrity sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q== - -source-map-support@0.5.13: - version "0.5.13" - resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.13.tgz#31b24a9c2e73c2de85066c0feb7d44767ed52932" - integrity sha512-SHSKFHadjVA5oR4PPqhtAVdcBWwRYVd6g6cAXnIbRiIwc2EhPrTuKUBdSLvlEKyIP3GCf89fltvcZiP9MMFA1w== - dependencies: - buffer-from "^1.0.0" - source-map "^0.6.0" - -source-map@^0.6.0, source-map@^0.6.1: - version "0.6.1" - resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.6.1.tgz#74722af32e9614e9c287a8d0bbde48b5e2f1a263" - integrity sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g== - -sprintf-js@~1.0.2: - version "1.0.3" - resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c" - integrity sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g== - -stack-utils@^2.0.3: - version "2.0.6" - resolved "https://registry.yarnpkg.com/stack-utils/-/stack-utils-2.0.6.tgz#aaf0748169c02fc33c8232abccf933f54a1cc34f" - integrity sha512-XlkWvfIm6RmsWtNJx+uqtKLS8eqFbxUg0ZzLXqY0caEy9l7hruX8IpiDnjsLavoBgqCCR71TqWO8MaXYheJ3RQ== - dependencies: - escape-string-regexp "^2.0.0" - -string-length@^4.0.1: - version "4.0.2" - resolved "https://registry.yarnpkg.com/string-length/-/string-length-4.0.2.tgz#a8a8dc7bd5c1a82b9b3c8b87e125f66871b6e57a" - integrity sha512-+l6rNN5fYHNhZZy41RXsYptCjA2Igmq4EG7kZAYFQI1E1VTXarr6ZPXBg6eq7Y6eK4FEhY6AJlyuFIb/v/S0VQ== - dependencies: - char-regex "^1.0.2" - strip-ansi "^6.0.0" - -string-width@^4.1.0, string-width@^4.2.0, string-width@^4.2.3: - version "4.2.3" - resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010" - integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g== - dependencies: - emoji-regex "^8.0.0" - 
is-fullwidth-code-point "^3.0.0" - strip-ansi "^6.0.1" - -strip-ansi@^6.0.0, strip-ansi@^6.0.1: - version "6.0.1" - resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9" - integrity sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A== - dependencies: - ansi-regex "^5.0.1" - -strip-bom@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-4.0.0.tgz#9c3505c1db45bcedca3d9cf7a16f5c5aa3901878" - integrity sha512-3xurFv5tEgii33Zi8Jtp55wEIILR9eh34FAW00PZf+JnSsTmV/ioewSgQl97JHvgjoRGwPShsWm+IdrxB35d0w== - -strip-final-newline@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/strip-final-newline/-/strip-final-newline-2.0.0.tgz#89b852fb2fcbe936f6f4b3187afb0a12c1ab58ad" - integrity sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA== - -strip-final-newline@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/strip-final-newline/-/strip-final-newline-3.0.0.tgz#52894c313fbff318835280aed60ff71ebf12b8fd" - integrity sha512-dOESqjYr96iWYylGObzd39EuNTa5VJxyvVAEm5Jnh7KGo75V43Hk1odPQkNDyXNmUR6k+gEiDVXnjB8HJ3crXw== - -strip-json-comments@^3.1.1: - version "3.1.1" - resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-3.1.1.tgz#31f1281b3832630434831c310c01cccda8cbe006" - integrity sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig== - -supports-color@^5.3.0: - version "5.5.0" - resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-5.5.0.tgz#e2e69a44ac8772f78a1ec0b35b689df6530efc8f" - integrity sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow== - dependencies: - has-flag "^3.0.0" - -supports-color@^7.1.0: - version "7.2.0" - resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-7.2.0.tgz#1b7dcdcb32b8138801b3e478ba6a51caa89648da" - integrity sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw== - dependencies: - has-flag "^4.0.0" - -supports-color@^8.0.0: - version "8.1.1" - resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-8.1.1.tgz#cd6fc17e28500cff56c1b86c0a7fd4a54a73005c" - integrity sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q== - dependencies: - has-flag "^4.0.0" - -supports-preserve-symlinks-flag@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz#6eda4bd344a3c94aea376d4cc31bc77311039e09" - integrity sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w== - -synckit@^0.8.5: - version "0.8.5" - resolved "https://registry.yarnpkg.com/synckit/-/synckit-0.8.5.tgz#b7f4358f9bb559437f9f167eb6bc46b3c9818fa3" - integrity sha512-L1dapNV6vu2s/4Sputv8xGsCdAVlb5nRDMFU/E27D44l5U6cw1g0dGd45uLc+OXjNMmF4ntiMdCimzcjFKQI8Q== - dependencies: - "@pkgr/utils" "^2.3.1" - tslib "^2.5.0" - -test-exclude@^6.0.0: - version "6.0.0" - resolved "https://registry.yarnpkg.com/test-exclude/-/test-exclude-6.0.0.tgz#04a8698661d805ea6fa293b6cb9e63ac044ef15e" - integrity sha512-cAGWPIyOHU6zlmg88jwm7VRyXnMN7iV68OGAbYDk/Mh/xC/pzVPlQtY6ngoIH/5/tciuhGfvESU8GrHrcxD56w== - dependencies: - "@istanbuljs/schema" "^0.1.2" - glob "^7.1.4" - minimatch "^3.0.4" - -text-table@^0.2.0: - version "0.2.0" - resolved 
"https://registry.yarnpkg.com/text-table/-/text-table-0.2.0.tgz#7f5ee823ae805207c00af2df4a84ec3fcfa570b4" - integrity sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw== - -titleize@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/titleize/-/titleize-3.0.0.tgz#71c12eb7fdd2558aa8a44b0be83b8a76694acd53" - integrity sha512-KxVu8EYHDPBdUYdKZdKtU2aj2XfEx9AfjXxE/Aj0vT06w2icA09Vus1rh6eSu1y01akYg6BjIK/hxyLJINoMLQ== - -tmpl@1.0.5: - version "1.0.5" - resolved "https://registry.yarnpkg.com/tmpl/-/tmpl-1.0.5.tgz#8683e0b902bb9c20c4f726e3c0b69f36518c07cc" - integrity sha512-3f0uOEAQwIqGuWW2MVzYg8fV/QNnc/IpuJNG837rLuczAaLVHslWHZQj4IGiEl5Hs3kkbhwL9Ab7Hrsmuj+Smw== - -to-fast-properties@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/to-fast-properties/-/to-fast-properties-2.0.0.tgz#dc5e698cbd079265bc73e0377681a4e4e83f616e" - integrity sha512-/OaKK0xYrs3DmxRYqL/yDc+FxFUVYhDlXMhRmv3z915w2HF1tnN1omB354j8VUGO/hbRzyD6Y3sA7v7GS/ceog== - -to-regex-range@^5.0.1: - version "5.0.1" - resolved "https://registry.yarnpkg.com/to-regex-range/-/to-regex-range-5.0.1.tgz#1648c44aae7c8d988a326018ed72f5b4dd0392e4" - integrity sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ== - dependencies: - is-number "^7.0.0" - -tr46@~0.0.3: - version "0.0.3" - resolved "https://registry.yarnpkg.com/tr46/-/tr46-0.0.3.tgz#8184fd347dac9cdc185992f3a6622e14b9d9ab6a" - integrity sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw== - -ts-api-utils@^1.0.1: - version "1.0.3" - resolved "https://registry.yarnpkg.com/ts-api-utils/-/ts-api-utils-1.0.3.tgz#f12c1c781d04427313dbac808f453f050e54a331" - integrity sha512-wNMeqtMz5NtwpT/UZGY5alT+VoKdSsOOP/kqHFcUW1P/VRhH2wJ48+DN2WwUliNbQ976ETwDL0Ifd2VVvgonvg== - -ts-jest@^29.1.0: - version "29.1.1" - resolved "https://registry.yarnpkg.com/ts-jest/-/ts-jest-29.1.1.tgz#f58fe62c63caf7bfcc5cc6472082f79180f0815b" - integrity sha512-D6xjnnbP17cC85nliwGiL+tpoKN0StpgE0TeOjXQTU6MVCfsB4v7aW05CgQ/1OywGb0x/oy9hHFnN+sczTiRaA== - dependencies: - bs-logger "0.x" - fast-json-stable-stringify "2.x" - jest-util "^29.0.0" - json5 "^2.2.3" - lodash.memoize "4.x" - make-error "1.x" - semver "^7.5.3" - yargs-parser "^21.0.1" - -tslib@^2.5.0, tslib@^2.6.0: - version "2.6.2" - resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.6.2.tgz#703ac29425e7b37cd6fd456e92404d46d1f3e4ae" - integrity sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q== - -type-check@^0.4.0, type-check@~0.4.0: - version "0.4.0" - resolved "https://registry.yarnpkg.com/type-check/-/type-check-0.4.0.tgz#07b8203bfa7056c0657050e3ccd2c37730bab8f1" - integrity sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew== - dependencies: - prelude-ls "^1.2.1" - -type-detect@4.0.8: - version "4.0.8" - resolved "https://registry.yarnpkg.com/type-detect/-/type-detect-4.0.8.tgz#7646fb5f18871cfbb7749e69bd39a6388eb7450c" - integrity sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g== - -type-fest@^0.20.2: - version "0.20.2" - resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.20.2.tgz#1bf207f4b28f91583666cb5fbd327887301cd5f4" - integrity sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ== - -type-fest@^0.21.3: - version "0.21.3" - resolved 
"https://registry.yarnpkg.com/type-fest/-/type-fest-0.21.3.tgz#d260a24b0198436e133fa26a524a6d65fa3b2e37" - integrity sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w== - -typescript@^5.0.4: - version "5.2.2" - resolved "https://registry.yarnpkg.com/typescript/-/typescript-5.2.2.tgz#5ebb5e5a5b75f085f22bc3f8460fba308310fa78" - integrity sha512-mI4WrpHsbCIcwT9cF4FZvr80QUeKvsUsUvKDoR+X/7XHQH98xYD8YHZg7ANtz2GtZt/CBq2QJ0thkGJMHfqc1w== - -undici-types@~5.25.1: - version "5.25.3" - resolved "https://registry.yarnpkg.com/undici-types/-/undici-types-5.25.3.tgz#e044115914c85f0bcbb229f346ab739f064998c3" - integrity sha512-Ga1jfYwRn7+cP9v8auvEXN1rX3sWqlayd4HP7OKk4mZWylEmu3KzXDUGrQUN6Ol7qo1gPvB2e5gX6udnyEPgdA== - -untildify@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/untildify/-/untildify-4.0.0.tgz#2bc947b953652487e4600949fb091e3ae8cd919b" - integrity sha512-KK8xQ1mkzZeg9inewmFVDNkg3l5LUhoq9kN6iWYB/CC9YMG8HA+c1Q8HwDe6dEX7kErrEVNVBO3fWsVq5iDgtw== - -update-browserslist-db@^1.0.13: - version "1.0.13" - resolved "https://registry.yarnpkg.com/update-browserslist-db/-/update-browserslist-db-1.0.13.tgz#3c5e4f5c083661bd38ef64b6328c26ed6c8248c4" - integrity sha512-xebP81SNcPuNpPP3uzeW1NYXxI3rxyJzF3pD6sH4jE7o/IX+WtSpwnVU+qIsDPyk0d3hmFQ7mjqc6AtV604hbg== - dependencies: - escalade "^3.1.1" - picocolors "^1.0.0" - -uri-js@^4.2.2: - version "4.4.1" - resolved "https://registry.yarnpkg.com/uri-js/-/uri-js-4.4.1.tgz#9b1a52595225859e55f669d928f88c6c57f2a77e" - integrity sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg== - dependencies: - punycode "^2.1.0" - -v8-to-istanbul@^9.0.1: - version "9.1.0" - resolved "https://registry.yarnpkg.com/v8-to-istanbul/-/v8-to-istanbul-9.1.0.tgz#1b83ed4e397f58c85c266a570fc2558b5feb9265" - integrity sha512-6z3GW9x8G1gd+JIIgQQQxXuiJtCXeAjp6RaPEPLv62mH3iPHPxV6W3robxtCzNErRo6ZwTmzWhsbNvjyEBKzKA== - dependencies: - "@jridgewell/trace-mapping" "^0.3.12" - "@types/istanbul-lib-coverage" "^2.0.1" - convert-source-map "^1.6.0" - -walker@^1.0.8: - version "1.0.8" - resolved "https://registry.yarnpkg.com/walker/-/walker-1.0.8.tgz#bd498db477afe573dc04185f011d3ab8a8d7653f" - integrity sha512-ts/8E8l5b7kY0vlWLewOkDXMmPdLcVV4GmOQLyxuSswIJsweeFZtAsMF7k1Nszz+TYBQrlYRmzOnr398y1JemQ== - dependencies: - makeerror "1.0.12" - -webidl-conversions@^3.0.0: - version "3.0.1" - resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-3.0.1.tgz#24534275e2a7bc6be7bc86611cc16ae0a5654871" - integrity sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ== - -whatwg-url@^5.0.0: - version "5.0.0" - resolved "https://registry.yarnpkg.com/whatwg-url/-/whatwg-url-5.0.0.tgz#966454e8765462e37644d3626f6742ce8b70965d" - integrity sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw== - dependencies: - tr46 "~0.0.3" - webidl-conversions "^3.0.0" - -which@^2.0.1: - version "2.0.2" - resolved "https://registry.yarnpkg.com/which/-/which-2.0.2.tgz#7c6a8dd0a636a0327e10b59c9286eee93f3f51b1" - integrity sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA== - dependencies: - isexe "^2.0.0" - -wrap-ansi@^7.0.0: - version "7.0.0" - resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-7.0.0.tgz#67e145cff510a6a6984bdf1152911d69d2eb9e43" - integrity sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q== - dependencies: - ansi-styles 
"^4.0.0" - string-width "^4.1.0" - strip-ansi "^6.0.0" - -wrappy@1: - version "1.0.2" - resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f" - integrity sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ== - -write-file-atomic@^4.0.2: - version "4.0.2" - resolved "https://registry.yarnpkg.com/write-file-atomic/-/write-file-atomic-4.0.2.tgz#a9df01ae5b77858a027fd2e80768ee433555fcfd" - integrity sha512-7KxauUdBmSdWnmpaGFg+ppNjKF8uNLry8LyzjauQDOVONfFLNKrKvQOxZ/VuTIcS/gge/YNahf5RIIQWTSarlg== - dependencies: - imurmurhash "^0.1.4" - signal-exit "^3.0.7" - -y18n@^5.0.5: - version "5.0.8" - resolved "https://registry.yarnpkg.com/y18n/-/y18n-5.0.8.tgz#7f4934d0f7ca8c56f95314939ddcd2dd91ce1d55" - integrity sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA== - -yallist@^3.0.2: - version "3.1.1" - resolved "https://registry.yarnpkg.com/yallist/-/yallist-3.1.1.tgz#dbb7daf9bfd8bac9ab45ebf602b8cbad0d5d08fd" - integrity sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g== - -yallist@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/yallist/-/yallist-4.0.0.tgz#9bb92790d9c0effec63be73519e11a35019a3a72" - integrity sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A== - -yargs-parser@^21.0.1, yargs-parser@^21.1.1: - version "21.1.1" - resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-21.1.1.tgz#9096bceebf990d21bb31fa9516e0ede294a77d35" - integrity sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw== - -yargs@^17.3.1: - version "17.7.2" - resolved "https://registry.yarnpkg.com/yargs/-/yargs-17.7.2.tgz#991df39aca675a192b816e1e0363f9d75d2aa269" - integrity sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w== - dependencies: - cliui "^8.0.1" - escalade "^3.1.1" - get-caller-file "^2.0.5" - require-directory "^2.1.1" - string-width "^4.2.3" - y18n "^5.0.5" - yargs-parser "^21.1.1" - -yocto-queue@^0.1.0: - version "0.1.0" - resolved "https://registry.yarnpkg.com/yocto-queue/-/yocto-queue-0.1.0.tgz#0294eb3dee05028d31ee1a5fa2c556a6aaf10a1b" - integrity sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q== diff --git a/cliff.toml b/cliff.toml new file mode 100644 index 000000000..b5b8c3f16 --- /dev/null +++ b/cliff.toml @@ -0,0 +1,89 @@ +# git-cliff ~ configuration file +# https://git-cliff.org/docs/configuration + +[remote.github] +owner = "LemmyNet" +repo = "lemmy" +# token = "" + +[changelog] +# template for the changelog body +# https://keats.github.io/tera/docs/#introduction +body = """ +## What's Changed + +{%- if version %} in {{ version }}{%- endif -%} +{% for commit in commits %} + {% if commit.github.pr_title -%} + {%- set commit_message = commit.github.pr_title -%} + {%- else -%} + {%- set commit_message = commit.message -%} + {%- endif -%} + * {{ commit_message | split(pat="\n") | first | trim }}\ + {% if commit.github.username %} by @{{ commit.github.username }}{%- endif -%} + {% if commit.github.pr_number %} in \ + [#{{ commit.github.pr_number }}]({{ self::remote_url() }}/pull/{{ commit.github.pr_number }}) \ + {%- endif %} +{%- endfor -%} + +{%- if github -%} +{% if github.contributors | filter(attribute="is_first_time", value=true) | length != 0 %} + {% raw %}\n{% endraw -%} + ## New Contributors +{%- endif %}\ +{% for 
contributor in github.contributors | filter(attribute="is_first_time", value=true) %} + * @{{ contributor.username }} made their first contribution + {%- if contributor.pr_number %} in \ + [#{{ contributor.pr_number }}]({{ self::remote_url() }}/pull/{{ contributor.pr_number }}) \ + {%- endif %} +{%- endfor -%} +{%- endif -%} + +{% if version %} + {% if previous.version %} + **Full Changelog**: {{ self::remote_url() }}/compare/{{ previous.version }}...{{ version }} + {% endif %} +{% else -%} + {% raw %}\n{% endraw %} +{% endif %} + +{%- macro remote_url() -%} + https://github.com/{{ remote.github.owner }}/{{ remote.github.repo }} +{%- endmacro -%} +""" +# remove the leading and trailing whitespace from the template +trim = true +# changelog footer +footer = """ + +""" +# postprocessors +postprocessors = [] + +[git] +# parse the commits based on https://www.conventionalcommits.org +conventional_commits = false +# filter out the commits that are not conventional +filter_unconventional = true +# process each line of a commit as an individual commit +split_commits = false +# regex for preprocessing the commit messages +commit_preprocessors = [ + # remove issue numbers from commits + { pattern = '\((\w+\s)?#([0-9]+)\)', replace = "" }, +] +commit_parsers = [{ field = "author.name", pattern = "renovate", skip = true }] +# protect breaking changes from being skipped due to matching a skipping commit_parser +protect_breaking_commits = false +# filter out the commits that are not matched by commit parsers +filter_commits = false +# regex for matching git tags +tag_pattern = "[0-9].*" +# regex for skipping tags +skip_tags = "beta|alpha" +# regex for ignoring tags +ignore_tags = "rc" +# sort the tags topologically +topo_order = false +# sort the commits inside sections by oldest/newest order +sort_commits = "newest" diff --git a/config/defaults.hjson b/config/defaults.hjson index 4b616f677..f0b9d56df 100644 --- a/config/defaults.hjson +++ b/config/defaults.hjson @@ -34,17 +34,49 @@ # Name of the postgres database for lemmy database: "string" # Maximum number of active sql connections - pool_size: 95 + pool_size: 30 } - # Settings related to activitypub federation # Pictrs image server configuration. pictrs: { # Address where pictrs is available (for image hosting) url: "http://localhost:8080/" # Set a custom pictrs API key. ( Required for deleting images ) api_key: "string" - # Cache remote images - cache_remote_images: true + # Backwards compatibility with 0.18.1. False is equivalent to `image_mode: None`, true is + # equivalent to `image_mode: StoreLinkPreviews`. + # + # To be removed in 0.20 + cache_external_link_previews: true + # Specifies how to handle remote images, so that users don't have to connect directly to remote + # servers. + image_mode: + # Leave images unchanged, don't generate any local thumbnails for post urls. Instead the + # Opengraph image is directly returned as thumbnail + "None" + + # or + + # Generate thumbnails for external post urls and store them persistently in pict-rs. This + # ensures that they can be reliably retrieved and can be resized using pict-rs APIs. However + # it also increases storage usage. + # + # This is the default behaviour, and also matches Lemmy 0.18. + "StoreLinkPreviews" + + # or + + # If enabled, all images from remote domains are rewritten to pass through + # `/api/v3/image_proxy`, including embedded images in markdown. Images are stored temporarily + # in pict-rs for caching. 
This improves privacy as users don't expose their IP to untrusted + # servers, and decreases load on other servers. However it increases bandwidth use for the + # local server. + # + # Requires pict-rs 0.5 + "ProxyAllImages" + # Timeout for uploading images to pictrs (in seconds) + upload_timeout: 30 + # Resize post thumbnails to this maximum width/height. + max_thumbnail_size: 256 } # Email sending configuration. All options except login/password are mandatory email: { @@ -78,12 +110,17 @@ port: 8536 # Whether the site is available over TLS. Needs to be true for federation to work. tls_enabled: true - # The number of activitypub federation workers that can be in-flight concurrently - worker_count: 0 - # The number of activitypub federation retry workers that can be in-flight concurrently - retry_count: 0 + federation: { + # Limit to the number of concurrent outgoing federation requests per target instance. + # Set this to a higher value than 1 (e.g. 6) only if you have a huge instance (>10 activities + # per second) and if a receiving instance is not keeping up. + concurrent_sends_per_instance: 1 + } prometheus: { bind: "127.0.0.1" port: 10002 } + # Sets a response Access-Control-Allow-Origin CORS header + # https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Access-Control-Allow-Origin + cors_origin: "*" } diff --git a/crates/api/Cargo.toml b/crates/api/Cargo.toml index d9c4c1051..87879f6cd 100644 --- a/crates/api/Cargo.toml +++ b/crates/api/Cargo.toml @@ -1,5 +1,6 @@ [package] name = "lemmy_api" +publish = false version.workspace = true edition.workspace = true description.workspace = true @@ -13,6 +14,9 @@ name = "lemmy_api" path = "src/lib.rs" doctest = false +[lints] +workspace = true + [dependencies] lemmy_utils = { workspace = true } lemmy_db_schema = { workspace = true, features = ["full"] } @@ -22,22 +26,21 @@ lemmy_db_views_actor = { workspace = true, features = ["full"] } lemmy_api_common = { workspace = true, features = ["full"] } activitypub_federation = { workspace = true } bcrypt = { workspace = true } -serde = { workspace = true } actix-web = { workspace = true } base64 = { workspace = true } -uuid = { workspace = true } -async-trait = { workspace = true } captcha = { workspace = true } anyhow = { workspace = true } tracing = { workspace = true } chrono = { workspace = true } url = { workspace = true } -wav = "1.0.0" -sitemap-rs = "0.2.0" -totp-rs = { version = "5.0.2", features = ["gen_secret", "otpauth"] } -actix-web-httpauth = "0.8.1" +hound = "3.5.1" +sitemap-rs = "0.2.1" +totp-rs = { version = "5.6.0", features = ["gen_secret", "otpauth"] } +actix-web-httpauth = "0.8.2" [dev-dependencies] serial_test = { workspace = true } tokio = { workspace = true } elementtree = "1.2.3" +pretty_assertions = { workspace = true } +lemmy_api_crud = { workspace = true } diff --git a/crates/api/src/comment/distinguish.rs b/crates/api/src/comment/distinguish.rs index f29e01f76..a1b25ea44 100644 --- a/crates/api/src/comment/distinguish.rs +++ b/crates/api/src/comment/distinguish.rs @@ -9,15 +9,20 @@ use lemmy_db_schema::{ traits::Crud, }; use lemmy_db_views::structs::{CommentView, LocalUserView}; -use lemmy_utils::error::{LemmyError, LemmyErrorExt, LemmyErrorType}; +use lemmy_utils::error::{LemmyErrorExt, LemmyErrorType, LemmyResult}; #[tracing::instrument(skip(context))] pub async fn distinguish_comment( data: Json, context: Data, local_user_view: LocalUserView, -) -> Result, LemmyError> { - let orig_comment = CommentView::read(&mut context.pool(), data.comment_id, None).await?; +) 
-> LemmyResult> { + let orig_comment = CommentView::read( + &mut context.pool(), + data.comment_id, + Some(&local_user_view.local_user), + ) + .await?; check_community_user_action( &local_user_view.person, @@ -26,6 +31,11 @@ pub async fn distinguish_comment( ) .await?; + // Verify that only the creator can distinguish + if local_user_view.person.id != orig_comment.creator.id { + Err(LemmyErrorType::NoCommentEditAllowed)? + } + // Verify that only a mod or admin can distinguish a comment check_community_mod_action( &local_user_view.person, @@ -47,7 +57,7 @@ pub async fn distinguish_comment( let comment_view = CommentView::read( &mut context.pool(), data.comment_id, - Some(local_user_view.person.id), + Some(&local_user_view.local_user), ) .await?; diff --git a/crates/api/src/comment/like.rs b/crates/api/src/comment/like.rs index e11a3e155..e93b8513f 100644 --- a/crates/api/src/comment/like.rs +++ b/crates/api/src/comment/like.rs @@ -5,7 +5,7 @@ use lemmy_api_common::{ comment::{CommentResponse, CreateCommentLike}, context::LemmyContext, send_activity::{ActivityChannel, SendActivityData}, - utils::{check_community_user_action, check_downvotes_enabled}, + utils::{check_bot_account, check_community_user_action, check_local_vote_mode, VoteItem}, }; use lemmy_db_schema::{ newtypes::LocalUserId, @@ -17,7 +17,7 @@ use lemmy_db_schema::{ traits::Likeable, }; use lemmy_db_views::structs::{CommentView, LocalUserView}; -use lemmy_utils::error::{LemmyError, LemmyErrorExt, LemmyErrorType}; +use lemmy_utils::error::{LemmyErrorExt, LemmyErrorType, LemmyResult}; use std::ops::Deref; #[tracing::instrument(skip(context))] @@ -25,16 +25,28 @@ pub async fn like_comment( data: Json, context: Data, local_user_view: LocalUserView, -) -> Result, LemmyError> { +) -> LemmyResult> { let local_site = LocalSite::read(&mut context.pool()).await?; + let comment_id = data.comment_id; let mut recipient_ids = Vec::::new(); - // Don't do a downvote if site has downvotes disabled - check_downvotes_enabled(data.score, &local_site)?; + check_local_vote_mode( + data.score, + VoteItem::Comment(comment_id), + &local_site, + local_user_view.person.id, + &mut context.pool(), + ) + .await?; + check_bot_account(&local_user_view.person)?; - let comment_id = data.comment_id; - let orig_comment = CommentView::read(&mut context.pool(), comment_id, None).await?; + let orig_comment = CommentView::read( + &mut context.pool(), + comment_id, + Some(&local_user_view.local_user), + ) + .await?; check_community_user_action( &local_user_view.person, @@ -45,7 +57,7 @@ pub async fn like_comment( // Add parent poster or commenter to recipients let comment_reply = CommentReply::read_by_comment(&mut context.pool(), comment_id).await; - if let Ok(reply) = comment_reply { + if let Ok(Some(reply)) = comment_reply { let recipient_id = reply.recipient_id; if let Ok(local_recipient) = LocalUserView::read_person(&mut context.pool(), recipient_id).await { @@ -55,7 +67,6 @@ pub async fn like_comment( let like_form = CommentLikeForm { comment_id: data.comment_id, - post_id: orig_comment.post.id, person_id: local_user_view.person.id, score: data.score, }; @@ -74,12 +85,12 @@ pub async fn like_comment( } ActivityChannel::submit_activity( - SendActivityData::LikePostOrComment( - orig_comment.comment.ap_id, - local_user_view.person.clone(), - orig_comment.community, - data.score, - ), + SendActivityData::LikePostOrComment { + object_id: orig_comment.comment.ap_id, + actor: local_user_view.person.clone(), + community: orig_comment.community, + score: data.score, + }, 
&context, ) .await?; diff --git a/crates/api/src/comment/list_comment_likes.rs b/crates/api/src/comment/list_comment_likes.rs new file mode 100644 index 000000000..c9721b8a0 --- /dev/null +++ b/crates/api/src/comment/list_comment_likes.rs @@ -0,0 +1,35 @@ +use actix_web::web::{Data, Json, Query}; +use lemmy_api_common::{ + comment::{ListCommentLikes, ListCommentLikesResponse}, + context::LemmyContext, + utils::is_mod_or_admin, +}; +use lemmy_db_views::structs::{CommentView, LocalUserView, VoteView}; +use lemmy_utils::error::LemmyResult; + +/// Lists likes for a comment +#[tracing::instrument(skip(context))] +pub async fn list_comment_likes( + data: Query, + context: Data, + local_user_view: LocalUserView, +) -> LemmyResult> { + let comment_view = CommentView::read( + &mut context.pool(), + data.comment_id, + Some(&local_user_view.local_user), + ) + .await?; + + is_mod_or_admin( + &mut context.pool(), + &local_user_view.person, + comment_view.community.id, + ) + .await?; + + let comment_likes = + VoteView::list_for_comment(&mut context.pool(), data.comment_id, data.page, data.limit).await?; + + Ok(Json(ListCommentLikesResponse { comment_likes })) +} diff --git a/crates/api/src/comment/mod.rs b/crates/api/src/comment/mod.rs index 8caeaf8b0..9830e295d 100644 --- a/crates/api/src/comment/mod.rs +++ b/crates/api/src/comment/mod.rs @@ -1,3 +1,4 @@ pub mod distinguish; pub mod like; +pub mod list_comment_likes; pub mod save; diff --git a/crates/api/src/comment/save.rs b/crates/api/src/comment/save.rs index 95c08e701..6efa6296d 100644 --- a/crates/api/src/comment/save.rs +++ b/crates/api/src/comment/save.rs @@ -8,14 +8,14 @@ use lemmy_db_schema::{ traits::Saveable, }; use lemmy_db_views::structs::{CommentView, LocalUserView}; -use lemmy_utils::error::{LemmyError, LemmyErrorExt, LemmyErrorType}; +use lemmy_utils::error::{LemmyErrorExt, LemmyErrorType, LemmyResult}; #[tracing::instrument(skip(context))] pub async fn save_comment( data: Json, context: Data, local_user_view: LocalUserView, -) -> Result, LemmyError> { +) -> LemmyResult> { let comment_saved_form = CommentSavedForm { comment_id: data.comment_id, person_id: local_user_view.person.id, @@ -32,8 +32,12 @@ pub async fn save_comment( } let comment_id = data.comment_id; - let person_id = local_user_view.person.id; - let comment_view = CommentView::read(&mut context.pool(), comment_id, Some(person_id)).await?; + let comment_view = CommentView::read( + &mut context.pool(), + comment_id, + Some(&local_user_view.local_user), + ) + .await?; Ok(Json(CommentResponse { comment_view, diff --git a/crates/api/src/comment_report/create.rs b/crates/api/src/comment_report/create.rs index be892acfe..a0ff4be77 100644 --- a/crates/api/src/comment_report/create.rs +++ b/crates/api/src/comment_report/create.rs @@ -5,7 +5,11 @@ use lemmy_api_common::{ comment::{CommentReportResponse, CreateCommentReport}, context::LemmyContext, send_activity::{ActivityChannel, SendActivityData}, - utils::{check_community_user_action, send_new_report_email_to_admins}, + utils::{ + check_comment_deleted_or_removed, + check_community_user_action, + send_new_report_email_to_admins, + }, }; use lemmy_db_schema::{ source::{ @@ -15,7 +19,7 @@ use lemmy_db_schema::{ traits::Reportable, }; use lemmy_db_views::structs::{CommentReportView, CommentView, LocalUserView}; -use lemmy_utils::error::{LemmyError, LemmyErrorExt, LemmyErrorType}; +use lemmy_utils::error::{LemmyErrorExt, LemmyErrorType, LemmyResult}; /// Creates a comment report and notifies the moderators of the community 
#[tracing::instrument(skip(context))] @@ -23,7 +27,7 @@ pub async fn create_comment_report( data: Json, context: Data, local_user_view: LocalUserView, -) -> Result, LemmyError> { +) -> LemmyResult> { let local_site = LocalSite::read(&mut context.pool()).await?; let reason = data.reason.trim().to_string(); @@ -31,7 +35,12 @@ pub async fn create_comment_report( let person_id = local_user_view.person.id; let comment_id = data.comment_id; - let comment_view = CommentView::read(&mut context.pool(), comment_id, None).await?; + let comment_view = CommentView::read( + &mut context.pool(), + comment_id, + Some(&local_user_view.local_user), + ) + .await?; check_community_user_action( &local_user_view.person, @@ -40,6 +49,9 @@ pub async fn create_comment_report( ) .await?; + // Don't allow creating reports for removed / deleted comments + check_comment_deleted_or_removed(&comment_view.comment)?; + let report_form = CommentReportForm { creator_id: person_id, comment_id, @@ -66,12 +78,12 @@ pub async fn create_comment_report( } ActivityChannel::submit_activity( - SendActivityData::CreateReport( - comment_view.comment.ap_id.inner().clone(), - local_user_view.person, - comment_view.community, - data.reason.clone(), - ), + SendActivityData::CreateReport { + object_id: comment_view.comment.ap_id.inner().clone(), + actor: local_user_view.person, + community: comment_view.community, + reason: data.reason.clone(), + }, &context, ) .await?; diff --git a/crates/api/src/comment_report/list.rs b/crates/api/src/comment_report/list.rs index 3d434deba..d2f723819 100644 --- a/crates/api/src/comment_report/list.rs +++ b/crates/api/src/comment_report/list.rs @@ -2,10 +2,10 @@ use actix_web::web::{Data, Json, Query}; use lemmy_api_common::{ comment::{ListCommentReports, ListCommentReportsResponse}, context::LemmyContext, - utils::check_community_mod_action_opt, + utils::check_community_mod_of_any_or_admin_action, }; use lemmy_db_views::{comment_report_view::CommentReportQuery, structs::LocalUserView}; -use lemmy_utils::error::LemmyError; +use lemmy_utils::error::LemmyResult; /// Lists comment reports for a community if an id is supplied /// or returns all comment reports for communities a user moderates @@ -14,16 +14,18 @@ pub async fn list_comment_reports( data: Query, context: Data, local_user_view: LocalUserView, -) -> Result, LemmyError> { +) -> LemmyResult> { let community_id = data.community_id; + let comment_id = data.comment_id; let unresolved_only = data.unresolved_only.unwrap_or_default(); - check_community_mod_action_opt(&local_user_view, community_id, &mut context.pool()).await?; + check_community_mod_of_any_or_admin_action(&local_user_view, &mut context.pool()).await?; let page = data.page; let limit = data.limit; let comment_reports = CommentReportQuery { community_id, + comment_id, unresolved_only, page, limit, diff --git a/crates/api/src/comment_report/resolve.rs b/crates/api/src/comment_report/resolve.rs index 41ebe0d00..a663fdf74 100644 --- a/crates/api/src/comment_report/resolve.rs +++ b/crates/api/src/comment_report/resolve.rs @@ -6,7 +6,7 @@ use lemmy_api_common::{ }; use lemmy_db_schema::{source::comment_report::CommentReport, traits::Reportable}; use lemmy_db_views::structs::{CommentReportView, LocalUserView}; -use lemmy_utils::error::{LemmyError, LemmyErrorExt, LemmyErrorType}; +use lemmy_utils::error::{LemmyErrorExt, LemmyErrorType, LemmyResult}; /// Resolves or unresolves a comment report and notifies the moderators of the community #[tracing::instrument(skip(context))] @@ -14,7 +14,7 @@ pub 
async fn resolve_comment_report( data: Json, context: Data, local_user_view: LocalUserView, -) -> Result, LemmyError> { +) -> LemmyResult> { let report_id = data.report_id; let person_id = local_user_view.person.id; let report = CommentReportView::read(&mut context.pool(), report_id, person_id).await?; @@ -23,7 +23,7 @@ pub async fn resolve_comment_report( check_community_mod_action( &local_user_view.person, report.community.id, - false, + true, &mut context.pool(), ) .await?; diff --git a/crates/api/src/community/add_mod.rs b/crates/api/src/community/add_mod.rs index 9d055c654..7d04f6bb0 100644 --- a/crates/api/src/community/add_mod.rs +++ b/crates/api/src/community/add_mod.rs @@ -9,20 +9,21 @@ use lemmy_api_common::{ use lemmy_db_schema::{ source::{ community::{Community, CommunityModerator, CommunityModeratorForm}, + local_user::LocalUser, moderator::{ModAddCommunity, ModAddCommunityForm}, }, traits::{Crud, Joinable}, }; use lemmy_db_views::structs::LocalUserView; use lemmy_db_views_actor::structs::CommunityModeratorView; -use lemmy_utils::error::{LemmyError, LemmyErrorExt, LemmyErrorType}; +use lemmy_utils::error::{LemmyErrorExt, LemmyErrorType, LemmyResult}; #[tracing::instrument(skip(context))] pub async fn add_mod_to_community( data: Json, context: Data, local_user_view: LocalUserView, -) -> Result, LemmyError> { +) -> LemmyResult> { let community_id = data.community_id; // Verify that only mods or admins can add mod @@ -33,9 +34,30 @@ pub async fn add_mod_to_community( &mut context.pool(), ) .await?; + + // If its a mod removal, also check that you're a higher mod. + if !data.added { + LocalUser::is_higher_mod_or_admin_check( + &mut context.pool(), + community_id, + local_user_view.person.id, + vec![data.person_id], + ) + .await?; + } + let community = Community::read(&mut context.pool(), community_id).await?; + + // If user is admin and community is remote, explicitly check that he is a + // moderator. This is necessary because otherwise the action would be rejected + // by the community's home instance. if local_user_view.local_user.admin && !community.local { - Err(LemmyErrorType::NotAModerator)? 
+ CommunityModeratorView::check_is_community_moderator( + &mut context.pool(), + community.id, + local_user_view.person.id, + ) + .await?; } // Update in local database @@ -69,12 +91,12 @@ pub async fn add_mod_to_community( let moderators = CommunityModeratorView::for_community(&mut context.pool(), community_id).await?; ActivityChannel::submit_activity( - SendActivityData::AddModToCommunity( - local_user_view.person, - data.community_id, - data.person_id, - data.added, - ), + SendActivityData::AddModToCommunity { + moderator: local_user_view.person, + community_id: data.community_id, + target: data.person_id, + added: data.added, + }, &context, ) .await?; diff --git a/crates/api/src/community/ban.rs b/crates/api/src/community/ban.rs index f662c4a08..64b1c7196 100644 --- a/crates/api/src/community/ban.rs +++ b/crates/api/src/community/ban.rs @@ -4,7 +4,11 @@ use lemmy_api_common::{ community::{BanFromCommunity, BanFromCommunityResponse}, context::LemmyContext, send_activity::{ActivityChannel, SendActivityData}, - utils::{check_community_mod_action, check_expire_time, remove_user_data_in_community}, + utils::{ + check_community_mod_action, + check_expire_time, + remove_or_restore_user_data_in_community, + }, }; use lemmy_db_schema::{ source::{ @@ -14,6 +18,7 @@ use lemmy_db_schema::{ CommunityPersonBan, CommunityPersonBanForm, }, + local_user::LocalUser, moderator::{ModBanFromCommunity, ModBanFromCommunityForm}, }, traits::{Bannable, Crud, Followable}, @@ -21,7 +26,7 @@ use lemmy_db_schema::{ use lemmy_db_views::structs::LocalUserView; use lemmy_db_views_actor::structs::PersonView; use lemmy_utils::{ - error::{LemmyError, LemmyErrorExt, LemmyErrorType}, + error::{LemmyErrorExt, LemmyErrorType, LemmyResult}, utils::validation::is_valid_body_field, }; @@ -30,9 +35,8 @@ pub async fn ban_from_community( data: Json, context: Data, local_user_view: LocalUserView, -) -> Result, LemmyError> { +) -> LemmyResult> { let banned_person_id = data.person_id; - let remove_data = data.remove_data.unwrap_or(false); let expires = check_expire_time(data.expires)?; // Verify that only mods or admins can ban @@ -43,7 +47,18 @@ pub async fn ban_from_community( &mut context.pool(), ) .await?; - is_valid_body_field(&data.reason, false)?; + + LocalUser::is_higher_mod_or_admin_check( + &mut context.pool(), + data.community_id, + local_user_view.person.id, + vec![data.person_id], + ) + .await?; + + if let Some(reason) = &data.reason { + is_valid_body_field(reason, false)?; + } let community_user_ban_form = CommunityPersonBanForm { community_id: data.community_id, @@ -73,9 +88,18 @@ pub async fn ban_from_community( } // Remove/Restore their data if that's desired - if remove_data { - remove_user_data_in_community(data.community_id, banned_person_id, &mut context.pool()).await?; - } + if data.remove_or_restore_data.unwrap_or(false) { + let remove_data = data.ban; + remove_or_restore_user_data_in_community( + data.community_id, + local_user_view.person.id, + banned_person_id, + remove_data, + &data.reason, + &mut context.pool(), + ) + .await?; + }; // Mod tables let form = ModBanFromCommunityForm { @@ -92,12 +116,12 @@ pub async fn ban_from_community( let person_view = PersonView::read(&mut context.pool(), data.person_id).await?; ActivityChannel::submit_activity( - SendActivityData::BanFromCommunity( - local_user_view.person, - data.community_id, - person_view.person.clone(), - data.0.clone(), - ), + SendActivityData::BanFromCommunity { + moderator: local_user_view.person, + community_id: data.community_id, + target: 
person_view.person.clone(), + data: data.0.clone(), + }, &context, ) .await?; diff --git a/crates/api/src/community/block.rs b/crates/api/src/community/block.rs index fd4a5a01b..90931c762 100644 --- a/crates/api/src/community/block.rs +++ b/crates/api/src/community/block.rs @@ -14,14 +14,14 @@ use lemmy_db_schema::{ }; use lemmy_db_views::structs::LocalUserView; use lemmy_db_views_actor::structs::CommunityView; -use lemmy_utils::error::{LemmyError, LemmyErrorExt, LemmyErrorType}; +use lemmy_utils::error::{LemmyErrorExt, LemmyErrorType, LemmyResult}; #[tracing::instrument(skip(context))] pub async fn block_community( data: Json, context: Data, local_user_view: LocalUserView, -) -> Result, LemmyError> { +) -> LemmyResult> { let community_id = data.community_id; let person_id = local_user_view.person.id; let community_block_form = CommunityBlockForm { @@ -50,8 +50,13 @@ pub async fn block_community( .with_lemmy_type(LemmyErrorType::CommunityBlockAlreadyExists)?; } - let community_view = - CommunityView::read(&mut context.pool(), community_id, Some(person_id), false).await?; + let community_view = CommunityView::read( + &mut context.pool(), + community_id, + Some(&local_user_view.local_user), + false, + ) + .await?; ActivityChannel::submit_activity( SendActivityData::FollowCommunity( diff --git a/crates/api/src/community/follow.rs b/crates/api/src/community/follow.rs index 497aa83cf..d0f5bbf0d 100644 --- a/crates/api/src/community/follow.rs +++ b/crates/api/src/community/follow.rs @@ -15,14 +15,14 @@ use lemmy_db_schema::{ }; use lemmy_db_views::structs::LocalUserView; use lemmy_db_views_actor::structs::CommunityView; -use lemmy_utils::error::{LemmyError, LemmyErrorExt, LemmyErrorType}; +use lemmy_utils::error::{LemmyErrorExt, LemmyErrorType, LemmyResult}; #[tracing::instrument(skip(context))] pub async fn follow_community( data: Json, context: Data, local_user_view: LocalUserView, -) -> Result, LemmyError> { +) -> LemmyResult> { let community = Community::read(&mut context.pool(), data.community_id).await?; let mut community_follower_form = CommunityFollowerForm { community_id: community.id, @@ -45,23 +45,29 @@ pub async fn follow_community( .await .with_lemmy_type(LemmyErrorType::CommunityFollowerAlreadyExists)?; } - } - if !data.follow { + } else { CommunityFollower::unfollow(&mut context.pool(), &community_follower_form) .await .with_lemmy_type(LemmyErrorType::CommunityFollowerAlreadyExists)?; } - ActivityChannel::submit_activity( - SendActivityData::FollowCommunity(community, local_user_view.person.clone(), data.follow), - &context, + if !community.local { + ActivityChannel::submit_activity( + SendActivityData::FollowCommunity(community, local_user_view.person.clone(), data.follow), + &context, + ) + .await?; + } + + let community_id = data.community_id; + let community_view = CommunityView::read( + &mut context.pool(), + community_id, + Some(&local_user_view.local_user), + false, ) .await?; - let community_id = data.community_id; - let person_id = local_user_view.person.id; - let community_view = - CommunityView::read(&mut context.pool(), community_id, Some(person_id), false).await?; let discussion_languages = CommunityLanguage::read(&mut context.pool(), community_id).await?; Ok(Json(CommunityResponse { diff --git a/crates/api/src/community/hide.rs b/crates/api/src/community/hide.rs index 27919a42b..997d88de3 100644 --- a/crates/api/src/community/hide.rs +++ b/crates/api/src/community/hide.rs @@ -15,14 +15,14 @@ use lemmy_db_schema::{ traits::Crud, }; use 
lemmy_db_views::structs::LocalUserView; -use lemmy_utils::error::{LemmyError, LemmyErrorExt, LemmyErrorType}; +use lemmy_utils::error::{LemmyErrorExt, LemmyErrorType, LemmyResult}; #[tracing::instrument(skip(context))] pub async fn hide_community( data: Json, context: Data, local_user_view: LocalUserView, -) -> Result, LemmyError> { +) -> LemmyResult> { // Verify its a admin (only admin can hide or unhide it) is_admin(&local_user_view)?; diff --git a/crates/api/src/community/mod.rs b/crates/api/src/community/mod.rs index 478192229..54bdbef28 100644 --- a/crates/api/src/community/mod.rs +++ b/crates/api/src/community/mod.rs @@ -3,4 +3,5 @@ pub mod ban; pub mod block; pub mod follow; pub mod hide; +pub mod random; pub mod transfer; diff --git a/crates/api/src/community/random.rs b/crates/api/src/community/random.rs new file mode 100644 index 000000000..3cc04e126 --- /dev/null +++ b/crates/api/src/community/random.rs @@ -0,0 +1,55 @@ +use activitypub_federation::config::Data; +use actix_web::web::{Json, Query}; +use lemmy_api_common::{ + community::{CommunityResponse, GetRandomCommunity}, + context::LemmyContext, + utils::{check_private_instance, is_mod_or_admin_opt}, +}; +use lemmy_db_schema::source::{ + actor_language::CommunityLanguage, + community::Community, + local_site::LocalSite, +}; +use lemmy_db_views::structs::LocalUserView; +use lemmy_db_views_actor::structs::CommunityView; +use lemmy_utils::error::LemmyResult; + +#[tracing::instrument(skip(context))] +pub async fn get_random_community( + data: Query, + context: Data, + local_user_view: Option, +) -> LemmyResult> { + let local_site = LocalSite::read(&mut context.pool()).await?; + + check_private_instance(&local_user_view, &local_site)?; + + let local_user = local_user_view.as_ref().map(|u| &u.local_user); + + let random_community_id = + Community::get_random_community_id(&mut context.pool(), &data.type_).await?; + + let is_mod_or_admin = is_mod_or_admin_opt( + &mut context.pool(), + local_user_view.as_ref(), + Some(random_community_id), + ) + .await + .is_ok(); + + let community_view = CommunityView::read( + &mut context.pool(), + random_community_id, + local_user, + is_mod_or_admin, + ) + .await?; + + let discussion_languages = + CommunityLanguage::read(&mut context.pool(), random_community_id).await?; + + Ok(Json(CommunityResponse { + community_view, + discussion_languages, + })) +} diff --git a/crates/api/src/community/transfer.rs b/crates/api/src/community/transfer.rs index 340bb6b63..195adbd8d 100644 --- a/crates/api/src/community/transfer.rs +++ b/crates/api/src/community/transfer.rs @@ -15,7 +15,7 @@ use lemmy_db_schema::{ use lemmy_db_views::structs::LocalUserView; use lemmy_db_views_actor::structs::{CommunityModeratorView, CommunityView}; use lemmy_utils::{ - error::{LemmyError, LemmyErrorExt, LemmyErrorType}, + error::{LemmyErrorExt, LemmyErrorType, LemmyResult}, location_info, }; @@ -26,7 +26,7 @@ pub async fn transfer_community( data: Json, context: Data, local_user_view: LocalUserView, -) -> Result, LemmyError> { +) -> LemmyResult> { let community_id = data.community_id; let mut community_mods = CommunityModeratorView::for_community(&mut context.pool(), community_id).await?; @@ -76,16 +76,16 @@ pub async fn transfer_community( ModTransferCommunity::create(&mut context.pool(), &form).await?; let community_id = data.community_id; - let person_id = local_user_view.person.id; - let community_view = - CommunityView::read(&mut context.pool(), community_id, Some(person_id), false) - .await - 
.with_lemmy_type(LemmyErrorType::CouldntFindCommunity)?; + let community_view = CommunityView::read( + &mut context.pool(), + community_id, + Some(&local_user_view.local_user), + false, + ) + .await?; let community_id = data.community_id; - let moderators = CommunityModeratorView::for_community(&mut context.pool(), community_id) - .await - .with_lemmy_type(LemmyErrorType::CouldntFindCommunity)?; + let moderators = CommunityModeratorView::for_community(&mut context.pool(), community_id).await?; // Return the jwt Ok(Json(GetCommunityResponse { diff --git a/crates/api/src/lib.rs b/crates/api/src/lib.rs index 5621fe2df..6ffa52f77 100644 --- a/crates/api/src/lib.rs +++ b/crates/api/src/lib.rs @@ -1,16 +1,32 @@ +use activitypub_federation::config::Data; use actix_web::{http::header::Header, HttpRequest}; use actix_web_httpauth::headers::authorization::{Authorization, Bearer}; use base64::{engine::general_purpose::STANDARD_NO_PAD as base64, Engine}; use captcha::Captcha; use lemmy_api_common::{ claims::Claims, + community::BanFromCommunity, context::LemmyContext, - utils::{check_user_valid, local_site_to_slur_regex, AUTH_COOKIE_NAME}, + send_activity::{ActivityChannel, SendActivityData}, + utils::{check_expire_time, check_user_valid, local_site_to_slur_regex, AUTH_COOKIE_NAME}, +}; +use lemmy_db_schema::{ + source::{ + community::{ + CommunityFollower, + CommunityFollowerForm, + CommunityPersonBan, + CommunityPersonBanForm, + }, + local_site::LocalSite, + moderator::{ModBanFromCommunity, ModBanFromCommunityForm}, + person::Person, + }, + traits::{Bannable, Crud, Followable}, }; -use lemmy_db_schema::source::local_site::LocalSite; use lemmy_db_views::structs::LocalUserView; use lemmy_utils::{ - error::{LemmyError, LemmyErrorExt, LemmyErrorExt2, LemmyErrorType, LemmyResult}, + error::{LemmyErrorExt, LemmyErrorExt2, LemmyErrorType, LemmyResult}, utils::slurs::check_slurs, }; use std::io::Cursor; @@ -28,32 +44,38 @@ pub mod site; pub mod sitemap; /// Converts the captcha to a base64 encoded wav audio file -pub(crate) fn captcha_as_wav_base64(captcha: &Captcha) -> Result { +pub(crate) fn captcha_as_wav_base64(captcha: &Captcha) -> LemmyResult { let letters = captcha.as_wav(); // Decode each wav file, concatenate the samples let mut concat_samples: Vec = Vec::new(); - let mut any_header: Option = None; + let mut any_header: Option = None; for letter in letters { let mut cursor = Cursor::new(letter.unwrap_or_default()); - let (header, samples) = wav::read(&mut cursor)?; - any_header = Some(header); - if let Some(samples16) = samples.as_sixteen() { - concat_samples.extend(samples16); - } else { - Err(LemmyErrorType::CouldntCreateAudioCaptcha)? 
- } + let reader = hound::WavReader::new(&mut cursor)?; + any_header = Some(reader.spec()); + let samples16 = reader + .into_samples::() + .collect::, _>>() + .with_lemmy_type(LemmyErrorType::CouldntCreateAudioCaptcha)?; + concat_samples.extend(samples16); } // Encode the concatenated result as a wav file let mut output_buffer = Cursor::new(vec![]); if let Some(header) = any_header { - wav::write( - header, - &wav::BitDepth::Sixteen(concat_samples), - &mut output_buffer, - ) - .with_lemmy_type(LemmyErrorType::CouldntCreateAudioCaptcha)?; + let mut writer = hound::WavWriter::new(&mut output_buffer, header) + .with_lemmy_type(LemmyErrorType::CouldntCreateAudioCaptcha)?; + let mut writer16 = writer.get_i16_writer(concat_samples.len() as u32); + for sample in concat_samples { + writer16.write_sample(sample); + } + writer16 + .flush() + .with_lemmy_type(LemmyErrorType::CouldntCreateAudioCaptcha)?; + writer + .finalize() + .with_lemmy_type(LemmyErrorType::CouldntCreateAudioCaptcha)?; Ok(base64.encode(output_buffer.into_inner())) } else { @@ -62,7 +84,7 @@ pub(crate) fn captcha_as_wav_base64(captcha: &Captcha) -> Result Result<(), LemmyError> { +pub(crate) fn check_report_reason(reason: &str, local_site: &LocalSite) -> LemmyResult<()> { let slur_regex = &local_site_to_slur_regex(local_site); check_slurs(reason, slur_regex)?; @@ -75,21 +97,14 @@ pub(crate) fn check_report_reason(reason: &str, local_site: &LocalSite) -> Resul } } -pub fn read_auth_token(req: &HttpRequest) -> Result, LemmyError> { +pub fn read_auth_token(req: &HttpRequest) -> LemmyResult> { // Try reading jwt from auth header if let Ok(header) = Authorization::::parse(req) { Ok(Some(header.as_ref().token().to_string())) } // If that fails, try to read from cookie else if let Some(cookie) = &req.cookie(AUTH_COOKIE_NAME) { - // ensure that its marked as httponly and secure - let secure = cookie.secure().unwrap_or_default(); - let http_only = cookie.http_only().unwrap_or_default(); - if !secure || !http_only { - Err(LemmyError::from(LemmyErrorType::AuthCookieInsecure)) - } else { - Ok(Some(cookie.value().to_string())) - } + Ok(Some(cookie.value().to_string())) } // Otherwise, there's no auth else { @@ -126,11 +141,7 @@ pub(crate) fn generate_totp_2fa_secret() -> String { Secret::generate_secret().to_string() } -pub(crate) fn build_totp_2fa( - site_name: &str, - username: &str, - secret: &str, -) -> Result { +fn build_totp_2fa(hostname: &str, username: &str, secret: &str) -> LemmyResult { let sec = Secret::Raw(secret.as_bytes().to_vec()); let sec_bytes = sec .to_bytes() @@ -142,17 +153,108 @@ pub(crate) fn build_totp_2fa( 1, 30, sec_bytes, - Some(site_name.to_string()), + Some(hostname.to_string()), username.to_string(), ) .with_lemmy_type(LemmyErrorType::CouldntGenerateTotp) } +/// Site bans are only federated for local users. +/// This is a problem, because site-banning non-local users will still leave content +/// they've posted to our local communities, on other servers. +/// +/// So when doing a site ban for a non-local user, you need to federate/send a +/// community ban for every local community they've participated in. 
+/// See https://github.com/LemmyNet/lemmy/issues/4118 +#[tracing::instrument(skip_all)] +pub(crate) async fn ban_nonlocal_user_from_local_communities( + local_user_view: &LocalUserView, + target: &Person, + ban: bool, + reason: &Option, + remove_or_restore_data: &Option, + expires: &Option, + context: &Data, +) -> LemmyResult<()> { + // Only run this code for federated users + if !target.local { + let ids = Person::list_local_community_ids(&mut context.pool(), target.id).await?; + + for community_id in ids { + let expires_dt = check_expire_time(*expires)?; + + // Ban / unban them from our local communities + let community_user_ban_form = CommunityPersonBanForm { + community_id, + person_id: target.id, + expires: Some(expires_dt), + }; + + if ban { + // Ignore all errors for these + CommunityPersonBan::ban(&mut context.pool(), &community_user_ban_form) + .await + .ok(); + + // Also unsubscribe them from the community, if they are subscribed + let community_follower_form = CommunityFollowerForm { + community_id, + person_id: target.id, + pending: false, + }; + + CommunityFollower::unfollow(&mut context.pool(), &community_follower_form) + .await + .ok(); + } else { + CommunityPersonBan::unban(&mut context.pool(), &community_user_ban_form) + .await + .ok(); + } + + // Mod tables + let form = ModBanFromCommunityForm { + mod_person_id: local_user_view.person.id, + other_person_id: target.id, + community_id, + reason: reason.clone(), + banned: Some(ban), + expires: expires_dt, + }; + + ModBanFromCommunity::create(&mut context.pool(), &form).await?; + + // Federate the ban from community + let ban_from_community = BanFromCommunity { + community_id, + person_id: target.id, + ban, + reason: reason.clone(), + remove_or_restore_data: *remove_or_restore_data, + expires: *expires, + }; + + ActivityChannel::submit_activity( + SendActivityData::BanFromCommunity { + moderator: local_user_view.person.clone(), + community_id, + target: target.clone(), + data: ban_from_community, + }, + context, + ) + .await?; + } + } + + Ok(()) +} + #[tracing::instrument(skip_all)] pub async fn local_user_view_from_jwt( jwt: &str, context: &LemmyContext, -) -> Result { +) -> LemmyResult { let local_user_id = Claims::validate(jwt, context) .await .with_lemmy_type(LemmyErrorType::NotLoggedIn)?; @@ -164,15 +266,13 @@ pub async fn local_user_view_from_jwt( #[cfg(test)] mod tests { - #![allow(clippy::unwrap_used)] - #![allow(clippy::indexing_slicing)] use super::*; #[test] fn test_build_totp() { let generated_secret = generate_totp_2fa_secret(); - let totp = build_totp_2fa("lemmy", "my_name", &generated_secret); + let totp = build_totp_2fa("lemmy.ml", "my_name", &generated_secret); assert!(totp.is_ok()); } } diff --git a/crates/api/src/local_user/add_admin.rs b/crates/api/src/local_user/add_admin.rs index 502335876..299c9477a 100644 --- a/crates/api/src/local_user/add_admin.rs +++ b/crates/api/src/local_user/add_admin.rs @@ -13,23 +13,33 @@ use lemmy_db_schema::{ }; use lemmy_db_views::structs::LocalUserView; use lemmy_db_views_actor::structs::PersonView; -use lemmy_utils::error::{LemmyError, LemmyErrorExt, LemmyErrorType}; +use lemmy_utils::error::{LemmyErrorExt, LemmyErrorType, LemmyResult}; #[tracing::instrument(skip(context))] pub async fn add_admin( data: Json, context: Data, local_user_view: LocalUserView, -) -> Result, LemmyError> { +) -> LemmyResult> { // Make sure user is an admin is_admin(&local_user_view)?; + // If its an admin removal, also check that you're a higher admin + if !data.added { + 
LocalUser::is_higher_admin_check( + &mut context.pool(), + local_user_view.person.id, + vec![data.person_id], + ) + .await?; + } + // Make sure that the person_id added is local let added_local_user = LocalUserView::read_person(&mut context.pool(), data.person_id) .await - .with_lemmy_type(LemmyErrorType::ObjectNotLocal)?; + .map_err(|_| LemmyErrorType::ObjectNotLocal)?; - let added_admin = LocalUser::update( + LocalUser::update( &mut context.pool(), added_local_user.local_user.id, &LocalUserUpdateForm { @@ -43,7 +53,7 @@ pub async fn add_admin( // Mod tables let form = ModAddForm { mod_person_id: local_user_view.person.id, - other_person_id: added_admin.person_id, + other_person_id: added_local_user.person.id, removed: Some(!data.added), }; diff --git a/crates/api/src/local_user/ban_person.rs b/crates/api/src/local_user/ban_person.rs index d7c47e619..2ace7f031 100644 --- a/crates/api/src/local_user/ban_person.rs +++ b/crates/api/src/local_user/ban_person.rs @@ -1,13 +1,15 @@ +use crate::ban_nonlocal_user_from_local_communities; use activitypub_federation::config::Data; use actix_web::web::Json; use lemmy_api_common::{ context::LemmyContext, person::{BanPerson, BanPersonResponse}, send_activity::{ActivityChannel, SendActivityData}, - utils::{check_expire_time, is_admin, remove_user_data}, + utils::{check_expire_time, is_admin, remove_or_restore_user_data}, }; use lemmy_db_schema::{ source::{ + local_user::LocalUser, login_token::LoginToken, moderator::{ModBan, ModBanForm}, person::{Person, PersonUpdateForm}, @@ -17,7 +19,7 @@ use lemmy_db_schema::{ use lemmy_db_views::structs::LocalUserView; use lemmy_db_views_actor::structs::PersonView; use lemmy_utils::{ - error::{LemmyError, LemmyErrorExt, LemmyErrorType}, + error::{LemmyErrorExt, LemmyErrorType, LemmyResult}, utils::validation::is_valid_body_field, }; @@ -26,11 +28,21 @@ pub async fn ban_from_site( data: Json, context: Data, local_user_view: LocalUserView, -) -> Result, LemmyError> { +) -> LemmyResult> { // Make sure user is an admin is_admin(&local_user_view)?; - is_valid_body_field(&data.reason, false)?; + // Also make sure you're a higher admin than the target + LocalUser::is_higher_admin_check( + &mut context.pool(), + local_user_view.person.id, + vec![data.person_id], + ) + .await?; + + if let Some(reason) = &data.reason { + is_valid_body_field(reason, false)?; + } let expires = check_expire_time(data.expires)?; @@ -46,22 +58,29 @@ pub async fn ban_from_site( .await .with_lemmy_type(LemmyErrorType::CouldntUpdateUser)?; - let local_user_id = LocalUserView::read_person(&mut context.pool(), data.person_id) - .await? 
- .local_user - .id; - LoginToken::invalidate_all(&mut context.pool(), local_user_id).await?; + // if its a local user, invalidate logins + let local_user = LocalUserView::read_person(&mut context.pool(), person.id).await; + if let Ok(local_user) = local_user { + LoginToken::invalidate_all(&mut context.pool(), local_user.local_user.id).await?; + } // Remove their data if that's desired - let remove_data = data.remove_data.unwrap_or(false); - if remove_data { - remove_user_data(person.id, &context).await?; - } + if data.remove_or_restore_data.unwrap_or(false) { + let removed = data.ban; + remove_or_restore_user_data( + local_user_view.person.id, + person.id, + removed, + &data.reason, + &context, + ) + .await?; + }; // Mod tables let form = ModBanForm { mod_person_id: local_user_view.person.id, - other_person_id: data.person_id, + other_person_id: person.id, reason: data.reason.clone(), banned: Some(data.ban), expires, @@ -69,14 +88,28 @@ pub async fn ban_from_site( ModBan::create(&mut context.pool(), &form).await?; - let person_view = PersonView::read(&mut context.pool(), data.person_id).await?; + let person_view = PersonView::read(&mut context.pool(), person.id).await?; + + ban_nonlocal_user_from_local_communities( + &local_user_view, + &person, + data.ban, + &data.reason, + &data.remove_or_restore_data, + &data.expires, + &context, + ) + .await?; ActivityChannel::submit_activity( - SendActivityData::BanFromSite( - local_user_view.person, - person_view.person.clone(), - data.0.clone(), - ), + SendActivityData::BanFromSite { + moderator: local_user_view.person, + banned_user: person_view.person.clone(), + reason: data.reason.clone(), + remove_or_restore_data: data.remove_or_restore_data, + ban: data.ban, + expires: data.expires, + }, &context, ) .await?; diff --git a/crates/api/src/local_user/block.rs b/crates/api/src/local_user/block.rs index cb345616b..250277be3 100644 --- a/crates/api/src/local_user/block.rs +++ b/crates/api/src/local_user/block.rs @@ -9,14 +9,14 @@ use lemmy_db_schema::{ }; use lemmy_db_views::structs::LocalUserView; use lemmy_db_views_actor::structs::PersonView; -use lemmy_utils::error::{LemmyError, LemmyErrorExt, LemmyErrorType}; +use lemmy_utils::error::{LemmyErrorExt, LemmyErrorType, LemmyResult}; #[tracing::instrument(skip(context))] pub async fn block_person( data: Json, context: Data, local_user_view: LocalUserView, -) -> Result, LemmyError> { +) -> LemmyResult> { let target_id = data.person_id; let person_id = local_user_view.person.id; @@ -30,8 +30,11 @@ pub async fn block_person( target_id, }; - let target_user = LocalUserView::read_person(&mut context.pool(), target_id).await; - if target_user.map(|t| t.local_user.admin) == Ok(true) { + let target_user = LocalUserView::read_person(&mut context.pool(), target_id) + .await + .ok(); + + if target_user.is_some_and(|t| t.local_user.admin) { Err(LemmyErrorType::CantBlockAdmin)? 
} diff --git a/crates/api/src/local_user/change_password.rs b/crates/api/src/local_user/change_password.rs index ab5b32dd9..03f873a0f 100644 --- a/crates/api/src/local_user/change_password.rs +++ b/crates/api/src/local_user/change_password.rs @@ -11,7 +11,7 @@ use lemmy_api_common::{ }; use lemmy_db_schema::source::{local_user::LocalUser, login_token::LoginToken}; use lemmy_db_views::structs::LocalUserView; -use lemmy_utils::error::{LemmyError, LemmyErrorType}; +use lemmy_utils::error::{LemmyErrorType, LemmyResult}; #[tracing::instrument(skip(context))] pub async fn change_password( @@ -19,7 +19,7 @@ pub async fn change_password( req: HttpRequest, context: Data, local_user_view: LocalUserView, -) -> Result, LemmyError> { +) -> LemmyResult> { password_length_check(&data.new_password)?; // Make sure passwords match @@ -28,11 +28,13 @@ pub async fn change_password( } // Check the old password - let valid: bool = verify( - &data.old_password, - &local_user_view.local_user.password_encrypted, - ) - .unwrap_or(false); + let valid: bool = if let Some(password_encrypted) = &local_user_view.local_user.password_encrypted + { + verify(&data.old_password, password_encrypted).unwrap_or(false) + } else { + data.old_password.is_empty() + }; + if !valid { Err(LemmyErrorType::IncorrectLogin)? } diff --git a/crates/api/src/local_user/change_password_after_reset.rs b/crates/api/src/local_user/change_password_after_reset.rs index 50a267d6a..191815d0f 100644 --- a/crates/api/src/local_user/change_password_after_reset.rs +++ b/crates/api/src/local_user/change_password_after_reset.rs @@ -10,18 +10,18 @@ use lemmy_db_schema::source::{ login_token::LoginToken, password_reset_request::PasswordResetRequest, }; -use lemmy_utils::error::{LemmyError, LemmyErrorExt, LemmyErrorType}; +use lemmy_utils::error::{LemmyErrorExt, LemmyErrorType, LemmyResult}; #[tracing::instrument(skip(context))] pub async fn change_password_after_reset( data: Json, context: Data, -) -> Result, LemmyError> { +) -> LemmyResult> { // Fetch the user_id from the token let token = data.token.clone(); - let local_user_id = PasswordResetRequest::read_from_token(&mut context.pool(), &token) - .await - .map(|p| p.local_user_id)?; + let local_user_id = PasswordResetRequest::read_and_delete(&mut context.pool(), &token) + .await? + .local_user_id; password_length_check(&data.password)?; diff --git a/crates/api/src/local_user/generate_totp_secret.rs b/crates/api/src/local_user/generate_totp_secret.rs index a983beaaa..03ba69759 100644 --- a/crates/api/src/local_user/generate_totp_secret.rs +++ b/crates/api/src/local_user/generate_totp_secret.rs @@ -1,17 +1,13 @@ use crate::{build_totp_2fa, generate_totp_2fa_secret}; use activitypub_federation::config::Data; use actix_web::web::Json; -use lemmy_api_common::{ - context::LemmyContext, - person::GenerateTotpSecretResponse, - sensitive::Sensitive, +use lemmy_api_common::{context::LemmyContext, person::GenerateTotpSecretResponse}; +use lemmy_db_schema::source::{ + local_user::{LocalUser, LocalUserUpdateForm}, + site::Site, }; -use lemmy_db_schema::{ - source::local_user::{LocalUser, LocalUserUpdateForm}, - traits::Crud, -}; -use lemmy_db_views::structs::{LocalUserView, SiteView}; -use lemmy_utils::error::{LemmyError, LemmyErrorType}; +use lemmy_db_views::structs::LocalUserView; +use lemmy_utils::error::{LemmyErrorType, LemmyResult}; /// Generate a new secret for two-factor-authentication. Afterwards you need to call [toggle_totp] /// to enable it. This can only be called if 2FA is currently disabled. 
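The next hunk switches the TOTP issuer from the site name to the instance hostname. A rough standalone sketch of the same flow with the totp_rs crate; the digit count and the crate features involved (otpauth for issuer/account, gen_secret for Secret::generate_secret) are assumptions, since the diff itself only shows the `1, 30` skew/step arguments:

use totp_rs::{Algorithm, Secret, TOTP};

// Sketch only: mirrors generate_totp_2fa_secret() + build_totp_2fa() under the
// assumptions stated above, not the exact helpers from this crate.
fn totp_secret_url(hostname: &str, username: &str) -> String {
  // A fresh random secret, as generate_totp_2fa_secret() produces.
  let secret = Secret::generate_secret();
  let secret_bytes = secret.to_bytes().expect("generated secret is valid raw bytes");

  // The issuer is now the hostname (e.g. "lemmy.ml") rather than the site name.
  let totp = TOTP::new(
    Algorithm::SHA1,
    6,  // assumed digit count
    1,  // skew
    30, // step, in seconds
    secret_bytes,
    Some(hostname.to_string()),
    username.to_string(),
  )
  .expect("valid TOTP parameters");

  // otpauth:// URL the user scans into an authenticator app;
  // check_totp_2fa_valid presumably verifies later logins via totp.check_current(..).
  totp.get_url()
}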
@@ -19,16 +15,15 @@ use lemmy_utils::error::{LemmyError, LemmyErrorType}; pub async fn generate_totp_secret( local_user_view: LocalUserView, context: Data, -) -> Result, LemmyError> { - let site_view = SiteView::read_local(&mut context.pool()).await?; +) -> LemmyResult> { + let site = Site::read_local(&mut context.pool()).await?; if local_user_view.local_user.totp_2fa_enabled { return Err(LemmyErrorType::TotpAlreadyEnabled)?; } let secret = generate_totp_2fa_secret(); - let secret_url = - build_totp_2fa(&site_view.site.name, &local_user_view.person.name, &secret)?.get_url(); + let secret_url = build_totp_2fa(&site.name, &local_user_view.person.name, &secret)?.get_url(); let local_user_form = LocalUserUpdateForm { totp_2fa_secret: Some(Some(secret)), @@ -42,6 +37,6 @@ pub async fn generate_totp_secret( .await?; Ok(Json(GenerateTotpSecretResponse { - totp_secret_url: Sensitive::new(secret_url), + totp_secret_url: secret_url.into(), })) } diff --git a/crates/api/src/local_user/get_captcha.rs b/crates/api/src/local_user/get_captcha.rs index 5d692aa0c..ac64fa07c 100644 --- a/crates/api/src/local_user/get_captcha.rs +++ b/crates/api/src/local_user/get_captcha.rs @@ -1,5 +1,13 @@ use crate::captcha_as_wav_base64; -use actix_web::web::{Data, Json}; +use actix_web::{ + http::{ + header::{CacheControl, CacheDirective}, + StatusCode, + }, + web::{Data, Json}, + HttpResponse, + HttpResponseBuilder, +}; use captcha::{gen, Difficulty}; use lemmy_api_common::{ context::LemmyContext, @@ -9,16 +17,16 @@ use lemmy_db_schema::source::{ captcha_answer::{CaptchaAnswer, CaptchaAnswerForm}, local_site::LocalSite, }; -use lemmy_utils::error::LemmyError; +use lemmy_utils::error::LemmyResult; #[tracing::instrument(skip(context))] -pub async fn get_captcha( - context: Data, -) -> Result, LemmyError> { +pub async fn get_captcha(context: Data) -> LemmyResult { let local_site = LocalSite::read(&mut context.pool()).await?; + let mut res = HttpResponseBuilder::new(StatusCode::OK); + res.insert_header(CacheControl(vec![CacheDirective::NoStore])); if !local_site.captcha_enabled { - return Ok(Json(GetCaptchaResponse { ok: None })); + return Ok(res.json(Json(GetCaptchaResponse { ok: None }))); } let captcha = gen(match local_site.captcha_difficulty.as_str() { @@ -37,11 +45,12 @@ pub async fn get_captcha( // Stores the captcha item in the db let captcha = CaptchaAnswer::insert(&mut context.pool(), &captcha_form).await?; - Ok(Json(GetCaptchaResponse { + let json = Json(GetCaptchaResponse { ok: Some(CaptchaResponse { png, wav, uuid: captcha.uuid.to_string(), }), - })) + }); + Ok(res.json(json)) } diff --git a/crates/api/src/local_user/list_banned.rs b/crates/api/src/local_user/list_banned.rs index 5c76d89a8..ba2c0d403 100644 --- a/crates/api/src/local_user/list_banned.rs +++ b/crates/api/src/local_user/list_banned.rs @@ -2,12 +2,12 @@ use actix_web::web::{Data, Json}; use lemmy_api_common::{context::LemmyContext, person::BannedPersonsResponse, utils::is_admin}; use lemmy_db_views::structs::LocalUserView; use lemmy_db_views_actor::structs::PersonView; -use lemmy_utils::error::LemmyError; +use lemmy_utils::error::LemmyResult; pub async fn list_banned_users( context: Data, local_user_view: LocalUserView, -) -> Result, LemmyError> { +) -> LemmyResult> { // Make sure user is an admin is_admin(&local_user_view)?; diff --git a/crates/api/src/local_user/list_logins.rs b/crates/api/src/local_user/list_logins.rs index f1ae76be5..b5aaf8972 100644 --- a/crates/api/src/local_user/list_logins.rs +++ b/crates/api/src/local_user/list_logins.rs 
@@ -1,14 +1,14 @@ use actix_web::web::{Data, Json}; -use lemmy_api_common::context::LemmyContext; +use lemmy_api_common::{context::LemmyContext, person::ListLoginsResponse}; use lemmy_db_schema::source::login_token::LoginToken; use lemmy_db_views::structs::LocalUserView; -use lemmy_utils::error::LemmyError; +use lemmy_utils::error::LemmyResult; pub async fn list_logins( context: Data, local_user_view: LocalUserView, -) -> Result>, LemmyError> { +) -> LemmyResult> { let logins = LoginToken::list(&mut context.pool(), local_user_view.local_user.id).await?; - Ok(Json(logins)) + Ok(Json(ListLoginsResponse { logins })) } diff --git a/crates/api/src/local_user/list_media.rs b/crates/api/src/local_user/list_media.rs new file mode 100644 index 000000000..779558dab --- /dev/null +++ b/crates/api/src/local_user/list_media.rs @@ -0,0 +1,25 @@ +use actix_web::web::{Data, Json, Query}; +use lemmy_api_common::{ + context::LemmyContext, + person::{ListMedia, ListMediaResponse}, +}; +use lemmy_db_views::structs::{LocalImageView, LocalUserView}; +use lemmy_utils::error::LemmyResult; + +#[tracing::instrument(skip(context))] +pub async fn list_media( + data: Query, + context: Data, + local_user_view: LocalUserView, +) -> LemmyResult> { + let page = data.page; + let limit = data.limit; + let images = LocalImageView::get_all_paged_by_local_user_id( + &mut context.pool(), + local_user_view.local_user.id, + page, + limit, + ) + .await?; + Ok(Json(ListMediaResponse { images })) +} diff --git a/crates/api/src/local_user/login.rs b/crates/api/src/local_user/login.rs index f57fd0a70..0b2514c5b 100644 --- a/crates/api/src/local_user/login.rs +++ b/crates/api/src/local_user/login.rs @@ -1,100 +1,61 @@ use crate::check_totp_2fa_valid; use actix_web::{ - http::StatusCode, web::{Data, Json}, HttpRequest, - HttpResponse, }; use bcrypt::verify; use lemmy_api_common::{ claims::Claims, context::LemmyContext, person::{Login, LoginResponse}, - utils::{check_user_valid, create_login_cookie}, -}; -use lemmy_db_schema::{ - source::{local_site::LocalSite, registration_application::RegistrationApplication}, - utils::DbPool, - RegistrationMode, + utils::{check_email_verified, check_registration_application, check_user_valid}, }; use lemmy_db_views::structs::{LocalUserView, SiteView}; -use lemmy_utils::error::{LemmyError, LemmyErrorExt, LemmyErrorType}; +use lemmy_utils::error::{LemmyErrorType, LemmyResult}; #[tracing::instrument(skip(context))] pub async fn login( data: Json, req: HttpRequest, context: Data, -) -> Result { +) -> LemmyResult> { let site_view = SiteView::read_local(&mut context.pool()).await?; // Fetch that username / email let username_or_email = data.username_or_email.clone(); let local_user_view = - LocalUserView::find_by_email_or_name(&mut context.pool(), &username_or_email) - .await - .with_lemmy_type(LemmyErrorType::IncorrectLogin)?; + LocalUserView::find_by_email_or_name(&mut context.pool(), &username_or_email).await?; // Verify the password - let valid: bool = verify( - &data.password, - &local_user_view.local_user.password_encrypted, - ) - .unwrap_or(false); + let valid: bool = local_user_view + .local_user + .password_encrypted + .as_ref() + .and_then(|password_encrypted| verify(&data.password, password_encrypted).ok()) + .unwrap_or(false); if !valid { Err(LemmyErrorType::IncorrectLogin)? 
} check_user_valid(&local_user_view.person)?; - - // Check if the user's email is verified if email verification is turned on - // However, skip checking verification if the user is an admin - if !local_user_view.local_user.admin - && site_view.local_site.require_email_verification - && !local_user_view.local_user.email_verified - { - Err(LemmyErrorType::EmailNotVerified)? - } + check_email_verified(&local_user_view, &site_view)?; check_registration_application(&local_user_view, &site_view.local_site, &mut context.pool()) .await?; // Check the totp if enabled if local_user_view.local_user.totp_2fa_enabled { - check_totp_2fa_valid(&local_user_view, &data.totp_2fa_token, &site_view.site.name)?; + check_totp_2fa_valid( + &local_user_view, + &data.totp_2fa_token, + &context.settings().hostname, + )?; } let jwt = Claims::generate(local_user_view.local_user.id, req, &context).await?; - let json = LoginResponse { + Ok(Json(LoginResponse { jwt: Some(jwt.clone()), verify_email_sent: false, registration_created: false, - }; - - let mut res = HttpResponse::build(StatusCode::OK).json(json); - res.add_cookie(&create_login_cookie(jwt))?; - Ok(res) -} - -async fn check_registration_application( - local_user_view: &LocalUserView, - local_site: &LocalSite, - pool: &mut DbPool<'_>, -) -> Result<(), LemmyError> { - if (local_site.registration_mode == RegistrationMode::RequireApplication - || local_site.registration_mode == RegistrationMode::Closed) - && !local_user_view.local_user.accepted_application - && !local_user_view.local_user.admin - { - // Fetch the registration application. If no admin id is present its still pending. Otherwise it - // was processed (either accepted or denied). - let local_user_id = local_user_view.local_user.id; - let registration = RegistrationApplication::find_by_local_user_id(pool, local_user_id).await?; - if registration.admin_id.is_some() { - Err(LemmyErrorType::RegistrationDenied(registration.deny_reason))? - } else { - Err(LemmyErrorType::RegistrationApplicationIsPending)? 
- } - } - Ok(()) + })) } diff --git a/crates/api/src/local_user/logout.rs b/crates/api/src/local_user/logout.rs index a2cc83b3f..10b4732b7 100644 --- a/crates/api/src/local_user/logout.rs +++ b/crates/api/src/local_user/logout.rs @@ -1,7 +1,7 @@ use crate::read_auth_token; use activitypub_federation::config::Data; use actix_web::{cookie::Cookie, HttpRequest, HttpResponse}; -use lemmy_api_common::{context::LemmyContext, utils::AUTH_COOKIE_NAME}; +use lemmy_api_common::{context::LemmyContext, utils::AUTH_COOKIE_NAME, SuccessResponse}; use lemmy_db_schema::source::login_token::LoginToken; use lemmy_db_views::structs::LocalUserView; use lemmy_utils::error::{LemmyErrorType, LemmyResult}; @@ -16,7 +16,7 @@ pub async fn logout( let jwt = read_auth_token(&req)?.ok_or(LemmyErrorType::NotLoggedIn)?; LoginToken::invalidate(&mut context.pool(), &jwt).await?; - let mut res = HttpResponse::Ok().finish(); + let mut res = HttpResponse::Ok().json(SuccessResponse::default()); let cookie = Cookie::new(AUTH_COOKIE_NAME, ""); res.add_removal_cookie(&cookie)?; Ok(res) diff --git a/crates/api/src/local_user/mod.rs b/crates/api/src/local_user/mod.rs index 98e023fa5..b1ee7c0b6 100644 --- a/crates/api/src/local_user/mod.rs +++ b/crates/api/src/local_user/mod.rs @@ -7,6 +7,7 @@ pub mod generate_totp_secret; pub mod get_captcha; pub mod list_banned; pub mod list_logins; +pub mod list_media; pub mod login; pub mod logout; pub mod notifications; diff --git a/crates/api/src/local_user/notifications/list_mentions.rs b/crates/api/src/local_user/notifications/list_mentions.rs index 9f9ee3ae8..bf3cd8e0d 100644 --- a/crates/api/src/local_user/notifications/list_mentions.rs +++ b/crates/api/src/local_user/notifications/list_mentions.rs @@ -5,14 +5,14 @@ use lemmy_api_common::{ }; use lemmy_db_views::structs::LocalUserView; use lemmy_db_views_actor::person_mention_view::PersonMentionQuery; -use lemmy_utils::error::LemmyError; +use lemmy_utils::error::LemmyResult; #[tracing::instrument(skip(context))] pub async fn list_mentions( data: Query, context: Data, local_user_view: LocalUserView, -) -> Result, LemmyError> { +) -> LemmyResult> { let sort = data.sort; let page = data.page; let limit = data.limit; diff --git a/crates/api/src/local_user/notifications/list_replies.rs b/crates/api/src/local_user/notifications/list_replies.rs index 555989721..d88595d96 100644 --- a/crates/api/src/local_user/notifications/list_replies.rs +++ b/crates/api/src/local_user/notifications/list_replies.rs @@ -5,14 +5,14 @@ use lemmy_api_common::{ }; use lemmy_db_views::structs::LocalUserView; use lemmy_db_views_actor::comment_reply_view::CommentReplyQuery; -use lemmy_utils::error::LemmyError; +use lemmy_utils::error::LemmyResult; #[tracing::instrument(skip(context))] pub async fn list_replies( data: Query, context: Data, local_user_view: LocalUserView, -) -> Result, LemmyError> { +) -> LemmyResult> { let sort = data.sort; let page = data.page; let limit = data.limit; diff --git a/crates/api/src/local_user/notifications/mark_all_read.rs b/crates/api/src/local_user/notifications/mark_all_read.rs index d3667460b..558d276f7 100644 --- a/crates/api/src/local_user/notifications/mark_all_read.rs +++ b/crates/api/src/local_user/notifications/mark_all_read.rs @@ -6,13 +6,13 @@ use lemmy_db_schema::source::{ private_message::PrivateMessage, }; use lemmy_db_views::structs::LocalUserView; -use lemmy_utils::error::{LemmyError, LemmyErrorExt, LemmyErrorType}; +use lemmy_utils::error::{LemmyErrorExt, LemmyErrorType, LemmyResult}; 
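A pattern worth calling out from the login and change_password hunks above: the stored password hash is now an `Option`, so verification must treat a missing hash as a failed match (or, for change_password, as matching only an empty old password). A condensed sketch of that check, using bcrypt's `verify` as the diff does:

use bcrypt::verify;

// Condensed form of the login check above: true only when a stored hash
// exists and the supplied password matches it.
fn password_matches(supplied: &str, stored_hash: Option<&str>) -> bool {
  stored_hash
    .and_then(|hash| verify(supplied, hash).ok())
    .unwrap_or(false)
}

// change_password additionally accepts an empty old password when no hash is
// stored at all, presumably so an account without a password can set one.
fn old_password_ok(supplied: &str, stored_hash: Option<&str>) -> bool {
  match stored_hash {
    Some(hash) => verify(supplied, hash).unwrap_or(false),
    None => supplied.is_empty(),
  }
}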
#[tracing::instrument(skip(context))] pub async fn mark_all_notifications_read( context: Data, local_user_view: LocalUserView, -) -> Result, LemmyError> { +) -> LemmyResult> { let person_id = local_user_view.person.id; // Mark all comment_replies as read diff --git a/crates/api/src/local_user/notifications/mark_mention_read.rs b/crates/api/src/local_user/notifications/mark_mention_read.rs index 4cce598ac..9a839b2b4 100644 --- a/crates/api/src/local_user/notifications/mark_mention_read.rs +++ b/crates/api/src/local_user/notifications/mark_mention_read.rs @@ -9,14 +9,14 @@ use lemmy_db_schema::{ }; use lemmy_db_views::structs::LocalUserView; use lemmy_db_views_actor::structs::PersonMentionView; -use lemmy_utils::error::{LemmyError, LemmyErrorExt, LemmyErrorType}; +use lemmy_utils::error::{LemmyErrorExt, LemmyErrorType, LemmyResult}; #[tracing::instrument(skip(context))] pub async fn mark_person_mention_as_read( data: Json, context: Data, local_user_view: LocalUserView, -) -> Result, LemmyError> { +) -> LemmyResult> { let person_mention_id = data.person_mention_id; let read_person_mention = PersonMention::read(&mut context.pool(), person_mention_id).await?; diff --git a/crates/api/src/local_user/notifications/mark_reply_read.rs b/crates/api/src/local_user/notifications/mark_reply_read.rs index f7b259c94..5b263145f 100644 --- a/crates/api/src/local_user/notifications/mark_reply_read.rs +++ b/crates/api/src/local_user/notifications/mark_reply_read.rs @@ -9,14 +9,14 @@ use lemmy_db_schema::{ }; use lemmy_db_views::structs::LocalUserView; use lemmy_db_views_actor::structs::CommentReplyView; -use lemmy_utils::error::{LemmyError, LemmyErrorExt, LemmyErrorType}; +use lemmy_utils::error::{LemmyErrorExt, LemmyErrorType, LemmyResult}; #[tracing::instrument(skip(context))] pub async fn mark_reply_as_read( data: Json, context: Data, local_user_view: LocalUserView, -) -> Result, LemmyError> { +) -> LemmyResult> { let comment_reply_id = data.comment_reply_id; let read_comment_reply = CommentReply::read(&mut context.pool(), comment_reply_id).await?; diff --git a/crates/api/src/local_user/notifications/unread_count.rs b/crates/api/src/local_user/notifications/unread_count.rs index c0b1f0f2e..4c6c65263 100644 --- a/crates/api/src/local_user/notifications/unread_count.rs +++ b/crates/api/src/local_user/notifications/unread_count.rs @@ -2,18 +2,21 @@ use actix_web::web::{Data, Json}; use lemmy_api_common::{context::LemmyContext, person::GetUnreadCountResponse}; use lemmy_db_views::structs::{LocalUserView, PrivateMessageView}; use lemmy_db_views_actor::structs::{CommentReplyView, PersonMentionView}; -use lemmy_utils::error::LemmyError; +use lemmy_utils::error::LemmyResult; #[tracing::instrument(skip(context))] pub async fn unread_count( context: Data, local_user_view: LocalUserView, -) -> Result, LemmyError> { +) -> LemmyResult> { let person_id = local_user_view.person.id; - let replies = CommentReplyView::get_unread_replies(&mut context.pool(), person_id).await?; + let replies = + CommentReplyView::get_unread_replies(&mut context.pool(), &local_user_view.local_user).await?; - let mentions = PersonMentionView::get_unread_mentions(&mut context.pool(), person_id).await?; + let mentions = + PersonMentionView::get_unread_mentions(&mut context.pool(), &local_user_view.local_user) + .await?; let private_messages = PrivateMessageView::get_unread_messages(&mut context.pool(), person_id).await?; diff --git a/crates/api/src/local_user/report_count.rs b/crates/api/src/local_user/report_count.rs index 666886432..32448dcaa 
100644 --- a/crates/api/src/local_user/report_count.rs +++ b/crates/api/src/local_user/report_count.rs @@ -1,8 +1,8 @@ -use actix_web::web::{Data, Json}; +use actix_web::web::{Data, Json, Query}; use lemmy_api_common::{ context::LemmyContext, person::{GetReportCount, GetReportCountResponse}, - utils::check_community_mod_action_opt, + utils::check_community_mod_of_any_or_admin_action, }; use lemmy_db_views::structs::{ CommentReportView, @@ -10,19 +10,19 @@ use lemmy_db_views::structs::{ PostReportView, PrivateMessageReportView, }; -use lemmy_utils::error::LemmyError; +use lemmy_utils::error::LemmyResult; #[tracing::instrument(skip(context))] pub async fn report_count( - data: Json, + data: Query, context: Data, local_user_view: LocalUserView, -) -> Result, LemmyError> { +) -> LemmyResult> { let person_id = local_user_view.person.id; let admin = local_user_view.local_user.admin; let community_id = data.community_id; - check_community_mod_action_opt(&local_user_view, community_id, &mut context.pool()).await?; + check_community_mod_of_any_or_admin_action(&local_user_view, &mut context.pool()).await?; let comment_reports = CommentReportView::get_report_count(&mut context.pool(), person_id, admin, community_id) diff --git a/crates/api/src/local_user/reset_password.rs b/crates/api/src/local_user/reset_password.rs index 90aa910e0..5cf06f23a 100644 --- a/crates/api/src/local_user/reset_password.rs +++ b/crates/api/src/local_user/reset_password.rs @@ -2,12 +2,11 @@ use actix_web::web::{Data, Json}; use lemmy_api_common::{ context::LemmyContext, person::PasswordReset, - utils::send_password_reset_email, + utils::{check_email_verified, send_password_reset_email}, SuccessResponse, }; -use lemmy_db_schema::source::password_reset_request::PasswordResetRequest; -use lemmy_db_views::structs::LocalUserView; -use lemmy_utils::error::{LemmyErrorExt, LemmyErrorType, LemmyResult}; +use lemmy_db_views::structs::{LocalUserView, SiteView}; +use lemmy_utils::error::{LemmyErrorType, LemmyResult}; #[tracing::instrument(skip(context))] pub async fn reset_password( @@ -18,17 +17,10 @@ pub async fn reset_password( let email = data.email.to_lowercase(); let local_user_view = LocalUserView::find_by_email(&mut context.pool(), &email) .await - .with_lemmy_type(LemmyErrorType::IncorrectLogin)?; + .map_err(|_| LemmyErrorType::IncorrectLogin)?; - // Check for too many attempts (to limit potential abuse) - let recent_resets_count = PasswordResetRequest::get_recent_password_resets_count( - &mut context.pool(), - local_user_view.local_user.id, - ) - .await?; - if recent_resets_count >= 3 { - Err(LemmyErrorType::PasswordResetLimitReached)? - } + let site_view = SiteView::read_local(&mut context.pool()).await?; + check_email_verified(&local_user_view, &site_view)?; // Email the pure token to the user. 
send_password_reset_email(&local_user_view, &mut context.pool(), context.settings()).await?; diff --git a/crates/api/src/local_user/save_settings.rs b/crates/api/src/local_user/save_settings.rs index 12cddba2b..ac2e321a1 100644 --- a/crates/api/src/local_user/save_settings.rs +++ b/crates/api/src/local_user/save_settings.rs @@ -1,48 +1,69 @@ -use actix_web::web::{Data, Json}; +use activitypub_federation::config::Data; +use actix_web::web::Json; use lemmy_api_common::{ context::LemmyContext, person::SaveUserSettings, - utils::send_verification_email, + request::replace_image, + utils::{ + get_url_blocklist, + local_site_to_slur_regex, + process_markdown_opt, + proxy_image_link_opt_api, + send_verification_email, + }, SuccessResponse, }; use lemmy_db_schema::{ source::{ actor_language::LocalUserLanguage, local_user::{LocalUser, LocalUserUpdateForm}, + local_user_vote_display_mode::{LocalUserVoteDisplayMode, LocalUserVoteDisplayModeUpdateForm}, person::{Person, PersonUpdateForm}, }, traits::Crud, - utils::{diesel_option_overwrite, diesel_option_overwrite_to_url}, + utils::{diesel_string_update, diesel_url_update}, }; use lemmy_db_views::structs::{LocalUserView, SiteView}; use lemmy_utils::{ - error::{LemmyError, LemmyErrorType}, + error::{LemmyErrorType, LemmyResult}, utils::validation::{is_valid_bio_field, is_valid_display_name, is_valid_matrix_id}, }; +use std::ops::Deref; #[tracing::instrument(skip(context))] pub async fn save_user_settings( data: Json, context: Data, local_user_view: LocalUserView, -) -> Result, LemmyError> { +) -> LemmyResult> { let site_view = SiteView::read_local(&mut context.pool()).await?; - let avatar = diesel_option_overwrite_to_url(&data.avatar)?; - let banner = diesel_option_overwrite_to_url(&data.banner)?; - let bio = diesel_option_overwrite(data.bio.clone()); - let display_name = diesel_option_overwrite(data.display_name.clone()); - let matrix_user_id = diesel_option_overwrite(data.matrix_user_id.clone()); + let slur_regex = local_site_to_slur_regex(&site_view.local_site); + let url_blocklist = get_url_blocklist(&context).await?; + let bio = diesel_string_update( + process_markdown_opt(&data.bio, &slur_regex, &url_blocklist, &context) + .await? + .as_deref(), + ); + + let avatar = diesel_url_update(data.avatar.as_deref())?; + replace_image(&avatar, &local_user_view.person.avatar, &context).await?; + let avatar = proxy_image_link_opt_api(avatar, &context).await?; + + let banner = diesel_url_update(data.banner.as_deref())?; + replace_image(&banner, &local_user_view.person.banner, &context).await?; + let banner = proxy_image_link_opt_api(banner, &context).await?; + + let display_name = diesel_string_update(data.display_name.as_deref()); + let matrix_user_id = diesel_string_update(data.matrix_user_id.as_deref()); let email_deref = data.email.as_deref().map(str::to_lowercase); - let email = diesel_option_overwrite(email_deref.clone()); + let email = diesel_string_update(email_deref.as_deref()); if let Some(Some(email)) = &email { let previous_email = local_user_view.local_user.email.clone().unwrap_or_default(); // if email was changed, check that it is not taken and send verification mail - if &previous_email != email { - if LocalUser::is_email_taken(&mut context.pool(), email).await? 
{ - return Err(LemmyErrorType::EmailAlreadyExists)?; - } + if previous_email.deref() != email { + LocalUser::check_is_email_taken(&mut context.pool(), email).await?; send_verification_email( &local_user_view, email, @@ -53,7 +74,8 @@ pub async fn save_user_settings( } } - // When the site requires email, make sure email is not Some(None). IE, an overwrite to a None value + // When the site requires email, make sure email is not Some(None). IE, an overwrite to a None + // value if let Some(email) = &email { if email.is_none() && site_view.local_site.require_email_verification { Err(LemmyErrorType::EmailRequired)? @@ -78,7 +100,8 @@ pub async fn save_user_settings( let local_user_id = local_user_view.local_user.id; let person_id = local_user_view.person.id; let default_listing_type = data.default_listing_type; - let default_sort_type = data.default_sort_type; + let default_post_sort_type = data.default_post_sort_type; + let default_comment_sort_type = data.default_comment_sort_type; let person_form = PersonUpdateForm { display_name, @@ -107,10 +130,9 @@ pub async fn save_user_settings( send_notifications_to_email: data.send_notifications_to_email, show_nsfw: data.show_nsfw, blur_nsfw: data.blur_nsfw, - auto_expand: data.auto_expand, show_bot_accounts: data.show_bot_accounts, - show_scores: data.show_scores, - default_sort_type, + default_post_sort_type, + default_comment_sort_type, default_listing_type, theme: data.theme.clone(), interface_language: data.interface_language.clone(), @@ -120,14 +142,21 @@ pub async fn save_user_settings( enable_keyboard_navigation: data.enable_keyboard_navigation, enable_animated_images: data.enable_animated_images, enable_private_messages: data.enable_private_messages, + collapse_bot_comments: data.collapse_bot_comments, ..Default::default() }; - // Ignore errors, because 'no fields updated' will return an error. - // https://github.com/LemmyNet/lemmy/issues/4076 - LocalUser::update(&mut context.pool(), local_user_id, &local_user_form) - .await - .ok(); + LocalUser::update(&mut context.pool(), local_user_id, &local_user_form).await?; + + // Update the vote display modes + let vote_display_modes_form = LocalUserVoteDisplayModeUpdateForm { + score: data.show_scores, + upvotes: data.show_upvotes, + downvotes: data.show_downvotes, + upvote_percentage: data.show_upvote_percentage, + }; + LocalUserVoteDisplayMode::update(&mut context.pool(), local_user_id, &vote_display_modes_form) + .await?; Ok(Json(SuccessResponse::default())) } diff --git a/crates/api/src/local_user/update_totp.rs b/crates/api/src/local_user/update_totp.rs index 15833ae8a..c28ac7228 100644 --- a/crates/api/src/local_user/update_totp.rs +++ b/crates/api/src/local_user/update_totp.rs @@ -4,12 +4,9 @@ use lemmy_api_common::{ context::LemmyContext, person::{UpdateTotp, UpdateTotpResponse}, }; -use lemmy_db_schema::{ - source::local_user::{LocalUser, LocalUserUpdateForm}, - traits::Crud, -}; -use lemmy_db_views::structs::{LocalUserView, SiteView}; -use lemmy_utils::error::LemmyError; +use lemmy_db_schema::source::local_user::{LocalUser, LocalUserUpdateForm}; +use lemmy_db_views::structs::LocalUserView; +use lemmy_utils::error::LemmyResult; /// Enable or disable two-factor-authentication. The current setting is determined from /// [LocalUser.totp_2fa_enabled]. 
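Returning to the save_user_settings hunk above: the update forms use a double-`Option` convention, where `None` leaves a column untouched, `Some(None)` clears it, and `Some(Some(v))` overwrites it. That is why the handler rejects an email of `Some(None)` when the site requires an email address. A hypothetical stand-in showing what a helper like `diesel_string_update` is assumed to do:

// Hypothetical stand-in for diesel_string_update (not the real helper):
// map an optional incoming string onto the double-Option shape the
// diesel update forms expect.
fn string_update(incoming: Option<&str>) -> Option<Option<String>> {
  match incoming {
    // Field absent from the request: leave the column unchanged.
    None => None,
    // Empty string: clear the column (an overwrite to NULL).
    Some("") => Some(None),
    // Anything else: overwrite with the new value.
    Some(s) => Some(Some(s.to_string())),
  }
}

The same convention presumably applies to `diesel_url_update` for the avatar and banner fields, with URL validation added on top.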
@@ -24,13 +21,11 @@ pub async fn update_totp( data: Json, local_user_view: LocalUserView, context: Data, -) -> Result, LemmyError> { - let site_view = SiteView::read_local(&mut context.pool()).await?; - +) -> LemmyResult> { check_totp_2fa_valid( &local_user_view, &Some(data.totp_token.clone()), - &site_view.site.name, + &context.settings().hostname, )?; // toggle the 2fa setting diff --git a/crates/api/src/local_user/validate_auth.rs b/crates/api/src/local_user/validate_auth.rs index d95195dc9..36d31ff01 100644 --- a/crates/api/src/local_user/validate_auth.rs +++ b/crates/api/src/local_user/validate_auth.rs @@ -4,7 +4,7 @@ use actix_web::{ HttpRequest, }; use lemmy_api_common::{context::LemmyContext, SuccessResponse}; -use lemmy_utils::error::{LemmyError, LemmyErrorType}; +use lemmy_utils::error::{LemmyErrorType, LemmyResult}; /// Returns an error message if the auth token is invalid for any reason. Necessary because other /// endpoints silently treat any call with invalid auth as unauthenticated. @@ -12,7 +12,7 @@ use lemmy_utils::error::{LemmyError, LemmyErrorType}; pub async fn validate_auth( req: HttpRequest, context: Data, -) -> Result, LemmyError> { +) -> LemmyResult> { let jwt = read_auth_token(&req)?; if let Some(jwt) = jwt { local_user_view_from_jwt(&jwt, &context).await?; diff --git a/crates/api/src/local_user/verify_email.rs b/crates/api/src/local_user/verify_email.rs index 94ddb373a..4b6a8c928 100644 --- a/crates/api/src/local_user/verify_email.rs +++ b/crates/api/src/local_user/verify_email.rs @@ -5,17 +5,12 @@ use lemmy_api_common::{ utils::send_new_applicant_email_to_admins, SuccessResponse, }; -use lemmy_db_schema::{ - source::{ - email_verification::EmailVerification, - local_user::{LocalUser, LocalUserUpdateForm}, - person::Person, - }, - traits::Crud, - RegistrationMode, +use lemmy_db_schema::source::{ + email_verification::EmailVerification, + local_user::{LocalUser, LocalUserUpdateForm}, }; -use lemmy_db_views::structs::SiteView; -use lemmy_utils::error::{LemmyErrorExt, LemmyErrorType, LemmyResult}; +use lemmy_db_views::structs::{LocalUserView, SiteView}; +use lemmy_utils::error::LemmyResult; pub async fn verify_email( data: Json, @@ -23,9 +18,7 @@ pub async fn verify_email( ) -> LemmyResult> { let site_view = SiteView::read_local(&mut context.pool()).await?; let token = data.token.clone(); - let verification = EmailVerification::read_for_token(&mut context.pool(), &token) - .await - .with_lemmy_type(LemmyErrorType::TokenNotFound)?; + let verification = EmailVerification::read_for_token(&mut context.pool(), &token).await?; let form = LocalUserUpdateForm { // necessary in case this is a new signup @@ -36,17 +29,20 @@ pub async fn verify_email( }; let local_user_id = verification.local_user_id; - let local_user = LocalUser::update(&mut context.pool(), local_user_id, &form).await?; + LocalUser::update(&mut context.pool(), local_user_id, &form).await?; EmailVerification::delete_old_tokens_for_local_user(&mut context.pool(), local_user_id).await?; // send out notification about registration application to admins if enabled - if site_view.local_site.registration_mode == RegistrationMode::RequireApplication - && site_view.local_site.application_email_admins - { - let person = Person::read(&mut context.pool(), local_user.person_id).await?; - send_new_applicant_email_to_admins(&person.name, &mut context.pool(), context.settings()) - .await?; + if site_view.local_site.application_email_admins { + let local_user = LocalUserView::read(&mut context.pool(), local_user_id).await?; + 
+ send_new_applicant_email_to_admins( + &local_user.person.name, + &mut context.pool(), + context.settings(), + ) + .await?; } Ok(Json(SuccessResponse::default())) diff --git a/crates/api/src/post/feature.rs b/crates/api/src/post/feature.rs index 8c4b4978f..cb6e6c144 100644 --- a/crates/api/src/post/feature.rs +++ b/crates/api/src/post/feature.rs @@ -16,14 +16,14 @@ use lemmy_db_schema::{ PostFeatureType, }; use lemmy_db_views::structs::LocalUserView; -use lemmy_utils::error::LemmyError; +use lemmy_utils::error::LemmyResult; #[tracing::instrument(skip(context))] pub async fn feature_post( data: Json, context: Data, local_user_view: LocalUserView, -) -> Result, LemmyError> { +) -> LemmyResult> { let post_id = data.post_id; let orig_post = Post::read(&mut context.pool(), post_id).await?; @@ -70,11 +70,5 @@ pub async fn feature_post( ) .await?; - build_post_response( - &context, - orig_post.community_id, - &local_user_view.person, - post_id, - ) - .await + build_post_response(&context, orig_post.community_id, local_user_view, post_id).await } diff --git a/crates/api/src/post/get_link_metadata.rs b/crates/api/src/post/get_link_metadata.rs index 7ab6e9a92..e469b51c7 100644 --- a/crates/api/src/post/get_link_metadata.rs +++ b/crates/api/src/post/get_link_metadata.rs @@ -1,17 +1,25 @@ -use actix_web::web::{Data, Json}; +use actix_web::web::{Data, Json, Query}; use lemmy_api_common::{ context::LemmyContext, post::{GetSiteMetadata, GetSiteMetadataResponse}, - request::fetch_site_metadata, + request::fetch_link_metadata, }; -use lemmy_utils::error::LemmyError; +use lemmy_db_views::structs::LocalUserView; +use lemmy_utils::{ + error::{LemmyErrorExt, LemmyResult}, + LemmyErrorType, +}; +use url::Url; #[tracing::instrument(skip(context))] pub async fn get_link_metadata( - data: Json, + data: Query, context: Data, -) -> Result, LemmyError> { - let metadata = fetch_site_metadata(context.client(), &data.url).await?; + // Require an account for this API + _local_user_view: LocalUserView, +) -> LemmyResult> { + let url = Url::parse(&data.url).with_lemmy_type(LemmyErrorType::InvalidUrl)?; + let metadata = fetch_link_metadata(&url, &context).await?; Ok(Json(GetSiteMetadataResponse { metadata })) } diff --git a/crates/api/src/post/hide.rs b/crates/api/src/post/hide.rs new file mode 100644 index 000000000..f7c21ef31 --- /dev/null +++ b/crates/api/src/post/hide.rs @@ -0,0 +1,34 @@ +use actix_web::web::{Data, Json}; +use lemmy_api_common::{context::LemmyContext, post::HidePost, SuccessResponse}; +use lemmy_db_schema::source::post::PostHide; +use lemmy_db_views::structs::LocalUserView; +use lemmy_utils::error::{LemmyErrorExt, LemmyErrorType, LemmyResult, MAX_API_PARAM_ELEMENTS}; +use std::collections::HashSet; + +#[tracing::instrument(skip(context))] +pub async fn hide_post( + data: Json, + context: Data, + local_user_view: LocalUserView, +) -> LemmyResult> { + let post_ids = HashSet::from_iter(data.post_ids.clone()); + + if post_ids.len() > MAX_API_PARAM_ELEMENTS { + Err(LemmyErrorType::TooManyItems)?; + } + + let person_id = local_user_view.person.id; + + // Mark the post as hidden / unhidden + if data.hide { + PostHide::hide(&mut context.pool(), post_ids, person_id) + .await + .with_lemmy_type(LemmyErrorType::CouldntHidePost)?; + } else { + PostHide::unhide(&mut context.pool(), post_ids, person_id) + .await + .with_lemmy_type(LemmyErrorType::CouldntHidePost)?; + } + + Ok(Json(SuccessResponse::default())) +} diff --git a/crates/api/src/post/like.rs b/crates/api/src/post/like.rs index 751d1b9e5..c81d9630a 100644 
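The new hide_post handler above (and mark_post_as_read later in this patch) de-duplicates the incoming ids and caps the request size before touching the database. A self-contained sketch of that guard, using a stand-in constant and plain i32 ids since the real lemmy_utils::error::MAX_API_PARAM_ELEMENTS and PostId live in the lemmy crates:

use std::collections::HashSet;

// Stand-in for lemmy_utils::error::MAX_API_PARAM_ELEMENTS; the real value may differ.
const MAX_API_PARAM_ELEMENTS: usize = 1000;

fn dedupe_and_cap(post_ids: &[i32]) -> Result<HashSet<i32>, &'static str> {
  // De-duplicate first, so repeated ids don't count against the limit.
  let ids: HashSet<i32> = HashSet::from_iter(post_ids.iter().copied());
  if ids.len() > MAX_API_PARAM_ELEMENTS {
    return Err("too many items");
  }
  Ok(ids)
}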
--- a/crates/api/src/post/like.rs +++ b/crates/api/src/post/like.rs @@ -5,7 +5,13 @@ use lemmy_api_common::{ context::LemmyContext, post::{CreatePostLike, PostResponse}, send_activity::{ActivityChannel, SendActivityData}, - utils::{check_community_user_action, check_downvotes_enabled, mark_post_as_read}, + utils::{ + check_bot_account, + check_community_user_action, + check_local_vote_mode, + mark_post_as_read, + VoteItem, + }, }; use lemmy_db_schema::{ source::{ @@ -16,7 +22,7 @@ use lemmy_db_schema::{ traits::{Crud, Likeable}, }; use lemmy_db_views::structs::LocalUserView; -use lemmy_utils::error::{LemmyError, LemmyErrorExt, LemmyErrorType}; +use lemmy_utils::error::{LemmyErrorExt, LemmyErrorType, LemmyResult}; use std::ops::Deref; #[tracing::instrument(skip(context))] @@ -24,14 +30,21 @@ pub async fn like_post( data: Json, context: Data, local_user_view: LocalUserView, -) -> Result, LemmyError> { +) -> LemmyResult> { let local_site = LocalSite::read(&mut context.pool()).await?; + let post_id = data.post_id; - // Don't do a downvote if site has downvotes disabled - check_downvotes_enabled(data.score, &local_site)?; + check_local_vote_mode( + data.score, + VoteItem::Post(post_id), + &local_site, + local_user_view.person.id, + &mut context.pool(), + ) + .await?; + check_bot_account(&local_user_view.person)?; // Check for a community ban - let post_id = data.post_id; let post = Post::read(&mut context.pool(), post_id).await?; check_community_user_action( @@ -60,25 +73,20 @@ pub async fn like_post( .with_lemmy_type(LemmyErrorType::CouldntLikePost)?; } - // Mark the post as read mark_post_as_read(person_id, post_id, &mut context.pool()).await?; + let community = Community::read(&mut context.pool(), post.community_id).await?; + ActivityChannel::submit_activity( - SendActivityData::LikePostOrComment( - post.ap_id, - local_user_view.person.clone(), - Community::read(&mut context.pool(), post.community_id).await?, - data.score, - ), + SendActivityData::LikePostOrComment { + object_id: post.ap_id, + actor: local_user_view.person.clone(), + community, + score: data.score, + }, &context, ) .await?; - build_post_response( - context.deref(), - post.community_id, - &local_user_view.person, - post_id, - ) - .await + build_post_response(context.deref(), post.community_id, local_user_view, post_id).await } diff --git a/crates/api/src/post/list_post_likes.rs b/crates/api/src/post/list_post_likes.rs new file mode 100644 index 000000000..a9b302f2e --- /dev/null +++ b/crates/api/src/post/list_post_likes.rs @@ -0,0 +1,30 @@ +use actix_web::web::{Data, Json, Query}; +use lemmy_api_common::{ + context::LemmyContext, + post::{ListPostLikes, ListPostLikesResponse}, + utils::is_mod_or_admin, +}; +use lemmy_db_schema::{source::post::Post, traits::Crud}; +use lemmy_db_views::structs::{LocalUserView, VoteView}; +use lemmy_utils::error::LemmyResult; + +/// Lists likes for a post +#[tracing::instrument(skip(context))] +pub async fn list_post_likes( + data: Query, + context: Data, + local_user_view: LocalUserView, +) -> LemmyResult> { + let post = Post::read(&mut context.pool(), data.post_id).await?; + is_mod_or_admin( + &mut context.pool(), + &local_user_view.person, + post.community_id, + ) + .await?; + + let post_likes = + VoteView::list_for_post(&mut context.pool(), data.post_id, data.page, data.limit).await?; + + Ok(Json(ListPostLikesResponse { post_likes })) +} diff --git a/crates/api/src/post/lock.rs b/crates/api/src/post/lock.rs index b581f37a2..548947b78 100644 --- a/crates/api/src/post/lock.rs +++ 
b/crates/api/src/post/lock.rs @@ -15,14 +15,14 @@ use lemmy_db_schema::{ traits::Crud, }; use lemmy_db_views::structs::LocalUserView; -use lemmy_utils::error::LemmyError; +use lemmy_utils::error::LemmyResult; #[tracing::instrument(skip(context))] pub async fn lock_post( data: Json, context: Data, local_user_view: LocalUserView, -) -> Result, LemmyError> { +) -> LemmyResult> { let post_id = data.post_id; let orig_post = Post::read(&mut context.pool(), post_id).await?; @@ -61,11 +61,5 @@ pub async fn lock_post( ) .await?; - build_post_response( - &context, - orig_post.community_id, - &local_user_view.person, - post_id, - ) - .await + build_post_response(&context, orig_post.community_id, local_user_view, post_id).await } diff --git a/crates/api/src/post/mark_read.rs b/crates/api/src/post/mark_read.rs index a46e949fa..3e534675a 100644 --- a/crates/api/src/post/mark_read.rs +++ b/crates/api/src/post/mark_read.rs @@ -2,7 +2,7 @@ use actix_web::web::{Data, Json}; use lemmy_api_common::{context::LemmyContext, post::MarkPostAsRead, SuccessResponse}; use lemmy_db_schema::source::post::PostRead; use lemmy_db_views::structs::LocalUserView; -use lemmy_utils::error::{LemmyError, LemmyErrorExt, LemmyErrorType, MAX_API_PARAM_ELEMENTS}; +use lemmy_utils::error::{LemmyErrorExt, LemmyErrorType, LemmyResult, MAX_API_PARAM_ELEMENTS}; use std::collections::HashSet; #[tracing::instrument(skip(context))] @@ -10,15 +10,8 @@ pub async fn mark_post_as_read( data: Json, context: Data, local_user_view: LocalUserView, -) -> Result, LemmyError> { - let mut post_ids = HashSet::new(); - if let Some(post_ids_) = &data.post_ids { - post_ids.extend(post_ids_.iter().cloned()); - } - - if let Some(post_id) = data.post_id { - post_ids.insert(post_id); - } +) -> LemmyResult> { + let post_ids = HashSet::from_iter(data.post_ids.clone()); if post_ids.len() > MAX_API_PARAM_ELEMENTS { Err(LemmyErrorType::TooManyItems)?; diff --git a/crates/api/src/post/mod.rs b/crates/api/src/post/mod.rs index a3b84134f..7287010f7 100644 --- a/crates/api/src/post/mod.rs +++ b/crates/api/src/post/mod.rs @@ -1,6 +1,8 @@ pub mod feature; pub mod get_link_metadata; +pub mod hide; pub mod like; +pub mod list_post_likes; pub mod lock; pub mod mark_read; pub mod save; diff --git a/crates/api/src/post/save.rs b/crates/api/src/post/save.rs index 164840770..4549b62b1 100644 --- a/crates/api/src/post/save.rs +++ b/crates/api/src/post/save.rs @@ -9,14 +9,14 @@ use lemmy_db_schema::{ traits::Saveable, }; use lemmy_db_views::structs::{LocalUserView, PostView}; -use lemmy_utils::error::{LemmyError, LemmyErrorExt, LemmyErrorType}; +use lemmy_utils::error::{LemmyErrorExt, LemmyErrorType, LemmyResult}; #[tracing::instrument(skip(context))] pub async fn save_post( data: Json, context: Data, local_user_view: LocalUserView, -) -> Result, LemmyError> { +) -> LemmyResult> { let post_saved_form = PostSavedForm { post_id: data.post_id, person_id: local_user_view.person.id, @@ -34,9 +34,14 @@ pub async fn save_post( let post_id = data.post_id; let person_id = local_user_view.person.id; - let post_view = PostView::read(&mut context.pool(), post_id, Some(person_id), false).await?; + let post_view = PostView::read( + &mut context.pool(), + post_id, + Some(&local_user_view.local_user), + false, + ) + .await?; - // Mark the post as read mark_post_as_read(person_id, post_id, &mut context.pool()).await?; Ok(Json(PostResponse { post_view })) diff --git a/crates/api/src/post_report/create.rs b/crates/api/src/post_report/create.rs index e4ce2444a..590c9af40 100644 --- 
a/crates/api/src/post_report/create.rs +++ b/crates/api/src/post_report/create.rs @@ -5,7 +5,11 @@ use lemmy_api_common::{ context::LemmyContext, post::{CreatePostReport, PostReportResponse}, send_activity::{ActivityChannel, SendActivityData}, - utils::{check_community_user_action, send_new_report_email_to_admins}, + utils::{ + check_community_user_action, + check_post_deleted_or_removed, + send_new_report_email_to_admins, + }, }; use lemmy_db_schema::{ source::{ @@ -15,7 +19,7 @@ use lemmy_db_schema::{ traits::Reportable, }; use lemmy_db_views::structs::{LocalUserView, PostReportView, PostView}; -use lemmy_utils::error::{LemmyError, LemmyErrorExt, LemmyErrorType}; +use lemmy_utils::error::{LemmyErrorExt, LemmyErrorType, LemmyResult}; /// Creates a post report and notifies the moderators of the community #[tracing::instrument(skip(context))] @@ -23,7 +27,7 @@ pub async fn create_post_report( data: Json, context: Data, local_user_view: LocalUserView, -) -> Result, LemmyError> { +) -> LemmyResult> { let local_site = LocalSite::read(&mut context.pool()).await?; let reason = data.reason.trim().to_string(); @@ -40,6 +44,8 @@ pub async fn create_post_report( ) .await?; + check_post_deleted_or_removed(&post_view.post)?; + let report_form = PostReportForm { creator_id: person_id, post_id, @@ -67,12 +73,12 @@ pub async fn create_post_report( } ActivityChannel::submit_activity( - SendActivityData::CreateReport( - post_view.post.ap_id.inner().clone(), - local_user_view.person, - post_view.community, - data.reason.clone(), - ), + SendActivityData::CreateReport { + object_id: post_view.post.ap_id.inner().clone(), + actor: local_user_view.person, + community: post_view.community, + reason: data.reason.clone(), + }, &context, ) .await?; diff --git a/crates/api/src/post_report/list.rs b/crates/api/src/post_report/list.rs index 420052e35..7d1d50b0b 100644 --- a/crates/api/src/post_report/list.rs +++ b/crates/api/src/post_report/list.rs @@ -2,10 +2,10 @@ use actix_web::web::{Data, Json, Query}; use lemmy_api_common::{ context::LemmyContext, post::{ListPostReports, ListPostReportsResponse}, - utils::check_community_mod_action_opt, + utils::check_community_mod_of_any_or_admin_action, }; use lemmy_db_views::{post_report_view::PostReportQuery, structs::LocalUserView}; -use lemmy_utils::error::LemmyError; +use lemmy_utils::error::LemmyResult; /// Lists post reports for a community if an id is supplied /// or returns all post reports for communities a user moderates @@ -14,16 +14,18 @@ pub async fn list_post_reports( data: Query, context: Data, local_user_view: LocalUserView, -) -> Result, LemmyError> { +) -> LemmyResult> { let community_id = data.community_id; + let post_id = data.post_id; let unresolved_only = data.unresolved_only.unwrap_or_default(); - check_community_mod_action_opt(&local_user_view, community_id, &mut context.pool()).await?; + check_community_mod_of_any_or_admin_action(&local_user_view, &mut context.pool()).await?; let page = data.page; let limit = data.limit; let post_reports = PostReportQuery { community_id, + post_id, unresolved_only, page, limit, diff --git a/crates/api/src/post_report/resolve.rs b/crates/api/src/post_report/resolve.rs index 3604055fd..a3cb85c6c 100644 --- a/crates/api/src/post_report/resolve.rs +++ b/crates/api/src/post_report/resolve.rs @@ -6,7 +6,7 @@ use lemmy_api_common::{ }; use lemmy_db_schema::{source::post_report::PostReport, traits::Reportable}; use lemmy_db_views::structs::{LocalUserView, PostReportView}; -use lemmy_utils::error::{LemmyError, LemmyErrorExt, 
LemmyErrorType}; +use lemmy_utils::error::{LemmyErrorExt, LemmyErrorType, LemmyResult}; /// Resolves or unresolves a post report and notifies the moderators of the community #[tracing::instrument(skip(context))] @@ -14,7 +14,7 @@ pub async fn resolve_post_report( data: Json, context: Data, local_user_view: LocalUserView, -) -> Result, LemmyError> { +) -> LemmyResult> { let report_id = data.report_id; let person_id = local_user_view.person.id; let report = PostReportView::read(&mut context.pool(), report_id, person_id).await?; @@ -23,7 +23,7 @@ pub async fn resolve_post_report( check_community_mod_action( &local_user_view.person, report.community.id, - false, + true, &mut context.pool(), ) .await?; diff --git a/crates/api/src/private_message/mark_read.rs b/crates/api/src/private_message/mark_read.rs index 6b089c0ab..7c213464b 100644 --- a/crates/api/src/private_message/mark_read.rs +++ b/crates/api/src/private_message/mark_read.rs @@ -8,14 +8,14 @@ use lemmy_db_schema::{ traits::Crud, }; use lemmy_db_views::structs::{LocalUserView, PrivateMessageView}; -use lemmy_utils::error::{LemmyError, LemmyErrorExt, LemmyErrorType}; +use lemmy_utils::error::{LemmyErrorExt, LemmyErrorType, LemmyResult}; #[tracing::instrument(skip(context))] pub async fn mark_pm_as_read( data: Json, context: Data, local_user_view: LocalUserView, -) -> Result, LemmyError> { +) -> LemmyResult> { // Checking permissions let private_message_id = data.private_message_id; let orig_private_message = PrivateMessage::read(&mut context.pool(), private_message_id).await?; diff --git a/crates/api/src/private_message_report/create.rs b/crates/api/src/private_message_report/create.rs index 75620bf8b..de8ca390f 100644 --- a/crates/api/src/private_message_report/create.rs +++ b/crates/api/src/private_message_report/create.rs @@ -14,14 +14,14 @@ use lemmy_db_schema::{ traits::{Crud, Reportable}, }; use lemmy_db_views::structs::{LocalUserView, PrivateMessageReportView}; -use lemmy_utils::error::{LemmyError, LemmyErrorExt, LemmyErrorType}; +use lemmy_utils::error::{LemmyErrorExt, LemmyErrorType, LemmyResult}; #[tracing::instrument(skip(context))] pub async fn create_pm_report( data: Json, context: Data, local_user_view: LocalUserView, -) -> Result, LemmyError> { +) -> LemmyResult> { let local_site = LocalSite::read(&mut context.pool()).await?; let reason = data.reason.trim().to_string(); @@ -31,6 +31,11 @@ pub async fn create_pm_report( let private_message_id = data.private_message_id; let private_message = PrivateMessage::read(&mut context.pool(), private_message_id).await?; + // Make sure that only the recipient of the private message can create a report + if person_id != private_message.recipient_id { + Err(LemmyErrorType::CouldntCreateReport)? 
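The SendActivityData call sites rewritten above (LikePostOrComment in like.rs, CreateReport in post_report/create.rs) move from positional tuple variants to struct variants with named fields, which makes the submit_activity calls self-documenting. A rough sketch of the shape those call sites imply; field types are inferred from the arguments shown, not taken from the real enum in lemmy_api_common::send_activity:

// Shape implied by the call sites in this patch (sketch only).
enum SendActivityData {
  LikePostOrComment {
    object_id: DbUrl,   // post.ap_id
    actor: Person,      // local_user_view.person
    community: Community,
    score: i16,         // data.score
  },
  CreateReport {
    object_id: Url,     // post_view.post.ap_id.inner().clone()
    actor: Person,
    community: Community,
    reason: String,
  },
  // ... the real enum has many more variants ...
}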
+ } + let report_form = PrivateMessageReportForm { creator_id: person_id, private_message_id, diff --git a/crates/api/src/private_message_report/list.rs b/crates/api/src/private_message_report/list.rs index 2dc3e6efc..79ef53e1c 100644 --- a/crates/api/src/private_message_report/list.rs +++ b/crates/api/src/private_message_report/list.rs @@ -8,14 +8,14 @@ use lemmy_db_views::{ private_message_report_view::PrivateMessageReportQuery, structs::LocalUserView, }; -use lemmy_utils::error::LemmyError; +use lemmy_utils::error::LemmyResult; #[tracing::instrument(skip(context))] pub async fn list_pm_reports( data: Query, context: Data, local_user_view: LocalUserView, -) -> Result, LemmyError> { +) -> LemmyResult> { is_admin(&local_user_view)?; let unresolved_only = data.unresolved_only.unwrap_or_default(); diff --git a/crates/api/src/private_message_report/resolve.rs b/crates/api/src/private_message_report/resolve.rs index 202fdcd29..7d821a60c 100644 --- a/crates/api/src/private_message_report/resolve.rs +++ b/crates/api/src/private_message_report/resolve.rs @@ -6,14 +6,14 @@ use lemmy_api_common::{ }; use lemmy_db_schema::{source::private_message_report::PrivateMessageReport, traits::Reportable}; use lemmy_db_views::structs::{LocalUserView, PrivateMessageReportView}; -use lemmy_utils::error::{LemmyError, LemmyErrorExt, LemmyErrorType}; +use lemmy_utils::error::{LemmyErrorExt, LemmyErrorType, LemmyResult}; #[tracing::instrument(skip(context))] pub async fn resolve_pm_report( data: Json, context: Data, local_user_view: LocalUserView, -) -> Result, LemmyError> { +) -> LemmyResult> { is_admin(&local_user_view)?; let report_id = data.report_id; diff --git a/crates/api/src/site/block.rs b/crates/api/src/site/block.rs index be48e8ce8..823dda612 100644 --- a/crates/api/src/site/block.rs +++ b/crates/api/src/site/block.rs @@ -9,16 +9,20 @@ use lemmy_db_schema::{ traits::Blockable, }; use lemmy_db_views::structs::LocalUserView; -use lemmy_utils::error::{LemmyError, LemmyErrorExt, LemmyErrorType}; +use lemmy_utils::error::{LemmyErrorExt, LemmyErrorType, LemmyResult}; #[tracing::instrument(skip(context))] pub async fn block_instance( data: Json, local_user_view: LocalUserView, context: Data, -) -> Result, LemmyError> { +) -> LemmyResult> { let instance_id = data.instance_id; let person_id = local_user_view.person.id; + if local_user_view.person.instance_id == instance_id { + return Err(LemmyErrorType::CantBlockLocalInstance)?; + } + let instance_block_form = InstanceBlockForm { person_id, instance_id, diff --git a/crates/api/src/site/federated_instances.rs b/crates/api/src/site/federated_instances.rs index 8f224b2eb..5943cfd9a 100644 --- a/crates/api/src/site/federated_instances.rs +++ b/crates/api/src/site/federated_instances.rs @@ -5,12 +5,12 @@ use lemmy_api_common::{ utils::build_federated_instances, }; use lemmy_db_views::structs::SiteView; -use lemmy_utils::error::LemmyError; +use lemmy_utils::error::LemmyResult; #[tracing::instrument(skip(context))] pub async fn get_federated_instances( context: Data, -) -> Result, LemmyError> { +) -> LemmyResult> { let site_view = SiteView::read_local(&mut context.pool()).await?; let federated_instances = build_federated_instances(&site_view.local_site, &mut context.pool()).await?; diff --git a/crates/api/src/site/leave_admin.rs b/crates/api/src/site/leave_admin.rs index f25747ef3..97ad7e2e5 100644 --- a/crates/api/src/site/leave_admin.rs +++ b/crates/api/src/site/leave_admin.rs @@ -4,24 +4,26 @@ use lemmy_db_schema::{ source::{ actor_language::SiteLanguage, 
language::Language, + local_site_url_blocklist::LocalSiteUrlBlocklist, local_user::{LocalUser, LocalUserUpdateForm}, moderator::{ModAdd, ModAddForm}, + oauth_provider::OAuthProvider, tagline::Tagline, }, traits::Crud, }; -use lemmy_db_views::structs::{CustomEmojiView, LocalUserView, SiteView}; +use lemmy_db_views::structs::{LocalUserView, SiteView}; use lemmy_db_views_actor::structs::PersonView; use lemmy_utils::{ - error::{LemmyError, LemmyErrorType}, - version, + error::{LemmyErrorType, LemmyResult}, + VERSION, }; #[tracing::instrument(skip(context))] pub async fn leave_admin( context: Data, local_user_view: LocalUserView, -) -> Result, LemmyError> { +) -> LemmyResult> { is_admin(&local_user_view)?; // Make sure there isn't just one admin (so if one leaves, there will still be one left) @@ -35,6 +37,9 @@ pub async fn leave_admin( local_user_view.local_user.id, &LocalUserUpdateForm { admin: Some(false), + // Necessary because admins can bypass the registration applications (if they're turned on) + // but then won't be able to log in because they haven't been approved. + accepted_application: Some(true), ..Default::default() }, ) @@ -56,18 +61,22 @@ pub async fn leave_admin( let all_languages = Language::read_all(&mut context.pool()).await?; let discussion_languages = SiteLanguage::read_local_raw(&mut context.pool()).await?; - let taglines = Tagline::get_all(&mut context.pool(), site_view.local_site.id).await?; - let custom_emojis = - CustomEmojiView::get_all(&mut context.pool(), site_view.local_site.id).await?; + let oauth_providers = OAuthProvider::get_all_public(&mut context.pool()).await?; + let blocked_urls = LocalSiteUrlBlocklist::get_all(&mut context.pool()).await?; + let tagline = Tagline::get_random(&mut context.pool()).await.ok(); Ok(Json(GetSiteResponse { site_view, admins, - version: version::VERSION.to_string(), + version: VERSION.to_string(), my_user: None, all_languages, discussion_languages, - taglines, - custom_emojis, + oauth_providers: Some(oauth_providers), + admin_oauth_providers: None, + blocked_urls, + tagline, + taglines: vec![], + custom_emojis: vec![], })) } diff --git a/crates/api/src/site/list_all_media.rs b/crates/api/src/site/list_all_media.rs new file mode 100644 index 000000000..4d8d2dc2a --- /dev/null +++ b/crates/api/src/site/list_all_media.rs @@ -0,0 +1,23 @@ +use actix_web::web::{Data, Json, Query}; +use lemmy_api_common::{ + context::LemmyContext, + person::{ListMedia, ListMediaResponse}, + utils::is_admin, +}; +use lemmy_db_views::structs::{LocalImageView, LocalUserView}; +use lemmy_utils::error::LemmyResult; + +#[tracing::instrument(skip(context))] +pub async fn list_all_media( + data: Query, + context: Data, + local_user_view: LocalUserView, +) -> LemmyResult> { + // Only let admins view all media + is_admin(&local_user_view)?; + + let page = data.page; + let limit = data.limit; + let images = LocalImageView::get_all(&mut context.pool(), page, limit).await?; + Ok(Json(ListMediaResponse { images })) +} diff --git a/crates/api/src/site/mod.rs b/crates/api/src/site/mod.rs index d63c77ad9..f18dea3d0 100644 --- a/crates/api/src/site/mod.rs +++ b/crates/api/src/site/mod.rs @@ -1,6 +1,7 @@ pub mod block; pub mod federated_instances; pub mod leave_admin; +pub mod list_all_media; pub mod mod_log; pub mod purge; pub mod registration_applications; diff --git a/crates/api/src/site/mod_log.rs b/crates/api/src/site/mod_log.rs index 133cce8d8..8f5538566 100644 --- a/crates/api/src/site/mod_log.rs +++ b/crates/api/src/site/mod_log.rs @@ -2,7 +2,7 @@ use 
actix_web::web::{Data, Json, Query}; use lemmy_api_common::{ context::LemmyContext, site::{GetModlog, GetModlogResponse}, - utils::{check_community_mod_action_opt, check_private_instance, is_admin}, + utils::{check_community_mod_of_any_or_admin_action, check_private_instance}, }; use lemmy_db_schema::{source::local_site::LocalSite, ModlogActionType}; use lemmy_db_views::structs::LocalUserView; @@ -24,7 +24,7 @@ use lemmy_db_views_moderator::structs::{ ModTransferCommunityView, ModlogListParams, }; -use lemmy_utils::error::LemmyError; +use lemmy_utils::error::LemmyResult; use ModlogActionType::*; #[tracing::instrument(skip(context))] @@ -32,7 +32,7 @@ pub async fn get_mod_log( data: Query, context: Data, local_user_view: Option, -) -> Result, LemmyError> { +) -> LemmyResult> { let local_site = LocalSite::read(&mut context.pool()).await?; check_private_instance(&local_user_view, &local_site)?; @@ -41,11 +41,9 @@ pub async fn get_mod_log( let community_id = data.community_id; let is_mod_or_admin = if let Some(local_user_view) = local_user_view { - let is_mod = community_id.is_some() - && check_community_mod_action_opt(&local_user_view, community_id, &mut context.pool()) - .await - .is_ok(); - is_mod || is_admin(&local_user_view).is_ok() + check_community_mod_of_any_or_admin_action(&local_user_view, &mut context.pool()) + .await + .is_ok() } else { false }; @@ -57,10 +55,15 @@ pub async fn get_mod_log( data.mod_person_id }; let other_person_id = data.other_person_id; + let post_id = data.post_id; + let comment_id = data.comment_id; + let params = ModlogListParams { community_id, mod_person_id, other_person_id, + post_id, + comment_id, page: data.page, limit: data.limit, hide_modlog_names, diff --git a/crates/api/src/site/purge/comment.rs b/crates/api/src/site/purge/comment.rs index aa55dd3c9..b21ffbc80 100644 --- a/crates/api/src/site/purge/comment.rs +++ b/crates/api/src/site/purge/comment.rs @@ -1,6 +1,8 @@ -use actix_web::web::{Data, Json}; +use activitypub_federation::config::Data; +use actix_web::web::Json; use lemmy_api_common::{ context::LemmyContext, + send_activity::{ActivityChannel, SendActivityData}, site::PurgeComment, utils::is_admin, SuccessResponse, @@ -8,28 +10,42 @@ use lemmy_api_common::{ use lemmy_db_schema::{ source::{ comment::Comment, + local_user::LocalUser, moderator::{AdminPurgeComment, AdminPurgeCommentForm}, }, traits::Crud, }; -use lemmy_db_views::structs::LocalUserView; -use lemmy_utils::error::LemmyError; +use lemmy_db_views::structs::{CommentView, LocalUserView}; +use lemmy_utils::error::LemmyResult; #[tracing::instrument(skip(context))] pub async fn purge_comment( data: Json, context: Data, local_user_view: LocalUserView, -) -> Result, LemmyError> { +) -> LemmyResult> { // Only let admin purge an item is_admin(&local_user_view)?; let comment_id = data.comment_id; - // Read the comment to get the post_id - let comment = Comment::read(&mut context.pool(), comment_id).await?; + // Read the comment to get the post_id and community + let comment_view = CommentView::read( + &mut context.pool(), + comment_id, + Some(&local_user_view.local_user), + ) + .await?; - let post_id = comment.post_id; + // Also check that you're a higher admin + LocalUser::is_higher_admin_check( + &mut context.pool(), + local_user_view.person.id, + vec![comment_view.creator.id], + ) + .await?; + + let post_id = comment_view.comment.post_id; // TODO read comments for pictrs images and purge them @@ -41,8 +57,18 @@ pub async fn purge_comment( reason: data.reason.clone(), post_id, }; - 
AdminPurgeComment::create(&mut context.pool(), &form).await?; + ActivityChannel::submit_activity( + SendActivityData::RemoveComment { + comment: comment_view.comment, + moderator: local_user_view.person.clone(), + community: comment_view.community, + reason: data.reason.clone(), + }, + &context, + ) + .await?; + Ok(Json(SuccessResponse::default())) } diff --git a/crates/api/src/site/purge/community.rs b/crates/api/src/site/purge/community.rs index 6b307a06e..bf06bd529 100644 --- a/crates/api/src/site/purge/community.rs +++ b/crates/api/src/site/purge/community.rs @@ -1,54 +1,82 @@ -use actix_web::web::{Data, Json}; +use activitypub_federation::config::Data; +use actix_web::web::Json; use lemmy_api_common::{ context::LemmyContext, request::purge_image_from_pictrs, + send_activity::{ActivityChannel, SendActivityData}, site::PurgeCommunity, utils::{is_admin, purge_image_posts_for_community}, SuccessResponse, }; use lemmy_db_schema::{ + newtypes::PersonId, source::{ community::Community, + local_user::LocalUser, moderator::{AdminPurgeCommunity, AdminPurgeCommunityForm}, }, traits::Crud, }; use lemmy_db_views::structs::LocalUserView; -use lemmy_utils::error::LemmyError; +use lemmy_db_views_actor::structs::CommunityModeratorView; +use lemmy_utils::error::LemmyResult; #[tracing::instrument(skip(context))] pub async fn purge_community( data: Json, context: Data, local_user_view: LocalUserView, -) -> Result, LemmyError> { +) -> LemmyResult> { // Only let admin purge an item is_admin(&local_user_view)?; - let community_id = data.community_id; - // Read the community to get its images - let community = Community::read(&mut context.pool(), community_id).await?; + let community = Community::read(&mut context.pool(), data.community_id).await?; - if let Some(banner) = community.banner { - purge_image_from_pictrs(&banner, &context).await.ok(); + // Also check that you're a higher admin than all the mods + let community_mod_person_ids = + CommunityModeratorView::for_community(&mut context.pool(), community.id) + .await? 
+ .iter() + .map(|cmv| cmv.moderator.id) + .collect::>(); + + LocalUser::is_higher_admin_check( + &mut context.pool(), + local_user_view.person.id, + community_mod_person_ids, + ) + .await?; + + if let Some(banner) = &community.banner { + purge_image_from_pictrs(banner, &context).await.ok(); } - if let Some(icon) = community.icon { - purge_image_from_pictrs(&icon, &context).await.ok(); + if let Some(icon) = &community.icon { + purge_image_from_pictrs(icon, &context).await.ok(); } - purge_image_posts_for_community(community_id, &context).await?; + purge_image_posts_for_community(data.community_id, &context).await?; - Community::delete(&mut context.pool(), community_id).await?; + Community::delete(&mut context.pool(), data.community_id).await?; // Mod tables let form = AdminPurgeCommunityForm { admin_person_id: local_user_view.person.id, reason: data.reason.clone(), }; - AdminPurgeCommunity::create(&mut context.pool(), &form).await?; + ActivityChannel::submit_activity( + SendActivityData::RemoveCommunity { + moderator: local_user_view.person.clone(), + community, + reason: data.reason.clone(), + removed: true, + }, + &context, + ) + .await?; + Ok(Json(SuccessResponse::default())) } diff --git a/crates/api/src/site/purge/person.rs b/crates/api/src/site/purge/person.rs index c59e06931..7ab573cbc 100644 --- a/crates/api/src/site/purge/person.rs +++ b/crates/api/src/site/purge/person.rs @@ -1,53 +1,87 @@ -use actix_web::web::{Data, Json}; +use crate::ban_nonlocal_user_from_local_communities; +use activitypub_federation::config::Data; +use actix_web::web::Json; use lemmy_api_common::{ context::LemmyContext, - request::delete_image_from_pictrs, + send_activity::{ActivityChannel, SendActivityData}, site::PurgePerson, - utils::is_admin, + utils::{is_admin, purge_user_account}, SuccessResponse, }; use lemmy_db_schema::{ source::{ - image_upload::ImageUpload, + local_user::LocalUser, moderator::{AdminPurgePerson, AdminPurgePersonForm}, - person::Person, + person::{Person, PersonUpdateForm}, }, traits::Crud, }; use lemmy_db_views::structs::LocalUserView; -use lemmy_utils::error::LemmyError; +use lemmy_utils::error::LemmyResult; #[tracing::instrument(skip(context))] pub async fn purge_person( data: Json, context: Data, local_user_view: LocalUserView, -) -> Result, LemmyError> { +) -> LemmyResult> { // Only let admin purge an item is_admin(&local_user_view)?; - // Read the person to get their images - let person_id = data.person_id; + // Also check that you're a higher admin + LocalUser::is_higher_admin_check( + &mut context.pool(), + local_user_view.person.id, + vec![data.person_id], + ) + .await?; - let local_user = LocalUserView::read_person(&mut context.pool(), person_id).await?; - let pictrs_uploads = - ImageUpload::get_all_by_local_user_id(&mut context.pool(), &local_user.local_user.id).await?; + let person = Person::read(&mut context.pool(), data.person_id).await?; - for upload in pictrs_uploads { - delete_image_from_pictrs(&upload.pictrs_alias, &upload.pictrs_delete_token, &context) - .await - .ok(); - } + ban_nonlocal_user_from_local_communities( + &local_user_view, + &person, + true, + &data.reason, + &Some(true), + &None, + &context, + ) + .await?; - Person::delete(&mut context.pool(), person_id).await?; + // Clear profile data. + purge_user_account(data.person_id, &context).await?; + + // Keep person record, but mark as banned to prevent login or refetching from home instance. 
+ let person = Person::update( + &mut context.pool(), + data.person_id, + &PersonUpdateForm { + banned: Some(true), + ..Default::default() + }, + ) + .await?; // Mod tables let form = AdminPurgePersonForm { admin_person_id: local_user_view.person.id, reason: data.reason.clone(), }; - AdminPurgePerson::create(&mut context.pool(), &form).await?; + ActivityChannel::submit_activity( + SendActivityData::BanFromSite { + moderator: local_user_view.person, + banned_user: person, + reason: data.reason.clone(), + remove_or_restore_data: Some(true), + ban: true, + expires: None, + }, + &context, + ) + .await?; + Ok(Json(SuccessResponse::default())) } diff --git a/crates/api/src/site/purge/post.rs b/crates/api/src/site/purge/post.rs index 68ef76001..d2cacdae1 100644 --- a/crates/api/src/site/purge/post.rs +++ b/crates/api/src/site/purge/post.rs @@ -1,56 +1,73 @@ -use actix_web::web::{Data, Json}; +use activitypub_federation::config::Data; +use actix_web::web::Json; use lemmy_api_common::{ context::LemmyContext, request::purge_image_from_pictrs, + send_activity::{ActivityChannel, SendActivityData}, site::PurgePost, utils::is_admin, SuccessResponse, }; use lemmy_db_schema::{ source::{ + local_user::LocalUser, moderator::{AdminPurgePost, AdminPurgePostForm}, post::Post, }, traits::Crud, }; use lemmy_db_views::structs::LocalUserView; -use lemmy_utils::error::LemmyError; +use lemmy_utils::error::LemmyResult; #[tracing::instrument(skip(context))] pub async fn purge_post( data: Json, context: Data, local_user_view: LocalUserView, -) -> Result, LemmyError> { +) -> LemmyResult> { // Only let admin purge an item is_admin(&local_user_view)?; - let post_id = data.post_id; - // Read the post to get the community_id - let post = Post::read(&mut context.pool(), post_id).await?; + let post = Post::read(&mut context.pool(), data.post_id).await?; + + // Also check that you're a higher admin + LocalUser::is_higher_admin_check( + &mut context.pool(), + local_user_view.person.id, + vec![post.creator_id], + ) + .await?; // Purge image - if let Some(url) = post.url { - purge_image_from_pictrs(&url, &context).await.ok(); + if let Some(url) = &post.url { + purge_image_from_pictrs(url, &context).await.ok(); } // Purge thumbnail - if let Some(thumbnail_url) = post.thumbnail_url { - purge_image_from_pictrs(&thumbnail_url, &context).await.ok(); + if let Some(thumbnail_url) = &post.thumbnail_url { + purge_image_from_pictrs(thumbnail_url, &context).await.ok(); } - let community_id = post.community_id; - - Post::delete(&mut context.pool(), post_id).await?; + Post::delete(&mut context.pool(), data.post_id).await?; // Mod tables let form = AdminPurgePostForm { admin_person_id: local_user_view.person.id, reason: data.reason.clone(), - community_id, + community_id: post.community_id, }; - AdminPurgePost::create(&mut context.pool(), &form).await?; + ActivityChannel::submit_activity( + SendActivityData::RemovePost { + post, + moderator: local_user_view.person.clone(), + reason: data.reason.clone(), + removed: true, + }, + &context, + ) + .await?; + Ok(Json(SuccessResponse::default())) } diff --git a/crates/api/src/site/registration_applications/approve.rs b/crates/api/src/site/registration_applications/approve.rs index 036a60e00..b8cd6c0ea 100644 --- a/crates/api/src/site/registration_applications/approve.rs +++ b/crates/api/src/site/registration_applications/approve.rs @@ -1,4 +1,5 @@ -use actix_web::web::{Data, Json}; +use activitypub_federation::config::Data; +use actix_web::web::Json; use lemmy_api_common::{ context::LemmyContext, 
site::{ApproveRegistrationApplication, RegistrationApplicationResponse}, @@ -10,48 +11,60 @@ use lemmy_db_schema::{ registration_application::{RegistrationApplication, RegistrationApplicationUpdateForm}, }, traits::Crud, - utils::diesel_option_overwrite, + utils::{diesel_string_update, get_conn}, }; use lemmy_db_views::structs::{LocalUserView, RegistrationApplicationView}; -use lemmy_utils::error::LemmyError; +use lemmy_utils::error::{LemmyError, LemmyResult}; pub async fn approve_registration_application( data: Json, context: Data, local_user_view: LocalUserView, -) -> Result, LemmyError> { +) -> LemmyResult> { let app_id = data.id; // Only let admins do this is_admin(&local_user_view)?; - // Update the registration with reason, admin_id - let deny_reason = diesel_option_overwrite(data.deny_reason.clone()); - let app_form = RegistrationApplicationUpdateForm { - admin_id: Some(Some(local_user_view.person.id)), - deny_reason, - }; + let pool = &mut context.pool(); + let conn = &mut get_conn(pool).await?; + let tx_data = data.clone(); + let approved_user_id = conn + .build_transaction() + .run(|conn| { + Box::pin(async move { + // Update the registration with reason, admin_id + let deny_reason = diesel_string_update(tx_data.deny_reason.as_deref()); + let app_form = RegistrationApplicationUpdateForm { + admin_id: Some(Some(local_user_view.person.id)), + deny_reason, + }; - let registration_application = - RegistrationApplication::update(&mut context.pool(), app_id, &app_form).await?; + let registration_application = + RegistrationApplication::update(&mut conn.into(), app_id, &app_form).await?; - // Update the local_user row - let local_user_form = LocalUserUpdateForm { - accepted_application: Some(data.approve), - ..Default::default() - }; + // Update the local_user row + let local_user_form = LocalUserUpdateForm { + accepted_application: Some(tx_data.approve), + ..Default::default() + }; - let approved_user_id = registration_application.local_user_id; - LocalUser::update(&mut context.pool(), approved_user_id, &local_user_form).await?; + let approved_user_id = registration_application.local_user_id; + LocalUser::update(&mut conn.into(), approved_user_id, &local_user_form).await?; + + Ok::<_, LemmyError>(approved_user_id) + }) as _ + }) + .await?; if data.approve { let approved_local_user_view = LocalUserView::read(&mut context.pool(), approved_user_id).await?; - if approved_local_user_view.local_user.email.is_some() { + // Email sending may fail, but this won't revert the application approval send_application_approved_email(&approved_local_user_view, context.settings()).await?; } - } + }; // Read the view let registration_application = diff --git a/crates/api/src/site/registration_applications/get.rs b/crates/api/src/site/registration_applications/get.rs new file mode 100644 index 000000000..23c6fb4d0 --- /dev/null +++ b/crates/api/src/site/registration_applications/get.rs @@ -0,0 +1,26 @@ +use actix_web::web::{Data, Json, Query}; +use lemmy_api_common::{ + context::LemmyContext, + site::{GetRegistrationApplication, RegistrationApplicationResponse}, + utils::is_admin, +}; +use lemmy_db_views::structs::{LocalUserView, RegistrationApplicationView}; +use lemmy_utils::error::LemmyResult; + +/// Lists registration applications, filterable by undenied only. 
+pub async fn get_registration_application( + data: Query, + context: Data, + local_user_view: LocalUserView, +) -> LemmyResult> { + // Make sure user is an admin + is_admin(&local_user_view)?; + + // Read the view + let registration_application = + RegistrationApplicationView::read_by_person(&mut context.pool(), data.person_id).await?; + + Ok(Json(RegistrationApplicationResponse { + registration_application, + })) +} diff --git a/crates/api/src/site/registration_applications/list.rs b/crates/api/src/site/registration_applications/list.rs index 30ce9aaf2..877e83796 100644 --- a/crates/api/src/site/registration_applications/list.rs +++ b/crates/api/src/site/registration_applications/list.rs @@ -1,4 +1,5 @@ -use actix_web::web::{Data, Json, Query}; +use activitypub_federation::config::Data; +use actix_web::web::{Json, Query}; use lemmy_api_common::{ context::LemmyContext, site::{ListRegistrationApplications, ListRegistrationApplicationsResponse}, @@ -9,14 +10,14 @@ use lemmy_db_views::{ registration_application_view::RegistrationApplicationQuery, structs::LocalUserView, }; -use lemmy_utils::error::LemmyError; +use lemmy_utils::error::LemmyResult; /// Lists registration applications, filterable by undenied only. pub async fn list_registration_applications( data: Query, context: Data, local_user_view: LocalUserView, -) -> Result, LemmyError> { +) -> LemmyResult> { let local_site = LocalSite::read(&mut context.pool()).await?; // Make sure user is an admin diff --git a/crates/api/src/site/registration_applications/mod.rs b/crates/api/src/site/registration_applications/mod.rs index e5082615a..c9a63cdef 100644 --- a/crates/api/src/site/registration_applications/mod.rs +++ b/crates/api/src/site/registration_applications/mod.rs @@ -1,3 +1,6 @@ pub mod approve; +pub mod get; pub mod list; +#[cfg(test)] +mod tests; pub mod unread_count; diff --git a/crates/api/src/site/registration_applications/tests.rs b/crates/api/src/site/registration_applications/tests.rs new file mode 100644 index 000000000..022cbf236 --- /dev/null +++ b/crates/api/src/site/registration_applications/tests.rs @@ -0,0 +1,414 @@ +use crate::site::registration_applications::{ + approve::approve_registration_application, + list::list_registration_applications, + unread_count::get_unread_registration_application_count, +}; +use activitypub_federation::config::Data; +use actix_web::web::{Json, Query}; +use lemmy_api_common::{ + context::LemmyContext, + site::{ + ApproveRegistrationApplication, + EditSite, + GetUnreadRegistrationApplicationCountResponse, + ListRegistrationApplicationsResponse, + }, +}; +use lemmy_api_crud::site::update::update_site; +use lemmy_db_schema::{ + newtypes::InstanceId, + source::{ + instance::Instance, + local_site::{LocalSite, LocalSiteInsertForm}, + local_site_rate_limit::{LocalSiteRateLimit, LocalSiteRateLimitInsertForm}, + local_user::{LocalUser, LocalUserInsertForm, LocalUserUpdateForm}, + person::{Person, PersonInsertForm}, + registration_application::{RegistrationApplication, RegistrationApplicationInsertForm}, + site::{Site, SiteInsertForm}, + }, + traits::Crud, + utils::DbPool, + RegistrationMode, +}; +use lemmy_db_views::structs::LocalUserView; +use lemmy_utils::{error::LemmyResult, LemmyErrorType, CACHE_DURATION_API}; +use serial_test::serial; + +async fn create_test_site(context: &Data) -> LemmyResult<(Instance, LocalUserView)> { + let pool = &mut context.pool(); + + let inserted_instance = Instance::read_or_create(pool, "my_domain.tld".to_string()).await?; + + let admin_person = Person::create( + 
pool, + &PersonInsertForm::test_form(inserted_instance.id, "admin"), + ) + .await?; + LocalUser::create( + pool, + &LocalUserInsertForm::test_form_admin(admin_person.id), + vec![], + ) + .await?; + + let admin_local_user_view = LocalUserView::read_person(pool, admin_person.id).await?; + + let site_form = SiteInsertForm::new("test site".to_string(), inserted_instance.id); + let site = Site::create(pool, &site_form).await?; + + // Create a local site, since this is necessary for determining if email verification is + // required + let local_site_form = LocalSiteInsertForm { + require_email_verification: Some(true), + application_question: Some(".".to_string()), + registration_mode: Some(RegistrationMode::RequireApplication), + site_setup: Some(true), + ..LocalSiteInsertForm::new(site.id) + }; + let local_site = LocalSite::create(pool, &local_site_form).await?; + + // Required to have a working local SiteView when updating the site to change email verification + // requirement or registration mode + let rate_limit_form = LocalSiteRateLimitInsertForm::new(local_site.id); + LocalSiteRateLimit::create(pool, &rate_limit_form).await?; + + Ok((inserted_instance, admin_local_user_view)) +} + +async fn signup( + pool: &mut DbPool<'_>, + instance_id: InstanceId, + name: &str, + email: Option<&str>, +) -> LemmyResult<(LocalUser, RegistrationApplication)> { + let person_insert_form = PersonInsertForm::test_form(instance_id, name); + let person = Person::create(pool, &person_insert_form).await?; + + let local_user_insert_form = match email { + Some(email) => LocalUserInsertForm { + email: Some(email.to_string()), + email_verified: Some(false), + ..LocalUserInsertForm::test_form(person.id) + }, + None => LocalUserInsertForm::test_form(person.id), + }; + + let local_user = LocalUser::create(pool, &local_user_insert_form, vec![]).await?; + + let application_insert_form = RegistrationApplicationInsertForm { + local_user_id: local_user.id, + answer: "x".to_string(), + }; + let application = RegistrationApplication::create(pool, &application_insert_form).await?; + + Ok((local_user, application)) +} + +async fn get_application_statuses( + context: &Data, + admin: LocalUserView, +) -> LemmyResult<( + Json, + Json, + Json, +)> { + let application_count = + get_unread_registration_application_count(context.reset_request_count(), admin.clone()).await?; + + let unread_applications = list_registration_applications( + Query::from_query("unread_only=true")?, + context.reset_request_count(), + admin.clone(), + ) + .await?; + + let all_applications = list_registration_applications( + Query::from_query("unread_only=false")?, + context.reset_request_count(), + admin, + ) + .await?; + + Ok((application_count, unread_applications, all_applications)) +} + +#[serial] +#[tokio::test] +#[expect(clippy::indexing_slicing)] +async fn test_application_approval() -> LemmyResult<()> { + let context = LemmyContext::init_test_context().await; + let pool = &mut context.pool(); + + let (instance, admin_local_user_view) = create_test_site(&context).await?; + + // Non-unread counts unfortunately are duplicated due to different types (i64 vs usize) + let mut expected_total_applications = 0; + let mut expected_unread_applications = 0u8; + + let (local_user_with_email, app_with_email) = + signup(pool, instance.id, "user_w_email", Some("lemmy@localhost")).await?; + + let (application_count, unread_applications, all_applications) = + get_application_statuses(&context, admin_local_user_view.clone()).await?; + + // When email verification is 
required and the email is not verified the application should not + // be visible to admins + assert_eq!( + application_count.registration_applications, + i64::from(expected_unread_applications), + ); + assert_eq!( + unread_applications.registration_applications.len(), + usize::from(expected_unread_applications), + ); + assert_eq!( + all_applications.registration_applications.len(), + expected_total_applications, + ); + + LocalUser::update( + pool, + local_user_with_email.id, + &LocalUserUpdateForm { + email_verified: Some(true), + ..Default::default() + }, + ) + .await?; + + expected_total_applications += 1; + expected_unread_applications += 1; + + let (application_count, unread_applications, all_applications) = + get_application_statuses(&context, admin_local_user_view.clone()).await?; + + // When email verification is required and the email is verified the application should be + // visible to admins + assert_eq!( + application_count.registration_applications, + i64::from(expected_unread_applications), + ); + assert_eq!( + unread_applications.registration_applications.len(), + usize::from(expected_unread_applications), + ); + assert!( + !unread_applications.registration_applications[0] + .creator_local_user + .accepted_application + ); + assert_eq!( + all_applications.registration_applications.len(), + expected_total_applications, + ); + + let approval = approve_registration_application( + Json(ApproveRegistrationApplication { + id: app_with_email.id, + approve: true, + deny_reason: None, + }), + context.reset_request_count(), + admin_local_user_view.clone(), + ) + .await; + // Approval should be processed up until email sending is attempted + assert!(approval.is_err_and(|e| e.error_type == LemmyErrorType::NoEmailSetup)); + + expected_unread_applications -= 1; + + let (application_count, unread_applications, all_applications) = + get_application_statuses(&context, admin_local_user_view.clone()).await?; + + // When the application is approved it should only be returned for unread queries + assert_eq!( + application_count.registration_applications, + i64::from(expected_unread_applications), + ); + assert_eq!( + unread_applications.registration_applications.len(), + usize::from(expected_unread_applications), + ); + assert_eq!( + all_applications.registration_applications.len(), + expected_total_applications, + ); + assert!( + all_applications.registration_applications[0] + .creator_local_user + .accepted_application + ); + + let (_local_user, app_with_email_2) = signup( + pool, + instance.id, + "user_w_email_2", + Some("lemmy2@localhost"), + ) + .await?; + let (application_count, unread_applications, all_applications) = + get_application_statuses(&context, admin_local_user_view.clone()).await?; + + // Email not verified, so application still not visible + assert_eq!( + application_count.registration_applications, + i64::from(expected_unread_applications), + ); + assert_eq!( + unread_applications.registration_applications.len(), + usize::from(expected_unread_applications), + ); + assert_eq!( + all_applications.registration_applications.len(), + expected_total_applications, + ); + + update_site( + Json(EditSite { + require_email_verification: Some(false), + ..Default::default() + }), + context.reset_request_count(), + admin_local_user_view.clone(), + ) + .await?; + + // TODO: There is probably a better way to ensure cache invalidation + tokio::time::sleep(CACHE_DURATION_API).await; + + expected_total_applications += 1; + expected_unread_applications += 1; + + let (application_count, 
unread_applications, all_applications) = + get_application_statuses(&context, admin_local_user_view.clone()).await?; + + // After disabling email verification the application should now be visible + assert_eq!( + application_count.registration_applications, + i64::from(expected_unread_applications), + ); + assert_eq!( + unread_applications.registration_applications.len(), + usize::from(expected_unread_applications), + ); + assert_eq!( + all_applications.registration_applications.len(), + expected_total_applications, + ); + + approve_registration_application( + Json(ApproveRegistrationApplication { + id: app_with_email_2.id, + approve: false, + deny_reason: None, + }), + context.reset_request_count(), + admin_local_user_view.clone(), + ) + .await?; + + expected_unread_applications -= 1; + + let (application_count, unread_applications, all_applications) = + get_application_statuses(&context, admin_local_user_view.clone()).await?; + + // Denied applications should not be marked as unread + assert_eq!( + application_count.registration_applications, + i64::from(expected_unread_applications), + ); + assert_eq!( + unread_applications.registration_applications.len(), + usize::from(expected_unread_applications), + ); + assert_eq!( + all_applications.registration_applications.len(), + expected_total_applications, + ); + + signup(pool, instance.id, "user_wo_email", None).await?; + + expected_total_applications += 1; + expected_unread_applications += 1; + + let (application_count, unread_applications, all_applications) = + get_application_statuses(&context, admin_local_user_view.clone()).await?; + + // New user without email should immediately be visible + assert_eq!( + application_count.registration_applications, + i64::from(expected_unread_applications), + ); + assert_eq!( + unread_applications.registration_applications.len(), + usize::from(expected_unread_applications), + ); + assert_eq!( + all_applications.registration_applications.len(), + expected_total_applications, + ); + + signup(pool, instance.id, "user_w_email_3", None).await?; + + expected_total_applications += 1; + expected_unread_applications += 1; + + let (application_count, unread_applications, all_applications) = + get_application_statuses(&context, admin_local_user_view.clone()).await?; + + // New user with email should immediately be visible + assert_eq!( + application_count.registration_applications, + i64::from(expected_unread_applications), + ); + assert_eq!( + unread_applications.registration_applications.len(), + usize::from(expected_unread_applications), + ); + assert_eq!( + all_applications.registration_applications.len(), + expected_total_applications, + ); + + update_site( + Json(EditSite { + registration_mode: Some(RegistrationMode::Open), + ..Default::default() + }), + context.reset_request_count(), + admin_local_user_view.clone(), + ) + .await?; + + // TODO: There is probably a better way to ensure cache invalidation + tokio::time::sleep(CACHE_DURATION_API).await; + + let (application_count, unread_applications, all_applications) = + get_application_statuses(&context, admin_local_user_view.clone()).await?; + + // TODO: At this time applications do not get approved when switching to open registration, so the + // numbers will not change. 
See https://github.com/LemmyNet/lemmy/issues/4969 + // expected_application_count = 0; + // expected_unread_applications_len = 0; + + // When applications are not required all previous applications should become approved but still + // visible + assert_eq!( + application_count.registration_applications, + i64::from(expected_unread_applications), + ); + assert_eq!( + unread_applications.registration_applications.len(), + usize::from(expected_unread_applications), + ); + assert_eq!( + all_applications.registration_applications.len(), + expected_total_applications, + ); + + LocalSite::delete(pool).await?; + // Instance deletion cascades cleanup of all created persons + Instance::delete(pool, instance.id).await?; + + Ok(()) +} diff --git a/crates/api/src/site/registration_applications/unread_count.rs b/crates/api/src/site/registration_applications/unread_count.rs index 255859198..5cc391ed0 100644 --- a/crates/api/src/site/registration_applications/unread_count.rs +++ b/crates/api/src/site/registration_applications/unread_count.rs @@ -1,4 +1,5 @@ -use actix_web::web::{Data, Json}; +use activitypub_federation::config::Data; +use actix_web::web::Json; use lemmy_api_common::{ context::LemmyContext, site::GetUnreadRegistrationApplicationCountResponse, @@ -6,12 +7,12 @@ use lemmy_api_common::{ }; use lemmy_db_schema::source::local_site::LocalSite; use lemmy_db_views::structs::{LocalUserView, RegistrationApplicationView}; -use lemmy_utils::error::LemmyError; +use lemmy_utils::error::LemmyResult; pub async fn get_unread_registration_application_count( context: Data, local_user_view: LocalUserView, -) -> Result, LemmyError> { +) -> LemmyResult> { let local_site = LocalSite::read(&mut context.pool()).await?; // Only let admins do this diff --git a/crates/api/src/sitemap.rs b/crates/api/src/sitemap.rs index 438a8b8e8..57b39a5b3 100644 --- a/crates/api/src/sitemap.rs +++ b/crates/api/src/sitemap.rs @@ -14,9 +14,9 @@ async fn generate_urlset( ) -> LemmyResult { let urls = posts .into_iter() - .map_while(|post| { - Url::builder(post.0.to_string()) - .last_modified(post.1.into()) + .map_while(|(url, date_time)| { + Url::builder(url.to_string()) + .last_modified(date_time.into()) .build() .ok() }) @@ -26,7 +26,7 @@ async fn generate_urlset( } pub async fn get_sitemap(context: Data) -> LemmyResult { - info!("Generating sitemap with posts from last {} hours...", 24); + info!("Generating sitemap...",); let posts = Post::list_for_sitemap(&mut context.pool()).await?; info!("Loaded latest {} posts", posts.len()); @@ -36,49 +36,46 @@ pub async fn get_sitemap(context: Data) -> LemmyResult LemmyResult<()> { let posts: Vec<(DbUrl, DateTime)> = vec![ ( - Url::parse("https://example.com").unwrap().into(), + Url::parse("https://example.com")?.into(), NaiveDate::from_ymd_opt(2022, 12, 1) - .unwrap() + .unwrap_or_default() .and_hms_opt(9, 10, 11) - .unwrap() + .unwrap_or_default() .and_utc(), ), ( - Url::parse("https://lemmy.ml").unwrap().into(), + Url::parse("https://lemmy.ml")?.into(), NaiveDate::from_ymd_opt(2023, 1, 1) - .unwrap() + .unwrap_or_default() .and_hms_opt(1, 2, 3) - .unwrap() + .unwrap_or_default() .and_utc(), ), ]; let mut buf = Vec::::new(); - generate_urlset(posts) - .await - .unwrap() - .write(&mut buf) - .unwrap(); - let root = Element::from_reader(buf.as_slice()).unwrap(); + generate_urlset(posts).await?.write(&mut buf)?; + let root = Element::from_reader(buf.as_slice())?; assert_eq!(root.tag().name(), "urlset"); assert_eq!(root.child_count(), 2); @@ -98,45 +95,43 @@ pub(crate) mod tests { root 
.children() .next() - .unwrap() - .children() - .find(|element| element.tag().name() == "loc") - .unwrap() - .text(), + .and_then(|n| n.children().find(|element| element.tag().name() == "loc")) + .map(Element::text) + .unwrap_or_default(), "https://example.com/" ); assert_eq!( root .children() .next() - .unwrap() - .children() - .find(|element| element.tag().name() == "lastmod") - .unwrap() - .text(), + .and_then(|n| n + .children() + .find(|element| element.tag().name() == "lastmod")) + .map(Element::text) + .unwrap_or_default(), "2022-12-01T09:10:11+00:00" ); assert_eq!( root .children() .nth(1) - .unwrap() - .children() - .find(|element| element.tag().name() == "loc") - .unwrap() - .text(), + .and_then(|n| n.children().find(|element| element.tag().name() == "loc")) + .map(Element::text) + .unwrap_or_default(), "https://lemmy.ml/" ); assert_eq!( root .children() .nth(1) - .unwrap() - .children() - .find(|element| element.tag().name() == "lastmod") - .unwrap() - .text(), + .and_then(|n| n + .children() + .find(|element| element.tag().name() == "lastmod")) + .map(Element::text) + .unwrap_or_default(), "2023-01-01T01:02:03+00:00" ); + + Ok(()) } } diff --git a/crates/api_common/Cargo.toml b/crates/api_common/Cargo.toml index a01e6008c..f939985e8 100644 --- a/crates/api_common/Cargo.toml +++ b/crates/api_common/Cargo.toml @@ -13,18 +13,19 @@ name = "lemmy_api_common" path = "src/lib.rs" doctest = false +[lints] +workspace = true + [features] full = [ "tracing", "rosetta-i18n", - "chrono", - "lemmy_utils", "lemmy_db_views/full", "lemmy_db_views_actor/full", "lemmy_db_views_moderator/full", + "lemmy_utils/full", "activitypub_federation", - "percent-encoding", - "encoding", + "encoding_rs", "reqwest-middleware", "webpage", "ts-rs", @@ -33,8 +34,8 @@ full = [ "reqwest", "actix-web", "futures", - "once_cell", "jsonwebtoken", + "mime", ] [dependencies] @@ -42,34 +43,39 @@ lemmy_db_views = { workspace = true } lemmy_db_views_moderator = { workspace = true } lemmy_db_views_actor = { workspace = true } lemmy_db_schema = { workspace = true } -lemmy_utils = { workspace = true, optional = true } +lemmy_utils = { workspace = true } activitypub_federation = { workspace = true, optional = true } serde = { workspace = true } serde_with = { workspace = true } url = { workspace = true } -chrono = { workspace = true, optional = true } +chrono = { workspace = true } tracing = { workspace = true, optional = true } reqwest-middleware = { workspace = true, optional = true } regex = { workspace = true } rosetta-i18n = { workspace = true, optional = true } -percent-encoding = { workspace = true, optional = true } -webpage = { version = "1.6", default-features = false, features = [ - "serde", -], optional = true } -encoding = { version = "0.2.33", optional = true } -anyhow = { workspace = true } futures = { workspace = true, optional = true } uuid = { workspace = true, optional = true } tokio = { workspace = true, optional = true } reqwest = { workspace = true, optional = true } ts-rs = { workspace = true, optional = true } -once_cell = { workspace = true, optional = true } +moka.workspace = true +anyhow.workspace = true actix-web = { workspace = true, optional = true } -jsonwebtoken = { version = "8.3.0", optional = true } -# necessary for wasmt compilation -getrandom = { version = "0.2.10", features = ["js"] } enum-map = { workspace = true } +urlencoding = { workspace = true } +mime = { version = "0.3.17", optional = true } +webpage = { version = "2.0", default-features = false, features = [ + "serde", +], optional 
= true } +encoding_rs = { version = "0.8.34", optional = true } +jsonwebtoken = { version = "9.3.0", optional = true } +# necessary for wasmt compilation +getrandom = { version = "0.2.15", features = ["js"] } + +[package.metadata.cargo-shear] +ignored = ["getrandom"] [dev-dependencies] serial_test = { workspace = true } reqwest-middleware = { workspace = true } +pretty_assertions = { workspace = true } diff --git a/crates/api_common/src/build_response.rs b/crates/api_common/src/build_response.rs index a85e29765..d40f4c23d 100644 --- a/crates/api_common/src/build_response.rs +++ b/crates/api_common/src/build_response.rs @@ -3,7 +3,12 @@ use crate::{ community::CommunityResponse, context::LemmyContext, post::PostResponse, - utils::{check_person_block, get_interface_language, is_mod_or_admin, send_email_to_user}, + utils::{ + check_person_instance_community_block, + get_interface_language, + is_mod_or_admin, + send_email_to_user, + }, }; use actix_web::web::Json; use lemmy_db_schema::{ @@ -14,14 +19,13 @@ use lemmy_db_schema::{ comment_reply::{CommentReply, CommentReplyInsertForm}, person::Person, person_mention::{PersonMention, PersonMentionInsertForm}, - post::Post, }, traits::Crud, }; use lemmy_db_views::structs::{CommentView, LocalUserView, PostView}; use lemmy_db_views_actor::structs::CommunityView; use lemmy_utils::{ - error::LemmyError, + error::LemmyResult, utils::{markdown::markdown_to_html, mention::MentionData}, }; @@ -30,9 +34,10 @@ pub async fn build_comment_response( comment_id: CommentId, local_user_view: Option, recipient_ids: Vec, -) -> Result { - let person_id = local_user_view.map(|l| l.person.id); - let comment_view = CommentView::read(&mut context.pool(), comment_id, person_id).await?; +) -> LemmyResult { + let local_user = local_user_view.map(|l| l.local_user); + let comment_view = + CommentView::read(&mut context.pool(), comment_id, local_user.as_ref()).await?; Ok(CommentResponse { comment_view, recipient_ids, @@ -43,15 +48,15 @@ pub async fn build_community_response( context: &LemmyContext, local_user_view: LocalUserView, community_id: CommunityId, -) -> Result, LemmyError> { +) -> LemmyResult> { let is_mod_or_admin = is_mod_or_admin(&mut context.pool(), &local_user_view.person, community_id) .await .is_ok(); - let person_id = local_user_view.person.id; + let local_user = local_user_view.local_user; let community_view = CommunityView::read( &mut context.pool(), community_id, - Some(person_id), + Some(&local_user), is_mod_or_admin, ) .await?; @@ -66,35 +71,48 @@ pub async fn build_community_response( pub async fn build_post_response( context: &LemmyContext, community_id: CommunityId, - person: &Person, + local_user_view: LocalUserView, post_id: PostId, -) -> Result, LemmyError> { - let is_mod_or_admin = is_mod_or_admin(&mut context.pool(), person, community_id) +) -> LemmyResult> { + let local_user = local_user_view.local_user; + let is_mod_or_admin = is_mod_or_admin(&mut context.pool(), &local_user_view.person, community_id) .await .is_ok(); let post_view = PostView::read( &mut context.pool(), post_id, - Some(person.id), + Some(&local_user), is_mod_or_admin, ) .await?; Ok(Json(PostResponse { post_view })) } -// TODO: this function is a mess and should be split up to handle email seperately +// TODO: this function is a mess and should be split up to handle email separately #[tracing::instrument(skip_all)] pub async fn send_local_notifs( mentions: Vec, - comment: &Comment, + comment_id: CommentId, person: &Person, - post: &Post, do_send_email: bool, context: 
&LemmyContext, -) -> Result, LemmyError> { + local_user_view: Option<&LocalUserView>, +) -> LemmyResult> { let mut recipient_ids = Vec::new(); let inbox_link = format!("{}/inbox", context.settings().get_protocol_and_hostname()); + // let person = my_local_user.person; + // Read the comment view to get extra info + let comment_view = CommentView::read( + &mut context.pool(), + comment_id, + local_user_view.map(|view| &view.local_user), + ) + .await?; + let comment = comment_view.comment; + let post = comment_view.post; + let community = comment_view.community; + // Send the local mentions for mention in mentions .iter() @@ -105,12 +123,13 @@ pub async fn send_local_notifs( if let Ok(mention_user_view) = user_view { // TODO // At some point, make it so you can't tag the parent creator either - // This can cause two notifications, one for reply and the other for mention + // Potential duplication of notifications, one for reply and the other for mention, is handled + // below by checking recipient ids recipient_ids.push(mention_user_view.local_user.id); let user_mention_form = PersonMentionInsertForm { recipient_id: mention_user_view.person.id, - comment_id: comment.id, + comment_id, read: None, }; @@ -142,77 +161,94 @@ pub async fn send_local_notifs( // Get the parent commenter local_user let parent_creator_id = parent_comment.creator_id; - // Only add to recipients if that person isn't blocked - let creator_blocked = check_person_block(person.id, parent_creator_id, &mut context.pool()) - .await - .is_err(); + let check_blocks = check_person_instance_community_block( + person.id, + parent_creator_id, + // Only block from the community's instance_id + community.instance_id, + community.id, + &mut context.pool(), + ) + .await + .is_err(); // Don't send a notif to yourself - if parent_comment.creator_id != person.id && !creator_blocked { + if parent_comment.creator_id != person.id && !check_blocks { let user_view = LocalUserView::read_person(&mut context.pool(), parent_creator_id).await; if let Ok(parent_user_view) = user_view { - recipient_ids.push(parent_user_view.local_user.id); + // Don't duplicate notif if already mentioned by checking recipient ids + if !recipient_ids.contains(&parent_user_view.local_user.id) { + recipient_ids.push(parent_user_view.local_user.id); - let comment_reply_form = CommentReplyInsertForm { - recipient_id: parent_user_view.person.id, - comment_id: comment.id, - read: None, - }; + let comment_reply_form = CommentReplyInsertForm { + recipient_id: parent_user_view.person.id, + comment_id: comment.id, + read: None, + }; - // Allow this to fail softly, since comment edits might re-update or replace it - // Let the uniqueness handle this fail - CommentReply::create(&mut context.pool(), &comment_reply_form) - .await - .ok(); + // Allow this to fail softly, since comment edits might re-update or replace it + // Let the uniqueness handle this fail + CommentReply::create(&mut context.pool(), &comment_reply_form) + .await + .ok(); - if do_send_email { - let lang = get_interface_language(&parent_user_view); - let content = markdown_to_html(&comment.content); - send_email_to_user( - &parent_user_view, - &lang.notification_comment_reply_subject(&person.name), - &lang.notification_comment_reply_body(&content, &inbox_link, &person.name), - context.settings(), - ) - .await + if do_send_email { + let lang = get_interface_language(&parent_user_view); + let content = markdown_to_html(&comment.content); + send_email_to_user( + &parent_user_view, + 
&lang.notification_comment_reply_subject(&person.name), + &lang.notification_comment_reply_body(&content, &inbox_link, &person.name), + context.settings(), + ) + .await + } } } } } else { - // If there's no parent, its the post creator - // Only add to recipients if that person isn't blocked - let creator_blocked = check_person_block(person.id, post.creator_id, &mut context.pool()) - .await - .is_err(); + // Use the post creator to check blocks + let check_blocks = check_person_instance_community_block( + person.id, + post.creator_id, + // Only block from the community's instance_id + community.instance_id, + community.id, + &mut context.pool(), + ) + .await + .is_err(); - if post.creator_id != person.id && !creator_blocked { + if post.creator_id != person.id && !check_blocks { let creator_id = post.creator_id; let parent_user = LocalUserView::read_person(&mut context.pool(), creator_id).await; if let Ok(parent_user_view) = parent_user { - recipient_ids.push(parent_user_view.local_user.id); + if !recipient_ids.contains(&parent_user_view.local_user.id) { + recipient_ids.push(parent_user_view.local_user.id); - let comment_reply_form = CommentReplyInsertForm { - recipient_id: parent_user_view.person.id, - comment_id: comment.id, - read: None, - }; + let comment_reply_form = CommentReplyInsertForm { + recipient_id: parent_user_view.person.id, + comment_id: comment.id, + read: None, + }; - // Allow this to fail softly, since comment edits might re-update or replace it - // Let the uniqueness handle this fail - CommentReply::create(&mut context.pool(), &comment_reply_form) - .await - .ok(); + // Allow this to fail softly, since comment edits might re-update or replace it + // Let the uniqueness handle this fail + CommentReply::create(&mut context.pool(), &comment_reply_form) + .await + .ok(); - if do_send_email { - let lang = get_interface_language(&parent_user_view); - let content = markdown_to_html(&comment.content); - send_email_to_user( - &parent_user_view, - &lang.notification_post_reply_subject(&person.name), - &lang.notification_post_reply_body(&content, &inbox_link, &person.name), - context.settings(), - ) - .await + if do_send_email { + let lang = get_interface_language(&parent_user_view); + let content = markdown_to_html(&comment.content); + send_email_to_user( + &parent_user_view, + &lang.notification_post_reply_subject(&person.name), + &lang.notification_post_reply_body(&content, &inbox_link, &person.name), + context.settings(), + ) + .await + } } } } diff --git a/crates/api_common/src/claims.rs b/crates/api_common/src/claims.rs index 09191ad71..6476f855a 100644 --- a/crates/api_common/src/claims.rs +++ b/crates/api_common/src/claims.rs @@ -1,15 +1,16 @@ -use crate::{context::LemmyContext, sensitive::Sensitive}; +use crate::context::LemmyContext; use actix_web::{http::header::USER_AGENT, HttpRequest}; use chrono::Utc; use jsonwebtoken::{decode, encode, DecodingKey, EncodingKey, Header, Validation}; use lemmy_db_schema::{ newtypes::LocalUserId, + sensitive::SensitiveString, source::login_token::{LoginToken, LoginTokenCreateForm}, }; use lemmy_utils::error::{LemmyErrorExt, LemmyErrorType, LemmyResult}; use serde::{Deserialize, Serialize}; -#[derive(Debug, Serialize, Deserialize)] +#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq, Hash)] pub struct Claims { /// local_user_id, standard claim by RFC 7519. 
pub sub: String, @@ -28,19 +29,15 @@ impl Claims { let claims = decode::(jwt, &key, &validation).with_lemmy_type(LemmyErrorType::NotLoggedIn)?; let user_id = LocalUserId(claims.claims.sub.parse()?); - let is_valid = LoginToken::validate(&mut context.pool(), user_id, jwt).await?; - if !is_valid { - Err(LemmyErrorType::NotLoggedIn)? - } else { - Ok(user_id) - } + LoginToken::validate(&mut context.pool(), user_id, jwt).await?; + Ok(user_id) } pub async fn generate( user_id: LocalUserId, req: HttpRequest, context: &LemmyContext, - ) -> LemmyResult> { + ) -> LemmyResult { let hostname = context.settings().hostname.clone(); let my_claims = Claims { sub: user_id.0.to_string(), @@ -50,7 +47,7 @@ impl Claims { let secret = &context.secret().jwt_secret; let key = EncodingKey::from_secret(secret.as_ref()); - let token = encode(&Header::default(), &my_claims, &key)?; + let token: SensitiveString = encode(&Header::default(), &my_claims, &key)?.into(); let ip = req .connection_info() .realip_remote_addr() @@ -67,14 +64,12 @@ impl Claims { user_agent, }; LoginToken::create(&mut context.pool(), form).await?; - Ok(Sensitive::new(token)) + Ok(token) } } #[cfg(test)] mod tests { - #![allow(clippy::unwrap_used)] - #![allow(clippy::indexing_slicing)] use crate::{claims::Claims, context::LemmyContext}; use actix_web::test::TestRequest; @@ -88,17 +83,18 @@ mod tests { traits::Crud, utils::build_db_pool_for_tests, }; - use lemmy_utils::rate_limit::RateLimitCell; + use lemmy_utils::{error::LemmyResult, rate_limit::RateLimitCell}; + use pretty_assertions::assert_eq; use reqwest::Client; use reqwest_middleware::ClientBuilder; use serial_test::serial; #[tokio::test] #[serial] - async fn test_should_not_validate_user_token_after_password_change() { + async fn test_should_not_validate_user_token_after_password_change() -> LemmyResult<()> { let pool_ = build_db_pool_for_tests().await; let pool = &mut (&pool_).into(); - let secret = Secret::init(pool).await.unwrap(); + let secret = Secret::init(pool).await?; let context = LemmyContext::create( pool_.clone(), ClientBuilder::new(Client::default()).build(), @@ -106,34 +102,25 @@ mod tests { RateLimitCell::with_test_config(), ); - let inserted_instance = Instance::read_or_create(pool, "my_domain.tld".to_string()) - .await - .unwrap(); + let inserted_instance = Instance::read_or_create(pool, "my_domain.tld".to_string()).await?; - let new_person = PersonInsertForm::builder() - .name("Gerry9812".into()) - .public_key("pubkey".to_string()) - .instance_id(inserted_instance.id) - .build(); + let new_person = PersonInsertForm::test_form(inserted_instance.id, "Gerry9812"); - let inserted_person = Person::create(pool, &new_person).await.unwrap(); + let inserted_person = Person::create(pool, &new_person).await?; - let local_user_form = LocalUserInsertForm::builder() - .person_id(inserted_person.id) - .password_encrypted("123456".to_string()) - .build(); + let local_user_form = LocalUserInsertForm::test_form(inserted_person.id); - let inserted_local_user = LocalUser::create(pool, &local_user_form).await.unwrap(); + let inserted_local_user = LocalUser::create(pool, &local_user_form, vec![]).await?; let req = TestRequest::default().to_http_request(); - let jwt = Claims::generate(inserted_local_user.id, req, &context) - .await - .unwrap(); + let jwt = Claims::generate(inserted_local_user.id, req, &context).await?; let valid = Claims::validate(&jwt, &context).await; assert!(valid.is_ok()); - let num_deleted = Person::delete(pool, inserted_person.id).await.unwrap(); + let num_deleted = 
Person::delete(pool, inserted_person.id).await?; assert_eq!(1, num_deleted); + + Ok(()) } } diff --git a/crates/api_common/src/comment.rs b/crates/api_common/src/comment.rs index c2589fb2a..48800cf8d 100644 --- a/crates/api_common/src/comment.rs +++ b/crates/api_common/src/comment.rs @@ -3,14 +3,14 @@ use lemmy_db_schema::{ CommentSortType, ListingType, }; -use lemmy_db_views::structs::{CommentReportView, CommentView}; +use lemmy_db_views::structs::{CommentReportView, CommentView, VoteView}; use serde::{Deserialize, Serialize}; use serde_with::skip_serializing_none; #[cfg(feature = "full")] use ts_rs::TS; #[skip_serializing_none] -#[derive(Debug, Serialize, Deserialize, Clone, Default)] +#[derive(Debug, Serialize, Deserialize, Clone, Default, PartialEq, Eq, Hash)] #[cfg_attr(feature = "full", derive(TS))] #[cfg_attr(feature = "full", ts(export))] /// Create a comment. @@ -22,7 +22,7 @@ pub struct CreateComment { } #[skip_serializing_none] -#[derive(Debug, Serialize, Deserialize, Clone, Default)] +#[derive(Debug, Serialize, Deserialize, Clone, Default, Copy, PartialEq, Eq, Hash)] #[cfg_attr(feature = "full", derive(TS))] #[cfg_attr(feature = "full", ts(export))] /// Fetch an individual comment. @@ -31,7 +31,7 @@ pub struct GetComment { } #[skip_serializing_none] -#[derive(Debug, Serialize, Deserialize, Clone, Default)] +#[derive(Debug, Serialize, Deserialize, Clone, Default, PartialEq, Eq, Hash)] #[cfg_attr(feature = "full", derive(TS))] #[cfg_attr(feature = "full", ts(export))] /// Edit a comment. @@ -42,7 +42,7 @@ pub struct EditComment { } #[skip_serializing_none] -#[derive(Debug, Serialize, Deserialize, Clone, Default)] +#[derive(Debug, Serialize, Deserialize, Clone, Copy, Default, PartialEq, Eq, Hash)] #[cfg_attr(feature = "full", derive(TS))] #[cfg_attr(feature = "full", ts(export))] /// Distinguish a comment (IE speak as moderator). @@ -52,7 +52,7 @@ pub struct DistinguishComment { } #[skip_serializing_none] -#[derive(Debug, Serialize, Deserialize, Clone, Default)] +#[derive(Debug, Serialize, Deserialize, Clone, Copy, Default, PartialEq, Eq, Hash)] #[cfg_attr(feature = "full", derive(TS))] #[cfg_attr(feature = "full", ts(export))] /// Delete your own comment. @@ -62,7 +62,7 @@ pub struct DeleteComment { } #[skip_serializing_none] -#[derive(Debug, Serialize, Deserialize, Clone, Default)] +#[derive(Debug, Serialize, Deserialize, Clone, Default, PartialEq, Eq, Hash)] #[cfg_attr(feature = "full", derive(TS))] #[cfg_attr(feature = "full", ts(export))] /// Remove a comment (only doable by mods). @@ -72,7 +72,7 @@ pub struct RemoveComment { pub reason: Option, } -#[derive(Debug, Serialize, Deserialize, Clone, Default)] +#[derive(Debug, Serialize, Deserialize, Clone, Copy, Default, PartialEq, Eq, Hash)] #[cfg_attr(feature = "full", derive(TS))] #[cfg_attr(feature = "full", ts(export))] /// Save / bookmark a comment. @@ -91,7 +91,7 @@ pub struct CommentResponse { pub recipient_ids: Vec, } -#[derive(Debug, Serialize, Deserialize, Clone, Default)] +#[derive(Debug, Serialize, Deserialize, Clone, Copy, Default, PartialEq, Eq, Hash)] #[cfg_attr(feature = "full", derive(TS))] #[cfg_attr(feature = "full", ts(export))] /// Like a comment. @@ -102,7 +102,7 @@ pub struct CreateCommentLike { } #[skip_serializing_none] -#[derive(Debug, Serialize, Deserialize, Clone, Default)] +#[derive(Debug, Serialize, Deserialize, Clone, Default, PartialEq, Eq, Hash)] #[cfg_attr(feature = "full", derive(TS))] #[cfg_attr(feature = "full", ts(export))] /// Get a list of comments. 
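
The claims.rs test above was rewritten to return LemmyResult<()> and use `?` instead of `.unwrap()`. For readers unfamiliar with the pattern, here is a minimal self-contained sketch (standard library only, not the real lemmy_utils error type) of a test that propagates errors the same way:

use std::num::ParseIntError;

// A test that returns Result can use `?` instead of `.unwrap()`, so a
// failure reports the underlying error rather than panicking mid-test.
#[test]
fn parses_user_id() -> Result<(), ParseIntError> {
    let user_id: i32 = "42".parse()?;
    assert_eq!(user_id, 42);
    Ok(())
}
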
@@ -146,7 +146,7 @@ pub struct CommentReportResponse { pub comment_report_view: CommentReportView, } -#[derive(Debug, Serialize, Deserialize, Clone, Default)] +#[derive(Debug, Serialize, Deserialize, Clone, Copy, Default, PartialEq, Eq, Hash)] #[cfg_attr(feature = "full", derive(TS))] #[cfg_attr(feature = "full", ts(export))] /// Resolve a comment report (only doable by mods). @@ -156,11 +156,12 @@ pub struct ResolveCommentReport { } #[skip_serializing_none] -#[derive(Debug, Serialize, Deserialize, Clone, Default)] +#[derive(Debug, Serialize, Deserialize, Clone, Copy, Default, PartialEq, Eq, Hash)] #[cfg_attr(feature = "full", derive(TS))] #[cfg_attr(feature = "full", ts(export))] /// List comment reports. pub struct ListCommentReports { + pub comment_id: Option, pub page: Option, pub limit: Option, /// Only shows the unresolved reports @@ -176,3 +177,22 @@ pub struct ListCommentReports { pub struct ListCommentReportsResponse { pub comment_reports: Vec, } + +#[skip_serializing_none] +#[derive(Debug, Serialize, Deserialize, Clone, Copy, Default, PartialEq, Eq, Hash)] +#[cfg_attr(feature = "full", derive(TS))] +#[cfg_attr(feature = "full", ts(export))] +/// List comment likes. Admins-only. +pub struct ListCommentLikes { + pub comment_id: CommentId, + pub page: Option, + pub limit: Option, +} + +#[derive(Debug, Serialize, Deserialize, Clone)] +#[cfg_attr(feature = "full", derive(TS))] +#[cfg_attr(feature = "full", ts(export))] +/// The comment likes response +pub struct ListCommentLikesResponse { + pub comment_likes: Vec, +} diff --git a/crates/api_common/src/community.rs b/crates/api_common/src/community.rs index 1f4a94636..1def2111b 100644 --- a/crates/api_common/src/community.rs +++ b/crates/api_common/src/community.rs @@ -1,17 +1,22 @@ use lemmy_db_schema::{ newtypes::{CommunityId, LanguageId, PersonId}, source::site::Site, + CommunityVisibility, ListingType, - SortType, }; -use lemmy_db_views_actor::structs::{CommunityModeratorView, CommunityView, PersonView}; +use lemmy_db_views_actor::structs::{ + CommunityModeratorView, + CommunitySortType, + CommunityView, + PersonView, +}; use serde::{Deserialize, Serialize}; use serde_with::skip_serializing_none; #[cfg(feature = "full")] use ts_rs::TS; #[skip_serializing_none] -#[derive(Debug, Serialize, Deserialize, Clone, Default)] +#[derive(Debug, Serialize, Deserialize, Clone, Default, PartialEq, Eq, Hash)] #[cfg_attr(feature = "full", derive(TS))] #[cfg_attr(feature = "full", ts(export))] /// Get a community. Must provide either an id, or a name. @@ -36,14 +41,16 @@ pub struct GetCommunityResponse { #[skip_serializing_none] #[cfg_attr(feature = "full", derive(TS))] #[cfg_attr(feature = "full", ts(export))] -#[derive(Debug, Serialize, Deserialize, Clone, Default)] +#[derive(Debug, Serialize, Deserialize, Clone, Default, PartialEq, Eq, Hash)] /// Create a community. pub struct CreateCommunity { /// The unique name. pub name: String, /// A longer title. pub title: String, - /// A longer sidebar, or description of your community, in markdown. + /// A sidebar for the community in markdown. + pub sidebar: Option, + /// A shorter, one line description of your community. pub description: Option, /// An icon URL. pub icon: Option, @@ -54,6 +61,7 @@ pub struct CreateCommunity { /// Whether to restrict posting only to moderators. 
pub posting_restricted_to_mods: Option, pub discussion_languages: Option>, + pub visibility: Option, } #[derive(Debug, Serialize, Deserialize, Clone)] @@ -66,13 +74,13 @@ pub struct CommunityResponse { } #[skip_serializing_none] -#[derive(Debug, Serialize, Deserialize, Clone, Default)] +#[derive(Debug, Serialize, Deserialize, Clone, Default, PartialEq, Eq, Hash)] #[cfg_attr(feature = "full", derive(TS))] #[cfg_attr(feature = "full", ts(export))] /// Fetches a list of communities. pub struct ListCommunities { pub type_: Option, - pub sort: Option, + pub sort: Option, pub show_nsfw: Option, pub page: Option, pub limit: Option, @@ -87,7 +95,7 @@ pub struct ListCommunitiesResponse { } #[skip_serializing_none] -#[derive(Debug, Serialize, Deserialize, Clone, Default)] +#[derive(Debug, Serialize, Deserialize, Clone, Default, PartialEq, Eq, Hash)] #[cfg_attr(feature = "full", derive(TS))] #[cfg_attr(feature = "full", ts(export))] /// Ban a user from a community. @@ -95,8 +103,13 @@ pub struct BanFromCommunity { pub community_id: CommunityId, pub person_id: PersonId, pub ban: bool, - pub remove_data: Option, + /// Optionally remove or restore all their data. Useful for new troll accounts. + /// If ban is true, then this means remove. If ban is false, it means restore. + pub remove_or_restore_data: Option, pub reason: Option, + /// A time that the ban will expire, in unix epoch seconds. + /// + /// An i64 unix timestamp is used for a simpler API client implementation. pub expires: Option, } @@ -109,7 +122,7 @@ pub struct BanFromCommunityResponse { pub banned: bool, } -#[derive(Debug, Serialize, Deserialize, Clone, Default)] +#[derive(Debug, Serialize, Deserialize, Clone, Default, PartialEq, Eq, Hash)] #[cfg_attr(feature = "full", derive(TS))] #[cfg_attr(feature = "full", ts(export))] /// Add a moderator to a community. @@ -128,7 +141,7 @@ pub struct AddModToCommunityResponse { } #[skip_serializing_none] -#[derive(Debug, Serialize, Deserialize, Clone, Default)] +#[derive(Debug, Serialize, Deserialize, Clone, Default, PartialEq, Eq, Hash)] #[cfg_attr(feature = "full", derive(TS))] #[cfg_attr(feature = "full", ts(export))] /// Edit a community. @@ -136,7 +149,9 @@ pub struct EditCommunity { pub community_id: CommunityId, /// A longer title. pub title: Option, - /// A longer sidebar, or description of your community, in markdown. + /// A sidebar for the community in markdown. + pub sidebar: Option, + /// A shorter, one line description of your community. pub description: Option, /// An icon URL. pub icon: Option, @@ -147,10 +162,11 @@ pub struct EditCommunity { /// Whether to restrict posting only to moderators. pub posting_restricted_to_mods: Option, pub discussion_languages: Option>, + pub visibility: Option, } #[skip_serializing_none] -#[derive(Debug, Serialize, Deserialize, Clone, Default)] +#[derive(Debug, Serialize, Deserialize, Clone, Default, PartialEq, Eq, Hash)] #[cfg_attr(feature = "full", derive(TS))] #[cfg_attr(feature = "full", ts(export))] /// Hide a community from the main view. @@ -161,7 +177,7 @@ pub struct HideCommunity { } #[skip_serializing_none] -#[derive(Debug, Serialize, Deserialize, Clone, Default)] +#[derive(Debug, Serialize, Deserialize, Clone, Default, PartialEq, Eq, Hash)] #[cfg_attr(feature = "full", derive(TS))] #[cfg_attr(feature = "full", ts(export))] /// Delete your own community. 
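
BanFromCommunity (and BanPerson later in this changeset) now document `expires` as unix epoch seconds in an i64, chosen to keep API clients simple. A hedged client-side sketch, assuming chrono is available, of producing such a timestamp:

use chrono::{Duration, Utc};

// Illustrative only: compute a ban expiry one week from now as unix epoch
// seconds, which is the representation the `expires` field expects.
fn one_week_ban_expiry() -> i64 {
    (Utc::now() + Duration::days(7)).timestamp()
}

fn main() {
    println!("ban expires at unix time {}", one_week_ban_expiry());
}
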
@@ -171,7 +187,7 @@ pub struct DeleteCommunity { } #[skip_serializing_none] -#[derive(Debug, Serialize, Deserialize, Clone, Default)] +#[derive(Debug, Serialize, Deserialize, Clone, Default, PartialEq, Eq, Hash)] #[cfg_attr(feature = "full", derive(TS))] #[cfg_attr(feature = "full", ts(export))] /// Remove a community (only doable by moderators). @@ -181,7 +197,7 @@ pub struct RemoveCommunity { pub reason: Option, } -#[derive(Debug, Serialize, Deserialize, Clone, Default)] +#[derive(Debug, Serialize, Deserialize, Clone, Default, PartialEq, Eq, Hash)] #[cfg_attr(feature = "full", derive(TS))] #[cfg_attr(feature = "full", ts(export))] /// Follow / subscribe to a community. @@ -190,7 +206,7 @@ pub struct FollowCommunity { pub follow: bool, } -#[derive(Debug, Serialize, Deserialize, Clone, Default)] +#[derive(Debug, Serialize, Deserialize, Clone, Default, PartialEq, Eq, Hash)] #[cfg_attr(feature = "full", derive(TS))] #[cfg_attr(feature = "full", ts(export))] /// Block a community. @@ -209,7 +225,7 @@ pub struct BlockCommunityResponse { pub blocked: bool, } -#[derive(Debug, Serialize, Deserialize, Clone, Default)] +#[derive(Debug, Serialize, Deserialize, Clone, Default, PartialEq, Eq, Hash)] #[cfg_attr(feature = "full", derive(TS))] #[cfg_attr(feature = "full", ts(export))] /// Transfer a community to a new owner. @@ -217,3 +233,12 @@ pub struct TransferCommunity { pub community_id: CommunityId, pub person_id: PersonId, } + +#[skip_serializing_none] +#[derive(Debug, Serialize, Deserialize, Clone, Default, PartialEq, Eq, Hash)] +#[cfg_attr(feature = "full", derive(TS))] +#[cfg_attr(feature = "full", ts(export))] +/// Fetches a random community +pub struct GetRandomCommunity { + pub type_: Option, +} diff --git a/crates/api_common/src/context.rs b/crates/api_common/src/context.rs index 888a98741..334983b20 100644 --- a/crates/api_common/src/context.rs +++ b/crates/api_common/src/context.rs @@ -1,12 +1,14 @@ +use crate::request::client_builder; +use activitypub_federation::config::{Data, FederationConfig}; use lemmy_db_schema::{ source::secret::Secret, - utils::{ActualDbPool, DbPool}, + utils::{build_db_pool_for_tests, ActualDbPool, DbPool}, }; use lemmy_utils::{ rate_limit::RateLimitCell, settings::{structs::Settings, SETTINGS}, }; -use reqwest_middleware::ClientWithMiddleware; +use reqwest_middleware::{ClientBuilder, ClientWithMiddleware}; use std::sync::Arc; #[derive(Clone)] @@ -49,4 +51,38 @@ impl LemmyContext { pub fn rate_limit_cell(&self) -> &RateLimitCell { &self.rate_limit_cell } + + /// Initialize a context for use in tests which blocks federation network calls. + /// + /// Do not use this in production code. 
+ pub async fn init_test_federation_config() -> FederationConfig { + // call this to run migrations + let pool = build_db_pool_for_tests().await; + + let client = client_builder(&SETTINGS).build().expect("build client"); + + let client = ClientBuilder::new(client).build(); + let secret = Secret { + id: 0, + jwt_secret: String::new().into(), + }; + + let rate_limit_cell = RateLimitCell::with_test_config(); + + let context = LemmyContext::create(pool, client, secret, rate_limit_cell.clone()); + + FederationConfig::builder() + .domain(context.settings().hostname.clone()) + .app_data(context) + .debug(true) + // Dont allow any network fetches + .http_fetch_limit(0) + .build() + .await + .expect("build federation config") + } + pub async fn init_test_context() -> Data { + let config = Self::init_test_federation_config().await; + config.to_request_data() + } } diff --git a/crates/api_common/src/custom_emoji.rs b/crates/api_common/src/custom_emoji.rs index d2900853e..3804b71af 100644 --- a/crates/api_common/src/custom_emoji.rs +++ b/crates/api_common/src/custom_emoji.rs @@ -1,11 +1,12 @@ use lemmy_db_schema::newtypes::CustomEmojiId; use lemmy_db_views::structs::CustomEmojiView; use serde::{Deserialize, Serialize}; +use serde_with::skip_serializing_none; #[cfg(feature = "full")] use ts_rs::TS; use url::Url; -#[derive(Debug, Serialize, Deserialize, Clone)] +#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq, Hash)] #[cfg_attr(feature = "full", derive(TS))] #[cfg_attr(feature = "full", ts(export))] /// Create a custom emoji. @@ -18,7 +19,7 @@ pub struct CreateCustomEmoji { pub keywords: Vec, } -#[derive(Debug, Serialize, Deserialize, Clone)] +#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq, Hash)] #[cfg_attr(feature = "full", derive(TS))] #[cfg_attr(feature = "full", ts(export))] /// Edit a custom emoji. @@ -31,7 +32,7 @@ pub struct EditCustomEmoji { pub keywords: Vec, } -#[derive(Debug, Serialize, Deserialize, Clone, Default)] +#[derive(Debug, Serialize, Deserialize, Clone, Copy, Default, PartialEq, Eq, Hash)] #[cfg_attr(feature = "full", derive(TS))] #[cfg_attr(feature = "full", ts(export))] /// Delete a custom emoji. @@ -46,3 +47,23 @@ pub struct DeleteCustomEmoji { pub struct CustomEmojiResponse { pub custom_emoji: CustomEmojiView, } + +#[derive(Debug, Serialize, Deserialize, Clone)] +#[cfg_attr(feature = "full", derive(TS))] +#[cfg_attr(feature = "full", ts(export))] +/// A response for custom emojis. +pub struct ListCustomEmojisResponse { + pub custom_emojis: Vec, +} + +#[skip_serializing_none] +#[derive(Debug, Serialize, Deserialize, Clone, Default, PartialEq, Eq, Hash)] +#[cfg_attr(feature = "full", derive(TS))] +#[cfg_attr(feature = "full", ts(export))] +/// Fetches a list of custom emojis. 
+pub struct ListCustomEmojis { + pub page: Option, + pub limit: Option, + pub category: Option, + pub ignore_page_limits: Option, +} diff --git a/crates/api_common/src/lib.rs b/crates/api_common/src/lib.rs index 6f7da52ee..68eeadecc 100644 --- a/crates/api_common/src/lib.rs +++ b/crates/api_common/src/lib.rs @@ -7,6 +7,7 @@ pub mod community; #[cfg(feature = "full")] pub mod context; pub mod custom_emoji; +pub mod oauth_provider; pub mod person; pub mod post; pub mod private_message; @@ -14,8 +15,8 @@ pub mod private_message; pub mod request; #[cfg(feature = "full")] pub mod send_activity; -pub mod sensitive; pub mod site; +pub mod tagline; #[cfg(feature = "full")] pub mod utils; @@ -23,8 +24,11 @@ pub extern crate lemmy_db_schema; pub extern crate lemmy_db_views; pub extern crate lemmy_db_views_actor; pub extern crate lemmy_db_views_moderator; +pub extern crate lemmy_utils; +pub use lemmy_utils::LemmyErrorType; use serde::{Deserialize, Serialize}; +use std::{cmp::min, time::Duration}; #[derive(Debug, Serialize, Deserialize, Clone)] #[cfg_attr(feature = "full", derive(ts_rs::TS))] @@ -39,3 +43,40 @@ impl Default for SuccessResponse { SuccessResponse { success: true } } } + +// TODO: use from_days once stabilized +// https://github.com/rust-lang/rust/issues/120301 +const DAY: Duration = Duration::from_secs(24 * 60 * 60); + +/// Calculate how long to sleep until next federation send based on how many +/// retries have already happened. Uses exponential backoff with maximum of one day. The first +/// error is ignored. +pub fn federate_retry_sleep_duration(retry_count: i32) -> Duration { + debug_assert!(retry_count != 0); + if retry_count == 1 { + return Duration::from_secs(0); + } + let retry_count = retry_count - 1; + let pow = 1.25_f64.powf(retry_count.into()); + let pow = Duration::try_from_secs_f64(pow).unwrap_or(DAY); + min(DAY, pow) +} + +#[cfg(test)] +pub(crate) mod tests { + use super::*; + + #[test] + fn test_federate_retry_sleep_duration() { + assert_eq!(Duration::from_secs(0), federate_retry_sleep_duration(1)); + assert_eq!( + Duration::new(1, 250000000), + federate_retry_sleep_duration(2) + ); + assert_eq!( + Duration::new(2, 441406250), + federate_retry_sleep_duration(5) + ); + assert_eq!(DAY, federate_retry_sleep_duration(100)); + } +} diff --git a/crates/api_common/src/oauth_provider.rs b/crates/api_common/src/oauth_provider.rs new file mode 100644 index 000000000..14847edf1 --- /dev/null +++ b/crates/api_common/src/oauth_provider.rs @@ -0,0 +1,71 @@ +use lemmy_db_schema::newtypes::OAuthProviderId; +use serde::{Deserialize, Serialize}; +use serde_with::skip_serializing_none; +#[cfg(feature = "full")] +use ts_rs::TS; +use url::Url; + +#[skip_serializing_none] +#[derive(Debug, Serialize, Deserialize, Clone)] +#[cfg_attr(feature = "full", derive(TS))] +#[cfg_attr(feature = "full", ts(export))] +/// Create an external auth method. +pub struct CreateOAuthProvider { + pub display_name: String, + pub issuer: String, + pub authorization_endpoint: String, + pub token_endpoint: String, + pub userinfo_endpoint: String, + pub id_claim: String, + pub client_id: String, + pub client_secret: String, + pub scopes: String, + pub auto_verify_email: Option, + pub account_linking_enabled: Option, + pub enabled: Option, +} + +#[skip_serializing_none] +#[derive(Debug, Serialize, Deserialize, Clone)] +#[cfg_attr(feature = "full", derive(TS))] +#[cfg_attr(feature = "full", ts(export))] +/// Edit an external auth method. 
+pub struct EditOAuthProvider { + pub id: OAuthProviderId, + pub display_name: Option, + pub authorization_endpoint: Option, + pub token_endpoint: Option, + pub userinfo_endpoint: Option, + pub id_claim: Option, + pub client_secret: Option, + pub scopes: Option, + pub auto_verify_email: Option, + pub account_linking_enabled: Option, + pub enabled: Option, +} + +#[derive(Debug, Serialize, Deserialize, Clone, Default)] +#[cfg_attr(feature = "full", derive(TS))] +#[cfg_attr(feature = "full", ts(export))] +/// Delete an external auth method. +pub struct DeleteOAuthProvider { + pub id: OAuthProviderId, +} + +#[skip_serializing_none] +#[derive(Debug, Serialize, Deserialize, Clone)] +#[cfg_attr(feature = "full", derive(TS))] +#[cfg_attr(feature = "full", ts(export))] +/// Logging in with an OAuth 2.0 authorization +pub struct AuthenticateWithOauth { + pub code: String, + #[cfg_attr(feature = "full", ts(type = "string"))] + pub oauth_provider_id: OAuthProviderId, + #[cfg_attr(feature = "full", ts(type = "string"))] + pub redirect_uri: Url, + pub show_nsfw: Option, + /// Username is mandatory at registration time + pub username: Option, + /// An answer is mandatory if require application is enabled on the server + pub answer: Option, +} diff --git a/crates/api_common/src/person.rs b/crates/api_common/src/person.rs index af42530a2..08d952070 100644 --- a/crates/api_common/src/person.rs +++ b/crates/api_common/src/person.rs @@ -1,12 +1,13 @@ -use crate::sensitive::Sensitive; use lemmy_db_schema::{ newtypes::{CommentReplyId, CommunityId, LanguageId, PersonId, PersonMentionId}, + sensitive::SensitiveString, + source::{login_token::LoginToken, site::Site}, CommentSortType, ListingType, PostListingMode, - SortType, + PostSortType, }; -use lemmy_db_views::structs::{CommentView, PostView}; +use lemmy_db_views::structs::{CommentView, LocalImageView, PostView}; use lemmy_db_views_actor::structs::{ CommentReplyView, CommunityModeratorView, @@ -19,29 +20,29 @@ use serde_with::skip_serializing_none; use ts_rs::TS; #[skip_serializing_none] -#[derive(Debug, Serialize, Deserialize, Clone, Default)] +#[derive(Debug, Serialize, Deserialize, Clone, Default, PartialEq, Eq, Hash)] #[cfg_attr(feature = "full", derive(TS))] #[cfg_attr(feature = "full", ts(export))] /// Logging into lemmy. pub struct Login { - pub username_or_email: Sensitive, - pub password: Sensitive, + pub username_or_email: SensitiveString, + pub password: SensitiveString, /// May be required, if totp is enabled for their account. pub totp_2fa_token: Option, } #[skip_serializing_none] -#[derive(Debug, Serialize, Deserialize, Clone, Default)] +#[derive(Debug, Serialize, Deserialize, Clone, Default, PartialEq, Eq, Hash)] #[cfg_attr(feature = "full", derive(TS))] #[cfg_attr(feature = "full", ts(export))] /// Register / Sign up to lemmy. pub struct Register { pub username: String, - pub password: Sensitive, - pub password_verify: Sensitive, - pub show_nsfw: bool, + pub password: SensitiveString, + pub password_verify: SensitiveString, + pub show_nsfw: Option, /// email is mandatory if email verification is enabled on the server - pub email: Option>, + pub email: Option, /// The UUID of the captcha item. pub captcha_uuid: Option, /// Your captcha answer. 
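
The federate_retry_sleep_duration helper added to lib.rs above applies exponential backoff: the first retry is immediate, and afterwards the delay grows as 1.25^(retry_count - 1) seconds, capped at one day. A standalone sketch of just that arithmetic (not the actual function) reproduces the values asserted in its tests:

use std::time::Duration;

const DAY: Duration = Duration::from_secs(24 * 60 * 60);

// Same formula as federate_retry_sleep_duration: 1.25^(n - 1) seconds,
// with the first retry free and everything capped at one day.
fn backoff(retry_count: u32) -> Duration {
    if retry_count <= 1 {
        return Duration::from_secs(0);
    }
    let secs = 1.25_f64.powi(retry_count as i32 - 1);
    Duration::try_from_secs_f64(secs).unwrap_or(DAY).min(DAY)
}

fn main() {
    assert_eq!(backoff(2), Duration::new(1, 250_000_000)); // 1.25 s
    assert_eq!(backoff(5), Duration::new(2, 441_406_250)); // 1.25^4 s
    assert_eq!(backoff(100), DAY);                         // capped at one day
}
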
@@ -76,21 +77,25 @@ pub struct CaptchaResponse { } #[skip_serializing_none] -#[derive(Debug, Serialize, Deserialize, Clone, Default)] +#[derive(Debug, Serialize, Deserialize, Clone, Default, PartialEq, Eq, Hash)] #[cfg_attr(feature = "full", derive(TS))] #[cfg_attr(feature = "full", ts(export))] /// Saves settings for your user. pub struct SaveUserSettings { /// Show nsfw posts. pub show_nsfw: Option, + /// Blur nsfw posts. pub blur_nsfw: Option, - pub auto_expand: Option, - /// Show post and comment scores. - pub show_scores: Option, /// Your user's theme. pub theme: Option, - pub default_sort_type: Option, + /// The default post listing type, usually "local" pub default_listing_type: Option, + /// A post-view mode that changes how multiple post listings look. + pub post_listing_mode: Option, + /// The default post sort, usually "active" + pub default_post_sort_type: Option, + /// The default comment sort, usually "hot" + pub default_comment_sort_type: Option, /// The language of the lemmy interface pub interface_language: Option, /// A URL for your avatar. @@ -100,7 +105,7 @@ pub struct SaveUserSettings { /// Your display name, which can contain strange characters, and does not need to be unique. pub display_name: Option, /// Your email. - pub email: Option>, + pub email: Option, /// Your bio / info, in markdown. pub bio: Option, /// Your matrix user id. Ex: @my_user:matrix.org @@ -115,32 +120,36 @@ pub struct SaveUserSettings { pub show_bot_accounts: Option, /// Whether to show read posts. pub show_read_posts: Option, - /// Whether to show notifications for new posts. - // TODO notifs need to be reworked. - pub show_new_post_notifs: Option, /// A list of languages you are able to see discussion in. pub discussion_languages: Option>, /// Open links in a new tab pub open_links_in_new_tab: Option, /// Enable infinite scroll pub infinite_scroll_enabled: Option, - pub post_listing_mode: Option, /// Whether to allow keyboard navigation (for browsing and interacting with posts and comments). pub enable_keyboard_navigation: Option, - /// Whether user avatars or inline images in the UI that are gifs should be allowed to play or should be paused + /// Whether user avatars or inline images in the UI that are gifs should be allowed to play or + /// should be paused pub enable_animated_images: Option, /// Whether a user can send / receive private messages pub enable_private_messages: Option, + /// Whether to auto-collapse bot comments. + pub collapse_bot_comments: Option, + /// Some vote display mode settings + pub show_scores: Option, + pub show_upvotes: Option, + pub show_downvotes: Option, + pub show_upvote_percentage: Option, } -#[derive(Debug, Serialize, Deserialize, Clone, Default)] +#[derive(Debug, Serialize, Deserialize, Clone, Default, PartialEq, Eq, Hash)] #[cfg_attr(feature = "full", derive(TS))] #[cfg_attr(feature = "full", ts(export))] /// Changes your account password. pub struct ChangePassword { - pub new_password: Sensitive, - pub new_password_verify: Sensitive, - pub old_password: Sensitive, + pub new_password: SensitiveString, + pub new_password_verify: SensitiveString, + pub old_password: SensitiveString, } #[skip_serializing_none] @@ -149,8 +158,9 @@ pub struct ChangePassword { #[cfg_attr(feature = "full", ts(export))] /// A response for your login. pub struct LoginResponse { - /// This is None in response to `Register` if email verification is enabled, or the server requires registration applications. 
- pub jwt: Option>, + /// This is None in response to `Register` if email verification is enabled, or the server + /// requires registration applications. + pub jwt: Option, /// If registration applications are required, this will return true for a signup response. pub registration_created: bool, /// If email verifications are required, this will return true for a signup response. @@ -158,7 +168,7 @@ pub struct LoginResponse { } #[skip_serializing_none] -#[derive(Debug, Serialize, Deserialize, Clone, Default)] +#[derive(Debug, Serialize, Deserialize, Clone, Default, PartialEq, Eq, Hash)] #[cfg_attr(feature = "full", derive(TS))] #[cfg_attr(feature = "full", ts(export))] /// Gets a person's details. @@ -168,25 +178,27 @@ pub struct GetPersonDetails { pub person_id: Option, /// Example: dessalines , or dessalines@xyz.tld pub username: Option, - pub sort: Option, + pub sort: Option, pub page: Option, pub limit: Option, pub community_id: Option, pub saved_only: Option, } +#[skip_serializing_none] #[derive(Debug, Serialize, Deserialize, Clone)] #[cfg_attr(feature = "full", derive(TS))] #[cfg_attr(feature = "full", ts(export))] /// A person's details response. pub struct GetPersonDetailsResponse { pub person_view: PersonView, + pub site: Option, pub comments: Vec, pub posts: Vec, pub moderates: Vec, } -#[derive(Debug, Serialize, Deserialize, Clone, Default)] +#[derive(Debug, Serialize, Deserialize, Clone, Copy, Default, PartialEq, Eq)] #[cfg_attr(feature = "full", derive(TS))] #[cfg_attr(feature = "full", ts(export))] /// Adds an admin to a site. @@ -204,16 +216,20 @@ pub struct AddAdminResponse { } #[skip_serializing_none] -#[derive(Debug, Serialize, Deserialize, Clone, Default)] +#[derive(Debug, Serialize, Deserialize, Clone, Default, PartialEq, Eq, Hash)] #[cfg_attr(feature = "full", derive(TS))] #[cfg_attr(feature = "full", ts(export))] /// Ban a person from the site. pub struct BanPerson { pub person_id: PersonId, pub ban: bool, - /// Optionally remove all their data. Useful for new troll accounts. - pub remove_data: Option, + /// Optionally remove or restore all their data. Useful for new troll accounts. + /// If ban is true, then this means remove. If ban is false, it means restore. + pub remove_or_restore_data: Option, pub reason: Option, + /// A time that the ban will expire, in unix epoch seconds. + /// + /// An i64 unix timestamp is used for a simpler API client implementation. pub expires: Option, } @@ -235,7 +251,7 @@ pub struct BanPersonResponse { pub banned: bool, } -#[derive(Debug, Serialize, Deserialize, Clone, Default)] +#[derive(Debug, Serialize, Deserialize, Clone, Copy, Default, PartialEq, Eq, Hash)] #[cfg_attr(feature = "full", derive(TS))] #[cfg_attr(feature = "full", ts(export))] /// Block a person. @@ -254,7 +270,7 @@ pub struct BlockPersonResponse { } #[skip_serializing_none] -#[derive(Debug, Serialize, Deserialize, Clone, Default)] +#[derive(Debug, Serialize, Deserialize, Clone, Copy, Default, PartialEq, Eq, Hash)] #[cfg_attr(feature = "full", derive(TS))] #[cfg_attr(feature = "full", ts(export))] /// Get comment replies. @@ -275,7 +291,7 @@ pub struct GetRepliesResponse { } #[skip_serializing_none] -#[derive(Debug, Serialize, Deserialize, Clone, Default)] +#[derive(Debug, Serialize, Deserialize, Clone, Copy, Default, PartialEq, Eq, Hash)] #[cfg_attr(feature = "full", derive(TS))] #[cfg_attr(feature = "full", ts(export))] /// Get mentions for your user. 
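
Most request structs in person.rs carry #[skip_serializing_none], so fields left as None are dropped from the serialized JSON rather than sent as null. A small standalone sketch with a made-up struct (assuming serde, serde_with and serde_json are available) shows the effect:

use serde::Serialize;
use serde_with::skip_serializing_none;

// Hypothetical mirror of a request struct: only `username` is set,
// so the None fields are omitted from the JSON entirely.
#[skip_serializing_none]
#[derive(Serialize, Default)]
struct ExampleRequest {
    username: Option<String>,
    page: Option<i64>,
    limit: Option<i64>,
}

fn main() {
    let req = ExampleRequest {
        username: Some("dessalines".into()),
        ..Default::default()
    };
    // Prints {"username":"dessalines"} with no "page": null noise.
    println!("{}", serde_json::to_string(&req).expect("serialize"));
}
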
@@ -294,7 +310,7 @@ pub struct GetPersonMentionsResponse { pub mentions: Vec, } -#[derive(Debug, Serialize, Deserialize, Clone, Default)] +#[derive(Debug, Serialize, Deserialize, Clone, Copy, Default, PartialEq, Eq, Hash)] #[cfg_attr(feature = "full", derive(TS))] #[cfg_attr(feature = "full", ts(export))] /// Mark a person mention as read. @@ -311,7 +327,7 @@ pub struct PersonMentionResponse { pub person_mention_view: PersonMentionView, } -#[derive(Debug, Serialize, Deserialize, Clone, Default)] +#[derive(Debug, Serialize, Deserialize, Clone, Copy, Default, PartialEq, Eq, Hash)] #[cfg_attr(feature = "full", derive(TS))] #[cfg_attr(feature = "full", ts(export))] /// Mark a comment reply as read. @@ -328,35 +344,35 @@ pub struct CommentReplyResponse { pub comment_reply_view: CommentReplyView, } -#[derive(Debug, Serialize, Deserialize, Clone, Default)] +#[derive(Debug, Serialize, Deserialize, Clone, Default, PartialEq, Eq, Hash)] #[cfg_attr(feature = "full", derive(TS))] #[cfg_attr(feature = "full", ts(export))] /// Delete your account. pub struct DeleteAccount { - pub password: Sensitive, + pub password: SensitiveString, pub delete_content: bool, } -#[derive(Debug, Serialize, Deserialize, Clone, Default)] +#[derive(Debug, Serialize, Deserialize, Clone, Default, PartialEq, Eq, Hash)] #[cfg_attr(feature = "full", derive(TS))] #[cfg_attr(feature = "full", ts(export))] /// Reset your password via email. pub struct PasswordReset { - pub email: Sensitive, + pub email: SensitiveString, } -#[derive(Debug, Serialize, Deserialize, Clone, Default)] +#[derive(Debug, Serialize, Deserialize, Clone, Default, PartialEq, Eq, Hash)] #[cfg_attr(feature = "full", derive(TS))] #[cfg_attr(feature = "full", ts(export))] /// Change your password after receiving a reset request. pub struct PasswordChangeAfterReset { - pub token: Sensitive, - pub password: Sensitive, - pub password_verify: Sensitive, + pub token: SensitiveString, + pub password: SensitiveString, + pub password_verify: SensitiveString, } #[skip_serializing_none] -#[derive(Debug, Serialize, Deserialize, Clone, Default)] +#[derive(Debug, Serialize, Deserialize, Clone, Copy, Default, PartialEq, Eq, Hash)] #[cfg_attr(feature = "full", derive(TS))] #[cfg_attr(feature = "full", ts(export))] /// Get a count of the number of reports. @@ -386,7 +402,7 @@ pub struct GetUnreadCountResponse { pub private_messages: i64, } -#[derive(Serialize, Deserialize, Clone, Default, Debug)] +#[derive(Serialize, Deserialize, Clone, Default, Debug, PartialEq, Eq, Hash)] #[cfg_attr(feature = "full", derive(TS))] #[cfg_attr(feature = "full", ts(export))] /// Verify your email. @@ -398,10 +414,10 @@ pub struct VerifyEmail { #[cfg_attr(feature = "full", derive(TS))] #[cfg_attr(feature = "full", ts(export))] pub struct GenerateTotpSecretResponse { - pub totp_secret_url: Sensitive, + pub totp_secret_url: SensitiveString, } -#[derive(Debug, Serialize, Deserialize, Clone)] +#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq, Hash)] #[cfg_attr(feature = "full", derive(TS))] #[cfg_attr(feature = "full", ts(export))] pub struct UpdateTotp { @@ -415,3 +431,27 @@ pub struct UpdateTotp { pub struct UpdateTotpResponse { pub enabled: bool, } + +#[skip_serializing_none] +#[derive(Debug, Serialize, Deserialize, Clone, Default, PartialEq, Eq, Hash)] +#[cfg_attr(feature = "full", derive(TS))] +#[cfg_attr(feature = "full", ts(export))] +/// Get your user's image / media uploads. 
+pub struct ListMedia { + pub page: Option, + pub limit: Option, +} + +#[derive(Debug, Serialize, Deserialize, Clone)] +#[cfg_attr(feature = "full", derive(TS))] +#[cfg_attr(feature = "full", ts(export))] +pub struct ListMediaResponse { + pub images: Vec, +} + +#[derive(Debug, Serialize, Deserialize, Clone)] +#[cfg_attr(feature = "full", derive(TS))] +#[cfg_attr(feature = "full", ts(export))] +pub struct ListLoginsResponse { + pub logins: Vec, +} diff --git a/crates/api_common/src/post.rs b/crates/api_common/src/post.rs index c7ee08983..fa45459e2 100644 --- a/crates/api_common/src/post.rs +++ b/crates/api_common/src/post.rs @@ -2,32 +2,36 @@ use lemmy_db_schema::{ newtypes::{CommentId, CommunityId, DbUrl, LanguageId, PostId, PostReportId}, ListingType, PostFeatureType, - SortType, + PostSortType, }; -use lemmy_db_views::structs::{PaginationCursor, PostReportView, PostView}; +use lemmy_db_views::structs::{PaginationCursor, PostReportView, PostView, VoteView}; use lemmy_db_views_actor::structs::{CommunityModeratorView, CommunityView}; use serde::{Deserialize, Serialize}; use serde_with::skip_serializing_none; #[cfg(feature = "full")] use ts_rs::TS; -use url::Url; #[skip_serializing_none] -#[derive(Debug, Serialize, Deserialize, Clone, Default)] +#[derive(Debug, Serialize, Deserialize, Clone, Default, PartialEq, Eq, Hash)] #[cfg_attr(feature = "full", derive(TS))] #[cfg_attr(feature = "full", ts(export))] /// Create a post. pub struct CreatePost { pub name: String, pub community_id: CommunityId, - #[cfg_attr(feature = "full", ts(type = "string"))] - pub url: Option, + pub url: Option, /// An optional body for the post in markdown. pub body: Option, + /// An optional alt_text, usable for image posts. + pub alt_text: Option, /// A honeypot to catch bots. Should be None. pub honeypot: Option, pub nsfw: Option, pub language_id: Option, + /// Instead of fetching a thumbnail, use a custom one. + pub custom_thumbnail: Option, + /// Time when this post should be scheduled. Null means publish immediately. + pub scheduled_publish_time: Option, } #[derive(Debug, Serialize, Deserialize, Clone)] @@ -38,7 +42,7 @@ pub struct PostResponse { } #[skip_serializing_none] -#[derive(Debug, Serialize, Deserialize, Clone)] +#[derive(Debug, Serialize, Deserialize, Clone, Copy, PartialEq, Eq, Hash)] #[cfg_attr(feature = "full", derive(TS))] #[cfg_attr(feature = "full", ts(export))] /// Get a post. Needs either the post id, or comment_id. @@ -61,13 +65,13 @@ pub struct GetPostResponse { } #[skip_serializing_none] -#[derive(Serialize, Deserialize, Debug, Clone, Default)] +#[derive(Serialize, Deserialize, Debug, Clone, Default, PartialEq, Eq, Hash)] #[cfg_attr(feature = "full", derive(TS))] #[cfg_attr(feature = "full", ts(export))] /// Get a list of posts. 
pub struct GetPosts { pub type_: Option, - pub sort: Option, + pub sort: Option, /// DEPRECATED, use page_cursor pub page: Option, pub limit: Option, @@ -76,6 +80,11 @@ pub struct GetPosts { pub saved_only: Option, pub liked_only: Option, pub disliked_only: Option, + pub show_hidden: Option, + /// If true, then show the read posts (even if your user setting is to hide them) + pub show_read: Option, + /// If true, then show the nsfw posts (even if your user setting is to hide them) + pub show_nsfw: Option, pub page_cursor: Option, } @@ -90,7 +99,7 @@ pub struct GetPostsResponse { pub next_page: Option, } -#[derive(Debug, Serialize, Deserialize, Clone, Default)] +#[derive(Debug, Serialize, Deserialize, Clone, Copy, Default, PartialEq, Eq, Hash)] #[cfg_attr(feature = "full", derive(TS))] #[cfg_attr(feature = "full", ts(export))] /// Like a post. @@ -101,22 +110,27 @@ pub struct CreatePostLike { } #[skip_serializing_none] -#[derive(Debug, Serialize, Deserialize, Clone, Default)] +#[derive(Debug, Serialize, Deserialize, Clone, Default, PartialEq, Eq, Hash)] #[cfg_attr(feature = "full", derive(TS))] #[cfg_attr(feature = "full", ts(export))] /// Edit a post. pub struct EditPost { pub post_id: PostId, pub name: Option, - #[cfg_attr(feature = "full", ts(type = "string"))] - pub url: Option, + pub url: Option, /// An optional body for the post in markdown. pub body: Option, + /// An optional alt_text, usable for image posts. + pub alt_text: Option, pub nsfw: Option, pub language_id: Option, + /// Instead of fetching a thumbnail, use a custom one. + pub custom_thumbnail: Option, + /// Time when this post should be scheduled. Null means publish immediately. + pub scheduled_publish_time: Option, } -#[derive(Debug, Serialize, Deserialize, Clone, Default)] +#[derive(Debug, Serialize, Deserialize, Clone, Copy, Default, PartialEq, Eq, Hash)] #[cfg_attr(feature = "full", derive(TS))] #[cfg_attr(feature = "full", ts(export))] /// Delete a post. @@ -126,7 +140,7 @@ pub struct DeletePost { } #[skip_serializing_none] -#[derive(Debug, Serialize, Deserialize, Clone, Default)] +#[derive(Debug, Serialize, Deserialize, Clone, Default, PartialEq, Eq, Hash)] #[cfg_attr(feature = "full", derive(TS))] #[cfg_attr(feature = "full", ts(export))] /// Remove a post (only doable by mods). @@ -137,18 +151,26 @@ pub struct RemovePost { } #[skip_serializing_none] -#[derive(Debug, Serialize, Deserialize, Clone, Default)] +#[derive(Debug, Serialize, Deserialize, Clone, Default, PartialEq, Eq, Hash)] #[cfg_attr(feature = "full", derive(TS))] #[cfg_attr(feature = "full", ts(export))] /// Mark a post as read. pub struct MarkPostAsRead { - /// TODO: deprecated, send `post_ids` instead - pub post_id: Option, - pub post_ids: Option>, + pub post_ids: Vec, pub read: bool, } -#[derive(Debug, Serialize, Deserialize, Clone, Default)] +#[skip_serializing_none] +#[derive(Debug, Serialize, Deserialize, Clone, Default, PartialEq, Eq, Hash)] +#[cfg_attr(feature = "full", derive(TS))] +#[cfg_attr(feature = "full", ts(export))] +/// Hide a post from list views +pub struct HidePost { + pub post_ids: Vec, + pub hide: bool, +} + +#[derive(Debug, Serialize, Deserialize, Clone, Copy, Default, PartialEq, Eq, Hash)] #[cfg_attr(feature = "full", derive(TS))] #[cfg_attr(feature = "full", ts(export))] /// Lock a post (prevent new comments). 
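
MarkPostAsRead above now takes a required post_ids vector instead of the old optional single post_id, so several posts can be marked read in one call. A client-side sketch of the corresponding JSON body, with made-up ids and plain serde_json:

use serde_json::json;

fn main() {
    // Matches the new MarkPostAsRead shape: several posts marked read at once.
    let body = json!({
        "post_ids": [123, 456, 789],
        "read": true
    });
    println!("{body}");
}
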
@@ -157,7 +179,7 @@ pub struct LockPost { pub locked: bool, } -#[derive(Debug, Serialize, Deserialize, Clone, Default)] +#[derive(Debug, Serialize, Deserialize, Clone, Copy, Default, PartialEq, Eq, Hash)] #[cfg_attr(feature = "full", derive(TS))] #[cfg_attr(feature = "full", ts(export))] /// Feature a post (stickies / pins to the top). @@ -167,7 +189,7 @@ pub struct FeaturePost { pub feature_type: PostFeatureType, } -#[derive(Debug, Serialize, Deserialize, Clone, Default)] +#[derive(Debug, Serialize, Deserialize, Clone, Copy, Default, PartialEq, Eq, Hash)] #[cfg_attr(feature = "full", derive(TS))] #[cfg_attr(feature = "full", ts(export))] /// Save / bookmark a post. @@ -193,7 +215,7 @@ pub struct PostReportResponse { pub post_report_view: PostReportView, } -#[derive(Debug, Serialize, Deserialize, Clone, Default)] +#[derive(Debug, Serialize, Deserialize, Clone, Copy, Default, PartialEq, Eq, Hash)] #[cfg_attr(feature = "full", derive(TS))] #[cfg_attr(feature = "full", ts(export))] /// Resolve a post report (mods only). @@ -203,7 +225,7 @@ pub struct ResolvePostReport { } #[skip_serializing_none] -#[derive(Debug, Serialize, Deserialize, Clone, Default)] +#[derive(Debug, Serialize, Deserialize, Clone, Copy, Default, PartialEq, Eq, Hash)] #[cfg_attr(feature = "full", derive(TS))] #[cfg_attr(feature = "full", ts(export))] /// List post reports. @@ -214,6 +236,7 @@ pub struct ListPostReports { pub unresolved_only: Option, /// if no community is given, it returns reports for all communities moderated by the auth user pub community_id: Option, + pub post_id: Option, } #[derive(Debug, Serialize, Deserialize, Clone)] @@ -224,13 +247,12 @@ pub struct ListPostReportsResponse { pub post_reports: Vec, } -#[derive(Debug, Serialize, Deserialize, Clone)] +#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq, Hash)] #[cfg_attr(feature = "full", derive(TS))] #[cfg_attr(feature = "full", ts(export))] /// Get metadata for a given site. pub struct GetSiteMetadata { - #[cfg_attr(feature = "full", ts(type = "string"))] - pub url: Url, + pub url: String, } #[derive(Debug, Serialize, Deserialize, Clone)] @@ -238,17 +260,47 @@ pub struct GetSiteMetadata { #[cfg_attr(feature = "full", ts(export))] /// The site metadata response. pub struct GetSiteMetadataResponse { - pub metadata: SiteMetadata, + pub metadata: LinkMetadata, } #[skip_serializing_none] -#[derive(Debug, Deserialize, Serialize, PartialEq, Eq, Clone)] +#[derive(Debug, Deserialize, Serialize, PartialEq, Eq, Clone, Default, Hash)] #[cfg_attr(feature = "full", derive(TS))] #[cfg_attr(feature = "full", ts(export))] /// Site metadata, from its opengraph tags. -pub struct SiteMetadata { +pub struct LinkMetadata { + #[serde(flatten)] + pub opengraph_data: OpenGraphData, + pub content_type: Option, +} + +#[skip_serializing_none] +#[derive(Debug, Deserialize, Serialize, PartialEq, Eq, Clone, Default, Hash)] +#[cfg_attr(feature = "full", derive(TS))] +#[cfg_attr(feature = "full", ts(export))] +/// Site metadata, from its opengraph tags. +pub struct OpenGraphData { pub title: Option, pub description: Option, pub(crate) image: Option, pub embed_video_url: Option, } + +#[skip_serializing_none] +#[derive(Debug, Serialize, Deserialize, Clone, Copy, Default, PartialEq, Eq, Hash)] +#[cfg_attr(feature = "full", derive(TS))] +#[cfg_attr(feature = "full", ts(export))] +/// List post likes. Admins-only. 
+pub struct ListPostLikes { + pub post_id: PostId, + pub page: Option, + pub limit: Option, +} + +#[derive(Debug, Serialize, Deserialize, Clone)] +#[cfg_attr(feature = "full", derive(TS))] +#[cfg_attr(feature = "full", ts(export))] +/// The post likes response +pub struct ListPostLikesResponse { + pub post_likes: Vec, +} diff --git a/crates/api_common/src/private_message.rs b/crates/api_common/src/private_message.rs index 8d469127d..429d68643 100644 --- a/crates/api_common/src/private_message.rs +++ b/crates/api_common/src/private_message.rs @@ -5,7 +5,7 @@ use serde_with::skip_serializing_none; #[cfg(feature = "full")] use ts_rs::TS; -#[derive(Debug, Serialize, Deserialize, Clone, Default)] +#[derive(Debug, Serialize, Deserialize, Clone, Default, PartialEq, Eq, Hash)] #[cfg_attr(feature = "full", derive(TS))] #[cfg_attr(feature = "full", ts(export))] /// Create a private message. @@ -14,7 +14,7 @@ pub struct CreatePrivateMessage { pub recipient_id: PersonId, } -#[derive(Debug, Serialize, Deserialize, Clone, Default)] +#[derive(Debug, Serialize, Deserialize, Clone, Default, PartialEq, Eq, Hash)] #[cfg_attr(feature = "full", derive(TS))] #[cfg_attr(feature = "full", ts(export))] /// Edit a private message. @@ -23,7 +23,7 @@ pub struct EditPrivateMessage { pub content: String, } -#[derive(Debug, Serialize, Deserialize, Clone, Default)] +#[derive(Debug, Serialize, Deserialize, Clone, Copy, Default, PartialEq, Eq, Hash)] #[cfg_attr(feature = "full", derive(TS))] #[cfg_attr(feature = "full", ts(export))] /// Delete a private message. @@ -32,7 +32,7 @@ pub struct DeletePrivateMessage { pub deleted: bool, } -#[derive(Debug, Serialize, Deserialize, Clone, Default)] +#[derive(Debug, Serialize, Deserialize, Clone, Copy, Default, PartialEq, Eq, Hash)] #[cfg_attr(feature = "full", derive(TS))] #[cfg_attr(feature = "full", ts(export))] /// Mark a private message as read. @@ -42,7 +42,7 @@ pub struct MarkPrivateMessageAsRead { } #[skip_serializing_none] -#[derive(Debug, Serialize, Deserialize, Clone, Default)] +#[derive(Debug, Serialize, Deserialize, Clone, Copy, Default, PartialEq, Eq, Hash)] #[cfg_attr(feature = "full", derive(TS))] #[cfg_attr(feature = "full", ts(export))] /// Get your private messages. @@ -69,7 +69,7 @@ pub struct PrivateMessageResponse { pub private_message_view: PrivateMessageView, } -#[derive(Debug, Serialize, Deserialize, Clone, Default)] +#[derive(Debug, Serialize, Deserialize, Clone, Default, PartialEq, Eq, Hash)] #[cfg_attr(feature = "full", derive(TS))] #[cfg_attr(feature = "full", ts(export))] /// Create a report for a private message. @@ -86,7 +86,7 @@ pub struct PrivateMessageReportResponse { pub private_message_report_view: PrivateMessageReportView, } -#[derive(Debug, Serialize, Deserialize, Clone, Default)] +#[derive(Debug, Serialize, Deserialize, Clone, Copy, Default, PartialEq, Eq, Hash)] #[cfg_attr(feature = "full", derive(TS))] #[cfg_attr(feature = "full", ts(export))] /// Resolve a private message report. @@ -96,7 +96,7 @@ pub struct ResolvePrivateMessageReport { } #[skip_serializing_none] -#[derive(Debug, Serialize, Deserialize, Clone, Default)] +#[derive(Debug, Serialize, Deserialize, Clone, Copy, Default, PartialEq, Eq, Hash)] #[cfg_attr(feature = "full", derive(TS))] #[cfg_attr(feature = "full", ts(export))] /// List private message reports. 
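For readers unfamiliar with `#[serde(flatten)]`, the following standalone sketch (with made-up struct names) shows why `LinkMetadata` above serializes its `OpenGraphData` fields at the top level next to `content_type`, rather than as a nested object.

use serde::Serialize;

#[derive(Serialize, Default)]
struct OgSketch {
    title: Option<String>,
}

#[derive(Serialize, Default)]
struct MetaSketch {
    #[serde(flatten)]
    opengraph_data: OgSketch,
    content_type: Option<String>,
}

// serde_json::to_string(&MetaSketch {
//     opengraph_data: OgSketch { title: Some("Example".into()) },
//     content_type: Some("text/html".into()),
// })
// yields {"title":"Example","content_type":"text/html"}: the flattened fields
// appear at the top level instead of under an "opengraph_data" key.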
diff --git a/crates/api_common/src/request.rs b/crates/api_common/src/request.rs index 0064c8045..b0da6cf4d 100644 --- a/crates/api_common/src/request.rs +++ b/crates/api_common/src/request.rs @@ -1,52 +1,188 @@ -use crate::{context::LemmyContext, post::SiteMetadata}; -use encoding::{all::encodings, DecoderTrap}; -use lemmy_db_schema::newtypes::DbUrl; -use lemmy_utils::{ - error::{LemmyError, LemmyErrorType}, - settings::structs::Settings, - version::VERSION, - REQWEST_TIMEOUT, +use crate::{ + context::LemmyContext, + lemmy_db_schema::traits::Crud, + post::{LinkMetadata, OpenGraphData}, + send_activity::{ActivityChannel, SendActivityData}, + utils::proxy_image_link, +}; +use activitypub_federation::config::Data; +use chrono::{DateTime, Utc}; +use encoding_rs::{Encoding, UTF_8}; +use futures::StreamExt; +use lemmy_db_schema::{ + newtypes::DbUrl, + source::{ + images::{ImageDetailsForm, LocalImage, LocalImageForm}, + post::{Post, PostUpdateForm}, + site::Site, + }, +}; +use lemmy_utils::{ + error::{LemmyError, LemmyErrorType, LemmyResult}, + settings::structs::{PictrsImageMode, Settings}, + REQWEST_TIMEOUT, + VERSION, +}; +use mime::Mime; +use reqwest::{ + header::{CONTENT_TYPE, RANGE}, + Client, + ClientBuilder, + Response, }; -use percent_encoding::{utf8_percent_encode, NON_ALPHANUMERIC}; -use reqwest::{Client, ClientBuilder}; use reqwest_middleware::ClientWithMiddleware; -use serde::Deserialize; +use serde::{Deserialize, Serialize}; use tracing::info; use url::Url; +use urlencoding::encode; use webpage::HTML; -/// Fetches the post link html tags (like title, description, image, etc) -#[tracing::instrument(skip_all)] -pub async fn fetch_site_metadata( - client: &ClientWithMiddleware, - url: &Url, -) -> Result { - info!("Fetching site metadata for url: {}", url); - let response = client.get(url.as_str()).send().await?; +pub fn client_builder(settings: &Settings) -> ClientBuilder { + let user_agent = format!("Lemmy/{VERSION}; +{}", settings.get_protocol_and_hostname()); - // Can't use .text() here, because it only checks the content header, not the actual bytes - // https://github.com/LemmyNet/lemmy/issues/1964 - let html_bytes = response.bytes().await.map_err(LemmyError::from)?.to_vec(); - - let tags = html_to_site_metadata(&html_bytes, url)?; - - Ok(tags) + Client::builder() + .user_agent(user_agent.clone()) + .timeout(REQWEST_TIMEOUT) + .connect_timeout(REQWEST_TIMEOUT) + .use_rustls_tls() } -fn html_to_site_metadata(html_bytes: &[u8], url: &Url) -> Result { - let html = String::from_utf8_lossy(html_bytes); +/// Fetches metadata for the given link and optionally generates thumbnail. +#[tracing::instrument(skip_all)] +pub async fn fetch_link_metadata(url: &Url, context: &LemmyContext) -> LemmyResult { + info!("Fetching site metadata for url: {}", url); + // We only fetch the first 64kB of data in order to not waste bandwidth especially for large + // binary files + let bytes_to_fetch = 64 * 1024; + let response = context + .client() + .get(url.as_str()) + // we only need the first chunk of data. Note that we do not check for Accept-Range so the + // server may ignore this and still respond with the full response + .header(RANGE, format!("bytes=0-{}", bytes_to_fetch - 1)) /* -1 because inclusive */ + .send() + .await?; - // Make sure the first line is doctype html - let first_line = html - .trim_start() - .lines() - .next() - .ok_or(LemmyErrorType::NoLinesInHtml)? 
- .to_lowercase(); + let content_type: Option = response + .headers() + .get(CONTENT_TYPE) + .and_then(|h| h.to_str().ok()) + .and_then(|h| h.parse().ok()); - if !first_line.starts_with("") { - Err(LemmyErrorType::SiteMetadataPageIsNotDoctypeHtml)? + let opengraph_data = { + // if the content type is not text/html, we don't need to parse it + let is_html = content_type + .as_ref() + .map(|c| { + (c.type_() == mime::TEXT && c.subtype() == mime::HTML) + || + // application/xhtml+xml is a subset of HTML + (c.type_() == mime::APPLICATION && c.subtype() == "xhtml") + }) + .unwrap_or(false); + if !is_html { + Default::default() + } else { + // Can't use .text() here, because it only checks the content header, not the actual bytes + // https://github.com/LemmyNet/lemmy/issues/1964 + // So we want to do deep inspection of the actually returned bytes but need to be careful not + // spend too much time parsing binary data as HTML + + // only take first bytes regardless of how many bytes the server returns + let html_bytes = collect_bytes_until_limit(response, bytes_to_fetch).await?; + extract_opengraph_data(&html_bytes, url) + .map_err(|e| info!("{e}")) + .unwrap_or_default() + } + }; + Ok(LinkMetadata { + opengraph_data, + content_type: content_type.map(|c| c.to_string()), + }) +} + +async fn collect_bytes_until_limit( + response: Response, + requested_bytes: usize, +) -> Result, LemmyError> { + let mut stream = response.bytes_stream(); + let mut bytes = Vec::with_capacity(requested_bytes); + while let Some(chunk) = stream.next().await { + let chunk = chunk.map_err(LemmyError::from)?; + // we may go over the requested size here but the important part is we don't keep aggregating + // more chunks than needed + bytes.extend_from_slice(&chunk); + if bytes.len() >= requested_bytes { + bytes.truncate(requested_bytes); + break; + } } + Ok(bytes) +} + +/// Generates and saves a post thumbnail and metadata. +/// +/// Takes a callback to generate a send activity task, so that post can be federated with metadata. +/// +/// TODO: `federated_thumbnail` param can be removed once we federate full metadata and can +/// write it to db directly, without calling this function. 
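The Range-header-plus-client-side-truncation pattern used by `fetch_link_metadata` and `collect_bytes_until_limit` above can be sketched in isolation roughly as follows. This is illustrative only, not code added by the diff, and it assumes a reqwest client built with the `stream` feature enabled.

use futures::StreamExt;
use reqwest::header::RANGE;

async fn fetch_prefix(url: &str, limit: usize) -> Result<Vec<u8>, reqwest::Error> {
    // Ask for the first `limit` bytes; servers may ignore Range, so the body
    // stream is also truncated on the client side.
    let response = reqwest::Client::new()
        .get(url)
        .header(RANGE, format!("bytes=0-{}", limit - 1))
        .send()
        .await?;
    let mut bytes = Vec::with_capacity(limit);
    let mut stream = response.bytes_stream();
    while let Some(chunk) = stream.next().await {
        bytes.extend_from_slice(&chunk?);
        if bytes.len() >= limit {
            bytes.truncate(limit);
            break;
        }
    }
    Ok(bytes)
}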
+/// https://github.com/LemmyNet/lemmy/issues/4598 +pub async fn generate_post_link_metadata( + post: Post, + custom_thumbnail: Option, + send_activity: impl FnOnce(Post) -> Option + Send + 'static, + context: Data, +) -> LemmyResult<()> { + let metadata = match &post.url { + Some(url) => fetch_link_metadata(url, &context).await.unwrap_or_default(), + _ => Default::default(), + }; + + let is_image_post = metadata + .content_type + .as_ref() + .is_some_and(|content_type| content_type.starts_with("image")); + + // Decide if we are allowed to generate local thumbnail + let site = Site::read_local(&mut context.pool()).await?; + let allow_sensitive = site.content_warning.is_some(); + let allow_generate_thumbnail = allow_sensitive || !post.nsfw; + + let image_url = if is_image_post { + post.url + } else { + metadata.opengraph_data.image.clone() + }; + + let thumbnail_url = if let (false, Some(url)) = (is_image_post, custom_thumbnail) { + proxy_image_link(url, &context).await.ok() + } else if let (true, Some(url)) = (allow_generate_thumbnail, image_url) { + generate_pictrs_thumbnail(&url, &context) + .await + .ok() + .map(Into::into) + } else { + metadata.opengraph_data.image.clone() + }; + + let form = PostUpdateForm { + embed_title: Some(metadata.opengraph_data.title), + embed_description: Some(metadata.opengraph_data.description), + embed_video_url: Some(metadata.opengraph_data.embed_video_url), + thumbnail_url: Some(thumbnail_url), + url_content_type: Some(metadata.content_type), + ..Default::default() + }; + let updated_post = Post::update(&mut context.pool(), post.id, &form).await?; + if let Some(send_activity) = send_activity(updated_post) { + ActivityChannel::submit_activity(send_activity, &context).await?; + } + Ok(()) +} + +/// Extract site metadata from HTML Opengraph attributes. +fn extract_opengraph_data(html_bytes: &[u8], url: &Url) -> LemmyResult { + let html = String::from_utf8_lossy(html_bytes); let mut page = HTML::from_string(html.to_string(), None)?; @@ -54,11 +190,9 @@ fn html_to_site_metadata(html_bytes: &[u8], url: &Url) -> Result Result Result, - msg: String, +#[derive(Deserialize, Serialize, Debug)] +pub struct PictrsResponse { + pub files: Option>, + pub msg: String, } -#[derive(Deserialize, Debug, Clone)] -pub(crate) struct PictrsFile { - file: String, - #[allow(dead_code)] - delete_token: String, +#[derive(Deserialize, Serialize, Debug)] +pub struct PictrsFile { + pub file: String, + pub delete_token: String, + pub details: PictrsFileDetails, } -#[derive(Deserialize, Debug, Clone)] -pub(crate) struct PictrsPurgeResponse { - msg: String, -} - -#[tracing::instrument(skip_all)] -pub(crate) async fn fetch_pictrs( - client: &ClientWithMiddleware, - settings: &Settings, - image_url: &Url, -) -> Result { - let pictrs_config = settings.pictrs_config()?; - is_image_content_type(client, image_url).await?; - - if pictrs_config.cache_remote_images { - // fetch remote non-pictrs images for persistent thumbnail link - let fetch_url = format!( - "{}image/download?url={}", - pictrs_config.url, - utf8_percent_encode(image_url.as_str(), NON_ALPHANUMERIC) // TODO this might not be needed - ); - - let response = client - .get(&fetch_url) - .timeout(REQWEST_TIMEOUT) - .send() - .await?; - - let response: PictrsResponse = response.json().await.map_err(LemmyError::from)?; - - if response.msg == "ok" { - Ok(response) - } else { - Err(LemmyErrorType::PictrsResponseError(response.msg))? - } - } else { - Err(LemmyErrorType::PictrsCachingDisabled)? 
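The thumbnail selection order in `generate_post_link_metadata` can be summarized, very roughly, as the pure function below; plain `String`s stand in for the real `Url`/`DbUrl` types, error handling and the pict-rs/proxy calls are omitted, and the names are illustrative.

fn pick_thumbnail(
    is_image_post: bool,
    allow_generate_thumbnail: bool,
    custom_thumbnail: Option<String>,
    image_url: Option<String>,
    opengraph_image: Option<String>,
) -> Option<String> {
    if let (false, Some(url)) = (is_image_post, custom_thumbnail) {
        // A custom thumbnail is only honoured for non-image posts.
        Some(url)
    } else if let (true, Some(url)) = (allow_generate_thumbnail, image_url) {
        // Otherwise a pict-rs thumbnail is generated from the image URL.
        Some(url)
    } else {
        // Fall back to the opengraph image, if any.
        opengraph_image
    }
}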
+impl PictrsFile { + pub fn thumbnail_url(&self, protocol_and_hostname: &str) -> Result { + Url::parse(&format!( + "{protocol_and_hostname}/pictrs/image/{}", + self.file + )) } } +/// Stores extra details about a Pictrs image. +#[derive(Deserialize, Serialize, Debug)] +pub struct PictrsFileDetails { + /// In pixels + pub width: u16, + /// In pixels + pub height: u16, + pub content_type: String, + pub created_at: DateTime, +} + +impl PictrsFileDetails { + /// Builds the image form. This should always use the thumbnail_url, + /// Because the post_view joins to it + pub fn build_image_details_form(&self, thumbnail_url: &Url) -> ImageDetailsForm { + ImageDetailsForm { + link: thumbnail_url.clone().into(), + width: self.width.into(), + height: self.height.into(), + content_type: self.content_type.clone(), + } + } +} + +#[derive(Deserialize, Serialize, Debug)] +struct PictrsPurgeResponse { + msg: String, +} + /// Purges an image from pictrs /// Note: This should often be coerced from a Result to .ok() in order to fail softly, because: /// - It might fail due to image being not local /// - It might not be an image /// - Pictrs might not be set up -pub async fn purge_image_from_pictrs( - image_url: &Url, - context: &LemmyContext, -) -> Result<(), LemmyError> { +pub async fn purge_image_from_pictrs(image_url: &Url, context: &LemmyContext) -> LemmyResult<()> { is_image_content_type(context.client(), image_url).await?; let alias = image_url @@ -167,13 +296,6 @@ pub async fn purge_image_from_pictrs( .next_back() .ok_or(LemmyErrorType::ImageUrlMissingLastPathSegment)?; - purge_image_from_pictrs_by_alias(alias, context).await -} - -pub async fn purge_image_from_pictrs_by_alias( - alias: &str, - context: &LemmyContext, -) -> Result<(), LemmyError> { let pictrs_config = context.settings().pictrs_config()?; let purge_url = format!("{}internal/purge?alias={}", pictrs_config.url, alias); @@ -190,10 +312,9 @@ pub async fn purge_image_from_pictrs_by_alias( let response: PictrsPurgeResponse = response.json().await.map_err(LemmyError::from)?; - if response.msg == "ok" { - Ok(()) - } else { - Err(LemmyErrorType::PictrsPurgeResponseError(response.msg))? + match response.msg.as_str() { + "ok" => Ok(()), + _ => Err(LemmyErrorType::PictrsPurgeResponseError(response.msg))?, } } @@ -201,7 +322,7 @@ pub async fn delete_image_from_pictrs( alias: &str, delete_token: &str, context: &LemmyContext, -) -> Result<(), LemmyError> { +) -> LemmyResult<()> { let pictrs_config = context.settings().pictrs_config()?; let url = format!( "{}image/delete/{}/{}", @@ -217,64 +338,102 @@ pub async fn delete_image_from_pictrs( Ok(()) } -/// Both are options, since the URL might be either an html page, or an image -/// Returns the SiteMetadata, and an image URL, if there is a picture associated +/// Retrieves the image with local pict-rs and generates a thumbnail. Returns the thumbnail url. #[tracing::instrument(skip_all)] -pub async fn fetch_site_data( - client: &ClientWithMiddleware, - settings: &Settings, - url: Option<&Url>, - include_image: bool, -) -> (Option, Option) { - match &url { - Some(url) => { - // Fetch metadata - // Ignore errors, since it may be an image, or not have the data. 
- // Warning, this may ignore SSL errors - let metadata_option = fetch_site_metadata(client, url).await.ok(); - if !include_image { - (metadata_option, None) - } else { - let thumbnail_url = - fetch_pictrs_url_from_site_metadata(client, &metadata_option, settings, url) - .await - .ok(); - (metadata_option, thumbnail_url) - } +async fn generate_pictrs_thumbnail(image_url: &Url, context: &LemmyContext) -> LemmyResult { + let pictrs_config = context.settings().pictrs_config()?; + + match pictrs_config.image_mode() { + PictrsImageMode::None => return Ok(image_url.clone()), + PictrsImageMode::ProxyAllImages => { + return Ok(proxy_image_link(image_url.clone(), context).await?.into()) } - None => (None, None), - } -} - -async fn fetch_pictrs_url_from_site_metadata( - client: &ClientWithMiddleware, - metadata_option: &Option, - settings: &Settings, - url: &Url, -) -> Result { - let pictrs_res = match metadata_option { - Some(metadata_res) => match &metadata_res.image { - // Metadata, with image - // Try to generate a small thumbnail if there's a full sized one from post-links - Some(metadata_image) => fetch_pictrs(client, settings, metadata_image).await, - // Metadata, but no image - None => fetch_pictrs(client, settings, url).await, - }, - // No metadata, try to fetch the URL as an image - None => fetch_pictrs(client, settings, url).await, - }?; - - Url::parse(&format!( - "{}/pictrs/image/{}", - settings.get_protocol_and_hostname(), - pictrs_res.files.first().expect("missing pictrs file").file - )) - .map(Into::into) - .map_err(Into::into) + _ => {} + }; + + // fetch remote non-pictrs images for persistent thumbnail link + // TODO: should limit size once supported by pictrs + let fetch_url = format!( + "{}image/download?url={}&resize={}", + pictrs_config.url, + encode(image_url.as_str()), + context.settings().pictrs_config()?.max_thumbnail_size + ); + + let res = context + .client() + .get(&fetch_url) + .timeout(REQWEST_TIMEOUT) + .send() + .await? + .json::() + .await?; + + let files = res.files.unwrap_or_default(); + + let image = files + .first() + .ok_or(LemmyErrorType::PictrsResponseError(res.msg))?; + + let form = LocalImageForm { + // This is none because its an internal request. + // IE, a local user shouldn't get to delete the thumbnails for their link posts + local_user_id: None, + pictrs_alias: image.file.clone(), + pictrs_delete_token: image.delete_token.clone(), + }; + let protocol_and_hostname = context.settings().get_protocol_and_hostname(); + let thumbnail_url = image.thumbnail_url(&protocol_and_hostname)?; + + // Also store the details for the image + let details_form = image.details.build_image_details_form(&thumbnail_url); + LocalImage::create(&mut context.pool(), &form, &details_form).await?; + + Ok(thumbnail_url) } +/// Fetches the image details for pictrs proxied images +/// +/// We don't need to check for image mode, as that's already been done #[tracing::instrument(skip_all)] -async fn is_image_content_type(client: &ClientWithMiddleware, url: &Url) -> Result<(), LemmyError> { +pub async fn fetch_pictrs_proxied_image_details( + image_url: &Url, + context: &LemmyContext, +) -> LemmyResult { + let pictrs_url = context.settings().pictrs_config()?.url; + let encoded_image_url = encode(image_url.as_str()); + + // Pictrs needs you to fetch the proxied image before you can fetch the details + let proxy_url = format!("{pictrs_url}image/original?proxy={encoded_image_url}"); + + let res = context + .client() + .get(&proxy_url) + .timeout(REQWEST_TIMEOUT) + .send() + .await? 
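A minimal sketch of the pict-rs download URL that `generate_pictrs_thumbnail` assembles above; the base URL and size below are placeholders, and the query parameters are taken from the code in this diff rather than from pict-rs documentation.

fn pictrs_download_url(pictrs_base_url: &str, image_url: &str, max_thumbnail_size: u32) -> String {
    // Matches the query shape used above: pict-rs fetches the remote image and
    // resizes it so the instance gets a persistent local thumbnail link.
    format!(
        "{}image/download?url={}&resize={}",
        pictrs_base_url,
        urlencoding::encode(image_url),
        max_thumbnail_size
    )
}

// pictrs_download_url("http://localhost:8080/", "https://example.com/a.png", 256)
// -> "http://localhost:8080/image/download?url=https%3A%2F%2Fexample.com%2Fa.png&resize=256"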
+ .status(); + if !res.is_success() { + Err(LemmyErrorType::NotAnImageType)? + } + + let details_url = format!("{pictrs_url}image/details/original?proxy={encoded_image_url}"); + + let res = context + .client() + .get(&details_url) + .timeout(REQWEST_TIMEOUT) + .send() + .await? + .json() + .await?; + + Ok(res) +} + +// TODO: get rid of this by reading content type from db +#[tracing::instrument(skip_all)] +async fn is_image_content_type(client: &ClientWithMiddleware, url: &Url) -> LemmyResult<()> { let response = client.get(url.as_str()).send().await?; if response .headers() @@ -289,99 +448,109 @@ async fn is_image_content_type(client: &ClientWithMiddleware, url: &Url) -> Resu } } -pub fn client_builder(settings: &Settings) -> ClientBuilder { - let user_agent = format!( - "Lemmy/{}; +{}", - VERSION, - settings.get_protocol_and_hostname() - ); - - Client::builder() - .user_agent(user_agent.clone()) - .timeout(REQWEST_TIMEOUT) - .connect_timeout(REQWEST_TIMEOUT) +/// When adding a new avatar, banner or similar image, delete the old one. +pub async fn replace_image( + new_image: &Option>, + old_image: &Option, + context: &Data, +) -> LemmyResult<()> { + if let (Some(Some(new_image)), Some(old_image)) = (new_image, old_image) { + // Note: Oftentimes front ends will include the current image in the form. + // In this case, deleting `old_image` would also be deletion of `new_image`, + // so the deletion must be skipped for the image to be kept. + if new_image != old_image { + // Ignore errors because image may be stored externally. + let image = LocalImage::delete_by_url(&mut context.pool(), old_image) + .await + .ok(); + if let Some(image) = image { + delete_image_from_pictrs(&image.pictrs_alias, &image.pictrs_delete_token, context).await?; + } + } + } + Ok(()) } #[cfg(test)] mod tests { - #![allow(clippy::unwrap_used)] - #![allow(clippy::indexing_slicing)] - use crate::request::{client_builder, fetch_site_metadata, html_to_site_metadata, SiteMetadata}; - use lemmy_utils::settings::SETTINGS; + use crate::{ + context::LemmyContext, + request::{extract_opengraph_data, fetch_link_metadata}, + }; + use lemmy_utils::error::LemmyResult; + use pretty_assertions::assert_eq; + use serial_test::serial; use url::Url; // These helped with testing #[tokio::test] - async fn test_site_metadata() { - let settings = &SETTINGS.clone(); - let client = client_builder(settings).build().unwrap().into(); - let sample_url = Url::parse("https://gitlab.com/IzzyOnDroid/repo/-/wikis/FAQ").unwrap(); - let sample_res = fetch_site_metadata(&client, &sample_url).await.unwrap(); + #[serial] + async fn test_link_metadata() -> LemmyResult<()> { + let context = LemmyContext::init_test_context().await; + let sample_url = Url::parse("https://gitlab.com/IzzyOnDroid/repo/-/wikis/FAQ")?; + let sample_res = fetch_link_metadata(&sample_url, &context).await?; assert_eq!( - SiteMetadata { - title: Some("FAQ · Wiki · IzzyOnDroid / repo · GitLab".to_string()), - description: Some( - "The F-Droid compatible repo at https://apt.izzysoft.de/fdroid/".to_string() - ), - image: Some( - Url::parse("https://gitlab.com/uploads/-/system/project/avatar/4877469/iod_logo.png") - .unwrap() - .into() - ), - embed_video_url: None, - }, - sample_res + Some("FAQ · Wiki · IzzyOnDroid / repo · GitLab".to_string()), + sample_res.opengraph_data.title ); + assert_eq!( + Some("The F-Droid compatible repo at https://apt.izzysoft.de/fdroid/".to_string()), + sample_res.opengraph_data.description + ); + assert_eq!( + Some( + 
Url::parse("https://gitlab.com/uploads/-/system/project/avatar/4877469/iod_logo.png")? + .into() + ), + sample_res.opengraph_data.image + ); + assert_eq!(None, sample_res.opengraph_data.embed_video_url); + assert_eq!( + Some(mime::TEXT_HTML_UTF_8.to_string()), + sample_res.content_type + ); + + Ok(()) } - // #[test] - // fn test_pictshare() { - // let res = fetch_pictshare("https://upload.wikimedia.org/wikipedia/en/2/27/The_Mandalorian_logo.jpg"); - // assert!(res.is_ok()); - // let res_other = fetch_pictshare("https://upload.wikimedia.org/wikipedia/en/2/27/The_Mandalorian_logo.jpgaoeu"); - // assert!(res_other.is_err()); - // } - #[test] - fn test_resolve_image_url() { + fn test_resolve_image_url() -> LemmyResult<()> { // url that lists the opengraph fields - let url = Url::parse("https://example.com/one/two.html").unwrap(); + let url = Url::parse("https://example.com/one/two.html")?; // root relative url let html_bytes = b""; - let metadata = html_to_site_metadata(html_bytes, &url).expect("Unable to parse metadata"); + let metadata = extract_opengraph_data(html_bytes, &url).expect("Unable to parse metadata"); assert_eq!( metadata.image, - Some(Url::parse("https://example.com/image.jpg").unwrap().into()) + Some(Url::parse("https://example.com/image.jpg")?.into()) ); // base relative url let html_bytes = b""; - let metadata = html_to_site_metadata(html_bytes, &url).expect("Unable to parse metadata"); + let metadata = extract_opengraph_data(html_bytes, &url).expect("Unable to parse metadata"); assert_eq!( metadata.image, - Some( - Url::parse("https://example.com/one/image.jpg") - .unwrap() - .into() - ) + Some(Url::parse("https://example.com/one/image.jpg")?.into()) ); // absolute url let html_bytes = b""; - let metadata = html_to_site_metadata(html_bytes, &url).expect("Unable to parse metadata"); + let metadata = extract_opengraph_data(html_bytes, &url).expect("Unable to parse metadata"); assert_eq!( metadata.image, - Some(Url::parse("https://cdn.host.com/image.jpg").unwrap().into()) + Some(Url::parse("https://cdn.host.com/image.jpg")?.into()) ); // protocol relative url let html_bytes = b""; - let metadata = html_to_site_metadata(html_bytes, &url).expect("Unable to parse metadata"); + let metadata = extract_opengraph_data(html_bytes, &url).expect("Unable to parse metadata"); assert_eq!( metadata.image, - Some(Url::parse("https://example.com/image.jpg").unwrap().into()) + Some(Url::parse("https://example.com/image.jpg")?.into()) ); + + Ok(()) } } diff --git a/crates/api_common/src/send_activity.rs b/crates/api_common/src/send_activity.rs index 6d9c722a1..465e074f4 100644 --- a/crates/api_common/src/send_activity.rs +++ b/crates/api_common/src/send_activity.rs @@ -1,9 +1,4 @@ -use crate::{ - community::BanFromCommunity, - context::LemmyContext, - person::BanPerson, - post::{DeletePost, RemovePost}, -}; +use crate::{community::BanFromCommunity, context::LemmyContext, post::DeletePost}; use activitypub_federation::config::Data; use futures::future::BoxFuture; use lemmy_db_schema::{ @@ -18,7 +13,7 @@ use lemmy_db_schema::{ }; use lemmy_db_views::structs::PrivateMessageView; use lemmy_utils::error::LemmyResult; -use once_cell::sync::{Lazy, OnceCell}; +use std::sync::{LazyLock, OnceLock}; use tokio::{ sync::{ mpsc, @@ -33,38 +28,80 @@ type MatchOutgoingActivitiesBoxed = Box fn(SendActivityData, &'a Data) -> BoxFuture<'a, LemmyResult<()>>>; /// This static is necessary so that the api_common crates don't need to depend on lemmy_apub -pub static MATCH_OUTGOING_ACTIVITIES: OnceCell = 
OnceCell::new(); +pub static MATCH_OUTGOING_ACTIVITIES: OnceLock = OnceLock::new(); #[derive(Debug)] pub enum SendActivityData { CreatePost(Post), UpdatePost(Post), DeletePost(Post, Person, DeletePost), - RemovePost(Post, Person, RemovePost), + RemovePost { + post: Post, + moderator: Person, + reason: Option, + removed: bool, + }, LockPost(Post, Person, bool), FeaturePost(Post, Person, bool), CreateComment(Comment), UpdateComment(Comment), DeleteComment(Comment, Person, Community), - RemoveComment(Comment, Person, Community, Option), - LikePostOrComment(DbUrl, Person, Community, i16), + RemoveComment { + comment: Comment, + moderator: Person, + community: Community, + reason: Option, + }, + LikePostOrComment { + object_id: DbUrl, + actor: Person, + community: Community, + score: i16, + }, FollowCommunity(Community, Person, bool), UpdateCommunity(Person, Community), DeleteCommunity(Person, Community, bool), - RemoveCommunity(Person, Community, Option, bool), - AddModToCommunity(Person, CommunityId, PersonId, bool), - BanFromCommunity(Person, CommunityId, Person, BanFromCommunity), - BanFromSite(Person, Person, BanPerson), + RemoveCommunity { + moderator: Person, + community: Community, + reason: Option, + removed: bool, + }, + AddModToCommunity { + moderator: Person, + community_id: CommunityId, + target: PersonId, + added: bool, + }, + BanFromCommunity { + moderator: Person, + community_id: CommunityId, + target: Person, + data: BanFromCommunity, + }, + BanFromSite { + moderator: Person, + banned_user: Person, + reason: Option, + remove_or_restore_data: Option, + ban: bool, + expires: Option, + }, CreatePrivateMessage(PrivateMessageView), UpdatePrivateMessage(PrivateMessageView), DeletePrivateMessage(Person, PrivateMessage, bool), DeleteUser(Person, bool), - CreateReport(Url, Person, Community, String), + CreateReport { + object_id: Url, + actor: Person, + community: Community, + reason: String, + }, } // TODO: instead of static, move this into LemmyContext. make sure that stopping the process with // ctrl+c still works. 
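For context on the once_cell removal above: `MATCH_OUTGOING_ACTIVITIES` and `ACTIVITY_CHANNEL` now use the standard library's `OnceLock` and `LazyLock`, which are stable in recent Rust. A self-contained sketch of the two types, with made-up statics, looks roughly like this:

use std::sync::{LazyLock, OnceLock};

// Computed lazily on first access, then shared.
static GREETING: LazyLock<String> = LazyLock::new(|| "hello".to_string());

// Set at most once at runtime; later callers get the stored value back.
static CONFIG_DIR: OnceLock<String> = OnceLock::new();

fn config_dir(default: &str) -> &'static str {
    CONFIG_DIR.get_or_init(|| default.to_string()).as_str()
}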
-static ACTIVITY_CHANNEL: Lazy = Lazy::new(|| { +static ACTIVITY_CHANNEL: LazyLock = LazyLock::new(|| { let (sender, receiver) = mpsc::unbounded_channel(); let weak_sender = sender.downgrade(); ActivityChannel { @@ -98,9 +135,9 @@ impl ActivityChannel { Ok(()) } - pub async fn close(outgoing_activities_task: JoinHandle>) -> LemmyResult<()> { + pub async fn close(outgoing_activities_task: JoinHandle<()>) -> LemmyResult<()> { ACTIVITY_CHANNEL.keepalive_sender.lock().await.take(); - outgoing_activities_task.await??; + outgoing_activities_task.await?; Ok(()) } } diff --git a/crates/api_common/src/sensitive.rs b/crates/api_common/src/sensitive.rs deleted file mode 100644 index 4dd120805..000000000 --- a/crates/api_common/src/sensitive.rs +++ /dev/null @@ -1,116 +0,0 @@ -use serde::{Deserialize, Serialize}; -use std::{ - borrow::Borrow, - ops::{Deref, DerefMut}, -}; -#[cfg(feature = "full")] -use ts_rs::TS; - -#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Deserialize, Serialize, Default)] -#[serde(transparent)] -pub struct Sensitive(T); - -impl Sensitive { - pub fn new(item: T) -> Self { - Sensitive(item) - } - pub fn into_inner(self) -> T { - self.0 - } -} - -impl std::fmt::Debug for Sensitive { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - f.debug_struct("Sensitive").finish() - } -} - -impl AsRef for Sensitive { - fn as_ref(&self) -> &T { - &self.0 - } -} - -impl AsRef for Sensitive { - fn as_ref(&self) -> &str { - &self.0 - } -} - -impl AsRef<[u8]> for Sensitive { - fn as_ref(&self) -> &[u8] { - self.0.as_ref() - } -} - -impl AsRef<[u8]> for Sensitive> { - fn as_ref(&self) -> &[u8] { - self.0.as_ref() - } -} - -impl AsMut for Sensitive { - fn as_mut(&mut self) -> &mut T { - &mut self.0 - } -} - -impl AsMut for Sensitive { - fn as_mut(&mut self) -> &mut str { - &mut self.0 - } -} - -impl Deref for Sensitive { - type Target = str; - - fn deref(&self) -> &Self::Target { - &self.0 - } -} - -impl DerefMut for Sensitive { - fn deref_mut(&mut self) -> &mut Self::Target { - &mut self.0 - } -} - -impl From for Sensitive { - fn from(t: T) -> Self { - Sensitive(t) - } -} - -impl From<&str> for Sensitive { - fn from(s: &str) -> Self { - Sensitive(s.into()) - } -} - -impl Borrow for Sensitive { - fn borrow(&self) -> &T { - &self.0 - } -} - -impl Borrow for Sensitive { - fn borrow(&self) -> &str { - &self.0 - } -} - -#[cfg(feature = "full")] -impl TS for Sensitive { - fn name() -> String { - "string".to_string() - } - fn name_with_type_args(_args: Vec) -> String { - "string".to_string() - } - fn dependencies() -> Vec { - Vec::new() - } - fn transparent() -> bool { - true - } -} diff --git a/crates/api_common/src/site.rs b/crates/api_common/src/site.rs index d40729e35..8fc091e9d 100644 --- a/crates/api_common/src/site.rs +++ b/crates/api_common/src/site.rs @@ -1,27 +1,45 @@ +use crate::federate_retry_sleep_duration; +use chrono::{DateTime, Utc}; use lemmy_db_schema::{ - newtypes::{CommentId, CommunityId, InstanceId, LanguageId, PersonId, PostId}, - source::{instance::Instance, language::Language, tagline::Tagline}, + newtypes::{ + CommentId, + CommunityId, + InstanceId, + LanguageId, + PersonId, + PostId, + RegistrationApplicationId, + }, + source::{ + community::Community, + federation_queue_state::FederationQueueState, + instance::Instance, + language::Language, + local_site_url_blocklist::LocalSiteUrlBlocklist, + oauth_provider::{OAuthProvider, PublicOAuthProvider}, + person::Person, + tagline::Tagline, + }, + CommentSortType, + FederationMode, ListingType, 
ModlogActionType, + PostListingMode, + PostSortType, RegistrationMode, SearchType, - SortType, }; use lemmy_db_views::structs::{ CommentView, - CustomEmojiView, LocalUserView, PostView, RegistrationApplicationView, SiteView, }; use lemmy_db_views_actor::structs::{ - CommunityBlockView, CommunityFollowerView, CommunityModeratorView, CommunityView, - InstanceBlockView, - PersonBlockView, PersonView, }; use lemmy_db_views_moderator::structs::{ @@ -47,7 +65,7 @@ use serde_with::skip_serializing_none; use ts_rs::TS; #[skip_serializing_none] -#[derive(Debug, Serialize, Deserialize, Clone, Default)] +#[derive(Debug, Serialize, Deserialize, Clone, Default, PartialEq, Eq, Hash)] #[cfg_attr(feature = "full", derive(TS))] #[cfg_attr(feature = "full", ts(export))] /// Searches the site, given a query string, and some optional filters. @@ -57,10 +75,15 @@ pub struct Search { pub community_name: Option, pub creator_id: Option, pub type_: Option, - pub sort: Option, + pub sort: Option, pub listing_type: Option, pub page: Option, pub limit: Option, + pub title_only: Option, + pub post_url_only: Option, + pub saved_only: Option, + pub liked_only: Option, + pub disliked_only: Option, } #[derive(Debug, Serialize, Deserialize, Clone)] @@ -76,7 +99,7 @@ pub struct SearchResponse { pub users: Vec, } -#[derive(Debug, Serialize, Deserialize, Clone, Default)] +#[derive(Debug, Serialize, Deserialize, Clone, Default, PartialEq, Eq, Hash)] #[cfg_attr(feature = "full", derive(TS))] #[cfg_attr(feature = "full", ts(export))] /// Does an apub fetch for an object. @@ -99,7 +122,7 @@ pub struct ResolveObjectResponse { } #[skip_serializing_none] -#[derive(Debug, Serialize, Deserialize, Clone, Default)] +#[derive(Debug, Serialize, Deserialize, Clone, Copy, Default, PartialEq, Eq, Hash)] #[cfg_attr(feature = "full", derive(TS))] #[cfg_attr(feature = "full", ts(export))] /// Fetches the modlog. @@ -110,6 +133,8 @@ pub struct GetModlog { pub limit: Option, pub type_: Option, pub other_person_id: Option, + pub post_id: Option, + pub comment_id: Option, } #[derive(Debug, Serialize, Deserialize, Clone)] @@ -136,7 +161,7 @@ pub struct GetModlogResponse { } #[skip_serializing_none] -#[derive(Debug, Serialize, Deserialize, Clone, Default)] +#[derive(Debug, Serialize, Deserialize, Clone, Default, PartialEq, Eq, Hash)] #[cfg_attr(feature = "full", derive(TS))] #[cfg_attr(feature = "full", ts(export))] /// Creates a site. Should be done after first running lemmy. 
@@ -146,7 +171,6 @@ pub struct CreateSite { pub description: Option, pub icon: Option, pub banner: Option, - pub enable_downvotes: Option, pub enable_nsfw: Option, pub community_creation_admin_only: Option, pub require_email_verification: Option, @@ -154,6 +178,9 @@ pub struct CreateSite { pub private_instance: Option, pub default_theme: Option, pub default_post_listing_type: Option, + pub default_post_listing_mode: Option, + pub default_post_sort_type: Option, + pub default_comment_sort_type: Option, pub legal_information: Option, pub application_email_admins: Option, pub hide_modlog_mod_names: Option, @@ -178,17 +205,23 @@ pub struct CreateSite { pub captcha_difficulty: Option, pub allowed_instances: Option>, pub blocked_instances: Option>, - pub taglines: Option>, pub registration_mode: Option, + pub oauth_registration: Option, + pub content_warning: Option, + pub post_upvotes: Option, + pub post_downvotes: Option, + pub comment_upvotes: Option, + pub comment_downvotes: Option, } #[skip_serializing_none] -#[derive(Debug, Serialize, Deserialize, Clone, Default)] +#[derive(Debug, Serialize, Deserialize, Clone, Default, PartialEq, Eq, Hash)] #[cfg_attr(feature = "full", derive(TS))] #[cfg_attr(feature = "full", ts(export))] /// Edits a site. pub struct EditSite { pub name: Option, + /// A sidebar for the site, in markdown. pub sidebar: Option, /// A shorter, one line description of your site. pub description: Option, @@ -196,8 +229,6 @@ pub struct EditSite { pub icon: Option, /// A url for your site's banner. pub banner: Option, - /// Whether to enable downvotes. - pub enable_downvotes: Option, /// Whether to enable NSFW. pub enable_nsfw: Option, /// Limits community creation to admins only. @@ -210,7 +241,14 @@ pub struct EditSite { pub private_instance: Option, /// The default theme. Usually "browser" pub default_theme: Option, + /// The default post listing type, usually "local" pub default_post_listing_type: Option, + /// Default value for listing mode, usually "list" + pub default_post_listing_mode: Option, + /// The default post sort, usually "active" + pub default_post_sort_type: Option, + /// The default comment sort, usually "hot" + pub default_comment_sort_type: Option, /// An optional page of legal information pub legal_information: Option, /// Whether to email admins when receiving a new application. @@ -253,11 +291,24 @@ pub struct EditSite { pub allowed_instances: Option>, /// A list of blocked instances. pub blocked_instances: Option>, - /// A list of taglines shown at the top of the front page. - pub taglines: Option>, + /// A list of blocked URLs + pub blocked_urls: Option>, pub registration_mode: Option, /// Whether to email admins for new reports. pub reports_email_admins: Option, + /// If present, nsfw content is visible by default. Should be displayed by frontends/clients + /// when the site is first opened by a user. + pub content_warning: Option, + /// Whether or not external auth methods can auto-register users. + pub oauth_registration: Option, + /// What kind of post upvotes your site allows. + pub post_upvotes: Option, + /// What kind of post downvotes your site allows. + pub post_downvotes: Option, + /// What kind of comment upvotes your site allows. + pub comment_upvotes: Option, + /// What kind of comment downvotes your site allows. + pub comment_downvotes: Option, } #[derive(Debug, Serialize, Deserialize, Clone)] @@ -266,7 +317,8 @@ pub struct EditSite { /// The response for a site. 
pub struct SiteResponse { pub site_view: SiteView, - pub taglines: Vec, + /// deprecated, use field `tagline` or /api/v3/tagline/list + pub taglines: Vec<()>, } #[skip_serializing_none] @@ -281,10 +333,16 @@ pub struct GetSiteResponse { pub my_user: Option, pub all_languages: Vec, pub discussion_languages: Vec, - /// A list of taglines shown at the top of the front page. - pub taglines: Vec, - /// A list of custom emojis your site supports. - pub custom_emojis: Vec, + /// deprecated, use field `tagline` or /api/v3/tagline/list + pub taglines: Vec<()>, + /// deprecated, use /api/v3/custom_emoji/list + pub custom_emojis: Vec<()>, + /// If the site has any taglines, a random one is included here for displaying + pub tagline: Option, + /// A list of external auth methods your site supports. + pub oauth_providers: Option>, + pub admin_oauth_providers: Option>, + pub blocked_urls: Vec, } #[skip_serializing_none] @@ -305,9 +363,9 @@ pub struct MyUserInfo { pub local_user_view: LocalUserView, pub follows: Vec, pub moderates: Vec, - pub community_blocks: Vec, - pub instance_blocks: Vec, - pub person_blocks: Vec, + pub community_blocks: Vec, + pub instance_blocks: Vec, + pub person_blocks: Vec, pub discussion_languages: Vec, } @@ -316,15 +374,50 @@ pub struct MyUserInfo { #[cfg_attr(feature = "full", ts(export))] /// A list of federated instances. pub struct FederatedInstances { - pub linked: Vec, - pub allowed: Vec, - pub blocked: Vec, + pub linked: Vec, + pub allowed: Vec, + pub blocked: Vec, } #[skip_serializing_none] #[derive(Debug, Serialize, Deserialize, Clone)] #[cfg_attr(feature = "full", derive(TS))] #[cfg_attr(feature = "full", ts(export))] +pub struct ReadableFederationState { + #[serde(flatten)] + internal_state: FederationQueueState, + /// timestamp of the next retry attempt (null if fail count is 0) + next_retry: Option>, +} + +impl From for ReadableFederationState { + fn from(internal_state: FederationQueueState) -> Self { + ReadableFederationState { + next_retry: internal_state.last_retry.map(|r| { + r + chrono::Duration::from_std(federate_retry_sleep_duration(internal_state.fail_count)) + .expect("sleep duration longer than 2**63 ms (262 million years)") + }), + internal_state, + } + } +} + +#[skip_serializing_none] +#[derive(Debug, Serialize, Deserialize, Clone)] +#[cfg_attr(feature = "full", derive(TS))] +#[cfg_attr(feature = "full", ts(export))] +pub struct InstanceWithFederationState { + #[serde(flatten)] + pub instance: Instance, + /// if federation to this instance is or was active, show state of outgoing federation to this + /// instance + pub federation_state: Option, +} + +#[skip_serializing_none] +#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq, Hash)] +#[cfg_attr(feature = "full", derive(TS))] +#[cfg_attr(feature = "full", ts(export))] /// Purges a person from the database. This will delete all content attached to that person. pub struct PurgePerson { pub person_id: PersonId, @@ -332,7 +425,7 @@ pub struct PurgePerson { } #[skip_serializing_none] -#[derive(Debug, Serialize, Deserialize, Clone)] +#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq, Hash)] #[cfg_attr(feature = "full", derive(TS))] #[cfg_attr(feature = "full", ts(export))] /// Purges a community from the database. This will delete all content attached to that community. 
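`ReadableFederationState` above derives `next_retry` by adding the retry sleep duration to `last_retry`. The sketch below shows the general shape of that computation, with a made-up exponential backoff standing in for `federate_retry_sleep_duration`:

use chrono::{DateTime, Duration, Utc};

fn next_retry(last_retry: Option<DateTime<Utc>>, fail_count: i32) -> Option<DateTime<Utc>> {
    last_retry.map(|r| {
        // Placeholder backoff: 60 s doubled per failure, capped at one day.
        let secs = (60i64 << fail_count.clamp(0, 10)).min(24 * 3600);
        r + Duration::seconds(secs)
    })
}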
@@ -342,7 +435,7 @@ pub struct PurgeCommunity { } #[skip_serializing_none] -#[derive(Debug, Serialize, Deserialize, Clone)] +#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq, Hash)] #[cfg_attr(feature = "full", derive(TS))] #[cfg_attr(feature = "full", ts(export))] /// Purges a post from the database. This will delete all content attached to that post. @@ -352,7 +445,7 @@ pub struct PurgePost { } #[skip_serializing_none] -#[derive(Debug, Serialize, Deserialize, Clone)] +#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq, Hash)] #[cfg_attr(feature = "full", derive(TS))] #[cfg_attr(feature = "full", ts(export))] /// Purges a comment from the database. This will delete all content attached to that comment. @@ -362,7 +455,7 @@ pub struct PurgeComment { } #[skip_serializing_none] -#[derive(Debug, Serialize, Deserialize, Clone, Default)] +#[derive(Debug, Serialize, Deserialize, Clone, Copy, Default, PartialEq, Eq, Hash)] #[cfg_attr(feature = "full", derive(TS))] #[cfg_attr(feature = "full", ts(export))] /// Fetches a list of registration applications. @@ -382,12 +475,21 @@ pub struct ListRegistrationApplicationsResponse { } #[skip_serializing_none] -#[derive(Debug, Serialize, Deserialize, Clone, Default)] +#[derive(Debug, Serialize, Deserialize, Clone, Copy, Default, PartialEq, Eq, Hash)] +#[cfg_attr(feature = "full", derive(TS))] +#[cfg_attr(feature = "full", ts(export))] +/// Gets a registration application for a person +pub struct GetRegistrationApplication { + pub person_id: PersonId, +} + +#[skip_serializing_none] +#[derive(Debug, Serialize, Deserialize, Clone, Default, PartialEq, Eq, Hash)] #[cfg_attr(feature = "full", derive(TS))] #[cfg_attr(feature = "full", ts(export))] /// Approves a registration application. pub struct ApproveRegistrationApplication { - pub id: i32, + pub id: RegistrationApplicationId, pub approve: bool, pub deny_reason: Option, } @@ -408,7 +510,7 @@ pub struct GetUnreadRegistrationApplicationCountResponse { pub registration_applications: i64, } -#[derive(Debug, Serialize, Deserialize, Clone, Default)] +#[derive(Debug, Serialize, Deserialize, Clone, Copy, Default, PartialEq, Eq, Hash)] #[cfg_attr(feature = "full", derive(TS))] #[cfg_attr(feature = "full", ts(export))] /// Block an instance as user diff --git a/crates/api_common/src/tagline.rs b/crates/api_common/src/tagline.rs new file mode 100644 index 000000000..3090a2678 --- /dev/null +++ b/crates/api_common/src/tagline.rs @@ -0,0 +1,55 @@ +use lemmy_db_schema::{newtypes::TaglineId, source::tagline::Tagline}; +use serde::{Deserialize, Serialize}; +use serde_with::skip_serializing_none; +#[cfg(feature = "full")] +use ts_rs::TS; + +#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq, Hash)] +#[cfg_attr(feature = "full", derive(TS))] +#[cfg_attr(feature = "full", ts(export))] +/// Create a tagline +pub struct CreateTagline { + pub content: String, +} + +#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq, Hash)] +#[cfg_attr(feature = "full", derive(TS))] +#[cfg_attr(feature = "full", ts(export))] +/// Update a tagline +pub struct UpdateTagline { + pub id: TaglineId, + pub content: String, +} + +#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq, Hash)] +#[cfg_attr(feature = "full", derive(TS))] +#[cfg_attr(feature = "full", ts(export))] +/// Delete a tagline +pub struct DeleteTagline { + pub id: TaglineId, +} + +#[derive(Debug, Serialize, Deserialize, Clone)] +#[cfg_attr(feature = "full", derive(TS))] +#[cfg_attr(feature = "full", ts(export))] +pub struct 
TaglineResponse { + pub tagline: Tagline, +} + +#[derive(Debug, Serialize, Deserialize, Clone)] +#[cfg_attr(feature = "full", derive(TS))] +#[cfg_attr(feature = "full", ts(export))] +/// A response for taglines. +pub struct ListTaglinesResponse { + pub taglines: Vec, +} + +#[skip_serializing_none] +#[derive(Debug, Serialize, Deserialize, Clone, Copy, Default, PartialEq, Eq, Hash)] +#[cfg_attr(feature = "full", derive(TS))] +#[cfg_attr(feature = "full", ts(export))] +/// Fetches a list of taglines. +pub struct ListTaglines { + pub page: Option, + pub limit: Option, +} diff --git a/crates/api_common/src/utils.rs b/crates/api_common/src/utils.rs index 9e661aa61..ee9f7d153 100644 --- a/crates/api_common/src/utils.rs +++ b/crates/api_common/src/utils.rs @@ -1,31 +1,46 @@ use crate::{ context::LemmyContext, - request::purge_image_from_pictrs, - sensitive::Sensitive, - site::FederatedInstances, + request::{ + delete_image_from_pictrs, + fetch_pictrs_proxied_image_details, + purge_image_from_pictrs, + }, + site::{FederatedInstances, InstanceWithFederationState}, }; -use actix_web::cookie::{Cookie, SameSite}; -use anyhow::Context; use chrono::{DateTime, Days, Local, TimeZone, Utc}; use enum_map::{enum_map, EnumMap}; use lemmy_db_schema::{ - newtypes::{CommunityId, DbUrl, PersonId, PostId}, + aggregates::structs::{PersonPostAggregates, PersonPostAggregatesForm}, + newtypes::{CommentId, CommunityId, DbUrl, InstanceId, PersonId, PostId}, source::{ - comment::{Comment, CommentUpdateForm}, + comment::{Comment, CommentLike, CommentUpdateForm}, community::{Community, CommunityModerator, CommunityUpdateForm}, + community_block::CommunityBlock, email_verification::{EmailVerification, EmailVerificationForm}, + images::{ImageDetails, RemoteImage}, instance::Instance, + instance_block::InstanceBlock, local_site::LocalSite, local_site_rate_limit::LocalSiteRateLimit, + local_site_url_blocklist::LocalSiteUrlBlocklist, + moderator::{ModRemoveComment, ModRemoveCommentForm, ModRemovePost, ModRemovePostForm}, + oauth_account::OAuthAccount, password_reset_request::PasswordResetRequest, person::{Person, PersonUpdateForm}, person_block::PersonBlock, - post::{Post, PostRead}, + post::{Post, PostLike, PostRead}, + registration_application::RegistrationApplication, + site::Site, }, - traits::Crud, + traits::{Crud, Likeable}, utils::DbPool, + FederationMode, + RegistrationMode, +}; +use lemmy_db_views::{ + comment_view::CommentQuery, + structs::{LocalImageView, LocalUserView, SiteView}, }; -use lemmy_db_views::{comment_view::CommentQuery, structs::LocalUserView}; use lemmy_db_views_actor::structs::{ CommunityModeratorView, CommunityPersonBanView, @@ -34,33 +49,36 @@ use lemmy_db_views_actor::structs::{ use lemmy_utils::{ email::{send_email, translations::Lang}, error::{LemmyError, LemmyErrorExt, LemmyErrorType, LemmyResult}, - location_info, rate_limit::{ActionType, BucketConfig}, - settings::structs::Settings, - utils::slurs::build_slur_regex, + settings::{ + structs::{PictrsImageMode, Settings}, + SETTINGS, + }, + utils::{ + markdown::{image_links::markdown_rewrite_image_links, markdown_check_for_blocked_urls}, + slurs::{build_slur_regex, remove_slurs}, + validation::clean_urls_in_text, + }, + CACHE_DURATION_FEDERATION, }; -use regex::Regex; +use moka::future::Cache; +use regex::{escape, Regex, RegexSet}; use rosetta_i18n::{Language, LanguageId}; -use std::collections::HashSet; +use std::{collections::HashSet, sync::LazyLock}; use tracing::warn; use url::{ParseError, Url}; +use urlencoding::encode; -pub static 
AUTH_COOKIE_NAME: &str = "auth"; +pub static AUTH_COOKIE_NAME: &str = "jwt"; #[tracing::instrument(skip_all)] pub async fn is_mod_or_admin( pool: &mut DbPool<'_>, person: &Person, community_id: CommunityId, -) -> Result<(), LemmyError> { +) -> LemmyResult<()> { check_user_valid(person)?; - - let is_mod_or_admin = CommunityView::is_mod_or_admin(pool, person.id, community_id).await?; - if !is_mod_or_admin { - Err(LemmyErrorType::NotAModOrAdmin)? - } else { - Ok(()) - } + CommunityView::check_is_mod_or_admin(pool, person.id, community_id).await } #[tracing::instrument(skip_all)] @@ -68,7 +86,7 @@ pub async fn is_mod_or_admin_opt( pool: &mut DbPool<'_>, local_user_view: Option<&LocalUserView>, community_id: Option, -) -> Result<(), LemmyError> { +) -> LemmyResult<()> { if let Some(local_user_view) = local_user_view { if let Some(community_id) = community_id { is_mod_or_admin(pool, &local_user_view.person, community_id).await @@ -80,7 +98,21 @@ pub async fn is_mod_or_admin_opt( } } -pub fn is_admin(local_user_view: &LocalUserView) -> Result<(), LemmyError> { +/// Check that a person is either a mod of any community, or an admin +/// +/// Should only be used for read operations +#[tracing::instrument(skip_all)] +pub async fn check_community_mod_of_any_or_admin_action( + local_user_view: &LocalUserView, + pool: &mut DbPool<'_>, +) -> LemmyResult<()> { + let person = &local_user_view.person; + + check_user_valid(person)?; + CommunityView::check_is_mod_of_any_or_admin(pool, person.id).await +} + +pub fn is_admin(local_user_view: &LocalUserView) -> LemmyResult<()> { check_user_valid(&local_user_view.person)?; if !local_user_view.local_user.admin { Err(LemmyErrorType::NotAnAdmin)? @@ -94,7 +126,7 @@ pub fn is_admin(local_user_view: &LocalUserView) -> Result<(), LemmyError> { pub fn is_top_mod( local_user_view: &LocalUserView, community_mods: &[CommunityModeratorView], -) -> Result<(), LemmyError> { +) -> LemmyResult<()> { check_user_valid(&local_user_view.person)?; if local_user_view.person.id != community_mods @@ -108,26 +140,40 @@ pub fn is_top_mod( } } -#[tracing::instrument(skip_all)] -pub async fn get_post(post_id: PostId, pool: &mut DbPool<'_>) -> Result { - Post::read(pool, post_id) - .await - .with_lemmy_type(LemmyErrorType::CouldntFindPost) -} - +/// Marks a post as read for a given person. #[tracing::instrument(skip_all)] pub async fn mark_post_as_read( person_id: PersonId, post_id: PostId, pool: &mut DbPool<'_>, -) -> Result<(), LemmyError> { +) -> LemmyResult<()> { PostRead::mark_as_read(pool, HashSet::from([post_id]), person_id) .await .with_lemmy_type(LemmyErrorType::CouldntMarkPostAsRead)?; Ok(()) } -pub fn check_user_valid(person: &Person) -> Result<(), LemmyError> { +/// Updates the read comment count for a post. Usually done when reading or creating a new comment. +#[tracing::instrument(skip_all)] +pub async fn update_read_comments( + person_id: PersonId, + post_id: PostId, + read_comments: i64, + pool: &mut DbPool<'_>, +) -> LemmyResult<()> { + let person_post_agg_form = PersonPostAggregatesForm { + person_id, + post_id, + read_comments, + ..PersonPostAggregatesForm::default() + }; + + PersonPostAggregates::upsert(pool, &person_post_agg_form).await?; + + Ok(()) +} + +pub fn check_user_valid(person: &Person) -> LemmyResult<()> { // Check for a site ban if person.banned { Err(LemmyErrorType::SiteBan)? 
@@ -140,6 +186,44 @@ pub fn check_user_valid(person: &Person) -> Result<(), LemmyError> { } } +/// Check if the user's email is verified if email verification is turned on +/// However, skip checking verification if the user is an admin +pub fn check_email_verified( + local_user_view: &LocalUserView, + site_view: &SiteView, +) -> LemmyResult<()> { + if !local_user_view.local_user.admin + && site_view.local_site.require_email_verification + && !local_user_view.local_user.email_verified + { + Err(LemmyErrorType::EmailNotVerified)? + } + Ok(()) +} + +pub async fn check_registration_application( + local_user_view: &LocalUserView, + local_site: &LocalSite, + pool: &mut DbPool<'_>, +) -> LemmyResult<()> { + if (local_site.registration_mode == RegistrationMode::RequireApplication + || local_site.registration_mode == RegistrationMode::Closed) + && !local_user_view.local_user.accepted_application + && !local_user_view.local_user.admin + { + // Fetch the registration application. If no admin id is present its still pending. Otherwise it + // was processed (either accepted or denied). + let local_user_id = local_user_view.local_user.id; + let registration = RegistrationApplication::find_by_local_user_id(pool, local_user_id).await?; + if registration.admin_id.is_some() { + Err(LemmyErrorType::RegistrationDenied(registration.deny_reason))? + } else { + Err(LemmyErrorType::RegistrationApplicationIsPending)? + } + } + Ok(()) +} + /// Checks that a normal user action (eg posting or voting) is allowed in a given community. /// /// In particular it checks that neither the user nor community are banned or deleted, and that @@ -151,7 +235,7 @@ pub async fn check_community_user_action( ) -> LemmyResult<()> { check_user_valid(person)?; check_community_deleted_removed(community_id, pool).await?; - check_community_ban(person, community_id, pool).await?; + CommunityPersonBanView::check(pool, person.id, community_id).await?; Ok(()) } @@ -159,28 +243,13 @@ async fn check_community_deleted_removed( community_id: CommunityId, pool: &mut DbPool<'_>, ) -> LemmyResult<()> { - let community = Community::read(pool, community_id) - .await - .with_lemmy_type(LemmyErrorType::CouldntFindCommunity)?; + let community = Community::read(pool, community_id).await?; if community.deleted || community.removed { Err(LemmyErrorType::Deleted)? } Ok(()) } -async fn check_community_ban( - person: &Person, - community_id: CommunityId, - pool: &mut DbPool<'_>, -) -> LemmyResult<()> { - // check if user was banned from site or community - let is_banned = CommunityPersonBanView::get(pool, person.id, community_id).await?; - if is_banned { - Err(LemmyErrorType::BannedFromCommunity)? - } - Ok(()) -} - /// Check that the given user can perform a mod action in the community. 
/// /// In particular it checks that he is an admin or mod, wasn't banned and the community isn't @@ -192,7 +261,7 @@ pub async fn check_community_mod_action( pool: &mut DbPool<'_>, ) -> LemmyResult<()> { is_mod_or_admin(pool, person, community_id).await?; - check_community_ban(person, community_id, pool).await?; + CommunityPersonBanView::check(pool, person.id, community_id).await?; // it must be possible to restore deleted community if !allow_deleted { @@ -201,20 +270,8 @@ pub async fn check_community_mod_action( Ok(()) } -pub async fn check_community_mod_action_opt( - local_user_view: &LocalUserView, - community_id: Option, - pool: &mut DbPool<'_>, -) -> LemmyResult<()> { - if let Some(community_id) = community_id { - check_community_mod_action(&local_user_view.person, community_id, false, pool).await?; - } else { - is_admin(local_user_view)?; - } - Ok(()) -} - -pub fn check_post_deleted_or_removed(post: &Post) -> Result<(), LemmyError> { +/// Don't allow creating reports for removed / deleted posts +pub fn check_post_deleted_or_removed(post: &Post) -> LemmyResult<()> { if post.deleted || post.removed { Err(LemmyErrorType::Deleted)? } else { @@ -222,26 +279,65 @@ pub fn check_post_deleted_or_removed(post: &Post) -> Result<(), LemmyError> { } } -#[tracing::instrument(skip_all)] -pub async fn check_person_block( - my_id: PersonId, - potential_blocker_id: PersonId, - pool: &mut DbPool<'_>, -) -> Result<(), LemmyError> { - let is_blocked = PersonBlock::read(pool, potential_blocker_id, my_id) - .await - .is_ok(); - if is_blocked { - Err(LemmyErrorType::PersonIsBlocked)? +pub fn check_comment_deleted_or_removed(comment: &Comment) -> LemmyResult<()> { + if comment.deleted || comment.removed { + Err(LemmyErrorType::Deleted)? } else { Ok(()) } } #[tracing::instrument(skip_all)] -pub fn check_downvotes_enabled(score: i16, local_site: &LocalSite) -> Result<(), LemmyError> { - if score == -1 && !local_site.enable_downvotes { - Err(LemmyErrorType::DownvotesAreDisabled)? +pub async fn check_person_instance_community_block( + my_id: PersonId, + potential_blocker_id: PersonId, + community_instance_id: InstanceId, + community_id: CommunityId, + pool: &mut DbPool<'_>, +) -> LemmyResult<()> { + PersonBlock::read(pool, potential_blocker_id, my_id).await?; + InstanceBlock::read(pool, potential_blocker_id, community_instance_id).await?; + CommunityBlock::read(pool, potential_blocker_id, community_id).await?; + Ok(()) +} + +/// A vote item type used to check the vote mode. 
+pub enum VoteItem { + Post(PostId), + Comment(CommentId), +} + +#[tracing::instrument(skip_all)] +pub async fn check_local_vote_mode( + score: i16, + vote_item: VoteItem, + local_site: &LocalSite, + person_id: PersonId, + pool: &mut DbPool<'_>, +) -> LemmyResult<()> { + let (downvote_setting, upvote_setting) = match vote_item { + VoteItem::Post(_) => (local_site.post_downvotes, local_site.post_upvotes), + VoteItem::Comment(_) => (local_site.comment_downvotes, local_site.comment_upvotes), + }; + + let downvote_fail = score == -1 && downvote_setting == FederationMode::Disable; + let upvote_fail = score == 1 && upvote_setting == FederationMode::Disable; + + // Undo previous vote for item if new vote fails + if downvote_fail || upvote_fail { + match vote_item { + VoteItem::Post(post_id) => PostLike::remove(pool, person_id, post_id).await?, + VoteItem::Comment(comment_id) => CommentLike::remove(pool, person_id, comment_id).await?, + }; + } + Ok(()) +} + +/// Dont allow bots to do certain actions, like voting +#[tracing::instrument(skip_all)] +pub fn check_bot_account(person: &Person) -> LemmyResult<()> { + if person.bot_account { + Err(LemmyErrorType::InvalidBotAction)? } else { Ok(()) } @@ -251,7 +347,7 @@ pub fn check_downvotes_enabled(score: i16, local_site: &LocalSite) -> Result<(), pub fn check_private_instance( local_user_view: &Option, local_site: &LocalSite, -) -> Result<(), LemmyError> { +) -> LemmyResult<()> { if local_user_view.is_none() && local_site.private_instance { Err(LemmyErrorType::InstanceIsPrivate)? } else { @@ -273,14 +369,30 @@ pub fn check_private_messages_enabled(local_user_view: &LocalUserView) -> Result pub async fn build_federated_instances( local_site: &LocalSite, pool: &mut DbPool<'_>, -) -> Result, LemmyError> { +) -> LemmyResult> { if local_site.federation_enabled { - // TODO I hate that this requires 3 queries - let (linked, allowed, blocked) = lemmy_db_schema::try_join_with_pool!(pool => ( - Instance::linked, - Instance::allowlist, - Instance::blocklist - ))?; + let mut linked = Vec::new(); + let mut allowed = Vec::new(); + let mut blocked = Vec::new(); + + let all = Instance::read_all_with_fed_state(pool).await?; + for (instance, federation_state, is_blocked, is_allowed) in all { + let i = InstanceWithFederationState { + instance, + federation_state: federation_state.map(std::convert::Into::into), + }; + if is_blocked { + // blocked instances will only have an entry here if they had been federated with in the + // past. + blocked.push(i); + } else if is_allowed { + allowed.push(i.clone()); + linked.push(i); + } else { + // not explicitly allowed but implicitly linked + linked.push(i); + } + } Ok(Some(FederatedInstances { linked, @@ -293,7 +405,7 @@ pub async fn build_federated_instances( } /// Checks the password length -pub fn password_length_check(pass: &str) -> Result<(), LemmyError> { +pub fn password_length_check(pass: &str) -> LemmyResult<()> { if !(10..=60).contains(&pass.chars().count()) { Err(LemmyErrorType::InvalidPassword)? } else { @@ -302,7 +414,7 @@ pub fn password_length_check(pass: &str) -> Result<(), LemmyError> { } /// Checks for a honeypot. If this field is filled, fail the rest of the function -pub fn honeypot_check(honeypot: &Option) -> Result<(), LemmyError> { +pub fn honeypot_check(honeypot: &Option) -> LemmyResult<()> { if honeypot.is_some() && honeypot != &Some(String::new()) { Err(LemmyErrorType::HoneypotFailed)? 
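The core of `check_local_vote_mode` above reduces to a small predicate over the per-site `FederationMode` settings; a rough sketch, ignoring the side effect of removing the caller's previous vote:

use lemmy_db_schema::FederationMode;

fn vote_allowed(score: i16, upvote_setting: FederationMode, downvote_setting: FederationMode) -> bool {
    // A vote is rejected when its direction is disabled for the item type
    // (post or comment); the real function additionally undoes the previous
    // vote when the new one is rejected.
    let downvote_fail = score == -1 && downvote_setting == FederationMode::Disable;
    let upvote_fail = score == 1 && upvote_setting == FederationMode::Disable;
    !(downvote_fail || upvote_fail)
}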
} else { @@ -340,21 +452,23 @@ pub async fn send_password_reset_email( user: &LocalUserView, pool: &mut DbPool<'_>, settings: &Settings, -) -> Result<(), LemmyError> { +) -> LemmyResult<()> { // Generate a random token let token = uuid::Uuid::new_v4().to_string(); - // Insert the row - let local_user_id = user.local_user.id; - PasswordResetRequest::create_token(pool, local_user_id, token.clone()).await?; - let email = &user.local_user.email.clone().expect("email"); let lang = get_interface_language(user); let subject = &lang.password_reset_subject(&user.person.name); let protocol_and_hostname = settings.get_protocol_and_hostname(); let reset_link = format!("{}/password_change/{}", protocol_and_hostname, &token); let body = &lang.password_reset_body(reset_link, &user.person.name); - send_email(subject, email, &user.person.name, body, settings).await + send_email(subject, email, &user.person.name, body, settings).await?; + + // Insert the row after successful send, to avoid using daily reset limit while + // email sending is broken. + let local_user_id = user.local_user.id; + PasswordResetRequest::create(pool, local_user_id, token.clone()).await?; + Ok(()) } /// Send a verification email @@ -363,7 +477,7 @@ pub async fn send_verification_email( new_email: &str, pool: &mut DbPool<'_>, settings: &Settings, -) -> Result<(), LemmyError> { +) -> LemmyResult<()> { let form = EmailVerificationForm { local_user_id: user.local_user.id, email: new_email.to_string(), @@ -429,17 +543,34 @@ pub fn local_site_opt_to_slur_regex(local_site: &Option) -> Option) -> bool { - local_site - .as_ref() - .map(|site| site.enable_nsfw) - .unwrap_or(false) +pub async fn get_url_blocklist(context: &LemmyContext) -> LemmyResult { + static URL_BLOCKLIST: LazyLock> = LazyLock::new(|| { + Cache::builder() + .max_capacity(1) + .time_to_live(CACHE_DURATION_FEDERATION) + .build() + }); + + Ok( + URL_BLOCKLIST + .try_get_with::<_, LemmyError>((), async { + let urls = LocalSiteUrlBlocklist::get_all(&mut context.pool()).await?; + + // The urls are already validated on saving, so just escape them. 
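+          // For illustration (hypothetical blocklist entry): `escape("example.com/spam")`
+          // yields "example\.com/spam", and the resulting `RegexSet` can test text against
+          // every blocked URL in a single pass, e.g.
+          //   let set = RegexSet::new([escape("example.com/spam")])?;
+          //   assert!(set.is_match("see https://example.com/spam/offer"));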
+ let regexes = urls.iter().map(|url| escape(&url.url)); + + let set = RegexSet::new(regexes)?; + Ok(set) + }) + .await + .map_err(|e| anyhow::anyhow!("Failed to build URL blocklist due to `{}`", e))?, + ) } pub async fn send_application_approved_email( user: &LocalUserView, settings: &Settings, -) -> Result<(), LemmyError> { +) -> LemmyResult<()> { let email = &user.local_user.email.clone().expect("email"); let lang = get_interface_language(user); let subject = lang.registration_approved_subject(&user.person.actor_id); @@ -452,7 +583,7 @@ pub async fn send_new_applicant_email_to_admins( applicant_username: &str, pool: &mut DbPool<'_>, settings: &Settings, -) -> Result<(), LemmyError> { +) -> LemmyResult<()> { // Collect the admins with emails let admins = LocalUserView::list_admins_with_emails(pool).await?; @@ -477,7 +608,7 @@ pub async fn send_new_report_email_to_admins( reported_username: &str, pool: &mut DbPool<'_>, settings: &Settings, -) -> Result<(), LemmyError> { +) -> LemmyResult<()> { // Collect the admins with emails let admins = LocalUserView::list_admins_with_emails(pool).await?; @@ -493,9 +624,7 @@ pub async fn send_new_report_email_to_admins( Ok(()) } -pub fn check_private_instance_and_federation_enabled( - local_site: &LocalSite, -) -> Result<(), LemmyError> { +pub fn check_private_instance_and_federation_enabled(local_site: &LocalSite) -> LemmyResult<()> { if local_site.private_instance && local_site.federation_enabled { Err(LemmyErrorType::CantEnablePrivateInstanceAndFederationTogether)? } else { @@ -503,10 +632,22 @@ pub fn check_private_instance_and_federation_enabled( } } +/// Read the site for an actor_id. +/// +/// Used for GetCommunityResponse and GetPersonDetails +pub async fn read_site_for_actor( + actor_id: DbUrl, + context: &LemmyContext, +) -> LemmyResult> { + let site_id = Site::instance_actor_id_from_url(actor_id.clone().into()); + let site = Site::read_from_apub_id(&mut context.pool(), &site_id.into()).await?; + Ok(site) +} + pub async fn purge_image_posts_for_person( banned_person_id: PersonId, context: &LemmyContext, -) -> Result<(), LemmyError> { +) -> LemmyResult<()> { let pool = &mut context.pool(); let posts = Post::fetch_pictrs_posts_for_creator(pool, banned_person_id).await?; for post in posts { @@ -523,10 +664,31 @@ pub async fn purge_image_posts_for_person( Ok(()) } +/// Delete a local_user's images +async fn delete_local_user_images(person_id: PersonId, context: &LemmyContext) -> LemmyResult<()> { + if let Ok(local_user) = LocalUserView::read_person(&mut context.pool(), person_id).await { + let pictrs_uploads = + LocalImageView::get_all_by_local_user_id(&mut context.pool(), local_user.local_user.id) + .await?; + + // Delete their images + for upload in pictrs_uploads { + delete_image_from_pictrs( + &upload.local_image.pictrs_alias, + &upload.local_image.pictrs_delete_token, + context, + ) + .await + .ok(); + } + } + Ok(()) +} + pub async fn purge_image_posts_for_community( banned_community_id: CommunityId, context: &LemmyContext, -) -> Result<(), LemmyError> { +) -> LemmyResult<()> { let pool = &mut context.pool(); let posts = Post::fetch_pictrs_posts_for_community(pool, banned_community_id).await?; for post in posts { @@ -543,104 +705,189 @@ pub async fn purge_image_posts_for_community( Ok(()) } -pub async fn remove_user_data( +/// Removes or restores user data. 
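+///
+/// When `removed` is true, the person's avatar, banner and image posts are purged, communities
+/// where they are the top moderator are removed, and their posts and comments are removed along
+/// with `ModRemovePost`/`ModRemoveComment` modlog entries; when false, the posts and comments are
+/// restored and the restore is logged. A hedged sketch of a call site (variable names assumed),
+/// mirroring the test further down in this change:
+///
+/// ```ignore
+/// remove_or_restore_user_data(
+///   mod_person.id,
+///   banned_person.id,
+///   true,
+///   &Some("spam".to_string()),
+///   &context,
+/// )
+/// .await?;
+/// ```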
+pub async fn remove_or_restore_user_data( + mod_person_id: PersonId, banned_person_id: PersonId, + removed: bool, + reason: &Option, context: &LemmyContext, -) -> Result<(), LemmyError> { +) -> LemmyResult<()> { let pool = &mut context.pool(); - // Purge user images - let person = Person::read(pool, banned_person_id).await?; - if let Some(avatar) = person.avatar { - purge_image_from_pictrs(&avatar, context).await.ok(); - } - if let Some(banner) = person.banner { - purge_image_from_pictrs(&banner, context).await.ok(); + + // Only these actions are possible when removing, not restoring + if removed { + // Purge user images + let person = Person::read(pool, banned_person_id).await?; + if let Some(avatar) = person.avatar { + purge_image_from_pictrs(&avatar, context).await.ok(); + } + if let Some(banner) = person.banner { + purge_image_from_pictrs(&banner, context).await.ok(); + } + + // Update the fields to None + Person::update( + pool, + banned_person_id, + &PersonUpdateForm { + avatar: Some(None), + banner: Some(None), + bio: Some(None), + ..Default::default() + }, + ) + .await?; + + // Purge image posts + purge_image_posts_for_person(banned_person_id, context).await?; + + // Communities + // Remove all communities where they're the top mod + // for now, remove the communities manually + let first_mod_communities = CommunityModeratorView::get_community_first_mods(pool).await?; + + // Filter to only this banned users top communities + let banned_user_first_communities: Vec = first_mod_communities + .into_iter() + .filter(|fmc| fmc.moderator.id == banned_person_id) + .collect(); + + for first_mod_community in banned_user_first_communities { + let community_id = first_mod_community.community.id; + Community::update( + pool, + community_id, + &CommunityUpdateForm { + removed: Some(removed), + ..Default::default() + }, + ) + .await?; + + // Delete the community images + if let Some(icon) = first_mod_community.community.icon { + purge_image_from_pictrs(&icon, context).await.ok(); + } + if let Some(banner) = first_mod_community.community.banner { + purge_image_from_pictrs(&banner, context).await.ok(); + } + // Update the fields to None + Community::update( + pool, + community_id, + &CommunityUpdateForm { + icon: Some(None), + banner: Some(None), + ..Default::default() + }, + ) + .await?; + } } - // Update the fields to None - Person::update( + // Posts + let removed_or_restored_posts = + Post::update_removed_for_creator(pool, banned_person_id, None, removed).await?; + create_modlog_entries_for_removed_or_restored_posts( pool, - banned_person_id, - &PersonUpdateForm { - avatar: Some(None), - banner: Some(None), - bio: Some(None), - ..Default::default() - }, + mod_person_id, + removed_or_restored_posts.iter().map(|r| r.id).collect(), + removed, + reason, ) .await?; - // Posts - Post::update_removed_for_creator(pool, banned_person_id, None, true).await?; - - // Purge image posts - purge_image_posts_for_person(banned_person_id, context).await?; - - // Communities - // Remove all communities where they're the top mod - // for now, remove the communities manually - let first_mod_communities = CommunityModeratorView::get_community_first_mods(pool).await?; - - // Filter to only this banned users top communities - let banned_user_first_communities: Vec = first_mod_communities - .into_iter() - .filter(|fmc| fmc.moderator.id == banned_person_id) - .collect(); - - for first_mod_community in banned_user_first_communities { - let community_id = first_mod_community.community.id; - Community::update( - pool, - 
community_id, - &CommunityUpdateForm { - removed: Some(true), - ..Default::default() - }, - ) - .await?; - - // Delete the community images - if let Some(icon) = first_mod_community.community.icon { - purge_image_from_pictrs(&icon, context).await.ok(); - } - if let Some(banner) = first_mod_community.community.banner { - purge_image_from_pictrs(&banner, context).await.ok(); - } - // Update the fields to None - Community::update( - pool, - community_id, - &CommunityUpdateForm { - icon: Some(None), - banner: Some(None), - ..Default::default() - }, - ) - .await?; - } - // Comments - Comment::update_removed_for_creator(pool, banned_person_id, true).await?; + let removed_or_restored_comments = + Comment::update_removed_for_creator(pool, banned_person_id, removed).await?; + create_modlog_entries_for_removed_or_restored_comments( + pool, + mod_person_id, + removed_or_restored_comments.iter().map(|r| r.id).collect(), + removed, + reason, + ) + .await?; Ok(()) } -pub async fn remove_user_data_in_community( - community_id: CommunityId, - banned_person_id: PersonId, +async fn create_modlog_entries_for_removed_or_restored_posts( pool: &mut DbPool<'_>, -) -> Result<(), LemmyError> { + mod_person_id: PersonId, + post_ids: Vec, + removed: bool, + reason: &Option, +) -> LemmyResult<()> { + // Build the forms + let forms = post_ids + .iter() + .map(|&post_id| ModRemovePostForm { + mod_person_id, + post_id, + removed: Some(removed), + reason: reason.clone(), + }) + .collect(); + + ModRemovePost::create_multiple(pool, &forms).await?; + + Ok(()) +} + +async fn create_modlog_entries_for_removed_or_restored_comments( + pool: &mut DbPool<'_>, + mod_person_id: PersonId, + comment_ids: Vec, + removed: bool, + reason: &Option, +) -> LemmyResult<()> { + // Build the forms + let forms = comment_ids + .iter() + .map(|&comment_id| ModRemoveCommentForm { + mod_person_id, + comment_id, + removed: Some(removed), + reason: reason.clone(), + }) + .collect(); + + ModRemoveComment::create_multiple(pool, &forms).await?; + + Ok(()) +} + +pub async fn remove_or_restore_user_data_in_community( + community_id: CommunityId, + mod_person_id: PersonId, + banned_person_id: PersonId, + remove: bool, + reason: &Option, + pool: &mut DbPool<'_>, +) -> LemmyResult<()> { // Posts - Post::update_removed_for_creator(pool, banned_person_id, Some(community_id), true).await?; + let posts = + Post::update_removed_for_creator(pool, banned_person_id, Some(community_id), remove).await?; + create_modlog_entries_for_removed_or_restored_posts( + pool, + mod_person_id, + posts.iter().map(|r| r.id).collect(), + remove, + reason, + ) + .await?; // Comments // TODO Diesel doesn't allow updates with joins, so this has to be a loop + let site = Site::read_local(pool).await?; let comments = CommentQuery { creator_id: Some(banned_person_id), community_id: Some(community_id), ..Default::default() } - .list(pool) + .list(&site, pool) .await?; for comment_view in &comments { @@ -649,30 +896,43 @@ pub async fn remove_user_data_in_community( pool, comment_id, &CommentUpdateForm { - removed: Some(true), + removed: Some(remove), ..Default::default() }, ) .await?; } + create_modlog_entries_for_removed_or_restored_comments( + pool, + mod_person_id, + comments.iter().map(|r| r.comment.id).collect(), + remove, + reason, + ) + .await?; + Ok(()) } -pub async fn purge_user_account( - person_id: PersonId, - context: &LemmyContext, -) -> Result<(), LemmyError> { +pub async fn purge_user_account(person_id: PersonId, context: &LemmyContext) -> LemmyResult<()> { let pool = &mut 
context.pool(); - // Delete their images + let person = Person::read(pool, person_id).await?; + + // Delete their local images, if they're a local user + delete_local_user_images(person_id, context).await.ok(); + + // No need to update avatar and banner, those are handled in Person::delete_account if let Some(avatar) = person.avatar { purge_image_from_pictrs(&avatar, context).await.ok(); } if let Some(banner) = person.banner { purge_image_from_pictrs(&banner, context).await.ok(); } - // No need to update avatar and banner, those are handled in Person::delete_account + + // Purge image posts + purge_image_posts_for_person(person_id, context).await.ok(); // Comments Comment::permadelete_for_creator(pool, person_id) @@ -684,12 +944,14 @@ pub async fn purge_user_account( .await .with_lemmy_type(LemmyErrorType::CouldntUpdatePost)?; - // Purge image posts - purge_image_posts_for_person(person_id, context).await?; - // Leave communities they mod CommunityModerator::leave_all_communities(pool, person_id).await?; + // Delete the oauth accounts linked to the local user + if let Ok(local_user) = LocalUserView::read_person(pool, person_id).await { + OAuthAccount::delete_user_accounts(pool, local_user.local_user.id).await?; + } + Person::delete_account(pool, person_id).await?; Ok(()) @@ -724,28 +986,8 @@ pub fn generate_followers_url(actor_id: &DbUrl) -> Result { Ok(Url::parse(&format!("{actor_id}/followers"))?.into()) } -pub fn generate_inbox_url(actor_id: &DbUrl) -> Result { - Ok(Url::parse(&format!("{actor_id}/inbox"))?.into()) -} - -pub fn generate_site_inbox_url(actor_id: &DbUrl) -> Result { - let mut actor_id: Url = actor_id.clone().into(); - actor_id.set_path("site_inbox"); - Ok(actor_id.into()) -} - -pub fn generate_shared_inbox_url(actor_id: &DbUrl) -> Result { - let actor_id: Url = actor_id.clone().into(); - let url = format!( - "{}://{}{}/inbox", - &actor_id.scheme(), - &actor_id.host_str().context(location_info!())?, - if let Some(port) = actor_id.port() { - format!(":{port}") - } else { - String::new() - }, - ); +pub fn generate_inbox_url() -> LemmyResult { + let url = format!("{}/inbox", SETTINGS.get_protocol_and_hostname()); Ok(Url::parse(&url)?.into()) } @@ -757,18 +999,10 @@ pub fn generate_featured_url(actor_id: &DbUrl) -> Result { Ok(Url::parse(&format!("{actor_id}/featured"))?.into()) } -pub fn generate_moderators_url(community_id: &DbUrl) -> Result { +pub fn generate_moderators_url(community_id: &DbUrl) -> LemmyResult { Ok(Url::parse(&format!("{community_id}/moderators"))?.into()) } -pub fn create_login_cookie(jwt: Sensitive) -> Cookie<'static> { - let mut cookie = Cookie::new(AUTH_COOKIE_NAME, jwt.into_inner()); - cookie.set_secure(true); - cookie.set_same_site(SameSite::Lax); - cookie.set_http_only(true); - cookie -} - /// Ensure that ban/block expiry is in valid range. If its in past, throw error. If its more /// than 10 years in future, convert to permanent ban. Otherwise return the same value. pub fn check_expire_time(expires_unix_opt: Option) -> LemmyResult>> { @@ -796,13 +1030,169 @@ fn limit_expire_time(expires: DateTime) -> LemmyResult } } +#[tracing::instrument(skip_all)] +pub fn check_conflicting_like_filters( + liked_only: Option, + disliked_only: Option, +) -> LemmyResult<()> { + if liked_only.unwrap_or_default() && disliked_only.unwrap_or_default() { + Err(LemmyErrorType::ContradictingFilters)? 
+ } else { + Ok(()) + } +} + +pub async fn process_markdown( + text: &str, + slur_regex: &Option<Regex>, + url_blocklist: &RegexSet, + context: &LemmyContext, +) -> LemmyResult<String> { + let text = remove_slurs(text, slur_regex); + let text = clean_urls_in_text(&text); + + markdown_check_for_blocked_urls(&text, url_blocklist)?; + + if context.settings().pictrs_config()?.image_mode() == PictrsImageMode::ProxyAllImages { + let (text, links) = markdown_rewrite_image_links(text); + RemoteImage::create(&mut context.pool(), links.clone()).await?; + + // Create images and image detail rows + for link in links { + // Insert image details for the remote image + let details_res = fetch_pictrs_proxied_image_details(&link, context).await; + if let Ok(details) = details_res { + let proxied = + build_proxied_image_url(&link, &context.settings().get_protocol_and_hostname())?; + let details_form = details.build_image_details_form(&proxied); + ImageDetails::create(&mut context.pool(), &details_form).await?; + } + } + Ok(text) + } else { + Ok(text) + } +} + +pub async fn process_markdown_opt( + text: &Option<String>, + slur_regex: &Option<Regex>, + url_blocklist: &RegexSet, + context: &LemmyContext, +) -> LemmyResult<Option<String>> { + match text { + Some(t) => process_markdown(t, slur_regex, url_blocklist, context) + .await + .map(Some), + None => Ok(None), + } +} + +/// A wrapper for `proxy_image_link` for use in tests. +/// +/// The parameter `image_mode` mirrors the pictrs image mode config value. It's necessary to +/// pass it as a separate parameter so it can be changed in tests. +async fn proxy_image_link_internal( + link: Url, + image_mode: PictrsImageMode, + context: &LemmyContext, +) -> LemmyResult<DbUrl> { + // Don't rewrite links pointing to the local domain. + if link.domain() == Some(&context.settings().hostname) { + Ok(link.into()) + } else if image_mode == PictrsImageMode::ProxyAllImages { + RemoteImage::create(&mut context.pool(), vec![link.clone()]).await?; + + let proxied = build_proxied_image_url(&link, &context.settings().get_protocol_and_hostname())?; + // This should fail softly, since pictrs might not even be running + let details_res = fetch_pictrs_proxied_image_details(&link, context).await; + + if let Ok(details) = details_res { + let details_form = details.build_image_details_form(&proxied); + ImageDetails::create(&mut context.pool(), &details_form).await?; + }; + + Ok(proxied.into()) + } else { + Ok(link.into()) + } +} + +/// Rewrite a link to go through the `/api/v3/image_proxy` endpoint. This only applies to remote +/// URLs, and only if the image proxy setting is enabled.
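+///
+/// For illustration, with `PictrsImageMode::ProxyAllImages` enabled and hostnames taken from the
+/// test further down, a remote link is rewritten while a link on the local hostname is returned
+/// unchanged:
+///
+/// ```ignore
+/// // http://lemmy-beta/image.png
+/// //   -> https://lemmy-alpha/api/v3/image_proxy?url=http%3A%2F%2Flemmy-beta%2Fimage.png
+/// let proxied = proxy_image_link(remote_image, &context).await?;
+/// ```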
+pub async fn proxy_image_link(link: Url, context: &LemmyContext) -> LemmyResult { + proxy_image_link_internal( + link, + context.settings().pictrs_config()?.image_mode(), + context, + ) + .await +} + +pub async fn proxy_image_link_opt_api( + link: Option>, + context: &LemmyContext, +) -> LemmyResult>> { + if let Some(Some(link)) = link { + proxy_image_link(link.into(), context) + .await + .map(Some) + .map(Some) + } else { + Ok(link) + } +} + +pub async fn proxy_image_link_api( + link: Option, + context: &LemmyContext, +) -> LemmyResult> { + if let Some(link) = link { + proxy_image_link(link.into(), context).await.map(Some) + } else { + Ok(link) + } +} + +pub async fn proxy_image_link_opt_apub( + link: Option, + context: &LemmyContext, +) -> LemmyResult> { + if let Some(l) = link { + proxy_image_link(l, context).await.map(Some) + } else { + Ok(None) + } +} + +fn build_proxied_image_url( + link: &Url, + protocol_and_hostname: &str, +) -> Result { + Url::parse(&format!( + "{}/api/v3/image_proxy?url={}", + protocol_and_hostname, + encode(link.as_str()) + )) +} + #[cfg(test)] mod tests { - #![allow(clippy::unwrap_used)] - #![allow(clippy::indexing_slicing)] - use crate::utils::{honeypot_check, limit_expire_time, password_length_check}; - use chrono::{Days, Utc}; + use super::*; + use lemmy_db_schema::source::{ + comment::CommentInsertForm, + community::CommunityInsertForm, + person::PersonInsertForm, + post::PostInsertForm, + }; + use lemmy_db_views_moderator::structs::{ + ModRemoveCommentView, + ModRemovePostView, + ModlogListParams, + }; + use pretty_assertions::assert_eq; + use serial_test::serial; #[test] #[rustfmt::skip] @@ -822,23 +1212,207 @@ mod tests { } #[test] - fn test_limit_ban_term() { + fn test_limit_ban_term() -> LemmyResult<()> { // Ban expires in past, should throw error assert!(limit_expire_time(Utc::now() - Days::new(5)).is_err()); // Legitimate ban term, return same value let fourteen_days = Utc::now() + Days::new(14); - assert_eq!( - limit_expire_time(fourteen_days).unwrap(), - Some(fourteen_days) - ); + assert_eq!(limit_expire_time(fourteen_days)?, Some(fourteen_days)); let nine_years = Utc::now() + Days::new(365 * 9); - assert_eq!(limit_expire_time(nine_years).unwrap(), Some(nine_years)); + assert_eq!(limit_expire_time(nine_years)?, Some(nine_years)); // Too long ban term, changes to None (permanent ban) + assert_eq!(limit_expire_time(Utc::now() + Days::new(365 * 11))?, None); + + Ok(()) + } + + #[tokio::test] + #[serial] + async fn test_proxy_image_link() -> LemmyResult<()> { + let context = LemmyContext::init_test_context().await; + + // image from local domain is unchanged + let local_url = Url::parse("http://lemmy-alpha/image.png")?; + let proxied = + proxy_image_link_internal(local_url.clone(), PictrsImageMode::ProxyAllImages, &context) + .await?; + assert_eq!(&local_url, proxied.inner()); + + // image from remote domain is proxied + let remote_image = Url::parse("http://lemmy-beta/image.png")?; + let proxied = proxy_image_link_internal( + remote_image.clone(), + PictrsImageMode::ProxyAllImages, + &context, + ) + .await?; assert_eq!( - limit_expire_time(Utc::now() + Days::new(365 * 11)).unwrap(), - None + "https://lemmy-alpha/api/v3/image_proxy?url=http%3A%2F%2Flemmy-beta%2Fimage.png", + proxied.as_str() ); + + // This fails, because the details can't be fetched without pictrs running, + // And a remote image won't be inserted. 
+ assert!( + RemoteImage::validate(&mut context.pool(), remote_image.into()) + .await + .is_ok() + ); + + Ok(()) + } + + #[tokio::test] + #[serial] + async fn test_mod_remove_or_restore_data() -> LemmyResult<()> { + let context = LemmyContext::init_test_context().await; + let pool = &mut context.pool(); + + let inserted_instance = Instance::read_or_create(pool, "my_domain.tld".to_string()).await?; + + let new_mod = PersonInsertForm::test_form(inserted_instance.id, "modder"); + let inserted_mod = Person::create(pool, &new_mod).await?; + + let new_person = PersonInsertForm::test_form(inserted_instance.id, "chrimbus"); + let inserted_person = Person::create(pool, &new_person).await?; + + let new_community = CommunityInsertForm::new( + inserted_instance.id, + "mod_community crepes".to_string(), + "nada".to_owned(), + "pubkey".to_string(), + ); + let inserted_community = Community::create(pool, &new_community).await?; + + let post_form_1 = PostInsertForm::new( + "A test post tubular".into(), + inserted_person.id, + inserted_community.id, + ); + let inserted_post_1 = Post::create(pool, &post_form_1).await?; + + let post_form_2 = PostInsertForm::new( + "A test post radical".into(), + inserted_person.id, + inserted_community.id, + ); + let inserted_post_2 = Post::create(pool, &post_form_2).await?; + + let comment_form_1 = CommentInsertForm::new( + inserted_person.id, + inserted_post_1.id, + "A test comment tubular".into(), + ); + let _inserted_comment_1 = Comment::create(pool, &comment_form_1, None).await?; + + let comment_form_2 = CommentInsertForm::new( + inserted_person.id, + inserted_post_2.id, + "A test comment radical".into(), + ); + let _inserted_comment_2 = Comment::create(pool, &comment_form_2, None).await?; + + // Remove the user data + remove_or_restore_user_data( + inserted_mod.id, + inserted_person.id, + true, + &Some("a remove reason".to_string()), + &context, + ) + .await?; + + // Verify that their posts and comments are removed. 
+ let params = ModlogListParams { + community_id: None, + mod_person_id: None, + other_person_id: None, + post_id: None, + comment_id: None, + page: None, + limit: None, + hide_modlog_names: false, + }; + + // Posts + let post_modlog = ModRemovePostView::list(pool, params).await?; + assert_eq!(2, post_modlog.len()); + + let mod_removed_posts = post_modlog + .iter() + .map(|p| p.mod_remove_post.removed) + .collect::>(); + assert_eq!(vec![true, true], mod_removed_posts); + + let removed_posts = post_modlog + .iter() + .map(|p| p.post.removed) + .collect::>(); + assert_eq!(vec![true, true], removed_posts); + + // Comments + let comment_modlog = ModRemoveCommentView::list(pool, params).await?; + assert_eq!(2, comment_modlog.len()); + + let mod_removed_comments = comment_modlog + .iter() + .map(|p| p.mod_remove_comment.removed) + .collect::>(); + assert_eq!(vec![true, true], mod_removed_comments); + + let removed_comments = comment_modlog + .iter() + .map(|p| p.comment.removed) + .collect::>(); + assert_eq!(vec![true, true], removed_comments); + + // Now restore the content, and make sure it got appended + remove_or_restore_user_data( + inserted_mod.id, + inserted_person.id, + false, + &Some("a restore reason".to_string()), + &context, + ) + .await?; + + // Posts + let post_modlog = ModRemovePostView::list(pool, params).await?; + assert_eq!(4, post_modlog.len()); + + let mod_restored_posts = post_modlog + .iter() + .map(|p| p.mod_remove_post.removed) + .collect::>(); + assert_eq!(vec![false, false, true, true], mod_restored_posts); + + let restored_posts = post_modlog + .iter() + .map(|p| p.post.removed) + .collect::>(); + // All of these will be false, cause its the current state of the post + assert_eq!(vec![false, false, false, false], restored_posts); + + // Comments + let comment_modlog = ModRemoveCommentView::list(pool, params).await?; + assert_eq!(4, comment_modlog.len()); + + let mod_restored_comments = comment_modlog + .iter() + .map(|p| p.mod_remove_comment.removed) + .collect::>(); + assert_eq!(vec![false, false, true, true], mod_restored_comments); + + let restored_comments = comment_modlog + .iter() + .map(|p| p.comment.removed) + .collect::>(); + assert_eq!(vec![false, false, false, false], restored_comments); + + Instance::delete(pool, inserted_instance.id).await?; + + Ok(()) } } diff --git a/crates/api_crud/Cargo.toml b/crates/api_crud/Cargo.toml index 06e29044b..723864705 100644 --- a/crates/api_crud/Cargo.toml +++ b/crates/api_crud/Cargo.toml @@ -1,5 +1,6 @@ [package] name = "lemmy_api_crud" +publish = false version.workspace = true edition.workspace = true description.workspace = true @@ -8,19 +9,30 @@ homepage.workspace = true documentation.workspace = true repository.workspace = true +[lints] +workspace = true + [dependencies] -lemmy_utils = { workspace = true } +lemmy_utils = { workspace = true, features = ["full"] } lemmy_db_schema = { workspace = true, features = ["full"] } lemmy_db_views = { workspace = true, features = ["full"] } lemmy_db_views_actor = { workspace = true, features = ["full"] } lemmy_api_common = { workspace = true, features = ["full"] } activitypub_federation = { workspace = true } bcrypt = { workspace = true } -serde = { workspace = true } actix-web = { workspace = true } tracing = { workspace = true } url = { workspace = true } -async-trait = { workspace = true } -webmention = "0.5.0" -chrono = { workspace = true } +futures.workspace = true uuid = { workspace = true } +moka.workspace = true +anyhow.workspace = true +chrono.workspace = true 
+webmention = "0.6.0" +accept-language = "3.1.0" +serde_json = { workspace = true } +serde = { workspace = true } +serde_with = { workspace = true } + +[package.metadata.cargo-shear] +ignored = ["futures"] diff --git a/crates/api_crud/src/comment/create.rs b/crates/api_crud/src/comment/create.rs index 2e719eda2..2f67fa7e7 100644 --- a/crates/api_crud/src/comment/create.rs +++ b/crates/api_crud/src/comment/create.rs @@ -8,59 +8,67 @@ use lemmy_api_common::{ utils::{ check_community_user_action, check_post_deleted_or_removed, - generate_local_apub_endpoint, - get_post, + get_url_blocklist, + is_mod_or_admin, local_site_to_slur_regex, - EndpointType, + process_markdown, + update_read_comments, }, }; use lemmy_db_schema::{ impls::actor_language::default_post_language, source::{ actor_language::CommunityLanguage, - comment::{Comment, CommentInsertForm, CommentLike, CommentLikeForm, CommentUpdateForm}, + comment::{Comment, CommentInsertForm, CommentLike, CommentLikeForm}, comment_reply::{CommentReply, CommentReplyUpdateForm}, local_site::LocalSite, person_mention::{PersonMention, PersonMentionUpdateForm}, }, traits::{Crud, Likeable}, }; -use lemmy_db_views::structs::LocalUserView; +use lemmy_db_views::structs::{LocalUserView, PostView}; use lemmy_utils::{ - error::{LemmyError, LemmyErrorExt, LemmyErrorType}, - utils::{ - mention::scrape_text_for_mentions, - slurs::remove_slurs, - validation::is_valid_body_field, - }, + error::{LemmyErrorExt, LemmyErrorType, LemmyResult}, + utils::{mention::scrape_text_for_mentions, validation::is_valid_body_field}, + MAX_COMMENT_DEPTH_LIMIT, }; -const MAX_COMMENT_DEPTH_LIMIT: usize = 100; - #[tracing::instrument(skip(context))] pub async fn create_comment( data: Json, context: Data, local_user_view: LocalUserView, -) -> Result, LemmyError> { +) -> LemmyResult> { let local_site = LocalSite::read(&mut context.pool()).await?; - let content = remove_slurs( - &data.content.clone(), - &local_site_to_slur_regex(&local_site), - ); - is_valid_body_field(&Some(content.clone()), false)?; + let slur_regex = local_site_to_slur_regex(&local_site); + let url_blocklist = get_url_blocklist(&context).await?; + let content = process_markdown(&data.content, &slur_regex, &url_blocklist, &context).await?; + is_valid_body_field(&content, false)?; // Check for a community ban let post_id = data.post_id; - let post = get_post(post_id, &mut context.pool()).await?; - let community_id = post.community_id; + + // Read the full post view in order to get the comments count. + let post_view = PostView::read( + &mut context.pool(), + post_id, + Some(&local_user_view.local_user), + true, + ) + .await?; + + let post = post_view.post; + let community_id = post_view.community.id; check_community_user_action(&local_user_view.person, community_id, &mut context.pool()).await?; check_post_deleted_or_removed(&post)?; // Check if post is locked, no new comments - if post.locked { + let is_mod_or_admin = is_mod_or_admin(&mut context.pool(), &local_user_view.person, community_id) + .await + .is_ok(); + if post.locked && !is_mod_or_admin { Err(LemmyErrorType::Locked)? 
} @@ -80,16 +88,9 @@ pub async fn create_comment( check_comment_depth(parent)?; } - CommunityLanguage::is_allowed_community_language( - &mut context.pool(), - data.language_id, - community_id, - ) - .await?; - // attempt to set default language if none was provided let language_id = match data.language_id { - Some(lid) => Some(lid), + Some(lid) => lid, None => { default_post_language( &mut context.pool(), @@ -100,12 +101,13 @@ pub async fn create_comment( } }; - let comment_form = CommentInsertForm::builder() - .content(content.clone()) - .post_id(data.post_id) - .creator_id(local_user_view.person.id) - .language_id(language_id) - .build(); + CommunityLanguage::is_allowed_community_language(&mut context.pool(), language_id, community_id) + .await?; + + let comment_form = CommentInsertForm { + language_id: Some(language_id), + ..CommentInsertForm::new(local_user_view.person.id, data.post_id, content.clone()) + }; // Create the comment let parent_path = parent_opt.clone().map(|t| t.path); @@ -113,42 +115,23 @@ pub async fn create_comment( .await .with_lemmy_type(LemmyErrorType::CouldntCreateComment)?; - // Necessary to update the ap_id let inserted_comment_id = inserted_comment.id; - let protocol_and_hostname = context.settings().get_protocol_and_hostname(); - - let apub_id = generate_local_apub_endpoint( - EndpointType::Comment, - &inserted_comment_id.to_string(), - &protocol_and_hostname, - )?; - let updated_comment = Comment::update( - &mut context.pool(), - inserted_comment_id, - &CommentUpdateForm { - ap_id: Some(apub_id), - ..Default::default() - }, - ) - .await - .with_lemmy_type(LemmyErrorType::CouldntCreateComment)?; // Scan the comment for user mentions, add those rows let mentions = scrape_text_for_mentions(&content); let recipient_ids = send_local_notifs( mentions, - &updated_comment, + inserted_comment_id, &local_user_view.person, - &post, true, &context, + Some(&local_user_view), ) .await?; // You like your own comment by default let like_form = CommentLikeForm { comment_id: inserted_comment.id, - post_id: post.id, person_id: local_user_view.person.id, score: 1, }; @@ -158,16 +141,30 @@ pub async fn create_comment( .with_lemmy_type(LemmyErrorType::CouldntLikeComment)?; ActivityChannel::submit_activity( - SendActivityData::CreateComment(updated_comment.clone()), + SendActivityData::CreateComment(inserted_comment.clone()), &context, ) .await?; - // If its a reply, mark the parent as read + // Update the read comments, so your own new comment doesn't appear as a +1 unread + update_read_comments( + local_user_view.person.id, + post_id, + post_view.counts.comments + 1, + &mut context.pool(), + ) + .await?; + + // If we're responding to a comment where we're the recipient, + // (ie we're the grandparent, or the recipient of the parent comment_reply), + // then mark the parent as read. + // Then we don't have to do it manually after we respond to a comment. 
if let Some(parent) = parent_opt { + let person_id = local_user_view.person.id; let parent_id = parent.id; - let comment_reply = CommentReply::read_by_comment(&mut context.pool(), parent_id).await; - if let Ok(reply) = comment_reply { + let comment_reply = + CommentReply::read_by_comment_and_person(&mut context.pool(), parent_id, person_id).await; + if let Ok(Some(reply)) = comment_reply { CommentReply::update( &mut context.pool(), reply.id, @@ -178,10 +175,9 @@ pub async fn create_comment( } // If the parent has PersonMentions mark them as read too - let person_id = local_user_view.person.id; let person_mention = PersonMention::read_by_comment_and_person(&mut context.pool(), parent_id, person_id).await; - if let Ok(mention) = person_mention { + if let Ok(Some(mention)) = person_mention { PersonMention::update( &mut context.pool(), mention.id, @@ -203,7 +199,7 @@ pub async fn create_comment( )) } -pub fn check_comment_depth(comment: &Comment) -> Result<(), LemmyError> { +pub fn check_comment_depth(comment: &Comment) -> LemmyResult<()> { let path = &comment.path.0; let length = path.split('.').count(); if length > MAX_COMMENT_DEPTH_LIMIT { diff --git a/crates/api_crud/src/comment/delete.rs b/crates/api_crud/src/comment/delete.rs index 2de2a7955..2b5f35827 100644 --- a/crates/api_crud/src/comment/delete.rs +++ b/crates/api_crud/src/comment/delete.rs @@ -8,23 +8,25 @@ use lemmy_api_common::{ utils::check_community_user_action, }; use lemmy_db_schema::{ - source::{ - comment::{Comment, CommentUpdateForm}, - post::Post, - }, + source::comment::{Comment, CommentUpdateForm}, traits::Crud, }; use lemmy_db_views::structs::{CommentView, LocalUserView}; -use lemmy_utils::error::{LemmyError, LemmyErrorExt, LemmyErrorType}; +use lemmy_utils::error::{LemmyErrorExt, LemmyErrorType, LemmyResult}; #[tracing::instrument(skip(context))] pub async fn delete_comment( data: Json, context: Data, local_user_view: LocalUserView, -) -> Result, LemmyError> { +) -> LemmyResult> { let comment_id = data.comment_id; - let orig_comment = CommentView::read(&mut context.pool(), comment_id, None).await?; + let orig_comment = CommentView::read( + &mut context.pool(), + comment_id, + Some(&local_user_view.local_user), + ) + .await?; // Dont delete it if its already been deleted. 
if orig_comment.comment.deleted == data.deleted { @@ -56,15 +58,13 @@ pub async fn delete_comment( .await .with_lemmy_type(LemmyErrorType::CouldntUpdateComment)?; - let post_id = updated_comment.post_id; - let post = Post::read(&mut context.pool(), post_id).await?; let recipient_ids = send_local_notifs( vec![], - &updated_comment, + comment_id, &local_user_view.person, - &post, false, &context, + Some(&local_user_view), ) .await?; let updated_comment_id = updated_comment.id; diff --git a/crates/api_crud/src/comment/read.rs b/crates/api_crud/src/comment/read.rs index 733d08682..39852081f 100644 --- a/crates/api_crud/src/comment/read.rs +++ b/crates/api_crud/src/comment/read.rs @@ -7,14 +7,14 @@ use lemmy_api_common::{ }; use lemmy_db_schema::source::local_site::LocalSite; use lemmy_db_views::structs::LocalUserView; -use lemmy_utils::error::LemmyError; +use lemmy_utils::error::LemmyResult; #[tracing::instrument(skip(context))] pub async fn get_comment( data: Query, context: Data, local_user_view: Option, -) -> Result, LemmyError> { +) -> LemmyResult> { let local_site = LocalSite::read(&mut context.pool()).await?; check_private_instance(&local_user_view, &local_site)?; diff --git a/crates/api_crud/src/comment/remove.rs b/crates/api_crud/src/comment/remove.rs index cbfbcd22c..3c137a984 100644 --- a/crates/api_crud/src/comment/remove.rs +++ b/crates/api_crud/src/comment/remove.rs @@ -10,22 +10,28 @@ use lemmy_api_common::{ use lemmy_db_schema::{ source::{ comment::{Comment, CommentUpdateForm}, + comment_report::CommentReport, + local_user::LocalUser, moderator::{ModRemoveComment, ModRemoveCommentForm}, - post::Post, }, - traits::Crud, + traits::{Crud, Reportable}, }; use lemmy_db_views::structs::{CommentView, LocalUserView}; -use lemmy_utils::error::{LemmyError, LemmyErrorExt, LemmyErrorType}; +use lemmy_utils::error::{LemmyErrorExt, LemmyErrorType, LemmyResult}; #[tracing::instrument(skip(context))] pub async fn remove_comment( data: Json, context: Data, local_user_view: LocalUserView, -) -> Result, LemmyError> { +) -> LemmyResult> { let comment_id = data.comment_id; - let orig_comment = CommentView::read(&mut context.pool(), comment_id, None).await?; + let orig_comment = CommentView::read( + &mut context.pool(), + comment_id, + Some(&local_user_view.local_user), + ) + .await?; check_community_mod_action( &local_user_view.person, @@ -35,6 +41,20 @@ pub async fn remove_comment( ) .await?; + LocalUser::is_higher_mod_or_admin_check( + &mut context.pool(), + orig_comment.community.id, + local_user_view.person.id, + vec![orig_comment.creator.id], + ) + .await?; + + // Don't allow removing or restoring comment which was deleted by user, as it would reveal + // the comment text in mod log. 
+ if orig_comment.comment.deleted { + return Err(LemmyErrorType::CouldntUpdateComment.into()); + } + // Do the remove let removed = data.removed; let updated_comment = Comment::update( @@ -48,6 +68,9 @@ pub async fn remove_comment( .await .with_lemmy_type(LemmyErrorType::CouldntUpdateComment)?; + CommentReport::resolve_all_for_object(&mut context.pool(), comment_id, local_user_view.person.id) + .await?; + // Mod tables let form = ModRemoveCommentForm { mod_person_id: local_user_view.person.id, @@ -57,26 +80,24 @@ pub async fn remove_comment( }; ModRemoveComment::create(&mut context.pool(), &form).await?; - let post_id = updated_comment.post_id; - let post = Post::read(&mut context.pool(), post_id).await?; let recipient_ids = send_local_notifs( vec![], - &updated_comment, - &local_user_view.person.clone(), - &post, + comment_id, + &local_user_view.person, false, &context, + Some(&local_user_view), ) .await?; let updated_comment_id = updated_comment.id; ActivityChannel::submit_activity( - SendActivityData::RemoveComment( - updated_comment, - local_user_view.person.clone(), - orig_comment.community, - data.reason.clone(), - ), + SendActivityData::RemoveComment { + comment: updated_comment, + moderator: local_user_view.person.clone(), + community: orig_comment.community, + reason: data.reason.clone(), + }, &context, ) .await?; diff --git a/crates/api_crud/src/comment/update.rs b/crates/api_crud/src/comment/update.rs index 21cf54cfa..51f65aa67 100644 --- a/crates/api_crud/src/comment/update.rs +++ b/crates/api_crud/src/comment/update.rs @@ -5,7 +5,12 @@ use lemmy_api_common::{ comment::{CommentResponse, EditComment}, context::LemmyContext, send_activity::{ActivityChannel, SendActivityData}, - utils::{check_community_user_action, local_site_to_slur_regex}, + utils::{ + check_community_user_action, + get_url_blocklist, + local_site_to_slur_regex, + process_markdown_opt, + }, }; use lemmy_db_schema::{ source::{ @@ -18,12 +23,8 @@ use lemmy_db_schema::{ }; use lemmy_db_views::structs::{CommentView, LocalUserView}; use lemmy_utils::{ - error::{LemmyError, LemmyErrorExt, LemmyErrorType}, - utils::{ - mention::scrape_text_for_mentions, - slurs::remove_slurs, - validation::is_valid_body_field, - }, + error::{LemmyErrorExt, LemmyErrorType, LemmyResult}, + utils::{mention::scrape_text_for_mentions, validation::is_valid_body_field}, }; #[tracing::instrument(skip(context))] @@ -31,11 +32,16 @@ pub async fn update_comment( data: Json, context: Data, local_user_view: LocalUserView, -) -> Result, LemmyError> { +) -> LemmyResult> { let local_site = LocalSite::read(&mut context.pool()).await?; let comment_id = data.comment_id; - let orig_comment = CommentView::read(&mut context.pool(), comment_id, None).await?; + let orig_comment = CommentView::read( + &mut context.pool(), + comment_id, + Some(&local_user_view.local_user), + ) + .await?; check_community_user_action( &local_user_view.person, @@ -49,20 +55,21 @@ pub async fn update_comment( Err(LemmyErrorType::NoCommentEditAllowed)? 
} - let language_id = data.language_id; - CommunityLanguage::is_allowed_community_language( - &mut context.pool(), - language_id, - orig_comment.community.id, - ) - .await?; + if let Some(language_id) = data.language_id { + CommunityLanguage::is_allowed_community_language( + &mut context.pool(), + language_id, + orig_comment.community.id, + ) + .await?; + } - // Update the Content - let content = data - .content - .as_ref() - .map(|c| remove_slurs(c, &local_site_to_slur_regex(&local_site))); - is_valid_body_field(&content, false)?; + let slur_regex = local_site_to_slur_regex(&local_site); + let url_blocklist = get_url_blocklist(&context).await?; + let content = process_markdown_opt(&data.content, &slur_regex, &url_blocklist, &context).await?; + if let Some(content) = &content { + is_valid_body_field(content, false)?; + } let comment_id = data.comment_id; let form = CommentUpdateForm { @@ -80,11 +87,11 @@ pub async fn update_comment( let mentions = scrape_text_for_mentions(&updated_comment_content); let recipient_ids = send_local_notifs( mentions, - &updated_comment, + comment_id, &local_user_view.person, - &orig_comment.post, false, &context, + Some(&local_user_view), ) .await?; diff --git a/crates/api_crud/src/community/create.rs b/crates/api_crud/src/community/create.rs index 91725c409..cd0fc985e 100644 --- a/crates/api_crud/src/community/create.rs +++ b/crates/api_crud/src/community/create.rs @@ -8,9 +8,11 @@ use lemmy_api_common::{ generate_followers_url, generate_inbox_url, generate_local_apub_endpoint, - generate_shared_inbox_url, + get_url_blocklist, is_admin, local_site_to_slur_regex, + process_markdown_opt, + proxy_image_link_api, EndpointType, }, }; @@ -27,14 +29,18 @@ use lemmy_db_schema::{ }, }, traits::{ApubActor, Crud, Followable, Joinable}, - utils::diesel_option_overwrite_to_url_create, + utils::diesel_url_create, }; use lemmy_db_views::structs::{LocalUserView, SiteView}; use lemmy_utils::{ - error::{LemmyError, LemmyErrorExt, LemmyErrorType}, + error::{LemmyErrorExt, LemmyErrorType, LemmyResult}, utils::{ - slurs::{check_slurs, check_slurs_opt}, - validation::{is_valid_actor_name, is_valid_body_field}, + slurs::check_slurs, + validation::{ + is_valid_actor_name, + is_valid_body_field, + site_or_community_description_length_check, + }, }, }; @@ -43,7 +49,7 @@ pub async fn create_community( data: Json, context: Data, local_user_view: LocalUserView, -) -> Result, LemmyError> { +) -> LemmyResult> { let site_view = SiteView::read_local(&mut context.pool()).await?; let local_site = site_view.local_site; @@ -51,17 +57,30 @@ pub async fn create_community( Err(LemmyErrorType::OnlyAdminsCanCreateCommunities)? } - // Check to make sure the icon and banners are urls - let icon = diesel_option_overwrite_to_url_create(&data.icon)?; - let banner = diesel_option_overwrite_to_url_create(&data.banner)?; - let slur_regex = local_site_to_slur_regex(&local_site); + let url_blocklist = get_url_blocklist(&context).await?; check_slurs(&data.name, &slur_regex)?; check_slurs(&data.title, &slur_regex)?; - check_slurs_opt(&data.description, &slur_regex)?; + let sidebar = process_markdown_opt(&data.sidebar, &slur_regex, &url_blocklist, &context).await?; + + // Ensure that the sidebar has fewer than the max num characters... 
+ if let Some(sidebar) = &sidebar { + is_valid_body_field(sidebar, false)?; + } + + let description = data.description.clone(); + if let Some(desc) = &description { + site_or_community_description_length_check(desc)?; + check_slurs(desc, &slur_regex)?; + } + + let icon = diesel_url_create(data.icon.as_deref())?; + let icon = proxy_image_link_api(icon, &context).await?; + + let banner = diesel_url_create(data.banner.as_deref())?; + let banner = proxy_image_link_api(banner, &context).await?; is_valid_actor_name(&data.name, local_site.actor_name_max_length as usize)?; - is_valid_body_field(&data.description, false)?; // Double check for duplicate community actor_ids let community_actor_id = generate_local_apub_endpoint( @@ -78,22 +97,25 @@ pub async fn create_community( // When you create a community, make sure the user becomes a moderator and a follower let keypair = generate_actor_keypair()?; - let community_form = CommunityInsertForm::builder() - .name(data.name.clone()) - .title(data.title.clone()) - .description(data.description.clone()) - .icon(icon) - .banner(banner) - .nsfw(data.nsfw) - .actor_id(Some(community_actor_id.clone())) - .private_key(Some(keypair.private_key)) - .public_key(keypair.public_key) - .followers_url(Some(generate_followers_url(&community_actor_id)?)) - .inbox_url(Some(generate_inbox_url(&community_actor_id)?)) - .shared_inbox_url(Some(generate_shared_inbox_url(&community_actor_id)?)) - .posting_restricted_to_mods(data.posting_restricted_to_mods) - .instance_id(site_view.site.instance_id) - .build(); + let community_form = CommunityInsertForm { + sidebar, + description, + icon, + banner, + nsfw: data.nsfw, + actor_id: Some(community_actor_id.clone()), + private_key: Some(keypair.private_key), + followers_url: Some(generate_followers_url(&community_actor_id)?), + inbox_url: Some(generate_inbox_url()?), + posting_restricted_to_mods: data.posting_restricted_to_mods, + visibility: data.visibility, + ..CommunityInsertForm::new( + site_view.site.instance_id, + data.name.clone(), + data.title.clone(), + keypair.public_key, + ) + }; let inserted_community = Community::create(&mut context.pool(), &community_form) .await diff --git a/crates/api_crud/src/community/delete.rs b/crates/api_crud/src/community/delete.rs index 60b79fd79..a2ceaff50 100644 --- a/crates/api_crud/src/community/delete.rs +++ b/crates/api_crud/src/community/delete.rs @@ -13,14 +13,14 @@ use lemmy_db_schema::{ }; use lemmy_db_views::structs::LocalUserView; use lemmy_db_views_actor::structs::CommunityModeratorView; -use lemmy_utils::error::{LemmyError, LemmyErrorExt, LemmyErrorType}; +use lemmy_utils::error::{LemmyErrorExt, LemmyErrorType, LemmyResult}; #[tracing::instrument(skip(context))] pub async fn delete_community( data: Json, context: Data, local_user_view: LocalUserView, -) -> Result, LemmyError> { +) -> LemmyResult> { // Fetch the community mods let community_id = data.community_id; let community_mods = diff --git a/crates/api_crud/src/community/list.rs b/crates/api_crud/src/community/list.rs index 0879421ba..9c13ae89f 100644 --- a/crates/api_crud/src/community/list.rs +++ b/crates/api_crud/src/community/list.rs @@ -4,24 +4,23 @@ use lemmy_api_common::{ context::LemmyContext, utils::{check_private_instance, is_admin}, }; -use lemmy_db_schema::source::local_site::LocalSite; -use lemmy_db_views::structs::LocalUserView; +use lemmy_db_views::structs::{LocalUserView, SiteView}; use lemmy_db_views_actor::community_view::CommunityQuery; -use lemmy_utils::error::LemmyError; +use 
lemmy_utils::error::LemmyResult; #[tracing::instrument(skip(context))] pub async fn list_communities( data: Query, context: Data, local_user_view: Option, -) -> Result, LemmyError> { - let local_site = LocalSite::read(&mut context.pool()).await?; +) -> LemmyResult> { + let local_site = SiteView::read_local(&mut context.pool()).await?; let is_admin = local_user_view .as_ref() .map(|luv| is_admin(luv).is_ok()) .unwrap_or_default(); - check_private_instance(&local_user_view, &local_site)?; + check_private_instance(&local_user_view, &local_site.local_site)?; let sort = data.sort; let listing_type = data.type_; @@ -39,7 +38,7 @@ pub async fn list_communities( is_mod_or_admin: is_admin, ..Default::default() } - .list(&mut context.pool()) + .list(&local_site.site, &mut context.pool()) .await?; // Return the jwt diff --git a/crates/api_crud/src/community/remove.rs b/crates/api_crud/src/community/remove.rs index 3c21c02b2..f4271565d 100644 --- a/crates/api_crud/src/community/remove.rs +++ b/crates/api_crud/src/community/remove.rs @@ -15,14 +15,14 @@ use lemmy_db_schema::{ traits::Crud, }; use lemmy_db_views::structs::LocalUserView; -use lemmy_utils::error::{LemmyError, LemmyErrorExt, LemmyErrorType}; +use lemmy_utils::error::{LemmyErrorExt, LemmyErrorType, LemmyResult}; #[tracing::instrument(skip(context))] pub async fn remove_community( data: Json, context: Data, local_user_view: LocalUserView, -) -> Result, LemmyError> { +) -> LemmyResult> { check_community_mod_action( &local_user_view.person, data.community_id, @@ -58,12 +58,12 @@ pub async fn remove_community( ModRemoveCommunity::create(&mut context.pool(), &form).await?; ActivityChannel::submit_activity( - SendActivityData::RemoveCommunity( - local_user_view.person.clone(), + SendActivityData::RemoveCommunity { + moderator: local_user_view.person.clone(), community, - data.reason.clone(), - data.removed, - ), + reason: data.reason.clone(), + removed: data.removed, + }, &context, ) .await?; diff --git a/crates/api_crud/src/community/update.rs b/crates/api_crud/src/community/update.rs index 40ba1a2a1..cde8058ee 100644 --- a/crates/api_crud/src/community/update.rs +++ b/crates/api_crud/src/community/update.rs @@ -4,8 +4,15 @@ use lemmy_api_common::{ build_response::build_community_response, community::{CommunityResponse, EditCommunity}, context::LemmyContext, + request::replace_image, send_activity::{ActivityChannel, SendActivityData}, - utils::{check_community_mod_action, local_site_to_slur_regex}, + utils::{ + check_community_mod_action, + get_url_blocklist, + local_site_to_slur_regex, + process_markdown_opt, + proxy_image_link_opt_api, + }, }; use lemmy_db_schema::{ source::{ @@ -14,11 +21,11 @@ use lemmy_db_schema::{ local_site::LocalSite, }, traits::Crud, - utils::{diesel_option_overwrite, diesel_option_overwrite_to_url, naive_now}, + utils::{diesel_string_update, diesel_url_update, naive_now}, }; use lemmy_db_views::structs::LocalUserView; use lemmy_utils::{ - error::{LemmyError, LemmyErrorExt, LemmyErrorType}, + error::{LemmyErrorExt, LemmyErrorType, LemmyResult}, utils::{slurs::check_slurs_opt, validation::is_valid_body_field}, }; @@ -27,17 +34,34 @@ pub async fn update_community( data: Json, context: Data, local_user_view: LocalUserView, -) -> Result, LemmyError> { +) -> LemmyResult> { let local_site = LocalSite::read(&mut context.pool()).await?; let slur_regex = local_site_to_slur_regex(&local_site); + let url_blocklist = get_url_blocklist(&context).await?; check_slurs_opt(&data.title, &slur_regex)?; - check_slurs_opt(&data.description, 
&slur_regex)?; - is_valid_body_field(&data.description, false)?; - let icon = diesel_option_overwrite_to_url(&data.icon)?; - let banner = diesel_option_overwrite_to_url(&data.banner)?; - let description = diesel_option_overwrite(data.description.clone()); + let sidebar = diesel_string_update( + process_markdown_opt(&data.sidebar, &slur_regex, &url_blocklist, &context) + .await? + .as_deref(), + ); + + if let Some(Some(sidebar)) = &sidebar { + is_valid_body_field(sidebar, false)?; + } + + let description = diesel_string_update(data.description.as_deref()); + + let old_community = Community::read(&mut context.pool(), data.community_id).await?; + + let icon = diesel_url_update(data.icon.as_deref())?; + replace_image(&icon, &old_community.icon, &context).await?; + let icon = proxy_image_link_opt_api(icon, &context).await?; + + let banner = diesel_url_update(data.banner.as_deref())?; + replace_image(&banner, &old_community.banner, &context).await?; + let banner = proxy_image_link_opt_api(banner, &context).await?; // Verify its a mod (only mods can edit it) check_community_mod_action( @@ -62,11 +86,13 @@ pub async fn update_community( let community_form = CommunityUpdateForm { title: data.title.clone(), + sidebar, description, icon, banner, nsfw: data.nsfw, posting_restricted_to_mods: data.posting_restricted_to_mods, + visibility: data.visibility, updated: Some(Some(naive_now())), ..Default::default() }; diff --git a/crates/api_crud/src/custom_emoji/create.rs b/crates/api_crud/src/custom_emoji/create.rs index cd30ef1e9..333a7ce89 100644 --- a/crates/api_crud/src/custom_emoji/create.rs +++ b/crates/api_crud/src/custom_emoji/create.rs @@ -5,38 +5,36 @@ use lemmy_api_common::{ custom_emoji::{CreateCustomEmoji, CustomEmojiResponse}, utils::is_admin, }; -use lemmy_db_schema::source::{ - custom_emoji::{CustomEmoji, CustomEmojiInsertForm}, - custom_emoji_keyword::{CustomEmojiKeyword, CustomEmojiKeywordInsertForm}, - local_site::LocalSite, +use lemmy_db_schema::{ + source::{ + custom_emoji::{CustomEmoji, CustomEmojiInsertForm}, + custom_emoji_keyword::{CustomEmojiKeyword, CustomEmojiKeywordInsertForm}, + }, + traits::Crud, }; use lemmy_db_views::structs::{CustomEmojiView, LocalUserView}; -use lemmy_utils::error::LemmyError; +use lemmy_utils::error::LemmyResult; #[tracing::instrument(skip(context))] pub async fn create_custom_emoji( data: Json, context: Data, local_user_view: LocalUserView, -) -> Result, LemmyError> { - let local_site = LocalSite::read(&mut context.pool()).await?; +) -> LemmyResult> { // Make sure user is an admin is_admin(&local_user_view)?; - let emoji_form = CustomEmojiInsertForm::builder() - .local_site_id(local_site.id) - .shortcode(data.shortcode.to_lowercase().trim().to_string()) - .alt_text(data.alt_text.to_string()) - .category(data.category.to_string()) - .image_url(data.clone().image_url.into()) - .build(); + let emoji_form = CustomEmojiInsertForm::new( + data.shortcode.to_lowercase().trim().to_string(), + data.clone().image_url.into(), + data.alt_text.to_string(), + data.category.to_string(), + ); let emoji = CustomEmoji::create(&mut context.pool(), &emoji_form).await?; let mut keywords = vec![]; for keyword in &data.keywords { - let keyword_form = CustomEmojiKeywordInsertForm::builder() - .custom_emoji_id(emoji.id) - .keyword(keyword.to_lowercase().trim().to_string()) - .build(); + let keyword_form = + CustomEmojiKeywordInsertForm::new(emoji.id, keyword.to_lowercase().trim().to_string()); keywords.push(keyword_form); } CustomEmojiKeyword::create(&mut context.pool(), 
keywords).await?; diff --git a/crates/api_crud/src/custom_emoji/delete.rs b/crates/api_crud/src/custom_emoji/delete.rs index 93c5f8d80..818fd4d88 100644 --- a/crates/api_crud/src/custom_emoji/delete.rs +++ b/crates/api_crud/src/custom_emoji/delete.rs @@ -6,16 +6,16 @@ use lemmy_api_common::{ utils::is_admin, SuccessResponse, }; -use lemmy_db_schema::source::custom_emoji::CustomEmoji; +use lemmy_db_schema::{source::custom_emoji::CustomEmoji, traits::Crud}; use lemmy_db_views::structs::LocalUserView; -use lemmy_utils::error::LemmyError; +use lemmy_utils::error::LemmyResult; #[tracing::instrument(skip(context))] pub async fn delete_custom_emoji( data: Json, context: Data, local_user_view: LocalUserView, -) -> Result, LemmyError> { +) -> LemmyResult> { // Make sure user is an admin is_admin(&local_user_view)?; diff --git a/crates/api_crud/src/custom_emoji/list.rs b/crates/api_crud/src/custom_emoji/list.rs new file mode 100644 index 000000000..6ee5a44b0 --- /dev/null +++ b/crates/api_crud/src/custom_emoji/list.rs @@ -0,0 +1,25 @@ +use actix_web::web::{Data, Json, Query}; +use lemmy_api_common::{ + context::LemmyContext, + custom_emoji::{ListCustomEmojis, ListCustomEmojisResponse}, +}; +use lemmy_db_views::structs::{CustomEmojiView, LocalUserView}; +use lemmy_utils::error::LemmyError; + +#[tracing::instrument(skip(context))] +pub async fn list_custom_emojis( + data: Query, + local_user_view: Option, + context: Data, +) -> Result, LemmyError> { + let custom_emojis = CustomEmojiView::list( + &mut context.pool(), + &data.category, + data.page, + data.limit, + data.ignore_page_limits.unwrap_or(false), + ) + .await?; + + Ok(Json(ListCustomEmojisResponse { custom_emojis })) +} diff --git a/crates/api_crud/src/custom_emoji/mod.rs b/crates/api_crud/src/custom_emoji/mod.rs index fdb2f5561..ffd48daf6 100644 --- a/crates/api_crud/src/custom_emoji/mod.rs +++ b/crates/api_crud/src/custom_emoji/mod.rs @@ -1,3 +1,4 @@ pub mod create; pub mod delete; +pub mod list; pub mod update; diff --git a/crates/api_crud/src/custom_emoji/update.rs b/crates/api_crud/src/custom_emoji/update.rs index 5a2631a62..6087f6969 100644 --- a/crates/api_crud/src/custom_emoji/update.rs +++ b/crates/api_crud/src/custom_emoji/update.rs @@ -5,38 +5,36 @@ use lemmy_api_common::{ custom_emoji::{CustomEmojiResponse, EditCustomEmoji}, utils::is_admin, }; -use lemmy_db_schema::source::{ - custom_emoji::{CustomEmoji, CustomEmojiUpdateForm}, - custom_emoji_keyword::{CustomEmojiKeyword, CustomEmojiKeywordInsertForm}, - local_site::LocalSite, +use lemmy_db_schema::{ + source::{ + custom_emoji::{CustomEmoji, CustomEmojiUpdateForm}, + custom_emoji_keyword::{CustomEmojiKeyword, CustomEmojiKeywordInsertForm}, + }, + traits::Crud, }; use lemmy_db_views::structs::{CustomEmojiView, LocalUserView}; -use lemmy_utils::error::LemmyError; +use lemmy_utils::error::LemmyResult; #[tracing::instrument(skip(context))] pub async fn update_custom_emoji( data: Json, context: Data, local_user_view: LocalUserView, -) -> Result, LemmyError> { - let local_site = LocalSite::read(&mut context.pool()).await?; +) -> LemmyResult> { // Make sure user is an admin is_admin(&local_user_view)?; - let emoji_form = CustomEmojiUpdateForm::builder() - .local_site_id(local_site.id) - .alt_text(data.alt_text.to_string()) - .category(data.category.to_string()) - .image_url(data.clone().image_url.into()) - .build(); + let emoji_form = CustomEmojiUpdateForm::new( + data.clone().image_url.into(), + data.alt_text.to_string(), + data.category.to_string(), + ); let emoji = 
CustomEmoji::update(&mut context.pool(), data.id, &emoji_form).await?; CustomEmojiKeyword::delete(&mut context.pool(), data.id).await?; let mut keywords = vec![]; for keyword in &data.keywords { - let keyword_form = CustomEmojiKeywordInsertForm::builder() - .custom_emoji_id(emoji.id) - .keyword(keyword.to_lowercase().trim().to_string()) - .build(); + let keyword_form = + CustomEmojiKeywordInsertForm::new(emoji.id, keyword.to_lowercase().trim().to_string()); keywords.push(keyword_form); } CustomEmojiKeyword::create(&mut context.pool(), keywords).await?; diff --git a/crates/api_crud/src/lib.rs b/crates/api_crud/src/lib.rs index aee3e8134..7d1b901b9 100644 --- a/crates/api_crud/src/lib.rs +++ b/crates/api_crud/src/lib.rs @@ -1,7 +1,9 @@ pub mod comment; pub mod community; pub mod custom_emoji; +pub mod oauth_provider; pub mod post; pub mod private_message; pub mod site; +pub mod tagline; pub mod user; diff --git a/crates/api_crud/src/oauth_provider/create.rs b/crates/api_crud/src/oauth_provider/create.rs new file mode 100644 index 000000000..fe44ae56e --- /dev/null +++ b/crates/api_crud/src/oauth_provider/create.rs @@ -0,0 +1,42 @@ +use activitypub_federation::config::Data; +use actix_web::web::Json; +use lemmy_api_common::{ + context::LemmyContext, + oauth_provider::CreateOAuthProvider, + utils::is_admin, +}; +use lemmy_db_schema::{ + source::oauth_provider::{OAuthProvider, OAuthProviderInsertForm}, + traits::Crud, +}; +use lemmy_db_views::structs::LocalUserView; +use lemmy_utils::error::LemmyError; +use url::Url; + +#[tracing::instrument(skip(context))] +pub async fn create_oauth_provider( + data: Json, + context: Data, + local_user_view: LocalUserView, +) -> Result, LemmyError> { + // Make sure user is an admin + is_admin(&local_user_view)?; + + let cloned_data = data.clone(); + let oauth_provider_form = OAuthProviderInsertForm { + display_name: cloned_data.display_name, + issuer: Url::parse(&cloned_data.issuer)?.into(), + authorization_endpoint: Url::parse(&cloned_data.authorization_endpoint)?.into(), + token_endpoint: Url::parse(&cloned_data.token_endpoint)?.into(), + userinfo_endpoint: Url::parse(&cloned_data.userinfo_endpoint)?.into(), + id_claim: cloned_data.id_claim, + client_id: data.client_id.to_string(), + client_secret: data.client_secret.to_string(), + scopes: data.scopes.to_string(), + auto_verify_email: data.auto_verify_email, + account_linking_enabled: data.account_linking_enabled, + enabled: data.enabled, + }; + let oauth_provider = OAuthProvider::create(&mut context.pool(), &oauth_provider_form).await?; + Ok(Json(oauth_provider)) +} diff --git a/crates/api_crud/src/oauth_provider/delete.rs b/crates/api_crud/src/oauth_provider/delete.rs new file mode 100644 index 000000000..0d4d616cc --- /dev/null +++ b/crates/api_crud/src/oauth_provider/delete.rs @@ -0,0 +1,25 @@ +use activitypub_federation::config::Data; +use actix_web::web::Json; +use lemmy_api_common::{ + context::LemmyContext, + oauth_provider::DeleteOAuthProvider, + utils::is_admin, + SuccessResponse, +}; +use lemmy_db_schema::{source::oauth_provider::OAuthProvider, traits::Crud}; +use lemmy_db_views::structs::LocalUserView; +use lemmy_utils::error::{LemmyError, LemmyErrorExt, LemmyErrorType}; + +#[tracing::instrument(skip(context))] +pub async fn delete_oauth_provider( + data: Json, + context: Data, + local_user_view: LocalUserView, +) -> Result, LemmyError> { + // Make sure user is an admin + is_admin(&local_user_view)?; + OAuthProvider::delete(&mut context.pool(), data.id) + .await + 
.with_lemmy_type(LemmyErrorType::CouldntDeleteOauthProvider)?; + Ok(Json(SuccessResponse::default())) +} diff --git a/crates/api_crud/src/oauth_provider/mod.rs b/crates/api_crud/src/oauth_provider/mod.rs new file mode 100644 index 000000000..fdb2f5561 --- /dev/null +++ b/crates/api_crud/src/oauth_provider/mod.rs @@ -0,0 +1,3 @@ +pub mod create; +pub mod delete; +pub mod update; diff --git a/crates/api_crud/src/oauth_provider/update.rs b/crates/api_crud/src/oauth_provider/update.rs new file mode 100644 index 000000000..b4734bf36 --- /dev/null +++ b/crates/api_crud/src/oauth_provider/update.rs @@ -0,0 +1,42 @@ +use activitypub_federation::config::Data; +use actix_web::web::Json; +use lemmy_api_common::{context::LemmyContext, oauth_provider::EditOAuthProvider, utils::is_admin}; +use lemmy_db_schema::{ + source::oauth_provider::{OAuthProvider, OAuthProviderUpdateForm}, + traits::Crud, + utils::{diesel_required_string_update, diesel_required_url_update, naive_now}, +}; +use lemmy_db_views::structs::LocalUserView; +use lemmy_utils::error::LemmyError; + +#[tracing::instrument(skip(context))] +pub async fn update_oauth_provider( + data: Json, + context: Data, + local_user_view: LocalUserView, +) -> Result, LemmyError> { + // Make sure user is an admin + is_admin(&local_user_view)?; + + let cloned_data = data.clone(); + let oauth_provider_form = OAuthProviderUpdateForm { + display_name: diesel_required_string_update(cloned_data.display_name.as_deref()), + authorization_endpoint: diesel_required_url_update( + cloned_data.authorization_endpoint.as_deref(), + )?, + token_endpoint: diesel_required_url_update(cloned_data.token_endpoint.as_deref())?, + userinfo_endpoint: diesel_required_url_update(cloned_data.userinfo_endpoint.as_deref())?, + id_claim: diesel_required_string_update(data.id_claim.as_deref()), + client_secret: diesel_required_string_update(data.client_secret.as_deref()), + scopes: diesel_required_string_update(data.scopes.as_deref()), + auto_verify_email: data.auto_verify_email, + account_linking_enabled: data.account_linking_enabled, + enabled: data.enabled, + updated: Some(Some(naive_now())), + }; + + let update_result = + OAuthProvider::update(&mut context.pool(), data.id, &oauth_provider_form).await?; + let oauth_provider = OAuthProvider::read(&mut context.pool(), update_result.id).await?; + Ok(Json(oauth_provider)) +} diff --git a/crates/api_crud/src/post/create.rs b/crates/api_crud/src/post/create.rs index e4af92916..90c68bdbd 100644 --- a/crates/api_crud/src/post/create.rs +++ b/crates/api_crud/src/post/create.rs @@ -1,18 +1,19 @@ +use super::convert_published_time; use activitypub_federation::config::Data; use actix_web::web::Json; use lemmy_api_common::{ build_response::build_post_response, context::LemmyContext, post::{CreatePost, PostResponse}, - request::fetch_site_data, - send_activity::{ActivityChannel, SendActivityData}, + request::generate_post_link_metadata, + send_activity::SendActivityData, utils::{ check_community_user_action, - generate_local_apub_endpoint, + get_url_blocklist, honeypot_check, local_site_to_slur_regex, mark_post_as_read, - EndpointType, + process_markdown_opt, }, }; use lemmy_db_schema::{ @@ -21,18 +22,26 @@ use lemmy_db_schema::{ actor_language::CommunityLanguage, community::Community, local_site::LocalSite, - post::{Post, PostInsertForm, PostLike, PostLikeForm, PostUpdateForm}, + post::{Post, PostInsertForm, PostLike, PostLikeForm}, }, traits::{Crud, Likeable}, + utils::diesel_url_create, + CommunityVisibility, }; use 
lemmy_db_views::structs::LocalUserView; -use lemmy_db_views_actor::structs::CommunityView; +use lemmy_db_views_actor::structs::CommunityModeratorView; use lemmy_utils::{ - error::{LemmyError, LemmyErrorExt, LemmyErrorType}, + error::{LemmyErrorExt, LemmyErrorType, LemmyResult}, spawn_try_task, utils::{ - slurs::{check_slurs, check_slurs_opt}, - validation::{check_url_scheme, clean_url_params, is_valid_body_field, is_valid_post_title}, + slurs::check_slurs, + validation::{ + is_url_blocked, + is_valid_alt_text_field, + is_valid_body_field, + is_valid_post_title, + is_valid_url, + }, }, }; use tracing::Instrument; @@ -44,20 +53,37 @@ pub async fn create_post( data: Json, context: Data, local_user_view: LocalUserView, -) -> Result, LemmyError> { +) -> LemmyResult> { let local_site = LocalSite::read(&mut context.pool()).await?; + honeypot_check(&data.honeypot)?; + let slur_regex = local_site_to_slur_regex(&local_site); check_slurs(&data.name, &slur_regex)?; - check_slurs_opt(&data.body, &slur_regex)?; - honeypot_check(&data.honeypot)?; + let url_blocklist = get_url_blocklist(&context).await?; - let data_url = data.url.as_ref(); - let url = data_url.map(clean_url_params).map(Into::into); // TODO no good way to handle a "clear" + let body = process_markdown_opt(&data.body, &slur_regex, &url_blocklist, &context).await?; + let url = diesel_url_create(data.url.as_deref())?; + let custom_thumbnail = diesel_url_create(data.custom_thumbnail.as_deref())?; is_valid_post_title(&data.name)?; - is_valid_body_field(&data.body, true)?; - check_url_scheme(&data.url)?; + + if let Some(url) = &url { + is_url_blocked(url, &url_blocklist)?; + is_valid_url(url)?; + } + + if let Some(custom_thumbnail) = &custom_thumbnail { + is_valid_url(custom_thumbnail)?; + } + + if let Some(alt_text) = &data.alt_text { + is_valid_alt_text_field(alt_text)?; + } + + if let Some(body) = &body { + is_valid_body_field(body, true)?; + } check_community_user_action( &local_user_view.person, @@ -70,36 +96,17 @@ pub async fn create_post( let community = Community::read(&mut context.pool(), community_id).await?; if community.posting_restricted_to_mods { let community_id = data.community_id; - let is_mod = CommunityView::is_mod_or_admin( + CommunityModeratorView::check_is_community_moderator( &mut context.pool(), - local_user_view.local_user.person_id, community_id, + local_user_view.local_user.person_id, ) .await?; - if !is_mod { - Err(LemmyErrorType::OnlyModsCanPostInCommunity)? - } } - // Fetch post links and pictrs cached image - let (metadata_res, thumbnail_url) = - fetch_site_data(context.client(), context.settings(), data_url, true).await; - let (embed_title, embed_description, embed_video_url) = metadata_res - .map(|u| (u.title, u.description, u.embed_video_url)) - .unwrap_or_default(); - - // Only need to check if language is allowed in case user set it explicitly. When using default - // language, it already only returns allowed languages. 
- CommunityLanguage::is_allowed_community_language( - &mut context.pool(), - data.language_id, - community_id, - ) - .await?; - // attempt to set default language if none was provided let language_id = match data.language_id { - Some(lid) => Some(lid), + Some(lid) => lid, None => { default_post_language( &mut context.pool(), @@ -110,41 +117,44 @@ pub async fn create_post( } }; - let post_form = PostInsertForm::builder() - .name(data.name.trim().to_string()) - .url(url) - .body(data.body.clone()) - .community_id(data.community_id) - .creator_id(local_user_view.person.id) - .nsfw(data.nsfw) - .embed_title(embed_title) - .embed_description(embed_description) - .embed_video_url(embed_video_url) - .language_id(language_id) - .thumbnail_url(thumbnail_url) - .build(); + // Only need to check if language is allowed in case user set it explicitly. When using default + // language, it already only returns allowed languages. + CommunityLanguage::is_allowed_community_language(&mut context.pool(), language_id, community_id) + .await?; + + let scheduled_publish_time = + convert_published_time(data.scheduled_publish_time, &local_user_view, &context).await?; + let post_form = PostInsertForm { + url: url.map(Into::into), + body, + alt_text: data.alt_text.clone(), + nsfw: data.nsfw, + language_id: Some(language_id), + scheduled_publish_time, + ..PostInsertForm::new( + data.name.trim().to_string(), + local_user_view.person.id, + data.community_id, + ) + }; let inserted_post = Post::create(&mut context.pool(), &post_form) .await .with_lemmy_type(LemmyErrorType::CouldntCreatePost)?; - let inserted_post_id = inserted_post.id; - let protocol_and_hostname = context.settings().get_protocol_and_hostname(); - let apub_id = generate_local_apub_endpoint( - EndpointType::Post, - &inserted_post_id.to_string(), - &protocol_and_hostname, - )?; - let updated_post = Post::update( - &mut context.pool(), - inserted_post_id, - &PostUpdateForm { - ap_id: Some(apub_id), - ..Default::default() - }, + let federate_post = if scheduled_publish_time.is_none() { + send_webmention(inserted_post.clone(), community); + |post| Some(SendActivityData::CreatePost(post)) + } else { + |_| None + }; + generate_post_link_metadata( + inserted_post.clone(), + custom_thumbnail.map(Into::into), + federate_post, + context.reset_request_count(), ) - .await - .with_lemmy_type(LemmyErrorType::CouldntCreatePost)?; + .await?; // They like their own post by default let person_id = local_user_view.person.id; @@ -159,28 +169,27 @@ pub async fn create_post( .await .with_lemmy_type(LemmyErrorType::CouldntLikePost)?; - ActivityChannel::submit_activity(SendActivityData::CreatePost(updated_post.clone()), &context) - .await?; - - // Mark the post as read mark_post_as_read(person_id, post_id, &mut context.pool()).await?; - if let Some(url) = updated_post.url.clone() { - spawn_try_task(async move { - let mut webmention = - Webmention::new::(updated_post.ap_id.clone().into(), url.clone().into())?; - webmention.set_checked(true); - match webmention - .send() - .instrument(tracing::info_span!("Sending webmention")) - .await - { - Err(WebmentionError::NoEndpointDiscovered(_)) => Ok(()), - Ok(_) => Ok(()), - Err(e) => Err(e).with_lemmy_type(LemmyErrorType::CouldntSendWebmention), - } - }); - }; - - build_post_response(&context, community_id, &local_user_view.person, post_id).await + build_post_response(&context, community_id, local_user_view, post_id).await +} + +pub fn send_webmention(post: Post, community: Community) { + if let Some(url) = post.url.clone() { + if 
community.visibility == CommunityVisibility::Public { + spawn_try_task(async move { + let mut webmention = Webmention::new::(post.ap_id.clone().into(), url.clone().into())?; + webmention.set_checked(true); + match webmention + .send() + .instrument(tracing::info_span!("Sending webmention")) + .await + { + Err(WebmentionError::NoEndpointDiscovered(_)) => Ok(()), + Ok(_) => Ok(()), + Err(e) => Err(e).with_lemmy_type(LemmyErrorType::CouldntSendWebmention), + } + }); + } + }; } diff --git a/crates/api_crud/src/post/delete.rs b/crates/api_crud/src/post/delete.rs index 630bfa357..be31759d5 100644 --- a/crates/api_crud/src/post/delete.rs +++ b/crates/api_crud/src/post/delete.rs @@ -12,14 +12,14 @@ use lemmy_db_schema::{ traits::Crud, }; use lemmy_db_views::structs::LocalUserView; -use lemmy_utils::error::{LemmyError, LemmyErrorType}; +use lemmy_utils::error::{LemmyErrorType, LemmyResult}; #[tracing::instrument(skip(context))] pub async fn delete_post( data: Json, context: Data, local_user_view: LocalUserView, -) -> Result, LemmyError> { +) -> LemmyResult> { let post_id = data.post_id; let orig_post = Post::read(&mut context.pool(), post_id).await?; @@ -52,7 +52,7 @@ pub async fn delete_post( .await?; ActivityChannel::submit_activity( - SendActivityData::DeletePost(post, local_user_view.person.clone(), data.0.clone()), + SendActivityData::DeletePost(post, local_user_view.person.clone(), data.0), &context, ) .await?; @@ -60,7 +60,7 @@ pub async fn delete_post( build_post_response( &context, orig_post.community_id, - &local_user_view.person, + local_user_view, data.post_id, ) .await diff --git a/crates/api_crud/src/post/mod.rs b/crates/api_crud/src/post/mod.rs index 8bb842b70..95df9663c 100644 --- a/crates/api_crud/src/post/mod.rs +++ b/crates/api_crud/src/post/mod.rs @@ -1,5 +1,38 @@ +use chrono::{DateTime, TimeZone, Utc}; +use lemmy_api_common::context::LemmyContext; +use lemmy_db_schema::source::post::Post; +use lemmy_db_views::structs::LocalUserView; +use lemmy_utils::{error::LemmyResult, LemmyErrorType}; + pub mod create; pub mod delete; pub mod read; pub mod remove; pub mod update; + +async fn convert_published_time( + scheduled_publish_time: Option, + local_user_view: &LocalUserView, + context: &LemmyContext, +) -> LemmyResult>> { + const MAX_SCHEDULED_POSTS: i64 = 10; + if let Some(scheduled_publish_time) = scheduled_publish_time { + let converted = Utc + .timestamp_opt(scheduled_publish_time, 0) + .single() + .ok_or(LemmyErrorType::InvalidUnixTime)?; + if converted < Utc::now() { + Err(LemmyErrorType::PostScheduleTimeMustBeInFuture)?; + } + if !local_user_view.local_user.admin { + let count = + Post::user_scheduled_post_count(local_user_view.person.id, &mut context.pool()).await?; + if count >= MAX_SCHEDULED_POSTS { + Err(LemmyErrorType::TooManyScheduledPosts)?; + } + } + Ok(Some(converted)) + } else { + Ok(None) + } +} diff --git a/crates/api_crud/src/post/read.rs b/crates/api_crud/src/post/read.rs index 352f97fe1..7677d59ef 100644 --- a/crates/api_crud/src/post/read.rs +++ b/crates/api_crud/src/post/read.rs @@ -2,29 +2,28 @@ use actix_web::web::{Data, Json, Query}; use lemmy_api_common::{ context::LemmyContext, post::{GetPost, GetPostResponse}, - utils::{check_private_instance, is_mod_or_admin_opt, mark_post_as_read}, + utils::{check_private_instance, is_mod_or_admin_opt, mark_post_as_read, update_read_comments}, }; use lemmy_db_schema::{ - aggregates::structs::{PersonPostAggregates, PersonPostAggregatesForm}, - source::{comment::Comment, local_site::LocalSite, post::Post}, + 
source::{comment::Comment, post::Post}, traits::Crud, }; use lemmy_db_views::{ post_view::PostQuery, - structs::{LocalUserView, PostView}, + structs::{LocalUserView, PostView, SiteView}, }; use lemmy_db_views_actor::structs::{CommunityModeratorView, CommunityView}; -use lemmy_utils::error::{LemmyError, LemmyErrorExt, LemmyErrorType}; +use lemmy_utils::error::{LemmyErrorType, LemmyResult}; #[tracing::instrument(skip(context))] pub async fn get_post( data: Query, context: Data, local_user_view: Option, -) -> Result, LemmyError> { - let local_site = LocalSite::read(&mut context.pool()).await?; +) -> LemmyResult> { + let local_site = SiteView::read_local(&mut context.pool()).await?; - check_private_instance(&local_user_view, &local_site)?; + check_private_instance(&local_user_view, &local_site.local_site)?; let person_id = local_user_view.as_ref().map(|u| u.person.id); @@ -33,15 +32,17 @@ pub async fn get_post( id } else if let Some(comment_id) = data.comment_id { Comment::read(&mut context.pool(), comment_id) - .await - .with_lemmy_type(LemmyErrorType::CouldntFindPost)? + .await? .post_id } else { - Err(LemmyErrorType::CouldntFindPost)? + Err(LemmyErrorType::NotFound)? }; // Check to see if the person is a mod or admin, to show deleted / removed - let community_id = Post::read(&mut context.pool(), post_id).await?.community_id; + let community_id = Post::read_xx(&mut context.pool(), post_id) + .await? + .community_id; + let is_mod_or_admin = is_mod_or_admin_opt( &mut context.pool(), local_user_view.as_ref(), @@ -50,50 +51,48 @@ pub async fn get_post( .await .is_ok(); - let post_view = PostView::read(&mut context.pool(), post_id, person_id, is_mod_or_admin) - .await - .with_lemmy_type(LemmyErrorType::CouldntFindPost)?; + let local_user = local_user_view.map(|l| l.local_user); + let post_view = PostView::read( + &mut context.pool(), + post_id, + local_user.as_ref(), + is_mod_or_admin, + ) + .await?; - // Mark the post as read let post_id = post_view.post.id; if let Some(person_id) = person_id { mark_post_as_read(person_id, post_id, &mut context.pool()).await?; + + update_read_comments( + person_id, + post_id, + post_view.counts.comments, + &mut context.pool(), + ) + .await?; } // Necessary for the sidebar subscribed let community_view = CommunityView::read( &mut context.pool(), community_id, - person_id, + local_user.as_ref(), is_mod_or_admin, ) - .await - .with_lemmy_type(LemmyErrorType::CouldntFindCommunity)?; - - // Insert into PersonPostAggregates - // to update the read_comments count - if let Some(person_id) = person_id { - let read_comments = post_view.counts.comments; - let person_post_agg_form = PersonPostAggregatesForm { - person_id, - post_id, - read_comments, - ..PersonPostAggregatesForm::default() - }; - PersonPostAggregates::upsert(&mut context.pool(), &person_post_agg_form) - .await - .with_lemmy_type(LemmyErrorType::CouldntFindPost)?; - } + .await?; let moderators = CommunityModeratorView::for_community(&mut context.pool(), community_id).await?; // Fetch the cross_posts let cross_posts = if let Some(url) = &post_view.post.url { let mut x_posts = PostQuery { - url_search: Some(url.inner().as_str().into()), + url_only: Some(true), + search_term: Some(url.inner().as_str().into()), + local_user: local_user.as_ref(), ..Default::default() } - .list(&mut context.pool()) + .list(&local_site.site, &mut context.pool()) .await?; // Don't return this post as one of the cross_posts diff --git a/crates/api_crud/src/post/remove.rs b/crates/api_crud/src/post/remove.rs index 2dd35d598..c53a4552c 
100644 --- a/crates/api_crud/src/post/remove.rs +++ b/crates/api_crud/src/post/remove.rs @@ -9,20 +9,22 @@ use lemmy_api_common::{ }; use lemmy_db_schema::{ source::{ + local_user::LocalUser, moderator::{ModRemovePost, ModRemovePostForm}, post::{Post, PostUpdateForm}, + post_report::PostReport, }, - traits::Crud, + traits::{Crud, Reportable}, }; use lemmy_db_views::structs::LocalUserView; -use lemmy_utils::error::LemmyError; +use lemmy_utils::error::LemmyResult; #[tracing::instrument(skip(context))] pub async fn remove_post( data: Json, context: Data, local_user_view: LocalUserView, -) -> Result, LemmyError> { +) -> LemmyResult> { let post_id = data.post_id; let orig_post = Post::read(&mut context.pool(), post_id).await?; @@ -34,6 +36,14 @@ pub async fn remove_post( ) .await?; + LocalUser::is_higher_mod_or_admin_check( + &mut context.pool(), + orig_post.community_id, + local_user_view.person.id, + vec![orig_post.creator_id], + ) + .await?; + // Update the post let post_id = data.post_id; let removed = data.removed; @@ -47,6 +57,9 @@ pub async fn remove_post( ) .await?; + PostReport::resolve_all_for_object(&mut context.pool(), post_id, local_user_view.person.id) + .await?; + // Mod tables let form = ModRemovePostForm { mod_person_id: local_user_view.person.id, @@ -57,16 +70,15 @@ pub async fn remove_post( ModRemovePost::create(&mut context.pool(), &form).await?; ActivityChannel::submit_activity( - SendActivityData::RemovePost(post, local_user_view.person.clone(), data.0), + SendActivityData::RemovePost { + post, + moderator: local_user_view.person.clone(), + reason: data.reason.clone(), + removed: data.removed, + }, &context, ) .await?; - build_post_response( - &context, - orig_post.community_id, - &local_user_view.person, - post_id, - ) - .await + build_post_response(&context, orig_post.community_id, local_user_view, post_id).await } diff --git a/crates/api_crud/src/post/update.rs b/crates/api_crud/src/post/update.rs index b17981c55..cef8bfea8 100644 --- a/crates/api_crud/src/post/update.rs +++ b/crates/api_crud/src/post/update.rs @@ -1,28 +1,41 @@ +use super::{convert_published_time, create::send_webmention}; use activitypub_federation::config::Data; use actix_web::web::Json; use lemmy_api_common::{ build_response::build_post_response, context::LemmyContext, post::{EditPost, PostResponse}, - request::fetch_site_data, - send_activity::{ActivityChannel, SendActivityData}, - utils::{check_community_user_action, local_site_to_slur_regex}, + request::generate_post_link_metadata, + send_activity::SendActivityData, + utils::{ + check_community_user_action, + get_url_blocklist, + local_site_to_slur_regex, + process_markdown_opt, + }, }; use lemmy_db_schema::{ source::{ actor_language::CommunityLanguage, + community::Community, local_site::LocalSite, post::{Post, PostUpdateForm}, }, traits::Crud, - utils::{diesel_option_overwrite, naive_now}, + utils::{diesel_string_update, diesel_url_update, naive_now}, }; use lemmy_db_views::structs::LocalUserView; use lemmy_utils::{ - error::{LemmyError, LemmyErrorExt, LemmyErrorType}, + error::{LemmyErrorExt, LemmyErrorType, LemmyResult}, utils::{ - slurs::check_slurs_opt, - validation::{check_url_scheme, clean_url_params, is_valid_body_field, is_valid_post_title}, + slurs::check_slurs, + validation::{ + is_url_blocked, + is_valid_alt_text_field, + is_valid_body_field, + is_valid_post_title, + is_valid_url, + }, }, }; use std::ops::Deref; @@ -32,25 +45,46 @@ pub async fn update_post( data: Json, context: Data, local_user_view: LocalUserView, -) -> Result, 
LemmyError> { +) -> LemmyResult> { let local_site = LocalSite::read(&mut context.pool()).await?; - let data_url = data.url.as_ref(); + let url = diesel_url_update(data.url.as_deref())?; - // TODO No good way to handle a clear. - // Issue link: https://github.com/LemmyNet/lemmy/issues/2287 - let url = Some(data_url.map(clean_url_params).map(Into::into)); + let custom_thumbnail = diesel_url_update(data.custom_thumbnail.as_deref())?; + + let url_blocklist = get_url_blocklist(&context).await?; let slur_regex = local_site_to_slur_regex(&local_site); - check_slurs_opt(&data.name, &slur_regex)?; - check_slurs_opt(&data.body, &slur_regex)?; + + let body = diesel_string_update( + process_markdown_opt(&data.body, &slur_regex, &url_blocklist, &context) + .await? + .as_deref(), + ); + + let alt_text = diesel_string_update(data.alt_text.as_deref()); if let Some(name) = &data.name { is_valid_post_title(name)?; + check_slurs(name, &slur_regex)?; } - is_valid_body_field(&data.body, true)?; - check_url_scheme(&data.url)?; + if let Some(Some(body)) = &body { + is_valid_body_field(body, true)?; + } + + if let Some(Some(alt_text)) = &alt_text { + is_valid_alt_text_field(alt_text)?; + } + + if let Some(Some(url)) = &url { + is_url_blocked(url, &url_blocklist)?; + is_valid_url(url)?; + } + + if let Some(Some(custom_thumbnail)) = &custom_thumbnail { + is_valid_url(custom_thumbnail)?; + } let post_id = data.post_id; let orig_post = Post::read(&mut context.pool(), post_id).await?; @@ -67,33 +101,39 @@ pub async fn update_post( Err(LemmyErrorType::NoPostEditAllowed)? } - // Fetch post links and Pictrs cached image - let data_url = data.url.as_ref(); - let (metadata_res, thumbnail_url) = - fetch_site_data(context.client(), context.settings(), data_url, true).await; - let (embed_title, embed_description, embed_video_url) = metadata_res - .map(|u| (Some(u.title), Some(u.description), Some(u.embed_video_url))) - .unwrap_or_default(); + if let Some(language_id) = data.language_id { + CommunityLanguage::is_allowed_community_language( + &mut context.pool(), + language_id, + orig_post.community_id, + ) + .await?; + } - let language_id = data.language_id; - CommunityLanguage::is_allowed_community_language( - &mut context.pool(), - language_id, - orig_post.community_id, - ) - .await?; + // handle changes to scheduled_publish_time + let scheduled_publish_time = match ( + orig_post.scheduled_publish_time, + data.scheduled_publish_time, + ) { + // schedule time can be changed if post is still scheduled (and not published yet) + (Some(_), Some(_)) => { + Some(convert_published_time(data.scheduled_publish_time, &local_user_view, &context).await?) 
+ } + // post was scheduled, gets changed to publish immediately + (Some(_), None) => Some(None), + // unchanged + (_, _) => None, + }; let post_form = PostUpdateForm { name: data.name.clone(), url, - body: diesel_option_overwrite(data.body.clone()), + body, + alt_text, nsfw: data.nsfw, - embed_title, - embed_description, - embed_video_url, language_id: data.language_id, - thumbnail_url: Some(thumbnail_url), updated: Some(Some(naive_now())), + scheduled_publish_time, ..Default::default() }; @@ -102,12 +142,41 @@ pub async fn update_post( .await .with_lemmy_type(LemmyErrorType::CouldntUpdatePost)?; - ActivityChannel::submit_activity(SendActivityData::UpdatePost(updated_post), &context).await?; + // send out federation/webmention if necessary + match ( + orig_post.scheduled_publish_time, + data.scheduled_publish_time, + ) { + // schedule was removed, send create activity and webmention + (Some(_), None) => { + let community = Community::read(&mut context.pool(), orig_post.community_id).await?; + send_webmention(updated_post.clone(), community); + generate_post_link_metadata( + updated_post.clone(), + custom_thumbnail.flatten().map(Into::into), + |post| Some(SendActivityData::CreatePost(post)), + context.reset_request_count(), + ) + .await?; + } + // post was already public, send update + (None, _) => { + generate_post_link_metadata( + updated_post.clone(), + custom_thumbnail.flatten().map(Into::into), + |post| Some(SendActivityData::UpdatePost(post)), + context.reset_request_count(), + ) + .await? + } + // schedule was changed, do nothing + (Some(_), Some(_)) => {} + }; build_post_response( context.deref(), orig_post.community_id, - &local_user_view.person, + local_user_view, post_id, ) .await diff --git a/crates/api_crud/src/private_message/create.rs b/crates/api_crud/src/private_message/create.rs index 3c94d6127..456ebefa2 100644 --- a/crates/api_crud/src/private_message/create.rs +++ b/crates/api_crud/src/private_message/create.rs @@ -5,26 +5,26 @@ use lemmy_api_common::{ private_message::{CreatePrivateMessage, PrivateMessageResponse}, send_activity::{ActivityChannel, SendActivityData}, utils::{ - check_person_block, check_private_messages_enabled, - generate_local_apub_endpoint, get_interface_language, + get_url_blocklist, local_site_to_slur_regex, + process_markdown, send_email_to_user, - EndpointType, }, }; use lemmy_db_schema::{ source::{ local_site::LocalSite, - private_message::{PrivateMessage, PrivateMessageInsertForm, PrivateMessageUpdateForm}, + person_block::PersonBlock, + private_message::{PrivateMessage, PrivateMessageInsertForm}, }, traits::Crud, }; use lemmy_db_views::structs::{LocalUserView, PrivateMessageView}; use lemmy_utils::{ - error::{LemmyError, LemmyErrorExt, LemmyErrorType}, - utils::{markdown::markdown_to_html, slurs::remove_slurs, validation::is_valid_body_field}, + error::{LemmyErrorExt, LemmyErrorType, LemmyResult}, + utils::{markdown::markdown_to_html, validation::is_valid_body_field}, }; #[tracing::instrument(skip(context))] @@ -32,16 +32,18 @@ pub async fn create_private_message( data: Json, context: Data, local_user_view: LocalUserView, -) -> Result, LemmyError> { +) -> LemmyResult> { let local_site = LocalSite::read(&mut context.pool()).await?; - let content = remove_slurs(&data.content, &local_site_to_slur_regex(&local_site)); - is_valid_body_field(&Some(content.clone()), false)?; + let slur_regex = local_site_to_slur_regex(&local_site); + let url_blocklist = get_url_blocklist(&context).await?; + let content = process_markdown(&data.content, &slur_regex, 
&url_blocklist, &context).await?; + is_valid_body_field(&content, false)?; - check_person_block( - local_user_view.person.id, - data.recipient_id, + PersonBlock::read( &mut context.pool(), + data.recipient_id, + local_user_view.person.id, ) .await?; @@ -55,34 +57,16 @@ pub async fn create_private_message( check_private_messages_enabled(&recipient_local_user)?; } - let private_message_form = PrivateMessageInsertForm::builder() - .content(content.clone()) - .creator_id(local_user_view.person.id) - .recipient_id(data.recipient_id) - .build(); + let private_message_form = PrivateMessageInsertForm::new( + local_user_view.person.id, + data.recipient_id, + content.clone(), + ); let inserted_private_message = PrivateMessage::create(&mut context.pool(), &private_message_form) .await .with_lemmy_type(LemmyErrorType::CouldntCreatePrivateMessage)?; - let inserted_private_message_id = inserted_private_message.id; - let protocol_and_hostname = context.settings().get_protocol_and_hostname(); - let apub_id = generate_local_apub_endpoint( - EndpointType::PrivateMessage, - &inserted_private_message_id.to_string(), - &protocol_and_hostname, - )?; - PrivateMessage::update( - &mut context.pool(), - inserted_private_message.id, - &PrivateMessageUpdateForm { - ap_id: Some(apub_id), - ..Default::default() - }, - ) - .await - .with_lemmy_type(LemmyErrorType::CouldntCreatePrivateMessage)?; - let view = PrivateMessageView::read(&mut context.pool(), inserted_private_message.id).await?; // Send email to the local recipient, if one exists diff --git a/crates/api_crud/src/private_message/delete.rs b/crates/api_crud/src/private_message/delete.rs index ef0864d70..936ff57b8 100644 --- a/crates/api_crud/src/private_message/delete.rs +++ b/crates/api_crud/src/private_message/delete.rs @@ -10,14 +10,14 @@ use lemmy_db_schema::{ traits::Crud, }; use lemmy_db_views::structs::{LocalUserView, PrivateMessageView}; -use lemmy_utils::error::{LemmyError, LemmyErrorExt, LemmyErrorType}; +use lemmy_utils::error::{LemmyErrorExt, LemmyErrorType, LemmyResult}; #[tracing::instrument(skip(context))] pub async fn delete_private_message( data: Json, context: Data, local_user_view: LocalUserView, -) -> Result, LemmyError> { +) -> LemmyResult> { // Checking permissions let private_message_id = data.private_message_id; let orig_private_message = PrivateMessage::read(&mut context.pool(), private_message_id).await?; diff --git a/crates/api_crud/src/private_message/read.rs b/crates/api_crud/src/private_message/read.rs index 933d410f1..7558b97fc 100644 --- a/crates/api_crud/src/private_message/read.rs +++ b/crates/api_crud/src/private_message/read.rs @@ -4,21 +4,21 @@ use lemmy_api_common::{ private_message::{GetPrivateMessages, PrivateMessagesResponse}, }; use lemmy_db_views::{private_message_view::PrivateMessageQuery, structs::LocalUserView}; -use lemmy_utils::error::LemmyError; +use lemmy_utils::error::LemmyResult; #[tracing::instrument(skip(context))] pub async fn get_private_message( data: Query, context: Data, local_user_view: LocalUserView, -) -> Result, LemmyError> { +) -> LemmyResult> { let person_id = local_user_view.person.id; let page = data.page; let limit = data.limit; let unread_only = data.unread_only.unwrap_or_default(); let creator_id = data.creator_id; - let mut messages = PrivateMessageQuery { + let messages = PrivateMessageQuery { page, limit, unread_only, @@ -27,14 +27,6 @@ pub async fn get_private_message( .list(&mut context.pool(), person_id) .await?; - // Messages sent by ourselves should be marked as read. 
The `read` column in database is only - // for the recipient, and shouldnt be exposed to sender. - messages.iter_mut().for_each(|pmv| { - if pmv.creator.id == person_id { - pmv.private_message.read = true - } - }); - Ok(Json(PrivateMessagesResponse { private_messages: messages, })) diff --git a/crates/api_crud/src/private_message/update.rs b/crates/api_crud/src/private_message/update.rs index 9e3b7c6b3..20eaadb36 100644 --- a/crates/api_crud/src/private_message/update.rs +++ b/crates/api_crud/src/private_message/update.rs @@ -4,7 +4,7 @@ use lemmy_api_common::{ context::LemmyContext, private_message::{EditPrivateMessage, PrivateMessageResponse}, send_activity::{ActivityChannel, SendActivityData}, - utils::local_site_to_slur_regex, + utils::{get_url_blocklist, local_site_to_slur_regex, process_markdown}, }; use lemmy_db_schema::{ source::{ @@ -16,8 +16,8 @@ use lemmy_db_schema::{ }; use lemmy_db_views::structs::{LocalUserView, PrivateMessageView}; use lemmy_utils::{ - error::{LemmyError, LemmyErrorExt, LemmyErrorType}, - utils::{slurs::remove_slurs, validation::is_valid_body_field}, + error::{LemmyErrorExt, LemmyErrorType, LemmyResult}, + utils::validation::is_valid_body_field, }; #[tracing::instrument(skip(context))] @@ -25,7 +25,7 @@ pub async fn update_private_message( data: Json, context: Data, local_user_view: LocalUserView, -) -> Result, LemmyError> { +) -> LemmyResult> { let local_site = LocalSite::read(&mut context.pool()).await?; // Checking permissions @@ -36,8 +36,10 @@ pub async fn update_private_message( } // Doing the update - let content = remove_slurs(&data.content, &local_site_to_slur_regex(&local_site)); - is_valid_body_field(&Some(content.clone()), false)?; + let slur_regex = local_site_to_slur_regex(&local_site); + let url_blocklist = get_url_blocklist(&context).await?; + let content = process_markdown(&data.content, &slur_regex, &url_blocklist, &context).await?; + is_valid_body_field(&content, false)?; let private_message_id = data.private_message_id; PrivateMessage::update( diff --git a/crates/api_crud/src/site/create.rs b/crates/api_crud/src/site/create.rs index 1449f4844..e1ea1d992 100644 --- a/crates/api_crud/src/site/create.rs +++ b/crates/api_crud/src/site/create.rs @@ -1,10 +1,19 @@ +use super::not_zero; use crate::site::{application_question_check, site_default_post_listing_type_check}; -use activitypub_federation::http_signatures::generate_actor_keypair; -use actix_web::web::{Data, Json}; +use activitypub_federation::{config::Data, http_signatures::generate_actor_keypair}; +use actix_web::web::Json; use lemmy_api_common::{ context::LemmyContext, site::{CreateSite, SiteResponse}, - utils::{generate_site_inbox_url, is_admin, local_site_rate_limit_to_rate_limit_config}, + utils::{ + generate_inbox_url, + get_url_blocklist, + is_admin, + local_site_rate_limit_to_rate_limit_config, + local_site_to_slur_regex, + process_markdown_opt, + proxy_image_link_api, + }, }; use lemmy_db_schema::{ newtypes::DbUrl, @@ -12,22 +21,21 @@ use lemmy_db_schema::{ local_site::{LocalSite, LocalSiteUpdateForm}, local_site_rate_limit::{LocalSiteRateLimit, LocalSiteRateLimitUpdateForm}, site::{Site, SiteUpdateForm}, - tagline::Tagline, }, traits::Crud, - utils::{diesel_option_overwrite, diesel_option_overwrite_to_url, naive_now}, + utils::{diesel_string_update, diesel_url_create, naive_now}, }; use lemmy_db_views::structs::{LocalUserView, SiteView}; use lemmy_utils::{ - error::{LemmyError, LemmyErrorType, LemmyResult}, + error::{LemmyErrorType, LemmyResult}, utils::{ - 
slurs::{check_slurs, check_slurs_opt}, + slurs::check_slurs, validation::{ build_and_check_regex, check_site_visibility_valid, is_valid_body_field, - site_description_length_check, site_name_length_check, + site_or_community_description_length_check, }, }, }; @@ -38,7 +46,7 @@ pub async fn create_site( data: Json, context: Data, local_user_view: LocalUserView, -) -> Result, LemmyError> { +) -> LemmyResult> { let local_site = LocalSite::read(&mut context.pool()).await?; // Make sure user is an admin; other types of users should not create site data... @@ -47,20 +55,31 @@ pub async fn create_site( validate_create_payload(&local_site, &data)?; let actor_id: DbUrl = Url::parse(&context.settings().get_protocol_and_hostname())?.into(); - let inbox_url = Some(generate_site_inbox_url(&actor_id)?); + let inbox_url = Some(generate_inbox_url()?); let keypair = generate_actor_keypair()?; + let slur_regex = local_site_to_slur_regex(&local_site); + let url_blocklist = get_url_blocklist(&context).await?; + let sidebar = process_markdown_opt(&data.sidebar, &slur_regex, &url_blocklist, &context).await?; + + let icon = diesel_url_create(data.icon.as_deref())?; + let icon = proxy_image_link_api(icon, &context).await?; + + let banner = diesel_url_create(data.banner.as_deref())?; + let banner = proxy_image_link_api(banner, &context).await?; + let site_form = SiteUpdateForm { name: Some(data.name.clone()), - sidebar: diesel_option_overwrite(data.sidebar.clone()), - description: diesel_option_overwrite(data.description.clone()), - icon: diesel_option_overwrite_to_url(&data.icon)?, - banner: diesel_option_overwrite_to_url(&data.banner)?, + sidebar: diesel_string_update(sidebar.as_deref()), + description: diesel_string_update(data.description.as_deref()), + icon: Some(icon), + banner: Some(banner), actor_id: Some(actor_id), last_refreshed_at: Some(naive_now()), inbox_url, private_key: Some(Some(keypair.private_key)), public_key: Some(keypair.public_key), + content_warning: diesel_string_update(data.content_warning.as_deref()), ..Default::default() }; @@ -71,24 +90,29 @@ pub async fn create_site( let local_site_form = LocalSiteUpdateForm { // Set the site setup to true site_setup: Some(true), - enable_downvotes: data.enable_downvotes, registration_mode: data.registration_mode, - enable_nsfw: data.enable_nsfw, community_creation_admin_only: data.community_creation_admin_only, require_email_verification: data.require_email_verification, - application_question: diesel_option_overwrite(data.application_question.clone()), + application_question: diesel_string_update(data.application_question.as_deref()), private_instance: data.private_instance, default_theme: data.default_theme.clone(), default_post_listing_type: data.default_post_listing_type, - legal_information: diesel_option_overwrite(data.legal_information.clone()), + default_post_sort_type: data.default_post_sort_type, + default_comment_sort_type: data.default_comment_sort_type, + legal_information: diesel_string_update(data.legal_information.as_deref()), application_email_admins: data.application_email_admins, hide_modlog_mod_names: data.hide_modlog_mod_names, updated: Some(Some(naive_now())), - slur_filter_regex: diesel_option_overwrite(data.slur_filter_regex.clone()), + slur_filter_regex: diesel_string_update(data.slur_filter_regex.as_deref()), actor_name_max_length: data.actor_name_max_length, federation_enabled: data.federation_enabled, captcha_enabled: data.captcha_enabled, captcha_difficulty: data.captcha_difficulty.clone(), + default_post_listing_mode: 
data.default_post_listing_mode, + post_upvotes: data.post_upvotes, + post_downvotes: data.post_downvotes, + comment_upvotes: data.comment_upvotes, + comment_downvotes: data.comment_downvotes, ..Default::default() }; @@ -96,17 +120,17 @@ pub async fn create_site( let local_site_rate_limit_form = LocalSiteRateLimitUpdateForm { message: data.rate_limit_message, - message_per_second: data.rate_limit_message_per_second, + message_per_second: not_zero(data.rate_limit_message_per_second), post: data.rate_limit_post, - post_per_second: data.rate_limit_post_per_second, + post_per_second: not_zero(data.rate_limit_post_per_second), register: data.rate_limit_register, - register_per_second: data.rate_limit_register_per_second, + register_per_second: not_zero(data.rate_limit_register_per_second), image: data.rate_limit_image, - image_per_second: data.rate_limit_image_per_second, + image_per_second: not_zero(data.rate_limit_image_per_second), comment: data.rate_limit_comment, - comment_per_second: data.rate_limit_comment_per_second, + comment_per_second: not_zero(data.rate_limit_comment_per_second), search: data.rate_limit_search, - search_per_second: data.rate_limit_search_per_second, + search_per_second: not_zero(data.rate_limit_search_per_second), ..Default::default() }; @@ -114,16 +138,13 @@ pub async fn create_site( let site_view = SiteView::read_local(&mut context.pool()).await?; - let new_taglines = data.taglines.clone(); - let taglines = Tagline::replace(&mut context.pool(), local_site.id, new_taglines).await?; - let rate_limit_config = local_site_rate_limit_to_rate_limit_config(&site_view.local_site_rate_limit); context.rate_limit_cell().set_config(rate_limit_config); Ok(Json(SiteResponse { site_view, - taglines, + taglines: vec![], })) } @@ -146,8 +167,8 @@ fn validate_create_payload(local_site: &LocalSite, create_site: &CreateSite) -> check_slurs(&create_site.name, &slur_regex)?; if let Some(desc) = &create_site.description { - site_description_length_check(desc)?; - check_slurs_opt(&create_site.description, &slur_regex)?; + site_or_community_description_length_check(desc)?; + check_slurs(desc, &slur_regex)?; } site_default_post_listing_type_check(&create_site.default_post_listing_type)?; @@ -160,7 +181,9 @@ fn validate_create_payload(local_site: &LocalSite, create_site: &CreateSite) -> )?; // Ensure that the sidebar has fewer than the max num characters... 
- is_valid_body_field(&create_site.sidebar, false)?; + if let Some(body) = &create_site.sidebar { + is_valid_body_field(body, false)?; + } application_question_check( &local_site.application_question, @@ -173,12 +196,15 @@ fn validate_create_payload(local_site: &LocalSite, create_site: &CreateSite) -> #[cfg(test)] mod tests { - #![allow(clippy::unwrap_used)] - #![allow(clippy::indexing_slicing)] use crate::site::create::validate_create_payload; use lemmy_api_common::site::CreateSite; - use lemmy_db_schema::{source::local_site::LocalSite, ListingType, RegistrationMode}; + use lemmy_db_schema::{ + source::local_site::LocalSite, + ListingType, + PostSortType, + RegistrationMode, + }; use lemmy_utils::error::LemmyErrorType; #[test] @@ -187,163 +213,114 @@ mod tests { ( "CreateSite attempted on set up LocalSite", LemmyErrorType::SiteAlreadyExists, - &generate_local_site( - true, - None::, - true, - false, - None::, - RegistrationMode::Open, - ), - &generate_create_site( - String::from("site_name"), - None::, - None::, - None::, - None::, - None::, - None::, - None::, - None::, - ), + &LocalSite { + site_setup: true, + private_instance: true, + federation_enabled: false, + registration_mode: RegistrationMode::Open, + ..Default::default() + }, + &CreateSite { + name: String::from("site_name"), + ..Default::default() + }, ), ( "CreateSite name matches LocalSite slur filter", LemmyErrorType::Slurs, - &generate_local_site( - false, - Some(String::from("(foo|bar)")), - true, - false, - None::, - RegistrationMode::Open, - ), - &generate_create_site( - String::from("foo site_name"), - None::, - None::, - None::, - None::, - None::, - None::, - None::, - None::, - ), + &LocalSite { + site_setup: false, + private_instance: true, + slur_filter_regex: Some(String::from("(foo|bar)")), + federation_enabled: false, + registration_mode: RegistrationMode::Open, + ..Default::default() + }, + &CreateSite { + name: String::from("foo site_name"), + ..Default::default() + }, ), ( "CreateSite name matches new slur filter", LemmyErrorType::Slurs, - &generate_local_site( - false, - Some(String::from("(foo|bar)")), - true, - false, - None::, - RegistrationMode::Open, - ), - &generate_create_site( - String::from("zeta site_name"), - None::, - None::, - None::, - Some(String::from("(zeta|alpha)")), - None::, - None::, - None::, - None::, - ), + &LocalSite { + site_setup: false, + private_instance: true, + slur_filter_regex: Some(String::from("(foo|bar)")), + federation_enabled: false, + registration_mode: RegistrationMode::Open, + ..Default::default() + }, + &CreateSite { + name: String::from("zeta site_name"), + slur_filter_regex: Some(String::from("(zeta|alpha)")), + ..Default::default() + }, ), ( "CreateSite listing type is Subscribed, which is invalid", LemmyErrorType::InvalidDefaultPostListingType, - &generate_local_site( - false, - None::, - true, - false, - None::, - RegistrationMode::Open, - ), - &generate_create_site( - String::from("site_name"), - None::, - None::, - Some(ListingType::Subscribed), - None::, - None::, - None::, - None::, - None::, - ), + &LocalSite { + site_setup: false, + private_instance: true, + federation_enabled: false, + registration_mode: RegistrationMode::Open, + ..Default::default() + }, + &CreateSite { + name: String::from("site_name"), + default_post_listing_type: Some(ListingType::Subscribed), + ..Default::default() + }, ), ( "CreateSite is both private and federated", LemmyErrorType::CantEnablePrivateInstanceAndFederationTogether, - &generate_local_site( - false, - None::, - true, - 
false, - None::, - RegistrationMode::Open, - ), - &generate_create_site( - String::from("site_name"), - None::, - None::, - None::, - None::, - Some(true), - Some(true), - None::, - None::, - ), + &LocalSite { + site_setup: false, + private_instance: true, + federation_enabled: false, + ..Default::default() + }, + &CreateSite { + name: String::from("site_name"), + private_instance: Some(true), + federation_enabled: Some(true), + ..Default::default() + }, ), ( "LocalSite is private, but CreateSite also makes it federated", LemmyErrorType::CantEnablePrivateInstanceAndFederationTogether, - &generate_local_site( - false, - None::, - true, - false, - None::, - RegistrationMode::Open, - ), - &generate_create_site( - String::from("site_name"), - None::, - None::, - None::, - None::, - None::, - Some(true), - None::, - None::, - ), + &LocalSite { + site_setup: false, + private_instance: true, + federation_enabled: false, + registration_mode: RegistrationMode::Open, + ..Default::default() + }, + &CreateSite { + name: String::from("site_name"), + federation_enabled: Some(true), + ..Default::default() + }, ), ( "CreateSite requires application, but neither it nor LocalSite has an application question", LemmyErrorType::ApplicationQuestionRequired, - &generate_local_site( - false, - None::, - true, - false, - None::, - RegistrationMode::Open, - ), - &generate_create_site( - String::from("site_name"), - None::, - None::, - None::, - None::, - None::, - None::, - None::, - Some(RegistrationMode::RequireApplication), - ), + &LocalSite { + site_setup: false, + private_instance: true, + federation_enabled: false, + registration_mode: RegistrationMode::Open, + ..Default::default() + }, + &CreateSite { + name: String::from("site_name"), + registration_mode: Some(RegistrationMode::RequireApplication), + ..Default::default() + }, ), ]; @@ -382,91 +359,72 @@ mod tests { let valid_payloads = [ ( "No changes between LocalSite and CreateSite", - &generate_local_site( - false, - None::, - true, - false, - None::, - RegistrationMode::Open, - ), - &generate_create_site( - String::from("site_name"), - None::, - None::, - None::, - None::, - None::, - None::, - None::, - None::, - ), + &LocalSite { + site_setup: false, + private_instance: true, + federation_enabled: false, + registration_mode: RegistrationMode::Open, + ..Default::default() + }, + &CreateSite { + name: String::from("site_name"), + ..Default::default() + }, ), ( "CreateSite allows clearing and changing values", - &generate_local_site( - false, - None::, - true, - false, - None::, - RegistrationMode::Open, - ), - &generate_create_site( - String::from("site_name"), - Some(String::new()), - Some(String::new()), - Some(ListingType::All), - Some(String::new()), - Some(false), - Some(true), - Some(String::new()), - Some(RegistrationMode::Open), - ), + &LocalSite { + site_setup: false, + private_instance: true, + federation_enabled: false, + registration_mode: RegistrationMode::Open, + ..Default::default() + }, + &CreateSite { + name: String::from("site_name"), + sidebar: Some(String::new()), + description: Some(String::new()), + application_question: Some(String::new()), + private_instance: Some(false), + default_post_listing_type: Some(ListingType::All), + default_post_sort_type: Some(PostSortType::Active), + slur_filter_regex: Some(String::new()), + federation_enabled: Some(true), + registration_mode: Some(RegistrationMode::Open), + ..Default::default() + }, ), ( "CreateSite clears existing slur filter regex", - &generate_local_site( - false, - 
Some(String::from("(foo|bar)")), - true, - false, - None::, - RegistrationMode::Open, - ), - &generate_create_site( - String::from("foo site_name"), - None::, - None::, - None::, - Some(String::new()), - None::, - None::, - None::, - None::, - ), + &LocalSite { + site_setup: false, + private_instance: true, + slur_filter_regex: Some(String::from("(foo|bar)")), + federation_enabled: false, + registration_mode: RegistrationMode::Open, + ..Default::default() + }, + &CreateSite { + name: String::from("foo site_name"), + slur_filter_regex: Some(String::new()), + ..Default::default() + }, ), ( "LocalSite has application question and CreateSite now requires applications,", - &generate_local_site( - false, - None::, - true, - false, - Some(String::from("question")), - RegistrationMode::Open, - ), - &generate_create_site( - String::from("site_name"), - None::, - None::, - None::, - None::, - None::, - None::, - None::, - Some(RegistrationMode::RequireApplication), - ), + &LocalSite { + site_setup: false, + application_question: Some(String::from("question")), + private_instance: true, + federation_enabled: false, + registration_mode: RegistrationMode::Open, + ..Default::default() + }, + &CreateSite { + name: String::from("site_name"), + registration_mode: Some(RegistrationMode::RequireApplication), + ..Default::default() + }, ), ]; @@ -482,96 +440,4 @@ mod tests { ); }) } - - fn generate_local_site( - site_setup: bool, - site_slur_filter_regex: Option, - site_is_private: bool, - site_is_federated: bool, - site_application_question: Option, - site_registration_mode: RegistrationMode, - ) -> LocalSite { - LocalSite { - id: Default::default(), - site_id: Default::default(), - site_setup, - enable_downvotes: false, - enable_nsfw: false, - community_creation_admin_only: false, - require_email_verification: false, - application_question: site_application_question, - private_instance: site_is_private, - default_theme: String::new(), - default_post_listing_type: ListingType::All, - legal_information: None, - hide_modlog_mod_names: false, - application_email_admins: false, - slur_filter_regex: site_slur_filter_regex, - actor_name_max_length: 0, - federation_enabled: site_is_federated, - captcha_enabled: false, - captcha_difficulty: String::new(), - published: Default::default(), - updated: None, - registration_mode: site_registration_mode, - reports_email_admins: false, - } - } - - // Allow the test helper function to have too many arguments. - // It's either this or generate the entire struct each time for testing. 
- #[allow(clippy::too_many_arguments)] - fn generate_create_site( - site_name: String, - site_description: Option, - site_sidebar: Option, - site_listing_type: Option, - site_slur_filter_regex: Option, - site_is_private: Option, - site_is_federated: Option, - site_application_question: Option, - site_registration_mode: Option, - ) -> CreateSite { - CreateSite { - name: site_name, - sidebar: site_sidebar, - description: site_description, - icon: None, - banner: None, - enable_downvotes: None, - enable_nsfw: None, - community_creation_admin_only: None, - require_email_verification: None, - application_question: site_application_question, - private_instance: site_is_private, - default_theme: None, - default_post_listing_type: site_listing_type, - legal_information: None, - application_email_admins: None, - hide_modlog_mod_names: None, - discussion_languages: None, - slur_filter_regex: site_slur_filter_regex, - actor_name_max_length: None, - rate_limit_message: None, - rate_limit_message_per_second: None, - rate_limit_post: None, - rate_limit_post_per_second: None, - rate_limit_register: None, - rate_limit_register_per_second: None, - rate_limit_image: None, - rate_limit_image_per_second: None, - rate_limit_comment: None, - rate_limit_comment_per_second: None, - rate_limit_search: None, - rate_limit_search_per_second: None, - federation_enabled: site_is_federated, - federation_debug: None, - captcha_enabled: None, - captcha_difficulty: None, - allowed_instances: None, - blocked_instances: None, - taglines: None, - registration_mode: site_registration_mode, - } - } } diff --git a/crates/api_crud/src/site/mod.rs b/crates/api_crud/src/site/mod.rs index e4911ba48..48b819c38 100644 --- a/crates/api_crud/src/site/mod.rs +++ b/crates/api_crud/src/site/mod.rs @@ -40,12 +40,17 @@ pub fn application_question_check( } } +fn not_zero(val: Option) -> Option { + match val { + Some(0) => None, + v => v, + } +} + #[cfg(test)] mod tests { - #![allow(clippy::unwrap_used)] - #![allow(clippy::indexing_slicing)] - use crate::site::{application_question_check, site_default_post_listing_type_check}; + use crate::site::{application_question_check, not_zero, site_default_post_listing_type_check}; use lemmy_db_schema::{ListingType, RegistrationMode}; #[test] @@ -93,4 +98,11 @@ mod tests { RegistrationMode::RequireApplication ); } + + #[test] + fn test_not_zero() { + assert_eq!(None, not_zero(None)); + assert_eq!(None, not_zero(Some(0))); + assert_eq!(Some(5), not_zero(Some(5))); + } } diff --git a/crates/api_crud/src/site/read.rs b/crates/api_crud/src/site/read.rs index aceee29d4..47fd1f154 100644 --- a/crates/api_crud/src/site/read.rs +++ b/crates/api_crud/src/site/read.rs @@ -5,65 +5,92 @@ use lemmy_api_common::{ }; use lemmy_db_schema::source::{ actor_language::{LocalUserLanguage, SiteLanguage}, + community_block::CommunityBlock, + instance_block::InstanceBlock, language::Language, + local_site_url_blocklist::LocalSiteUrlBlocklist, + oauth_provider::OAuthProvider, + person_block::PersonBlock, tagline::Tagline, }; -use lemmy_db_views::structs::{CustomEmojiView, LocalUserView, SiteView}; -use lemmy_db_views_actor::structs::{ - CommunityBlockView, - CommunityFollowerView, - CommunityModeratorView, - InstanceBlockView, - PersonBlockView, - PersonView, -}; +use lemmy_db_views::structs::{LocalUserView, SiteView}; +use lemmy_db_views_actor::structs::{CommunityFollowerView, CommunityModeratorView, PersonView}; use lemmy_utils::{ - error::{LemmyError, LemmyErrorExt, LemmyErrorType}, - version, + error::{LemmyError, 
LemmyErrorExt, LemmyErrorType, LemmyResult}, + CACHE_DURATION_API, + VERSION, }; +use moka::future::Cache; +use std::sync::LazyLock; #[tracing::instrument(skip(context))] pub async fn get_site( local_user_view: Option, context: Data, -) -> Result, LemmyError> { - let site_view = SiteView::read_local(&mut context.pool()).await?; +) -> LemmyResult> { + static CACHE: LazyLock> = LazyLock::new(|| { + Cache::builder() + .max_capacity(1) + .time_to_live(CACHE_DURATION_API) + .build() + }); - let admins = PersonView::admins(&mut context.pool()).await?; + // This data is independent from the user account so we can cache it across requests + let mut site_response = CACHE + .try_get_with::<_, LemmyError>((), async { + let site_view = SiteView::read_local(&mut context.pool()).await?; + let admins = PersonView::admins(&mut context.pool()).await?; + let all_languages = Language::read_all(&mut context.pool()).await?; + let discussion_languages = SiteLanguage::read_local_raw(&mut context.pool()).await?; + let blocked_urls = LocalSiteUrlBlocklist::get_all(&mut context.pool()).await?; + let tagline = Tagline::get_random(&mut context.pool()).await.ok(); + let admin_oauth_providers = OAuthProvider::get_all(&mut context.pool()).await?; + let oauth_providers = + OAuthProvider::convert_providers_to_public(admin_oauth_providers.clone()); - // Build the local user - let my_user = if let Some(local_user_view) = local_user_view { + Ok(GetSiteResponse { + site_view, + admins, + version: VERSION.to_string(), + my_user: None, + all_languages, + discussion_languages, + blocked_urls, + tagline, + oauth_providers: Some(oauth_providers), + admin_oauth_providers: Some(admin_oauth_providers), + taglines: vec![], + custom_emojis: vec![], + }) + }) + .await + .map_err(|e| anyhow::anyhow!("Failed to construct site response: {e}"))?; + + // Build the local user with parallel queries and add it to site response + site_response.my_user = if let Some(ref local_user_view) = local_user_view { let person_id = local_user_view.person.id; let local_user_id = local_user_view.local_user.id; + let pool = &mut context.pool(); - let follows = CommunityFollowerView::for_person(&mut context.pool(), person_id) - .await - .with_lemmy_type(LemmyErrorType::SystemErrLogin)?; - - let person_id = local_user_view.person.id; - let community_blocks = CommunityBlockView::for_person(&mut context.pool(), person_id) - .await - .with_lemmy_type(LemmyErrorType::SystemErrLogin)?; - - let instance_blocks = InstanceBlockView::for_person(&mut context.pool(), person_id) - .await - .with_lemmy_type(LemmyErrorType::SystemErrLogin)?; - - let person_id = local_user_view.person.id; - let person_blocks = PersonBlockView::for_person(&mut context.pool(), person_id) - .await - .with_lemmy_type(LemmyErrorType::SystemErrLogin)?; - - let moderates = CommunityModeratorView::for_person(&mut context.pool(), person_id) - .await - .with_lemmy_type(LemmyErrorType::SystemErrLogin)?; - - let discussion_languages = LocalUserLanguage::read(&mut context.pool(), local_user_id) - .await - .with_lemmy_type(LemmyErrorType::SystemErrLogin)?; + let ( + follows, + community_blocks, + instance_blocks, + person_blocks, + moderates, + discussion_languages, + ) = lemmy_db_schema::try_join_with_pool!(pool => ( + |pool| CommunityFollowerView::for_person(pool, person_id), + |pool| CommunityBlock::for_person(pool, person_id), + |pool| InstanceBlock::for_person(pool, person_id), + |pool| PersonBlock::for_person(pool, person_id), + |pool| CommunityModeratorView::for_person(pool, person_id, 
Some(&local_user_view.local_user)), + |pool| LocalUserLanguage::read(pool, local_user_id) + )) + .with_lemmy_type(LemmyErrorType::SystemErrLogin)?; Some(MyUserInfo { - local_user_view, + local_user_view: local_user_view.clone(), follows, moderates, community_blocks, @@ -75,20 +102,13 @@ pub async fn get_site( None }; - let all_languages = Language::read_all(&mut context.pool()).await?; - let discussion_languages = SiteLanguage::read_local_raw(&mut context.pool()).await?; - let taglines = Tagline::get_all(&mut context.pool(), site_view.local_site.id).await?; - let custom_emojis = - CustomEmojiView::get_all(&mut context.pool(), site_view.local_site.id).await?; + // filter oauth_providers for public access + if !local_user_view + .map(|l| l.local_user.admin) + .unwrap_or_default() + { + site_response.admin_oauth_providers = None; + } - Ok(Json(GetSiteResponse { - site_view, - admins, - version: version::VERSION.to_string(), - my_user, - all_languages, - discussion_languages, - taglines, - custom_emojis, - })) + Ok(Json(site_response)) } diff --git a/crates/api_crud/src/site/update.rs b/crates/api_crud/src/site/update.rs index b9d8f6a7f..085ed69d1 100644 --- a/crates/api_crud/src/site/update.rs +++ b/crates/api_crud/src/site/update.rs @@ -1,9 +1,19 @@ +use super::not_zero; use crate::site::{application_question_check, site_default_post_listing_type_check}; -use actix_web::web::{Data, Json}; +use activitypub_federation::config::Data; +use actix_web::web::Json; use lemmy_api_common::{ context::LemmyContext, + request::replace_image, site::{EditSite, SiteResponse}, - utils::{is_admin, local_site_rate_limit_to_rate_limit_config}, + utils::{ + get_url_blocklist, + is_admin, + local_site_rate_limit_to_rate_limit_config, + local_site_to_slur_regex, + process_markdown_opt, + proxy_image_link_opt_api, + }, }; use lemmy_db_schema::{ source::{ @@ -12,25 +22,26 @@ use lemmy_db_schema::{ federation_blocklist::FederationBlockList, local_site::{LocalSite, LocalSiteUpdateForm}, local_site_rate_limit::{LocalSiteRateLimit, LocalSiteRateLimitUpdateForm}, + local_site_url_blocklist::LocalSiteUrlBlocklist, local_user::LocalUser, site::{Site, SiteUpdateForm}, - tagline::Tagline, }, traits::Crud, - utils::{diesel_option_overwrite, diesel_option_overwrite_to_url, naive_now}, + utils::{diesel_string_update, diesel_url_update, naive_now}, RegistrationMode, }; use lemmy_db_views::structs::{LocalUserView, SiteView}; use lemmy_utils::{ - error::{LemmyError, LemmyErrorExt, LemmyErrorType, LemmyResult}, + error::{LemmyErrorExt, LemmyErrorType, LemmyResult}, utils::{ slurs::check_slurs_opt, validation::{ build_and_check_regex, check_site_visibility_valid, + check_urls_are_valid, is_valid_body_field, - site_description_length_check, site_name_length_check, + site_or_community_description_length_check, }, }, }; @@ -40,7 +51,7 @@ pub async fn update_site( data: Json, context: Data, local_user_view: LocalUserView, -) -> Result, LemmyError> { +) -> LemmyResult> { let site_view = SiteView::read_local(&mut context.pool()).await?; let local_site = site_view.local_site; let site = site_view.site; @@ -54,12 +65,29 @@ pub async fn update_site( SiteLanguage::update(&mut context.pool(), discussion_languages.clone(), &site).await?; } + let slur_regex = local_site_to_slur_regex(&local_site); + let url_blocklist = get_url_blocklist(&context).await?; + let sidebar = diesel_string_update( + process_markdown_opt(&data.sidebar, &slur_regex, &url_blocklist, &context) + .await? 
+ .as_deref(), + ); + + let icon = diesel_url_update(data.icon.as_deref())?; + replace_image(&icon, &site.icon, &context).await?; + let icon = proxy_image_link_opt_api(icon, &context).await?; + + let banner = diesel_url_update(data.banner.as_deref())?; + replace_image(&banner, &site.banner, &context).await?; + let banner = proxy_image_link_opt_api(banner, &context).await?; + let site_form = SiteUpdateForm { name: data.name.clone(), - sidebar: diesel_option_overwrite(data.sidebar.clone()), - description: diesel_option_overwrite(data.description.clone()), - icon: diesel_option_overwrite_to_url(&data.icon)?, - banner: diesel_option_overwrite_to_url(&data.banner)?, + sidebar, + description: diesel_string_update(data.description.as_deref()), + icon, + banner, + content_warning: diesel_string_update(data.content_warning.as_deref()), updated: Some(Some(naive_now())), ..Default::default() }; @@ -71,25 +99,31 @@ pub async fn update_site( .ok(); let local_site_form = LocalSiteUpdateForm { - enable_downvotes: data.enable_downvotes, registration_mode: data.registration_mode, - enable_nsfw: data.enable_nsfw, community_creation_admin_only: data.community_creation_admin_only, require_email_verification: data.require_email_verification, - application_question: diesel_option_overwrite(data.application_question.clone()), + application_question: diesel_string_update(data.application_question.as_deref()), private_instance: data.private_instance, default_theme: data.default_theme.clone(), default_post_listing_type: data.default_post_listing_type, - legal_information: diesel_option_overwrite(data.legal_information.clone()), + default_post_sort_type: data.default_post_sort_type, + default_comment_sort_type: data.default_comment_sort_type, + legal_information: diesel_string_update(data.legal_information.as_deref()), application_email_admins: data.application_email_admins, hide_modlog_mod_names: data.hide_modlog_mod_names, updated: Some(Some(naive_now())), - slur_filter_regex: diesel_option_overwrite(data.slur_filter_regex.clone()), + slur_filter_regex: diesel_string_update(data.slur_filter_regex.as_deref()), actor_name_max_length: data.actor_name_max_length, federation_enabled: data.federation_enabled, captcha_enabled: data.captcha_enabled, captcha_difficulty: data.captcha_difficulty.clone(), reports_email_admins: data.reports_email_admins, + default_post_listing_mode: data.default_post_listing_mode, + oauth_registration: data.oauth_registration, + post_upvotes: data.post_upvotes, + post_downvotes: data.post_downvotes, + comment_upvotes: data.comment_upvotes, + comment_downvotes: data.comment_downvotes, ..Default::default() }; @@ -99,17 +133,17 @@ pub async fn update_site( let local_site_rate_limit_form = LocalSiteRateLimitUpdateForm { message: data.rate_limit_message, - message_per_second: data.rate_limit_message_per_second, + message_per_second: not_zero(data.rate_limit_message_per_second), post: data.rate_limit_post, - post_per_second: data.rate_limit_post_per_second, + post_per_second: not_zero(data.rate_limit_post_per_second), register: data.rate_limit_register, - register_per_second: data.rate_limit_register_per_second, + register_per_second: not_zero(data.rate_limit_register_per_second), image: data.rate_limit_image, - image_per_second: data.rate_limit_image_per_second, + image_per_second: not_zero(data.rate_limit_image_per_second), comment: data.rate_limit_comment, - comment_per_second: data.rate_limit_comment_per_second, + comment_per_second: not_zero(data.rate_limit_comment_per_second), search: 
data.rate_limit_search, - search_per_second: data.rate_limit_search_per_second, + search_per_second: not_zero(data.rate_limit_search_per_second), ..Default::default() }; @@ -123,10 +157,16 @@ pub async fn update_site( let blocked = data.blocked_instances.clone(); FederationBlockList::replace(&mut context.pool(), blocked).await?; + if let Some(url_blocklist) = data.blocked_urls.clone() { + let parsed_urls = check_urls_are_valid(&url_blocklist)?; + LocalSiteUrlBlocklist::replace(&mut context.pool(), parsed_urls).await?; + } + // TODO can't think of a better way to do this. // If the server suddenly requires email verification, or required applications, no old users // will be able to log in. It really only wants this to be a requirement for NEW signups. - // So if it was set from false, to true, you need to update all current users columns to be verified. + // So if it was set from false, to true, you need to update all current users columns to be + // verified. let old_require_application = local_site.registration_mode == RegistrationMode::RequireApplication; @@ -150,9 +190,6 @@ pub async fn update_site( .with_lemmy_type(LemmyErrorType::CouldntSetAllEmailVerified)?; } - let new_taglines = data.taglines.clone(); - let taglines = Tagline::replace(&mut context.pool(), local_site.id, new_taglines).await?; - let site_view = SiteView::read_local(&mut context.pool()).await?; let rate_limit_config = @@ -161,7 +198,7 @@ pub async fn update_site( Ok(Json(SiteResponse { site_view, - taglines, + taglines: vec![], })) } @@ -182,7 +219,7 @@ fn validate_update_payload(local_site: &LocalSite, edit_site: &EditSite) -> Lemm } if let Some(desc) = &edit_site.description { - site_description_length_check(desc)?; + site_or_community_description_length_check(desc)?; check_slurs_opt(&edit_site.description, &slur_regex)?; } @@ -196,7 +233,9 @@ fn validate_update_payload(local_site: &LocalSite, edit_site: &EditSite) -> Lemm )?; // Ensure that the sidebar has fewer than the max num characters... 
- is_valid_body_field(&edit_site.sidebar, false)?; + if let Some(body) = &edit_site.sidebar { + is_valid_body_field(body, false)?; + } application_question_check( &local_site.application_question, @@ -209,12 +248,15 @@ fn validate_update_payload(local_site: &LocalSite, edit_site: &EditSite) -> Lemm #[cfg(test)] mod tests { - #![allow(clippy::unwrap_used)] - #![allow(clippy::indexing_slicing)] use crate::site::update::validate_update_payload; use lemmy_api_common::site::EditSite; - use lemmy_db_schema::{source::local_site::LocalSite, ListingType, RegistrationMode}; + use lemmy_db_schema::{ + source::local_site::LocalSite, + ListingType, + PostSortType, + RegistrationMode, + }; use lemmy_utils::error::LemmyErrorType; #[test] @@ -223,134 +265,94 @@ mod tests { ( "EditSite name matches LocalSite slur filter", LemmyErrorType::Slurs, - &generate_local_site( - Some(String::from("(foo|bar)")), - true, - false, - None::, - RegistrationMode::Open, - ), - &generate_edit_site( - Some(String::from("foo site_name")), - None::, - None::, - None::, - None::, - None::, - None::, - None::, - None::, - ), + &LocalSite { + private_instance: true, + slur_filter_regex: Some(String::from("(foo|bar)")), + federation_enabled: false, + registration_mode: RegistrationMode::Open, + ..Default::default() + }, + &EditSite { + name: Some(String::from("foo site_name")), + ..Default::default() + }, ), ( "EditSite name matches new slur filter", LemmyErrorType::Slurs, - &generate_local_site( - Some(String::from("(foo|bar)")), - true, - false, - None::, - RegistrationMode::Open, - ), - &generate_edit_site( - Some(String::from("zeta site_name")), - None::, - None::, - None::, - Some(String::from("(zeta|alpha)")), - None::, - None::, - None::, - None::, - ), + &LocalSite { + private_instance: true, + slur_filter_regex: Some(String::from("(foo|bar)")), + federation_enabled: false, + registration_mode: RegistrationMode::Open, + ..Default::default() + }, + &EditSite { + name: Some(String::from("zeta site_name")), + slur_filter_regex: Some(String::from("(zeta|alpha)")), + ..Default::default() + }, ), ( "EditSite listing type is Subscribed, which is invalid", LemmyErrorType::InvalidDefaultPostListingType, - &generate_local_site( - None::, - true, - false, - None::, - RegistrationMode::Open, - ), - &generate_edit_site( - Some(String::from("site_name")), - None::, - None::, - Some(ListingType::Subscribed), - None::, - None::, - None::, - None::, - None::, - ), + &LocalSite { + private_instance: true, + federation_enabled: false, + registration_mode: RegistrationMode::Open, + ..Default::default() + }, + &EditSite { + name: Some(String::from("site_name")), + default_post_listing_type: Some(ListingType::Subscribed), + ..Default::default() + }, ), ( "EditSite is both private and federated", LemmyErrorType::CantEnablePrivateInstanceAndFederationTogether, - &generate_local_site( - None::, - true, - false, - None::, - RegistrationMode::Open, - ), - &generate_edit_site( - Some(String::from("site_name")), - None::, - None::, - None::, - None::, - Some(true), - Some(true), - None::, - None::, - ), + &LocalSite { + private_instance: true, + federation_enabled: false, + registration_mode: RegistrationMode::Open, + ..Default::default() + }, + &EditSite { + name: Some(String::from("site_name")), + private_instance: Some(true), + federation_enabled: Some(true), + ..Default::default() + }, ), ( "LocalSite is private, but EditSite also makes it federated", LemmyErrorType::CantEnablePrivateInstanceAndFederationTogether, - &generate_local_site( - 
None::, - true, - false, - None::, - RegistrationMode::Open, - ), - &generate_edit_site( - Some(String::from("site_name")), - None::, - None::, - None::, - None::, - None::, - Some(true), - None::, - None::, - ), + &LocalSite { + private_instance: true, + federation_enabled: false, + registration_mode: RegistrationMode::Open, + ..Default::default() + }, + &EditSite { + name: Some(String::from("site_name")), + federation_enabled: Some(true), + ..Default::default() + }, ), ( "EditSite requires application, but neither it nor LocalSite has an application question", LemmyErrorType::ApplicationQuestionRequired, - &generate_local_site( - None::, - true, - false, - None::, - RegistrationMode::Open, - ), - &generate_edit_site( - Some(String::from("site_name")), - None::, - None::, - None::, - None::, - None::, - None::, - None::, - Some(RegistrationMode::RequireApplication), - ), + &LocalSite { + private_instance: true, + federation_enabled: false, + registration_mode: RegistrationMode::Open, + ..Default::default() + }, + &EditSite { + name: Some(String::from("site_name")), + registration_mode: Some(RegistrationMode::RequireApplication), + ..Default::default() + }, ), ]; @@ -386,87 +388,65 @@ mod tests { let valid_payloads = [ ( "No changes between LocalSite and EditSite", - &generate_local_site( - None::, - true, - false, - None::, - RegistrationMode::Open, - ), - &generate_edit_site( - None::, - None::, - None::, - None::, - None::, - None::, - None::, - None::, - None::, - ), + &LocalSite { + private_instance: true, + federation_enabled: false, + registration_mode: RegistrationMode::Open, + ..Default::default() + }, + &EditSite::default(), ), ( "EditSite allows clearing and changing values", - &generate_local_site( - None::, - true, - false, - None::, - RegistrationMode::Open, - ), - &generate_edit_site( - Some(String::from("site_name")), - Some(String::new()), - Some(String::new()), - Some(ListingType::All), - Some(String::new()), - Some(false), - Some(true), - Some(String::new()), - Some(RegistrationMode::Open), - ), + &LocalSite { + private_instance: true, + federation_enabled: false, + registration_mode: RegistrationMode::Open, + ..Default::default() + }, + &EditSite { + name: Some(String::from("site_name")), + sidebar: Some(String::new()), + description: Some(String::new()), + application_question: Some(String::new()), + private_instance: Some(false), + default_post_listing_type: Some(ListingType::All), + default_post_sort_type: Some(PostSortType::Active), + slur_filter_regex: Some(String::new()), + registration_mode: Some(RegistrationMode::Open), + federation_enabled: Some(true), + ..Default::default() + }, ), ( "EditSite name passes slur filter regex", - &generate_local_site( - Some(String::from("(foo|bar)")), - true, - false, - None::, - RegistrationMode::Open, - ), - &generate_edit_site( - Some(String::from("foo site_name")), - None::, - None::, - None::, - Some(String::new()), - None::, - None::, - None::, - None::, - ), + &LocalSite { + private_instance: true, + slur_filter_regex: Some(String::from("(foo|bar)")), + registration_mode: RegistrationMode::Open, + federation_enabled: false, + ..Default::default() + }, + &EditSite { + name: Some(String::from("foo site_name")), + slur_filter_regex: Some(String::new()), + ..Default::default() + }, ), ( "LocalSite has application question and EditSite now requires applications,", - &generate_local_site( - None::, - true, - false, - Some(String::from("question")), - RegistrationMode::Open, - ), - &generate_edit_site( - 
Some(String::from("site_name")), - None::, - None::, - None::, - None::, - None::, - None::, - None::, - Some(RegistrationMode::RequireApplication), - ), + &LocalSite { + application_question: Some(String::from("question")), + private_instance: true, + federation_enabled: false, + registration_mode: RegistrationMode::Open, + ..Default::default() + }, + &EditSite { + name: Some(String::from("site_name")), + registration_mode: Some(RegistrationMode::RequireApplication), + ..Default::default() + }, ), ]; @@ -482,96 +462,4 @@ mod tests { ); }) } - - fn generate_local_site( - site_slur_filter_regex: Option, - site_is_private: bool, - site_is_federated: bool, - site_application_question: Option, - site_registration_mode: RegistrationMode, - ) -> LocalSite { - LocalSite { - id: Default::default(), - site_id: Default::default(), - site_setup: true, - enable_downvotes: false, - enable_nsfw: false, - community_creation_admin_only: false, - require_email_verification: false, - application_question: site_application_question, - private_instance: site_is_private, - default_theme: String::new(), - default_post_listing_type: ListingType::All, - legal_information: None, - hide_modlog_mod_names: false, - application_email_admins: false, - slur_filter_regex: site_slur_filter_regex, - actor_name_max_length: 0, - federation_enabled: site_is_federated, - captcha_enabled: false, - captcha_difficulty: String::new(), - published: Default::default(), - updated: None, - registration_mode: site_registration_mode, - reports_email_admins: false, - } - } - - // Allow the test helper function to have too many arguments. - // It's either this or generate the entire struct each time for testing. - #[allow(clippy::too_many_arguments)] - fn generate_edit_site( - site_name: Option, - site_description: Option, - site_sidebar: Option, - site_listing_type: Option, - site_slur_filter_regex: Option, - site_is_private: Option, - site_is_federated: Option, - site_application_question: Option, - site_registration_mode: Option, - ) -> EditSite { - EditSite { - name: site_name, - sidebar: site_sidebar, - description: site_description, - icon: None, - banner: None, - enable_downvotes: None, - enable_nsfw: None, - community_creation_admin_only: None, - require_email_verification: None, - application_question: site_application_question, - private_instance: site_is_private, - default_theme: None, - default_post_listing_type: site_listing_type, - legal_information: None, - application_email_admins: None, - hide_modlog_mod_names: None, - discussion_languages: None, - slur_filter_regex: site_slur_filter_regex, - actor_name_max_length: None, - rate_limit_message: None, - rate_limit_message_per_second: None, - rate_limit_post: None, - rate_limit_post_per_second: None, - rate_limit_register: None, - rate_limit_register_per_second: None, - rate_limit_image: None, - rate_limit_image_per_second: None, - rate_limit_comment: None, - rate_limit_comment_per_second: None, - rate_limit_search: None, - rate_limit_search_per_second: None, - federation_enabled: site_is_federated, - federation_debug: None, - captcha_enabled: None, - captcha_difficulty: None, - allowed_instances: None, - blocked_instances: None, - taglines: None, - registration_mode: site_registration_mode, - reports_email_admins: None, - } - } } diff --git a/crates/api_crud/src/tagline/create.rs b/crates/api_crud/src/tagline/create.rs new file mode 100644 index 000000000..f67a26f68 --- /dev/null +++ b/crates/api_crud/src/tagline/create.rs @@ -0,0 +1,38 @@ +use 
activitypub_federation::config::Data; +use actix_web::web::Json; +use lemmy_api_common::{ + context::LemmyContext, + tagline::{CreateTagline, TaglineResponse}, + utils::{get_url_blocklist, is_admin, local_site_to_slur_regex, process_markdown}, +}; +use lemmy_db_schema::{ + source::{ + local_site::LocalSite, + tagline::{Tagline, TaglineInsertForm}, + }, + traits::Crud, +}; +use lemmy_db_views::structs::LocalUserView; +use lemmy_utils::error::LemmyError; + +#[tracing::instrument(skip(context))] +pub async fn create_tagline( + data: Json, + context: Data, + local_user_view: LocalUserView, +) -> Result, LemmyError> { + // Make sure user is an admin + is_admin(&local_user_view)?; + + let local_site = LocalSite::read(&mut context.pool()).await?; + + let slur_regex = local_site_to_slur_regex(&local_site); + let url_blocklist = get_url_blocklist(&context).await?; + let content = process_markdown(&data.content, &slur_regex, &url_blocklist, &context).await?; + + let tagline_form = TaglineInsertForm { content }; + + let tagline = Tagline::create(&mut context.pool(), &tagline_form).await?; + + Ok(Json(TaglineResponse { tagline })) +} diff --git a/crates/api_crud/src/tagline/delete.rs b/crates/api_crud/src/tagline/delete.rs new file mode 100644 index 000000000..9add3cfe6 --- /dev/null +++ b/crates/api_crud/src/tagline/delete.rs @@ -0,0 +1,25 @@ +use activitypub_federation::config::Data; +use actix_web::web::Json; +use lemmy_api_common::{ + context::LemmyContext, + tagline::DeleteTagline, + utils::is_admin, + SuccessResponse, +}; +use lemmy_db_schema::{source::tagline::Tagline, traits::Crud}; +use lemmy_db_views::structs::LocalUserView; +use lemmy_utils::error::LemmyError; + +#[tracing::instrument(skip(context))] +pub async fn delete_tagline( + data: Json, + context: Data, + local_user_view: LocalUserView, +) -> Result, LemmyError> { + // Make sure user is an admin + is_admin(&local_user_view)?; + + Tagline::delete(&mut context.pool(), data.id).await?; + + Ok(Json(SuccessResponse::default())) +} diff --git a/crates/api_crud/src/tagline/list.rs b/crates/api_crud/src/tagline/list.rs new file mode 100644 index 000000000..21929f547 --- /dev/null +++ b/crates/api_crud/src/tagline/list.rs @@ -0,0 +1,19 @@ +use actix_web::web::{Data, Json, Query}; +use lemmy_api_common::{ + context::LemmyContext, + tagline::{ListTaglines, ListTaglinesResponse}, +}; +use lemmy_db_schema::source::tagline::Tagline; +use lemmy_db_views::structs::LocalUserView; +use lemmy_utils::error::LemmyError; + +#[tracing::instrument(skip(context))] +pub async fn list_taglines( + data: Query, + local_user_view: Option, + context: Data, +) -> Result, LemmyError> { + let taglines = Tagline::list(&mut context.pool(), data.page, data.limit).await?; + + Ok(Json(ListTaglinesResponse { taglines })) +} diff --git a/crates/api_crud/src/tagline/mod.rs b/crates/api_crud/src/tagline/mod.rs new file mode 100644 index 000000000..ffd48daf6 --- /dev/null +++ b/crates/api_crud/src/tagline/mod.rs @@ -0,0 +1,4 @@ +pub mod create; +pub mod delete; +pub mod list; +pub mod update; diff --git a/crates/api_crud/src/tagline/update.rs b/crates/api_crud/src/tagline/update.rs new file mode 100644 index 000000000..043589d26 --- /dev/null +++ b/crates/api_crud/src/tagline/update.rs @@ -0,0 +1,42 @@ +use activitypub_federation::config::Data; +use actix_web::web::Json; +use lemmy_api_common::{ + context::LemmyContext, + tagline::{TaglineResponse, UpdateTagline}, + utils::{get_url_blocklist, is_admin, local_site_to_slur_regex, process_markdown}, +}; +use lemmy_db_schema::{ + 
source::{ + local_site::LocalSite, + tagline::{Tagline, TaglineUpdateForm}, + }, + traits::Crud, + utils::naive_now, +}; +use lemmy_db_views::structs::LocalUserView; +use lemmy_utils::error::LemmyError; + +#[tracing::instrument(skip(context))] +pub async fn update_tagline( + data: Json, + context: Data, + local_user_view: LocalUserView, +) -> Result, LemmyError> { + // Make sure user is an admin + is_admin(&local_user_view)?; + + let local_site = LocalSite::read(&mut context.pool()).await?; + + let slur_regex = local_site_to_slur_regex(&local_site); + let url_blocklist = get_url_blocklist(&context).await?; + let content = process_markdown(&data.content, &slur_regex, &url_blocklist, &context).await?; + + let tagline_form = TaglineUpdateForm { + content, + updated: naive_now(), + }; + + let tagline = Tagline::update(&mut context.pool(), data.id, &tagline_form).await?; + + Ok(Json(TaglineResponse { tagline })) +} diff --git a/crates/api_crud/src/user/create.rs b/crates/api_crud/src/user/create.rs index 4a326a3ac..ed560e3d6 100644 --- a/crates/api_crud/src/user/create.rs +++ b/crates/api_crud/src/user/create.rs @@ -1,14 +1,16 @@ use activitypub_federation::{config::Data, http_signatures::generate_actor_keypair}; -use actix_web::{http::StatusCode, web::Json, HttpRequest, HttpResponse, HttpResponseBuilder}; +use actix_web::{web::Json, HttpRequest}; use lemmy_api_common::{ claims::Claims, context::LemmyContext, + oauth_provider::AuthenticateWithOauth, person::{LoginResponse, Register}, utils::{ - create_login_cookie, + check_email_verified, + check_registration_application, + check_user_valid, generate_inbox_url, generate_local_apub_endpoint, - generate_shared_inbox_url, honeypot_check, local_site_to_slur_regex, password_length_check, @@ -19,9 +21,15 @@ use lemmy_api_common::{ }; use lemmy_db_schema::{ aggregates::structs::PersonAggregates, + newtypes::{InstanceId, OAuthProviderId}, source::{ captcha_answer::{CaptchaAnswer, CheckCaptchaAnswer}, + language::Language, + local_site::LocalSite, local_user::{LocalUser, LocalUserInsertForm}, + local_user_vote_display_mode::LocalUserVoteDisplayMode, + oauth_account::{OAuthAccount, OAuthAccountInsertForm}, + oauth_provider::OAuthProvider, person::{Person, PersonInsertForm}, registration_application::{RegistrationApplication, RegistrationApplicationInsertForm}, }, @@ -30,19 +38,32 @@ use lemmy_db_schema::{ }; use lemmy_db_views::structs::{LocalUserView, SiteView}; use lemmy_utils::{ - error::{LemmyError, LemmyErrorExt, LemmyErrorType}, + error::{LemmyError, LemmyErrorExt, LemmyErrorType, LemmyResult}, utils::{ slurs::{check_slurs, check_slurs_opt}, validation::is_valid_actor_name, }, }; +use serde::{Deserialize, Serialize}; +use serde_with::skip_serializing_none; +use std::collections::HashSet; + +#[skip_serializing_none] +#[derive(Debug, Serialize, Deserialize, Clone, Default)] +/// Response from OAuth token endpoint +struct TokenResponse { + pub access_token: String, + pub token_type: String, + pub expires_in: Option, + pub refresh_token: Option, + pub scope: Option, +} -#[tracing::instrument(skip(context))] pub async fn register( data: Json, req: HttpRequest, context: Data, -) -> Result { +) -> LemmyResult> { let site_view = SiteView::read_local(&mut context.pool()).await?; let local_site = site_view.local_site; let require_registration_application = @@ -59,8 +80,9 @@ pub async fn register( Err(LemmyErrorType::EmailRequired)? 
} - if local_site.site_setup && require_registration_application && data.answer.is_none() { - Err(LemmyErrorType::RegistrationApplicationAnswerRequired)? + // make sure the registration answer is provided when the registration application is required + if local_site.site_setup { + validate_registration_answer(require_registration_application, &data.answer)?; } // Make sure passwords match @@ -69,77 +91,61 @@ pub async fn register( } if local_site.site_setup && local_site.captcha_enabled { - if let Some(captcha_uuid) = &data.captcha_uuid { - let uuid = uuid::Uuid::parse_str(captcha_uuid)?; - let check = CaptchaAnswer::check_captcha( - &mut context.pool(), - CheckCaptchaAnswer { - uuid, - answer: data.captcha_answer.clone().unwrap_or_default(), - }, - ) - .await?; - if !check { - Err(LemmyErrorType::CaptchaIncorrect)? - } - } else { - Err(LemmyErrorType::CaptchaIncorrect)? - } + let uuid = uuid::Uuid::parse_str(&data.captcha_uuid.clone().unwrap_or_default())?; + CaptchaAnswer::check_captcha( + &mut context.pool(), + CheckCaptchaAnswer { + uuid, + answer: data.captcha_answer.clone().unwrap_or_default(), + }, + ) + .await?; } let slur_regex = local_site_to_slur_regex(&local_site); check_slurs(&data.username, &slur_regex)?; check_slurs_opt(&data.answer, &slur_regex)?; - let actor_keypair = generate_actor_keypair()?; - is_valid_actor_name(&data.username, local_site.actor_name_max_length as usize)?; - let actor_id = generate_local_apub_endpoint( - EndpointType::Person, - &data.username, - &context.settings().get_protocol_and_hostname(), - )?; + Person::check_username_taken(&mut context.pool(), &data.username).await?; if let Some(email) = &data.email { - if LocalUser::is_email_taken(&mut context.pool(), email).await? { - Err(LemmyErrorType::EmailAlreadyExists)? - } + LocalUser::check_is_email_taken(&mut context.pool(), email).await?; } // We have to create both a person, and local_user - - // Register the new person - let person_form = PersonInsertForm::builder() - .name(data.username.clone()) - .actor_id(Some(actor_id.clone())) - .private_key(Some(actor_keypair.private_key)) - .public_key(actor_keypair.public_key) - .inbox_url(Some(generate_inbox_url(&actor_id)?)) - .shared_inbox_url(Some(generate_shared_inbox_url(&actor_id)?)) - .instance_id(site_view.site.instance_id) - .build(); - - // insert the person - let inserted_person = Person::create(&mut context.pool(), &person_form) - .await - .with_lemmy_type(LemmyErrorType::UserAlreadyExists)?; + let inserted_person = create_person( + data.username.clone(), + &local_site, + site_view.site.instance_id, + &context, + ) + .await?; // Automatically set their application as accepted, if they created this with open registration. // Also fixes a bug which allows users to log in when registrations are changed to closed. 
let accepted_application = Some(!require_registration_application); - // Create the local user - let local_user_form = LocalUserInsertForm::builder() - .person_id(inserted_person.id) - .email(data.email.as_deref().map(str::to_lowercase)) - .password_encrypted(data.password.to_string()) - .show_nsfw(Some(data.show_nsfw)) - .accepted_application(accepted_application) - .default_listing_type(Some(local_site.default_post_listing_type)) - // If its the initial site setup, they are an admin - .admin(Some(!local_site.site_setup)) - .build(); + // Show nsfw content if param is true, or if content_warning exists + let show_nsfw = data + .show_nsfw + .unwrap_or(site_view.site.content_warning.is_some()); - let inserted_local_user = LocalUser::create(&mut context.pool(), &local_user_form).await?; + let language_tags = get_language_tags(&req); + + // Create the local user + let local_user_form = LocalUserInsertForm { + email: data.email.as_deref().map(str::to_lowercase), + show_nsfw: Some(show_nsfw), + accepted_application, + default_listing_type: Some(local_site.default_post_listing_type), + post_listing_mode: Some(local_site.default_post_listing_mode), + interface_language: language_tags.first().cloned(), + // If its the initial site setup, they are an admin + admin: Some(!local_site.site_setup), + ..LocalUserInsertForm::new(inserted_person.id, Some(data.password.to_string())) + }; + + let inserted_local_user = create_local_user(&context, language_tags, &local_user_form).await?; if local_site.site_setup && require_registration_application { // Create the registration application @@ -158,48 +164,415 @@ pub async fn register( .await?; } - let mut res = HttpResponseBuilder::new(StatusCode::OK); let mut login_response = LoginResponse { jwt: None, registration_created: false, verify_email_sent: false, }; - // Log the user in directly if the site is not setup, or email verification and application aren't required + // Log the user in directly if the site is not setup, or email verification and application aren't + // required if !local_site.site_setup || (!require_registration_application && !local_site.require_email_verification) { let jwt = Claims::generate(inserted_local_user.id, req, &context).await?; - res.cookie(create_login_cookie(jwt.clone())); login_response.jwt = Some(jwt); } else { - if local_site.require_email_verification { - let local_user_view = LocalUserView { - local_user: inserted_local_user, - person: inserted_person, - counts: PersonAggregates::default(), - }; - // we check at the beginning of this method that email is set - let email = local_user_view - .local_user - .email - .clone() - .expect("email was provided"); - - send_verification_email( - &local_user_view, - &email, - &mut context.pool(), - context.settings(), - ) - .await?; - login_response.verify_email_sent = true; - } + login_response.verify_email_sent = send_verification_email_if_required( + &context, + &local_site, + &inserted_local_user, + &inserted_person, + ) + .await?; if require_registration_application { login_response.registration_created = true; } } - Ok(res.json(login_response)) + Ok(Json(login_response)) +} + +#[tracing::instrument(skip(context))] +pub async fn authenticate_with_oauth( + data: Json, + req: HttpRequest, + context: Data, +) -> LemmyResult> { + let site_view = SiteView::read_local(&mut context.pool()).await?; + let local_site = site_view.local_site.clone(); + + // validate inputs + if data.oauth_provider_id == OAuthProviderId(0) || data.code.is_empty() || data.code.len() > 300 { + return 
Err(LemmyErrorType::OauthAuthorizationInvalid)?; + } + + // validate the redirect_uri + let redirect_uri = &data.redirect_uri; + if redirect_uri.host_str().unwrap_or("").is_empty() + || !redirect_uri.path().eq(&String::from("/oauth/callback")) + || !redirect_uri.query().unwrap_or("").is_empty() + { + Err(LemmyErrorType::OauthAuthorizationInvalid)? + } + + // Fetch the OAUTH provider and make sure it's enabled + let oauth_provider_id = data.oauth_provider_id; + let oauth_provider = OAuthProvider::read(&mut context.pool(), oauth_provider_id) + .await + .ok() + .ok_or(LemmyErrorType::OauthAuthorizationInvalid)?; + + if !oauth_provider.enabled { + return Err(LemmyErrorType::OauthAuthorizationInvalid)?; + } + + let token_response = + oauth_request_access_token(&context, &oauth_provider, &data.code, redirect_uri.as_str()) + .await?; + + let user_info = oidc_get_user_info( + &context, + &oauth_provider, + token_response.access_token.as_str(), + ) + .await?; + + let oauth_user_id = read_user_info(&user_info, oauth_provider.id_claim.as_str())?; + + let mut login_response = LoginResponse { + jwt: None, + registration_created: false, + verify_email_sent: false, + }; + + // Lookup user by oauth_user_id + let mut local_user_view = + LocalUserView::find_by_oauth_id(&mut context.pool(), oauth_provider.id, &oauth_user_id).await; + + let local_user: LocalUser; + if let Ok(user_view) = local_user_view { + // user found by oauth_user_id => Login user + local_user = user_view.clone().local_user; + + check_user_valid(&user_view.person)?; + check_email_verified(&user_view, &site_view)?; + check_registration_application(&user_view, &site_view.local_site, &mut context.pool()).await?; + } else { + // user has never previously registered using oauth + + // prevent registration if registration is closed + if local_site.registration_mode == RegistrationMode::Closed { + Err(LemmyErrorType::RegistrationClosed)? + } + + // prevent registration if registration is closed for OAUTH providers + if !local_site.oauth_registration { + return Err(LemmyErrorType::OauthRegistrationClosed)?; + } + + // Extract the OAUTH email claim from the returned user_info + let email = read_user_info(&user_info, "email")?; + + let require_registration_application = + local_site.registration_mode == RegistrationMode::RequireApplication; + + // Lookup user by OAUTH email and link accounts + local_user_view = LocalUserView::find_by_email(&mut context.pool(), &email).await; + + let person; + if let Ok(user_view) = local_user_view { + // user found by email => link and login if linking is allowed + + // we only allow linking by email when email_verification is required otherwise emails cannot + // be trusted + if oauth_provider.account_linking_enabled && site_view.local_site.require_email_verification { + // WARNING: + // If an admin switches the require_email_verification config from false to true, + // users who signed up before the switch could have accounts with unverified emails falsely + // marked as verified. 
+ + check_user_valid(&user_view.person)?; + check_email_verified(&user_view, &site_view)?; + check_registration_application(&user_view, &site_view.local_site, &mut context.pool()) + .await?; + + // Link with OAUTH => Login user + let oauth_account_form = + OAuthAccountInsertForm::new(user_view.local_user.id, oauth_provider.id, oauth_user_id); + + OAuthAccount::create(&mut context.pool(), &oauth_account_form) + .await + .map_err(|_| LemmyErrorType::OauthLoginFailed)?; + + local_user = user_view.local_user.clone(); + } else { + return Err(LemmyErrorType::EmailAlreadyExists)?; + } + } else { + // No user was found by email => Register as new user + + // make sure the registration answer is provided when the registration application is required + validate_registration_answer(require_registration_application, &data.answer)?; + + // make sure the username is provided + let username = data + .username + .as_ref() + .ok_or(LemmyErrorType::RegistrationUsernameRequired)?; + + let slur_regex = local_site_to_slur_regex(&local_site); + check_slurs(username, &slur_regex)?; + check_slurs_opt(&data.answer, &slur_regex)?; + + Person::check_username_taken(&mut context.pool(), username).await?; + + // We have to create a person, a local_user, and an oauth_account + person = create_person( + username.clone(), + &local_site, + site_view.site.instance_id, + &context, + ) + .await?; + + // Show nsfw content if param is true, or if content_warning exists + let show_nsfw = data + .show_nsfw + .unwrap_or(site_view.site.content_warning.is_some()); + + let language_tags = get_language_tags(&req); + + // Create the local user + let local_user_form = LocalUserInsertForm { + email: Some(str::to_lowercase(&email)), + show_nsfw: Some(show_nsfw), + accepted_application: Some(!require_registration_application), + email_verified: Some(oauth_provider.auto_verify_email), + post_listing_mode: Some(local_site.default_post_listing_mode), + interface_language: language_tags.first().cloned(), + // If its the initial site setup, they are an admin + admin: Some(!local_site.site_setup), + ..LocalUserInsertForm::new(person.id, None) + }; + + local_user = create_local_user(&context, language_tags, &local_user_form).await?; + + // Create the oauth account + let oauth_account_form = + OAuthAccountInsertForm::new(local_user.id, oauth_provider.id, oauth_user_id); + + OAuthAccount::create(&mut context.pool(), &oauth_account_form) + .await + .map_err(|_| LemmyErrorType::IncorrectLogin)?; + + // prevent sign in until application is accepted + if local_site.site_setup + && require_registration_application + && !local_user.accepted_application + && !local_user.admin + { + // Create the registration application + RegistrationApplication::create( + &mut context.pool(), + &RegistrationApplicationInsertForm { + local_user_id: local_user.id, + answer: data.answer.clone().expect("must have an answer"), + }, + ) + .await?; + + login_response.registration_created = true; + } + + // Check email is verified when required + login_response.verify_email_sent = + send_verification_email_if_required(&context, &local_site, &local_user, &person).await?; + } + } + + if !login_response.registration_created && !login_response.verify_email_sent { + let jwt = Claims::generate(local_user.id, req, &context).await?; + login_response.jwt = Some(jwt); + } + + return Ok(Json(login_response)); +} + +async fn create_person( + username: String, + local_site: &LocalSite, + instance_id: InstanceId, + context: &Data, +) -> Result { + let actor_keypair = 
generate_actor_keypair()?; + is_valid_actor_name(&username, local_site.actor_name_max_length as usize)?; + let actor_id = generate_local_apub_endpoint( + EndpointType::Person, + &username, + &context.settings().get_protocol_and_hostname(), + )?; + + // Register the new person + let person_form = PersonInsertForm { + actor_id: Some(actor_id.clone()), + inbox_url: Some(generate_inbox_url()?), + private_key: Some(actor_keypair.private_key), + ..PersonInsertForm::new(username.clone(), actor_keypair.public_key, instance_id) + }; + + // insert the person + let inserted_person = Person::create(&mut context.pool(), &person_form) + .await + .with_lemmy_type(LemmyErrorType::UserAlreadyExists)?; + + Ok(inserted_person) +} + +fn get_language_tags(req: &HttpRequest) -> Vec { + req + .headers() + .get("Accept-Language") + .map(|hdr| accept_language::parse(hdr.to_str().unwrap_or_default())) + .iter() + .flatten() + // Remove the optional region code + .map(|lang_str| lang_str.split('-').next().unwrap_or_default().to_string()) + .collect::>() +} + +async fn create_local_user( + context: &Data, + language_tags: Vec, + local_user_form: &LocalUserInsertForm, +) -> Result { + let all_languages = Language::read_all(&mut context.pool()).await?; + // use hashset to avoid duplicates + let mut language_ids = HashSet::new(); + for l in language_tags { + if let Some(found) = all_languages.iter().find(|all| all.code == l) { + language_ids.insert(found.id); + } + } + let language_ids = language_ids.into_iter().collect(); + + let inserted_local_user = + LocalUser::create(&mut context.pool(), local_user_form, language_ids).await?; + + Ok(inserted_local_user) +} + +async fn send_verification_email_if_required( + context: &Data, + local_site: &LocalSite, + local_user: &LocalUser, + person: &Person, +) -> LemmyResult { + let mut sent = false; + if !local_user.admin && local_site.require_email_verification && !local_user.email_verified { + let local_user_view = LocalUserView { + local_user: local_user.clone(), + local_user_vote_display_mode: LocalUserVoteDisplayMode::default(), + person: person.clone(), + counts: PersonAggregates::default(), + }; + + send_verification_email( + &local_user_view, + &local_user + .email + .clone() + .expect("invalid verification email"), + &mut context.pool(), + context.settings(), + ) + .await?; + + sent = true; + } + Ok(sent) +} + +fn validate_registration_answer( + require_registration_application: bool, + answer: &Option, +) -> LemmyResult<()> { + if require_registration_application && answer.is_none() { + Err(LemmyErrorType::RegistrationApplicationAnswerRequired)? 
+ } + + Ok(()) +} + +async fn oauth_request_access_token( + context: &Data, + oauth_provider: &OAuthProvider, + code: &str, + redirect_uri: &str, +) -> LemmyResult { + // Request an Access Token from the OAUTH provider + let response = context + .client() + .post(oauth_provider.token_endpoint.as_str()) + .header("Accept", "application/json") + .form(&[ + ("grant_type", "authorization_code"), + ("code", code), + ("redirect_uri", redirect_uri), + ("client_id", &oauth_provider.client_id), + ("client_secret", &oauth_provider.client_secret), + ]) + .send() + .await; + + let response = response.map_err(|_| LemmyErrorType::OauthLoginFailed)?; + if !response.status().is_success() { + Err(LemmyErrorType::OauthLoginFailed)?; + } + + // Extract the access token + let token_response = response + .json::() + .await + .map_err(|_| LemmyErrorType::OauthLoginFailed)?; + + Ok(token_response) +} + +async fn oidc_get_user_info( + context: &Data, + oauth_provider: &OAuthProvider, + access_token: &str, +) -> LemmyResult { + // Request the user info from the OAUTH provider + let response = context + .client() + .get(oauth_provider.userinfo_endpoint.as_str()) + .header("Accept", "application/json") + .bearer_auth(access_token) + .send() + .await; + + let response = response.map_err(|_| LemmyErrorType::OauthLoginFailed)?; + if !response.status().is_success() { + Err(LemmyErrorType::OauthLoginFailed)?; + } + + // Extract the OAUTH user_id claim from the returned user_info + let user_info = response + .json::() + .await + .map_err(|_| LemmyErrorType::OauthLoginFailed)?; + + Ok(user_info) +} + +fn read_user_info(user_info: &serde_json::Value, key: &str) -> LemmyResult { + if let Some(value) = user_info.get(key) { + let result = serde_json::from_value::(value.clone()) + .map_err(|_| LemmyErrorType::OauthLoginFailed)?; + return Ok(result); + } + Err(LemmyErrorType::OauthLoginFailed)? } diff --git a/crates/api_crud/src/user/delete.rs b/crates/api_crud/src/user/delete.rs index 363230d83..d1825425c 100644 --- a/crates/api_crud/src/user/delete.rs +++ b/crates/api_crud/src/user/delete.rs @@ -8,7 +8,11 @@ use lemmy_api_common::{ utils::purge_user_account, SuccessResponse, }; -use lemmy_db_schema::source::{login_token::LoginToken, person::Person}; +use lemmy_db_schema::source::{ + login_token::LoginToken, + oauth_account::OAuthAccount, + person::Person, +}; use lemmy_db_views::structs::LocalUserView; use lemmy_utils::error::{LemmyErrorType, LemmyResult}; @@ -19,11 +23,12 @@ pub async fn delete_account( local_user_view: LocalUserView, ) -> LemmyResult> { // Verify the password - let valid: bool = verify( - &data.password, - &local_user_view.local_user.password_encrypted, - ) - .unwrap_or(false); + let valid: bool = local_user_view + .local_user + .password_encrypted + .as_ref() + .and_then(|password_encrypted| verify(&data.password, password_encrypted).ok()) + .unwrap_or(false); if !valid { Err(LemmyErrorType::IncorrectLogin)? 
} @@ -31,6 +36,7 @@ pub async fn delete_account( if data.delete_content { purge_user_account(local_user_view.person.id, &context).await?; } else { + OAuthAccount::delete_user_accounts(&mut context.pool(), local_user_view.local_user.id).await?; Person::delete_account(&mut context.pool(), local_user_view.person.id).await?; } diff --git a/crates/apub/Cargo.toml b/crates/apub/Cargo.toml index 748fe3335..55eadeaf9 100644 --- a/crates/apub/Cargo.toml +++ b/crates/apub/Cargo.toml @@ -1,5 +1,6 @@ [package] name = "lemmy_apub" +publish = false version.workspace = true edition.workspace = true description.workspace = true @@ -13,8 +14,11 @@ name = "lemmy_apub" path = "src/lib.rs" doctest = false +[lints] +workspace = true + [dependencies] -lemmy_utils = { workspace = true } +lemmy_utils = { workspace = true, features = ["full"] } lemmy_db_schema = { workspace = true, features = ["full"] } lemmy_db_views = { workspace = true, features = ["full"] } lemmy_db_views_actor = { workspace = true, features = ["full"] } @@ -27,25 +31,22 @@ serde = { workspace = true } actix-web = { workspace = true } tokio = { workspace = true } tracing = { workspace = true } -strum_macros = { workspace = true } +strum = { workspace = true } url = { workspace = true } -http = { workspace = true } futures = { workspace = true } itertools = { workspace = true } uuid = { workspace = true } async-trait = { workspace = true } anyhow = { workspace = true } reqwest = { workspace = true } -once_cell = { workspace = true } +moka.workspace = true +serde_with.workspace = true html2md = "0.2.14" -html2text = "0.6.0" +html2text = "0.12.5" stringreader = "0.1.1" -serde_with = { workspace = true } enum_delegate = "0.2.0" -moka = { version = "0.11", features = ["future"] } [dev-dependencies] serial_test = { workspace = true } -reqwest-middleware = { workspace = true } -task-local-extensions = "0.1.4" assert-json-diff = "2.0.2" +pretty_assertions = { workspace = true } diff --git a/crates/apub/assets/discourse/objects/group.json b/crates/apub/assets/discourse/objects/group.json new file mode 100644 index 000000000..5ed2c8c05 --- /dev/null +++ b/crates/apub/assets/discourse/objects/group.json @@ -0,0 +1,22 @@ +{ + "id": "https://socialhub.activitypub.rocks/ap/actor/797217cf18c0e819dfafc52425590146", + "type": "Group", + "updated": "2024-04-05T12:49:51Z", + "url": "https://socialhub.activitypub.rocks/c/meeting/threadiverse-wg/88", + "name": "Threadiverse Working Group (SocialHub)", + "inbox": "https://socialhub.activitypub.rocks/ap/actor/797217cf18c0e819dfafc52425590146/inbox", + "outbox": "https://socialhub.activitypub.rocks/ap/actor/797217cf18c0e819dfafc52425590146/outbox", + "followers": "https://socialhub.activitypub.rocks/ap/actor/797217cf18c0e819dfafc52425590146/followers", + "preferredUsername": "threadiverse-wg", + "publicKey": { + "id": "https://socialhub.activitypub.rocks/ap/actor/797217cf18c0e819dfafc52425590146#main-key", + "owner": "https://socialhub.activitypub.rocks/ap/actor/797217cf18c0e819dfafc52425590146", + "publicKeyPem": "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEApJi4iAcW6bPiHVCxT9p0\n8DVnrDDO4QtLNy7bpRFdMFifmmmXprsuAi9D2MSwbhH49V54HtIkxBpKd2IR/UD8\nmhMDY4CNI9FHpjqLw0wtkzxcqF9urSqhn0/vWX+9oxyhIgQS5KMiIkYDMJiAc691\niEcZ8LCran23xIGl6Dk54Nr3TqTMLcjDhzQYUJbxMrLq5/knWqOKG3IF5OxK+9ZZ\n1wxDF872eJTxJLkmpag+WYNtHzvB2SGTp8j5IF1/pZ9J1c3cpYfaeolTch/B/GQn\najCB4l27U52rIIObxJqFXSY8wHyd0aAmNmxzPZ7cduRlBDhmI40cAmnCV1YQPvpk\nDwIDAQAB\n-----END PUBLIC KEY-----\n" + }, + "icon": { + "type": "Image", + 
"mediaType": "image/png", + "url": "https://socialhub.activitypub.rocks/uploads/default/original/1X/8faac84234dc73d074dadaa2bcf24dc746b8647f.png" + }, + "@context": "https://www.w3.org/ns/activitystreams" +} diff --git a/crates/apub/assets/discourse/objects/page.json b/crates/apub/assets/discourse/objects/page.json new file mode 100644 index 000000000..c020ce9f5 --- /dev/null +++ b/crates/apub/assets/discourse/objects/page.json @@ -0,0 +1,13 @@ +{ + "id": "https://socialhub.activitypub.rocks/ap/object/1899f65c062200daec50a4c89ed76dc9", + "type": "Note", + "audience": "https://socialhub.activitypub.rocks/ap/actor/797217cf18c0e819dfafc52425590146", + "published": "2024-04-13T14:36:19Z", + "updated": "2024-04-13T14:36:19Z", + "url": "https://socialhub.activitypub.rocks/t/our-next-meeting/4079/1", + "attributedTo": "https://socialhub.activitypub.rocks/ap/actor/495843076e9e469fbd35ccf467ae9fb1", + "name": "Our next meeting", + "context": "https://socialhub.activitypub.rocks/ap/collection/8850f6e85b57c490da915a5dfbbd5045", + "content": "

Last Meeting
Recording
https://us06web.zoom.us/rec/share/4hGBTvgXJPlu8UkjkkxVARypNg5DH0eeaKlIBv71D4G3lokYyrCrg7cqBCJmL109.FsHYTZDlVvZXrgcn?startTime=1712254114000
Passcode: z+1*4pUB
Minutes
To refresh your memory, you can read the minutes of last week's meeting
Lounge and recreation facility
Welcome to the Enterprise!.
\n", + "summary": "A description of ten forward.", + "content": "

Lounge and recreation facility
Welcome to the Enterprise!.
\n", "source": { - "content": "Lounge and recreation facility\n\n---\n\nWelcome to the [Enterprise](https://memory-alpha.fandom.com/wiki/USS_Enterprise_(NCC-1701-D))!.", + "content": "Lounge and recreation facility\n\n---\n\nWelcome to the Enterprise!", "mediaType": "text/markdown" }, + "mediaType": "text/html", "sensitive": false, "icon": { "type": "Image", diff --git a/crates/apub/assets/lemmy/objects/instance.json b/crates/apub/assets/lemmy/objects/instance.json index 1e07043d9..92053bac6 100644 --- a/crates/apub/assets/lemmy/objects/instance.json +++ b/crates/apub/assets/lemmy/objects/instance.json @@ -2,6 +2,7 @@ "type": "Application", "id": "https://enterprise.lemmy.ml/", "name": "Enterprise", + "preferredUsername": "enterprise.lemmy.ml", "summary": "A test instance", "content": "

Enterprise sidebar
\\n", "mediaType": "text/html", diff --git a/crates/apub/assets/lemmy/objects/page.json b/crates/apub/assets/lemmy/objects/page.json index 6b536dd90..20af5dfd2 100644 --- a/crates/apub/assets/lemmy/objects/page.json +++ b/crates/apub/assets/lemmy/objects/page.json @@ -25,7 +25,6 @@ "url": "https://enterprise.lemmy.ml/pictrs/image/eOtYb9iEiB.png" }, "sensitive": false, - "commentsEnabled": true, "language": { "identifier": "fr", "name": "Français" diff --git a/crates/apub/assets/mastodon/activities/flag.json b/crates/apub/assets/mastodon/activities/flag.json new file mode 100644 index 000000000..83f4c0817 --- /dev/null +++ b/crates/apub/assets/mastodon/activities/flag.json @@ -0,0 +1,13 @@ +{ + "@context": "https://www.w3.org/ns/activitystreams", + "id": "https://mastodon.example/ccb4f39a-506a-490e-9a8c-71831c7713a4", + "type": "Flag", + "actor": "https://mastodon.example/actor", + "content": "Please take a look at this user and their posts", + "object": [ + "https://example.com/users/1", + "https://example.com/posts/380590", + "https://example.com/posts/380591" + ], + "to": "https://example.com/users/1" +} diff --git a/crates/apub/assets/mastodon/objects/note.json b/crates/apub/assets/mastodon/objects/note_1.json similarity index 100% rename from crates/apub/assets/mastodon/objects/note.json rename to crates/apub/assets/mastodon/objects/note_1.json diff --git a/crates/apub/assets/mastodon/objects/note_2.json b/crates/apub/assets/mastodon/objects/note_2.json new file mode 100644 index 000000000..b8c22b976 --- /dev/null +++ b/crates/apub/assets/mastodon/objects/note_2.json @@ -0,0 +1,79 @@ +{ + "@context": [ + "https://www.w3.org/ns/activitystreams", + { + "ostatus": "http://ostatus.org#", + "atomUri": "ostatus:atomUri", + "inReplyToAtomUri": "ostatus:inReplyToAtomUri", + "conversation": "ostatus:conversation", + "sensitive": "as:sensitive", + "toot": "http://joinmastodon.org/ns#", + "votersCount": "toot:votersCount", + "blurhash": "toot:blurhash", + "focalPoint": { + "@container": "@list", + "@id": "toot:focalPoint" + } + } + ], + "id": "https://floss.social/users/kde/statuses/113306831140126616", + "type": "Note", + "summary": null, + "inReplyTo": "https://floss.social/users/kde/statuses/113306824627995724", + "published": "2024-10-14T16:57:15Z", + "url": "https://floss.social/@kde/113306831140126616", + "attributedTo": "https://floss.social/users/kde", + "to": ["https://www.w3.org/ns/activitystreams#Public"], + "cc": [ + "https://floss.social/users/kde/followers", + "https://lemmy.kde.social/c/kde", + "https://lemmy.kde.social/c/kde/followers" + ], + "sensitive": false, + "atomUri": "https://floss.social/users/kde/statuses/113306831140126616", + "inReplyToAtomUri": "https://floss.social/users/kde/statuses/113306824627995724", + "conversation": "tag:floss.social,2024-10-14:objectId=71424279:objectType=Conversation", + "content": "

@kde@lemmy.kde.social We also need funding 💶 to keep the gears turning! Please support us with a donation: https://kde.org/donate/ [3/3]
", + "contentMap": { + "en": "

@kde@lemmy.kde.social We also need funding 💶 to keep the gears turning! Please support us with a donation: https://kde.org/donate/ [3/3]
" + }, + "attachment": [ + { + "type": "Document", + "mediaType": "image/jpeg", + "url": "https://cdn.masto.host/floss/media_attachments/files/113/306/826/682/985/891/original/c8d906a2f2ab2334.jpg", + "name": "The KDE dragons Katie and Konqi stand on either side of a pot filling up with gold coins. Donate!", + "blurhash": "USQv:h-W-qI-^,W;RPs=^-R%NZxbo#sDobSc", + "focalPoint": [0.0, 0.0], + "width": 1500, + "height": 1095 + } + ], + "tag": [ + { + "type": "Mention", + "href": "https://lemmy.kde.social/c/kde", + "name": "@kde@lemmy.kde.social" + } + ], + "replies": { + "id": "https://floss.social/users/kde/statuses/113306831140126616/replies", + "type": "Collection", + "first": { + "type": "CollectionPage", + "next": "https://floss.social/users/kde/statuses/113306831140126616/replies?only_other_accounts=true&page=true", + "partOf": "https://floss.social/users/kde/statuses/113306831140126616/replies", + "items": [] + } + }, + "likes": { + "id": "https://floss.social/users/kde/statuses/113306831140126616/likes", + "type": "Collection", + "totalItems": 39 + }, + "shares": { + "id": "https://floss.social/users/kde/statuses/113306831140126616/shares", + "type": "Collection", + "totalItems": 24 + } +} diff --git a/crates/apub/assets/mastodon/objects/page.json b/crates/apub/assets/mastodon/objects/page.json index ec4c13080..2965b4b8d 100644 --- a/crates/apub/assets/mastodon/objects/page.json +++ b/crates/apub/assets/mastodon/objects/page.json @@ -11,21 +11,21 @@ "votersCount": "toot:votersCount" } ], - "id": "https://dice.camp/users/thekernelinyellow/statuses/110830743680706519", + "id": "https://masto.qa.urbanwildlife.biz/users/mastodon/statuses/110830743680706519", "type": "Note", "summary": null, "inReplyTo": null, "published": "2023-08-04T09:55:39Z", - "url": "https://dice.camp/@thekernelinyellow/110830743680706519", - "attributedTo": "https://dice.camp/users/thekernelinyellow", + "url": "https://masto.qa.urbanwildlife.biz/110830743680706519", + "attributedTo": "https://masto.qa.urbanwildlife.biz/users/mastodon", "to": ["https://www.w3.org/ns/activitystreams#Public"], "cc": [ - "https://dice.camp/users/thekernelinyellow/followers", + "https://masto.qa.urbanwildlife.biz/users/mastodon/followers", "https://enterprise.lemmy.ml/c/tenforward", "https://enterprise.lemmy.ml/c/tenforward/followers" ], "sensitive": false, - "atomUri": "https://dice.camp/users/thekernelinyellow/statuses/110830743680706519", + "atomUri": "https://masto.qa.urbanwildlife.biz/statuses/110830743680706519", "inReplyToAtomUri": null, "conversation": "tag:dice.camp,2023-08-04:objectId=29969291:objectType=Conversation", "content": "

@tenforward Variable never resetting at refresh
Hi! I'm using a variable to count elements in my generator but every time I generate a new character, the counter's value carries on from the previous one. Is there a function to reset it (I set it to 0 at the beginning of the file)
", @@ -41,12 +41,12 @@ } ], "replies": { - "id": "https://dice.camp/users/thekernelinyellow/statuses/110830743680706519/replies", + "id": "https://masto.qa.urbanwildlife.biz/users/mastodon/statuses/110830743680706519/replies", "type": "Collection", "first": { "type": "CollectionPage", - "next": "https://dice.camp/users/thekernelinyellow/statuses/110830743680706519/replies?only_other_accounts=true&page=true", - "partOf": "https://dice.camp/users/thekernelinyellow/statuses/110830743680706519/replies", + "next": "https://masto.qa.urbanwildlife.biz/users/mastodon/statuses/110830743680706519/replies?only_other_accounts=true&page=true", + "partOf": "https://masto.qa.urbanwildlife.biz/users/mastodon/statuses/110830743680706519/replies", "items": [] } } diff --git a/crates/apub/assets/mbin/activities/accept.json b/crates/apub/assets/mbin/activities/accept.json new file mode 100644 index 000000000..3a190977c --- /dev/null +++ b/crates/apub/assets/mbin/activities/accept.json @@ -0,0 +1,12 @@ +{ + "@context": "https://www.w3.org/ns/activitystreams", + "id": "https://some-mbin.instance/f/object/2721ffc3-f8a9-417e-a124-af057434a3af#accept", + "type": "Accept", + "actor": "https://some-mbin.instance/m/someMag", + "object": { + "id": "https://some-other.instance/f/object/c51ea652-e594-4920-a989-f5350f0cec05", + "type": "Follow", + "actor": "https://some-other.instance/u/someUser", + "object": "https://some-mbin.instance/m/someMag" + } +} diff --git a/crates/apub/assets/mbin/activities/flag.json b/crates/apub/assets/mbin/activities/flag.json new file mode 100644 index 000000000..7c1e5ae23 --- /dev/null +++ b/crates/apub/assets/mbin/activities/flag.json @@ -0,0 +1,11 @@ +{ + "@context": ["https://www.w3.org/ns/activitystreams"], + "id": "https://mbin-test1/reports/45f8a01d-a73e-4575-bffa-c9f24c61f458", + "type": "Flag", + "actor": "https://mbin-test1/u/BentiGorlich", + "object": ["https://lemmy-test/post/4", "https://lemmy-test/u/BentiGorlich"], + "audience": "https://lemmy-test/c/test_mag", + "summary": "dikjhgasdpas dsaü", + "content": "dikjhgasdpas dsaü", + "to": ["https://lemmy-test/c/test_mag"] +} diff --git a/crates/apub/assets/nodebb/objects/group.json b/crates/apub/assets/nodebb/objects/group.json new file mode 100644 index 000000000..462300ce9 --- /dev/null +++ b/crates/apub/assets/nodebb/objects/group.json @@ -0,0 +1,22 @@ +{ + "@context": "https://www.w3.org/ns/activitystreams", + "id": "https://community.nodebb.org/category/31", + "url": "https://community.nodebb.org/category/31/threadiverse-working-group", + "inbox": "https://community.nodebb.org/category/31/inbox", + "outbox": "https://community.nodebb.org/category/31/outbox", + "sharedInbox": "https://community.nodebb.org/inbox", + "type": "Group", + "name": "Threadiverse Working Group", + "preferredUsername": "swicg-threadiverse-wg", + "summary": "Discussion and announcements related to the SWICG Threadiverse task force", + "icon": { + "type": "Image", + "mediaType": "image/png", + "url": "https://community.nodebb.org/assets/uploads/system/site-logo.png" + }, + "publicKey": { + "id": "https://community.nodebb.org/category/31#key", + "owner": "https://community.nodebb.org/category/31", + "publicKeyPem": "-----BEGIN PUBLIC 
KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA0/Or3Ox2/jbhBZzF8W0Y\nWuS/4lgm5O5rxQk2nDRBXU/qNaZnMPkW2FxFPuPetndUVKSD2+vWF3SUlFyZ/vhT\nITzLkbRSILMiZCUg+0mvqi6va1WMBglMe5jLkc7wdfgNsosqBzKMdyMxqDZr++mJ\n8DjuqzWHENcjWcbMfSfAa9nkZHBIQUsHGGIwxEbKNlPqF0JIB66py7xmXbboDxpD\nPVF3EMkgZNnbmDGtlkZCKbztradyNRVl/u6KJpV3fbi+m/8CZ+POc4I5sKCQY1Hr\ndslHlm6tCkJQxIIKQtz0ZJ5yCUYmk48C2gFCndfJtYoEy9iR62xSemky6y04gWVc\naQIDAQAB\n-----END PUBLIC KEY-----\n" + } +} diff --git a/crates/apub/assets/nodebb/objects/page.json b/crates/apub/assets/nodebb/objects/page.json new file mode 100644 index 000000000..794ea1714 --- /dev/null +++ b/crates/apub/assets/nodebb/objects/page.json @@ -0,0 +1,38 @@ +{ + "@context": "https://www.w3.org/ns/activitystreams", + "id": "https://community.nodebb.org/topic/17908", + "type": "Page", + "to": ["https://www.w3.org/ns/activitystreams#Public"], + "cc": ["https://community.nodebb.org/uid/2/followers"], + "inReplyTo": null, + "published": "2024-03-19T20:25:39.462Z", + "url": "https://community.nodebb.org/topic/17908/threadiverse-working-group", + "attributedTo": "https://community.nodebb.org/uid/2", + "audience": "https://community.nodebb.org/category/31/threadiverse-working-group", + "sensitive": false, + "summary": null, + "name": "Threadiverse Working Group", + "content": "

NodeBB is at this year's FediForum, and one of the breakout sessions centred around the Theadiverse, the subset of ActivityPub-enabled applications built around a topic-centric model of content representation.

\n

Some of the topic touched upon included:

\n
    \n
  • Aligning on a standard representation for collections of Notes
  • \n
  • FEP-1b12 — Group federation and implementation thereof by Lemmy, et al.
  • \n
  • Offering a comparatively more feature-rich experience vis-a-vis restrictions re: microblogging
  • \n
  • Going forward: collaborating on building compatible threadiverse implementations
  • \n
\n

The main action item involved the genesis of an informal working group for the threadiverse, in order to align our disparate implementations toward a common path.

\n

We intend to meet monthly at first, with the first meeting likely sometime early-to-mid April.

\n

The topic of the first WG call is: Representation of the higher level collection of Notes (posts, etc.) — Article vs. Page, etc?

\n

Interested?

\n
    \n
  • Publicly reply to this post (NodeBB does not support non-public posts at this time) if you'd like to join the list
  • \n
  • If you prefer to remain private, please email julian@nodebb.org
  • \n
\n
\n

As an aside, I'd love to try something new and attempt to keep as much of this as I can on the social web. Can you do me a favour and boost this to your followers?

\n", + "source": { + "content": "NodeBB is at this year's FediForum, and one of the breakout sessions centred around **the Theadiverse**, the subset of ActivityPub-enabled applications built around a topic-centric model of content representation.\n\nSome of the topic touched upon included:\n\n* Aligning on a standard representation for collections of Notes\n* FEP-1b12 — Group federation and implementation thereof by Lemmy, et al.\n* Offering a comparatively more feature-rich experience vis-a-vis restrictions re: microblogging\n* Going forward: collaborating on building compatible threadiverse implementations\n\nThe main action item involved **the genesis of an informal working group for the threadiverse**, in order to align our disparate implementations toward a common path.\n\nWe intend to meet monthly at first, with the first meeting likely sometime early-to-mid April.\n\nThe topic of the first WG call is: **Representation of the higher level collection of Notes (posts, etc.) — Article vs. Page, etc?**\n\nInterested?\n\n* Publicly reply to this post (NodeBB does not support non-public postsat this time) if you'd like to join the list\n* If you prefer to remain private, please email julian@nodebb.org\n\n----\n\nAs an aside, I'd love to try something new and attempt to keep as much of this as I can on the social web. Can you do me a favour and boost this to your followers?", + "mediaType": "text/markdown" + }, + "tag": [ + { + "type": "Hashtag", + "href": "https://community.nodebb.org/tags/fediforum", + "name": "#fediforum" + }, + { + "type": "Hashtag", + "href": "https://community.nodebb.org/tags/activitypub", + "name": "#activitypub" + }, + { + "type": "Hashtag", + "href": "https://community.nodebb.org/tags/threadiverse", + "name": "#threadiverse" + } + ], + "attachment": [] +} diff --git a/crates/apub/assets/nodebb/objects/person.json b/crates/apub/assets/nodebb/objects/person.json new file mode 100644 index 000000000..aa2ee5c8c --- /dev/null +++ b/crates/apub/assets/nodebb/objects/person.json @@ -0,0 +1,29 @@ +{ + "@context": "https://www.w3.org/ns/activitystreams", + "id": "https://community.nodebb.org/uid/2", + "url": "https://community.nodebb.org/user/julian", + "followers": "https://community.nodebb.org/uid/2/followers", + "following": "https://community.nodebb.org/uid/2/following", + "inbox": "https://community.nodebb.org/uid/2/inbox", + "outbox": "https://community.nodebb.org/uid/2/outbox", + "sharedInbox": "https://community.nodebb.org/inbox", + "type": "Person", + "name": "julian", + "preferredUsername": "julian", + "summary": "Hi! I'm Julian, one of the co-founders of NodeBB, the forum software you are using right now.\r\n\r\nI started this company with two colleagues, Baris and Andrew, in 2013, and have been doing the startup thing since (although I think at some point along the way we stopped being a startup and just became a boring ol' small business).\r\n\r\nIn my free time I rock climb, cycle, and lift weights. 
I live just outside Toronto, Canada, with my wife and three children.", + "icon": { + "type": "Image", + "mediaType": "image/jpeg", + "url": "https://community.nodebb.org/assets/uploads/profile/uid-2/2-profileavatar-1701457270279.jpeg" + }, + "image": { + "type": "Image", + "mediaType": "image/jpeg", + "url": "https://community.nodebb.org/assets/uploads/profile/uid-2/2-profilecover-1649468285913.jpeg" + }, + "publicKey": { + "id": "https://community.nodebb.org/uid/2#key", + "owner": "https://community.nodebb.org/uid/2", + "publicKeyPem": "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAzEr0sFdATahQzprS4EOT\nZq+KMc6UTbt2GDP20OrQi/P5AXAbMaQiRCRdGWhYGjnH0jicn5NnozNxRo+HchJT\nV6NOHxpsxqPCoaLeoBkhfhbSCLr2Gzil6mmfqf9TjnI7A7ZTtCc0G+n0ztyL9HwL\nkEAI178l2gckk4XKKYnEd+dyiIevExrq/ROLgwW1o428FZvlF5amKxhpVUEygRU8\nCd1hqWYs+xYDOJURCP5qEx/MmRPpV/yGMTMyF+/gcQc0TUZnhWAM2E4M+aq3aKh6\nJP/vsry+5YZPUaPWfopbT5Ijyt6ZSElp6Avkg56eTz0a5SRcjCVS6IFVPwiLlzOe\nYwIDAQAB\n-----END PUBLIC KEY-----\n" + } +} diff --git a/crates/apub/assets/peertube/objects/group.json b/crates/apub/assets/peertube/objects/group.json index cf4e216c4..1817fb202 100644 --- a/crates/apub/assets/peertube/objects/group.json +++ b/crates/apub/assets/peertube/objects/group.json @@ -1,30 +1,4 @@ { - "type": "Group", - "id": "https://framatube.org/video-channels/joinpeertube", - "following": "https://framatube.org/video-channels/joinpeertube/following", - "followers": "https://framatube.org/video-channels/joinpeertube/followers", - "playlists": "https://framatube.org/video-channels/joinpeertube/playlists", - "inbox": "https://framatube.org/video-channels/joinpeertube/inbox", - "outbox": "https://framatube.org/video-channels/joinpeertube/outbox", - "preferredUsername": "joinpeertube", - "url": "https://framatube.org/video-channels/joinpeertube", - "name": "A propos de PeerTube", - "endpoints": { - "sharedInbox": "https://framatube.org/inbox" - }, - "publicKey": { - "id": "https://framatube.org/video-channels/joinpeertube#main-key", - "owner": "https://framatube.org/video-channels/joinpeertube", - "publicKeyPem": "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsJCIZJga+4Kumb9Wrmpy\ntyV7kWdINImoXBiFkGG+6OHreHN2C3UPwTu9IkX/e20NaX6Ly6c0busieW7yh//q\nomHl2U8zz2Z5xQHUN/2ljQjUNO+89OV6cFIGyEvcwc6QhuqGvrcxonjrEkux7xSv\nxQM4kZ3YW1Sii4piFpGGIm1pcUkOxFab8PWVB5Hzpg/df2/XOmH8UECT5vaMRPE6\ns6hNiQNE34z9QmPiG6nUlaWb/WDcMYbma3sUVWW3DI008ukLlwLaLIm30ax8CEYt\nHEv2jOQb1E1sXtBPe1FI+dXRgTIk40KF50KLqcgwJH1y5ck7c8IEeooj+tYGVqPr\npQIDAQAB\n-----END PUBLIC KEY-----" - }, - "published": "2021-08-09T14:26:09.514Z", - "icon": { - "type": "Image", - "mediaType": "image/png", - "height": 120, - "width": 120, - "url": "https://framatube.org/lazy-static/avatars/a2c2ff10-9da6-4c6c-9b25-2e557fa74b66.png" - }, "@context": [ "https://www.w3.org/ns/activitystreams", "https://w3id.org/security/v1", @@ -33,99 +7,66 @@ }, { "pt": "https://joinpeertube.org/ns#", - "sc": "http://schema.org#", - "Hashtag": "as:Hashtag", - "uuid": "sc:identifier", - "category": "sc:category", - "licence": "sc:license", - "subtitleLanguage": "sc:subtitleLanguage", - "sensitive": "as:sensitive", - "language": "sc:inLanguage", - "isLiveBroadcast": "sc:isLiveBroadcast", - "liveSaveReplay": { - "@type": "sc:Boolean", - "@id": "pt:liveSaveReplay" - }, - "permanentLive": { - "@type": "sc:Boolean", - "@id": "pt:permanentLive" - }, - "Infohash": "pt:Infohash", - "Playlist": "pt:Playlist", - "PlaylistElement": "pt:PlaylistElement", - "originallyPublishedAt": 
"sc:datePublished", - "views": { - "@type": "sc:Number", - "@id": "pt:views" - }, - "state": { - "@type": "sc:Number", - "@id": "pt:state" - }, - "size": { - "@type": "sc:Number", - "@id": "pt:size" - }, - "fps": { - "@type": "sc:Number", - "@id": "pt:fps" - }, - "startTimestamp": { - "@type": "sc:Number", - "@id": "pt:startTimestamp" - }, - "stopTimestamp": { - "@type": "sc:Number", - "@id": "pt:stopTimestamp" - }, - "position": { - "@type": "sc:Number", - "@id": "pt:position" - }, - "commentsEnabled": { - "@type": "sc:Boolean", - "@id": "pt:commentsEnabled" - }, - "downloadEnabled": { - "@type": "sc:Boolean", - "@id": "pt:downloadEnabled" - }, - "waitTranscoding": { - "@type": "sc:Boolean", - "@id": "pt:waitTranscoding" + "sc": "http://schema.org/", + "playlists": { + "@id": "pt:playlists", + "@type": "@id" }, "support": { "@type": "sc:Text", "@id": "pt:support" }, - "likes": { - "@id": "as:likes", - "@type": "@id" - }, - "dislikes": { - "@id": "as:dislikes", - "@type": "@id" - }, - "playlists": { - "@id": "pt:playlists", - "@type": "@id" - }, - "shares": { - "@id": "as:shares", - "@type": "@id" - }, - "comments": { - "@id": "as:comments", - "@type": "@id" - } + "icons": "as:icon" } ], - "summary": "Un logiciel libre pour reprendre le contrôle de vos vidéos", - "support": null, + "type": "Group", + "id": "https://peertube.stream/video-channels/vu", + "following": "https://peertube.stream/video-channels/vu/following", + "followers": "https://peertube.stream/video-channels/vu/followers", + "playlists": "https://peertube.stream/video-channels/vu/playlists", + "inbox": "https://peertube.stream/video-channels/vu/inbox", + "outbox": "https://peertube.stream/video-channels/vu/outbox", + "preferredUsername": "vu", + "url": "https://peertube.stream/video-channels/vu", + "name": "VU", + "endpoints": { + "sharedInbox": "https://peertube.stream/inbox" + }, + "publicKey": { + "id": "https://peertube.stream/video-channels/vu#main-key", + "owner": "https://peertube.stream/video-channels/vu", + "publicKeyPem": "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAtcWpN7efQx5C7ecWkw3r\nX4ViPy/bl3d3iyVLyP6z/3+WAUKJxqR+QKlNzxM7NglzB0B48NYu2cg4iuwKkSK9\ntrfMC/Ze0H10Wo/5kUH5YQKzLo4syHOuuM+1rbZFBbzVFwk4k0qqLFTXQ+Y6WNSS\nG9OlFYZNpRaUkgF8Q/KCsngn68qsZ0gLly9FJb+6+j3IppLJNXrBpFB5qulWibL+\neN+3XMnaTm6ge6X+rFti5r6dh10grL0KU/eZKmGyadgdwYdvR/LLtBWwFIwSJShk\nuIPhcz2zbkwrV3AixLe76TLGXX5M9qczfsVYLupyU7TwPlFM2ENDtDdfp41sWaZa\nxQIDAQAB\n-----END PUBLIC KEY-----" + }, + "published": "2020-12-10T16:07:08.406Z", + "icon": [ + { + "type": "Image", + "mediaType": "image/jpeg", + "height": 48, + "width": 48, + "url": "https://peertube.stream/lazy-static/avatars/45ec87d5-c8ec-4fcf-948f-d5a928b56496.jpg" + }, + { + "type": "Image", + "mediaType": "image/jpeg", + "height": 120, + "width": 120, + "url": "https://peertube.stream/lazy-static/avatars/3296c098-abbb-4fda-a67a-ab88e447ca19.jpg" + } + ], + "image": { + "type": "Image", + "mediaType": "image/jpeg", + "height": 317, + "width": 1920, + "url": "https://peertube.stream/lazy-static/banners/550c0541-3021-4d4b-8654-54d0c4cda96d.jpg" + }, + "summary": "VU c'est du lundi au samedi sur France 5 à 20h00 \nRetrouvez les meilleurs moments de la télévision, en 6 minutes.\n\nChaîne PeerTube non-officielle.", + "support": "Suivre VU :\n- Twitter : https://twitter.com/vufrancetv\n- Facebook :https://www.facebook.com/vufrancetv/\n- Site : https://www.france.tv/france-5/vu/", "attributedTo": [ { "type": "Person", - "id": "https://framatube.org/accounts/framasoft" + "id": 
"https://peertube.stream/accounts/createurs" } ] } diff --git a/crates/apub/assets/peertube/objects/person.json b/crates/apub/assets/peertube/objects/person.json index 871e88e13..2b1acdaad 100644 --- a/crates/apub/assets/peertube/objects/person.json +++ b/crates/apub/assets/peertube/objects/person.json @@ -1,30 +1,4 @@ { - "type": "Person", - "id": "https://framatube.org/accounts/framasoft", - "following": "https://framatube.org/accounts/framasoft/following", - "followers": "https://framatube.org/accounts/framasoft/followers", - "playlists": "https://framatube.org/accounts/framasoft/playlists", - "inbox": "https://framatube.org/accounts/framasoft/inbox", - "outbox": "https://framatube.org/accounts/framasoft/outbox", - "preferredUsername": "framasoft", - "url": "https://framatube.org/accounts/framasoft", - "name": "Framasoft", - "endpoints": { - "sharedInbox": "https://framatube.org/inbox" - }, - "publicKey": { - "id": "https://framatube.org/accounts/framasoft#main-key", - "owner": "https://framatube.org/accounts/framasoft", - "publicKeyPem": "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAuRh3frgIg866D0y0FThp\nSUkJImMcHGkUvpYQYv2iUgarZZtEbwT8PfQf0bJazy+cP8KqQmMDf5PBhT7dfdny\nf/GKGMw9Olc+QISeKDj3sqZ3Csrm4KV4avMGCfth6eSU7LozojeSGCXdUFz/8UgE\nfhV4mJjEX/FbwRYoKlagv5rY9mkX5XomzZU+z9j6ZVXyofwOwJvmI1hq0SYDv2bc\neB/RgIh/H0nyMtF8o+0CT42FNEET9j9m1BKOBtPzwZHmitKRkEmui5cK256s1laB\nT61KHpcD9gQKkQ+I3sFEzCBUJYfVo6fUe+GehBZuAfq4qDhd15SfE4K9veDscDFI\nTwIDAQAB\n-----END PUBLIC KEY-----" - }, - "published": "2018-03-01T15:16:17.118Z", - "icon": { - "type": "Image", - "mediaType": "image/png", - "height": null, - "width": null, - "url": "https://framatube.org/lazy-static/avatars/f73876f5-1d45-4f8a-942a-d3d5d5ac5dc1.png" - }, "@context": [ "https://www.w3.org/ns/activitystreams", "https://w3id.org/security/v1", @@ -33,92 +7,52 @@ }, { "pt": "https://joinpeertube.org/ns#", - "sc": "http://schema.org#", - "Hashtag": "as:Hashtag", - "uuid": "sc:identifier", - "category": "sc:category", - "licence": "sc:license", - "subtitleLanguage": "sc:subtitleLanguage", - "sensitive": "as:sensitive", - "language": "sc:inLanguage", - "isLiveBroadcast": "sc:isLiveBroadcast", - "liveSaveReplay": { - "@type": "sc:Boolean", - "@id": "pt:liveSaveReplay" - }, - "permanentLive": { - "@type": "sc:Boolean", - "@id": "pt:permanentLive" - }, - "Infohash": "pt:Infohash", - "Playlist": "pt:Playlist", - "PlaylistElement": "pt:PlaylistElement", - "originallyPublishedAt": "sc:datePublished", - "views": { - "@type": "sc:Number", - "@id": "pt:views" - }, - "state": { - "@type": "sc:Number", - "@id": "pt:state" - }, - "size": { - "@type": "sc:Number", - "@id": "pt:size" - }, - "fps": { - "@type": "sc:Number", - "@id": "pt:fps" - }, - "startTimestamp": { - "@type": "sc:Number", - "@id": "pt:startTimestamp" - }, - "stopTimestamp": { - "@type": "sc:Number", - "@id": "pt:stopTimestamp" - }, - "position": { - "@type": "sc:Number", - "@id": "pt:position" - }, - "commentsEnabled": { - "@type": "sc:Boolean", - "@id": "pt:commentsEnabled" - }, - "downloadEnabled": { - "@type": "sc:Boolean", - "@id": "pt:downloadEnabled" - }, - "waitTranscoding": { - "@type": "sc:Boolean", - "@id": "pt:waitTranscoding" + "sc": "http://schema.org/", + "playlists": { + "@id": "pt:playlists", + "@type": "@id" }, "support": { "@type": "sc:Text", "@id": "pt:support" }, - "likes": { - "@id": "as:likes", - "@type": "@id" - }, - "dislikes": { - "@id": "as:dislikes", - "@type": "@id" - }, - "playlists": { - "@id": "pt:playlists", - "@type": "@id" - }, - 
"shares": { - "@id": "as:shares", - "@type": "@id" - }, - "comments": { - "@id": "as:comments", - "@type": "@id" - } + "icons": "as:icon" } ], - "summary": null + "type": "Person", + "id": "https://peertube.stream/accounts/createurs", + "following": "https://peertube.stream/accounts/createurs/following", + "followers": "https://peertube.stream/accounts/createurs/followers", + "playlists": "https://peertube.stream/accounts/createurs/playlists", + "inbox": "https://peertube.stream/accounts/createurs/inbox", + "outbox": "https://peertube.stream/accounts/createurs/outbox", + "preferredUsername": "createurs", + "url": "https://peertube.stream/accounts/createurs", + "name": "Créateurs", + "endpoints": { + "sharedInbox": "https://peertube.stream/inbox" + }, + "publicKey": { + "id": "https://peertube.stream/accounts/createurs#main-key", + "owner": "https://peertube.stream/accounts/createurs", + "publicKeyPem": "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAxqkQhbRYbA81+WTYjorR\n2lEMad3kYCnzDjGTLr4I92eanzFHxyELGnjzP6TpEvjOiB9NrCRrqU/iFPLdgrq2\nwIFcXPWdCq6Gcg7QLlaeMM0JoJmr0KTEhzg0XKCo96UsyTzaF4DISxqi8RyoyWeU\nEkgiOzlkdYTlouq3MlQH+p1PBAsNUQfIEUsU+l6k1vzbm8JRwlT+D1bNde4I/Lqs\n4uB5ru3zzInwZ2hz9+heiriNoGEBv74rZHYn966tZVX8iMGx2+m6okozEdEQbqCl\n0ekqDcd8P6CoFqqeeu8coh82OUtuFI/XsbetdWA55YQmSHyMiTsIwVbeoogIETbI\n4QIDAQAB\n-----END PUBLIC KEY-----" + }, + "published": "2020-11-11T17:12:37.243Z", + "icon": [ + { + "type": "Image", + "mediaType": "image/png", + "height": 48, + "width": 48, + "url": "https://peertube.stream/lazy-static/avatars/1760df9a-3c96-45fc-9342-c313a3bf2210.png" + }, + { + "type": "Image", + "mediaType": "image/png", + "height": 120, + "width": 120, + "url": "https://peertube.stream/lazy-static/avatars/c27b672d-ad8f-498a-adbe-553af8da56f9.png" + } + ], + "summary": "Centralisation de miroirs de chaînes. La grande majorité a été contactée ou diffuse sous licence avec paternité.\n\nCompte maintenu par [Raph](https://tooter.social/@raph)." 
} diff --git a/crates/apub/assets/peertube/objects/video.json b/crates/apub/assets/peertube/objects/video.json index 78e732fe5..daca3d554 100644 --- a/crates/apub/assets/peertube/objects/video.json +++ b/crates/apub/assets/peertube/objects/video.json @@ -1,372 +1,4 @@ { - "type": "Video", - "id": "https://framatube.org/videos/watch/4294a720-f263-4ea4-9392-cf9cea4d5277", - "name": "What is the Fediverse?", - "duration": "PT98S", - "uuid": "4294a720-f263-4ea4-9392-cf9cea4d5277", - "tag": [ - { - "type": "Hashtag", - "name": "fediverse" - }, - { - "type": "Hashtag", - "name": "framasoft" - }, - { - "type": "Hashtag", - "name": "Mastodon" - }, - { - "type": "Hashtag", - "name": "PeerTube " - } - ], - "category": { - "identifier": "15", - "name": "Science & Technology" - }, - "licence": { - "identifier": "2", - "name": "Attribution - Share Alike" - }, - "language": { - "identifier": "en", - "name": "English" - }, - "views": 4805, - "sensitive": false, - "waitTranscoding": true, - "isLiveBroadcast": false, - "liveSaveReplay": null, - "permanentLive": null, - "state": 1, - "commentsEnabled": true, - "downloadEnabled": true, - "published": "2022-04-28T11:51:16.293Z", - "originallyPublishedAt": null, - "updated": "2022-05-03T11:39:02.489Z", - "mediaType": "text/markdown", - "content": "Help us translate the subtitles [on our translation tool](https://weblate.framasoft.org/projects/what-is-the-fediverse-video/subtitles/).\r\n\r\n**Animation Produced by** [LILA](https://libreart.info/) - [ZeMarmot Team](https://film.zemarmot.net/)\r\n**Direction & Animation** by Aryeom\r\n**Script & Technology** by Jehan\r\n**Voice by** Paul Peterson\r\n**Licence**: [CC-By-SA 4.0](https://creativecommons.org/licenses/by-sa/4.0/)\r\n\r\n**Sponsored by** [Framasoft](https://framasoft.org/)\r\n\r\n**Sound by** ORL - [AMMD](https://ammd.net/)\r\n\r\n**Music**: \"Dolling\" by CyberSDF - [CC-BY 4.0](https://creativecommons.org/licenses/by/4.0/)", - "support": null, - "subtitleLanguage": [ - { - "identifier": "ca", - "name": "Catalan", - "url": "https://framatube.org/lazy-static/video-captions/6f8aedd2-c61b-47f6-a2c9-75b15af24d14-ca.vtt" - }, - { - "identifier": "en", - "name": "English", - "url": "https://framatube.org/lazy-static/video-captions/2f199e59-5cf8-4529-a033-9d6dd4a858ca-en.vtt" - }, - { - "identifier": "es", - "name": "Spanish", - "url": "https://framatube.org/lazy-static/video-captions/3f74c16b-925f-45e1-8388-e358428c2436-es.vtt" - }, - { - "identifier": "eu", - "name": "Basque", - "url": "https://framatube.org/lazy-static/video-captions/c4c88e7e-b9d4-4192-bcf2-caf025ddc9fd-eu.vtt" - }, - { - "identifier": "fr", - "name": "French", - "url": "https://framatube.org/lazy-static/video-captions/c18906e3-6257-43e7-90e4-fa2c8ded258b-fr.vtt" - }, - { - "identifier": "hu", - "name": "Hungarian", - "url": "https://framatube.org/lazy-static/video-captions/0a8a295d-a288-404b-b7b3-a2272bc2a6fb-hu.vtt" - }, - { - "identifier": "it", - "name": "Italian", - "url": "https://framatube.org/lazy-static/video-captions/cf857bd9-8b04-4018-af9a-23fa1ff7662d-it.vtt" - }, - { - "identifier": "nb", - "name": "Norwegian Bokmål", - "url": "https://framatube.org/lazy-static/video-captions/12e3a0e9-a29e-4b06-8538-91bed2a11242-nb.vtt" - }, - { - "identifier": "oc", - "name": "Occitan", - "url": "https://framatube.org/lazy-static/video-captions/d841af30-97bf-4a0c-b1f9-e163ba77f23f-oc.vtt" - }, - { - "identifier": "sh", - "name": "Serbo-Croatian", - "url": 
"https://framatube.org/lazy-static/video-captions/7afe4dae-745f-4769-9f17-9c3a079235cf-sh.vtt" - }, - { - "identifier": "tr", - "name": "Turkish", - "url": "https://framatube.org/lazy-static/video-captions/1b2ea189-760c-4a3e-98d3-16f596c151f0-tr.vtt" - }, - { - "identifier": "vi", - "name": "Vietnamese", - "url": "https://framatube.org/lazy-static/video-captions/552b4086-54ab-4eb3-a8b3-7611a2175e77-vi.vtt" - } - ], - "icon": [ - { - "type": "Image", - "url": "https://framatube.org/static/thumbnails/1f9eb76e-c089-4bdd-af14-602935a6db72.jpg", - "mediaType": "image/jpeg", - "width": 280, - "height": 157 - }, - { - "type": "Image", - "url": "https://framatube.org/lazy-static/previews/8f89d4d8-696f-4512-9a1a-72f1d12caede.jpg", - "mediaType": "image/jpeg", - "width": 850, - "height": 480 - } - ], - "url": [ - { - "type": "Link", - "mediaType": "text/html", - "href": "https://framatube.org/videos/watch/4294a720-f263-4ea4-9392-cf9cea4d5277" - }, - { - "type": "Link", - "mediaType": "application/x-mpegURL", - "href": "https://framatube.org/static/streaming-playlists/hls/4294a720-f263-4ea4-9392-cf9cea4d5277/adc259cb-06f7-496c-8a50-599e58358b29-master.m3u8", - "tag": [ - { - "type": "Infohash", - "name": "caf7178ddd2013e28c9fbcbb7be28df25d03a023" - }, - { - "type": "Infohash", - "name": "cc18bb140f51f64090ba41c951fba85705cafa38" - }, - { - "type": "Infohash", - "name": "595513d823a1aecc18abacac94a1ebb0c31ec009" - }, - { - "type": "Infohash", - "name": "6ae0ce749a57d0f8ff70286878ea7661f85eebf7" - }, - { - "type": "Infohash", - "name": "4eb799f42d461929ed8dd4befae274c9a4404b99" - }, - { - "type": "Infohash", - "name": "b48d1ea795657668783544fd1c9baf637198a323" - }, - { - "type": "Link", - "name": "sha256", - "mediaType": "application/json", - "href": "https://framatube.org/static/streaming-playlists/hls/4294a720-f263-4ea4-9392-cf9cea4d5277/b414eda3-c8af-4271-8dde-253db28aacd1-segments-sha256.json" - }, - { - "type": "Link", - "mediaType": "video/mp4", - "href": "https://framatube.org/static/streaming-playlists/hls/4294a720-f263-4ea4-9392-cf9cea4d5277/64147344-1957-480d-9106-59dd7bbf5661-1080-fragmented.mp4", - "height": 1080, - "size": 14653991, - "fps": 24 - }, - { - "type": "Link", - "rel": ["metadata", "video/mp4"], - "mediaType": "application/json", - "href": "https://framatube.org/api/v1/videos/4294a720-f263-4ea4-9392-cf9cea4d5277/metadata/1421492", - "height": 1080, - "fps": 24 - }, - { - "type": "Link", - "mediaType": "application/x-bittorrent", - "href": "https://framatube.org/lazy-static/torrents/83fa27e3-aba7-4e01-9e66-931086374176-1080-hls.torrent", - "height": 1080 - }, - { - "type": "Link", - "mediaType": "application/x-bittorrent;x-scheme-handler/magnet", - "href": "magnet:?xs=https%3A%2F%2Fframatube.org%2Flazy-static%2Ftorrents%2F83fa27e3-aba7-4e01-9e66-931086374176-1080-hls.torrent&xt=urn:btih:5651916e4301c812412f51381c5af0c1f627bfcb&dn=What+is+the+Fediverse%3F&tr=https%3A%2F%2Fframatube.org%2Ftracker%2Fannounce&tr=wss%3A%2F%2Fframatube.org%3A443%2Ftracker%2Fsocket&ws=https%3A%2F%2Fframatube.org%2Fstatic%2Fstreaming-playlists%2Fhls%2F4294a720-f263-4ea4-9392-cf9cea4d5277%2F64147344-1957-480d-9106-59dd7bbf5661-1080-fragmented.mp4", - "height": 1080 - }, - { - "type": "Link", - "mediaType": "video/mp4", - "href": "https://framatube.org/static/streaming-playlists/hls/4294a720-f263-4ea4-9392-cf9cea4d5277/0efaeae5-7468-4c45-ade5-d3b6c732621f-720-fragmented.mp4", - "height": 720, - "size": 9939723, - "fps": 24 - }, - { - "type": "Link", - "rel": ["metadata", "video/mp4"], - "mediaType": 
"application/json", - "href": "https://framatube.org/api/v1/videos/4294a720-f263-4ea4-9392-cf9cea4d5277/metadata/1421496", - "height": 720, - "fps": 24 - }, - { - "type": "Link", - "mediaType": "application/x-bittorrent", - "href": "https://framatube.org/lazy-static/torrents/b325c824-c052-46e2-9b46-887595055521-720-hls.torrent", - "height": 720 - }, - { - "type": "Link", - "mediaType": "application/x-bittorrent;x-scheme-handler/magnet", - "href": "magnet:?xs=https%3A%2F%2Fframatube.org%2Flazy-static%2Ftorrents%2Fb325c824-c052-46e2-9b46-887595055521-720-hls.torrent&xt=urn:btih:b5a1db245fe156edab7f1981693178dcd47075d2&dn=What+is+the+Fediverse%3F&tr=https%3A%2F%2Fframatube.org%2Ftracker%2Fannounce&tr=wss%3A%2F%2Fframatube.org%3A443%2Ftracker%2Fsocket&ws=https%3A%2F%2Fframatube.org%2Fstatic%2Fstreaming-playlists%2Fhls%2F4294a720-f263-4ea4-9392-cf9cea4d5277%2F0efaeae5-7468-4c45-ade5-d3b6c732621f-720-fragmented.mp4", - "height": 720 - }, - { - "type": "Link", - "mediaType": "video/mp4", - "href": "https://framatube.org/static/streaming-playlists/hls/4294a720-f263-4ea4-9392-cf9cea4d5277/201f9772-4971-4bc3-8356-9b85b405ae5d-480-fragmented.mp4", - "height": 480, - "size": 7398758, - "fps": 24 - }, - { - "type": "Link", - "rel": ["metadata", "video/mp4"], - "mediaType": "application/json", - "href": "https://framatube.org/api/v1/videos/4294a720-f263-4ea4-9392-cf9cea4d5277/metadata/1421494", - "height": 480, - "fps": 24 - }, - { - "type": "Link", - "mediaType": "application/x-bittorrent", - "href": "https://framatube.org/lazy-static/torrents/bd99f84e-e9bc-4d36-bea6-6f06000f87c5-480-hls.torrent", - "height": 480 - }, - { - "type": "Link", - "mediaType": "application/x-bittorrent;x-scheme-handler/magnet", - "href": "magnet:?xs=https%3A%2F%2Fframatube.org%2Flazy-static%2Ftorrents%2Fbd99f84e-e9bc-4d36-bea6-6f06000f87c5-480-hls.torrent&xt=urn:btih:6cbe09b50cf7788923a2ec4852a3b2bfd1cd1907&dn=What+is+the+Fediverse%3F&tr=https%3A%2F%2Fframatube.org%2Ftracker%2Fannounce&tr=wss%3A%2F%2Fframatube.org%3A443%2Ftracker%2Fsocket&ws=https%3A%2F%2Fframatube.org%2Fstatic%2Fstreaming-playlists%2Fhls%2F4294a720-f263-4ea4-9392-cf9cea4d5277%2F201f9772-4971-4bc3-8356-9b85b405ae5d-480-fragmented.mp4", - "height": 480 - }, - { - "type": "Link", - "mediaType": "video/mp4", - "href": "https://framatube.org/static/streaming-playlists/hls/4294a720-f263-4ea4-9392-cf9cea4d5277/b2313ae6-da36-4fe3-bec5-aa352824a38a-360-fragmented.mp4", - "height": 360, - "size": 6133890, - "fps": 24 - }, - { - "type": "Link", - "rel": ["metadata", "video/mp4"], - "mediaType": "application/json", - "href": "https://framatube.org/api/v1/videos/4294a720-f263-4ea4-9392-cf9cea4d5277/metadata/1421495", - "height": 360, - "fps": 24 - }, - { - "type": "Link", - "mediaType": "application/x-bittorrent", - "href": "https://framatube.org/lazy-static/torrents/b939430a-fdfd-4da7-a030-759ecafa6ac7-360-hls.torrent", - "height": 360 - }, - { - "type": "Link", - "mediaType": "application/x-bittorrent;x-scheme-handler/magnet", - "href": "magnet:?xs=https%3A%2F%2Fframatube.org%2Flazy-static%2Ftorrents%2Fb939430a-fdfd-4da7-a030-759ecafa6ac7-360-hls.torrent&xt=urn:btih:16693f14ad9e53fc41d335e3fa409c2f943d7b68&dn=What+is+the+Fediverse%3F&tr=https%3A%2F%2Fframatube.org%2Ftracker%2Fannounce&tr=wss%3A%2F%2Fframatube.org%3A443%2Ftracker%2Fsocket&ws=https%3A%2F%2Fframatube.org%2Fstatic%2Fstreaming-playlists%2Fhls%2F4294a720-f263-4ea4-9392-cf9cea4d5277%2Fb2313ae6-da36-4fe3-bec5-aa352824a38a-360-fragmented.mp4", - "height": 360 - }, - { - "type": "Link", - "mediaType": 
"video/mp4", - "href": "https://framatube.org/static/streaming-playlists/hls/4294a720-f263-4ea4-9392-cf9cea4d5277/06a866f2-0527-4d68-93b7-c656d7374e86-240-fragmented.mp4", - "height": 240, - "size": 4861464, - "fps": 24 - }, - { - "type": "Link", - "rel": ["metadata", "video/mp4"], - "mediaType": "application/json", - "href": "https://framatube.org/api/v1/videos/4294a720-f263-4ea4-9392-cf9cea4d5277/metadata/1421497", - "height": 240, - "fps": 24 - }, - { - "type": "Link", - "mediaType": "application/x-bittorrent", - "href": "https://framatube.org/lazy-static/torrents/072001ee-18ad-4859-af10-9d7bf12d640c-240-hls.torrent", - "height": 240 - }, - { - "type": "Link", - "mediaType": "application/x-bittorrent;x-scheme-handler/magnet", - "href": "magnet:?xs=https%3A%2F%2Fframatube.org%2Flazy-static%2Ftorrents%2F072001ee-18ad-4859-af10-9d7bf12d640c-240-hls.torrent&xt=urn:btih:b823f54d8cd73f9d7a55266ce683f43bf772d26a&dn=What+is+the+Fediverse%3F&tr=https%3A%2F%2Fframatube.org%2Ftracker%2Fannounce&tr=wss%3A%2F%2Fframatube.org%3A443%2Ftracker%2Fsocket&ws=https%3A%2F%2Fframatube.org%2Fstatic%2Fstreaming-playlists%2Fhls%2F4294a720-f263-4ea4-9392-cf9cea4d5277%2F06a866f2-0527-4d68-93b7-c656d7374e86-240-fragmented.mp4", - "height": 240 - }, - { - "type": "Link", - "mediaType": "video/mp4", - "href": "https://framatube.org/static/streaming-playlists/hls/4294a720-f263-4ea4-9392-cf9cea4d5277/f8a1caed-057f-4700-a28e-004efc158b15-0-fragmented.mp4", - "height": 0, - "size": 3141179, - "fps": 0 - }, - { - "type": "Link", - "rel": ["metadata", "video/mp4"], - "mediaType": "application/json", - "href": "https://framatube.org/api/v1/videos/4294a720-f263-4ea4-9392-cf9cea4d5277/metadata/1421493", - "height": 0, - "fps": 0 - }, - { - "type": "Link", - "mediaType": "application/x-bittorrent", - "href": "https://framatube.org/lazy-static/torrents/77cb6940-7e90-48d1-a391-bfa463b9600c-0-hls.torrent", - "height": 0 - }, - { - "type": "Link", - "mediaType": "application/x-bittorrent;x-scheme-handler/magnet", - "href": "magnet:?xs=https%3A%2F%2Fframatube.org%2Flazy-static%2Ftorrents%2F77cb6940-7e90-48d1-a391-bfa463b9600c-0-hls.torrent&xt=urn:btih:9bc7717ed01869507041e31a7e65baffa78ba651&dn=What+is+the+Fediverse%3F&tr=https%3A%2F%2Fframatube.org%2Ftracker%2Fannounce&tr=wss%3A%2F%2Fframatube.org%3A443%2Ftracker%2Fsocket&ws=https%3A%2F%2Fframatube.org%2Fstatic%2Fstreaming-playlists%2Fhls%2F4294a720-f263-4ea4-9392-cf9cea4d5277%2Ff8a1caed-057f-4700-a28e-004efc158b15-0-fragmented.mp4", - "height": 0 - } - ] - }, - { - "type": "Link", - "name": "tracker-http", - "rel": ["tracker", "http"], - "href": "https://framatube.org/tracker/announce" - }, - { - "type": "Link", - "name": "tracker-websocket", - "rel": ["tracker", "websocket"], - "href": "wss://framatube.org:443/tracker/socket" - } - ], - "likes": "https://framatube.org/videos/watch/4294a720-f263-4ea4-9392-cf9cea4d5277/likes", - "dislikes": "https://framatube.org/videos/watch/4294a720-f263-4ea4-9392-cf9cea4d5277/dislikes", - "shares": "https://framatube.org/videos/watch/4294a720-f263-4ea4-9392-cf9cea4d5277/announces", - "comments": "https://framatube.org/videos/watch/4294a720-f263-4ea4-9392-cf9cea4d5277/comments", - "attributedTo": [ - { - "type": "Person", - "id": "https://framatube.org/accounts/framasoft" - }, - { - "type": "Group", - "id": "https://framatube.org/video-channels/joinpeertube" - } - ], - "to": ["https://www.w3.org/ns/activitystreams#Public"], - "cc": ["https://framatube.org/accounts/framasoft/followers"], "@context": [ "https://www.w3.org/ns/activitystreams", 
"https://w3id.org/security/v1", @@ -375,7 +7,7 @@ }, { "pt": "https://joinpeertube.org/ns#", - "sc": "http://schema.org#", + "sc": "http://schema.org/", "Hashtag": "as:Hashtag", "uuid": "sc:identifier", "category": "sc:category", @@ -383,6 +15,7 @@ "subtitleLanguage": "sc:subtitleLanguage", "sensitive": "as:sensitive", "language": "sc:inLanguage", + "identifier": "sc:identifier", "isLiveBroadcast": "sc:isLiveBroadcast", "liveSaveReplay": { "@type": "sc:Boolean", @@ -392,10 +25,26 @@ "@type": "sc:Boolean", "@id": "pt:permanentLive" }, + "latencyMode": { + "@type": "sc:Number", + "@id": "pt:latencyMode" + }, "Infohash": "pt:Infohash", - "Playlist": "pt:Playlist", - "PlaylistElement": "pt:PlaylistElement", + "tileWidth": { + "@type": "sc:Number", + "@id": "pt:tileWidth" + }, + "tileHeight": { + "@type": "sc:Number", + "@id": "pt:tileHeight" + }, + "tileDuration": { + "@type": "sc:Number", + "@id": "pt:tileDuration" + }, "originallyPublishedAt": "sc:datePublished", + "uploadDate": "sc:uploadDate", + "hasParts": "sc:hasParts", "views": { "@type": "sc:Number", "@id": "pt:views" @@ -412,18 +61,6 @@ "@type": "sc:Number", "@id": "pt:fps" }, - "startTimestamp": { - "@type": "sc:Number", - "@id": "pt:startTimestamp" - }, - "stopTimestamp": { - "@type": "sc:Number", - "@id": "pt:stopTimestamp" - }, - "position": { - "@type": "sc:Number", - "@id": "pt:position" - }, "commentsEnabled": { "@type": "sc:Boolean", "@id": "pt:commentsEnabled" @@ -448,10 +85,6 @@ "@id": "as:dislikes", "@type": "@id" }, - "playlists": { - "@id": "pt:playlists", - "@type": "@id" - }, "shares": { "@id": "as:shares", "@type": "@id" @@ -461,5 +94,348 @@ "@type": "@id" } } - ] + ], + "to": ["https://www.w3.org/ns/activitystreams#Public"], + "cc": ["https://peertube.stream/accounts/createurs/followers"], + "type": "Video", + "id": "https://peertube.stream/videos/watch/46cc7342-fdd5-4583-ae16-2eeb340d3b60", + "name": "VU du 12/12/23 : Démission \"refrusée\"", + "duration": "PT383S", + "uuid": "46cc7342-fdd5-4583-ae16-2eeb340d3b60", + "category": { + "identifier": "11", + "name": "News & Politics" + }, + "views": 83, + "sensitive": false, + "waitTranscoding": true, + "state": 1, + "commentsEnabled": true, + "downloadEnabled": true, + "published": "2023-12-12T17:02:02.188Z", + "originallyPublishedAt": "2023-12-11T23:00:00.000Z", + "updated": "2023-12-14T06:40:34.279Z", + "tag": [ + { + "type": "Hashtag", + "name": "France3" + }, + { + "type": "Hashtag", + "name": "lezapping" + } + ], + "mediaType": "text/markdown", + "content": "Un regard impertinent et libre, orchestré par Patrick Menais et son équipe, sur le monde de l’image.\n\nEn avant-première du lundi au samedi à17h00 sur Facebook, Twitter et YouTube.\n\nDu lundi au samedi à 20h00 sur France 5.\n\nhttps://www.facebook.com/vufrancetv\nhttps://twitter.com/VuFrancetv", + "support": null, + "subtitleLanguage": [], + "icon": [ + { + "type": "Image", + "url": "https://peertube.stream/lazy-static/thumbnails/208d2248-6fa3-4a58-a2e6-c6f176559457.jpg", + "mediaType": "image/jpeg", + "width": 280, + "height": 157 + }, + { + "type": "Image", + "url": "https://peertube.stream/lazy-static/previews/73d34e91-0233-443b-a1c3-d98a7ec6a87c.jpg", + "mediaType": "image/jpeg", + "width": 850, + "height": 480 + } + ], + "preview": [ + { + "type": "Image", + "rel": ["storyboard"], + "url": [ + { + "mediaType": "image/jpeg", + "href": "https://peertube.stream/lazy-static/storyboards/fb103d5f-8f76-4c8b-bc81-f952961cacfd.jpg", + "width": 1920, + "height": 1080, + "tileWidth": 192, + "tileHeight": 108, + 
"tileDuration": "PT4S" + } + ] + } + ], + "url": [ + { + "type": "Link", + "mediaType": "text/html", + "href": "https://peertube.stream/videos/watch/46cc7342-fdd5-4583-ae16-2eeb340d3b60" + }, + { + "type": "Link", + "mediaType": "application/x-mpegURL", + "href": "https://peertube.stream/static/streaming-playlists/hls/46cc7342-fdd5-4583-ae16-2eeb340d3b60/7847c00b-17f0-4cd9-b788-94283bd96d5b-master.m3u8", + "tag": [ + { + "type": "Infohash", + "name": "f50d9a3e851756a1fc1da7fe8b6e40f849c1f3a1" + }, + { + "type": "Infohash", + "name": "fdddadfcf01c52808a5716ac9c0f09e379a1ca69" + }, + { + "type": "Infohash", + "name": "c309597f071c6ab59e1a6935be3dc1ceb58c9250" + }, + { + "type": "Infohash", + "name": "5c28ed3e05102a678dc047a126650fe53d45ded4" + }, + { + "type": "Infohash", + "name": "085f2c72c69af02913177534ec601349ca2b4f01" + }, + { + "type": "Infohash", + "name": "37b9dbeab6f433e94f80a614f888e9a1e9ee3534" + }, + { + "type": "Infohash", + "name": "cc15513891e63a92743730ba65ab256f8825f071" + }, + { + "type": "Link", + "name": "sha256", + "mediaType": "application/json", + "href": "https://peertube.stream/static/streaming-playlists/hls/46cc7342-fdd5-4583-ae16-2eeb340d3b60/a3f5af94-ba6b-4349-a4b0-151cebdf9af6-segments-sha256.json" + }, + { + "type": "Link", + "mediaType": "video/mp4", + "href": "https://peertube.stream/static/streaming-playlists/hls/46cc7342-fdd5-4583-ae16-2eeb340d3b60/5a3db28f-a4b2-49ae-963e-7fd9414efe7c-1080-fragmented.mp4", + "height": 1080, + "size": 90186372, + "fps": 25 + }, + { + "type": "Link", + "rel": ["metadata", "video/mp4"], + "mediaType": "application/json", + "href": "https://peertube.stream/api/v1/videos/46cc7342-fdd5-4583-ae16-2eeb340d3b60/metadata/1570438", + "height": 1080, + "fps": 25 + }, + { + "type": "Link", + "mediaType": "application/x-bittorrent", + "href": "https://peertube.stream/lazy-static/torrents/c3dd78f2-ff9b-41f1-899d-55440f512e09-1080-hls.torrent", + "height": 1080 + }, + { + "type": "Link", + "mediaType": "application/x-bittorrent;x-scheme-handler/magnet", + "href": "magnet:?xs=https%3A%2F%2Fpeertube.stream%2Flazy-static%2Ftorrents%2Fc3dd78f2-ff9b-41f1-899d-55440f512e09-1080-hls.torrent&xt=urn:btih:944323d8a38e077cdea5c1b1aa82300d1f49076a&dn=VU+du+12%2F12%2F23+%3A+D%C3%A9mission+%22refrus%C3%A9e%22&tr=https%3A%2F%2Fpeertube.stream%2Ftracker%2Fannounce&tr=wss%3A%2F%2Fpeertube.stream%3A443%2Ftracker%2Fsocket&ws=https%3A%2F%2Fpeertube.stream%2Fstatic%2Fstreaming-playlists%2Fhls%2F46cc7342-fdd5-4583-ae16-2eeb340d3b60%2F5a3db28f-a4b2-49ae-963e-7fd9414efe7c-1080-fragmented.mp4", + "height": 1080 + }, + { + "type": "Link", + "mediaType": "video/mp4", + "href": "https://peertube.stream/static/streaming-playlists/hls/46cc7342-fdd5-4583-ae16-2eeb340d3b60/557f45f0-60b7-418c-bddd-e55701b387bb-720-fragmented.mp4", + "height": 720, + "size": 50950797, + "fps": 25 + }, + { + "type": "Link", + "rel": ["metadata", "video/mp4"], + "mediaType": "application/json", + "href": "https://peertube.stream/api/v1/videos/46cc7342-fdd5-4583-ae16-2eeb340d3b60/metadata/1570447", + "height": 720, + "fps": 25 + }, + { + "type": "Link", + "mediaType": "application/x-bittorrent", + "href": "https://peertube.stream/lazy-static/torrents/0529c736-0c49-4efd-a9ff-c4989b4c2071-720-hls.torrent", + "height": 720 + }, + { + "type": "Link", + "mediaType": "application/x-bittorrent;x-scheme-handler/magnet", + "href": 
"magnet:?xs=https%3A%2F%2Fpeertube.stream%2Flazy-static%2Ftorrents%2F0529c736-0c49-4efd-a9ff-c4989b4c2071-720-hls.torrent&xt=urn:btih:a2662d0714edf3882193f782814441eb904460be&dn=VU+du+12%2F12%2F23+%3A+D%C3%A9mission+%22refrus%C3%A9e%22&tr=https%3A%2F%2Fpeertube.stream%2Ftracker%2Fannounce&tr=wss%3A%2F%2Fpeertube.stream%3A443%2Ftracker%2Fsocket&ws=https%3A%2F%2Fpeertube.stream%2Fstatic%2Fstreaming-playlists%2Fhls%2F46cc7342-fdd5-4583-ae16-2eeb340d3b60%2F557f45f0-60b7-418c-bddd-e55701b387bb-720-fragmented.mp4", + "height": 720 + }, + { + "type": "Link", + "mediaType": "video/mp4", + "href": "https://peertube.stream/static/streaming-playlists/hls/46cc7342-fdd5-4583-ae16-2eeb340d3b60/097e6338-4c6e-4c21-8fed-7df0a245c9b3-480-fragmented.mp4", + "height": 480, + "size": 31542462, + "fps": 25 + }, + { + "type": "Link", + "rel": ["metadata", "video/mp4"], + "mediaType": "application/json", + "href": "https://peertube.stream/api/v1/videos/46cc7342-fdd5-4583-ae16-2eeb340d3b60/metadata/1570441", + "height": 480, + "fps": 25 + }, + { + "type": "Link", + "mediaType": "application/x-bittorrent", + "href": "https://peertube.stream/lazy-static/torrents/56b47f85-b2de-44b1-9089-db13c8534e1c-480-hls.torrent", + "height": 480 + }, + { + "type": "Link", + "mediaType": "application/x-bittorrent;x-scheme-handler/magnet", + "href": "magnet:?xs=https%3A%2F%2Fpeertube.stream%2Flazy-static%2Ftorrents%2F56b47f85-b2de-44b1-9089-db13c8534e1c-480-hls.torrent&xt=urn:btih:9d1cc84a448ba531d2f5422a8910fd79580768ff&dn=VU+du+12%2F12%2F23+%3A+D%C3%A9mission+%22refrus%C3%A9e%22&tr=https%3A%2F%2Fpeertube.stream%2Ftracker%2Fannounce&tr=wss%3A%2F%2Fpeertube.stream%3A443%2Ftracker%2Fsocket&ws=https%3A%2F%2Fpeertube.stream%2Fstatic%2Fstreaming-playlists%2Fhls%2F46cc7342-fdd5-4583-ae16-2eeb340d3b60%2F097e6338-4c6e-4c21-8fed-7df0a245c9b3-480-fragmented.mp4", + "height": 480 + }, + { + "type": "Link", + "mediaType": "video/mp4", + "href": "https://peertube.stream/static/streaming-playlists/hls/46cc7342-fdd5-4583-ae16-2eeb340d3b60/b6db1f0c-0b6f-4f26-b811-d38631f4c42b-360-fragmented.mp4", + "height": 360, + "size": 23389554, + "fps": 25 + }, + { + "type": "Link", + "rel": ["metadata", "video/mp4"], + "mediaType": "application/json", + "href": "https://peertube.stream/api/v1/videos/46cc7342-fdd5-4583-ae16-2eeb340d3b60/metadata/1570442", + "height": 360, + "fps": 25 + }, + { + "type": "Link", + "mediaType": "application/x-bittorrent", + "href": "https://peertube.stream/lazy-static/torrents/89df203a-586e-4d09-b645-21c321ae81c2-360-hls.torrent", + "height": 360 + }, + { + "type": "Link", + "mediaType": "application/x-bittorrent;x-scheme-handler/magnet", + "href": "magnet:?xs=https%3A%2F%2Fpeertube.stream%2Flazy-static%2Ftorrents%2F89df203a-586e-4d09-b645-21c321ae81c2-360-hls.torrent&xt=urn:btih:40dbe1b6fb96d87d0750b32b26fd52913f22c84e&dn=VU+du+12%2F12%2F23+%3A+D%C3%A9mission+%22refrus%C3%A9e%22&tr=https%3A%2F%2Fpeertube.stream%2Ftracker%2Fannounce&tr=wss%3A%2F%2Fpeertube.stream%3A443%2Ftracker%2Fsocket&ws=https%3A%2F%2Fpeertube.stream%2Fstatic%2Fstreaming-playlists%2Fhls%2F46cc7342-fdd5-4583-ae16-2eeb340d3b60%2Fb6db1f0c-0b6f-4f26-b811-d38631f4c42b-360-fragmented.mp4", + "height": 360 + }, + { + "type": "Link", + "mediaType": "video/mp4", + "href": "https://peertube.stream/static/streaming-playlists/hls/46cc7342-fdd5-4583-ae16-2eeb340d3b60/d0d23e04-a7b2-47f9-8072-94a06dc0c402-240-fragmented.mp4", + "height": 240, + "size": 16040535, + "fps": 25 + }, + { + "type": "Link", + "rel": ["metadata", "video/mp4"], + "mediaType": "application/json", + 
"href": "https://peertube.stream/api/v1/videos/46cc7342-fdd5-4583-ae16-2eeb340d3b60/metadata/1570448", + "height": 240, + "fps": 25 + }, + { + "type": "Link", + "mediaType": "application/x-bittorrent", + "href": "https://peertube.stream/lazy-static/torrents/29c43d5c-b26f-404c-a286-7aff2e2bb139-240-hls.torrent", + "height": 240 + }, + { + "type": "Link", + "mediaType": "application/x-bittorrent;x-scheme-handler/magnet", + "href": "magnet:?xs=https%3A%2F%2Fpeertube.stream%2Flazy-static%2Ftorrents%2F29c43d5c-b26f-404c-a286-7aff2e2bb139-240-hls.torrent&xt=urn:btih:f3f102c22d48b8a0aec19be463d8f04fb3a3f499&dn=VU+du+12%2F12%2F23+%3A+D%C3%A9mission+%22refrus%C3%A9e%22&tr=https%3A%2F%2Fpeertube.stream%2Ftracker%2Fannounce&tr=wss%3A%2F%2Fpeertube.stream%3A443%2Ftracker%2Fsocket&ws=https%3A%2F%2Fpeertube.stream%2Fstatic%2Fstreaming-playlists%2Fhls%2F46cc7342-fdd5-4583-ae16-2eeb340d3b60%2Fd0d23e04-a7b2-47f9-8072-94a06dc0c402-240-fragmented.mp4", + "height": 240 + }, + { + "type": "Link", + "mediaType": "video/mp4", + "href": "https://peertube.stream/static/streaming-playlists/hls/46cc7342-fdd5-4583-ae16-2eeb340d3b60/6f3b1939-67c4-45f0-bd93-2508721dda69-144-fragmented.mp4", + "height": 144, + "size": 10969421, + "fps": 25 + }, + { + "type": "Link", + "rel": ["metadata", "video/mp4"], + "mediaType": "application/json", + "href": "https://peertube.stream/api/v1/videos/46cc7342-fdd5-4583-ae16-2eeb340d3b60/metadata/1570449", + "height": 144, + "fps": 25 + }, + { + "type": "Link", + "mediaType": "application/x-bittorrent", + "href": "https://peertube.stream/lazy-static/torrents/e39095d9-8fa2-4543-a66f-b4b9d6165a4e-144-hls.torrent", + "height": 144 + }, + { + "type": "Link", + "mediaType": "application/x-bittorrent;x-scheme-handler/magnet", + "href": "magnet:?xs=https%3A%2F%2Fpeertube.stream%2Flazy-static%2Ftorrents%2Fe39095d9-8fa2-4543-a66f-b4b9d6165a4e-144-hls.torrent&xt=urn:btih:8b263d7e814d611597a36dcd9655d959c86605a4&dn=VU+du+12%2F12%2F23+%3A+D%C3%A9mission+%22refrus%C3%A9e%22&tr=https%3A%2F%2Fpeertube.stream%2Ftracker%2Fannounce&tr=wss%3A%2F%2Fpeertube.stream%3A443%2Ftracker%2Fsocket&ws=https%3A%2F%2Fpeertube.stream%2Fstatic%2Fstreaming-playlists%2Fhls%2F46cc7342-fdd5-4583-ae16-2eeb340d3b60%2F6f3b1939-67c4-45f0-bd93-2508721dda69-144-fragmented.mp4", + "height": 144 + }, + { + "type": "Link", + "mediaType": "video/mp4", + "href": "https://peertube.stream/static/streaming-playlists/hls/46cc7342-fdd5-4583-ae16-2eeb340d3b60/86ab6cca-46e5-4c6e-9c2c-8aef803b85f2-0-fragmented.mp4", + "height": 0, + "size": 6074306, + "fps": 0 + }, + { + "type": "Link", + "rel": ["metadata", "video/mp4"], + "mediaType": "application/json", + "href": "https://peertube.stream/api/v1/videos/46cc7342-fdd5-4583-ae16-2eeb340d3b60/metadata/1570439", + "height": 0, + "fps": 0 + }, + { + "type": "Link", + "mediaType": "application/x-bittorrent", + "href": "https://peertube.stream/lazy-static/torrents/25ae194d-c3ec-412a-886f-3b0d02599ca7-0-hls.torrent", + "height": 0 + }, + { + "type": "Link", + "mediaType": "application/x-bittorrent;x-scheme-handler/magnet", + "href": 
"magnet:?xs=https%3A%2F%2Fpeertube.stream%2Flazy-static%2Ftorrents%2F25ae194d-c3ec-412a-886f-3b0d02599ca7-0-hls.torrent&xt=urn:btih:e4458f2445732a228e9a83e2ae53a103f5e1097e&dn=VU+du+12%2F12%2F23+%3A+D%C3%A9mission+%22refrus%C3%A9e%22&tr=https%3A%2F%2Fpeertube.stream%2Ftracker%2Fannounce&tr=wss%3A%2F%2Fpeertube.stream%3A443%2Ftracker%2Fsocket&ws=https%3A%2F%2Fpeertube.stream%2Fstatic%2Fstreaming-playlists%2Fhls%2F46cc7342-fdd5-4583-ae16-2eeb340d3b60%2F86ab6cca-46e5-4c6e-9c2c-8aef803b85f2-0-fragmented.mp4", + "height": 0 + } + ] + }, + { + "type": "Link", + "name": "tracker-http", + "rel": ["tracker", "http"], + "href": "https://peertube.stream/tracker/announce" + }, + { + "type": "Link", + "name": "tracker-websocket", + "rel": ["tracker", "websocket"], + "href": "wss://peertube.stream:443/tracker/socket" + } + ], + "likes": "https://peertube.stream/videos/watch/46cc7342-fdd5-4583-ae16-2eeb340d3b60/likes", + "dislikes": "https://peertube.stream/videos/watch/46cc7342-fdd5-4583-ae16-2eeb340d3b60/dislikes", + "shares": "https://peertube.stream/videos/watch/46cc7342-fdd5-4583-ae16-2eeb340d3b60/announces", + "comments": "https://peertube.stream/videos/watch/46cc7342-fdd5-4583-ae16-2eeb340d3b60/comments", + "hasParts": "https://peertube.stream/videos/watch/46cc7342-fdd5-4583-ae16-2eeb340d3b60/chapters", + "attributedTo": [ + { + "type": "Person", + "id": "https://peertube.stream/accounts/createurs" + }, + { + "type": "Group", + "id": "https://peertube.stream/video-channels/vu" + } + ], + "isLiveBroadcast": false, + "liveSaveReplay": null, + "permanentLive": null, + "latencyMode": null, + "peertubeLiveChat": false } diff --git a/crates/apub/assets/pleroma/objects/note.json b/crates/apub/assets/pleroma/objects/note.json index ff4b20d25..af61ff46e 100644 --- a/crates/apub/assets/pleroma/objects/note.json +++ b/crates/apub/assets/pleroma/objects/note.json @@ -10,7 +10,7 @@ "attachment": [], "attributedTo": "https://queer.hacktivis.me/users/lanodan", "cc": ["https://www.w3.org/ns/activitystreams#Public"], - "content": "@popolon Have what?", + "content": "Have what?", "context": "https://queer.hacktivis.me/contexts/34cba3d2-2f35-4169-aeff-56af9bfeb753", "conversation": "https://queer.hacktivis.me/contexts/34cba3d2-2f35-4169-aeff-56af9bfeb753", "id": "https://queer.hacktivis.me/objects/8d4973f4-53de-49cd-8c27-df160e16a9c2", diff --git a/crates/apub/assets/pleroma/objects/person.json b/crates/apub/assets/pleroma/objects/person.json index bc9008bab..fff9a2cba 100644 --- a/crates/apub/assets/pleroma/objects/person.json +++ b/crates/apub/assets/pleroma/objects/person.json @@ -41,7 +41,7 @@ "owner": "https://queer.hacktivis.me/users/lanodan", "publicKeyPem": "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsWOgdjSMc010qvxC3njI\nXJlFWMJ5gJ8QXCW/PajYdsHPM6d+jxBNJ6zp9/tIRa2m7bWHTSkuHQ7QthOpt6vu\n+dAWpKRLS607SPLItn/qUcyXvgN+H8shfyhMxvkVs9jXdtlBsLUVE7UNpN0dxzqe\nI79QWbf7o4amgaIWGRYB+OYMnIxKt+GzIkivZdSVSYjfxNnBYkMCeUxm5EpPIxKS\nP5bBHAVRRambD5NUmyKILuC60/rYuc/C+vmgpY2HCWFS2q6o34dPr9enwL6t4b3m\nS1t/EJHk9rGaaDqSGkDEfyQI83/7SDebWKuETMKKFLZi1vMgQIFuOYCIhN6bIiZm\npQIDAQAB\n-----END PUBLIC KEY-----\n\n" }, - "summary": "---
Website: https://hacktivis.me/
Lang: Français(natif), English(fluent), LSF(🤏~👌), русский (еле-еле),
Politics: Anarchist as in DIY/DIWO, freedom of association, anti-authoritarian, anti-identitarianism

Pronouns: meh, pick any, have fun
Timezone: Let's say Mars, I have a non-24h cycle
```
🦊🦄⚧🂡ⓥ :anarchy: 👿🐧 :gentoo:
Pleroma maintainer (mostly backend)
BadWolf developer
Gentoo contributor

Dayjob: yogoko.fr

That person which uses HJKL in games

Just because computer bad: X5O!P%@AP[4\\PZX54(P^)7CC)7}$EICAR-STANDARD-ANTIVIRUS-TEST-FILE!$H+H*

banner from: https://soc.flyingcube.tech/objects/56f79be2-9013-4559-9826-f7dc392417db
Federation-bots: #nobot", + "summary": "---Lang: Français(natif), English(fluent), LSF(🤏~👌), русский (еле-еле),
Politics: Anarchist as in DIY/DIWO, freedom of association, anti-authoritarian, anti-identitarianism

Pronouns: meh, pick any, have fun
Timezone: Let's say Mars, I have a non-24h cycle
```
🦊🦄⚧🂡ⓥ :anarchy: 👿🐧 :gentoo:
Pleroma maintainer (mostly backend)
BadWolf developer
Gentoo contributor

Dayjob: yogoko.fr

That person which uses HJKL in games

Just because computer bad: X5O!P%@AP[4\\PZX54(P^)7CC)7}$EICAR-STANDARD-ANTIVIRUS-TEST-FILE!$H+H*

banner from: https://soc.flyingcube.tech/objects/56f79be2-9013-4559-9826-f7dc392417db
Federation-bots: #nobot", "tag": [ { "icon": { diff --git a/crates/apub/assets/wordpress/activities/announce.json b/crates/apub/assets/wordpress/activities/announce.json new file mode 100644 index 000000000..985dec307 --- /dev/null +++ b/crates/apub/assets/wordpress/activities/announce.json @@ -0,0 +1,49 @@ +{ + "@context": ["https://www.w3.org/ns/activitystreams"], + "id": "https://pfefferle.org/lemmy-part-4/#activity#activity", + "type": "Announce", + "audience": "https://pfefferle.org/@pfefferle.org", + "published": "2024-05-03T12:32:29Z", + "updated": "2024-05-06T08:20:33Z", + "to": [ + "https://www.w3.org/ns/activitystreams#Public", + "https://pfefferle.org/wp-json/activitypub/1.0/actors/1/followers" + ], + "cc": [], + "object": { + "id": "https://pfefferle.org/lemmy-part-4/#activity", + "type": "Update", + "audience": "https://pfefferle.org/@pfefferle.org", + "published": "2024-05-03T12:32:29Z", + "updated": "2024-05-06T08:20:33Z", + "to": [ + "https://www.w3.org/ns/activitystreams#Public", + "https://pfefferle.org/wp-json/activitypub/1.0/actors/1/followers" + ], + "cc": [], + "object": { + "id": "https://pfefferle.org/lemmy-part-4/", + "type": "Article", + "attachment": [], + "attributedTo": "https://pfefferle.org/author/pfefferle/", + "audience": "https://pfefferle.org/@pfefferle.org", + "content": "\u003Cp\u003EIdentifies one or more entities that represent the total population of entities for which the object can considered to be relevant. Identifies one or more entities that represent the total population of entities for which the object can considered to be relevant.Identifies one or more entities that represent the total population of entities for which the object can considered to be relevant.Identifies one or more entities that represent the total population of entities for which the object can considered to be relevant.Identifies one or more entities that represent the total population of entities for which the object can considered to be relevant.Identifies one or more entities that represent the total population of entities for which the object can considered to be relevant.Identifies one or more entities that represent the total population of entities for which the object can considered to be relevant. \u003C/p\u003E", + "contentMap": { + "en": "\u003Cp\u003EIdentifies one or more entities that represent the total population of entities for which the object can considered to be relevant. Identifies one or more entities that represent the total population of entities for which the object can considered to be relevant.Identifies one or more entities that represent the total population of entities for which the object can considered to be relevant.Identifies one or more entities that represent the total population of entities for which the object can considered to be relevant.Identifies one or more entities that represent the total population of entities for which the object can considered to be relevant.Identifies one or more entities that represent the total population of entities for which the object can considered to be relevant.Identifies one or more entities that represent the total population of entities for which the object can considered to be relevant. \u003C/p\u003E" + }, + "name": "Lemmy (Part 4)", + "published": "2024-05-03T12:32:29Z", + "summary": "Identifies one or more entities that represent the total population of entities for which the object can considered to be relevant. 
Identifies one or more entities that represent the total population of entities for which the object can considered to be relevant.Identifies one or more entities that represent the total population of entities for which the object can considered to be relevant.Identifies one or more entities that represent the total population of entities for which the object [...]", + "tag": [], + "updated": "2024-05-06T08:20:33Z", + "url": "https://pfefferle.org/lemmy-part-4/", + "to": [ + "https://www.w3.org/ns/activitystreams#Public", + "https://pfefferle.org/wp-json/activitypub/1.0/actors/1/followers" + ], + "cc": [] + }, + "actor": "https://pfefferle.org/author/pfefferle/" + }, + "actor": "https://pfefferle.org/@pfefferle.org" +} diff --git a/crates/apub/assets/wordpress/objects/group.json b/crates/apub/assets/wordpress/objects/group.json new file mode 100644 index 000000000..35f2af0c4 --- /dev/null +++ b/crates/apub/assets/wordpress/objects/group.json @@ -0,0 +1,66 @@ +{ + "@context": [ + "https://www.w3.org/ns/activitystreams", + "https://w3id.org/security/v1", + "https://purl.archive.org/socialweb/webfinger", + { + "schema": "http://schema.org#", + "toot": "http://joinmastodon.org/ns#", + "webfinger": "https://webfinger.net/#", + "lemmy": "https://join-lemmy.org/ns#", + "manuallyApprovesFollowers": "as:manuallyApprovesFollowers", + "PropertyValue": "schema:PropertyValue", + "value": "schema:value", + "Hashtag": "as:Hashtag", + "featured": { + "@id": "toot:featured", + "@type": "@id" + }, + "featuredTags": { + "@id": "toot:featuredTags", + "@type": "@id" + }, + "moderators": { + "@id": "lemmy:moderators", + "@type": "@id" + }, + "postingRestrictedToMods": "lemmy:postingRestrictedToMods", + "discoverable": "toot:discoverable", + "indexable": "toot:indexable", + "resource": "webfinger:resource" + } + ], + "id": "https://pfefferle.org/@pfefferle.org", + "type": "Group", + "attachment": [], + "attributedTo": "https://pfefferle.org/wp-json/activitypub/1.0/collections/moderators", + "name": "Matthias Pfefferle", + "icon": { + "type": "Image", + "url": "https://pfefferle.org/wp-content/uploads/2023/06/cropped-BeLItBV-_400x400.jpg" + }, + "published": "2024-04-03T16:58:22Z", + "summary": "
Webworker, blogger und podcaster
\n", + "tag": [], + "url": "https://pfefferle.org/@pfefferle.org", + "inbox": "https://pfefferle.org/wp-json/activitypub/1.0/users/0/inbox", + "outbox": "https://pfefferle.org/wp-json/activitypub/1.0/users/0/outbox", + "following": "https://pfefferle.org/wp-json/activitypub/1.0/users/0/following", + "followers": "https://pfefferle.org/wp-json/activitypub/1.0/users/0/followers", + "preferredUsername": "pfefferle.org", + "endpoints": { + "sharedInbox": "https://pfefferle.org/wp-json/activitypub/1.0/inbox" + }, + "publicKey": { + "id": "https://pfefferle.org/@pfefferle.org#main-key", + "owner": "https://pfefferle.org/@pfefferle.org", + "publicKeyPem": "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAuq8xeLMFcaCwPFBhgMRE\n/dDh2XKoNXFXnixctmK8BXSuuLMxucm3I/8NyhIvb3LqU+uP1fO8F0ecUbk2sN+x\nKag5vIV6yKXzJ8ILMWQ9AaELpXDmMZqL0zal0LUJRAOkDgPDovDAoq6tx++yDoV0\njdVbf9CoZKit1cz2ZrEuE5dswq3J/z9+c6POkhCkWEX5TPJzkOrmnjkvrXxGHUJ2\nA3+P+VaZhd5cmvqYosSpYNJshxCdev12pIF78OnYLiYiyXlgGHU+7uQR0M4tTcij\n6cUdLkms9m+b6H3ctXntPn410e5YLFPldjAYzQB5wHVdFZsWtyrbqfYdCa+KkKpA\nvwIDAQAB\n-----END PUBLIC KEY-----\n" + }, + "manuallyApprovesFollowers": false, + "featured": "https://pfefferle.org/wp-json/activitypub/1.0/users/0/collections/featured", + "moderators": "https://pfefferle.org/wp-json/activitypub/1.0/collections/moderators", + "discoverable": true, + "indexable": true, + "webfinger": "pfefferle.org@pfefferle.org", + "postingRestrictedToMods": true +} diff --git a/crates/apub/assets/wordpress/objects/note.json b/crates/apub/assets/wordpress/objects/note.json new file mode 100644 index 000000000..5b4b24da0 --- /dev/null +++ b/crates/apub/assets/wordpress/objects/note.json @@ -0,0 +1,24 @@ +{ + "@context": [ + "https://www.w3.org/ns/activitystreams", + { + "Hashtag": "as:Hashtag" + } + ], + "id": "https://pfefferle.org?c=148", + "type": "Note", + "attributedTo": "https://pfefferle.org/author/pfefferle/", + "content": "
Nice! Hello from WordPress!
", + "contentMap": { + "en": "
Nice! Hello from WordPress!
" + }, + "inReplyTo": "https://socialhub.activitypub.rocks/ap/object/ce040f1ead95964f6dbbf1084b81432d", + "published": "2024-04-30T15:21:13Z", + "tag": [], + "url": "https://pfefferle.org?c=148", + "to": [ + "https://www.w3.org/ns/activitystreams#Public", + "https://pfefferle.org/wp-json/activitypub/1.0/users/0/followers" + ], + "cc": [] +} diff --git a/crates/apub/assets/wordpress/objects/page.json b/crates/apub/assets/wordpress/objects/page.json new file mode 100644 index 000000000..1e8f4be3c --- /dev/null +++ b/crates/apub/assets/wordpress/objects/page.json @@ -0,0 +1,26 @@ +{ + "@context": [ + "https://www.w3.org/ns/activitystreams", + { + "Hashtag": "as:Hashtag" + } + ], + "id": "https://pfefferle.org/this-is-a-test-federation/", + "type": "Article", + "attachment": [], + "attributedTo": "https://pfefferle.org/author/pfefferle/", + "content": "
with Discource!
", + "contentMap": { + "en": "
with Discource!
" + }, + "name": "This is a test-federation", + "published": "2024-04-30T15:16:41Z", + "summary": "with Discource! [...]", + "tag": [], + "url": "https://pfefferle.org/this-is-a-test-federation/", + "to": [ + "https://www.w3.org/ns/activitystreams#Public", + "https://pfefferle.org/wp-json/activitypub/1.0/users/1/followers" + ], + "cc": [] +} diff --git a/crates/apub/assets/wordpress/objects/person.json b/crates/apub/assets/wordpress/objects/person.json new file mode 100644 index 000000000..6c74b53bf --- /dev/null +++ b/crates/apub/assets/wordpress/objects/person.json @@ -0,0 +1,74 @@ +{ + "@context": [ + "https://www.w3.org/ns/activitystreams", + "https://w3id.org/security/v1", + "https://purl.archive.org/socialweb/webfinger", + { + "schema": "http://schema.org#", + "toot": "http://joinmastodon.org/ns#", + "webfinger": "https://webfinger.net/#", + "lemmy": "https://join-lemmy.org/ns#", + "manuallyApprovesFollowers": "as:manuallyApprovesFollowers", + "PropertyValue": "schema:PropertyValue", + "value": "schema:value", + "Hashtag": "as:Hashtag", + "featured": { + "@id": "toot:featured", + "@type": "@id" + }, + "featuredTags": { + "@id": "toot:featuredTags", + "@type": "@id" + }, + "moderators": { + "@id": "lemmy:moderators", + "@type": "@id" + }, + "postingRestrictedToMods": "lemmy:postingRestrictedToMods", + "discoverable": "toot:discoverable", + "indexable": "toot:indexable", + "resource": "webfinger:resource" + } + ], + "id": "https://pfefferle.org/author/pfefferle/", + "type": "Person", + "attachment": [ + { + "type": "PropertyValue", + "name": "Blog", + "value": "pfefferle.org" + }, + { + "type": "PropertyValue", + "name": "Profile", + "value": "pfefferle.org" + } + ], + "name": "Matthias Pfefferle", + "icon": { + "type": "Image", + "url": "https://secure.gravatar.com/avatar/a2bdca7870e859658cece96c044b3be5?s=120&d=mm&r=g" + }, + "published": "2014-02-10T15:23:08Z", + "summary": "
Ich arbeite als Open Web Lead für Automattic.
\n", + "tag": [], + "url": "https://pfefferle.org/author/pfefferle/", + "inbox": "https://pfefferle.org/wp-json/activitypub/1.0/users/1/inbox", + "outbox": "https://pfefferle.org/wp-json/activitypub/1.0/users/1/outbox", + "following": "https://pfefferle.org/wp-json/activitypub/1.0/users/1/following", + "followers": "https://pfefferle.org/wp-json/activitypub/1.0/users/1/followers", + "preferredUsername": "matthias", + "endpoints": { + "sharedInbox": "https://pfefferle.org/wp-json/activitypub/1.0/inbox" + }, + "publicKey": { + "id": "https://pfefferle.org/author/pfefferle/#main-key", + "owner": "https://pfefferle.org/author/pfefferle/", + "publicKeyPem": "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAvTA5RA40nOsso04RSwyX\nHXTojRPUMlIlArDcSy3M5GUJp9/xbxSUOdBjqd31KKB1GIi3vrLmD1Qi/ZqS95Qy\nw2Zd3xOsCg+o9bsyOG+O6Y8Lu+HEB5JKLUbNHdiSviakJ8wGadH9Wm4WIiN20y+q\n/u6lgxgiWfZ2CFCN6SOc28fUKi9NmKvXK+M12BhFfy1tC5KWXKDm0UbfI1+dmqhR\n3Ffe6vEsCI/YIVVdWxQ9kouOd0XSHOGdslktkepRO7IP9i9TdwyeCa0WWRoeO5Wa\ntVpc1Y0WuNbTM2ksIXTg0G+rO1/6KO/hrHnGu3RCfb/ZIHK5L/aWYb9B3PG3LyKV\n+wIDAQAB\n-----END PUBLIC KEY-----\n" + }, + "manuallyApprovesFollowers": false, + "featured": "https://pfefferle.org/wp-json/activitypub/1.0/users/1/collections/featured", + "discoverable": true, + "indexable": true, + "webfinger": "matthias@pfefferle.org" +} diff --git a/crates/apub/src/activities/block/block_user.rs b/crates/apub/src/activities/block/block_user.rs index 4469be53e..64d5e7816 100644 --- a/crates/apub/src/activities/block/block_user.rs +++ b/crates/apub/src/activities/block/block_user.rs @@ -23,7 +23,7 @@ use anyhow::anyhow; use chrono::{DateTime, Utc}; use lemmy_api_common::{ context::LemmyContext, - utils::{remove_user_data, remove_user_data_in_community}, + utils::{remove_or_restore_user_data, remove_or_restore_user_data_in_community}, }; use lemmy_db_schema::{ source::{ @@ -39,7 +39,7 @@ use lemmy_db_schema::{ }, traits::{Bannable, Crud, Followable}, }; -use lemmy_utils::error::LemmyError; +use lemmy_utils::error::{FederationError, LemmyError, LemmyResult}; use url::Url; impl BlockUser { @@ -51,7 +51,7 @@ impl BlockUser { reason: Option, expires: Option>, context: &Data, - ) -> Result { + ) -> LemmyResult { let audience = if let SiteOrCommunity::Community(c) = target { Some(c.id().into()) } else { @@ -71,7 +71,7 @@ impl BlockUser { &context.settings().get_protocol_and_hostname(), )?, audience, - expires, + end_time: expires, }) } @@ -84,7 +84,7 @@ impl BlockUser { reason: Option, expires: Option>, context: &Data, - ) -> Result<(), LemmyError> { + ) -> LemmyResult<()> { let block = BlockUser::new( target, user, @@ -124,12 +124,15 @@ impl ActivityHandler for BlockUser { } #[tracing::instrument(skip_all)] - async fn verify(&self, context: &Data) -> Result<(), LemmyError> { - insert_received_activity(&self.id, context).await?; + async fn verify(&self, context: &Data) -> LemmyResult<()> { verify_is_public(&self.to, &self.cc)?; match self.target.dereference(context).await? 
{ SiteOrCommunity::Site(site) => { - let domain = self.object.inner().domain().expect("url needs domain"); + let domain = self + .object + .inner() + .domain() + .ok_or(FederationError::UrlWithoutDomain)?; if context.settings().hostname == domain { return Err( anyhow!("Site bans from remote instance can't affect user's home instance").into(), @@ -148,11 +151,13 @@ impl ActivityHandler for BlockUser { } #[tracing::instrument(skip_all)] - async fn receive(self, context: &Data) -> Result<(), LemmyError> { - let expires = self.expires.map(Into::into); + async fn receive(self, context: &Data) -> LemmyResult<()> { + insert_received_activity(&self.id, context).await?; + let expires = self.end_time.map(Into::into); let mod_person = self.actor.dereference(context).await?; let blocked_person = self.object.dereference(context).await?; let target = self.target.dereference(context).await?; + let reason = self.summary; match target { SiteOrCommunity::Site(_site) => { let blocked_person = Person::update( @@ -166,14 +171,15 @@ impl ActivityHandler for BlockUser { ) .await?; if self.remove_data.unwrap_or(false) { - remove_user_data(blocked_person.id, context).await?; + remove_or_restore_user_data(mod_person.id, blocked_person.id, true, &reason, context) + .await?; } // write mod log let form = ModBanForm { mod_person_id: mod_person.id, other_person_id: blocked_person.id, - reason: self.summary, + reason, banned: Some(true), expires, }; @@ -198,8 +204,15 @@ impl ActivityHandler for BlockUser { .ok(); if self.remove_data.unwrap_or(false) { - remove_user_data_in_community(community.id, blocked_person.id, &mut context.pool()) - .await?; + remove_or_restore_user_data_in_community( + community.id, + mod_person.id, + blocked_person.id, + true, + &reason, + &mut context.pool(), + ) + .await?; } // write to mod log @@ -207,7 +220,7 @@ impl ActivityHandler for BlockUser { mod_person_id: mod_person.id, other_person_id: blocked_person.id, community_id: community.id, - reason: self.summary, + reason, banned: Some(true), expires, }; diff --git a/crates/apub/src/activities/block/mod.rs b/crates/apub/src/activities/block/mod.rs index c6bef9a00..c8323fcb4 100644 --- a/crates/apub/src/activities/block/mod.rs +++ b/crates/apub/src/activities/block/mod.rs @@ -14,7 +14,6 @@ use chrono::{DateTime, Utc}; use lemmy_api_common::{ community::BanFromCommunity, context::LemmyContext, - person::BanPerson, utils::check_expire_time, }; use lemmy_db_schema::{ @@ -23,7 +22,6 @@ use lemmy_db_schema::{ traits::Crud, utils::DbPool, }; -use lemmy_db_views::structs::SiteView; use lemmy_utils::error::{LemmyError, LemmyResult}; use serde::Deserialize; use url::Url; @@ -36,7 +34,6 @@ pub enum SiteOrCommunity { Site(ApubSite), Community(ApubCommunity), } - #[derive(Deserialize)] #[serde(untagged)] pub enum InstanceOrGroup { @@ -59,10 +56,7 @@ impl Object for SiteOrCommunity { } #[tracing::instrument(skip_all)] - async fn read_from_id( - object_id: Url, - data: &Data, - ) -> Result, LemmyError> + async fn read_from_id(object_id: Url, data: &Data) -> LemmyResult> where Self: Sized, { @@ -75,12 +69,18 @@ impl Object for SiteOrCommunity { }) } - async fn delete(self, _data: &Data) -> Result<(), LemmyError> { - unimplemented!() + async fn delete(self, data: &Data) -> LemmyResult<()> { + match self { + SiteOrCommunity::Site(i) => i.delete(data).await, + SiteOrCommunity::Community(c) => c.delete(data).await, + } } - async fn into_json(self, _data: &Data) -> Result { - unimplemented!() + async fn into_json(self, data: &Data) -> LemmyResult { + Ok(match 
self { + SiteOrCommunity::Site(i) => InstanceOrGroup::Instance(i.into_json(data).await?), + SiteOrCommunity::Community(c) => InstanceOrGroup::Group(c.into_json(data).await?), + }) } #[tracing::instrument(skip_all)] @@ -88,7 +88,7 @@ impl Object for SiteOrCommunity { apub: &Self::Kind, expected_domain: &Url, data: &Data, - ) -> Result<(), LemmyError> { + ) -> LemmyResult<()> { match apub { InstanceOrGroup::Instance(i) => ApubSite::verify(i, expected_domain, data).await, InstanceOrGroup::Group(g) => ApubCommunity::verify(g, expected_domain, data).await, @@ -96,7 +96,7 @@ impl Object for SiteOrCommunity { } #[tracing::instrument(skip_all)] - async fn from_json(apub: Self::Kind, data: &Data) -> Result + async fn from_json(apub: Self::Kind, data: &Data) -> LemmyResult where Self: Sized, { @@ -118,10 +118,7 @@ impl SiteOrCommunity { } } -async fn generate_cc( - target: &SiteOrCommunity, - pool: &mut DbPool<'_>, -) -> Result, LemmyError> { +async fn generate_cc(target: &SiteOrCommunity, pool: &mut DbPool<'_>) -> LemmyResult> { Ok(match target { SiteOrCommunity::Site(_) => Site::read_remote_sites(pool) .await? @@ -133,23 +130,26 @@ async fn generate_cc( } pub(crate) async fn send_ban_from_site( - mod_: Person, + moderator: Person, banned_user: Person, - data: BanPerson, + reason: Option, + remove_or_restore_data: Option, + ban: bool, + expires: Option, context: Data, -) -> Result<(), LemmyError> { - let site = SiteOrCommunity::Site(SiteView::read_local(&mut context.pool()).await?.site.into()); - let expires = check_expire_time(data.expires)?; +) -> LemmyResult<()> { + let site = SiteOrCommunity::Site(Site::read_local(&mut context.pool()).await?.into()); + let expires = check_expire_time(expires)?; // if the action affects a local user, federate to other instances if banned_user.local { - if data.ban { + if ban { BlockUser::send( &site, &banned_user.into(), - &mod_.into(), - data.remove_data.unwrap_or(false), - data.reason.clone(), + &moderator.into(), + remove_or_restore_data.unwrap_or(false), + reason.clone(), expires, &context, ) @@ -158,8 +158,9 @@ pub(crate) async fn send_ban_from_site( UndoBlockUser::send( &site, &banned_user.into(), - &mod_.into(), - data.reason.clone(), + &moderator.into(), + remove_or_restore_data.unwrap_or(false), + reason.clone(), &context, ) .await @@ -186,7 +187,7 @@ pub(crate) async fn send_ban_from_community( &SiteOrCommunity::Community(community), &banned_person.into(), &mod_.into(), - data.remove_data.unwrap_or(false), + data.remove_or_restore_data.unwrap_or(false), data.reason.clone(), expires, &context, @@ -197,6 +198,7 @@ pub(crate) async fn send_ban_from_community( &SiteOrCommunity::Community(community), &banned_person.into(), &mod_.into(), + data.remove_or_restore_data.unwrap_or(false), data.reason.clone(), &context, ) diff --git a/crates/apub/src/activities/block/undo_block_user.rs b/crates/apub/src/activities/block/undo_block_user.rs index 97e2bc336..f9f6890b6 100644 --- a/crates/apub/src/activities/block/undo_block_user.rs +++ b/crates/apub/src/activities/block/undo_block_user.rs @@ -17,7 +17,10 @@ use activitypub_federation::{ protocol::verification::verify_domains_match, traits::{ActivityHandler, Actor}, }; -use lemmy_api_common::context::LemmyContext; +use lemmy_api_common::{ + context::LemmyContext, + utils::{remove_or_restore_user_data, remove_or_restore_user_data_in_community}, +}; use lemmy_db_schema::{ source::{ activity::ActivitySendTargets, @@ -27,7 +30,7 @@ use lemmy_db_schema::{ }, traits::{Bannable, Crud}, }; -use 
lemmy_utils::error::LemmyError; +use lemmy_utils::error::{LemmyError, LemmyResult}; use url::Url; impl UndoBlockUser { @@ -36,9 +39,10 @@ impl UndoBlockUser { target: &SiteOrCommunity, user: &ApubPerson, mod_: &ApubPerson, + restore_data: bool, reason: Option, context: &Data, - ) -> Result<(), LemmyError> { + ) -> LemmyResult<()> { let block = BlockUser::new(target, user, mod_, None, reason, None, context).await?; let audience = if let SiteOrCommunity::Community(c) = target { Some(c.id().into()) @@ -58,6 +62,7 @@ impl UndoBlockUser { kind: UndoType::Undo, id: id.clone(), audience, + restore_data: Some(restore_data), }; let mut inboxes = ActivitySendTargets::to_inbox(user.shared_inbox_or_inbox()); @@ -88,8 +93,7 @@ impl ActivityHandler for UndoBlockUser { } #[tracing::instrument(skip_all)] - async fn verify(&self, context: &Data) -> Result<(), LemmyError> { - insert_received_activity(&self.id, context).await?; + async fn verify(&self, context: &Data) -> LemmyResult<()> { verify_is_public(&self.to, &self.cc)?; verify_domains_match(self.actor.inner(), self.object.actor.inner())?; self.object.verify(context).await?; @@ -97,8 +101,9 @@ impl ActivityHandler for UndoBlockUser { } #[tracing::instrument(skip_all)] - async fn receive(self, context: &Data) -> Result<(), LemmyError> { - let expires = self.object.expires.map(Into::into); + async fn receive(self, context: &Data) -> LemmyResult<()> { + insert_received_activity(&self.id, context).await?; + let expires = self.object.end_time.map(Into::into); let mod_person = self.actor.dereference(context).await?; let blocked_person = self.object.object.dereference(context).await?; match self.object.target.dereference(context).await? { @@ -114,6 +119,11 @@ impl ActivityHandler for UndoBlockUser { ) .await?; + if self.restore_data.unwrap_or(false) { + remove_or_restore_user_data(mod_person.id, blocked_person.id, false, &None, context) + .await?; + } + // write mod log let form = ModBanForm { mod_person_id: mod_person.id, @@ -132,6 +142,18 @@ impl ActivityHandler for UndoBlockUser { }; CommunityPersonBan::unban(&mut context.pool(), &community_user_ban_form).await?; + if self.restore_data.unwrap_or(false) { + remove_or_restore_user_data_in_community( + community.id, + mod_person.id, + blocked_person.id, + false, + &None, + &mut context.pool(), + ) + .await?; + } + // write to mod log let form = ModBanFromCommunityForm { mod_person_id: mod_person.id, diff --git a/crates/apub/src/activities/community/announce.rs b/crates/apub/src/activities/community/announce.rs index c704ad012..e374d2874 100644 --- a/crates/apub/src/activities/community/announce.rs +++ b/crates/apub/src/activities/community/announce.rs @@ -22,8 +22,11 @@ use activitypub_federation::{ traits::{ActivityHandler, Actor}, }; use lemmy_api_common::context::LemmyContext; -use lemmy_db_schema::source::{activity::ActivitySendTargets, community::CommunityFollower}; -use lemmy_utils::error::{LemmyError, LemmyErrorType, LemmyResult}; +use lemmy_db_schema::{ + source::{activity::ActivitySendTargets, community::CommunityFollower}, + CommunityVisibility, +}; +use lemmy_utils::error::{FederationError, LemmyError, LemmyErrorType, LemmyResult}; use serde_json::Value; use url::Url; @@ -51,7 +54,7 @@ impl ActivityHandler for RawAnnouncableActivities { // This is only for sending, not receiving so we reject it. if let AnnouncableActivities::Page(_) = activity { - Err(LemmyErrorType::CannotReceivePage)? + Err(FederationError::CannotReceivePage)? 
} // Need to treat community as optional here because `Delete/PrivateMessage` gets routed through @@ -79,7 +82,7 @@ impl AnnounceActivity { object: RawAnnouncableActivities, community: &ApubCommunity, context: &Data, - ) -> Result { + ) -> LemmyResult { let inner_kind = object .other .get("type") @@ -91,7 +94,12 @@ impl AnnounceActivity { actor: community.id().into(), to: vec![public()], object: IdOrNestedObject::NestedObject(object), - cc: vec![community.followers_url.clone().into()], + cc: community + .followers_url + .clone() + .map(Into::into) + .into_iter() + .collect(), kind: AnnounceType::Announce, id, }) @@ -102,7 +110,7 @@ impl AnnounceActivity { object: RawAnnouncableActivities, community: &ApubCommunity, context: &Data, - ) -> Result<(), LemmyError> { + ) -> LemmyResult<()> { let announce = AnnounceActivity::new(object.clone(), community, context)?; let inboxes = ActivitySendTargets::to_local_community_followers(community.id); send_lemmy_activity(context, announce, community, inboxes.clone(), false).await?; @@ -145,19 +153,19 @@ impl ActivityHandler for AnnounceActivity { } #[tracing::instrument(skip_all)] - async fn verify(&self, context: &Data) -> Result<(), LemmyError> { - insert_received_activity(&self.id, context).await?; + async fn verify(&self, _context: &Data) -> LemmyResult<()> { verify_is_public(&self.to, &self.cc)?; Ok(()) } #[tracing::instrument(skip_all)] - async fn receive(self, context: &Data) -> Result<(), LemmyError> { + async fn receive(self, context: &Data) -> LemmyResult<()> { + insert_received_activity(&self.id, context).await?; let object: AnnouncableActivities = self.object.object(context).await?.try_into()?; // This is only for sending, not receiving so we reject it. if let AnnouncableActivities::Page(_) = object { - Err(LemmyErrorType::CannotReceivePage)? + Err(FederationError::CannotReceivePage)? } let community = object.community(context).await?; @@ -205,10 +213,12 @@ async fn can_accept_activity_in_community( context: &Data, ) -> LemmyResult<()> { if let Some(community) = community { - if !community.local - && !CommunityFollower::has_local_followers(&mut context.pool(), community.id).await? - { - Err(LemmyErrorType::CommunityHasNoFollowers)? + // Local only community can't federate + if community.visibility != CommunityVisibility::Public { + return Err(LemmyErrorType::NotFound.into()); + } + if !community.local { + CommunityFollower::check_has_local_followers(&mut context.pool(), community.id).await? 
} } Ok(()) diff --git a/crates/apub/src/activities/community/collection_add.rs b/crates/apub/src/activities/community/collection_add.rs index ba962359e..5ab754d35 100644 --- a/crates/apub/src/activities/community/collection_add.rs +++ b/crates/apub/src/activities/community/collection_add.rs @@ -36,7 +36,7 @@ use lemmy_db_schema::{ }, traits::{Crud, Joinable}, }; -use lemmy_utils::error::LemmyError; +use lemmy_utils::error::{LemmyError, LemmyResult}; use url::Url; impl CollectionAdd { @@ -46,7 +46,7 @@ impl CollectionAdd { added_mod: &ApubPerson, actor: &ApubPerson, context: &Data, - ) -> Result<(), LemmyError> { + ) -> LemmyResult<()> { let id = generate_activity_id( AddType::Add, &context.settings().get_protocol_and_hostname(), @@ -72,7 +72,7 @@ impl CollectionAdd { featured_post: &ApubPost, actor: &ApubPerson, context: &Data, - ) -> Result<(), LemmyError> { + ) -> LemmyResult<()> { let id = generate_activity_id( AddType::Add, &context.settings().get_protocol_and_hostname(), @@ -114,8 +114,7 @@ impl ActivityHandler for CollectionAdd { } #[tracing::instrument(skip_all)] - async fn verify(&self, context: &Data) -> Result<(), LemmyError> { - insert_received_activity(&self.id, context).await?; + async fn verify(&self, context: &Data) -> LemmyResult<()> { verify_is_public(&self.to, &self.cc)?; let community = self.community(context).await?; verify_person_in_community(&self.actor, &community, context).await?; @@ -124,7 +123,8 @@ impl ActivityHandler for CollectionAdd { } #[tracing::instrument(skip_all)] - async fn receive(self, context: &Data) -> Result<(), LemmyError> { + async fn receive(self, context: &Data) -> LemmyResult<()> { + insert_received_activity(&self.id, context).await?; let (community, collection_type) = Community::get_by_collection_url(&mut context.pool(), &self.target.into()).await?; match collection_type { @@ -133,8 +133,8 @@ impl ActivityHandler for CollectionAdd { .dereference(context) .await?; - // If we had to refetch the community while parsing the activity, then the new mod has already - // been added. Skip it here as it would result in a duplicate key error. + // If we had to refetch the community while parsing the activity, then the new mod has + // already been added. Skip it here as it would result in a duplicate key error. let new_mod_id = new_mod.id; let moderated_communities = CommunityModerator::get_person_moderated_communities(&mut context.pool(), new_mod_id) @@ -179,7 +179,7 @@ pub(crate) async fn send_add_mod_to_community( updated_mod_id: PersonId, added: bool, context: Data, -) -> Result<(), LemmyError> { +) -> LemmyResult<()> { let actor: ApubPerson = actor.into(); let community: ApubCommunity = Community::read(&mut context.pool(), community_id) .await? 
@@ -199,7 +199,7 @@ pub(crate) async fn send_feature_post( actor: Person, featured: bool, context: Data, -) -> Result<(), LemmyError> { +) -> LemmyResult<()> { let actor: ApubPerson = actor.into(); let post: ApubPost = post.into(); let community = Community::read(&mut context.pool(), post.community_id) diff --git a/crates/apub/src/activities/community/collection_remove.rs b/crates/apub/src/activities/community/collection_remove.rs index c71e282bc..90df1fd14 100644 --- a/crates/apub/src/activities/community/collection_remove.rs +++ b/crates/apub/src/activities/community/collection_remove.rs @@ -31,7 +31,7 @@ use lemmy_db_schema::{ }, traits::{Crud, Joinable}, }; -use lemmy_utils::error::LemmyError; +use lemmy_utils::error::{LemmyError, LemmyResult}; use url::Url; impl CollectionRemove { @@ -41,7 +41,7 @@ impl CollectionRemove { removed_mod: &ApubPerson, actor: &ApubPerson, context: &Data, - ) -> Result<(), LemmyError> { + ) -> LemmyResult<()> { let id = generate_activity_id( RemoveType::Remove, &context.settings().get_protocol_and_hostname(), @@ -67,7 +67,7 @@ impl CollectionRemove { featured_post: &ApubPost, actor: &ApubPerson, context: &Data, - ) -> Result<(), LemmyError> { + ) -> LemmyResult<()> { let id = generate_activity_id( RemoveType::Remove, &context.settings().get_protocol_and_hostname(), @@ -109,8 +109,7 @@ impl ActivityHandler for CollectionRemove { } #[tracing::instrument(skip_all)] - async fn verify(&self, context: &Data) -> Result<(), LemmyError> { - insert_received_activity(&self.id, context).await?; + async fn verify(&self, context: &Data) -> LemmyResult<()> { verify_is_public(&self.to, &self.cc)?; let community = self.community(context).await?; verify_person_in_community(&self.actor, &community, context).await?; @@ -119,7 +118,8 @@ impl ActivityHandler for CollectionRemove { } #[tracing::instrument(skip_all)] - async fn receive(self, context: &Data) -> Result<(), LemmyError> { + async fn receive(self, context: &Data) -> LemmyResult<()> { + insert_received_activity(&self.id, context).await?; let (community, collection_type) = Community::get_by_collection_url(&mut context.pool(), &self.target.into()).await?; match collection_type { diff --git a/crates/apub/src/activities/community/lock_page.rs b/crates/apub/src/activities/community/lock_page.rs index 634e5ab2f..0d90b5bb0 100644 --- a/crates/apub/src/activities/community/lock_page.rs +++ b/crates/apub/src/activities/community/lock_page.rs @@ -26,12 +26,13 @@ use lemmy_db_schema::{ source::{ activity::ActivitySendTargets, community::Community, + moderator::{ModLockPost, ModLockPostForm}, person::Person, post::{Post, PostUpdateForm}, }, traits::Crud, }; -use lemmy_utils::error::LemmyError; +use lemmy_utils::error::{LemmyError, LemmyResult}; use url::Url; #[async_trait::async_trait] @@ -57,12 +58,22 @@ impl ActivityHandler for LockPage { } async fn receive(self, context: &Data) -> Result<(), Self::Error> { + insert_received_activity(&self.id, context).await?; + let locked = Some(true); let form = PostUpdateForm { - locked: Some(true), + locked, ..Default::default() }; let post = self.object.dereference(context).await?; Post::update(&mut context.pool(), post.id, &form).await?; + + let form = ModLockPostForm { + mod_person_id: self.actor.dereference(context).await?.id, + post_id: post.id, + locked, + }; + ModLockPost::create(&mut context.pool(), &form).await?; + Ok(()) } } @@ -81,7 +92,6 @@ impl ActivityHandler for UndoLockPage { } async fn verify(&self, context: &Data) -> Result<(), Self::Error> { - 
insert_received_activity(&self.id, context).await?; verify_is_public(&self.to, &self.cc)?; let community = self.community(context).await?; verify_person_in_community(&self.actor, &community, context).await?; @@ -91,12 +101,22 @@ impl ActivityHandler for UndoLockPage { } async fn receive(self, context: &Data) -> Result<(), Self::Error> { + insert_received_activity(&self.id, context).await?; + let locked = Some(false); let form = PostUpdateForm { - locked: Some(false), + locked, ..Default::default() }; let post = self.object.object.dereference(context).await?; Post::update(&mut context.pool(), post.id, &form).await?; + + let form = ModLockPostForm { + mod_person_id: self.actor.dereference(context).await?.id, + post_id: post.id, + locked, + }; + ModLockPost::create(&mut context.pool(), &form).await?; + Ok(()) } } @@ -106,7 +126,7 @@ pub(crate) async fn send_lock_post( actor: Person, locked: bool, context: Data, -) -> Result<(), LemmyError> { +) -> LemmyResult<()> { let community: ApubCommunity = Community::read(&mut context.pool(), post.community_id) .await? .into(); diff --git a/crates/apub/src/activities/community/mod.rs b/crates/apub/src/activities/community/mod.rs index c654a5c79..59b8fadbb 100644 --- a/crates/apub/src/activities/community/mod.rs +++ b/crates/apub/src/activities/community/mod.rs @@ -6,8 +6,11 @@ use crate::{ }; use activitypub_federation::{config::Data, traits::Actor}; use lemmy_api_common::context::LemmyContext; -use lemmy_db_schema::source::{activity::ActivitySendTargets, person::PersonFollower}; -use lemmy_utils::error::LemmyError; +use lemmy_db_schema::{ + source::{activity::ActivitySendTargets, person::PersonFollower}, + CommunityVisibility, +}; +use lemmy_utils::error::LemmyResult; pub mod announce; pub mod collection_add; @@ -21,13 +24,14 @@ pub mod update; /// /// Activities are sent to the community itself if it lives on another instance. If the community /// is local, the activity is directly wrapped into Announce and sent to community followers. -/// Activities are also sent to those who follow the actor (with exception of moderation activities). +/// Activities are also sent to those who follow the actor (with exception of moderation +/// activities). 
/// /// * `activity` - The activity which is being sent /// * `actor` - The user who is sending the activity /// * `community` - Community inside which the activity is sent -/// * `inboxes` - Any additional inboxes the activity should be sent to (for example, -/// to the user who is being promoted to moderator) +/// * `inboxes` - Any additional inboxes the activity should be sent to (for example, to the user +/// who is being promoted to moderator) /// * `is_mod_activity` - True for things like Add/Mod, these are not sent to user followers pub(crate) async fn send_activity_in_community( activity: AnnouncableActivities, @@ -36,7 +40,12 @@ pub(crate) async fn send_activity_in_community( extra_inboxes: ActivitySendTargets, is_mod_action: bool, context: &Data, -) -> Result<(), LemmyError> { +) -> LemmyResult<()> { + // If community is local only, don't send anything out + if community.visibility != CommunityVisibility::Public { + return Ok(()); + } + // send to any users which are mentioned or affected directly let mut inboxes = extra_inboxes; diff --git a/crates/apub/src/activities/community/report.rs b/crates/apub/src/activities/community/report.rs index 7da5ac8ae..4966add34 100644 --- a/crates/apub/src/activities/community/report.rs +++ b/crates/apub/src/activities/community/report.rs @@ -1,8 +1,11 @@ use crate::{ activities::{generate_activity_id, send_lemmy_activity, verify_person_in_community}, insert_received_activity, - objects::{community::ApubCommunity, person::ApubPerson}, - protocol::{activities::community::report::Report, InCommunity}, + objects::{community::ApubCommunity, instance::ApubSite, person::ApubPerson}, + protocol::{ + activities::community::report::{Report, ReportObject}, + InCommunity, + }, PostOrComment, }; use activitypub_federation::{ @@ -11,7 +14,10 @@ use activitypub_federation::{ kinds::activity::FlagType, traits::{ActivityHandler, Actor}, }; -use lemmy_api_common::context::LemmyContext; +use lemmy_api_common::{ + context::LemmyContext, + utils::{check_comment_deleted_or_removed, check_post_deleted_or_removed}, +}; use lemmy_db_schema::{ source::{ activity::ActivitySendTargets, @@ -19,10 +25,11 @@ use lemmy_db_schema::{ community::Community, person::Person, post_report::{PostReport, PostReportForm}, + site::Site, }, - traits::Reportable, + traits::{Crud, Reportable}, }; -use lemmy_utils::error::LemmyError; +use lemmy_utils::error::{LemmyError, LemmyResult}; use url::Url; impl Report { @@ -33,7 +40,7 @@ impl Report { community: Community, reason: String, context: Data, - ) -> Result<(), LemmyError> { + ) -> LemmyResult<()> { let actor: ApubPerson = actor.into(); let community: ApubCommunity = community.into(); let kind = FlagType::Flag; @@ -44,18 +51,32 @@ impl Report { let report = Report { actor: actor.id().into(), to: [community.id().into()], - object: object_id, - summary: reason, + object: ReportObject::Lemmy(object_id.clone()), + summary: Some(reason), + content: None, kind, id: id.clone(), audience: Some(community.id().into()), }; - let inbox = if community.local { - ActivitySendTargets::empty() - } else { - ActivitySendTargets::to_inbox(community.shared_inbox_or_inbox()) + + // send report to the community where object was posted + let mut inboxes = ActivitySendTargets::to_inbox(community.shared_inbox_or_inbox()); + + // also send report to user's home instance if possible + let object_creator_id = match object_id.dereference_local(&context).await? 
{ + PostOrComment::Post(p) => p.creator_id, + PostOrComment::Comment(c) => c.creator_id, }; - send_lemmy_activity(&context, report, &actor, inbox, false).await + let object_creator = Person::read(&mut context.pool(), object_creator_id).await?; + let object_creator_site: Option = + Site::read_from_instance_id(&mut context.pool(), object_creator.instance_id) + .await? + .map(Into::into); + if let Some(inbox) = object_creator_site.map(|s| s.shared_inbox_or_inbox()) { + inboxes.add_inbox(inbox); + } + + send_lemmy_activity(&context, report, &actor, inboxes, false).await } } @@ -73,34 +94,39 @@ impl ActivityHandler for Report { } #[tracing::instrument(skip_all)] - async fn verify(&self, context: &Data) -> Result<(), LemmyError> { - insert_received_activity(&self.id, context).await?; + async fn verify(&self, context: &Data) -> LemmyResult<()> { let community = self.community(context).await?; verify_person_in_community(&self.actor, &community, context).await?; Ok(()) } #[tracing::instrument(skip_all)] - async fn receive(self, context: &Data) -> Result<(), LemmyError> { + async fn receive(self, context: &Data) -> LemmyResult<()> { + insert_received_activity(&self.id, context).await?; let actor = self.actor.dereference(context).await?; + let reason = self.reason()?; match self.object.dereference(context).await? { PostOrComment::Post(post) => { + check_post_deleted_or_removed(&post)?; + let report_form = PostReportForm { creator_id: actor.id, post_id: post.id, original_post_name: post.name.clone(), original_post_url: post.url.clone(), - reason: self.summary.clone(), + reason, original_post_body: post.body.clone(), }; PostReport::report(&mut context.pool(), &report_form).await?; } PostOrComment::Comment(comment) => { + check_comment_deleted_or_removed(&comment)?; + let report_form = CommentReportForm { creator_id: actor.id, comment_id: comment.id, original_comment_text: comment.content.clone(), - reason: self.summary.clone(), + reason, }; CommentReport::report(&mut context.pool(), &report_form).await?; } diff --git a/crates/apub/src/activities/community/update.rs b/crates/apub/src/activities/community/update.rs index 11040f6b9..48a64bd9d 100644 --- a/crates/apub/src/activities/community/update.rs +++ b/crates/apub/src/activities/community/update.rs @@ -8,7 +8,7 @@ use crate::{ }, activity_lists::AnnouncableActivities, insert_received_activity, - objects::{community::ApubCommunity, person::ApubPerson}, + objects::{community::ApubCommunity, person::ApubPerson, read_from_string_or_source_opt}, protocol::{activities::community::update::UpdateCommunity, InCommunity}, }; use activitypub_federation::{ @@ -18,17 +18,22 @@ use activitypub_federation::{ }; use lemmy_api_common::context::LemmyContext; use lemmy_db_schema::{ - source::{activity::ActivitySendTargets, community::Community, person::Person}, + source::{ + activity::ActivitySendTargets, + community::{Community, CommunityUpdateForm}, + person::Person, + }, traits::Crud, + utils::naive_now, }; -use lemmy_utils::error::LemmyError; +use lemmy_utils::error::{LemmyError, LemmyResult}; use url::Url; pub(crate) async fn send_update_community( community: Community, actor: Person, context: Data, -) -> Result<(), LemmyError> { +) -> LemmyResult<()> { let community: ApubCommunity = community.into(); let actor: ApubPerson = actor.into(); let id = generate_activity_id( @@ -71,8 +76,7 @@ impl ActivityHandler for UpdateCommunity { } #[tracing::instrument(skip_all)] - async fn verify(&self, context: &Data) -> Result<(), LemmyError> { - 
insert_received_activity(&self.id, context).await?; + async fn verify(&self, context: &Data) -> LemmyResult<()> { verify_is_public(&self.to, &self.cc)?; let community = self.community(context).await?; verify_person_in_community(&self.actor, &community, context).await?; @@ -82,10 +86,39 @@ impl ActivityHandler for UpdateCommunity { } #[tracing::instrument(skip_all)] - async fn receive(self, context: &Data) -> Result<(), LemmyError> { + async fn receive(self, context: &Data) -> LemmyResult<()> { + insert_received_activity(&self.id, context).await?; let community = self.community(context).await?; - let community_update_form = self.object.into_update_form(); + let community_update_form = CommunityUpdateForm { + title: Some(self.object.name.unwrap_or(self.object.preferred_username)), + description: Some(read_from_string_or_source_opt( + &self.object.summary, + &None, + &self.object.source, + )), + published: self.object.published.map(Into::into), + updated: Some(self.object.updated.map(Into::into)), + nsfw: Some(self.object.sensitive.unwrap_or(false)), + actor_id: Some(self.object.id.into()), + public_key: Some(self.object.public_key.public_key_pem), + last_refreshed_at: Some(naive_now()), + icon: Some(self.object.icon.map(|i| i.url.into())), + banner: Some(self.object.image.map(|i| i.url.into())), + followers_url: self.object.followers.map(Into::into), + inbox_url: Some( + self + .object + .endpoints + .map(|e| e.shared_inbox) + .unwrap_or(self.object.inbox) + .into(), + ), + moderators_url: self.object.attributed_to.map(Into::into), + posting_restricted_to_mods: self.object.posting_restricted_to_mods, + featured_url: self.object.featured.map(Into::into), + ..Default::default() + }; Community::update(&mut context.pool(), community.id, &community_update_form).await?; Ok(()) diff --git a/crates/apub/src/activities/create_or_update/comment.rs b/crates/apub/src/activities/create_or_update/comment.rs index e162709ba..0a0737151 100644 --- a/crates/apub/src/activities/create_or_update/comment.rs +++ b/crates/apub/src/activities/create_or_update/comment.rs @@ -19,7 +19,7 @@ use activitypub_federation::{ config::Data, fetch::object_id::ObjectId, kinds::public, - protocol::verification::verify_domains_match, + protocol::verification::{verify_domains_match, verify_urls_match}, traits::{ActivityHandler, Actor, Object}, }; use lemmy_api_common::{ @@ -39,7 +39,10 @@ use lemmy_db_schema::{ }, traits::{Crud, Likeable}, }; -use lemmy_utils::{error::LemmyError, utils::mention::scrape_text_for_mentions}; +use lemmy_utils::{ + error::{LemmyError, LemmyResult}, + utils::mention::scrape_text_for_mentions, +}; use url::Url; impl CreateOrUpdateNote { @@ -49,7 +52,7 @@ impl CreateOrUpdateNote { person_id: PersonId, kind: CreateOrUpdateType, context: Data, - ) -> Result<(), LemmyError> { + ) -> LemmyResult<()> { // TODO: might be helpful to add a comment method to retrieve community directly let post_id = comment.post_id; let post = Post::read(&mut context.pool(), post_id).await?; @@ -114,8 +117,7 @@ impl ActivityHandler for CreateOrUpdateNote { } #[tracing::instrument(skip_all)] - async fn verify(&self, context: &Data) -> Result<(), LemmyError> { - insert_received_activity(&self.id, context).await?; + async fn verify(&self, context: &Data) -> LemmyResult<()> { verify_is_public(&self.to, &self.cc)?; let post = self.object.get_parents(context).await?.0; let community = self.community(context).await?; @@ -124,13 +126,15 @@ impl ActivityHandler for CreateOrUpdateNote { verify_domains_match(self.actor.inner(), 
self.object.id.inner())?; check_community_deleted_or_removed(&community)?; check_post_deleted_or_removed(&post)?; + verify_urls_match(self.actor.inner(), self.object.attributed_to.inner())?; ApubComment::verify(&self.object, self.actor.inner(), context).await?; Ok(()) } #[tracing::instrument(skip_all)] - async fn receive(self, context: &Data) -> Result<(), LemmyError> { + async fn receive(self, context: &Data) -> LemmyResult<()> { + insert_received_activity(&self.id, context).await?; // Need to do this check here instead of Note::from_json because we need the person who // send the activity, not the comment author. let existing_comment = self.object.id.dereference_local(context).await.ok(); @@ -149,7 +153,6 @@ impl ActivityHandler for CreateOrUpdateNote { // author likes their own comment by default let like_form = CommentLikeForm { comment_id: comment.id, - post_id: comment.post_id, person_id: comment.creator_id, score: 1, }; @@ -159,17 +162,16 @@ impl ActivityHandler for CreateOrUpdateNote { CommentAggregates::update_hot_rank(&mut context.pool(), comment.id).await?; let do_send_email = self.kind == CreateOrUpdateType::Create; - let post_id = comment.post_id; - let post = Post::read(&mut context.pool(), post_id).await?; let actor = self.actor.dereference(context).await?; // Note: // Although mentions could be gotten from the post tags (they are included there), or the ccs, // Its much easier to scrape them from the comment body, since the API has to do that // anyway. - // TODO: for compatibility with other projects, it would be much better to read this from cc or tags + // TODO: for compatibility with other projects, it would be much better to read this from cc or + // tags let mentions = scrape_text_for_mentions(&comment.content); - send_local_notifs(mentions, &comment.0, &actor, &post, do_send_email, context).await?; + send_local_notifs(mentions, comment.id, &actor, do_send_email, context, None).await?; Ok(()) } } diff --git a/crates/apub/src/activities/create_or_update/post.rs b/crates/apub/src/activities/create_or_update/post.rs index 5d58834e6..fb53100f6 100644 --- a/crates/apub/src/activities/create_or_update/post.rs +++ b/crates/apub/src/activities/create_or_update/post.rs @@ -4,7 +4,6 @@ use crate::{ community::send_activity_in_community, generate_activity_id, verify_is_public, - verify_mod_action, verify_person_in_community, }, activity_lists::AnnouncableActivities, @@ -33,7 +32,7 @@ use lemmy_db_schema::{ }, traits::{Crud, Likeable}, }; -use lemmy_utils::error::{LemmyError, LemmyErrorType}; +use lemmy_utils::error::{LemmyError, LemmyResult}; use url::Url; impl CreateOrUpdatePage { @@ -43,7 +42,7 @@ impl CreateOrUpdatePage { community: &ApubCommunity, kind: CreateOrUpdateType, context: &Data, - ) -> Result { + ) -> LemmyResult { let id = generate_activity_id( kind.clone(), &context.settings().get_protocol_and_hostname(), @@ -65,8 +64,7 @@ impl CreateOrUpdatePage { person_id: PersonId, kind: CreateOrUpdateType, context: Data, - ) -> Result<(), LemmyError> { - let post = ApubPost(post); + ) -> LemmyResult<()> { let community_id = post.community_id; let person: ApubPerson = Person::read(&mut context.pool(), person_id).await?.into(); let community: ApubCommunity = Community::read(&mut context.pool(), community_id) @@ -74,15 +72,14 @@ impl CreateOrUpdatePage { .into(); let create_or_update = - CreateOrUpdatePage::new(post, &person, &community, kind, &context).await?; - let is_mod_action = create_or_update.object.is_mod_action(&context).await?; + CreateOrUpdatePage::new(post.into(), 
&person, &community, kind, &context).await?; let activity = AnnouncableActivities::CreateOrUpdatePost(create_or_update); send_activity_in_community( activity, &person, &community, ActivitySendTargets::empty(), - is_mod_action, + false, &context, ) .await?; @@ -104,42 +101,20 @@ impl ActivityHandler for CreateOrUpdatePage { } #[tracing::instrument(skip_all)] - async fn verify(&self, context: &Data) -> Result<(), LemmyError> { - insert_received_activity(&self.id, context).await?; + async fn verify(&self, context: &Data) -> LemmyResult<()> { verify_is_public(&self.to, &self.cc)?; let community = self.community(context).await?; verify_person_in_community(&self.actor, &community, context).await?; check_community_deleted_or_removed(&community)?; - - match self.kind { - CreateOrUpdateType::Create => { - verify_domains_match(self.actor.inner(), self.object.id.inner())?; - verify_urls_match(self.actor.inner(), self.object.creator()?.inner())?; - // Check that the post isnt locked, as that isnt possible for newly created posts. - // However, when fetching a remote post we generate a new create activity with the current - // locked value, so this check may fail. So only check if its a local community, - // because then we will definitely receive all create and update activities separately. - let is_locked = self.object.comments_enabled == Some(false); - if community.local && is_locked { - Err(LemmyErrorType::NewPostCannotBeLocked)? - } - } - CreateOrUpdateType::Update => { - let is_mod_action = self.object.is_mod_action(context).await?; - if is_mod_action { - verify_mod_action(&self.actor, &community, context).await?; - } else { - verify_domains_match(self.actor.inner(), self.object.id.inner())?; - verify_urls_match(self.actor.inner(), self.object.creator()?.inner())?; - } - } - } + verify_domains_match(self.actor.inner(), self.object.id.inner())?; + verify_urls_match(self.actor.inner(), self.object.creator()?.inner())?; ApubPost::verify(&self.object, self.actor.inner(), context).await?; Ok(()) } #[tracing::instrument(skip_all)] - async fn receive(self, context: &Data) -> Result<(), LemmyError> { + async fn receive(self, context: &Data) -> LemmyResult<()> { + insert_received_activity(&self.id, context).await?; let post = ApubPost::from_json(self.object, context).await?; // author likes their own post by default diff --git a/crates/apub/src/activities/create_or_update/private_message.rs b/crates/apub/src/activities/create_or_update/private_message.rs index 74f833051..6bba4e374 100644 --- a/crates/apub/src/activities/create_or_update/private_message.rs +++ b/crates/apub/src/activities/create_or_update/private_message.rs @@ -9,20 +9,20 @@ use crate::{ }; use activitypub_federation::{ config::Data, - protocol::verification::verify_domains_match, + protocol::verification::{verify_domains_match, verify_urls_match}, traits::{ActivityHandler, Actor, Object}, }; use lemmy_api_common::context::LemmyContext; use lemmy_db_schema::source::activity::ActivitySendTargets; use lemmy_db_views::structs::PrivateMessageView; -use lemmy_utils::error::LemmyError; +use lemmy_utils::error::{LemmyError, LemmyResult}; use url::Url; pub(crate) async fn send_create_or_update_pm( pm_view: PrivateMessageView, kind: CreateOrUpdateType, context: Data, -) -> Result<(), LemmyError> { +) -> LemmyResult<()> { let actor: ApubPerson = pm_view.creator.into(); let recipient: ApubPerson = pm_view.recipient.into(); @@ -57,17 +57,18 @@ impl ActivityHandler for CreateOrUpdateChatMessage { } #[tracing::instrument(skip_all)] - async fn 
verify(&self, context: &Data) -> Result<(), LemmyError> { - insert_received_activity(&self.id, context).await?; + async fn verify(&self, context: &Data) -> LemmyResult<()> { verify_person(&self.actor, context).await?; verify_domains_match(self.actor.inner(), self.object.id.inner())?; verify_domains_match(self.to[0].inner(), self.object.to[0].inner())?; + verify_urls_match(self.actor.inner(), self.object.attributed_to.inner())?; ApubPrivateMessage::verify(&self.object, self.actor.inner(), context).await?; Ok(()) } #[tracing::instrument(skip_all)] - async fn receive(self, context: &Data) -> Result<(), LemmyError> { + async fn receive(self, context: &Data) -> LemmyResult<()> { + insert_received_activity(&self.id, context).await?; ApubPrivateMessage::from_json(self.object, context).await?; Ok(()) } diff --git a/crates/apub/src/activities/deletion/delete.rs b/crates/apub/src/activities/deletion/delete.rs index 140c98665..1ddf642b9 100644 --- a/crates/apub/src/activities/deletion/delete.rs +++ b/crates/apub/src/activities/deletion/delete.rs @@ -12,6 +12,7 @@ use lemmy_api_common::context::LemmyContext; use lemmy_db_schema::{ source::{ comment::{Comment, CommentUpdateForm}, + comment_report::CommentReport, community::{Community, CommunityUpdateForm}, moderator::{ ModRemoveComment, @@ -22,10 +23,11 @@ use lemmy_db_schema::{ ModRemovePostForm, }, post::{Post, PostUpdateForm}, + post_report::PostReport, }, - traits::Crud, + traits::{Crud, Reportable}, }; -use lemmy_utils::error::{LemmyError, LemmyErrorType}; +use lemmy_utils::error::{FederationError, LemmyError, LemmyErrorType, LemmyResult}; use url::Url; #[async_trait::async_trait] @@ -42,14 +44,14 @@ impl ActivityHandler for Delete { } #[tracing::instrument(skip_all)] - async fn verify(&self, context: &Data) -> Result<(), LemmyError> { - insert_received_activity(&self.id, context).await?; + async fn verify(&self, context: &Data) -> LemmyResult<()> { verify_delete_activity(self, self.summary.is_some(), context).await?; Ok(()) } #[tracing::instrument(skip_all)] - async fn receive(self, context: &Data) -> Result<(), LemmyError> { + async fn receive(self, context: &Data) -> LemmyResult<()> { + insert_received_activity(&self.id, context).await?; if let Some(reason) = self.summary { // We set reason to empty string if it doesn't exist, to distinguish between delete and // remove. Here we change it back to option, so we don't write it to db. @@ -66,7 +68,14 @@ impl ActivityHandler for Delete { ) .await } else { - receive_delete_action(self.object.id(), &self.actor, true, context).await + receive_delete_action( + self.object.id(), + &self.actor, + true, + self.remove_data, + context, + ) + .await } } } @@ -79,7 +88,7 @@ impl Delete { community: Option<&Community>, summary: Option, context: &Data, - ) -> Result { + ) -> LemmyResult { let id = generate_activity_id( DeleteType::Delete, &context.settings().get_protocol_and_hostname(), @@ -94,6 +103,7 @@ impl Delete { summary, id, audience: community.map(|c| c.actor_id.clone().into()), + remove_data: None, }) } } @@ -104,11 +114,11 @@ pub(in crate::activities) async fn receive_remove_action( object: &Url, reason: Option, context: &Data, -) -> Result<(), LemmyError> { +) -> LemmyResult<()> { match DeletableObjects::read_from_db(object, context).await? { DeletableObjects::Community(community) => { if community.local { - Err(LemmyErrorType::OnlyLocalAdminCanRemoveCommunity)? + Err(FederationError::OnlyLocalAdminCanRemoveCommunity)? 
} let form = ModRemoveCommunityForm { mod_person_id: actor.id, @@ -128,6 +138,7 @@ pub(in crate::activities) async fn receive_remove_action( .await?; } DeletableObjects::Post(post) => { + PostReport::resolve_all_for_object(&mut context.pool(), post.id, actor.id).await?; let form = ModRemovePostForm { mod_person_id: actor.id, post_id: post.id, @@ -146,6 +157,7 @@ pub(in crate::activities) async fn receive_remove_action( .await?; } DeletableObjects::Comment(comment) => { + CommentReport::resolve_all_for_object(&mut context.pool(), comment.id, actor.id).await?; let form = ModRemoveCommentForm { mod_person_id: actor.id, comment_id: comment.id, @@ -163,7 +175,9 @@ pub(in crate::activities) async fn receive_remove_action( ) .await?; } - DeletableObjects::PrivateMessage(_) => unimplemented!(), + // TODO these need to be implemented yet, for now, return errors + DeletableObjects::PrivateMessage(_) => Err(LemmyErrorType::NotFound)?, + DeletableObjects::Person(_) => Err(LemmyErrorType::NotFound)?, } Ok(()) } diff --git a/crates/apub/src/activities/deletion/delete_user.rs b/crates/apub/src/activities/deletion/delete_user.rs deleted file mode 100644 index 7a56bda90..000000000 --- a/crates/apub/src/activities/deletion/delete_user.rs +++ /dev/null @@ -1,77 +0,0 @@ -use crate::{ - activities::{generate_activity_id, send_lemmy_activity, verify_is_public, verify_person}, - insert_received_activity, - objects::person::ApubPerson, - protocol::activities::deletion::delete_user::DeleteUser, -}; -use activitypub_federation::{ - config::Data, - kinds::{activity::DeleteType, public}, - protocol::verification::verify_urls_match, - traits::{ActivityHandler, Actor}, -}; -use lemmy_api_common::{context::LemmyContext, utils::purge_user_account}; -use lemmy_db_schema::source::{activity::ActivitySendTargets, person::Person}; -use lemmy_utils::error::LemmyError; -use url::Url; - -pub async fn delete_user( - person: Person, - delete_content: bool, - context: Data, -) -> Result<(), LemmyError> { - let actor: ApubPerson = person.into(); - - let id = generate_activity_id( - DeleteType::Delete, - &context.settings().get_protocol_and_hostname(), - )?; - let delete = DeleteUser { - actor: actor.id().into(), - to: vec![public()], - object: actor.id().into(), - kind: DeleteType::Delete, - id: id.clone(), - cc: vec![], - remove_data: Some(delete_content), - }; - - let inboxes = ActivitySendTargets::to_all_instances(); - - send_lemmy_activity(&context, delete, &actor, inboxes, true).await?; - Ok(()) -} - -/// This can be separate from Delete activity because it doesn't need to be handled in shared inbox -/// (cause instance actor doesn't have shared inbox). 
-#[async_trait::async_trait] -impl ActivityHandler for DeleteUser { - type DataType = LemmyContext; - type Error = LemmyError; - - fn id(&self) -> &Url { - &self.id - } - - fn actor(&self) -> &Url { - self.actor.inner() - } - - async fn verify(&self, context: &Data) -> Result<(), LemmyError> { - insert_received_activity(&self.id, context).await?; - verify_is_public(&self.to, &[])?; - verify_person(&self.actor, context).await?; - verify_urls_match(self.actor.inner(), self.object.inner())?; - Ok(()) - } - - async fn receive(self, context: &Data) -> Result<(), LemmyError> { - let actor = self.actor.dereference(context).await?; - if self.remove_data.unwrap_or(false) { - purge_user_account(actor.id, context).await?; - } else { - Person::delete_account(&mut context.pool(), actor.id).await?; - } - Ok(()) - } -} diff --git a/crates/apub/src/activities/deletion/mod.rs b/crates/apub/src/activities/deletion/mod.rs index 26cd6b1ab..b12532087 100644 --- a/crates/apub/src/activities/deletion/mod.rs +++ b/crates/apub/src/activities/deletion/mod.rs @@ -24,12 +24,11 @@ use activitypub_federation::{ config::Data, fetch::object_id::ObjectId, kinds::public, - protocol::verification::verify_domains_match, + protocol::verification::{verify_domains_match, verify_urls_match}, traits::{Actor, Object}, }; -use lemmy_api_common::context::LemmyContext; +use lemmy_api_common::{context::LemmyContext, utils::purge_user_account}; use lemmy_db_schema::{ - newtypes::CommunityId, source::{ activity::ActivitySendTargets, comment::{Comment, CommentUpdateForm}, @@ -40,12 +39,11 @@ use lemmy_db_schema::{ }, traits::Crud, }; -use lemmy_utils::error::LemmyError; +use lemmy_utils::error::LemmyResult; use std::ops::Deref; use url::Url; pub mod delete; -pub mod delete_user; pub mod undo_delete; /// Parameter `reason` being set indicates that this is a removal by a mod. If its unset, this @@ -58,7 +56,7 @@ pub(crate) async fn send_apub_delete_in_community( reason: Option, deleted: bool, context: &Data, -) -> Result<(), LemmyError> { +) -> LemmyResult<()> { let actor = ApubPerson::from(actor); let is_mod_action = reason.is_some(); let activity = if deleted { @@ -79,45 +77,13 @@ pub(crate) async fn send_apub_delete_in_community( .await } -/// Parameter `reason` being set indicates that this is a removal by a mod. If its unset, this -/// action was done by a normal user. 
-#[tracing::instrument(skip_all)] -pub(crate) async fn send_apub_delete_in_community_new( - actor: Person, - community_id: CommunityId, - object: DeletableObjects, - reason: Option, - deleted: bool, - context: Data, -) -> Result<(), LemmyError> { - let community = Community::read(&mut context.pool(), community_id).await?; - let actor = ApubPerson::from(actor); - let is_mod_action = reason.is_some(); - let activity = if deleted { - let delete = Delete::new(&actor, object, public(), Some(&community), reason, &context)?; - AnnouncableActivities::Delete(delete) - } else { - let undo = UndoDelete::new(&actor, object, public(), Some(&community), reason, &context)?; - AnnouncableActivities::UndoDelete(undo) - }; - send_activity_in_community( - activity, - &actor, - &community.into(), - ActivitySendTargets::empty(), - is_mod_action, - &context, - ) - .await -} - #[tracing::instrument(skip_all)] pub(crate) async fn send_apub_delete_private_message( actor: &ApubPerson, pm: PrivateMessage, deleted: bool, context: Data, -) -> Result<(), LemmyError> { +) -> LemmyResult<()> { let recipient_id = pm.recipient_id; let recipient: ApubPerson = Person::read(&mut context.pool(), recipient_id) .await? @@ -135,8 +101,26 @@ pub(crate) async fn send_apub_delete_private_message( Ok(()) } +pub async fn send_apub_delete_user( + person: Person, + remove_data: bool, + context: Data, +) -> LemmyResult<()> { + let person: ApubPerson = person.into(); + + let deletable = DeletableObjects::Person(person.clone()); + let mut delete: Delete = Delete::new(&person, deletable, public(), None, None, &context)?; + delete.remove_data = Some(remove_data); + + let inboxes = ActivitySendTargets::to_all_instances(); + + send_lemmy_activity(&context, delete, &person, inboxes, true).await?; + Ok(()) +} + pub enum DeletableObjects { Community(ApubCommunity), + Person(ApubPerson), Comment(ApubComment), Post(ApubPost), PrivateMessage(ApubPrivateMessage), @@ -147,10 +131,13 @@ impl DeletableObjects { pub(crate) async fn read_from_db( ap_id: &Url, context: &Data, - ) -> Result { + ) -> LemmyResult { if let Some(c) = ApubCommunity::read_from_id(ap_id.clone(), context).await? { return Ok(DeletableObjects::Community(c)); } + if let Some(p) = ApubPerson::read_from_id(ap_id.clone(), context).await? { + return Ok(DeletableObjects::Person(p)); + } if let Some(p) = ApubPost::read_from_id(ap_id.clone(), context).await? 
{ return Ok(DeletableObjects::Post(p)); } @@ -166,6 +153,7 @@ impl DeletableObjects { pub(crate) fn id(&self) -> Url { match self { DeletableObjects::Community(c) => c.id(), + DeletableObjects::Person(p) => p.id(), DeletableObjects::Comment(c) => c.ap_id.clone().into(), DeletableObjects::Post(p) => p.ap_id.clone().into(), DeletableObjects::PrivateMessage(p) => p.ap_id.clone().into(), @@ -178,7 +166,7 @@ pub(in crate::activities) async fn verify_delete_activity( activity: &Delete, is_mod_action: bool, context: &Data, -) -> Result<(), LemmyError> { +) -> LemmyResult<()> { let object = DeletableObjects::read_from_db(activity.object.id(), context).await?; match object { DeletableObjects::Community(community) => { @@ -191,6 +179,11 @@ pub(in crate::activities) async fn verify_delete_activity( // community deletion is always a mod (or admin) action verify_mod_action(&activity.actor, &community, context).await?; } + DeletableObjects::Person(person) => { + verify_is_public(&activity.to, &[])?; + verify_person(&activity.actor, context).await?; + verify_urls_match(person.actor_id.inner(), activity.object.id())?; + } DeletableObjects::Post(p) => { verify_is_public(&activity.to, &[])?; verify_delete_post_or_comment( @@ -228,7 +221,7 @@ async fn verify_delete_post_or_comment( community: &ApubCommunity, is_mod_action: bool, context: &Data, -) -> Result<(), LemmyError> { +) -> LemmyResult<()> { verify_person_in_community(actor, community, context).await?; if is_mod_action { verify_mod_action(actor, community, context).await?; @@ -245,8 +238,9 @@ async fn receive_delete_action( object: &Url, actor: &ObjectId, deleted: bool, + do_purge_user_account: Option, context: &Data, -) -> Result<(), LemmyError> { +) -> LemmyResult<()> { match DeletableObjects::read_from_db(object, context).await? 
{ DeletableObjects::Community(community) => { if community.local { @@ -266,6 +260,13 @@ async fn receive_delete_action( ) .await?; } + DeletableObjects::Person(person) => { + if do_purge_user_account.unwrap_or(false) { + purge_user_account(person.id, context).await?; + } else { + Person::delete_account(&mut context.pool(), person.id).await?; + } + } DeletableObjects::Post(post) => { if deleted != post.deleted { Post::update( diff --git a/crates/apub/src/activities/deletion/undo_delete.rs b/crates/apub/src/activities/deletion/undo_delete.rs index 697153fc2..6328bb427 100644 --- a/crates/apub/src/activities/deletion/undo_delete.rs +++ b/crates/apub/src/activities/deletion/undo_delete.rs @@ -25,7 +25,7 @@ use lemmy_db_schema::{ }, traits::Crud, }; -use lemmy_utils::error::{LemmyError, LemmyErrorType}; +use lemmy_utils::error::{FederationError, LemmyError, LemmyErrorType, LemmyResult}; use url::Url; #[async_trait::async_trait] @@ -42,14 +42,14 @@ impl ActivityHandler for UndoDelete { } async fn verify(&self, data: &Data) -> Result<(), Self::Error> { - insert_received_activity(&self.id, data).await?; self.object.verify(data).await?; verify_delete_activity(&self.object, self.object.summary.is_some(), data).await?; Ok(()) } #[tracing::instrument(skip_all)] - async fn receive(self, context: &Data) -> Result<(), LemmyError> { + async fn receive(self, context: &Data) -> LemmyResult<()> { + insert_received_activity(&self.id, context).await?; if self.object.summary.is_some() { UndoDelete::receive_undo_remove_action( &self.actor.dereference(context).await?, @@ -58,7 +58,7 @@ impl ActivityHandler for UndoDelete { ) .await } else { - receive_delete_action(self.object.object.id(), &self.actor, false, context).await + receive_delete_action(self.object.object.id(), &self.actor, false, None, context).await } } } @@ -72,7 +72,7 @@ impl UndoDelete { community: Option<&Community>, summary: Option, context: &Data, - ) -> Result { + ) -> LemmyResult { let object = Delete::new(actor, object, to.clone(), community, summary, context)?; let id = generate_activity_id( @@ -96,11 +96,11 @@ impl UndoDelete { actor: &ApubPerson, object: &Url, context: &Data, - ) -> Result<(), LemmyError> { + ) -> LemmyResult<()> { match DeletableObjects::read_from_db(object, context).await? { DeletableObjects::Community(community) => { if community.local { - Err(LemmyErrorType::OnlyLocalAdminCanRestoreCommunity)? + Err(FederationError::OnlyLocalAdminCanRestoreCommunity)? 
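// The new Delete handling above also covers DeletableObjects::Person: the optional
// `remove_data` flag on the activity decides between a full purge and a soft account
// deletion. The snippet below is a minimal, self-contained sketch of that dispatch;
// `Deletion` and `person_deletion` are illustrative stand-ins, not the real Lemmy APIs.
#[derive(Debug, PartialEq)]
enum Deletion {
    Purge,      // remove_data == Some(true): wipe the account and its content
    SoftDelete, // remove_data unset or false: only mark the account as deleted
}

fn person_deletion(do_purge_user_account: Option<bool>) -> Deletion {
    if do_purge_user_account.unwrap_or(false) {
        Deletion::Purge
    } else {
        Deletion::SoftDelete
    }
}
// e.g. person_deletion(None) == Deletion::SoftDelete; person_deletion(Some(true)) == Deletion::Purge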
} let form = ModRemoveCommunityForm { mod_person_id: actor.id, @@ -155,7 +155,9 @@ impl UndoDelete { ) .await?; } - DeletableObjects::PrivateMessage(_) => unimplemented!(), + // TODO these need to be implemented yet, for now, return errors + DeletableObjects::PrivateMessage(_) => Err(LemmyErrorType::NotFound)?, + DeletableObjects::Person(_) => Err(LemmyErrorType::NotFound)?, } Ok(()) } diff --git a/crates/apub/src/activities/following/accept.rs b/crates/apub/src/activities/following/accept.rs index 381b05930..fa711b904 100644 --- a/crates/apub/src/activities/following/accept.rs +++ b/crates/apub/src/activities/following/accept.rs @@ -14,12 +14,12 @@ use lemmy_db_schema::{ source::{activity::ActivitySendTargets, community::CommunityFollower}, traits::Followable, }; -use lemmy_utils::error::LemmyError; +use lemmy_utils::error::{LemmyError, LemmyResult}; use url::Url; impl AcceptFollow { #[tracing::instrument(skip_all)] - pub async fn send(follow: Follow, context: &Data) -> Result<(), LemmyError> { + pub async fn send(follow: Follow, context: &Data) -> LemmyResult<()> { let user_or_community = follow.object.dereference_local(context).await?; let person = follow.actor.clone().dereference(context).await?; let accept = AcceptFollow { @@ -52,8 +52,7 @@ impl ActivityHandler for AcceptFollow { } #[tracing::instrument(skip_all)] - async fn verify(&self, context: &Data) -> Result<(), LemmyError> { - insert_received_activity(&self.id, context).await?; + async fn verify(&self, context: &Data) -> LemmyResult<()> { verify_urls_match(self.actor.inner(), self.object.object.inner())?; self.object.verify(context).await?; if let Some(to) = &self.to { @@ -63,7 +62,8 @@ impl ActivityHandler for AcceptFollow { } #[tracing::instrument(skip_all)] - async fn receive(self, context: &Data) -> Result<(), LemmyError> { + async fn receive(self, context: &Data) -> LemmyResult<()> { + insert_received_activity(&self.id, context).await?; let community = self.actor.dereference(context).await?; let person = self.object.actor.dereference(context).await?; // This will throw an error if no follow was requested diff --git a/crates/apub/src/activities/following/follow.rs b/crates/apub/src/activities/following/follow.rs index 6f6e2718f..02f29a1a9 100644 --- a/crates/apub/src/activities/following/follow.rs +++ b/crates/apub/src/activities/following/follow.rs @@ -24,8 +24,9 @@ use lemmy_db_schema::{ person::{PersonFollower, PersonFollowerForm}, }, traits::Followable, + CommunityVisibility, }; -use lemmy_utils::error::LemmyError; +use lemmy_utils::error::{LemmyError, LemmyErrorType, LemmyResult}; use url::Url; impl Follow { @@ -33,7 +34,7 @@ impl Follow { actor: &ApubPerson, community: &ApubCommunity, context: &Data, - ) -> Result { + ) -> LemmyResult { Ok(Follow { actor: actor.id().into(), object: community.id().into(), @@ -51,16 +52,7 @@ impl Follow { actor: &ApubPerson, community: &ApubCommunity, context: &Data, - ) -> Result<(), LemmyError> { - let community_follower_form = CommunityFollowerForm { - community_id: community.id, - person_id: actor.id, - pending: true, - }; - CommunityFollower::follow(&mut context.pool(), &community_follower_form) - .await - .ok(); - + ) -> LemmyResult<()> { let follow = Follow::new(actor, community, context)?; let inbox = if community.local { ActivitySendTargets::empty() @@ -85,8 +77,7 @@ impl ActivityHandler for Follow { } #[tracing::instrument(skip_all)] - async fn verify(&self, context: &Data) -> Result<(), LemmyError> { - insert_received_activity(&self.id, context).await?; + async fn 
verify(&self, context: &Data) -> LemmyResult<()> { verify_person(&self.actor, context).await?; let object = self.object.dereference(context).await?; if let UserOrCommunity::Community(c) = object { @@ -99,7 +90,8 @@ impl ActivityHandler for Follow { } #[tracing::instrument(skip_all)] - async fn receive(self, context: &Data) -> Result<(), LemmyError> { + async fn receive(self, context: &Data) -> LemmyResult<()> { + insert_received_activity(&self.id, context).await?; let actor = self.actor.dereference(context).await?; let object = self.object.dereference(context).await?; match object { @@ -112,6 +104,10 @@ impl ActivityHandler for Follow { PersonFollower::follow(&mut context.pool(), &form).await?; } UserOrCommunity::Community(c) => { + // Dont allow following local-only community via federation. + if c.visibility != CommunityVisibility::Public { + return Err(LemmyErrorType::NotFound.into()); + } let form = CommunityFollowerForm { community_id: c.id, person_id: actor.id, diff --git a/crates/apub/src/activities/following/mod.rs b/crates/apub/src/activities/following/mod.rs index c4f0bd0b7..7c7163f12 100644 --- a/crates/apub/src/activities/following/mod.rs +++ b/crates/apub/src/activities/following/mod.rs @@ -5,7 +5,7 @@ use crate::{ use activitypub_federation::config::Data; use lemmy_api_common::context::LemmyContext; use lemmy_db_schema::source::{community::Community, person::Person}; -use lemmy_utils::error::LemmyError; +use lemmy_utils::error::LemmyResult; pub mod accept; pub mod follow; @@ -16,7 +16,7 @@ pub async fn send_follow_community( person: Person, follow: bool, context: &Data, -) -> Result<(), LemmyError> { +) -> LemmyResult<()> { let community: ApubCommunity = community.into(); let actor: ApubPerson = person.into(); if follow { diff --git a/crates/apub/src/activities/following/undo_follow.rs b/crates/apub/src/activities/following/undo_follow.rs index 2f1c5a76b..ba6253946 100644 --- a/crates/apub/src/activities/following/undo_follow.rs +++ b/crates/apub/src/activities/following/undo_follow.rs @@ -20,7 +20,7 @@ use lemmy_db_schema::{ }, traits::Followable, }; -use lemmy_utils::error::LemmyError; +use lemmy_utils::error::{LemmyError, LemmyResult}; use url::Url; impl UndoFollow { @@ -29,7 +29,7 @@ impl UndoFollow { actor: &ApubPerson, community: &ApubCommunity, context: &Data, - ) -> Result<(), LemmyError> { + ) -> LemmyResult<()> { let object = Follow::new(actor, community, context)?; let undo = UndoFollow { actor: actor.id().into(), @@ -64,8 +64,7 @@ impl ActivityHandler for UndoFollow { } #[tracing::instrument(skip_all)] - async fn verify(&self, context: &Data) -> Result<(), LemmyError> { - insert_received_activity(&self.id, context).await?; + async fn verify(&self, context: &Data) -> LemmyResult<()> { verify_urls_match(self.actor.inner(), self.object.actor.inner())?; verify_person(&self.actor, context).await?; self.object.verify(context).await?; @@ -76,7 +75,8 @@ impl ActivityHandler for UndoFollow { } #[tracing::instrument(skip_all)] - async fn receive(self, context: &Data) -> Result<(), LemmyError> { + async fn receive(self, context: &Data) -> LemmyResult<()> { + insert_received_activity(&self.id, context).await?; let person = self.actor.dereference(context).await?; let object = self.object.object.dereference(context).await?; diff --git a/crates/apub/src/activities/mod.rs b/crates/apub/src/activities/mod.rs index 83e2caeb2..21723c390 100644 --- a/crates/apub/src/activities/mod.rs +++ b/crates/apub/src/activities/mod.rs @@ -9,10 +9,9 @@ use crate::{ }, 
create_or_update::private_message::send_create_or_update_pm, deletion::{ - delete_user::delete_user, send_apub_delete_in_community, - send_apub_delete_in_community_new, send_apub_delete_private_message, + send_apub_delete_user, DeletableObjects, }, voting::send_like_activity, @@ -23,13 +22,11 @@ use crate::{ create_or_update::{note::CreateOrUpdateNote, page::CreateOrUpdatePage}, CreateOrUpdateType, }, - CONTEXT, }; use activitypub_federation::{ config::Data, fetch::object_id::ObjectId, kinds::{activity::AnnounceType, public}, - protocol::context::WithContext, traits::{ActivityHandler, Actor}, }; use anyhow::anyhow; @@ -37,14 +34,16 @@ use lemmy_api_common::{ context::LemmyContext, send_activity::{ActivityChannel, SendActivityData}, }; -use lemmy_db_schema::source::{ - activity::{ActivitySendTargets, ActorType, SentActivity, SentActivityForm}, - community::Community, +use lemmy_db_schema::{ + source::{ + activity::{ActivitySendTargets, ActorType, SentActivity, SentActivityForm}, + community::Community, + }, + traits::Crud, }; use lemmy_db_views_actor::structs::{CommunityPersonBanView, CommunityView}; -use lemmy_utils::error::{LemmyError, LemmyErrorExt, LemmyErrorType, LemmyResult}; +use lemmy_utils::error::{FederationError, LemmyError, LemmyErrorExt, LemmyErrorType, LemmyResult}; use serde::Serialize; -use std::ops::Deref; use tracing::info; use url::{ParseError, Url}; use uuid::Uuid; @@ -62,7 +61,7 @@ pub mod voting; async fn verify_person( person_id: &ObjectId, context: &Data, -) -> Result<(), LemmyError> { +) -> LemmyResult<()> { let person = person_id.dereference(context).await?; if person.banned { Err(anyhow!("Person {} is banned", person_id)) @@ -79,21 +78,16 @@ pub(crate) async fn verify_person_in_community( person_id: &ObjectId, community: &ApubCommunity, context: &Data, -) -> Result<(), LemmyError> { +) -> LemmyResult<()> { let person = person_id.dereference(context).await?; if person.banned { - Err(LemmyErrorType::PersonIsBannedFromSite( + Err(FederationError::PersonIsBannedFromSite( person.actor_id.to_string(), ))? } let person_id = person.id; let community_id = community.id; - let is_banned = CommunityPersonBanView::get(&mut context.pool(), person_id, community_id).await?; - if is_banned { - Err(LemmyErrorType::PersonIsBannedFromCommunity)? - } else { - Ok(()) - } + CommunityPersonBanView::check(&mut context.pool(), person_id, community_id).await } /// Verify that mod action in community was performed by a moderator. @@ -106,15 +100,7 @@ pub(crate) async fn verify_mod_action( mod_id: &ObjectId, community: &Community, context: &Data, -) -> Result<(), LemmyError> { - let mod_ = mod_id.dereference(context).await?; - - let is_mod_or_admin = - CommunityView::is_mod_or_admin(&mut context.pool(), mod_.id, community.id).await?; - if is_mod_or_admin { - return Ok(()); - } - +) -> LemmyResult<()> { // mod action comes from the same instance as the community, so it was presumably done // by an instance admin. // TODO: federate instance admin status and check it here @@ -122,35 +108,33 @@ pub(crate) async fn verify_mod_action( return Ok(()); } - Err(LemmyErrorType::NotAModerator)? + let mod_ = mod_id.dereference(context).await?; + CommunityView::check_is_mod_or_admin(&mut context.pool(), mod_.id, community.id).await } -pub(crate) fn verify_is_public(to: &[Url], cc: &[Url]) -> Result<(), LemmyError> { +pub(crate) fn verify_is_public(to: &[Url], cc: &[Url]) -> LemmyResult<()> { if ![to, cc].iter().any(|set| set.contains(&public())) { - Err(LemmyErrorType::ObjectIsNotPublic)? 
+ Err(FederationError::ObjectIsNotPublic)? } else { Ok(()) } } -pub(crate) fn verify_community_matches( - a: &ObjectId, - b: T, -) -> Result<(), LemmyError> +pub(crate) fn verify_community_matches(a: &ObjectId, b: T) -> LemmyResult<()> where T: Into>, { let b: ObjectId = b.into(); if a != &b { - Err(LemmyErrorType::InvalidCommunity)? + Err(FederationError::InvalidCommunity)? } else { Ok(()) } } -pub(crate) fn check_community_deleted_or_removed(community: &Community) -> Result<(), LemmyError> { +pub(crate) fn check_community_deleted_or_removed(community: &Community) -> LemmyResult<()> { if community.deleted || community.removed { - Err(LemmyErrorType::CannotCreatePostOrCommentInDeletedOrRemovedCommunity)? + Err(FederationError::CannotCreatePostOrCommentInDeletedOrRemovedCommunity)? } else { Ok(()) } @@ -197,14 +181,13 @@ async fn send_lemmy_activity( actor: &ActorT, send_targets: ActivitySendTargets, sensitive: bool, -) -> Result<(), LemmyError> +) -> LemmyResult<()> where Activity: ActivityHandler + Serialize + Send + Sync + Clone, ActorT: Actor + GetActorType, Activity: ActivityHandler, { info!("Saving outgoing activity to queue {}", activity.id()); - let activity = WithContext::new(activity, CONTEXT.deref().clone()); let form = SentActivityForm { ap_id: activity.id().clone().into(), @@ -225,11 +208,12 @@ where Ok(()) } -pub async fn handle_outgoing_activities(context: Data) -> LemmyResult<()> { +pub async fn handle_outgoing_activities(context: Data) { while let Some(data) = ActivityChannel::retrieve_activity().await { - match_outgoing_activities(data, &context.reset_request_count()).await? + if let Err(e) = match_outgoing_activities(data, &context.reset_request_count()).await { + tracing::warn!("error while saving outgoing activity to db: {e}"); + } } - Ok(()) } pub async fn match_outgoing_activities( @@ -249,24 +233,31 @@ pub async fn match_outgoing_activities( CreateOrUpdatePage::send(post, creator_id, CreateOrUpdateType::Update, context).await } DeletePost(post, person, data) => { - send_apub_delete_in_community_new( + let community = Community::read(&mut context.pool(), post.community_id).await?; + send_apub_delete_in_community( person, - post.community_id, + community, DeletableObjects::Post(post.into()), None, data.deleted, - context, + &context, ) .await } - RemovePost(post, person, data) => { - send_apub_delete_in_community_new( - person, - post.community_id, + RemovePost { + post, + moderator, + reason, + removed, + } => { + let community = Community::read(&mut context.pool(), post.community_id).await?; + send_apub_delete_in_community( + moderator, + community, DeletableObjects::Post(post.into()), - data.reason.or_else(|| Some(String::new())), - data.removed, - context, + reason.or_else(|| Some(String::new())), + removed, + &context, ) .await } @@ -285,15 +276,25 @@ pub async fn match_outgoing_activities( let deletable = DeletableObjects::Comment(comment.into()); send_apub_delete_in_community(actor, community, deletable, None, is_deleted, &context).await } - RemoveComment(comment, actor, community, reason) => { + RemoveComment { + comment, + moderator, + community, + reason, + } => { let is_removed = comment.removed; let deletable = DeletableObjects::Comment(comment.into()); - send_apub_delete_in_community(actor, community, deletable, reason, is_removed, &context) - .await - } - LikePostOrComment(object_id, person, community, score) => { - send_like_activity(object_id, person, community, score, context).await + send_apub_delete_in_community( + moderator, community, deletable, 
reason, is_removed, &context, + ) + .await } + LikePostOrComment { + object_id, + actor, + community, + score, + } => send_like_activity(object_id, actor, community, score, context).await, FollowCommunity(community, person, follow) => { send_follow_community(community, person, follow, &context).await } @@ -302,10 +303,15 @@ pub async fn match_outgoing_activities( let deletable = DeletableObjects::Community(community.clone().into()); send_apub_delete_in_community(actor, community, deletable, None, removed, &context).await } - RemoveCommunity(actor, community, reason, removed) => { + RemoveCommunity { + moderator, + community, + reason, + removed, + } => { let deletable = DeletableObjects::Community(community.clone().into()); send_apub_delete_in_community( - actor, + moderator, community, deletable, reason.clone().or_else(|| Some(String::new())), @@ -314,13 +320,37 @@ pub async fn match_outgoing_activities( ) .await } - AddModToCommunity(actor, community_id, updated_mod_id, added) => { - send_add_mod_to_community(actor, community_id, updated_mod_id, added, context).await + AddModToCommunity { + moderator, + community_id, + target, + added, + } => send_add_mod_to_community(moderator, community_id, target, added, context).await, + BanFromCommunity { + moderator, + community_id, + target, + data, + } => send_ban_from_community(moderator, community_id, target, data, context).await, + BanFromSite { + moderator, + banned_user, + reason, + remove_or_restore_data, + ban, + expires, + } => { + send_ban_from_site( + moderator, + banned_user, + reason, + remove_or_restore_data, + ban, + expires, + context, + ) + .await } - BanFromCommunity(mod_, community_id, target, data) => { - send_ban_from_community(mod_, community_id, target, data, context).await - } - BanFromSite(mod_, target, data) => send_ban_from_site(mod_, target, data, context).await, CreatePrivateMessage(pm) => { send_create_or_update_pm(pm, CreateOrUpdateType::Create, context).await } @@ -330,10 +360,13 @@ pub async fn match_outgoing_activities( DeletePrivateMessage(person, pm, deleted) => { send_apub_delete_private_message(&person.into(), pm, deleted, context).await } - DeleteUser(person, delete_content) => delete_user(person, delete_content, context).await, - CreateReport(url, actor, community, reason) => { - Report::send(ObjectId::from(url), actor, community, reason, context).await - } + DeleteUser(person, remove_data) => send_apub_delete_user(person, remove_data, context).await, + CreateReport { + object_id, + actor, + community, + reason, + } => Report::send(ObjectId::from(object_id), actor, community, reason, context).await, } }; fed_task.await?; diff --git a/crates/apub/src/activities/voting/mod.rs b/crates/apub/src/activities/voting/mod.rs index c60235c05..7c39b2246 100644 --- a/crates/apub/src/activities/voting/mod.rs +++ b/crates/apub/src/activities/voting/mod.rs @@ -21,7 +21,7 @@ use lemmy_db_schema::{ }, traits::Likeable, }; -use lemmy_utils::error::LemmyError; +use lemmy_utils::error::LemmyResult; pub mod undo_vote; pub mod vote; @@ -32,8 +32,8 @@ pub(crate) async fn send_like_activity( community: Community, score: i16, context: Data, -) -> Result<(), LemmyError> { - let object_id: ObjectId = object_id.try_into()?; +) -> LemmyResult<()> { + let object_id: ObjectId = object_id.into(); let actor: ApubPerson = actor.into(); let community: ApubCommunity = community.into(); @@ -44,7 +44,7 @@ pub(crate) async fn send_like_activity( let activity = AnnouncableActivities::Vote(vote); send_activity_in_community(activity, &actor, 
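// Several SendActivityData variants above (RemovePost, RemoveComment, LikePostOrComment,
// RemoveCommunity, ...) move from positional tuple variants to struct variants with named
// fields, which keeps the match arms self-documenting. A generic illustration of the same
// pattern with a hypothetical enum (not the real SendActivityData):
enum Event {
    // tuple variant: the meaning of each field depends on its position
    Moved(i32, i32),
    // struct variant: fields are named both at the definition and at every use site
    Resized { width: u32, height: u32 },
}

fn describe(event: &Event) -> String {
    match event {
        Event::Moved(x, y) => format!("moved to ({x}, {y})"),
        Event::Resized { width, height } => format!("resized to {width}x{height}"),
    }
}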
&community, empty, false, &context).await } else { - // Lemmy API doesnt distinguish between Undo/Like and Undo/Dislike, so we hardcode it here. + // Lemmy API doesn't distinguish between Undo/Like and Undo/Dislike, so we hardcode it here. let vote = Vote::new(object_id, &actor, &community, VoteType::Like, &context)?; let undo_vote = UndoVote::new(vote, &actor, &community, &context)?; let activity = AnnouncableActivities::UndoVote(undo_vote); @@ -58,11 +58,10 @@ async fn vote_comment( actor: ApubPerson, comment: &ApubComment, context: &Data, -) -> Result<(), LemmyError> { +) -> LemmyResult<()> { let comment_id = comment.id; let like_form = CommentLikeForm { comment_id, - post_id: comment.post_id, person_id: actor.id, score: vote_type.into(), }; @@ -78,7 +77,7 @@ async fn vote_post( actor: ApubPerson, post: &ApubPost, context: &Data, -) -> Result<(), LemmyError> { +) -> LemmyResult<()> { let post_id = post.id; let like_form = PostLikeForm { post_id: post.id, @@ -96,7 +95,7 @@ async fn undo_vote_comment( actor: ApubPerson, comment: &ApubComment, context: &Data, -) -> Result<(), LemmyError> { +) -> LemmyResult<()> { let comment_id = comment.id; let person_id = actor.id; CommentLike::remove(&mut context.pool(), person_id, comment_id).await?; @@ -108,7 +107,7 @@ async fn undo_vote_post( actor: ApubPerson, post: &ApubPost, context: &Data, -) -> Result<(), LemmyError> { +) -> LemmyResult<()> { let post_id = post.id; let person_id = actor.id; PostLike::remove(&mut context.pool(), person_id, post_id).await?; diff --git a/crates/apub/src/activities/voting/undo_vote.rs b/crates/apub/src/activities/voting/undo_vote.rs index 9616c651f..61875d442 100644 --- a/crates/apub/src/activities/voting/undo_vote.rs +++ b/crates/apub/src/activities/voting/undo_vote.rs @@ -19,7 +19,7 @@ use activitypub_federation::{ traits::{ActivityHandler, Actor}, }; use lemmy_api_common::context::LemmyContext; -use lemmy_utils::error::LemmyError; +use lemmy_utils::error::{LemmyError, LemmyResult}; use url::Url; impl UndoVote { @@ -28,7 +28,7 @@ impl UndoVote { actor: &ApubPerson, community: &ApubCommunity, context: &Data, - ) -> Result { + ) -> LemmyResult { Ok(UndoVote { actor: actor.id().into(), object: vote, @@ -56,8 +56,7 @@ impl ActivityHandler for UndoVote { } #[tracing::instrument(skip_all)] - async fn verify(&self, context: &Data) -> Result<(), LemmyError> { - insert_received_activity(&self.id, context).await?; + async fn verify(&self, context: &Data) -> LemmyResult<()> { let community = self.community(context).await?; verify_person_in_community(&self.actor, &community, context).await?; verify_urls_match(self.actor.inner(), self.object.actor.inner())?; @@ -66,7 +65,8 @@ impl ActivityHandler for UndoVote { } #[tracing::instrument(skip_all)] - async fn receive(self, context: &Data) -> Result<(), LemmyError> { + async fn receive(self, context: &Data) -> LemmyResult<()> { + insert_received_activity(&self.id, context).await?; let actor = self.actor.dereference(context).await?; let object = self.object.object.dereference(context).await?; match object { diff --git a/crates/apub/src/activities/voting/vote.rs b/crates/apub/src/activities/voting/vote.rs index 926c29302..1cdc81952 100644 --- a/crates/apub/src/activities/voting/vote.rs +++ b/crates/apub/src/activities/voting/vote.rs @@ -2,7 +2,7 @@ use crate::{ activities::{ generate_activity_id, verify_person_in_community, - voting::{vote_comment, vote_post}, + voting::{undo_vote_comment, undo_vote_post, vote_comment, vote_post}, }, insert_received_activity, 
objects::{community::ApubCommunity, person::ApubPerson}, @@ -17,10 +17,9 @@ use activitypub_federation::{ fetch::object_id::ObjectId, traits::{ActivityHandler, Actor}, }; -use anyhow::anyhow; -use lemmy_api_common::context::LemmyContext; -use lemmy_db_schema::source::local_site::LocalSite; -use lemmy_utils::error::LemmyError; +use lemmy_api_common::{context::LemmyContext, utils::check_bot_account}; +use lemmy_db_schema::{source::local_site::LocalSite, FederationMode}; +use lemmy_utils::error::{LemmyError, LemmyResult}; use url::Url; impl Vote { @@ -30,7 +29,7 @@ impl Vote { community: &ApubCommunity, kind: VoteType, context: &Data, - ) -> Result { + ) -> LemmyResult { Ok(Vote { actor: actor.id().into(), object: object_id, @@ -55,28 +54,46 @@ impl ActivityHandler for Vote { } #[tracing::instrument(skip_all)] - async fn verify(&self, context: &Data) -> Result<(), LemmyError> { - insert_received_activity(&self.id, context).await?; + async fn verify(&self, context: &Data) -> LemmyResult<()> { let community = self.community(context).await?; verify_person_in_community(&self.actor, &community, context).await?; - let enable_downvotes = LocalSite::read(&mut context.pool()) - .await - .map(|l| l.enable_downvotes) - .unwrap_or(true); - if self.kind == VoteType::Dislike && !enable_downvotes { - Err(anyhow!("Downvotes disabled").into()) - } else { - Ok(()) - } + Ok(()) } #[tracing::instrument(skip_all)] - async fn receive(self, context: &Data) -> Result<(), LemmyError> { + async fn receive(self, context: &Data) -> LemmyResult<()> { + insert_received_activity(&self.id, context).await?; let actor = self.actor.dereference(context).await?; let object = self.object.dereference(context).await?; - match object { - PostOrComment::Post(p) => vote_post(&self.kind, actor, &p, context).await, - PostOrComment::Comment(c) => vote_comment(&self.kind, actor, &c, context).await, + + check_bot_account(&actor.0)?; + + // Check for enabled federation votes + let local_site = LocalSite::read(&mut context.pool()) + .await + .unwrap_or_default(); + + let (downvote_setting, upvote_setting) = match object { + PostOrComment::Post(_) => (local_site.post_downvotes, local_site.post_upvotes), + PostOrComment::Comment(_) => (local_site.comment_downvotes, local_site.comment_upvotes), + }; + + // Don't allow dislikes for either disabled, or local only votes + let downvote_fail = self.kind == VoteType::Dislike && downvote_setting != FederationMode::All; + let upvote_fail = self.kind == VoteType::Like && upvote_setting != FederationMode::All; + + if downvote_fail || upvote_fail { + // If this is a rejection, undo the vote + match object { + PostOrComment::Post(p) => undo_vote_post(actor, &p, context).await, + PostOrComment::Comment(c) => undo_vote_comment(actor, &c, context).await, + } + } else { + // Otherwise apply the vote normally + match object { + PostOrComment::Post(p) => vote_post(&self.kind, actor, &p, context).await, + PostOrComment::Comment(c) => vote_comment(&self.kind, actor, &c, context).await, + } } } } diff --git a/crates/apub/src/activity_lists.rs b/crates/apub/src/activity_lists.rs index d4ca20c33..9262236d8 100644 --- a/crates/apub/src/activity_lists.rs +++ b/crates/apub/src/activity_lists.rs @@ -16,7 +16,7 @@ use crate::{ note::CreateOrUpdateNote, page::CreateOrUpdatePage, }, - deletion::{delete::Delete, delete_user::DeleteUser, undo_delete::UndoDelete}, + deletion::{delete::Delete, undo_delete::UndoDelete}, following::{accept::AcceptFollow, follow::Follow, undo_follow::UndoFollow}, voting::{undo_vote::UndoVote, 
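// The reworked Vote::receive above gates federated votes on the local site's FederationMode
// settings: a Like or Dislike is only applied when the matching setting is FederationMode::All,
// otherwise the incoming vote is undone. The helper below is a simplified sketch of that rule;
// the enums are local stand-ins (only the `All` variant is taken from the patch, the rest is
// assumed for illustration).
#[derive(PartialEq)]
enum FederationMode {
    All,
    // any other mode (e.g. local-only or disabled) rejects federated votes
    Restricted,
}

enum VoteType {
    Like,
    Dislike,
}

fn federated_vote_allowed(kind: &VoteType, upvotes: &FederationMode, downvotes: &FederationMode) -> bool {
    match kind {
        VoteType::Like => *upvotes == FederationMode::All,
        VoteType::Dislike => *downvotes == FederationMode::All,
    }
}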
vote::Vote}, }, @@ -26,7 +26,7 @@ use crate::{ }; use activitypub_federation::{config::Data, traits::ActivityHandler}; use lemmy_api_common::context::LemmyContext; -use lemmy_utils::error::LemmyError; +use lemmy_utils::{error::LemmyResult, LemmyErrorType}; use serde::{Deserialize, Serialize}; use url::Url; @@ -35,7 +35,7 @@ use url::Url; /// This could theoretically be defined as an enum with variants `GroupInboxActivities` and /// `PersonInboxActivities`. In practice we need to write it out manually so that priorities /// are handled correctly. -#[derive(Debug, Deserialize, Serialize)] +#[derive(Debug, Deserialize, Serialize, Clone)] #[serde(untagged)] #[enum_delegate::implement(ActivityHandler)] pub enum SharedInboxActivities { @@ -98,20 +98,10 @@ pub enum AnnouncableActivities { Page(Page), } -#[derive(Clone, Debug, Deserialize, Serialize)] -#[serde(untagged)] -#[enum_delegate::implement(ActivityHandler)] -#[allow(clippy::enum_variant_names)] -pub enum SiteInboxActivities { - BlockUser(BlockUser), - UndoBlockUser(UndoBlockUser), - DeleteUser(DeleteUser), -} - #[async_trait::async_trait] impl InCommunity for AnnouncableActivities { #[tracing::instrument(skip(self, context))] - async fn community(&self, context: &Data) -> Result { + async fn community(&self, context: &Data) -> LemmyResult { use AnnouncableActivities::*; match self { CreateOrUpdateComment(a) => a.community(context).await, @@ -127,51 +117,49 @@ impl InCommunity for AnnouncableActivities { CollectionRemove(a) => a.community(context).await, LockPost(a) => a.community(context).await, UndoLockPost(a) => a.community(context).await, - Page(_) => unimplemented!(), + Page(_) => Err(LemmyErrorType::NotFound.into()), } } } #[cfg(test)] mod tests { - #![allow(clippy::unwrap_used)] - #![allow(clippy::indexing_slicing)] use crate::{ - activity_lists::{GroupInboxActivities, PersonInboxActivities, SiteInboxActivities}, + activity_lists::{GroupInboxActivities, PersonInboxActivities, SharedInboxActivities}, protocol::tests::{test_json, test_parse_lemmy_item}, }; + use lemmy_utils::error::LemmyResult; #[test] - fn test_group_inbox() { - test_parse_lemmy_item::("assets/lemmy/activities/following/follow.json") - .unwrap(); + fn test_group_inbox() -> LemmyResult<()> { + test_parse_lemmy_item::("assets/lemmy/activities/following/follow.json")?; test_parse_lemmy_item::( "assets/lemmy/activities/create_or_update/create_note.json", - ) - .unwrap(); + )?; + Ok(()) } #[test] - fn test_person_inbox() { - test_parse_lemmy_item::("assets/lemmy/activities/following/accept.json") - .unwrap(); + fn test_person_inbox() -> LemmyResult<()> { + test_parse_lemmy_item::( + "assets/lemmy/activities/following/accept.json", + )?; test_parse_lemmy_item::( "assets/lemmy/activities/create_or_update/create_note.json", - ) - .unwrap(); + )?; test_parse_lemmy_item::( "assets/lemmy/activities/create_or_update/create_private_message.json", - ) - .unwrap(); - test_json::("assets/mastodon/activities/follow.json").unwrap(); + )?; + test_json::("assets/mastodon/activities/follow.json")?; + Ok(()) } #[test] - fn test_site_inbox() { - test_parse_lemmy_item::( + fn test_shared_inbox() -> LemmyResult<()> { + test_parse_lemmy_item::( "assets/lemmy/activities/deletion/delete_user.json", - ) - .unwrap(); + )?; + Ok(()) } } diff --git a/crates/apub/src/api/list_comments.rs b/crates/apub/src/api/list_comments.rs index 7d1de019e..3e7a2f4eb 100644 --- a/crates/apub/src/api/list_comments.rs +++ b/crates/apub/src/api/list_comments.rs @@ -1,3 +1,4 @@ +use 
super::comment_sort_type_with_default; use crate::{ api::listing_type_with_default, fetcher::resolve_actor_identifier, @@ -11,34 +12,45 @@ use lemmy_api_common::{ utils::check_private_instance, }; use lemmy_db_schema::{ - source::{comment::Comment, community::Community, local_site::LocalSite}, + source::{comment::Comment, community::Community}, traits::Crud, }; -use lemmy_db_views::{comment_view::CommentQuery, structs::LocalUserView}; -use lemmy_utils::error::{LemmyError, LemmyErrorExt, LemmyErrorType}; +use lemmy_db_views::{ + comment_view::CommentQuery, + structs::{LocalUserView, SiteView}, +}; +use lemmy_utils::error::{LemmyError, LemmyErrorExt, LemmyErrorType, LemmyResult}; #[tracing::instrument(skip(context))] pub async fn list_comments( data: Query, context: Data, local_user_view: Option, -) -> Result, LemmyError> { - let local_site = LocalSite::read(&mut context.pool()).await?; - check_private_instance(&local_user_view, &local_site)?; +) -> LemmyResult> { + let site_view = SiteView::read_local(&mut context.pool()).await?; + check_private_instance(&local_user_view, &site_view.local_site)?; let community_id = if let Some(name) = &data.community_name { - Some(resolve_actor_identifier::(name, &context, &None, true).await?) - .map(|c| c.id) + Some( + resolve_actor_identifier::(name, &context, &local_user_view, true) + .await?, + ) + .map(|c| c.id) } else { data.community_id }; - let sort = data.sort; + let local_user_ref = local_user_view.as_ref().map(|u| &u.local_user); + let sort = Some(comment_sort_type_with_default( + data.sort, + local_user_ref, + &site_view.local_site, + )); let max_depth = data.max_depth; - let saved_only = data.saved_only.unwrap_or_default(); + let saved_only = data.saved_only; - let liked_only = data.liked_only.unwrap_or_default(); - let disliked_only = data.disliked_only.unwrap_or_default(); - if liked_only && disliked_only { + let liked_only = data.liked_only; + let disliked_only = data.disliked_only; + if liked_only.unwrap_or_default() && disliked_only.unwrap_or_default() { return Err(LemmyError::from(LemmyErrorType::ContradictingFilters)); } @@ -48,9 +60,10 @@ pub async fn list_comments( let listing_type = Some(listing_type_with_default( data.type_, - &local_site, + local_user_view.as_ref().map(|u| &u.local_user), + &site_view.local_site, community_id, - )?); + )); // If a parent_id is given, fetch the comment to get the path let parent_path = if let Some(parent_id) = parent_id { @@ -61,6 +74,8 @@ pub async fn list_comments( let parent_path_cloned = parent_path.clone(); let post_id = data.post_id; + let local_user = local_user_view.as_ref().map(|l| &l.local_user); + let comments = CommentQuery { listing_type, sort, @@ -71,12 +86,12 @@ pub async fn list_comments( community_id, parent_path: parent_path_cloned, post_id, - local_user: local_user_view.as_ref(), + local_user, page, limit, ..Default::default() } - .list(&mut context.pool()) + .list(&site_view.site, &mut context.pool()) .await .with_lemmy_type(LemmyErrorType::CouldntGetComments)?; diff --git a/crates/apub/src/api/list_posts.rs b/crates/apub/src/api/list_posts.rs index dc3618c50..d75a82d3b 100644 --- a/crates/apub/src/api/list_posts.rs +++ b/crates/apub/src/api/list_posts.rs @@ -1,5 +1,5 @@ use crate::{ - api::listing_type_with_default, + api::{listing_type_with_default, post_sort_type_with_default}, fetcher::resolve_actor_identifier, objects::community::ApubCommunity, }; @@ -8,48 +8,59 @@ use actix_web::web::{Json, Query}; use lemmy_api_common::{ context::LemmyContext, post::{GetPosts, 
GetPostsResponse}, - utils::check_private_instance, + utils::{check_conflicting_like_filters, check_private_instance}, }; -use lemmy_db_schema::source::{community::Community, local_site::LocalSite}; +use lemmy_db_schema::source::community::Community; use lemmy_db_views::{ post_view::PostQuery, - structs::{LocalUserView, PaginationCursor}, + structs::{LocalUserView, PaginationCursor, SiteView}, }; -use lemmy_utils::error::{LemmyError, LemmyErrorExt, LemmyErrorType}; +use lemmy_utils::error::{LemmyErrorExt, LemmyErrorType, LemmyResult}; #[tracing::instrument(skip(context))] pub async fn list_posts( data: Query, context: Data, local_user_view: Option, -) -> Result, LemmyError> { - let local_site = LocalSite::read(&mut context.pool()).await?; +) -> LemmyResult> { + let local_site = SiteView::read_local(&mut context.pool()).await?; - check_private_instance(&local_user_view, &local_site)?; - - let sort = data.sort; + check_private_instance(&local_user_view, &local_site.local_site)?; let page = data.page; let limit = data.limit; let community_id = if let Some(name) = &data.community_name { - Some(resolve_actor_identifier::(name, &context, &None, true).await?) - .map(|c| c.id) + Some( + resolve_actor_identifier::(name, &context, &local_user_view, true) + .await?, + ) + .map(|c| c.id) } else { data.community_id }; - let saved_only = data.saved_only.unwrap_or_default(); + let saved_only = data.saved_only; + let show_hidden = data.show_hidden; + let show_read = data.show_read; + let show_nsfw = data.show_nsfw; - let liked_only = data.liked_only.unwrap_or_default(); - let disliked_only = data.disliked_only.unwrap_or_default(); - if liked_only && disliked_only { - return Err(LemmyError::from(LemmyErrorType::ContradictingFilters)); - } + let liked_only = data.liked_only; + let disliked_only = data.disliked_only; + check_conflicting_like_filters(liked_only, disliked_only)?; + let local_user = local_user_view.as_ref().map(|u| &u.local_user); let listing_type = Some(listing_type_with_default( data.type_, - &local_site, + local_user, + &local_site.local_site, community_id, - )?); + )); + + let sort = Some(post_sort_type_with_default( + data.sort, + local_user, + &local_site.local_site, + )); + // parse pagination token let page_after = if let Some(pa) = &data.page_cursor { Some(pa.read(&mut context.pool()).await?) @@ -58,7 +69,7 @@ pub async fn list_posts( }; let posts = PostQuery { - local_user: local_user_view.as_ref(), + local_user, listing_type, sort, community_id, @@ -68,9 +79,12 @@ pub async fn list_posts( page, page_after, limit, + show_hidden, + show_read, + show_nsfw, ..Default::default() } - .list(&mut context.pool()) + .list(&local_site.site, &mut context.pool()) .await .with_lemmy_type(LemmyErrorType::CouldntGetPosts)?; diff --git a/crates/apub/src/api/mod.rs b/crates/apub/src/api/mod.rs index 59586e477..580be3228 100644 --- a/crates/apub/src/api/mod.rs +++ b/crates/apub/src/api/mod.rs @@ -1,5 +1,10 @@ -use lemmy_db_schema::{newtypes::CommunityId, source::local_site::LocalSite, ListingType}; -use lemmy_utils::error::LemmyError; +use lemmy_db_schema::{ + newtypes::CommunityId, + source::{local_site::LocalSite, local_user::LocalUser}, + CommentSortType, + ListingType, + PostSortType, +}; pub mod list_comments; pub mod list_posts; @@ -12,15 +17,47 @@ pub mod user_settings_backup; /// Returns default listing type, depending if the query is for frontpage or community. 
fn listing_type_with_default( type_: Option, + local_user: Option<&LocalUser>, local_site: &LocalSite, community_id: Option, -) -> Result { +) -> ListingType { // On frontpage use listing type from param or admin configured default - let listing_type = if community_id.is_none() { - type_.unwrap_or(local_site.default_post_listing_type) + if community_id.is_none() { + type_.unwrap_or( + local_user + .map(|u| u.default_listing_type) + .unwrap_or(local_site.default_post_listing_type), + ) } else { // inside of community show everything ListingType::All - }; - Ok(listing_type) + } +} + +/// Returns a default instance-level post sort type, if none is given by the user. +/// Order is type, local user default, then site default. +fn post_sort_type_with_default( + type_: Option, + local_user: Option<&LocalUser>, + local_site: &LocalSite, +) -> PostSortType { + type_.unwrap_or( + local_user + .map(|u| u.default_post_sort_type) + .unwrap_or(local_site.default_post_sort_type), + ) +} + +/// Returns a default instance-level comment sort type, if none is given by the user. +/// Order is type, local user default, then site default. +fn comment_sort_type_with_default( + type_: Option, + local_user: Option<&LocalUser>, + local_site: &LocalSite, +) -> CommentSortType { + type_.unwrap_or( + local_user + .map(|u| u.default_comment_sort_type) + .unwrap_or(local_site.default_comment_sort_type), + ) } diff --git a/crates/apub/src/api/read_community.rs b/crates/apub/src/api/read_community.rs index afa6fb829..f94769158 100644 --- a/crates/apub/src/api/read_community.rs +++ b/crates/apub/src/api/read_community.rs @@ -4,24 +4,23 @@ use actix_web::web::{Json, Query}; use lemmy_api_common::{ community::{GetCommunity, GetCommunityResponse}, context::LemmyContext, - utils::{check_private_instance, is_mod_or_admin_opt}, + utils::{check_private_instance, is_mod_or_admin_opt, read_site_for_actor}, }; use lemmy_db_schema::source::{ actor_language::CommunityLanguage, community::Community, local_site::LocalSite, - site::Site, }; use lemmy_db_views::structs::LocalUserView; use lemmy_db_views_actor::structs::{CommunityModeratorView, CommunityView}; -use lemmy_utils::error::{LemmyError, LemmyErrorExt, LemmyErrorExt2, LemmyErrorType}; +use lemmy_utils::error::{LemmyErrorType, LemmyResult}; #[tracing::instrument(skip(context))] pub async fn get_community( data: Query, context: Data, local_user_view: Option, -) -> Result, LemmyError> { +) -> LemmyResult> { let local_site = LocalSite::read(&mut context.pool()).await?; if data.name.is_none() && data.id.is_none() { @@ -30,15 +29,14 @@ pub async fn get_community( check_private_instance(&local_user_view, &local_site)?; - let person_id = local_user_view.as_ref().map(|u| u.person.id); + let local_user = local_user_view.as_ref().map(|u| &u.local_user); let community_id = match data.id { Some(id) => id, None => { let name = data.name.clone().unwrap_or_else(|| "main".to_string()); resolve_actor_identifier::(&name, &context, &local_user_view, true) - .await - .with_lemmy_type(LemmyErrorType::CouldntFindCommunity)? + .await? 
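// The *_with_default helpers added above resolve settings in the order: explicit request
// parameter, then the local user's saved default, then the site-wide default. A generic
// illustration of that fallback chain (hypothetical helper, not part of the patch):
fn with_default<T>(param: Option<T>, user_default: Option<T>, site_default: T) -> T {
    param.unwrap_or_else(|| user_default.unwrap_or(site_default))
}
// e.g. with_default(None, Some("Hot"), "Active") == "Hot"
//      with_default(None, None, "Active") == "Active"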
.id } }; @@ -54,25 +52,14 @@ pub async fn get_community( let community_view = CommunityView::read( &mut context.pool(), community_id, - person_id, + local_user, is_mod_or_admin, ) - .await - .with_lemmy_type(LemmyErrorType::CouldntFindCommunity)?; + .await?; - let moderators = CommunityModeratorView::for_community(&mut context.pool(), community_id) - .await - .with_lemmy_type(LemmyErrorType::CouldntFindCommunity)?; + let moderators = CommunityModeratorView::for_community(&mut context.pool(), community_id).await?; - let site_id = Site::instance_actor_id_from_url(community_view.community.actor_id.clone().into()); - let mut site = Site::read_from_apub_id(&mut context.pool(), &site_id.into()).await?; - // no need to include metadata for local site (its already available through other endpoints). - // this also prevents us from leaking the federation private key. - if let Some(s) = &site { - if s.actor_id.domain() == Some(context.settings().hostname.as_ref()) { - site = None; - } - } + let site = read_site_for_actor(community_view.community.actor_id.clone(), &context).await?; let community_id = community_view.community.id; let discussion_languages = CommunityLanguage::read(&mut context.pool(), community_id).await?; diff --git a/crates/apub/src/api/read_person.rs b/crates/apub/src/api/read_person.rs index 26ad287f1..fac68cd63 100644 --- a/crates/apub/src/api/read_person.rs +++ b/crates/apub/src/api/read_person.rs @@ -4,41 +4,41 @@ use actix_web::web::{Json, Query}; use lemmy_api_common::{ context::LemmyContext, person::{GetPersonDetails, GetPersonDetailsResponse}, - utils::check_private_instance, + utils::{check_private_instance, read_site_for_actor}, }; -use lemmy_db_schema::{ - source::{local_site::LocalSite, person::Person}, - utils::post_to_comment_sort_type, +use lemmy_db_schema::{source::person::Person, utils::post_to_comment_sort_type}; +use lemmy_db_views::{ + comment_view::CommentQuery, + post_view::PostQuery, + structs::{LocalUserView, SiteView}, }; -use lemmy_db_views::{comment_view::CommentQuery, post_view::PostQuery, structs::LocalUserView}; use lemmy_db_views_actor::structs::{CommunityModeratorView, PersonView}; -use lemmy_utils::error::{LemmyError, LemmyErrorExt2, LemmyErrorType}; +use lemmy_utils::error::{LemmyErrorType, LemmyResult}; #[tracing::instrument(skip(context))] pub async fn read_person( data: Query, context: Data, local_user_view: Option, -) -> Result, LemmyError> { +) -> LemmyResult> { // Check to make sure a person name or an id is given if data.username.is_none() && data.person_id.is_none() { Err(LemmyErrorType::NoIdGiven)? } - let local_site = LocalSite::read(&mut context.pool()).await?; + let local_site = SiteView::read_local(&mut context.pool()).await?; - check_private_instance(&local_user_view, &local_site)?; + check_private_instance(&local_user_view, &local_site.local_site)?; let person_details_id = match data.person_id { Some(id) => id, None => { if let Some(username) = &data.username { resolve_actor_identifier::(username, &context, &local_user_view, true) - .await - .with_lemmy_type(LemmyErrorType::CouldntFindPerson)? + .await? .id } else { - Err(LemmyErrorType::CouldntFindPerson)? + Err(LemmyErrorType::NotFound)? 
} } }; @@ -50,50 +50,57 @@ pub async fn read_person( let sort = data.sort; let page = data.page; let limit = data.limit; - let saved_only = data.saved_only.unwrap_or_default(); + let saved_only = data.saved_only; let community_id = data.community_id; // If its saved only, you don't care what creator it was // Or, if its not saved, then you only want it for that specific creator - let creator_id = if !saved_only { + let creator_id = if !saved_only.unwrap_or_default() { Some(person_details_id) } else { None }; + let local_user = local_user_view.as_ref().map(|l| &l.local_user); + let posts = PostQuery { sort, saved_only, - local_user: local_user_view.as_ref(), + local_user, community_id, - is_profile_view: true, page, limit, creator_id, ..Default::default() } - .list(&mut context.pool()) + .list(&local_site.site, &mut context.pool()) .await?; let comments = CommentQuery { - local_user: local_user_view.as_ref(), + local_user, sort: sort.map(post_to_comment_sort_type), saved_only, community_id, - is_profile_view: true, page, limit, creator_id, ..Default::default() } - .list(&mut context.pool()) + .list(&local_site.site, &mut context.pool()) .await?; - let moderates = - CommunityModeratorView::for_person(&mut context.pool(), person_details_id).await?; + let moderates = CommunityModeratorView::for_person( + &mut context.pool(), + person_details_id, + local_user_view.map(|l| l.local_user).as_ref(), + ) + .await?; + + let site = read_site_for_actor(person_view.person.actor_id.clone(), &context).await?; // Return the jwt Ok(Json(GetPersonDetailsResponse { person_view, + site, moderates, comments, posts, diff --git a/crates/apub/src/api/resolve_object.rs b/crates/apub/src/api/resolve_object.rs index e081377f6..d9d50e69e 100644 --- a/crates/apub/src/api/resolve_object.rs +++ b/crates/apub/src/api/resolve_object.rs @@ -1,78 +1,190 @@ -use crate::fetcher::search::{ - search_query_to_object_id, - search_query_to_object_id_local, - SearchableObjects, +use crate::fetcher::{ + post_or_comment::PostOrComment, + search::{search_query_to_object_id, search_query_to_object_id_local, SearchableObjects}, + user_or_community::UserOrCommunity, }; use activitypub_federation::config::Data; use actix_web::web::{Json, Query}; -use diesel::NotFound; use lemmy_api_common::{ context::LemmyContext, site::{ResolveObject, ResolveObjectResponse}, utils::check_private_instance, }; -use lemmy_db_schema::{newtypes::PersonId, source::local_site::LocalSite, utils::DbPool}; +use lemmy_db_schema::{source::local_site::LocalSite, utils::DbPool}; use lemmy_db_views::structs::{CommentView, LocalUserView, PostView}; use lemmy_db_views_actor::structs::{CommunityView, PersonView}; -use lemmy_utils::error::{LemmyError, LemmyErrorExt2, LemmyErrorType}; +use lemmy_utils::error::{LemmyErrorExt2, LemmyErrorType, LemmyResult}; #[tracing::instrument(skip(context))] pub async fn resolve_object( data: Query, context: Data, local_user_view: Option, -) -> Result, LemmyError> { +) -> LemmyResult> { let local_site = LocalSite::read(&mut context.pool()).await?; check_private_instance(&local_user_view, &local_site)?; - let person_id = local_user_view.map(|v| v.person.id); // If we get a valid personId back we can safely assume that the user is authenticated, // if there's no personId then the JWT was missing or invalid. 
- let is_authenticated = person_id.is_some(); + let is_authenticated = local_user_view.is_some(); - let res = if is_authenticated { + let res = if is_authenticated || cfg!(debug_assertions) { // user is fully authenticated; allow remote lookups as well. - search_query_to_object_id(&data.q, &context).await + search_query_to_object_id(data.q.clone(), &context).await } else { // user isn't authenticated only allow a local search. search_query_to_object_id_local(&data.q, &context).await } - .with_lemmy_type(LemmyErrorType::CouldntFindObject)?; + .with_lemmy_type(LemmyErrorType::NotFound)?; - convert_response(res, person_id, &mut context.pool()) + convert_response(res, local_user_view, &mut context.pool()) .await - .with_lemmy_type(LemmyErrorType::CouldntFindObject) + .with_lemmy_type(LemmyErrorType::NotFound) } async fn convert_response( object: SearchableObjects, - user_id: Option, + local_user_view: Option, pool: &mut DbPool<'_>, -) -> Result, LemmyError> { - use SearchableObjects::*; - let removed_or_deleted; +) -> LemmyResult> { let mut res = ResolveObjectResponse::default(); + let local_user = local_user_view.map(|l| l.local_user); + let is_admin = local_user.clone().map(|l| l.admin).unwrap_or_default(); + match object { - Person(p) => { - removed_or_deleted = p.deleted; - res.person = Some(PersonView::read(pool, p.id).await?) - } - Community(c) => { - removed_or_deleted = c.deleted || c.removed; - res.community = Some(CommunityView::read(pool, c.id, user_id, false).await?) - } - Post(p) => { - removed_or_deleted = p.deleted || p.removed; - res.post = Some(PostView::read(pool, p.id, user_id, false).await?) - } - Comment(c) => { - removed_or_deleted = c.deleted || c.removed; - res.comment = Some(CommentView::read(pool, c.id, user_id).await?) - } + SearchableObjects::PostOrComment(pc) => match *pc { + PostOrComment::Post(p) => { + res.post = Some(PostView::read(pool, p.id, local_user.as_ref(), is_admin).await?) + } + PostOrComment::Comment(c) => { + res.comment = Some(CommentView::read(pool, c.id, local_user.as_ref()).await?) + } + }, + SearchableObjects::PersonOrCommunity(pc) => match *pc { + UserOrCommunity::User(u) => res.person = Some(PersonView::read(pool, u.id).await?), + UserOrCommunity::Community(c) => { + res.community = Some(CommunityView::read(pool, c.id, local_user.as_ref(), is_admin).await?) 
+ } + }, }; - // if the object was deleted from database, dont return it - if removed_or_deleted { - Err(NotFound {}.into()) - } else { - Ok(Json(res)) + + Ok(Json(res)) +} + +#[cfg(test)] +mod tests { + use crate::api::resolve_object::resolve_object; + use actix_web::web::Query; + use lemmy_api_common::{context::LemmyContext, site::ResolveObject}; + use lemmy_db_schema::{ + source::{ + community::{Community, CommunityInsertForm}, + instance::Instance, + local_site::{LocalSite, LocalSiteInsertForm}, + post::{Post, PostInsertForm, PostUpdateForm}, + site::{Site, SiteInsertForm}, + }, + traits::Crud, + }; + use lemmy_db_views::structs::LocalUserView; + use lemmy_utils::{error::LemmyResult, LemmyErrorType}; + use serial_test::serial; + + #[tokio::test] + #[serial] + #[expect(clippy::unwrap_used)] + async fn test_object_visibility() -> LemmyResult<()> { + let context = LemmyContext::init_test_context().await; + let pool = &mut context.pool(); + + let name = "test_local_user_name"; + let bio = "test_local_user_bio"; + + let creator = LocalUserView::create_test_user(pool, name, bio, false).await?; + let regular_user = LocalUserView::create_test_user(pool, name, bio, false).await?; + let admin_user = LocalUserView::create_test_user(pool, name, bio, true).await?; + + let instance_id = creator.person.instance_id; + let site_form = SiteInsertForm::new("test site".to_string(), instance_id); + let site = Site::create(pool, &site_form).await?; + + let local_site_form = LocalSiteInsertForm { + site_setup: Some(true), + private_instance: Some(false), + ..LocalSiteInsertForm::new(site.id) + }; + LocalSite::create(pool, &local_site_form).await?; + + let community = Community::create( + pool, + &CommunityInsertForm::new( + instance_id, + "test".to_string(), + "test".to_string(), + "pubkey".to_string(), + ), + ) + .await?; + + let post_insert_form = PostInsertForm::new("Test".to_string(), creator.person.id, community.id); + let post = Post::create(pool, &post_insert_form).await?; + + let query = format!("q={}", post.ap_id).to_string(); + let query: Query = Query::from_query(&query)?; + + // Objects should be resolvable without authentication + let res = resolve_object(query.clone(), context.reset_request_count(), None).await?; + assert_eq!(res.post.as_ref().unwrap().post.ap_id, post.ap_id); + // Objects should be resolvable by regular users + let res = resolve_object( + query.clone(), + context.reset_request_count(), + Some(regular_user.clone()), + ) + .await?; + assert_eq!(res.post.as_ref().unwrap().post.ap_id, post.ap_id); + // Objects should be resolvable by admins + let res = resolve_object( + query.clone(), + context.reset_request_count(), + Some(admin_user.clone()), + ) + .await?; + assert_eq!(res.post.as_ref().unwrap().post.ap_id, post.ap_id); + + Post::update( + pool, + post.id, + &PostUpdateForm { + deleted: Some(true), + ..Default::default() + }, + ) + .await?; + + // Deleted objects should not be resolvable without authentication + let res = resolve_object(query.clone(), context.reset_request_count(), None).await; + assert!(res.is_err_and(|e| e.error_type == LemmyErrorType::NotFound)); + // Deleted objects should not be resolvable by regular users + let res = resolve_object( + query.clone(), + context.reset_request_count(), + Some(regular_user.clone()), + ) + .await; + assert!(res.is_err_and(|e| e.error_type == LemmyErrorType::NotFound)); + // Deleted objects should be resolvable by admins + let res = resolve_object( + query.clone(), + context.reset_request_count(), + Some(admin_user.clone()), + ) 
+ .await?; + assert_eq!(res.post.as_ref().unwrap().post.ap_id, post.ap_id); + + LocalSite::delete(pool).await?; + Site::delete(pool, site.id).await?; + Instance::delete(pool, instance_id).await?; + + Ok(()) } } diff --git a/crates/apub/src/api/search.rs b/crates/apub/src/api/search.rs index b854a91d1..cdc9bc55e 100644 --- a/crates/apub/src/api/search.rs +++ b/crates/apub/src/api/search.rs @@ -4,26 +4,30 @@ use actix_web::web::{Json, Query}; use lemmy_api_common::{ context::LemmyContext, site::{Search, SearchResponse}, - utils::{check_private_instance, is_admin}, + utils::{check_conflicting_like_filters, check_private_instance, is_admin}, }; -use lemmy_db_schema::{ - source::{community::Community, local_site::LocalSite}, - utils::post_to_comment_sort_type, - SearchType, +use lemmy_db_schema::{source::community::Community, utils::post_to_comment_sort_type, SearchType}; +use lemmy_db_views::{ + comment_view::CommentQuery, + post_view::PostQuery, + structs::{LocalUserView, SiteView}, }; -use lemmy_db_views::{comment_view::CommentQuery, post_view::PostQuery, structs::LocalUserView}; -use lemmy_db_views_actor::{community_view::CommunityQuery, person_view::PersonQuery}; -use lemmy_utils::error::LemmyError; +use lemmy_db_views_actor::{ + community_view::CommunityQuery, + person_view::PersonQuery, + structs::CommunitySortType, +}; +use lemmy_utils::error::LemmyResult; #[tracing::instrument(skip(context))] pub async fn search( data: Query, context: Data, local_user_view: Option, -) -> Result, LemmyError> { - let local_site = LocalSite::read(&mut context.pool()).await?; +) -> LemmyResult> { + let local_site = SiteView::read_local(&mut context.pool()).await?; - check_private_instance(&local_user_view, &local_site)?; + check_private_instance(&local_user_view, &local_site.local_site)?; let is_admin = local_user_view .as_ref() @@ -37,163 +41,136 @@ pub async fn search( // TODO no clean / non-nsfw searching rn - let q = data.q.clone(); - let page = data.page; - let limit = data.limit; - let sort = data.sort; - let listing_type = data.listing_type; - let search_type = data.type_.unwrap_or(SearchType::All); - let community_id = if let Some(name) = &data.community_name { + let Query(Search { + q, + community_id, + community_name, + creator_id, + type_, + sort, + listing_type, + page, + limit, + title_only, + post_url_only, + saved_only, + liked_only, + disliked_only, + }) = data; + + let q = q.clone(); + let search_type = type_.unwrap_or(SearchType::All); + let community_id = if let Some(name) = &community_name { Some( resolve_actor_identifier::(name, &context, &local_user_view, false) .await?, ) .map(|c| c.id) } else { - data.community_id + community_id }; - let creator_id = data.creator_id; - let local_user = local_user_view.as_ref().map(|l| l.local_user.clone()); + let local_user = local_user_view.as_ref().map(|l| &l.local_user); + + check_conflicting_like_filters(liked_only, disliked_only)?; + + let posts_query = PostQuery { + sort, + listing_type, + community_id, + creator_id, + local_user, + search_term: Some(q.clone()), + page, + limit, + title_only, + url_only: post_url_only, + liked_only, + disliked_only, + saved_only, + ..Default::default() + }; + + let comment_query = CommentQuery { + sort: sort.map(post_to_comment_sort_type), + listing_type, + search_term: Some(q.clone()), + community_id, + creator_id, + local_user, + page, + limit, + liked_only, + disliked_only, + saved_only, + ..Default::default() + }; + + let community_query = CommunityQuery { + sort: sort.map(CommunitySortType::from), + 
listing_type, + search_term: Some(q.clone()), + title_only, + local_user, + is_mod_or_admin: is_admin, + page, + limit, + ..Default::default() + }; + + let person_query = PersonQuery { + sort, + search_term: Some(q.clone()), + listing_type, + page, + limit, + }; + match search_type { SearchType::Posts => { - posts = PostQuery { - sort: (sort), - listing_type: (listing_type), - community_id: (community_id), - creator_id: (creator_id), - local_user: (local_user_view.as_ref()), - search_term: (Some(q)), - page: (page), - limit: (limit), - ..Default::default() - } - .list(&mut context.pool()) - .await?; + posts = posts_query + .list(&local_site.site, &mut context.pool()) + .await?; } SearchType::Comments => { - comments = CommentQuery { - sort: (sort.map(post_to_comment_sort_type)), - listing_type: (listing_type), - search_term: (Some(q)), - community_id: (community_id), - creator_id: (creator_id), - local_user: (local_user_view.as_ref()), - page: (page), - limit: (limit), - ..Default::default() - } - .list(&mut context.pool()) - .await?; + comments = comment_query + .list(&local_site.site, &mut context.pool()) + .await?; } SearchType::Communities => { - communities = CommunityQuery { - sort: (sort), - listing_type: (listing_type), - search_term: (Some(q)), - local_user: (local_user.as_ref()), - is_mod_or_admin: (is_admin), - page: (page), - limit: (limit), - ..Default::default() - } - .list(&mut context.pool()) - .await?; + communities = community_query + .list(&local_site.site, &mut context.pool()) + .await?; } SearchType::Users => { - users = PersonQuery { - sort, - search_term: (Some(q)), - page: (page), - limit: (limit), - } - .list(&mut context.pool()) - .await?; + users = person_query.list(&mut context.pool()).await?; } SearchType::All => { // If the community or creator is included, dont search communities or users let community_or_creator_included = - data.community_id.is_some() || data.community_name.is_some() || data.creator_id.is_some(); + community_id.is_some() || community_name.is_some() || creator_id.is_some(); - let q = data.q.clone(); + posts = posts_query + .list(&local_site.site, &mut context.pool()) + .await?; - posts = PostQuery { - sort: (sort), - listing_type: (listing_type), - community_id: (community_id), - creator_id: (creator_id), - local_user: (local_user_view.as_ref()), - search_term: (Some(q)), - page: (page), - limit: (limit), - ..Default::default() - } - .list(&mut context.pool()) - .await?; - - let q = data.q.clone(); - - comments = CommentQuery { - sort: (sort.map(post_to_comment_sort_type)), - listing_type: (listing_type), - search_term: (Some(q)), - community_id: (community_id), - creator_id: (creator_id), - local_user: (local_user_view.as_ref()), - page: (page), - limit: (limit), - ..Default::default() - } - .list(&mut context.pool()) - .await?; - - let q = data.q.clone(); + comments = comment_query + .list(&local_site.site, &mut context.pool()) + .await?; communities = if community_or_creator_included { vec![] } else { - CommunityQuery { - sort: (sort), - listing_type: (listing_type), - search_term: (Some(q)), - local_user: (local_user.as_ref()), - is_mod_or_admin: (is_admin), - page: (page), - limit: (limit), - ..Default::default() - } - .list(&mut context.pool()) - .await? + community_query + .list(&local_site.site, &mut context.pool()) + .await? 
}; - let q = data.q.clone(); - users = if community_or_creator_included { vec![] } else { - PersonQuery { - sort, - search_term: (Some(q)), - page: (page), - limit: (limit), - } - .list(&mut context.pool()) - .await? + person_query.list(&mut context.pool()).await? }; } - SearchType::Url => { - posts = PostQuery { - sort: (sort), - listing_type: (listing_type), - community_id: (community_id), - creator_id: (creator_id), - url_search: (Some(q)), - page: (page), - limit: (limit), - ..Default::default() - } - .list(&mut context.pool()) - .await?; - } }; // Return the jwt diff --git a/crates/apub/src/api/user_settings_backup.rs b/crates/apub/src/api/user_settings_backup.rs index 419be280d..2e075c202 100644 --- a/crates/apub/src/api/user_settings_backup.rs +++ b/crates/apub/src/api/user_settings_backup.rs @@ -4,9 +4,10 @@ use crate::objects::{ person::ApubPerson, post::ApubPost, }; -use activitypub_federation::{config::Data, fetch::object_id::ObjectId}; +use activitypub_federation::{config::Data, fetch::object_id::ObjectId, traits::Object}; use actix_web::web::Json; use futures::{future::try_join_all, StreamExt}; +use itertools::Itertools; use lemmy_api_common::{context::LemmyContext, SuccessResponse}; use lemmy_db_schema::{ newtypes::DbUrl, @@ -14,7 +15,10 @@ use lemmy_db_schema::{ comment::{CommentSaved, CommentSavedForm}, community::{CommunityFollower, CommunityFollowerForm}, community_block::{CommunityBlock, CommunityBlockForm}, + instance::Instance, + instance_block::{InstanceBlock, InstanceBlockForm}, local_user::{LocalUser, LocalUserUpdateForm}, + local_user_vote_display_mode::{LocalUserVoteDisplayMode, LocalUserVoteDisplayModeUpdateForm}, person::{Person, PersonUpdateForm}, person_block::{PersonBlock, PersonBlockForm}, post::{PostSaved, PostSavedForm}, @@ -23,12 +27,15 @@ use lemmy_db_schema::{ }; use lemmy_db_views::structs::LocalUserView; use lemmy_utils::{ - error::{LemmyError, LemmyErrorType, LemmyResult, MAX_API_PARAM_ELEMENTS}, + error::{LemmyErrorType, LemmyResult, MAX_API_PARAM_ELEMENTS}, spawn_try_task, }; use serde::{Deserialize, Serialize}; +use std::future::Future; use tracing::info; +const PARALLELISM: usize = 10; + /// Backup of user data. This struct should never be changed so that the data can be used as a /// long-term backup in case the instance goes down unexpectedly. All fields are optional to allow /// importing partial backups. @@ -37,7 +44,7 @@ use tracing::info; /// /// Be careful with any changes to this struct, to avoid breaking changes which could prevent /// importing older backups. 
-#[derive(Debug, Serialize, Deserialize, Clone)]
+#[derive(Debug, Serialize, Deserialize, Clone, Default)]
 pub struct UserSettingsBackup {
   pub display_name: Option<String>,
   pub bio: Option<String>,
@@ -48,6 +55,7 @@ pub struct UserSettingsBackup {
   // TODO: might be worth making a separate struct for settings backup, to avoid breakage in case
   // fields are renamed, and to avoid storing unnecessary fields like person_id or email
   pub settings: Option<LocalUser>,
+  pub vote_display_mode_settings: Option<LocalUserVoteDisplayMode>,
   #[serde(default)]
   pub followed_communities: Vec<ObjectId<ApubCommunity>>,
   #[serde(default)]
@@ -58,13 +66,15 @@ pub struct UserSettingsBackup {
   pub blocked_communities: Vec<ObjectId<ApubCommunity>>,
   #[serde(default)]
   pub blocked_users: Vec<ObjectId<ApubPerson>>,
+  #[serde(default)]
+  pub blocked_instances: Vec<String>,
 }
 
 #[tracing::instrument(skip(context))]
 pub async fn export_settings(
   local_user_view: LocalUserView,
   context: Data<LemmyContext>,
-) -> Result<Json<UserSettingsBackup>, LemmyError> {
+) -> LemmyResult<Json<UserSettingsBackup>> {
   let lists = LocalUser::export_backup(&mut context.pool(), local_user_view.person.id).await?;
   let vec_into = |vec: Vec<_>| vec.into_iter().map(Into::into).collect();
@@ -76,8 +86,10 @@ pub async fn export_settings(
     matrix_id: local_user_view.person.matrix_user_id,
     bot_account: local_user_view.person.bot_account.into(),
     settings: Some(local_user_view.local_user),
+    vote_display_mode_settings: Some(local_user_view.local_user_vote_display_mode),
     followed_communities: vec_into(lists.followed_communities),
     blocked_communities: vec_into(lists.blocked_communities),
+    blocked_instances: lists.blocked_instances,
     blocked_users: lists.blocked_users.into_iter().map(Into::into).collect(),
     saved_posts: lists.saved_posts.into_iter().map(Into::into).collect(),
     saved_comments: lists.saved_comments.into_iter().map(Into::into).collect(),
@@ -89,33 +101,35 @@ pub async fn import_settings(
   data: Json<UserSettingsBackup>,
   local_user_view: LocalUserView,
   context: Data<LemmyContext>,
-) -> Result<Json<SuccessResponse>, LemmyError> {
+) -> LemmyResult<Json<SuccessResponse>> {
   let person_form = PersonUpdateForm {
-    display_name: Some(data.display_name.clone()),
-    bio: Some(data.bio.clone()),
-    matrix_user_id: Some(data.matrix_id.clone()),
+    display_name: data.display_name.clone().map(Some),
+    bio: data.bio.clone().map(Some),
+    matrix_user_id: data.matrix_id.clone().map(Some),
     bot_account: data.bot_account,
     ..Default::default()
   };
-  Person::update(&mut context.pool(), local_user_view.person.id, &person_form).await?;
+  // ignore error in case form is empty
+  Person::update(&mut context.pool(), local_user_view.person.id, &person_form)
+    .await
+    .ok();
 
   let local_user_form = LocalUserUpdateForm {
     show_nsfw: data.settings.as_ref().map(|s| s.show_nsfw),
-    theme: data.settings.as_ref().map(|s| s.theme.clone()),
-    default_sort_type: data.settings.as_ref().map(|s| s.default_sort_type),
+    theme: data.settings.clone().map(|s| s.theme.clone()),
+    default_post_sort_type: data.settings.as_ref().map(|s| s.default_post_sort_type),
+    default_comment_sort_type: data.settings.as_ref().map(|s| s.default_comment_sort_type),
     default_listing_type: data.settings.as_ref().map(|s| s.default_listing_type),
-    interface_language: data.settings.as_ref().map(|s| s.interface_language.clone()),
+    interface_language: data.settings.clone().map(|s| s.interface_language),
     show_avatars: data.settings.as_ref().map(|s| s.show_avatars),
     send_notifications_to_email: data
       .settings
       .as_ref()
       .map(|s| s.send_notifications_to_email),
-    show_scores: data.settings.as_ref().map(|s| s.show_scores),
     show_bot_accounts: data.settings.as_ref().map(|s| s.show_bot_accounts),
     show_read_posts: data.settings.as_ref().map(|s| s.show_read_posts),
     open_links_in_new_tab:
data.settings.as_ref().map(|s| s.open_links_in_new_tab), blur_nsfw: data.settings.as_ref().map(|s| s.blur_nsfw), - auto_expand: data.settings.as_ref().map(|s| s.auto_expand), infinite_scroll_enabled: data.settings.as_ref().map(|s| s.infinite_scroll_enabled), post_listing_mode: data.settings.as_ref().map(|s| s.post_listing_mode), ..Default::default() @@ -127,9 +141,31 @@ pub async fn import_settings( ) .await?; + // Update the vote display mode settings + let vote_display_mode_form = LocalUserVoteDisplayModeUpdateForm { + score: data.vote_display_mode_settings.as_ref().map(|s| s.score), + upvotes: data.vote_display_mode_settings.as_ref().map(|s| s.upvotes), + downvotes: data + .vote_display_mode_settings + .as_ref() + .map(|s| s.downvotes), + upvote_percentage: data + .vote_display_mode_settings + .as_ref() + .map(|s| s.upvote_percentage), + }; + + LocalUserVoteDisplayMode::update( + &mut context.pool(), + local_user_view.local_user.id, + &vote_display_mode_form, + ) + .await?; + let url_count = data.followed_communities.len() + data.blocked_communities.len() + data.blocked_users.len() + + data.blocked_instances.len() + data.saved_posts.len() + data.saved_comments.len(); if url_count > MAX_API_PARAM_ELEMENTS { @@ -137,243 +173,198 @@ pub async fn import_settings( } spawn_try_task(async move { - const PARALLELISM: usize = 10; let person_id = local_user_view.person.id; - // These tasks fetch objects from remote instances which might be down. - // TODO: Would be nice if we could send a list of failed items with api response, but then - // the request would likely timeout. - let mut failed_items = vec![]; - info!( - "Starting settings backup for {}", + "Starting settings import for {}", local_user_view.person.name ); - futures::stream::iter( - data - .followed_communities - .clone() - .into_iter() - // reset_request_count works like clone, and is necessary to avoid running into request limit - .map(|f| (f, context.reset_request_count())) - .map(|(followed, context)| async move { - // need to reset outgoing request count to avoid running into limit - let community = followed.dereference(&context).await?; - let form = CommunityFollowerForm { - person_id, - community_id: community.id, - pending: true, - }; - CommunityFollower::follow(&mut context.pool(), &form).await?; - LemmyResult::Ok(()) - }), + let failed_followed_communities = fetch_and_import( + data.followed_communities.clone(), + &context, + |(followed, context)| async move { + let community = followed.dereference(&context).await?; + let form = CommunityFollowerForm { + person_id, + community_id: community.id, + pending: true, + }; + CommunityFollower::follow(&mut context.pool(), &form).await?; + LemmyResult::Ok(()) + }, ) - .buffer_unordered(PARALLELISM) - .collect::>() - .await - .into_iter() - .enumerate() - .for_each(|(i, r)| { - if let Err(e) = r { - failed_items.push(data.followed_communities.get(i).map(|u| u.inner().clone())); - info!("Failed to import followed community: {e}"); - } - }); + .await?; - futures::stream::iter( - data - .saved_posts - .clone() - .into_iter() - .map(|s| (s, context.reset_request_count())) - .map(|(saved, context)| async move { - let post = saved.dereference(&context).await?; - let form = PostSavedForm { - person_id, - post_id: post.id, - }; - PostSaved::save(&mut context.pool(), &form).await?; - LemmyResult::Ok(()) - }), + let failed_saved_posts = fetch_and_import( + data.saved_posts.clone(), + &context, + |(saved, context)| async move { + let post = saved.dereference(&context).await?; + let form = 
PostSavedForm { + person_id, + post_id: post.id, + }; + PostSaved::save(&mut context.pool(), &form).await?; + LemmyResult::Ok(()) + }, ) - .buffer_unordered(PARALLELISM) - .collect::>() - .await - .into_iter() - .enumerate() - .for_each(|(i, r)| { - if let Err(e) = r { - failed_items.push(data.followed_communities.get(i).map(|u| u.inner().clone())); - info!("Failed to import saved post community: {e}"); - } - }); + .await?; - futures::stream::iter( - data - .saved_comments - .clone() - .into_iter() - .map(|s| (s, context.reset_request_count())) - .map(|(saved, context)| async move { - let comment = saved.dereference(&context).await?; - let form = CommentSavedForm { - person_id, - comment_id: comment.id, - }; - CommentSaved::save(&mut context.pool(), &form).await?; - LemmyResult::Ok(()) - }), + let failed_saved_comments = fetch_and_import( + data.saved_comments.clone(), + &context, + |(saved, context)| async move { + let comment = saved.dereference(&context).await?; + let form = CommentSavedForm { + person_id, + comment_id: comment.id, + }; + CommentSaved::save(&mut context.pool(), &form).await?; + LemmyResult::Ok(()) + }, ) - .buffer_unordered(PARALLELISM) - .collect::>() - .await - .into_iter() - .enumerate() - .for_each(|(i, r)| { - if let Err(e) = r { - failed_items.push(data.followed_communities.get(i).map(|u| u.inner().clone())); - info!("Failed to import saved comment community: {e}"); - } - }); + .await?; - let failed_items: Vec<_> = failed_items.into_iter().flatten().collect(); - info!( - "Finished settings backup for {}, failed items: {:#?}", - local_user_view.person.name, failed_items - ); + let failed_community_blocks = fetch_and_import( + data.blocked_communities.clone(), + &context, + |(blocked, context)| async move { + let community = blocked.dereference(&context).await?; + let form = CommunityBlockForm { + person_id, + community_id: community.id, + }; + CommunityBlock::block(&mut context.pool(), &form).await?; + LemmyResult::Ok(()) + }, + ) + .await?; - // These tasks don't connect to any remote instances but only insert directly in the database. - // That means the only error condition are db connection failures, so no extra error handling is - // needed. 
- try_join_all(data.blocked_communities.iter().map(|blocked| async { - // dont fetch unknown blocked objects from home server - let community = blocked.dereference_local(&context).await?; - let form = CommunityBlockForm { + let failed_user_blocks = fetch_and_import( + data.blocked_users.clone(), + &context, + |(blocked, context)| async move { + let context = context.reset_request_count(); + let target = blocked.dereference(&context).await?; + let form = PersonBlockForm { + person_id, + target_id: target.id, + }; + PersonBlock::block(&mut context.pool(), &form).await?; + LemmyResult::Ok(()) + }, + ) + .await?; + + try_join_all(data.blocked_instances.iter().map(|domain| async { + let instance = Instance::read_or_create(&mut context.pool(), domain.clone()).await?; + let form = InstanceBlockForm { person_id, - community_id: community.id, + instance_id: instance.id, }; - CommunityBlock::block(&mut context.pool(), &form).await?; + InstanceBlock::block(&mut context.pool(), &form).await?; LemmyResult::Ok(()) })) .await?; - try_join_all(data.blocked_users.iter().map(|blocked| async { - // dont fetch unknown blocked objects from home server - let target = blocked.dereference_local(&context).await?; - let form = PersonBlockForm { - person_id, - target_id: target.id, - }; - PersonBlock::block(&mut context.pool(), &form).await?; - LemmyResult::Ok(()) - })) - .await?; + info!("Settings import completed for {}, the following items failed: {failed_followed_communities}, {failed_saved_posts}, {failed_saved_comments}, {failed_community_blocks}, {failed_user_blocks}", + local_user_view.person.name); + Ok(()) }); Ok(Json(Default::default())) } -#[cfg(test)] -mod tests { - #![allow(clippy::unwrap_used)] - #![allow(clippy::indexing_slicing)] +async fn fetch_and_import( + objects: Vec>, + context: &Data, + import_fn: impl FnMut((ObjectId, Data)) -> Fut, +) -> LemmyResult +where + Kind: Object + Send + 'static, + for<'de2> ::Kind: Deserialize<'de2>, + Fut: Future>, +{ + let mut failed_items = vec![]; + futures::stream::iter( + objects + .clone() + .into_iter() + // need to reset outgoing request count to avoid running into limit + .map(|s| (s, context.reset_request_count())) + .map(import_fn), + ) + .buffer_unordered(PARALLELISM) + .collect::>() + .await + .into_iter() + .enumerate() + .for_each(|(i, r): (usize, LemmyResult<()>)| { + if r.is_err() { + if let Some(object) = objects.get(i) { + failed_items.push(object.inner().clone()); + } + } + }); + Ok(failed_items.into_iter().join(",")) +} - use crate::{ - api::user_settings_backup::{export_settings, import_settings}, - objects::tests::init_context, - }; - use activitypub_federation::config::Data; +#[cfg(test)] +#[expect(clippy::indexing_slicing)] +pub(crate) mod tests { + use crate::api::user_settings_backup::{export_settings, import_settings}; + use actix_web::web::Json; use lemmy_api_common::context::LemmyContext; use lemmy_db_schema::{ source::{ community::{Community, CommunityFollower, CommunityFollowerForm, CommunityInsertForm}, - instance::Instance, - local_user::{LocalUser, LocalUserInsertForm}, - person::{Person, PersonInsertForm}, + local_user::LocalUser, }, traits::{Crud, Followable}, }; use lemmy_db_views::structs::LocalUserView; use lemmy_db_views_actor::structs::CommunityFollowerView; - use lemmy_utils::error::LemmyErrorType; + use lemmy_utils::error::{LemmyErrorType, LemmyResult}; + use pretty_assertions::assert_eq; use serial_test::serial; use std::time::Duration; use tokio::time::sleep; - async fn create_user( - name: String, - bio: Option, - 
context: &Data, - ) -> LocalUserView { - let instance = Instance::read_or_create(&mut context.pool(), "example.com".to_string()) - .await - .unwrap(); - let person_form = PersonInsertForm::builder() - .name(name.clone()) - .display_name(Some(name.clone())) - .bio(bio) - .public_key("asd".to_string()) - .instance_id(instance.id) - .build(); - let person = Person::create(&mut context.pool(), &person_form) - .await - .unwrap(); - - let user_form = LocalUserInsertForm::builder() - .person_id(person.id) - .password_encrypted("pass".to_string()) - .build(); - let local_user = LocalUser::create(&mut context.pool(), &user_form) - .await - .unwrap(); - - LocalUserView::read(&mut context.pool(), local_user.id) - .await - .unwrap() - } - #[tokio::test] #[serial] - async fn test_settings_export_import() { - let context = init_context().await; + async fn test_settings_export_import() -> LemmyResult<()> { + let context = LemmyContext::init_test_context().await; + let pool = &mut context.pool(); - let export_user = create_user("hanna".to_string(), Some("my bio".to_string()), &context).await; + let export_user = LocalUserView::create_test_user(pool, "hanna", "my bio", false).await?; - let community_form = CommunityInsertForm::builder() - .name("testcom".to_string()) - .title("testcom".to_string()) - .instance_id(export_user.person.instance_id) - .build(); - let community = Community::create(&mut context.pool(), &community_form) - .await - .unwrap(); + let community_form = CommunityInsertForm::new( + export_user.person.instance_id, + "testcom".to_string(), + "testcom".to_string(), + "pubkey".to_string(), + ); + let community = Community::create(pool, &community_form).await?; let follower_form = CommunityFollowerForm { community_id: community.id, person_id: export_user.person.id, pending: false, }; - CommunityFollower::follow(&mut context.pool(), &follower_form) - .await - .unwrap(); + CommunityFollower::follow(pool, &follower_form).await?; - let backup = export_settings(export_user.clone(), context.reset_request_count()) - .await - .unwrap(); + let backup = export_settings(export_user.clone(), context.reset_request_count()).await?; - let import_user = create_user("charles".to_string(), None, &context).await; + let import_user = + LocalUserView::create_test_user(pool, "charles", "charles bio", false).await?; - import_settings(backup, import_user.clone(), context.reset_request_count()) - .await - .unwrap(); + import_settings(backup, import_user.clone(), context.reset_request_count()).await?; // wait for background task to finish sleep(Duration::from_millis(1000)).await; - let import_user_updated = LocalUserView::read(&mut context.pool(), import_user.local_user.id) - .await - .unwrap(); + let import_user_updated = LocalUserView::read(pool, import_user.local_user.id).await?; assert_eq!( export_user.person.display_name, @@ -381,61 +372,76 @@ mod tests { ); assert_eq!(export_user.person.bio, import_user_updated.person.bio); - let follows = CommunityFollowerView::for_person(&mut context.pool(), import_user.person.id) - .await - .unwrap(); + let follows = CommunityFollowerView::for_person(pool, import_user.person.id).await?; assert_eq!(follows.len(), 1); assert_eq!(follows[0].community.actor_id, community.actor_id); - LocalUser::delete(&mut context.pool(), export_user.local_user.id) - .await - .unwrap(); - LocalUser::delete(&mut context.pool(), import_user.local_user.id) - .await - .unwrap(); + LocalUser::delete(pool, export_user.local_user.id).await?; + LocalUser::delete(pool, import_user.local_user.id).await?; + 
Ok(()) } #[tokio::test] #[serial] - async fn disallow_large_backup() { - let context = init_context().await; + async fn disallow_large_backup() -> LemmyResult<()> { + let context = LemmyContext::init_test_context().await; + let pool = &mut context.pool(); - let export_user = create_user("hanna".to_string(), Some("my bio".to_string()), &context).await; + let export_user = LocalUserView::create_test_user(pool, "harry", "harry bio", false).await?; - let mut backup = export_settings(export_user.clone(), context.reset_request_count()) - .await - .unwrap(); + let mut backup = export_settings(export_user.clone(), context.reset_request_count()).await?; - for _ in 0..251 { + for _ in 0..2501 { backup .followed_communities - .push("http://example.com".parse().unwrap()); + .push("http://example.com".parse()?); backup .blocked_communities - .push("http://example2.com".parse().unwrap()); - backup - .saved_posts - .push("http://example3.com".parse().unwrap()); - backup - .saved_comments - .push("http://example4.com".parse().unwrap()); + .push("http://example2.com".parse()?); + backup.saved_posts.push("http://example3.com".parse()?); + backup.saved_comments.push("http://example4.com".parse()?); } - let import_user = create_user("charles".to_string(), None, &context).await; + let import_user = LocalUserView::create_test_user(pool, "sally", "sally bio", false).await?; let imported = import_settings(backup, import_user.clone(), context.reset_request_count()).await; assert_eq!( - imported.err().unwrap().error_type, - LemmyErrorType::TooManyItems + imported.err().map(|e| e.error_type), + Some(LemmyErrorType::TooManyItems) ); - LocalUser::delete(&mut context.pool(), export_user.local_user.id) - .await - .unwrap(); - LocalUser::delete(&mut context.pool(), import_user.local_user.id) - .await - .unwrap(); + LocalUser::delete(pool, export_user.local_user.id).await?; + LocalUser::delete(pool, import_user.local_user.id).await?; + Ok(()) + } + + #[tokio::test] + #[serial] + async fn import_partial_backup() -> LemmyResult<()> { + let context = LemmyContext::init_test_context().await; + let pool = &mut context.pool(); + + let import_user = LocalUserView::create_test_user(pool, "larry", "larry bio", false).await?; + + let backup = + serde_json::from_str("{\"bot_account\": true, \"settings\": {\"theme\": \"my_theme\"}}")?; + import_settings( + Json(backup), + import_user.clone(), + context.reset_request_count(), + ) + .await?; + + let import_user_updated = LocalUserView::read(pool, import_user.local_user.id).await?; + // mark as bot account + assert!(import_user_updated.person.bot_account); + // dont remove existing bio + assert_eq!(import_user.person.bio, import_user_updated.person.bio); + // local_user can be deserialized without id/person_id fields + assert_eq!("my_theme", import_user_updated.local_user.theme); + + Ok(()) } } diff --git a/crates/apub/src/collections/community_featured.rs b/crates/apub/src/collections/community_featured.rs index b3ee54db6..e092693e6 100644 --- a/crates/apub/src/collections/community_featured.rs +++ b/crates/apub/src/collections/community_featured.rs @@ -6,16 +6,19 @@ use activitypub_federation::{ config::Data, kinds::collection::OrderedCollectionType, protocol::verification::verify_domains_match, - traits::{ActivityHandler, Collection, Object}, + traits::{Collection, Object}, }; use futures::future::{join_all, try_join_all}; use lemmy_api_common::{context::LemmyContext, utils::generate_featured_url}; -use lemmy_db_schema::{source::post::Post, utils::FETCH_LIMIT_MAX}; +use 
lemmy_db_schema::{ + source::{community::Community, post::Post}, + utils::FETCH_LIMIT_MAX, +}; use lemmy_utils::error::LemmyError; use url::Url; -#[derive(Clone, Debug)] -pub(crate) struct ApubCommunityFeatured(Vec); +#[derive(Clone, Debug, PartialEq)] +pub(crate) struct ApubCommunityFeatured(()); #[async_trait::async_trait] impl Collection for ApubCommunityFeatured { @@ -55,37 +58,39 @@ impl Collection for ApubCommunityFeatured { async fn from_json( apub: Self::Kind, - _owner: &Self::Owner, - data: &Data, + owner: &Self::Owner, + context: &Data, ) -> Result where Self: Sized, { - let mut posts = apub.ordered_items; - if posts.len() as i64 > FETCH_LIMIT_MAX { - posts = posts + let mut pages = apub.ordered_items; + if pages.len() as i64 > FETCH_LIMIT_MAX { + pages = pages .get(0..(FETCH_LIMIT_MAX as usize)) .unwrap_or_default() .to_vec(); } - // We intentionally ignore errors here. This is because the outbox might contain posts from old - // Lemmy versions, or from other software which we cant parse. In that case, we simply skip the - // item and only parse the ones that work. - // process items in parallel, to avoid long delay from fetch_site_metadata() and other processing - join_all(posts.into_iter().map(|post| { + // process items in parallel, to avoid long delay from fetch_site_metadata() and other + // processing + let stickied_posts: Vec = join_all(pages.into_iter().map(|page| { async { // use separate request counter for each item, otherwise there will be problems with // parallel processing - let verify = post.verify(data).await; - if verify.is_ok() { - post.receive(data).await.ok(); - } + ApubPost::verify(&page, &apub.id, context).await?; + ApubPost::from_json(page, context).await } })) - .await; + .await + // ignore any failed or unparseable items + .into_iter() + .filter_map(|p| p.ok().map(|p| p.0)) + .collect(); + + Community::set_featured_posts(owner.id, stickied_posts, &mut context.pool()).await?; // This return value is unused, so just set an empty vec - Ok(ApubCommunityFeatured(Vec::new())) + Ok(ApubCommunityFeatured(())) } } diff --git a/crates/apub/src/collections/community_follower.rs b/crates/apub/src/collections/community_follower.rs index da0e52069..a4f5debbc 100644 --- a/crates/apub/src/collections/community_follower.rs +++ b/crates/apub/src/collections/community_follower.rs @@ -15,7 +15,7 @@ use lemmy_utils::error::LemmyError; use url::Url; #[derive(Clone, Debug)] -pub(crate) struct ApubCommunityFollower(Vec<()>); +pub(crate) struct ApubCommunityFollower(()); #[async_trait::async_trait] impl Collection for ApubCommunityFollower { @@ -61,6 +61,6 @@ impl Collection for ApubCommunityFollower { ) .await?; - Ok(ApubCommunityFollower(Vec::new())) + Ok(ApubCommunityFollower(())) } } diff --git a/crates/apub/src/collections/community_moderators.rs b/crates/apub/src/collections/community_moderators.rs index cdaf985ea..c7b925f97 100644 --- a/crates/apub/src/collections/community_moderators.rs +++ b/crates/apub/src/collections/community_moderators.rs @@ -15,11 +15,11 @@ use lemmy_db_schema::{ traits::Joinable, }; use lemmy_db_views_actor::structs::CommunityModeratorView; -use lemmy_utils::error::LemmyError; +use lemmy_utils::error::{LemmyError, LemmyResult}; use url::Url; #[derive(Clone, Debug)] -pub(crate) struct ApubCommunityModerators(pub(crate) Vec); +pub(crate) struct ApubCommunityModerators(()); #[async_trait::async_trait] impl Collection for ApubCommunityModerators { @@ -29,10 +29,7 @@ impl Collection for ApubCommunityModerators { type Error = LemmyError; 
#[tracing::instrument(skip_all)] - async fn read_local( - owner: &Self::Owner, - data: &Data, - ) -> Result { + async fn read_local(owner: &Self::Owner, data: &Data) -> LemmyResult { let moderators = CommunityModeratorView::for_community(&mut data.pool(), owner.id).await?; let ordered_items = moderators .into_iter() @@ -50,7 +47,7 @@ impl Collection for ApubCommunityModerators { group_moderators: &GroupModerators, expected_domain: &Url, _data: &Data, - ) -> Result<(), LemmyError> { + ) -> LemmyResult<()> { verify_domains_match(&group_moderators.id, expected_domain)?; Ok(()) } @@ -60,7 +57,7 @@ impl Collection for ApubCommunityModerators { apub: Self::Kind, owner: &Self::Owner, data: &Data, - ) -> Result { + ) -> LemmyResult { let community_id = owner.id; let current_moderators = CommunityModeratorView::for_community(&mut data.pool(), community_id).await?; @@ -96,22 +93,17 @@ impl Collection for ApubCommunityModerators { } // This return value is unused, so just set an empty vec - Ok(ApubCommunityModerators(Vec::new())) + Ok(ApubCommunityModerators(())) } } #[cfg(test)] +#[expect(clippy::indexing_slicing)] mod tests { - #![allow(clippy::unwrap_used)] - #![allow(clippy::indexing_slicing)] use super::*; use crate::{ - objects::{ - community::tests::parse_lemmy_community, - person::tests::parse_lemmy_person, - tests::init_context, - }, + objects::{community::tests::parse_lemmy_community, person::tests::parse_lemmy_person}, protocol::tests::file_to_json_object, }; use lemmy_db_schema::{ @@ -123,70 +115,50 @@ mod tests { }, traits::Crud, }; + use pretty_assertions::assert_eq; use serial_test::serial; #[tokio::test] #[serial] - async fn test_parse_lemmy_community_moderators() { - let context = init_context().await; - let (new_mod, site) = parse_lemmy_person(&context).await; - let community = parse_lemmy_community(&context).await; + async fn test_parse_lemmy_community_moderators() -> LemmyResult<()> { + let context = LemmyContext::init_test_context().await; + let (new_mod, site) = parse_lemmy_person(&context).await?; + let community = parse_lemmy_community(&context).await?; let community_id = community.id; let inserted_instance = - Instance::read_or_create(&mut context.pool(), "my_domain.tld".to_string()) - .await - .unwrap(); + Instance::read_or_create(&mut context.pool(), "my_domain.tld".to_string()).await?; - let old_mod = PersonInsertForm::builder() - .name("holly".into()) - .public_key("pubkey".to_string()) - .instance_id(inserted_instance.id) - .build(); + let old_mod = PersonInsertForm::test_form(inserted_instance.id, "holly"); - let old_mod = Person::create(&mut context.pool(), &old_mod).await.unwrap(); + let old_mod = Person::create(&mut context.pool(), &old_mod).await?; let community_moderator_form = CommunityModeratorForm { community_id: community.id, person_id: old_mod.id, }; - CommunityModerator::join(&mut context.pool(), &community_moderator_form) - .await - .unwrap(); + CommunityModerator::join(&mut context.pool(), &community_moderator_form).await?; assert_eq!(site.actor_id.to_string(), "https://enterprise.lemmy.ml/"); let json: GroupModerators = - file_to_json_object("assets/lemmy/collections/group_moderators.json").unwrap(); - let url = Url::parse("https://enterprise.lemmy.ml/c/tenforward").unwrap(); - ApubCommunityModerators::verify(&json, &url, &context) - .await - .unwrap(); - ApubCommunityModerators::from_json(json, &community, &context) - .await - .unwrap(); + file_to_json_object("assets/lemmy/collections/group_moderators.json")?; + let url = 
Url::parse("https://enterprise.lemmy.ml/c/tenforward")?; + ApubCommunityModerators::verify(&json, &url, &context).await?; + ApubCommunityModerators::from_json(json, &community, &context).await?; assert_eq!(context.request_count(), 0); let current_moderators = - CommunityModeratorView::for_community(&mut context.pool(), community_id) - .await - .unwrap(); + CommunityModeratorView::for_community(&mut context.pool(), community_id).await?; assert_eq!(current_moderators.len(), 1); assert_eq!(current_moderators[0].moderator.id, new_mod.id); - Person::delete(&mut context.pool(), old_mod.id) - .await - .unwrap(); - Person::delete(&mut context.pool(), new_mod.id) - .await - .unwrap(); - Community::delete(&mut context.pool(), community.id) - .await - .unwrap(); - Site::delete(&mut context.pool(), site.id).await.unwrap(); - Instance::delete(&mut context.pool(), inserted_instance.id) - .await - .unwrap(); + Person::delete(&mut context.pool(), old_mod.id).await?; + Person::delete(&mut context.pool(), new_mod.id).await?; + Community::delete(&mut context.pool(), community.id).await?; + Site::delete(&mut context.pool(), site.id).await?; + Instance::delete(&mut context.pool(), inserted_instance.id).await?; + Ok(()) } } diff --git a/crates/apub/src/collections/community_outbox.rs b/crates/apub/src/collections/community_outbox.rs index 854db9349..01199bc2b 100644 --- a/crates/apub/src/collections/community_outbox.rs +++ b/crates/apub/src/collections/community_outbox.rs @@ -1,6 +1,6 @@ use crate::{ activity_lists::AnnouncableActivities, - objects::{community::ApubCommunity, post::ApubPost}, + objects::community::ApubCommunity, protocol::{ activities::{ community::announce::AnnounceActivity, @@ -18,16 +18,13 @@ use activitypub_federation::{ }; use futures::future::join_all; use lemmy_api_common::{context::LemmyContext, utils::generate_outbox_url}; -use lemmy_db_schema::{ - source::{person::Person, post::Post}, - traits::Crud, - utils::FETCH_LIMIT_MAX, -}; -use lemmy_utils::error::LemmyError; +use lemmy_db_schema::{source::site::Site, utils::FETCH_LIMIT_MAX, PostSortType}; +use lemmy_db_views::post_view::PostQuery; +use lemmy_utils::error::{LemmyError, LemmyResult}; use url::Url; #[derive(Clone, Debug)] -pub(crate) struct ApubCommunityOutbox(Vec); +pub(crate) struct ApubCommunityOutbox(()); #[async_trait::async_trait] impl Collection for ApubCommunityOutbox { @@ -37,22 +34,28 @@ impl Collection for ApubCommunityOutbox { type Error = LemmyError; #[tracing::instrument(skip_all)] - async fn read_local( - owner: &Self::Owner, - data: &Data, - ) -> Result { - let post_list: Vec = Post::list_for_community(&mut data.pool(), owner.id) - .await? - .into_iter() - .map(Into::into) - .collect(); + async fn read_local(owner: &Self::Owner, data: &Data) -> LemmyResult { + let site = Site::read_local(&mut data.pool()).await?; + + let post_views = PostQuery { + community_id: Some(owner.id), + sort: Some(PostSortType::New), + limit: Some(FETCH_LIMIT_MAX), + ..Default::default() + } + .list(&site, &mut data.pool()) + .await?; + let mut ordered_items = vec![]; - for post in post_list { - let person = Person::read(&mut data.pool(), post.creator_id) - .await? 
- .into(); - let create = - CreateOrUpdatePage::new(post, &person, owner, CreateOrUpdateType::Create, data).await?; + for post_view in post_views { + let create = CreateOrUpdatePage::new( + post_view.post.into(), + &post_view.creator.into(), + owner, + CreateOrUpdateType::Create, + data, + ) + .await?; let announcable = AnnouncableActivities::CreateOrUpdatePost(create); let announce = AnnounceActivity::new(announcable.try_into()?, owner, data)?; ordered_items.push(announce); @@ -71,7 +74,7 @@ impl Collection for ApubCommunityOutbox { group_outbox: &GroupOutbox, expected_domain: &Url, _data: &Data, - ) -> Result<(), LemmyError> { + ) -> LemmyResult<()> { verify_domains_match(expected_domain, &group_outbox.id)?; Ok(()) } @@ -81,7 +84,7 @@ impl Collection for ApubCommunityOutbox { apub: Self::Kind, _owner: &Self::Owner, data: &Data, - ) -> Result { + ) -> LemmyResult { let mut outbox_activities = apub.ordered_items; if outbox_activities.len() as i64 > FETCH_LIMIT_MAX { outbox_activities = outbox_activities @@ -93,20 +96,25 @@ impl Collection for ApubCommunityOutbox { // We intentionally ignore errors here. This is because the outbox might contain posts from old // Lemmy versions, or from other software which we cant parse. In that case, we simply skip the // item and only parse the ones that work. - // process items in parallel, to avoid long delay from fetch_site_metadata() and other processing + // process items in parallel, to avoid long delay from fetch_site_metadata() and other + // processing join_all(outbox_activities.into_iter().map(|activity| { async { - // use separate request counter for each item, otherwise there will be problems with - // parallel processing - let verify = activity.verify(data).await; - if verify.is_ok() { - activity.receive(data).await.ok(); + // Receiving announce requires at least one local community follower for anti spam purposes. + // This won't be the case for newly fetched communities, so we extract the inner activity + // and handle it directly to bypass this check. + let inner = activity.object.object(data).await.map(TryInto::try_into); + if let Ok(Ok(AnnouncableActivities::CreateOrUpdatePost(inner))) = inner { + let verify = inner.verify(data).await; + if verify.is_ok() { + inner.receive(data).await.ok(); + } } } })) .await; // This return value is unused, so just set an empty vec - Ok(ApubCommunityOutbox(Vec::new())) + Ok(ApubCommunityOutbox(())) } } diff --git a/crates/apub/src/fetcher/markdown_links.rs b/crates/apub/src/fetcher/markdown_links.rs new file mode 100644 index 000000000..d83aae515 --- /dev/null +++ b/crates/apub/src/fetcher/markdown_links.rs @@ -0,0 +1,192 @@ +use super::{search::SearchableObjects, user_or_community::UserOrCommunity}; +use crate::fetcher::post_or_comment::PostOrComment; +use activitypub_federation::{config::Data, fetch::object_id::ObjectId}; +use lemmy_api_common::{ + context::LemmyContext, + utils::{generate_local_apub_endpoint, EndpointType}, +}; +use lemmy_db_schema::{newtypes::InstanceId, source::instance::Instance}; +use lemmy_utils::{ + error::LemmyResult, + utils::markdown::image_links::{markdown_find_links, markdown_handle_title}, +}; +use url::Url; + +pub async fn markdown_rewrite_remote_links_opt( + src: Option, + context: &Data, +) -> Option { + match src { + Some(t) => Some(markdown_rewrite_remote_links(t, context).await), + None => None, + } +} + +/// Goes through all remote markdown links and attempts to resolve them as Activitypub objects. 
+/// If successful, the link is rewritten to a local link, so it can be viewed without leaving the +/// local instance. +/// +/// As it relies on ObjectId::dereference, it can only be used for incoming federated objects, not +/// for the API. +pub async fn markdown_rewrite_remote_links( + mut src: String, + context: &Data, +) -> String { + let links_offsets = markdown_find_links(&src); + + // Go through the collected links in reverse order + for (start, end) in links_offsets.into_iter().rev() { + let (url, extra) = markdown_handle_title(&src, start, end); + + if let Some(local_url) = to_local_url(url, context).await { + let mut local_url = local_url.to_string(); + // restore title + if let Some(extra) = extra { + local_url = format!("{local_url} {extra}"); + } + src.replace_range(start..end, local_url.as_str()); + } + } + + src +} + +pub(crate) async fn to_local_url(url: &str, context: &Data) -> Option { + let local_domain = &context.settings().get_protocol_and_hostname(); + let object_id = ObjectId::::parse(url).ok()?; + if object_id.inner().domain() == Some(local_domain) { + return None; + } + let dereferenced = object_id.dereference(context).await.ok()?; + match dereferenced { + SearchableObjects::PostOrComment(pc) => match *pc { + PostOrComment::Post(post) => { + generate_local_apub_endpoint(EndpointType::Post, &post.id.to_string(), local_domain) + } + PostOrComment::Comment(comment) => { + generate_local_apub_endpoint(EndpointType::Comment, &comment.id.to_string(), local_domain) + } + } + .ok() + .map(Into::into), + SearchableObjects::PersonOrCommunity(pc) => match *pc { + UserOrCommunity::User(user) => { + format_actor_url(&user.name, "u", user.instance_id, context).await + } + UserOrCommunity::Community(community) => { + format_actor_url(&community.name, "c", community.instance_id, context).await + } + } + .ok(), + } +} + +async fn format_actor_url( + name: &str, + kind: &str, + instance_id: InstanceId, + context: &LemmyContext, +) -> LemmyResult { + let local_protocol_and_hostname = context.settings().get_protocol_and_hostname(); + let local_hostname = &context.settings().hostname; + let instance = Instance::read(&mut context.pool(), instance_id).await?; + let url = if &instance.domain != local_hostname { + format!( + "{local_protocol_and_hostname}/{kind}/{name}@{}", + instance.domain + ) + } else { + format!("{local_protocol_and_hostname}/{kind}/{name}") + }; + Ok(Url::parse(&url)?) 
+} + +#[cfg(test)] +mod tests { + use super::*; + use lemmy_db_schema::{ + source::{ + community::{Community, CommunityInsertForm}, + post::{Post, PostInsertForm}, + }, + traits::Crud, + }; + use lemmy_db_views::structs::LocalUserView; + use pretty_assertions::assert_eq; + use serial_test::serial; + + #[serial] + #[tokio::test] + async fn test_markdown_rewrite_remote_links() -> LemmyResult<()> { + let context = LemmyContext::init_test_context().await; + let instance = Instance::read_or_create(&mut context.pool(), "example.com".to_string()).await?; + let community = Community::create( + &mut context.pool(), + &CommunityInsertForm::new( + instance.id, + "my_community".to_string(), + "My Community".to_string(), + "pubkey".to_string(), + ), + ) + .await?; + let user = + LocalUserView::create_test_user(&mut context.pool(), "garda", "garda bio", false).await?; + + // insert a remote post which is already fetched + let post_form = PostInsertForm { + ap_id: Some(Url::parse("https://example.com/post/123")?.into()), + ..PostInsertForm::new("My post".to_string(), user.person.id, community.id) + }; + let post = Post::create(&mut context.pool(), &post_form).await?; + let markdown_local_post_url = format!("[link](https://lemmy-alpha/post/{})", post.id); + + let tests: Vec<_> = vec![ + ( + "rewrite remote post link", + format!("[link]({})", post.ap_id), + markdown_local_post_url.as_ref(), + ), + ( + "rewrite community link", + format!("[link]({})", community.actor_id), + "[link](https://lemmy-alpha/c/my_community@example.com)", + ), + ( + "dont rewrite local post link", + "[link](https://lemmy-alpha/post/2)".to_string(), + "[link](https://lemmy-alpha/post/2)", + ), + ( + "dont rewrite local community link", + "[link](https://lemmy-alpha/c/test)".to_string(), + "[link](https://lemmy-alpha/c/test)", + ), + ( + "dont rewrite non-fediverse link", + "[link](https://example.com/)".to_string(), + "[link](https://example.com/)", + ), + ( + "dont rewrite invalid url", + "[link](example-com)".to_string(), + "[link](example-com)", + ), + ]; + + let context = LemmyContext::init_test_context().await; + for (msg, input, expected) in &tests { + let result = markdown_rewrite_remote_links(input.to_string(), &context).await; + + assert_eq!( + &result, expected, + "Testing {}, with original input '{}'", + msg, input + ); + } + + Instance::delete(&mut context.pool(), instance.id).await?; + + Ok(()) + } +} diff --git a/crates/apub/src/fetcher/mod.rs b/crates/apub/src/fetcher/mod.rs index 4e30b4b16..29202004f 100644 --- a/crates/apub/src/fetcher/mod.rs +++ b/crates/apub/src/fetcher/mod.rs @@ -8,8 +8,9 @@ use itertools::Itertools; use lemmy_api_common::context::LemmyContext; use lemmy_db_schema::traits::ApubActor; use lemmy_db_views::structs::LocalUserView; -use lemmy_utils::error::LemmyError; +use lemmy_utils::error::{LemmyError, LemmyResult}; +pub(crate) mod markdown_links; pub mod post_or_comment; pub mod search; pub mod site_or_community_or_user; @@ -25,7 +26,7 @@ pub async fn resolve_actor_identifier( context: &Data, local_user_view: &Option, include_deleted: bool, -) -> Result +) -> LemmyResult where ActorType: Object + Object @@ -42,9 +43,12 @@ where .splitn(2, '@') .collect_tuple() .expect("invalid query"); - let actor = DbActor::read_from_name_and_domain(&mut context.pool(), name, domain).await; - if actor.is_ok() { - Ok(actor?.into()) + let actor = DbActor::read_from_name_and_domain(&mut context.pool(), name, domain) + .await + .ok() + .flatten(); + if let Some(actor) = actor { + Ok(actor.into()) } else if 
local_user_view.is_some() { // Fetch the actor from its home instance using webfinger let actor: ActorType = webfinger_resolve_actor(&identifier.to_lowercase(), context).await?; @@ -59,6 +63,7 @@ where Ok( DbActor::read_from_name(&mut context.pool(), &identifier, include_deleted) .await? + .ok_or(NotFound)? .into(), ) } diff --git a/crates/apub/src/fetcher/post_or_comment.rs b/crates/apub/src/fetcher/post_or_comment.rs index 31c53864a..be48e8ebd 100644 --- a/crates/apub/src/fetcher/post_or_comment.rs +++ b/crates/apub/src/fetcher/post_or_comment.rs @@ -12,7 +12,7 @@ use lemmy_db_schema::{ source::{community::Community, post::Post}, traits::Crud, }; -use lemmy_utils::error::LemmyError; +use lemmy_utils::error::{LemmyError, LemmyResult}; use serde::Deserialize; use url::Url; @@ -26,7 +26,7 @@ pub enum PostOrComment { #[serde(untagged)] pub enum PageOrNote { Page(Box), - Note(Note), + Note(Box), } #[async_trait::async_trait] @@ -40,10 +40,7 @@ impl Object for PostOrComment { } #[tracing::instrument(skip_all)] - async fn read_from_id( - object_id: Url, - data: &Data, - ) -> Result, LemmyError> { + async fn read_from_id(object_id: Url, data: &Data) -> LemmyResult> { let post = ApubPost::read_from_id(object_id.clone(), data).await?; Ok(match post { Some(o) => Some(PostOrComment::Post(o)), @@ -54,15 +51,18 @@ impl Object for PostOrComment { } #[tracing::instrument(skip_all)] - async fn delete(self, data: &Data) -> Result<(), LemmyError> { + async fn delete(self, data: &Data) -> LemmyResult<()> { match self { PostOrComment::Post(p) => p.delete(data).await, PostOrComment::Comment(c) => c.delete(data).await, } } - async fn into_json(self, _data: &Data) -> Result { - unimplemented!() + async fn into_json(self, data: &Data) -> LemmyResult { + Ok(match self { + PostOrComment::Post(p) => PageOrNote::Page(Box::new(p.into_json(data).await?)), + PostOrComment::Comment(c) => PageOrNote::Note(Box::new(c.into_json(data).await?)), + }) } #[tracing::instrument(skip_all)] @@ -70,7 +70,7 @@ impl Object for PostOrComment { apub: &Self::Kind, expected_domain: &Url, data: &Data, - ) -> Result<(), LemmyError> { + ) -> LemmyResult<()> { match apub { PageOrNote::Page(a) => ApubPost::verify(a, expected_domain, data).await, PageOrNote::Note(a) => ApubComment::verify(a, expected_domain, data).await, @@ -78,17 +78,17 @@ impl Object for PostOrComment { } #[tracing::instrument(skip_all)] - async fn from_json(apub: PageOrNote, context: &Data) -> Result { + async fn from_json(apub: PageOrNote, context: &Data) -> LemmyResult { Ok(match apub { PageOrNote::Page(p) => PostOrComment::Post(ApubPost::from_json(*p, context).await?), - PageOrNote::Note(n) => PostOrComment::Comment(ApubComment::from_json(n, context).await?), + PageOrNote::Note(n) => PostOrComment::Comment(ApubComment::from_json(*n, context).await?), }) } } #[async_trait::async_trait] impl InCommunity for PostOrComment { - async fn community(&self, context: &Data) -> Result { + async fn community(&self, context: &Data) -> LemmyResult { let cid = match self { PostOrComment::Post(p) => p.community_id, PostOrComment::Comment(c) => { diff --git a/crates/apub/src/fetcher/search.rs b/crates/apub/src/fetcher/search.rs index 54951edd9..e8c029106 100644 --- a/crates/apub/src/fetcher/search.rs +++ b/crates/apub/src/fetcher/search.rs @@ -1,7 +1,5 @@ -use crate::{ - objects::{comment::ApubComment, community::ApubCommunity, person::ApubPerson, post::ApubPost}, - protocol::objects::{group::Group, note::Note, page::Page, person::Person}, -}; +use super::post_or_comment::{PageOrNote, 
PostOrComment}; +use crate::fetcher::user_or_community::{PersonOrGroup, UserOrCommunity}; use activitypub_federation::{ config::Data, fetch::{object_id::ObjectId, webfinger::webfinger_resolve_actor}, @@ -9,7 +7,7 @@ use activitypub_federation::{ }; use chrono::{DateTime, Utc}; use lemmy_api_common::context::LemmyContext; -use lemmy_utils::error::{LemmyError, LemmyErrorType}; +use lemmy_utils::error::{LemmyError, LemmyResult}; use serde::Deserialize; use url::Url; @@ -18,28 +16,22 @@ use url::Url; /// which gets resolved to an URL. #[tracing::instrument(skip_all)] pub(crate) async fn search_query_to_object_id( - query: &str, + mut query: String, context: &Data, -) -> Result { - Ok(match Url::parse(query) { +) -> LemmyResult { + Ok(match Url::parse(&query) { Ok(url) => { // its already an url, just go with it ObjectId::from(url).dereference(context).await? } Err(_) => { // not an url, try to resolve via webfinger - let mut chars = query.chars(); - let kind = chars.next(); - let identifier = chars.as_str(); - match kind { - Some('@') => SearchableObjects::Person( - webfinger_resolve_actor::(identifier, context).await?, - ), - Some('!') => SearchableObjects::Community( - webfinger_resolve_actor::(identifier, context).await?, - ), - _ => return Err(LemmyErrorType::InvalidQuery)?, + if query.starts_with('!') || query.starts_with('@') { + query.remove(0); } + SearchableObjects::PersonOrCommunity(Box::new( + webfinger_resolve_actor::(&query, context).await?, + )) } }) } @@ -51,7 +43,7 @@ pub(crate) async fn search_query_to_object_id( pub(crate) async fn search_query_to_object_id_local( query: &str, context: &Data, -) -> Result { +) -> LemmyResult { let url = Url::parse(query)?; ObjectId::from(url).dereference_local(context).await } @@ -59,19 +51,15 @@ pub(crate) async fn search_query_to_object_id_local( /// The types of ActivityPub objects that can be fetched directly by searching for their ID. 
#[derive(Debug)] pub(crate) enum SearchableObjects { - Person(ApubPerson), - Community(ApubCommunity), - Post(ApubPost), - Comment(ApubComment), + PostOrComment(Box), + PersonOrCommunity(Box), } #[derive(Deserialize)] #[serde(untagged)] pub(crate) enum SearchableKinds { - Group(Group), - Person(Person), - Page(Page), - Note(Note), + PageOrNote(Box), + PersonOrGroup(Box), } #[async_trait::async_trait] @@ -82,10 +70,8 @@ impl Object for SearchableObjects { fn last_refreshed_at(&self) -> Option> { match self { - SearchableObjects::Person(p) => p.last_refreshed_at(), - SearchableObjects::Community(c) => c.last_refreshed_at(), - SearchableObjects::Post(p) => p.last_refreshed_at(), - SearchableObjects::Comment(c) => c.last_refreshed_at(), + SearchableObjects::PostOrComment(p) => p.last_refreshed_at(), + SearchableObjects::PersonOrCommunity(p) => p.last_refreshed_at(), } } @@ -98,38 +84,32 @@ impl Object for SearchableObjects { async fn read_from_id( object_id: Url, context: &Data, - ) -> Result, LemmyError> { - let c = ApubCommunity::read_from_id(object_id.clone(), context).await?; - if let Some(c) = c { - return Ok(Some(SearchableObjects::Community(c))); + ) -> LemmyResult> { + let uc = UserOrCommunity::read_from_id(object_id.clone(), context).await?; + if let Some(uc) = uc { + return Ok(Some(SearchableObjects::PersonOrCommunity(Box::new(uc)))); } - let p = ApubPerson::read_from_id(object_id.clone(), context).await?; - if let Some(p) = p { - return Ok(Some(SearchableObjects::Person(p))); - } - let p = ApubPost::read_from_id(object_id.clone(), context).await?; - if let Some(p) = p { - return Ok(Some(SearchableObjects::Post(p))); - } - let c = ApubComment::read_from_id(object_id, context).await?; - if let Some(c) = c { - return Ok(Some(SearchableObjects::Comment(c))); + let pc = PostOrComment::read_from_id(object_id.clone(), context).await?; + if let Some(pc) = pc { + return Ok(Some(SearchableObjects::PostOrComment(Box::new(pc)))); } Ok(None) } #[tracing::instrument(skip_all)] - async fn delete(self, data: &Data) -> Result<(), LemmyError> { + async fn delete(self, data: &Data) -> LemmyResult<()> { match self { - SearchableObjects::Person(p) => p.delete(data).await, - SearchableObjects::Community(c) => c.delete(data).await, - SearchableObjects::Post(p) => p.delete(data).await, - SearchableObjects::Comment(c) => c.delete(data).await, + SearchableObjects::PostOrComment(pc) => pc.delete(data).await, + SearchableObjects::PersonOrCommunity(pc) => pc.delete(data).await, } } - async fn into_json(self, _data: &Data) -> Result { - unimplemented!() + async fn into_json(self, data: &Data) -> LemmyResult { + use SearchableObjects::*; + Ok(match self { + PostOrComment(pc) => SearchableKinds::PageOrNote(Box::new(pc.into_json(data).await?)), + PersonOrCommunity(pc) => SearchableKinds::PersonOrGroup(Box::new(pc.into_json(data).await?)), + }) } #[tracing::instrument(skip_all)] @@ -137,24 +117,23 @@ impl Object for SearchableObjects { apub: &Self::Kind, expected_domain: &Url, data: &Data, - ) -> Result<(), LemmyError> { + ) -> LemmyResult<()> { + use SearchableKinds::*; match apub { - SearchableKinds::Group(a) => ApubCommunity::verify(a, expected_domain, data).await, - SearchableKinds::Person(a) => ApubPerson::verify(a, expected_domain, data).await, - SearchableKinds::Page(a) => ApubPost::verify(a, expected_domain, data).await, - SearchableKinds::Note(a) => ApubComment::verify(a, expected_domain, data).await, + PageOrNote(pn) => PostOrComment::verify(pn, expected_domain, data).await, + PersonOrGroup(pg) => 
UserOrCommunity::verify(pg, expected_domain, data).await, } } #[tracing::instrument(skip_all)] - async fn from_json(apub: Self::Kind, context: &Data) -> Result { - use SearchableKinds as SAT; + async fn from_json(apub: Self::Kind, context: &Data) -> LemmyResult { + use SearchableKinds::*; use SearchableObjects as SO; Ok(match apub { - SAT::Group(g) => SO::Community(ApubCommunity::from_json(g, context).await?), - SAT::Person(p) => SO::Person(ApubPerson::from_json(p, context).await?), - SAT::Page(p) => SO::Post(ApubPost::from_json(p, context).await?), - SAT::Note(n) => SO::Comment(ApubComment::from_json(n, context).await?), + PageOrNote(pg) => SO::PostOrComment(Box::new(PostOrComment::from_json(*pg, context).await?)), + PersonOrGroup(pg) => { + SO::PersonOrCommunity(Box::new(UserOrCommunity::from_json(*pg, context).await?)) + } }) } } diff --git a/crates/apub/src/fetcher/site_or_community_or_user.rs b/crates/apub/src/fetcher/site_or_community_or_user.rs index 76ee566c9..c6a1bb17e 100644 --- a/crates/apub/src/fetcher/site_or_community_or_user.rs +++ b/crates/apub/src/fetcher/site_or_community_or_user.rs @@ -1,6 +1,6 @@ use crate::{ fetcher::user_or_community::{PersonOrGroup, UserOrCommunity}, - objects::instance::ApubSite, + objects::{community::ApubCommunity, instance::ApubSite, person::ApubPerson}, protocol::objects::instance::Instance, }; use activitypub_federation::{ @@ -9,7 +9,7 @@ use activitypub_federation::{ }; use chrono::{DateTime, Utc}; use lemmy_api_common::context::LemmyContext; -use lemmy_utils::error::LemmyError; +use lemmy_utils::error::{LemmyError, LemmyResult}; use reqwest::Url; use serde::{Deserialize, Serialize}; @@ -41,23 +41,31 @@ impl Object for SiteOrCommunityOrUser { } #[tracing::instrument(skip_all)] - async fn read_from_id( - _object_id: Url, - _data: &Data, - ) -> Result, LemmyError> { - unimplemented!(); + async fn read_from_id(object_id: Url, data: &Data) -> LemmyResult> { + let site = ApubSite::read_from_id(object_id.clone(), data).await?; + Ok(match site { + Some(o) => Some(SiteOrCommunityOrUser::Site(o)), + None => UserOrCommunity::read_from_id(object_id, data) + .await? + .map(SiteOrCommunityOrUser::UserOrCommunity), + }) } #[tracing::instrument(skip_all)] - async fn delete(self, data: &Data) -> Result<(), LemmyError> { + async fn delete(self, data: &Data) -> LemmyResult<()> { match self { SiteOrCommunityOrUser::Site(p) => p.delete(data).await, SiteOrCommunityOrUser::UserOrCommunity(p) => p.delete(data).await, } } - async fn into_json(self, _data: &Data) -> Result { - unimplemented!() + async fn into_json(self, data: &Data) -> LemmyResult { + Ok(match self { + SiteOrCommunityOrUser::Site(p) => SiteOrPersonOrGroup::Instance(p.into_json(data).await?), + SiteOrCommunityOrUser::UserOrCommunity(p) => { + SiteOrPersonOrGroup::PersonOrGroup(p.into_json(data).await?) 
+ } + }) } #[tracing::instrument(skip_all)] @@ -65,7 +73,7 @@ impl Object for SiteOrCommunityOrUser { apub: &Self::Kind, expected_domain: &Url, data: &Data, - ) -> Result<(), LemmyError> { + ) -> LemmyResult<()> { match apub { SiteOrPersonOrGroup::Instance(a) => ApubSite::verify(a, expected_domain, data).await, SiteOrPersonOrGroup::PersonOrGroup(a) => { @@ -75,8 +83,18 @@ impl Object for SiteOrCommunityOrUser { } #[tracing::instrument(skip_all)] - async fn from_json(_apub: Self::Kind, _data: &Data) -> Result { - unimplemented!(); + async fn from_json(apub: Self::Kind, data: &Data) -> LemmyResult { + Ok(match apub { + SiteOrPersonOrGroup::Instance(a) => { + SiteOrCommunityOrUser::Site(ApubSite::from_json(a, data).await?) + } + SiteOrPersonOrGroup::PersonOrGroup(a) => SiteOrCommunityOrUser::UserOrCommunity(match a { + PersonOrGroup::Person(p) => UserOrCommunity::User(ApubPerson::from_json(p, data).await?), + PersonOrGroup::Group(g) => { + UserOrCommunity::Community(ApubCommunity::from_json(g, data).await?) + } + }), + }) } } @@ -103,6 +121,9 @@ impl Actor for SiteOrCommunityOrUser { } fn inbox(&self) -> Url { - unimplemented!() + match self { + SiteOrCommunityOrUser::Site(u) => u.inbox(), + SiteOrCommunityOrUser::UserOrCommunity(c) => c.inbox(), + } } } diff --git a/crates/apub/src/fetcher/user_or_community.rs b/crates/apub/src/fetcher/user_or_community.rs index 93e955c7b..129af8803 100644 --- a/crates/apub/src/fetcher/user_or_community.rs +++ b/crates/apub/src/fetcher/user_or_community.rs @@ -10,7 +10,7 @@ use activitypub_federation::{ use chrono::{DateTime, Utc}; use lemmy_api_common::context::LemmyContext; use lemmy_db_schema::source::activity::ActorType; -use lemmy_utils::error::LemmyError; +use lemmy_utils::error::{LemmyError, LemmyResult}; use serde::{Deserialize, Serialize}; use url::Url; @@ -47,10 +47,7 @@ impl Object for UserOrCommunity { } #[tracing::instrument(skip_all)] - async fn read_from_id( - object_id: Url, - data: &Data, - ) -> Result, LemmyError> { + async fn read_from_id(object_id: Url, data: &Data) -> LemmyResult> { let person = ApubPerson::read_from_id(object_id.clone(), data).await?; Ok(match person { Some(o) => Some(UserOrCommunity::User(o)), @@ -61,15 +58,18 @@ impl Object for UserOrCommunity { } #[tracing::instrument(skip_all)] - async fn delete(self, data: &Data) -> Result<(), LemmyError> { + async fn delete(self, data: &Data) -> LemmyResult<()> { match self { UserOrCommunity::User(p) => p.delete(data).await, UserOrCommunity::Community(p) => p.delete(data).await, } } - async fn into_json(self, _data: &Data) -> Result { - unimplemented!() + async fn into_json(self, data: &Data) -> LemmyResult { + Ok(match self { + UserOrCommunity::User(p) => PersonOrGroup::Person(p.into_json(data).await?), + UserOrCommunity::Community(p) => PersonOrGroup::Group(p.into_json(data).await?), + }) } #[tracing::instrument(skip_all)] @@ -77,7 +77,7 @@ impl Object for UserOrCommunity { apub: &Self::Kind, expected_domain: &Url, data: &Data, - ) -> Result<(), LemmyError> { + ) -> LemmyResult<()> { match apub { PersonOrGroup::Person(a) => ApubPerson::verify(a, expected_domain, data).await, PersonOrGroup::Group(a) => ApubCommunity::verify(a, expected_domain, data).await, @@ -85,7 +85,7 @@ impl Object for UserOrCommunity { } #[tracing::instrument(skip_all)] - async fn from_json(apub: Self::Kind, data: &Data) -> Result { + async fn from_json(apub: Self::Kind, data: &Data) -> LemmyResult { Ok(match apub { PersonOrGroup::Person(p) => UserOrCommunity::User(ApubPerson::from_json(p, data).await?), 
PersonOrGroup::Group(p) => { @@ -118,7 +118,10 @@ impl Actor for UserOrCommunity { } fn inbox(&self) -> Url { - unimplemented!() + match self { + UserOrCommunity::User(p) => p.inbox(), + UserOrCommunity::Community(p) => p.inbox(), + } } } diff --git a/crates/apub/src/http/comment.rs b/crates/apub/src/http/comment.rs index 931caaee4..d6b3c818d 100644 --- a/crates/apub/src/http/comment.rs +++ b/crates/apub/src/http/comment.rs @@ -1,12 +1,21 @@ use crate::{ - http::{create_apub_response, create_apub_tombstone_response, err_object_not_local}, + http::{ + check_community_public, + create_apub_response, + create_apub_tombstone_response, + redirect_remote_object, + }, objects::comment::ApubComment, }; use activitypub_federation::{config::Data, traits::Object}; use actix_web::{web::Path, HttpResponse}; use lemmy_api_common::context::LemmyContext; -use lemmy_db_schema::{newtypes::CommentId, source::comment::Comment, traits::Crud}; -use lemmy_utils::error::LemmyError; +use lemmy_db_schema::{ + newtypes::CommentId, + source::{comment::Comment, community::Community, post::Post}, + traits::Crud, +}; +use lemmy_utils::error::LemmyResult; use serde::Deserialize; #[derive(Deserialize)] @@ -19,11 +28,16 @@ pub(crate) struct CommentQuery { pub(crate) async fn get_apub_comment( info: Path, context: Data, -) -> Result { +) -> LemmyResult { let id = CommentId(info.comment_id.parse::()?); + // Can't use CommentView here because it excludes deleted/removed/local-only items let comment: ApubComment = Comment::read(&mut context.pool(), id).await?.into(); + let post = Post::read(&mut context.pool(), comment.post_id).await?; + let community = Community::read(&mut context.pool(), post.community_id).await?; + check_community_public(&community)?; + if !comment.local { - Err(err_object_not_local()) + Ok(redirect_remote_object(&comment.ap_id)) } else if !comment.deleted && !comment.removed { create_apub_response(&comment.into_json(&context).await?) 
} else { diff --git a/crates/apub/src/http/community.rs b/crates/apub/src/http/community.rs index 3c964fe0a..37482aedb 100644 --- a/crates/apub/src/http/community.rs +++ b/crates/apub/src/http/community.rs @@ -1,27 +1,24 @@ use crate::{ - activity_lists::GroupInboxActivities, collections::{ community_featured::ApubCommunityFeatured, community_follower::ApubCommunityFollower, community_moderators::ApubCommunityModerators, community_outbox::ApubCommunityOutbox, }, - http::{create_apub_response, create_apub_tombstone_response}, - objects::{community::ApubCommunity, person::ApubPerson}, + http::{check_community_public, create_apub_response, create_apub_tombstone_response}, + objects::community::ApubCommunity, }; use activitypub_federation::{ - actix_web::inbox::receive_activity, config::Data, - protocol::context::WithContext, traits::{Collection, Object}, }; -use actix_web::{web, web::Bytes, HttpRequest, HttpResponse}; +use actix_web::{web, HttpResponse}; use lemmy_api_common::context::LemmyContext; use lemmy_db_schema::{source::community::Community, traits::ApubActor}; -use lemmy_utils::error::{LemmyError, LemmyErrorType}; +use lemmy_utils::{error::LemmyResult, LemmyErrorType}; use serde::Deserialize; -#[derive(Deserialize)] +#[derive(Deserialize, Clone)] pub(crate) struct CommunityQuery { community_name: String, } @@ -31,58 +28,47 @@ pub(crate) struct CommunityQuery { pub(crate) async fn get_apub_community_http( info: web::Path, context: Data, -) -> Result { +) -> LemmyResult { let community: ApubCommunity = Community::read_from_name(&mut context.pool(), &info.community_name, true) .await? + .ok_or(LemmyErrorType::NotFound)? .into(); - if !community.deleted && !community.removed { - let apub = community.into_json(&context).await?; - - create_apub_response(&apub) - } else { - create_apub_tombstone_response(community.actor_id.clone()) + if community.deleted || community.removed { + return create_apub_tombstone_response(community.actor_id.clone()); } -} + check_community_public(&community)?; -/// Handler for all incoming receive to community inboxes. -#[tracing::instrument(skip_all)] -pub async fn community_inbox( - request: HttpRequest, - body: Bytes, - data: Data, -) -> Result { - receive_activity::, ApubPerson, LemmyContext>( - request, body, &data, - ) - .await + let apub = community.into_json(&context).await?; + create_apub_response(&apub) } /// Returns an empty followers collection, only populating the size (for privacy). pub(crate) async fn get_apub_community_followers( info: web::Path, context: Data, -) -> Result { - let community = - Community::read_from_name(&mut context.pool(), &info.community_name, false).await?; +) -> LemmyResult { + let community = Community::read_from_name(&mut context.pool(), &info.community_name, false) + .await? + .ok_or(LemmyErrorType::NotFound)?; + check_community_public(&community)?; let followers = ApubCommunityFollower::read_local(&community.into(), &context).await?; create_apub_response(&followers) } /// Returns the community outbox, which is populated by a maximum of 20 posts (but no other -/// activites like votes or comments). +/// activities like votes or comments). pub(crate) async fn get_apub_community_outbox( info: web::Path, context: Data, -) -> Result { +) -> LemmyResult { let community: ApubCommunity = Community::read_from_name(&mut context.pool(), &info.community_name, false) .await? + .ok_or(LemmyErrorType::NotFound)? .into(); - if community.deleted || community.removed { - Err(LemmyErrorType::Deleted)? 
- } + check_community_public(&community)?; let outbox = ApubCommunityOutbox::read_local(&community, &context).await?; create_apub_response(&outbox) } @@ -91,14 +77,13 @@ pub(crate) async fn get_apub_community_outbox( pub(crate) async fn get_apub_community_moderators( info: web::Path, context: Data, -) -> Result { +) -> LemmyResult { let community: ApubCommunity = Community::read_from_name(&mut context.pool(), &info.community_name, false) .await? + .ok_or(LemmyErrorType::NotFound)? .into(); - if community.deleted || community.removed { - Err(LemmyErrorType::Deleted)? - } + check_community_public(&community)?; let moderators = ApubCommunityModerators::read_local(&community, &context).await?; create_apub_response(&moderators) } @@ -107,14 +92,180 @@ pub(crate) async fn get_apub_community_moderators( pub(crate) async fn get_apub_community_featured( info: web::Path, context: Data, -) -> Result { +) -> LemmyResult { let community: ApubCommunity = Community::read_from_name(&mut context.pool(), &info.community_name, false) .await? + .ok_or(LemmyErrorType::NotFound)? .into(); - if community.deleted || community.removed { - Err(LemmyErrorType::Deleted)? - } + check_community_public(&community)?; let featured = ApubCommunityFeatured::read_local(&community, &context).await?; create_apub_response(&featured) } + +#[cfg(test)] +pub(crate) mod tests { + + use super::*; + use crate::protocol::objects::{group::Group, tombstone::Tombstone}; + use actix_web::body::to_bytes; + use lemmy_db_schema::{ + newtypes::InstanceId, + source::{ + community::CommunityInsertForm, + instance::Instance, + local_site::{LocalSite, LocalSiteInsertForm}, + local_site_rate_limit::{LocalSiteRateLimit, LocalSiteRateLimitInsertForm}, + site::{Site, SiteInsertForm}, + }, + traits::Crud, + CommunityVisibility, + }; + use serde::de::DeserializeOwned; + use serial_test::serial; + + async fn init( + deleted: bool, + visibility: CommunityVisibility, + context: &Data, + ) -> LemmyResult<(Instance, Community)> { + let instance = + Instance::read_or_create(&mut context.pool(), "my_domain.tld".to_string()).await?; + create_local_site(context, instance.id).await?; + + let community_form = CommunityInsertForm { + deleted: Some(deleted), + visibility: Some(visibility), + ..CommunityInsertForm::new( + instance.id, + "testcom6".to_string(), + "nada".to_owned(), + "pubkey".to_string(), + ) + }; + let community = Community::create(&mut context.pool(), &community_form).await?; + Ok((instance, community)) + } + + /// Necessary for the community outbox fetching + async fn create_local_site( + context: &Data, + instance_id: InstanceId, + ) -> LemmyResult<()> { + // Create a local site, since this is necessary for community fetching. + let site_form = SiteInsertForm::new("test site".to_string(), instance_id); + let site = Site::create(&mut context.pool(), &site_form).await?; + + let local_site_form = LocalSiteInsertForm::new(site.id); + let local_site = LocalSite::create(&mut context.pool(), &local_site_form).await?; + + let local_site_rate_limit_form = LocalSiteRateLimitInsertForm::new(local_site.id); + LocalSiteRateLimit::create(&mut context.pool(), &local_site_rate_limit_form).await?; + Ok(()) + } + + async fn decode_response(res: HttpResponse) -> LemmyResult { + let body = to_bytes(res.into_body()).await.unwrap_or_default(); + let body = std::str::from_utf8(&body)?; + Ok(serde_json::from_str(body)?) 
+ } + + #[tokio::test] + #[serial] + async fn test_get_community() -> LemmyResult<()> { + let context = LemmyContext::init_test_context().await; + let (instance, community) = init(false, CommunityVisibility::Public, &context).await?; + + // fetch invalid community + let query = CommunityQuery { + community_name: "asd".to_string(), + }; + let res = get_apub_community_http(query.into(), context.reset_request_count()).await; + assert!(res.is_err()); + + // fetch valid community + let query = CommunityQuery { + community_name: community.name.clone(), + }; + let res = get_apub_community_http(query.clone().into(), context.reset_request_count()).await?; + assert_eq!(200, res.status()); + let res_group: Group = decode_response(res).await?; + let community: ApubCommunity = community.into(); + let group = community.clone().into_json(&context).await?; + assert_eq!(group, res_group); + + let res = + get_apub_community_featured(query.clone().into(), context.reset_request_count()).await?; + assert_eq!(200, res.status()); + let res = + get_apub_community_followers(query.clone().into(), context.reset_request_count()).await?; + assert_eq!(200, res.status()); + let res = + get_apub_community_moderators(query.clone().into(), context.reset_request_count()).await?; + assert_eq!(200, res.status()); + let res = get_apub_community_outbox(query.into(), context.reset_request_count()).await?; + assert_eq!(200, res.status()); + + Instance::delete(&mut context.pool(), instance.id).await?; + Ok(()) + } + + #[tokio::test] + #[serial] + async fn test_get_deleted_community() -> LemmyResult<()> { + let context = LemmyContext::init_test_context().await; + let (instance, community) = init(true, CommunityVisibility::LocalOnly, &context).await?; + + // should return tombstone + let query = CommunityQuery { + community_name: community.name.clone(), + }; + let res = get_apub_community_http(query.clone().into(), context.reset_request_count()).await?; + assert_eq!(410, res.status()); + let res_tombstone = decode_response::(res).await; + assert!(res_tombstone.is_ok()); + + let res = + get_apub_community_featured(query.clone().into(), context.reset_request_count()).await; + assert!(res.is_err()); + let res = + get_apub_community_followers(query.clone().into(), context.reset_request_count()).await; + assert!(res.is_err()); + let res = + get_apub_community_moderators(query.clone().into(), context.reset_request_count()).await; + assert!(res.is_err()); + let res = get_apub_community_outbox(query.into(), context.reset_request_count()).await; + assert!(res.is_err()); + + //Community::delete(&mut context.pool(), community.id).await?; + Instance::delete(&mut context.pool(), instance.id).await?; + Ok(()) + } + + #[tokio::test] + #[serial] + async fn test_get_local_only_community() -> LemmyResult<()> { + let context = LemmyContext::init_test_context().await; + let (instance, community) = init(false, CommunityVisibility::LocalOnly, &context).await?; + + let query = CommunityQuery { + community_name: community.name.clone(), + }; + let res = get_apub_community_http(query.clone().into(), context.reset_request_count()).await; + assert!(res.is_err()); + let res = + get_apub_community_featured(query.clone().into(), context.reset_request_count()).await; + assert!(res.is_err()); + let res = + get_apub_community_followers(query.clone().into(), context.reset_request_count()).await; + assert!(res.is_err()); + let res = + get_apub_community_moderators(query.clone().into(), context.reset_request_count()).await; + assert!(res.is_err()); + let res = 
get_apub_community_outbox(query.into(), context.reset_request_count()).await; + assert!(res.is_err()); + + Instance::delete(&mut context.pool(), instance.id).await?; + Ok(()) + } +} diff --git a/crates/apub/src/http/mod.rs b/crates/apub/src/http/mod.rs index c261d9e49..bc148eb9c 100644 --- a/crates/apub/src/http/mod.rs +++ b/crates/apub/src/http/mod.rs @@ -2,7 +2,7 @@ use crate::{ activity_lists::SharedInboxActivities, fetcher::user_or_community::UserOrCommunity, protocol::objects::tombstone::Tombstone, - CONTEXT, + FEDERATION_CONTEXT, }; use activitypub_federation::{ actix_web::inbox::receive_activity, @@ -11,12 +11,16 @@ use activitypub_federation::{ FEDERATION_CONTENT_TYPE, }; use actix_web::{web, web::Bytes, HttpRequest, HttpResponse}; -use http::StatusCode; use lemmy_api_common::context::LemmyContext; -use lemmy_db_schema::source::activity::SentActivity; -use lemmy_utils::error::{LemmyError, LemmyErrorType, LemmyResult}; +use lemmy_db_schema::{ + newtypes::DbUrl, + source::{activity::SentActivity, community::Community}, + CommunityVisibility, +}; +use lemmy_utils::error::{FederationError, LemmyErrorType, LemmyResult}; use serde::{Deserialize, Serialize}; -use std::ops::Deref; +use std::{ops::Deref, time::Duration}; +use tokio::time::timeout; use url::Url; mod comment; @@ -26,13 +30,22 @@ mod post; pub mod routes; pub mod site; +const INCOMING_ACTIVITY_TIMEOUT: Duration = Duration::from_secs(9); + pub async fn shared_inbox( request: HttpRequest, body: Bytes, data: Data, ) -> LemmyResult { - receive_activity::(request, body, &data) + let receive_fut = + receive_activity::(request, body, &data); + // Set a timeout shorter than `REQWEST_TIMEOUT` for processing incoming activities. This is to + // avoid taking a long time to process an incoming activity when a required data fetch times out. + // In this case our own instance would timeout and be marked as dead by the sender. Better to + // consider the activity broken and move on. + timeout(INCOMING_ACTIVITY_TIMEOUT, receive_fut) .await + .map_err(|_| FederationError::InboxTimeout)? 
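A minimal sketch of the timeout wrapper used here, assuming only tokio and a generic future; the real handler maps the elapsed error to FederationError::InboxTimeout:

use std::time::Duration;
use tokio::time::timeout;

const INCOMING_ACTIVITY_TIMEOUT: Duration = Duration::from_secs(9);

/// Run an inbox-style future, but give up after the timeout so a slow upstream
/// fetch cannot make this instance look dead to the sender.
async fn run_with_timeout<F, T>(fut: F) -> Result<T, String>
where
  F: std::future::Future<Output = Result<T, String>>,
{
  match timeout(INCOMING_ACTIVITY_TIMEOUT, fut).await {
    Ok(res) => res,
    Err(_elapsed) => Err("incoming activity timed out".to_string()),
  }
}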
} /// Convert the data to json and turn it into an HTTP Response with the correct ActivityPub @@ -43,7 +56,7 @@ fn create_apub_response(data: &T) -> LemmyResult where T: Serialize, { - let json = serde_json::to_string_pretty(&WithContext::new(data, CONTEXT.clone()))?; + let json = serde_json::to_string_pretty(&WithContext::new(data, FEDERATION_CONTEXT.clone()))?; Ok( HttpResponse::Ok() @@ -54,18 +67,23 @@ where fn create_apub_tombstone_response>(id: T) -> LemmyResult { let tombstone = Tombstone::new(id.into()); - let json = serde_json::to_string_pretty(&WithContext::new(tombstone, CONTEXT.deref().clone()))?; + let json = serde_json::to_string_pretty(&WithContext::new( + tombstone, + FEDERATION_CONTEXT.deref().clone(), + ))?; Ok( HttpResponse::Gone() .content_type(FEDERATION_CONTENT_TYPE) - .status(StatusCode::GONE) + .status(actix_web::http::StatusCode::GONE) .body(json), ) } -fn err_object_not_local() -> LemmyError { - LemmyErrorType::ObjectNotLocal.into() +fn redirect_remote_object(url: &DbUrl) -> HttpResponse { + let mut res = HttpResponse::PermanentRedirect(); + res.insert_header((actix_web::http::header::LOCATION, url.as_str())); + res.finish() } #[derive(Deserialize)] @@ -79,7 +97,7 @@ pub struct ActivityQuery { pub(crate) async fn get_activity( info: web::Path, context: web::Data, -) -> Result { +) -> LemmyResult { let settings = context.settings(); let activity_id = Url::parse(&format!( "{}/activities/{}/{}", @@ -88,7 +106,9 @@ pub(crate) async fn get_activity( info.id ))? .into(); - let activity = SentActivity::read_from_apub_id(&mut context.pool(), &activity_id).await?; + let activity = SentActivity::read_from_apub_id(&mut context.pool(), &activity_id) + .await + .map_err(|_| FederationError::CouldntFindActivity)?; let sensitive = activity.sensitive; if sensitive { @@ -97,3 +117,14 @@ pub(crate) async fn get_activity( create_apub_response(&activity.data) } } + +/// Ensure that the community is public and not removed/deleted. +fn check_community_public(community: &Community) -> LemmyResult<()> { + if community.deleted || community.removed { + Err(LemmyErrorType::Deleted)? 
+ } + if community.visibility != CommunityVisibility::Public { + return Err(LemmyErrorType::NotFound.into()); + } + Ok(()) +} diff --git a/crates/apub/src/http/person.rs b/crates/apub/src/http/person.rs index 254313634..0f628c497 100644 --- a/crates/apub/src/http/person.rs +++ b/crates/apub/src/http/person.rs @@ -1,20 +1,13 @@ use crate::{ - activity_lists::PersonInboxActivities, - fetcher::user_or_community::UserOrCommunity, http::{create_apub_response, create_apub_tombstone_response}, objects::person::ApubPerson, protocol::collections::empty_outbox::EmptyOutbox, }; -use activitypub_federation::{ - actix_web::inbox::receive_activity, - config::Data, - protocol::context::WithContext, - traits::Object, -}; -use actix_web::{web, web::Bytes, HttpRequest, HttpResponse}; +use activitypub_federation::{config::Data, traits::Object}; +use actix_web::{web, HttpResponse}; use lemmy_api_common::{context::LemmyContext, utils::generate_outbox_url}; use lemmy_db_schema::{source::person::Person, traits::ApubActor}; -use lemmy_utils::error::LemmyError; +use lemmy_utils::{error::LemmyResult, LemmyErrorType}; use serde::Deserialize; #[derive(Deserialize)] @@ -27,11 +20,12 @@ pub struct PersonQuery { pub(crate) async fn get_apub_person_http( info: web::Path, context: Data, -) -> Result { +) -> LemmyResult { let user_name = info.into_inner().user_name; // TODO: this needs to be able to read deleted persons, so that it can send tombstones let person: ApubPerson = Person::read_from_name(&mut context.pool(), &user_name, true) .await? + .ok_or(LemmyErrorType::NotFound)? .into(); if !person.deleted { @@ -43,24 +37,14 @@ pub(crate) async fn get_apub_person_http( } } -#[tracing::instrument(skip_all)] -pub async fn person_inbox( - request: HttpRequest, - body: Bytes, - data: Data, -) -> Result { - receive_activity::, UserOrCommunity, LemmyContext>( - request, body, &data, - ) - .await -} - #[tracing::instrument(skip_all)] pub(crate) async fn get_apub_person_outbox( info: web::Path, context: Data, -) -> Result { - let person = Person::read_from_name(&mut context.pool(), &info.user_name, false).await?; +) -> LemmyResult { + let person = Person::read_from_name(&mut context.pool(), &info.user_name, false) + .await? 
+ .ok_or(LemmyErrorType::NotFound)?; let outbox_id = generate_outbox_url(&person.actor_id)?.into(); let outbox = EmptyOutbox::new(outbox_id)?; create_apub_response(&outbox) diff --git a/crates/apub/src/http/post.rs b/crates/apub/src/http/post.rs index f65968f15..ce6612826 100644 --- a/crates/apub/src/http/post.rs +++ b/crates/apub/src/http/post.rs @@ -1,12 +1,21 @@ use crate::{ - http::{create_apub_response, create_apub_tombstone_response, err_object_not_local}, + http::{ + check_community_public, + create_apub_response, + create_apub_tombstone_response, + redirect_remote_object, + }, objects::post::ApubPost, }; use activitypub_federation::{config::Data, traits::Object}; use actix_web::{web, HttpResponse}; use lemmy_api_common::context::LemmyContext; -use lemmy_db_schema::{newtypes::PostId, source::post::Post, traits::Crud}; -use lemmy_utils::error::LemmyError; +use lemmy_db_schema::{ + newtypes::PostId, + source::{community::Community, post::Post}, + traits::Crud, +}; +use lemmy_utils::error::LemmyResult; use serde::Deserialize; #[derive(Deserialize)] @@ -19,11 +28,15 @@ pub(crate) struct PostQuery { pub(crate) async fn get_apub_post( info: web::Path, context: Data, -) -> Result { +) -> LemmyResult { let id = PostId(info.post_id.parse::()?); + // Can't use PostView here because it excludes deleted/removed/local-only items let post: ApubPost = Post::read(&mut context.pool(), id).await?.into(); + let community = Community::read(&mut context.pool(), post.community_id).await?; + check_community_public(&community)?; + if !post.local { - Err(err_object_not_local()) + Ok(redirect_remote_object(&post.ap_id)) } else if !post.deleted && !post.removed { create_apub_response(&post.into_json(&context).await?) } else { diff --git a/crates/apub/src/http/routes.rs b/crates/apub/src/http/routes.rs index 0012fccf2..9479e6312 100644 --- a/crates/apub/src/http/routes.rs +++ b/crates/apub/src/http/routes.rs @@ -1,7 +1,6 @@ use crate::http::{ comment::get_apub_comment, community::{ - community_inbox, get_apub_community_featured, get_apub_community_followers, get_apub_community_http, @@ -9,10 +8,10 @@ use crate::http::{ get_apub_community_outbox, }, get_activity, - person::{get_apub_person_http, get_apub_person_outbox, person_inbox}, + person::{get_apub_person_http, get_apub_person_outbox}, post::get_apub_post, shared_inbox, - site::{get_apub_site_http, get_apub_site_inbox, get_apub_site_outbox}, + site::{get_apub_site_http, get_apub_site_outbox}, }; use actix_web::{ guard::{Guard, GuardContext}, @@ -56,10 +55,7 @@ pub fn config(cfg: &mut web::ServiceConfig) { cfg.service( web::scope("") .guard(InboxRequestGuard) - .route("/c/{community_name}/inbox", web::post().to(community_inbox)) - .route("/u/{user_name}/inbox", web::post().to(person_inbox)) - .route("/inbox", web::post().to(shared_inbox)) - .route("/site_inbox", web::post().to(get_apub_site_inbox)), + .route("/inbox", web::post().to(shared_inbox)), ); } diff --git a/crates/apub/src/http/site.rs b/crates/apub/src/http/site.rs index 071f4aa6e..95175a006 100644 --- a/crates/apub/src/http/site.rs +++ b/crates/apub/src/http/site.rs @@ -1,34 +1,24 @@ use crate::{ - activity_lists::SiteInboxActivities, http::create_apub_response, - objects::{instance::ApubSite, person::ApubPerson}, + objects::instance::ApubSite, protocol::collections::empty_outbox::EmptyOutbox, }; -use activitypub_federation::{ - actix_web::inbox::receive_activity, - config::Data, - protocol::context::WithContext, - traits::Object, -}; -use actix_web::{web::Bytes, HttpRequest, HttpResponse}; +use 
activitypub_federation::{config::Data, traits::Object}; +use actix_web::HttpResponse; use lemmy_api_common::context::LemmyContext; -use lemmy_db_views::structs::SiteView; -use lemmy_utils::error::LemmyError; +use lemmy_db_schema::source::site::Site; +use lemmy_utils::error::LemmyResult; use url::Url; -pub(crate) async fn get_apub_site_http( - context: Data, -) -> Result { - let site: ApubSite = SiteView::read_local(&mut context.pool()).await?.site.into(); +pub(crate) async fn get_apub_site_http(context: Data) -> LemmyResult { + let site: ApubSite = Site::read_local(&mut context.pool()).await?.into(); let apub = site.into_json(&context).await?; create_apub_response(&apub) } #[tracing::instrument(skip_all)] -pub(crate) async fn get_apub_site_outbox( - context: Data, -) -> Result { +pub(crate) async fn get_apub_site_outbox(context: Data) -> LemmyResult { let outbox_id = format!( "{}/site_outbox", context.settings().get_protocol_and_hostname() @@ -36,15 +26,3 @@ pub(crate) async fn get_apub_site_outbox( let outbox = EmptyOutbox::new(Url::parse(&outbox_id)?)?; create_apub_response(&outbox) } - -#[tracing::instrument(skip_all)] -pub async fn get_apub_site_inbox( - request: HttpRequest, - body: Bytes, - data: Data, -) -> Result { - receive_activity::, ApubPerson, LemmyContext>( - request, body, &data, - ) - .await -} diff --git a/crates/apub/src/lib.rs b/crates/apub/src/lib.rs index 1ae214c19..a04aec655 100644 --- a/crates/apub/src/lib.rs +++ b/crates/apub/src/lib.rs @@ -1,16 +1,21 @@ use crate::fetcher::post_or_comment::PostOrComment; -use activitypub_federation::config::{Data, UrlVerifier}; -use anyhow::anyhow; +use activitypub_federation::{ + config::{Data, UrlVerifier}, + error::Error as ActivityPubError, +}; use async_trait::async_trait; use lemmy_api_common::context::LemmyContext; use lemmy_db_schema::{ source::{activity::ReceivedActivity, instance::Instance, local_site::LocalSite}, utils::{ActualDbPool, DbPool}, }; -use lemmy_utils::error::{LemmyError, LemmyErrorType, LemmyResult}; +use lemmy_utils::{ + error::{FederationError, LemmyError, LemmyErrorType, LemmyResult}, + CACHE_DURATION_FEDERATION, +}; use moka::future::Cache; -use once_cell::sync::Lazy; -use std::{sync::Arc, time::Duration}; +use serde_json::Value; +use std::sync::{Arc, LazyLock}; use url::Url; pub mod activities; @@ -23,15 +28,18 @@ pub(crate) mod mentions; pub mod objects; pub mod protocol; -pub const FEDERATION_HTTP_FETCH_LIMIT: u32 = 50; -/// All incoming and outgoing federation actions read the blocklist/allowlist and slur filters -/// multiple times. This causes a huge number of database reads if we hit the db directly. So we -/// cache these values for a short time, which will already make a huge difference and ensures that -/// changes take effect quickly. -const BLOCKLIST_CACHE_DURATION: Duration = Duration::from_secs(60); +/// Maximum number of outgoing HTTP requests to fetch a single object. Needs to be high enough +/// to fetch a new community with posts, moderators and featured posts. +pub const FEDERATION_HTTP_FETCH_LIMIT: u32 = 100; -static CONTEXT: Lazy> = Lazy::new(|| { - serde_json::from_str(include_str!("../assets/lemmy/context.json")).expect("parse context") +/// Only include a basic context to save space and bandwidth. The main context is hosted statically +/// on join-lemmy.org. Include activitystreams explicitly for better compat, but this could +/// theoretically also be moved. 
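Since the comment above says only a compact @context is embedded in outgoing objects, here is a hedged sketch of how a payload could carry that context; WithCompactContext is a made-up type for illustration, not the library's WithContext:

use serde::Serialize;
use serde_json::{json, Value};

/// Hypothetical wrapper that serializes any payload with a compact JSON-LD @context.
#[derive(Serialize)]
struct WithCompactContext<T: Serialize> {
  #[serde(rename = "@context")]
  context: Value,
  #[serde(flatten)]
  inner: T,
}

fn with_compact_context<T: Serialize>(inner: T) -> WithCompactContext<T> {
  WithCompactContext {
    context: json!([
      "https://join-lemmy.org/context.json",
      "https://www.w3.org/ns/activitystreams",
    ]),
    inner,
  }
}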
+pub static FEDERATION_CONTEXT: LazyLock = LazyLock::new(|| { + Value::Array(vec![ + Value::String("https://join-lemmy.org/context.json".to_string()), + Value::String("https://www.w3.org/ns/activitystreams".to_string()), + ]) }); #[derive(Clone)] @@ -39,24 +47,25 @@ pub struct VerifyUrlData(pub ActualDbPool); #[async_trait] impl UrlVerifier for VerifyUrlData { - async fn verify(&self, url: &Url) -> Result<(), anyhow::Error> { + async fn verify(&self, url: &Url) -> Result<(), ActivityPubError> { let local_site_data = local_site_data_cached(&mut (&self.0).into()) .await .expect("read local site data"); + use FederationError::*; check_apub_id_valid(url, &local_site_data).map_err(|err| match err { LemmyError { - error_type: LemmyErrorType::FederationDisabled, + error_type: LemmyErrorType::FederationError(Some(FederationDisabled)), .. - } => anyhow!("Federation disabled"), + } => ActivityPubError::Other("Federation disabled".into()), LemmyError { - error_type: LemmyErrorType::DomainBlocked(domain), + error_type: LemmyErrorType::FederationError(Some(DomainBlocked(domain))), .. - } => anyhow!("Domain {domain:?} is blocked"), + } => ActivityPubError::Other(format!("Domain {domain:?} is blocked")), LemmyError { - error_type: LemmyErrorType::DomainNotInAllowList(domain), + error_type: LemmyErrorType::FederationError(Some(DomainNotInAllowList(domain))), .. - } => anyhow!("Domain {domain:?} is not in allowlist"), - _ => anyhow!("Failed validating apub id"), + } => ActivityPubError::Other(format!("Domain {domain:?} is not in allowlist")), + _ => ActivityPubError::Other("Failed validating apub id".into()), })?; Ok(()) } @@ -70,8 +79,11 @@ impl UrlVerifier for VerifyUrlData { /// - URL being in the allowlist (if it is active) /// - URL not being in the blocklist (if it is active) #[tracing::instrument(skip(local_site_data))] -fn check_apub_id_valid(apub_id: &Url, local_site_data: &LocalSiteData) -> Result<(), LemmyError> { - let domain = apub_id.domain().expect("apud id has domain").to_string(); +fn check_apub_id_valid(apub_id: &Url, local_site_data: &LocalSiteData) -> LemmyResult<()> { + let domain = apub_id + .domain() + .ok_or(FederationError::UrlWithoutDomain)? + .to_string(); if !local_site_data .local_site @@ -79,7 +91,7 @@ fn check_apub_id_valid(apub_id: &Url, local_site_data: &LocalSiteData) -> Result .map(|l| l.federation_enabled) .unwrap_or(true) { - Err(LemmyErrorType::FederationDisabled)? + Err(FederationError::FederationDisabled)? } if local_site_data @@ -87,7 +99,7 @@ fn check_apub_id_valid(apub_id: &Url, local_site_data: &LocalSiteData) -> Result .iter() .any(|i| domain.to_lowercase().eq(&i.domain.to_lowercase())) { - Err(LemmyErrorType::DomainBlocked(domain.clone()))? + Err(FederationError::DomainBlocked(domain.clone()))? } // Only check this if there are instances in the allowlist @@ -97,7 +109,7 @@ fn check_apub_id_valid(apub_id: &Url, local_site_data: &LocalSiteData) -> Result .iter() .any(|i| domain.to_lowercase().eq(&i.domain.to_lowercase())) { - Err(LemmyErrorType::DomainNotInAllowList(domain))? + Err(FederationError::DomainNotInAllowList(domain))? } Ok(()) @@ -113,10 +125,14 @@ pub(crate) struct LocalSiteData { pub(crate) async fn local_site_data_cached( pool: &mut DbPool<'_>, ) -> LemmyResult> { - static CACHE: Lazy>> = Lazy::new(|| { + // All incoming and outgoing federation actions read the blocklist/allowlist and slur filters + // multiple times. This causes a huge number of database reads if we hit the db directly. 
So we + // cache these values for a short time, which will already make a huge difference and ensures that + // changes take effect quickly. + static CACHE: LazyLock>> = LazyLock::new(|| { Cache::builder() .max_capacity(1) - .time_to_live(BLOCKLIST_CACHE_DURATION) + .time_to_live(CACHE_DURATION_FEDERATION) .build() }); Ok( @@ -146,8 +162,11 @@ pub(crate) async fn check_apub_id_valid_with_strictness( apub_id: &Url, is_strict: bool, context: &LemmyContext, -) -> Result<(), LemmyError> { - let domain = apub_id.domain().expect("apud id has domain").to_string(); +) -> LemmyResult<()> { + let domain = apub_id + .domain() + .ok_or(FederationError::UrlWithoutDomain)? + .to_string(); let local_instance = context .settings() .get_hostname_without_port() @@ -174,9 +193,12 @@ pub(crate) async fn check_apub_id_valid_with_strictness( .expect("local hostname is valid"); allowed_and_local.push(local_instance); - let domain = apub_id.domain().expect("apud id has domain").to_string(); + let domain = apub_id + .domain() + .ok_or(FederationError::UrlWithoutDomain)? + .to_string(); if !allowed_and_local.contains(&domain) { - Err(LemmyErrorType::FederationDisabledByStrictAllowList)? + Err(FederationError::FederationDisabledByStrictAllowList)? } } Ok(()) @@ -184,13 +206,10 @@ pub(crate) async fn check_apub_id_valid_with_strictness( /// Store received activities in the database. /// -/// This ensures that the same activity doesnt get received and processed more than once, which +/// This ensures that the same activity doesn't get received and processed more than once, which /// would be a waste of resources. #[tracing::instrument(skip(data))] -async fn insert_received_activity( - ap_id: &Url, - data: &Data, -) -> Result<(), LemmyError> { +async fn insert_received_activity(ap_id: &Url, data: &Data) -> LemmyResult<()> { ReceivedActivity::create(&mut data.pool(), &ap_id.clone().into()).await?; Ok(()) } diff --git a/crates/apub/src/mentions.rs b/crates/apub/src/mentions.rs index b088dfd03..cb46be52a 100644 --- a/crates/apub/src/mentions.rs +++ b/crates/apub/src/mentions.rs @@ -11,7 +11,10 @@ use lemmy_db_schema::{ traits::Crud, utils::DbPool, }; -use lemmy_utils::{error::LemmyError, utils::mention::scrape_text_for_mentions}; +use lemmy_utils::{ + error::{FederationError, LemmyResult}, + utils::mention::scrape_text_for_mentions, +}; use serde::{Deserialize, Serialize}; use serde_json::Value; use url::Url; @@ -44,7 +47,7 @@ pub async fn collect_non_local_mentions( comment: &ApubComment, community_id: ObjectId, context: &Data, -) -> Result { +) -> LemmyResult { let parent_creator = get_comment_parent_creator(&mut context.pool(), comment).await?; let mut addressed_ccs: Vec = vec![community_id.into(), parent_creator.id()]; @@ -54,7 +57,10 @@ pub async fn collect_non_local_mentions( name: Some(format!( "@{}@{}", &parent_creator.name, - &parent_creator.id().domain().expect("has domain") + &parent_creator + .id() + .domain() + .ok_or(FederationError::UrlWithoutDomain)? 
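The short-lived blocklist/allowlist cache above follows a standard moka pattern. A self-contained sketch with a made-up String value type and a fixed 60-second TTL (the real code uses CACHE_DURATION_FEDERATION and a LocalSiteData struct):

use std::{sync::LazyLock, time::Duration};
use moka::future::Cache;

/// Single-entry cache so repeated federation checks don't hit the database each time.
static SITE_DATA_CACHE: LazyLock<Cache<(), String>> = LazyLock::new(|| {
  Cache::builder()
    .max_capacity(1)
    .time_to_live(Duration::from_secs(60))
    .build()
});

async fn cached_site_data() -> String {
  SITE_DATA_CACHE
    .get_with((), async {
      // stand-in for the real database read of blocklist/allowlist entries
      "allowlist,blocklist".to_string()
    })
    .await
}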
)), kind: MentionType::Mention, }; @@ -94,7 +100,7 @@ pub async fn collect_non_local_mentions( async fn get_comment_parent_creator( pool: &mut DbPool<'_>, comment: &Comment, -) -> Result { +) -> LemmyResult { let parent_creator_id = if let Some(parent_comment_id) = comment.parent_comment_id() { let parent_comment = Comment::read(pool, parent_comment_id).await?; parent_comment.creator_id diff --git a/crates/apub/src/objects/comment.rs b/crates/apub/src/objects/comment.rs index ecee70724..6e13afc91 100644 --- a/crates/apub/src/objects/comment.rs +++ b/crates/apub/src/objects/comment.rs @@ -1,8 +1,9 @@ use crate::{ activities::{verify_is_public, verify_person_in_community}, check_apub_id_valid_with_strictness, + fetcher::markdown_links::markdown_rewrite_remote_links, mentions::collect_non_local_mentions, - objects::{read_from_string_or_source, verify_is_remote_object}, + objects::{append_attachments_to_comment, read_from_string_or_source, verify_is_remote_object}, protocol::{ objects::{note::Note, LanguageTag}, InCommunity, @@ -16,7 +17,10 @@ use activitypub_federation::{ traits::Object, }; use chrono::{DateTime, Utc}; -use lemmy_api_common::{context::LemmyContext, utils::local_site_opt_to_slur_regex}; +use lemmy_api_common::{ + context::LemmyContext, + utils::{get_url_blocklist, is_mod_or_admin, local_site_opt_to_slur_regex, process_markdown}, +}; use lemmy_db_schema::{ source::{ comment::{Comment, CommentInsertForm, CommentUpdateForm}, @@ -26,10 +30,11 @@ use lemmy_db_schema::{ post::Post, }, traits::Crud, + utils::naive_now, }; use lemmy_utils::{ - error::{LemmyError, LemmyErrorType}, - utils::{markdown::markdown_to_html, slurs::remove_slurs}, + error::{FederationError, LemmyError, LemmyResult}, + utils::markdown::markdown_to_html, }; use std::ops::Deref; use url::Url; @@ -64,7 +69,7 @@ impl Object for ApubComment { async fn read_from_id( object_id: Url, context: &Data, - ) -> Result, LemmyError> { + ) -> LemmyResult> { Ok( Comment::read_from_apub_id(&mut context.pool(), object_id) .await? @@ -73,7 +78,7 @@ impl Object for ApubComment { } #[tracing::instrument(skip_all)] - async fn delete(self, context: &Data) -> Result<(), LemmyError> { + async fn delete(self, context: &Data) -> LemmyResult<()> { if !self.deleted { let form = CommentUpdateForm { deleted: Some(true), @@ -85,7 +90,7 @@ impl Object for ApubComment { } #[tracing::instrument(skip_all)] - async fn into_json(self, context: &Data) -> Result { + async fn into_json(self, context: &Data) -> LemmyResult { let creator_id = self.creator_id; let creator = Person::read(&mut context.pool(), creator_id).await?; @@ -100,7 +105,7 @@ impl Object for ApubComment { } else { post.ap_id.into() }; - let language = LanguageTag::new_single(self.language_id, &mut context.pool()).await?; + let language = Some(LanguageTag::new_single(self.language_id, &mut context.pool()).await?); let maa = collect_non_local_mentions(&self, community.actor_id.clone().into(), context).await?; let note = Note { @@ -119,28 +124,46 @@ impl Object for ApubComment { distinguished: Some(self.distinguished), language, audience: Some(community.actor_id.into()), + attachment: vec![], }; Ok(note) } + /// Recursively fetches all parent comments. This can lead to a stack overflow so we need to + /// Box::pin all large futures on the heap. 
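The Box::pin comment above concerns async recursion; a tiny standalone illustration of why the boxing is needed (a recursive async call must go through a boxed future so the compiler can give it a finite size):

use std::{future::Future, pin::Pin};

/// Counts ancestors recursively; the recursive call is boxed because an async fn
/// cannot directly await itself without producing an infinitely-sized future.
fn count_parents(depth: u32) -> Pin<Box<dyn Future<Output = u32> + Send>> {
  Box::pin(async move {
    if depth == 0 {
      0
    } else {
      1 + count_parents(depth - 1).await
    }
  })
}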
#[tracing::instrument(skip_all)] async fn verify( note: &Note, expected_domain: &Url, context: &Data, - ) -> Result<(), LemmyError> { + ) -> LemmyResult<()> { verify_domains_match(note.id.inner(), expected_domain)?; verify_domains_match(note.attributed_to.inner(), note.id.inner())?; verify_is_public(¬e.to, ¬e.cc)?; - let community = note.community(context).await?; + let community = Box::pin(note.community(context)).await?; - check_apub_id_valid_with_strictness(note.id.inner(), community.local, context).await?; - verify_is_remote_object(note.id.inner(), context.settings())?; - verify_person_in_community(¬e.attributed_to, &community, context).await?; - let (post, _) = note.get_parents(context).await?; - if post.locked { - Err(LemmyErrorType::PostIsLocked)? + Box::pin(check_apub_id_valid_with_strictness( + note.id.inner(), + community.local, + context, + )) + .await?; + verify_is_remote_object(¬e.id, context)?; + Box::pin(verify_person_in_community( + ¬e.attributed_to, + &community, + context, + )) + .await?; + + let (post, _) = Box::pin(note.get_parents(context)).await?; + let creator = Box::pin(note.attributed_to.dereference(context)).await?; + let is_mod_or_admin = is_mod_or_admin(&mut context.pool(), &creator, community.id) + .await + .is_ok(); + if post.locked && !is_mod_or_admin { + Err(FederationError::PostIsLocked)? } else { Ok(()) } @@ -150,7 +173,7 @@ impl Object for ApubComment { /// /// If the parent community, post and comment(s) are not known locally, these are also fetched. #[tracing::instrument(skip_all)] - async fn from_json(note: Note, context: &Data) -> Result { + async fn from_json(note: Note, context: &Data) -> LemmyResult { let creator = note.attributed_to.dereference(context).await?; let (post, parent_comment) = note.get_parents(context).await?; @@ -158,9 +181,14 @@ impl Object for ApubComment { let local_site = LocalSite::read(&mut context.pool()).await.ok(); let slur_regex = &local_site_opt_to_slur_regex(&local_site); - let content = remove_slurs(&content, slur_regex); - let language_id = - LanguageTag::to_language_id_single(note.language, &mut context.pool()).await?; + let url_blocklist = get_url_blocklist(context).await?; + let content = append_attachments_to_comment(content, ¬e.attachment, context).await?; + let content = process_markdown(&content, slur_regex, &url_blocklist, context).await?; + let content = markdown_rewrite_remote_links(content, context).await; + let language_id = Some( + LanguageTag::to_language_id_single(note.language.unwrap_or_default(), &mut context.pool()) + .await?, + ); let form = CommentInsertForm { creator_id: creator.id, @@ -176,16 +204,20 @@ impl Object for ApubComment { language_id, }; let parent_comment_path = parent_comment.map(|t| t.0.path); - let comment = Comment::create(&mut context.pool(), &form, parent_comment_path.as_ref()).await?; + let timestamp: DateTime = note.updated.or(note.published).unwrap_or_else(naive_now); + let comment = Comment::insert_apub( + &mut context.pool(), + Some(timestamp), + &form, + parent_comment_path.as_ref(), + ) + .await?; Ok(comment.into()) } } #[cfg(test)] pub(crate) mod tests { - #![allow(clippy::unwrap_used)] - #![allow(clippy::indexing_slicing)] - use super::*; use crate::{ objects::{ @@ -193,53 +225,51 @@ pub(crate) mod tests { instance::ApubSite, person::{tests::parse_lemmy_person, ApubPerson}, post::ApubPost, - tests::init_context, }, protocol::tests::file_to_json_object, }; use assert_json_diff::assert_json_include; use html2md::parse_html; use lemmy_db_schema::source::site::Site; + use 
pretty_assertions::assert_eq; use serial_test::serial; async fn prepare_comment_test( url: &Url, context: &Data, - ) -> (ApubPerson, ApubCommunity, ApubPost, ApubSite) { - // use separate counter so this doesnt affect tests + ) -> LemmyResult<(ApubPerson, ApubCommunity, ApubPost, ApubSite)> { + // use separate counter so this doesn't affect tests let context2 = context.reset_request_count(); - let (person, site) = parse_lemmy_person(&context2).await; - let community = parse_lemmy_community(&context2).await; - let post_json = file_to_json_object("assets/lemmy/objects/page.json").unwrap(); - ApubPost::verify(&post_json, url, &context2).await.unwrap(); - let post = ApubPost::from_json(post_json, &context2).await.unwrap(); - (person, community, post, site) + let (person, site) = parse_lemmy_person(&context2).await?; + let community = parse_lemmy_community(&context2).await?; + let post_json = file_to_json_object("assets/lemmy/objects/page.json")?; + ApubPost::verify(&post_json, url, &context2).await?; + let post = ApubPost::from_json(post_json, &context2).await?; + Ok((person, community, post, site)) } - async fn cleanup(data: (ApubPerson, ApubCommunity, ApubPost, ApubSite), context: &LemmyContext) { - Post::delete(&mut context.pool(), data.2.id).await.unwrap(); - Community::delete(&mut context.pool(), data.1.id) - .await - .unwrap(); - Person::delete(&mut context.pool(), data.0.id) - .await - .unwrap(); - Site::delete(&mut context.pool(), data.3.id).await.unwrap(); - LocalSite::delete(&mut context.pool()).await.unwrap(); + async fn cleanup( + (person, community, post, site): (ApubPerson, ApubCommunity, ApubPost, ApubSite), + context: &LemmyContext, + ) -> LemmyResult<()> { + Post::delete(&mut context.pool(), post.id).await?; + Community::delete(&mut context.pool(), community.id).await?; + Person::delete(&mut context.pool(), person.id).await?; + Site::delete(&mut context.pool(), site.id).await?; + LocalSite::delete(&mut context.pool()).await?; + Ok(()) } #[tokio::test] #[serial] - pub(crate) async fn test_parse_lemmy_comment() { - let context = init_context().await; - let url = Url::parse("https://enterprise.lemmy.ml/comment/38741").unwrap(); - let data = prepare_comment_test(&url, &context).await; + pub(crate) async fn test_parse_lemmy_comment() -> LemmyResult<()> { + let context = LemmyContext::init_test_context().await; + let url = Url::parse("https://enterprise.lemmy.ml/comment/38741")?; + let data = prepare_comment_test(&url, &context).await?; - let json: Note = file_to_json_object("assets/lemmy/objects/note.json").unwrap(); - ApubComment::verify(&json, &url, &context).await.unwrap(); - let comment = ApubComment::from_json(json.clone(), &context) - .await - .unwrap(); + let json: Note = file_to_json_object("assets/lemmy/objects/note.json")?; + ApubComment::verify(&json, &url, &context).await?; + let comment = ApubComment::from_json(json.clone(), &context).await?; assert_eq!(comment.ap_id, url.into()); assert_eq!(comment.content.len(), 14); @@ -247,45 +277,38 @@ pub(crate) mod tests { assert_eq!(context.request_count(), 0); let comment_id = comment.id; - let to_apub = comment.into_json(&context).await.unwrap(); + let to_apub = comment.into_json(&context).await?; assert_json_include!(actual: json, expected: to_apub); - Comment::delete(&mut context.pool(), comment_id) - .await - .unwrap(); - cleanup(data, &context).await; + Comment::delete(&mut context.pool(), comment_id).await?; + cleanup(data, &context).await?; + Ok(()) } #[tokio::test] #[serial] - async fn test_parse_pleroma_comment() { - 
let context = init_context().await; - let url = Url::parse("https://enterprise.lemmy.ml/comment/38741").unwrap(); - let data = prepare_comment_test(&url, &context).await; + async fn test_parse_pleroma_comment() -> LemmyResult<()> { + let context = LemmyContext::init_test_context().await; + let url = Url::parse("https://enterprise.lemmy.ml/comment/38741")?; + let data = prepare_comment_test(&url, &context).await?; let pleroma_url = - Url::parse("https://queer.hacktivis.me/objects/8d4973f4-53de-49cd-8c27-df160e16a9c2") - .unwrap(); - let person_json = file_to_json_object("assets/pleroma/objects/person.json").unwrap(); - ApubPerson::verify(&person_json, &pleroma_url, &context) - .await - .unwrap(); - ApubPerson::from_json(person_json, &context).await.unwrap(); - let json = file_to_json_object("assets/pleroma/objects/note.json").unwrap(); - ApubComment::verify(&json, &pleroma_url, &context) - .await - .unwrap(); - let comment = ApubComment::from_json(json, &context).await.unwrap(); + Url::parse("https://queer.hacktivis.me/objects/8d4973f4-53de-49cd-8c27-df160e16a9c2")?; + let person_json = file_to_json_object("assets/pleroma/objects/person.json")?; + ApubPerson::verify(&person_json, &pleroma_url, &context).await?; + ApubPerson::from_json(person_json, &context).await?; + let json = file_to_json_object("assets/pleroma/objects/note.json")?; + ApubComment::verify(&json, &pleroma_url, &context).await?; + let comment = ApubComment::from_json(json, &context).await?; assert_eq!(comment.ap_id, pleroma_url.into()); - assert_eq!(comment.content.len(), 64); + assert_eq!(comment.content.len(), 10); assert!(!comment.local); assert_eq!(context.request_count(), 1); - Comment::delete(&mut context.pool(), comment.id) - .await - .unwrap(); - cleanup(data, &context).await; + Comment::delete(&mut context.pool(), comment.id).await?; + cleanup(data, &context).await?; + Ok(()) } #[tokio::test] diff --git a/crates/apub/src/objects/community.rs b/crates/apub/src/objects/community.rs index 69d6231c0..7ee204ac9 100644 --- a/crates/apub/src/objects/community.rs +++ b/crates/apub/src/objects/community.rs @@ -1,10 +1,11 @@ use crate::{ activities::GetActorType, check_apub_id_valid, + fetcher::markdown_links::markdown_rewrite_remote_links_opt, local_site_data_cached, - objects::instance::fetch_instance_actor_for_object, + objects::{instance::fetch_instance_actor_for_object, read_from_string_or_source_opt}, protocol::{ - objects::{group::Group, Endpoints, LanguageTag}, + objects::{group::Group, LanguageTag}, ImageObject, Source, }, @@ -12,25 +13,40 @@ use crate::{ use activitypub_federation::{ config::Data, kinds::actor::GroupType, + protocol::values::MediaTypeHtml, traits::{Actor, Object}, }; use chrono::{DateTime, Utc}; use lemmy_api_common::{ context::LemmyContext, - utils::{generate_featured_url, generate_moderators_url, generate_outbox_url}, + utils::{ + generate_featured_url, + generate_moderators_url, + generate_outbox_url, + get_url_blocklist, + local_site_opt_to_slur_regex, + process_markdown_opt, + proxy_image_link_opt_apub, + }, }; use lemmy_db_schema::{ + sensitive::SensitiveString, source::{ activity::ActorType, actor_language::CommunityLanguage, - community::{Community, CommunityUpdateForm}, + community::{Community, CommunityInsertForm, CommunityUpdateForm}, + local_site::LocalSite, }, traits::{ApubActor, Crud}, + utils::naive_now, }; use lemmy_db_views_actor::structs::CommunityFollowerView; -use lemmy_utils::{error::LemmyError, utils::markdown::markdown_to_html}; +use lemmy_utils::{ + error::{LemmyError, 
LemmyResult}, + spawn_try_task, + utils::markdown::markdown_to_html, +}; use std::ops::Deref; -use tracing::debug; use url::Url; #[derive(Clone, Debug)] @@ -63,7 +79,7 @@ impl Object for ApubCommunity { async fn read_from_id( object_id: Url, context: &Data, - ) -> Result, LemmyError> { + ) -> LemmyResult> { Ok( Community::read_from_apub_id(&mut context.pool(), &object_id.into()) .await? @@ -72,7 +88,7 @@ impl Object for ApubCommunity { } #[tracing::instrument(skip_all)] - async fn delete(self, context: &Data) -> Result<(), LemmyError> { + async fn delete(self, context: &Data) -> LemmyResult<()> { let form = CommunityUpdateForm { deleted: Some(true), ..Default::default() @@ -82,7 +98,7 @@ impl Object for ApubCommunity { } #[tracing::instrument(skip_all)] - async fn into_json(self, data: &Data) -> Result { + async fn into_json(self, data: &Data) -> LemmyResult { let community_id = self.id; let langs = CommunityLanguage::read(&mut data.pool(), community_id).await?; let language = LanguageTag::new_multiple(langs, &mut data.pool()).await?; @@ -92,18 +108,18 @@ impl Object for ApubCommunity { id: self.id().into(), preferred_username: self.name.clone(), name: Some(self.title.clone()), - summary: self.description.as_ref().map(|b| markdown_to_html(b)), - source: self.description.clone().map(Source::new), + content: self.sidebar.as_ref().map(|d| markdown_to_html(d)), + source: self.sidebar.clone().map(Source::new), + summary: self.description.clone(), + media_type: self.sidebar.as_ref().map(|_| MediaTypeHtml::Html), icon: self.icon.clone().map(ImageObject::new), image: self.banner.clone().map(ImageObject::new), sensitive: Some(self.nsfw), featured: Some(generate_featured_url(&self.actor_id)?.into()), inbox: self.inbox_url.clone().into(), outbox: generate_outbox_url(&self.actor_id)?.into(), - followers: self.followers_url.clone().into(), - endpoints: self.shared_inbox_url.clone().map(|s| Endpoints { - shared_inbox: s.into(), - }), + followers: self.followers_url.clone().map(Into::into), + endpoints: None, public_key: self.public_key(), language, published: Some(self.published), @@ -119,44 +135,79 @@ impl Object for ApubCommunity { group: &Group, expected_domain: &Url, context: &Data, - ) -> Result<(), LemmyError> { + ) -> LemmyResult<()> { group.verify(expected_domain, context).await } /// Converts a `Group` to `Community`, inserts it into the database and updates moderators. 
#[tracing::instrument(skip_all)] - async fn from_json( - group: Group, - context: &Data, - ) -> Result { + async fn from_json(group: Group, context: &Data) -> LemmyResult { let instance_id = fetch_instance_actor_for_object(&group.id, context).await?; - let form = Group::into_insert_form(group.clone(), instance_id); + let local_site = LocalSite::read(&mut context.pool()).await.ok(); + let slur_regex = &local_site_opt_to_slur_regex(&local_site); + let url_blocklist = get_url_blocklist(context).await?; + let sidebar = read_from_string_or_source_opt(&group.content, &None, &group.source); + let sidebar = process_markdown_opt(&sidebar, slur_regex, &url_blocklist, context).await?; + let sidebar = markdown_rewrite_remote_links_opt(sidebar, context).await; + let icon = proxy_image_link_opt_apub(group.icon.map(|i| i.url), context).await?; + let banner = proxy_image_link_opt_apub(group.image.map(|i| i.url), context).await?; + + let form = CommunityInsertForm { + published: group.published, + updated: group.updated, + deleted: Some(false), + nsfw: Some(group.sensitive.unwrap_or(false)), + actor_id: Some(group.id.into()), + local: Some(false), + last_refreshed_at: Some(naive_now()), + icon, + banner, + sidebar, + description: group.summary, + followers_url: group.followers.clone().map(Into::into), + inbox_url: Some( + group + .endpoints + .map(|e| e.shared_inbox) + .unwrap_or(group.inbox) + .into(), + ), + moderators_url: group.attributed_to.clone().map(Into::into), + posting_restricted_to_mods: group.posting_restricted_to_mods, + featured_url: group.featured.clone().map(Into::into), + ..CommunityInsertForm::new( + instance_id, + group.preferred_username.clone(), + group.name.unwrap_or(group.preferred_username.clone()), + group.public_key.public_key_pem, + ) + }; let languages = LanguageTag::to_language_id_multiple(group.language, &mut context.pool()).await?; - let community = Community::create(&mut context.pool(), &form).await?; + let timestamp = group.updated.or(group.published).unwrap_or_else(naive_now); + let community = Community::insert_apub(&mut context.pool(), timestamp, &form).await?; CommunityLanguage::update(&mut context.pool(), languages, community.id).await?; let community: ApubCommunity = community.into(); - // Fetching mods and outbox is not necessary for Lemmy to work, so ignore errors. Besides, - // we need to ignore these errors so that tests can work entirely offline. - let fetch_outbox = group.outbox.dereference(&community, context); - let fetch_followers = group.followers.dereference(&community, context); - - if let Some(moderators) = group.attributed_to { - let fetch_moderators = moderators.dereference(&community, context); - // Fetch mods, outbox and followers in parallel - let res = tokio::join!(fetch_outbox, fetch_moderators, fetch_followers); - res.0.map_err(|e| debug!("{}", e)).ok(); - res.1.map_err(|e| debug!("{}", e)).ok(); - res.2.map_err(|e| debug!("{}", e)).ok(); - } else { - let res = tokio::join!(fetch_outbox, fetch_followers); - res.0.map_err(|e| debug!("{}", e)).ok(); - res.1.map_err(|e| debug!("{}", e)).ok(); - } + // These collections are not necessary for Lemmy to work, so ignore errors. 
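A hedged sketch of the fire-and-forget pattern referenced by the comment above, using plain tokio::spawn in place of Lemmy's spawn_try_task helper; fetch_outbox_stub is a placeholder, and the spawn call must happen inside a Tokio runtime:

/// Placeholder for a real federation fetch of a community's outbox.
async fn fetch_outbox_stub() -> Result<(), String> {
  Ok(())
}

/// Non-essential collection fetches run detached; failures are ignored so they can
/// never fail the request that triggered them.
fn spawn_optional_fetches() {
  tokio::spawn(async {
    fetch_outbox_stub().await.ok();
  });
}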
+ let community_ = community.clone(); + let context_ = context.reset_request_count(); + spawn_try_task(async move { + group.outbox.dereference(&community_, &context_).await.ok(); + if let Some(followers) = group.followers { + followers.dereference(&community_, &context_).await.ok(); + } + if let Some(featured) = group.featured { + featured.dereference(&community_, &context_).await.ok(); + } + if let Some(moderators) = group.attributed_to { + moderators.dereference(&community_, &context_).await.ok(); + } + Ok(()) + }); Ok(community) } @@ -172,7 +223,7 @@ impl Actor for ApubCommunity { } fn private_key_pem(&self) -> Option { - self.private_key.clone() + self.private_key.clone().map(SensitiveString::into_inner) } fn inbox(&self) -> Url { @@ -180,7 +231,7 @@ impl Actor for ApubCommunity { } fn shared_inbox(&self) -> Option { - self.shared_inbox_url.clone().map(Into::into) + None } } @@ -193,10 +244,7 @@ impl GetActorType for ApubCommunity { impl ApubCommunity { /// For a given community, returns the inboxes of all followers. #[tracing::instrument(skip_all)] - pub(crate) async fn get_follower_inboxes( - &self, - context: &LemmyContext, - ) -> Result, LemmyError> { + pub(crate) async fn get_follower_inboxes(&self, context: &LemmyContext) -> LemmyResult> { let id = self.id; let local_site_data = local_site_data_cached(&mut context.pool()).await?; @@ -216,51 +264,57 @@ impl ApubCommunity { #[cfg(test)] pub(crate) mod tests { - #![allow(clippy::unwrap_used)] - #![allow(clippy::indexing_slicing)] - use super::*; use crate::{ - objects::{instance::tests::parse_lemmy_instance, tests::init_context}, + objects::instance::tests::parse_lemmy_instance, protocol::tests::file_to_json_object, }; use activitypub_federation::fetch::collection_id::CollectionId; - use lemmy_db_schema::{source::site::Site, traits::Crud}; + use lemmy_db_schema::source::site::Site; + use pretty_assertions::assert_eq; use serial_test::serial; - pub(crate) async fn parse_lemmy_community(context: &Data) -> ApubCommunity { - // use separate counter so this doesnt affect tests + pub(crate) async fn parse_lemmy_community( + context: &Data, + ) -> LemmyResult { + // use separate counter so this doesn't affect tests let context2 = context.reset_request_count(); - let mut json: Group = file_to_json_object("assets/lemmy/objects/group.json").unwrap(); + let mut json: Group = file_to_json_object("assets/lemmy/objects/group.json")?; // change these links so they dont fetch over the network json.attributed_to = None; - json.outbox = - CollectionId::parse("https://enterprise.lemmy.ml/c/tenforward/not_outbox").unwrap(); - json.followers = - CollectionId::parse("https://enterprise.lemmy.ml/c/tenforward/not_followers").unwrap(); + json.outbox = CollectionId::parse("https://enterprise.lemmy.ml/c/tenforward/not_outbox")?; + json.followers = Some(CollectionId::parse( + "https://enterprise.lemmy.ml/c/tenforward/not_followers", + )?); - let url = Url::parse("https://enterprise.lemmy.ml/c/tenforward").unwrap(); - ApubCommunity::verify(&json, &url, &context2).await.unwrap(); - let community = ApubCommunity::from_json(json, &context2).await.unwrap(); - // this makes requests to the (intentionally broken) outbox and followers collections - assert_eq!(context2.request_count(), 2); - community + let url = Url::parse("https://enterprise.lemmy.ml/c/tenforward")?; + ApubCommunity::verify(&json, &url, &context2).await?; + let community = ApubCommunity::from_json(json, &context2).await?; + Ok(community) } #[tokio::test] #[serial] - async fn 
test_parse_lemmy_community() { - let context = init_context().await; - let site = parse_lemmy_instance(&context).await; - let community = parse_lemmy_community(&context).await; + async fn test_parse_lemmy_community() -> LemmyResult<()> { + let context = LemmyContext::init_test_context().await; + let site = parse_lemmy_instance(&context).await?; + let community = parse_lemmy_community(&context).await?; assert_eq!(community.title, "Ten Forward"); assert!(!community.local); - assert_eq!(community.description.as_ref().unwrap().len(), 132); - Community::delete(&mut context.pool(), community.id) - .await - .unwrap(); - Site::delete(&mut context.pool(), site.id).await.unwrap(); + // Test the sidebar and description + assert_eq!( + community.sidebar.as_ref().map(std::string::String::len), + Some(63) + ); + assert_eq!( + community.description, + Some("A description of ten forward.".into()) + ); + + Community::delete(&mut context.pool(), community.id).await?; + Site::delete(&mut context.pool(), site.id).await?; + Ok(()) } } diff --git a/crates/apub/src/objects/instance.rs b/crates/apub/src/objects/instance.rs index 3044d77f2..a123c85ba 100644 --- a/crates/apub/src/objects/instance.rs +++ b/crates/apub/src/objects/instance.rs @@ -1,6 +1,8 @@ +use super::verify_is_remote_object; use crate::{ activities::GetActorType, check_apub_id_valid_with_strictness, + fetcher::markdown_links::markdown_rewrite_remote_links_opt, local_site_data_cached, objects::read_from_string_or_source_opt, protocol::{ @@ -17,20 +19,30 @@ use activitypub_federation::{ traits::{Actor, Object}, }; use chrono::{DateTime, Utc}; -use lemmy_api_common::{context::LemmyContext, utils::local_site_opt_to_slur_regex}; +use lemmy_api_common::{ + context::LemmyContext, + utils::{ + get_url_blocklist, + local_site_opt_to_slur_regex, + process_markdown_opt, + proxy_image_link_opt_apub, + }, +}; use lemmy_db_schema::{ newtypes::InstanceId, + sensitive::SensitiveString, source::{ activity::ActorType, actor_language::SiteLanguage, instance::Instance as DbInstance, + local_site::LocalSite, site::{Site, SiteInsertForm}, }, traits::Crud, utils::naive_now, }; use lemmy_utils::{ - error::LemmyError, + error::{FederationError, LemmyError, LemmyResult}, utils::{ markdown::markdown_to_html, slurs::{check_slurs, check_slurs_opt}, @@ -67,10 +79,7 @@ impl Object for ApubSite { } #[tracing::instrument(skip_all)] - async fn read_from_id( - object_id: Url, - data: &Data, - ) -> Result, LemmyError> { + async fn read_from_id(object_id: Url, data: &Data) -> LemmyResult> { Ok( Site::read_from_apub_id(&mut data.pool(), &object_id.into()) .await? 
@@ -78,12 +87,12 @@ impl Object for ApubSite { ) } - async fn delete(self, _data: &Data) -> Result<(), LemmyError> { - unimplemented!() + async fn delete(self, _data: &Data) -> LemmyResult<()> { + Err(FederationError::CantDeleteSite.into()) } #[tracing::instrument(skip_all)] - async fn into_json(self, data: &Data) -> Result { + async fn into_json(self, data: &Data) -> LemmyResult { let site_id = self.id; let langs = SiteLanguage::read(&mut data.pool(), site_id).await?; let language = LanguageTag::new_multiple(langs, &mut data.pool()).await?; @@ -92,6 +101,7 @@ impl Object for ApubSite { kind: ApplicationType::Application, id: self.id().into(), name: self.name.clone(), + preferred_username: Some(data.domain().to_string()), content: self.sidebar.as_ref().map(|d| markdown_to_html(d)), source: self.sidebar.clone().map(Source::new), summary: self.description.clone(), @@ -99,9 +109,10 @@ impl Object for ApubSite { icon: self.icon.clone().map(ImageObject::new), image: self.banner.clone().map(ImageObject::new), inbox: self.inbox_url.clone().into(), - outbox: Url::parse(&format!("{}/site_outbox", self.actor_id))?, + outbox: Url::parse(&format!("{}site_outbox", self.actor_id))?, public_key: self.public_key(), language, + content_warning: self.content_warning.clone(), published: self.published, updated: self.updated, }; @@ -113,9 +124,10 @@ impl Object for ApubSite { apub: &Self::Kind, expected_domain: &Url, data: &Data, - ) -> Result<(), LemmyError> { + ) -> LemmyResult<()> { check_apub_id_valid_with_strictness(apub.id.inner(), true, data).await?; verify_domains_match(expected_domain, apub.id.inner())?; + verify_is_remote_object(&apub.id, data)?; let local_site_data = local_site_data_cached(&mut data.pool()).await?; let slur_regex = &local_site_opt_to_slur_regex(&local_site_data.local_site); @@ -126,18 +138,29 @@ impl Object for ApubSite { } #[tracing::instrument(skip_all)] - async fn from_json(apub: Self::Kind, data: &Data) -> Result { - let domain = apub.id.inner().domain().expect("group id has domain"); - let instance = DbInstance::read_or_create(&mut data.pool(), domain.to_string()).await?; + async fn from_json(apub: Self::Kind, context: &Data) -> LemmyResult { + let domain = apub + .id + .inner() + .domain() + .ok_or(FederationError::UrlWithoutDomain)?; + let instance = DbInstance::read_or_create(&mut context.pool(), domain.to_string()).await?; + let local_site = LocalSite::read(&mut context.pool()).await.ok(); + let slur_regex = &local_site_opt_to_slur_regex(&local_site); + let url_blocklist = get_url_blocklist(context).await?; let sidebar = read_from_string_or_source_opt(&apub.content, &None, &apub.source); + let sidebar = process_markdown_opt(&sidebar, slur_regex, &url_blocklist, context).await?; + let sidebar = markdown_rewrite_remote_links_opt(sidebar, context).await; + let icon = proxy_image_link_opt_apub(apub.icon.map(|i| i.url), context).await?; + let banner = proxy_image_link_opt_apub(apub.image.map(|i| i.url), context).await?; let site_form = SiteInsertForm { name: apub.name.clone(), sidebar, updated: apub.updated, - icon: apub.icon.clone().map(|i| i.url.into()), - banner: apub.image.clone().map(|i| i.url.into()), + icon, + banner, description: apub.summary, actor_id: Some(apub.id.clone().into()), last_refreshed_at: Some(naive_now()), @@ -145,11 +168,13 @@ impl Object for ApubSite { public_key: Some(apub.public_key.public_key_pem.clone()), private_key: None, instance_id: instance.id, + content_warning: apub.content_warning, }; - let languages = 
LanguageTag::to_language_id_multiple(apub.language, &mut data.pool()).await?; + let languages = + LanguageTag::to_language_id_multiple(apub.language, &mut context.pool()).await?; - let site = Site::create(&mut data.pool(), &site_form).await?; - SiteLanguage::update(&mut data.pool(), languages, &site).await?; + let site = Site::create(&mut context.pool(), &site_form).await?; + SiteLanguage::update(&mut context.pool(), languages, &site).await?; Ok(site.into()) } } @@ -164,7 +189,7 @@ impl Actor for ApubSite { } fn private_key_pem(&self) -> Option { - self.private_key.clone() + self.private_key.clone().map(SensitiveString::into_inner) } fn inbox(&self) -> Url { @@ -181,7 +206,7 @@ impl GetActorType for ApubSite { pub(in crate::objects) async fn fetch_instance_actor_for_object + Clone>( object_id: &T, context: &Data, -) -> Result { +) -> LemmyResult { let object_id: Url = object_id.clone().into(); let instance_id = Site::instance_actor_id_from_url(object_id); let site = ObjectId::::from(instance_id.clone()) @@ -192,7 +217,9 @@ pub(in crate::objects) async fn fetch_instance_actor_for_object + C Err(e) => { // Failed to fetch instance actor, its probably not a lemmy instance debug!("Failed to dereference site for {}: {}", &instance_id, e); - let domain = instance_id.domain().expect("has domain"); + let domain = instance_id + .domain() + .ok_or(FederationError::UrlWithoutDomain)?; Ok( DbInstance::read_or_create(&mut context.pool(), domain.to_string()) .await? @@ -204,32 +231,33 @@ pub(in crate::objects) async fn fetch_instance_actor_for_object + C #[cfg(test)] pub(crate) mod tests { - #![allow(clippy::unwrap_used)] - #![allow(clippy::indexing_slicing)] - use super::*; - use crate::{objects::tests::init_context, protocol::tests::file_to_json_object}; - use lemmy_db_schema::traits::Crud; + use crate::protocol::tests::file_to_json_object; + use pretty_assertions::assert_eq; use serial_test::serial; - pub(crate) async fn parse_lemmy_instance(context: &Data) -> ApubSite { - let json: Instance = file_to_json_object("assets/lemmy/objects/instance.json").unwrap(); - let id = Url::parse("https://enterprise.lemmy.ml/").unwrap(); - ApubSite::verify(&json, &id, context).await.unwrap(); - let site = ApubSite::from_json(json, context).await.unwrap(); + pub(crate) async fn parse_lemmy_instance(context: &Data) -> LemmyResult { + let json: Instance = file_to_json_object("assets/lemmy/objects/instance.json")?; + let id = Url::parse("https://enterprise.lemmy.ml/")?; + ApubSite::verify(&json, &id, context).await?; + let site = ApubSite::from_json(json, context).await?; assert_eq!(context.request_count(), 0); - site + Ok(site) } #[tokio::test] #[serial] - async fn test_parse_lemmy_instance() { - let context = init_context().await; - let site = parse_lemmy_instance(&context).await; + async fn test_parse_lemmy_instance() -> LemmyResult<()> { + let context = LemmyContext::init_test_context().await; + let site = parse_lemmy_instance(&context).await?; assert_eq!(site.name, "Enterprise"); - assert_eq!(site.description.as_ref().unwrap().len(), 15); + assert_eq!( + site.description.as_ref().map(std::string::String::len), + Some(15) + ); - Site::delete(&mut context.pool(), site.id).await.unwrap(); + Site::delete(&mut context.pool(), site.id).await?; + Ok(()) } } diff --git a/crates/apub/src/objects/mod.rs b/crates/apub/src/objects/mod.rs index 116c7f4fb..f837f7ad3 100644 --- a/crates/apub/src/objects/mod.rs +++ b/crates/apub/src/objects/mod.rs @@ -1,9 +1,16 @@ -use crate::protocol::Source; -use 
activitypub_federation::protocol::values::MediaTypeMarkdownOrHtml; +use crate::protocol::{objects::page::Attachment, Source}; +use activitypub_federation::{ + config::Data, + fetch::object_id::ObjectId, + protocol::values::MediaTypeMarkdownOrHtml, + traits::Object, +}; use anyhow::anyhow; use html2md::parse_html; -use lemmy_utils::{error::LemmyError, settings::structs::Settings}; -use url::Url; +use lemmy_api_common::context::LemmyContext; +use lemmy_utils::error::LemmyResult; +use serde::Deserialize; +use std::fmt::Debug; pub mod comment; pub mod community; @@ -39,70 +46,38 @@ pub(crate) fn read_from_string_or_source_opt( .map(|content| read_from_string_or_source(content, media_type, source)) } +pub(crate) async fn append_attachments_to_comment( + content: String, + attachments: &[Attachment], + context: &Data, +) -> LemmyResult { + let mut content = content; + // Don't modify comments with no attachments + if !attachments.is_empty() { + content += "\n"; + for attachment in attachments { + content = content + "\n" + &attachment.as_markdown(context).await?; + } + } + + Ok(content) +} + /// When for example a Post is made in a remote community, the community will send it back, /// wrapped in Announce. If we simply receive this like any other federated object, overwrite the /// existing, local Post. In particular, it will set the field local = false, so that the object /// can't be fetched from the Activitypub HTTP endpoint anymore (which only serves local objects). -pub(crate) fn verify_is_remote_object(id: &Url, settings: &Settings) -> Result<(), LemmyError> { - let local_domain = settings.get_hostname_without_port()?; - if id.domain() == Some(&local_domain) { +pub(crate) fn verify_is_remote_object( + id: &ObjectId, + context: &Data, +) -> LemmyResult<()> +where + T: Object + Debug + Send + 'static, + for<'de2> ::Kind: Deserialize<'de2>, +{ + if id.is_local(context) { Err(anyhow!("cant accept local object from remote instance").into()) } else { Ok(()) } } - -#[cfg(test)] -pub(crate) mod tests { - #![allow(clippy::unwrap_used)] - #![allow(clippy::indexing_slicing)] - - use activitypub_federation::config::{Data, FederationConfig}; - use anyhow::anyhow; - use lemmy_api_common::{context::LemmyContext, request::client_builder}; - use lemmy_db_schema::{source::secret::Secret, utils::build_db_pool_for_tests}; - use lemmy_utils::{rate_limit::RateLimitCell, settings::SETTINGS}; - use reqwest::{Request, Response}; - use reqwest_middleware::{ClientBuilder, Middleware, Next}; - use task_local_extensions::Extensions; - - struct BlockedMiddleware; - - /// A reqwest middleware which blocks all requests - #[async_trait::async_trait] - impl Middleware for BlockedMiddleware { - async fn handle( - &self, - _req: Request, - _extensions: &mut Extensions, - _next: Next<'_>, - ) -> reqwest_middleware::Result { - Err(anyhow!("Network requests not allowed").into()) - } - } - - // TODO: would be nice if we didnt have to use a full context for tests. 
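`verify_is_remote_object` now takes an `ObjectId<T>` and asks the federation config whether the ID is local, where the removed version compared the URL's domain against the configured hostname. A hedged sketch of that domain check using only the `url` crate (the helper name and error handling here are placeholders, not Lemmy's API):

```rust
use url::Url;

/// Reject objects that claim an ID on our own domain but arrive from a remote instance;
/// accepting them would overwrite the local copy and mark it `local = false`.
fn verify_is_remote_object(id: &Url, local_domain: &str) -> Result<(), String> {
    if id.domain() == Some(local_domain) {
        Err("can't accept local object from remote instance".into())
    } else {
        Ok(())
    }
}

fn main() {
    let local = "enterprise.lemmy.ml";
    let remote_id = Url::parse("https://queer.hacktivis.me/objects/2").unwrap();
    let local_id = Url::parse("https://enterprise.lemmy.ml/post/55143").unwrap();

    assert!(verify_is_remote_object(&remote_id, local).is_ok());
    assert!(verify_is_remote_object(&local_id, local).is_err());
    println!("remote accepted, local rejected");
}
```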
- pub(crate) async fn init_context() -> Data { - // call this to run migrations - let pool = build_db_pool_for_tests().await; - - let client = client_builder(&SETTINGS).build().unwrap(); - - let client = ClientBuilder::new(client).with(BlockedMiddleware).build(); - let secret = Secret { - id: 0, - jwt_secret: String::new(), - }; - - let rate_limit_cell = RateLimitCell::with_test_config(); - - let context = LemmyContext::create(pool, client, secret, rate_limit_cell.clone()); - let config = FederationConfig::builder() - .domain("example.com") - .app_data(context) - .build() - .await - .unwrap(); - config.to_request_data() - } -} diff --git a/crates/apub/src/objects/person.rs b/crates/apub/src/objects/person.rs index 1102567d0..737579662 100644 --- a/crates/apub/src/objects/person.rs +++ b/crates/apub/src/objects/person.rs @@ -1,13 +1,12 @@ +use super::verify_is_remote_object; use crate::{ activities::GetActorType, check_apub_id_valid_with_strictness, + fetcher::markdown_links::markdown_rewrite_remote_links_opt, local_site_data_cached, objects::{instance::fetch_instance_actor_for_object, read_from_string_or_source_opt}, protocol::{ - objects::{ - person::{Person, UserTypes}, - Endpoints, - }, + objects::person::{Person, UserTypes}, ImageObject, Source, }, @@ -20,18 +19,26 @@ use activitypub_federation::{ use chrono::{DateTime, Utc}; use lemmy_api_common::{ context::LemmyContext, - utils::{generate_outbox_url, local_site_opt_to_slur_regex}, + utils::{ + generate_outbox_url, + get_url_blocklist, + local_site_opt_to_slur_regex, + process_markdown_opt, + proxy_image_link_opt_apub, + }, }; use lemmy_db_schema::{ + sensitive::SensitiveString, source::{ activity::ActorType, + local_site::LocalSite, person::{Person as DbPerson, PersonInsertForm, PersonUpdateForm}, }, traits::{ApubActor, Crud}, utils::naive_now, }; use lemmy_utils::{ - error::LemmyError, + error::{LemmyError, LemmyResult}, utils::{ markdown::markdown_to_html, slurs::{check_slurs, check_slurs_opt}, @@ -70,7 +77,7 @@ impl Object for ApubPerson { async fn read_from_id( object_id: Url, context: &Data, - ) -> Result, LemmyError> { + ) -> LemmyResult> { Ok( DbPerson::read_from_apub_id(&mut context.pool(), &object_id.into()) .await? 
@@ -79,7 +86,7 @@ impl Object for ApubPerson { } #[tracing::instrument(skip_all)] - async fn delete(self, context: &Data) -> Result<(), LemmyError> { + async fn delete(self, context: &Data) -> LemmyResult<()> { let form = PersonUpdateForm { deleted: Some(true), ..Default::default() @@ -89,7 +96,7 @@ impl Object for ApubPerson { } #[tracing::instrument(skip_all)] - async fn into_json(self, _context: &Data) -> Result { + async fn into_json(self, _context: &Data) -> LemmyResult { let kind = if self.bot_account { UserTypes::Service } else { @@ -108,9 +115,7 @@ impl Object for ApubPerson { matrix_user_id: self.matrix_user_id.clone(), published: Some(self.published), outbox: generate_outbox_url(&self.actor_id)?.into(), - endpoints: self.shared_inbox_url.clone().map(|s| Endpoints { - shared_inbox: s.into(), - }), + endpoints: None, public_key: self.public_key(), updated: self.updated, inbox: self.inbox_url.clone().into(), @@ -123,13 +128,14 @@ impl Object for ApubPerson { person: &Person, expected_domain: &Url, context: &Data, - ) -> Result<(), LemmyError> { + ) -> LemmyResult<()> { let local_site_data = local_site_data_cached(&mut context.pool()).await?; let slur_regex = &local_site_opt_to_slur_regex(&local_site_data.local_site); check_slurs(&person.preferred_username, slur_regex)?; check_slurs_opt(&person.name, slur_regex)?; verify_domains_match(person.id.inner(), expected_domain)?; + verify_is_remote_object(&person.id, context)?; check_apub_id_valid_with_strictness(person.id.inner(), false, context).await?; let bio = read_from_string_or_source_opt(&person.summary, &None, &person.source); @@ -138,13 +144,17 @@ impl Object for ApubPerson { } #[tracing::instrument(skip_all)] - async fn from_json( - person: Person, - context: &Data, - ) -> Result { + async fn from_json(person: Person, context: &Data) -> LemmyResult { let instance_id = fetch_instance_actor_for_object(&person.id, context).await?; + let local_site = LocalSite::read(&mut context.pool()).await.ok(); + let slur_regex = &local_site_opt_to_slur_regex(&local_site); + let url_blocklist = get_url_blocklist(context).await?; let bio = read_from_string_or_source_opt(&person.summary, &None, &person.source); + let bio = process_markdown_opt(&bio, slur_regex, &url_blocklist, context).await?; + let bio = markdown_rewrite_remote_links_opt(bio, context).await; + let avatar = proxy_image_link_opt_apub(person.icon.map(|i| i.url), context).await?; + let banner = proxy_image_link_opt_apub(person.image.map(|i| i.url), context).await?; // Some Mastodon users have `name: ""` (empty string), need to convert that to `None` // https://github.com/mastodon/mastodon/issues/25233 @@ -156,8 +166,8 @@ impl Object for ApubPerson { banned: None, ban_expires: None, deleted: Some(false), - avatar: person.icon.map(|i| i.url.into()), - banner: person.image.map(|i| i.url.into()), + avatar, + banner, published: person.published.map(Into::into), updated: person.updated.map(Into::into), actor_id: Some(person.id.into()), @@ -167,8 +177,13 @@ impl Object for ApubPerson { private_key: None, public_key: person.public_key.public_key_pem, last_refreshed_at: Some(naive_now()), - inbox_url: Some(person.inbox.into()), - shared_inbox_url: person.endpoints.map(|e| e.shared_inbox.into()), + inbox_url: Some( + person + .endpoints + .map(|e| e.shared_inbox) + .unwrap_or(person.inbox) + .into(), + ), matrix_user_id: person.matrix_user_id, instance_id, }; @@ -188,7 +203,7 @@ impl Actor for ApubPerson { } fn private_key_pem(&self) -> Option { - self.private_key.clone() + 
self.private_key.clone().map(SensitiveString::into_inner) } fn inbox(&self) -> Url { @@ -196,7 +211,7 @@ impl Actor for ApubPerson { } fn shared_inbox(&self) -> Option { - self.shared_inbox_url.clone().map(Into::into) + None } } @@ -208,73 +223,74 @@ impl GetActorType for ApubPerson { #[cfg(test)] pub(crate) mod tests { - #![allow(clippy::unwrap_used)] - #![allow(clippy::indexing_slicing)] - use super::*; use crate::{ - objects::{ - instance::{tests::parse_lemmy_instance, ApubSite}, - tests::init_context, - }, + objects::instance::{tests::parse_lemmy_instance, ApubSite}, protocol::{objects::instance::Instance, tests::file_to_json_object}, }; use activitypub_federation::fetch::object_id::ObjectId; - use lemmy_db_schema::{source::site::Site, traits::Crud}; + use lemmy_db_schema::source::site::Site; + use pretty_assertions::assert_eq; use serial_test::serial; - pub(crate) async fn parse_lemmy_person(context: &Data) -> (ApubPerson, ApubSite) { - let site = parse_lemmy_instance(context).await; - let json = file_to_json_object("assets/lemmy/objects/person.json").unwrap(); - let url = Url::parse("https://enterprise.lemmy.ml/u/picard").unwrap(); - ApubPerson::verify(&json, &url, context).await.unwrap(); - let person = ApubPerson::from_json(json, context).await.unwrap(); + pub(crate) async fn parse_lemmy_person( + context: &Data, + ) -> LemmyResult<(ApubPerson, ApubSite)> { + let site = parse_lemmy_instance(context).await?; + let json = file_to_json_object("assets/lemmy/objects/person.json")?; + let url = Url::parse("https://enterprise.lemmy.ml/u/picard")?; + ApubPerson::verify(&json, &url, context).await?; + let person = ApubPerson::from_json(json, context).await?; assert_eq!(context.request_count(), 0); - (person, site) + Ok((person, site)) } #[tokio::test] #[serial] - async fn test_parse_lemmy_person() { - let context = init_context().await; - let (person, site) = parse_lemmy_person(&context).await; + async fn test_parse_lemmy_person() -> LemmyResult<()> { + let context = LemmyContext::init_test_context().await; + let (person, site) = parse_lemmy_person(&context).await?; assert_eq!(person.display_name, Some("Jean-Luc Picard".to_string())); assert!(!person.local); - assert_eq!(person.bio.as_ref().unwrap().len(), 39); + assert_eq!(person.bio.as_ref().map(std::string::String::len), Some(39)); - cleanup((person, site), &context).await; + cleanup((person, site), &context).await?; + Ok(()) } #[tokio::test] #[serial] - async fn test_parse_pleroma_person() { - let context = init_context().await; + async fn test_parse_pleroma_person() -> LemmyResult<()> { + let context = LemmyContext::init_test_context().await; // create and parse a fake pleroma instance actor, to avoid network request during test - let mut json: Instance = file_to_json_object("assets/lemmy/objects/instance.json").unwrap(); - json.id = ObjectId::parse("https://queer.hacktivis.me/").unwrap(); - let url = Url::parse("https://queer.hacktivis.me/users/lanodan").unwrap(); - ApubSite::verify(&json, &url, &context).await.unwrap(); - let site = ApubSite::from_json(json, &context).await.unwrap(); + let mut json: Instance = file_to_json_object("assets/lemmy/objects/instance.json")?; + json.id = ObjectId::parse("https://queer.hacktivis.me/")?; + let url = Url::parse("https://queer.hacktivis.me/users/lanodan")?; + ApubSite::verify(&json, &url, &context).await?; + let site = ApubSite::from_json(json, &context).await?; - let json = file_to_json_object("assets/pleroma/objects/person.json").unwrap(); - ApubPerson::verify(&json, &url, 
&context).await.unwrap(); - let person = ApubPerson::from_json(json, &context).await.unwrap(); + let json = file_to_json_object("assets/pleroma/objects/person.json")?; + ApubPerson::verify(&json, &url, &context).await?; + let person = ApubPerson::from_json(json, &context).await?; assert_eq!(person.actor_id, url.into()); assert_eq!(person.name, "lanodan"); assert!(!person.local); assert_eq!(context.request_count(), 0); - assert_eq!(person.bio.as_ref().unwrap().len(), 873); + assert_eq!(person.bio.as_ref().map(std::string::String::len), Some(812)); - cleanup((person, site), &context).await; + cleanup((person, site), &context).await?; + Ok(()) } - async fn cleanup(data: (ApubPerson, ApubSite), context: &LemmyContext) { - DbPerson::delete(&mut context.pool(), data.0.id) - .await - .unwrap(); - Site::delete(&mut context.pool(), data.1.id).await.unwrap(); + async fn cleanup( + (person, site): (ApubPerson, ApubSite), + context: &LemmyContext, + ) -> LemmyResult<()> { + DbPerson::delete(&mut context.pool(), person.id).await?; + Site::delete(&mut context.pool(), site.id).await?; + Ok(()) } } diff --git a/crates/apub/src/objects/post.rs b/crates/apub/src/objects/post.rs index a86d4342f..ee88cf3ec 100644 --- a/crates/apub/src/objects/post.rs +++ b/crates/apub/src/objects/post.rs @@ -1,11 +1,12 @@ use crate::{ activities::{verify_is_public, verify_person_in_community}, check_apub_id_valid_with_strictness, + fetcher::markdown_links::{markdown_rewrite_remote_links_opt, to_local_url}, local_site_data_cached, objects::{read_from_string_or_source_opt, verify_is_remote_object}, protocol::{ objects::{ - page::{Attachment, AttributedTo, Page, PageType}, + page::{Attachment, AttributedTo, Hashtag, HashtagType, Page, PageType}, LanguageTag, }, ImageObject, @@ -24,26 +25,27 @@ use chrono::{DateTime, Utc}; use html2text::{from_read_with_decorator, render::text_renderer::TrivialDecorator}; use lemmy_api_common::{ context::LemmyContext, - request::fetch_site_data, - utils::{is_mod_or_admin, local_site_opt_to_sensitive, local_site_opt_to_slur_regex}, + request::generate_post_link_metadata, + utils::{get_url_blocklist, local_site_opt_to_slur_regex, process_markdown_opt}, }; use lemmy_db_schema::{ - self, source::{ community::Community, local_site::LocalSite, - moderator::{ModLockPost, ModLockPostForm}, person::Person, post::{Post, PostInsertForm, PostUpdateForm}, }, traits::Crud, + utils::naive_now, }; +use lemmy_db_views_actor::structs::CommunityModeratorView; use lemmy_utils::{ - error::LemmyError, + error::{LemmyError, LemmyResult}, + spawn_try_task, utils::{ markdown::markdown_to_html, - slurs::{check_slurs_opt, remove_slurs}, - validation::check_url_scheme, + slurs::check_slurs_opt, + validation::{is_url_blocked, is_valid_url}, }, }; use std::ops::Deref; @@ -52,7 +54,7 @@ use url::Url; const MAX_TITLE_LENGTH: usize = 200; -#[derive(Clone, Debug)] +#[derive(Clone, Debug, PartialEq)] pub struct ApubPost(pub(crate) Post); impl Deref for ApubPost { @@ -82,7 +84,7 @@ impl Object for ApubPost { async fn read_from_id( object_id: Url, context: &Data, - ) -> Result, LemmyError> { + ) -> LemmyResult> { Ok( Post::read_from_apub_id(&mut context.pool(), object_id) .await? 
@@ -91,7 +93,7 @@ impl Object for ApubPost { } #[tracing::instrument(skip_all)] - async fn delete(self, context: &Data) -> Result<(), LemmyError> { + async fn delete(self, context: &Data) -> LemmyResult<()> { if !self.deleted { let form = PostUpdateForm { deleted: Some(true), @@ -104,12 +106,30 @@ impl Object for ApubPost { // Turn a Lemmy post into an ActivityPub page that can be sent out over the network. #[tracing::instrument(skip_all)] - async fn into_json(self, context: &Data) -> Result { + async fn into_json(self, context: &Data) -> LemmyResult { let creator_id = self.creator_id; let creator = Person::read(&mut context.pool(), creator_id).await?; let community_id = self.community_id; let community = Community::read(&mut context.pool(), community_id).await?; - let language = LanguageTag::new_single(self.language_id, &mut context.pool()).await?; + let language = Some(LanguageTag::new_single(self.language_id, &mut context.pool()).await?); + + let attachment = self + .url + .clone() + .map(|url| { + Attachment::new( + url.into(), + self.url_content_type.clone(), + self.alt_text.clone(), + ) + }) + .into_iter() + .collect(); + let hashtag = Hashtag { + href: self.ap_id.clone().into(), + name: format!("#{}", &community.name), + kind: HashtagType::Hashtag, + }; let page = Page { kind: PageType::Page, @@ -121,15 +141,15 @@ impl Object for ApubPost { content: self.body.as_ref().map(|b| markdown_to_html(b)), media_type: Some(MediaTypeMarkdownOrHtml::Html), source: self.body.clone().map(Source::new), - attachment: self.url.clone().map(Attachment::new).into_iter().collect(), + attachment, image: self.thumbnail_url.clone().map(ImageObject::new), - comments_enabled: Some(!self.locked), sensitive: Some(self.nsfw), language, published: Some(self.published), updated: self.updated, audience: Some(community.actor_id.into()), in_reply_to: None, + tag: vec![hashtag], }; Ok(page) } @@ -139,13 +159,9 @@ impl Object for ApubPost { page: &Page, expected_domain: &Url, context: &Data, - ) -> Result<(), LemmyError> { - // We can't verify the domain in case of mod action, because the mod may be on a different - // instance from the post author. - if !page.is_mod_action(context).await? 
{ - verify_domains_match(page.id.inner(), expected_domain)?; - verify_is_remote_object(page.id.inner(), context.settings())?; - }; + ) -> LemmyResult<()> { + verify_domains_match(page.id.inner(), expected_domain)?; + verify_is_remote_object(&page.id, context)?; let community = page.community(context).await?; check_apub_id_valid_with_strictness(page.id.inner(), community.local, context).await?; @@ -161,11 +177,16 @@ impl Object for ApubPost { } #[tracing::instrument(skip_all)] - async fn from_json(page: Page, context: &Data) -> Result { + async fn from_json(page: Page, context: &Data) -> LemmyResult { let creator = page.creator()?.dereference(context).await?; let community = page.community(context).await?; if community.posting_restricted_to_mods { - is_mod_or_admin(&mut context.pool(), &creator, community.id).await?; + CommunityModeratorView::check_is_community_moderator( + &mut context.pool(), + community.id, + creator.id, + ) + .await?; } let mut name = page .name @@ -192,99 +213,64 @@ impl Object for ApubPost { name = name.chars().take(MAX_TITLE_LENGTH).collect(); } - // read existing, local post if any (for generating mod log) - let old_post = page.id.dereference_local(context).await; + let first_attachment = page.attachment.first(); + let local_site = LocalSite::read(&mut context.pool()).await.ok(); - let form = if !page.is_mod_action(context).await? { - let first_attachment = page.attachment.into_iter().map(Attachment::url).next(); - let url = if first_attachment.is_some() { - first_attachment - } else if page.kind == PageType::Video { - // we cant display videos directly, so insert a link to external video page - Some(page.id.inner().clone()) - } else { - None - }; - check_url_scheme(&url)?; - - let local_site = LocalSite::read(&mut context.pool()).await.ok(); - let allow_sensitive = local_site_opt_to_sensitive(&local_site); - let page_is_sensitive = page.sensitive.unwrap_or(false); - let include_image = allow_sensitive || !page_is_sensitive; - - // Only fetch metadata if the post has a url and was not seen previously. We dont want to - // waste resources by fetching metadata for the same post multiple times. - // Additionally, only fetch image if content is not sensitive or is allowed on local site. - let (metadata_res, thumbnail) = match &url { - Some(url) if old_post.is_err() => { - fetch_site_data( - context.client(), - context.settings(), - Some(url), - include_image, - ) - .await - } - _ => (None, None), - }; - // If no image was included with metadata, use post image instead when available. 
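In the rewritten `from_json` below, the attachment URL is no longer only scheme-checked: it is run through `is_url_blocked` and `is_valid_url` before being stored. A rough standalone approximation of those two checks (the real Lemmy helpers may behave differently; this only shows the shape of the validation):

```rust
use url::Url;

// Hypothetical blocklist check: reject a URL whose host matches a blocked domain.
fn is_url_blocked(url: &Url, blocklist: &[String]) -> Result<(), String> {
    if let Some(host) = url.domain() {
        if blocklist.iter().any(|blocked| blocked.as_str() == host) {
            return Err(format!("domain {host} is blocked"));
        }
    }
    Ok(())
}

// Hypothetical validity check: only http(s) links are accepted for post URLs.
fn is_valid_url(url: &Url) -> Result<(), String> {
    match url.scheme() {
        "http" | "https" => Ok(()),
        other => Err(format!("unsupported scheme: {other}")),
    }
}

fn main() -> Result<(), String> {
    let blocklist = vec!["spam.example".to_string()];
    let url = Url::parse("https://enterprise.lemmy.ml/pictrs/image/abc.png")
        .map_err(|e| e.to_string())?;

    is_url_blocked(&url, &blocklist)?;
    is_valid_url(&url)?;
    println!("url accepted: {url}");
    Ok(())
}
```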
- let thumbnail_url = thumbnail.or_else(|| page.image.map(|i| i.url.into())); - - let (embed_title, embed_description, embed_video_url) = metadata_res - .map(|u| (u.title, u.description, u.embed_video_url)) - .unwrap_or_default(); - let slur_regex = &local_site_opt_to_slur_regex(&local_site); - - let body = read_from_string_or_source_opt(&page.content, &page.media_type, &page.source) - .map(|s| remove_slurs(&s, slur_regex)); - let language_id = - LanguageTag::to_language_id_single(page.language, &mut context.pool()).await?; - - PostInsertForm { - name, - url: url.map(Into::into), - body, - creator_id: creator.id, - community_id: community.id, - removed: None, - locked: page.comments_enabled.map(|e| !e), - published: page.published.map(Into::into), - updated: page.updated.map(Into::into), - deleted: Some(false), - nsfw: page.sensitive, - embed_title, - embed_description, - embed_video_url, - thumbnail_url, - ap_id: Some(page.id.clone().into()), - local: Some(false), - language_id, - featured_community: None, - featured_local: None, - } + let url = if let Some(attachment) = first_attachment.cloned() { + Some(attachment.url()) + } else if page.kind == PageType::Video { + // we cant display videos directly, so insert a link to external video page + Some(page.id.inner().clone()) } else { - // if is mod action, only update locked/stickied fields, nothing else - PostInsertForm::builder() - .name(name) - .creator_id(creator.id) - .community_id(community.id) - .ap_id(Some(page.id.clone().into())) - .locked(page.comments_enabled.map(|e| !e)) - .updated(page.updated.map(Into::into)) - .build() + None }; - let post = Post::create(&mut context.pool(), &form).await?; + let url_blocklist = get_url_blocklist(context).await?; - // write mod log entry for lock - if Page::is_locked_changed(&old_post, &page.comments_enabled) { - let form = ModLockPostForm { - mod_person_id: creator.id, - post_id: post.id, - locked: Some(post.locked), - }; - ModLockPost::create(&mut context.pool(), &form).await?; - } + let url = if let Some(url) = url { + is_url_blocked(&url, &url_blocklist)?; + is_valid_url(&url)?; + to_local_url(url.as_str(), context).await.or(Some(url)) + } else { + None + }; + + let alt_text = first_attachment.cloned().and_then(Attachment::alt_text); + + let slur_regex = &local_site_opt_to_slur_regex(&local_site); + + let body = read_from_string_or_source_opt(&page.content, &page.media_type, &page.source); + let body = process_markdown_opt(&body, slur_regex, &url_blocklist, context).await?; + let body = markdown_rewrite_remote_links_opt(body, context).await; + let language_id = Some( + LanguageTag::to_language_id_single(page.language.unwrap_or_default(), &mut context.pool()) + .await?, + ); + + let form = PostInsertForm { + url: url.map(Into::into), + body, + alt_text, + published: page.published.map(Into::into), + updated: page.updated.map(Into::into), + deleted: Some(false), + nsfw: page.sensitive, + ap_id: Some(page.id.clone().into()), + local: Some(false), + language_id, + ..PostInsertForm::new(name, creator.id, community.id) + }; + + let timestamp = page.updated.or(page.published).unwrap_or_else(naive_now); + let post = Post::insert_apub(&mut context.pool(), timestamp, &form).await?; + let post_ = post.clone(); + let context_ = context.reset_request_count(); + + // Generates a post thumbnail in background task, because some sites can be very slow to + // respond. 
+ spawn_try_task( + async move { generate_post_link_metadata(post_, None, |_| None, context_).await }, + ); Ok(post.into()) } @@ -292,75 +278,62 @@ impl Object for ApubPost { #[cfg(test)] mod tests { - #![allow(clippy::unwrap_used)] - #![allow(clippy::indexing_slicing)] - use super::*; use crate::{ objects::{ - community::{tests::parse_lemmy_community, ApubCommunity}, - instance::ApubSite, + community::tests::parse_lemmy_community, person::{tests::parse_lemmy_person, ApubPerson}, - post::ApubPost, - tests::init_context, }, protocol::tests::file_to_json_object, }; use lemmy_db_schema::source::site::Site; + use pretty_assertions::assert_eq; use serial_test::serial; #[tokio::test] #[serial] - async fn test_parse_lemmy_post() { - let context = init_context().await; - let (person, site) = parse_lemmy_person(&context).await; - let community = parse_lemmy_community(&context).await; + async fn test_parse_lemmy_post() -> LemmyResult<()> { + let context = LemmyContext::init_test_context().await; + let (person, site) = parse_lemmy_person(&context).await?; + let community = parse_lemmy_community(&context).await?; - let json = file_to_json_object("assets/lemmy/objects/page.json").unwrap(); - let url = Url::parse("https://enterprise.lemmy.ml/post/55143").unwrap(); - ApubPost::verify(&json, &url, &context).await.unwrap(); - let post = ApubPost::from_json(json, &context).await.unwrap(); + let json = file_to_json_object("assets/lemmy/objects/page.json")?; + let url = Url::parse("https://enterprise.lemmy.ml/post/55143")?; + ApubPost::verify(&json, &url, &context).await?; + let post = ApubPost::from_json(json, &context).await?; assert_eq!(post.ap_id, url.into()); assert_eq!(post.name, "Post title"); assert!(post.body.is_some()); - assert_eq!(post.body.as_ref().unwrap().len(), 45); + assert_eq!(post.body.as_ref().map(std::string::String::len), Some(45)); assert!(!post.locked); assert!(!post.featured_community); - assert_eq!(context.request_count(), 0); + assert_eq!(context.request_count(), 1); - cleanup(&context, person, site, community, post).await; + Post::delete(&mut context.pool(), post.id).await?; + Person::delete(&mut context.pool(), person.id).await?; + Community::delete(&mut context.pool(), community.id).await?; + Site::delete(&mut context.pool(), site.id).await?; + Ok(()) } #[tokio::test] #[serial] - async fn test_convert_mastodon_post_title() { - let context = init_context().await; - let (person, site) = parse_lemmy_person(&context).await; - let community = parse_lemmy_community(&context).await; + async fn test_convert_mastodon_post_title() -> LemmyResult<()> { + let context = LemmyContext::init_test_context().await; + let community = parse_lemmy_community(&context).await?; - let json = file_to_json_object("assets/mastodon/objects/page.json").unwrap(); - let post = ApubPost::from_json(json, &context).await.unwrap(); + let json = file_to_json_object("assets/mastodon/objects/person.json")?; + let person = ApubPerson::from_json(json, &context).await?; + + let json = file_to_json_object("assets/mastodon/objects/page.json")?; + let post = ApubPost::from_json(json, &context).await?; assert_eq!(post.name, "Variable never resetting at refresh"); - cleanup(&context, person, site, community, post).await; - } - - async fn cleanup( - context: &Data, - person: ApubPerson, - site: ApubSite, - community: ApubCommunity, - post: ApubPost, - ) { - Post::delete(&mut context.pool(), post.id).await.unwrap(); - Person::delete(&mut context.pool(), person.id) - .await - .unwrap(); - Community::delete(&mut context.pool(), 
community.id) - .await - .unwrap(); - Site::delete(&mut context.pool(), site.id).await.unwrap(); + Post::delete(&mut context.pool(), post.id).await?; + Person::delete(&mut context.pool(), person.id).await?; + Community::delete(&mut context.pool(), community.id).await?; + Ok(()) } } diff --git a/crates/apub/src/objects/private_message.rs b/crates/apub/src/objects/private_message.rs index c0b0b8f1d..573210c71 100644 --- a/crates/apub/src/objects/private_message.rs +++ b/crates/apub/src/objects/private_message.rs @@ -1,5 +1,7 @@ +use super::verify_is_remote_object; use crate::{ check_apub_id_valid_with_strictness, + fetcher::markdown_links::markdown_rewrite_remote_links, objects::read_from_string_or_source, protocol::{ objects::chat_message::{ChatMessage, ChatMessageType}, @@ -14,18 +16,26 @@ use activitypub_federation::{ use chrono::{DateTime, Utc}; use lemmy_api_common::{ context::LemmyContext, - utils::{check_person_block, check_private_messages_enabled}, + utils::{ + check_private_messages_enabled, + get_url_blocklist, + local_site_opt_to_slur_regex, + process_markdown, + }, }; use lemmy_db_schema::{ source::{ + local_site::LocalSite, person::Person, + person_block::PersonBlock, private_message::{PrivateMessage, PrivateMessageInsertForm}, }, traits::Crud, + utils::naive_now, }; use lemmy_db_views::structs::LocalUserView; use lemmy_utils::{ - error::{LemmyError, LemmyErrorType}, + error::{FederationError, LemmyError, LemmyErrorType, LemmyResult}, utils::markdown::markdown_to_html, }; use std::ops::Deref; @@ -61,7 +71,7 @@ impl Object for ApubPrivateMessage { async fn read_from_id( object_id: Url, context: &Data, - ) -> Result, LemmyError> { + ) -> LemmyResult> { Ok( PrivateMessage::read_from_apub_id(&mut context.pool(), object_id) .await? @@ -69,13 +79,13 @@ impl Object for ApubPrivateMessage { ) } - async fn delete(self, _context: &Data) -> Result<(), LemmyError> { + async fn delete(self, _context: &Data) -> LemmyResult<()> { // do nothing, because pm can't be fetched over http - unimplemented!() + Err(LemmyErrorType::NotFound.into()) } #[tracing::instrument(skip_all)] - async fn into_json(self, context: &Data) -> Result { + async fn into_json(self, context: &Data) -> LemmyResult { let creator_id = self.creator_id; let creator = Person::read(&mut context.pool(), creator_id).await?; @@ -101,14 +111,15 @@ impl Object for ApubPrivateMessage { note: &ChatMessage, expected_domain: &Url, context: &Data, - ) -> Result<(), LemmyError> { + ) -> LemmyResult<()> { verify_domains_match(note.id.inner(), expected_domain)?; verify_domains_match(note.attributed_to.inner(), note.id.inner())?; + verify_is_remote_object(¬e.id, context)?; check_apub_id_valid_with_strictness(note.id.inner(), false, context).await?; let person = note.attributed_to.dereference(context).await?; if person.banned { - Err(LemmyErrorType::PersonIsBannedFromSite( + Err(FederationError::PersonIsBannedFromSite( person.actor_id.to_string(), ))? 
} else { @@ -120,16 +131,21 @@ impl Object for ApubPrivateMessage { async fn from_json( note: ChatMessage, context: &Data, - ) -> Result { + ) -> LemmyResult { let creator = note.attributed_to.dereference(context).await?; let recipient = note.to[0].dereference(context).await?; - check_person_block(creator.id, recipient.id, &mut context.pool()).await?; + PersonBlock::read(&mut context.pool(), recipient.id, creator.id).await?; let recipient_local_user = LocalUserView::read_person(&mut context.pool(), recipient.id).await?; check_private_messages_enabled(&recipient_local_user)?; + let local_site = LocalSite::read(&mut context.pool()).await.ok(); + let slur_regex = &local_site_opt_to_slur_regex(&local_site); + let url_blocklist = get_url_blocklist(context).await?; let content = read_from_string_or_source(¬e.content, &None, ¬e.source); + let content = process_markdown(&content, slur_regex, &url_blocklist, context).await?; + let content = markdown_rewrite_remote_links(content, context).await; let form = PrivateMessageInsertForm { creator_id: creator.id, @@ -142,111 +158,93 @@ impl Object for ApubPrivateMessage { ap_id: Some(note.id.into()), local: Some(false), }; - let pm = PrivateMessage::create(&mut context.pool(), &form).await?; + let timestamp = note.updated.or(note.published).unwrap_or_else(naive_now); + let pm = PrivateMessage::insert_apub(&mut context.pool(), timestamp, &form).await?; Ok(pm.into()) } } #[cfg(test)] mod tests { - #![allow(clippy::unwrap_used)] - #![allow(clippy::indexing_slicing)] - use super::*; use crate::{ objects::{ instance::{tests::parse_lemmy_instance, ApubSite}, person::ApubPerson, - tests::init_context, }, protocol::tests::file_to_json_object, }; use assert_json_diff::assert_json_include; use lemmy_db_schema::source::site::Site; + use pretty_assertions::assert_eq; use serial_test::serial; async fn prepare_comment_test( url: &Url, context: &Data, - ) -> (ApubPerson, ApubPerson, ApubSite) { + ) -> LemmyResult<(ApubPerson, ApubPerson, ApubSite)> { let context2 = context.reset_request_count(); - let lemmy_person = file_to_json_object("assets/lemmy/objects/person.json").unwrap(); - let site = parse_lemmy_instance(&context2).await; - ApubPerson::verify(&lemmy_person, url, &context2) - .await - .unwrap(); - let person1 = ApubPerson::from_json(lemmy_person, &context2) - .await - .unwrap(); - let pleroma_person = file_to_json_object("assets/pleroma/objects/person.json").unwrap(); - let pleroma_url = Url::parse("https://queer.hacktivis.me/users/lanodan").unwrap(); - ApubPerson::verify(&pleroma_person, &pleroma_url, &context2) - .await - .unwrap(); - let person2 = ApubPerson::from_json(pleroma_person, &context2) - .await - .unwrap(); - (person1, person2, site) + let lemmy_person = file_to_json_object("assets/lemmy/objects/person.json")?; + let site = parse_lemmy_instance(&context2).await?; + ApubPerson::verify(&lemmy_person, url, &context2).await?; + let person1 = ApubPerson::from_json(lemmy_person, &context2).await?; + let pleroma_person = file_to_json_object("assets/pleroma/objects/person.json")?; + let pleroma_url = Url::parse("https://queer.hacktivis.me/users/lanodan")?; + ApubPerson::verify(&pleroma_person, &pleroma_url, &context2).await?; + let person2 = ApubPerson::from_json(pleroma_person, &context2).await?; + Ok((person1, person2, site)) } - async fn cleanup(data: (ApubPerson, ApubPerson, ApubSite), context: &Data) { - Person::delete(&mut context.pool(), data.0.id) - .await - .unwrap(); - Person::delete(&mut context.pool(), data.1.id) - .await - .unwrap(); - 
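A recurring change across the test modules is switching from `unwrap()` to `?` by giving each test a `LemmyResult<()>` return type, which also lets the lint allowances for `clippy::unwrap_used` be dropped. The same pattern in a self-contained form, using `anyhow::Result` as a stand-in for `LemmyResult` (an assumption made only for this sketch):

```rust
// Assumed dev-dependencies: anyhow = "1", serde_json = "1",
// tokio = { version = "1", features = ["macros", "rt"] }
use anyhow::{ensure, Result};

async fn parse_len(json: &str) -> Result<usize> {
    // Stand-in for the fixture parsing that previously ended in `.unwrap()`.
    let value: serde_json::Value = serde_json::from_str(json)?;
    ensure!(value.is_object(), "expected a JSON object");
    Ok(json.len())
}

#[tokio::test]
async fn test_parse_fixture() -> Result<()> {
    // Any failure propagates via `?` and fails the test with the full error chain,
    // instead of panicking through unwrap().
    let len = parse_len(r#"{"name": "Enterprise"}"#).await?;
    assert_eq!(len, 22);
    Ok(())
}
```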
Site::delete(&mut context.pool(), data.2.id).await.unwrap(); + async fn cleanup( + (person1, person2, site): (ApubPerson, ApubPerson, ApubSite), + context: &Data, + ) -> LemmyResult<()> { + Person::delete(&mut context.pool(), person1.id).await?; + Person::delete(&mut context.pool(), person2.id).await?; + Site::delete(&mut context.pool(), site.id).await?; + Ok(()) } #[tokio::test] #[serial] - async fn test_parse_lemmy_pm() { - let context = init_context().await; - let url = Url::parse("https://enterprise.lemmy.ml/private_message/1621").unwrap(); - let data = prepare_comment_test(&url, &context).await; - let json: ChatMessage = file_to_json_object("assets/lemmy/objects/chat_message.json").unwrap(); - ApubPrivateMessage::verify(&json, &url, &context) - .await - .unwrap(); - let pm = ApubPrivateMessage::from_json(json.clone(), &context) - .await - .unwrap(); + async fn test_parse_lemmy_pm() -> LemmyResult<()> { + let context = LemmyContext::init_test_context().await; + let url = Url::parse("https://enterprise.lemmy.ml/private_message/1621")?; + let data = prepare_comment_test(&url, &context).await?; + let json: ChatMessage = file_to_json_object("assets/lemmy/objects/chat_message.json")?; + ApubPrivateMessage::verify(&json, &url, &context).await?; + let pm = ApubPrivateMessage::from_json(json.clone(), &context).await?; assert_eq!(pm.ap_id.clone(), url.into()); assert_eq!(pm.content.len(), 20); assert_eq!(context.request_count(), 0); let pm_id = pm.id; - let to_apub = pm.into_json(&context).await.unwrap(); + let to_apub = pm.into_json(&context).await?; assert_json_include!(actual: json, expected: to_apub); - PrivateMessage::delete(&mut context.pool(), pm_id) - .await - .unwrap(); - cleanup(data, &context).await; + PrivateMessage::delete(&mut context.pool(), pm_id).await?; + cleanup(data, &context).await?; + Ok(()) } #[tokio::test] #[serial] - async fn test_parse_pleroma_pm() { - let context = init_context().await; - let url = Url::parse("https://enterprise.lemmy.ml/private_message/1621").unwrap(); - let data = prepare_comment_test(&url, &context).await; - let pleroma_url = Url::parse("https://queer.hacktivis.me/objects/2").unwrap(); - let json = file_to_json_object("assets/pleroma/objects/chat_message.json").unwrap(); - ApubPrivateMessage::verify(&json, &pleroma_url, &context) - .await - .unwrap(); - let pm = ApubPrivateMessage::from_json(json, &context).await.unwrap(); + async fn test_parse_pleroma_pm() -> LemmyResult<()> { + let context = LemmyContext::init_test_context().await; + let url = Url::parse("https://enterprise.lemmy.ml/private_message/1621")?; + let data = prepare_comment_test(&url, &context).await?; + let pleroma_url = Url::parse("https://queer.hacktivis.me/objects/2")?; + let json = file_to_json_object("assets/pleroma/objects/chat_message.json")?; + ApubPrivateMessage::verify(&json, &pleroma_url, &context).await?; + let pm = ApubPrivateMessage::from_json(json, &context).await?; assert_eq!(pm.ap_id, pleroma_url.into()); assert_eq!(pm.content.len(), 3); assert_eq!(context.request_count(), 0); - PrivateMessage::delete(&mut context.pool(), pm.id) - .await - .unwrap(); - cleanup(data, &context).await; + PrivateMessage::delete(&mut context.pool(), pm.id).await?; + cleanup(data, &context).await?; + Ok(()) } } diff --git a/crates/apub/src/protocol/activities/block/block_user.rs b/crates/apub/src/protocol/activities/block/block_user.rs index b958b58e1..96135d645 100644 --- a/crates/apub/src/protocol/activities/block/block_user.rs +++ 
b/crates/apub/src/protocol/activities/block/block_user.rs @@ -12,7 +12,7 @@ use activitypub_federation::{ use anyhow::anyhow; use chrono::{DateTime, Utc}; use lemmy_api_common::context::LemmyContext; -use lemmy_utils::error::LemmyError; +use lemmy_utils::error::LemmyResult; use serde::{Deserialize, Serialize}; use serde_with::skip_serializing_none; use url::Url; @@ -38,12 +38,12 @@ pub struct BlockUser { pub(crate) remove_data: Option, /// block reason, written to mod log pub(crate) summary: Option, - pub(crate) expires: Option>, + pub(crate) end_time: Option>, } #[async_trait::async_trait] impl InCommunity for BlockUser { - async fn community(&self, context: &Data) -> Result { + async fn community(&self, context: &Data) -> LemmyResult { let target = self.target.dereference(context).await?; let community = match target { SiteOrCommunity::Community(c) => c, diff --git a/crates/apub/src/protocol/activities/block/mod.rs b/crates/apub/src/protocol/activities/block/mod.rs index 1b00245f8..090b999f0 100644 --- a/crates/apub/src/protocol/activities/block/mod.rs +++ b/crates/apub/src/protocol/activities/block/mod.rs @@ -3,18 +3,16 @@ pub mod undo_block_user; #[cfg(test)] mod tests { - #![allow(clippy::unwrap_used)] - #![allow(clippy::indexing_slicing)] - use crate::protocol::{ activities::block::{block_user::BlockUser, undo_block_user::UndoBlockUser}, tests::test_parse_lemmy_item, }; + use lemmy_utils::error::LemmyResult; #[test] - fn test_parse_lemmy_block() { - test_parse_lemmy_item::("assets/lemmy/activities/block/block_user.json").unwrap(); - test_parse_lemmy_item::("assets/lemmy/activities/block/undo_block_user.json") - .unwrap(); + fn test_parse_lemmy_block() -> LemmyResult<()> { + test_parse_lemmy_item::("assets/lemmy/activities/block/block_user.json")?; + test_parse_lemmy_item::("assets/lemmy/activities/block/undo_block_user.json")?; + Ok(()) } } diff --git a/crates/apub/src/protocol/activities/block/undo_block_user.rs b/crates/apub/src/protocol/activities/block/undo_block_user.rs index 758d3fd4b..e038fa2dc 100644 --- a/crates/apub/src/protocol/activities/block/undo_block_user.rs +++ b/crates/apub/src/protocol/activities/block/undo_block_user.rs @@ -10,7 +10,7 @@ use activitypub_federation::{ protocol::helpers::deserialize_one_or_many, }; use lemmy_api_common::context::LemmyContext; -use lemmy_utils::error::LemmyError; +use lemmy_utils::error::LemmyResult; use serde::{Deserialize, Serialize}; use serde_with::skip_serializing_none; use url::Url; @@ -29,11 +29,15 @@ pub struct UndoBlockUser { pub(crate) kind: UndoType, pub(crate) id: Url, pub(crate) audience: Option>, + + /// Quick and dirty solution. + /// TODO: send a separate Delete activity instead + pub(crate) restore_data: Option, } #[async_trait::async_trait] impl InCommunity for UndoBlockUser { - async fn community(&self, context: &Data) -> Result { + async fn community(&self, context: &Data) -> LemmyResult { let community = self.object.community(context).await?; if let Some(audience) = &self.audience { verify_community_matches(audience, community.actor_id.clone())?; diff --git a/crates/apub/src/protocol/activities/community/announce.rs b/crates/apub/src/protocol/activities/community/announce.rs index e149e5fd1..60720365a 100644 --- a/crates/apub/src/protocol/activities/community/announce.rs +++ b/crates/apub/src/protocol/activities/community/announce.rs @@ -23,7 +23,7 @@ pub struct AnnounceActivity { } /// Use this to receive community inbox activities, and then announce them if valid. 
This -/// ensures that all json fields are kept, even if Lemmy doesnt understand them. +/// ensures that all json fields are kept, even if Lemmy doesn't understand them. #[derive(Clone, Debug, Deserialize, Serialize)] pub struct RawAnnouncableActivities { pub(crate) id: Url, diff --git a/crates/apub/src/protocol/activities/community/collection_add.rs b/crates/apub/src/protocol/activities/community/collection_add.rs index edf67740a..777ad8b62 100644 --- a/crates/apub/src/protocol/activities/community/collection_add.rs +++ b/crates/apub/src/protocol/activities/community/collection_add.rs @@ -11,7 +11,7 @@ use activitypub_federation::{ }; use lemmy_api_common::context::LemmyContext; use lemmy_db_schema::source::community::Community; -use lemmy_utils::error::LemmyError; +use lemmy_utils::error::LemmyResult; use serde::{Deserialize, Serialize}; use url::Url; @@ -33,7 +33,7 @@ pub struct CollectionAdd { #[async_trait::async_trait] impl InCommunity for CollectionAdd { - async fn community(&self, context: &Data) -> Result { + async fn community(&self, context: &Data) -> LemmyResult { let (community, _) = Community::get_by_collection_url(&mut context.pool(), &self.clone().target.into()).await?; if let Some(audience) = &self.audience { diff --git a/crates/apub/src/protocol/activities/community/collection_remove.rs b/crates/apub/src/protocol/activities/community/collection_remove.rs index 960951732..afc0c24a0 100644 --- a/crates/apub/src/protocol/activities/community/collection_remove.rs +++ b/crates/apub/src/protocol/activities/community/collection_remove.rs @@ -11,7 +11,7 @@ use activitypub_federation::{ }; use lemmy_api_common::context::LemmyContext; use lemmy_db_schema::source::community::Community; -use lemmy_utils::error::LemmyError; +use lemmy_utils::error::LemmyResult; use serde::{Deserialize, Serialize}; use url::Url; @@ -33,7 +33,7 @@ pub struct CollectionRemove { #[async_trait::async_trait] impl InCommunity for CollectionRemove { - async fn community(&self, context: &Data) -> Result { + async fn community(&self, context: &Data) -> LemmyResult { let (community, _) = Community::get_by_collection_url(&mut context.pool(), &self.clone().target.into()).await?; if let Some(audience) = &self.audience { diff --git a/crates/apub/src/protocol/activities/community/lock_page.rs b/crates/apub/src/protocol/activities/community/lock_page.rs index 33b108531..5c8ecfca9 100644 --- a/crates/apub/src/protocol/activities/community/lock_page.rs +++ b/crates/apub/src/protocol/activities/community/lock_page.rs @@ -11,12 +11,12 @@ use activitypub_federation::{ }; use lemmy_api_common::context::LemmyContext; use lemmy_db_schema::{source::community::Community, traits::Crud}; -use lemmy_utils::error::LemmyError; +use lemmy_utils::error::LemmyResult; use serde::{Deserialize, Serialize}; -use strum_macros::Display; +use strum::Display; use url::Url; -#[derive(Clone, Debug, Deserialize, Serialize, Display)] +#[derive(Clone, Debug, Display, Deserialize, Serialize)] pub enum LockType { Lock, } @@ -53,7 +53,7 @@ pub struct UndoLockPage { #[async_trait::async_trait] impl InCommunity for LockPage { - async fn community(&self, context: &Data) -> Result { + async fn community(&self, context: &Data) -> LemmyResult { let post = self.object.dereference(context).await?; let community = Community::read(&mut context.pool(), post.community_id).await?; if let Some(audience) = &self.audience { @@ -65,7 +65,7 @@ impl InCommunity for LockPage { #[async_trait::async_trait] impl InCommunity for UndoLockPage { - async fn community(&self, 
context: &Data) -> Result { + async fn community(&self, context: &Data) -> LemmyResult { let community = self.object.community(context).await?; if let Some(audience) = &self.audience { verify_community_matches(audience, community.actor_id.clone())?; diff --git a/crates/apub/src/protocol/activities/community/mod.rs b/crates/apub/src/protocol/activities/community/mod.rs index 99a6d0b9c..0c52e6e77 100644 --- a/crates/apub/src/protocol/activities/community/mod.rs +++ b/crates/apub/src/protocol/activities/community/mod.rs @@ -7,9 +7,6 @@ pub mod update; #[cfg(test)] mod tests { - #![allow(clippy::unwrap_used)] - #![allow(clippy::indexing_slicing)] - use crate::protocol::{ activities::community::{ announce::AnnounceActivity, @@ -21,37 +18,32 @@ mod tests { }, tests::test_parse_lemmy_item, }; + use lemmy_utils::error::LemmyResult; #[test] - fn test_parse_lemmy_community_activities() { + fn test_parse_lemmy_community_activities() -> LemmyResult<()> { test_parse_lemmy_item::( "assets/lemmy/activities/community/announce_create_page.json", - ) - .unwrap(); + )?; - test_parse_lemmy_item::("assets/lemmy/activities/community/add_mod.json") - .unwrap(); - test_parse_lemmy_item::("assets/lemmy/activities/community/remove_mod.json") - .unwrap(); + test_parse_lemmy_item::("assets/lemmy/activities/community/add_mod.json")?; + test_parse_lemmy_item::("assets/lemmy/activities/community/remove_mod.json")?; test_parse_lemmy_item::( "assets/lemmy/activities/community/add_featured_post.json", - ) - .unwrap(); + )?; test_parse_lemmy_item::( "assets/lemmy/activities/community/remove_featured_post.json", - ) - .unwrap(); + )?; - test_parse_lemmy_item::("assets/lemmy/activities/community/lock_page.json").unwrap(); - test_parse_lemmy_item::("assets/lemmy/activities/community/undo_lock_page.json") - .unwrap(); + test_parse_lemmy_item::("assets/lemmy/activities/community/lock_page.json")?; + test_parse_lemmy_item::("assets/lemmy/activities/community/undo_lock_page.json")?; test_parse_lemmy_item::( "assets/lemmy/activities/community/update_community.json", - ) - .unwrap(); + )?; - test_parse_lemmy_item::("assets/lemmy/activities/community/report_page.json").unwrap(); + test_parse_lemmy_item::("assets/lemmy/activities/community/report_page.json")?; + Ok(()) } } diff --git a/crates/apub/src/protocol/activities/community/report.rs b/crates/apub/src/protocol/activities/community/report.rs index 6e8a1bbf3..dd0f72f43 100644 --- a/crates/apub/src/protocol/activities/community/report.rs +++ b/crates/apub/src/protocol/activities/community/report.rs @@ -11,7 +11,7 @@ use activitypub_federation::{ protocol::helpers::deserialize_one, }; use lemmy_api_common::context::LemmyContext; -use lemmy_utils::error::LemmyError; +use lemmy_utils::error::{LemmyErrorType, LemmyResult}; use serde::{Deserialize, Serialize}; use url::Url; @@ -21,17 +21,57 @@ pub struct Report { pub(crate) actor: ObjectId, #[serde(deserialize_with = "deserialize_one")] pub(crate) to: [ObjectId; 1], - pub(crate) object: ObjectId, - pub(crate) summary: String, + pub(crate) object: ReportObject, + /// Report reason as sent by Lemmy + pub(crate) summary: Option, + /// Report reason as sent by Mastodon + pub(crate) content: Option, #[serde(rename = "type")] pub(crate) kind: FlagType, pub(crate) id: Url, pub(crate) audience: Option>, } +impl Report { + pub fn reason(&self) -> LemmyResult { + self + .summary + .clone() + .or(self.content.clone()) + .ok_or(LemmyErrorType::NotFound.into()) + } +} + +#[derive(Clone, Debug, Deserialize, Serialize)] +#[serde(untagged)] 
+pub(crate) enum ReportObject { + Lemmy(ObjectId), + /// Mastodon sends an array containing user id and one or more post ids + Mastodon(Vec), +} + +impl ReportObject { + pub async fn dereference(self, context: &Data) -> LemmyResult { + match self { + ReportObject::Lemmy(l) => l.dereference(context).await, + ReportObject::Mastodon(objects) => { + for o in objects { + // Find the first reported item which can be dereferenced as post or comment (Lemmy can + // only handle one item per report). + let deref = ObjectId::from(o).dereference(context).await; + if deref.is_ok() { + return deref; + } + } + Err(LemmyErrorType::NotFound.into()) + } + } + } +} + #[async_trait::async_trait] impl InCommunity for Report { - async fn community(&self, context: &Data) -> Result { + async fn community(&self, context: &Data) -> LemmyResult { let community = self.to[0].dereference(context).await?; if let Some(audience) = &self.audience { verify_community_matches(audience, community.actor_id.clone())?; diff --git a/crates/apub/src/protocol/activities/community/update.rs b/crates/apub/src/protocol/activities/community/update.rs index 49ec1f5d6..268f05073 100644 --- a/crates/apub/src/protocol/activities/community/update.rs +++ b/crates/apub/src/protocol/activities/community/update.rs @@ -10,7 +10,7 @@ use activitypub_federation::{ protocol::helpers::deserialize_one_or_many, }; use lemmy_api_common::context::LemmyContext; -use lemmy_utils::error::LemmyError; +use lemmy_utils::error::LemmyResult; use serde::{Deserialize, Serialize}; use url::Url; @@ -34,7 +34,7 @@ pub struct UpdateCommunity { #[async_trait::async_trait] impl InCommunity for UpdateCommunity { - async fn community(&self, context: &Data) -> Result { + async fn community(&self, context: &Data) -> LemmyResult { let community: ApubCommunity = self.object.id.clone().dereference(context).await?; if let Some(audience) = &self.audience { verify_community_matches(audience, community.actor_id.clone())?; diff --git a/crates/apub/src/protocol/activities/create_or_update/mod.rs b/crates/apub/src/protocol/activities/create_or_update/mod.rs index a516bf9b4..3d9dbbb1d 100644 --- a/crates/apub/src/protocol/activities/create_or_update/mod.rs +++ b/crates/apub/src/protocol/activities/create_or_update/mod.rs @@ -4,9 +4,6 @@ pub mod page; #[cfg(test)] mod tests { - #![allow(clippy::unwrap_used)] - #![allow(clippy::indexing_slicing)] - use crate::protocol::{ activities::create_or_update::{ chat_message::CreateOrUpdateChatMessage, @@ -15,24 +12,22 @@ mod tests { }, tests::test_parse_lemmy_item, }; + use lemmy_utils::error::LemmyResult; #[test] - fn test_parse_lemmy_create_or_update() { + fn test_parse_lemmy_create_or_update() -> LemmyResult<()> { test_parse_lemmy_item::( "assets/lemmy/activities/create_or_update/create_page.json", - ) - .unwrap(); + )?; test_parse_lemmy_item::( "assets/lemmy/activities/create_or_update/update_page.json", - ) - .unwrap(); + )?; test_parse_lemmy_item::( "assets/lemmy/activities/create_or_update/create_note.json", - ) - .unwrap(); + )?; test_parse_lemmy_item::( "assets/lemmy/activities/create_or_update/create_private_message.json", - ) - .unwrap(); + )?; + Ok(()) } } diff --git a/crates/apub/src/protocol/activities/create_or_update/note.rs b/crates/apub/src/protocol/activities/create_or_update/note.rs index 6fabc0aaa..ff0728174 100644 --- a/crates/apub/src/protocol/activities/create_or_update/note.rs +++ b/crates/apub/src/protocol/activities/create_or_update/note.rs @@ -11,7 +11,7 @@ use activitypub_federation::{ }; use 
lemmy_api_common::context::LemmyContext; use lemmy_db_schema::{source::community::Community, traits::Crud}; -use lemmy_utils::error::LemmyError; +use lemmy_utils::error::LemmyResult; use serde::{Deserialize, Serialize}; use url::Url; @@ -34,7 +34,7 @@ pub struct CreateOrUpdateNote { #[async_trait::async_trait] impl InCommunity for CreateOrUpdateNote { - async fn community(&self, context: &Data) -> Result { + async fn community(&self, context: &Data) -> LemmyResult { let post = self.object.get_parents(context).await?.0; let community = Community::read(&mut context.pool(), post.community_id).await?; if let Some(audience) = &self.audience { diff --git a/crates/apub/src/protocol/activities/create_or_update/page.rs b/crates/apub/src/protocol/activities/create_or_update/page.rs index ec64c31a0..21052a9ef 100644 --- a/crates/apub/src/protocol/activities/create_or_update/page.rs +++ b/crates/apub/src/protocol/activities/create_or_update/page.rs @@ -9,7 +9,7 @@ use activitypub_federation::{ protocol::helpers::deserialize_one_or_many, }; use lemmy_api_common::context::LemmyContext; -use lemmy_utils::error::LemmyError; +use lemmy_utils::error::LemmyResult; use serde::{Deserialize, Serialize}; use url::Url; @@ -30,7 +30,7 @@ pub struct CreateOrUpdatePage { #[async_trait::async_trait] impl InCommunity for CreateOrUpdatePage { - async fn community(&self, context: &Data) -> Result { + async fn community(&self, context: &Data) -> LemmyResult { let community = self.object.community(context).await?; if let Some(audience) = &self.audience { verify_community_matches(audience, community.actor_id.clone())?; diff --git a/crates/apub/src/protocol/activities/deletion/delete.rs b/crates/apub/src/protocol/activities/deletion/delete.rs index 2a7bfeebe..3a29da069 100644 --- a/crates/apub/src/protocol/activities/deletion/delete.rs +++ b/crates/apub/src/protocol/activities/deletion/delete.rs @@ -15,7 +15,7 @@ use lemmy_db_schema::{ source::{community::Community, post::Post}, traits::Crud, }; -use lemmy_utils::error::LemmyError; +use lemmy_utils::error::LemmyResult; use serde::{Deserialize, Serialize}; use serde_with::skip_serializing_none; use url::Url; @@ -40,11 +40,14 @@ pub struct Delete { /// If summary is present, this is a mod action (Remove in Lemmy terms). Otherwise, its a user /// deleting their own content. pub(crate) summary: Option, + /// Nonstandard field, only valid if object refers to a Person. If present, all content from the + /// user should be deleted along with the account + pub(crate) remove_data: Option, } #[async_trait::async_trait] impl InCommunity for Delete { - async fn community(&self, context: &Data) -> Result { + async fn community(&self, context: &Data) -> LemmyResult { let community_id = match DeletableObjects::read_from_db(self.object.id(), context).await? 
{ DeletableObjects::Community(c) => c.id, DeletableObjects::Comment(c) => { @@ -52,6 +55,7 @@ impl InCommunity for Delete { post.community_id } DeletableObjects::Post(p) => p.community_id, + DeletableObjects::Person(_) => return Err(anyhow!("Person is not part of community").into()), DeletableObjects::PrivateMessage(_) => { return Err(anyhow!("Private message is not part of community").into()) } diff --git a/crates/apub/src/protocol/activities/deletion/delete_user.rs b/crates/apub/src/protocol/activities/deletion/delete_user.rs index 46b070fab..70bba0718 100644 --- a/crates/apub/src/protocol/activities/deletion/delete_user.rs +++ b/crates/apub/src/protocol/activities/deletion/delete_user.rs @@ -23,6 +23,7 @@ pub struct DeleteUser { #[serde(deserialize_with = "deserialize_one_or_many", default)] #[serde(skip_serializing_if = "Vec::is_empty")] pub(crate) cc: Vec, - /// Nonstandard field. If present, all content from the user should be deleted along with the account + /// Nonstandard field. If present, all content from the user should be deleted along with the + /// account pub(crate) remove_data: Option, } diff --git a/crates/apub/src/protocol/activities/deletion/mod.rs b/crates/apub/src/protocol/activities/deletion/mod.rs index 226cd2cac..e47f3a418 100644 --- a/crates/apub/src/protocol/activities/deletion/mod.rs +++ b/crates/apub/src/protocol/activities/deletion/mod.rs @@ -4,31 +4,27 @@ pub mod undo_delete; #[cfg(test)] mod tests { - #![allow(clippy::unwrap_used)] - #![allow(clippy::indexing_slicing)] - use crate::protocol::{ activities::deletion::{delete::Delete, delete_user::DeleteUser, undo_delete::UndoDelete}, tests::test_parse_lemmy_item, }; + use lemmy_utils::error::LemmyResult; #[test] - fn test_parse_lemmy_deletion() { - test_parse_lemmy_item::("assets/lemmy/activities/deletion/remove_note.json").unwrap(); - test_parse_lemmy_item::("assets/lemmy/activities/deletion/delete_page.json").unwrap(); + fn test_parse_lemmy_deletion() -> LemmyResult<()> { + test_parse_lemmy_item::("assets/lemmy/activities/deletion/remove_note.json")?; + test_parse_lemmy_item::("assets/lemmy/activities/deletion/delete_page.json")?; - test_parse_lemmy_item::("assets/lemmy/activities/deletion/undo_remove_note.json") - .unwrap(); - test_parse_lemmy_item::("assets/lemmy/activities/deletion/undo_delete_page.json") - .unwrap(); - test_parse_lemmy_item::("assets/lemmy/activities/deletion/delete_private_message.json") - .unwrap(); + test_parse_lemmy_item::("assets/lemmy/activities/deletion/undo_remove_note.json")?; + test_parse_lemmy_item::("assets/lemmy/activities/deletion/undo_delete_page.json")?; + test_parse_lemmy_item::( + "assets/lemmy/activities/deletion/delete_private_message.json", + )?; test_parse_lemmy_item::( "assets/lemmy/activities/deletion/undo_delete_private_message.json", - ) - .unwrap(); + )?; - test_parse_lemmy_item::("assets/lemmy/activities/deletion/delete_user.json") - .unwrap(); + test_parse_lemmy_item::("assets/lemmy/activities/deletion/delete_user.json")?; + Ok(()) } } diff --git a/crates/apub/src/protocol/activities/deletion/undo_delete.rs b/crates/apub/src/protocol/activities/deletion/undo_delete.rs index 35d9951b1..508b90113 100644 --- a/crates/apub/src/protocol/activities/deletion/undo_delete.rs +++ b/crates/apub/src/protocol/activities/deletion/undo_delete.rs @@ -10,7 +10,7 @@ use activitypub_federation::{ protocol::helpers::deserialize_one_or_many, }; use lemmy_api_common::context::LemmyContext; -use lemmy_utils::error::LemmyError; +use lemmy_utils::error::LemmyResult; use 
serde::{Deserialize, Serialize}; use serde_with::skip_serializing_none; use url::Url; @@ -35,7 +35,7 @@ pub struct UndoDelete { #[async_trait::async_trait] impl InCommunity for UndoDelete { - async fn community(&self, context: &Data) -> Result { + async fn community(&self, context: &Data) -> LemmyResult { let community = self.object.community(context).await?; if let Some(audience) = &self.audience { verify_community_matches(audience, community.actor_id.clone())?; diff --git a/crates/apub/src/protocol/activities/following/mod.rs b/crates/apub/src/protocol/activities/following/mod.rs index 329807343..ec263adae 100644 --- a/crates/apub/src/protocol/activities/following/mod.rs +++ b/crates/apub/src/protocol/activities/following/mod.rs @@ -4,19 +4,17 @@ pub mod undo_follow; #[cfg(test)] mod tests { - #![allow(clippy::unwrap_used)] - #![allow(clippy::indexing_slicing)] - use crate::protocol::{ activities::following::{accept::AcceptFollow, follow::Follow, undo_follow::UndoFollow}, tests::test_parse_lemmy_item, }; + use lemmy_utils::error::LemmyResult; #[test] - fn test_parse_lemmy_accept_follow() { - test_parse_lemmy_item::("assets/lemmy/activities/following/follow.json").unwrap(); - test_parse_lemmy_item::("assets/lemmy/activities/following/accept.json").unwrap(); - test_parse_lemmy_item::("assets/lemmy/activities/following/undo_follow.json") - .unwrap(); + fn test_parse_lemmy_accept_follow() -> LemmyResult<()> { + test_parse_lemmy_item::("assets/lemmy/activities/following/follow.json")?; + test_parse_lemmy_item::("assets/lemmy/activities/following/accept.json")?; + test_parse_lemmy_item::("assets/lemmy/activities/following/undo_follow.json")?; + Ok(()) } } diff --git a/crates/apub/src/protocol/activities/mod.rs b/crates/apub/src/protocol/activities/mod.rs index 24095faba..d1da73a16 100644 --- a/crates/apub/src/protocol/activities/mod.rs +++ b/crates/apub/src/protocol/activities/mod.rs @@ -1,5 +1,5 @@ use serde::{Deserialize, Serialize}; -use strum_macros::Display; +use strum::Display; pub mod block; pub mod community; @@ -16,71 +16,90 @@ pub enum CreateOrUpdateType { #[cfg(test)] mod tests { - #![allow(clippy::unwrap_used)] - #![allow(clippy::indexing_slicing)] - use crate::protocol::{ activities::{ - community::announce::AnnounceActivity, + community::{announce::AnnounceActivity, report::Report}, create_or_update::{note::CreateOrUpdateNote, page::CreateOrUpdatePage}, deletion::delete::Delete, - following::{follow::Follow, undo_follow::UndoFollow}, + following::{accept::AcceptFollow, follow::Follow, undo_follow::UndoFollow}, voting::{undo_vote::UndoVote, vote::Vote}, }, tests::test_json, }; + use lemmy_utils::error::LemmyResult; #[test] - fn test_parse_smithereen_activities() { - test_json::("assets/smithereen/activities/create_note.json").unwrap(); + fn test_parse_smithereen_activities() -> LemmyResult<()> { + test_json::("assets/smithereen/activities/create_note.json")?; + Ok(()) } #[test] - fn test_parse_pleroma_activities() { - test_json::("assets/pleroma/activities/create_note.json").unwrap(); - test_json::("assets/pleroma/activities/delete.json").unwrap(); - test_json::("assets/pleroma/activities/follow.json").unwrap(); + fn test_parse_pleroma_activities() -> LemmyResult<()> { + test_json::("assets/pleroma/activities/create_note.json")?; + test_json::("assets/pleroma/activities/delete.json")?; + test_json::("assets/pleroma/activities/follow.json")?; + Ok(()) } #[test] - fn test_parse_mastodon_activities() { - test_json::("assets/mastodon/activities/create_note.json").unwrap(); - 
test_json::("assets/mastodon/activities/delete.json").unwrap(); - test_json::("assets/mastodon/activities/follow.json").unwrap(); - test_json::("assets/mastodon/activities/undo_follow.json").unwrap(); - test_json::("assets/mastodon/activities/like_page.json").unwrap(); - test_json::("assets/mastodon/activities/undo_like_page.json").unwrap(); + fn test_parse_mastodon_activities() -> LemmyResult<()> { + test_json::("assets/mastodon/activities/create_note.json")?; + test_json::("assets/mastodon/activities/delete.json")?; + test_json::("assets/mastodon/activities/follow.json")?; + test_json::("assets/mastodon/activities/undo_follow.json")?; + test_json::("assets/mastodon/activities/like_page.json")?; + test_json::("assets/mastodon/activities/undo_like_page.json")?; + test_json::("assets/mastodon/activities/flag.json")?; + Ok(()) } #[test] - fn test_parse_lotide_activities() { - test_json::("assets/lotide/activities/follow.json").unwrap(); - test_json::("assets/lotide/activities/create_page.json").unwrap(); - test_json::("assets/lotide/activities/create_page_image.json").unwrap(); - test_json::("assets/lotide/activities/create_note_reply.json").unwrap(); + fn test_parse_lotide_activities() -> LemmyResult<()> { + test_json::("assets/lotide/activities/follow.json")?; + test_json::("assets/lotide/activities/create_page.json")?; + test_json::("assets/lotide/activities/create_page_image.json")?; + test_json::("assets/lotide/activities/create_note_reply.json")?; + Ok(()) } #[test] - fn test_parse_friendica_activities() { - test_json::("assets/friendica/activities/create_page_1.json").unwrap(); - test_json::("assets/friendica/activities/create_page_2.json").unwrap(); - test_json::("assets/friendica/activities/create_note.json").unwrap(); - test_json::("assets/friendica/activities/update_note.json").unwrap(); - test_json::("assets/friendica/activities/delete.json").unwrap(); - test_json::("assets/friendica/activities/like_page.json").unwrap(); - test_json::("assets/friendica/activities/dislike_page.json").unwrap(); - test_json::("assets/friendica/activities/undo_dislike_page.json").unwrap(); + fn test_parse_friendica_activities() -> LemmyResult<()> { + test_json::("assets/friendica/activities/create_page_1.json")?; + test_json::("assets/friendica/activities/create_page_2.json")?; + test_json::("assets/friendica/activities/create_note.json")?; + test_json::("assets/friendica/activities/update_note.json")?; + test_json::("assets/friendica/activities/delete.json")?; + test_json::("assets/friendica/activities/like_page.json")?; + test_json::("assets/friendica/activities/dislike_page.json")?; + test_json::("assets/friendica/activities/undo_dislike_page.json")?; + Ok(()) } #[test] - fn test_parse_gnusocial_activities() { - test_json::("assets/gnusocial/activities/create_page.json").unwrap(); - test_json::("assets/gnusocial/activities/create_note.json").unwrap(); - test_json::("assets/gnusocial/activities/like_note.json").unwrap(); + fn test_parse_gnusocial_activities() -> LemmyResult<()> { + test_json::("assets/gnusocial/activities/create_page.json")?; + test_json::("assets/gnusocial/activities/create_note.json")?; + test_json::("assets/gnusocial/activities/like_note.json")?; + Ok(()) } #[test] - fn test_parse_peertube_activities() { - test_json::("assets/peertube/activities/announce_video.json").unwrap(); + fn test_parse_peertube_activities() -> LemmyResult<()> { + test_json::("assets/peertube/activities/announce_video.json")?; + Ok(()) + } + + #[test] + fn test_parse_mbin_activities() -> LemmyResult<()> { 
+ test_json::("assets/mbin/activities/accept.json")?; + test_json::("assets/mbin/activities/flag.json")?; + Ok(()) + } + + #[test] + fn test_parse_wordpress_activities() -> LemmyResult<()> { + test_json::("assets/wordpress/activities/announce.json")?; + Ok(()) } } diff --git a/crates/apub/src/protocol/activities/voting/mod.rs b/crates/apub/src/protocol/activities/voting/mod.rs index 26a9d01a2..fabf734f9 100644 --- a/crates/apub/src/protocol/activities/voting/mod.rs +++ b/crates/apub/src/protocol/activities/voting/mod.rs @@ -3,22 +3,19 @@ pub mod vote; #[cfg(test)] mod tests { - #![allow(clippy::unwrap_used)] - #![allow(clippy::indexing_slicing)] - use crate::protocol::{ activities::voting::{undo_vote::UndoVote, vote::Vote}, tests::test_parse_lemmy_item, }; + use lemmy_utils::error::LemmyResult; #[test] - fn test_parse_lemmy_voting() { - test_parse_lemmy_item::("assets/lemmy/activities/voting/like_note.json").unwrap(); - test_parse_lemmy_item::("assets/lemmy/activities/voting/dislike_page.json").unwrap(); + fn test_parse_lemmy_voting() -> LemmyResult<()> { + test_parse_lemmy_item::("assets/lemmy/activities/voting/like_note.json")?; + test_parse_lemmy_item::("assets/lemmy/activities/voting/dislike_page.json")?; - test_parse_lemmy_item::("assets/lemmy/activities/voting/undo_like_note.json") - .unwrap(); - test_parse_lemmy_item::("assets/lemmy/activities/voting/undo_dislike_page.json") - .unwrap(); + test_parse_lemmy_item::("assets/lemmy/activities/voting/undo_like_note.json")?; + test_parse_lemmy_item::("assets/lemmy/activities/voting/undo_dislike_page.json")?; + Ok(()) } } diff --git a/crates/apub/src/protocol/activities/voting/undo_vote.rs b/crates/apub/src/protocol/activities/voting/undo_vote.rs index 746ae68df..e9ccbd593 100644 --- a/crates/apub/src/protocol/activities/voting/undo_vote.rs +++ b/crates/apub/src/protocol/activities/voting/undo_vote.rs @@ -5,7 +5,7 @@ use crate::{ }; use activitypub_federation::{config::Data, fetch::object_id::ObjectId, kinds::activity::UndoType}; use lemmy_api_common::context::LemmyContext; -use lemmy_utils::error::LemmyError; +use lemmy_utils::error::LemmyResult; use serde::{Deserialize, Serialize}; use url::Url; @@ -22,7 +22,7 @@ pub struct UndoVote { #[async_trait::async_trait] impl InCommunity for UndoVote { - async fn community(&self, context: &Data) -> Result { + async fn community(&self, context: &Data) -> LemmyResult { let community = self.object.community(context).await?; if let Some(audience) = &self.audience { verify_community_matches(audience, community.actor_id.clone())?; diff --git a/crates/apub/src/protocol/activities/voting/vote.rs b/crates/apub/src/protocol/activities/voting/vote.rs index 0c199c729..883fc85fb 100644 --- a/crates/apub/src/protocol/activities/voting/vote.rs +++ b/crates/apub/src/protocol/activities/voting/vote.rs @@ -6,10 +6,9 @@ use crate::{ }; use activitypub_federation::{config::Data, fetch::object_id::ObjectId}; use lemmy_api_common::context::LemmyContext; -use lemmy_utils::error::{LemmyError, LemmyErrorType}; +use lemmy_utils::error::{FederationError, LemmyError, LemmyResult}; use serde::{Deserialize, Serialize}; -use std::convert::TryFrom; -use strum_macros::Display; +use strum::Display; use url::Url; #[derive(Clone, Debug, Deserialize, Serialize)] @@ -36,7 +35,7 @@ impl TryFrom for VoteType { match value { 1 => Ok(VoteType::Like), -1 => Ok(VoteType::Dislike), - _ => Err(LemmyErrorType::InvalidVoteValue.into()), + _ => Err(FederationError::InvalidVoteValue.into()), } } } @@ -52,7 +51,7 @@ impl From<&VoteType> for i16 { 
#[async_trait::async_trait] impl InCommunity for Vote { - async fn community(&self, context: &Data) -> Result { + async fn community(&self, context: &Data) -> LemmyResult { let community = self .object .dereference(context) diff --git a/crates/apub/src/protocol/collections/empty_outbox.rs b/crates/apub/src/protocol/collections/empty_outbox.rs index 3801c04e9..1e7a4c6a8 100644 --- a/crates/apub/src/protocol/collections/empty_outbox.rs +++ b/crates/apub/src/protocol/collections/empty_outbox.rs @@ -1,5 +1,5 @@ use activitypub_federation::kinds::collection::OrderedCollectionType; -use lemmy_utils::error::LemmyError; +use lemmy_utils::error::LemmyResult; use serde::{Deserialize, Serialize}; use url::Url; @@ -14,7 +14,7 @@ pub(crate) struct EmptyOutbox { } impl EmptyOutbox { - pub(crate) fn new(outbox_id: Url) -> Result { + pub(crate) fn new(outbox_id: Url) -> LemmyResult { Ok(EmptyOutbox { r#type: OrderedCollectionType::OrderedCollection, id: outbox_id, diff --git a/crates/apub/src/protocol/collections/mod.rs b/crates/apub/src/protocol/collections/mod.rs index 2362a998a..0fb24349c 100644 --- a/crates/apub/src/protocol/collections/mod.rs +++ b/crates/apub/src/protocol/collections/mod.rs @@ -6,9 +6,6 @@ pub(crate) mod group_outbox; #[cfg(test)] mod tests { - #![allow(clippy::unwrap_used)] - #![allow(clippy::indexing_slicing)] - use crate::protocol::{ collections::{ empty_outbox::EmptyOutbox, @@ -19,23 +16,24 @@ mod tests { }, tests::{test_json, test_parse_lemmy_item}, }; + use lemmy_utils::error::LemmyResult; + use pretty_assertions::assert_eq; #[test] - fn test_parse_lemmy_collections() { - test_parse_lemmy_item::("assets/lemmy/collections/group_followers.json") - .unwrap(); + fn test_parse_lemmy_collections() -> LemmyResult<()> { + test_parse_lemmy_item::("assets/lemmy/collections/group_followers.json")?; let outbox = - test_parse_lemmy_item::("assets/lemmy/collections/group_outbox.json").unwrap(); + test_parse_lemmy_item::("assets/lemmy/collections/group_outbox.json")?; assert_eq!(outbox.ordered_items.len() as i32, outbox.total_items); - test_parse_lemmy_item::("assets/lemmy/collections/group_featured_posts.json") - .unwrap(); - test_parse_lemmy_item::("assets/lemmy/collections/group_moderators.json") - .unwrap(); - test_parse_lemmy_item::("assets/lemmy/collections/person_outbox.json").unwrap(); + test_parse_lemmy_item::("assets/lemmy/collections/group_featured_posts.json")?; + test_parse_lemmy_item::("assets/lemmy/collections/group_moderators.json")?; + test_parse_lemmy_item::("assets/lemmy/collections/person_outbox.json")?; + Ok(()) } #[test] - fn test_parse_mastodon_collections() { - test_json::("assets/mastodon/collections/featured.json").unwrap(); + fn test_parse_mastodon_collections() -> LemmyResult<()> { + test_json::("assets/mastodon/collections/featured.json")?; + Ok(()) } } diff --git a/crates/apub/src/protocol/mod.rs b/crates/apub/src/protocol/mod.rs index dba21f99d..a4774ac1d 100644 --- a/crates/apub/src/protocol/mod.rs +++ b/crates/apub/src/protocol/mod.rs @@ -7,7 +7,7 @@ use activitypub_federation::{ }; use lemmy_api_common::context::LemmyContext; use lemmy_db_schema::newtypes::DbUrl; -use lemmy_utils::error::LemmyError; +use lemmy_utils::error::LemmyResult; use serde::{de::DeserializeOwned, Deserialize, Serialize}; use std::collections::HashMap; use url::Url; @@ -16,7 +16,7 @@ pub mod activities; pub(crate) mod collections; pub(crate) mod objects; -#[derive(Clone, Debug, Deserialize, Serialize)] +#[derive(Clone, Debug, Deserialize, Serialize, PartialEq)] #[serde(rename_all = 
"camelCase")] pub struct Source { pub(crate) content: String, @@ -32,7 +32,7 @@ impl Source { } } -#[derive(Clone, Debug, Deserialize, Serialize)] +#[derive(Clone, Debug, Deserialize, Serialize, PartialEq)] #[serde(rename_all = "camelCase")] pub struct ImageObject { #[serde(rename = "type")] @@ -71,10 +71,10 @@ impl IdOrNestedObject { IdOrNestedObject::NestedObject(n) => n.object_id(), } } - pub(crate) async fn object(self, context: &Data) -> Result { + pub(crate) async fn object(self, context: &Data) -> LemmyResult { match self { // TODO: move IdOrNestedObject struct to library and make fetch_object_http private - IdOrNestedObject::Id(i) => Ok(fetch_object_http(&i, context).await?), + IdOrNestedObject::Id(i) => Ok(fetch_object_http(&i, context).await?.object), IdOrNestedObject::NestedObject(o) => Ok(o), } } @@ -83,28 +83,25 @@ impl IdOrNestedObject { #[async_trait::async_trait] pub trait InCommunity { // TODO: after we use audience field and remove backwards compat, it should be possible to change - // this to simply `fn community(&self) -> Result, LemmyError>` - async fn community(&self, context: &Data) -> Result; + // this to simply `fn community(&self) -> LemmyResult>` + async fn community(&self, context: &Data) -> LemmyResult; } #[cfg(test)] pub(crate) mod tests { - #![allow(clippy::unwrap_used)] - #![allow(clippy::indexing_slicing)] - use activitypub_federation::protocol::context::WithContext; use assert_json_diff::assert_json_include; - use lemmy_utils::error::LemmyError; + use lemmy_utils::error::LemmyResult; use serde::{de::DeserializeOwned, Serialize}; use std::{collections::HashMap, fs::File, io::BufReader}; - pub(crate) fn file_to_json_object(path: &str) -> Result { + pub(crate) fn file_to_json_object(path: &str) -> LemmyResult { let file = File::open(path)?; let reader = BufReader::new(file); Ok(serde_json::from_reader(reader)?) } - pub(crate) fn test_json(path: &str) -> Result, LemmyError> { + pub(crate) fn test_json(path: &str) -> LemmyResult> { file_to_json_object::>(path) } @@ -112,7 +109,7 @@ pub(crate) mod tests { /// Ensures that there are no breaking changes in sent data. 
pub(crate) fn test_parse_lemmy_item( path: &str, - ) -> Result { + ) -> LemmyResult { // parse file as T let parsed = file_to_json_object::(path)?; diff --git a/crates/apub/src/protocol/objects/group.rs b/crates/apub/src/protocol/objects/group.rs index 8346e85b7..affafe269 100644 --- a/crates/apub/src/protocol/objects/group.rs +++ b/crates/apub/src/protocol/objects/group.rs @@ -7,7 +7,7 @@ use crate::{ community_outbox::ApubCommunityOutbox, }, local_site_data_cached, - objects::{community::ApubCommunity, read_from_string_or_source_opt}, + objects::community::ApubCommunity, protocol::{ objects::{Endpoints, LanguageTag}, ImageObject, @@ -15,23 +15,20 @@ use crate::{ }, }; use activitypub_federation::{ + config::Data, fetch::{collection_id::CollectionId, object_id::ObjectId}, kinds::actor::GroupType, protocol::{ helpers::deserialize_skip_error, public_key::PublicKey, + values::MediaTypeHtml, verification::verify_domains_match, }, }; use chrono::{DateTime, Utc}; use lemmy_api_common::{context::LemmyContext, utils::local_site_opt_to_slur_regex}; -use lemmy_db_schema::{ - newtypes::InstanceId, - source::community::{CommunityInsertForm, CommunityUpdateForm}, - utils::naive_now, -}; use lemmy_utils::{ - error::LemmyError, + error::LemmyResult, utils::slurs::{check_slurs, check_slurs_opt}, }; use serde::{Deserialize, Serialize}; @@ -40,7 +37,7 @@ use std::fmt::Debug; use url::Url; #[skip_serializing_none] -#[derive(Clone, Debug, Deserialize, Serialize)] +#[derive(Clone, Debug, Deserialize, Serialize, PartialEq)] #[serde(rename_all = "camelCase")] pub struct Group { #[serde(rename = "type")] @@ -49,14 +46,19 @@ pub struct Group { /// username, set at account creation and usually fixed after that pub(crate) preferred_username: String, pub(crate) inbox: Url, - pub(crate) followers: CollectionId, + pub(crate) followers: Option>, pub(crate) public_key: PublicKey, /// title pub(crate) name: Option, - pub(crate) summary: Option, + // sidebar + pub(crate) content: Option, #[serde(deserialize_with = "deserialize_skip_error", default)] pub(crate) source: Option, + pub(crate) media_type: Option, + // short instance description + pub(crate) summary: Option, + #[serde(deserialize_with = "deserialize_skip_error", default)] pub(crate) icon: Option, /// banner pub(crate) image: Option, @@ -79,8 +81,8 @@ impl Group { pub(crate) async fn verify( &self, expected_domain: &Url, - context: &LemmyContext, - ) -> Result<(), LemmyError> { + context: &Data, + ) -> LemmyResult<()> { check_apub_id_valid_with_strictness(self.id.inner(), true, context).await?; verify_domains_match(expected_domain, self.id.inner())?; @@ -89,68 +91,7 @@ impl Group { check_slurs(&self.preferred_username, slur_regex)?; check_slurs_opt(&self.name, slur_regex)?; - let description = read_from_string_or_source_opt(&self.summary, &None, &self.source); - check_slurs_opt(&description, slur_regex)?; + check_slurs_opt(&self.summary, slur_regex)?; Ok(()) } - - pub(crate) fn into_insert_form(self, instance_id: InstanceId) -> CommunityInsertForm { - let description = read_from_string_or_source_opt(&self.summary, &None, &self.source); - - CommunityInsertForm { - name: self.preferred_username.clone(), - title: self.name.unwrap_or(self.preferred_username.clone()), - description, - removed: None, - published: self.published, - updated: self.updated, - deleted: Some(false), - nsfw: Some(self.sensitive.unwrap_or(false)), - actor_id: Some(self.id.into()), - local: Some(false), - private_key: None, - hidden: None, - public_key: self.public_key.public_key_pem, - 
last_refreshed_at: Some(naive_now()), - icon: self.icon.map(|i| i.url.into()), - banner: self.image.map(|i| i.url.into()), - followers_url: Some(self.followers.into()), - inbox_url: Some(self.inbox.into()), - shared_inbox_url: self.endpoints.map(|e| e.shared_inbox.into()), - moderators_url: self.attributed_to.map(Into::into), - posting_restricted_to_mods: self.posting_restricted_to_mods, - instance_id, - featured_url: self.featured.map(Into::into), - } - } - - pub(crate) fn into_update_form(self) -> CommunityUpdateForm { - CommunityUpdateForm { - title: Some(self.name.unwrap_or(self.preferred_username)), - description: Some(read_from_string_or_source_opt( - &self.summary, - &None, - &self.source, - )), - removed: None, - published: self.published.map(Into::into), - updated: Some(self.updated.map(Into::into)), - deleted: None, - nsfw: Some(self.sensitive.unwrap_or(false)), - actor_id: Some(self.id.into()), - local: None, - private_key: None, - hidden: None, - public_key: Some(self.public_key.public_key_pem), - last_refreshed_at: Some(naive_now()), - icon: Some(self.icon.map(|i| i.url.into())), - banner: Some(self.image.map(|i| i.url.into())), - followers_url: Some(self.followers.into()), - inbox_url: Some(self.inbox.into()), - shared_inbox_url: Some(self.endpoints.map(|e| e.shared_inbox.into())), - moderators_url: self.attributed_to.map(Into::into), - posting_restricted_to_mods: self.posting_restricted_to_mods, - featured_url: self.featured.map(Into::into), - } - } } diff --git a/crates/apub/src/protocol/objects/instance.rs b/crates/apub/src/protocol/objects/instance.rs index 8c9944306..0eef948e7 100644 --- a/crates/apub/src/protocol/objects/instance.rs +++ b/crates/apub/src/protocol/objects/instance.rs @@ -19,8 +19,10 @@ pub struct Instance { #[serde(rename = "type")] pub(crate) kind: ApplicationType, pub(crate) id: ObjectId, - // site name + /// site name pub(crate) name: String, + /// instance domain, necessary for mastodon authorized fetch + pub(crate) preferred_username: Option, pub(crate) inbox: Url, /// mandatory field in activitypub, lemmy currently serves an empty outbox pub(crate) outbox: Url, @@ -30,15 +32,17 @@ pub struct Instance { pub(crate) content: Option, #[serde(deserialize_with = "deserialize_skip_error", default)] pub(crate) source: Option, + pub(crate) media_type: Option, // short instance description pub(crate) summary: Option, - pub(crate) media_type: Option, /// instance icon pub(crate) icon: Option, /// instance banner pub(crate) image: Option, #[serde(default)] pub(crate) language: Vec, + /// nonstandard field + pub(crate) content_warning: Option, pub(crate) published: DateTime, pub(crate) updated: Option>, } diff --git a/crates/apub/src/protocol/objects/mod.rs b/crates/apub/src/protocol/objects/mod.rs index 9a3dab185..00fe26d2b 100644 --- a/crates/apub/src/protocol/objects/mod.rs +++ b/crates/apub/src/protocol/objects/mod.rs @@ -4,7 +4,7 @@ use lemmy_db_schema::{ source::language::Language, utils::DbPool, }; -use lemmy_utils::error::LemmyError; +use lemmy_utils::error::LemmyResult; use serde::{Deserialize, Serialize}; use url::Url; @@ -16,42 +16,51 @@ pub(crate) mod page; pub(crate) mod person; pub(crate) mod tombstone; -#[derive(Clone, Debug, Deserialize, Serialize)] +#[derive(Clone, Debug, Deserialize, Serialize, PartialEq)] #[serde(rename_all = "camelCase")] pub struct Endpoints { pub shared_inbox: Url, } /// As specified in https://schema.org/Language -#[derive(Clone, Debug, Deserialize, Serialize)] +#[derive(Clone, Debug, Deserialize, Serialize, PartialEq)] 
#[serde(rename_all = "camelCase")] pub(crate) struct LanguageTag { pub(crate) identifier: String, pub(crate) name: String, } +impl Default for LanguageTag { + fn default() -> Self { + LanguageTag { + identifier: "und".to_string(), + name: "Undetermined".to_string(), + } + } +} + impl LanguageTag { pub(crate) async fn new_single( lang: LanguageId, pool: &mut DbPool<'_>, - ) -> Result, LemmyError> { + ) -> LemmyResult { let lang = Language::read_from_id(pool, lang).await?; // undetermined if lang.id == UNDETERMINED_ID { - Ok(None) + Ok(LanguageTag::default()) } else { - Ok(Some(LanguageTag { + Ok(LanguageTag { identifier: lang.code, name: lang.name, - })) + }) } } pub(crate) async fn new_multiple( lang_ids: Vec, pool: &mut DbPool<'_>, - ) -> Result, LemmyError> { + ) -> LemmyResult> { let mut langs = Vec::::new(); for l in lang_ids { @@ -69,35 +78,29 @@ impl LanguageTag { } pub(crate) async fn to_language_id_single( - lang: Option, + lang: Self, pool: &mut DbPool<'_>, - ) -> Result, LemmyError> { - let identifier = lang.map(|l| l.identifier); - let language = Language::read_id_from_code(pool, identifier.as_deref()).await?; - - Ok(language) + ) -> LemmyResult { + Ok(Language::read_id_from_code(pool, &lang.identifier).await?) } pub(crate) async fn to_language_id_multiple( langs: Vec, pool: &mut DbPool<'_>, - ) -> Result, LemmyError> { + ) -> LemmyResult> { let mut language_ids = Vec::new(); for l in langs { let id = l.identifier; - language_ids.push(Language::read_id_from_code(pool, Some(&id)).await?); + language_ids.push(Language::read_id_from_code(pool, &id).await?); } - Ok(language_ids.into_iter().flatten().collect()) + Ok(language_ids.into_iter().collect()) } } #[cfg(test)] mod tests { - #![allow(clippy::unwrap_used)] - #![allow(clippy::indexing_slicing)] - use crate::protocol::{ objects::{ chat_message::ChatMessage, @@ -110,77 +113,113 @@ mod tests { }, tests::{test_json, test_parse_lemmy_item}, }; + use lemmy_utils::error::LemmyResult; #[test] - fn test_parse_objects_lemmy() { - test_parse_lemmy_item::("assets/lemmy/objects/instance.json").unwrap(); - test_parse_lemmy_item::("assets/lemmy/objects/group.json").unwrap(); - test_parse_lemmy_item::("assets/lemmy/objects/person.json").unwrap(); - test_parse_lemmy_item::("assets/lemmy/objects/page.json").unwrap(); - test_parse_lemmy_item::("assets/lemmy/objects/note.json").unwrap(); - test_parse_lemmy_item::("assets/lemmy/objects/chat_message.json").unwrap(); - test_parse_lemmy_item::("assets/lemmy/objects/tombstone.json").unwrap(); + fn test_parse_objects_lemmy() -> LemmyResult<()> { + test_parse_lemmy_item::("assets/lemmy/objects/instance.json")?; + test_parse_lemmy_item::("assets/lemmy/objects/group.json")?; + test_parse_lemmy_item::("assets/lemmy/objects/person.json")?; + test_parse_lemmy_item::("assets/lemmy/objects/page.json")?; + test_parse_lemmy_item::("assets/lemmy/objects/note.json")?; + test_parse_lemmy_item::("assets/lemmy/objects/chat_message.json")?; + test_parse_lemmy_item::("assets/lemmy/objects/tombstone.json")?; + Ok(()) } #[test] - fn test_parse_objects_pleroma() { - test_json::("assets/pleroma/objects/person.json").unwrap(); - test_json::("assets/pleroma/objects/note.json").unwrap(); - test_json::("assets/pleroma/objects/chat_message.json").unwrap(); + fn test_parse_objects_pleroma() -> LemmyResult<()> { + test_json::("assets/pleroma/objects/person.json")?; + test_json::("assets/pleroma/objects/note.json")?; + test_json::("assets/pleroma/objects/chat_message.json")?; + Ok(()) } #[test] - fn test_parse_objects_smithereen() { 
- test_json::("assets/smithereen/objects/person.json").unwrap(); - test_json::("assets/smithereen/objects/note.json").unwrap(); + fn test_parse_objects_smithereen() -> LemmyResult<()> { + test_json::("assets/smithereen/objects/person.json")?; + test_json::("assets/smithereen/objects/note.json")?; + Ok(()) } #[test] - fn test_parse_objects_mastodon() { - test_json::("assets/mastodon/objects/person.json").unwrap(); - test_json::("assets/mastodon/objects/note.json").unwrap(); - test_json::("assets/mastodon/objects/page.json").unwrap(); + fn test_parse_objects_mastodon() -> LemmyResult<()> { + test_json::("assets/mastodon/objects/person.json")?; + test_json::("assets/mastodon/objects/note_1.json")?; + test_json::("assets/mastodon/objects/note_2.json")?; + test_json::("assets/mastodon/objects/page.json")?; + Ok(()) } #[test] - fn test_parse_objects_lotide() { - test_json::("assets/lotide/objects/group.json").unwrap(); - test_json::("assets/lotide/objects/person.json").unwrap(); - test_json::("assets/lotide/objects/note.json").unwrap(); - test_json::("assets/lotide/objects/page.json").unwrap(); - test_json::("assets/lotide/objects/tombstone.json").unwrap(); + fn test_parse_objects_lotide() -> LemmyResult<()> { + test_json::("assets/lotide/objects/group.json")?; + test_json::("assets/lotide/objects/person.json")?; + test_json::("assets/lotide/objects/note.json")?; + test_json::("assets/lotide/objects/page.json")?; + test_json::("assets/lotide/objects/tombstone.json")?; + Ok(()) } #[test] - fn test_parse_object_friendica() { - test_json::("assets/friendica/objects/person_1.json").unwrap(); - test_json::("assets/friendica/objects/person_2.json").unwrap(); - test_json::("assets/friendica/objects/page_1.json").unwrap(); - test_json::("assets/friendica/objects/page_2.json").unwrap(); - test_json::("assets/friendica/objects/note_1.json").unwrap(); - test_json::("assets/friendica/objects/note_2.json").unwrap(); + fn test_parse_object_friendica() -> LemmyResult<()> { + test_json::("assets/friendica/objects/person_1.json")?; + test_json::("assets/friendica/objects/person_2.json")?; + test_json::("assets/friendica/objects/page_1.json")?; + test_json::("assets/friendica/objects/page_2.json")?; + test_json::("assets/friendica/objects/note_1.json")?; + test_json::("assets/friendica/objects/note_2.json")?; + Ok(()) } #[test] - fn test_parse_object_gnusocial() { - test_json::("assets/gnusocial/objects/person.json").unwrap(); - test_json::("assets/gnusocial/objects/group.json").unwrap(); - test_json::("assets/gnusocial/objects/page.json").unwrap(); - test_json::("assets/gnusocial/objects/note.json").unwrap(); + fn test_parse_object_gnusocial() -> LemmyResult<()> { + test_json::("assets/gnusocial/objects/person.json")?; + test_json::("assets/gnusocial/objects/group.json")?; + test_json::("assets/gnusocial/objects/page.json")?; + test_json::("assets/gnusocial/objects/note.json")?; + Ok(()) } #[test] - fn test_parse_object_peertube() { - test_json::("assets/peertube/objects/person.json").unwrap(); - test_json::("assets/peertube/objects/group.json").unwrap(); - test_json::("assets/peertube/objects/video.json").unwrap(); - test_json::("assets/peertube/objects/note.json").unwrap(); + fn test_parse_object_peertube() -> LemmyResult<()> { + test_json::("assets/peertube/objects/person.json")?; + test_json::("assets/peertube/objects/group.json")?; + test_json::("assets/peertube/objects/video.json")?; + test_json::("assets/peertube/objects/note.json")?; + Ok(()) } #[test] - fn test_parse_object_mobilizon() { - 
test_json::("assets/mobilizon/objects/group.json").unwrap(); - test_json::("assets/mobilizon/objects/event.json").unwrap(); - test_json::("assets/mobilizon/objects/person.json").unwrap(); + fn test_parse_object_mobilizon() -> LemmyResult<()> { + test_json::("assets/mobilizon/objects/group.json")?; + test_json::("assets/mobilizon/objects/event.json")?; + test_json::("assets/mobilizon/objects/person.json")?; + Ok(()) + } + + #[test] + fn test_parse_object_discourse() -> LemmyResult<()> { + test_json::("assets/discourse/objects/group.json")?; + test_json::("assets/discourse/objects/page.json")?; + test_json::("assets/discourse/objects/person.json")?; + Ok(()) + } + + #[test] + fn test_parse_object_nodebb() -> LemmyResult<()> { + test_json::("assets/nodebb/objects/group.json")?; + test_json::("assets/nodebb/objects/page.json")?; + test_json::("assets/nodebb/objects/person.json")?; + Ok(()) + } + + #[test] + fn test_parse_object_wordpress() -> LemmyResult<()> { + test_json::("assets/wordpress/objects/group.json")?; + test_json::("assets/wordpress/objects/page.json")?; + test_json::("assets/wordpress/objects/person.json")?; + test_json::("assets/wordpress/objects/note.json")?; + Ok(()) } } diff --git a/crates/apub/src/protocol/objects/note.rs b/crates/apub/src/protocol/objects/note.rs index 259a8fcfa..21b5220f5 100644 --- a/crates/apub/src/protocol/objects/note.rs +++ b/crates/apub/src/protocol/objects/note.rs @@ -3,7 +3,11 @@ use crate::{ fetcher::post_or_comment::PostOrComment, mentions::MentionOrValue, objects::{comment::ApubComment, community::ApubCommunity, person::ApubPerson, post::ApubPost}, - protocol::{objects::LanguageTag, InCommunity, Source}, + protocol::{ + objects::{page::Attachment, LanguageTag}, + InCommunity, + Source, + }, }; use activitypub_federation::{ config::Data, @@ -20,10 +24,9 @@ use lemmy_db_schema::{ source::{community::Community, post::Post}, traits::Crud, }; -use lemmy_utils::error::LemmyError; +use lemmy_utils::{error::LemmyResult, LemmyErrorType, MAX_COMMENT_DEPTH_LIMIT}; use serde::{Deserialize, Serialize}; use serde_with::skip_serializing_none; -use std::ops::Deref; use url::Url; #[skip_serializing_none] @@ -51,16 +54,28 @@ pub struct Note { pub(crate) distinguished: Option, pub(crate) language: Option, pub(crate) audience: Option>, + #[serde(default)] + pub(crate) attachment: Vec, } impl Note { pub(crate) async fn get_parents( &self, context: &Data, - ) -> Result<(ApubPost, Option), LemmyError> { - // Fetch parent comment chain in a box, otherwise it can cause a stack overflow. - let parent = Box::pin(self.in_reply_to.dereference(context).await?); - match parent.deref() { + ) -> LemmyResult<(ApubPost, Option)> { + // We use recursion here to fetch the entire comment chain up to the top-level parent. This is + // necessary because we need to know the post and parent comment in order to insert a new + // comment. However it can also lead to stack overflow when fetching many comments recursively. + // To avoid this we check the request count against max comment depth, which based on testing + // can be handled without risking stack overflow. This is not a perfect solution, because in + // some cases we have to fetch user profiles too, and reach the limit after only 25 comments + // or so. + // A cleaner solution would be converting the recursion into a loop, but that is tricky. 
+ if context.request_count() > MAX_COMMENT_DEPTH_LIMIT as u32 { + Err(LemmyErrorType::MaxCommentDepthReached)?; + } + let parent = self.in_reply_to.dereference(context).await?; + match parent { PostOrComment::Post(p) => Ok((p.clone(), None)), PostOrComment::Comment(c) => { let post_id = c.post_id; @@ -73,7 +88,7 @@ impl Note { #[async_trait::async_trait] impl InCommunity for Note { - async fn community(&self, context: &Data) -> Result { + async fn community(&self, context: &Data) -> LemmyResult { let (post, _) = self.get_parents(context).await?; let community = Community::read(&mut context.pool(), post.community_id).await?; if let Some(audience) = &self.audience { diff --git a/crates/apub/src/protocol/objects/page.rs b/crates/apub/src/protocol/objects/page.rs index 0d4880383..3ce720bc0 100644 --- a/crates/apub/src/protocol/objects/page.rs +++ b/crates/apub/src/protocol/objects/page.rs @@ -19,9 +19,8 @@ use activitypub_federation::{ }; use chrono::{DateTime, Utc}; use itertools::Itertools; -use lemmy_api_common::context::LemmyContext; -use lemmy_db_schema::newtypes::DbUrl; -use lemmy_utils::error::{LemmyError, LemmyErrorType}; +use lemmy_api_common::{context::LemmyContext, utils::proxy_image_link}; +use lemmy_utils::error::{FederationError, LemmyError, LemmyErrorType, LemmyResult}; use serde::{de::Error, Deserialize, Deserializer, Serialize}; use serde_with::skip_serializing_none; use url::Url; @@ -43,7 +42,7 @@ pub struct Page { pub(crate) kind: PageType, pub(crate) id: ObjectId, pub(crate) attributed_to: AttributedTo, - #[serde(deserialize_with = "deserialize_one_or_many")] + #[serde(deserialize_with = "deserialize_one_or_many", default)] pub(crate) to: Vec, // If there is inReplyTo field this is actually a comment and must not be parsed #[serde(deserialize_with = "deserialize_not_present", default)] @@ -61,35 +60,42 @@ pub struct Page { #[serde(default)] pub(crate) attachment: Vec, pub(crate) image: Option, - pub(crate) comments_enabled: Option, pub(crate) sensitive: Option, pub(crate) published: Option>, pub(crate) updated: Option>, pub(crate) language: Option, pub(crate) audience: Option>, + #[serde(deserialize_with = "deserialize_skip_error", default)] + pub(crate) tag: Vec, } #[derive(Clone, Debug, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub(crate) struct Link { - pub(crate) href: Url, - pub(crate) r#type: LinkType, + href: Url, + media_type: Option, + r#type: LinkType, } #[derive(Clone, Debug, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub(crate) struct Image { #[serde(rename = "type")] - pub(crate) kind: ImageType, - pub(crate) url: Url, + kind: ImageType, + url: Url, + /// Used for alt_text + name: Option, } #[derive(Clone, Debug, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub(crate) struct Document { #[serde(rename = "type")] - pub(crate) kind: DocumentType, - pub(crate) url: Url, + kind: DocumentType, + url: Url, + media_type: Option, + /// Used for alt_text + name: Option, } #[derive(Clone, Debug, Deserialize, Serialize)] @@ -111,6 +117,32 @@ impl Attachment { Attachment::Document(d) => d.url, } } + + pub(crate) fn alt_text(self) -> Option { + match self { + Attachment::Image(i) => i.name, + Attachment::Document(d) => d.name, + _ => None, + } + } + + pub(crate) async fn as_markdown(&self, context: &Data) -> LemmyResult { + let (url, name, media_type) = match self { + Attachment::Image(i) => (i.url.clone(), i.name.clone(), Some(String::from("image"))), + Attachment::Document(d) => (d.url.clone(), d.name.clone(), 
d.media_type.clone()), + Attachment::Link(l) => (l.href.clone(), None, l.media_type.clone()), + }; + + let is_image = + media_type.is_some_and(|media| media.starts_with("video") || media.starts_with("image")); + + if is_image { + let url = proxy_image_link(url, context).await?; + Ok(format!("![{}]({url})", name.unwrap_or_default())) + } else { + Ok(format!("[{url}]({url})")) + } + } } #[derive(Clone, Debug, Deserialize, Serialize)] @@ -128,50 +160,49 @@ pub(crate) struct AttributedToPeertube { pub id: ObjectId, } +#[derive(Clone, Debug, Deserialize, Serialize)] +pub struct Hashtag { + pub(crate) href: Url, + pub(crate) name: String, + #[serde(rename = "type")] + pub(crate) kind: HashtagType, +} + +#[derive(Clone, Debug, Deserialize, Serialize)] +pub enum HashtagType { + Hashtag, +} + impl Page { - /// Only mods can change the post's locked status. So if it is changed from the default value, - /// it is a mod action and needs to be verified as such. - /// - /// Locked needs to be false on a newly created post (verified in [[CreatePost]]. - pub(crate) async fn is_mod_action( - &self, - context: &Data, - ) -> Result { - let old_post = self.id.clone().dereference_local(context).await; - Ok(Page::is_locked_changed(&old_post, &self.comments_enabled)) - } - - pub(crate) fn is_locked_changed( - old_post: &Result, - new_comments_enabled: &Option, - ) -> bool { - if let Some(new_comments_enabled) = new_comments_enabled { - if let Ok(old_post) = old_post { - return new_comments_enabled != &!old_post.locked; - } - } - - false - } - - pub(crate) fn creator(&self) -> Result, LemmyError> { + pub(crate) fn creator(&self) -> LemmyResult> { match &self.attributed_to { AttributedTo::Lemmy(l) => Ok(l.clone()), AttributedTo::Peertube(p) => p .iter() .find(|a| a.kind == PersonOrGroupType::Person) .map(|a| ObjectId::::from(a.id.clone().into_inner())) - .ok_or_else(|| LemmyErrorType::PageDoesNotSpecifyCreator.into()), + .ok_or_else(|| FederationError::PageDoesNotSpecifyCreator.into()), } } } impl Attachment { - pub(crate) fn new(url: DbUrl) -> Attachment { - Attachment::Link(Link { - href: url.into(), - r#type: Default::default(), - }) + /// Creates new attachment for a given link and mime type. 
+ pub(crate) fn new(url: Url, media_type: Option, alt_text: Option) -> Attachment { + let is_image = media_type.clone().unwrap_or_default().starts_with("image"); + if is_image { + Attachment::Image(Image { + kind: Default::default(), + url, + name: alt_text, + }) + } else { + Attachment::Link(Link { + href: url, + media_type, + r#type: Default::default(), + }) + } } } @@ -181,15 +212,17 @@ impl ActivityHandler for Page { type DataType = LemmyContext; type Error = LemmyError; fn id(&self) -> &Url { - unimplemented!() + self.id.inner() } + fn actor(&self) -> &Url { - unimplemented!() + debug_assert!(false); + self.id.inner() } - async fn verify(&self, data: &Data) -> Result<(), LemmyError> { + async fn verify(&self, data: &Data) -> LemmyResult<()> { ApubPost::verify(self, self.id.inner(), data).await } - async fn receive(self, data: &Data) -> Result<(), LemmyError> { + async fn receive(self, data: &Data) -> LemmyResult<()> { ApubPost::from_json(self, data).await?; Ok(()) } @@ -197,7 +230,11 @@ impl ActivityHandler for Page { #[async_trait::async_trait] impl InCommunity for Page { - async fn community(&self, context: &Data) -> Result { + async fn community(&self, context: &Data) -> LemmyResult { + if let Some(audience) = &self.audience { + return audience.dereference(context).await; + } + let community = match &self.attributed_to { AttributedTo::Lemmy(_) => { let mut iter = self.to.iter().merge(self.cc.iter()); @@ -208,7 +245,7 @@ impl InCommunity for Page { break c; } } else { - Err(LemmyErrorType::NoCommunityFoundInCc)? + Err(LemmyErrorType::NotFound)?; } } } @@ -216,11 +253,12 @@ impl InCommunity for Page { p.iter() .find(|a| a.kind == PersonOrGroupType::Group) .map(|a| ObjectId::::from(a.id.clone().into_inner())) - .ok_or(LemmyErrorType::PageDoesNotSpecifyGroup)? + .ok_or(LemmyErrorType::NotFound)? .dereference(context) .await? 
} }; + if let Some(audience) = &self.audience { verify_community_matches(audience, community.actor_id.clone())?; } @@ -242,9 +280,6 @@ where #[cfg(test)] mod tests { - #![allow(clippy::unwrap_used)] - #![allow(clippy::indexing_slicing)] - use crate::protocol::{objects::page::Page, tests::test_parse_lemmy_item}; #[test] diff --git a/crates/apub/src/protocol/objects/person.rs b/crates/apub/src/protocol/objects/person.rs index cc9607c7f..c8cae5541 100644 --- a/crates/apub/src/protocol/objects/person.rs +++ b/crates/apub/src/protocol/objects/person.rs @@ -38,6 +38,7 @@ pub struct Person { #[serde(deserialize_with = "deserialize_skip_error", default)] pub(crate) source: Option, /// user avatar + #[serde(deserialize_with = "deserialize_skip_error", default)] pub(crate) icon: Option, /// user banner pub(crate) image: Option, diff --git a/crates/db_perf/Cargo.toml b/crates/db_perf/Cargo.toml new file mode 100644 index 000000000..44ea8a36f --- /dev/null +++ b/crates/db_perf/Cargo.toml @@ -0,0 +1,25 @@ +[package] +name = "lemmy_db_perf" +publish = false +version.workspace = true +edition.workspace = true +description.workspace = true +license.workspace = true +homepage.workspace = true +documentation.workspace = true +repository.workspace = true + + +[lints] +workspace = true + +[dependencies] +anyhow = { workspace = true } +clap = { workspace = true } +diesel = { workspace = true } +diesel-async = { workspace = true } +lemmy_db_schema = { workspace = true } +lemmy_db_views = { workspace = true, features = ["full"] } +lemmy_utils = { workspace = true, features = ["full"] } +tokio = { workspace = true } +url = { workspace = true } diff --git a/crates/db_perf/src/main.rs b/crates/db_perf/src/main.rs new file mode 100644 index 000000000..0fa5c0549 --- /dev/null +++ b/crates/db_perf/src/main.rs @@ -0,0 +1,203 @@ +mod series; + +use crate::series::ValuesFromSeries; +use anyhow::Context; +use clap::Parser; +use diesel::{ + dsl::{self, sql}, + sql_types, + ExpressionMethods, + IntoSql, +}; +use diesel_async::{RunQueryDsl, SimpleAsyncConnection}; +use lemmy_db_schema::{ + schema::post, + source::{ + community::{Community, CommunityInsertForm}, + instance::Instance, + person::{Person, PersonInsertForm}, + site::Site, + }, + traits::Crud, + utils::{build_db_pool, get_conn, now}, + PostSortType, +}; +use lemmy_db_views::{post_view::PostQuery, structs::PaginationCursor}; +use lemmy_utils::error::{LemmyErrorExt2, LemmyResult}; +use std::num::NonZeroU32; +use url::Url; + +#[derive(Parser, Debug)] +struct CmdArgs { + #[arg(long, default_value_t = 3.try_into().unwrap())] + communities: NonZeroU32, + #[arg(long, default_value_t = 3.try_into().unwrap())] + people: NonZeroU32, + #[arg(long, default_value_t = 100000.try_into().unwrap())] + posts: NonZeroU32, + #[arg(long, default_value_t = 0)] + read_post_pages: u32, + #[arg(long)] + explain_insertions: bool, +} + +#[tokio::main] +async fn main() -> anyhow::Result<()> { + let mut result = try_main().await.into_anyhow(); + if let Ok(path) = std::env::var("PGDATA") { + result = result.with_context(|| { + format!("Failed to run lemmy_db_perf (more details might be available in {path}/log)") + }); + } + result +} + +async fn try_main() -> LemmyResult<()> { + let args = CmdArgs::parse(); + let pool = &build_db_pool().await?; + let pool = &mut pool.into(); + let conn = &mut get_conn(pool).await?; + + if args.explain_insertions { + // log_nested_statements is enabled to log trigger execution + conn + .batch_execute( + "SET auto_explain.log_min_duration = 0; SET 
auto_explain.log_nested_statements = on;", + ) + .await?; + } + + let instance = Instance::read_or_create(&mut conn.into(), "reddit.com".to_owned()).await?; + + println!("🫃 creating {} people", args.people); + let mut person_ids = vec![]; + for i in 0..args.people.get() { + let form = PersonInsertForm::test_form(instance.id, &format!("p{i}")); + person_ids.push(Person::create(&mut conn.into(), &form).await?.id); + } + + println!("🌍 creating {} communities", args.communities); + let mut community_ids = vec![]; + for i in 0..args.communities.get() { + let form = CommunityInsertForm::new( + instance.id, + format!("c{i}"), + i.to_string(), + "pubkey".to_string(), + ); + community_ids.push(Community::create(&mut conn.into(), &form).await?.id); + } + + let post_batches = args.people.get() * args.communities.get(); + let posts_per_batch = args.posts.get() / post_batches; + let num_posts = post_batches * posts_per_batch; + println!( + "📜 creating {} posts ({} featured in community)", + num_posts, post_batches + ); + let mut num_inserted_posts = 0; + // TODO: progress bar + for person_id in &person_ids { + for community_id in &community_ids { + let n = dsl::insert_into(post::table) + .values(ValuesFromSeries { + start: 1, + stop: posts_per_batch.into(), + selection: ( + "AAAAAAAAAAA".into_sql::(), + person_id.into_sql::(), + community_id.into_sql::(), + series::current_value.eq(1), + now() + - sql::("make_interval(secs => ") + .bind::(series::current_value) + .sql(")"), + ), + }) + .into_columns(( + post::name, + post::creator_id, + post::community_id, + post::featured_community, + post::published, + )) + .execute(conn) + .await?; + num_inserted_posts += n; + } + } + // Make sure the println above shows the correct amount + assert_eq!(num_inserted_posts, num_posts as usize); + + // Manually trigger and wait for a statistics update to ensure consistent and high amount of + // accuracy in the statistics used for query planning + println!("🧮 updating database statistics"); + conn.batch_execute("ANALYZE;").await?; + + // Enable auto_explain + conn + .batch_execute( + "SET auto_explain.log_min_duration = 0; SET auto_explain.log_nested_statements = off;", + ) + .await?; + + // TODO: show execution duration stats + let mut page_after = None; + for page_num in 1..=args.read_post_pages { + println!( + "👀 getting page {page_num} of posts (pagination cursor used: {})", + page_after.is_some() + ); + + // TODO: include local_user + let post_views = PostQuery { + community_id: community_ids.as_slice().first().cloned(), + sort: Some(PostSortType::New), + limit: Some(20), + page_after, + ..Default::default() + } + .list(&site()?, &mut conn.into()) + .await?; + + if let Some(post_view) = post_views.into_iter().last() { + println!("👀 getting pagination cursor data for next page"); + let cursor_data = PaginationCursor::after_post(&post_view) + .read(&mut conn.into()) + .await?; + page_after = Some(cursor_data); + } else { + println!("👀 reached empty page"); + break; + } + } + + // Delete everything, which might prevent problems if this is not run using scripts/db_perf.sh + Instance::delete(&mut conn.into(), instance.id).await?; + + if let Ok(path) = std::env::var("PGDATA") { + println!("🪵 query plans written in {path}/log"); + } + + Ok(()) +} + +fn site() -> LemmyResult { + Ok(Site { + id: Default::default(), + name: String::new(), + sidebar: None, + published: Default::default(), + updated: None, + icon: None, + banner: None, + description: None, + actor_id: Url::parse("http://example.com")?.into(), + 
last_refreshed_at: Default::default(), + inbox_url: Url::parse("http://example.com")?.into(), + private_key: None, + public_key: String::new(), + instance_id: Default::default(), + content_warning: None, + }) +} diff --git a/crates/db_perf/src/series.rs b/crates/db_perf/src/series.rs new file mode 100644 index 000000000..8efc078b1 --- /dev/null +++ b/crates/db_perf/src/series.rs @@ -0,0 +1,100 @@ +use diesel::{ + dsl, + expression::{is_aggregate, ValidGrouping}, + pg::Pg, + query_builder::{AsQuery, AstPass, QueryFragment}, + result::Error, + sql_types, + AppearsOnTable, + Expression, + Insertable, + QueryId, + SelectableExpression, +}; + +/// Gererates a series of rows for insertion. +/// +/// An inclusive range is created from `start` and `stop`. A row for each number is generated using +/// `selection`, which can be a tuple. [`current_value`] is an expression that gets the current +/// value. +/// +/// For example, if there's a `numbers` table with a `number` column, this inserts all numbers from +/// 1 to 10 in a single statement: +/// +/// ``` +/// dsl::insert_into(numbers::table) +/// .values(ValuesFromSeries { +/// start: 1, +/// stop: 10, +/// selection: series::current_value, +/// }) +/// .into_columns(numbers::number) +/// ``` +#[derive(QueryId)] +pub struct ValuesFromSeries { + pub start: i64, + pub stop: i64, + pub selection: S, +} + +impl> QueryFragment for ValuesFromSeries { + fn walk_ast<'b>(&'b self, mut out: AstPass<'_, 'b, Pg>) -> Result<(), Error> { + self.selection.walk_ast(out.reborrow())?; + out.push_sql(" FROM generate_series("); + out.push_bind_param::(&self.start)?; + out.push_sql(", "); + out.push_bind_param::(&self.stop)?; + out.push_sql(")"); + + Ok(()) + } +} + +impl Expression for ValuesFromSeries { + type SqlType = S::SqlType; +} + +impl> AppearsOnTable for ValuesFromSeries {} + +impl> SelectableExpression for ValuesFromSeries {} + +impl> Insertable for ValuesFromSeries +where + dsl::BareSelect: AsQuery + Insertable, +{ + type Values = as Insertable>::Values; + + fn values(self) -> Self::Values { + dsl::select(self).values() + } +} + +impl> ValidGrouping<()> + for ValuesFromSeries +{ + type IsAggregate = is_aggregate::No; +} + +#[expect(non_camel_case_types)] +#[derive(QueryId, Clone, Copy, Debug)] +pub struct current_value; + +impl QueryFragment for current_value { + fn walk_ast<'b>(&'b self, mut out: AstPass<'_, 'b, Pg>) -> Result<(), Error> { + out.push_identifier("generate_series")?; + + Ok(()) + } +} + +impl Expression for current_value { + type SqlType = sql_types::BigInt; +} + +impl AppearsOnTable for current_value {} + +impl SelectableExpression for current_value {} + +impl ValidGrouping<()> for current_value { + type IsAggregate = is_aggregate::No; +} diff --git a/crates/db_schema/Cargo.toml b/crates/db_schema/Cargo.toml index 3370a5594..c9b2a7930 100644 --- a/crates/db_schema/Cargo.toml +++ b/crates/db_schema/Cargo.toml @@ -13,8 +13,12 @@ name = "lemmy_db_schema" path = "src/lib.rs" doctest = false +[lints] +workspace = true + [features] full = [ + "lemmy_utils/full", "diesel", "diesel-derive-newtype", "diesel-derive-enum", @@ -23,7 +27,6 @@ full = [ "lemmy_utils", "activitypub_federation", "regex", - "once_cell", "serde_json", "diesel_ltree", "diesel-async", @@ -33,6 +36,7 @@ full = [ "tokio-postgres", "tokio-postgres-rustls", "rustls", + "i-love-jesus", ] [dependencies] @@ -41,7 +45,6 @@ serde = { workspace = true } serde_with = { workspace = true } url = { workspace = true } strum = { workspace = true } -strum_macros = { workspace = true } 
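As an aside on the `ValuesFromSeries` helper introduced in crates/db_perf/src/series.rs above: a rough, hand-written equivalent of the SQL that the doc-comment example (`start: 1, stop: 10, selection: series::current_value` against a hypothetical `numbers(number bigint)` table) is meant to render. This is only a sketch for orientation, not part of the diff; `generate_series(1, 10)` yields one row per value, and the column it produces is itself named `generate_series`, which is what `current_value` pushes as an identifier.

use diesel::{query_builder::SqlQuery, sql_query};

// Hypothetical helper, not part of this change: the raw SQL below approximates
// what the ValuesFromSeries insert statement from the doc-comment example
// walks out to (with the start/stop values inlined instead of bound).
fn numbers_from_series() -> SqlQuery {
    sql_query(
        "INSERT INTO numbers (number) \
         SELECT generate_series FROM generate_series(1, 10)",
    )
}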
serde_json = { workspace = true, optional = true } activitypub_federation = { workspace = true, optional = true } lemmy_utils = { workspace = true, optional = true } @@ -60,12 +63,10 @@ diesel-async = { workspace = true, features = [ "deadpool", ], optional = true } regex = { workspace = true, optional = true } -once_cell = { workspace = true, optional = true } diesel_ltree = { workspace = true, optional = true } -typed-builder = { workspace = true } async-trait = { workspace = true } tracing = { workspace = true } -deadpool = { version = "0.9.5", features = ["rt_tokio_1"], optional = true } +deadpool = { version = "0.12.1", features = ["rt_tokio_1"], optional = true } ts-rs = { workspace = true, optional = true } futures-util = { workspace = true } tokio = { workspace = true, optional = true } @@ -73,6 +74,11 @@ tokio-postgres = { workspace = true, optional = true } tokio-postgres-rustls = { workspace = true, optional = true } rustls = { workspace = true, optional = true } uuid = { workspace = true, features = ["v4"] } +i-love-jesus = { workspace = true, optional = true } +anyhow = { workspace = true } +moka.workspace = true +derive-new.workspace = true [dev-dependencies] serial_test = { workspace = true } +pretty_assertions = { workspace = true } diff --git a/crates/db_schema/replaceable_schema/triggers.sql b/crates/db_schema/replaceable_schema/triggers.sql new file mode 100644 index 000000000..973d3325f --- /dev/null +++ b/crates/db_schema/replaceable_schema/triggers.sql @@ -0,0 +1,615 @@ +-- A trigger is associated with a table instead of a schema, so they can't be in the `r` schema. This is +-- okay if the function specified after `EXECUTE FUNCTION` is in `r`, since dropping the function drops the trigger. +-- +-- Tables that are updated by triggers should not have foreign keys that aren't set to `INITIALLY DEFERRED` +-- (even if only other columns are updated) because triggers can run after the deletion of referenced rows and +-- before the automatic deletion of the row that references it. This is not a problem for insert or delete. +-- +-- Triggers that update multiple tables should use this order: person_aggregates, comment_aggregates, +-- post_aggregates, community_aggregates, site_aggregates +-- * The order matters because the updated rows are locked until the end of the transaction, and statements +-- in a trigger don't use separate transactions. This means that updates closer to the beginning cause +-- longer locks because the duration of each update extends the durations of the locks caused by previous +-- updates. Long locks are worse on rows that have more concurrent transactions trying to update them. The +-- listed order starts with tables that are less likely to have such rows. +-- https://www.postgresql.org/docs/16/transaction-iso.html#XACT-READ-COMMITTED +-- * Using the same order in every trigger matters because a deadlock is possible if multiple transactions +-- update the same rows in a different order. 
+-- https://www.postgresql.org/docs/current/explicit-locking.html#LOCKING-DEADLOCKS +-- +-- +-- Create triggers for both post and comments +CREATE FUNCTION r.creator_id_from_post_aggregates (agg post_aggregates) + RETURNS int IMMUTABLE PARALLEL SAFE RETURN agg.creator_id; + +CREATE FUNCTION r.creator_id_from_comment_aggregates (agg comment_aggregates) + RETURNS int IMMUTABLE PARALLEL SAFE RETURN ( + SELECT + creator_id + FROM + comment + WHERE + comment.id = agg.comment_id LIMIT 1 +); + +CREATE PROCEDURE r.post_or_comment (table_name text) +LANGUAGE plpgsql +AS $a$ +BEGIN + EXECUTE replace($b$ + -- When a thing gets a vote, update its aggregates and its creator's aggregates + CALL r.create_triggers ('thing_like', $$ + BEGIN + WITH thing_diff AS ( UPDATE + thing_aggregates AS a + SET + score = a.score + diff.upvotes - diff.downvotes, upvotes = a.upvotes + diff.upvotes, downvotes = a.downvotes + diff.downvotes, controversy_rank = r.controversy_rank ((a.upvotes + diff.upvotes)::numeric, (a.downvotes + diff.downvotes)::numeric) + FROM ( + SELECT + (thing_like).thing_id, coalesce(sum(count_diff) FILTER (WHERE (thing_like).score = 1), 0) AS upvotes, coalesce(sum(count_diff) FILTER (WHERE (thing_like).score != 1), 0) AS downvotes FROM select_old_and_new_rows AS old_and_new_rows GROUP BY (thing_like).thing_id) AS diff + WHERE + a.thing_id = diff.thing_id + AND (diff.upvotes, diff.downvotes) != (0, 0) + RETURNING + r.creator_id_from_thing_aggregates (a.*) AS creator_id, diff.upvotes - diff.downvotes AS score) + UPDATE + person_aggregates AS a + SET + thing_score = a.thing_score + diff.score FROM ( + SELECT + creator_id, sum(score) AS score FROM thing_diff GROUP BY creator_id) AS diff + WHERE + a.person_id = diff.creator_id + AND diff.score != 0; + RETURN NULL; + END; + $$); + $b$, + 'thing', + table_name); +END; +$a$; + +CALL r.post_or_comment ('post'); + +CALL r.post_or_comment ('comment'); + +-- Create triggers that update counts in parent aggregates +CREATE FUNCTION r.parent_comment_ids (path ltree) + RETURNS SETOF int + LANGUAGE sql + IMMUTABLE parallel safe +BEGIN + ATOMIC + SELECT + comment_id::int + FROM + string_to_table (ltree2text (path), '.') AS comment_id + -- Skip first and last +LIMIT (nlevel (path) - 2) OFFSET 1; + +END; + +CALL r.create_triggers ('comment', $$ +BEGIN + UPDATE + person_aggregates AS a + SET + comment_count = a.comment_count + diff.comment_count + FROM ( + SELECT + (comment).creator_id, coalesce(sum(count_diff), 0) AS comment_count + FROM select_old_and_new_rows AS old_and_new_rows + WHERE + r.is_counted (comment) + GROUP BY (comment).creator_id) AS diff +WHERE + a.person_id = diff.creator_id + AND diff.comment_count != 0; + +UPDATE + comment_aggregates AS a +SET + child_count = a.child_count + diff.child_count +FROM ( + SELECT + parent_id, + coalesce(sum(count_diff), 0) AS child_count + FROM ( + -- For each inserted or deleted comment, this outputs 1 row for each parent comment. 
+ -- For example, this: + -- + -- count_diff | (comment).path + -- ------------+---------------- + -- 1 | 0.5.6.7 + -- 1 | 0.5.6.7.8 + -- + -- becomes this: + -- + -- count_diff | parent_id + -- ------------+----------- + -- 1 | 5 + -- 1 | 6 + -- 1 | 5 + -- 1 | 6 + -- 1 | 7 + SELECT + count_diff, + parent_id + FROM + select_old_and_new_rows AS old_and_new_rows, + LATERAL r.parent_comment_ids ((comment).path) AS parent_id) AS expanded_old_and_new_rows + GROUP BY + parent_id) AS diff +WHERE + a.comment_id = diff.parent_id + AND diff.child_count != 0; + +WITH post_diff AS ( + UPDATE + post_aggregates AS a + SET + comments = a.comments + diff.comments, + newest_comment_time = GREATEST (a.newest_comment_time, diff.newest_comment_time), + newest_comment_time_necro = GREATEST (a.newest_comment_time_necro, diff.newest_comment_time_necro) + FROM ( + SELECT + post.id AS post_id, + coalesce(sum(count_diff), 0) AS comments, + -- Old rows are excluded using `count_diff = 1` + max((comment).published) FILTER (WHERE count_diff = 1) AS newest_comment_time, + max((comment).published) FILTER (WHERE count_diff = 1 + -- Ignore comments from the post's creator + AND post.creator_id != (comment).creator_id + -- Ignore comments on old posts + AND post.published > ((comment).published - '2 days'::interval)) AS newest_comment_time_necro, + r.is_counted (post.*) AS include_in_community_aggregates + FROM + select_old_and_new_rows AS old_and_new_rows + LEFT JOIN post ON post.id = (comment).post_id + WHERE + r.is_counted (comment) + GROUP BY + post.id) AS diff + WHERE + a.post_id = diff.post_id + AND (diff.comments, + GREATEST (a.newest_comment_time, diff.newest_comment_time), + GREATEST (a.newest_comment_time_necro, diff.newest_comment_time_necro)) != (0, + a.newest_comment_time, + a.newest_comment_time_necro) + RETURNING + a.community_id, + diff.comments, + diff.include_in_community_aggregates) +UPDATE + community_aggregates AS a +SET + comments = a.comments + diff.comments +FROM ( + SELECT + community_id, + sum(comments) AS comments + FROM + post_diff + WHERE + post_diff.include_in_community_aggregates + GROUP BY + community_id) AS diff +WHERE + a.community_id = diff.community_id + AND diff.comments != 0; + +UPDATE + site_aggregates AS a +SET + comments = a.comments + diff.comments +FROM ( + SELECT + coalesce(sum(count_diff), 0) AS comments + FROM + select_old_and_new_rows AS old_and_new_rows + WHERE + r.is_counted (comment) + AND (comment).local) AS diff +WHERE + diff.comments != 0; + +RETURN NULL; + +END; + +$$); + +CALL r.create_triggers ('post', $$ +BEGIN + UPDATE + person_aggregates AS a + SET + post_count = a.post_count + diff.post_count + FROM ( + SELECT + (post).creator_id, coalesce(sum(count_diff), 0) AS post_count + FROM select_old_and_new_rows AS old_and_new_rows + WHERE + r.is_counted (post) + GROUP BY (post).creator_id) AS diff +WHERE + a.person_id = diff.creator_id + AND diff.post_count != 0; + +UPDATE + community_aggregates AS a +SET + posts = a.posts + diff.posts +FROM ( + SELECT + (post).community_id, + coalesce(sum(count_diff), 0) AS posts + FROM + select_old_and_new_rows AS old_and_new_rows + WHERE + r.is_counted (post) + GROUP BY + (post).community_id) AS diff +WHERE + a.community_id = diff.community_id + AND diff.posts != 0; + +UPDATE + site_aggregates AS a +SET + posts = a.posts + diff.posts +FROM ( + SELECT + coalesce(sum(count_diff), 0) AS posts + FROM + select_old_and_new_rows AS old_and_new_rows + WHERE + r.is_counted (post) + AND (post).local) AS diff +WHERE + diff.posts != 0; + +RETURN 
NULL; + +END; + +$$); + +CALL r.create_triggers ('community', $$ +BEGIN + UPDATE + site_aggregates AS a + SET + communities = a.communities + diff.communities + FROM ( + SELECT + coalesce(sum(count_diff), 0) AS communities + FROM select_old_and_new_rows AS old_and_new_rows + WHERE + r.is_counted (community) + AND (community).local) AS diff +WHERE + diff.communities != 0; + +RETURN NULL; + +END; + +$$); + +CALL r.create_triggers ('person', $$ +BEGIN + UPDATE + site_aggregates AS a + SET + users = a.users + diff.users + FROM ( + SELECT + coalesce(sum(count_diff), 0) AS users + FROM select_old_and_new_rows AS old_and_new_rows + WHERE (person).local) AS diff +WHERE + diff.users != 0; + +RETURN NULL; + +END; + +$$); + +-- For community_aggregates.comments, don't include comments of deleted or removed posts +CREATE FUNCTION r.update_comment_count_from_post () + RETURNS TRIGGER + LANGUAGE plpgsql + AS $$ +BEGIN + UPDATE + community_aggregates AS a + SET + comments = a.comments + diff.comments + FROM ( + SELECT + old_post.community_id, + sum(( + CASE WHEN r.is_counted (new_post.*) THEN + 1 + ELSE + -1 + END) * post_aggregates.comments) AS comments + FROM + new_post + INNER JOIN old_post ON new_post.id = old_post.id + AND (r.is_counted (new_post.*) != r.is_counted (old_post.*)) + INNER JOIN post_aggregates ON post_aggregates.post_id = new_post.id + GROUP BY + old_post.community_id) AS diff +WHERE + a.community_id = diff.community_id + AND diff.comments != 0; + RETURN NULL; +END; +$$; + +CREATE TRIGGER comment_count + AFTER UPDATE ON post REFERENCING OLD TABLE AS old_post NEW TABLE AS new_post + FOR EACH STATEMENT + EXECUTE FUNCTION r.update_comment_count_from_post (); + +-- Count subscribers for communities. +-- subscribers should be updated only when a local community is followed by a local or remote person. +-- subscribers_local should be updated only when a local person follows a local or remote community. +CALL r.create_triggers ('community_follower', $$ +BEGIN + UPDATE + community_aggregates AS a + SET + subscribers = a.subscribers + diff.subscribers, subscribers_local = a.subscribers_local + diff.subscribers_local + FROM ( + SELECT + (community_follower).community_id, coalesce(sum(count_diff) FILTER (WHERE community.local), 0) AS subscribers, coalesce(sum(count_diff) FILTER (WHERE person.local), 0) AS subscribers_local + FROM select_old_and_new_rows AS old_and_new_rows + LEFT JOIN community ON community.id = (community_follower).community_id + LEFT JOIN person ON person.id = (community_follower).person_id GROUP BY (community_follower).community_id) AS diff +WHERE + a.community_id = diff.community_id + AND (diff.subscribers, diff.subscribers_local) != (0, 0); + +RETURN NULL; + +END; + +$$); + +-- These triggers create and update rows in each aggregates table to match its associated table's rows. +-- Deleting rows and updating IDs are already handled by `CASCADE` in foreign key constraints. 
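To make the comment above concrete, here is a minimal sketch of how these row-creation triggers are exercised from the Rust side, borrowing only helpers that this diff's own tests already use (`PostInsertForm::new`, `Post::create`, `PostAggregates::read`). The function name and the "trigger demo" title are illustrative, not part of the change: inserting through the normal Crud API is enough for a matching post_aggregates row to exist, with no direct insert into that table.

use diesel::result::Error;
use lemmy_db_schema::{
    aggregates::post_aggregates::PostAggregates,
    newtypes::{CommunityId, PersonId},
    source::post::{Post, PostInsertForm},
    traits::Crud,
    utils::DbPool,
};

// Hypothetical sketch, mirroring the aggregates tests later in this diff.
async fn aggregates_row_is_created(
    pool: &mut DbPool<'_>,
    creator_id: PersonId,
    community_id: CommunityId,
) -> Result<(), Error> {
    // Insert a post through the normal Crud API.
    let form = PostInsertForm::new("trigger demo".into(), creator_id, community_id);
    let post = Post::create(pool, &form).await?;
    // The statement-level trigger (r.post_aggregates_from_post) has already
    // created the matching aggregates row, so it can be read back directly.
    let aggregates = PostAggregates::read(pool, post.id).await?;
    assert_eq!(0, aggregates.comments);
    Ok(())
}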
+CREATE FUNCTION r.comment_aggregates_from_comment () + RETURNS TRIGGER + LANGUAGE plpgsql + AS $$ +BEGIN + INSERT INTO comment_aggregates (comment_id, published) + SELECT + id, + published + FROM + new_comment; + RETURN NULL; +END; +$$; + +CREATE TRIGGER aggregates + AFTER INSERT ON comment REFERENCING NEW TABLE AS new_comment + FOR EACH STATEMENT + EXECUTE FUNCTION r.comment_aggregates_from_comment (); + +CREATE FUNCTION r.community_aggregates_from_community () + RETURNS TRIGGER + LANGUAGE plpgsql + AS $$ +BEGIN + INSERT INTO community_aggregates (community_id, published) + SELECT + id, + published + FROM + new_community; + RETURN NULL; +END; +$$; + +CREATE TRIGGER aggregates + AFTER INSERT ON community REFERENCING NEW TABLE AS new_community + FOR EACH STATEMENT + EXECUTE FUNCTION r.community_aggregates_from_community (); + +CREATE FUNCTION r.person_aggregates_from_person () + RETURNS TRIGGER + LANGUAGE plpgsql + AS $$ +BEGIN + INSERT INTO person_aggregates (person_id) + SELECT + id + FROM + new_person; + RETURN NULL; +END; +$$; + +CREATE TRIGGER aggregates + AFTER INSERT ON person REFERENCING NEW TABLE AS new_person + FOR EACH STATEMENT + EXECUTE FUNCTION r.person_aggregates_from_person (); + +CREATE FUNCTION r.post_aggregates_from_post () + RETURNS TRIGGER + LANGUAGE plpgsql + AS $$ +BEGIN + INSERT INTO post_aggregates (post_id, published, newest_comment_time, newest_comment_time_necro, community_id, creator_id, instance_id, featured_community, featured_local) + SELECT + new_post.id, + new_post.published, + new_post.published, + new_post.published, + new_post.community_id, + new_post.creator_id, + community.instance_id, + new_post.featured_community, + new_post.featured_local + FROM + new_post + INNER JOIN community ON community.id = new_post.community_id; + RETURN NULL; +END; +$$; + +CREATE TRIGGER aggregates + AFTER INSERT ON post REFERENCING NEW TABLE AS new_post + FOR EACH STATEMENT + EXECUTE FUNCTION r.post_aggregates_from_post (); + +CREATE FUNCTION r.post_aggregates_from_post_update () + RETURNS TRIGGER + LANGUAGE plpgsql + AS $$ +BEGIN + UPDATE + post_aggregates + SET + featured_community = new_post.featured_community, + featured_local = new_post.featured_local + FROM + new_post + INNER JOIN old_post ON old_post.id = new_post.id + AND (old_post.featured_community, + old_post.featured_local) != (new_post.featured_community, + new_post.featured_local) + WHERE + post_aggregates.post_id = new_post.id; + RETURN NULL; +END; +$$; + +CREATE TRIGGER aggregates_update + AFTER UPDATE ON post REFERENCING OLD TABLE AS old_post NEW TABLE AS new_post + FOR EACH STATEMENT + EXECUTE FUNCTION r.post_aggregates_from_post_update (); + +CREATE FUNCTION r.site_aggregates_from_site () + RETURNS TRIGGER + LANGUAGE plpgsql + AS $$ +BEGIN + -- only 1 row can be in site_aggregates because of the index idx_site_aggregates_1_row_only. + -- we only ever want to have a single value in site_aggregate because the site_aggregate triggers update all rows in that table. 
+ -- a cleaner check would be to insert it for the local_site but that would break assumptions at least in the tests + INSERT INTO site_aggregates (site_id) + VALUES (NEW.id) + ON CONFLICT ((TRUE)) + DO NOTHING; + RETURN NULL; +END; +$$; + +CREATE TRIGGER aggregates + AFTER INSERT ON site + FOR EACH ROW + EXECUTE FUNCTION r.site_aggregates_from_site (); + +-- Change the order of some cascading deletions to make deletion triggers run before the deletion of rows that the triggers need to read +CREATE FUNCTION r.delete_comments_before_post () + RETURNS TRIGGER + LANGUAGE plpgsql + AS $$ +BEGIN + DELETE FROM comment AS c + WHERE c.post_id = OLD.id; + RETURN OLD; +END; +$$; + +CREATE TRIGGER delete_comments + BEFORE DELETE ON post + FOR EACH ROW + EXECUTE FUNCTION r.delete_comments_before_post (); + +CREATE FUNCTION r.delete_follow_before_person () + RETURNS TRIGGER + LANGUAGE plpgsql + AS $$ +BEGIN + DELETE FROM community_follower AS c + WHERE c.person_id = OLD.id; + RETURN OLD; +END; +$$; + +CREATE TRIGGER delete_follow + BEFORE DELETE ON person + FOR EACH ROW + EXECUTE FUNCTION r.delete_follow_before_person (); + +-- Triggers that change values before insert or update +CREATE FUNCTION r.comment_change_values () + RETURNS TRIGGER + LANGUAGE plpgsql + AS $$ +DECLARE + id text = NEW.id::text; +BEGIN + -- Make `path` end with `id` if it doesn't already + IF NOT (NEW.path ~ ('*.' || id)::lquery) THEN + NEW.path = NEW.path || id; + END IF; + -- Set local ap_id + IF NEW.local THEN + NEW.ap_id = coalesce(NEW.ap_id, r.local_url ('/comment/' || id)); + END IF; + RETURN NEW; +END +$$; + +CREATE TRIGGER change_values + BEFORE INSERT OR UPDATE ON comment + FOR EACH ROW + EXECUTE FUNCTION r.comment_change_values (); + +CREATE FUNCTION r.post_change_values () + RETURNS TRIGGER + LANGUAGE plpgsql + AS $$ +BEGIN + -- Set local ap_id + IF NEW.local THEN + NEW.ap_id = coalesce(NEW.ap_id, r.local_url ('/post/' || NEW.id::text)); + END IF; + RETURN NEW; +END +$$; + +CREATE TRIGGER change_values + BEFORE INSERT ON post + FOR EACH ROW + EXECUTE FUNCTION r.post_change_values (); + +CREATE FUNCTION r.private_message_change_values () + RETURNS TRIGGER + LANGUAGE plpgsql + AS $$ +BEGIN + -- Set local ap_id + IF NEW.local THEN + NEW.ap_id = coalesce(NEW.ap_id, r.local_url ('/private_message/' || NEW.id::text)); + END IF; + RETURN NEW; +END +$$; + +CREATE TRIGGER change_values + BEFORE INSERT ON private_message + FOR EACH ROW + EXECUTE FUNCTION r.private_message_change_values (); + diff --git a/crates/db_schema/replaceable_schema/utils.sql b/crates/db_schema/replaceable_schema/utils.sql new file mode 100644 index 000000000..c766d25f2 --- /dev/null +++ b/crates/db_schema/replaceable_schema/utils.sql @@ -0,0 +1,153 @@ +-- Each calculation used in triggers should be a single SQL language +-- expression so it can be inlined in migrations. +CREATE FUNCTION r.controversy_rank (upvotes numeric, downvotes numeric) + RETURNS float + LANGUAGE sql + IMMUTABLE PARALLEL SAFE RETURN CASE WHEN downvotes <= 0 + OR upvotes <= 0 THEN + 0 + ELSE + ( + upvotes + downvotes) ^ CASE WHEN upvotes > downvotes THEN + downvotes::float / upvotes::float + ELSE + upvotes::float / downvotes::float + END + END; + +CREATE FUNCTION r.hot_rank (score numeric, published timestamp with time zone) + RETURNS double precision + LANGUAGE sql + IMMUTABLE PARALLEL SAFE RETURN + -- after a week, it will default to 0. 
+ CASE WHEN ( +now() - published) > '0 days' + AND ( +now() - published) < '7 days' THEN + -- Use greatest(2,score), so that the hot_rank will be positive and not ignored. + log ( + greatest (2, score + 2)) / power (((EXTRACT(EPOCH FROM (now() - published)) / 3600) + 2), 1.8) + ELSE + -- if the post is from the future, set hot score to 0. otherwise you can game the post to + -- always be on top even with only 1 vote by setting it to the future + 0.0 + END; + +CREATE FUNCTION r.scaled_rank (score numeric, published timestamp with time zone, users_active_month numeric) + RETURNS double precision + LANGUAGE sql + IMMUTABLE PARALLEL SAFE + -- Add 2 to avoid divide by zero errors + -- Default for score = 1, active users = 1, and now, is (0.1728 / log(2 + 1)) = 0.3621 + -- There may need to be a scale factor multiplied to users_active_month, to make + -- the log curve less pronounced. This can be tuned in the future. + RETURN ( + r.hot_rank (score, published) / log(2 + users_active_month) +); + +-- For tables with `deleted` and `removed` columns, this function determines which rows to include in a count. +CREATE FUNCTION r.is_counted (item record) + RETURNS bool + LANGUAGE plpgsql + IMMUTABLE PARALLEL SAFE + AS $$ +BEGIN + RETURN COALESCE(NOT (item.deleted + OR item.removed), FALSE); +END; +$$; + +CREATE FUNCTION r.local_url (url_path text) + RETURNS text + LANGUAGE sql + STABLE PARALLEL SAFE RETURN ( +current_setting('lemmy.protocol_and_hostname') || url_path +); + +-- This function creates statement-level triggers for all operation types. It's designed this way +-- because of these limitations: +-- * A trigger that uses transition tables can only handle 1 operation type. +-- * Transition tables must be relevant for the operation type (for example, `NEW TABLE` is +-- not allowed for a `DELETE` trigger) +-- * Transition tables are only provided to the trigger function, not to functions that it calls. +-- +-- This function can only be called once per table. The trigger function body given as the 2nd argument +-- and can contain these names, which are replaced with a `SELECT` statement in parenthesis if needed: +-- * `select_old_rows` +-- * `select_new_rows` +-- * `select_old_and_new_rows` with 2 columns: +-- 1. `count_diff`: `-1` for old rows and `1` for new rows, which can be used with `sum` to get the number +-- to add to a count +-- 2. 
(same name as the trigger's table): the old or new row as a composite value +CREATE PROCEDURE r.create_triggers (table_name text, function_body text) +LANGUAGE plpgsql +AS $a$ +DECLARE + defs text := $$ + -- Delete + CREATE FUNCTION r.thing_delete_statement () + RETURNS TRIGGER + LANGUAGE plpgsql + AS function_body_delete; + CREATE TRIGGER delete_statement + AFTER DELETE ON thing REFERENCING OLD TABLE AS select_old_rows + FOR EACH STATEMENT + EXECUTE FUNCTION r.thing_delete_statement ( ); + -- Insert + CREATE FUNCTION r.thing_insert_statement ( ) + RETURNS TRIGGER + LANGUAGE plpgsql + AS function_body_insert; + CREATE TRIGGER insert_statement + AFTER INSERT ON thing REFERENCING NEW TABLE AS select_new_rows + FOR EACH STATEMENT + EXECUTE FUNCTION r.thing_insert_statement ( ); + -- Update + CREATE FUNCTION r.thing_update_statement ( ) + RETURNS TRIGGER + LANGUAGE plpgsql + AS function_body_update; + CREATE TRIGGER update_statement + AFTER UPDATE ON thing REFERENCING OLD TABLE AS select_old_rows NEW TABLE AS select_new_rows + FOR EACH STATEMENT + EXECUTE FUNCTION r.thing_update_statement ( ); + $$; + select_old_and_new_rows text := $$ ( + SELECT + -1 AS count_diff, + old_table::thing AS thing + FROM + select_old_rows AS old_table + UNION ALL + SELECT + 1 AS count_diff, + new_table::thing AS thing + FROM + select_new_rows AS new_table) $$; + empty_select_new_rows text := $$ ( + SELECT + * + FROM + -- Real transition table + select_old_rows + WHERE + FALSE) $$; + empty_select_old_rows text := $$ ( + SELECT + * + FROM + -- Real transition table + select_new_rows + WHERE + FALSE) $$; + BEGIN + function_body := replace(function_body, 'select_old_and_new_rows', select_old_and_new_rows); + -- `select_old_rows` and `select_new_rows` are made available as empty tables if they don't already exist + defs := replace(defs, 'function_body_delete', quote_literal(replace(function_body, 'select_new_rows', empty_select_new_rows))); + defs := replace(defs, 'function_body_insert', quote_literal(replace(function_body, 'select_old_rows', empty_select_old_rows))); + defs := replace(defs, 'function_body_update', quote_literal(function_body)); + defs := replace(defs, 'thing', table_name); + EXECUTE defs; +END; +$a$; + diff --git a/crates/db_schema/src/aggregates/comment_aggregates.rs b/crates/db_schema/src/aggregates/comment_aggregates.rs index e081d1a1e..a97bb565b 100644 --- a/crates/db_schema/src/aggregates/comment_aggregates.rs +++ b/crates/db_schema/src/aggregates/comment_aggregates.rs @@ -10,10 +10,7 @@ use diesel_async::RunQueryDsl; impl CommentAggregates { pub async fn read(pool: &mut DbPool<'_>, comment_id: CommentId) -> Result { let conn = &mut get_conn(pool).await?; - comment_aggregates::table - .filter(comment_aggregates::comment_id.eq(comment_id)) - .first::(conn) - .await + comment_aggregates::table.find(comment_id).first(conn).await } pub async fn update_hot_rank( @@ -22,8 +19,7 @@ impl CommentAggregates { ) -> Result { let conn = &mut get_conn(pool).await?; - diesel::update(comment_aggregates::table) - .filter(comment_aggregates::comment_id.eq(comment_id)) + diesel::update(comment_aggregates::table.find(comment_id)) .set(comment_aggregates::hot_rank.eq(hot_rank( comment_aggregates::score, comment_aggregates::published, @@ -35,8 +31,6 @@ impl CommentAggregates { #[cfg(test)] mod tests { - #![allow(clippy::unwrap_used)] - #![allow(clippy::indexing_slicing)] use crate::{ aggregates::comment_aggregates::CommentAggregates, @@ -50,82 +44,65 @@ mod tests { traits::{Crud, Likeable}, 
utils::build_db_pool_for_tests, }; + use diesel::result::Error; + use pretty_assertions::assert_eq; use serial_test::serial; #[tokio::test] #[serial] - async fn test_crud() { + async fn test_crud() -> Result<(), Error> { let pool = &build_db_pool_for_tests().await; let pool = &mut pool.into(); - let inserted_instance = Instance::read_or_create(pool, "my_domain.tld".to_string()) - .await - .unwrap(); + let inserted_instance = Instance::read_or_create(pool, "my_domain.tld".to_string()).await?; - let new_person = PersonInsertForm::builder() - .name("thommy_comment_agg".into()) - .public_key("pubkey".into()) - .instance_id(inserted_instance.id) - .build(); + let new_person = PersonInsertForm::test_form(inserted_instance.id, "thommy_comment_agg"); - let inserted_person = Person::create(pool, &new_person).await.unwrap(); + let inserted_person = Person::create(pool, &new_person).await?; - let another_person = PersonInsertForm::builder() - .name("jerry_comment_agg".into()) - .public_key("pubkey".into()) - .instance_id(inserted_instance.id) - .build(); + let another_person = PersonInsertForm::test_form(inserted_instance.id, "jerry_comment_agg"); - let another_inserted_person = Person::create(pool, &another_person).await.unwrap(); + let another_inserted_person = Person::create(pool, &another_person).await?; - let new_community = CommunityInsertForm::builder() - .name("TIL_comment_agg".into()) - .title("nada".to_owned()) - .public_key("pubkey".to_string()) - .instance_id(inserted_instance.id) - .build(); + let new_community = CommunityInsertForm::new( + inserted_instance.id, + "TIL_comment_agg".into(), + "nada".to_owned(), + "pubkey".to_string(), + ); + let inserted_community = Community::create(pool, &new_community).await?; - let inserted_community = Community::create(pool, &new_community).await.unwrap(); + let new_post = PostInsertForm::new( + "A test post".into(), + inserted_person.id, + inserted_community.id, + ); + let inserted_post = Post::create(pool, &new_post).await?; - let new_post = PostInsertForm::builder() - .name("A test post".into()) - .creator_id(inserted_person.id) - .community_id(inserted_community.id) - .build(); - - let inserted_post = Post::create(pool, &new_post).await.unwrap(); - - let comment_form = CommentInsertForm::builder() - .content("A test comment".into()) - .creator_id(inserted_person.id) - .post_id(inserted_post.id) - .build(); - - let inserted_comment = Comment::create(pool, &comment_form, None).await.unwrap(); - - let child_comment_form = CommentInsertForm::builder() - .content("A test comment".into()) - .creator_id(inserted_person.id) - .post_id(inserted_post.id) - .build(); + let comment_form = CommentInsertForm::new( + inserted_person.id, + inserted_post.id, + "A test comment".into(), + ); + let inserted_comment = Comment::create(pool, &comment_form, None).await?; + let child_comment_form = CommentInsertForm::new( + inserted_person.id, + inserted_post.id, + "A test comment".into(), + ); let _inserted_child_comment = - Comment::create(pool, &child_comment_form, Some(&inserted_comment.path)) - .await - .unwrap(); + Comment::create(pool, &child_comment_form, Some(&inserted_comment.path)).await?; let comment_like = CommentLikeForm { comment_id: inserted_comment.id, - post_id: inserted_post.id, person_id: inserted_person.id, score: 1, }; - CommentLike::like(pool, &comment_like).await.unwrap(); + CommentLike::like(pool, &comment_like).await?; - let comment_aggs_before_delete = CommentAggregates::read(pool, inserted_comment.id) - .await - .unwrap(); + let 
comment_aggs_before_delete = CommentAggregates::read(pool, inserted_comment.id).await?; assert_eq!(1, comment_aggs_before_delete.score); assert_eq!(1, comment_aggs_before_delete.upvotes); @@ -134,52 +111,43 @@ mod tests { // Add a post dislike from the other person let comment_dislike = CommentLikeForm { comment_id: inserted_comment.id, - post_id: inserted_post.id, person_id: another_inserted_person.id, score: -1, }; - CommentLike::like(pool, &comment_dislike).await.unwrap(); + CommentLike::like(pool, &comment_dislike).await?; - let comment_aggs_after_dislike = CommentAggregates::read(pool, inserted_comment.id) - .await - .unwrap(); + let comment_aggs_after_dislike = CommentAggregates::read(pool, inserted_comment.id).await?; assert_eq!(0, comment_aggs_after_dislike.score); assert_eq!(1, comment_aggs_after_dislike.upvotes); assert_eq!(1, comment_aggs_after_dislike.downvotes); // Remove the first comment like - CommentLike::remove(pool, inserted_person.id, inserted_comment.id) - .await - .unwrap(); - let after_like_remove = CommentAggregates::read(pool, inserted_comment.id) - .await - .unwrap(); + CommentLike::remove(pool, inserted_person.id, inserted_comment.id).await?; + let after_like_remove = CommentAggregates::read(pool, inserted_comment.id).await?; assert_eq!(-1, after_like_remove.score); assert_eq!(0, after_like_remove.upvotes); assert_eq!(1, after_like_remove.downvotes); // Remove the parent post - Post::delete(pool, inserted_post.id).await.unwrap(); + Post::delete(pool, inserted_post.id).await?; // Should be none found, since the post was deleted let after_delete = CommentAggregates::read(pool, inserted_comment.id).await; assert!(after_delete.is_err()); // This should delete all the associated rows, and fire triggers - Person::delete(pool, another_inserted_person.id) - .await - .unwrap(); - let person_num_deleted = Person::delete(pool, inserted_person.id).await.unwrap(); + Person::delete(pool, another_inserted_person.id).await?; + let person_num_deleted = Person::delete(pool, inserted_person.id).await?; assert_eq!(1, person_num_deleted); // Delete the community - let community_num_deleted = Community::delete(pool, inserted_community.id) - .await - .unwrap(); + let community_num_deleted = Community::delete(pool, inserted_community.id).await?; assert_eq!(1, community_num_deleted); - Instance::delete(pool, inserted_instance.id).await.unwrap(); + Instance::delete(pool, inserted_instance.id).await?; + + Ok(()) } } diff --git a/crates/db_schema/src/aggregates/community_aggregates.rs b/crates/db_schema/src/aggregates/community_aggregates.rs index e968fdb08..0359d8632 100644 --- a/crates/db_schema/src/aggregates/community_aggregates.rs +++ b/crates/db_schema/src/aggregates/community_aggregates.rs @@ -1,10 +1,7 @@ use crate::{ aggregates::structs::CommunityAggregates, newtypes::CommunityId, - schema::{ - community_aggregates, - community_aggregates::{community_id, subscribers}, - }, + schema::{community_aggregates, community_aggregates::subscribers}, utils::{get_conn, DbPool}, }; use diesel::{result::Error, ExpressionMethods, QueryDsl}; @@ -14,8 +11,8 @@ impl CommunityAggregates { pub async fn read(pool: &mut DbPool<'_>, for_community_id: CommunityId) -> Result { let conn = &mut get_conn(pool).await?; community_aggregates::table - .filter(community_id.eq(for_community_id)) - .first::(conn) + .find(for_community_id) + .first(conn) .await } @@ -26,17 +23,15 @@ impl CommunityAggregates { ) -> Result { let conn = &mut get_conn(pool).await?; let new_subscribers: i64 = new_subscribers.into(); - 
diesel::update(community_aggregates::table.filter(community_id.eq(for_community_id))) + diesel::update(community_aggregates::table.find(for_community_id)) .set(subscribers.eq(new_subscribers)) - .get_result::(conn) + .get_result(conn) .await } } #[cfg(test)] mod tests { - #![allow(clippy::unwrap_used)] - #![allow(clippy::indexing_slicing)] use crate::{ aggregates::community_aggregates::CommunityAggregates, @@ -50,51 +45,41 @@ mod tests { traits::{Crud, Followable}, utils::build_db_pool_for_tests, }; + use diesel::result::Error; + use pretty_assertions::assert_eq; use serial_test::serial; #[tokio::test] #[serial] - async fn test_crud() { + async fn test_crud() -> Result<(), Error> { let pool = &build_db_pool_for_tests().await; let pool = &mut pool.into(); - let inserted_instance = Instance::read_or_create(pool, "my_domain.tld".to_string()) - .await - .unwrap(); + let inserted_instance = Instance::read_or_create(pool, "my_domain.tld".to_string()).await?; - let new_person = PersonInsertForm::builder() - .name("thommy_community_agg".into()) - .public_key("pubkey".to_string()) - .instance_id(inserted_instance.id) - .build(); + let new_person = PersonInsertForm::test_form(inserted_instance.id, "thommy_community_agg"); - let inserted_person = Person::create(pool, &new_person).await.unwrap(); + let inserted_person = Person::create(pool, &new_person).await?; - let another_person = PersonInsertForm::builder() - .name("jerry_community_agg".into()) - .public_key("pubkey".to_string()) - .instance_id(inserted_instance.id) - .build(); + let another_person = PersonInsertForm::test_form(inserted_instance.id, "jerry_community_agg"); - let another_inserted_person = Person::create(pool, &another_person).await.unwrap(); + let another_inserted_person = Person::create(pool, &another_person).await?; - let new_community = CommunityInsertForm::builder() - .name("TIL_community_agg".into()) - .title("nada".to_owned()) - .public_key("pubkey".to_string()) - .instance_id(inserted_instance.id) - .build(); + let new_community = CommunityInsertForm::new( + inserted_instance.id, + "TIL_community_agg".into(), + "nada".to_owned(), + "pubkey".to_string(), + ); + let inserted_community = Community::create(pool, &new_community).await?; - let inserted_community = Community::create(pool, &new_community).await.unwrap(); - - let another_community = CommunityInsertForm::builder() - .name("TIL_community_agg_2".into()) - .title("nada".to_owned()) - .public_key("pubkey".to_string()) - .instance_id(inserted_instance.id) - .build(); - - let another_inserted_community = Community::create(pool, &another_community).await.unwrap(); + let another_community = CommunityInsertForm::new( + inserted_instance.id, + "TIL_community_agg_2".into(), + "nada".to_owned(), + "pubkey".to_string(), + ); + let another_inserted_community = Community::create(pool, &another_community).await?; let first_person_follow = CommunityFollowerForm { community_id: inserted_community.id, @@ -102,9 +87,7 @@ mod tests { pending: false, }; - CommunityFollower::follow(pool, &first_person_follow) - .await - .unwrap(); + CommunityFollower::follow(pool, &first_person_follow).await?; let second_person_follow = CommunityFollowerForm { community_id: inserted_community.id, @@ -112,9 +95,7 @@ mod tests { pending: false, }; - CommunityFollower::follow(pool, &second_person_follow) - .await - .unwrap(); + CommunityFollower::follow(pool, &second_person_follow).await?; let another_community_follow = CommunityFollowerForm { community_id: another_inserted_community.id, @@ -122,105 +103,86 @@ 
mod tests { pending: false, }; - CommunityFollower::follow(pool, &another_community_follow) - .await - .unwrap(); + CommunityFollower::follow(pool, &another_community_follow).await?; - let new_post = PostInsertForm::builder() - .name("A test post".into()) - .creator_id(inserted_person.id) - .community_id(inserted_community.id) - .build(); + let new_post = PostInsertForm::new( + "A test post".into(), + inserted_person.id, + inserted_community.id, + ); + let inserted_post = Post::create(pool, &new_post).await?; - let inserted_post = Post::create(pool, &new_post).await.unwrap(); - - let comment_form = CommentInsertForm::builder() - .content("A test comment".into()) - .creator_id(inserted_person.id) - .post_id(inserted_post.id) - .build(); - - let inserted_comment = Comment::create(pool, &comment_form, None).await.unwrap(); - - let child_comment_form = CommentInsertForm::builder() - .content("A test comment".into()) - .creator_id(inserted_person.id) - .post_id(inserted_post.id) - .build(); + let comment_form = CommentInsertForm::new( + inserted_person.id, + inserted_post.id, + "A test comment".into(), + ); + let inserted_comment = Comment::create(pool, &comment_form, None).await?; + let child_comment_form = CommentInsertForm::new( + inserted_person.id, + inserted_post.id, + "A test comment".into(), + ); let _inserted_child_comment = - Comment::create(pool, &child_comment_form, Some(&inserted_comment.path)) - .await - .unwrap(); + Comment::create(pool, &child_comment_form, Some(&inserted_comment.path)).await?; - let community_aggregates_before_delete = CommunityAggregates::read(pool, inserted_community.id) - .await - .unwrap(); + let community_aggregates_before_delete = + CommunityAggregates::read(pool, inserted_community.id).await?; assert_eq!(2, community_aggregates_before_delete.subscribers); + assert_eq!(2, community_aggregates_before_delete.subscribers_local); assert_eq!(1, community_aggregates_before_delete.posts); assert_eq!(2, community_aggregates_before_delete.comments); // Test the other community - let another_community_aggs = CommunityAggregates::read(pool, another_inserted_community.id) - .await - .unwrap(); + let another_community_aggs = + CommunityAggregates::read(pool, another_inserted_community.id).await?; assert_eq!(1, another_community_aggs.subscribers); + assert_eq!(1, another_community_aggs.subscribers_local); assert_eq!(0, another_community_aggs.posts); assert_eq!(0, another_community_aggs.comments); // Unfollow test - CommunityFollower::unfollow(pool, &second_person_follow) - .await - .unwrap(); - let after_unfollow = CommunityAggregates::read(pool, inserted_community.id) - .await - .unwrap(); + CommunityFollower::unfollow(pool, &second_person_follow).await?; + let after_unfollow = CommunityAggregates::read(pool, inserted_community.id).await?; assert_eq!(1, after_unfollow.subscribers); + assert_eq!(1, after_unfollow.subscribers_local); // Follow again just for the later tests - CommunityFollower::follow(pool, &second_person_follow) - .await - .unwrap(); - let after_follow_again = CommunityAggregates::read(pool, inserted_community.id) - .await - .unwrap(); + CommunityFollower::follow(pool, &second_person_follow).await?; + let after_follow_again = CommunityAggregates::read(pool, inserted_community.id).await?; assert_eq!(2, after_follow_again.subscribers); + assert_eq!(2, after_follow_again.subscribers_local); // Remove a parent post (the comment count should also be 0) - Post::delete(pool, inserted_post.id).await.unwrap(); - let after_parent_post_delete = 
CommunityAggregates::read(pool, inserted_community.id) - .await - .unwrap(); + Post::delete(pool, inserted_post.id).await?; + let after_parent_post_delete = CommunityAggregates::read(pool, inserted_community.id).await?; assert_eq!(0, after_parent_post_delete.comments); assert_eq!(0, after_parent_post_delete.posts); // Remove the 2nd person - Person::delete(pool, another_inserted_person.id) - .await - .unwrap(); - let after_person_delete = CommunityAggregates::read(pool, inserted_community.id) - .await - .unwrap(); + Person::delete(pool, another_inserted_person.id).await?; + let after_person_delete = CommunityAggregates::read(pool, inserted_community.id).await?; assert_eq!(1, after_person_delete.subscribers); + assert_eq!(1, after_person_delete.subscribers_local); // This should delete all the associated rows, and fire triggers - let person_num_deleted = Person::delete(pool, inserted_person.id).await.unwrap(); + let person_num_deleted = Person::delete(pool, inserted_person.id).await?; assert_eq!(1, person_num_deleted); // Delete the community - let community_num_deleted = Community::delete(pool, inserted_community.id) - .await - .unwrap(); + let community_num_deleted = Community::delete(pool, inserted_community.id).await?; assert_eq!(1, community_num_deleted); - let another_community_num_deleted = Community::delete(pool, another_inserted_community.id) - .await - .unwrap(); + let another_community_num_deleted = + Community::delete(pool, another_inserted_community.id).await?; assert_eq!(1, another_community_num_deleted); // Should be none found, since the creator was deleted let after_delete = CommunityAggregates::read(pool, inserted_community.id).await; assert!(after_delete.is_err()); + + Ok(()) } } diff --git a/crates/db_schema/src/aggregates/person_aggregates.rs b/crates/db_schema/src/aggregates/person_aggregates.rs index e6195de28..6e0eacc07 100644 --- a/crates/db_schema/src/aggregates/person_aggregates.rs +++ b/crates/db_schema/src/aggregates/person_aggregates.rs @@ -4,23 +4,18 @@ use crate::{ schema::person_aggregates, utils::{get_conn, DbPool}, }; -use diesel::{result::Error, ExpressionMethods, QueryDsl}; +use diesel::{result::Error, QueryDsl}; use diesel_async::RunQueryDsl; impl PersonAggregates { pub async fn read(pool: &mut DbPool<'_>, person_id: PersonId) -> Result { let conn = &mut get_conn(pool).await?; - person_aggregates::table - .filter(person_aggregates::person_id.eq(person_id)) - .first::(conn) - .await + person_aggregates::table.find(person_id).first(conn).await } } #[cfg(test)] mod tests { - #![allow(clippy::unwrap_used)] - #![allow(clippy::indexing_slicing)] use crate::{ aggregates::person_aggregates::PersonAggregates, @@ -34,99 +29,81 @@ mod tests { traits::{Crud, Likeable}, utils::build_db_pool_for_tests, }; + use diesel::result::Error; + use pretty_assertions::assert_eq; use serial_test::serial; #[tokio::test] #[serial] - async fn test_crud() { + async fn test_crud() -> Result<(), Error> { let pool = &build_db_pool_for_tests().await; let pool = &mut pool.into(); - let inserted_instance = Instance::read_or_create(pool, "my_domain.tld".to_string()) - .await - .unwrap(); + let inserted_instance = Instance::read_or_create(pool, "my_domain.tld".to_string()).await?; - let new_person = PersonInsertForm::builder() - .name("thommy_user_agg".into()) - .public_key("pubkey".to_string()) - .instance_id(inserted_instance.id) - .build(); + let new_person = PersonInsertForm::test_form(inserted_instance.id, "thommy_user_agg"); - let inserted_person = Person::create(pool, 
&new_person).await.unwrap(); + let inserted_person = Person::create(pool, &new_person).await?; - let another_person = PersonInsertForm::builder() - .name("jerry_user_agg".into()) - .public_key("pubkey".to_string()) - .instance_id(inserted_instance.id) - .build(); + let another_person = PersonInsertForm::test_form(inserted_instance.id, "jerry_user_agg"); - let another_inserted_person = Person::create(pool, &another_person).await.unwrap(); + let another_inserted_person = Person::create(pool, &another_person).await?; - let new_community = CommunityInsertForm::builder() - .name("TIL_site_agg".into()) - .title("nada".to_owned()) - .public_key("pubkey".to_string()) - .instance_id(inserted_instance.id) - .build(); + let new_community = CommunityInsertForm::new( + inserted_instance.id, + "TIL_site_agg".into(), + "nada".to_owned(), + "pubkey".to_string(), + ); - let inserted_community = Community::create(pool, &new_community).await.unwrap(); + let inserted_community = Community::create(pool, &new_community).await?; - let new_post = PostInsertForm::builder() - .name("A test post".into()) - .creator_id(inserted_person.id) - .community_id(inserted_community.id) - .build(); - - let inserted_post = Post::create(pool, &new_post).await.unwrap(); + let new_post = PostInsertForm::new( + "A test post".into(), + inserted_person.id, + inserted_community.id, + ); + let inserted_post = Post::create(pool, &new_post).await?; let post_like = PostLikeForm { post_id: inserted_post.id, person_id: inserted_person.id, score: 1, }; + let _inserted_post_like = PostLike::like(pool, &post_like).await?; - let _inserted_post_like = PostLike::like(pool, &post_like).await.unwrap(); - - let comment_form = CommentInsertForm::builder() - .content("A test comment".into()) - .creator_id(inserted_person.id) - .post_id(inserted_post.id) - .build(); - - let inserted_comment = Comment::create(pool, &comment_form, None).await.unwrap(); + let comment_form = CommentInsertForm::new( + inserted_person.id, + inserted_post.id, + "A test comment".into(), + ); + let inserted_comment = Comment::create(pool, &comment_form, None).await?; let mut comment_like = CommentLikeForm { comment_id: inserted_comment.id, person_id: inserted_person.id, - post_id: inserted_post.id, score: 1, }; - let _inserted_comment_like = CommentLike::like(pool, &comment_like).await.unwrap(); - - let child_comment_form = CommentInsertForm::builder() - .content("A test comment".into()) - .creator_id(inserted_person.id) - .post_id(inserted_post.id) - .build(); + let _inserted_comment_like = CommentLike::like(pool, &comment_like).await?; + let child_comment_form = CommentInsertForm::new( + inserted_person.id, + inserted_post.id, + "A test comment".into(), + ); let inserted_child_comment = - Comment::create(pool, &child_comment_form, Some(&inserted_comment.path)) - .await - .unwrap(); + Comment::create(pool, &child_comment_form, Some(&inserted_comment.path)).await?; let child_comment_like = CommentLikeForm { comment_id: inserted_child_comment.id, person_id: another_inserted_person.id, - post_id: inserted_post.id, score: 1, }; - let _inserted_child_comment_like = CommentLike::like(pool, &child_comment_like).await.unwrap(); + let _inserted_child_comment_like = CommentLike::like(pool, &child_comment_like).await?; - let person_aggregates_before_delete = PersonAggregates::read(pool, inserted_person.id) - .await - .unwrap(); + let person_aggregates_before_delete = PersonAggregates::read(pool, inserted_person.id).await?; assert_eq!(1, person_aggregates_before_delete.post_count); 
assert_eq!(1, person_aggregates_before_delete.post_score); @@ -134,12 +111,8 @@ mod tests { assert_eq!(2, person_aggregates_before_delete.comment_score); // Remove a post like - PostLike::remove(pool, inserted_person.id, inserted_post.id) - .await - .unwrap(); - let after_post_like_remove = PersonAggregates::read(pool, inserted_person.id) - .await - .unwrap(); + PostLike::remove(pool, inserted_person.id, inserted_post.id).await?; + let after_post_like_remove = PersonAggregates::read(pool, inserted_person.id).await?; assert_eq!(0, after_post_like_remove.post_score); Comment::update( @@ -150,8 +123,7 @@ mod tests { ..Default::default() }, ) - .await - .unwrap(); + .await?; Comment::update( pool, inserted_child_comment.id, @@ -160,47 +132,34 @@ mod tests { ..Default::default() }, ) - .await - .unwrap(); + .await?; - let after_parent_comment_removed = PersonAggregates::read(pool, inserted_person.id) - .await - .unwrap(); + let after_parent_comment_removed = PersonAggregates::read(pool, inserted_person.id).await?; assert_eq!(0, after_parent_comment_removed.comment_count); // TODO: fix person aggregate comment score calculation // assert_eq!(0, after_parent_comment_removed.comment_score); // Remove a parent comment (the scores should also be removed) - Comment::delete(pool, inserted_comment.id).await.unwrap(); - Comment::delete(pool, inserted_child_comment.id) - .await - .unwrap(); - let after_parent_comment_delete = PersonAggregates::read(pool, inserted_person.id) - .await - .unwrap(); + Comment::delete(pool, inserted_comment.id).await?; + Comment::delete(pool, inserted_child_comment.id).await?; + let after_parent_comment_delete = PersonAggregates::read(pool, inserted_person.id).await?; assert_eq!(0, after_parent_comment_delete.comment_count); // TODO: fix person aggregate comment score calculation // assert_eq!(0, after_parent_comment_delete.comment_score); // Add in the two comments again, then delete the post. 
- let new_parent_comment = Comment::create(pool, &comment_form, None).await.unwrap(); + let new_parent_comment = Comment::create(pool, &comment_form, None).await?; let _new_child_comment = - Comment::create(pool, &child_comment_form, Some(&new_parent_comment.path)) - .await - .unwrap(); + Comment::create(pool, &child_comment_form, Some(&new_parent_comment.path)).await?; comment_like.comment_id = new_parent_comment.id; - CommentLike::like(pool, &comment_like).await.unwrap(); - let after_comment_add = PersonAggregates::read(pool, inserted_person.id) - .await - .unwrap(); + CommentLike::like(pool, &comment_like).await?; + let after_comment_add = PersonAggregates::read(pool, inserted_person.id).await?; assert_eq!(2, after_comment_add.comment_count); // TODO: fix person aggregate comment score calculation // assert_eq!(1, after_comment_add.comment_score); - Post::delete(pool, inserted_post.id).await.unwrap(); - let after_post_delete = PersonAggregates::read(pool, inserted_person.id) - .await - .unwrap(); + Post::delete(pool, inserted_post.id).await?; + let after_post_delete = PersonAggregates::read(pool, inserted_person.id).await?; // TODO: fix person aggregate comment score calculation // assert_eq!(0, after_post_delete.comment_score); assert_eq!(0, after_post_delete.comment_count); @@ -208,22 +167,20 @@ mod tests { assert_eq!(0, after_post_delete.post_count); // This should delete all the associated rows, and fire triggers - let person_num_deleted = Person::delete(pool, inserted_person.id).await.unwrap(); + let person_num_deleted = Person::delete(pool, inserted_person.id).await?; assert_eq!(1, person_num_deleted); - Person::delete(pool, another_inserted_person.id) - .await - .unwrap(); + Person::delete(pool, another_inserted_person.id).await?; // Delete the community - let community_num_deleted = Community::delete(pool, inserted_community.id) - .await - .unwrap(); + let community_num_deleted = Community::delete(pool, inserted_community.id).await?; assert_eq!(1, community_num_deleted); // Should be none found let after_delete = PersonAggregates::read(pool, inserted_person.id).await; assert!(after_delete.is_err()); - Instance::delete(pool, inserted_instance.id).await.unwrap(); + Instance::delete(pool, inserted_instance.id).await?; + + Ok(()) } } diff --git a/crates/db_schema/src/aggregates/person_post_aggregates.rs b/crates/db_schema/src/aggregates/person_post_aggregates.rs index 1cbaa2422..f6e108ee9 100644 --- a/crates/db_schema/src/aggregates/person_post_aggregates.rs +++ b/crates/db_schema/src/aggregates/person_post_aggregates.rs @@ -1,11 +1,11 @@ use crate::{ aggregates::structs::{PersonPostAggregates, PersonPostAggregatesForm}, - diesel::BoolExpressionMethods, + diesel::OptionalExtension, newtypes::{PersonId, PostId}, schema::person_post_aggregates::dsl::{person_id, person_post_aggregates, post_id}, utils::{get_conn, DbPool}, }; -use diesel::{insert_into, result::Error, ExpressionMethods, QueryDsl}; +use diesel::{insert_into, result::Error, QueryDsl}; use diesel_async::RunQueryDsl; impl PersonPostAggregates { @@ -26,11 +26,12 @@ impl PersonPostAggregates { pool: &mut DbPool<'_>, person_id_: PersonId, post_id_: PostId, - ) -> Result { + ) -> Result, Error> { let conn = &mut get_conn(pool).await?; person_post_aggregates - .filter(post_id.eq(post_id_).and(person_id.eq(person_id_))) - .first::(conn) + .find((person_id_, post_id_)) + .first(conn) .await + .optional() } } diff --git a/crates/db_schema/src/aggregates/post_aggregates.rs b/crates/db_schema/src/aggregates/post_aggregates.rs index 
1816ee5f3..b63017317 100644 --- a/crates/db_schema/src/aggregates/post_aggregates.rs +++ b/crates/db_schema/src/aggregates/post_aggregates.rs @@ -14,10 +14,7 @@ use diesel_async::RunQueryDsl; impl PostAggregates { pub async fn read(pool: &mut DbPool<'_>, post_id: PostId) -> Result { let conn = &mut get_conn(pool).await?; - post_aggregates::table - .filter(post_aggregates::post_id.eq(post_id)) - .first::(conn) - .await + post_aggregates::table.find(post_id).first(conn).await } pub async fn update_ranks(pool: &mut DbPool<'_>, post_id: PostId) -> Result { @@ -33,8 +30,7 @@ impl PostAggregates { .first::(conn) .await?; - diesel::update(post_aggregates::table) - .filter(post_aggregates::post_id.eq(post_id)) + diesel::update(post_aggregates::table.find(post_id)) .set(( post_aggregates::hot_rank.eq(hot_rank(post_aggregates::score, post_aggregates::published)), post_aggregates::hot_rank_active.eq(hot_rank( @@ -54,8 +50,6 @@ impl PostAggregates { #[cfg(test)] mod tests { - #![allow(clippy::unwrap_used)] - #![allow(clippy::indexing_slicing)] use crate::{ aggregates::post_aggregates::PostAggregates, @@ -69,69 +63,55 @@ mod tests { traits::{Crud, Likeable}, utils::build_db_pool_for_tests, }; + use diesel::result::Error; + use pretty_assertions::assert_eq; use serial_test::serial; #[tokio::test] #[serial] - async fn test_crud() { + async fn test_crud() -> Result<(), Error> { let pool = &build_db_pool_for_tests().await; let pool = &mut pool.into(); - let inserted_instance = Instance::read_or_create(pool, "my_domain.tld".to_string()) - .await - .unwrap(); + let inserted_instance = Instance::read_or_create(pool, "my_domain.tld".to_string()).await?; - let new_person = PersonInsertForm::builder() - .name("thommy_community_agg".into()) - .public_key("pubkey".to_string()) - .instance_id(inserted_instance.id) - .build(); + let new_person = PersonInsertForm::test_form(inserted_instance.id, "thommy_community_agg"); - let inserted_person = Person::create(pool, &new_person).await.unwrap(); + let inserted_person = Person::create(pool, &new_person).await?; - let another_person = PersonInsertForm::builder() - .name("jerry_community_agg".into()) - .public_key("pubkey".to_string()) - .instance_id(inserted_instance.id) - .build(); + let another_person = PersonInsertForm::test_form(inserted_instance.id, "jerry_community_agg"); - let another_inserted_person = Person::create(pool, &another_person).await.unwrap(); + let another_inserted_person = Person::create(pool, &another_person).await?; - let new_community = CommunityInsertForm::builder() - .name("TIL_community_agg".into()) - .title("nada".to_owned()) - .public_key("pubkey".to_string()) - .instance_id(inserted_instance.id) - .build(); + let new_community = CommunityInsertForm::new( + inserted_instance.id, + "TIL_community_agg".into(), + "nada".to_owned(), + "pubkey".to_string(), + ); + let inserted_community = Community::create(pool, &new_community).await?; - let inserted_community = Community::create(pool, &new_community).await.unwrap(); + let new_post = PostInsertForm::new( + "A test post".into(), + inserted_person.id, + inserted_community.id, + ); + let inserted_post = Post::create(pool, &new_post).await?; - let new_post = PostInsertForm::builder() - .name("A test post".into()) - .creator_id(inserted_person.id) - .community_id(inserted_community.id) - .build(); - - let inserted_post = Post::create(pool, &new_post).await.unwrap(); - - let comment_form = CommentInsertForm::builder() - .content("A test comment".into()) - .creator_id(inserted_person.id) - 
.post_id(inserted_post.id) - .build(); - - let inserted_comment = Comment::create(pool, &comment_form, None).await.unwrap(); - - let child_comment_form = CommentInsertForm::builder() - .content("A test comment".into()) - .creator_id(inserted_person.id) - .post_id(inserted_post.id) - .build(); + let comment_form = CommentInsertForm::new( + inserted_person.id, + inserted_post.id, + "A test comment".into(), + ); + let inserted_comment = Comment::create(pool, &comment_form, None).await?; + let child_comment_form = CommentInsertForm::new( + inserted_person.id, + inserted_post.id, + "A test comment".into(), + ); let inserted_child_comment = - Comment::create(pool, &child_comment_form, Some(&inserted_comment.path)) - .await - .unwrap(); + Comment::create(pool, &child_comment_form, Some(&inserted_comment.path)).await?; let post_like = PostLikeForm { post_id: inserted_post.id, @@ -139,9 +119,9 @@ mod tests { score: 1, }; - PostLike::like(pool, &post_like).await.unwrap(); + PostLike::like(pool, &post_like).await?; - let post_aggs_before_delete = PostAggregates::read(pool, inserted_post.id).await.unwrap(); + let post_aggs_before_delete = PostAggregates::read(pool, inserted_post.id).await?; assert_eq!(2, post_aggs_before_delete.comments); assert_eq!(1, post_aggs_before_delete.score); @@ -155,9 +135,9 @@ mod tests { score: -1, }; - PostLike::like(pool, &post_dislike).await.unwrap(); + PostLike::like(pool, &post_dislike).await?; - let post_aggs_after_dislike = PostAggregates::read(pool, inserted_post.id).await.unwrap(); + let post_aggs_after_dislike = PostAggregates::read(pool, inserted_post.id).await?; assert_eq!(2, post_aggs_after_dislike.comments); assert_eq!(0, post_aggs_after_dislike.score); @@ -165,90 +145,76 @@ mod tests { assert_eq!(1, post_aggs_after_dislike.downvotes); // Remove the comments - Comment::delete(pool, inserted_comment.id).await.unwrap(); - Comment::delete(pool, inserted_child_comment.id) - .await - .unwrap(); - let after_comment_delete = PostAggregates::read(pool, inserted_post.id).await.unwrap(); + Comment::delete(pool, inserted_comment.id).await?; + Comment::delete(pool, inserted_child_comment.id).await?; + let after_comment_delete = PostAggregates::read(pool, inserted_post.id).await?; assert_eq!(0, after_comment_delete.comments); assert_eq!(0, after_comment_delete.score); assert_eq!(1, after_comment_delete.upvotes); assert_eq!(1, after_comment_delete.downvotes); // Remove the first post like - PostLike::remove(pool, inserted_person.id, inserted_post.id) - .await - .unwrap(); - let after_like_remove = PostAggregates::read(pool, inserted_post.id).await.unwrap(); + PostLike::remove(pool, inserted_person.id, inserted_post.id).await?; + let after_like_remove = PostAggregates::read(pool, inserted_post.id).await?; assert_eq!(0, after_like_remove.comments); assert_eq!(-1, after_like_remove.score); assert_eq!(0, after_like_remove.upvotes); assert_eq!(1, after_like_remove.downvotes); // This should delete all the associated rows, and fire triggers - Person::delete(pool, another_inserted_person.id) - .await - .unwrap(); - let person_num_deleted = Person::delete(pool, inserted_person.id).await.unwrap(); + Person::delete(pool, another_inserted_person.id).await?; + let person_num_deleted = Person::delete(pool, inserted_person.id).await?; assert_eq!(1, person_num_deleted); // Delete the community - let community_num_deleted = Community::delete(pool, inserted_community.id) - .await - .unwrap(); + let community_num_deleted = Community::delete(pool, inserted_community.id).await?; assert_eq!(1, 
community_num_deleted); // Should be none found, since the creator was deleted let after_delete = PostAggregates::read(pool, inserted_post.id).await; assert!(after_delete.is_err()); - Instance::delete(pool, inserted_instance.id).await.unwrap(); + Instance::delete(pool, inserted_instance.id).await?; + + Ok(()) } #[tokio::test] #[serial] - async fn test_soft_delete() { + async fn test_soft_delete() -> Result<(), Error> { let pool = &build_db_pool_for_tests().await; let pool = &mut pool.into(); - let inserted_instance = Instance::read_or_create(pool, "my_domain.tld".to_string()) - .await - .unwrap(); + let inserted_instance = Instance::read_or_create(pool, "my_domain.tld".to_string()).await?; - let new_person = PersonInsertForm::builder() - .name("thommy_community_agg".into()) - .public_key("pubkey".to_string()) - .instance_id(inserted_instance.id) - .build(); + let new_person = PersonInsertForm::test_form(inserted_instance.id, "thommy_community_agg"); - let inserted_person = Person::create(pool, &new_person).await.unwrap(); + let inserted_person = Person::create(pool, &new_person).await?; - let new_community = CommunityInsertForm::builder() - .name("TIL_community_agg".into()) - .title("nada".to_owned()) - .public_key("pubkey".to_string()) - .instance_id(inserted_instance.id) - .build(); + let new_community = CommunityInsertForm::new( + inserted_instance.id, + "TIL_community_agg".into(), + "nada".to_owned(), + "pubkey".to_string(), + ); + let inserted_community = Community::create(pool, &new_community).await?; - let inserted_community = Community::create(pool, &new_community).await.unwrap(); + let new_post = PostInsertForm::new( + "A test post".into(), + inserted_person.id, + inserted_community.id, + ); + let inserted_post = Post::create(pool, &new_post).await?; - let new_post = PostInsertForm::builder() - .name("A test post".into()) - .creator_id(inserted_person.id) - .community_id(inserted_community.id) - .build(); + let comment_form = CommentInsertForm::new( + inserted_person.id, + inserted_post.id, + "A test comment".into(), + ); - let inserted_post = Post::create(pool, &new_post).await.unwrap(); + let inserted_comment = Comment::create(pool, &comment_form, None).await?; - let comment_form = CommentInsertForm::builder() - .content("A test comment".into()) - .creator_id(inserted_person.id) - .post_id(inserted_post.id) - .build(); - - let inserted_comment = Comment::create(pool, &comment_form, None).await.unwrap(); - - let post_aggregates_before = PostAggregates::read(pool, inserted_post.id).await.unwrap(); + let post_aggregates_before = PostAggregates::read(pool, inserted_post.id).await?; assert_eq!(1, post_aggregates_before.comments); Comment::update( @@ -259,10 +225,9 @@ mod tests { ..Default::default() }, ) - .await - .unwrap(); + .await?; - let post_aggregates_after_remove = PostAggregates::read(pool, inserted_post.id).await.unwrap(); + let post_aggregates_after_remove = PostAggregates::read(pool, inserted_post.id).await?; assert_eq!(0, post_aggregates_after_remove.comments); Comment::update( @@ -273,8 +238,7 @@ mod tests { ..Default::default() }, ) - .await - .unwrap(); + .await?; Comment::update( pool, @@ -284,10 +248,9 @@ mod tests { ..Default::default() }, ) - .await - .unwrap(); + .await?; - let post_aggregates_after_delete = PostAggregates::read(pool, inserted_post.id).await.unwrap(); + let post_aggregates_after_delete = PostAggregates::read(pool, inserted_post.id).await?; assert_eq!(0, post_aggregates_after_delete.comments); Comment::update( @@ -298,19 +261,17 @@ mod tests { 
..Default::default() }, ) - .await - .unwrap(); + .await?; - let post_aggregates_after_delete_remove = - PostAggregates::read(pool, inserted_post.id).await.unwrap(); + let post_aggregates_after_delete_remove = PostAggregates::read(pool, inserted_post.id).await?; assert_eq!(0, post_aggregates_after_delete_remove.comments); - Comment::delete(pool, inserted_comment.id).await.unwrap(); - Post::delete(pool, inserted_post.id).await.unwrap(); - Person::delete(pool, inserted_person.id).await.unwrap(); - Community::delete(pool, inserted_community.id) - .await - .unwrap(); - Instance::delete(pool, inserted_instance.id).await.unwrap(); + Comment::delete(pool, inserted_comment.id).await?; + Post::delete(pool, inserted_post.id).await?; + Person::delete(pool, inserted_person.id).await?; + Community::delete(pool, inserted_community.id).await?; + Instance::delete(pool, inserted_instance.id).await?; + + Ok(()) } } diff --git a/crates/db_schema/src/aggregates/site_aggregates.rs b/crates/db_schema/src/aggregates/site_aggregates.rs index 4fe57720a..379ddd2d9 100644 --- a/crates/db_schema/src/aggregates/site_aggregates.rs +++ b/crates/db_schema/src/aggregates/site_aggregates.rs @@ -9,14 +9,12 @@ use diesel_async::RunQueryDsl; impl SiteAggregates { pub async fn read(pool: &mut DbPool<'_>) -> Result { let conn = &mut get_conn(pool).await?; - site_aggregates::table.first::(conn).await + site_aggregates::table.first(conn).await } } #[cfg(test)] mod tests { - #![allow(clippy::unwrap_used)] - #![allow(clippy::indexing_slicing)] use crate::{ aggregates::site_aggregates::SiteAggregates, @@ -31,86 +29,76 @@ mod tests { traits::Crud, utils::{build_db_pool_for_tests, DbPool}, }; + use diesel::result::Error; + use pretty_assertions::assert_eq; use serial_test::serial; async fn prepare_site_with_community( pool: &mut DbPool<'_>, - ) -> (Instance, Person, Site, Community) { - let inserted_instance = Instance::read_or_create(pool, "my_domain.tld".to_string()) - .await - .unwrap(); + ) -> Result<(Instance, Person, Site, Community), Error> { + let inserted_instance = Instance::read_or_create(pool, "my_domain.tld".to_string()).await?; - let new_person = PersonInsertForm::builder() - .name("thommy_site_agg".into()) - .public_key("pubkey".to_string()) - .instance_id(inserted_instance.id) - .build(); + let new_person = PersonInsertForm::test_form(inserted_instance.id, "thommy_site_agg"); - let inserted_person = Person::create(pool, &new_person).await.unwrap(); + let inserted_person = Person::create(pool, &new_person).await?; - let site_form = SiteInsertForm::builder() - .name("test_site".into()) - .instance_id(inserted_instance.id) - .build(); + let site_form = SiteInsertForm::new("test_site".into(), inserted_instance.id); + let inserted_site = Site::create(pool, &site_form).await?; - let inserted_site = Site::create(pool, &site_form).await.unwrap(); + let new_community = CommunityInsertForm::new( + inserted_instance.id, + "TIL_site_agg".into(), + "nada".to_owned(), + "pubkey".to_string(), + ); - let new_community = CommunityInsertForm::builder() - .name("TIL_site_agg".into()) - .title("nada".to_owned()) - .public_key("pubkey".to_string()) - .instance_id(inserted_instance.id) - .build(); + let inserted_community = Community::create(pool, &new_community).await?; - let inserted_community = Community::create(pool, &new_community).await.unwrap(); - ( + Ok(( inserted_instance, inserted_person, inserted_site, inserted_community, - ) + )) } #[tokio::test] #[serial] - async fn test_crud() { + async fn test_crud() -> Result<(), Error> { 
let pool = &build_db_pool_for_tests().await; let pool = &mut pool.into(); let (inserted_instance, inserted_person, inserted_site, inserted_community) = - prepare_site_with_community(pool).await; + prepare_site_with_community(pool).await?; - let new_post = PostInsertForm::builder() - .name("A test post".into()) - .creator_id(inserted_person.id) - .community_id(inserted_community.id) - .build(); + let new_post = PostInsertForm::new( + "A test post".into(), + inserted_person.id, + inserted_community.id, + ); // Insert two of those posts - let inserted_post = Post::create(pool, &new_post).await.unwrap(); - let _inserted_post_again = Post::create(pool, &new_post).await.unwrap(); + let inserted_post = Post::create(pool, &new_post).await?; + let _inserted_post_again = Post::create(pool, &new_post).await?; - let comment_form = CommentInsertForm::builder() - .content("A test comment".into()) - .creator_id(inserted_person.id) - .post_id(inserted_post.id) - .build(); + let comment_form = CommentInsertForm::new( + inserted_person.id, + inserted_post.id, + "A test comment".into(), + ); // Insert two of those comments - let inserted_comment = Comment::create(pool, &comment_form, None).await.unwrap(); - - let child_comment_form = CommentInsertForm::builder() - .content("A test comment".into()) - .creator_id(inserted_person.id) - .post_id(inserted_post.id) - .build(); + let inserted_comment = Comment::create(pool, &comment_form, None).await?; + let child_comment_form = CommentInsertForm::new( + inserted_person.id, + inserted_post.id, + "A test comment".into(), + ); let _inserted_child_comment = - Comment::create(pool, &child_comment_form, Some(&inserted_comment.path)) - .await - .unwrap(); + Comment::create(pool, &child_comment_form, Some(&inserted_comment.path)).await?; - let site_aggregates_before_delete = SiteAggregates::read(pool).await.unwrap(); + let site_aggregates_before_delete = SiteAggregates::read(pool).await?; // TODO: this is unstable, sometimes it returns 0 users, sometimes 1 //assert_eq!(0, site_aggregates_before_delete.users); @@ -119,42 +107,42 @@ mod tests { assert_eq!(2, site_aggregates_before_delete.comments); // Try a post delete - Post::delete(pool, inserted_post.id).await.unwrap(); - let site_aggregates_after_post_delete = SiteAggregates::read(pool).await.unwrap(); + Post::delete(pool, inserted_post.id).await?; + let site_aggregates_after_post_delete = SiteAggregates::read(pool).await?; assert_eq!(1, site_aggregates_after_post_delete.posts); assert_eq!(0, site_aggregates_after_post_delete.comments); // This shouuld delete all the associated rows, and fire triggers - let person_num_deleted = Person::delete(pool, inserted_person.id).await.unwrap(); + let person_num_deleted = Person::delete(pool, inserted_person.id).await?; assert_eq!(1, person_num_deleted); // Delete the community - let community_num_deleted = Community::delete(pool, inserted_community.id) - .await - .unwrap(); + let community_num_deleted = Community::delete(pool, inserted_community.id).await?; assert_eq!(1, community_num_deleted); // Site should still exist, it can without a site creator. 
let after_delete_creator = SiteAggregates::read(pool).await; assert!(after_delete_creator.is_ok()); - Site::delete(pool, inserted_site.id).await.unwrap(); + Site::delete(pool, inserted_site.id).await?; let after_delete_site = SiteAggregates::read(pool).await; assert!(after_delete_site.is_err()); - Instance::delete(pool, inserted_instance.id).await.unwrap(); + Instance::delete(pool, inserted_instance.id).await?; + + Ok(()) } #[tokio::test] #[serial] - async fn test_soft_delete() { + async fn test_soft_delete() -> Result<(), Error> { let pool = &build_db_pool_for_tests().await; let pool = &mut pool.into(); let (inserted_instance, inserted_person, inserted_site, inserted_community) = - prepare_site_with_community(pool).await; + prepare_site_with_community(pool).await?; - let site_aggregates_before = SiteAggregates::read(pool).await.unwrap(); + let site_aggregates_before = SiteAggregates::read(pool).await?; assert_eq!(1, site_aggregates_before.communities); Community::update( @@ -165,10 +153,9 @@ mod tests { ..Default::default() }, ) - .await - .unwrap(); + .await?; - let site_aggregates_after_delete = SiteAggregates::read(pool).await.unwrap(); + let site_aggregates_after_delete = SiteAggregates::read(pool).await?; assert_eq!(0, site_aggregates_after_delete.communities); Community::update( @@ -179,8 +166,7 @@ mod tests { ..Default::default() }, ) - .await - .unwrap(); + .await?; Community::update( pool, @@ -190,10 +176,9 @@ mod tests { ..Default::default() }, ) - .await - .unwrap(); + .await?; - let site_aggregates_after_remove = SiteAggregates::read(pool).await.unwrap(); + let site_aggregates_after_remove = SiteAggregates::read(pool).await?; assert_eq!(0, site_aggregates_after_remove.communities); Community::update( @@ -204,17 +189,16 @@ mod tests { ..Default::default() }, ) - .await - .unwrap(); + .await?; - let site_aggregates_after_remove_delete = SiteAggregates::read(pool).await.unwrap(); + let site_aggregates_after_remove_delete = SiteAggregates::read(pool).await?; assert_eq!(0, site_aggregates_after_remove_delete.communities); - Community::delete(pool, inserted_community.id) - .await - .unwrap(); - Site::delete(pool, inserted_site.id).await.unwrap(); - Person::delete(pool, inserted_person.id).await.unwrap(); - Instance::delete(pool, inserted_instance.id).await.unwrap(); + Community::delete(pool, inserted_community.id).await?; + Site::delete(pool, inserted_site.id).await?; + Person::delete(pool, inserted_person.id).await?; + Instance::delete(pool, inserted_instance.id).await?; + + Ok(()) } } diff --git a/crates/db_schema/src/aggregates/structs.rs b/crates/db_schema/src/aggregates/structs.rs index 641ca3b3d..fd7f70409 100644 --- a/crates/db_schema/src/aggregates/structs.rs +++ b/crates/db_schema/src/aggregates/structs.rs @@ -9,17 +9,23 @@ use crate::schema::{ site_aggregates, }; use chrono::{DateTime, Utc}; +#[cfg(feature = "full")] +use i_love_jesus::CursorKeysModule; use serde::{Deserialize, Serialize}; #[cfg(feature = "full")] use ts_rs::TS; #[derive(PartialEq, Debug, Serialize, Deserialize, Clone)] -#[cfg_attr(feature = "full", derive(Queryable, Associations, Identifiable, TS))] +#[cfg_attr( + feature = "full", + derive(Queryable, Selectable, Associations, Identifiable, TS) +)] #[cfg_attr(feature = "full", diesel(table_name = comment_aggregates))] #[cfg_attr(feature = "full", diesel(belongs_to(crate::source::comment::Comment)))] +#[cfg_attr(feature = "full", diesel(primary_key(comment_id)))] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] #[cfg_attr(feature = 
"full", ts(export))] /// Aggregate data for a comment. pub struct CommentAggregates { - pub id: i32, pub comment_id: CommentId, pub score: i64, pub upvotes: i64, @@ -34,16 +40,19 @@ pub struct CommentAggregates { } #[derive(PartialEq, Debug, Serialize, Deserialize, Clone)] -#[cfg_attr(feature = "full", derive(Queryable, Associations, Identifiable, TS))] +#[cfg_attr( + feature = "full", + derive(Queryable, Selectable, Associations, Identifiable, TS) +)] #[cfg_attr(feature = "full", diesel(table_name = community_aggregates))] #[cfg_attr( feature = "full", diesel(belongs_to(crate::source::community::Community)) )] +#[cfg_attr(feature = "full", diesel(primary_key(community_id)))] #[cfg_attr(feature = "full", ts(export))] /// Aggregate data for a community. pub struct CommunityAggregates { - pub id: i32, pub community_id: CommunityId, pub subscribers: i64, pub posts: i64, @@ -59,31 +68,50 @@ pub struct CommunityAggregates { pub users_active_half_year: i64, #[serde(skip)] pub hot_rank: f64, + pub subscribers_local: i64, } #[derive(PartialEq, Eq, Debug, Serialize, Deserialize, Clone, Default)] -#[cfg_attr(feature = "full", derive(Queryable, Associations, Identifiable, TS))] +#[cfg_attr( + feature = "full", + derive(Queryable, Selectable, Associations, Identifiable, TS) +)] #[cfg_attr(feature = "full", diesel(table_name = person_aggregates))] #[cfg_attr(feature = "full", diesel(belongs_to(crate::source::person::Person)))] +#[cfg_attr(feature = "full", diesel(primary_key(person_id)))] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] #[cfg_attr(feature = "full", ts(export))] /// Aggregate data for a person. pub struct PersonAggregates { - pub id: i32, pub person_id: PersonId, pub post_count: i64, + #[serde(skip)] pub post_score: i64, pub comment_count: i64, + #[serde(skip)] pub comment_score: i64, } #[derive(PartialEq, Debug, Serialize, Deserialize, Clone)] -#[cfg_attr(feature = "full", derive(Queryable, Associations, Identifiable, TS))] +#[cfg_attr( + feature = "full", + derive( + Queryable, + Selectable, + Associations, + Identifiable, + TS, + CursorKeysModule + ) +)] #[cfg_attr(feature = "full", diesel(table_name = post_aggregates))] #[cfg_attr(feature = "full", diesel(belongs_to(crate::source::post::Post)))] +#[cfg_attr(feature = "full", diesel(primary_key(post_id)))] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] #[cfg_attr(feature = "full", ts(export))] +#[cfg_attr(feature = "full", cursor_keys_module(name = post_aggregates_keys))] /// Aggregate data for a post. pub struct PostAggregates { - pub id: i32, pub post_id: PostId, pub comments: i64, pub score: i64, @@ -94,7 +122,6 @@ pub struct PostAggregates { /// A newest comment time, limited to 2 days, to prevent necrobumping pub newest_comment_time_necro: DateTime, /// The time of the newest comment in the post. - #[serde(skip)] pub newest_comment_time: DateTime, /// If the post is featured on the community. 
#[serde(skip)] @@ -120,12 +147,16 @@ pub struct PostAggregates { } #[derive(PartialEq, Eq, Debug, Serialize, Deserialize, Clone)] -#[cfg_attr(feature = "full", derive(Queryable, Associations, Identifiable))] +#[cfg_attr( + feature = "full", + derive(Queryable, Selectable, Associations, Identifiable) +)] #[cfg_attr(feature = "full", diesel(table_name = person_post_aggregates))] +#[cfg_attr(feature = "full", diesel(primary_key(person_id, post_id)))] #[cfg_attr(feature = "full", diesel(belongs_to(crate::source::person::Person)))] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] /// Aggregate data for a person's post. pub struct PersonPostAggregates { - pub id: i32, pub person_id: PersonId, pub post_id: PostId, /// The number of comments they've read on that post. @@ -145,14 +176,18 @@ pub struct PersonPostAggregatesForm { pub published: Option>, } -#[derive(PartialEq, Eq, Debug, Serialize, Deserialize, Clone)] -#[cfg_attr(feature = "full", derive(Queryable, Associations, Identifiable, TS))] +#[derive(PartialEq, Eq, Debug, Serialize, Deserialize, Clone, Copy, Hash)] +#[cfg_attr( + feature = "full", + derive(Queryable, Selectable, Associations, Identifiable, TS) +)] #[cfg_attr(feature = "full", diesel(table_name = site_aggregates))] #[cfg_attr(feature = "full", diesel(belongs_to(crate::source::site::Site)))] +#[cfg_attr(feature = "full", diesel(primary_key(site_id)))] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] #[cfg_attr(feature = "full", ts(export))] /// Aggregate data for a site. pub struct SiteAggregates { - pub id: i32, pub site_id: SiteId, pub users: i64, pub posts: i64, diff --git a/crates/db_schema/src/impls/activity.rs b/crates/db_schema/src/impls/activity.rs index 16b0fca40..fff0c2f0c 100644 --- a/crates/db_schema/src/impls/activity.rs +++ b/crates/db_schema/src/impls/activity.rs @@ -1,6 +1,6 @@ use crate::{ diesel::OptionalExtension, - newtypes::DbUrl, + newtypes::{ActivityId, DbUrl}, source::activity::{ReceivedActivity, SentActivity, SentActivityForm}, utils::{get_conn, DbPool}, }; @@ -25,30 +25,26 @@ impl SentActivity { pub async fn read_from_apub_id(pool: &mut DbPool<'_>, object_id: &DbUrl) -> Result { use crate::schema::sent_activity::dsl::{ap_id, sent_activity}; let conn = &mut get_conn(pool).await?; - sent_activity - .filter(ap_id.eq(object_id)) - .first::(conn) - .await + sent_activity.filter(ap_id.eq(object_id)).first(conn).await } - pub async fn read(pool: &mut DbPool<'_>, object_id: i64) -> Result { + pub async fn read(pool: &mut DbPool<'_>, object_id: ActivityId) -> Result { use crate::schema::sent_activity::dsl::sent_activity; let conn = &mut get_conn(pool).await?; - sent_activity.find(object_id).first::(conn).await + sent_activity.find(object_id).first(conn).await } } impl ReceivedActivity { pub async fn create(pool: &mut DbPool<'_>, ap_id_: &DbUrl) -> Result<(), Error> { - use crate::schema::received_activity::dsl::{ap_id, id, received_activity}; + use crate::schema::received_activity::dsl::{ap_id, received_activity}; let conn = &mut get_conn(pool).await?; - let res = insert_into(received_activity) + let rows_affected = insert_into(received_activity) .values(ap_id.eq(ap_id_)) .on_conflict_do_nothing() - .returning(id) - .get_result::(conn) + .execute(conn) .await .optional()?; - if res.is_some() { + if rows_affected == Some(1) { // new activity inserted successfully Ok(()) } else { @@ -63,40 +59,36 @@ impl ReceivedActivity { #[cfg(test)] mod tests { - #![allow(clippy::unwrap_used)] - 
#![allow(clippy::indexing_slicing)] use super::*; use crate::{source::activity::ActorType, utils::build_db_pool_for_tests}; + use lemmy_utils::error::LemmyResult; + use pretty_assertions::assert_eq; use serde_json::json; use serial_test::serial; use url::Url; #[tokio::test] #[serial] - async fn receive_activity_duplicate() { + async fn receive_activity_duplicate() -> LemmyResult<()> { let pool = &build_db_pool_for_tests().await; let pool = &mut pool.into(); - let ap_id: DbUrl = Url::parse("http://example.com/activity/531") - .unwrap() - .into(); + let ap_id: DbUrl = Url::parse("http://example.com/activity/531")?.into(); - // inserting activity for first time - let res = ReceivedActivity::create(pool, &ap_id).await; - assert!(res.is_ok()); + // inserting activity should only work once + ReceivedActivity::create(pool, &ap_id).await?; + let second = ReceivedActivity::create(pool, &ap_id).await; + assert!(second.is_err()); - let res = ReceivedActivity::create(pool, &ap_id).await; - assert!(res.is_err()); + Ok(()) } #[tokio::test] #[serial] - async fn sent_activity_write_read() { + async fn sent_activity_write_read() -> LemmyResult<()> { let pool = &build_db_pool_for_tests().await; let pool = &mut pool.into(); - let ap_id: DbUrl = Url::parse("http://example.com/activity/412") - .unwrap() - .into(); + let ap_id: DbUrl = Url::parse("http://example.com/activity/412")?.into(); let data = json!({ "key1": "0xF9BA143B95FF6D82", "key2": "42", @@ -107,20 +99,20 @@ mod tests { ap_id: ap_id.clone(), data: data.clone(), sensitive, - actor_apub_id: Url::parse("http://example.com/u/exampleuser") - .unwrap() - .into(), + actor_apub_id: Url::parse("http://example.com/u/exampleuser")?.into(), actor_type: ActorType::Person, send_all_instances: false, send_community_followers_of: None, send_inboxes: vec![], }; - SentActivity::create(pool, form).await.unwrap(); + SentActivity::create(pool, form).await?; - let res = SentActivity::read_from_apub_id(pool, &ap_id).await.unwrap(); + let res = SentActivity::read_from_apub_id(pool, &ap_id).await?; assert_eq!(res.ap_id, ap_id); assert_eq!(res.data, data); assert_eq!(res.sensitive, sensitive); + + Ok(()) } } diff --git a/crates/db_schema/src/impls/actor_language.rs b/crates/db_schema/src/impls/actor_language.rs index 313762a72..bff729f41 100644 --- a/crates/db_schema/src/impls/actor_language.rs +++ b/crates/db_schema/src/impls/actor_language.rs @@ -26,7 +26,7 @@ use diesel::{ QueryDsl, }; use diesel_async::{AsyncPgConnection, RunQueryDsl}; -use lemmy_utils::error::{LemmyError, LemmyErrorType}; +use lemmy_utils::error::{LemmyErrorType, LemmyResult}; use tokio::sync::OnceCell; pub const UNDETERMINED_ID: LanguageId = LanguageId(0); @@ -43,20 +43,13 @@ impl LocalUserLanguage { }; let conn = &mut get_conn(pool).await?; - conn - .build_transaction() - .run(|conn| { - Box::pin(async move { - let langs = local_user_language - .filter(local_user_id.eq(for_local_user_id)) - .order(language_id) - .select(language_id) - .get_results(conn) - .await?; - convert_read_languages(conn, langs).await - }) as _ - }) - .await + let langs = local_user_language + .filter(local_user_id.eq(for_local_user_id)) + .order(language_id) + .select(language_id) + .get_results(conn) + .await?; + convert_read_languages(conn, langs).await } /// Update the user's languages. 
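The update implementation that follows (and the matching SiteLanguage::update and CommunityLanguage::update further down) replaces the old clear-everything-then-reinsert loop with two set-based statements, issued inside the existing transaction and awaited together with tokio::try_join!. The first deletes rows whose language_id is not in the new list (language_id.ne_all(&lang_ids), which Diesel renders roughly as language_id != ALL(...)); the second bulk-inserts the new list with on_conflict(...).do_nothing(), so rows that already exist are kept rather than raising a unique-constraint error. That is also why the community update below can drop its old UniqueViolation special case for concurrent callers.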
@@ -90,22 +83,33 @@ impl LocalUserLanguage { .build_transaction() .run(|conn| { Box::pin(async move { - use crate::schema::local_user_language::dsl::{local_user_id, local_user_language}; - // Clear the current user languages - delete(local_user_language.filter(local_user_id.eq(for_local_user_id))) - .execute(conn) - .await?; + use crate::schema::local_user_language::dsl::{ + language_id, + local_user_id, + local_user_language, + }; + // Delete old languages, not including new languages + let delete_old = delete(local_user_language) + .filter(local_user_id.eq(for_local_user_id)) + .filter(language_id.ne_all(&lang_ids)) + .execute(conn); - for l in lang_ids { - let form = LocalUserLanguageForm { + let forms = lang_ids + .iter() + .map(|&l| LocalUserLanguageForm { local_user_id: for_local_user_id, language_id: l, - }; - insert_into(local_user_language) - .values(form) - .get_result::(conn) - .await?; - } + }) + .collect::>(); + + // Insert new languages + let insert_new = insert_into(local_user_language) + .values(forms) + .on_conflict((language_id, local_user_id)) + .do_nothing() + .execute(conn); + + tokio::try_join!(delete_old, insert_new)?; Ok(()) }) as _ }) @@ -119,7 +123,7 @@ impl SiteLanguage { site::table .inner_join(local_site::table) .inner_join(site_language::table) - .order(site_language::id) + .order(site_language::language_id) .select(site_language::language_id) .load(conn) .await @@ -157,23 +161,30 @@ impl SiteLanguage { .build_transaction() .run(|conn| { Box::pin(async move { - use crate::schema::site_language::dsl::{site_id, site_language}; + use crate::schema::site_language::dsl::{language_id, site_id, site_language}; - // Clear the current languages - delete(site_language.filter(site_id.eq(for_site_id))) - .execute(conn) - .await?; + // Delete old languages, not including new languages + let delete_old = delete(site_language) + .filter(site_id.eq(for_site_id)) + .filter(language_id.ne_all(&lang_ids)) + .execute(conn); - for l in lang_ids { - let form = SiteLanguageForm { + let forms = lang_ids + .iter() + .map(|&l| SiteLanguageForm { site_id: for_site_id, language_id: l, - }; - insert_into(site_language) - .values(form) - .get_result::(conn) - .await?; - } + }) + .collect::>(); + + // Insert new languages + let insert_new = insert_into(site_language) + .values(forms) + .on_conflict((site_id, language_id)) + .do_nothing() + .execute(conn); + + tokio::try_join!(delete_old, insert_new)?; CommunityLanguage::limit_languages(conn, instance_id).await?; @@ -188,28 +199,22 @@ impl CommunityLanguage { /// Returns true if the given language is one of configured languages for given community pub async fn is_allowed_community_language( pool: &mut DbPool<'_>, - for_language_id: Option, + for_language_id: LanguageId, for_community_id: CommunityId, - ) -> Result<(), LemmyError> { - use crate::schema::community_language::dsl::{community_id, community_language, language_id}; + ) -> LemmyResult<()> { + use crate::schema::community_language::dsl::community_language; let conn = &mut get_conn(pool).await?; - if let Some(for_language_id) = for_language_id { - let is_allowed = select(exists( - community_language - .filter(language_id.eq(for_language_id)) - .filter(community_id.eq(for_community_id)), - )) - .get_result(conn) - .await?; + let is_allowed = select(exists( + community_language.find((for_community_id, for_language_id)), + )) + .get_result(conn) + .await?; - if is_allowed { - Ok(()) - } else { - Err(LemmyErrorType::LanguageNotAllowed)? 
- } - } else { + if is_allowed { Ok(()) + } else { + Err(LemmyErrorType::LanguageNotAllowed)? } } @@ -276,8 +281,8 @@ impl CommunityLanguage { } let form = lang_ids - .into_iter() - .map(|language_id| CommunityLanguageForm { + .iter() + .map(|&language_id| CommunityLanguageForm { community_id: for_community_id, language_id, }) @@ -287,26 +292,26 @@ impl CommunityLanguage { .build_transaction() .run(|conn| { Box::pin(async move { - use crate::schema::community_language::dsl::{community_id, community_language}; - use diesel::result::DatabaseErrorKind::UniqueViolation; - // Clear the current languages - delete(community_language.filter(community_id.eq(for_community_id))) - .execute(conn) - .await?; + use crate::schema::community_language::dsl::{ + community_id, + community_language, + language_id, + }; + // Delete old languages, not including new languages + let delete_old = delete(community_language) + .filter(community_id.eq(for_community_id)) + .filter(language_id.ne_all(&lang_ids)) + .execute(conn); - let insert_res = insert_into(community_language) + // Insert new languages + let insert_new = insert_into(community_language) .values(form) - .get_result::(conn) - .await; + .on_conflict((community_id, language_id)) + .do_nothing() + .execute(conn); + + tokio::try_join!(delete_old, insert_new)?; - if let Err(Error::DatabaseError(UniqueViolation, _info)) = insert_res { - // race condition: this function was probably called simultaneously from another caller. ignore error - // tracing::warn!("unique error: {_info:#?}"); - // _info.constraint_name() should be = "community_language_community_id_language_id_key" - return Ok(()); - } else { - insert_res?; - } Ok(()) }) as _ }) @@ -318,7 +323,7 @@ pub async fn default_post_language( pool: &mut DbPool<'_>, community_id: CommunityId, local_user_id: LocalUserId, -) -> Result, Error> { +) -> Result { use crate::schema::{community_language::dsl as cl, local_user_language::dsl as ul}; let conn = &mut get_conn(pool).await?; let mut intersection = ul::local_user_language @@ -330,12 +335,12 @@ pub async fn default_post_language( .await?; if intersection.len() == 1 { - Ok(intersection.pop()) + Ok(intersection.pop().unwrap_or(UNDETERMINED_ID)) } else if intersection.len() == 2 && intersection.contains(&UNDETERMINED_ID) { intersection.retain(|i| i != &UNDETERMINED_ID); - Ok(intersection.pop()) + Ok(intersection.pop().unwrap_or(UNDETERMINED_ID)) } else { - Ok(None) + Ok(UNDETERMINED_ID) } } @@ -383,331 +388,254 @@ async fn convert_read_languages( } #[cfg(test)] +#[expect(clippy::indexing_slicing)] mod tests { - #![allow(clippy::unwrap_used)] - #![allow(clippy::indexing_slicing)] use super::*; use crate::{ - impls::actor_language::{ - convert_read_languages, - convert_update_languages, - default_post_language, - get_conn, - CommunityLanguage, - DbPool, - Language, - LanguageId, - LocalUserLanguage, - QueryDsl, - RunQueryDsl, - SiteLanguage, - }, source::{ community::{Community, CommunityInsertForm}, instance::Instance, local_site::{LocalSite, LocalSiteInsertForm}, local_user::{LocalUser, LocalUserInsertForm}, person::{Person, PersonInsertForm}, - site::{Site, SiteInsertForm}, + site::SiteInsertForm, }, traits::Crud, utils::build_db_pool_for_tests, }; + use diesel::result::Error; + use pretty_assertions::assert_eq; use serial_test::serial; - async fn test_langs1(pool: &mut DbPool<'_>) -> Vec { - vec![ - Language::read_id_from_code(pool, Some("en")) - .await - .unwrap() - .unwrap(), - Language::read_id_from_code(pool, Some("fr")) - .await - .unwrap() - .unwrap(), 
- Language::read_id_from_code(pool, Some("ru")) - .await - .unwrap() - .unwrap(), - ] + async fn test_langs1(pool: &mut DbPool<'_>) -> Result, Error> { + Ok(vec![ + Language::read_id_from_code(pool, "en").await?, + Language::read_id_from_code(pool, "fr").await?, + Language::read_id_from_code(pool, "ru").await?, + ]) } - async fn test_langs2(pool: &mut DbPool<'_>) -> Vec { - vec![ - Language::read_id_from_code(pool, Some("fi")) - .await - .unwrap() - .unwrap(), - Language::read_id_from_code(pool, Some("se")) - .await - .unwrap() - .unwrap(), - ] + async fn test_langs2(pool: &mut DbPool<'_>) -> Result, Error> { + Ok(vec![ + Language::read_id_from_code(pool, "fi").await?, + Language::read_id_from_code(pool, "se").await?, + ]) } - async fn create_test_site(pool: &mut DbPool<'_>) -> (Site, Instance) { - let inserted_instance = Instance::read_or_create(pool, "my_domain.tld".to_string()) - .await - .unwrap(); + async fn create_test_site(pool: &mut DbPool<'_>) -> Result<(Site, Instance), Error> { + let inserted_instance = Instance::read_or_create(pool, "my_domain.tld".to_string()).await?; - let site_form = SiteInsertForm::builder() - .name("test site".to_string()) - .instance_id(inserted_instance.id) - .build(); - let site = Site::create(pool, &site_form).await.unwrap(); + let site_form = SiteInsertForm::new("test site".to_string(), inserted_instance.id); + let site = Site::create(pool, &site_form).await?; // Create a local site, since this is necessary for local languages - let local_site_form = LocalSiteInsertForm::builder().site_id(site.id).build(); - LocalSite::create(pool, &local_site_form).await.unwrap(); + let local_site_form = LocalSiteInsertForm::new(site.id); + LocalSite::create(pool, &local_site_form).await?; - (site, inserted_instance) + Ok((site, inserted_instance)) } #[tokio::test] #[serial] - async fn test_convert_update_languages() { + async fn test_convert_update_languages() -> Result<(), Error> { let pool = &build_db_pool_for_tests().await; let pool = &mut pool.into(); // call with empty vec, returns all languages - let conn = &mut get_conn(pool).await.unwrap(); - let converted1 = convert_update_languages(conn, vec![]).await.unwrap(); + let conn = &mut get_conn(pool).await?; + let converted1 = convert_update_languages(conn, vec![]).await?; assert_eq!(184, converted1.len()); // call with nonempty vec, returns same vec - let test_langs = test_langs1(&mut conn.into()).await; - let converted2 = convert_update_languages(conn, test_langs.clone()) - .await - .unwrap(); + let test_langs = test_langs1(&mut conn.into()).await?; + let converted2 = convert_update_languages(conn, test_langs.clone()).await?; assert_eq!(test_langs, converted2); + + Ok(()) } #[tokio::test] #[serial] - async fn test_convert_read_languages() { + async fn test_convert_read_languages() -> Result<(), Error> { use crate::schema::language::dsl::{id, language}; let pool = &build_db_pool_for_tests().await; let pool = &mut pool.into(); // call with all languages, returns empty vec - let conn = &mut get_conn(pool).await.unwrap(); - let all_langs = language.select(id).get_results(conn).await.unwrap(); - let converted1: Vec = convert_read_languages(conn, all_langs).await.unwrap(); + let conn = &mut get_conn(pool).await?; + let all_langs = language.select(id).get_results(conn).await?; + let converted1: Vec = convert_read_languages(conn, all_langs).await?; assert_eq!(0, converted1.len()); // call with nonempty vec, returns same vec - let test_langs = test_langs1(&mut conn.into()).await; - let converted2 = 
convert_read_languages(conn, test_langs.clone()) - .await - .unwrap(); + let test_langs = test_langs1(&mut conn.into()).await?; + let converted2 = convert_read_languages(conn, test_langs.clone()).await?; assert_eq!(test_langs, converted2); + + Ok(()) } #[tokio::test] #[serial] - async fn test_site_languages() { + async fn test_site_languages() -> Result<(), Error> { let pool = &build_db_pool_for_tests().await; let pool = &mut pool.into(); - let (site, instance) = create_test_site(pool).await; - let site_languages1 = SiteLanguage::read_local_raw(pool).await.unwrap(); + let (site, instance) = create_test_site(pool).await?; + let site_languages1 = SiteLanguage::read_local_raw(pool).await?; // site is created with all languages assert_eq!(184, site_languages1.len()); - let test_langs = test_langs1(pool).await; - SiteLanguage::update(pool, test_langs.clone(), &site) - .await - .unwrap(); + let test_langs = test_langs1(pool).await?; + SiteLanguage::update(pool, test_langs.clone(), &site).await?; - let site_languages2 = SiteLanguage::read_local_raw(pool).await.unwrap(); + let site_languages2 = SiteLanguage::read_local_raw(pool).await?; // after update, site only has new languages assert_eq!(test_langs, site_languages2); - Site::delete(pool, site.id).await.unwrap(); - Instance::delete(pool, instance.id).await.unwrap(); - LocalSite::delete(pool).await.unwrap(); + Site::delete(pool, site.id).await?; + Instance::delete(pool, instance.id).await?; + LocalSite::delete(pool).await?; + + Ok(()) } #[tokio::test] #[serial] - async fn test_user_languages() { + async fn test_user_languages() -> Result<(), Error> { let pool = &build_db_pool_for_tests().await; let pool = &mut pool.into(); - let (site, instance) = create_test_site(pool).await; - let mut test_langs = test_langs1(pool).await; - SiteLanguage::update(pool, test_langs.clone(), &site) - .await - .unwrap(); + let (site, instance) = create_test_site(pool).await?; - let person_form = PersonInsertForm::builder() - .name("my test person".to_string()) - .public_key("pubkey".to_string()) - .instance_id(instance.id) - .build(); - let person = Person::create(pool, &person_form).await.unwrap(); - let local_user_form = LocalUserInsertForm::builder() - .person_id(person.id) - .password_encrypted("my_pw".to_string()) - .build(); + let person_form = PersonInsertForm::test_form(instance.id, "my test person"); + let person = Person::create(pool, &person_form).await?; + let local_user_form = LocalUserInsertForm::test_form(person.id); - let local_user = LocalUser::create(pool, &local_user_form).await.unwrap(); - let local_user_langs1 = LocalUserLanguage::read(pool, local_user.id).await.unwrap(); + let local_user = LocalUser::create(pool, &local_user_form, vec![]).await?; + let local_user_langs1 = LocalUserLanguage::read(pool, local_user.id).await?; - // new user should be initialized with site languages and undetermined - //test_langs.push(UNDETERMINED_ID); - //test_langs.sort(); - test_langs.insert(0, UNDETERMINED_ID); - assert_eq!(test_langs, local_user_langs1); + // new user should be initialized with all languages + assert_eq!(0, local_user_langs1.len()); // update user languages - let test_langs2 = test_langs2(pool).await; - LocalUserLanguage::update(pool, test_langs2, local_user.id) - .await - .unwrap(); - let local_user_langs2 = LocalUserLanguage::read(pool, local_user.id).await.unwrap(); + let test_langs2 = test_langs2(pool).await?; + LocalUserLanguage::update(pool, test_langs2, local_user.id).await?; + let local_user_langs2 = LocalUserLanguage::read(pool, 
local_user.id).await?; assert_eq!(3, local_user_langs2.len()); - Person::delete(pool, person.id).await.unwrap(); - LocalUser::delete(pool, local_user.id).await.unwrap(); - Site::delete(pool, site.id).await.unwrap(); - LocalSite::delete(pool).await.unwrap(); - Instance::delete(pool, instance.id).await.unwrap(); + Person::delete(pool, person.id).await?; + LocalUser::delete(pool, local_user.id).await?; + Site::delete(pool, site.id).await?; + LocalSite::delete(pool).await?; + Instance::delete(pool, instance.id).await?; + + Ok(()) } #[tokio::test] #[serial] - async fn test_community_languages() { + async fn test_community_languages() -> Result<(), Error> { let pool = &build_db_pool_for_tests().await; let pool = &mut pool.into(); - let (site, instance) = create_test_site(pool).await; - let test_langs = test_langs1(pool).await; - SiteLanguage::update(pool, test_langs.clone(), &site) - .await - .unwrap(); + let (site, instance) = create_test_site(pool).await?; + let test_langs = test_langs1(pool).await?; + SiteLanguage::update(pool, test_langs.clone(), &site).await?; - let read_site_langs = SiteLanguage::read(pool, site.id).await.unwrap(); + let read_site_langs = SiteLanguage::read(pool, site.id).await?; assert_eq!(test_langs, read_site_langs); // Test the local ones are the same - let read_local_site_langs = SiteLanguage::read_local_raw(pool).await.unwrap(); + let read_local_site_langs = SiteLanguage::read_local_raw(pool).await?; assert_eq!(test_langs, read_local_site_langs); - let community_form = CommunityInsertForm::builder() - .name("test community".to_string()) - .title("test community".to_string()) - .public_key("pubkey".to_string()) - .instance_id(instance.id) - .build(); - let community = Community::create(pool, &community_form).await.unwrap(); - let community_langs1 = CommunityLanguage::read(pool, community.id).await.unwrap(); + let community_form = CommunityInsertForm::new( + instance.id, + "test community".to_string(), + "test community".to_string(), + "pubkey".to_string(), + ); + let community = Community::create(pool, &community_form).await?; + let community_langs1 = CommunityLanguage::read(pool, community.id).await?; // community is initialized with site languages assert_eq!(test_langs, community_langs1); let allowed_lang1 = - CommunityLanguage::is_allowed_community_language(pool, Some(test_langs[0]), community.id) - .await; + CommunityLanguage::is_allowed_community_language(pool, test_langs[0], community.id).await; assert!(allowed_lang1.is_ok()); - let test_langs2 = test_langs2(pool).await; + let test_langs2 = test_langs2(pool).await?; let allowed_lang2 = - CommunityLanguage::is_allowed_community_language(pool, Some(test_langs2[0]), community.id) - .await; + CommunityLanguage::is_allowed_community_language(pool, test_langs2[0], community.id).await; assert!(allowed_lang2.is_err()); // limit site languages to en, fi. after this, community languages should be updated to // intersection of old languages (en, fr, ru) and (en, fi), which is only fi. 
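The surviving language in this step is en (test_langs[0]): the community's previous set {en, fr, ru} intersected with the new site set {en, fi} leaves only en, which is what the next assertion checks.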
- SiteLanguage::update(pool, vec![test_langs[0], test_langs2[0]], &site) - .await - .unwrap(); - let community_langs2 = CommunityLanguage::read(pool, community.id).await.unwrap(); + SiteLanguage::update(pool, vec![test_langs[0], test_langs2[0]], &site).await?; + let community_langs2 = CommunityLanguage::read(pool, community.id).await?; assert_eq!(vec![test_langs[0]], community_langs2); // update community languages to different ones - CommunityLanguage::update(pool, test_langs2.clone(), community.id) - .await - .unwrap(); - let community_langs3 = CommunityLanguage::read(pool, community.id).await.unwrap(); + CommunityLanguage::update(pool, test_langs2.clone(), community.id).await?; + let community_langs3 = CommunityLanguage::read(pool, community.id).await?; assert_eq!(test_langs2, community_langs3); - Community::delete(pool, community.id).await.unwrap(); - Site::delete(pool, site.id).await.unwrap(); - LocalSite::delete(pool).await.unwrap(); - Instance::delete(pool, instance.id).await.unwrap(); + Community::delete(pool, community.id).await?; + Site::delete(pool, site.id).await?; + LocalSite::delete(pool).await?; + Instance::delete(pool, instance.id).await?; + + Ok(()) } #[tokio::test] #[serial] - async fn test_default_post_language() { + async fn test_default_post_language() -> Result<(), Error> { let pool = &build_db_pool_for_tests().await; let pool = &mut pool.into(); - let (site, instance) = create_test_site(pool).await; - let test_langs = test_langs1(pool).await; - let test_langs2 = test_langs2(pool).await; + let (site, instance) = create_test_site(pool).await?; + let test_langs = test_langs1(pool).await?; + let test_langs2 = test_langs2(pool).await?; - let community_form = CommunityInsertForm::builder() - .name("test community".to_string()) - .title("test community".to_string()) - .public_key("pubkey".to_string()) - .instance_id(instance.id) - .build(); - let community = Community::create(pool, &community_form).await.unwrap(); - CommunityLanguage::update(pool, test_langs, community.id) - .await - .unwrap(); + let community_form = CommunityInsertForm::new( + instance.id, + "test community".to_string(), + "test community".to_string(), + "pubkey".to_string(), + ); + let community = Community::create(pool, &community_form).await?; + CommunityLanguage::update(pool, test_langs, community.id).await?; - let person_form = PersonInsertForm::builder() - .name("my test person".to_string()) - .public_key("pubkey".to_string()) - .instance_id(instance.id) - .build(); - let person = Person::create(pool, &person_form).await.unwrap(); - let local_user_form = LocalUserInsertForm::builder() - .person_id(person.id) - .password_encrypted("my_pw".to_string()) - .build(); - let local_user = LocalUser::create(pool, &local_user_form).await.unwrap(); - LocalUserLanguage::update(pool, test_langs2, local_user.id) - .await - .unwrap(); + let person_form = PersonInsertForm::test_form(instance.id, "my test person"); + let person = Person::create(pool, &person_form).await?; + let local_user_form = LocalUserInsertForm::test_form(person.id); + let local_user = LocalUser::create(pool, &local_user_form, vec![]).await?; + LocalUserLanguage::update(pool, test_langs2, local_user.id).await?; // no overlap in user/community languages, so defaults to undetermined - let def1 = default_post_language(pool, community.id, local_user.id) - .await - .unwrap(); - assert_eq!(None, def1); + let def1 = default_post_language(pool, community.id, local_user.id).await?; + assert_eq!(UNDETERMINED_ID, def1); - let ru = 
Language::read_id_from_code(pool, Some("ru")) - .await - .unwrap() - .unwrap(); + let ru = Language::read_id_from_code(pool, "ru").await?; let test_langs3 = vec![ ru, - Language::read_id_from_code(pool, Some("fi")) - .await - .unwrap() - .unwrap(), - Language::read_id_from_code(pool, Some("se")) - .await - .unwrap() - .unwrap(), + Language::read_id_from_code(pool, "fi").await?, + Language::read_id_from_code(pool, "se").await?, UNDETERMINED_ID, ]; - LocalUserLanguage::update(pool, test_langs3, local_user.id) - .await - .unwrap(); + LocalUserLanguage::update(pool, test_langs3, local_user.id).await?; // this time, both have ru as common lang - let def2 = default_post_language(pool, community.id, local_user.id) - .await - .unwrap(); - assert_eq!(Some(ru), def2); + let def2 = default_post_language(pool, community.id, local_user.id).await?; + assert_eq!(ru, def2); - Person::delete(pool, person.id).await.unwrap(); - Community::delete(pool, community.id).await.unwrap(); - LocalUser::delete(pool, local_user.id).await.unwrap(); - Site::delete(pool, site.id).await.unwrap(); - LocalSite::delete(pool).await.unwrap(); - Instance::delete(pool, instance.id).await.unwrap(); + Person::delete(pool, person.id).await?; + Community::delete(pool, community.id).await?; + LocalUser::delete(pool, local_user.id).await?; + Site::delete(pool, site.id).await?; + LocalSite::delete(pool).await?; + Instance::delete(pool, instance.id).await?; + + Ok(()) } } diff --git a/crates/db_schema/src/impls/captcha_answer.rs b/crates/db_schema/src/impls/captcha_answer.rs index 0404ce005..d7183e4fb 100644 --- a/crates/db_schema/src/impls/captcha_answer.rs +++ b/crates/db_schema/src/impls/captcha_answer.rs @@ -1,5 +1,5 @@ use crate::{ - schema::captcha_answer::dsl::{answer, captcha_answer, uuid}, + schema::captcha_answer::dsl::{answer, captcha_answer}, source::captcha_answer::{CaptchaAnswer, CaptchaAnswerForm, CheckCaptchaAnswer}, utils::{functions::lower, get_conn, DbPool}, }; @@ -13,6 +13,7 @@ use diesel::{ QueryDsl, }; use diesel_async::RunQueryDsl; +use lemmy_utils::{error::LemmyResult, LemmyErrorType}; impl CaptchaAnswer { pub async fn insert(pool: &mut DbPool<'_>, captcha: &CaptchaAnswerForm) -> Result { @@ -27,31 +28,30 @@ impl CaptchaAnswer { pub async fn check_captcha( pool: &mut DbPool<'_>, to_check: CheckCaptchaAnswer, - ) -> Result { + ) -> LemmyResult<()> { let conn = &mut get_conn(pool).await?; // fetch requested captcha - let captcha_exists = select(exists( - captcha_answer - .filter((uuid).eq(to_check.uuid)) - .filter(lower(answer).eq(to_check.answer.to_lowercase().clone())), - )) - .get_result::(conn) - .await?; + let captcha_exists = + select(exists(captcha_answer.find(to_check.uuid).filter( + lower(answer).eq(to_check.answer.to_lowercase().clone()), + ))) + .get_result::(conn) + .await?; // delete checked captcha - delete(captcha_answer.filter(uuid.eq(to_check.uuid))) + delete(captcha_answer.find(to_check.uuid)) .execute(conn) .await?; - Ok(captcha_exists) + captcha_exists + .then_some(()) + .ok_or(LemmyErrorType::CaptchaIncorrect.into()) } } #[cfg(test)] mod tests { - #![allow(clippy::unwrap_used)] - #![allow(clippy::indexing_slicing)] use crate::{ source::captcha_answer::{CaptchaAnswer, CaptchaAnswerForm, CheckCaptchaAnswer}, @@ -84,7 +84,6 @@ mod tests { .await; assert!(result.is_ok()); - assert!(result.unwrap()); } #[tokio::test] @@ -120,7 +119,6 @@ mod tests { ) .await; - assert!(result_repeat.is_ok()); - assert!(!result_repeat.unwrap()); + assert!(result_repeat.is_err()); } } diff --git 
a/crates/db_schema/src/impls/comment.rs b/crates/db_schema/src/impls/comment.rs index cab033df3..30d18465f 100644 --- a/crates/db_schema/src/impls/comment.rs +++ b/crates/db_schema/src/impls/comment.rs @@ -1,6 +1,7 @@ use crate::{ + diesel::{DecoratableTarget, OptionalExtension}, newtypes::{CommentId, DbUrl, PersonId}, - schema::comment::dsl::{ap_id, comment, content, creator_id, deleted, path, removed, updated}, + schema::comment, source::comment::{ Comment, CommentInsertForm, @@ -11,14 +12,10 @@ use crate::{ CommentUpdateForm, }, traits::{Crud, Likeable, Saveable}, - utils::{get_conn, naive_now, DbPool, DELETED_REPLACEMENT_TEXT}, -}; -use diesel::{ - dsl::{insert_into, sql_query}, - result::Error, - ExpressionMethods, - QueryDsl, + utils::{functions::coalesce, get_conn, naive_now, DbPool, DELETED_REPLACEMENT_TEXT}, }; +use chrono::{DateTime, Utc}; +use diesel::{dsl::insert_into, result::Error, ExpressionMethods, QueryDsl}; use diesel_async::RunQueryDsl; use diesel_ltree::Ltree; use url::Url; @@ -30,11 +27,11 @@ impl Comment { ) -> Result, Error> { let conn = &mut get_conn(pool).await?; - diesel::update(comment.filter(creator_id.eq(for_creator_id))) + diesel::update(comment::table.filter(comment::creator_id.eq(for_creator_id))) .set(( - content.eq(DELETED_REPLACEMENT_TEXT), - deleted.eq(true), - updated.eq(naive_now()), + comment::content.eq(DELETED_REPLACEMENT_TEXT), + comment::deleted.eq(true), + comment::updated.eq(naive_now()), )) .get_results::(conn) .await @@ -43,11 +40,14 @@ impl Comment { pub async fn update_removed_for_creator( pool: &mut DbPool<'_>, for_creator_id: PersonId, - new_removed: bool, + removed: bool, ) -> Result, Error> { let conn = &mut get_conn(pool).await?; - diesel::update(comment.filter(creator_id.eq(for_creator_id))) - .set((removed.eq(new_removed), updated.eq(naive_now()))) + diesel::update(comment::table.filter(comment::creator_id.eq(for_creator_id))) + .set(( + comment::removed.eq(removed), + comment::updated.eq(naive_now()), + )) .get_results::(conn) .await } @@ -56,87 +56,47 @@ impl Comment { pool: &mut DbPool<'_>, comment_form: &CommentInsertForm, parent_path: Option<&Ltree>, + ) -> Result { + Self::insert_apub(pool, None, comment_form, parent_path).await + } + + pub async fn insert_apub( + pool: &mut DbPool<'_>, + timestamp: Option>, + comment_form: &CommentInsertForm, + parent_path: Option<&Ltree>, ) -> Result { let conn = &mut get_conn(pool).await?; + let comment_form = (comment_form, parent_path.map(|p| comment::path.eq(p))); - // Insert, to get the id - let inserted_comment = insert_into(comment) - .values(comment_form) - .on_conflict(ap_id) - .do_update() - .set(comment_form) - .get_result::(conn) - .await; - - if let Ok(comment_insert) = inserted_comment { - let comment_id = comment_insert.id; - - // You need to update the ltree column - let ltree = Ltree(if let Some(parent_path) = parent_path { - // The previous parent will already have 0 in it - // Append this comment id - format!("{}.{}", parent_path.0, comment_id) - } else { - // '0' is always the first path, append to that - format!("{}.{}", 0, comment_id) - }); - - let updated_comment = diesel::update(comment.find(comment_id)) - .set(path.eq(ltree)) + if let Some(timestamp) = timestamp { + insert_into(comment::table) + .values(comment_form) + .on_conflict(comment::ap_id) + .filter_target(coalesce(comment::updated, comment::published).lt(timestamp)) + .do_update() + .set(comment_form) .get_result::(conn) - .await; - - // Update the child count for the parent comment_aggregates - // You could 
do this with a trigger, but since you have to do this manually anyway, - // you can just have it here - if let Some(parent_path) = parent_path { - // You have to update counts for all parents, not just the immediate one - // TODO if the performance of this is terrible, it might be better to do this as part of a - // scheduled query... although the counts would often be wrong. - // - // The child_count query for reference: - // select c.id, c.path, count(c2.id) as child_count from comment c - // left join comment c2 on c2.path <@ c.path and c2.path != c.path - // group by c.id - - let parent_id = parent_path.0.split('.').nth(1); - - if let Some(parent_id) = parent_id { - let top_parent = format!("0.{}", parent_id); - let update_child_count_stmt = format!( - " -update comment_aggregates ca set child_count = c.child_count -from ( - select c.id, c.path, count(c2.id) as child_count from comment c - join comment c2 on c2.path <@ c.path and c2.path != c.path - and c.path <@ '{top_parent}' - group by c.id -) as c -where ca.comment_id = c.id" - ); - - sql_query(update_child_count_stmt).execute(conn).await?; - } - } - updated_comment + .await } else { - inserted_comment + insert_into(comment::table) + .values(comment_form) + .get_result::(conn) + .await } } + pub async fn read_from_apub_id( pool: &mut DbPool<'_>, object_id: Url, ) -> Result, Error> { let conn = &mut get_conn(pool).await?; let object_id: DbUrl = object_id.into(); - Ok( - comment - .filter(ap_id.eq(object_id)) - .first::(conn) - .await - .ok() - .map(Into::into), - ) + comment::table + .filter(comment::ap_id.eq(object_id)) + .first(conn) + .await + .optional() } pub fn parent_comment_id(&self) -> Option { @@ -157,9 +117,10 @@ impl Crud for Comment { type UpdateForm = CommentUpdateForm; type IdType = CommentId; - /// This is unimplemented, use [[Comment::create]] - async fn create(_pool: &mut DbPool<'_>, _comment_form: &Self::InsertForm) -> Result { - unimplemented!(); + /// Use [[Comment::create]] + async fn create(pool: &mut DbPool<'_>, comment_form: &Self::InsertForm) -> Result { + debug_assert!(false); + Comment::create(pool, comment_form, None).await } async fn update( @@ -168,7 +129,7 @@ impl Crud for Comment { comment_form: &Self::UpdateForm, ) -> Result { let conn = &mut get_conn(pool).await?; - diesel::update(comment.find(comment_id)) + diesel::update(comment::table.find(comment_id)) .set(comment_form) .get_result::(conn) .await @@ -192,18 +153,14 @@ impl Likeable for CommentLike { } async fn remove( pool: &mut DbPool<'_>, - person_id_: PersonId, - comment_id_: CommentId, + person_id: PersonId, + comment_id: CommentId, ) -> Result { - use crate::schema::comment_like::dsl::{comment_id, comment_like, person_id}; + use crate::schema::comment_like::dsl::comment_like; let conn = &mut get_conn(pool).await?; - diesel::delete( - comment_like - .filter(comment_id.eq(comment_id_)) - .filter(person_id.eq(person_id_)), - ) - .execute(conn) - .await + diesel::delete(comment_like.find((person_id, comment_id))) + .execute(conn) + .await } } @@ -228,12 +185,10 @@ impl Saveable for CommentSaved { pool: &mut DbPool<'_>, comment_saved_form: &CommentSavedForm, ) -> Result { - use crate::schema::comment_saved::dsl::{comment_id, comment_saved, person_id}; + use crate::schema::comment_saved::dsl::comment_saved; let conn = &mut get_conn(pool).await?; diesel::delete( - comment_saved - .filter(comment_id.eq(comment_saved_form.comment_id)) - .filter(person_id.eq(comment_saved_form.person_id)), + comment_saved.find((comment_saved_form.person_id, 
comment_saved_form.comment_id)), ) .execute(conn) .await @@ -242,8 +197,6 @@ impl Saveable for CommentSaved { #[cfg(test)] mod tests { - #![allow(clippy::unwrap_used)] - #![allow(clippy::indexing_slicing)] use crate::{ newtypes::LanguageId, @@ -266,50 +219,44 @@ mod tests { utils::build_db_pool_for_tests, }; use diesel_ltree::Ltree; + use lemmy_utils::error::LemmyResult; + use pretty_assertions::assert_eq; use serial_test::serial; + use url::Url; #[tokio::test] #[serial] - async fn test_crud() { + async fn test_crud() -> LemmyResult<()> { let pool = &build_db_pool_for_tests().await; let pool = &mut pool.into(); - let inserted_instance = Instance::read_or_create(pool, "my_domain.tld".to_string()) - .await - .unwrap(); + let inserted_instance = Instance::read_or_create(pool, "my_domain.tld".to_string()).await?; - let new_person = PersonInsertForm::builder() - .name("terry".into()) - .public_key("pubkey".to_string()) - .instance_id(inserted_instance.id) - .build(); + let new_person = PersonInsertForm::test_form(inserted_instance.id, "terry"); - let inserted_person = Person::create(pool, &new_person).await.unwrap(); + let inserted_person = Person::create(pool, &new_person).await?; - let new_community = CommunityInsertForm::builder() - .name("test community".to_string()) - .title("nada".to_owned()) - .public_key("pubkey".to_string()) - .instance_id(inserted_instance.id) - .build(); + let new_community = CommunityInsertForm::new( + inserted_instance.id, + "test community".to_string(), + "nada".to_owned(), + "pubkey".to_string(), + ); + let inserted_community = Community::create(pool, &new_community).await?; - let inserted_community = Community::create(pool, &new_community).await.unwrap(); + let new_post = PostInsertForm::new( + "A test post".into(), + inserted_person.id, + inserted_community.id, + ); + let inserted_post = Post::create(pool, &new_post).await?; - let new_post = PostInsertForm::builder() - .name("A test post".into()) - .creator_id(inserted_person.id) - .community_id(inserted_community.id) - .build(); - - let inserted_post = Post::create(pool, &new_post).await.unwrap(); - - let comment_form = CommentInsertForm::builder() - .content("A test comment".into()) - .creator_id(inserted_person.id) - .post_id(inserted_post.id) - .build(); - - let inserted_comment = Comment::create(pool, &comment_form, None).await.unwrap(); + let comment_form = CommentInsertForm::new( + inserted_person.id, + inserted_post.id, + "A test comment".into(), + ); + let inserted_comment = Comment::create(pool, &comment_form, None).await?; let expected_comment = Comment { id: inserted_comment.id, @@ -321,37 +268,35 @@ mod tests { path: Ltree(format!("0.{}", inserted_comment.id)), published: inserted_comment.published, updated: None, - ap_id: inserted_comment.ap_id.clone(), + ap_id: Url::parse(&format!( + "https://lemmy-alpha/comment/{}", + inserted_comment.id + ))? 
+ .into(), distinguished: false, local: true, language_id: LanguageId::default(), }; - let child_comment_form = CommentInsertForm::builder() - .content("A child comment".into()) - .creator_id(inserted_person.id) - .post_id(inserted_post.id) - .build(); - + let child_comment_form = CommentInsertForm::new( + inserted_person.id, + inserted_post.id, + "A child comment".into(), + ); let inserted_child_comment = - Comment::create(pool, &child_comment_form, Some(&inserted_comment.path)) - .await - .unwrap(); + Comment::create(pool, &child_comment_form, Some(&inserted_comment.path)).await?; // Comment Like let comment_like_form = CommentLikeForm { comment_id: inserted_comment.id, - post_id: inserted_post.id, person_id: inserted_person.id, score: 1, }; - let inserted_comment_like = CommentLike::like(pool, &comment_like_form).await.unwrap(); + let inserted_comment_like = CommentLike::like(pool, &comment_like_form).await?; let expected_comment_like = CommentLike { - id: inserted_comment_like.id, comment_id: inserted_comment.id, - post_id: inserted_post.id, person_id: inserted_person.id, published: inserted_comment_like.published, score: 1, @@ -363,10 +308,9 @@ mod tests { person_id: inserted_person.id, }; - let inserted_comment_saved = CommentSaved::save(pool, &comment_saved_form).await.unwrap(); + let inserted_comment_saved = CommentSaved::save(pool, &comment_saved_form).await?; let expected_comment_saved = CommentSaved { - id: inserted_comment_saved.id, comment_id: inserted_comment.id, person_id: inserted_person.id, published: inserted_comment_saved.published, @@ -377,27 +321,17 @@ mod tests { ..Default::default() }; - let updated_comment = Comment::update(pool, inserted_comment.id, &comment_update_form) - .await - .unwrap(); + let updated_comment = Comment::update(pool, inserted_comment.id, &comment_update_form).await?; - let read_comment = Comment::read(pool, inserted_comment.id).await.unwrap(); - let like_removed = CommentLike::remove(pool, inserted_person.id, inserted_comment.id) - .await - .unwrap(); - let saved_removed = CommentSaved::unsave(pool, &comment_saved_form) - .await - .unwrap(); - let num_deleted = Comment::delete(pool, inserted_comment.id).await.unwrap(); - Comment::delete(pool, inserted_child_comment.id) - .await - .unwrap(); - Post::delete(pool, inserted_post.id).await.unwrap(); - Community::delete(pool, inserted_community.id) - .await - .unwrap(); - Person::delete(pool, inserted_person.id).await.unwrap(); - Instance::delete(pool, inserted_instance.id).await.unwrap(); + let read_comment = Comment::read(pool, inserted_comment.id).await?; + let like_removed = CommentLike::remove(pool, inserted_person.id, inserted_comment.id).await?; + let saved_removed = CommentSaved::unsave(pool, &comment_saved_form).await?; + let num_deleted = Comment::delete(pool, inserted_comment.id).await?; + Comment::delete(pool, inserted_child_comment.id).await?; + Post::delete(pool, inserted_post.id).await?; + Community::delete(pool, inserted_community.id).await?; + Person::delete(pool, inserted_person.id).await?; + Instance::delete(pool, inserted_instance.id).await?; assert_eq!(expected_comment, read_comment); assert_eq!(expected_comment, inserted_comment); @@ -411,5 +345,7 @@ mod tests { assert_eq!(1, like_removed); assert_eq!(1, saved_removed); assert_eq!(1, num_deleted); + + Ok(()) } } diff --git a/crates/db_schema/src/impls/comment_reply.rs b/crates/db_schema/src/impls/comment_reply.rs index c5b5a3c6a..5a33a51d7 100644 --- a/crates/db_schema/src/impls/comment_reply.rs +++ 
b/crates/db_schema/src/impls/comment_reply.rs @@ -1,6 +1,7 @@ use crate::{ + diesel::OptionalExtension, newtypes::{CommentId, CommentReplyId, PersonId}, - schema::comment_reply::dsl::{comment_id, comment_reply, read, recipient_id}, + schema::comment_reply, source::comment_reply::{CommentReply, CommentReplyInsertForm, CommentReplyUpdateForm}, traits::Crud, utils::{get_conn, DbPool}, @@ -21,10 +22,10 @@ impl Crud for CommentReply { let conn = &mut get_conn(pool).await?; // since the return here isnt utilized, we dont need to do an update - // but get_result doesnt return the existing row here - insert_into(comment_reply) + // but get_result doesn't return the existing row here + insert_into(comment_reply::table) .values(comment_reply_form) - .on_conflict((recipient_id, comment_id)) + .on_conflict((comment_reply::recipient_id, comment_reply::comment_id)) .do_update() .set(comment_reply_form) .get_result::(conn) @@ -37,7 +38,7 @@ impl Crud for CommentReply { comment_reply_form: &Self::UpdateForm, ) -> Result { let conn = &mut get_conn(pool).await?; - diesel::update(comment_reply.find(comment_reply_id)) + diesel::update(comment_reply::table.find(comment_reply_id)) .set(comment_reply_form) .get_result::(conn) .await @@ -51,11 +52,11 @@ impl CommentReply { ) -> Result, Error> { let conn = &mut get_conn(pool).await?; diesel::update( - comment_reply - .filter(recipient_id.eq(for_recipient_id)) - .filter(read.eq(false)), + comment_reply::table + .filter(comment_reply::recipient_id.eq(for_recipient_id)) + .filter(comment_reply::read.eq(false)), ) - .set(read.eq(true)) + .set(comment_reply::read.eq(true)) .get_results::(conn) .await } @@ -63,121 +64,26 @@ impl CommentReply { pub async fn read_by_comment( pool: &mut DbPool<'_>, for_comment_id: CommentId, - ) -> Result { + ) -> Result, Error> { let conn = &mut get_conn(pool).await?; - comment_reply - .filter(comment_id.eq(for_comment_id)) - .first::(conn) + comment_reply::table + .filter(comment_reply::comment_id.eq(for_comment_id)) + .first(conn) .await - } -} - -#[cfg(test)] -mod tests { - #![allow(clippy::unwrap_used)] - #![allow(clippy::indexing_slicing)] - - use crate::{ - source::{ - comment::{Comment, CommentInsertForm}, - comment_reply::{CommentReply, CommentReplyInsertForm, CommentReplyUpdateForm}, - community::{Community, CommunityInsertForm}, - instance::Instance, - person::{Person, PersonInsertForm}, - post::{Post, PostInsertForm}, - }, - traits::Crud, - utils::build_db_pool_for_tests, - }; - use serial_test::serial; - - #[tokio::test] - #[serial] - async fn test_crud() { - let pool = &build_db_pool_for_tests().await; - let pool = &mut pool.into(); - - let inserted_instance = Instance::read_or_create(pool, "my_domain.tld".to_string()) - .await - .unwrap(); - - let new_person = PersonInsertForm::builder() - .name("terrylake".into()) - .public_key("pubkey".to_string()) - .instance_id(inserted_instance.id) - .build(); - - let inserted_person = Person::create(pool, &new_person).await.unwrap(); - - let recipient_form = PersonInsertForm::builder() - .name("terrylakes recipient".into()) - .public_key("pubkey".to_string()) - .instance_id(inserted_instance.id) - .build(); - - let inserted_recipient = Person::create(pool, &recipient_form).await.unwrap(); - - let new_community = CommunityInsertForm::builder() - .name("test community lake".to_string()) - .title("nada".to_owned()) - .public_key("pubkey".to_string()) - .instance_id(inserted_instance.id) - .build(); - - let inserted_community = Community::create(pool, &new_community).await.unwrap(); - - 
let new_post = PostInsertForm::builder() - .name("A test post".into()) - .creator_id(inserted_person.id) - .community_id(inserted_community.id) - .build(); - - let inserted_post = Post::create(pool, &new_post).await.unwrap(); - - let comment_form = CommentInsertForm::builder() - .content("A test comment".into()) - .creator_id(inserted_person.id) - .post_id(inserted_post.id) - .build(); - - let inserted_comment = Comment::create(pool, &comment_form, None).await.unwrap(); - - let comment_reply_form = CommentReplyInsertForm { - recipient_id: inserted_recipient.id, - comment_id: inserted_comment.id, - read: None, - }; - - let inserted_reply = CommentReply::create(pool, &comment_reply_form) - .await - .unwrap(); - - let expected_reply = CommentReply { - id: inserted_reply.id, - recipient_id: inserted_reply.recipient_id, - comment_id: inserted_reply.comment_id, - read: false, - published: inserted_reply.published, - }; - - let read_reply = CommentReply::read(pool, inserted_reply.id).await.unwrap(); - - let comment_reply_update_form = CommentReplyUpdateForm { read: Some(false) }; - let updated_reply = CommentReply::update(pool, inserted_reply.id, &comment_reply_update_form) - .await - .unwrap(); - - Comment::delete(pool, inserted_comment.id).await.unwrap(); - Post::delete(pool, inserted_post.id).await.unwrap(); - Community::delete(pool, inserted_community.id) - .await - .unwrap(); - Person::delete(pool, inserted_person.id).await.unwrap(); - Person::delete(pool, inserted_recipient.id).await.unwrap(); - Instance::delete(pool, inserted_instance.id).await.unwrap(); - - assert_eq!(expected_reply, read_reply); - assert_eq!(expected_reply, inserted_reply); - assert_eq!(expected_reply, updated_reply); + .optional() + } + + pub async fn read_by_comment_and_person( + pool: &mut DbPool<'_>, + for_comment_id: CommentId, + for_recipient_id: PersonId, + ) -> Result, Error> { + let conn = &mut get_conn(pool).await?; + comment_reply::table + .filter(comment_reply::comment_id.eq(for_comment_id)) + .filter(comment_reply::recipient_id.eq(for_recipient_id)) + .first(conn) + .await + .optional() } } diff --git a/crates/db_schema/src/impls/comment_report.rs b/crates/db_schema/src/impls/comment_report.rs index ff93915e1..19c12876f 100644 --- a/crates/db_schema/src/impls/comment_report.rs +++ b/crates/db_schema/src/impls/comment_report.rs @@ -1,6 +1,9 @@ use crate::{ - newtypes::{CommentReportId, PersonId}, - schema::comment_report::dsl::{comment_report, resolved, resolver_id, updated}, + newtypes::{CommentId, CommentReportId, PersonId}, + schema::comment_report::{ + comment_id, + dsl::{comment_report, resolved, resolver_id, updated}, + }, source::comment_report::{CommentReport, CommentReportForm}, traits::Reportable, utils::{get_conn, naive_now, DbPool}, @@ -17,6 +20,7 @@ use diesel_async::RunQueryDsl; impl Reportable for CommentReport { type Form = CommentReportForm; type IdType = CommentReportId; + type ObjectIdType = CommentId; /// creates a comment report and returns it /// /// * `conn` - the postgres connection @@ -53,6 +57,22 @@ impl Reportable for CommentReport { .await } + async fn resolve_all_for_object( + pool: &mut DbPool<'_>, + comment_id_: CommentId, + by_resolver_id: PersonId, + ) -> Result { + let conn = &mut get_conn(pool).await?; + update(comment_report.filter(comment_id.eq(comment_id_))) + .set(( + resolved.eq(true), + resolver_id.eq(by_resolver_id), + updated.eq(naive_now()), + )) + .execute(conn) + .await + } + /// unresolve a comment report /// /// * `conn` - the postgres connection diff --git 
a/crates/db_schema/src/impls/community.rs b/crates/db_schema/src/impls/community.rs index 442239289..8efc579e9 100644 --- a/crates/db_schema/src/impls/community.rs +++ b/crates/db_schema/src/impls/community.rs @@ -1,6 +1,14 @@ use crate::{ + diesel::{DecoratableTarget, OptionalExtension}, newtypes::{CommunityId, DbUrl, PersonId}, - schema::{community, community_follower, instance}, + schema::{ + community, + community_follower, + community_moderator, + community_person_ban, + instance, + post, + }, source::{ actor_language::CommunityLanguage, community::{ @@ -14,24 +22,34 @@ use crate::{ CommunityPersonBanForm, CommunityUpdateForm, }, + post::Post, }, traits::{ApubActor, Bannable, Crud, Followable, Joinable}, - utils::{functions::lower, get_conn, DbPool}, + utils::{ + functions::{coalesce, lower}, + get_conn, + DbPool, + }, + ListingType, SubscribedType, }; +use chrono::{DateTime, Utc}; use diesel::{ deserialize, - dsl, - dsl::insert_into, + dsl::{self, exists, insert_into, not}, pg::Pg, result::Error, + select, sql_types, + update, + BoolExpressionMethods, ExpressionMethods, NullableExpressionMethods, QueryDsl, Queryable, }; use diesel_async::RunQueryDsl; +use lemmy_utils::error::{LemmyErrorType, LemmyResult}; #[async_trait] impl Crud for Community { @@ -40,25 +58,15 @@ impl Crud for Community { type IdType = CommunityId; async fn create(pool: &mut DbPool<'_>, form: &Self::InsertForm) -> Result { - let is_new_community = match &form.actor_id { - Some(id) => Community::read_from_apub_id(pool, id).await?.is_none(), - None => true, - }; let conn = &mut get_conn(pool).await?; - // Can't do separate insert/update commands because InsertForm/UpdateForm aren't convertible let community_ = insert_into(community::table) .values(form) - .on_conflict(community::actor_id) - .do_update() - .set(form) .get_result::(conn) .await?; // Initialize languages for new community - if is_new_community { - CommunityLanguage::update(pool, vec![], community_.id).await?; - } + CommunityLanguage::update(pool, vec![], community_.id).await?; Ok(community_) } @@ -83,9 +91,8 @@ impl Joinable for CommunityModerator { pool: &mut DbPool<'_>, community_moderator_form: &CommunityModeratorForm, ) -> Result { - use crate::schema::community_moderator::dsl::community_moderator; let conn = &mut get_conn(pool).await?; - insert_into(community_moderator) + insert_into(community_moderator::table) .values(community_moderator_form) .get_result::(conn) .await @@ -95,13 +102,11 @@ impl Joinable for CommunityModerator { pool: &mut DbPool<'_>, community_moderator_form: &CommunityModeratorForm, ) -> Result { - use crate::schema::community_moderator::dsl::{community_id, community_moderator, person_id}; let conn = &mut get_conn(pool).await?; - diesel::delete( - community_moderator - .filter(community_id.eq(community_moderator_form.community_id)) - .filter(person_id.eq(community_moderator_form.person_id)), - ) + diesel::delete(community_moderator::table.find(( + community_moderator_form.person_id, + community_moderator_form.community_id, + ))) .execute(conn) .await } @@ -113,29 +118,105 @@ pub enum CollectionType { } impl Community { - /// Get the community which has a given moderators or featured url, also return the collection type + pub async fn insert_apub( + pool: &mut DbPool<'_>, + timestamp: DateTime, + form: &CommunityInsertForm, + ) -> Result { + let is_new_community = match &form.actor_id { + Some(id) => Community::read_from_apub_id(pool, id).await?.is_none(), + None => true, + }; + let conn = &mut get_conn(pool).await?; + + // Can't 
do separate insert/update commands because InsertForm/UpdateForm aren't convertible + let community_ = insert_into(community::table) + .values(form) + .on_conflict(community::actor_id) + .filter_target(coalesce(community::updated, community::published).lt(timestamp)) + .do_update() + .set(form) + .get_result::(conn) + .await?; + + // Initialize languages for new community + if is_new_community { + CommunityLanguage::update(pool, vec![], community_.id).await?; + } + + Ok(community_) + } + + /// Get the community which has a given moderators or featured url, also return the collection + /// type pub async fn get_by_collection_url( pool: &mut DbPool<'_>, url: &DbUrl, - ) -> Result<(Community, CollectionType), Error> { - use crate::schema::community::dsl::{featured_url, moderators_url}; - use CollectionType::*; + ) -> LemmyResult<(Community, CollectionType)> { let conn = &mut get_conn(pool).await?; let res = community::table - .filter(moderators_url.eq(url)) - .first::(conn) + .filter(community::moderators_url.eq(url)) + .first(conn) .await; + if let Ok(c) = res { - return Ok((c, Moderators)); + Ok((c, CollectionType::Moderators)) + } else { + let res = community::table + .filter(community::featured_url.eq(url)) + .first(conn) + .await; + if let Ok(c) = res { + Ok((c, CollectionType::Featured)) + } else { + Err(LemmyErrorType::NotFound.into()) + } } - let res = community::table - .filter(featured_url.eq(url)) - .first::(conn) - .await; - if let Ok(c) = res { - return Ok((c, Featured)); + } + + pub async fn set_featured_posts( + community_id: CommunityId, + posts: Vec, + pool: &mut DbPool<'_>, + ) -> Result<(), Error> { + let conn = &mut get_conn(pool).await?; + for p in &posts { + debug_assert!(p.community_id == community_id); } - Err(diesel::NotFound) + // Mark the given posts as featured and all other posts as not featured. 
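The update that follows marks exactly the listed posts as featured and clears the flag on every other post in the community in a single statement. A rough standalone sketch of the same idea (assuming this crate's `post` schema and newtype ids; the extra `featured_community.or(...)` filter in the real query is only a performance optimization and is omitted here):

use diesel::prelude::*;
use diesel_async::{AsyncPgConnection, RunQueryDsl};

// Sketch only: set featured_community = (id IN featured_ids) for the whole community.
async fn feature_only(
  conn: &mut AsyncPgConnection,
  community_id: CommunityId,
  featured_ids: Vec<PostId>,
) -> QueryResult<usize> {
  diesel::update(post::table)
    .filter(post::community_id.eq(community_id))
    .set(post::featured_community.eq(post::id.eq_any(featured_ids)))
    .execute(conn)
    .await
}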
+ let post_ids = posts.iter().map(|p| p.id); + update(post::table) + .filter(post::community_id.eq(community_id)) + // This filter is just for performance + .filter(post::featured_community.or(post::id.eq_any(post_ids.clone()))) + .set(post::featured_community.eq(post::id.eq_any(post_ids))) + .execute(conn) + .await?; + Ok(()) + } + + pub async fn get_random_community_id( + pool: &mut DbPool<'_>, + type_: &Option, + ) -> Result { + let conn = &mut get_conn(pool).await?; + sql_function!(fn random() -> Text); + + let mut query = community::table + .filter(not(community::deleted)) + .filter(not(community::removed)) + .into_boxed(); + + if let Some(ListingType::Local) = type_ { + query = query.filter(community::local); + } + + query + .select(community::id) + .order(random()) + .limit(1) + .first::(conn) + .await } } @@ -144,37 +225,68 @@ impl CommunityModerator { pool: &mut DbPool<'_>, for_community_id: CommunityId, ) -> Result { - use crate::schema::community_moderator::dsl::{community_id, community_moderator}; let conn = &mut get_conn(pool).await?; - diesel::delete(community_moderator.filter(community_id.eq(for_community_id))) - .execute(conn) - .await + diesel::delete( + community_moderator::table.filter(community_moderator::community_id.eq(for_community_id)), + ) + .execute(conn) + .await } pub async fn leave_all_communities( pool: &mut DbPool<'_>, for_person_id: PersonId, ) -> Result { - use crate::schema::community_moderator::dsl::{community_moderator, person_id}; let conn = &mut get_conn(pool).await?; - diesel::delete(community_moderator.filter(person_id.eq(for_person_id))) - .execute(conn) - .await + diesel::delete( + community_moderator::table.filter(community_moderator::person_id.eq(for_person_id)), + ) + .execute(conn) + .await } pub async fn get_person_moderated_communities( pool: &mut DbPool<'_>, for_person_id: PersonId, ) -> Result, Error> { - use crate::schema::community_moderator::dsl::{community_id, community_moderator, person_id}; let conn = &mut get_conn(pool).await?; - community_moderator - .filter(person_id.eq(for_person_id)) - .select(community_id) + community_moderator::table + .filter(community_moderator::person_id.eq(for_person_id)) + .select(community_moderator::community_id) .load::(conn) .await } + + /// Checks to make sure the acting moderator was added earlier than the target moderator + pub async fn is_higher_mod_check( + pool: &mut DbPool<'_>, + for_community_id: CommunityId, + mod_person_id: PersonId, + target_person_ids: Vec, + ) -> LemmyResult<()> { + let conn = &mut get_conn(pool).await?; + + // Build the list of persons + let mut persons = target_person_ids; + persons.push(mod_person_id); + persons.dedup(); + + let res = community_moderator::table + .filter(community_moderator::community_id.eq(for_community_id)) + .filter(community_moderator::person_id.eq_any(persons)) + .order_by(community_moderator::published) + // This does a limit 1 select first + .first::(conn) + .await?; + + // If the first result sorted by published is the acting mod + if res.person_id == mod_person_id { + Ok(()) + } else { + Err(LemmyErrorType::NotHigherMod)? 
+ } + } } #[async_trait] @@ -184,11 +296,13 @@ impl Bannable for CommunityPersonBan { pool: &mut DbPool<'_>, community_person_ban_form: &CommunityPersonBanForm, ) -> Result { - use crate::schema::community_person_ban::dsl::{community_id, community_person_ban, person_id}; let conn = &mut get_conn(pool).await?; - insert_into(community_person_ban) + insert_into(community_person_ban::table) .values(community_person_ban_form) - .on_conflict((community_id, person_id)) + .on_conflict(( + community_person_ban::community_id, + community_person_ban::person_id, + )) .do_update() .set(community_person_ban_form) .get_result::(conn) @@ -199,13 +313,11 @@ impl Bannable for CommunityPersonBan { pool: &mut DbPool<'_>, community_person_ban_form: &CommunityPersonBanForm, ) -> Result { - use crate::schema::community_person_ban::dsl::{community_id, community_person_ban, person_id}; let conn = &mut get_conn(pool).await?; - diesel::delete( - community_person_ban - .filter(community_id.eq(community_person_ban_form.community_id)) - .filter(person_id.eq(community_person_ban_form.person_id)), - ) + diesel::delete(community_person_ban::table.find(( + community_person_ban_form.person_id, + community_person_ban_form.community_id, + ))) .execute(conn) .await } @@ -232,18 +344,18 @@ impl CommunityFollower { /// Check if a remote instance has any followers on local instance. For this it is enough to check /// if any follow relation is stored. Dont use this for local community. - pub async fn has_local_followers( + pub async fn check_has_local_followers( pool: &mut DbPool<'_>, remote_community_id: CommunityId, - ) -> Result { - use crate::schema::community_follower::dsl::{community_follower, community_id}; - use diesel::dsl::{exists, select}; + ) -> LemmyResult<()> { let conn = &mut get_conn(pool).await?; - select(exists( - community_follower.filter(community_id.eq(remote_community_id)), - )) - .get_result(conn) - .await + select(exists(community_follower::table.filter( + community_follower::community_id.eq(remote_community_id), + ))) + .get_result::(conn) + .await? 
+ .then_some(()) + .ok_or(LemmyErrorType::CommunityHasNoFollowers.into()) } } @@ -262,11 +374,13 @@ impl Queryable, Pg> for SubscribedType { impl Followable for CommunityFollower { type Form = CommunityFollowerForm; async fn follow(pool: &mut DbPool<'_>, form: &CommunityFollowerForm) -> Result { - use crate::schema::community_follower::dsl::{community_follower, community_id, person_id}; let conn = &mut get_conn(pool).await?; - insert_into(community_follower) + insert_into(community_follower::table) .values(form) - .on_conflict((community_id, person_id)) + .on_conflict(( + community_follower::community_id, + community_follower::person_id, + )) .do_update() .set(form) .get_result::(conn) @@ -274,35 +388,21 @@ impl Followable for CommunityFollower { } async fn follow_accepted( pool: &mut DbPool<'_>, - community_id_: CommunityId, - person_id_: PersonId, + community_id: CommunityId, + person_id: PersonId, ) -> Result { - use crate::schema::community_follower::dsl::{ - community_follower, - community_id, - pending, - person_id, - }; let conn = &mut get_conn(pool).await?; - diesel::update( - community_follower - .filter(community_id.eq(community_id_)) - .filter(person_id.eq(person_id_)), - ) - .set(pending.eq(false)) - .get_result::(conn) - .await + diesel::update(community_follower::table.find((person_id, community_id))) + .set(community_follower::pending.eq(false)) + .get_result::(conn) + .await } + async fn unfollow(pool: &mut DbPool<'_>, form: &CommunityFollowerForm) -> Result { - use crate::schema::community_follower::dsl::{community_follower, community_id, person_id}; let conn = &mut get_conn(pool).await?; - diesel::delete( - community_follower - .filter(community_id.eq(&form.community_id)) - .filter(person_id.eq(&form.person_id)), - ) - .execute(conn) - .await + diesel::delete(community_follower::table.find((form.person_id, form.community_id))) + .execute(conn) + .await } } @@ -313,21 +413,18 @@ impl ApubActor for Community { object_id: &DbUrl, ) -> Result, Error> { let conn = &mut get_conn(pool).await?; - Ok( - community::table - .filter(community::actor_id.eq(object_id)) - .first::(conn) - .await - .ok() - .map(Into::into), - ) + community::table + .filter(community::actor_id.eq(object_id)) + .first(conn) + .await + .optional() } async fn read_from_name( pool: &mut DbPool<'_>, community_name: &str, include_deleted: bool, - ) -> Result { + ) -> Result, Error> { let conn = &mut get_conn(pool).await?; let mut q = community::table .into_boxed() @@ -338,30 +435,28 @@ impl ApubActor for Community { .filter(community::deleted.eq(false)) .filter(community::removed.eq(false)); } - q.first::(conn).await + q.first(conn).await.optional() } async fn read_from_name_and_domain( pool: &mut DbPool<'_>, community_name: &str, for_domain: &str, - ) -> Result { + ) -> Result, Error> { let conn = &mut get_conn(pool).await?; community::table .inner_join(instance::table) .filter(lower(community::name).eq(community_name.to_lowercase())) .filter(lower(instance::domain).eq(for_domain.to_lowercase())) .select(community::all_columns) - .first::(conn) + .first(conn) .await + .optional() } } #[cfg(test)] mod tests { - #![allow(clippy::unwrap_used)] - #![allow(clippy::indexing_slicing)] - use crate::{ source::{ community::{ @@ -376,44 +471,44 @@ mod tests { CommunityUpdateForm, }, instance::Instance, + local_user::LocalUser, person::{Person, PersonInsertForm}, }, traits::{Bannable, Crud, Followable, Joinable}, utils::build_db_pool_for_tests, + CommunityVisibility, }; + use lemmy_utils::error::LemmyResult; + use 
pretty_assertions::assert_eq; use serial_test::serial; #[tokio::test] #[serial] - async fn test_crud() { + async fn test_crud() -> LemmyResult<()> { let pool = &build_db_pool_for_tests().await; let pool = &mut pool.into(); - let inserted_instance = Instance::read_or_create(pool, "my_domain.tld".to_string()) - .await - .unwrap(); + let inserted_instance = Instance::read_or_create(pool, "my_domain.tld".to_string()).await?; - let new_person = PersonInsertForm::builder() - .name("bobbee".into()) - .public_key("pubkey".to_string()) - .instance_id(inserted_instance.id) - .build(); + let bobby_person = PersonInsertForm::test_form(inserted_instance.id, "bobby"); + let inserted_bobby = Person::create(pool, &bobby_person).await?; - let inserted_person = Person::create(pool, &new_person).await.unwrap(); + let artemis_person = PersonInsertForm::test_form(inserted_instance.id, "artemis"); + let inserted_artemis = Person::create(pool, &artemis_person).await?; - let new_community = CommunityInsertForm::builder() - .name("TIL".into()) - .title("nada".to_owned()) - .public_key("pubkey".to_string()) - .instance_id(inserted_instance.id) - .build(); - - let inserted_community = Community::create(pool, &new_community).await.unwrap(); + let new_community = CommunityInsertForm::new( + inserted_instance.id, + "TIL".into(), + "nada".to_owned(), + "pubkey".to_string(), + ); + let inserted_community = Community::create(pool, &new_community).await?; let expected_community = Community { id: inserted_community.id, name: "TIL".into(), title: "nada".to_owned(), + sidebar: None, description: None, nsfw: false, removed: false, @@ -429,101 +524,128 @@ mod tests { banner: None, followers_url: inserted_community.followers_url.clone(), inbox_url: inserted_community.inbox_url.clone(), - shared_inbox_url: None, moderators_url: None, featured_url: None, hidden: false, posting_restricted_to_mods: false, instance_id: inserted_instance.id, + visibility: CommunityVisibility::Public, }; let community_follower_form = CommunityFollowerForm { community_id: inserted_community.id, - person_id: inserted_person.id, + person_id: inserted_bobby.id, pending: false, }; - let inserted_community_follower = CommunityFollower::follow(pool, &community_follower_form) - .await - .unwrap(); + let inserted_community_follower = + CommunityFollower::follow(pool, &community_follower_form).await?; let expected_community_follower = CommunityFollower { - id: inserted_community_follower.id, community_id: inserted_community.id, - person_id: inserted_person.id, + person_id: inserted_bobby.id, pending: false, published: inserted_community_follower.published, }; - let community_moderator_form = CommunityModeratorForm { + let bobby_moderator_form = CommunityModeratorForm { community_id: inserted_community.id, - person_id: inserted_person.id, + person_id: inserted_bobby.id, }; - let inserted_community_moderator = CommunityModerator::join(pool, &community_moderator_form) - .await - .unwrap(); + let inserted_bobby_moderator = CommunityModerator::join(pool, &bobby_moderator_form).await?; + + let artemis_moderator_form = CommunityModeratorForm { + community_id: inserted_community.id, + person_id: inserted_artemis.id, + }; + + let _inserted_artemis_moderator = + CommunityModerator::join(pool, &artemis_moderator_form).await?; let expected_community_moderator = CommunityModerator { - id: inserted_community_moderator.id, community_id: inserted_community.id, - person_id: inserted_person.id, - published: inserted_community_moderator.published, + person_id: inserted_bobby.id, + 
published: inserted_bobby_moderator.published, }; + let moderator_person_ids = vec![inserted_bobby.id, inserted_artemis.id]; + + // Make sure bobby is marked as a higher mod than artemis, and vice versa + let bobby_higher_check = CommunityModerator::is_higher_mod_check( + pool, + inserted_community.id, + inserted_bobby.id, + moderator_person_ids.clone(), + ) + .await; + assert!(bobby_higher_check.is_ok()); + + // Also check the other is_higher_mod_or_admin function just in case + let bobby_higher_check_2 = LocalUser::is_higher_mod_or_admin_check( + pool, + inserted_community.id, + inserted_bobby.id, + moderator_person_ids.clone(), + ) + .await; + assert!(bobby_higher_check_2.is_ok()); + + // This should throw an error, since artemis was added later + let artemis_higher_check = CommunityModerator::is_higher_mod_check( + pool, + inserted_community.id, + inserted_artemis.id, + moderator_person_ids, + ) + .await; + assert!(artemis_higher_check.is_err()); + let community_person_ban_form = CommunityPersonBanForm { community_id: inserted_community.id, - person_id: inserted_person.id, + person_id: inserted_bobby.id, expires: None, }; - let inserted_community_person_ban = CommunityPersonBan::ban(pool, &community_person_ban_form) - .await - .unwrap(); + let inserted_community_person_ban = + CommunityPersonBan::ban(pool, &community_person_ban_form).await?; let expected_community_person_ban = CommunityPersonBan { - id: inserted_community_person_ban.id, community_id: inserted_community.id, - person_id: inserted_person.id, + person_id: inserted_bobby.id, published: inserted_community_person_ban.published, expires: None, }; - let read_community = Community::read(pool, inserted_community.id).await.unwrap(); + let read_community = Community::read(pool, inserted_community.id).await?; let update_community_form = CommunityUpdateForm { title: Some("nada".to_owned()), ..Default::default() }; - let updated_community = Community::update(pool, inserted_community.id, &update_community_form) - .await - .unwrap(); + let updated_community = + Community::update(pool, inserted_community.id, &update_community_form).await?; - let ignored_community = CommunityFollower::unfollow(pool, &community_follower_form) - .await - .unwrap(); - let left_community = CommunityModerator::leave(pool, &community_moderator_form) - .await - .unwrap(); - let unban = CommunityPersonBan::unban(pool, &community_person_ban_form) - .await - .unwrap(); - let num_deleted = Community::delete(pool, inserted_community.id) - .await - .unwrap(); - Person::delete(pool, inserted_person.id).await.unwrap(); - Instance::delete(pool, inserted_instance.id).await.unwrap(); + let ignored_community = CommunityFollower::unfollow(pool, &community_follower_form).await?; + let left_community = CommunityModerator::leave(pool, &bobby_moderator_form).await?; + let unban = CommunityPersonBan::unban(pool, &community_person_ban_form).await?; + let num_deleted = Community::delete(pool, inserted_community.id).await?; + Person::delete(pool, inserted_bobby.id).await?; + Person::delete(pool, inserted_artemis.id).await?; + Instance::delete(pool, inserted_instance.id).await?; assert_eq!(expected_community, read_community); assert_eq!(expected_community, inserted_community); assert_eq!(expected_community, updated_community); assert_eq!(expected_community_follower, inserted_community_follower); - assert_eq!(expected_community_moderator, inserted_community_moderator); + assert_eq!(expected_community_moderator, inserted_bobby_moderator); assert_eq!(expected_community_person_ban, 
inserted_community_person_ban); assert_eq!(1, ignored_community); assert_eq!(1, left_community); assert_eq!(1, unban); // assert_eq!(2, loaded_count); assert_eq!(1, num_deleted); + + Ok(()) } } diff --git a/crates/db_schema/src/impls/community_block.rs b/crates/db_schema/src/impls/community_block.rs index 9dc21bf2f..cd541cd8b 100644 --- a/crates/db_schema/src/impls/community_block.rs +++ b/crates/db_schema/src/impls/community_block.rs @@ -1,20 +1,64 @@ use crate::{ - schema::community_block::dsl::{community_block, community_id, person_id}, - source::community_block::{CommunityBlock, CommunityBlockForm}, + newtypes::{CommunityId, PersonId}, + schema::{community, community_block}, + source::{ + community::Community, + community_block::{CommunityBlock, CommunityBlockForm}, + }, traits::Blockable, utils::{get_conn, DbPool}, }; -use diesel::{dsl::insert_into, result::Error, ExpressionMethods, QueryDsl}; +use diesel::{ + dsl::{exists, insert_into, not}, + result::Error, + select, + ExpressionMethods, + QueryDsl, +}; use diesel_async::RunQueryDsl; +use lemmy_utils::{error::LemmyResult, LemmyErrorType}; + +impl CommunityBlock { + pub async fn read( + pool: &mut DbPool<'_>, + for_person_id: PersonId, + for_community_id: CommunityId, + ) -> LemmyResult<()> { + let conn = &mut get_conn(pool).await?; + select(not(exists( + community_block::table.find((for_person_id, for_community_id)), + ))) + .get_result::(conn) + .await? + .then_some(()) + .ok_or(LemmyErrorType::CommunityIsBlocked.into()) + } + + pub async fn for_person( + pool: &mut DbPool<'_>, + person_id: PersonId, + ) -> Result, Error> { + let conn = &mut get_conn(pool).await?; + community_block::table + .inner_join(community::table) + .select(community::all_columns) + .filter(community_block::person_id.eq(person_id)) + .filter(community::deleted.eq(false)) + .filter(community::removed.eq(false)) + .order_by(community_block::published) + .load::(conn) + .await + } +} #[async_trait] impl Blockable for CommunityBlock { type Form = CommunityBlockForm; async fn block(pool: &mut DbPool<'_>, community_block_form: &Self::Form) -> Result { let conn = &mut get_conn(pool).await?; - insert_into(community_block) + insert_into(community_block::table) .values(community_block_form) - .on_conflict((person_id, community_id)) + .on_conflict((community_block::person_id, community_block::community_id)) .do_update() .set(community_block_form) .get_result::(conn) @@ -25,11 +69,10 @@ impl Blockable for CommunityBlock { community_block_form: &Self::Form, ) -> Result { let conn = &mut get_conn(pool).await?; - diesel::delete( - community_block - .filter(person_id.eq(community_block_form.person_id)) - .filter(community_id.eq(community_block_form.community_id)), - ) + diesel::delete(community_block::table.find(( + community_block_form.person_id, + community_block_form.community_id, + ))) .execute(conn) .await } diff --git a/crates/db_schema/src/impls/custom_emoji.rs b/crates/db_schema/src/impls/custom_emoji.rs index 050301659..9ba359071 100644 --- a/crates/db_schema/src/impls/custom_emoji.rs +++ b/crates/db_schema/src/impls/custom_emoji.rs @@ -8,36 +8,37 @@ use crate::{ custom_emoji::{CustomEmoji, CustomEmojiInsertForm, CustomEmojiUpdateForm}, custom_emoji_keyword::{CustomEmojiKeyword, CustomEmojiKeywordInsertForm}, }, + traits::Crud, utils::{get_conn, DbPool}, }; use diesel::{dsl::insert_into, result::Error, ExpressionMethods, QueryDsl}; use diesel_async::RunQueryDsl; -impl CustomEmoji { - pub async fn create(pool: &mut DbPool<'_>, form: &CustomEmojiInsertForm) -> Result 
{ +#[async_trait] +impl Crud for CustomEmoji { + type InsertForm = CustomEmojiInsertForm; + type UpdateForm = CustomEmojiUpdateForm; + type IdType = CustomEmojiId; + + async fn create(pool: &mut DbPool<'_>, form: &Self::InsertForm) -> Result { let conn = &mut get_conn(pool).await?; insert_into(custom_emoji) .values(form) .get_result::(conn) .await } - pub async fn update( + + async fn update( pool: &mut DbPool<'_>, - emoji_id: CustomEmojiId, - form: &CustomEmojiUpdateForm, + emoji_id: Self::IdType, + new_custom_emoji: &Self::UpdateForm, ) -> Result { let conn = &mut get_conn(pool).await?; diesel::update(custom_emoji.find(emoji_id)) - .set(form) + .set(new_custom_emoji) .get_result::(conn) .await } - pub async fn delete(pool: &mut DbPool<'_>, emoji_id: CustomEmojiId) -> Result { - let conn = &mut get_conn(pool).await?; - diesel::delete(custom_emoji.find(emoji_id)) - .execute(conn) - .await - } } impl CustomEmojiKeyword { diff --git a/crates/db_schema/src/impls/email_verification.rs b/crates/db_schema/src/impls/email_verification.rs index c5a8792fb..39c7fe0bc 100644 --- a/crates/db_schema/src/impls/email_verification.rs +++ b/crates/db_schema/src/impls/email_verification.rs @@ -25,7 +25,7 @@ impl EmailVerification { let conn = &mut get_conn(pool).await?; insert_into(email_verification) .values(form) - .get_result::(conn) + .get_result(conn) .await } @@ -34,7 +34,7 @@ impl EmailVerification { email_verification .filter(verification_token.eq(token)) .filter(published.gt(now.into_sql::() - 7.days())) - .first::(conn) + .first(conn) .await } pub async fn delete_old_tokens_for_local_user( diff --git a/crates/db_schema/src/impls/federation_allowlist.rs b/crates/db_schema/src/impls/federation_allowlist.rs index eb67acce8..cbfd14b03 100644 --- a/crates/db_schema/src/impls/federation_allowlist.rs +++ b/crates/db_schema/src/impls/federation_allowlist.rs @@ -49,18 +49,18 @@ impl FederationAllowList { } #[cfg(test)] mod tests { - #![allow(clippy::unwrap_used)] - #![allow(clippy::indexing_slicing)] use crate::{ source::{federation_allowlist::FederationAllowList, instance::Instance}, utils::build_db_pool_for_tests, }; + use diesel::result::Error; + use pretty_assertions::assert_eq; use serial_test::serial; #[tokio::test] #[serial] - async fn test_allowlist_insert_and_clear() { + async fn test_allowlist_insert_and_clear() -> Result<(), Error> { let pool = &build_db_pool_for_tests().await; let pool = &mut pool.into(); let domains = vec![ @@ -71,9 +71,9 @@ mod tests { let allowed = Some(domains.clone()); - FederationAllowList::replace(pool, allowed).await.unwrap(); + FederationAllowList::replace(pool, allowed).await?; - let allows = Instance::allowlist(pool).await.unwrap(); + let allows = Instance::allowlist(pool).await?; let allows_domains = allows .iter() .map(|i| i.domain.clone()) @@ -85,13 +85,13 @@ mod tests { // Now test clearing them via Some(empty vec) let clear_allows = Some(Vec::new()); - FederationAllowList::replace(pool, clear_allows) - .await - .unwrap(); - let allows = Instance::allowlist(pool).await.unwrap(); + FederationAllowList::replace(pool, clear_allows).await?; + let allows = Instance::allowlist(pool).await?; assert_eq!(0, allows.len()); - Instance::delete_all(pool).await.unwrap(); + Instance::delete_all(pool).await?; + + Ok(()) } } diff --git a/crates/db_schema/src/impls/federation_queue_state.rs b/crates/db_schema/src/impls/federation_queue_state.rs new file mode 100644 index 000000000..8c49dc568 --- /dev/null +++ b/crates/db_schema/src/impls/federation_queue_state.rs @@ -0,0 +1,46 @@ 
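The new `federation_queue_state.rs` file added below exposes two small helpers: `load`, which falls back to a default row when an instance has no stored state yet, and `upsert`, which writes the state back with an `ON CONFLICT ... DO UPDATE`. A hypothetical caller (names and field types assumed from the impl that follows) might look like:

use chrono::Utc;

// Sketch only: bump the failure counter for an instance and persist it.
async fn record_send_failure(pool: &mut DbPool<'_>, instance_id: InstanceId) -> Result<(), Error> {
  let mut state = FederationQueueState::load(pool, instance_id).await?;
  state.fail_count += 1;
  state.last_retry = Some(Utc::now());
  FederationQueueState::upsert(pool, &state).await
}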
+use crate::{ + newtypes::InstanceId, + source::federation_queue_state::FederationQueueState, + utils::{get_conn, DbPool}, +}; +use diesel::{prelude::*, result::Error}; +use diesel_async::RunQueryDsl; + +impl FederationQueueState { + /// load state or return a default empty value + pub async fn load( + pool: &mut DbPool<'_>, + instance_id_: InstanceId, + ) -> Result { + use crate::schema::federation_queue_state::dsl::{federation_queue_state, instance_id}; + let conn = &mut get_conn(pool).await?; + Ok( + federation_queue_state + .filter(instance_id.eq(&instance_id_)) + .select(FederationQueueState::as_select()) + .get_result(conn) + .await + .optional()? + .unwrap_or(FederationQueueState { + instance_id: instance_id_, + fail_count: 0, + last_retry: None, + last_successful_id: None, // this value is set to the most current id for new instances + last_successful_published_time: None, + }), + ) + } + pub async fn upsert(pool: &mut DbPool<'_>, state: &FederationQueueState) -> Result<(), Error> { + use crate::schema::federation_queue_state::dsl::{federation_queue_state, instance_id}; + let conn = &mut get_conn(pool).await?; + + state + .insert_into(federation_queue_state) + .on_conflict(instance_id) + .do_update() + .set(state) + .execute(conn) + .await?; + Ok(()) + } +} diff --git a/crates/db_schema/src/impls/image_upload.rs b/crates/db_schema/src/impls/image_upload.rs deleted file mode 100644 index 58edbf2e3..000000000 --- a/crates/db_schema/src/impls/image_upload.rs +++ /dev/null @@ -1,47 +0,0 @@ -use crate::{ - newtypes::{ImageUploadId, LocalUserId}, - schema::image_upload::dsl::{image_upload, local_user_id, pictrs_alias}, - source::image_upload::{ImageUpload, ImageUploadForm}, - utils::{get_conn, DbPool}, -}; -use diesel::{insert_into, result::Error, ExpressionMethods, QueryDsl, Table}; -use diesel_async::RunQueryDsl; - -impl ImageUpload { - pub async fn create(pool: &mut DbPool<'_>, form: &ImageUploadForm) -> Result { - let conn = &mut get_conn(pool).await?; - insert_into(image_upload) - .values(form) - .get_result::(conn) - .await - } - - pub async fn get_all_by_local_user_id( - pool: &mut DbPool<'_>, - user_id: &LocalUserId, - ) -> Result, Error> { - let conn = &mut get_conn(pool).await?; - image_upload - .filter(local_user_id.eq(user_id)) - .select(image_upload::all_columns()) - .load::(conn) - .await - } - - pub async fn delete( - pool: &mut DbPool<'_>, - image_upload_id: ImageUploadId, - ) -> Result { - let conn = &mut get_conn(pool).await?; - diesel::delete(image_upload.find(image_upload_id)) - .execute(conn) - .await - } - - pub async fn delete_by_alias(pool: &mut DbPool<'_>, alias: &str) -> Result { - let conn = &mut get_conn(pool).await?; - diesel::delete(image_upload.filter(pictrs_alias.eq(alias))) - .execute(conn) - .await - } -} diff --git a/crates/db_schema/src/impls/images.rs b/crates/db_schema/src/impls/images.rs new file mode 100644 index 000000000..8ded98e41 --- /dev/null +++ b/crates/db_schema/src/impls/images.rs @@ -0,0 +1,96 @@ +use crate::{ + newtypes::DbUrl, + schema::{image_details, local_image, remote_image}, + source::images::{ImageDetails, ImageDetailsForm, LocalImage, LocalImageForm, RemoteImage}, + utils::{get_conn, DbPool}, +}; +use diesel::{ + dsl::exists, + insert_into, + result::Error, + select, + ExpressionMethods, + NotFound, + QueryDsl, +}; +use diesel_async::RunQueryDsl; +use url::Url; + +impl LocalImage { + pub async fn create( + pool: &mut DbPool<'_>, + form: &LocalImageForm, + image_details_form: &ImageDetailsForm, + ) -> Result { + let conn = &mut 
get_conn(pool).await?; + conn + .build_transaction() + .run(|conn| { + Box::pin(async move { + let local_insert = insert_into(local_image::table) + .values(form) + .get_result::(conn) + .await; + + ImageDetails::create(&mut conn.into(), image_details_form).await?; + + local_insert + }) as _ + }) + .await + } + + pub async fn delete_by_alias(pool: &mut DbPool<'_>, alias: &str) -> Result { + let conn = &mut get_conn(pool).await?; + diesel::delete(local_image::table.filter(local_image::pictrs_alias.eq(alias))) + .get_result(conn) + .await + } + + pub async fn delete_by_url(pool: &mut DbPool<'_>, url: &DbUrl) -> Result { + let alias = url.as_str().split('/').last().ok_or(NotFound)?; + Self::delete_by_alias(pool, alias).await + } +} + +impl RemoteImage { + pub async fn create(pool: &mut DbPool<'_>, links: Vec) -> Result { + let conn = &mut get_conn(pool).await?; + let forms = links + .into_iter() + .map(|url| remote_image::dsl::link.eq::(url.into())) + .collect::>(); + insert_into(remote_image::table) + .values(forms) + .on_conflict_do_nothing() + .execute(conn) + .await + } + + pub async fn validate(pool: &mut DbPool<'_>, link_: DbUrl) -> Result<(), Error> { + let conn = &mut get_conn(pool).await?; + + let exists = select(exists( + remote_image::table.filter(remote_image::link.eq(link_)), + )) + .get_result::(conn) + .await?; + if exists { + Ok(()) + } else { + Err(NotFound) + } + } +} + +impl ImageDetails { + pub async fn create(pool: &mut DbPool<'_>, form: &ImageDetailsForm) -> Result { + let conn = &mut get_conn(pool).await?; + + insert_into(image_details::table) + .values(form) + .on_conflict_do_nothing() + .execute(conn) + .await + } +} diff --git a/crates/db_schema/src/impls/instance.rs b/crates/db_schema/src/impls/instance.rs index 7e162717c..6c72b5e18 100644 --- a/crates/db_schema/src/impls/instance.rs +++ b/crates/db_schema/src/impls/instance.rs @@ -1,23 +1,39 @@ use crate::{ diesel::dsl::IntervalDsl, newtypes::InstanceId, - schema::{federation_allowlist, federation_blocklist, instance, local_site, site}, - source::instance::{Instance, InstanceForm}, - utils::{functions::lower, get_conn, naive_now, now, DbPool}, + schema::{ + federation_allowlist, + federation_blocklist, + federation_queue_state, + instance, + local_site, + site, + }, + source::{ + federation_queue_state::FederationQueueState, + instance::{Instance, InstanceForm}, + }, + utils::{ + functions::{coalesce, lower}, + get_conn, + naive_now, + now, + DbPool, + }, }; use diesel::{ dsl::{count_star, insert_into}, result::Error, - sql_types::{Nullable, Timestamptz}, ExpressionMethods, NullableExpressionMethods, + OptionalExtension, QueryDsl, SelectableHelper, }; use diesel_async::RunQueryDsl; impl Instance { - /// Attempt to read Instance column for the given domain. If it doesnt exist, insert a new one. + /// Attempt to read Instance column for the given domain. If it doesn't exist, insert a new one. /// There is no need for update as the domain of an existing instance cant change. 
pub async fn read_or_create(pool: &mut DbPool<'_>, domain_: String) -> Result { use crate::schema::instance::domain; @@ -26,16 +42,19 @@ impl Instance { // First try to read the instance row and return directly if found let instance = instance::table .filter(lower(domain).eq(&domain_.to_lowercase())) - .first::(conn) - .await; + .first(conn) + .await + .optional()?; + + // TODO could convert this to unwrap_or_else once async closures are stable match instance { - Ok(i) => Ok(i), - Err(diesel::NotFound) => { + Some(i) => Ok(i), + None => { // Instance not in database yet, insert it - let form = InstanceForm::builder() - .domain(domain_) - .updated(Some(naive_now())) - .build(); + let form = InstanceForm { + updated: Some(naive_now()), + ..InstanceForm::new(domain_) + }; insert_into(instance::table) .values(&form) // Necessary because this method may be called concurrently for the same domain. This @@ -46,9 +65,25 @@ impl Instance { .get_result::(conn) .await } - e => e, } } + pub async fn read(pool: &mut DbPool<'_>, instance_id: InstanceId) -> Result { + let conn = &mut get_conn(pool).await?; + instance::table.find(instance_id).first(conn).await + } + + pub async fn update( + pool: &mut DbPool<'_>, + instance_id: InstanceId, + form: InstanceForm, + ) -> Result { + let mut conn = get_conn(pool).await?; + diesel::update(instance::table.find(instance_id)) + .set(form) + .execute(&mut conn) + .await + } + pub async fn delete(pool: &mut DbPool<'_>, instance_id: InstanceId) -> Result { let conn = &mut get_conn(pool).await?; diesel::delete(instance::table.find(instance_id)) @@ -59,21 +94,25 @@ impl Instance { pub async fn read_all(pool: &mut DbPool<'_>) -> Result, Error> { let conn = &mut get_conn(pool).await?; instance::table - .select(instance::all_columns) + .select(Self::as_select()) .get_results(conn) .await } - #[cfg(test)] + /// Only for use in tests pub async fn delete_all(pool: &mut DbPool<'_>) -> Result { let conn = &mut get_conn(pool).await?; + diesel::delete(federation_queue_state::table) + .execute(conn) + .await?; diesel::delete(instance::table).execute(conn).await } + pub async fn allowlist(pool: &mut DbPool<'_>) -> Result, Error> { let conn = &mut get_conn(pool).await?; instance::table .inner_join(federation_allowlist::table) - .select(instance::all_columns) + .select(Self::as_select()) .get_results(conn) .await } @@ -82,20 +121,20 @@ impl Instance { let conn = &mut get_conn(pool).await?; instance::table .inner_join(federation_blocklist::table) - .select(instance::all_columns) + .select(Self::as_select()) .get_results(conn) .await } - /// returns a list of all instances, each with a flag of whether the instance is allowed or not and dead or not - /// ordered by id - pub async fn read_all_with_blocked_and_dead( + /// returns a list of all instances, each with a flag of whether the instance is allowed or not + /// and dead or not ordered by id + pub async fn read_federated_with_blocked_and_dead( pool: &mut DbPool<'_>, ) -> Result, Error> { let conn = &mut get_conn(pool).await?; let is_dead_expr = coalesce(instance::updated, instance::published).lt(now() - 3.days()); - // this needs to be done in two steps because the meaning of the "blocked" column depends on the existence - // of any value at all in the allowlist. (so a normal join wouldn't work) + // this needs to be done in two steps because the meaning of the "blocked" column depends on the + // existence of any value at all in the allowlist. 
(so a normal join wouldn't work) let use_allowlist = federation_allowlist::table .select(count_star().gt(0)) .get_result::(conn) @@ -105,7 +144,7 @@ impl Instance { .left_join(federation_allowlist::table) .select(( Self::as_select(), - federation_allowlist::id.nullable().is_not_null(), + federation_allowlist::instance_id.nullable().is_not_null(), is_dead_expr, )) .order_by(instance::id) @@ -116,7 +155,7 @@ impl Instance { .left_join(federation_blocklist::table) .select(( Self::as_select(), - federation_blocklist::id.nullable().is_null(), + federation_blocklist::instance_id.nullable().is_null(), is_dead_expr, )) .order_by(instance::id) @@ -125,19 +164,25 @@ impl Instance { } } - pub async fn linked(pool: &mut DbPool<'_>) -> Result, Error> { + /// returns (instance, blocked, allowed, fed queue state) tuples + pub async fn read_all_with_fed_state( + pool: &mut DbPool<'_>, + ) -> Result, bool, bool)>, Error> { let conn = &mut get_conn(pool).await?; instance::table // omit instance representing the local site .left_join(site::table.inner_join(local_site::table)) .filter(local_site::id.is_null()) - // omit instances in the blocklist .left_join(federation_blocklist::table) - .filter(federation_blocklist::id.is_null()) - .select(instance::all_columns) + .left_join(federation_allowlist::table) + .left_join(federation_queue_state::table) + .select(( + Self::as_select(), + Option::::as_select(), + federation_blocklist::instance_id.nullable().is_not_null(), + federation_allowlist::instance_id.nullable().is_not_null(), + )) .get_results(conn) .await } } - -sql_function! { fn coalesce(x: Nullable, y: Timestamptz) -> Timestamptz; } diff --git a/crates/db_schema/src/impls/instance_block.rs b/crates/db_schema/src/impls/instance_block.rs index f7f81a234..1eb6e8f04 100644 --- a/crates/db_schema/src/impls/instance_block.rs +++ b/crates/db_schema/src/impls/instance_block.rs @@ -1,20 +1,62 @@ use crate::{ - schema::instance_block::dsl::{instance_block, instance_id, person_id}, - source::instance_block::{InstanceBlock, InstanceBlockForm}, + newtypes::{InstanceId, PersonId}, + schema::{instance, instance_block}, + source::{ + instance::Instance, + instance_block::{InstanceBlock, InstanceBlockForm}, + }, traits::Blockable, utils::{get_conn, DbPool}, }; -use diesel::{dsl::insert_into, result::Error, ExpressionMethods, QueryDsl}; +use diesel::{ + dsl::{exists, insert_into, not}, + result::Error, + select, + ExpressionMethods, + QueryDsl, +}; use diesel_async::RunQueryDsl; +use lemmy_utils::{error::LemmyResult, LemmyErrorType}; + +impl InstanceBlock { + pub async fn read( + pool: &mut DbPool<'_>, + for_person_id: PersonId, + for_instance_id: InstanceId, + ) -> LemmyResult<()> { + let conn = &mut get_conn(pool).await?; + select(not(exists( + instance_block::table.find((for_person_id, for_instance_id)), + ))) + .get_result::(conn) + .await? 
+ .then_some(()) + .ok_or(LemmyErrorType::InstanceIsBlocked.into()) + } + + pub async fn for_person( + pool: &mut DbPool<'_>, + person_id: PersonId, + ) -> Result, Error> { + let conn = &mut get_conn(pool).await?; + instance_block::table + .inner_join(instance::table) + .select(instance::all_columns) + .filter(instance_block::person_id.eq(person_id)) + .order_by(instance_block::published) + .load::(conn) + .await + } +} #[async_trait] impl Blockable for InstanceBlock { type Form = InstanceBlockForm; async fn block(pool: &mut DbPool<'_>, instance_block_form: &Self::Form) -> Result { let conn = &mut get_conn(pool).await?; - insert_into(instance_block) + insert_into(instance_block::table) .values(instance_block_form) - .on_conflict((person_id, instance_id)) + .on_conflict((instance_block::person_id, instance_block::instance_id)) .do_update() .set(instance_block_form) .get_result::(conn) @@ -25,11 +67,10 @@ impl Blockable for InstanceBlock { instance_block_form: &Self::Form, ) -> Result { let conn = &mut get_conn(pool).await?; - diesel::delete( - instance_block - .filter(person_id.eq(instance_block_form.person_id)) - .filter(instance_id.eq(instance_block_form.instance_id)), - ) + diesel::delete(instance_block::table.find(( + instance_block_form.person_id, + instance_block_form.instance_id, + ))) .execute(conn) .await } diff --git a/crates/db_schema/src/impls/language.rs b/crates/db_schema/src/impls/language.rs index 53aadbac8..57420fcd4 100644 --- a/crates/db_schema/src/impls/language.rs +++ b/crates/db_schema/src/impls/language.rs @@ -1,7 +1,8 @@ +use super::actor_language::UNDETERMINED_ID; use crate::{ diesel::ExpressionMethods, newtypes::LanguageId, - schema::language::dsl::{code, id, language}, + schema::language, source::language::Language, utils::{get_conn, DbPool}, }; @@ -9,56 +10,52 @@ use diesel::{result::Error, QueryDsl}; use diesel_async::RunQueryDsl; impl Language { - pub async fn read_all(pool: &mut DbPool<'_>) -> Result, Error> { + pub async fn read_all(pool: &mut DbPool<'_>) -> Result, Error> { let conn = &mut get_conn(pool).await?; - language.load::(conn).await + language::table.load(conn).await } - pub async fn read_from_id(pool: &mut DbPool<'_>, id_: LanguageId) -> Result { + pub async fn read_from_id(pool: &mut DbPool<'_>, id_: LanguageId) -> Result { let conn = &mut get_conn(pool).await?; - language.filter(id.eq(id_)).first::(conn).await + language::table.find(id_).first(conn).await } - /// Attempts to find the given language code and return its ID. If not found, returns none. - pub async fn read_id_from_code( - pool: &mut DbPool<'_>, - code_: Option<&str>, - ) -> Result, Error> { - if let Some(code_) = code_ { - let conn = &mut get_conn(pool).await?; - Ok( - language - .filter(code.eq(code_)) - .first::(conn) - .await - .map(|l| l.id) - .ok(), - ) - } else { - Ok(None) - } + /// Attempts to find the given language code and return its ID. 
+ pub async fn read_id_from_code(pool: &mut DbPool<'_>, code_: &str) -> Result { + let conn = &mut get_conn(pool).await?; + let res = language::table + .filter(language::code.eq(code_)) + .first::(conn) + .await + .map(|l| l.id); + + // Return undetermined by default + Ok(res.unwrap_or(UNDETERMINED_ID)) } } #[cfg(test)] +#[expect(clippy::indexing_slicing)] mod tests { - #![allow(clippy::unwrap_used)] - #![allow(clippy::indexing_slicing)] use crate::{source::language::Language, utils::build_db_pool_for_tests}; + use diesel::result::Error; + use pretty_assertions::assert_eq; use serial_test::serial; #[tokio::test] #[serial] - async fn test_languages() { + async fn test_languages() -> Result<(), Error> { let pool = &build_db_pool_for_tests().await; let pool = &mut pool.into(); - let all = Language::read_all(pool).await.unwrap(); + let all = Language::read_all(pool).await?; assert_eq!(184, all.len()); assert_eq!("ak", all[5].code); assert_eq!("lv", all[99].code); assert_eq!("yi", all[179].code); + + Ok(()) } } diff --git a/crates/db_schema/src/impls/local_site.rs b/crates/db_schema/src/impls/local_site.rs index 2d527c0ee..926814c48 100644 --- a/crates/db_schema/src/impls/local_site.rs +++ b/crates/db_schema/src/impls/local_site.rs @@ -1,32 +1,47 @@ use crate::{ - schema::local_site::dsl::local_site, + schema::local_site, source::local_site::{LocalSite, LocalSiteInsertForm, LocalSiteUpdateForm}, utils::{get_conn, DbPool}, }; use diesel::{dsl::insert_into, result::Error}; use diesel_async::RunQueryDsl; +use lemmy_utils::{error::LemmyResult, CACHE_DURATION_API}; +use moka::future::Cache; +use std::sync::LazyLock; impl LocalSite { pub async fn create(pool: &mut DbPool<'_>, form: &LocalSiteInsertForm) -> Result { let conn = &mut get_conn(pool).await?; - insert_into(local_site) + insert_into(local_site::table) .values(form) .get_result::(conn) .await } - pub async fn read(pool: &mut DbPool<'_>) -> Result { - let conn = &mut get_conn(pool).await?; - local_site.first::(conn).await + pub async fn read(pool: &mut DbPool<'_>) -> LemmyResult { + static CACHE: LazyLock> = LazyLock::new(|| { + Cache::builder() + .max_capacity(1) + .time_to_live(CACHE_DURATION_API) + .build() + }); + Ok( + CACHE + .try_get_with((), async { + let conn = &mut get_conn(pool).await?; + local_site::table.first(conn).await + }) + .await?, + ) } pub async fn update(pool: &mut DbPool<'_>, form: &LocalSiteUpdateForm) -> Result { let conn = &mut get_conn(pool).await?; - diesel::update(local_site) + diesel::update(local_site::table) .set(form) .get_result::(conn) .await } pub async fn delete(pool: &mut DbPool<'_>) -> Result { let conn = &mut get_conn(pool).await?; - diesel::delete(local_site).execute(conn).await + diesel::delete(local_site::table).execute(conn).await } } diff --git a/crates/db_schema/src/impls/local_site_rate_limit.rs b/crates/db_schema/src/impls/local_site_rate_limit.rs index 0c9e96e0b..6ab9ca8b8 100644 --- a/crates/db_schema/src/impls/local_site_rate_limit.rs +++ b/crates/db_schema/src/impls/local_site_rate_limit.rs @@ -1,4 +1,5 @@ use crate::{ + diesel::OptionalExtension, schema::local_site_rate_limit, source::local_site_rate_limit::{ LocalSiteRateLimit, @@ -11,9 +12,9 @@ use diesel::{dsl::insert_into, result::Error}; use diesel_async::RunQueryDsl; impl LocalSiteRateLimit { - pub async fn read(pool: &mut DbPool<'_>) -> Result { + pub async fn read(pool: &mut DbPool<'_>) -> Result, Error> { let conn = &mut get_conn(pool).await?; - local_site_rate_limit::table.first::(conn).await + 
local_site_rate_limit::table.first(conn).await.optional() } pub async fn create( diff --git a/crates/db_schema/src/impls/local_site_url_blocklist.rs b/crates/db_schema/src/impls/local_site_url_blocklist.rs new file mode 100644 index 000000000..73dedabce --- /dev/null +++ b/crates/db_schema/src/impls/local_site_url_blocklist.rs @@ -0,0 +1,49 @@ +use crate::{ + schema::local_site_url_blocklist, + source::local_site_url_blocklist::{LocalSiteUrlBlocklist, LocalSiteUrlBlocklistForm}, + utils::{get_conn, DbPool}, +}; +use diesel::{dsl::insert_into, result::Error}; +use diesel_async::{AsyncPgConnection, RunQueryDsl}; + +impl LocalSiteUrlBlocklist { + pub async fn replace(pool: &mut DbPool<'_>, url_blocklist: Vec) -> Result<(), Error> { + let conn = &mut get_conn(pool).await?; + + conn + .build_transaction() + .run(|conn| { + Box::pin(async move { + use crate::schema::local_site_url_blocklist::dsl::local_site_url_blocklist; + + Self::clear(conn).await?; + + let forms = url_blocklist + .into_iter() + .map(|url| LocalSiteUrlBlocklistForm { url, updated: None }) + .collect::>(); + + insert_into(local_site_url_blocklist) + .values(forms) + .execute(conn) + .await?; + + Ok(()) + }) as _ + }) + .await + } + + async fn clear(conn: &mut AsyncPgConnection) -> Result { + diesel::delete(local_site_url_blocklist::table) + .execute(conn) + .await + } + + pub async fn get_all(pool: &mut DbPool<'_>) -> Result, Error> { + let conn = &mut get_conn(pool).await?; + local_site_url_blocklist::table + .get_results::(conn) + .await + } +} diff --git a/crates/db_schema/src/impls/local_user.rs b/crates/db_schema/src/impls/local_user.rs index 86960c053..235f053c1 100644 --- a/crates/db_schema/src/impls/local_user.rs +++ b/crates/db_schema/src/impls/local_user.rs @@ -1,28 +1,84 @@ use crate::{ - newtypes::{DbUrl, LocalUserId, PersonId}, - schema::local_user::dsl::{ - accepted_application, - email, - email_verified, - local_user, - password_encrypted, - }, + newtypes::{CommunityId, DbUrl, LanguageId, LocalUserId, PersonId}, + schema::{community, community_moderator, local_user, person, registration_application}, source::{ - actor_language::{LocalUserLanguage, SiteLanguage}, + actor_language::LocalUserLanguage, local_user::{LocalUser, LocalUserInsertForm, LocalUserUpdateForm}, + local_user_vote_display_mode::{LocalUserVoteDisplayMode, LocalUserVoteDisplayModeInsertForm}, + site::Site, }, - traits::Crud, utils::{ functions::{coalesce, lower}, get_conn, + now, DbPool, }, + CommunityVisibility, }; use bcrypt::{hash, DEFAULT_COST}; -use diesel::{dsl::insert_into, result::Error, ExpressionMethods, JoinOnDsl, QueryDsl}; +use diesel::{ + dsl::{insert_into, not, IntervalDsl}, + result::Error, + CombineDsl, + ExpressionMethods, + JoinOnDsl, + QueryDsl, +}; use diesel_async::RunQueryDsl; +use lemmy_utils::error::{LemmyErrorType, LemmyResult}; impl LocalUser { + pub async fn create( + pool: &mut DbPool<'_>, + form: &LocalUserInsertForm, + languages: Vec, + ) -> Result { + let conn = &mut get_conn(pool).await?; + let mut form_with_encrypted_password = form.clone(); + + if let Some(password_encrypted) = &form.password_encrypted { + let password_hash = hash(password_encrypted, DEFAULT_COST).expect("Couldn't hash password"); + form_with_encrypted_password.password_encrypted = Some(password_hash); + } + + let local_user_ = insert_into(local_user::table) + .values(form_with_encrypted_password) + .get_result::(conn) + .await?; + + LocalUserLanguage::update(pool, languages, local_user_.id).await?; + + // Create their vote_display_modes + let 
vote_display_mode_form = LocalUserVoteDisplayModeInsertForm::new(local_user_.id); + LocalUserVoteDisplayMode::create(pool, &vote_display_mode_form).await?; + + Ok(local_user_) + } + + pub async fn update( + pool: &mut DbPool<'_>, + local_user_id: LocalUserId, + form: &LocalUserUpdateForm, + ) -> Result { + let conn = &mut get_conn(pool).await?; + let res = diesel::update(local_user::table.find(local_user_id)) + .set(form) + .execute(conn) + .await; + // Diesel will throw an error if the query is all Nones (not updating anything), ignore this. + match res { + Err(Error::QueryBuilderError(_)) => Ok(0), + other => other, + } + } + + pub async fn delete(pool: &mut DbPool<'_>, id: LocalUserId) -> Result { + let conn = &mut *get_conn(pool).await?; + diesel::delete(local_user::table.find(id)) + .execute(conn) + .await + } + pub async fn update_password( pool: &mut DbPool<'_>, local_user_id: LocalUserId, @@ -31,16 +87,16 @@ impl LocalUser { let conn = &mut get_conn(pool).await?; let password_hash = hash(new_password, DEFAULT_COST).expect("Couldn't hash password"); - diesel::update(local_user.find(local_user_id)) - .set((password_encrypted.eq(password_hash),)) + diesel::update(local_user::table.find(local_user_id)) + .set((local_user::password_encrypted.eq(password_hash),)) .get_result::(conn) .await } pub async fn set_all_users_email_verified(pool: &mut DbPool<'_>) -> Result, Error> { let conn = &mut get_conn(pool).await?; - diesel::update(local_user) - .set(email_verified.eq(true)) + diesel::update(local_user::table) + .set(local_user::email_verified.eq(true)) .get_results::(conn) .await } @@ -49,20 +105,47 @@ impl LocalUser { pool: &mut DbPool<'_>, ) -> Result, Error> { let conn = &mut get_conn(pool).await?; - diesel::update(local_user) - .set(accepted_application.eq(true)) + diesel::update(local_user::table) + .set(local_user::accepted_application.eq(true)) .get_results::(conn) .await } - pub async fn is_email_taken(pool: &mut DbPool<'_>, email_: &str) -> Result { + pub async fn delete_old_denied_local_users(pool: &mut DbPool<'_>) -> Result { + let conn = &mut get_conn(pool).await?; + + // Make sure: + // - An admin has interacted with the application + // - The app is older than a week + // - The accepted_application is false + let old_denied_registrations = registration_application::table + .filter(registration_application::admin_id.is_not_null()) + .filter(registration_application::published.lt(now() - 1.week())) + .select(registration_application::local_user_id); + + // Delete based on join logic is here: + // https://stackoverflow.com/questions/60836040/how-do-i-perform-a-delete-with-sub-query-in-diesel-against-a-postgres-database + let local_users = local_user::table + .filter(local_user::id.eq_any(old_denied_registrations)) + .filter(not(local_user::accepted_application)) + .select(local_user::person_id); + + // Delete the person rows, which should automatically clear the local_user ones + let persons = person::table.filter(person::id.eq_any(local_users)); + + diesel::delete(persons).execute(conn).await + } + + pub async fn check_is_email_taken(pool: &mut DbPool<'_>, email: &str) -> LemmyResult<()> { use diesel::dsl::{exists, select}; let conn = &mut get_conn(pool).await?; - select(exists( - local_user.filter(lower(coalesce(email, "")).eq(email_.to_lowercase())), - )) - .get_result(conn) - .await + select(not(exists(local_user::table.filter( + lower(coalesce(local_user::email, "")).eq(email.to_lowercase()), + )))) + .get_result::(conn) + .await? 
+ .then_some(()) + .ok_or(LemmyErrorType::EmailAlreadyExists.into()) } // TODO: maybe move this and pass in LocalUserView @@ -76,7 +159,8 @@ impl LocalUser { community, community_block, community_follower, - person, + instance, + instance_block, person_block, post, post_saved, @@ -118,6 +202,13 @@ impl LocalUser { .get_results(conn) .await?; + let blocked_instances = instance_block::dsl::instance_block + .filter(instance_block::person_id.eq(person_id_)) + .inner_join(instance::table) + .select(instance::domain) + .get_results(conn) + .await?; + // TODO: use join for parallel queries? Ok(UserBackupLists { @@ -126,8 +217,146 @@ impl LocalUser { saved_comments, blocked_communities, blocked_users, + blocked_instances, }) } + + /// Checks to make sure the acting admin is higher than the target admin + pub async fn is_higher_admin_check( + pool: &mut DbPool<'_>, + admin_person_id: PersonId, + target_person_ids: Vec, + ) -> LemmyResult<()> { + let conn = &mut get_conn(pool).await?; + + // Build the list of persons + let mut persons = target_person_ids; + persons.push(admin_person_id); + persons.dedup(); + + let res = local_user::table + .filter(local_user::admin.eq(true)) + .filter(local_user::person_id.eq_any(persons)) + .order_by(local_user::id) + // This does a limit 1 select first + .first::(conn) + .await?; + + // If the first result sorted by published is the acting admin + if res.person_id == admin_person_id { + Ok(()) + } else { + Err(LemmyErrorType::NotHigherAdmin)? + } + } + + /// Checks to make sure the acting moderator is higher than the target moderator + pub async fn is_higher_mod_or_admin_check( + pool: &mut DbPool<'_>, + for_community_id: CommunityId, + admin_person_id: PersonId, + target_person_ids: Vec, + ) -> LemmyResult<()> { + let conn = &mut get_conn(pool).await?; + + // Build the list of persons + let mut persons = target_person_ids; + persons.push(admin_person_id); + persons.dedup(); + + let admins = local_user::table + .filter(local_user::admin.eq(true)) + .filter(local_user::person_id.eq_any(&persons)) + .order_by(local_user::id) + .select(local_user::person_id); + + let mods = community_moderator::table + .filter(community_moderator::community_id.eq(for_community_id)) + .filter(community_moderator::person_id.eq_any(&persons)) + .order_by(community_moderator::published) + .select(community_moderator::person_id); + + let res = admins.union_all(mods).get_results::(conn).await?; + let first_person = res.as_slice().first().ok_or(LemmyErrorType::NotHigherMod)?; + + // If the first result sorted by published is the acting mod + if *first_person == admin_person_id { + Ok(()) + } else { + Err(LemmyErrorType::NotHigherMod)? 
+ } + } +} + +/// Adds some helper functions for an optional LocalUser +pub trait LocalUserOptionHelper { + fn person_id(&self) -> Option; + fn local_user_id(&self) -> Option; + fn show_bot_accounts(&self) -> bool; + fn show_read_posts(&self) -> bool; + fn is_admin(&self) -> bool; + fn show_nsfw(&self, site: &Site) -> bool; + fn visible_communities_only(&self, query: Q) -> Q + where + Q: diesel::query_dsl::methods::FilterDsl< + diesel::dsl::Eq, + Output = Q, + >; +} + +impl LocalUserOptionHelper for Option<&LocalUser> { + fn person_id(&self) -> Option { + self.map(|l| l.person_id) + } + + fn local_user_id(&self) -> Option { + self.map(|l| l.id) + } + + fn show_bot_accounts(&self) -> bool { + self.map(|l| l.show_bot_accounts).unwrap_or(true) + } + + fn show_read_posts(&self) -> bool { + self.map(|l| l.show_read_posts).unwrap_or(true) + } + + fn is_admin(&self) -> bool { + self.map(|l| l.admin).unwrap_or(false) + } + + fn show_nsfw(&self, site: &Site) -> bool { + self + .map(|l| l.show_nsfw) + .unwrap_or(site.content_warning.is_some()) + } + + fn visible_communities_only(&self, query: Q) -> Q + where + Q: diesel::query_dsl::methods::FilterDsl< + diesel::dsl::Eq, + Output = Q, + >, + { + if self.is_none() { + query.filter(community::visibility.eq(CommunityVisibility::Public)) + } else { + query + } + } +} + +impl LocalUserInsertForm { + pub fn test_form(person_id: PersonId) -> Self { + Self::new(person_id, Some(String::new())) + } + + pub fn test_form_admin(person_id: PersonId) -> Self { + LocalUserInsertForm { + admin: Some(true), + ..Self::test_form(person_id) + } + } } pub struct UserBackupLists { @@ -136,47 +365,87 @@ pub struct UserBackupLists { pub saved_comments: Vec, pub blocked_communities: Vec, pub blocked_users: Vec, + pub blocked_instances: Vec, } -#[async_trait] -impl Crud for LocalUser { - type InsertForm = LocalUserInsertForm; - type UpdateForm = LocalUserUpdateForm; - type IdType = LocalUserId; +#[cfg(test)] +mod tests { + use crate::{ + source::{ + instance::Instance, + local_user::{LocalUser, LocalUserInsertForm}, + person::{Person, PersonInsertForm}, + }, + traits::Crud, + utils::build_db_pool_for_tests, + }; + use lemmy_utils::error::LemmyResult; + use serial_test::serial; - async fn create(pool: &mut DbPool<'_>, form: &Self::InsertForm) -> Result { - let conn = &mut get_conn(pool).await?; - let mut form_with_encrypted_password = form.clone(); - let password_hash = - hash(&form.password_encrypted, DEFAULT_COST).expect("Couldn't hash password"); - form_with_encrypted_password.password_encrypted = password_hash; + #[tokio::test] + #[serial] + async fn test_admin_higher_check() -> LemmyResult<()> { + let pool = &build_db_pool_for_tests().await; + let pool = &mut pool.into(); - let local_user_ = insert_into(local_user) - .values(form_with_encrypted_password) - .get_result::(conn) - .await?; + let inserted_instance = Instance::read_or_create(pool, "my_domain.tld".to_string()).await?; - let site_languages = SiteLanguage::read_local_raw(pool).await; - if let Ok(langs) = site_languages { - // if site exists, init user with site languages - LocalUserLanguage::update(pool, langs, local_user_.id).await?; - } else { - // otherwise, init with all languages (this only happens during tests and - // for first admin user, which is created before site) - LocalUserLanguage::update(pool, vec![], local_user_.id).await?; - } + let fiona_person = PersonInsertForm::test_form(inserted_instance.id, "fiona"); + let inserted_fiona_person = Person::create(pool, &fiona_person).await?; - Ok(local_user_) 
+ let fiona_local_user_form = LocalUserInsertForm::test_form_admin(inserted_fiona_person.id); + let _inserted_fiona_local_user = + LocalUser::create(pool, &fiona_local_user_form, vec![]).await?; + + let delores_person = PersonInsertForm::test_form(inserted_instance.id, "delores"); + let inserted_delores_person = Person::create(pool, &delores_person).await?; + let delores_local_user_form = LocalUserInsertForm::test_form_admin(inserted_delores_person.id); + let _inserted_delores_local_user = + LocalUser::create(pool, &delores_local_user_form, vec![]).await?; + + let admin_person_ids = vec![inserted_fiona_person.id, inserted_delores_person.id]; + + // Make sure fiona is marked as a higher admin than delores, and vice versa + let fiona_higher_check = + LocalUser::is_higher_admin_check(pool, inserted_fiona_person.id, admin_person_ids.clone()) + .await; + assert!(fiona_higher_check.is_ok()); + + // This should throw an error, since delores was added later + let delores_higher_check = + LocalUser::is_higher_admin_check(pool, inserted_delores_person.id, admin_person_ids).await; + assert!(delores_higher_check.is_err()); + + Instance::delete(pool, inserted_instance.id).await?; + + Ok(()) } - async fn update( - pool: &mut DbPool<'_>, - local_user_id: LocalUserId, - form: &Self::UpdateForm, - ) -> Result { - let conn = &mut get_conn(pool).await?; - diesel::update(local_user.find(local_user_id)) - .set(form) - .get_result::(conn) - .await + + #[tokio::test] + #[serial] + async fn test_email_taken() -> LemmyResult<()> { + let pool = &build_db_pool_for_tests().await; + let pool = &mut pool.into(); + + let darwin_email = "charles.darwin@gmail.com"; + + let inserted_instance = Instance::read_or_create(pool, "my_domain.tld".to_string()).await?; + + let darwin_person = PersonInsertForm::test_form(inserted_instance.id, "darwin"); + let inserted_darwin_person = Person::create(pool, &darwin_person).await?; + + let mut darwin_local_user_form = + LocalUserInsertForm::test_form_admin(inserted_darwin_person.id); + darwin_local_user_form.email = Some(darwin_email.into()); + let _inserted_darwin_local_user = + LocalUser::create(pool, &darwin_local_user_form, vec![]).await?; + + let check = LocalUser::check_is_email_taken(pool, darwin_email).await; + assert!(check.is_err()); + + let passed_check = LocalUser::check_is_email_taken(pool, "not_charles@gmail.com").await; + assert!(passed_check.is_ok()); + + Ok(()) } } diff --git a/crates/db_schema/src/impls/local_user_vote_display_mode.rs b/crates/db_schema/src/impls/local_user_vote_display_mode.rs new file mode 100644 index 000000000..2d169f81b --- /dev/null +++ b/crates/db_schema/src/impls/local_user_vote_display_mode.rs @@ -0,0 +1,60 @@ +use crate::{ + diesel::OptionalExtension, + newtypes::LocalUserId, + schema::local_user_vote_display_mode, + source::local_user_vote_display_mode::{ + LocalUserVoteDisplayMode, + LocalUserVoteDisplayModeInsertForm, + LocalUserVoteDisplayModeUpdateForm, + }, + utils::{get_conn, DbPool}, +}; +use diesel::{dsl::insert_into, result::Error, QueryDsl}; +use diesel_async::RunQueryDsl; + +impl LocalUserVoteDisplayMode { + pub async fn read(pool: &mut DbPool<'_>) -> Result, Error> { + let conn = &mut get_conn(pool).await?; + local_user_vote_display_mode::table + .first(conn) + .await + .optional() + } + + pub async fn create( + pool: &mut DbPool<'_>, + form: &LocalUserVoteDisplayModeInsertForm, + ) -> Result { + let conn = &mut get_conn(pool).await?; + insert_into(local_user_vote_display_mode::table) + .values(form) + .get_result::(conn) + 
.await + } + + pub async fn update( + pool: &mut DbPool<'_>, + local_user_id: LocalUserId, + form: &LocalUserVoteDisplayModeUpdateForm, + ) -> Result<(), Error> { + // avoid error "There are no changes to save. This query cannot be built" + if form.is_empty() { + return Ok(()); + } + let conn = &mut get_conn(pool).await?; + diesel::update(local_user_vote_display_mode::table.find(local_user_id)) + .set(form) + .get_result::(conn) + .await?; + Ok(()) + } +} + +impl LocalUserVoteDisplayModeUpdateForm { + fn is_empty(&self) -> bool { + self.score.is_none() + && self.upvotes.is_none() + && self.downvotes.is_none() + && self.upvote_percentage.is_none() + } +} diff --git a/crates/db_schema/src/impls/login_token.rs b/crates/db_schema/src/impls/login_token.rs index b1d1124d6..c8c44c506 100644 --- a/crates/db_schema/src/impls/login_token.rs +++ b/crates/db_schema/src/impls/login_token.rs @@ -1,12 +1,13 @@ use crate::{ diesel::{ExpressionMethods, QueryDsl}, newtypes::LocalUserId, - schema::login_token::{dsl::login_token, token, user_id}, + schema::login_token::{dsl::login_token, user_id}, source::login_token::{LoginToken, LoginTokenCreateForm}, utils::{get_conn, DbPool}, }; use diesel::{delete, dsl::exists, insert_into, result::Error, select}; use diesel_async::RunQueryDsl; +use lemmy_utils::{error::LemmyResult, LemmyErrorType}; impl LoginToken { pub async fn create(pool: &mut DbPool<'_>, form: LoginTokenCreateForm) -> Result { @@ -22,15 +23,15 @@ impl LoginToken { pool: &mut DbPool<'_>, user_id_: LocalUserId, token_: &str, - ) -> Result { + ) -> LemmyResult<()> { let conn = &mut get_conn(pool).await?; select(exists( - login_token - .filter(user_id.eq(user_id_)) - .filter(token.eq(token_)), + login_token.find(token_).filter(user_id.eq(user_id_)), )) - .get_result(conn) - .await + .get_result::(conn) + .await? + .then_some(()) + .ok_or(LemmyErrorType::NotLoggedIn.into()) } pub async fn list( @@ -48,9 +49,7 @@ impl LoginToken { /// Invalidate specific token on user logout. pub async fn invalidate(pool: &mut DbPool<'_>, token_: &str) -> Result { let conn = &mut get_conn(pool).await?; - delete(login_token.filter(token.eq(token_))) - .execute(conn) - .await + delete(login_token.find(token_)).execute(conn).await } /// Invalidate all logins of given user on password reset/change, account deletion or site ban. 
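
Several of the changes above (LoginToken::validate, LocalUser::check_is_email_taken, InstanceBlock::read, Person::check_username_taken) share the same shape: a select(exists(...)) or select(not(exists(...))) query yields a bool, which is then converted into a LemmyResult<()> with .then_some(()).ok_or(...). A minimal standalone sketch of that bool-to-error conversion follows; the type and function names are illustrative only, not taken from the crate.

```rust
// Sketch of the validation pattern used in this diff: a boolean "exists"
// check is mapped onto Result<(), Err> via then_some(()) / ok_or().
// `CheckError` and `validate_token` are hypothetical names for illustration.
#[derive(Debug, PartialEq)]
enum CheckError {
  NotLoggedIn,
}

fn validate_token(token_found: bool) -> Result<(), CheckError> {
  // `true.then_some(())` is Some(()), `false.then_some(())` is None,
  // so a missing row maps directly onto the error variant.
  token_found.then_some(()).ok_or(CheckError::NotLoggedIn)
}

fn main() {
  assert_eq!(validate_token(true), Ok(()));
  assert_eq!(validate_token(false), Err(CheckError::NotLoggedIn));
}
```

This is what lets the call sites in the diff simply write `LoginToken::validate(...).await?` and propagate a typed error instead of matching on a boolean or row count.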
diff --git a/crates/db_schema/src/impls/mod.rs b/crates/db_schema/src/impls/mod.rs index 3cf0f1066..f115a101f 100644 --- a/crates/db_schema/src/impls/mod.rs +++ b/crates/db_schema/src/impls/mod.rs @@ -10,15 +10,20 @@ pub mod custom_emoji; pub mod email_verification; pub mod federation_allowlist; pub mod federation_blocklist; -pub mod image_upload; +pub mod federation_queue_state; +pub mod images; pub mod instance; pub mod instance_block; pub mod language; pub mod local_site; pub mod local_site_rate_limit; +pub mod local_site_url_blocklist; pub mod local_user; +pub mod local_user_vote_display_mode; pub mod login_token; pub mod moderator; +pub mod oauth_account; +pub mod oauth_provider; pub mod password_reset_request; pub mod person; pub mod person_block; diff --git a/crates/db_schema/src/impls/moderator.rs b/crates/db_schema/src/impls/moderator.rs index 012e05394..b2ef26e69 100644 --- a/crates/db_schema/src/impls/moderator.rs +++ b/crates/db_schema/src/impls/moderator.rs @@ -66,6 +66,20 @@ impl Crud for ModRemovePost { } } +impl ModRemovePost { + pub async fn create_multiple( + pool: &mut DbPool<'_>, + forms: &Vec, + ) -> Result { + use crate::schema::mod_remove_post::dsl::mod_remove_post; + let conn = &mut get_conn(pool).await?; + insert_into(mod_remove_post) + .values(forms) + .execute(conn) + .await + } +} + #[async_trait] impl Crud for ModLockPost { type InsertForm = ModLockPostForm; @@ -153,6 +167,20 @@ impl Crud for ModRemoveComment { } } +impl ModRemoveComment { + pub async fn create_multiple( + pool: &mut DbPool<'_>, + forms: &Vec, + ) -> Result { + use crate::schema::mod_remove_comment::dsl::mod_remove_comment; + let conn = &mut get_conn(pool).await?; + insert_into(mod_remove_comment) + .values(forms) + .execute(conn) + .await + } +} + #[async_trait] impl Crud for ModRemoveCommunity { type InsertForm = ModRemoveCommunityForm; @@ -466,8 +494,6 @@ impl Crud for AdminPurgeComment { #[cfg(test)] mod tests { - #![allow(clippy::unwrap_used)] - #![allow(clippy::indexing_slicing)] use crate::{ source::{ @@ -500,58 +526,48 @@ mod tests { traits::Crud, utils::build_db_pool_for_tests, }; + use diesel::result::Error; + use pretty_assertions::assert_eq; use serial_test::serial; #[tokio::test] #[serial] - async fn test_crud() { + async fn test_crud() -> Result<(), Error> { let pool = &build_db_pool_for_tests().await; let pool = &mut pool.into(); - let inserted_instance = Instance::read_or_create(pool, "my_domain.tld".to_string()) - .await - .unwrap(); + let inserted_instance = Instance::read_or_create(pool, "my_domain.tld".to_string()).await?; - let new_mod = PersonInsertForm::builder() - .name("the mod".into()) - .public_key("pubkey".to_string()) - .instance_id(inserted_instance.id) - .build(); + let new_mod = PersonInsertForm::test_form(inserted_instance.id, "the mod"); - let inserted_mod = Person::create(pool, &new_mod).await.unwrap(); + let inserted_mod = Person::create(pool, &new_mod).await?; - let new_person = PersonInsertForm::builder() - .name("jim2".into()) - .public_key("pubkey".to_string()) - .instance_id(inserted_instance.id) - .build(); + let new_person = PersonInsertForm::test_form(inserted_instance.id, "jim2"); - let inserted_person = Person::create(pool, &new_person).await.unwrap(); + let inserted_person = Person::create(pool, &new_person).await?; - let new_community = CommunityInsertForm::builder() - .name("mod_community".to_string()) - .title("nada".to_owned()) - .public_key("pubkey".to_string()) - .instance_id(inserted_instance.id) - .build(); + let new_community = 
CommunityInsertForm::new( + inserted_instance.id, + "mod_community".to_string(), + "nada".to_owned(), + "pubkey".to_string(), + ); - let inserted_community = Community::create(pool, &new_community).await.unwrap(); + let inserted_community = Community::create(pool, &new_community).await?; - let new_post = PostInsertForm::builder() - .name("A test post thweep".into()) - .creator_id(inserted_person.id) - .community_id(inserted_community.id) - .build(); + let new_post = PostInsertForm::new( + "A test post thweep".into(), + inserted_person.id, + inserted_community.id, + ); + let inserted_post = Post::create(pool, &new_post).await?; - let inserted_post = Post::create(pool, &new_post).await.unwrap(); - - let comment_form = CommentInsertForm::builder() - .content("A test comment".into()) - .creator_id(inserted_person.id) - .post_id(inserted_post.id) - .build(); - - let inserted_comment = Comment::create(pool, &comment_form, None).await.unwrap(); + let comment_form = CommentInsertForm::new( + inserted_person.id, + inserted_post.id, + "A test comment".into(), + ); + let inserted_comment = Comment::create(pool, &comment_form, None).await?; // Now the actual tests @@ -562,12 +578,8 @@ mod tests { reason: None, removed: None, }; - let inserted_mod_remove_post = ModRemovePost::create(pool, &mod_remove_post_form) - .await - .unwrap(); - let read_mod_remove_post = ModRemovePost::read(pool, inserted_mod_remove_post.id) - .await - .unwrap(); + let inserted_mod_remove_post = ModRemovePost::create(pool, &mod_remove_post_form).await?; + let read_mod_remove_post = ModRemovePost::read(pool, inserted_mod_remove_post.id).await?; let expected_mod_remove_post = ModRemovePost { id: inserted_mod_remove_post.id, post_id: inserted_post.id, @@ -584,12 +596,8 @@ mod tests { post_id: inserted_post.id, locked: None, }; - let inserted_mod_lock_post = ModLockPost::create(pool, &mod_lock_post_form) - .await - .unwrap(); - let read_mod_lock_post = ModLockPost::read(pool, inserted_mod_lock_post.id) - .await - .unwrap(); + let inserted_mod_lock_post = ModLockPost::create(pool, &mod_lock_post_form).await?; + let read_mod_lock_post = ModLockPost::read(pool, inserted_mod_lock_post.id).await?; let expected_mod_lock_post = ModLockPost { id: inserted_mod_lock_post.id, post_id: inserted_post.id, @@ -606,12 +614,8 @@ mod tests { featured: false, is_featured_community: true, }; - let inserted_mod_feature_post = ModFeaturePost::create(pool, &mod_feature_post_form) - .await - .unwrap(); - let read_mod_feature_post = ModFeaturePost::read(pool, inserted_mod_feature_post.id) - .await - .unwrap(); + let inserted_mod_feature_post = ModFeaturePost::create(pool, &mod_feature_post_form).await?; + let read_mod_feature_post = ModFeaturePost::read(pool, inserted_mod_feature_post.id).await?; let expected_mod_feature_post = ModFeaturePost { id: inserted_mod_feature_post.id, post_id: inserted_post.id, @@ -629,12 +633,10 @@ mod tests { reason: None, removed: None, }; - let inserted_mod_remove_comment = ModRemoveComment::create(pool, &mod_remove_comment_form) - .await - .unwrap(); - let read_mod_remove_comment = ModRemoveComment::read(pool, inserted_mod_remove_comment.id) - .await - .unwrap(); + let inserted_mod_remove_comment = + ModRemoveComment::create(pool, &mod_remove_comment_form).await?; + let read_mod_remove_comment = + ModRemoveComment::read(pool, inserted_mod_remove_comment.id).await?; let expected_mod_remove_comment = ModRemoveComment { id: inserted_mod_remove_comment.id, comment_id: inserted_comment.id, @@ -653,13 +655,9 @@ mod tests { removed: 
None, }; let inserted_mod_remove_community = - ModRemoveCommunity::create(pool, &mod_remove_community_form) - .await - .unwrap(); + ModRemoveCommunity::create(pool, &mod_remove_community_form).await?; let read_mod_remove_community = - ModRemoveCommunity::read(pool, inserted_mod_remove_community.id) - .await - .unwrap(); + ModRemoveCommunity::read(pool, inserted_mod_remove_community.id).await?; let expected_mod_remove_community = ModRemoveCommunity { id: inserted_mod_remove_community.id, community_id: inserted_community.id, @@ -680,13 +678,9 @@ mod tests { expires: None, }; let inserted_mod_ban_from_community = - ModBanFromCommunity::create(pool, &mod_ban_from_community_form) - .await - .unwrap(); + ModBanFromCommunity::create(pool, &mod_ban_from_community_form).await?; let read_mod_ban_from_community = - ModBanFromCommunity::read(pool, inserted_mod_ban_from_community.id) - .await - .unwrap(); + ModBanFromCommunity::read(pool, inserted_mod_ban_from_community.id).await?; let expected_mod_ban_from_community = ModBanFromCommunity { id: inserted_mod_ban_from_community.id, community_id: inserted_community.id, @@ -707,8 +701,8 @@ mod tests { banned: None, expires: None, }; - let inserted_mod_ban = ModBan::create(pool, &mod_ban_form).await.unwrap(); - let read_mod_ban = ModBan::read(pool, inserted_mod_ban.id).await.unwrap(); + let inserted_mod_ban = ModBan::create(pool, &mod_ban_form).await?; + let read_mod_ban = ModBan::read(pool, inserted_mod_ban.id).await?; let expected_mod_ban = ModBan { id: inserted_mod_ban.id, mod_person_id: inserted_mod.id, @@ -727,12 +721,8 @@ mod tests { community_id: inserted_community.id, removed: None, }; - let inserted_mod_add_community = ModAddCommunity::create(pool, &mod_add_community_form) - .await - .unwrap(); - let read_mod_add_community = ModAddCommunity::read(pool, inserted_mod_add_community.id) - .await - .unwrap(); + let inserted_mod_add_community = ModAddCommunity::create(pool, &mod_add_community_form).await?; + let read_mod_add_community = ModAddCommunity::read(pool, inserted_mod_add_community.id).await?; let expected_mod_add_community = ModAddCommunity { id: inserted_mod_add_community.id, community_id: inserted_community.id, @@ -749,8 +739,8 @@ mod tests { other_person_id: inserted_person.id, removed: None, }; - let inserted_mod_add = ModAdd::create(pool, &mod_add_form).await.unwrap(); - let read_mod_add = ModAdd::read(pool, inserted_mod_add.id).await.unwrap(); + let inserted_mod_add = ModAdd::create(pool, &mod_add_form).await?; + let read_mod_add = ModAdd::read(pool, inserted_mod_add.id).await?; let expected_mod_add = ModAdd { id: inserted_mod_add.id, mod_person_id: inserted_mod.id, @@ -759,14 +749,12 @@ mod tests { when_: inserted_mod_add.when_, }; - Comment::delete(pool, inserted_comment.id).await.unwrap(); - Post::delete(pool, inserted_post.id).await.unwrap(); - Community::delete(pool, inserted_community.id) - .await - .unwrap(); - Person::delete(pool, inserted_person.id).await.unwrap(); - Person::delete(pool, inserted_mod.id).await.unwrap(); - Instance::delete(pool, inserted_instance.id).await.unwrap(); + Comment::delete(pool, inserted_comment.id).await?; + Post::delete(pool, inserted_post.id).await?; + Community::delete(pool, inserted_community.id).await?; + Person::delete(pool, inserted_person.id).await?; + Person::delete(pool, inserted_mod.id).await?; + Instance::delete(pool, inserted_instance.id).await?; assert_eq!(expected_mod_remove_post, read_mod_remove_post); assert_eq!(expected_mod_lock_post, read_mod_lock_post); @@ -777,5 +765,7 @@ mod tests 
{ assert_eq!(expected_mod_ban, read_mod_ban); assert_eq!(expected_mod_add_community, read_mod_add_community); assert_eq!(expected_mod_add, read_mod_add); + + Ok(()) } } diff --git a/crates/db_schema/src/impls/oauth_account.rs b/crates/db_schema/src/impls/oauth_account.rs new file mode 100644 index 000000000..7210b7a37 --- /dev/null +++ b/crates/db_schema/src/impls/oauth_account.rs @@ -0,0 +1,29 @@ +use crate::{ + newtypes::LocalUserId, + schema::{oauth_account, oauth_account::dsl::local_user_id}, + source::oauth_account::{OAuthAccount, OAuthAccountInsertForm}, + utils::{get_conn, DbPool}, +}; +use diesel::{insert_into, result::Error, ExpressionMethods, QueryDsl}; +use diesel_async::RunQueryDsl; + +impl OAuthAccount { + pub async fn create(pool: &mut DbPool<'_>, form: &OAuthAccountInsertForm) -> Result { + let conn = &mut get_conn(pool).await?; + insert_into(oauth_account::table) + .values(form) + .get_result::(conn) + .await + } + + pub async fn delete_user_accounts( + pool: &mut DbPool<'_>, + for_local_user_id: LocalUserId, + ) -> Result { + let conn = &mut get_conn(pool).await?; + + diesel::delete(oauth_account::table.filter(local_user_id.eq(for_local_user_id))) + .execute(conn) + .await + } +} diff --git a/crates/db_schema/src/impls/oauth_provider.rs b/crates/db_schema/src/impls/oauth_provider.rs new file mode 100644 index 000000000..9d7e791e7 --- /dev/null +++ b/crates/db_schema/src/impls/oauth_provider.rs @@ -0,0 +1,71 @@ +use crate::{ + newtypes::OAuthProviderId, + schema::oauth_provider, + source::oauth_provider::{ + OAuthProvider, + OAuthProviderInsertForm, + OAuthProviderUpdateForm, + PublicOAuthProvider, + }, + traits::Crud, + utils::{get_conn, DbPool}, +}; +use diesel::{dsl::insert_into, result::Error, QueryDsl}; +use diesel_async::RunQueryDsl; + +#[async_trait] +impl Crud for OAuthProvider { + type InsertForm = OAuthProviderInsertForm; + type UpdateForm = OAuthProviderUpdateForm; + type IdType = OAuthProviderId; + + async fn create(pool: &mut DbPool<'_>, form: &Self::InsertForm) -> Result { + let conn = &mut get_conn(pool).await?; + insert_into(oauth_provider::table) + .values(form) + .get_result::(conn) + .await + } + + async fn update( + pool: &mut DbPool<'_>, + oauth_provider_id: OAuthProviderId, + form: &Self::UpdateForm, + ) -> Result { + let conn = &mut get_conn(pool).await?; + diesel::update(oauth_provider::table.find(oauth_provider_id)) + .set(form) + .get_result::(conn) + .await + } +} + +impl OAuthProvider { + pub async fn get_all(pool: &mut DbPool<'_>) -> Result, Error> { + let conn = &mut get_conn(pool).await?; + let oauth_providers = oauth_provider::table + .order(oauth_provider::id) + .select(oauth_provider::all_columns) + .load::(conn) + .await?; + + Ok(oauth_providers) + } + + pub fn convert_providers_to_public( + oauth_providers: Vec, + ) -> Vec { + let mut result = Vec::::new(); + for oauth_provider in &oauth_providers { + if oauth_provider.enabled { + result.push(PublicOAuthProvider(oauth_provider.clone())); + } + } + result + } + + pub async fn get_all_public(pool: &mut DbPool<'_>) -> Result, Error> { + let oauth_providers = OAuthProvider::get_all(pool).await?; + Ok(Self::convert_providers_to_public(oauth_providers)) + } +} diff --git a/crates/db_schema/src/impls/password_reset_request.rs b/crates/db_schema/src/impls/password_reset_request.rs index 5600ffc66..015db5581 100644 --- a/crates/db_schema/src/impls/password_reset_request.rs +++ b/crates/db_schema/src/impls/password_reset_request.rs @@ -1,80 +1,41 @@ use crate::{ newtypes::LocalUserId, - 
schema::password_reset_request::dsl::{local_user_id, password_reset_request, published, token}, + schema::password_reset_request::dsl::{password_reset_request, published, token}, source::password_reset_request::{PasswordResetRequest, PasswordResetRequestForm}, - traits::Crud, utils::{get_conn, DbPool}, }; use diesel::{ + delete, dsl::{insert_into, now, IntervalDsl}, result::Error, sql_types::Timestamptz, ExpressionMethods, IntoSql, - QueryDsl, }; use diesel_async::RunQueryDsl; -#[async_trait] -impl Crud for PasswordResetRequest { - type InsertForm = PasswordResetRequestForm; - type UpdateForm = PasswordResetRequestForm; - type IdType = i32; - - async fn create(pool: &mut DbPool<'_>, form: &PasswordResetRequestForm) -> Result { - let conn = &mut get_conn(pool).await?; - insert_into(password_reset_request) - .values(form) - .get_result::(conn) - .await - } - async fn update( - pool: &mut DbPool<'_>, - password_reset_request_id: i32, - form: &PasswordResetRequestForm, - ) -> Result { - let conn = &mut get_conn(pool).await?; - diesel::update(password_reset_request.find(password_reset_request_id)) - .set(form) - .get_result::(conn) - .await - } -} - impl PasswordResetRequest { - pub async fn create_token( + pub async fn create( pool: &mut DbPool<'_>, from_local_user_id: LocalUserId, token_: String, ) -> Result { let form = PasswordResetRequestForm { local_user_id: from_local_user_id, - token: token_, + token: token_.into(), }; - - Self::create(pool, &form).await - } - pub async fn read_from_token( - pool: &mut DbPool<'_>, - token_: &str, - ) -> Result { let conn = &mut get_conn(pool).await?; - password_reset_request - .filter(token.eq(token_)) - .filter(published.gt(now.into_sql::() - 1.days())) - .first::(conn) + insert_into(password_reset_request) + .values(form) + .get_result::(conn) .await } - pub async fn get_recent_password_resets_count( - pool: &mut DbPool<'_>, - user_id: LocalUserId, - ) -> Result { + pub async fn read_and_delete(pool: &mut DbPool<'_>, token_: &str) -> Result { let conn = &mut get_conn(pool).await?; - password_reset_request - .filter(local_user_id.eq(user_id)) + delete(password_reset_request) + .filter(token.eq(token_)) .filter(published.gt(now.into_sql::() - 1.days())) - .count() .get_result(conn) .await } @@ -82,8 +43,6 @@ impl PasswordResetRequest { #[cfg(test)] mod tests { - #![allow(clippy::unwrap_used)] - #![allow(clippy::indexing_slicing)] use crate::{ source::{ @@ -95,58 +54,55 @@ mod tests { traits::Crud, utils::build_db_pool_for_tests, }; + use lemmy_utils::error::LemmyResult; + use pretty_assertions::assert_eq; use serial_test::serial; #[tokio::test] #[serial] - async fn test_crud() { + async fn test_password_reset() -> LemmyResult<()> { let pool = &build_db_pool_for_tests().await; let pool = &mut pool.into(); - let inserted_instance = Instance::read_or_create(pool, "my_domain.tld".to_string()) - .await - .unwrap(); - - let new_person = PersonInsertForm::builder() - .name("thommy prw".into()) - .public_key("pubkey".to_string()) - .instance_id(inserted_instance.id) - .build(); - - let inserted_person = Person::create(pool, &new_person).await.unwrap(); - - let new_local_user = LocalUserInsertForm::builder() - .person_id(inserted_person.id) - .password_encrypted("pass".to_string()) - .build(); - - let inserted_local_user = LocalUser::create(pool, &new_local_user).await.unwrap(); + // Setup + let inserted_instance = Instance::read_or_create(pool, "my_domain.tld".to_string()).await?; + let new_person = PersonInsertForm::test_form(inserted_instance.id, "thommy 
prw"); + let inserted_person = Person::create(pool, &new_person).await?; + let new_local_user = LocalUserInsertForm::test_form(inserted_person.id); + let inserted_local_user = LocalUser::create(pool, &new_local_user, vec![]).await?; + // Create password reset token let token = "nope"; - let inserted_password_reset_request = - PasswordResetRequest::create_token(pool, inserted_local_user.id, token.to_string()) - .await - .unwrap(); + PasswordResetRequest::create(pool, inserted_local_user.id, token.to_string()).await?; - let expected_password_reset_request = PasswordResetRequest { - id: inserted_password_reset_request.id, - local_user_id: inserted_local_user.id, - token: token.to_string(), - published: inserted_password_reset_request.published, - }; - - let read_password_reset_request = PasswordResetRequest::read_from_token(pool, token) - .await - .unwrap(); - let num_deleted = Person::delete(pool, inserted_person.id).await.unwrap(); - Instance::delete(pool, inserted_instance.id).await.unwrap(); - - assert_eq!(expected_password_reset_request, read_password_reset_request); + // Read it and verify + let read_password_reset_request = PasswordResetRequest::read_and_delete(pool, token).await?; assert_eq!( - expected_password_reset_request, - inserted_password_reset_request + inserted_password_reset_request.id, + read_password_reset_request.id ); + assert_eq!( + inserted_password_reset_request.local_user_id, + read_password_reset_request.local_user_id + ); + assert_eq!( + inserted_password_reset_request.token, + read_password_reset_request.token + ); + assert_eq!( + inserted_password_reset_request.published, + read_password_reset_request.published + ); + + // Cannot reuse same token again + let read_password_reset_request = PasswordResetRequest::read_and_delete(pool, token).await; + assert!(read_password_reset_request.is_err()); + + // Cleanup + let num_deleted = Person::delete(pool, inserted_person.id).await?; + Instance::delete(pool, inserted_instance.id).await?; assert_eq!(1, num_deleted); + Ok(()) } } diff --git a/crates/db_schema/src/impls/person.rs b/crates/db_schema/src/impls/person.rs index 12ec0392f..a5f8ae1a0 100644 --- a/crates/db_schema/src/impls/person.rs +++ b/crates/db_schema/src/impls/person.rs @@ -1,6 +1,7 @@ use crate::{ - newtypes::{CommunityId, DbUrl, PersonId}, - schema::{instance, local_user, person, person_follower}, + diesel::OptionalExtension, + newtypes::{CommunityId, DbUrl, InstanceId, PersonId}, + schema::{comment, community, instance, local_user, person, person_follower, post}, source::person::{ Person, PersonFollower, @@ -11,20 +12,30 @@ use crate::{ traits::{ApubActor, Crud, Followable}, utils::{functions::lower, get_conn, naive_now, DbPool}, }; -use diesel::{dsl::insert_into, result::Error, ExpressionMethods, JoinOnDsl, QueryDsl}; +use diesel::{ + dsl::{insert_into, not}, + result::Error, + CombineDsl, + ExpressionMethods, + JoinOnDsl, + QueryDsl, +}; use diesel_async::RunQueryDsl; +use lemmy_utils::{error::LemmyResult, LemmyErrorType}; #[async_trait] impl Crud for Person { type InsertForm = PersonInsertForm; type UpdateForm = PersonUpdateForm; type IdType = PersonId; + + // Override this, so that you don't get back deleted async fn read(pool: &mut DbPool<'_>, person_id: PersonId) -> Result { let conn = &mut get_conn(pool).await?; person::table .filter(person::deleted.eq(false)) .find(person_id) - .first::(conn) + .first(conn) .await } @@ -51,7 +62,8 @@ impl Crud for Person { impl Person { /// Update or insert the person. 
/// - /// This is necessary for federation, because Activitypub doesnt distinguish between these actions. + /// This is necessary for federation, because Activitypub doesn't distinguish between these + /// actions. pub async fn upsert(pool: &mut DbPool<'_>, form: &PersonInsertForm) -> Result { let conn = &mut get_conn(pool).await?; insert_into(person::table) @@ -84,6 +96,51 @@ impl Person { .get_result::(conn) .await } + + /// Lists local community ids for all posts and comments for a given creator. + pub async fn list_local_community_ids( + pool: &mut DbPool<'_>, + for_creator_id: PersonId, + ) -> Result, Error> { + let conn = &mut get_conn(pool).await?; + comment::table + .inner_join(post::table) + .inner_join(community::table.on(post::community_id.eq(community::id))) + .filter(community::local.eq(true)) + .filter(not(community::deleted)) + .filter(not(community::removed)) + .filter(comment::creator_id.eq(for_creator_id)) + .select(community::id) + .union( + post::table + .inner_join(community::table) + .filter(community::local.eq(true)) + .filter(post::creator_id.eq(for_creator_id)) + .select(community::id), + ) + .load::(conn) + .await + } + + pub async fn check_username_taken(pool: &mut DbPool<'_>, username: &str) -> LemmyResult<()> { + use diesel::dsl::{exists, select}; + let conn = &mut get_conn(pool).await?; + select(not(exists( + person::table + .filter(lower(person::name).eq(username.to_lowercase())) + .filter(person::local.eq(true)), + ))) + .get_result::(conn) + .await? + .then_some(()) + .ok_or(LemmyErrorType::UsernameAlreadyExists.into()) + } +} + +impl PersonInsertForm { + pub fn test_form(instance_id: InstanceId, name: &str) -> Self { + Self::new(name.to_owned(), "pubkey".to_string(), instance_id) + } } #[async_trait] @@ -93,22 +150,19 @@ impl ApubActor for Person { object_id: &DbUrl, ) -> Result, Error> { let conn = &mut get_conn(pool).await?; - Ok( - person::table - .filter(person::deleted.eq(false)) - .filter(person::actor_id.eq(object_id)) - .first::(conn) - .await - .ok() - .map(Into::into), - ) + person::table + .filter(person::deleted.eq(false)) + .filter(person::actor_id.eq(object_id)) + .first(conn) + .await + .optional() } async fn read_from_name( pool: &mut DbPool<'_>, from_name: &str, include_deleted: bool, - ) -> Result { + ) -> Result, Error> { let conn = &mut get_conn(pool).await?; let mut q = person::table .into_boxed() @@ -117,14 +171,14 @@ impl ApubActor for Person { if !include_deleted { q = q.filter(person::deleted.eq(false)) } - q.first::(conn).await + q.first(conn).await.optional() } async fn read_from_name_and_domain( pool: &mut DbPool<'_>, person_name: &str, for_domain: &str, - ) -> Result { + ) -> Result, Error> { let conn = &mut get_conn(pool).await?; person::table @@ -132,8 +186,9 @@ impl ApubActor for Person { .filter(lower(person::name).eq(person_name.to_lowercase())) .filter(lower(instance::domain).eq(for_domain.to_lowercase())) .select(person::all_columns) - .first::(conn) + .first(conn) .await + .optional() } } @@ -141,29 +196,26 @@ impl ApubActor for Person { impl Followable for PersonFollower { type Form = PersonFollowerForm; async fn follow(pool: &mut DbPool<'_>, form: &PersonFollowerForm) -> Result { - use crate::schema::person_follower::dsl::{follower_id, person_follower, person_id}; let conn = &mut get_conn(pool).await?; - insert_into(person_follower) + insert_into(person_follower::table) .values(form) - .on_conflict((follower_id, person_id)) + .on_conflict((person_follower::follower_id, person_follower::person_id)) .do_update() 
.set(form) .get_result::(conn) .await } + + /// Currently no user following async fn follow_accepted(_: &mut DbPool<'_>, _: CommunityId, _: PersonId) -> Result { - unimplemented!() + Err(Error::NotFound) } + async fn unfollow(pool: &mut DbPool<'_>, form: &PersonFollowerForm) -> Result { - use crate::schema::person_follower::dsl::{follower_id, person_follower, person_id}; let conn = &mut get_conn(pool).await?; - diesel::delete( - person_follower - .filter(follower_id.eq(&form.follower_id)) - .filter(person_id.eq(&form.person_id)), - ) - .execute(conn) - .await + diesel::delete(person_follower::table.find((form.follower_id, form.person_id))) + .execute(conn) + .await } } @@ -184,8 +236,6 @@ impl PersonFollower { #[cfg(test)] mod tests { - #![allow(clippy::unwrap_used)] - #![allow(clippy::indexing_slicing)] use crate::{ source::{ @@ -195,25 +245,21 @@ mod tests { traits::{Crud, Followable}, utils::build_db_pool_for_tests, }; + use lemmy_utils::error::LemmyResult; + use pretty_assertions::assert_eq; use serial_test::serial; #[tokio::test] #[serial] - async fn test_crud() { + async fn test_crud() -> LemmyResult<()> { let pool = &build_db_pool_for_tests().await; let pool = &mut pool.into(); - let inserted_instance = Instance::read_or_create(pool, "my_domain.tld".to_string()) - .await - .unwrap(); + let inserted_instance = Instance::read_or_create(pool, "my_domain.tld".to_string()).await?; - let new_person = PersonInsertForm::builder() - .name("holly".into()) - .public_key("nada".to_owned()) - .instance_id(inserted_instance.id) - .build(); + let new_person = PersonInsertForm::test_form(inserted_instance.id, "holly"); - let inserted_person = Person::create(pool, &new_person).await.unwrap(); + let inserted_person = Person::create(pool, &new_person).await?; let expected_person = Person { id: inserted_person.id, @@ -230,72 +276,61 @@ mod tests { local: true, bot_account: false, private_key: None, - public_key: "nada".to_owned(), + public_key: "pubkey".to_owned(), last_refreshed_at: inserted_person.published, inbox_url: inserted_person.inbox_url.clone(), - shared_inbox_url: None, matrix_user_id: None, ban_expires: None, instance_id: inserted_instance.id, }; - let read_person = Person::read(pool, inserted_person.id).await.unwrap(); + let read_person = Person::read(pool, inserted_person.id).await?; let update_person_form = PersonUpdateForm { actor_id: Some(inserted_person.actor_id.clone()), ..Default::default() }; - let updated_person = Person::update(pool, inserted_person.id, &update_person_form) - .await - .unwrap(); + let updated_person = Person::update(pool, inserted_person.id, &update_person_form).await?; - let num_deleted = Person::delete(pool, inserted_person.id).await.unwrap(); - Instance::delete(pool, inserted_instance.id).await.unwrap(); + let num_deleted = Person::delete(pool, inserted_person.id).await?; + Instance::delete(pool, inserted_instance.id).await?; assert_eq!(expected_person, read_person); assert_eq!(expected_person, inserted_person); assert_eq!(expected_person, updated_person); assert_eq!(1, num_deleted); + + Ok(()) } #[tokio::test] #[serial] - async fn follow() { + async fn follow() -> LemmyResult<()> { let pool = &build_db_pool_for_tests().await; let pool = &mut pool.into(); - let inserted_instance = Instance::read_or_create(pool, "my_domain.tld".to_string()) - .await - .unwrap(); + let inserted_instance = Instance::read_or_create(pool, "my_domain.tld".to_string()).await?; - let person_form_1 = PersonInsertForm::builder() - .name("erich".into()) - 
.public_key("pubkey".to_string()) - .instance_id(inserted_instance.id) - .build(); - let person_1 = Person::create(pool, &person_form_1).await.unwrap(); - let person_form_2 = PersonInsertForm::builder() - .name("michele".into()) - .public_key("pubkey".to_string()) - .instance_id(inserted_instance.id) - .build(); - let person_2 = Person::create(pool, &person_form_2).await.unwrap(); + let person_form_1 = PersonInsertForm::test_form(inserted_instance.id, "erich"); + let person_1 = Person::create(pool, &person_form_1).await?; + let person_form_2 = PersonInsertForm::test_form(inserted_instance.id, "michele"); + let person_2 = Person::create(pool, &person_form_2).await?; let follow_form = PersonFollowerForm { person_id: person_1.id, follower_id: person_2.id, pending: false, }; - let person_follower = PersonFollower::follow(pool, &follow_form).await.unwrap(); + let person_follower = PersonFollower::follow(pool, &follow_form).await?; assert_eq!(person_1.id, person_follower.person_id); assert_eq!(person_2.id, person_follower.follower_id); assert!(!person_follower.pending); - let followers = PersonFollower::list_followers(pool, person_1.id) - .await - .unwrap(); + let followers = PersonFollower::list_followers(pool, person_1.id).await?; assert_eq!(vec![person_2], followers); - let unfollow = PersonFollower::unfollow(pool, &follow_form).await.unwrap(); + let unfollow = PersonFollower::unfollow(pool, &follow_form).await?; assert_eq!(1, unfollow); + + Ok(()) } } diff --git a/crates/db_schema/src/impls/person_block.rs b/crates/db_schema/src/impls/person_block.rs index 0d125cd51..7f2286616 100644 --- a/crates/db_schema/src/impls/person_block.rs +++ b/crates/db_schema/src/impls/person_block.rs @@ -1,24 +1,57 @@ use crate::{ newtypes::PersonId, - schema::person_block::dsl::{person_block, person_id, target_id}, - source::person_block::{PersonBlock, PersonBlockForm}, + schema::{person, person_block}, + source::{ + person::Person, + person_block::{PersonBlock, PersonBlockForm}, + }, traits::Blockable, utils::{get_conn, DbPool}, }; -use diesel::{dsl::insert_into, result::Error, ExpressionMethods, QueryDsl}; +use diesel::{ + dsl::{exists, insert_into, not}, + result::Error, + select, + ExpressionMethods, + JoinOnDsl, + QueryDsl, +}; use diesel_async::RunQueryDsl; +use lemmy_utils::{error::LemmyResult, LemmyErrorType}; impl PersonBlock { pub async fn read( pool: &mut DbPool<'_>, for_person_id: PersonId, for_recipient_id: PersonId, - ) -> Result { + ) -> LemmyResult<()> { let conn = &mut get_conn(pool).await?; - person_block - .filter(person_id.eq(for_person_id)) - .filter(target_id.eq(for_recipient_id)) - .first::(conn) + select(not(exists( + person_block::table.find((for_person_id, for_recipient_id)), + ))) + .get_result::(conn) + .await? 
+ .then_some(()) + .ok_or(LemmyErrorType::PersonIsBlocked.into()) + } + + pub async fn for_person( + pool: &mut DbPool<'_>, + person_id: PersonId, + ) -> Result, Error> { + let conn = &mut get_conn(pool).await?; + let target_person_alias = diesel::alias!(person as person1); + + person_block::table + .inner_join(person::table.on(person_block::person_id.eq(person::id))) + .inner_join( + target_person_alias.on(person_block::target_id.eq(target_person_alias.field(person::id))), + ) + .select(target_person_alias.fields(person::all_columns)) + .filter(person_block::person_id.eq(person_id)) + .filter(target_person_alias.field(person::deleted).eq(false)) + .order_by(person_block::published) + .load::(conn) .await } } @@ -31,9 +64,9 @@ impl Blockable for PersonBlock { person_block_form: &PersonBlockForm, ) -> Result { let conn = &mut get_conn(pool).await?; - insert_into(person_block) + insert_into(person_block::table) .values(person_block_form) - .on_conflict((person_id, target_id)) + .on_conflict((person_block::person_id, person_block::target_id)) .do_update() .set(person_block_form) .get_result::(conn) @@ -42,9 +75,7 @@ impl Blockable for PersonBlock { async fn unblock(pool: &mut DbPool<'_>, person_block_form: &Self::Form) -> Result { let conn = &mut get_conn(pool).await?; diesel::delete( - person_block - .filter(person_id.eq(person_block_form.person_id)) - .filter(target_id.eq(person_block_form.target_id)), + person_block::table.find((person_block_form.person_id, person_block_form.target_id)), ) .execute(conn) .await diff --git a/crates/db_schema/src/impls/person_mention.rs b/crates/db_schema/src/impls/person_mention.rs index f2441f00c..433176683 100644 --- a/crates/db_schema/src/impls/person_mention.rs +++ b/crates/db_schema/src/impls/person_mention.rs @@ -1,6 +1,7 @@ use crate::{ + diesel::OptionalExtension, newtypes::{CommentId, PersonId, PersonMentionId}, - schema::person_mention::dsl::{comment_id, person_mention, read, recipient_id}, + schema::person_mention, source::person_mention::{PersonMention, PersonMentionInsertForm, PersonMentionUpdateForm}, traits::Crud, utils::{get_conn, DbPool}, @@ -20,10 +21,10 @@ impl Crud for PersonMention { ) -> Result { let conn = &mut get_conn(pool).await?; // since the return here isnt utilized, we dont need to do an update - // but get_result doesnt return the existing row here - insert_into(person_mention) + // but get_result doesn't return the existing row here + insert_into(person_mention::table) .values(person_mention_form) - .on_conflict((recipient_id, comment_id)) + .on_conflict((person_mention::recipient_id, person_mention::comment_id)) .do_update() .set(person_mention_form) .get_result::(conn) @@ -36,7 +37,7 @@ impl Crud for PersonMention { person_mention_form: &Self::UpdateForm, ) -> Result { let conn = &mut get_conn(pool).await?; - diesel::update(person_mention.find(person_mention_id)) + diesel::update(person_mention::table.find(person_mention_id)) .set(person_mention_form) .get_result::(conn) .await @@ -50,11 +51,11 @@ impl PersonMention { ) -> Result, Error> { let conn = &mut get_conn(pool).await?; diesel::update( - person_mention - .filter(recipient_id.eq(for_recipient_id)) - .filter(read.eq(false)), + person_mention::table + .filter(person_mention::recipient_id.eq(for_recipient_id)) + .filter(person_mention::read.eq(false)), ) - .set(read.eq(true)) + .set(person_mention::read.eq(true)) .get_results::(conn) .await } @@ -63,124 +64,13 @@ impl PersonMention { pool: &mut DbPool<'_>, for_comment_id: CommentId, for_recipient_id: PersonId, - ) -> 
Result { + ) -> Result, Error> { let conn = &mut get_conn(pool).await?; - person_mention - .filter(comment_id.eq(for_comment_id)) - .filter(recipient_id.eq(for_recipient_id)) - .first::(conn) + person_mention::table + .filter(person_mention::comment_id.eq(for_comment_id)) + .filter(person_mention::recipient_id.eq(for_recipient_id)) + .first(conn) .await - } -} - -#[cfg(test)] -mod tests { - #![allow(clippy::unwrap_used)] - #![allow(clippy::indexing_slicing)] - - use crate::{ - source::{ - comment::{Comment, CommentInsertForm}, - community::{Community, CommunityInsertForm}, - instance::Instance, - person::{Person, PersonInsertForm}, - person_mention::{PersonMention, PersonMentionInsertForm, PersonMentionUpdateForm}, - post::{Post, PostInsertForm}, - }, - traits::Crud, - utils::build_db_pool_for_tests, - }; - use serial_test::serial; - - #[tokio::test] - #[serial] - async fn test_crud() { - let pool = &build_db_pool_for_tests().await; - let pool = &mut pool.into(); - - let inserted_instance = Instance::read_or_create(pool, "my_domain.tld".to_string()) - .await - .unwrap(); - - let new_person = PersonInsertForm::builder() - .name("terrylake".into()) - .public_key("pubkey".to_string()) - .instance_id(inserted_instance.id) - .build(); - - let inserted_person = Person::create(pool, &new_person).await.unwrap(); - - let recipient_form = PersonInsertForm::builder() - .name("terrylakes recipient".into()) - .public_key("pubkey".to_string()) - .instance_id(inserted_instance.id) - .build(); - - let inserted_recipient = Person::create(pool, &recipient_form).await.unwrap(); - - let new_community = CommunityInsertForm::builder() - .name("test community lake".to_string()) - .title("nada".to_owned()) - .public_key("pubkey".to_string()) - .instance_id(inserted_instance.id) - .build(); - - let inserted_community = Community::create(pool, &new_community).await.unwrap(); - - let new_post = PostInsertForm::builder() - .name("A test post".into()) - .creator_id(inserted_person.id) - .community_id(inserted_community.id) - .build(); - - let inserted_post = Post::create(pool, &new_post).await.unwrap(); - - let comment_form = CommentInsertForm::builder() - .content("A test comment".into()) - .creator_id(inserted_person.id) - .post_id(inserted_post.id) - .build(); - - let inserted_comment = Comment::create(pool, &comment_form, None).await.unwrap(); - - let person_mention_form = PersonMentionInsertForm { - recipient_id: inserted_recipient.id, - comment_id: inserted_comment.id, - read: None, - }; - - let inserted_mention = PersonMention::create(pool, &person_mention_form) - .await - .unwrap(); - - let expected_mention = PersonMention { - id: inserted_mention.id, - recipient_id: inserted_mention.recipient_id, - comment_id: inserted_mention.comment_id, - read: false, - published: inserted_mention.published, - }; - - let read_mention = PersonMention::read(pool, inserted_mention.id) - .await - .unwrap(); - - let person_mention_update_form = PersonMentionUpdateForm { read: Some(false) }; - let updated_mention = - PersonMention::update(pool, inserted_mention.id, &person_mention_update_form) - .await - .unwrap(); - Comment::delete(pool, inserted_comment.id).await.unwrap(); - Post::delete(pool, inserted_post.id).await.unwrap(); - Community::delete(pool, inserted_community.id) - .await - .unwrap(); - Person::delete(pool, inserted_person.id).await.unwrap(); - Person::delete(pool, inserted_recipient.id).await.unwrap(); - Instance::delete(pool, inserted_instance.id).await.unwrap(); - - assert_eq!(expected_mention, read_mention); - 
assert_eq!(expected_mention, inserted_mention); - assert_eq!(expected_mention, updated_mention); + .optional() } } diff --git a/crates/db_schema/src/impls/post.rs b/crates/db_schema/src/impls/post.rs index 4a719415a..fb6245585 100644 --- a/crates/db_schema/src/impls/post.rs +++ b/crates/db_schema/src/impls/post.rs @@ -1,24 +1,11 @@ -use super::instance::coalesce; use crate::{ + diesel::{BoolExpressionMethods, OptionalExtension}, newtypes::{CommunityId, DbUrl, PersonId, PostId}, - schema::post::dsl::{ - ap_id, - body, - community_id, - creator_id, - deleted, - featured_community, - local, - name, - post, - published, - removed, - thumbnail_url, - updated, - url, - }, + schema::{community, person, post, post_hide, post_like, post_read, post_saved}, source::post::{ Post, + PostHide, + PostHideForm, PostInsertForm, PostLike, PostLikeForm, @@ -29,11 +16,28 @@ use crate::{ PostUpdateForm, }, traits::{Crud, Likeable, Saveable}, - utils::{get_conn, naive_now, DbPool, DELETED_REPLACEMENT_TEXT, FETCH_LIMIT_MAX}, + utils::{ + functions::coalesce, + get_conn, + naive_now, + now, + DbPool, + DELETED_REPLACEMENT_TEXT, + FETCH_LIMIT_MAX, + SITEMAP_DAYS, + SITEMAP_LIMIT, + }, }; use ::url::Url; -use chrono::{Duration, Utc}; -use diesel::{dsl::insert_into, result::Error, ExpressionMethods, QueryDsl, TextExpressionMethods}; +use chrono::{DateTime, Utc}; +use diesel::{ + dsl::{count, insert_into, not}, + result::Error, + DecoratableTarget, + ExpressionMethods, + QueryDsl, + TextExpressionMethods, +}; use diesel_async::RunQueryDsl; use std::collections::HashSet; @@ -45,11 +49,8 @@ impl Crud for Post { async fn create(pool: &mut DbPool<'_>, form: &Self::InsertForm) -> Result { let conn = &mut get_conn(pool).await?; - insert_into(post) + insert_into(post::table) .values(form) - .on_conflict(ap_id) - .do_update() - .set(form) .get_result::(conn) .await } @@ -60,7 +61,7 @@ impl Crud for Post { new_post: &Self::UpdateForm, ) -> Result { let conn = &mut get_conn(pool).await?; - diesel::update(post.find(post_id)) + diesel::update(post::table.find(post_id)) .set(new_post) .get_result::(conn) .await @@ -68,19 +69,23 @@ impl Crud for Post { } impl Post { - pub async fn list_for_community( + pub async fn read_xx(pool: &mut DbPool<'_>, id: PostId) -> Result { + let conn = &mut *get_conn(pool).await?; + post::table.find(id).first(conn).await + } + pub async fn insert_apub( pool: &mut DbPool<'_>, - the_community_id: CommunityId, - ) -> Result, Error> { + timestamp: DateTime, + form: &PostInsertForm, + ) -> Result { let conn = &mut get_conn(pool).await?; - post - .filter(community_id.eq(the_community_id)) - .filter(deleted.eq(false)) - .filter(removed.eq(false)) - .then_order_by(featured_community.desc()) - .then_order_by(published.desc()) - .limit(FETCH_LIMIT_MAX) - .load::(conn) + insert_into(post::table) + .values(form) + .on_conflict(post::ap_id) + .filter_target(coalesce(post::updated, post::published).lt(timestamp)) + .do_update() + .set(form) + .get_result::(conn) .await } @@ -89,12 +94,12 @@ impl Post { the_community_id: CommunityId, ) -> Result, Error> { let conn = &mut get_conn(pool).await?; - post - .filter(community_id.eq(the_community_id)) - .filter(deleted.eq(false)) - .filter(removed.eq(false)) - .filter(featured_community.eq(true)) - .then_order_by(published.desc()) + post::table + .filter(post::community_id.eq(the_community_id)) + .filter(post::deleted.eq(false)) + .filter(post::removed.eq(false)) + .filter(post::featured_community.eq(true)) + .then_order_by(post::published.desc()) .limit(FETCH_LIMIT_MAX) 
.load::(conn) .await @@ -104,13 +109,16 @@ impl Post { pool: &mut DbPool<'_>, ) -> Result)>, Error> { let conn = &mut get_conn(pool).await?; - post - .select((ap_id, coalesce(updated, published))) - .filter(local.eq(true)) - .filter(deleted.eq(false)) - .filter(removed.eq(false)) - .filter(published.ge(Utc::now().naive_utc() - Duration::days(1))) - .order(published.desc()) + post::table + .select((post::ap_id, coalesce(post::updated, post::published))) + .filter(post::local.eq(true)) + .filter(post::deleted.eq(false)) + .filter(post::removed.eq(false)) + .filter( + post::published.ge(Utc::now().naive_utc() - SITEMAP_DAYS.expect("TimeDelta out of bounds")), + ) + .order(post::published.desc()) + .limit(SITEMAP_LIMIT) .load::<(DbUrl, chrono::DateTime)>(conn) .await } @@ -121,13 +129,13 @@ impl Post { ) -> Result, Error> { let conn = &mut get_conn(pool).await?; - diesel::update(post.filter(creator_id.eq(for_creator_id))) + diesel::update(post::table.filter(post::creator_id.eq(for_creator_id))) .set(( - name.eq(DELETED_REPLACEMENT_TEXT), - url.eq(Option::<&str>::None), - body.eq(DELETED_REPLACEMENT_TEXT), - deleted.eq(true), - updated.eq(naive_now()), + post::name.eq(DELETED_REPLACEMENT_TEXT), + post::url.eq(Option::<&str>::None), + post::body.eq(DELETED_REPLACEMENT_TEXT), + post::deleted.eq(true), + post::updated.eq(naive_now()), )) .get_results::(conn) .await @@ -137,19 +145,19 @@ impl Post { pool: &mut DbPool<'_>, for_creator_id: PersonId, for_community_id: Option, - new_removed: bool, + removed: bool, ) -> Result, Error> { let conn = &mut get_conn(pool).await?; - let mut update = diesel::update(post).into_boxed(); - update = update.filter(creator_id.eq(for_creator_id)); + let mut update = diesel::update(post::table).into_boxed(); + update = update.filter(post::creator_id.eq(for_creator_id)); if let Some(for_community_id) = for_community_id { - update = update.filter(community_id.eq(for_community_id)); + update = update.filter(post::community_id.eq(for_community_id)); } update - .set((removed.eq(new_removed), updated.eq(naive_now()))) + .set((post::removed.eq(removed), post::updated.eq(naive_now()))) .get_results::(conn) .await } @@ -164,14 +172,12 @@ impl Post { ) -> Result, Error> { let conn = &mut get_conn(pool).await?; let object_id: DbUrl = object_id.into(); - Ok( - post - .filter(ap_id.eq(object_id)) - .first::(conn) - .await - .ok() - .map(Into::into), - ) + post::table + .filter(post::ap_id.eq(object_id)) + .filter(post::scheduled_publish_time.is_null()) + .first(conn) + .await + .optional() } pub async fn fetch_pictrs_posts_for_creator( @@ -181,9 +187,9 @@ impl Post { let conn = &mut get_conn(pool).await?; let pictrs_search = "%pictrs/image%"; - post - .filter(creator_id.eq(for_creator_id)) - .filter(url.like(pictrs_search)) + post::table + .filter(post::creator_id.eq(for_creator_id)) + .filter(post::url.like(pictrs_search)) .load::(conn) .await } @@ -197,13 +203,13 @@ impl Post { let pictrs_search = "%pictrs/image%"; diesel::update( - post - .filter(creator_id.eq(for_creator_id)) - .filter(url.like(pictrs_search)), + post::table + .filter(post::creator_id.eq(for_creator_id)) + .filter(post::url.like(pictrs_search)), ) .set(( - url.eq::>(None), - thumbnail_url.eq::>(None), + post::url.eq::>(None), + post::thumbnail_url.eq::>(None), )) .get_results::(conn) .await @@ -215,9 +221,9 @@ impl Post { ) -> Result, Error> { let conn = &mut get_conn(pool).await?; let pictrs_search = "%pictrs/image%"; - post - .filter(community_id.eq(for_community_id)) - .filter(url.like(pictrs_search)) + 
post::table + .filter(post::community_id.eq(for_community_id)) + .filter(post::url.like(pictrs_search)) .load::(conn) .await } @@ -231,17 +237,39 @@ impl Post { let pictrs_search = "%pictrs/image%"; diesel::update( - post - .filter(community_id.eq(for_community_id)) - .filter(url.like(pictrs_search)), + post::table + .filter(post::community_id.eq(for_community_id)) + .filter(post::url.like(pictrs_search)), ) .set(( - url.eq::>(None), - thumbnail_url.eq::>(None), + post::url.eq::>(None), + post::thumbnail_url.eq::>(None), )) .get_results::(conn) .await } + + pub async fn user_scheduled_post_count( + person_id: PersonId, + pool: &mut DbPool<'_>, + ) -> Result { + let conn = &mut get_conn(pool).await?; + + post::table + .inner_join(person::table) + .inner_join(community::table) + // find all posts which have scheduled_publish_time that is in the future + .filter(post::scheduled_publish_time.is_not_null()) + .filter(coalesce(post::scheduled_publish_time, now()).gt(now())) + // make sure the post and community are still around + .filter(not(post::deleted.or(post::removed))) + .filter(not(community::removed.or(community::deleted))) + // only posts by specified user + .filter(post::creator_id.eq(person_id)) + .select(count(post::id)) + .first::(conn) + .await + } } #[async_trait] @@ -249,11 +277,10 @@ impl Likeable for PostLike { type Form = PostLikeForm; type IdType = PostId; async fn like(pool: &mut DbPool<'_>, post_like_form: &PostLikeForm) -> Result { - use crate::schema::post_like::dsl::{person_id, post_id, post_like}; let conn = &mut get_conn(pool).await?; - insert_into(post_like) + insert_into(post_like::table) .values(post_like_form) - .on_conflict((post_id, person_id)) + .on_conflict((post_like::post_id, post_like::person_id)) .do_update() .set(post_like_form) .get_result::(conn) @@ -264,15 +291,10 @@ impl Likeable for PostLike { person_id: PersonId, post_id: PostId, ) -> Result { - use crate::schema::post_like::dsl; let conn = &mut get_conn(pool).await?; - diesel::delete( - dsl::post_like - .filter(dsl::post_id.eq(post_id)) - .filter(dsl::person_id.eq(person_id)), - ) - .execute(conn) - .await + diesel::delete(post_like::table.find((person_id, post_id))) + .execute(conn) + .await } } @@ -280,26 +302,20 @@ impl Likeable for PostLike { impl Saveable for PostSaved { type Form = PostSavedForm; async fn save(pool: &mut DbPool<'_>, post_saved_form: &PostSavedForm) -> Result { - use crate::schema::post_saved::dsl::{person_id, post_id, post_saved}; let conn = &mut get_conn(pool).await?; - insert_into(post_saved) + insert_into(post_saved::table) .values(post_saved_form) - .on_conflict((post_id, person_id)) + .on_conflict((post_saved::post_id, post_saved::person_id)) .do_update() .set(post_saved_form) .get_result::(conn) .await } async fn unsave(pool: &mut DbPool<'_>, post_saved_form: &PostSavedForm) -> Result { - use crate::schema::post_saved::dsl::{person_id, post_id, post_saved}; let conn = &mut get_conn(pool).await?; - diesel::delete( - post_saved - .filter(post_id.eq(post_saved_form.post_id)) - .filter(person_id.eq(post_saved_form.person_id)), - ) - .execute(conn) - .await + diesel::delete(post_saved::table.find((post_saved_form.person_id, post_saved_form.post_id))) + .execute(conn) + .await } } @@ -309,14 +325,13 @@ impl PostRead { post_ids: HashSet, person_id: PersonId, ) -> Result { - use crate::schema::post_read::dsl::post_read; let conn = &mut get_conn(pool).await?; let forms = post_ids .into_iter() .map(|post_id| PostReadForm { post_id, person_id }) .collect::>(); - 
insert_into(post_read) + insert_into(post_read::table) .values(forms) .on_conflict_do_nothing() .execute(conn) @@ -328,13 +343,48 @@ impl PostRead { post_id_: HashSet, person_id_: PersonId, ) -> Result { - use crate::schema::post_read::dsl::{person_id, post_id, post_read}; let conn = &mut get_conn(pool).await?; diesel::delete( - post_read - .filter(post_id.eq_any(post_id_)) - .filter(person_id.eq(person_id_)), + post_read::table + .filter(post_read::post_id.eq_any(post_id_)) + .filter(post_read::person_id.eq(person_id_)), + ) + .execute(conn) + .await + } +} + +impl PostHide { + pub async fn hide( + pool: &mut DbPool<'_>, + post_ids: HashSet, + person_id: PersonId, + ) -> Result { + let conn = &mut get_conn(pool).await?; + + let forms = post_ids + .into_iter() + .map(|post_id| PostHideForm { post_id, person_id }) + .collect::>(); + insert_into(post_hide::table) + .values(forms) + .on_conflict_do_nothing() + .execute(conn) + .await + } + + pub async fn unhide( + pool: &mut DbPool<'_>, + post_id_: HashSet, + person_id_: PersonId, + ) -> Result { + let conn = &mut get_conn(pool).await?; + + diesel::delete( + post_hide::table + .filter(post_hide::post_id.eq_any(post_id_)) + .filter(post_hide::person_id.eq(person_id_)), ) .execute(conn) .await @@ -343,8 +393,6 @@ impl PostRead { #[cfg(test)] mod tests { - #![allow(clippy::unwrap_used)] - #![allow(clippy::indexing_slicing)] use crate::{ source::{ @@ -365,56 +413,60 @@ mod tests { traits::{Crud, Likeable, Saveable}, utils::build_db_pool_for_tests, }; + use chrono::DateTime; + use lemmy_utils::error::LemmyResult; + use pretty_assertions::assert_eq; use serial_test::serial; use std::collections::HashSet; + use url::Url; #[tokio::test] #[serial] - async fn test_crud() { + async fn test_crud() -> LemmyResult<()> { let pool = &build_db_pool_for_tests().await; let pool = &mut pool.into(); - let inserted_instance = Instance::read_or_create(pool, "my_domain.tld".to_string()) - .await - .unwrap(); + let inserted_instance = Instance::read_or_create(pool, "my_domain.tld".to_string()).await?; - let new_person = PersonInsertForm::builder() - .name("jim".into()) - .public_key("pubkey".to_string()) - .instance_id(inserted_instance.id) - .build(); + let new_person = PersonInsertForm::test_form(inserted_instance.id, "jim"); - let inserted_person = Person::create(pool, &new_person).await.unwrap(); + let inserted_person = Person::create(pool, &new_person).await?; - let new_community = CommunityInsertForm::builder() - .name("test community_3".to_string()) - .title("nada".to_owned()) - .public_key("pubkey".to_string()) - .instance_id(inserted_instance.id) - .build(); + let new_community = CommunityInsertForm::new( + inserted_instance.id, + "test community_3".to_string(), + "nada".to_owned(), + "pubkey".to_string(), + ); - let inserted_community = Community::create(pool, &new_community).await.unwrap(); + let inserted_community = Community::create(pool, &new_community).await?; - let new_post = PostInsertForm::builder() - .name("A test post".into()) - .creator_id(inserted_person.id) - .community_id(inserted_community.id) - .build(); + let new_post = PostInsertForm::new( + "A test post".into(), + inserted_person.id, + inserted_community.id, + ); + let inserted_post = Post::create(pool, &new_post).await?; - let inserted_post = Post::create(pool, &new_post).await.unwrap(); + let new_post2 = PostInsertForm::new( + "A test post 2".into(), + inserted_person.id, + inserted_community.id, + ); + let inserted_post2 = Post::create(pool, &new_post2).await?; - let new_post2 = 
PostInsertForm::builder() - .name("A test post 2".into()) - .creator_id(inserted_person.id) - .community_id(inserted_community.id) - .build(); - let inserted_post2 = Post::create(pool, &new_post2).await.unwrap(); + let new_scheduled_post = PostInsertForm { + scheduled_publish_time: Some(DateTime::from_timestamp_nanos(i64::MAX)), + ..PostInsertForm::new("beans".into(), inserted_person.id, inserted_community.id) + }; + let inserted_scheduled_post = Post::create(pool, &new_scheduled_post).await?; let expected_post = Post { id: inserted_post.id, name: "A test post".into(), url: None, body: None, + alt_text: None, creator_id: inserted_person.id, community_id: inserted_community.id, published: inserted_post.published, @@ -427,11 +479,13 @@ mod tests { embed_description: None, embed_video_url: None, thumbnail_url: None, - ap_id: inserted_post.ap_id.clone(), + ap_id: Url::parse(&format!("https://lemmy-alpha/post/{}", inserted_post.id))?.into(), local: true, language_id: Default::default(), featured_community: false, featured_local: false, + url_content_type: None, + scheduled_publish_time: None, }; // Post Like @@ -441,10 +495,9 @@ mod tests { score: 1, }; - let inserted_post_like = PostLike::like(pool, &post_like_form).await.unwrap(); + let inserted_post_like = PostLike::like(pool, &post_like_form).await?; let expected_post_like = PostLike { - id: inserted_post_like.id, post_id: inserted_post.id, person_id: inserted_person.id, published: inserted_post_like.published, @@ -457,10 +510,9 @@ mod tests { person_id: inserted_person.id, }; - let inserted_post_saved = PostSaved::save(pool, &post_saved_form).await.unwrap(); + let inserted_post_saved = PostSaved::save(pool, &post_saved_form).await?; let expected_post_saved = PostSaved { - id: inserted_post_saved.id, post_id: inserted_post.id, person_id: inserted_person.id, published: inserted_post_saved.published, @@ -472,48 +524,47 @@ mod tests { HashSet::from([inserted_post.id, inserted_post2.id]), inserted_person.id, ) - .await - .unwrap(); + .await?; assert_eq!(2, marked_as_read); - let read_post = Post::read(pool, inserted_post.id).await.unwrap(); + let read_post = Post::read(pool, inserted_post.id).await?; let new_post_update = PostUpdateForm { name: Some("A test post".into()), ..Default::default() }; - let updated_post = Post::update(pool, inserted_post.id, &new_post_update) - .await - .unwrap(); + let updated_post = Post::update(pool, inserted_post.id, &new_post_update).await?; - let like_removed = PostLike::remove(pool, inserted_person.id, inserted_post.id) - .await - .unwrap(); + // Scheduled post count + let scheduled_post_count = Post::user_scheduled_post_count(inserted_person.id, pool).await?; + assert_eq!(1, scheduled_post_count); + + let like_removed = PostLike::remove(pool, inserted_person.id, inserted_post.id).await?; assert_eq!(1, like_removed); - let saved_removed = PostSaved::unsave(pool, &post_saved_form).await.unwrap(); + let saved_removed = PostSaved::unsave(pool, &post_saved_form).await?; assert_eq!(1, saved_removed); let read_removed = PostRead::mark_as_unread( pool, HashSet::from([inserted_post.id, inserted_post2.id]), inserted_person.id, ) - .await - .unwrap(); + .await?; assert_eq!(2, read_removed); - let num_deleted = Post::delete(pool, inserted_post.id).await.unwrap() - + Post::delete(pool, inserted_post2.id).await.unwrap(); - assert_eq!(2, num_deleted); - Community::delete(pool, inserted_community.id) - .await - .unwrap(); - Person::delete(pool, inserted_person.id).await.unwrap(); - Instance::delete(pool, 
inserted_instance.id).await.unwrap(); + let num_deleted = Post::delete(pool, inserted_post.id).await? + + Post::delete(pool, inserted_post2.id).await? + + Post::delete(pool, inserted_scheduled_post.id).await?; + assert_eq!(3, num_deleted); + Community::delete(pool, inserted_community.id).await?; + Person::delete(pool, inserted_person.id).await?; + Instance::delete(pool, inserted_instance.id).await?; assert_eq!(expected_post, read_post); assert_eq!(expected_post, inserted_post); assert_eq!(expected_post, updated_post); assert_eq!(expected_post_like, inserted_post_like); assert_eq!(expected_post_saved, inserted_post_saved); + + Ok(()) } } diff --git a/crates/db_schema/src/impls/post_report.rs b/crates/db_schema/src/impls/post_report.rs index b4078d950..5507423e1 100644 --- a/crates/db_schema/src/impls/post_report.rs +++ b/crates/db_schema/src/impls/post_report.rs @@ -1,6 +1,9 @@ use crate::{ - newtypes::{PersonId, PostReportId}, - schema::post_report::dsl::{post_report, resolved, resolver_id, updated}, + newtypes::{PersonId, PostId, PostReportId}, + schema::post_report::{ + dsl::{post_report, resolved, resolver_id, updated}, + post_id, + }, source::post_report::{PostReport, PostReportForm}, traits::Reportable, utils::{get_conn, naive_now, DbPool}, @@ -17,6 +20,7 @@ use diesel_async::RunQueryDsl; impl Reportable for PostReport { type Form = PostReportForm; type IdType = PostReportId; + type ObjectIdType = PostId; async fn report(pool: &mut DbPool<'_>, post_report_form: &PostReportForm) -> Result { let conn = &mut get_conn(pool).await?; @@ -42,6 +46,22 @@ impl Reportable for PostReport { .await } + async fn resolve_all_for_object( + pool: &mut DbPool<'_>, + post_id_: PostId, + by_resolver_id: PersonId, + ) -> Result { + let conn = &mut get_conn(pool).await?; + update(post_report.filter(post_id.eq(post_id_))) + .set(( + resolved.eq(true), + resolver_id.eq(by_resolver_id), + updated.eq(naive_now()), + )) + .execute(conn) + .await + } + async fn unresolve( pool: &mut DbPool<'_>, report_id: Self::IdType, @@ -61,8 +81,6 @@ impl Reportable for PostReport { #[cfg(test)] mod tests { - #![allow(clippy::unwrap_used)] - #![allow(clippy::indexing_slicing)] use super::*; use crate::{ @@ -75,33 +93,24 @@ mod tests { traits::Crud, utils::build_db_pool_for_tests, }; + use diesel::result::Error; use serial_test::serial; - async fn init(pool: &mut DbPool<'_>) -> (Person, PostReport) { - let inserted_instance = Instance::read_or_create(pool, "my_domain.tld".to_string()) - .await - .unwrap(); - let person_form = PersonInsertForm::builder() - .name("jim".into()) - .public_key("pubkey".to_string()) - .instance_id(inserted_instance.id) - .build(); - let person = Person::create(pool, &person_form).await.unwrap(); + async fn init(pool: &mut DbPool<'_>) -> Result<(Person, PostReport), Error> { + let inserted_instance = Instance::read_or_create(pool, "my_domain.tld".to_string()).await?; + let person_form = PersonInsertForm::test_form(inserted_instance.id, "jim"); + let person = Person::create(pool, &person_form).await?; - let community_form = CommunityInsertForm::builder() - .name("test community_4".to_string()) - .title("nada".to_owned()) - .public_key("pubkey".to_string()) - .instance_id(inserted_instance.id) - .build(); - let community = Community::create(pool, &community_form).await.unwrap(); + let community_form = CommunityInsertForm::new( + inserted_instance.id, + "test community_4".to_string(), + "nada".to_owned(), + "pubkey".to_string(), + ); + let community = Community::create(pool, &community_form).await?; - 
let form = PostInsertForm::builder() - .name("A test post".into()) - .creator_id(person.id) - .community_id(community.id) - .build(); - let post = Post::create(pool, &form).await.unwrap(); + let form = PostInsertForm::new("A test post".into(), person.id, community.id); + let post = Post::create(pool, &form).await?; let report_form = PostReportForm { post_id: post.id, @@ -109,29 +118,46 @@ mod tests { reason: "my reason".to_string(), ..Default::default() }; - let report = PostReport::report(pool, &report_form).await.unwrap(); - (person, report) + let report = PostReport::report(pool, &report_form).await?; + + Ok((person, report)) } #[tokio::test] #[serial] - async fn test_resolve_post_report() { + async fn test_resolve_post_report() -> Result<(), Error> { let pool = &build_db_pool_for_tests().await; let pool = &mut pool.into(); - let (person, report) = init(pool).await; + let (person, report) = init(pool).await?; - let resolved_count = PostReport::resolve(pool, report.id, person.id) - .await - .unwrap(); + let resolved_count = PostReport::resolve(pool, report.id, person.id).await?; assert_eq!(resolved_count, 1); - let unresolved_count = PostReport::unresolve(pool, report.id, person.id) - .await - .unwrap(); + let unresolved_count = PostReport::unresolve(pool, report.id, person.id).await?; assert_eq!(unresolved_count, 1); - Person::delete(pool, person.id).await.unwrap(); - Post::delete(pool, report.post_id).await.unwrap(); + Person::delete(pool, person.id).await?; + Post::delete(pool, report.post_id).await?; + + Ok(()) + } + + #[tokio::test] + #[serial] + async fn test_resolve_all_post_reports() -> Result<(), Error> { + let pool = &build_db_pool_for_tests().await; + let pool = &mut pool.into(); + + let (person, report) = init(pool).await?; + + let resolved_count = + PostReport::resolve_all_for_object(pool, report.post_id, person.id).await?; + assert_eq!(resolved_count, 1); + + Person::delete(pool, person.id).await?; + Post::delete(pool, report.post_id).await?; + + Ok(()) } } diff --git a/crates/db_schema/src/impls/private_message.rs b/crates/db_schema/src/impls/private_message.rs index 81a4b1850..264175fe2 100644 --- a/crates/db_schema/src/impls/private_message.rs +++ b/crates/db_schema/src/impls/private_message.rs @@ -1,13 +1,14 @@ use crate::{ + diesel::{DecoratableTarget, OptionalExtension}, newtypes::{DbUrl, PersonId, PrivateMessageId}, - schema::private_message::dsl::{ap_id, private_message, read, recipient_id}, + schema::private_message, source::private_message::{PrivateMessage, PrivateMessageInsertForm, PrivateMessageUpdateForm}, traits::Crud, - utils::{get_conn, DbPool}, + utils::{functions::coalesce, get_conn, DbPool}, }; +use chrono::{DateTime, Utc}; use diesel::{dsl::insert_into, result::Error, ExpressionMethods, QueryDsl}; use diesel_async::RunQueryDsl; -use lemmy_utils::error::LemmyError; use url::Url; #[async_trait] @@ -18,11 +19,8 @@ impl Crud for PrivateMessage { async fn create(pool: &mut DbPool<'_>, form: &Self::InsertForm) -> Result { let conn = &mut get_conn(pool).await?; - insert_into(private_message) + insert_into(private_message::table) .values(form) - .on_conflict(ap_id) - .do_update() - .set(form) .get_result::(conn) .await } @@ -33,7 +31,7 @@ impl Crud for PrivateMessage { form: &Self::UpdateForm, ) -> Result { let conn = &mut get_conn(pool).await?; - diesel::update(private_message.find(private_message_id)) + diesel::update(private_message::table.find(private_message_id)) .set(form) .get_result::(conn) .await @@ -41,17 +39,33 @@ impl Crud for PrivateMessage { } impl 
PrivateMessage { + pub async fn insert_apub( + pool: &mut DbPool<'_>, + timestamp: DateTime, + form: &PrivateMessageInsertForm, + ) -> Result { + let conn = &mut get_conn(pool).await?; + insert_into(private_message::table) + .values(form) + .on_conflict(private_message::ap_id) + .filter_target(coalesce(private_message::updated, private_message::published).lt(timestamp)) + .do_update() + .set(form) + .get_result::(conn) + .await + } + pub async fn mark_all_as_read( pool: &mut DbPool<'_>, for_recipient_id: PersonId, ) -> Result, Error> { let conn = &mut get_conn(pool).await?; diesel::update( - private_message - .filter(recipient_id.eq(for_recipient_id)) - .filter(read.eq(false)), + private_message::table + .filter(private_message::recipient_id.eq(for_recipient_id)) + .filter(private_message::read.eq(false)), ) - .set(read.eq(true)) + .set(private_message::read.eq(true)) .get_results::(conn) .await } @@ -59,24 +73,19 @@ impl PrivateMessage { pub async fn read_from_apub_id( pool: &mut DbPool<'_>, object_id: Url, - ) -> Result, LemmyError> { + ) -> Result, Error> { let conn = &mut get_conn(pool).await?; let object_id: DbUrl = object_id.into(); - Ok( - private_message - .filter(ap_id.eq(object_id)) - .first::(conn) - .await - .ok() - .map(Into::into), - ) + private_message::table + .filter(private_message::ap_id.eq(object_id)) + .first(conn) + .await + .optional() } } #[cfg(test)] mod tests { - #![allow(clippy::unwrap_used)] - #![allow(clippy::indexing_slicing)] use crate::{ source::{ @@ -87,43 +96,34 @@ mod tests { traits::Crud, utils::build_db_pool_for_tests, }; + use lemmy_utils::error::LemmyResult; + use pretty_assertions::assert_eq; use serial_test::serial; + use url::Url; #[tokio::test] #[serial] - async fn test_crud() { + async fn test_crud() -> LemmyResult<()> { let pool = &build_db_pool_for_tests().await; let pool = &mut pool.into(); - let inserted_instance = Instance::read_or_create(pool, "my_domain.tld".to_string()) - .await - .unwrap(); + let inserted_instance = Instance::read_or_create(pool, "my_domain.tld".to_string()).await?; - let creator_form = PersonInsertForm::builder() - .name("creator_pm".into()) - .public_key("pubkey".to_string()) - .instance_id(inserted_instance.id) - .build(); + let creator_form = PersonInsertForm::test_form(inserted_instance.id, "creator_pm"); - let inserted_creator = Person::create(pool, &creator_form).await.unwrap(); + let inserted_creator = Person::create(pool, &creator_form).await?; - let recipient_form = PersonInsertForm::builder() - .name("recipient_pm".into()) - .public_key("pubkey".to_string()) - .instance_id(inserted_instance.id) - .build(); + let recipient_form = PersonInsertForm::test_form(inserted_instance.id, "recipient_pm"); - let inserted_recipient = Person::create(pool, &recipient_form).await.unwrap(); + let inserted_recipient = Person::create(pool, &recipient_form).await?; - let private_message_form = PrivateMessageInsertForm::builder() - .content("A test private message".into()) - .creator_id(inserted_creator.id) - .recipient_id(inserted_recipient.id) - .build(); + let private_message_form = PrivateMessageInsertForm::new( + inserted_creator.id, + inserted_recipient.id, + "A test private message".into(), + ); - let inserted_private_message = PrivateMessage::create(pool, &private_message_form) - .await - .unwrap(); + let inserted_private_message = PrivateMessage::create(pool, &private_message_form).await?; let expected_private_message = PrivateMessage { id: inserted_private_message.id, @@ -134,13 +134,15 @@ mod tests { read: false, 
updated: None, published: inserted_private_message.published, - ap_id: inserted_private_message.ap_id.clone(), + ap_id: Url::parse(&format!( + "https://lemmy-alpha/private_message/{}", + inserted_private_message.id + ))? + .into(), local: true, }; - let read_private_message = PrivateMessage::read(pool, inserted_private_message.id) - .await - .unwrap(); + let read_private_message = PrivateMessage::read(pool, inserted_private_message.id).await?; let private_message_update_form = PrivateMessageUpdateForm { content: Some("A test private message".into()), @@ -151,8 +153,7 @@ mod tests { inserted_private_message.id, &private_message_update_form, ) - .await - .unwrap(); + .await?; let deleted_private_message = PrivateMessage::update( pool, @@ -162,8 +163,7 @@ mod tests { ..Default::default() }, ) - .await - .unwrap(); + .await?; let marked_read_private_message = PrivateMessage::update( pool, inserted_private_message.id, @@ -172,16 +172,17 @@ mod tests { ..Default::default() }, ) - .await - .unwrap(); - Person::delete(pool, inserted_creator.id).await.unwrap(); - Person::delete(pool, inserted_recipient.id).await.unwrap(); - Instance::delete(pool, inserted_instance.id).await.unwrap(); + .await?; + Person::delete(pool, inserted_creator.id).await?; + Person::delete(pool, inserted_recipient.id).await?; + Instance::delete(pool, inserted_instance.id).await?; assert_eq!(expected_private_message, read_private_message); assert_eq!(expected_private_message, updated_private_message); assert_eq!(expected_private_message, inserted_private_message); assert!(deleted_private_message.deleted); assert!(marked_read_private_message.read); + + Ok(()) } } diff --git a/crates/db_schema/src/impls/private_message_report.rs b/crates/db_schema/src/impls/private_message_report.rs index ca2187960..0d5876659 100644 --- a/crates/db_schema/src/impls/private_message_report.rs +++ b/crates/db_schema/src/impls/private_message_report.rs @@ -1,5 +1,5 @@ use crate::{ - newtypes::{PersonId, PrivateMessageReportId}, + newtypes::{PersonId, PrivateMessageId, PrivateMessageReportId}, schema::private_message_report::dsl::{private_message_report, resolved, resolver_id, updated}, source::private_message_report::{PrivateMessageReport, PrivateMessageReportForm}, traits::Reportable, @@ -17,6 +17,7 @@ use diesel_async::RunQueryDsl; impl Reportable for PrivateMessageReport { type Form = PrivateMessageReportForm; type IdType = PrivateMessageReportId; + type ObjectIdType = PrivateMessageId; async fn report( pool: &mut DbPool<'_>, @@ -45,6 +46,15 @@ impl Reportable for PrivateMessageReport { .await } + // TODO: this is unused because private message doesn't have remove handler + async fn resolve_all_for_object( + _pool: &mut DbPool<'_>, + _pm_id_: PrivateMessageId, + _by_resolver_id: PersonId, + ) -> Result { + Err(Error::NotFound) + } + async fn unresolve( pool: &mut DbPool<'_>, report_id: Self::IdType, diff --git a/crates/db_schema/src/impls/registration_application.rs b/crates/db_schema/src/impls/registration_application.rs index c4df7ba69..d9777919d 100644 --- a/crates/db_schema/src/impls/registration_application.rs +++ b/crates/db_schema/src/impls/registration_application.rs @@ -1,5 +1,5 @@ use crate::{ - newtypes::LocalUserId, + newtypes::{LocalUserId, RegistrationApplicationId}, schema::registration_application::dsl::{local_user_id, registration_application}, source::registration_application::{ RegistrationApplication, @@ -16,7 +16,7 @@ use diesel_async::RunQueryDsl; impl Crud for RegistrationApplication { type InsertForm = 
RegistrationApplicationInsertForm; type UpdateForm = RegistrationApplicationUpdateForm; - type IdType = i32; + type IdType = RegistrationApplicationId; async fn create(pool: &mut DbPool<'_>, form: &Self::InsertForm) -> Result { let conn = &mut get_conn(pool).await?; @@ -47,7 +47,7 @@ impl RegistrationApplication { let conn = &mut get_conn(pool).await?; registration_application .filter(local_user_id.eq(local_user_id_)) - .first::(conn) + .first(conn) .await } } diff --git a/crates/db_schema/src/impls/secret.rs b/crates/db_schema/src/impls/secret.rs index f21c6c487..bfff270b6 100644 --- a/crates/db_schema/src/impls/secret.rs +++ b/crates/db_schema/src/impls/secret.rs @@ -13,8 +13,8 @@ impl Secret { Self::read_secrets(pool).await } - async fn read_secrets(pool: &mut DbPool<'_>) -> Result { + async fn read_secrets(pool: &mut DbPool<'_>) -> Result { let conn = &mut get_conn(pool).await?; - secret.first::(conn).await + secret.first(conn).await } } diff --git a/crates/db_schema/src/impls/site.rs b/crates/db_schema/src/impls/site.rs index 7e9329afb..8f57647a3 100644 --- a/crates/db_schema/src/impls/site.rs +++ b/crates/db_schema/src/impls/site.rs @@ -1,6 +1,6 @@ use crate::{ newtypes::{DbUrl, InstanceId, SiteId}, - schema::site::dsl::{actor_id, id, instance_id, site}, + schema::{local_site, site}, source::{ actor_language::SiteLanguage, site::{Site, SiteInsertForm, SiteUpdateForm}, @@ -10,6 +10,7 @@ use crate::{ }; use diesel::{dsl::insert_into, result::Error, ExpressionMethods, OptionalExtension, QueryDsl}; use diesel_async::RunQueryDsl; +use lemmy_utils::{error::LemmyResult, LemmyErrorType}; use url::Url; #[async_trait] @@ -20,7 +21,7 @@ impl Crud for Site { /// Use SiteView::read_local, or Site::read_from_apub_id instead async fn read(_pool: &mut DbPool<'_>, _site_id: SiteId) -> Result { - unimplemented!() + Err(Error::NotFound) } async fn create(pool: &mut DbPool<'_>, form: &Self::InsertForm) -> Result { @@ -31,9 +32,9 @@ impl Crud for Site { let conn = &mut get_conn(pool).await?; // Can't do separate insert/update commands because InsertForm/UpdateForm aren't convertible - let site_ = insert_into(site) + let site_ = insert_into(site::table) .values(form) - .on_conflict(actor_id) + .on_conflict(site::actor_id) .do_update() .set(form) .get_result::(conn) @@ -53,7 +54,7 @@ impl Crud for Site { new_site: &Self::UpdateForm, ) -> Result { let conn = &mut get_conn(pool).await?; - diesel::update(site.find(site_id)) + diesel::update(site::table.find(site_id)) .set(new_site) .get_result::(conn) .await @@ -66,9 +67,9 @@ impl Site { _instance_id: InstanceId, ) -> Result, Error> { let conn = &mut get_conn(pool).await?; - site - .filter(instance_id.eq(_instance_id)) - .get_result(conn) + site::table + .filter(site::instance_id.eq(_instance_id)) + .first(conn) .await .optional() } @@ -78,17 +79,20 @@ impl Site { ) -> Result, Error> { let conn = &mut get_conn(pool).await?; - site - .filter(actor_id.eq(object_id)) - .first::(conn) + site::table + .filter(site::actor_id.eq(object_id)) + .first(conn) .await .optional() - .map(Into::into) } pub async fn read_remote_sites(pool: &mut DbPool<'_>) -> Result, Error> { let conn = &mut get_conn(pool).await?; - site.order_by(id).offset(1).get_results::(conn).await + site::table + .order_by(site::id) + .offset(1) + .get_results::(conn) + .await } /// Instance actor is at the root path, so we simply need to clear the path and other unnecessary @@ -99,4 +103,18 @@ impl Site { url.set_query(None); url } + + pub async fn read_local(pool: &mut DbPool<'_>) -> LemmyResult { + 
let conn = &mut get_conn(pool).await?; + + Ok( + site::table + .inner_join(local_site::table) + .select(site::all_columns) + .first(conn) + .await + .optional()? + .ok_or(LemmyErrorType::LocalSiteNotSetup)?, + ) + } } diff --git a/crates/db_schema/src/impls/tagline.rs b/crates/db_schema/src/impls/tagline.rs index be4860e17..aa5841020 100644 --- a/crates/db_schema/src/impls/tagline.rs +++ b/crates/db_schema/src/impls/tagline.rs @@ -1,58 +1,59 @@ use crate::{ - newtypes::LocalSiteId, - schema::tagline::dsl::{local_site_id, tagline}, - source::tagline::{Tagline, TaglineForm}, - utils::{get_conn, DbPool}, + newtypes::TaglineId, + schema::tagline::dsl::{published, tagline}, + source::tagline::{Tagline, TaglineInsertForm, TaglineUpdateForm}, + traits::Crud, + utils::{get_conn, limit_and_offset, DbPool}, }; use diesel::{insert_into, result::Error, ExpressionMethods, QueryDsl}; -use diesel_async::{AsyncPgConnection, RunQueryDsl}; +use diesel_async::RunQueryDsl; -impl Tagline { - pub async fn replace( - pool: &mut DbPool<'_>, - for_local_site_id: LocalSiteId, - list_content: Option>, - ) -> Result, Error> { +#[async_trait] +impl Crud for Tagline { + type InsertForm = TaglineInsertForm; + type UpdateForm = TaglineUpdateForm; + type IdType = TaglineId; + + async fn create(pool: &mut DbPool<'_>, form: &Self::InsertForm) -> Result { let conn = &mut get_conn(pool).await?; - if let Some(list) = list_content { - conn - .build_transaction() - .run(|conn| { - Box::pin(async move { - Self::clear(conn).await?; - - for item in list { - let form = TaglineForm { - local_site_id: for_local_site_id, - content: item, - updated: None, - }; - insert_into(tagline) - .values(form) - .get_result::(conn) - .await?; - } - Self::get_all(&mut conn.into(), for_local_site_id).await - }) as _ - }) - .await - } else { - Self::get_all(&mut conn.into(), for_local_site_id).await - } + insert_into(tagline) + .values(form) + .get_result::(conn) + .await } - async fn clear(conn: &mut AsyncPgConnection) -> Result { - diesel::delete(tagline).execute(conn).await - } - - pub async fn get_all( + async fn update( pool: &mut DbPool<'_>, - for_local_site_id: LocalSiteId, - ) -> Result, Error> { + tagline_id: TaglineId, + new_tagline: &Self::UpdateForm, + ) -> Result { let conn = &mut get_conn(pool).await?; - tagline - .filter(local_site_id.eq(for_local_site_id)) - .get_results::(conn) + diesel::update(tagline.find(tagline_id)) + .set(new_tagline) + .get_result::(conn) .await } } + +impl Tagline { + pub async fn list( + pool: &mut DbPool<'_>, + page: Option, + limit: Option, + ) -> Result, Error> { + let conn = &mut get_conn(pool).await?; + let (limit, offset) = limit_and_offset(page, limit)?; + tagline + .order(published.desc()) + .offset(offset) + .limit(limit) + .get_results::(conn) + .await + } + + pub async fn get_random(pool: &mut DbPool<'_>) -> Result { + let conn = &mut get_conn(pool).await?; + sql_function!(fn random() -> Text); + tagline.order(random()).limit(1).first::(conn).await + } +} diff --git a/crates/db_schema/src/lib.rs b/crates/db_schema/src/lib.rs index 1aa0e4e88..dbadaaf95 100644 --- a/crates/db_schema/src/lib.rs +++ b/crates/db_schema/src/lib.rs @@ -24,14 +24,18 @@ pub mod aggregates; #[cfg(feature = "full")] pub mod impls; pub mod newtypes; +pub mod sensitive; #[cfg(feature = "full")] #[rustfmt::skip] -#[allow(clippy::wildcard_imports)] pub mod schema; #[cfg(feature = "full")] pub mod aliases { use crate::schema::{community_moderator, person}; - diesel::alias!(person as person1: Person1, person as person2: Person2, 
community_moderator as community_moderator1: CommunityModerator1); + diesel::alias!( + person as person1: Person1, + person as person2: Person2, + community_moderator as community_moderator1: CommunityModerator1 + ); } pub mod source; #[cfg(feature = "full")] @@ -39,24 +43,27 @@ pub mod traits; #[cfg(feature = "full")] pub mod utils; +#[cfg(feature = "full")] +mod schema_setup; + use serde::{Deserialize, Serialize}; -use strum_macros::{Display, EnumString}; +use strum::{Display, EnumString}; #[cfg(feature = "full")] use ts_rs::TS; #[derive( - EnumString, Display, Debug, Serialize, Deserialize, Clone, Copy, PartialEq, Eq, Default, + EnumString, Display, Debug, Serialize, Deserialize, Clone, Copy, PartialEq, Eq, Default, Hash, )] #[cfg_attr(feature = "full", derive(DbEnum, TS))] #[cfg_attr( feature = "full", - ExistingTypePath = "crate::schema::sql_types::SortTypeEnum" + ExistingTypePath = "crate::schema::sql_types::PostSortTypeEnum" )] #[cfg_attr(feature = "full", DbValueStyle = "verbatim")] #[cfg_attr(feature = "full", ts(export))] // TODO add the controversial and scaled rankings to the doc below /// The post sort types. See here for descriptions: https://join-lemmy.org/docs/en/users/03-votes-and-ranking.html -pub enum SortType { +pub enum PostSortType { #[default] Active, Hot, @@ -79,11 +86,19 @@ pub enum SortType { Scaled, } -#[derive(EnumString, Display, Debug, Serialize, Deserialize, Clone, Copy)] -#[cfg_attr(feature = "full", derive(TS))] +#[derive( + EnumString, Display, Debug, Serialize, Deserialize, Clone, Copy, PartialEq, Eq, Default, Hash, +)] +#[cfg_attr(feature = "full", derive(DbEnum, TS))] +#[cfg_attr( + feature = "full", + ExistingTypePath = "crate::schema::sql_types::CommentSortTypeEnum" +)] +#[cfg_attr(feature = "full", DbValueStyle = "verbatim")] #[cfg_attr(feature = "full", ts(export))] /// The comment sort types. See here for descriptions: https://join-lemmy.org/docs/en/users/03-votes-and-ranking.html pub enum CommentSortType { + #[default] Hot, Top, New, @@ -92,7 +107,7 @@ pub enum CommentSortType { } #[derive( - EnumString, Display, Debug, Serialize, Deserialize, Clone, Copy, PartialEq, Eq, Default, + EnumString, Display, Debug, Serialize, Deserialize, Clone, Copy, PartialEq, Eq, Default, Hash, )] #[cfg_attr(feature = "full", derive(DbEnum, TS))] #[cfg_attr( @@ -114,7 +129,9 @@ pub enum ListingType { ModeratorView, } -#[derive(EnumString, Display, Debug, Serialize, Deserialize, Clone, Copy, PartialEq, Eq)] +#[derive( + EnumString, Display, Debug, Serialize, Deserialize, Clone, Copy, PartialEq, Eq, Default, Hash, +)] #[cfg_attr(feature = "full", derive(DbEnum, TS))] #[cfg_attr( feature = "full", @@ -129,10 +146,13 @@ pub enum RegistrationMode { /// Open, but pending approval of a registration application. RequireApplication, /// Open to all. + #[default] Open, } -#[derive(EnumString, Display, Debug, Serialize, Deserialize, Clone, Copy, PartialEq, Eq)] +#[derive( + EnumString, Display, Debug, Serialize, Deserialize, Default, Clone, Copy, PartialEq, Eq, Hash, +)] #[cfg_attr(feature = "full", derive(DbEnum, TS))] #[cfg_attr( feature = "full", @@ -143,6 +163,7 @@ pub enum RegistrationMode { /// A post-view mode that changes how multiple post listings look. pub enum PostListingMode { /// A compact, list-type view. + #[default] List, /// A larger card-type view. 
Card, @@ -150,7 +171,7 @@ pub enum PostListingMode { SmallCard, } -#[derive(EnumString, Display, Debug, Serialize, Deserialize, Clone, Copy)] +#[derive(EnumString, Display, Debug, Serialize, Deserialize, Clone, Copy, PartialEq, Eq, Hash)] #[cfg_attr(feature = "full", derive(TS))] #[cfg_attr(feature = "full", ts(export))] /// The type of content returned from a search. @@ -160,10 +181,9 @@ pub enum SearchType { Posts, Communities, Users, - Url, } -#[derive(EnumString, Display, Debug, PartialEq, Eq, Serialize, Deserialize, Clone, Copy)] +#[derive(EnumString, Display, Debug, PartialEq, Eq, Serialize, Deserialize, Clone, Copy, Hash)] #[cfg_attr(feature = "full", derive(TS))] #[cfg_attr(feature = "full", ts(export))] /// A type / status for a community subscribe. @@ -173,7 +193,7 @@ pub enum SubscribedType { Pending, } -#[derive(EnumString, Display, Debug, Serialize, Deserialize, Clone, Copy, PartialEq, Eq)] +#[derive(EnumString, Display, Debug, Serialize, Deserialize, Clone, Copy, PartialEq, Eq, Hash)] #[cfg_attr(feature = "full", derive(TS))] #[cfg_attr(feature = "full", ts(export))] /// A list of possible types for the various modlog actions. @@ -197,7 +217,7 @@ pub enum ModlogActionType { } #[derive( - EnumString, Display, Debug, Serialize, Deserialize, Clone, Copy, Default, PartialEq, Eq, + EnumString, Display, Debug, Serialize, Deserialize, Clone, Copy, Default, PartialEq, Eq, Hash, )] #[cfg_attr(feature = "full", derive(TS))] #[cfg_attr(feature = "full", ts(export))] @@ -209,3 +229,54 @@ pub enum PostFeatureType { /// Features to the top of the community. Community, } + +#[derive( + EnumString, Display, Debug, Serialize, Deserialize, Clone, Copy, PartialEq, Eq, Default, Hash, +)] +#[cfg_attr(feature = "full", derive(DbEnum, TS))] +#[cfg_attr( + feature = "full", + ExistingTypePath = "crate::schema::sql_types::CommunityVisibility" +)] +#[cfg_attr(feature = "full", DbValueStyle = "verbatim")] +#[cfg_attr(feature = "full", ts(export))] +/// Defines who can browse and interact with content in a community. +/// +/// TODO: Also use this to define private communities +pub enum CommunityVisibility { + /// Public community, any local or federated user can interact. + #[default] + Public, + /// Unfederated community, only local users can interact. + LocalOnly, +} + +#[derive( + EnumString, Display, Debug, Serialize, Deserialize, Clone, Copy, PartialEq, Eq, Default, Hash, +)] +#[cfg_attr(feature = "full", derive(DbEnum, TS))] +#[cfg_attr( + feature = "full", + ExistingTypePath = "crate::schema::sql_types::FederationModeEnum" +)] +#[cfg_attr(feature = "full", DbValueStyle = "verbatim")] +#[cfg_attr(feature = "full", ts(export))] +/// The federation mode for an item +pub enum FederationMode { + #[default] + /// Allows all + All, + /// Allows only local + Local, + /// Disables + Disable, +} + +/// Wrapper for assert_eq! macro. Checks that vec matches the given length, and prints the +/// vec on failure. +#[macro_export] +macro_rules! 
assert_length { + ($len:expr, $vec:expr) => {{ + assert_eq!($len, $vec.len(), "Vec has wrong length: {:?}", $vec) + }}; +} diff --git a/crates/db_schema/src/newtypes.rs b/crates/db_schema/src/newtypes.rs index 555b98256..fe1febef5 100644 --- a/crates/db_schema/src/newtypes.rs +++ b/crates/db_schema/src/newtypes.rs @@ -6,6 +6,14 @@ use activitypub_federation::{ traits::Object, }; #[cfg(feature = "full")] +use diesel::{ + backend::Backend, + deserialize::FromSql, + pg::Pg, + serialize::{Output, ToSql}, + sql_types::Text, +}; +#[cfg(feature = "full")] use diesel_ltree::Ltree; use serde::{Deserialize, Serialize}; use std::{ @@ -77,18 +85,6 @@ impl fmt::Display for PrivateMessageId { /// The person mention id. pub struct PersonMentionId(i32); -#[derive(Debug, Copy, Clone, Hash, Eq, PartialEq, Serialize, Deserialize, Default)] -#[cfg_attr(feature = "full", derive(DieselNewType, TS))] -#[cfg_attr(feature = "full", ts(export))] -/// The person block id. -pub struct PersonBlockId(i32); - -#[derive(Debug, Copy, Clone, Hash, Eq, PartialEq, Serialize, Deserialize, Default)] -#[cfg_attr(feature = "full", derive(DieselNewType, TS))] -#[cfg_attr(feature = "full", ts(export))] -/// The community block id. -pub struct CommunityBlockId(i32); - #[derive(Debug, Copy, Clone, Hash, Eq, PartialEq, Serialize, Deserialize, Default)] #[cfg_attr(feature = "full", derive(DieselNewType, TS))] #[cfg_attr(feature = "full", ts(export))] @@ -111,7 +107,7 @@ pub struct PrivateMessageReportId(i32); #[cfg_attr(feature = "full", derive(DieselNewType, TS))] #[cfg_attr(feature = "full", ts(export))] /// The site id. -pub struct SiteId(i32); +pub struct SiteId(pub i32); #[derive(Debug, Copy, Clone, Hash, Eq, PartialEq, Serialize, Deserialize, Default)] #[cfg_attr(feature = "full", derive(DieselNewType, TS))] @@ -119,35 +115,26 @@ pub struct SiteId(i32); /// The language id. pub struct LanguageId(pub i32); -#[derive(Debug, Copy, Clone, Hash, Eq, PartialEq, Serialize, Deserialize, Default)] -#[cfg_attr(feature = "full", derive(DieselNewType))] -pub struct LocalUserLanguageId(pub i32); - -#[derive(Debug, Copy, Clone, Hash, Eq, PartialEq, Serialize, Deserialize, Default)] -#[cfg_attr(feature = "full", derive(DieselNewType))] -pub struct SiteLanguageId(pub i32); - -#[derive(Debug, Copy, Clone, Hash, Eq, PartialEq, Serialize, Deserialize, Default)] -#[cfg_attr(feature = "full", derive(DieselNewType))] -pub struct CommunityLanguageId(pub i32); - #[derive(Debug, Copy, Clone, Hash, Eq, PartialEq, Serialize, Deserialize, Default)] #[cfg_attr(feature = "full", derive(DieselNewType, TS))] #[cfg_attr(feature = "full", ts(export))] /// The comment reply id. pub struct CommentReplyId(i32); -#[derive(Debug, Copy, Clone, Hash, Eq, PartialEq, Serialize, Deserialize, Default)] -#[cfg_attr(feature = "full", derive(DieselNewType, TS))] -#[cfg_attr(feature = "full", ts(export))] -/// The Image Upload id. -pub struct ImageUploadId(i32); - -#[derive(Debug, Copy, Clone, Hash, Eq, PartialEq, Serialize, Deserialize, Default)] +#[derive( + Debug, Copy, Clone, Hash, Eq, PartialEq, Serialize, Deserialize, Default, Ord, PartialOrd, +)] #[cfg_attr(feature = "full", derive(DieselNewType, TS))] #[cfg_attr(feature = "full", ts(export))] /// The instance id. 
-pub struct InstanceId(i32); +pub struct InstanceId(pub i32); + +#[derive( + Debug, Copy, Clone, Hash, Eq, PartialEq, Serialize, Deserialize, Default, PartialOrd, Ord, +)] +#[cfg_attr(feature = "full", derive(DieselNewType, TS))] +#[cfg_attr(feature = "full", ts(export))] +pub struct ActivityId(pub i64); #[derive(Debug, Copy, Clone, Hash, Eq, PartialEq, Serialize, Deserialize, Default)] #[cfg_attr(feature = "full", derive(DieselNewType, TS))] @@ -161,6 +148,24 @@ pub struct LocalSiteId(i32); /// The custom emoji id. pub struct CustomEmojiId(i32); +#[derive(Debug, Copy, Clone, Hash, Eq, PartialEq, Serialize, Deserialize, Default)] +#[cfg_attr(feature = "full", derive(DieselNewType, TS))] +#[cfg_attr(feature = "full", ts(export))] +/// The tagline id. +pub struct TaglineId(i32); + +#[derive(Debug, Copy, Clone, Hash, Eq, PartialEq, Serialize, Deserialize, Default)] +#[cfg_attr(feature = "full", derive(DieselNewType, TS))] +#[cfg_attr(feature = "full", ts(export))] +/// The registration application id. +pub struct RegistrationApplicationId(i32); + +#[derive(Debug, Copy, Clone, Hash, Eq, PartialEq, Serialize, Deserialize, Default)] +#[cfg_attr(feature = "full", derive(DieselNewType, TS))] +#[cfg_attr(feature = "full", ts(export))] +/// The oauth provider id. +pub struct OAuthProviderId(pub i32); + #[cfg(feature = "full")] #[derive(Serialize, Deserialize)] #[serde(remote = "Ltree")] @@ -185,14 +190,14 @@ impl Display for DbUrl { } } -// the project doesnt compile with From -#[allow(clippy::from_over_into)] +// the project doesn't compile with From +#[expect(clippy::from_over_into)] impl Into for Url { fn into(self) -> DbUrl { DbUrl(Box::new(self)) } } -#[allow(clippy::from_over_into)] +#[expect(clippy::from_over_into)] impl Into for DbUrl { fn into(self) -> Url { *self.0 @@ -256,6 +261,35 @@ impl TS for DbUrl { } } +#[cfg(feature = "full")] +impl ToSql for DbUrl { + fn to_sql(&self, out: &mut Output) -> diesel::serialize::Result { + >::to_sql(&self.0.to_string(), &mut out.reborrow()) + } +} + +#[cfg(feature = "full")] +impl FromSql for DbUrl +where + String: FromSql, +{ + fn from_sql(value: DB::RawValue<'_>) -> diesel::deserialize::Result { + let str = String::from_sql(value)?; + Ok(DbUrl(Box::new(Url::parse(&str)?))) + } +} + +#[cfg(feature = "full")] +impl From> for DbUrl +where + Kind: Object + Send + 'static, + for<'de2> ::Kind: serde::Deserialize<'de2>, +{ + fn from(id: ObjectId) -> Self { + DbUrl(Box::new(id.into())) + } +} + impl InstanceId { pub fn inner(self) -> i32 { self.0 diff --git a/crates/db_schema/src/schema.rs b/crates/db_schema/src/schema.rs index 02b0845dc..346052412 100644 --- a/crates/db_schema/src/schema.rs +++ b/crates/db_schema/src/schema.rs @@ -5,6 +5,18 @@ pub mod sql_types { #[diesel(postgres_type(name = "actor_type_enum"))] pub struct ActorTypeEnum; + #[derive(diesel::sql_types::SqlType)] + #[diesel(postgres_type(name = "comment_sort_type_enum"))] + pub struct CommentSortTypeEnum; + + #[derive(diesel::sql_types::SqlType)] + #[diesel(postgres_type(name = "community_visibility"))] + pub struct CommunityVisibility; + + #[derive(diesel::sql_types::SqlType)] + #[diesel(postgres_type(name = "federation_mode_enum"))] + pub struct FederationModeEnum; + #[derive(diesel::sql_types::SqlType)] #[diesel(postgres_type(name = "listing_type_enum"))] pub struct ListingTypeEnum; @@ -18,12 +30,12 @@ pub mod sql_types { pub struct PostListingModeEnum; #[derive(diesel::sql_types::SqlType)] - #[diesel(postgres_type(name = "registration_mode_enum"))] - pub struct RegistrationModeEnum; + 
#[diesel(postgres_type(name = "post_sort_type_enum"))] + pub struct PostSortTypeEnum; #[derive(diesel::sql_types::SqlType)] - #[diesel(postgres_type(name = "sort_type_enum"))] - pub struct SortTypeEnum; + #[diesel(postgres_type(name = "registration_mode_enum"))] + pub struct RegistrationModeEnum; } diesel::table! { @@ -65,8 +77,7 @@ diesel::table! { } diesel::table! { - captcha_answer (id) { - id -> Int4, + captcha_answer (uuid) { uuid -> Uuid, answer -> Text, published -> Timestamptz, @@ -96,8 +107,7 @@ diesel::table! { } diesel::table! { - comment_aggregates (id) { - id -> Int4, + comment_aggregates (comment_id) { comment_id -> Int4, score -> Int8, upvotes -> Int8, @@ -110,11 +120,9 @@ diesel::table! { } diesel::table! { - comment_like (id) { - id -> Int4, + comment_like (person_id, comment_id) { person_id -> Int4, comment_id -> Int4, - post_id -> Int4, score -> Int2, published -> Timestamptz, } @@ -145,8 +153,7 @@ diesel::table! { } diesel::table! { - comment_saved (id) { - id -> Int4, + comment_saved (person_id, comment_id) { comment_id -> Int4, person_id -> Int4, published -> Timestamptz, @@ -154,13 +161,16 @@ diesel::table! { } diesel::table! { + use diesel::sql_types::*; + use super::sql_types::CommunityVisibility; + community (id) { id -> Int4, #[max_length = 255] name -> Varchar, #[max_length = 255] title -> Varchar, - description -> Nullable, + sidebar -> Nullable, removed -> Bool, published -> Timestamptz, updated -> Nullable, @@ -175,11 +185,9 @@ diesel::table! { icon -> Nullable, banner -> Nullable, #[max_length = 255] - followers_url -> Varchar, + followers_url -> Nullable, #[max_length = 255] inbox_url -> Varchar, - #[max_length = 255] - shared_inbox_url -> Nullable, hidden -> Bool, posting_restricted_to_mods -> Bool, instance_id -> Int4, @@ -187,12 +195,14 @@ diesel::table! { moderators_url -> Nullable, #[max_length = 255] featured_url -> Nullable, + visibility -> CommunityVisibility, + #[max_length = 150] + description -> Nullable, } } diesel::table! { - community_aggregates (id) { - id -> Int4, + community_aggregates (community_id) { community_id -> Int4, subscribers -> Int8, posts -> Int8, @@ -203,12 +213,12 @@ diesel::table! { users_active_month -> Int8, users_active_half_year -> Int8, hot_rank -> Float8, + subscribers_local -> Int8, } } diesel::table! { - community_block (id) { - id -> Int4, + community_block (person_id, community_id) { person_id -> Int4, community_id -> Int4, published -> Timestamptz, @@ -216,8 +226,7 @@ diesel::table! { } diesel::table! { - community_follower (id) { - id -> Int4, + community_follower (person_id, community_id) { community_id -> Int4, person_id -> Int4, published -> Timestamptz, @@ -226,16 +235,14 @@ diesel::table! { } diesel::table! { - community_language (id) { - id -> Int4, + community_language (community_id, language_id) { community_id -> Int4, language_id -> Int4, } } diesel::table! { - community_moderator (id) { - id -> Int4, + community_moderator (person_id, community_id) { community_id -> Int4, person_id -> Int4, published -> Timestamptz, @@ -243,8 +250,7 @@ diesel::table! { } diesel::table! { - community_person_ban (id) { - id -> Int4, + community_person_ban (person_id, community_id) { community_id -> Int4, person_id -> Int4, published -> Timestamptz, @@ -255,7 +261,6 @@ diesel::table! { diesel::table! { custom_emoji (id) { id -> Int4, - local_site_id -> Int4, #[max_length = 128] shortcode -> Varchar, image_url -> Text, @@ -267,8 +272,7 @@ diesel::table! { } diesel::table! 
{ - custom_emoji_keyword (id) { - id -> Int4, + custom_emoji_keyword (custom_emoji_id, keyword) { custom_emoji_id -> Int4, #[max_length = 128] keyword -> Varchar, @@ -286,8 +290,7 @@ diesel::table! { } diesel::table! { - federation_allowlist (id) { - id -> Int4, + federation_allowlist (instance_id) { instance_id -> Int4, published -> Timestamptz, updated -> Nullable, @@ -295,8 +298,7 @@ diesel::table! { } diesel::table! { - federation_blocklist (id) { - id -> Int4, + federation_blocklist (instance_id) { instance_id -> Int4, published -> Timestamptz, updated -> Nullable, @@ -304,22 +306,21 @@ diesel::table! { } diesel::table! { - federation_queue_state (id) { - id -> Int4, + federation_queue_state (instance_id) { instance_id -> Int4, - last_successful_id -> Int8, + last_successful_id -> Nullable, fail_count -> Int4, - last_retry -> Timestamptz, + last_retry -> Nullable, + last_successful_published_time -> Nullable, } } diesel::table! { - image_upload (id) { - id -> Int4, - local_user_id -> Int4, - pictrs_alias -> Text, - pictrs_delete_token -> Text, - published -> Timestamptz, + image_details (link) { + link -> Text, + width -> Int4, + height -> Int4, + content_type -> Text, } } @@ -338,8 +339,7 @@ diesel::table! { } diesel::table! { - instance_block (id) { - id -> Int4, + instance_block (person_id, instance_id) { person_id -> Int4, instance_id -> Int4, published -> Timestamptz, @@ -355,17 +355,28 @@ diesel::table! { } } +diesel::table! { + local_image (pictrs_alias) { + local_user_id -> Nullable, + pictrs_alias -> Text, + pictrs_delete_token -> Text, + published -> Timestamptz, + } +} + diesel::table! { use diesel::sql_types::*; use super::sql_types::ListingTypeEnum; use super::sql_types::RegistrationModeEnum; + use super::sql_types::PostListingModeEnum; + use super::sql_types::PostSortTypeEnum; + use super::sql_types::CommentSortTypeEnum; + use super::sql_types::FederationModeEnum; local_site (id) { id -> Int4, site_id -> Int4, site_setup -> Bool, - enable_downvotes -> Bool, - enable_nsfw -> Bool, community_creation_admin_only -> Bool, require_email_verification -> Bool, application_question -> Nullable, @@ -385,12 +396,20 @@ diesel::table! { updated -> Nullable, registration_mode -> RegistrationModeEnum, reports_email_admins -> Bool, + federation_signed_fetch -> Bool, + default_post_listing_mode -> PostListingModeEnum, + default_post_sort_type -> PostSortTypeEnum, + default_comment_sort_type -> CommentSortTypeEnum, + oauth_registration -> Bool, + post_upvotes -> FederationModeEnum, + post_downvotes -> FederationModeEnum, + comment_upvotes -> FederationModeEnum, + comment_downvotes -> FederationModeEnum, } } diesel::table! { - local_site_rate_limit (id) { - id -> Int4, + local_site_rate_limit (local_site_id) { local_site_id -> Int4, message -> Int4, message_per_second -> Int4, @@ -411,26 +430,35 @@ diesel::table! { } } +diesel::table! { + local_site_url_blocklist (id) { + id -> Int4, + url -> Text, + published -> Timestamptz, + updated -> Nullable, + } +} + diesel::table! 
{ use diesel::sql_types::*; - use super::sql_types::SortTypeEnum; + use super::sql_types::PostSortTypeEnum; use super::sql_types::ListingTypeEnum; use super::sql_types::PostListingModeEnum; + use super::sql_types::CommentSortTypeEnum; local_user (id) { id -> Int4, person_id -> Int4, - password_encrypted -> Text, + password_encrypted -> Nullable, email -> Nullable, show_nsfw -> Bool, theme -> Text, - default_sort_type -> SortTypeEnum, + default_post_sort_type -> PostSortTypeEnum, default_listing_type -> ListingTypeEnum, #[max_length = 20] interface_language -> Varchar, show_avatars -> Bool, send_notifications_to_email -> Bool, - show_scores -> Bool, show_bot_accounts -> Bool, show_read_posts -> Bool, email_verified -> Bool, @@ -438,7 +466,6 @@ diesel::table! { totp_2fa_secret -> Nullable, open_links_in_new_tab -> Bool, blur_nsfw -> Bool, - auto_expand -> Bool, infinite_scroll_enabled -> Bool, admin -> Bool, post_listing_mode -> PostListingModeEnum, @@ -446,20 +473,30 @@ diesel::table! { enable_keyboard_navigation -> Bool, enable_animated_images -> Bool, enable_private_messages -> Bool, + collapse_bot_comments -> Bool, + default_comment_sort_type -> CommentSortTypeEnum, } } diesel::table! { - local_user_language (id) { - id -> Int4, + local_user_language (local_user_id, language_id) { local_user_id -> Int4, language_id -> Int4, } } diesel::table! { - login_token (id) { - id -> Int4, + local_user_vote_display_mode (local_user_id) { + local_user_id -> Int4, + score -> Bool, + upvotes -> Bool, + downvotes -> Bool, + upvote_percentage -> Bool, + } +} + +diesel::table! { + login_token (token) { token -> Text, user_id -> Int4, published -> Timestamptz, @@ -589,6 +626,36 @@ diesel::table! { } } +diesel::table! { + oauth_account (oauth_provider_id, local_user_id) { + local_user_id -> Int4, + oauth_provider_id -> Int4, + oauth_user_id -> Text, + published -> Timestamptz, + updated -> Nullable, + } +} + +diesel::table! { + oauth_provider (id) { + id -> Int4, + display_name -> Text, + issuer -> Text, + authorization_endpoint -> Text, + token_endpoint -> Text, + userinfo_endpoint -> Text, + id_claim -> Text, + client_id -> Text, + client_secret -> Text, + scopes -> Text, + auto_verify_email -> Bool, + account_linking_enabled -> Bool, + enabled -> Bool, + published -> Timestamptz, + updated -> Nullable, + } +} + diesel::table! { password_reset_request (id) { id -> Int4, @@ -620,8 +687,6 @@ diesel::table! { deleted -> Bool, #[max_length = 255] inbox_url -> Varchar, - #[max_length = 255] - shared_inbox_url -> Nullable, matrix_user_id -> Nullable, bot_account -> Bool, ban_expires -> Nullable, @@ -630,8 +695,7 @@ diesel::table! { } diesel::table! { - person_aggregates (id) { - id -> Int4, + person_aggregates (person_id) { person_id -> Int4, post_count -> Int8, post_score -> Int8, @@ -641,16 +705,14 @@ diesel::table! { } diesel::table! { - person_ban (id) { - id -> Int4, + person_ban (person_id) { person_id -> Int4, published -> Timestamptz, } } diesel::table! { - person_block (id) { - id -> Int4, + person_block (person_id, target_id) { person_id -> Int4, target_id -> Int4, published -> Timestamptz, @@ -658,8 +720,7 @@ diesel::table! { } diesel::table! { - person_follower (id) { - id -> Int4, + person_follower (follower_id, person_id) { person_id -> Int4, follower_id -> Int4, published -> Timestamptz, @@ -678,8 +739,7 @@ diesel::table! { } diesel::table! 
{ - person_post_aggregates (id) { - id -> Int4, + person_post_aggregates (person_id, post_id) { person_id -> Int4, post_id -> Int4, read_comments -> Int8, @@ -692,7 +752,7 @@ diesel::table! { id -> Int4, #[max_length = 200] name -> Varchar, - #[max_length = 512] + #[max_length = 2000] url -> Nullable, body -> Nullable, creator_id -> Int4, @@ -713,12 +773,14 @@ diesel::table! { language_id -> Int4, featured_community -> Bool, featured_local -> Bool, + url_content_type -> Nullable, + alt_text -> Nullable, + scheduled_publish_time -> Nullable, } } diesel::table! { - post_aggregates (id) { - id -> Int4, + post_aggregates (post_id) { post_id -> Int4, comments -> Int8, score -> Int8, @@ -740,8 +802,15 @@ diesel::table! { } diesel::table! { - post_like (id) { - id -> Int4, + post_hide (person_id, post_id) { + post_id -> Int4, + person_id -> Int4, + published -> Timestamptz, + } +} + +diesel::table! { + post_like (person_id, post_id) { post_id -> Int4, person_id -> Int4, score -> Int2, @@ -750,8 +819,7 @@ diesel::table! { } diesel::table! { - post_read (id) { - id -> Int4, + post_read (person_id, post_id) { post_id -> Int4, person_id -> Int4, published -> Timestamptz, @@ -776,8 +844,7 @@ diesel::table! { } diesel::table! { - post_saved (id) { - id -> Int4, + post_saved (person_id, post_id) { post_id -> Int4, person_id -> Int4, published -> Timestamptz, @@ -815,8 +882,7 @@ diesel::table! { } diesel::table! { - received_activity (id) { - id -> Int8, + received_activity (ap_id) { ap_id -> Text, published -> Timestamptz, } @@ -833,6 +899,13 @@ diesel::table! { } } +diesel::table! { + remote_image (link) { + link -> Text, + published -> Timestamptz, + } +} + diesel::table! { secret (id) { id -> Int4, @@ -878,12 +951,12 @@ diesel::table! { private_key -> Nullable, public_key -> Text, instance_id -> Int4, + content_warning -> Nullable, } } diesel::table! { - site_aggregates (id) { - id -> Int4, + site_aggregates (site_id) { site_id -> Int4, users -> Int8, posts -> Int8, @@ -897,8 +970,7 @@ diesel::table! { } diesel::table! { - site_language (id) { - id -> Int4, + site_language (site_id, language_id) { site_id -> Int4, language_id -> Int4, } @@ -907,7 +979,6 @@ diesel::table! { diesel::table! 
{ tagline (id) { id -> Int4, - local_site_id -> Int4, content -> Text, published -> Timestamptz, updated -> Nullable, @@ -926,7 +997,6 @@ diesel::joinable!(comment -> post (post_id)); diesel::joinable!(comment_aggregates -> comment (comment_id)); diesel::joinable!(comment_like -> comment (comment_id)); diesel::joinable!(comment_like -> person (person_id)); -diesel::joinable!(comment_like -> post (post_id)); diesel::joinable!(comment_reply -> comment (comment_id)); diesel::joinable!(comment_reply -> person (recipient_id)); diesel::joinable!(comment_report -> comment (comment_id)); @@ -944,20 +1014,20 @@ diesel::joinable!(community_moderator -> community (community_id)); diesel::joinable!(community_moderator -> person (person_id)); diesel::joinable!(community_person_ban -> community (community_id)); diesel::joinable!(community_person_ban -> person (person_id)); -diesel::joinable!(custom_emoji -> local_site (local_site_id)); diesel::joinable!(custom_emoji_keyword -> custom_emoji (custom_emoji_id)); diesel::joinable!(email_verification -> local_user (local_user_id)); diesel::joinable!(federation_allowlist -> instance (instance_id)); diesel::joinable!(federation_blocklist -> instance (instance_id)); diesel::joinable!(federation_queue_state -> instance (instance_id)); -diesel::joinable!(image_upload -> local_user (local_user_id)); diesel::joinable!(instance_block -> instance (instance_id)); diesel::joinable!(instance_block -> person (person_id)); +diesel::joinable!(local_image -> local_user (local_user_id)); diesel::joinable!(local_site -> site (site_id)); diesel::joinable!(local_site_rate_limit -> local_site (local_site_id)); diesel::joinable!(local_user -> person (person_id)); diesel::joinable!(local_user_language -> language (language_id)); diesel::joinable!(local_user_language -> local_user (local_user_id)); +diesel::joinable!(local_user_vote_display_mode -> local_user (local_user_id)); diesel::joinable!(login_token -> local_user (user_id)); diesel::joinable!(mod_add_community -> community (community_id)); diesel::joinable!(mod_ban_from_community -> community (community_id)); @@ -974,6 +1044,8 @@ diesel::joinable!(mod_remove_community -> person (mod_person_id)); diesel::joinable!(mod_remove_post -> person (mod_person_id)); diesel::joinable!(mod_remove_post -> post (post_id)); diesel::joinable!(mod_transfer_community -> community (community_id)); +diesel::joinable!(oauth_account -> local_user (local_user_id)); +diesel::joinable!(oauth_account -> oauth_provider (oauth_provider_id)); diesel::joinable!(password_reset_request -> local_user (local_user_id)); diesel::joinable!(person -> instance (instance_id)); diesel::joinable!(person_aggregates -> person (person_id)); @@ -989,6 +1061,8 @@ diesel::joinable!(post_aggregates -> community (community_id)); diesel::joinable!(post_aggregates -> instance (instance_id)); diesel::joinable!(post_aggregates -> person (creator_id)); diesel::joinable!(post_aggregates -> post (post_id)); +diesel::joinable!(post_hide -> person (person_id)); +diesel::joinable!(post_hide -> post (post_id)); diesel::joinable!(post_like -> person (person_id)); diesel::joinable!(post_like -> post (post_id)); diesel::joinable!(post_read -> person (person_id)); @@ -1003,7 +1077,6 @@ diesel::joinable!(site -> instance (instance_id)); diesel::joinable!(site_aggregates -> site (site_id)); diesel::joinable!(site_language -> language (language_id)); diesel::joinable!(site_language -> site (site_id)); -diesel::joinable!(tagline -> local_site (local_site_id)); 
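For illustration, since many tables above drop their surrogate `id` column in favor of composite primary keys (for example `post_read (person_id, post_id)`), here is a minimal sketch of how such a table can be queried once the key is a tuple. It is not part of the patch: the standalone `table!` declaration, the function name `read_time`, and the synchronous `PgConnection` are assumptions kept small for brevity, and mapping `Timestamptz` to `chrono::DateTime<Utc>` assumes diesel's `chrono` feature.

use diesel::prelude::*;

diesel::table! {
    post_read (person_id, post_id) {
        post_id -> Int4,
        person_id -> Int4,
        published -> Timestamptz,
    }
}

/// Returns when the given person read the given post, if they ever did.
fn read_time(
    conn: &mut PgConnection,
    person: i32,
    post: i32,
) -> QueryResult<Option<chrono::DateTime<chrono::Utc>>> {
    post_read::table
        // With a composite primary key there is no surrogate id to look up;
        // `find` takes the whole key as a tuple, in primary-key order.
        .find((person, post))
        .select(post_read::published)
        .first(conn)
        .optional()
}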
diesel::allow_tables_to_appear_in_same_query!( admin_purge_comment, @@ -1030,14 +1103,17 @@ diesel::allow_tables_to_appear_in_same_query!( federation_allowlist, federation_blocklist, federation_queue_state, - image_upload, + image_details, instance, instance_block, language, + local_image, local_site, local_site_rate_limit, + local_site_url_blocklist, local_user, local_user_language, + local_user_vote_display_mode, login_token, mod_add, mod_add_community, @@ -1050,6 +1126,8 @@ diesel::allow_tables_to_appear_in_same_query!( mod_remove_community, mod_remove_post, mod_transfer_community, + oauth_account, + oauth_provider, password_reset_request, person, person_aggregates, @@ -1060,6 +1138,7 @@ diesel::allow_tables_to_appear_in_same_query!( person_post_aggregates, post, post_aggregates, + post_hide, post_like, post_read, post_report, @@ -1068,6 +1147,7 @@ diesel::allow_tables_to_appear_in_same_query!( private_message_report, received_activity, registration_application, + remote_image, secret, sent_activity, site, diff --git a/crates/db_schema/src/schema_setup.rs b/crates/db_schema/src/schema_setup.rs new file mode 100644 index 000000000..fb4affa91 --- /dev/null +++ b/crates/db_schema/src/schema_setup.rs @@ -0,0 +1,65 @@ +use anyhow::Context; +use diesel::{connection::SimpleConnection, Connection, PgConnection}; +use diesel_migrations::{EmbeddedMigrations, MigrationHarness}; +use lemmy_utils::error::LemmyError; + +const MIGRATIONS: EmbeddedMigrations = embed_migrations!(); + +/// This SQL code sets up the `r` schema, which contains things that can be safely dropped and +/// replaced instead of being changed using migrations. It may not create or modify things outside +/// of the `r` schema (indicated by `r.` before the name), unless a comment says otherwise. +/// +/// Currently, this code is only run after the server starts and there's at least 1 pending +/// migration to run. This means every time you change something here, you must also create a +/// migration (a blank up.sql file works fine). This behavior will be removed when we implement a +/// better way to avoid useless schema updates and locks. +/// +/// If you add something that depends on something (such as a table) created in a new migration, +/// then down.sql must use `CASCADE` when dropping it. This doesn't need to be fixed in old +/// migrations because the "replaceable-schema" migration runs `DROP SCHEMA IF EXISTS r CASCADE` in +/// down.sql. +const REPLACEABLE_SCHEMA: &[&str] = &[ + "DROP SCHEMA IF EXISTS r CASCADE;", + "CREATE SCHEMA r;", + include_str!("../replaceable_schema/utils.sql"), + include_str!("../replaceable_schema/triggers.sql"), +]; + +pub fn run(db_url: &str) -> Result<(), LemmyError> { + // Migrations don't support async connection + let mut conn = PgConnection::establish(db_url).with_context(|| "Error connecting to database")?; + + // Run all pending migrations except for the newest one, then run the newest one in the same + // transaction as `REPLACEABLE_SCHEMA`. This code will become less hacky when the conditional + // setup of things in `REPLACEABLE_SCHEMA` is done without using the number of pending + // migrations.
+ println!("Running Database migrations (This may take a long time)..."); + let migrations = conn + .pending_migrations(MIGRATIONS) + .map_err(|e| anyhow::anyhow!("Couldn't determine pending migrations: {e}"))?; + for migration in migrations.iter().rev().skip(1).rev() { + conn + .run_migration(migration) + .map_err(|e| anyhow::anyhow!("Couldn't run DB Migrations: {e}"))?; + } + conn.transaction::<_, LemmyError, _>(|conn| { + if let Some(migration) = migrations.last() { + // Migration is run with a savepoint since there's already a transaction + conn + .run_migration(migration) + .map_err(|e| anyhow::anyhow!("Couldn't run DB Migrations: {e}"))?; + } else if !cfg!(debug_assertions) { + // In production, skip running `REPLACEABLE_SCHEMA` to avoid locking things in the schema. In + // CI, always run it because `diesel migration` commands would otherwise prevent it. + return Ok(()); + } + conn + .batch_execute(&REPLACEABLE_SCHEMA.join("\n")) + .context("Couldn't run SQL files in crates/db_schema/replaceable_schema")?; + + Ok(()) + })?; + println!("Database migrations complete."); + + Ok(()) +} diff --git a/crates/db_schema/src/sensitive.rs b/crates/db_schema/src/sensitive.rs new file mode 100644 index 000000000..340679e2f --- /dev/null +++ b/crates/db_schema/src/sensitive.rs @@ -0,0 +1,57 @@ +use serde::{Deserialize, Serialize}; +use std::{fmt::Debug, ops::Deref}; +#[cfg(feature = "full")] +use ts_rs::TS; + +#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Deserialize, Serialize, Default)] +#[cfg_attr(feature = "full", derive(DieselNewType))] +#[serde(transparent)] +pub struct SensitiveString(String); + +impl SensitiveString { + pub fn into_inner(self) -> String { + self.0 + } +} + +impl Debug for SensitiveString { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("Sensitive").finish() + } +} + +impl AsRef<[u8]> for SensitiveString { + fn as_ref(&self) -> &[u8] { + self.0.as_ref() + } +} + +impl Deref for SensitiveString { + type Target = str; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +impl From for SensitiveString { + fn from(t: String) -> Self { + SensitiveString(t) + } +} + +#[cfg(feature = "full")] +impl TS for SensitiveString { + fn name() -> String { + "string".to_string() + } + fn name_with_type_args(_args: Vec) -> String { + "string".to_string() + } + fn dependencies() -> Vec { + Vec::new() + } + fn transparent() -> bool { + true + } +} diff --git a/crates/db_schema/src/source/activity.rs b/crates/db_schema/src/source/activity.rs index fc4bb0ec5..6eb17f606 100644 --- a/crates/db_schema/src/source/activity.rs +++ b/crates/db_schema/src/source/activity.rs @@ -1,6 +1,6 @@ use crate::{ - newtypes::{CommunityId, DbUrl}, - schema::sent_activity, + newtypes::{ActivityId, CommunityId, DbUrl}, + schema::{received_activity, sent_activity}, }; use chrono::{DateTime, Utc}; use diesel::{sql_types::Nullable, Queryable}; @@ -51,10 +51,12 @@ impl ActivitySendTargets { } } -#[derive(PartialEq, Eq, Debug, Queryable)] -#[diesel(table_name = sent_activity)] +#[derive(PartialEq, Eq, Debug)] +#[cfg_attr(feature = "full", derive(Queryable, Selectable, Identifiable))] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] +#[cfg_attr(feature = "full", diesel(table_name = sent_activity))] pub struct SentActivity { - pub id: i64, + pub id: ActivityId, pub ap_id: DbUrl, pub data: Value, pub sensitive: bool, @@ -66,8 +68,8 @@ pub struct SentActivity { pub actor_apub_id: Option, } -#[derive(Insertable)] -#[diesel(table_name = 
sent_activity)] +#[cfg_attr(feature = "full", derive(Insertable))] +#[cfg_attr(feature = "full", diesel(table_name = sent_activity))] pub struct SentActivityForm { pub ap_id: DbUrl, pub data: Value, @@ -87,10 +89,12 @@ pub enum ActorType { Person, } -#[derive(PartialEq, Eq, Debug, Queryable)] -#[diesel(table_name = received_activity)] +#[derive(PartialEq, Eq, Debug)] +#[cfg_attr(feature = "full", derive(Queryable, Selectable, Identifiable))] +#[cfg_attr(feature = "full", diesel(primary_key(ap_id)))] +#[cfg_attr(feature = "full", diesel(table_name = received_activity))] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] pub struct ReceivedActivity { - pub id: i64, pub ap_id: DbUrl, pub published: DateTime, } diff --git a/crates/db_schema/src/source/actor_language.rs b/crates/db_schema/src/source/actor_language.rs index 6780be51b..0b14b63c1 100644 --- a/crates/db_schema/src/source/actor_language.rs +++ b/crates/db_schema/src/source/actor_language.rs @@ -1,22 +1,14 @@ -use crate::newtypes::{ - CommunityId, - CommunityLanguageId, - LanguageId, - LocalUserId, - LocalUserLanguageId, - SiteId, - SiteLanguageId, -}; +use crate::newtypes::{CommunityId, LanguageId, LocalUserId, SiteId}; #[cfg(feature = "full")] use crate::schema::local_user_language; use serde::{Deserialize, Serialize}; #[derive(Clone, PartialEq, Eq, Debug, Serialize, Deserialize)] -#[cfg_attr(feature = "full", derive(Queryable, Identifiable))] +#[cfg_attr(feature = "full", derive(Queryable, Selectable, Identifiable))] #[cfg_attr(feature = "full", diesel(table_name = local_user_language))] +#[cfg_attr(feature = "full", diesel(primary_key(local_user_id, language_id)))] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] pub struct LocalUserLanguage { - #[serde(skip)] - pub id: LocalUserLanguageId, pub local_user_id: LocalUserId, pub language_id: LanguageId, } @@ -33,11 +25,11 @@ pub struct LocalUserLanguageForm { use crate::schema::community_language; #[derive(Clone, PartialEq, Eq, Debug, Serialize, Deserialize)] -#[cfg_attr(feature = "full", derive(Queryable, Identifiable))] +#[cfg_attr(feature = "full", derive(Queryable, Selectable, Identifiable))] #[cfg_attr(feature = "full", diesel(table_name = community_language))] +#[cfg_attr(feature = "full", diesel(primary_key(community_id, language_id)))] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] pub struct CommunityLanguage { - #[serde(skip)] - pub id: CommunityLanguageId, pub community_id: CommunityId, pub language_id: LanguageId, } @@ -54,11 +46,11 @@ pub struct CommunityLanguageForm { use crate::schema::site_language; #[derive(Clone, PartialEq, Eq, Debug, Serialize, Deserialize)] -#[cfg_attr(feature = "full", derive(Queryable, Identifiable))] +#[cfg_attr(feature = "full", derive(Queryable, Selectable, Identifiable))] #[cfg_attr(feature = "full", diesel(table_name = site_language))] +#[cfg_attr(feature = "full", diesel(primary_key(site_id, language_id)))] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] pub struct SiteLanguage { - #[serde(skip)] - pub id: SiteLanguageId, pub site_id: SiteId, pub language_id: LanguageId, } diff --git a/crates/db_schema/src/source/captcha_answer.rs b/crates/db_schema/src/source/captcha_answer.rs index b7e9636c4..188368226 100644 --- a/crates/db_schema/src/source/captcha_answer.rs +++ b/crates/db_schema/src/source/captcha_answer.rs @@ -7,10 +7,10 @@ use uuid::Uuid; #[skip_serializing_none] #[derive(Clone, PartialEq, Eq, Debug, Serialize, Deserialize)] 
-#[cfg_attr(feature = "full", derive(Queryable))] +#[cfg_attr(feature = "full", derive(Queryable, Selectable))] #[cfg_attr(feature = "full", diesel(table_name = captcha_answer))] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] pub struct CaptchaAnswer { - pub id: i32, pub uuid: Uuid, pub answer: String, pub published: DateTime, @@ -18,8 +18,9 @@ pub struct CaptchaAnswer { #[skip_serializing_none] #[derive(Clone, PartialEq, Eq, Debug, Serialize, Deserialize)] -#[cfg_attr(feature = "full", derive(Queryable))] +#[cfg_attr(feature = "full", derive(Queryable, Selectable))] #[cfg_attr(feature = "full", diesel(table_name = captcha_answer))] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] pub struct CheckCaptchaAnswer { pub uuid: Uuid, pub answer: String, diff --git a/crates/db_schema/src/source/comment.rs b/crates/db_schema/src/source/comment.rs index efe3b7cdf..1e5f043f1 100644 --- a/crates/db_schema/src/source/comment.rs +++ b/crates/db_schema/src/source/comment.rs @@ -10,14 +10,17 @@ use serde::{Deserialize, Serialize}; use serde_with::skip_serializing_none; #[cfg(feature = "full")] use ts_rs::TS; -use typed_builder::TypedBuilder; #[skip_serializing_none] #[derive(Clone, PartialEq, Eq, Debug, Serialize, Deserialize)] -#[cfg_attr(feature = "full", derive(Queryable, Associations, Identifiable, TS))] +#[cfg_attr( + feature = "full", + derive(Queryable, Selectable, Associations, Identifiable, TS) +)] #[cfg_attr(feature = "full", ts(export))] #[cfg_attr(feature = "full", diesel(belongs_to(crate::source::post::Post)))] #[cfg_attr(feature = "full", diesel(table_name = comment))] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] /// A comment. pub struct Comment { pub id: CommentId, @@ -37,7 +40,8 @@ pub struct Comment { #[cfg(feature = "full")] #[cfg_attr(feature = "full", serde(with = "LtreeDef"))] #[cfg_attr(feature = "full", ts(type = "string"))] - /// The path / tree location of a comment, separated by dots, ending with the comment's id. Ex: 0.24.27 + /// The path / tree location of a comment, separated by dots, ending with the comment's id. 
Ex: + /// 0.24.27 pub path: Ltree, #[cfg(not(feature = "full"))] pub path: String, @@ -46,24 +50,28 @@ pub struct Comment { pub language_id: LanguageId, } -#[derive(Debug, Clone, TypedBuilder)] -#[builder(field_defaults(default))] +#[derive(Debug, Clone, derive_new::new)] #[cfg_attr(feature = "full", derive(Insertable, AsChangeset))] #[cfg_attr(feature = "full", diesel(table_name = comment))] pub struct CommentInsertForm { - #[builder(!default)] pub creator_id: PersonId, - #[builder(!default)] pub post_id: PostId, - #[builder(!default)] pub content: String, + #[new(default)] pub removed: Option, + #[new(default)] pub published: Option>, + #[new(default)] pub updated: Option>, + #[new(default)] pub deleted: Option, + #[new(default)] pub ap_id: Option, + #[new(default)] pub local: Option, + #[new(default)] pub distinguished: Option, + #[new(default)] pub language_id: Option, } @@ -83,14 +91,17 @@ pub struct CommentUpdateForm { } #[derive(PartialEq, Eq, Debug, Clone)] -#[cfg_attr(feature = "full", derive(Identifiable, Queryable, Associations))] +#[cfg_attr( + feature = "full", + derive(Identifiable, Queryable, Selectable, Associations) +)] #[cfg_attr(feature = "full", diesel(belongs_to(crate::source::comment::Comment)))] #[cfg_attr(feature = "full", diesel(table_name = comment_like))] +#[cfg_attr(feature = "full", diesel(primary_key(person_id, comment_id)))] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] pub struct CommentLike { - pub id: i32, pub person_id: PersonId, pub comment_id: CommentId, - pub post_id: PostId, // TODO this is redundant pub score: i16, pub published: DateTime, } @@ -101,16 +112,19 @@ pub struct CommentLike { pub struct CommentLikeForm { pub person_id: PersonId, pub comment_id: CommentId, - pub post_id: PostId, // TODO this is redundant pub score: i16, } #[derive(PartialEq, Eq, Debug)] -#[cfg_attr(feature = "full", derive(Identifiable, Queryable, Associations))] +#[cfg_attr( + feature = "full", + derive(Identifiable, Queryable, Selectable, Associations) +)] #[cfg_attr(feature = "full", diesel(belongs_to(crate::source::comment::Comment)))] #[cfg_attr(feature = "full", diesel(table_name = comment_saved))] +#[cfg_attr(feature = "full", diesel(primary_key(person_id, comment_id)))] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] pub struct CommentSaved { - pub id: i32, pub comment_id: CommentId, pub person_id: PersonId, pub published: DateTime, diff --git a/crates/db_schema/src/source/comment_reply.rs b/crates/db_schema/src/source/comment_reply.rs index 30d4ae919..1f99ad076 100644 --- a/crates/db_schema/src/source/comment_reply.rs +++ b/crates/db_schema/src/source/comment_reply.rs @@ -7,9 +7,13 @@ use serde::{Deserialize, Serialize}; use ts_rs::TS; #[derive(Clone, PartialEq, Eq, Debug, Serialize, Deserialize)] -#[cfg_attr(feature = "full", derive(Queryable, Associations, Identifiable, TS))] +#[cfg_attr( + feature = "full", + derive(Queryable, Selectable, Associations, Identifiable, TS) +)] #[cfg_attr(feature = "full", diesel(belongs_to(crate::source::comment::Comment)))] #[cfg_attr(feature = "full", diesel(table_name = comment_reply))] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] #[cfg_attr(feature = "full", ts(export))] /// A comment reply. 
pub struct CommentReply { diff --git a/crates/db_schema/src/source/comment_report.rs b/crates/db_schema/src/source/comment_report.rs index 23697adce..73dadc945 100644 --- a/crates/db_schema/src/source/comment_report.rs +++ b/crates/db_schema/src/source/comment_report.rs @@ -9,9 +9,13 @@ use ts_rs::TS; #[skip_serializing_none] #[derive(PartialEq, Eq, Serialize, Deserialize, Debug, Clone)] -#[cfg_attr(feature = "full", derive(Queryable, Associations, Identifiable, TS))] +#[cfg_attr( + feature = "full", + derive(Queryable, Selectable, Associations, Identifiable, TS) +)] #[cfg_attr(feature = "full", diesel(belongs_to(crate::source::comment::Comment)))] #[cfg_attr(feature = "full", diesel(table_name = comment_report))] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] #[cfg_attr(feature = "full", ts(export))] /// A comment report. pub struct CommentReport { diff --git a/crates/db_schema/src/source/community.rs b/crates/db_schema/src/source/community.rs index 5da772be3..2eb6c143c 100644 --- a/crates/db_schema/src/source/community.rs +++ b/crates/db_schema/src/source/community.rs @@ -2,19 +2,21 @@ use crate::schema::{community, community_follower, community_moderator, community_person_ban}; use crate::{ newtypes::{CommunityId, DbUrl, InstanceId, PersonId}, + sensitive::SensitiveString, source::placeholder_apub_url, + CommunityVisibility, }; use chrono::{DateTime, Utc}; use serde::{Deserialize, Serialize}; use serde_with::skip_serializing_none; #[cfg(feature = "full")] use ts_rs::TS; -use typed_builder::TypedBuilder; #[skip_serializing_none] #[derive(Clone, PartialEq, Eq, Debug, Serialize, Deserialize)] -#[cfg_attr(feature = "full", derive(Queryable, Identifiable, TS))] +#[cfg_attr(feature = "full", derive(Queryable, Selectable, Identifiable, TS))] #[cfg_attr(feature = "full", diesel(table_name = community))] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] #[cfg_attr(feature = "full", ts(export))] /// A community. pub struct Community { @@ -22,8 +24,8 @@ pub struct Community { pub name: String, /// A longer title, that can contain other characters, and doesn't have to be unique. pub title: String, - /// A sidebar / markdown description. - pub description: Option, + /// A sidebar for the community in markdown. + pub sidebar: Option, /// Whether the community is removed by a mod. pub removed: bool, pub published: DateTime, @@ -37,7 +39,7 @@ pub struct Community { /// Whether the community is local. pub local: bool, #[serde(skip)] - pub private_key: Option, + pub private_key: Option, #[serde(skip)] pub public_key: String, #[serde(skip)] @@ -47,13 +49,11 @@ pub struct Community { /// A URL for a banner. pub banner: Option, #[cfg_attr(feature = "full", ts(skip))] - #[serde(skip, default = "placeholder_apub_url")] - pub followers_url: DbUrl, + #[serde(skip)] + pub followers_url: Option, #[cfg_attr(feature = "full", ts(skip))] #[serde(skip, default = "placeholder_apub_url")] pub inbox_url: DbUrl, - #[serde(skip)] - pub shared_inbox_url: Option, /// Whether the community is hidden. pub hidden: bool, /// Whether posting is restricted to mods only. @@ -65,39 +65,59 @@ pub struct Community { /// Url where featured posts collection is served over Activitypub #[serde(skip)] pub featured_url: Option, + pub visibility: CommunityVisibility, + /// A shorter, one-line description of the site. 
+ pub description: Option, } -#[derive(Debug, Clone, TypedBuilder)] -#[builder(field_defaults(default))] +#[derive(Debug, Clone, derive_new::new)] #[cfg_attr(feature = "full", derive(Insertable, AsChangeset))] #[cfg_attr(feature = "full", diesel(table_name = community))] pub struct CommunityInsertForm { - #[builder(!default)] - pub name: String, - #[builder(!default)] - pub title: String, - pub description: Option, - pub removed: Option, - pub published: Option>, - pub updated: Option>, - pub deleted: Option, - pub nsfw: Option, - pub actor_id: Option, - pub local: Option, - pub private_key: Option, - pub public_key: String, - pub last_refreshed_at: Option>, - pub icon: Option, - pub banner: Option, - pub followers_url: Option, - pub inbox_url: Option, - pub shared_inbox_url: Option, - pub moderators_url: Option, - pub featured_url: Option, - pub hidden: Option, - pub posting_restricted_to_mods: Option, - #[builder(!default)] pub instance_id: InstanceId, + pub name: String, + pub title: String, + pub public_key: String, + #[new(default)] + pub sidebar: Option, + #[new(default)] + pub removed: Option, + #[new(default)] + pub published: Option>, + #[new(default)] + pub updated: Option>, + #[new(default)] + pub deleted: Option, + #[new(default)] + pub nsfw: Option, + #[new(default)] + pub actor_id: Option, + #[new(default)] + pub local: Option, + #[new(default)] + pub private_key: Option, + #[new(default)] + pub last_refreshed_at: Option>, + #[new(default)] + pub icon: Option, + #[new(default)] + pub banner: Option, + #[new(default)] + pub followers_url: Option, + #[new(default)] + pub inbox_url: Option, + #[new(default)] + pub moderators_url: Option, + #[new(default)] + pub featured_url: Option, + #[new(default)] + pub hidden: Option, + #[new(default)] + pub posting_restricted_to_mods: Option, + #[new(default)] + pub visibility: Option, + #[new(default)] + pub description: Option, } #[derive(Debug, Clone, Default)] @@ -105,7 +125,7 @@ pub struct CommunityInsertForm { #[cfg_attr(feature = "full", diesel(table_name = community))] pub struct CommunityUpdateForm { pub title: Option, - pub description: Option>, + pub sidebar: Option>, pub removed: Option, pub published: Option>, pub updated: Option>>, @@ -120,22 +140,27 @@ pub struct CommunityUpdateForm { pub banner: Option>, pub followers_url: Option, pub inbox_url: Option, - pub shared_inbox_url: Option>, pub moderators_url: Option, pub featured_url: Option, pub hidden: Option, pub posting_restricted_to_mods: Option, + pub visibility: Option, + pub description: Option>, } #[derive(PartialEq, Eq, Debug)] -#[cfg_attr(feature = "full", derive(Identifiable, Queryable, Associations))] +#[cfg_attr( + feature = "full", + derive(Identifiable, Queryable, Selectable, Associations) +)] #[cfg_attr( feature = "full", diesel(belongs_to(crate::source::community::Community)) )] #[cfg_attr(feature = "full", diesel(table_name = community_moderator))] +#[cfg_attr(feature = "full", diesel(primary_key(person_id, community_id)))] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] pub struct CommunityModerator { - pub id: i32, pub community_id: CommunityId, pub person_id: PersonId, pub published: DateTime, @@ -150,14 +175,18 @@ pub struct CommunityModeratorForm { } #[derive(PartialEq, Eq, Debug)] -#[cfg_attr(feature = "full", derive(Identifiable, Queryable, Associations))] +#[cfg_attr( + feature = "full", + derive(Identifiable, Queryable, Selectable, Associations) +)] #[cfg_attr( feature = "full", 
diesel(belongs_to(crate::source::community::Community)) )] #[cfg_attr(feature = "full", diesel(table_name = community_person_ban))] +#[cfg_attr(feature = "full", diesel(primary_key(person_id, community_id)))] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] pub struct CommunityPersonBan { - pub id: i32, pub community_id: CommunityId, pub person_id: PersonId, pub published: DateTime, @@ -174,14 +203,18 @@ pub struct CommunityPersonBanForm { } #[derive(PartialEq, Eq, Debug)] -#[cfg_attr(feature = "full", derive(Identifiable, Queryable, Associations))] +#[cfg_attr( + feature = "full", + derive(Identifiable, Queryable, Selectable, Associations) +)] #[cfg_attr( feature = "full", diesel(belongs_to(crate::source::community::Community)) )] #[cfg_attr(feature = "full", diesel(table_name = community_follower))] +#[cfg_attr(feature = "full", diesel(primary_key(person_id, community_id)))] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] pub struct CommunityFollower { - pub id: i32, pub community_id: CommunityId, pub person_id: PersonId, pub published: DateTime, diff --git a/crates/db_schema/src/source/community_block.rs b/crates/db_schema/src/source/community_block.rs index 628e77ade..7d43af173 100644 --- a/crates/db_schema/src/source/community_block.rs +++ b/crates/db_schema/src/source/community_block.rs @@ -1,18 +1,22 @@ -use crate::newtypes::{CommunityBlockId, CommunityId, PersonId}; +use crate::newtypes::{CommunityId, PersonId}; #[cfg(feature = "full")] use crate::schema::community_block; use chrono::{DateTime, Utc}; use serde::{Deserialize, Serialize}; #[derive(Clone, PartialEq, Eq, Debug, Serialize, Deserialize)] -#[cfg_attr(feature = "full", derive(Queryable, Associations, Identifiable))] +#[cfg_attr( + feature = "full", + derive(Queryable, Selectable, Associations, Identifiable) +)] #[cfg_attr( feature = "full", diesel(belongs_to(crate::source::community::Community)) )] #[cfg_attr(feature = "full", diesel(table_name = community_block))] +#[cfg_attr(feature = "full", diesel(primary_key(person_id, community_id)))] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] pub struct CommunityBlock { - pub id: CommunityBlockId, pub person_id: PersonId, pub community_id: CommunityId, pub published: DateTime, diff --git a/crates/db_schema/src/source/custom_emoji.rs b/crates/db_schema/src/source/custom_emoji.rs index 015004cae..f5a92ea46 100644 --- a/crates/db_schema/src/source/custom_emoji.rs +++ b/crates/db_schema/src/source/custom_emoji.rs @@ -1,4 +1,4 @@ -use crate::newtypes::{CustomEmojiId, DbUrl, LocalSiteId}; +use crate::newtypes::{CustomEmojiId, DbUrl}; #[cfg(feature = "full")] use crate::schema::custom_emoji; use chrono::{DateTime, Utc}; @@ -6,21 +6,16 @@ use serde::{Deserialize, Serialize}; use serde_with::skip_serializing_none; #[cfg(feature = "full")] use ts_rs::TS; -use typed_builder::TypedBuilder; #[skip_serializing_none] #[derive(PartialEq, Eq, Debug, Clone, Serialize, Deserialize)] -#[cfg_attr(feature = "full", derive(Queryable, Associations, Identifiable, TS))] +#[cfg_attr(feature = "full", derive(Queryable, Selectable, Identifiable, TS))] #[cfg_attr(feature = "full", diesel(table_name = custom_emoji))] -#[cfg_attr( - feature = "full", - diesel(belongs_to(crate::source::local_site::LocalSite)) -)] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] #[cfg_attr(feature = "full", ts(export))] /// A custom emoji. 
pub struct CustomEmoji { pub id: CustomEmojiId, - pub local_site_id: LocalSiteId, pub shortcode: String, pub image_url: DbUrl, pub alt_text: String, @@ -29,22 +24,20 @@ pub struct CustomEmoji { pub updated: Option>, } -#[derive(Debug, Clone, TypedBuilder)] +#[derive(Debug, Clone, derive_new::new)] #[cfg_attr(feature = "full", derive(Insertable, AsChangeset))] #[cfg_attr(feature = "full", diesel(table_name = custom_emoji))] pub struct CustomEmojiInsertForm { - pub local_site_id: LocalSiteId, pub shortcode: String, pub image_url: DbUrl, pub alt_text: String, pub category: String, } -#[derive(Debug, Clone, TypedBuilder)] +#[derive(Debug, Clone, derive_new::new)] #[cfg_attr(feature = "full", derive(Insertable, AsChangeset))] #[cfg_attr(feature = "full", diesel(table_name = custom_emoji))] pub struct CustomEmojiUpdateForm { - pub local_site_id: LocalSiteId, pub image_url: DbUrl, pub alt_text: String, pub category: String, diff --git a/crates/db_schema/src/source/custom_emoji_keyword.rs b/crates/db_schema/src/source/custom_emoji_keyword.rs index b1811cf44..a47ba411e 100644 --- a/crates/db_schema/src/source/custom_emoji_keyword.rs +++ b/crates/db_schema/src/source/custom_emoji_keyword.rs @@ -4,24 +4,27 @@ use crate::schema::custom_emoji_keyword; use serde::{Deserialize, Serialize}; #[cfg(feature = "full")] use ts_rs::TS; -use typed_builder::TypedBuilder; #[derive(PartialEq, Eq, Debug, Clone, Serialize, Deserialize)] -#[cfg_attr(feature = "full", derive(Queryable, Associations, Identifiable, TS))] +#[cfg_attr( + feature = "full", + derive(Queryable, Selectable, Associations, Identifiable, TS) +)] #[cfg_attr(feature = "full", diesel(table_name = custom_emoji_keyword))] #[cfg_attr( feature = "full", diesel(belongs_to(crate::source::custom_emoji::CustomEmoji)) )] +#[cfg_attr(feature = "full", diesel(primary_key(custom_emoji_id, keyword)))] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] #[cfg_attr(feature = "full", ts(export))] /// A custom keyword for an emoji. 
pub struct CustomEmojiKeyword { - pub id: i32, pub custom_emoji_id: CustomEmojiId, pub keyword: String, } -#[derive(Debug, Clone, TypedBuilder)] +#[derive(Debug, Clone, derive_new::new)] #[cfg_attr(feature = "full", derive(Insertable, AsChangeset))] #[cfg_attr(feature = "full", diesel(table_name = custom_emoji_keyword))] pub struct CustomEmojiKeywordInsertForm { diff --git a/crates/db_schema/src/source/email_verification.rs b/crates/db_schema/src/source/email_verification.rs index af117e0bd..201a4cef3 100644 --- a/crates/db_schema/src/source/email_verification.rs +++ b/crates/db_schema/src/source/email_verification.rs @@ -4,13 +4,14 @@ use crate::schema::email_verification; use chrono::{DateTime, Utc}; #[derive(Clone)] -#[cfg_attr(feature = "full", derive(Queryable, Identifiable))] +#[cfg_attr(feature = "full", derive(Queryable, Selectable, Identifiable))] #[cfg_attr(feature = "full", diesel(table_name = email_verification))] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] pub struct EmailVerification { pub id: i32, pub local_user_id: LocalUserId, pub email: String, - pub verification_code: String, + pub verification_token: String, pub published: DateTime, } diff --git a/crates/db_schema/src/source/federation_allowlist.rs b/crates/db_schema/src/source/federation_allowlist.rs index 534e1b02e..cc66bcad8 100644 --- a/crates/db_schema/src/source/federation_allowlist.rs +++ b/crates/db_schema/src/source/federation_allowlist.rs @@ -6,14 +6,18 @@ use serde::{Deserialize, Serialize}; use std::fmt::Debug; #[derive(Clone, PartialEq, Eq, Debug, Serialize, Deserialize)] -#[cfg_attr(feature = "full", derive(Queryable, Associations, Identifiable))] +#[cfg_attr( + feature = "full", + derive(Queryable, Selectable, Associations, Identifiable) +)] #[cfg_attr( feature = "full", diesel(belongs_to(crate::source::instance::Instance)) )] #[cfg_attr(feature = "full", diesel(table_name = federation_allowlist))] +#[cfg_attr(feature = "full", diesel(primary_key(instance_id)))] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] pub struct FederationAllowList { - pub id: i32, pub instance_id: InstanceId, pub published: DateTime, pub updated: Option>, diff --git a/crates/db_schema/src/source/federation_blocklist.rs b/crates/db_schema/src/source/federation_blocklist.rs index 0cf615d7b..2176ce42d 100644 --- a/crates/db_schema/src/source/federation_blocklist.rs +++ b/crates/db_schema/src/source/federation_blocklist.rs @@ -6,14 +6,18 @@ use serde::{Deserialize, Serialize}; use std::fmt::Debug; #[derive(Clone, PartialEq, Eq, Debug, Serialize, Deserialize)] -#[cfg_attr(feature = "full", derive(Queryable, Associations, Identifiable))] +#[cfg_attr( + feature = "full", + derive(Queryable, Selectable, Associations, Identifiable) +)] #[cfg_attr( feature = "full", diesel(belongs_to(crate::source::instance::Instance)) )] #[cfg_attr(feature = "full", diesel(table_name = federation_blocklist))] +#[cfg_attr(feature = "full", diesel(primary_key(instance_id)))] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] pub struct FederationBlockList { - pub id: i32, pub instance_id: InstanceId, pub published: DateTime, pub updated: Option>, diff --git a/crates/db_schema/src/source/federation_queue_state.rs b/crates/db_schema/src/source/federation_queue_state.rs new file mode 100644 index 000000000..134dfe452 --- /dev/null +++ b/crates/db_schema/src/source/federation_queue_state.rs @@ -0,0 +1,28 @@ +use crate::newtypes::{ActivityId, InstanceId}; +use chrono::{DateTime, Utc}; 
+#[cfg(feature = "full")] +use diesel::prelude::*; +use serde::{Deserialize, Serialize}; +use serde_with::skip_serializing_none; +#[cfg(feature = "full")] +use ts_rs::TS; + +#[skip_serializing_none] +#[derive(Clone, Debug, Serialize, Deserialize)] +#[cfg_attr( + feature = "full", + derive(Queryable, Selectable, Insertable, AsChangeset) +)] +#[cfg_attr(feature = "full", derive(TS))] +#[cfg_attr(feature = "full", diesel(table_name = crate::schema::federation_queue_state))] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] +pub struct FederationQueueState { + pub instance_id: InstanceId, + /// the last successfully sent activity id + pub last_successful_id: Option, + pub last_successful_published_time: Option>, + /// how many failed attempts have been made to send the next activity + pub fail_count: i32, + /// timestamp of the last retry attempt (when the last failing activity was resent) + pub last_retry: Option>, +} diff --git a/crates/db_schema/src/source/image_upload.rs b/crates/db_schema/src/source/image_upload.rs index 0a3c4d6c4..db840dc1d 100644 --- a/crates/db_schema/src/source/image_upload.rs +++ b/crates/db_schema/src/source/image_upload.rs @@ -1,4 +1,4 @@ -use crate::newtypes::{ImageUploadId, LocalUserId}; +use crate::newtypes::LocalUserId; #[cfg(feature = "full")] use crate::schema::image_upload; use chrono::{DateTime, Utc}; @@ -7,26 +7,29 @@ use serde_with::skip_serializing_none; use std::fmt::Debug; #[cfg(feature = "full")] use ts_rs::TS; -use typed_builder::TypedBuilder; #[skip_serializing_none] #[derive(PartialEq, Eq, Debug, Clone, Serialize, Deserialize)] -#[cfg_attr(feature = "full", derive(Queryable, Associations, Identifiable, TS))] +#[cfg_attr( + feature = "full", + derive(Queryable, Selectable, Associations, Identifiable, TS) +)] #[cfg_attr(feature = "full", diesel(table_name = image_upload))] +#[cfg_attr(feature = "full", diesel(primary_key(pictrs_alias)))] #[cfg_attr(feature = "full", ts(export))] #[cfg_attr( feature = "full", diesel(belongs_to(crate::source::local_user::LocalUser)) )] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] pub struct ImageUpload { - pub id: ImageUploadId, pub local_user_id: LocalUserId, pub pictrs_alias: String, pub pictrs_delete_token: String, pub published: DateTime, } -#[derive(Debug, Clone, TypedBuilder)] +#[derive(Debug, Clone)] #[cfg_attr(feature = "full", derive(Insertable, AsChangeset))] #[cfg_attr(feature = "full", diesel(table_name = image_upload))] pub struct ImageUploadForm { diff --git a/crates/db_schema/src/source/images.rs b/crates/db_schema/src/source/images.rs new file mode 100644 index 000000000..22f5e6eb4 --- /dev/null +++ b/crates/db_schema/src/source/images.rs @@ -0,0 +1,76 @@ +use crate::newtypes::{DbUrl, LocalUserId}; +#[cfg(feature = "full")] +use crate::schema::{image_details, local_image, remote_image}; +use chrono::{DateTime, Utc}; +use serde::{Deserialize, Serialize}; +use serde_with::skip_serializing_none; +use std::fmt::Debug; +#[cfg(feature = "full")] +use ts_rs::TS; + +#[skip_serializing_none] +#[derive(Clone, PartialEq, Eq, Debug, Serialize, Deserialize)] +#[cfg_attr( + feature = "full", + derive(Queryable, Selectable, Identifiable, Associations, TS) +)] +#[cfg_attr(feature = "full", ts(export))] +#[cfg_attr(feature = "full", diesel(table_name = local_image))] +#[cfg_attr( + feature = "full", + diesel(belongs_to(crate::source::local_user::LocalUser)) +)] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] +#[cfg_attr(feature = "full", 
diesel(primary_key(pictrs_alias)))] +pub struct LocalImage { + pub local_user_id: Option, + pub pictrs_alias: String, + pub pictrs_delete_token: String, + pub published: DateTime, +} + +#[derive(Debug, Clone)] +#[cfg_attr(feature = "full", derive(Insertable, AsChangeset))] +#[cfg_attr(feature = "full", diesel(table_name = local_image))] +pub struct LocalImageForm { + pub local_user_id: Option, + pub pictrs_alias: String, + pub pictrs_delete_token: String, +} + +/// Stores all images which are hosted on remote domains. When attempting to proxy an image, it +/// is checked against this table to avoid Lemmy being used as a general purpose proxy. +#[skip_serializing_none] +#[derive(Clone, PartialEq, Eq, Debug, Serialize, Deserialize)] +#[cfg_attr(feature = "full", derive(Queryable, Selectable, Identifiable))] +#[cfg_attr(feature = "full", diesel(table_name = remote_image))] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] +#[cfg_attr(feature = "full", diesel(primary_key(link)))] +pub struct RemoteImage { + pub link: DbUrl, + pub published: DateTime, +} + +#[skip_serializing_none] +#[derive(Clone, PartialEq, Eq, Debug, Serialize, Deserialize)] +#[cfg_attr(feature = "full", derive(Queryable, Selectable, Identifiable, TS))] +#[cfg_attr(feature = "full", ts(export))] +#[cfg_attr(feature = "full", diesel(table_name = image_details))] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] +#[cfg_attr(feature = "full", diesel(primary_key(link)))] +pub struct ImageDetails { + pub link: DbUrl, + pub width: i32, + pub height: i32, + pub content_type: String, +} + +#[derive(Debug, Clone)] +#[cfg_attr(feature = "full", derive(Insertable, AsChangeset))] +#[cfg_attr(feature = "full", diesel(table_name = image_details))] +pub struct ImageDetailsForm { + pub link: DbUrl, + pub width: i32, + pub height: i32, + pub content_type: String, +} diff --git a/crates/db_schema/src/source/instance.rs b/crates/db_schema/src/source/instance.rs index 8714b317e..8c27a2cb6 100644 --- a/crates/db_schema/src/source/instance.rs +++ b/crates/db_schema/src/source/instance.rs @@ -7,12 +7,12 @@ use serde_with::skip_serializing_none; use std::fmt::Debug; #[cfg(feature = "full")] use ts_rs::TS; -use typed_builder::TypedBuilder; #[skip_serializing_none] #[derive(Clone, PartialEq, Eq, Debug, Serialize, Deserialize)] #[cfg_attr(feature = "full", derive(Queryable, Selectable, Identifiable, TS))] #[cfg_attr(feature = "full", diesel(table_name = instance))] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] #[cfg_attr(feature = "full", ts(export))] /// A federated instance / site. 
pub struct Instance { @@ -24,14 +24,15 @@ pub struct Instance { pub version: Option, } -#[derive(Clone, TypedBuilder)] -#[builder(field_defaults(default))] +#[derive(Clone, derive_new::new)] #[cfg_attr(feature = "full", derive(Insertable, AsChangeset))] #[cfg_attr(feature = "full", diesel(table_name = instance))] pub struct InstanceForm { - #[builder(!default)] pub domain: String, + #[new(default)] pub software: Option, + #[new(default)] pub version: Option, + #[new(default)] pub updated: Option>, } diff --git a/crates/db_schema/src/source/instance_block.rs b/crates/db_schema/src/source/instance_block.rs index 1aa215e45..4eebbf1a8 100644 --- a/crates/db_schema/src/source/instance_block.rs +++ b/crates/db_schema/src/source/instance_block.rs @@ -5,14 +5,18 @@ use chrono::{DateTime, Utc}; use serde::{Deserialize, Serialize}; #[derive(Clone, PartialEq, Eq, Debug, Serialize, Deserialize)] -#[cfg_attr(feature = "full", derive(Queryable, Associations, Identifiable))] +#[cfg_attr( + feature = "full", + derive(Queryable, Selectable, Associations, Identifiable) +)] #[cfg_attr( feature = "full", diesel(belongs_to(crate::source::instance::Instance)) )] #[cfg_attr(feature = "full", diesel(table_name = instance_block))] +#[cfg_attr(feature = "full", diesel(primary_key(person_id, instance_id)))] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] pub struct InstanceBlock { - pub id: i32, pub person_id: PersonId, pub instance_id: InstanceId, pub published: DateTime, diff --git a/crates/db_schema/src/source/language.rs b/crates/db_schema/src/source/language.rs index 3d3122da9..a816822e0 100644 --- a/crates/db_schema/src/source/language.rs +++ b/crates/db_schema/src/source/language.rs @@ -6,8 +6,9 @@ use serde::{Deserialize, Serialize}; use ts_rs::TS; #[derive(Clone, PartialEq, Eq, Debug, Serialize, Deserialize)] -#[cfg_attr(feature = "full", derive(Queryable, Identifiable, TS))] +#[cfg_attr(feature = "full", derive(Queryable, Selectable, Identifiable, TS))] #[cfg_attr(feature = "full", diesel(table_name = language))] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] #[cfg_attr(feature = "full", ts(export))] /// A language. pub struct Language { diff --git a/crates/db_schema/src/source/local_site.rs b/crates/db_schema/src/source/local_site.rs index e5945e86f..5fa57fe3b 100644 --- a/crates/db_schema/src/source/local_site.rs +++ b/crates/db_schema/src/source/local_site.rs @@ -2,7 +2,11 @@ use crate::schema::local_site; use crate::{ newtypes::{LocalSiteId, SiteId}, + CommentSortType, + FederationMode, ListingType, + PostListingMode, + PostSortType, RegistrationMode, }; use chrono::{DateTime, Utc}; @@ -10,13 +14,13 @@ use serde::{Deserialize, Serialize}; use serde_with::skip_serializing_none; #[cfg(feature = "full")] use ts_rs::TS; -use typed_builder::TypedBuilder; #[skip_serializing_none] -#[derive(PartialEq, Eq, Debug, Clone, Serialize, Deserialize)] +#[derive(PartialEq, Eq, Debug, Clone, Serialize, Deserialize, Default)] #[cfg_attr(feature = "full", derive(Queryable, Identifiable, TS))] #[cfg_attr(feature = "full", diesel(table_name = local_site))] #[cfg_attr(feature = "full", diesel(belongs_to(crate::source::site::Site)))] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] #[cfg_attr(feature = "full", ts(export))] /// The local site. pub struct LocalSite { @@ -24,10 +28,6 @@ pub struct LocalSite { pub site_id: SiteId, /// True if the site is set up. pub site_setup: bool, - /// Whether downvotes are enabled. 
- pub enable_downvotes: bool, - /// Whether NSFW is enabled. - pub enable_nsfw: bool, /// Whether only admins can create communities. pub community_creation_admin_only: bool, /// Whether emails are required. @@ -60,34 +60,84 @@ pub struct LocalSite { pub registration_mode: RegistrationMode, /// Whether to email admins on new reports. pub reports_email_admins: bool, + /// Whether to sign outgoing Activitypub fetches with private key of local instance. Some + /// Fediverse instances and platforms require this. + pub federation_signed_fetch: bool, + /// Default value for [LocalSite.post_listing_mode] + pub default_post_listing_mode: PostListingMode, + /// Default value for [LocalUser.post_sort_type] + pub default_post_sort_type: PostSortType, + /// Default value for [LocalUser.comment_sort_type] + pub default_comment_sort_type: CommentSortType, + /// Whether or not external auth methods can auto-register users. + pub oauth_registration: bool, + /// What kind of post upvotes your site allows. + pub post_upvotes: FederationMode, + /// What kind of post downvotes your site allows. + pub post_downvotes: FederationMode, + /// What kind of comment upvotes your site allows. + pub comment_upvotes: FederationMode, + /// What kind of comment downvotes your site allows. + pub comment_downvotes: FederationMode, } -#[derive(Clone, TypedBuilder)] -#[builder(field_defaults(default))] +#[derive(Clone, derive_new::new)] #[cfg_attr(feature = "full", derive(Insertable))] #[cfg_attr(feature = "full", diesel(table_name = local_site))] pub struct LocalSiteInsertForm { - #[builder(!default)] pub site_id: SiteId, + #[new(default)] pub site_setup: Option, - pub enable_downvotes: Option, - pub enable_nsfw: Option, + #[new(default)] pub community_creation_admin_only: Option, + #[new(default)] pub require_email_verification: Option, + #[new(default)] pub application_question: Option, + #[new(default)] pub private_instance: Option, + #[new(default)] pub default_theme: Option, + #[new(default)] pub default_post_listing_type: Option, + #[new(default)] pub legal_information: Option, + #[new(default)] pub hide_modlog_mod_names: Option, + #[new(default)] pub application_email_admins: Option, + #[new(default)] pub slur_filter_regex: Option, + #[new(default)] pub actor_name_max_length: Option, + #[new(default)] pub federation_enabled: Option, + #[new(default)] pub captcha_enabled: Option, + #[new(default)] pub captcha_difficulty: Option, + #[new(default)] pub registration_mode: Option, + #[new(default)] pub reports_email_admins: Option, + #[new(default)] + pub federation_signed_fetch: Option, + #[new(default)] + pub default_post_listing_mode: Option, + #[new(default)] + pub default_post_sort_type: Option, + #[new(default)] + pub default_comment_sort_type: Option, + #[new(default)] + pub oauth_registration: Option, + #[new(default)] + pub post_upvotes: Option, + #[new(default)] + pub post_downvotes: Option, + #[new(default)] + pub comment_upvotes: Option, + #[new(default)] + pub comment_downvotes: Option, } #[derive(Clone, Default)] @@ -95,8 +145,6 @@ pub struct LocalSiteInsertForm { #[cfg_attr(feature = "full", diesel(table_name = local_site))] pub struct LocalSiteUpdateForm { pub site_setup: Option, - pub enable_downvotes: Option, - pub enable_nsfw: Option, pub community_creation_admin_only: Option, pub require_email_verification: Option, pub application_question: Option>, @@ -114,4 +162,13 @@ pub struct LocalSiteUpdateForm { pub registration_mode: Option, pub reports_email_admins: Option, pub updated: Option>>, + pub 
federation_signed_fetch: Option, + pub default_post_listing_mode: Option, + pub default_post_sort_type: Option, + pub default_comment_sort_type: Option, + pub oauth_registration: Option, + pub post_upvotes: Option, + pub post_downvotes: Option, + pub comment_upvotes: Option, + pub comment_downvotes: Option, } diff --git a/crates/db_schema/src/source/local_site_rate_limit.rs b/crates/db_schema/src/source/local_site_rate_limit.rs index af7023f0f..f7f25f5c1 100644 --- a/crates/db_schema/src/source/local_site_rate_limit.rs +++ b/crates/db_schema/src/source/local_site_rate_limit.rs @@ -6,20 +6,20 @@ use serde::{Deserialize, Serialize}; use serde_with::skip_serializing_none; #[cfg(feature = "full")] use ts_rs::TS; -use typed_builder::TypedBuilder; #[skip_serializing_none] #[derive(PartialEq, Eq, Debug, Clone, Serialize, Deserialize)] -#[cfg_attr(feature = "full", derive(Queryable, Identifiable, TS))] +#[cfg_attr(feature = "full", derive(Queryable, Selectable, Identifiable, TS))] #[cfg_attr(feature = "full", diesel(table_name = local_site_rate_limit))] +#[cfg_attr(feature = "full", diesel(primary_key(local_site_id)))] #[cfg_attr( feature = "full", diesel(belongs_to(crate::source::local_site::LocalSite)) )] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] #[cfg_attr(feature = "full", ts(export))] /// Rate limits for your site. Given in count / length of time. pub struct LocalSiteRateLimit { - pub id: i32, pub local_site_id: LocalSiteId, pub message: i32, pub message_per_second: i32, @@ -39,26 +39,38 @@ pub struct LocalSiteRateLimit { pub import_user_settings_per_second: i32, } -#[derive(Clone, TypedBuilder)] -#[builder(field_defaults(default))] +#[derive(Clone, derive_new::new)] #[cfg_attr(feature = "full", derive(Insertable))] #[cfg_attr(feature = "full", diesel(table_name = local_site_rate_limit))] pub struct LocalSiteRateLimitInsertForm { - #[builder(!default)] pub local_site_id: LocalSiteId, + #[new(default)] pub message: Option, + #[new(default)] pub message_per_second: Option, + #[new(default)] pub post: Option, + #[new(default)] pub post_per_second: Option, + #[new(default)] pub register: Option, + #[new(default)] pub register_per_second: Option, + #[new(default)] pub image: Option, + #[new(default)] pub image_per_second: Option, + #[new(default)] pub comment: Option, + #[new(default)] pub comment_per_second: Option, + #[new(default)] pub search: Option, + #[new(default)] pub search_per_second: Option, + #[new(default)] pub import_user_settings: Option, + #[new(default)] pub import_user_settings_per_second: Option, } diff --git a/crates/db_schema/src/source/local_site_url_blocklist.rs b/crates/db_schema/src/source/local_site_url_blocklist.rs new file mode 100644 index 000000000..4ac0893ec --- /dev/null +++ b/crates/db_schema/src/source/local_site_url_blocklist.rs @@ -0,0 +1,28 @@ +#[cfg(feature = "full")] +use crate::schema::local_site_url_blocklist; +use chrono::{DateTime, Utc}; +use serde::{Deserialize, Serialize}; +use serde_with::skip_serializing_none; +#[cfg(feature = "full")] +use ts_rs::TS; + +#[skip_serializing_none] +#[derive(Clone, PartialEq, Eq, Debug, Serialize, Deserialize)] +#[cfg_attr(feature = "full", derive(Queryable, Selectable, Identifiable, TS))] +#[cfg_attr(feature = "full", diesel(table_name = local_site_url_blocklist))] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] +#[cfg_attr(feature = "full", ts(export))] +pub struct LocalSiteUrlBlocklist { + pub id: i32, + pub url: String, + pub published: DateTime, + pub 
updated: Option>, +} + +#[derive(Default, Clone)] +#[cfg_attr(feature = "full", derive(Insertable, AsChangeset))] +#[cfg_attr(feature = "full", diesel(table_name = local_site_url_blocklist))] +pub struct LocalSiteUrlBlocklistForm { + pub url: String, + pub updated: Option>, +} diff --git a/crates/db_schema/src/source/local_user.rs b/crates/db_schema/src/source/local_user.rs index 05e2e3a3f..6837a5d0c 100644 --- a/crates/db_schema/src/source/local_user.rs +++ b/crates/db_schema/src/source/local_user.rs @@ -2,40 +2,41 @@ use crate::schema::local_user; use crate::{ newtypes::{LocalUserId, PersonId}, + sensitive::SensitiveString, + CommentSortType, ListingType, PostListingMode, - SortType, + PostSortType, }; use serde::{Deserialize, Serialize}; use serde_with::skip_serializing_none; #[cfg(feature = "full")] use ts_rs::TS; -use typed_builder::TypedBuilder; #[skip_serializing_none] -#[derive(Clone, PartialEq, Eq, Debug, Serialize, Deserialize)] -#[cfg_attr(feature = "full", derive(Queryable, Identifiable, TS))] +#[derive(Clone, PartialEq, Eq, Debug, Serialize, Deserialize, Default)] +#[cfg_attr(feature = "full", derive(Queryable, Selectable, Identifiable, TS))] #[cfg_attr(feature = "full", diesel(table_name = local_user))] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] #[cfg_attr(feature = "full", ts(export))] +#[serde(default)] /// A local user. pub struct LocalUser { pub id: LocalUserId, /// The person_id for the local user. pub person_id: PersonId, #[serde(skip)] - pub password_encrypted: String, - pub email: Option, + pub password_encrypted: Option, + pub email: Option, /// Whether to show NSFW content. pub show_nsfw: bool, pub theme: String, - pub default_sort_type: SortType, + pub default_post_sort_type: PostSortType, pub default_listing_type: ListingType, pub interface_language: String, /// Whether to show avatars. pub show_avatars: bool, pub send_notifications_to_email: bool, - /// Whether to show comment / post scores. - pub show_scores: bool, /// Whether to show bot accounts. pub show_bot_accounts: bool, /// Whether to show read posts. @@ -45,58 +46,83 @@ pub struct LocalUser { /// Whether their registration application has been accepted. pub accepted_application: bool, #[serde(skip)] - pub totp_2fa_secret: Option, + pub totp_2fa_secret: Option, /// Open links in a new tab. pub open_links_in_new_tab: bool, pub blur_nsfw: bool, - pub auto_expand: bool, /// Whether infinite scroll is enabled. pub infinite_scroll_enabled: bool, /// Whether the person is an admin. pub admin: bool, + /// A post-view mode that changes how multiple post listings look. pub post_listing_mode: PostListingMode, pub totp_2fa_enabled: bool, /// Whether to allow keyboard navigation (for browsing and interacting with posts and comments). pub enable_keyboard_navigation: bool, - /// Whether user avatars and inline images in the UI that are gifs should be allowed to play or should be paused + /// Whether user avatars and inline images in the UI that are gifs should be allowed to play or + /// should be paused pub enable_animated_images: bool, /// Whether a user can send / receive private messages pub enable_private_messages: bool, + /// Whether to auto-collapse bot comments. 
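Aside: throughout this diff the insert forms drop TypedBuilder in favor of derive_new, with #[new(default)] on every optional column so that only the required columns become constructor arguments. Below is a minimal standalone sketch of that pattern, modelled on InstanceForm; it is a simplified stand-in rather than the real Lemmy type, and the concrete field types are assumed since the flattened diff text no longer shows the generic parameters.

use chrono::{DateTime, Utc};

// Simplified stand-in for an insert form using the derive_new pattern from this diff.
#[derive(Clone, derive_new::new)]
pub struct InstanceForm {
  // No `#[new(default)]`, so this is the only constructor argument.
  pub domain: String,
  #[new(default)]
  pub software: Option<String>,
  #[new(default)]
  pub version: Option<String>,
  #[new(default)]
  pub updated: Option<DateTime<Utc>>,
}

fn build_form() -> InstanceForm {
  // `new` takes only the required fields; defaulted fields start as `None`.
  let mut form = InstanceForm::new("lemmy.example.org".to_string());
  form.software = Some("lemmy".to_string());
  form.updated = Some(Utc::now());
  form
}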
+ pub collapse_bot_comments: bool, + pub default_comment_sort_type: CommentSortType, } -#[derive(Clone, TypedBuilder)] -#[builder(field_defaults(default))] +#[derive(Clone, derive_new::new)] #[cfg_attr(feature = "full", derive(Insertable))] #[cfg_attr(feature = "full", diesel(table_name = local_user))] pub struct LocalUserInsertForm { - #[builder(!default)] pub person_id: PersonId, - #[builder(!default)] - pub password_encrypted: String, + pub password_encrypted: Option, + #[new(default)] pub email: Option, + #[new(default)] pub show_nsfw: Option, + #[new(default)] pub theme: Option, - pub default_sort_type: Option, + #[new(default)] + pub default_post_sort_type: Option, + #[new(default)] pub default_listing_type: Option, + #[new(default)] pub interface_language: Option, + #[new(default)] pub show_avatars: Option, + #[new(default)] pub send_notifications_to_email: Option, + #[new(default)] pub show_bot_accounts: Option, - pub show_scores: Option, + #[new(default)] pub show_read_posts: Option, + #[new(default)] pub email_verified: Option, + #[new(default)] pub accepted_application: Option, + #[new(default)] pub totp_2fa_secret: Option>, + #[new(default)] pub open_links_in_new_tab: Option, + #[new(default)] pub blur_nsfw: Option, - pub auto_expand: Option, + #[new(default)] pub infinite_scroll_enabled: Option, + #[new(default)] pub admin: Option, + #[new(default)] pub post_listing_mode: Option, + #[new(default)] pub totp_2fa_enabled: Option, + #[new(default)] pub enable_keyboard_navigation: Option, + #[new(default)] pub enable_animated_images: Option, + #[new(default)] pub enable_private_messages: Option, + #[new(default)] + pub collapse_bot_comments: Option, + #[new(default)] + pub default_comment_sort_type: Option, } #[derive(Clone, Default)] @@ -107,20 +133,18 @@ pub struct LocalUserUpdateForm { pub email: Option>, pub show_nsfw: Option, pub theme: Option, - pub default_sort_type: Option, + pub default_post_sort_type: Option, pub default_listing_type: Option, pub interface_language: Option, pub show_avatars: Option, pub send_notifications_to_email: Option, pub show_bot_accounts: Option, - pub show_scores: Option, pub show_read_posts: Option, pub email_verified: Option, pub accepted_application: Option, pub totp_2fa_secret: Option>, pub open_links_in_new_tab: Option, pub blur_nsfw: Option, - pub auto_expand: Option, pub infinite_scroll_enabled: Option, pub admin: Option, pub post_listing_mode: Option, @@ -128,4 +152,6 @@ pub struct LocalUserUpdateForm { pub enable_keyboard_navigation: Option, pub enable_animated_images: Option, pub enable_private_messages: Option, + pub collapse_bot_comments: Option, + pub default_comment_sort_type: Option, } diff --git a/crates/db_schema/src/source/local_user_language.rs b/crates/db_schema/src/source/local_user_language.rs index b6129d0ce..83c666636 100644 --- a/crates/db_schema/src/source/local_user_language.rs +++ b/crates/db_schema/src/source/local_user_language.rs @@ -1,12 +1,12 @@ use crate::newtypes::{LanguageId, LocalUserId, LocalUserLanguageId}; -use serde::{Deserialize, Serialize}; - #[cfg(feature = "full")] use crate::schema::local_user_language; +use serde::{Deserialize, Serialize}; #[derive(Clone, PartialEq, Eq, Debug, Serialize, Deserialize)] -#[cfg_attr(feature = "full", derive(Queryable, Identifiable))] +#[cfg_attr(feature = "full", derive(Queryable, Selectable, Identifiable))] #[cfg_attr(feature = "full", diesel(table_name = local_user_language))] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] pub struct 
LocalUserLanguage { #[serde(skip)] pub id: LocalUserLanguageId, diff --git a/crates/db_schema/src/source/local_user_vote_display_mode.rs b/crates/db_schema/src/source/local_user_vote_display_mode.rs new file mode 100644 index 000000000..06a433034 --- /dev/null +++ b/crates/db_schema/src/source/local_user_vote_display_mode.rs @@ -0,0 +1,53 @@ +use crate::newtypes::LocalUserId; +#[cfg(feature = "full")] +use crate::schema::local_user_vote_display_mode; +use serde::{Deserialize, Serialize}; +use serde_with::skip_serializing_none; +#[cfg(feature = "full")] +use ts_rs::TS; + +#[skip_serializing_none] +#[derive(PartialEq, Eq, Debug, Clone, Default, Serialize, Deserialize)] +#[cfg_attr(feature = "full", derive(Queryable, Selectable, Identifiable, TS))] +#[cfg_attr(feature = "full", diesel(table_name = local_user_vote_display_mode))] +#[cfg_attr(feature = "full", diesel(primary_key(local_user_id)))] +#[cfg_attr( + feature = "full", + diesel(belongs_to(crate::source::local_site::LocalUser)) +)] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] +#[cfg_attr(feature = "full", ts(export))] +/// The vote display settings for your user. +pub struct LocalUserVoteDisplayMode { + #[serde(skip)] + pub local_user_id: LocalUserId, + pub score: bool, + pub upvotes: bool, + pub downvotes: bool, + pub upvote_percentage: bool, +} + +#[derive(Clone, derive_new::new)] +#[cfg_attr(feature = "full", derive(Insertable))] +#[cfg_attr(feature = "full", diesel(table_name = local_user_vote_display_mode))] +pub struct LocalUserVoteDisplayModeInsertForm { + pub local_user_id: LocalUserId, + #[new(default)] + pub score: Option, + #[new(default)] + pub upvotes: Option, + #[new(default)] + pub downvotes: Option, + #[new(default)] + pub upvote_percentage: Option, +} + +#[derive(Clone, Default)] +#[cfg_attr(feature = "full", derive(AsChangeset))] +#[cfg_attr(feature = "full", diesel(table_name = local_user_vote_display_mode))] +pub struct LocalUserVoteDisplayModeUpdateForm { + pub score: Option, + pub upvotes: Option, + pub downvotes: Option, + pub upvote_percentage: Option, +} diff --git a/crates/db_schema/src/source/login_token.rs b/crates/db_schema/src/source/login_token.rs index 45f74c41f..38aac33ef 100644 --- a/crates/db_schema/src/source/login_token.rs +++ b/crates/db_schema/src/source/login_token.rs @@ -1,6 +1,6 @@ -use crate::newtypes::LocalUserId; #[cfg(feature = "full")] use crate::schema::login_token; +use crate::{newtypes::LocalUserId, sensitive::SensitiveString}; use chrono::{DateTime, Utc}; use serde::{Deserialize, Serialize}; use serde_with::skip_serializing_none; @@ -10,14 +10,15 @@ use ts_rs::TS; /// Stores data related to a specific user login session. 
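Aside: most read-side structs in this diff also gain Selectable plus diesel(check_for_backend(diesel::pg::Pg)), which asks Diesel to verify at compile time that the struct's field types are compatible with the Postgres column types in the table! definition. A minimal sketch of what that buys, using an illustrative table rather than the real Lemmy schema:

use diesel::prelude::*;

// Illustrative table, not the real Lemmy schema.
diesel::table! {
  language (id) {
    id -> Int4,
    code -> Text,
    name -> Text,
  }
}

#[derive(Queryable, Selectable)]
#[diesel(table_name = language)]
// With this attribute, a mismatch such as declaring `id: String` below is
// rejected at compile time instead of failing at runtime during deserialization.
#[diesel(check_for_backend(diesel::pg::Pg))]
pub struct Language {
  pub id: i32,
  pub code: String,
  pub name: String,
}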
#[skip_serializing_none] #[derive(Clone, PartialEq, Eq, Debug, Serialize, Deserialize)] -#[cfg_attr(feature = "full", derive(Queryable, Identifiable, TS))] +#[cfg_attr(feature = "full", derive(Queryable, Selectable, Identifiable, TS))] #[cfg_attr(feature = "full", diesel(table_name = login_token))] +#[cfg_attr(feature = "full", diesel(primary_key(token)))] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] #[cfg_attr(feature = "full", ts(export))] pub struct LoginToken { - pub id: i32, /// Jwt token for this login #[serde(skip)] - pub token: String, + pub token: SensitiveString, pub user_id: LocalUserId, /// Time of login pub published: DateTime, @@ -30,7 +31,7 @@ pub struct LoginToken { #[cfg_attr(feature = "full", derive(Insertable, AsChangeset))] #[cfg_attr(feature = "full", diesel(table_name = login_token))] pub struct LoginTokenCreateForm { - pub token: String, + pub token: SensitiveString, pub user_id: LocalUserId, pub ip: Option, pub user_agent: Option, diff --git a/crates/db_schema/src/source/mod.rs b/crates/db_schema/src/source/mod.rs index 9879ef35f..377c1aaef 100644 --- a/crates/db_schema/src/source/mod.rs +++ b/crates/db_schema/src/source/mod.rs @@ -15,15 +15,20 @@ pub mod custom_emoji_keyword; pub mod email_verification; pub mod federation_allowlist; pub mod federation_blocklist; -pub mod image_upload; +pub mod federation_queue_state; +pub mod images; pub mod instance; pub mod instance_block; pub mod language; pub mod local_site; pub mod local_site_rate_limit; +pub mod local_site_url_blocklist; pub mod local_user; +pub mod local_user_vote_display_mode; pub mod login_token; pub mod moderator; +pub mod oauth_account; +pub mod oauth_provider; pub mod password_reset_request; pub mod person; pub mod person_block; @@ -40,9 +45,10 @@ pub mod tagline; /// Default value for columns like [community::Community.inbox_url] which are marked as serde(skip). /// /// This is necessary so they can be successfully deserialized from API responses, even though the -/// value is not sent by Lemmy. Necessary for crates which rely on Rust API such as lemmy-stats-crawler. +/// value is not sent by Lemmy. Necessary for crates which rely on Rust API such as +/// lemmy-stats-crawler. fn placeholder_apub_url() -> DbUrl { DbUrl(Box::new( - Url::parse("http://example.com").expect("parse placeholer url"), + Url::parse("http://example.com").expect("parse placeholder url"), )) } diff --git a/crates/db_schema/src/source/moderator.rs b/crates/db_schema/src/source/moderator.rs index 181bdbab7..c1f58ebc8 100644 --- a/crates/db_schema/src/source/moderator.rs +++ b/crates/db_schema/src/source/moderator.rs @@ -25,8 +25,9 @@ use ts_rs::TS; #[skip_serializing_none] #[derive(Clone, PartialEq, Eq, Debug, Serialize, Deserialize)] -#[cfg_attr(feature = "full", derive(Queryable, Identifiable, TS))] +#[cfg_attr(feature = "full", derive(Queryable, Selectable, Identifiable, TS))] #[cfg_attr(feature = "full", diesel(table_name = mod_remove_post))] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] #[cfg_attr(feature = "full", ts(export))] /// When a moderator removes a post. 
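Aside: LoginToken.token (and, later in the diff, password-reset tokens, private keys and the JWT secret) switches from String to SensitiveString. That type's definition is not part of this diff; the sketch below is only an illustrative stand-in for the general idea of a wrapper that keeps secrets out of Debug output, not the actual lemmy_db_schema::sensitive implementation.

use std::fmt;

// Illustrative stand-in, not the real SensitiveString from lemmy_db_schema.
#[derive(Clone, PartialEq, Eq)]
pub struct SensitiveString(String);

impl From<String> for SensitiveString {
  fn from(value: String) -> Self {
    SensitiveString(value)
  }
}

impl fmt::Debug for SensitiveString {
  fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
    // Redact the inner value so tokens never end up in logs via `{:?}`.
    f.write_str("SensitiveString(***)")
  }
}

fn example() {
  let token: SensitiveString = "very-secret-jwt".to_string().into();
  // Prints `SensitiveString(***)` rather than the secret itself.
  println!("{token:?}");
}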
pub struct ModRemovePost { @@ -48,8 +49,9 @@ pub struct ModRemovePostForm { } #[derive(Clone, PartialEq, Eq, Debug, Serialize, Deserialize)] -#[cfg_attr(feature = "full", derive(Queryable, Identifiable, TS))] +#[cfg_attr(feature = "full", derive(Queryable, Selectable, Identifiable, TS))] #[cfg_attr(feature = "full", diesel(table_name = mod_lock_post))] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] #[cfg_attr(feature = "full", ts(export))] /// When a moderator locks a post (prevents new comments being made). pub struct ModLockPost { @@ -69,8 +71,9 @@ pub struct ModLockPostForm { } #[derive(Clone, PartialEq, Eq, Debug, Serialize, Deserialize)] -#[cfg_attr(feature = "full", derive(Queryable, Identifiable, TS))] +#[cfg_attr(feature = "full", derive(Queryable, Selectable, Identifiable, TS))] #[cfg_attr(feature = "full", diesel(table_name = mod_feature_post))] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] #[cfg_attr(feature = "full", ts(export))] /// When a moderator features a post on a community (pins it to the top). pub struct ModFeaturePost { @@ -93,8 +96,9 @@ pub struct ModFeaturePostForm { #[skip_serializing_none] #[derive(Clone, PartialEq, Eq, Debug, Serialize, Deserialize)] -#[cfg_attr(feature = "full", derive(Queryable, Identifiable, TS))] +#[cfg_attr(feature = "full", derive(Queryable, Selectable, Identifiable, TS))] #[cfg_attr(feature = "full", diesel(table_name = mod_remove_comment))] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] #[cfg_attr(feature = "full", ts(export))] /// When a moderator removes a comment. pub struct ModRemoveComment { @@ -117,8 +121,9 @@ pub struct ModRemoveCommentForm { #[skip_serializing_none] #[derive(Clone, PartialEq, Eq, Debug, Serialize, Deserialize)] -#[cfg_attr(feature = "full", derive(Queryable, Identifiable, TS))] +#[cfg_attr(feature = "full", derive(Queryable, Selectable, Identifiable, TS))] #[cfg_attr(feature = "full", diesel(table_name = mod_remove_community))] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] #[cfg_attr(feature = "full", ts(export))] /// When a moderator removes a community. pub struct ModRemoveCommunity { @@ -141,8 +146,9 @@ pub struct ModRemoveCommunityForm { #[skip_serializing_none] #[derive(Clone, PartialEq, Eq, Debug, Serialize, Deserialize)] -#[cfg_attr(feature = "full", derive(Queryable, Identifiable, TS))] +#[cfg_attr(feature = "full", derive(Queryable, Selectable, Identifiable, TS))] #[cfg_attr(feature = "full", diesel(table_name = mod_ban_from_community))] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] #[cfg_attr(feature = "full", ts(export))] /// When someone is banned from a community. pub struct ModBanFromCommunity { @@ -169,8 +175,9 @@ pub struct ModBanFromCommunityForm { #[skip_serializing_none] #[derive(Clone, PartialEq, Eq, Debug, Serialize, Deserialize)] -#[cfg_attr(feature = "full", derive(Queryable, Identifiable, TS))] +#[cfg_attr(feature = "full", derive(Queryable, Selectable, Identifiable, TS))] #[cfg_attr(feature = "full", diesel(table_name = mod_ban))] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] #[cfg_attr(feature = "full", ts(export))] /// When someone is banned from the site. 
pub struct ModBan { @@ -194,8 +201,9 @@ pub struct ModHideCommunityForm { #[skip_serializing_none] #[derive(Clone, PartialEq, Eq, Debug, Serialize, Deserialize)] -#[cfg_attr(feature = "full", derive(Queryable, Identifiable, TS))] +#[cfg_attr(feature = "full", derive(Queryable, Selectable, Identifiable, TS))] #[cfg_attr(feature = "full", diesel(table_name = mod_hide_community))] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] #[cfg_attr(feature = "full", ts(export))] /// When a community is hidden from public view. pub struct ModHideCommunity { @@ -218,8 +226,9 @@ pub struct ModBanForm { } #[derive(Clone, PartialEq, Eq, Debug, Serialize, Deserialize)] -#[cfg_attr(feature = "full", derive(Queryable, Identifiable, TS))] +#[cfg_attr(feature = "full", derive(Queryable, Selectable, Identifiable, TS))] #[cfg_attr(feature = "full", diesel(table_name = mod_add_community))] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] #[cfg_attr(feature = "full", ts(export))] /// When someone is added as a community moderator. pub struct ModAddCommunity { @@ -241,8 +250,9 @@ pub struct ModAddCommunityForm { } #[derive(Clone, PartialEq, Eq, Debug, Serialize, Deserialize)] -#[cfg_attr(feature = "full", derive(Queryable, Identifiable, TS))] +#[cfg_attr(feature = "full", derive(Queryable, Selectable, Identifiable, TS))] #[cfg_attr(feature = "full", diesel(table_name = mod_transfer_community))] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] #[cfg_attr(feature = "full", ts(export))] /// When a moderator transfers a community to a new owner. pub struct ModTransferCommunity { @@ -262,8 +272,9 @@ pub struct ModTransferCommunityForm { } #[derive(Clone, PartialEq, Eq, Debug, Serialize, Deserialize)] -#[cfg_attr(feature = "full", derive(Queryable, Identifiable, TS))] +#[cfg_attr(feature = "full", derive(Queryable, Selectable, Identifiable, TS))] #[cfg_attr(feature = "full", diesel(table_name = mod_add))] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] #[cfg_attr(feature = "full", ts(export))] /// When someone is added as a site moderator. pub struct ModAdd { @@ -284,8 +295,9 @@ pub struct ModAddForm { #[skip_serializing_none] #[derive(Clone, PartialEq, Eq, Debug, Serialize, Deserialize)] -#[cfg_attr(feature = "full", derive(Queryable, Identifiable, TS))] +#[cfg_attr(feature = "full", derive(Queryable, Selectable, Identifiable, TS))] #[cfg_attr(feature = "full", diesel(table_name = admin_purge_person))] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] #[cfg_attr(feature = "full", ts(export))] /// When an admin purges a person. pub struct AdminPurgePerson { @@ -304,8 +316,9 @@ pub struct AdminPurgePersonForm { #[skip_serializing_none] #[derive(Clone, PartialEq, Eq, Debug, Serialize, Deserialize)] -#[cfg_attr(feature = "full", derive(Queryable, Identifiable, TS))] +#[cfg_attr(feature = "full", derive(Queryable, Selectable, Identifiable, TS))] #[cfg_attr(feature = "full", diesel(table_name = admin_purge_community))] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] #[cfg_attr(feature = "full", ts(export))] /// When an admin purges a community. 
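Aside: deriving Selectable on the modlog structs is what enables queries to use as_select(), so the selected column list always tracks the struct definition. The diff itself only adds the derive; the query below is a hedged usage sketch with an illustrative table and a synchronous connection for brevity, not the real modlog schema or the async pool used by Lemmy.

use diesel::prelude::*;

// Illustrative table and struct, not the real Lemmy modlog schema.
diesel::table! {
  mod_ban (id) {
    id -> Int4,
    reason -> Nullable<Text>,
    banned -> Bool,
  }
}

#[derive(Queryable, Selectable)]
#[diesel(table_name = mod_ban)]
#[diesel(check_for_backend(diesel::pg::Pg))]
pub struct ModBan {
  pub id: i32,
  pub reason: Option<String>,
  pub banned: bool,
}

fn load_bans(conn: &mut PgConnection) -> QueryResult<Vec<ModBan>> {
  // `as_select()` generates the column list from the struct, so adding or
  // reordering fields cannot silently desynchronize the query and the struct.
  mod_ban::table
    .filter(mod_ban::banned.eq(true))
    .select(ModBan::as_select())
    .load(conn)
}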
pub struct AdminPurgeCommunity { @@ -324,8 +337,9 @@ pub struct AdminPurgeCommunityForm { #[skip_serializing_none] #[derive(Clone, PartialEq, Eq, Debug, Serialize, Deserialize)] -#[cfg_attr(feature = "full", derive(Queryable, Identifiable, TS))] +#[cfg_attr(feature = "full", derive(Queryable, Selectable, Identifiable, TS))] #[cfg_attr(feature = "full", diesel(table_name = admin_purge_post))] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] #[cfg_attr(feature = "full", ts(export))] /// When an admin purges a post. pub struct AdminPurgePost { @@ -346,8 +360,9 @@ pub struct AdminPurgePostForm { #[skip_serializing_none] #[derive(Clone, PartialEq, Eq, Debug, Serialize, Deserialize)] -#[cfg_attr(feature = "full", derive(Queryable, Identifiable, TS))] +#[cfg_attr(feature = "full", derive(Queryable, Selectable, Identifiable, TS))] #[cfg_attr(feature = "full", diesel(table_name = admin_purge_comment))] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] #[cfg_attr(feature = "full", ts(export))] /// When an admin purges a comment. pub struct AdminPurgeComment { diff --git a/crates/db_schema/src/source/oauth_account.rs b/crates/db_schema/src/source/oauth_account.rs new file mode 100644 index 000000000..83b578e22 --- /dev/null +++ b/crates/db_schema/src/source/oauth_account.rs @@ -0,0 +1,32 @@ +use crate::newtypes::{LocalUserId, OAuthProviderId}; +#[cfg(feature = "full")] +use crate::schema::oauth_account; +use chrono::{DateTime, Utc}; +use serde::{Deserialize, Serialize}; +use serde_with::skip_serializing_none; +#[cfg(feature = "full")] +use ts_rs::TS; + +#[skip_serializing_none] +#[derive(Clone, PartialEq, Eq, Debug, Serialize, Deserialize)] +#[cfg_attr(feature = "full", derive(Queryable, Selectable, TS))] +#[cfg_attr(feature = "full", diesel(table_name = oauth_account))] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] +#[cfg_attr(feature = "full", ts(export))] +/// An auth account method. 
+pub struct OAuthAccount { + pub local_user_id: LocalUserId, + pub oauth_provider_id: OAuthProviderId, + pub oauth_user_id: String, + pub published: DateTime, + pub updated: Option>, +} + +#[derive(Debug, Clone, derive_new::new)] +#[cfg_attr(feature = "full", derive(Insertable, AsChangeset))] +#[cfg_attr(feature = "full", diesel(table_name = oauth_account))] +pub struct OAuthAccountInsertForm { + pub local_user_id: LocalUserId, + pub oauth_provider_id: OAuthProviderId, + pub oauth_user_id: String, +} diff --git a/crates/db_schema/src/source/oauth_provider.rs b/crates/db_schema/src/source/oauth_provider.rs new file mode 100644 index 000000000..75b989805 --- /dev/null +++ b/crates/db_schema/src/source/oauth_provider.rs @@ -0,0 +1,122 @@ +#[cfg(feature = "full")] +use crate::schema::oauth_provider; +use crate::{ + newtypes::{DbUrl, OAuthProviderId}, + sensitive::SensitiveString, +}; +use chrono::{DateTime, Utc}; +use serde::{ + ser::{SerializeStruct, Serializer}, + Deserialize, + Serialize, +}; +use serde_with::skip_serializing_none; +#[cfg(feature = "full")] +use ts_rs::TS; + +#[skip_serializing_none] +#[derive(Clone, PartialEq, Eq, Debug, Serialize, Deserialize)] +#[cfg_attr(feature = "full", derive(Queryable, Selectable, Identifiable, TS))] +#[cfg_attr(feature = "full", diesel(table_name = oauth_provider))] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] +#[cfg_attr(feature = "full", ts(export))] +/// oauth provider with client_secret - should never be sent to the client +pub struct OAuthProvider { + pub id: OAuthProviderId, + /// The OAuth 2.0 provider name displayed to the user on the Login page + pub display_name: String, + /// The issuer url of the OAUTH provider. + #[cfg_attr(feature = "full", ts(type = "string"))] + pub issuer: DbUrl, + /// The authorization endpoint is used to interact with the resource owner and obtain an + /// authorization grant. This is usually provided by the OAUTH provider. + #[cfg_attr(feature = "full", ts(type = "string"))] + pub authorization_endpoint: DbUrl, + /// The token endpoint is used by the client to obtain an access token by presenting its + /// authorization grant or refresh token. This is usually provided by the OAUTH provider. + #[cfg_attr(feature = "full", ts(type = "string"))] + pub token_endpoint: DbUrl, + /// The UserInfo Endpoint is an OAuth 2.0 Protected Resource that returns Claims about the + /// authenticated End-User. This is defined in the OIDC specification. + #[cfg_attr(feature = "full", ts(type = "string"))] + pub userinfo_endpoint: DbUrl, + /// The OAuth 2.0 claim containing the unique user ID returned by the provider. Usually this + /// should be set to "sub". + pub id_claim: String, + /// The client_id is provided by the OAuth 2.0 provider and is a unique identifier to this + /// service + pub client_id: String, + /// The client_secret is provided by the OAuth 2.0 provider and is used to authenticate this + /// service with the provider + #[serde(skip)] + pub client_secret: SensitiveString, + /// Lists the scopes requested from users. Users will have to grant access to the requested scope + /// at sign up. 
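Aside: the provider's client_secret is marked #[serde(skip)], and the PublicOAuthProvider wrapper introduced just below serializes only the handful of fields the login page needs. A standalone sketch of that whitelisting approach with a hand-written Serialize impl; the struct here uses simplified types and is not the real OAuthProvider.

use serde::ser::{Serialize, SerializeStruct, Serializer};

pub struct OAuthProvider {
  pub id: i32,
  pub display_name: String,
  pub client_id: String,
  // Never exposed: absent from the public wrapper below (the real struct also
  // marks it #[serde(skip)] so the full type cannot leak it either).
  pub client_secret: String,
}

pub struct PublicOAuthProvider(pub OAuthProvider);

impl Serialize for PublicOAuthProvider {
  fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
  where
    S: Serializer,
  {
    // Only an explicit whitelist of fields is written out.
    let mut state = serializer.serialize_struct("PublicOAuthProvider", 3)?;
    state.serialize_field("id", &self.0.id)?;
    state.serialize_field("display_name", &self.0.display_name)?;
    state.serialize_field("client_id", &self.0.client_id)?;
    state.end()
  }
}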
+ pub scopes: String, + /// Automatically sets email as verified on registration + pub auto_verify_email: bool, + /// Allows linking an OAUTH account to an existing user account by matching emails + pub account_linking_enabled: bool, + /// switch to enable or disable an oauth provider + pub enabled: bool, + pub published: DateTime, + pub updated: Option>, +} + +#[derive(Clone, PartialEq, Eq, Debug, Deserialize)] +#[serde(transparent)] +#[cfg_attr(feature = "full", derive(TS))] +#[cfg_attr(feature = "full", ts(export))] +// A subset of OAuthProvider used for public requests, for example to display the OAUTH buttons on +// the login page +pub struct PublicOAuthProvider(pub OAuthProvider); + +impl Serialize for PublicOAuthProvider { + fn serialize(&self, serializer: S) -> Result + where + S: Serializer, + { + let mut state = serializer.serialize_struct("PublicOAuthProvider", 5)?; + state.serialize_field("id", &self.0.id)?; + state.serialize_field("display_name", &self.0.display_name)?; + state.serialize_field("authorization_endpoint", &self.0.authorization_endpoint)?; + state.serialize_field("client_id", &self.0.client_id)?; + state.serialize_field("scopes", &self.0.scopes)?; + state.end() + } +} + +#[derive(Debug, Clone)] +#[cfg_attr(feature = "full", derive(Insertable, AsChangeset))] +#[cfg_attr(feature = "full", diesel(table_name = oauth_provider))] +pub struct OAuthProviderInsertForm { + pub display_name: String, + pub issuer: DbUrl, + pub authorization_endpoint: DbUrl, + pub token_endpoint: DbUrl, + pub userinfo_endpoint: DbUrl, + pub id_claim: String, + pub client_id: String, + pub client_secret: String, + pub scopes: String, + pub auto_verify_email: Option, + pub account_linking_enabled: Option, + pub enabled: Option, +} + +#[derive(Debug, Clone)] +#[cfg_attr(feature = "full", derive(Insertable, AsChangeset))] +#[cfg_attr(feature = "full", diesel(table_name = oauth_provider))] +pub struct OAuthProviderUpdateForm { + pub display_name: Option, + pub authorization_endpoint: Option, + pub token_endpoint: Option, + pub userinfo_endpoint: Option, + pub id_claim: Option, + pub client_secret: Option, + pub scopes: Option, + pub auto_verify_email: Option, + pub account_linking_enabled: Option, + pub enabled: Option, + pub updated: Option>>, +} diff --git a/crates/db_schema/src/source/password_reset_request.rs b/crates/db_schema/src/source/password_reset_request.rs index 648c7164c..dbc930b8e 100644 --- a/crates/db_schema/src/source/password_reset_request.rs +++ b/crates/db_schema/src/source/password_reset_request.rs @@ -1,14 +1,15 @@ -use crate::newtypes::LocalUserId; #[cfg(feature = "full")] use crate::schema::password_reset_request; +use crate::{newtypes::LocalUserId, sensitive::SensitiveString}; use chrono::{DateTime, Utc}; #[derive(PartialEq, Eq, Debug)] -#[cfg_attr(feature = "full", derive(Queryable, Identifiable))] +#[cfg_attr(feature = "full", derive(Queryable, Selectable, Identifiable))] #[cfg_attr(feature = "full", diesel(table_name = password_reset_request))] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] pub struct PasswordResetRequest { pub id: i32, - pub token: String, + pub token: SensitiveString, pub published: DateTime, pub local_user_id: LocalUserId, } @@ -17,5 +18,5 @@ pub struct PasswordResetRequest { #[cfg_attr(feature = "full", diesel(table_name = password_reset_request))] pub struct PasswordResetRequestForm { pub local_user_id: LocalUserId, - pub token: String, + pub token: SensitiveString, } diff --git a/crates/db_schema/src/source/person.rs 
b/crates/db_schema/src/source/person.rs index 82772fefa..c3aeeb4d7 100644 --- a/crates/db_schema/src/source/person.rs +++ b/crates/db_schema/src/source/person.rs @@ -2,6 +2,7 @@ use crate::schema::{person, person_follower}; use crate::{ newtypes::{DbUrl, InstanceId, PersonId}, + sensitive::SensitiveString, source::placeholder_apub_url, }; use chrono::{DateTime, Utc}; @@ -9,12 +10,12 @@ use serde::{Deserialize, Serialize}; use serde_with::skip_serializing_none; #[cfg(feature = "full")] use ts_rs::TS; -use typed_builder::TypedBuilder; #[skip_serializing_none] #[derive(Clone, PartialEq, Eq, Debug, Serialize, Deserialize)] -#[cfg_attr(feature = "full", derive(Queryable, Identifiable, TS))] +#[cfg_attr(feature = "full", derive(Queryable, Selectable, Identifiable, TS))] #[cfg_attr(feature = "full", diesel(table_name = person))] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] #[cfg_attr(feature = "full", ts(export))] /// A person. pub struct Person { @@ -35,7 +36,7 @@ pub struct Person { /// Whether the person is local to our site. pub local: bool, #[serde(skip)] - pub private_key: Option, + pub private_key: Option, #[serde(skip)] pub public_key: String, #[serde(skip)] @@ -47,8 +48,6 @@ pub struct Person { #[cfg_attr(feature = "full", ts(skip))] #[serde(skip, default = "placeholder_apub_url")] pub inbox_url: DbUrl, - #[serde(skip)] - pub shared_inbox_url: Option, /// A matrix id, usually given an @person:matrix.org pub matrix_user_id: Option, /// Whether the person is a bot account. @@ -58,33 +57,44 @@ pub struct Person { pub instance_id: InstanceId, } -#[derive(Clone, TypedBuilder)] -#[builder(field_defaults(default))] +#[derive(Clone, derive_new::new)] #[cfg_attr(feature = "full", derive(Insertable, AsChangeset))] #[cfg_attr(feature = "full", diesel(table_name = person))] pub struct PersonInsertForm { - #[builder(!default)] pub name: String, - #[builder(!default)] pub public_key: String, - #[builder(!default)] pub instance_id: InstanceId, + #[new(default)] pub display_name: Option, + #[new(default)] pub avatar: Option, + #[new(default)] pub banned: Option, + #[new(default)] pub published: Option>, + #[new(default)] pub updated: Option>, + #[new(default)] pub actor_id: Option, + #[new(default)] pub bio: Option, + #[new(default)] pub local: Option, + #[new(default)] pub private_key: Option, + #[new(default)] pub last_refreshed_at: Option>, + #[new(default)] pub banner: Option, + #[new(default)] pub deleted: Option, + #[new(default)] pub inbox_url: Option, - pub shared_inbox_url: Option, + #[new(default)] pub matrix_user_id: Option, + #[new(default)] pub bot_account: Option, + #[new(default)] pub ban_expires: Option>, } @@ -105,18 +115,21 @@ pub struct PersonUpdateForm { pub banner: Option>, pub deleted: Option, pub inbox_url: Option, - pub shared_inbox_url: Option>, pub matrix_user_id: Option>, pub bot_account: Option, pub ban_expires: Option>>, } #[derive(PartialEq, Eq, Debug)] -#[cfg_attr(feature = "full", derive(Identifiable, Queryable, Associations))] +#[cfg_attr( + feature = "full", + derive(Identifiable, Queryable, Selectable, Associations) +)] #[cfg_attr(feature = "full", diesel(belongs_to(crate::source::person::Person)))] #[cfg_attr(feature = "full", diesel(table_name = person_follower))] +#[cfg_attr(feature = "full", diesel(primary_key(follower_id, person_id)))] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] pub struct PersonFollower { - pub id: i32, pub person_id: PersonId, pub follower_id: PersonId, pub published: DateTime, diff 
--git a/crates/db_schema/src/source/person_block.rs b/crates/db_schema/src/source/person_block.rs index 3380fbfc3..43048fb39 100644 --- a/crates/db_schema/src/source/person_block.rs +++ b/crates/db_schema/src/source/person_block.rs @@ -1,15 +1,19 @@ -use crate::newtypes::{PersonBlockId, PersonId}; +use crate::newtypes::PersonId; #[cfg(feature = "full")] use crate::schema::person_block; use chrono::{DateTime, Utc}; use serde::{Deserialize, Serialize}; #[derive(Clone, PartialEq, Eq, Debug, Serialize, Deserialize)] -#[cfg_attr(feature = "full", derive(Queryable, Associations, Identifiable))] +#[cfg_attr( + feature = "full", + derive(Queryable, Selectable, Associations, Identifiable) +)] #[cfg_attr(feature = "full", diesel(belongs_to(crate::source::person::Person)))] #[cfg_attr(feature = "full", diesel(table_name = person_block))] +#[cfg_attr(feature = "full", diesel(primary_key(person_id, target_id)))] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] pub struct PersonBlock { - pub id: PersonBlockId, pub person_id: PersonId, pub target_id: PersonId, pub published: DateTime, diff --git a/crates/db_schema/src/source/person_mention.rs b/crates/db_schema/src/source/person_mention.rs index b9d7ccf8e..9c3005655 100644 --- a/crates/db_schema/src/source/person_mention.rs +++ b/crates/db_schema/src/source/person_mention.rs @@ -7,9 +7,13 @@ use serde::{Deserialize, Serialize}; use ts_rs::TS; #[derive(Clone, PartialEq, Eq, Debug, Serialize, Deserialize)] -#[cfg_attr(feature = "full", derive(Queryable, Associations, Identifiable, TS))] +#[cfg_attr( + feature = "full", + derive(Queryable, Selectable, Associations, Identifiable, TS) +)] #[cfg_attr(feature = "full", diesel(belongs_to(crate::source::comment::Comment)))] #[cfg_attr(feature = "full", diesel(table_name = person_mention))] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] #[cfg_attr(feature = "full", ts(export))] /// A person mention. pub struct PersonMention { diff --git a/crates/db_schema/src/source/post.rs b/crates/db_schema/src/source/post.rs index 72c32d4af..3819bd773 100644 --- a/crates/db_schema/src/source/post.rs +++ b/crates/db_schema/src/source/post.rs @@ -1,18 +1,18 @@ use crate::newtypes::{CommunityId, DbUrl, LanguageId, PersonId, PostId}; #[cfg(feature = "full")] -use crate::schema::{post, post_like, post_read, post_saved}; +use crate::schema::{post, post_hide, post_like, post_read, post_saved}; use chrono::{DateTime, Utc}; use serde::{Deserialize, Serialize}; use serde_with::skip_serializing_none; #[cfg(feature = "full")] use ts_rs::TS; -use typed_builder::TypedBuilder; #[skip_serializing_none] #[derive(Clone, PartialEq, Eq, Debug, Serialize, Deserialize)] -#[cfg_attr(feature = "full", derive(Queryable, Identifiable, TS))] +#[cfg_attr(feature = "full", derive(Queryable, Selectable, Identifiable, TS))] #[cfg_attr(feature = "full", diesel(table_name = post))] #[cfg_attr(feature = "full", ts(export))] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] /// A post. pub struct Post { pub id: PostId, @@ -54,36 +54,60 @@ pub struct Post { pub featured_community: bool, /// Whether the post is featured to its site. pub featured_local: bool, + pub url_content_type: Option, + /// An optional alt_text, usable for image posts. + pub alt_text: Option, + /// Time at which the post will be published. None means publish immediately. 
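Aside: several join-style tables in this diff (instance_block, person_follower, person_block, and the post_* tables further down) lose their surrogate id column and instead declare their natural composite key via diesel(primary_key(...)). A minimal illustrative sketch of that shape, assuming Diesel's chrono feature; the table here is a stand-in, not the real schema.

use chrono::{DateTime, Utc};
use diesel::prelude::*;

// Illustrative table with a two-column primary key.
diesel::table! {
  person_block (person_id, target_id) {
    person_id -> Int4,
    target_id -> Int4,
    published -> Timestamptz,
  }
}

#[derive(Queryable, Selectable, Identifiable)]
#[diesel(table_name = person_block)]
// The pair of columns is the identity of the row; no surrogate `id` is needed.
#[diesel(primary_key(person_id, target_id))]
#[diesel(check_for_backend(diesel::pg::Pg))]
pub struct PersonBlock {
  pub person_id: i32,
  pub target_id: i32,
  pub published: DateTime<Utc>,
}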
+ pub scheduled_publish_time: Option>, } -#[derive(Debug, Clone, TypedBuilder)] -#[builder(field_defaults(default))] +#[derive(Debug, Clone, derive_new::new)] #[cfg_attr(feature = "full", derive(Insertable, AsChangeset))] #[cfg_attr(feature = "full", diesel(table_name = post))] pub struct PostInsertForm { - #[builder(!default)] pub name: String, - #[builder(!default)] pub creator_id: PersonId, - #[builder(!default)] pub community_id: CommunityId, + #[new(default)] pub nsfw: Option, + #[new(default)] pub url: Option, + #[new(default)] pub body: Option, + #[new(default)] pub removed: Option, + #[new(default)] pub locked: Option, + #[new(default)] pub updated: Option>, + #[new(default)] pub published: Option>, + #[new(default)] pub deleted: Option, + #[new(default)] pub embed_title: Option, + #[new(default)] pub embed_description: Option, + #[new(default)] pub embed_video_url: Option, + #[new(default)] pub thumbnail_url: Option, + #[new(default)] pub ap_id: Option, + #[new(default)] pub local: Option, + #[new(default)] pub language_id: Option, + #[new(default)] pub featured_community: Option, + #[new(default)] pub featured_local: Option, + #[new(default)] + pub url_content_type: Option, + #[new(default)] + pub alt_text: Option, + #[new(default)] + pub scheduled_publish_time: Option>, } #[derive(Debug, Clone, Default)] @@ -108,14 +132,21 @@ pub struct PostUpdateForm { pub language_id: Option, pub featured_community: Option, pub featured_local: Option, + pub url_content_type: Option>, + pub alt_text: Option>, + pub scheduled_publish_time: Option>>, } #[derive(PartialEq, Eq, Debug)] -#[cfg_attr(feature = "full", derive(Identifiable, Queryable, Associations))] +#[cfg_attr( + feature = "full", + derive(Identifiable, Queryable, Selectable, Associations) +)] #[cfg_attr(feature = "full", diesel(belongs_to(crate::source::post::Post)))] #[cfg_attr(feature = "full", diesel(table_name = post_like))] +#[cfg_attr(feature = "full", diesel(primary_key(person_id, post_id)))] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] pub struct PostLike { - pub id: i32, pub post_id: PostId, pub person_id: PersonId, pub score: i16, @@ -132,11 +163,15 @@ pub struct PostLikeForm { } #[derive(PartialEq, Eq, Debug)] -#[cfg_attr(feature = "full", derive(Identifiable, Queryable, Associations))] +#[cfg_attr( + feature = "full", + derive(Identifiable, Queryable, Selectable, Associations) +)] #[cfg_attr(feature = "full", diesel(belongs_to(crate::source::post::Post)))] #[cfg_attr(feature = "full", diesel(table_name = post_saved))] +#[cfg_attr(feature = "full", diesel(primary_key(post_id, person_id)))] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] pub struct PostSaved { - pub id: i32, pub post_id: PostId, pub person_id: PersonId, pub published: DateTime, @@ -150,11 +185,15 @@ pub struct PostSavedForm { } #[derive(PartialEq, Eq, Debug)] -#[cfg_attr(feature = "full", derive(Identifiable, Queryable, Associations))] +#[cfg_attr( + feature = "full", + derive(Identifiable, Queryable, Selectable, Associations) +)] #[cfg_attr(feature = "full", diesel(belongs_to(crate::source::post::Post)))] #[cfg_attr(feature = "full", diesel(table_name = post_read))] +#[cfg_attr(feature = "full", diesel(primary_key(post_id, person_id)))] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] pub struct PostRead { - pub id: i32, pub post_id: PostId, pub person_id: PersonId, pub published: DateTime, @@ -166,3 +205,25 @@ pub(crate) struct PostReadForm { pub post_id: PostId, pub 
person_id: PersonId, } + +#[derive(PartialEq, Eq, Debug)] +#[cfg_attr( + feature = "full", + derive(Identifiable, Queryable, Selectable, Associations) +)] +#[cfg_attr(feature = "full", diesel(belongs_to(crate::source::post::Post)))] +#[cfg_attr(feature = "full", diesel(table_name = post_hide))] +#[cfg_attr(feature = "full", diesel(primary_key(post_id, person_id)))] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] +pub struct PostHide { + pub post_id: PostId, + pub person_id: PersonId, + pub published: DateTime, +} + +#[cfg_attr(feature = "full", derive(Insertable, AsChangeset))] +#[cfg_attr(feature = "full", diesel(table_name = post_hide))] +pub(crate) struct PostHideForm { + pub post_id: PostId, + pub person_id: PersonId, +} diff --git a/crates/db_schema/src/source/post_report.rs b/crates/db_schema/src/source/post_report.rs index 9f5f53d95..9aee9ed97 100644 --- a/crates/db_schema/src/source/post_report.rs +++ b/crates/db_schema/src/source/post_report.rs @@ -9,9 +9,13 @@ use ts_rs::TS; #[skip_serializing_none] #[derive(PartialEq, Eq, Serialize, Deserialize, Debug, Clone)] -#[cfg_attr(feature = "full", derive(Identifiable, Queryable, Associations, TS))] +#[cfg_attr( + feature = "full", + derive(Identifiable, Queryable, Selectable, Associations, TS) +)] #[cfg_attr(feature = "full", diesel(belongs_to(crate::source::post::Post)))] // Is this the right assoc? #[cfg_attr(feature = "full", diesel(table_name = post_report))] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] #[cfg_attr(feature = "full", ts(export))] /// A post report. pub struct PostReport { diff --git a/crates/db_schema/src/source/private_message.rs b/crates/db_schema/src/source/private_message.rs index 033bfbe20..8afaa14f1 100644 --- a/crates/db_schema/src/source/private_message.rs +++ b/crates/db_schema/src/source/private_message.rs @@ -6,16 +6,19 @@ use serde::{Deserialize, Serialize}; use serde_with::skip_serializing_none; #[cfg(feature = "full")] use ts_rs::TS; -use typed_builder::TypedBuilder; #[skip_serializing_none] #[derive(Clone, PartialEq, Eq, Debug, Serialize, Deserialize)] -#[cfg_attr(feature = "full", derive(Queryable, Associations, Identifiable, TS))] +#[cfg_attr( + feature = "full", + derive(Queryable, Selectable, Associations, Identifiable, TS) +)] #[cfg_attr( feature = "full", diesel(belongs_to(crate::source::person::Person, foreign_key = creator_id) ))] // Is this the right assoc? #[cfg_attr(feature = "full", diesel(table_name = private_message))] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] #[cfg_attr(feature = "full", ts(export))] /// A private message. 
pub struct PrivateMessage { @@ -31,22 +34,24 @@ pub struct PrivateMessage { pub local: bool, } -#[derive(Clone, TypedBuilder)] -#[builder(field_defaults(default))] +#[derive(Clone, derive_new::new)] #[cfg_attr(feature = "full", derive(Insertable, AsChangeset))] #[cfg_attr(feature = "full", diesel(table_name = private_message))] pub struct PrivateMessageInsertForm { - #[builder(!default)] pub creator_id: PersonId, - #[builder(!default)] pub recipient_id: PersonId, - #[builder(!default)] pub content: String, + #[new(default)] pub deleted: Option, + #[new(default)] pub read: Option, + #[new(default)] pub published: Option>, + #[new(default)] pub updated: Option>, + #[new(default)] pub ap_id: Option, + #[new(default)] pub local: Option, } diff --git a/crates/db_schema/src/source/private_message_report.rs b/crates/db_schema/src/source/private_message_report.rs index 0afc5b039..7b4c8c637 100644 --- a/crates/db_schema/src/source/private_message_report.rs +++ b/crates/db_schema/src/source/private_message_report.rs @@ -9,12 +9,16 @@ use ts_rs::TS; #[skip_serializing_none] #[derive(PartialEq, Eq, Serialize, Deserialize, Debug, Clone)] -#[cfg_attr(feature = "full", derive(Queryable, Associations, Identifiable, TS))] +#[cfg_attr( + feature = "full", + derive(Queryable, Selectable, Associations, Identifiable, TS) +)] #[cfg_attr( feature = "full", diesel(belongs_to(crate::source::private_message::PrivateMessage)) )] #[cfg_attr(feature = "full", diesel(table_name = private_message_report))] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] #[cfg_attr(feature = "full", ts(export))] /// The private message report. pub struct PrivateMessageReport { diff --git a/crates/db_schema/src/source/registration_application.rs b/crates/db_schema/src/source/registration_application.rs index 1ed78703b..2ac973f34 100644 --- a/crates/db_schema/src/source/registration_application.rs +++ b/crates/db_schema/src/source/registration_application.rs @@ -1,4 +1,4 @@ -use crate::newtypes::{LocalUserId, PersonId}; +use crate::newtypes::{LocalUserId, PersonId, RegistrationApplicationId}; #[cfg(feature = "full")] use crate::schema::registration_application; use chrono::{DateTime, Utc}; @@ -9,12 +9,13 @@ use ts_rs::TS; #[skip_serializing_none] #[derive(Clone, PartialEq, Eq, Debug, Serialize, Deserialize)] -#[cfg_attr(feature = "full", derive(Queryable, Identifiable, TS))] +#[cfg_attr(feature = "full", derive(Queryable, Selectable, Identifiable, TS))] #[cfg_attr(feature = "full", diesel(table_name = registration_application))] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] #[cfg_attr(feature = "full", ts(export))] /// A registration application. 
pub struct RegistrationApplication { - pub id: i32, + pub id: RegistrationApplicationId, pub local_user_id: LocalUserId, pub answer: String, pub admin_id: Option, diff --git a/crates/db_schema/src/source/secret.rs b/crates/db_schema/src/source/secret.rs index 8b955ec59..36c0b691b 100644 --- a/crates/db_schema/src/source/secret.rs +++ b/crates/db_schema/src/source/secret.rs @@ -1,10 +1,12 @@ #[cfg(feature = "full")] use crate::schema::secret; +use crate::sensitive::SensitiveString; #[derive(Clone)] -#[cfg_attr(feature = "full", derive(Queryable, Identifiable))] +#[cfg_attr(feature = "full", derive(Queryable, Selectable, Identifiable))] #[cfg_attr(feature = "full", diesel(table_name = secret))] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] pub struct Secret { pub id: i32, - pub jwt_secret: String, + pub jwt_secret: SensitiveString, } diff --git a/crates/db_schema/src/source/site.rs b/crates/db_schema/src/source/site.rs index 0e175dc71..0ec4043e4 100644 --- a/crates/db_schema/src/source/site.rs +++ b/crates/db_schema/src/source/site.rs @@ -1,17 +1,20 @@ -use crate::newtypes::{DbUrl, InstanceId, SiteId}; #[cfg(feature = "full")] use crate::schema::site; +use crate::{ + newtypes::{DbUrl, InstanceId, SiteId}, + sensitive::SensitiveString, +}; use chrono::{DateTime, Utc}; use serde::{Deserialize, Serialize}; use serde_with::skip_serializing_none; #[cfg(feature = "full")] use ts_rs::TS; -use typed_builder::TypedBuilder; #[skip_serializing_none] #[derive(PartialEq, Eq, Debug, Clone, Serialize, Deserialize)] -#[cfg_attr(feature = "full", derive(Queryable, Identifiable, TS))] +#[cfg_attr(feature = "full", derive(Queryable, Selectable, Identifiable, TS))] #[cfg_attr(feature = "full", diesel(table_name = site))] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] #[cfg_attr(feature = "full", ts(export))] /// The site. pub struct Site { @@ -33,30 +36,44 @@ pub struct Site { pub last_refreshed_at: DateTime, /// The site inbox pub inbox_url: DbUrl, - pub private_key: Option, + #[serde(skip)] + pub private_key: Option, + // TODO: mark as `serde(skip)` in next major release as its not needed for api pub public_key: String, pub instance_id: InstanceId, + /// If present, nsfw content is visible by default. Should be displayed by frontends/clients + /// when the site is first opened by a user. 
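Aside: RegistrationApplication.id changes from a bare i32 to the RegistrationApplicationId newtype. The newtype's definition lives in newtypes.rs and is not part of this diff; the sketch below only illustrates the strongly-typed-id idea, with assumed derives.

// Illustrative stand-ins; the real newtypes in lemmy_db_schema::newtypes also
// derive the Diesel and ts-rs glue they need.
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub struct RegistrationApplicationId(pub i32);

#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub struct PersonId(pub i32);

fn approve(application_id: RegistrationApplicationId, admin_id: PersonId) {
  // With distinct wrapper types, passing an admin id where an application id
  // is expected becomes a compile error instead of a silent logic bug.
  let _ = (application_id, admin_id);
}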
+ pub content_warning: Option, } -#[derive(Clone, TypedBuilder)] -#[builder(field_defaults(default))] +#[derive(Clone, derive_new::new)] #[cfg_attr(feature = "full", derive(Insertable, AsChangeset))] #[cfg_attr(feature = "full", diesel(table_name = site))] pub struct SiteInsertForm { - #[builder(!default)] pub name: String, - pub sidebar: Option, - pub updated: Option>, - pub icon: Option, - pub banner: Option, - pub description: Option, - pub actor_id: Option, - pub last_refreshed_at: Option>, - pub inbox_url: Option, - pub private_key: Option, - pub public_key: Option, - #[builder(!default)] pub instance_id: InstanceId, + #[new(default)] + pub sidebar: Option, + #[new(default)] + pub updated: Option>, + #[new(default)] + pub icon: Option, + #[new(default)] + pub banner: Option, + #[new(default)] + pub description: Option, + #[new(default)] + pub actor_id: Option, + #[new(default)] + pub last_refreshed_at: Option>, + #[new(default)] + pub inbox_url: Option, + #[new(default)] + pub private_key: Option, + #[new(default)] + pub public_key: Option, + #[new(default)] + pub content_warning: Option, } #[derive(Clone, Default)] @@ -66,7 +83,8 @@ pub struct SiteUpdateForm { pub name: Option, pub sidebar: Option>, pub updated: Option>>, - // when you want to null out a column, you have to send Some(None)), since sending None means you just don't want to update that column. + // when you want to null out a column, you have to send Some(None)), since sending None means you + // just don't want to update that column. pub icon: Option>, pub banner: Option>, pub description: Option>, @@ -75,4 +93,5 @@ pub struct SiteUpdateForm { pub inbox_url: Option, pub private_key: Option>, pub public_key: Option, + pub content_warning: Option>, } diff --git a/crates/db_schema/src/source/tagline.rs b/crates/db_schema/src/source/tagline.rs index 564c6dc80..05f7e0520 100644 --- a/crates/db_schema/src/source/tagline.rs +++ b/crates/db_schema/src/source/tagline.rs @@ -1,4 +1,3 @@ -use crate::newtypes::LocalSiteId; #[cfg(feature = "full")] use crate::schema::tagline; use chrono::{DateTime, Utc}; @@ -9,17 +8,13 @@ use ts_rs::TS; #[skip_serializing_none] #[derive(PartialEq, Eq, Debug, Clone, Serialize, Deserialize)] -#[cfg_attr(feature = "full", derive(Queryable, Associations, Identifiable, TS))] +#[cfg_attr(feature = "full", derive(Queryable, Selectable, Identifiable, TS))] #[cfg_attr(feature = "full", diesel(table_name = tagline))] -#[cfg_attr( - feature = "full", - diesel(belongs_to(crate::source::local_site::LocalSite)) -)] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] #[cfg_attr(feature = "full", ts(export))] /// A tagline, shown at the top of your site. 
pub struct Tagline { pub id: i32, - pub local_site_id: LocalSiteId, pub content: String, pub published: DateTime, pub updated: Option>, @@ -28,8 +23,14 @@ pub struct Tagline { #[derive(Clone, Default)] #[cfg_attr(feature = "full", derive(Insertable, AsChangeset))] #[cfg_attr(feature = "full", diesel(table_name = tagline))] -pub struct TaglineForm { - pub local_site_id: LocalSiteId, +pub struct TaglineInsertForm { pub content: String, - pub updated: Option>, +} + +#[derive(Clone, Default)] +#[cfg_attr(feature = "full", derive(Insertable, AsChangeset))] +#[cfg_attr(feature = "full", diesel(table_name = tagline))] +pub struct TaglineUpdateForm { + pub content: String, + pub updated: DateTime, } diff --git a/crates/db_schema/src/traits.rs b/crates/db_schema/src/traits.rs index e58319c0b..74f5ea009 100644 --- a/crates/db_schema/src/traits.rs +++ b/crates/db_schema/src/traits.rs @@ -24,8 +24,8 @@ pub type Find = dsl::Find<::Table, ::IdType>; pub type PrimaryKey = <::Table as Table>::PrimaryKey; -// Trying to create default implementations for `create` and `update` results in a lifetime mess and weird compile errors. -// https://github.com/rust-lang/rust/issues/102211 +// Trying to create default implementations for `create` and `update` results in a lifetime mess and +// weird compile errors. https://github.com/rust-lang/rust/issues/102211 #[async_trait] pub trait Crud: HasTable + Sized where @@ -45,10 +45,11 @@ where async fn read(pool: &mut DbPool<'_>, id: Self::IdType) -> Result { let query: Find = Self::table().find(id); let conn = &mut *get_conn(pool).await?; - query.first::(conn).await + query.first(conn).await } - /// when you want to null out a column, you have to send Some(None)), since sending None means you just don't want to update that column. + /// when you want to null out a column, you have to send Some(None)), since sending None means you + /// just don't want to update that column. 
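Aside: the Option-of-Option convention spelled out in these comments means: None leaves the column untouched, Some(None) writes NULL, and Some(Some(v)) writes a new value. A small plain-Rust sketch of an update-form field under that convention (no Diesel involved, just the three-way meaning):

/// Mirror of the update-form convention described above: `None` = leave the
/// column alone, `Some(None)` = write NULL, `Some(Some(v))` = write the value.
fn describe_update(sidebar: Option<Option<String>>) -> &'static str {
  match sidebar {
    None => "column not touched by this update",
    Some(None) => "column set to NULL (erased)",
    Some(Some(_)) => "column set to the new value",
  }
}

fn example() {
  assert_eq!(describe_update(None), "column not touched by this update");
  assert_eq!(describe_update(Some(None)), "column set to NULL (erased)");
  assert_eq!(
    describe_update(Some(Some("new sidebar".to_string()))),
    "column set to the new value"
  );
}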
async fn update( pool: &mut DbPool<'_>, id: Self::IdType, @@ -144,6 +145,7 @@ pub trait Blockable { pub trait Reportable { type Form; type IdType; + type ObjectIdType; async fn report(pool: &mut DbPool<'_>, form: &Self::Form) -> Result where Self: Sized; @@ -152,6 +154,13 @@ pub trait Reportable { report_id: Self::IdType, resolver_id: PersonId, ) -> Result + where + Self: Sized; + async fn resolve_all_for_object( + pool: &mut DbPool<'_>, + comment_id_: Self::ObjectIdType, + by_resolver_id: PersonId, + ) -> Result where Self: Sized; async fn unresolve( @@ -177,14 +186,14 @@ pub trait ApubActor { pool: &mut DbPool<'_>, actor_name: &str, include_deleted: bool, - ) -> Result + ) -> Result, Error> where Self: Sized; async fn read_from_name_and_domain( pool: &mut DbPool<'_>, actor_name: &str, protocol_domain: &str, - ) -> Result + ) -> Result, Error> where Self: Sized; } diff --git a/crates/db_schema/src/utils.rs b/crates/db_schema/src/utils.rs index 7e83569a7..1e56563bc 100644 --- a/crates/db_schema/src/utils.rs +++ b/crates/db_schema/src/utils.rs @@ -1,60 +1,68 @@ -use crate::{ - diesel::Connection, - diesel_migrations::MigrationHarness, - newtypes::DbUrl, - CommentSortType, - SortType, -}; -use activitypub_federation::{fetch::object_id::ObjectId, traits::Object}; -use chrono::{DateTime, Utc}; +use crate::{newtypes::DbUrl, CommentSortType, PostSortType}; +use chrono::{DateTime, TimeDelta, Utc}; use deadpool::Runtime; use diesel::{ - backend::Backend, - deserialize::FromSql, helper_types::AsExprOf, pg::Pg, - result::{ConnectionError, ConnectionResult, Error as DieselError, Error::QueryBuilderError}, - serialize::{Output, ToSql}, - sql_types::{Text, Timestamptz}, + query_builder::{Query, QueryFragment}, + query_dsl::methods::LimitDsl, + result::{ + ConnectionError, + ConnectionResult, + Error::{self as DieselError, QueryBuilderError}, + }, + sql_types::{self, Timestamptz}, IntoSql, - PgConnection, }; use diesel_async::{ pg::AsyncPgConnection, pooled_connection::{ - deadpool::{Object as PooledConnection, Pool}, + deadpool::{Hook, HookError, Object as PooledConnection, Pool}, AsyncDieselConnectionManager, + ManagerConfig, }, + AsyncConnection, + RunQueryDsl, }; -use diesel_migrations::EmbeddedMigrations; use futures_util::{future::BoxFuture, Future, FutureExt}; +use i_love_jesus::CursorKey; use lemmy_utils::{ - error::{LemmyError, LemmyErrorExt, LemmyErrorType}, - settings::structs::Settings, + error::{LemmyErrorExt, LemmyErrorType, LemmyResult}, + settings::SETTINGS, + utils::validation::clean_url, }; -use once_cell::sync::Lazy; use regex::Regex; use rustls::{ - client::{ServerCertVerified, ServerCertVerifier}, - ServerName, + client::danger::{ + DangerousClientConfigBuilder, + HandshakeSignatureValid, + ServerCertVerified, + ServerCertVerifier, + }, + crypto::{self, verify_tls12_signature, verify_tls13_signature}, + pki_types::{CertificateDer, ServerName, UnixTime}, + ClientConfig, + DigitallySignedStruct, + SignatureScheme, }; use std::{ - env, - env::VarError, ops::{Deref, DerefMut}, - sync::Arc, - time::{Duration, SystemTime}, + sync::{Arc, LazyLock}, + time::Duration, }; -use tracing::{error, info}; +use tracing::error; use url::Url; const FETCH_LIMIT_DEFAULT: i64 = 10; pub const FETCH_LIMIT_MAX: i64 = 50; -const POOL_TIMEOUT: Option = Some(Duration::from_secs(5)); +pub const SITEMAP_LIMIT: i64 = 50000; +pub const SITEMAP_DAYS: Option = TimeDelta::try_days(31); +pub const RANK_DEFAULT: f64 = 0.0001; pub type ActualDbPool = Pool; -/// References a pool or connection. 
Functions must take `&mut DbPool<'_>` to allow implicit reborrowing. +/// References a pool or connection. Functions must take `&mut DbPool<'_>` to allow implicit +/// reborrowing. /// /// https://github.com/rust-lang/rfcs/issues/1403 pub enum DbPool<'a> { @@ -94,7 +102,8 @@ impl<'a> DerefMut for DbConn<'a> { } } -// Allows functions that take `DbPool<'_>` to be called in a transaction by passing `&mut conn.into()` +// Allows functions that take `DbPool<'_>` to be called in a transaction by passing `&mut +// conn.into()` impl<'a> From<&'a mut AsyncPgConnection> for DbPool<'a> { fn from(value: &'a mut AsyncPgConnection) -> Self { DbPool::Conn(value) @@ -113,11 +122,13 @@ impl<'a> From<&'a ActualDbPool> for DbPool<'a> { } } -/// Runs multiple async functions that take `&mut DbPool<'_>` as input and return `Result`. Only works when the `futures` crate is listed in `Cargo.toml`. +/// Runs multiple async functions that take `&mut DbPool<'_>` as input and return `Result`. Only +/// works when the `futures` crate is listed in `Cargo.toml`. /// /// `$pool` is the value given to each function. /// -/// A `Result` is returned (not in a `Future`, so don't use `.await`). The `Ok` variant contains a tuple with the values returned by the given functions. +/// A `Result` is returned (not in a `Future`, so don't use `.await`). The `Ok` variant contains a +/// tuple with the values returned by the given functions. /// /// The functions run concurrently if `$pool` has the `DbPool::Pool` variant. #[macro_export] @@ -149,12 +160,92 @@ macro_rules! try_join_with_pool { }}; } -pub fn get_database_url_from_env() -> Result { - env::var("LEMMY_DATABASE_URL") +pub struct ReverseTimestampKey(pub K); + +impl CursorKey for ReverseTimestampKey +where + K: CursorKey, +{ + type SqlType = sql_types::BigInt; + type CursorValue = functions::reverse_timestamp_sort::HelperType; + type SqlValue = functions::reverse_timestamp_sort::HelperType; + + fn get_cursor_value(cursor: &C) -> Self::CursorValue { + functions::reverse_timestamp_sort(K::get_cursor_value(cursor)) + } + + fn get_sql_value() -> Self::SqlValue { + functions::reverse_timestamp_sort(K::get_sql_value()) + } +} + +/// Includes an SQL comment before `T`, which can be used to label auto_explain output +#[derive(QueryId)] +pub struct Commented { + comment: String, + inner: T, +} + +impl Commented { + pub fn new(inner: T) -> Self { + Commented { + comment: String::new(), + inner, + } + } + + /// Adds `text` to the comment if `condition` is true + pub fn text_if(mut self, text: &str, condition: bool) -> Self { + if condition { + if !self.comment.is_empty() { + self.comment.push_str(", "); + } + self.comment.push_str(text); + } + self + } + + /// Adds `text` to the comment + pub fn text(self, text: &str) -> Self { + self.text_if(text, true) + } +} + +impl Query for Commented { + type SqlType = T::SqlType; +} + +impl> QueryFragment for Commented { + fn walk_ast<'b>( + &'b self, + mut out: diesel::query_builder::AstPass<'_, 'b, Pg>, + ) -> Result<(), DieselError> { + for line in self.comment.lines() { + out.push_sql("\n-- "); + out.push_sql(line); + } + out.push_sql("\n"); + self.inner.walk_ast(out.reborrow()) + } +} + +impl LimitDsl for Commented { + type Output = Commented; + + fn limit(self, limit: i64) -> Self::Output { + Commented { + comment: self.comment, + inner: self.inner.limit(limit), + } + } } pub fn fuzzy_search(q: &str) -> String { - let replaced = q.replace('%', "\\%").replace('_', "\\_").replace(' ', "%"); + let replaced = q + .replace('\\', "\\\\") + 
.replace('%', "\\%") + .replace('_', "\\_") + .replace(' ', "%"); format!("%{replaced}%") } @@ -166,9 +257,8 @@ pub fn limit_and_offset( Some(page) => { if page < 1 { return Err(QueryBuilderError("Page is < 1".into())); - } else { - page } + page } None => 1, }; @@ -178,9 +268,8 @@ pub fn limit_and_offset( return Err(QueryBuilderError( format!("Fetch limit is > {FETCH_LIMIT_MAX}").into(), )); - } else { - limit } + limit } None => FETCH_LIMIT_DEFAULT, }; @@ -198,226 +287,246 @@ pub fn is_email_regex(test: &str) -> bool { EMAIL_REGEX.is_match(test) } -pub fn diesel_option_overwrite(opt: Option) -> Option> { +/// Takes an API optional text input, and converts it to an optional diesel DB update. +pub fn diesel_string_update(opt: Option<&str>) -> Option> { match opt { // An empty string is an erase - Some(unwrapped) => { - if !unwrapped.eq("") { - Some(Some(unwrapped)) - } else { - Some(None) - } - } + Some("") => Some(None), + Some(str) => Some(Some(str.into())), None => None, } } -pub fn diesel_option_overwrite_to_url( - opt: &Option, -) -> Result>, LemmyError> { - match opt.as_ref().map(String::as_str) { +/// Takes an API optional text input, and converts it to an optional diesel DB update (for non +/// nullable properties). +pub fn diesel_required_string_update(opt: Option<&str>) -> Option { + match opt { + // An empty string is no change + Some("") => None, + Some(str) => Some(str.into()), + None => None, + } +} + +/// Takes an optional API URL-type input, and converts it to an optional diesel DB update. +/// Also cleans the url params. +pub fn diesel_url_update(opt: Option<&str>) -> LemmyResult>> { + match opt { // An empty string is an erase Some("") => Ok(Some(None)), Some(str_url) => Url::parse(str_url) - .map(|u| Some(Some(u.into()))) + .map(|u| Some(Some(clean_url(&u).into()))) .with_lemmy_type(LemmyErrorType::InvalidUrl), None => Ok(None), } } -pub fn diesel_option_overwrite_to_url_create( - opt: &Option, -) -> Result, LemmyError> { - match opt.as_ref().map(String::as_str) { - // An empty string is nothing +/// Takes an optional API URL-type input, and converts it to an optional diesel DB update (for non +/// nullable properties). Also cleans the url params. 
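// A standalone sketch of the semantics shared by the update helpers above: None is "no
// change", an empty string is "erase" (or "no change" for the required variants), and
// anything else overwrites. The body mirrors diesel_string_update, and the asserts mirror
// the unit tests near the end of this file.
fn string_update(opt: Option<&str>) -> Option<Option<String>> {
  match opt {
    Some("") => Some(None),                // erase: write NULL
    Some(text) => Some(Some(text.into())), // overwrite with the new value
    None => None,                          // leave the column unchanged
  }
}

fn string_update_examples() {
  assert_eq!(string_update(None), None);
  assert_eq!(string_update(Some("")), Some(None));
  assert_eq!(string_update(Some("test")), Some(Some("test".to_string())));
}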
+pub fn diesel_required_url_update(opt: Option<&str>) -> LemmyResult> { + match opt { + // An empty string is no change Some("") => Ok(None), Some(str_url) => Url::parse(str_url) - .map(|u| Some(u.into())) + .map(|u| Some(clean_url(&u).into())) .with_lemmy_type(LemmyErrorType::InvalidUrl), None => Ok(None), } } -async fn build_db_pool_settings_opt( - settings: Option<&Settings>, -) -> Result { - let db_url = get_database_url(settings); - let pool_size = settings.map(|s| s.database.pool_size).unwrap_or(5); - // We only support TLS with sslmode=require currently - let tls_enabled = db_url.contains("sslmode=require"); - let manager = if tls_enabled { - // diesel-async does not support any TLS connections out of the box, so we need to manually - // provide a setup function which handles creating the connection - AsyncDieselConnectionManager::::new_with_setup(&db_url, establish_connection) - } else { - AsyncDieselConnectionManager::::new(&db_url) - }; - let pool = Pool::builder(manager) - .max_size(pool_size) - .wait_timeout(POOL_TIMEOUT) - .create_timeout(POOL_TIMEOUT) - .recycle_timeout(POOL_TIMEOUT) - .runtime(Runtime::Tokio1) - .build()?; - - // If there's no settings, that means its a unit test, and migrations need to be run - if settings.is_none() { - run_migrations(&db_url); +/// Takes an optional API URL-type input, and converts it to an optional diesel DB create. +/// Also cleans the url params. +pub fn diesel_url_create(opt: Option<&str>) -> LemmyResult> { + match opt { + Some(str_url) => Url::parse(str_url) + .map(|u| Some(clean_url(&u).into())) + .with_lemmy_type(LemmyErrorType::InvalidUrl), + None => Ok(None), } - - Ok(pool) } fn establish_connection(config: &str) -> BoxFuture> { let fut = async { - let rustls_config = rustls::ClientConfig::builder() - .with_safe_defaults() + // We only support TLS with sslmode=require currently + let mut conn = if config.contains("sslmode=require") { + let rustls_config = DangerousClientConfigBuilder { + cfg: ClientConfig::builder(), + } .with_custom_certificate_verifier(Arc::new(NoCertVerifier {})) .with_no_client_auth(); - let tls = tokio_postgres_rustls::MakeRustlsConnect::new(rustls_config); - let (client, conn) = tokio_postgres::connect(config, tls) - .await - .map_err(|e| ConnectionError::BadConnection(e.to_string()))?; - tokio::spawn(async move { - if let Err(e) = conn.await { - error!("Database connection failed: {e}"); - } - }); - AsyncPgConnection::try_from(client).await + let tls = tokio_postgres_rustls::MakeRustlsConnect::new(rustls_config); + let (client, conn) = tokio_postgres::connect(config, tls) + .await + .map_err(|e| ConnectionError::BadConnection(e.to_string()))?; + tokio::spawn(async move { + if let Err(e) = conn.await { + error!("Database connection failed: {e}"); + } + }); + AsyncPgConnection::try_from(client).await? + } else { + AsyncPgConnection::establish(config).await? 
+ }; + + diesel::select(( + // Change geqo_threshold back to default value if it was changed, so it's higher than the + // collapse limits + functions::set_config("geqo_threshold", "12", false), + // Change collapse limits from 8 to 11 so the query planner can find a better table join + // order for more complicated queries + functions::set_config("from_collapse_limit", "11", false), + functions::set_config("join_collapse_limit", "11", false), + // Set `lemmy.protocol_and_hostname` so triggers can use it + functions::set_config( + "lemmy.protocol_and_hostname", + SETTINGS.get_protocol_and_hostname(), + false, + ), + )) + .execute(&mut conn) + .await + .map_err(ConnectionError::CouldntSetupConfiguration)?; + Ok(conn) }; fut.boxed() } +#[derive(Debug)] struct NoCertVerifier {} impl ServerCertVerifier for NoCertVerifier { fn verify_server_cert( &self, - _end_entity: &rustls::Certificate, - _intermediates: &[rustls::Certificate], + _end_entity: &CertificateDer, + _intermediates: &[CertificateDer], _server_name: &ServerName, - _scts: &mut dyn Iterator, - _ocsp_response: &[u8], - _now: SystemTime, + _ocsp: &[u8], + _now: UnixTime, ) -> Result { // Will verify all (even invalid) certs without any checks (sslmode=require) Ok(ServerCertVerified::assertion()) } + + fn verify_tls12_signature( + &self, + message: &[u8], + cert: &CertificateDer, + dss: &DigitallySignedStruct, + ) -> Result { + verify_tls12_signature( + message, + cert, + dss, + &crypto::ring::default_provider().signature_verification_algorithms, + ) + } + + fn verify_tls13_signature( + &self, + message: &[u8], + cert: &CertificateDer, + dss: &DigitallySignedStruct, + ) -> Result { + verify_tls13_signature( + message, + cert, + dss, + &crypto::ring::default_provider().signature_verification_algorithms, + ) + } + + fn supported_verify_schemes(&self) -> Vec { + crypto::ring::default_provider() + .signature_verification_algorithms + .supported_schemes() + } } -pub const MIGRATIONS: EmbeddedMigrations = embed_migrations!(); +pub async fn build_db_pool() -> LemmyResult { + let db_url = SETTINGS.get_database_url(); + // diesel-async does not support any TLS connections out of the box, so we need to manually + // provide a setup function which handles creating the connection + let mut config = ManagerConfig::default(); + config.custom_setup = Box::new(establish_connection); + let manager = AsyncDieselConnectionManager::::new_with_config(&db_url, config); + let pool = Pool::builder(manager) + .max_size(SETTINGS.database.pool_size) + .runtime(Runtime::Tokio1) + // Limit connection age to prevent use of prepared statements that have query plans based on + // very old statistics + .pre_recycle(Hook::sync_fn(|_conn, metrics| { + // Preventing the first recycle can cause an infinite loop when trying to get a new connection + // from the pool + let conn_was_used = metrics.recycled.is_some(); + if metrics.age() > Duration::from_secs(3 * 24 * 60 * 60) && conn_was_used { + Err(HookError::Continue(None)) + } else { + Ok(()) + } + })) + .build()?; -pub fn run_migrations(db_url: &str) { - // Needs to be a sync connection - let mut conn = - PgConnection::establish(db_url).unwrap_or_else(|e| panic!("Error connecting to {db_url}: {e}")); - info!("Running Database migrations (This may take a long time)..."); - let _ = &mut conn - .run_pending_migrations(MIGRATIONS) - .unwrap_or_else(|e| panic!("Couldn't run DB Migrations: {e}")); - info!("Database migrations complete."); -} + crate::schema_setup::run(&db_url)?; -pub async fn build_db_pool(settings: &Settings) 
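// A standalone sketch of the recycle decision made by the pre_recycle hook above. The
// three-day cutoff and the "was this connection recycled before" check come from the hook;
// the helper name is invented for illustration. Dropping only previously used connections
// avoids the infinite loop mentioned in the hook's comment, while still retiring
// connections whose prepared statements carry query plans built on stale statistics.
use std::time::Duration;

const MAX_CONN_AGE: Duration = Duration::from_secs(3 * 24 * 60 * 60); // three days

fn should_drop_connection(age: Duration, was_recycled_before: bool) -> bool {
  age > MAX_CONN_AGE && was_recycled_before
}

fn recycle_examples() {
  assert!(!should_drop_connection(Duration::from_secs(60), true)); // young: keep
  assert!(!should_drop_connection(Duration::from_secs(4 * 24 * 60 * 60), false)); // old but never used: keep
  assert!(should_drop_connection(Duration::from_secs(4 * 24 * 60 * 60), true)); // old and used: drop
}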
-> Result { - build_db_pool_settings_opt(Some(settings)).await + Ok(pool) } pub async fn build_db_pool_for_tests() -> ActualDbPool { - build_db_pool_settings_opt(None) - .await - .expect("db pool missing") -} - -pub fn get_database_url(settings: Option<&Settings>) -> String { - // The env var should override anything in the settings config - match get_database_url_from_env() { - Ok(url) => url, - Err(e) => match settings { - Some(settings) => settings.get_database_url(), - None => panic!("Failed to read database URL from env var LEMMY_DATABASE_URL: {e}"), - }, - } + build_db_pool().await.expect("db pool missing") } pub fn naive_now() -> DateTime { - chrono::prelude::Utc::now() + Utc::now() } -pub fn post_to_comment_sort_type(sort: SortType) -> CommentSortType { +pub fn post_to_comment_sort_type(sort: PostSortType) -> CommentSortType { + use PostSortType::*; match sort { - SortType::Active | SortType::Hot | SortType::Scaled => CommentSortType::Hot, - SortType::New | SortType::NewComments | SortType::MostComments => CommentSortType::New, - SortType::Old => CommentSortType::Old, - SortType::Controversial => CommentSortType::Controversial, - SortType::TopHour - | SortType::TopSixHour - | SortType::TopTwelveHour - | SortType::TopDay - | SortType::TopAll - | SortType::TopWeek - | SortType::TopYear - | SortType::TopMonth - | SortType::TopThreeMonths - | SortType::TopSixMonths - | SortType::TopNineMonths => CommentSortType::Top, + Active | Hot | Scaled => CommentSortType::Hot, + New | NewComments | MostComments => CommentSortType::New, + Old => CommentSortType::Old, + Controversial => CommentSortType::Controversial, + TopHour | TopSixHour | TopTwelveHour | TopDay | TopAll | TopWeek | TopYear | TopMonth + | TopThreeMonths | TopSixMonths | TopNineMonths => CommentSortType::Top, } } -static EMAIL_REGEX: Lazy = Lazy::new(|| { +static EMAIL_REGEX: LazyLock = LazyLock::new(|| { Regex::new(r"^[a-zA-Z0-9.!#$%&’*+/=?^_`{|}~-]+@[a-zA-Z0-9-]+(?:\.[a-zA-Z0-9-]+)*$") .expect("compile email regex") }); pub mod functions { - use diesel::sql_types::{BigInt, Text, Timestamptz}; + use diesel::sql_types::{BigInt, Bool, Text, Timestamptz}; sql_function! { + #[sql_name = "r.hot_rank"] fn hot_rank(score: BigInt, time: Timestamptz) -> Double; } sql_function! { + #[sql_name = "r.scaled_rank"] fn scaled_rank(score: BigInt, time: Timestamptz, users_active_month: BigInt) -> Double; } sql_function! 
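// A quick sanity check of the post-to-comment sort mapping above, assuming this crate's
// PostSortType and CommentSortType enums are in scope (illustrative sketch only, not a
// standalone program):
//
//   assert_eq!(post_to_comment_sort_type(PostSortType::Active), CommentSortType::Hot);
//   assert_eq!(post_to_comment_sort_type(PostSortType::NewComments), CommentSortType::New);
//   assert_eq!(post_to_comment_sort_type(PostSortType::Old), CommentSortType::Old);
//   assert_eq!(post_to_comment_sort_type(PostSortType::TopWeek), CommentSortType::Top);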
{ + #[sql_name = "r.controversy_rank"] fn controversy_rank(upvotes: BigInt, downvotes: BigInt, score: BigInt) -> Double; } + sql_function!(fn reverse_timestamp_sort(time: Timestamptz) -> BigInt); + sql_function!(fn lower(x: Text) -> Text); // really this function is variadic, this just adds the two-argument version sql_function!(fn coalesce(x: diesel::sql_types::Nullable, y: T) -> T); + + sql_function!(fn set_config(setting_name: Text, new_value: Text, is_local: Bool) -> Text); } pub const DELETED_REPLACEMENT_TEXT: &str = "*Permanently Deleted*"; -impl ToSql for DbUrl { - fn to_sql(&self, out: &mut Output) -> diesel::serialize::Result { - >::to_sql(&self.0.to_string(), &mut out.reborrow()) - } -} - -impl FromSql for DbUrl -where - String: FromSql, -{ - fn from_sql(value: DB::RawValue<'_>) -> diesel::deserialize::Result { - let str = String::from_sql(value)?; - Ok(DbUrl(Box::new(Url::parse(&str)?))) - } -} - -impl From> for DbUrl -where - Kind: Object + Send + 'static, - for<'de2> ::Kind: serde::Deserialize<'de2>, -{ - fn from(id: ObjectId) -> Self { - DbUrl(Box::new(id.into())) - } -} - pub fn now() -> AsExprOf { // https://github.com/diesel-rs/diesel/issues/1514 diesel::dsl::now.into_sql::() @@ -433,7 +542,8 @@ pub trait ListFn<'a, T, Args>: Fn(DbConn<'a>, Args) -> ResultFuture<'a, Vec> impl<'a, T, Args, F: Fn(DbConn<'a>, Args) -> ResultFuture<'a, Vec>> ListFn<'a, T, Args> for F {} -/// Allows read and list functions to capture a shared closure that has an inferred return type, which is useful for join logic +/// Allows read and list functions to capture a shared closure that has an inferred return type, +/// which is useful for join logic pub struct Queries { pub read_fn: RF, pub list_fn: LF, @@ -486,11 +596,9 @@ impl Queries { #[cfg(test)] mod tests { - #![allow(clippy::unwrap_used)] - #![allow(clippy::indexing_slicing)] - use super::{fuzzy_search, *}; - use crate::utils::is_email_regex; + use super::*; + use pretty_assertions::assert_eq; #[test] fn test_fuzzy_search() { @@ -509,26 +617,24 @@ mod tests { #[test] fn test_diesel_option_overwrite() { - assert_eq!(diesel_option_overwrite(None), None); - assert_eq!(diesel_option_overwrite(Some(String::new())), Some(None)); + assert_eq!(diesel_string_update(None), None); + assert_eq!(diesel_string_update(Some("")), Some(None)); assert_eq!( - diesel_option_overwrite(Some("test".to_string())), + diesel_string_update(Some("test")), Some(Some("test".to_string())) ); } #[test] - fn test_diesel_option_overwrite_to_url() { - assert!(matches!(diesel_option_overwrite_to_url(&None), Ok(None))); - assert!(matches!( - diesel_option_overwrite_to_url(&Some(String::new())), - Ok(Some(None)) - )); - assert!(diesel_option_overwrite_to_url(&Some("invalid_url".to_string())).is_err()); + fn test_diesel_option_overwrite_to_url() -> LemmyResult<()> { + assert!(matches!(diesel_url_update(None), Ok(None))); + assert!(matches!(diesel_url_update(Some("")), Ok(Some(None)))); + assert!(diesel_url_update(Some("invalid_url")).is_err()); let example_url = "https://example.com"; assert!(matches!( - diesel_option_overwrite_to_url(&Some(example_url.to_string())), - Ok(Some(Some(url))) if url == Url::parse(example_url).unwrap().into() + diesel_url_update(Some(example_url)), + Ok(Some(Some(url))) if url == Url::parse(example_url)?.into() )); + Ok(()) } } diff --git a/crates/db_views/Cargo.toml b/crates/db_views/Cargo.toml index 69fa24403..df8124c8a 100644 --- a/crates/db_views/Cargo.toml +++ b/crates/db_views/Cargo.toml @@ -11,6 +11,9 @@ repository.workspace = true [lib] 
doctest = false +[lints] +workspace = true + [features] full = [ "lemmy_utils", @@ -20,6 +23,8 @@ full = [ "tracing", "ts-rs", "actix-web", + "i-love-jesus", + "lemmy_db_schema/full", ] [dependencies] @@ -33,7 +38,11 @@ serde_with = { workspace = true } tracing = { workspace = true, optional = true } ts-rs = { workspace = true, optional = true } actix-web = { workspace = true, optional = true } +i-love-jesus = { workspace = true, optional = true } +chrono = { workspace = true } [dev-dependencies] serial_test = { workspace = true } tokio = { workspace = true } +pretty_assertions = { workspace = true } +url = { workspace = true } diff --git a/crates/db_views/src/comment_report_view.rs b/crates/db_views/src/comment_report_view.rs index cded46ac9..be5e76562 100644 --- a/crates/db_views/src/comment_report_view.rs +++ b/crates/db_views/src/comment_report_view.rs @@ -12,16 +12,20 @@ use diesel::{ use diesel_async::RunQueryDsl; use lemmy_db_schema::{ aliases, - newtypes::{CommentReportId, CommunityId, PersonId}, + newtypes::{CommentId, CommentReportId, CommunityId, PersonId}, schema::{ comment, comment_aggregates, comment_like, comment_report, + comment_saved, community, + community_follower, community_moderator, community_person_ban, + local_user, person, + person_block, post, }, utils::{get_conn, limit_and_offset, DbConn, DbPool, ListFn, Queries, ReadFn}, @@ -52,41 +56,6 @@ fn queries<'a>() -> Queries< aliases::person2 .on(comment_report::resolver_id.eq(aliases::person2.field(person::id).nullable())), ) - }; - - let selection = ( - comment_report::all_columns, - comment::all_columns, - post::all_columns, - community::all_columns, - person::all_columns, - aliases::person1.fields(person::all_columns), - comment_aggregates::all_columns, - community_person_ban::id.nullable().is_not_null(), - comment_like::score.nullable(), - aliases::person2.fields(person::all_columns).nullable(), - ); - - let read = move |mut conn: DbConn<'a>, (report_id, my_person_id): (CommentReportId, PersonId)| async move { - all_joins( - comment_report::table.find(report_id).into_boxed(), - my_person_id, - ) - .left_join( - community_person_ban::table.on( - community::id - .eq(community_person_ban::community_id) - .and(community_person_ban::person_id.eq(comment::creator_id)), - ), - ) - .select(selection) - .first::(&mut conn) - .await - }; - - let list = move |mut conn: DbConn<'a>, - (options, user): (CommentReportQuery, &'a LocalUserView)| async move { - let mut query = all_joins(comment_report::table.into_boxed(), user.person.id) .left_join( community_person_ban::table.on( community::id @@ -99,22 +68,101 @@ fn queries<'a>() -> Queries< ), ), ) - .select(selection); + .left_join( + aliases::community_moderator1.on( + community::id + .eq(aliases::community_moderator1.field(community_moderator::community_id)) + .and( + aliases::community_moderator1 + .field(community_moderator::person_id) + .eq(comment::creator_id), + ), + ), + ) + .left_join( + local_user::table.on( + comment::creator_id + .eq(local_user::person_id) + .and(local_user::admin.eq(true)), + ), + ) + .left_join( + person_block::table.on( + comment::creator_id + .eq(person_block::target_id) + .and(person_block::person_id.eq(my_person_id)), + ), + ) + .left_join( + community_follower::table.on( + post::community_id + .eq(community_follower::community_id) + .and(community_follower::person_id.eq(my_person_id)), + ), + ) + .left_join( + comment_saved::table.on( + comment::id + .eq(comment_saved::comment_id) + .and(comment_saved::person_id.eq(my_person_id)), + ), + ) + 
.select(( + comment_report::all_columns, + comment::all_columns, + post::all_columns, + community::all_columns, + person::all_columns, + aliases::person1.fields(person::all_columns), + comment_aggregates::all_columns, + community_person_ban::community_id.nullable().is_not_null(), + aliases::community_moderator1 + .field(community_moderator::community_id) + .nullable() + .is_not_null(), + local_user::admin.nullable().is_not_null(), + person_block::target_id.nullable().is_not_null(), + community_follower::pending.nullable(), + comment_saved::published.nullable().is_not_null(), + comment_like::score.nullable(), + aliases::person2.fields(person::all_columns).nullable(), + )) + }; + + let read = move |mut conn: DbConn<'a>, (report_id, my_person_id): (CommentReportId, PersonId)| async move { + all_joins( + comment_report::table.find(report_id).into_boxed(), + my_person_id, + ) + .first(&mut conn) + .await + }; + + let list = move |mut conn: DbConn<'a>, + (options, user): (CommentReportQuery, &'a LocalUserView)| async move { + let mut query = all_joins(comment_report::table.into_boxed(), user.person.id); if let Some(community_id) = options.community_id { query = query.filter(post::community_id.eq(community_id)); } + if let Some(comment_id) = options.comment_id { + query = query.filter(comment_report::comment_id.eq(comment_id)); + } + + // If viewing all reports, order by newest, but if viewing unresolved only, show the oldest + // first (FIFO) if options.unresolved_only { - query = query.filter(comment_report::resolved.eq(false)); + query = query + .filter(comment_report::resolved.eq(false)) + .order_by(comment_report::published.asc()); + } else { + query = query.order_by(comment_report::published.desc()); } let (limit, offset) = limit_and_offset(options.page, options.limit)?; - query = query - .order_by(comment_report::published.desc()) - .limit(limit) - .offset(offset); + query = query.limit(limit).offset(offset); // If its not an admin, get only the ones you mod if !user.local_user.admin { @@ -148,7 +196,7 @@ impl CommentReportView { queries().read(pool, (report_id, my_person_id)).await } - /// Returns the current unresolved post report count for the communities you mod + /// Returns the current unresolved comment report count for the communities you mod pub async fn get_report_count( pool: &mut DbPool<'_>, my_person_id: PersonId, @@ -194,6 +242,7 @@ impl CommentReportView { #[derive(Default)] pub struct CommentReportQuery { pub community_id: Option, + pub comment_id: Option, pub page: Option, pub limit: Option, pub unresolved_only: bool, @@ -210,9 +259,8 @@ impl CommentReportQuery { } #[cfg(test)] +#[expect(clippy::indexing_slicing)] mod tests { - #![allow(clippy::unwrap_used)] - #![allow(clippy::indexing_slicing)] use crate::{ comment_report_view::{CommentReportQuery, CommentReportView}, @@ -226,68 +274,56 @@ mod tests { community::{Community, CommunityInsertForm, CommunityModerator, CommunityModeratorForm}, instance::Instance, local_user::{LocalUser, LocalUserInsertForm}, + local_user_vote_display_mode::LocalUserVoteDisplayMode, person::{Person, PersonInsertForm}, post::{Post, PostInsertForm}, }, traits::{Crud, Joinable, Reportable}, - utils::build_db_pool_for_tests, + utils::{build_db_pool_for_tests, RANK_DEFAULT}, + CommunityVisibility, + SubscribedType, }; + use lemmy_utils::error::LemmyResult; + use pretty_assertions::assert_eq; use serial_test::serial; #[tokio::test] #[serial] - async fn test_crud() { + async fn test_crud() -> LemmyResult<()> { let pool = 
&build_db_pool_for_tests().await; let pool = &mut pool.into(); - let inserted_instance = Instance::read_or_create(pool, "my_domain.tld".to_string()) - .await - .unwrap(); + let inserted_instance = Instance::read_or_create(pool, "my_domain.tld".to_string()).await?; - let new_person = PersonInsertForm::builder() - .name("timmy_crv".into()) - .public_key("pubkey".to_string()) - .instance_id(inserted_instance.id) - .build(); + let new_person = PersonInsertForm::test_form(inserted_instance.id, "timmy_crv"); - let inserted_timmy = Person::create(pool, &new_person).await.unwrap(); + let inserted_timmy = Person::create(pool, &new_person).await?; - let new_local_user = LocalUserInsertForm::builder() - .person_id(inserted_timmy.id) - .password_encrypted("123".to_string()) - .build(); - let timmy_local_user = LocalUser::create(pool, &new_local_user).await.unwrap(); + let new_local_user = LocalUserInsertForm::test_form(inserted_timmy.id); + let timmy_local_user = LocalUser::create(pool, &new_local_user, vec![]).await?; let timmy_view = LocalUserView { local_user: timmy_local_user, + local_user_vote_display_mode: LocalUserVoteDisplayMode::default(), person: inserted_timmy.clone(), counts: Default::default(), }; - let new_person_2 = PersonInsertForm::builder() - .name("sara_crv".into()) - .public_key("pubkey".to_string()) - .instance_id(inserted_instance.id) - .build(); + let new_person_2 = PersonInsertForm::test_form(inserted_instance.id, "sara_crv"); - let inserted_sara = Person::create(pool, &new_person_2).await.unwrap(); + let inserted_sara = Person::create(pool, &new_person_2).await?; // Add a third person, since new ppl can only report something once. - let new_person_3 = PersonInsertForm::builder() - .name("jessica_crv".into()) - .public_key("pubkey".to_string()) - .instance_id(inserted_instance.id) - .build(); + let new_person_3 = PersonInsertForm::test_form(inserted_instance.id, "jessica_crv"); - let inserted_jessica = Person::create(pool, &new_person_3).await.unwrap(); + let inserted_jessica = Person::create(pool, &new_person_3).await?; - let new_community = CommunityInsertForm::builder() - .name("test community crv".to_string()) - .title("nada".to_owned()) - .public_key("pubkey".to_string()) - .instance_id(inserted_instance.id) - .build(); - - let inserted_community = Community::create(pool, &new_community).await.unwrap(); + let new_community = CommunityInsertForm::new( + inserted_instance.id, + "test community crv".to_string(), + "nada".to_owned(), + "pubkey".to_string(), + ); + let inserted_community = Community::create(pool, &new_community).await?; // Make timmy a mod let timmy_moderator_form = CommunityModeratorForm { @@ -295,25 +331,22 @@ mod tests { person_id: inserted_timmy.id, }; - let _inserted_moderator = CommunityModerator::join(pool, &timmy_moderator_form) - .await - .unwrap(); + let _inserted_moderator = CommunityModerator::join(pool, &timmy_moderator_form).await?; - let new_post = PostInsertForm::builder() - .name("A test post crv".into()) - .creator_id(inserted_timmy.id) - .community_id(inserted_community.id) - .build(); + let new_post = PostInsertForm::new( + "A test post crv".into(), + inserted_timmy.id, + inserted_community.id, + ); - let inserted_post = Post::create(pool, &new_post).await.unwrap(); + let inserted_post = Post::create(pool, &new_post).await?; - let comment_form = CommentInsertForm::builder() - .content("A test comment 32".into()) - .creator_id(inserted_timmy.id) - .post_id(inserted_post.id) - .build(); - - let inserted_comment = Comment::create(pool, 
&comment_form, None).await.unwrap(); + let comment_form = CommentInsertForm::new( + inserted_timmy.id, + inserted_post.id, + "A test comment 32".into(), + ); + let inserted_comment = Comment::create(pool, &comment_form, None).await?; // sara reports let sara_report_form = CommentReportForm { @@ -323,9 +356,7 @@ mod tests { reason: "from sara".into(), }; - let inserted_sara_report = CommentReport::report(pool, &sara_report_form) - .await - .unwrap(); + let inserted_sara_report = CommentReport::report(pool, &sara_report_form).await?; // jessica reports let jessica_report_form = CommentReportForm { @@ -335,22 +366,21 @@ mod tests { reason: "from jessica".into(), }; - let inserted_jessica_report = CommentReport::report(pool, &jessica_report_form) - .await - .unwrap(); + let inserted_jessica_report = CommentReport::report(pool, &jessica_report_form).await?; - let agg = CommentAggregates::read(pool, inserted_comment.id) - .await - .unwrap(); + let agg = CommentAggregates::read(pool, inserted_comment.id).await?; let read_jessica_report_view = - CommentReportView::read(pool, inserted_jessica_report.id, inserted_timmy.id) - .await - .unwrap(); + CommentReportView::read(pool, inserted_jessica_report.id, inserted_timmy.id).await?; let expected_jessica_report_view = CommentReportView { comment_report: inserted_jessica_report.clone(), comment: inserted_comment.clone(), post: inserted_post, + creator_is_moderator: true, + creator_is_admin: false, + creator_blocked: false, + subscribed: SubscribedType::NotSubscribed, + saved: false, community: Community { id: inserted_community.id, name: inserted_community.name, @@ -361,6 +391,7 @@ mod tests { actor_id: inserted_community.actor_id.clone(), local: true, title: inserted_community.title, + sidebar: None, description: None, updated: None, banner: None, @@ -372,10 +403,10 @@ mod tests { last_refreshed_at: inserted_community.last_refreshed_at, followers_url: inserted_community.followers_url, inbox_url: inserted_community.inbox_url, - shared_inbox_url: inserted_community.shared_inbox_url, moderators_url: inserted_community.moderators_url, featured_url: inserted_community.featured_url, instance_id: inserted_instance.id, + visibility: CommunityVisibility::Public, }, creator: Person { id: inserted_jessica.id, @@ -392,7 +423,6 @@ mod tests { banner: None, updated: None, inbox_url: inserted_jessica.inbox_url.clone(), - shared_inbox_url: None, matrix_user_id: None, ban_expires: None, instance_id: inserted_instance.id, @@ -415,7 +445,6 @@ mod tests { banner: None, updated: None, inbox_url: inserted_timmy.inbox_url.clone(), - shared_inbox_url: None, matrix_user_id: None, ban_expires: None, instance_id: inserted_instance.id, @@ -425,14 +454,13 @@ mod tests { }, creator_banned_from_community: false, counts: CommentAggregates { - id: agg.id, comment_id: inserted_comment.id, score: 0, upvotes: 0, downvotes: 0, published: agg.published, child_count: 0, - hot_rank: 0.1728, + hot_rank: RANK_DEFAULT, controversy_rank: 0.0, }, my_vote: None, @@ -458,7 +486,6 @@ mod tests { banner: None, updated: None, inbox_url: inserted_sara.inbox_url.clone(), - shared_inbox_url: None, matrix_user_id: None, ban_expires: None, instance_id: inserted_instance.id, @@ -470,31 +497,25 @@ mod tests { // Do a batch read of timmys reports let reports = CommentReportQuery::default() .list(pool, &timmy_view) - .await - .unwrap(); + .await?; assert_eq!( reports, [ expected_jessica_report_view.clone(), - expected_sara_report_view.clone() + expected_sara_report_view.clone(), ] ); // Make sure the counts are 
correct - let report_count = CommentReportView::get_report_count(pool, inserted_timmy.id, false, None) - .await - .unwrap(); + let report_count = + CommentReportView::get_report_count(pool, inserted_timmy.id, false, None).await?; assert_eq!(2, report_count); // Try to resolve the report - CommentReport::resolve(pool, inserted_jessica_report.id, inserted_timmy.id) - .await - .unwrap(); + CommentReport::resolve(pool, inserted_jessica_report.id, inserted_timmy.id).await?; let read_jessica_report_view_after_resolve = - CommentReportView::read(pool, inserted_jessica_report.id, inserted_timmy.id) - .await - .unwrap(); + CommentReportView::read(pool, inserted_jessica_report.id, inserted_timmy.id).await?; let mut expected_jessica_report_view_after_resolve = expected_jessica_report_view; expected_jessica_report_view_after_resolve @@ -526,7 +547,6 @@ mod tests { private_key: inserted_timmy.private_key.clone(), public_key: inserted_timmy.public_key.clone(), last_refreshed_at: inserted_timmy.last_refreshed_at, - shared_inbox_url: None, matrix_user_id: None, ban_expires: None, instance_id: inserted_instance.id, @@ -544,24 +564,21 @@ mod tests { ..Default::default() } .list(pool, &timmy_view) - .await - .unwrap(); + .await?; assert_eq!(reports_after_resolve[0], expected_sara_report_view); assert_eq!(reports_after_resolve.len(), 1); // Make sure the counts are correct let report_count_after_resolved = - CommentReportView::get_report_count(pool, inserted_timmy.id, false, None) - .await - .unwrap(); + CommentReportView::get_report_count(pool, inserted_timmy.id, false, None).await?; assert_eq!(1, report_count_after_resolved); - Person::delete(pool, inserted_timmy.id).await.unwrap(); - Person::delete(pool, inserted_sara.id).await.unwrap(); - Person::delete(pool, inserted_jessica.id).await.unwrap(); - Community::delete(pool, inserted_community.id) - .await - .unwrap(); - Instance::delete(pool, inserted_instance.id).await.unwrap(); + Person::delete(pool, inserted_timmy.id).await?; + Person::delete(pool, inserted_sara.id).await?; + Person::delete(pool, inserted_jessica.id).await?; + Community::delete(pool, inserted_community.id).await?; + Instance::delete(pool, inserted_instance.id).await?; + + Ok(()) } } diff --git a/crates/db_views/src/comment_view.rs b/crates/db_views/src/comment_view.rs index 99f046cdf..ff1405508 100644 --- a/crates/db_views/src/comment_view.rs +++ b/crates/db_views/src/comment_view.rs @@ -1,9 +1,13 @@ -use crate::structs::{CommentView, LocalUserView}; +use crate::structs::CommentView; use diesel::{ + dsl::{exists, not}, pg::Pg, result::Error, + sql_types, BoolExpressionMethods, + BoxableExpression, ExpressionMethods, + IntoSql, JoinOnDsl, NullableExpressionMethods, PgTextExpressionMethods, @@ -12,7 +16,7 @@ use diesel::{ use diesel_async::RunQueryDsl; use diesel_ltree::{nlevel, subpath, Ltree, LtreeExtensions}; use lemmy_db_schema::{ - aliases, + impls::local_user::LocalUserOptionHelper, newtypes::{CommentId, CommunityId, LocalUserId, PersonId, PostId}, schema::{ comment, @@ -25,141 +29,168 @@ use lemmy_db_schema::{ community_moderator, community_person_ban, instance_block, + local_user, local_user_language, person, person_block, post, }, - source::community::CommunityFollower, + source::{local_user::LocalUser, site::Site}, utils::{fuzzy_search, limit_and_offset, DbConn, DbPool, ListFn, Queries, ReadFn}, CommentSortType, ListingType, }; fn queries<'a>() -> Queries< - impl ReadFn<'a, CommentView, (CommentId, Option)>, - impl ListFn<'a, CommentView, CommentQuery<'a>>, + impl ReadFn<'a, 
CommentView, (CommentId, Option<&'a LocalUser>)>, + impl ListFn<'a, CommentView, (CommentQuery<'a>, &'a Site)>, > { - let all_joins = |query: comment::BoxedQuery<'a, Pg>, my_person_id: Option| { - // The left join below will return None in this case - let person_id_join = my_person_id.unwrap_or(PersonId(-1)); + let is_creator_banned_from_community = exists( + community_person_ban::table.filter( + community::id + .eq(community_person_ban::community_id) + .and(community_person_ban::person_id.eq(comment::creator_id)), + ), + ); + + let is_local_user_banned_from_community = |person_id| { + exists( + community_person_ban::table.filter( + community::id + .eq(community_person_ban::community_id) + .and(community_person_ban::person_id.eq(person_id)), + ), + ) + }; + + let is_community_followed = |person_id| { + community_follower::table + .filter( + post::community_id + .eq(community_follower::community_id) + .and(community_follower::person_id.eq(person_id)), + ) + .select(community_follower::pending.nullable()) + .single_value() + }; + + let is_creator_blocked = |person_id| { + exists( + person_block::table.filter( + comment::creator_id + .eq(person_block::target_id) + .and(person_block::person_id.eq(person_id)), + ), + ) + }; + + let score = |person_id| { + comment_like::table + .filter( + comment::id + .eq(comment_like::comment_id) + .and(comment_like::person_id.eq(person_id)), + ) + .select(comment_like::score.nullable()) + .single_value() + }; + + let creator_is_moderator = exists( + community_moderator::table.filter( + community::id + .eq(community_moderator::community_id) + .and(community_moderator::person_id.eq(comment::creator_id)), + ), + ); + + let creator_is_admin = exists( + local_user::table.filter( + comment::creator_id + .eq(local_user::person_id) + .and(local_user::admin.eq(true)), + ), + ); + + let all_joins = move |query: comment::BoxedQuery<'a, Pg>, my_person_id: Option| { + let is_local_user_banned_from_community_selection: Box< + dyn BoxableExpression<_, Pg, SqlType = sql_types::Bool>, + > = if let Some(person_id) = my_person_id { + Box::new(is_local_user_banned_from_community(person_id)) + } else { + Box::new(false.into_sql::()) + }; + + let score_selection: Box< + dyn BoxableExpression<_, Pg, SqlType = sql_types::Nullable>, + > = if let Some(person_id) = my_person_id { + Box::new(score(person_id)) + } else { + Box::new(None::.into_sql::>()) + }; + + let subscribed_type_selection: Box< + dyn BoxableExpression<_, Pg, SqlType = sql_types::Nullable>, + > = if let Some(person_id) = my_person_id { + Box::new(is_community_followed(person_id)) + } else { + Box::new(None::.into_sql::>()) + }; + + let is_creator_blocked_selection: Box> = + if let Some(person_id) = my_person_id { + Box::new(is_creator_blocked(person_id)) + } else { + Box::new(false.into_sql::()) + }; + query .inner_join(person::table) .inner_join(post::table) .inner_join(community::table.on(post::community_id.eq(community::id))) .inner_join(comment_aggregates::table) - .left_join( - community_person_ban::table.on( - community::id - .eq(community_person_ban::community_id) - .and(community_person_ban::person_id.eq(comment::creator_id)), - ), - ) - .left_join( - community_follower::table.on( - post::community_id - .eq(community_follower::community_id) - .and(community_follower::person_id.eq(person_id_join)), - ), - ) .left_join( comment_saved::table.on( comment::id .eq(comment_saved::comment_id) - .and(comment_saved::person_id.eq(person_id_join)), - ), - ) - .left_join( - person_block::table.on( - comment::creator_id - 
.eq(person_block::target_id) - .and(person_block::person_id.eq(person_id_join)), - ), - ) - .left_join( - comment_like::table.on( - comment::id - .eq(comment_like::comment_id) - .and(comment_like::person_id.eq(person_id_join)), - ), - ) - .left_join( - community_moderator::table.on( - post::id - .eq(comment::post_id) - .and(post::community_id.eq(community_moderator::community_id)) - .and(community_moderator::person_id.eq(person_id_join)), - ), - ) - .left_join( - aliases::community_moderator1.on( - community::id - .eq(aliases::community_moderator1.field(community_moderator::community_id)) - .and( - aliases::community_moderator1 - .field(community_moderator::person_id) - .eq(comment::creator_id), - ), + .and(comment_saved::person_id.eq(my_person_id.unwrap_or(PersonId(-1)))), ), ) + .select(( + comment::all_columns, + person::all_columns, + post::all_columns, + community::all_columns, + comment_aggregates::all_columns, + is_creator_banned_from_community, + is_local_user_banned_from_community_selection, + creator_is_moderator, + creator_is_admin, + subscribed_type_selection, + comment_saved::person_id.nullable().is_not_null(), + is_creator_blocked_selection, + score_selection, + )) }; - let selection = ( - comment::all_columns, - person::all_columns, - post::all_columns, - community::all_columns, - comment_aggregates::all_columns, - community_person_ban::id.nullable().is_not_null(), - aliases::community_moderator1 - .field(community_moderator::id) - .nullable() - .is_not_null(), - CommunityFollower::select_subscribed_type(), - comment_saved::id.nullable().is_not_null(), - person_block::id.nullable().is_not_null(), - comment_like::score.nullable(), - ); - let read = move |mut conn: DbConn<'a>, - (comment_id, my_person_id): (CommentId, Option)| async move { - all_joins(comment::table.find(comment_id).into_boxed(), my_person_id) - .select(selection) - .first::(&mut conn) - .await + (comment_id, my_local_user): (CommentId, Option<&'a LocalUser>)| async move { + let mut query = all_joins( + comment::table.find(comment_id).into_boxed(), + my_local_user.person_id(), + ); + query = my_local_user.visible_communities_only(query); + query.first(&mut conn).await }; - let list = move |mut conn: DbConn<'a>, options: CommentQuery<'a>| async move { - let person_id = options.local_user.map(|l| l.person.id); - let local_user_id = options.local_user.map(|l| l.local_user.id); - + let list = move |mut conn: DbConn<'a>, (options, site): (CommentQuery<'a>, &'a Site)| async move { // The left join below will return None in this case - let person_id_join = person_id.unwrap_or(PersonId(-1)); - let local_user_id_join = local_user_id.unwrap_or(LocalUserId(-1)); + let person_id_join = options.local_user.person_id().unwrap_or(PersonId(-1)); + let local_user_id_join = options + .local_user + .local_user_id() + .unwrap_or(LocalUserId(-1)); - let mut query = all_joins(comment::table.into_boxed(), person_id) - .left_join( - instance_block::table.on( - community::instance_id - .eq(instance_block::instance_id) - .and(instance_block::person_id.eq(person_id_join)), - ), - ) - .left_join( - community_block::table.on( - community::id - .eq(community_block::community_id) - .and(community_block::person_id.eq(person_id_join)), - ), - ) - .left_join( - local_user_language::table.on( - comment::language_id - .eq(local_user_language::language_id) - .and(local_user_language::local_user_id.eq(local_user_id_join)), - ), - ) - .select(selection); + let mut query = all_joins(comment::table.into_boxed(), options.local_user.person_id()); if let 
Some(creator_id) = options.creator_id { query = query.filter(comment::creator_id.eq(creator_id)); @@ -172,53 +203,63 @@ fn queries<'a>() -> Queries< if let Some(parent_path) = options.parent_path.as_ref() { query = query.filter(comment::path.contained_by(parent_path)); }; - + //filtering out removed and deleted comments from search if let Some(search_term) = options.search_term { - query = query.filter(comment::content.ilike(fuzzy_search(&search_term))); + query = query.filter( + comment::content + .ilike(fuzzy_search(&search_term)) + .and(not(comment::removed.or(comment::deleted))), + ); }; if let Some(community_id) = options.community_id { query = query.filter(post::community_id.eq(community_id)); } - if let Some(listing_type) = options.listing_type { - match listing_type { - ListingType::Subscribed => query = query.filter(community_follower::pending.is_not_null()), // TODO could be this: and(community_follower::person_id.eq(person_id_join)), - ListingType::Local => { - query = query.filter(community::local.eq(true)).filter( - community::hidden - .eq(false) - .or(community_follower::person_id.eq(person_id_join)), - ) - } - ListingType::All => { - query = query.filter( - community::hidden - .eq(false) - .or(community_follower::person_id.eq(person_id_join)), - ) - } - ListingType::ModeratorView => { - query = query.filter(community_moderator::person_id.is_not_null()); - } + let is_subscribed = exists( + community_follower::table.filter( + post::community_id + .eq(community_follower::community_id) + .and(community_follower::person_id.eq(person_id_join)), + ), + ); + + match options.listing_type.unwrap_or_default() { + ListingType::Subscribed => query = query.filter(is_subscribed), /* TODO could be this: and(community_follower::person_id.eq(person_id_join)), */ + ListingType::Local => { + query = query + .filter(community::local.eq(true)) + .filter(community::hidden.eq(false).or(is_subscribed)) + } + ListingType::All => query = query.filter(community::hidden.eq(false).or(is_subscribed)), + ListingType::ModeratorView => { + query = query.filter(exists( + community_moderator::table.filter( + post::community_id + .eq(community_moderator::community_id) + .and(community_moderator::person_id.eq(person_id_join)), + ), + )); } } - if options.saved_only { - query = query.filter(comment_saved::comment_id.is_not_null()); + // If its saved only, then filter, and order by the saved time, not the comment creation time. 
+ if options.saved_only.unwrap_or_default() { + query = query + .filter(comment_saved::person_id.is_not_null()) + .then_order_by(comment_saved::published.desc()); } - if options.liked_only { - query = query.filter(comment_like::score.eq(1)); - } else if options.disliked_only { - query = query.filter(comment_like::score.eq(-1)); + if let Some(my_id) = options.local_user.person_id() { + let not_creator_filter = comment::creator_id.ne(my_id); + if options.liked_only.unwrap_or_default() { + query = query.filter(not_creator_filter).filter(score(my_id).eq(1)); + } else if options.disliked_only.unwrap_or_default() { + query = query.filter(not_creator_filter).filter(score(my_id).eq(-1)); + } } - if !options - .local_user - .map(|l| l.local_user.show_bot_accounts) - .unwrap_or(true) - { + if !options.local_user.show_bot_accounts() { query = query.filter(person::bot_account.eq(false)); }; @@ -226,15 +267,39 @@ fn queries<'a>() -> Queries< && options.listing_type.unwrap_or_default() != ListingType::ModeratorView { // Filter out the rows with missing languages - query = query.filter(local_user_language::language_id.is_not_null()); + query = query.filter(exists( + local_user_language::table.filter( + comment::language_id + .eq(local_user_language::language_id) + .and(local_user_language::local_user_id.eq(local_user_id_join)), + ), + )); // Don't show blocked communities or persons - if options.post_id.is_none() { - query = query.filter(instance_block::person_id.is_null()); - query = query.filter(community_block::person_id.is_null()); - } - query = query.filter(person_block::person_id.is_null()); - } + query = query.filter(not(exists( + instance_block::table.filter( + community::instance_id + .eq(instance_block::instance_id) + .and(instance_block::person_id.eq(person_id_join)), + ), + ))); + query = query.filter(not(exists( + community_block::table.filter( + community::id + .eq(community_block::community_id) + .and(community_block::person_id.eq(person_id_join)), + ), + ))); + query = query.filter(not(is_creator_blocked(person_id_join))); + }; + + if !options.local_user.show_nsfw(site) { + query = query + .filter(post::nsfw.eq(false)) + .filter(community::nsfw.eq(false)); + }; + + query = options.local_user.visible_communities_only(query); // A Max depth given means its a tree fetch let (limit, offset) = if let Some(max_depth) = options.max_depth { @@ -247,7 +312,8 @@ fn queries<'a>() -> Queries< query = query.filter(nlevel(comment::path).le(depth_limit)); - // only order if filtering by a post id, or parent_path. DOS potential otherwise and max_depth + !post_id isn't used anyways (afaik) + // only order if filtering by a post id, or parent_path. 
DOS potential otherwise and max_depth + // + !post_id isn't used anyways (afaik) if options.post_id.is_some() || options.parent_path.is_some() { // Always order by the parent path first query = query.then_order_by(subpath(comment::path, 0, -1)); @@ -298,18 +364,22 @@ fn queries<'a>() -> Queries< } impl CommentView { - pub async fn read( + pub async fn read<'a>( pool: &mut DbPool<'_>, comment_id: CommentId, - my_person_id: Option, + my_local_user: Option<&'a LocalUser>, ) -> Result { // If a person is given, then my_vote (res.9), if None, should be 0, not null // Necessary to differentiate between other person's votes - let mut res = queries().read(pool, (comment_id, my_person_id)).await?; - if my_person_id.is_some() && res.my_vote.is_none() { - res.my_vote = Some(0); + let res = queries().read(pool, (comment_id, my_local_user)).await?; + let mut new_view = res.clone(); + if my_local_user.is_some() && res.my_vote.is_none() { + new_view.my_vote = Some(0); } - Ok(res) + if res.comment.deleted || res.comment.removed { + new_view.comment.content = String::new(); + } + Ok(new_view) } } @@ -321,27 +391,37 @@ pub struct CommentQuery<'a> { pub post_id: Option, pub parent_path: Option, pub creator_id: Option, - pub local_user: Option<&'a LocalUserView>, + pub local_user: Option<&'a LocalUser>, pub search_term: Option, - pub saved_only: bool, - pub liked_only: bool, - pub disliked_only: bool, - pub is_profile_view: bool, + pub saved_only: Option, + pub liked_only: Option, + pub disliked_only: Option, pub page: Option, pub limit: Option, pub max_depth: Option, } impl<'a> CommentQuery<'a> { - pub async fn list(self, pool: &mut DbPool<'_>) -> Result, Error> { - queries().list(pool, self).await + pub async fn list(self, site: &Site, pool: &mut DbPool<'_>) -> Result, Error> { + Ok( + queries() + .list(pool, (self, site)) + .await? 
+ .into_iter() + .map(|mut c| { + if c.comment.deleted || c.comment.removed { + c.comment.content = String::new(); + } + c + }) + .collect(), + ) } } #[cfg(test)] +#[expect(clippy::indexing_slicing)] mod tests { - #![allow(clippy::unwrap_used)] - #![allow(clippy::indexing_slicing)] use crate::{ comment_view::{CommentQuery, CommentSortType, CommentView, DbPool}, @@ -349,23 +429,45 @@ mod tests { }; use lemmy_db_schema::{ aggregates::structs::CommentAggregates, + assert_length, impls::actor_language::UNDETERMINED_ID, newtypes::LanguageId, source::{ actor_language::LocalUserLanguage, - comment::{Comment, CommentInsertForm, CommentLike, CommentLikeForm, CommentUpdateForm}, - community::{Community, CommunityInsertForm, CommunityModerator, CommunityModeratorForm}, + comment::{ + Comment, + CommentInsertForm, + CommentLike, + CommentLikeForm, + CommentSaved, + CommentSavedForm, + CommentUpdateForm, + }, + community::{ + Community, + CommunityInsertForm, + CommunityModerator, + CommunityModeratorForm, + CommunityPersonBan, + CommunityPersonBanForm, + CommunityUpdateForm, + }, instance::Instance, language::Language, local_user::{LocalUser, LocalUserInsertForm}, + local_user_vote_display_mode::LocalUserVoteDisplayMode, person::{Person, PersonInsertForm}, person_block::{PersonBlock, PersonBlockForm}, - post::{Post, PostInsertForm}, + post::{Post, PostInsertForm, PostUpdateForm}, + site::{Site, SiteInsertForm}, }, - traits::{Blockable, Crud, Joinable, Likeable}, - utils::build_db_pool_for_tests, + traits::{Bannable, Blockable, Crud, Joinable, Likeable, Saveable}, + utils::{build_db_pool_for_tests, RANK_DEFAULT}, + CommunityVisibility, SubscribedType, }; + use lemmy_utils::error::LemmyResult; + use pretty_assertions::assert_eq; use serial_test::serial; struct Data { @@ -374,52 +476,39 @@ mod tests { inserted_comment_1: Comment, inserted_comment_2: Comment, inserted_post: Post, - local_user_view: LocalUserView, - inserted_person_2: Person, + timmy_local_user_view: LocalUserView, + inserted_sara_person: Person, inserted_community: Community, + site: Site, } - async fn init_data(pool: &mut DbPool<'_>) -> Data { - let inserted_instance = Instance::read_or_create(pool, "my_domain.tld".to_string()) - .await - .unwrap(); + async fn init_data(pool: &mut DbPool<'_>) -> LemmyResult { + let inserted_instance = Instance::read_or_create(pool, "my_domain.tld".to_string()).await?; - let new_person = PersonInsertForm::builder() - .name("timmy".into()) - .public_key("pubkey".to_string()) - .instance_id(inserted_instance.id) - .build(); - let inserted_person = Person::create(pool, &new_person).await.unwrap(); - let local_user_form = LocalUserInsertForm::builder() - .person_id(inserted_person.id) - .password_encrypted(String::new()) - .build(); - let inserted_local_user = LocalUser::create(pool, &local_user_form).await.unwrap(); + let timmy_person_form = PersonInsertForm::test_form(inserted_instance.id, "timmy"); + let inserted_timmy_person = Person::create(pool, &timmy_person_form).await?; + let timmy_local_user_form = LocalUserInsertForm::test_form_admin(inserted_timmy_person.id); - let new_person_2 = PersonInsertForm::builder() - .name("sara".into()) - .public_key("pubkey".to_string()) - .instance_id(inserted_instance.id) - .build(); - let inserted_person_2 = Person::create(pool, &new_person_2).await.unwrap(); + let inserted_timmy_local_user = LocalUser::create(pool, &timmy_local_user_form, vec![]).await?; - let new_community = CommunityInsertForm::builder() - .name("test community 5".to_string()) - 
.title("nada".to_owned()) - .public_key("pubkey".to_string()) - .instance_id(inserted_instance.id) - .build(); + let sara_person_form = PersonInsertForm::test_form(inserted_instance.id, "sara"); + let inserted_sara_person = Person::create(pool, &sara_person_form).await?; - let inserted_community = Community::create(pool, &new_community).await.unwrap(); + let new_community = CommunityInsertForm::new( + inserted_instance.id, + "test community 5".to_string(), + "nada".to_owned(), + "pubkey".to_string(), + ); + let inserted_community = Community::create(pool, &new_community).await?; - let new_post = PostInsertForm::builder() - .name("A test post 2".into()) - .creator_id(inserted_person.id) - .community_id(inserted_community.id) - .build(); - - let inserted_post = Post::create(pool, &new_post).await.unwrap(); - let english_id = Language::read_id_from_code(pool, Some("en")).await.unwrap(); + let new_post = PostInsertForm::new( + "A test post 2".into(), + inserted_timmy_person.id, + inserted_community.id, + ); + let inserted_post = Post::create(pool, &new_post).await?; + let english_id = Language::read_id_from_code(pool, "en").await?; // Create a comment tree with this hierarchy // 0 @@ -429,127 +518,124 @@ mod tests { // 3 4 // \ // 5 - let comment_form_0 = CommentInsertForm::builder() - .content("Comment 0".into()) - .creator_id(inserted_person.id) - .post_id(inserted_post.id) - .language_id(english_id) - .build(); - - let inserted_comment_0 = Comment::create(pool, &comment_form_0, None).await.unwrap(); - - let comment_form_1 = CommentInsertForm::builder() - .content("Comment 1, A test blocked comment".into()) - .creator_id(inserted_person_2.id) - .post_id(inserted_post.id) - .language_id(english_id) - .build(); - - let inserted_comment_1 = Comment::create(pool, &comment_form_1, Some(&inserted_comment_0.path)) - .await - .unwrap(); - - let finnish_id = Language::read_id_from_code(pool, Some("fi")).await.unwrap(); - let comment_form_2 = CommentInsertForm::builder() - .content("Comment 2".into()) - .creator_id(inserted_person.id) - .post_id(inserted_post.id) - .language_id(finnish_id) - .build(); - - let inserted_comment_2 = Comment::create(pool, &comment_form_2, Some(&inserted_comment_0.path)) - .await - .unwrap(); - - let comment_form_3 = CommentInsertForm::builder() - .content("Comment 3".into()) - .creator_id(inserted_person.id) - .post_id(inserted_post.id) - .language_id(english_id) - .build(); - - let _inserted_comment_3 = - Comment::create(pool, &comment_form_3, Some(&inserted_comment_1.path)) - .await - .unwrap(); - - let polish_id = Language::read_id_from_code(pool, Some("pl")) - .await - .unwrap() - .unwrap(); - let comment_form_4 = CommentInsertForm::builder() - .content("Comment 4".into()) - .creator_id(inserted_person.id) - .post_id(inserted_post.id) - .language_id(Some(polish_id)) - .build(); - - let inserted_comment_4 = Comment::create(pool, &comment_form_4, Some(&inserted_comment_1.path)) - .await - .unwrap(); - - let comment_form_5 = CommentInsertForm::builder() - .content("Comment 5".into()) - .creator_id(inserted_person.id) - .post_id(inserted_post.id) - .build(); - - let _inserted_comment_5 = - Comment::create(pool, &comment_form_5, Some(&inserted_comment_4.path)) - .await - .unwrap(); - - let timmy_blocks_sara_form = PersonBlockForm { - person_id: inserted_person.id, - target_id: inserted_person_2.id, + let comment_form_0 = CommentInsertForm { + language_id: Some(english_id), + ..CommentInsertForm::new( + inserted_timmy_person.id, + inserted_post.id, + "Comment 0".into(), + ) 
}; - let inserted_block = PersonBlock::block(pool, &timmy_blocks_sara_form) - .await - .unwrap(); + let inserted_comment_0 = Comment::create(pool, &comment_form_0, None).await?; + + let comment_form_1 = CommentInsertForm { + language_id: Some(english_id), + ..CommentInsertForm::new( + inserted_sara_person.id, + inserted_post.id, + "Comment 1, A test blocked comment".into(), + ) + }; + let inserted_comment_1 = + Comment::create(pool, &comment_form_1, Some(&inserted_comment_0.path)).await?; + + let finnish_id = Language::read_id_from_code(pool, "fi").await?; + let comment_form_2 = CommentInsertForm { + language_id: Some(finnish_id), + ..CommentInsertForm::new( + inserted_timmy_person.id, + inserted_post.id, + "Comment 2".into(), + ) + }; + + let inserted_comment_2 = + Comment::create(pool, &comment_form_2, Some(&inserted_comment_0.path)).await?; + + let comment_form_3 = CommentInsertForm { + language_id: Some(english_id), + ..CommentInsertForm::new( + inserted_timmy_person.id, + inserted_post.id, + "Comment 3".into(), + ) + }; + let _inserted_comment_3 = + Comment::create(pool, &comment_form_3, Some(&inserted_comment_1.path)).await?; + + let polish_id = Language::read_id_from_code(pool, "pl").await?; + let comment_form_4 = CommentInsertForm { + language_id: Some(polish_id), + ..CommentInsertForm::new( + inserted_timmy_person.id, + inserted_post.id, + "Comment 4".into(), + ) + }; + + let inserted_comment_4 = + Comment::create(pool, &comment_form_4, Some(&inserted_comment_1.path)).await?; + + let comment_form_5 = CommentInsertForm::new( + inserted_timmy_person.id, + inserted_post.id, + "Comment 5".into(), + ); + let _inserted_comment_5 = + Comment::create(pool, &comment_form_5, Some(&inserted_comment_4.path)).await?; + + let timmy_blocks_sara_form = PersonBlockForm { + person_id: inserted_timmy_person.id, + target_id: inserted_sara_person.id, + }; + + let inserted_block = PersonBlock::block(pool, &timmy_blocks_sara_form).await?; let expected_block = PersonBlock { - id: inserted_block.id, - person_id: inserted_person.id, - target_id: inserted_person_2.id, + person_id: inserted_timmy_person.id, + target_id: inserted_sara_person.id, published: inserted_block.published, }; assert_eq!(expected_block, inserted_block); let comment_like_form = CommentLikeForm { comment_id: inserted_comment_0.id, - post_id: inserted_post.id, - person_id: inserted_person.id, + person_id: inserted_timmy_person.id, score: 1, }; - let _inserted_comment_like = CommentLike::like(pool, &comment_like_form).await.unwrap(); + let _inserted_comment_like = CommentLike::like(pool, &comment_like_form).await?; - let local_user_view = LocalUserView { - local_user: inserted_local_user.clone(), - person: inserted_person.clone(), + let timmy_local_user_view = LocalUserView { + local_user: inserted_timmy_local_user.clone(), + local_user_vote_display_mode: LocalUserVoteDisplayMode::default(), + person: inserted_timmy_person.clone(), counts: Default::default(), }; - Data { + let site_form = SiteInsertForm::new("test site".to_string(), inserted_instance.id); + let site = Site::create(pool, &site_form).await?; + Ok(Data { inserted_instance, inserted_comment_0, inserted_comment_1, inserted_comment_2, inserted_post, - local_user_view, - inserted_person_2, + timmy_local_user_view, + inserted_sara_person, inserted_community, - } + site, + }) } #[tokio::test] #[serial] - async fn test_crud() { + async fn test_crud() -> LemmyResult<()> { let pool = &build_db_pool_for_tests().await; let pool = &mut pool.into(); - let data = init_data(pool).await; + 
let data = init_data(pool).await?; - let expected_comment_view_no_person = expected_comment_view(&data, pool).await; + let expected_comment_view_no_person = expected_comment_view(&data, pool).await?; let mut expected_comment_view_with_person = expected_comment_view_no_person.clone(); expected_comment_view_with_person.my_vote = Some(1); @@ -559,24 +645,22 @@ mod tests { post_id: (Some(data.inserted_post.id)), ..Default::default() } - .list(pool) - .await - .unwrap(); + .list(&data.site, pool) + .await?; assert_eq!( - expected_comment_view_no_person, - read_comment_views_no_person[0] + Some(&expected_comment_view_no_person), + read_comment_views_no_person.first() ); let read_comment_views_with_person = CommentQuery { sort: (Some(CommentSortType::Old)), post_id: (Some(data.inserted_post.id)), - local_user: (Some(&data.local_user_view)), + local_user: (Some(&data.timmy_local_user_view.local_user)), ..Default::default() } - .list(pool) - .await - .unwrap(); + .list(&data.site, pool) + .await?; assert_eq!( expected_comment_view_with_person, @@ -584,55 +668,78 @@ mod tests { ); // Make sure its 1, not showing the blocked comment - assert_eq!(5, read_comment_views_with_person.len()); + assert_length!(5, read_comment_views_with_person); let read_comment_from_blocked_person = CommentView::read( pool, data.inserted_comment_1.id, - Some(data.local_user_view.person.id), + Some(&data.timmy_local_user_view.local_user), ) - .await - .unwrap(); + .await?; // Make sure block set the creator blocked assert!(read_comment_from_blocked_person.creator_blocked); - let read_liked_comment_views = CommentQuery { - local_user: (Some(&data.local_user_view)), - liked_only: (true), - ..Default::default() - } - .list(pool) - .await - .unwrap(); - - assert_eq!( - expected_comment_view_with_person, - read_liked_comment_views[0] - ); - - assert_eq!(1, read_liked_comment_views.len()); - - let read_disliked_comment_views: Vec = CommentQuery { - local_user: (Some(&data.local_user_view)), - disliked_only: (true), - ..Default::default() - } - .list(pool) - .await - .unwrap(); - - assert!(read_disliked_comment_views.is_empty()); - - cleanup(data, pool).await; + cleanup(data, pool).await } #[tokio::test] #[serial] - async fn test_comment_tree() { + async fn test_liked_only() -> LemmyResult<()> { let pool = &build_db_pool_for_tests().await; let pool = &mut pool.into(); - let data = init_data(pool).await; + let data = init_data(pool).await?; + + // Unblock sara first + let timmy_unblocks_sara_form = PersonBlockForm { + person_id: data.timmy_local_user_view.person.id, + target_id: data.inserted_sara_person.id, + }; + PersonBlock::unblock(pool, &timmy_unblocks_sara_form).await?; + + // Like a new comment + let comment_like_form = CommentLikeForm { + comment_id: data.inserted_comment_1.id, + person_id: data.timmy_local_user_view.person.id, + score: 1, + }; + CommentLike::like(pool, &comment_like_form).await?; + + let read_liked_comment_views = CommentQuery { + local_user: Some(&data.timmy_local_user_view.local_user), + liked_only: Some(true), + ..Default::default() + } + .list(&data.site, pool) + .await? 
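// These tests now return LemmyResult<()> and bubble failures up with `?` instead of
// unwrap(), and CommentQuery::list takes the site as an argument. A rough sketch of the
// resulting test shape, assuming the init_data/cleanup helpers defined above:
//
//   #[tokio::test]
//   #[serial]
//   async fn example() -> LemmyResult<()> {
//     let pool = &build_db_pool_for_tests().await;
//     let pool = &mut pool.into();
//     let data = init_data(pool).await?;
//     let comments = CommentQuery::default().list(&data.site, pool).await?;
//     assert!(!comments.is_empty());
//     cleanup(data, pool).await
//   }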
+ .into_iter() + .map(|c| c.comment.content) + .collect::>(); + + // Shouldn't include your own post, only other peoples + assert_eq!(data.inserted_comment_1.content, read_liked_comment_views[0]); + + assert_length!(1, read_liked_comment_views); + + let read_disliked_comment_views: Vec = CommentQuery { + local_user: Some(&data.timmy_local_user_view.local_user), + disliked_only: Some(true), + ..Default::default() + } + .list(&data.site, pool) + .await?; + + assert!(read_disliked_comment_views.is_empty()); + + cleanup(data, pool).await + } + + #[tokio::test] + #[serial] + async fn test_comment_tree() -> LemmyResult<()> { + let pool = &build_db_pool_for_tests().await; + let pool = &mut pool.into(); + let data = init_data(pool).await?; let top_path = data.inserted_comment_0.path.clone(); let read_comment_views_top_path = CommentQuery { @@ -640,9 +747,8 @@ mod tests { parent_path: (Some(top_path)), ..Default::default() } - .list(pool) - .await - .unwrap(); + .list(&data.site, pool) + .await?; let child_path = data.inserted_comment_1.path.clone(); let read_comment_views_child_path = CommentQuery { @@ -650,13 +756,12 @@ mod tests { parent_path: (Some(child_path)), ..Default::default() } - .list(pool) - .await - .unwrap(); + .list(&data.site, pool) + .await?; // Make sure the comment parent-limited fetch is correct - assert_eq!(6, read_comment_views_top_path.len()); - assert_eq!(4, read_comment_views_child_path.len()); + assert_length!(6, read_comment_views_top_path); + assert_length!(4, read_comment_views_child_path); // Make sure it contains the parent, but not the comment from the other tree let child_comments = read_comment_views_child_path @@ -671,16 +776,15 @@ mod tests { max_depth: (Some(1)), ..Default::default() } - .list(pool) - .await - .unwrap(); + .list(&data.site, pool) + .await?; // Make sure a depth limited one only has the top comment assert_eq!( - expected_comment_view(&data, pool).await, + expected_comment_view(&data, pool).await?, read_comment_views_top_max_depth[0] ); - assert_eq!(1, read_comment_views_top_max_depth.len()); + assert_length!(1, read_comment_views_top_max_depth); let child_path = data.inserted_comment_1.path.clone(); let read_comment_views_parent_max_depth = CommentQuery { @@ -690,171 +794,227 @@ mod tests { sort: (Some(CommentSortType::New)), ..Default::default() } - .list(pool) - .await - .unwrap(); + .list(&data.site, pool) + .await?; // Make sure a depth limited one, and given child comment 1, has 3 assert!(read_comment_views_parent_max_depth[2] .comment .content .eq("Comment 3")); - assert_eq!(3, read_comment_views_parent_max_depth.len()); + assert_length!(3, read_comment_views_parent_max_depth); - cleanup(data, pool).await; + cleanup(data, pool).await } #[tokio::test] #[serial] - async fn test_languages() { + async fn test_languages() -> LemmyResult<()> { let pool = &build_db_pool_for_tests().await; let pool = &mut pool.into(); - let data = init_data(pool).await; + let data = init_data(pool).await?; // by default, user has all languages enabled and should see all comments // (except from blocked user) let all_languages = CommentQuery { - local_user: (Some(&data.local_user_view)), + local_user: (Some(&data.timmy_local_user_view.local_user)), ..Default::default() } - .list(pool) - .await - .unwrap(); - assert_eq!(5, all_languages.len()); + .list(&data.site, pool) + .await?; + assert_length!(5, all_languages); // change user lang to finnish, should only show one post in finnish and one undetermined - let finnish_id = Language::read_id_from_code(pool, 
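// The tree assertions in test_comment_tree rely on each comment's stored `path`:
// parent_path restricts the listing to a comment and its descendants (6 rows under comment 0,
// 4 under comment 1), and max_depth additionally caps how deep the tree is fetched
// (Some(1) leaves only the top comment). A hedged sketch of the two query shapes used there:
//
//   let subtree = CommentQuery {
//     parent_path: Some(data.inserted_comment_1.path.clone()),
//     ..Default::default()
//   }
//   .list(&data.site, pool)
//   .await?;
//
//   let top_level_only = CommentQuery {
//     post_id: Some(data.inserted_post.id),
//     max_depth: Some(1),
//     ..Default::default()
//   }
//   .list(&data.site, pool)
//   .await?;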
Some("fi")) - .await - .unwrap() - .unwrap(); - LocalUserLanguage::update(pool, vec![finnish_id], data.local_user_view.local_user.id) - .await - .unwrap(); + let finnish_id = Language::read_id_from_code(pool, "fi").await?; + LocalUserLanguage::update( + pool, + vec![finnish_id], + data.timmy_local_user_view.local_user.id, + ) + .await?; let finnish_comments = CommentQuery { - local_user: (Some(&data.local_user_view)), + local_user: (Some(&data.timmy_local_user_view.local_user)), ..Default::default() } - .list(pool) - .await - .unwrap(); - assert_eq!(2, finnish_comments.len()); + .list(&data.site, pool) + .await?; + assert_length!(2, finnish_comments); let finnish_comment = finnish_comments .iter() .find(|c| c.comment.language_id == finnish_id); assert!(finnish_comment.is_some()); assert_eq!( - data.inserted_comment_2.content, - finnish_comment.unwrap().comment.content + Some(&data.inserted_comment_2.content), + finnish_comment.map(|c| &c.comment.content) ); // now show all comments with undetermined language (which is the default value) LocalUserLanguage::update( pool, vec![UNDETERMINED_ID], - data.local_user_view.local_user.id, + data.timmy_local_user_view.local_user.id, ) - .await - .unwrap(); + .await?; let undetermined_comment = CommentQuery { - local_user: (Some(&data.local_user_view)), + local_user: (Some(&data.timmy_local_user_view.local_user)), ..Default::default() } - .list(pool) - .await - .unwrap(); - assert_eq!(1, undetermined_comment.len()); + .list(&data.site, pool) + .await?; + assert_length!(1, undetermined_comment); - cleanup(data, pool).await; + cleanup(data, pool).await } #[tokio::test] #[serial] - async fn test_distinguished_first() { + async fn test_distinguished_first() -> LemmyResult<()> { let pool = &build_db_pool_for_tests().await; let pool = &mut pool.into(); - let data = init_data(pool).await; + let data = init_data(pool).await?; let form = CommentUpdateForm { distinguished: Some(true), ..Default::default() }; - Comment::update(pool, data.inserted_comment_2.id, &form) - .await - .unwrap(); + Comment::update(pool, data.inserted_comment_2.id, &form).await?; let comments = CommentQuery { post_id: Some(data.inserted_comment_2.post_id), ..Default::default() } - .list(pool) - .await - .unwrap(); + .list(&data.site, pool) + .await?; assert_eq!(comments[0].comment.id, data.inserted_comment_2.id); assert!(comments[0].comment.distinguished); - cleanup(data, pool).await; + cleanup(data, pool).await } #[tokio::test] #[serial] - async fn test_creator_is_moderator() { + async fn test_creator_is_moderator() -> LemmyResult<()> { let pool = &build_db_pool_for_tests().await; let pool = &mut pool.into(); - let data = init_data(pool).await; + let data = init_data(pool).await?; // Make one of the inserted persons a moderator - let person_id = data.inserted_person_2.id; + let person_id = data.inserted_sara_person.id; let community_id = data.inserted_community.id; let form = CommunityModeratorForm { community_id, person_id, }; - CommunityModerator::join(pool, &form).await.unwrap(); + CommunityModerator::join(pool, &form).await?; // Make sure that they come back as a mod in the list - let comments = CommentQuery::default().list(pool).await.unwrap(); + let comments = CommentQuery { + sort: (Some(CommentSortType::Old)), + ..Default::default() + } + .list(&data.site, pool) + .await?; + assert_eq!(comments[1].creator.name, "sara"); assert!(comments[1].creator_is_moderator); + assert!(!comments[0].creator_is_moderator); - cleanup(data, pool).await; + cleanup(data, pool).await } - async fn 
cleanup(data: Data, pool: &mut DbPool<'_>) { + #[tokio::test] + #[serial] + async fn test_creator_is_admin() -> LemmyResult<()> { + let pool = &build_db_pool_for_tests().await; + let pool = &mut pool.into(); + let data = init_data(pool).await?; + + let comments = CommentQuery { + sort: (Some(CommentSortType::Old)), + ..Default::default() + } + .list(&data.site, pool) + .await?; + + // Timmy is an admin, and make sure that field is true + assert_eq!(comments[0].creator.name, "timmy"); + assert!(comments[0].creator_is_admin); + + // Sara isn't, make sure its false + assert_eq!(comments[1].creator.name, "sara"); + assert!(!comments[1].creator_is_admin); + + cleanup(data, pool).await + } + + #[tokio::test] + #[serial] + async fn test_saved_order() -> LemmyResult<()> { + let pool = &build_db_pool_for_tests().await; + let pool = &mut pool.into(); + let data = init_data(pool).await?; + + // Save two comments + let save_comment_0_form = CommentSavedForm { + person_id: data.timmy_local_user_view.person.id, + comment_id: data.inserted_comment_0.id, + }; + CommentSaved::save(pool, &save_comment_0_form).await?; + + let save_comment_2_form = CommentSavedForm { + person_id: data.timmy_local_user_view.person.id, + comment_id: data.inserted_comment_2.id, + }; + CommentSaved::save(pool, &save_comment_2_form).await?; + + // Fetch the saved comments + let comments = CommentQuery { + local_user: Some(&data.timmy_local_user_view.local_user), + saved_only: Some(true), + ..Default::default() + } + .list(&data.site, pool) + .await?; + + // There should only be two comments + assert_eq!(2, comments.len()); + + // The first comment, should be the last one saved (descending order) + assert_eq!(comments[0].comment.id, data.inserted_comment_2.id); + + // The second comment, should be the first one saved + assert_eq!(comments[1].comment.id, data.inserted_comment_0.id); + + cleanup(data, pool).await + } + + async fn cleanup(data: Data, pool: &mut DbPool<'_>) -> LemmyResult<()> { CommentLike::remove( pool, - data.local_user_view.person.id, + data.timmy_local_user_view.person.id, data.inserted_comment_0.id, ) - .await - .unwrap(); - Comment::delete(pool, data.inserted_comment_0.id) - .await - .unwrap(); - Comment::delete(pool, data.inserted_comment_1.id) - .await - .unwrap(); - Post::delete(pool, data.inserted_post.id).await.unwrap(); - Community::delete(pool, data.inserted_community.id) - .await - .unwrap(); - Person::delete(pool, data.local_user_view.person.id) - .await - .unwrap(); - Person::delete(pool, data.inserted_person_2.id) - .await - .unwrap(); - Instance::delete(pool, data.inserted_instance.id) - .await - .unwrap(); + .await?; + Comment::delete(pool, data.inserted_comment_0.id).await?; + Comment::delete(pool, data.inserted_comment_1.id).await?; + Post::delete(pool, data.inserted_post.id).await?; + Community::delete(pool, data.inserted_community.id).await?; + Person::delete(pool, data.timmy_local_user_view.person.id).await?; + LocalUser::delete(pool, data.timmy_local_user_view.local_user.id).await?; + Person::delete(pool, data.inserted_sara_person.id).await?; + Instance::delete(pool, data.inserted_instance.id).await?; + Site::delete(pool, data.site.id).await?; + + Ok(()) } - async fn expected_comment_view(data: &Data, pool: &mut DbPool<'_>) -> CommentView { - let agg = CommentAggregates::read(pool, data.inserted_comment_0.id) - .await - .unwrap(); - CommentView { + async fn expected_comment_view(data: &Data, pool: &mut DbPool<'_>) -> LemmyResult { + let agg = CommentAggregates::read(pool, 
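// test_saved_order above saves comment 0 first and comment 2 second, then lists with
// saved_only: Some(true); the listing is expected to be ordered by save time, newest first,
// so comment 2 comes back before comment 0:
//
//   let saved = CommentQuery {
//     local_user: Some(&data.timmy_local_user_view.local_user),
//     saved_only: Some(true),
//     ..Default::default()
//   }
//   .list(&data.site, pool)
//   .await?;
//   assert_eq!(saved[0].comment.id, data.inserted_comment_2.id);
//
// cleanup() now also returns LemmyResult<()>, which is why each test can end with a bare
// `cleanup(data, pool).await`.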
data.inserted_comment_0.id).await?; + Ok(CommentView { creator_banned_from_community: false, + banned_from_community: false, creator_is_moderator: false, + creator_is_admin: true, my_vote: None, subscribed: SubscribedType::NotSubscribed, saved: false, @@ -862,7 +1022,7 @@ mod tests { comment: Comment { id: data.inserted_comment_0.id, content: "Comment 0".into(), - creator_id: data.local_user_view.person.id, + creator_id: data.timmy_local_user_view.person.id, post_id: data.inserted_post.id, removed: false, deleted: false, @@ -875,12 +1035,12 @@ mod tests { language_id: LanguageId(37), }, creator: Person { - id: data.local_user_view.person.id, + id: data.timmy_local_user_view.person.id, name: "timmy".into(), display_name: None, - published: data.local_user_view.person.published, + published: data.timmy_local_user_view.person.published, avatar: None, - actor_id: data.local_user_view.person.actor_id.clone(), + actor_id: data.timmy_local_user_view.person.actor_id.clone(), local: true, banned: false, deleted: false, @@ -888,21 +1048,21 @@ mod tests { bio: None, banner: None, updated: None, - inbox_url: data.local_user_view.person.inbox_url.clone(), - shared_inbox_url: None, + inbox_url: data.timmy_local_user_view.person.inbox_url.clone(), matrix_user_id: None, ban_expires: None, instance_id: data.inserted_instance.id, - private_key: data.local_user_view.person.private_key.clone(), - public_key: data.local_user_view.person.public_key.clone(), - last_refreshed_at: data.local_user_view.person.last_refreshed_at, + private_key: data.timmy_local_user_view.person.private_key.clone(), + public_key: data.timmy_local_user_view.person.public_key.clone(), + last_refreshed_at: data.timmy_local_user_view.person.last_refreshed_at, }, post: Post { id: data.inserted_post.id, name: data.inserted_post.name.clone(), - creator_id: data.local_user_view.person.id, + creator_id: data.timmy_local_user_view.person.id, url: None, body: None, + alt_text: None, published: data.inserted_post.published, updated: None, community_id: data.inserted_community.id, @@ -919,6 +1079,8 @@ mod tests { language_id: Default::default(), featured_community: false, featured_local: false, + url_content_type: None, + scheduled_publish_time: None, }, community: Community { id: data.inserted_community.id, @@ -930,6 +1092,7 @@ mod tests { actor_id: data.inserted_community.actor_id.clone(), local: true, title: "nada".to_owned(), + sidebar: None, description: None, updated: None, banner: None, @@ -942,21 +1105,156 @@ mod tests { last_refreshed_at: data.inserted_community.last_refreshed_at, followers_url: data.inserted_community.followers_url.clone(), inbox_url: data.inserted_community.inbox_url.clone(), - shared_inbox_url: data.inserted_community.shared_inbox_url.clone(), moderators_url: data.inserted_community.moderators_url.clone(), featured_url: data.inserted_community.featured_url.clone(), + visibility: CommunityVisibility::Public, }, counts: CommentAggregates { - id: agg.id, comment_id: data.inserted_comment_0.id, score: 1, upvotes: 1, downvotes: 0, published: agg.published, child_count: 5, - hot_rank: 0.1728, + hot_rank: RANK_DEFAULT, controversy_rank: 0.0, }, + }) + } + + #[tokio::test] + #[serial] + async fn local_only_instance() -> LemmyResult<()> { + let pool = &build_db_pool_for_tests().await; + let pool = &mut pool.into(); + let data = init_data(pool).await?; + + Community::update( + pool, + data.inserted_community.id, + &CommunityUpdateForm { + visibility: Some(CommunityVisibility::LocalOnly), + ..Default::default() + }, + ) + 
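// The local_only_instance test below switches the community to
// CommunityVisibility::LocalOnly and then checks both access paths: an unauthenticated
// CommentQuery returns no rows and an unauthenticated CommentView::read is an Err, while
// the same calls made with Some(&local_user) still see all five comments.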
.await?; + + let unauthenticated_query = CommentQuery { + ..Default::default() } + .list(&data.site, pool) + .await?; + assert_eq!(0, unauthenticated_query.len()); + + let authenticated_query = CommentQuery { + local_user: Some(&data.timmy_local_user_view.local_user), + ..Default::default() + } + .list(&data.site, pool) + .await?; + assert_eq!(5, authenticated_query.len()); + + let unauthenticated_comment = CommentView::read(pool, data.inserted_comment_0.id, None).await; + assert!(unauthenticated_comment.is_err()); + + let authenticated_comment = CommentView::read( + pool, + data.inserted_comment_0.id, + Some(&data.timmy_local_user_view.local_user), + ) + .await; + assert!(authenticated_comment.is_ok()); + + cleanup(data, pool).await + } + + #[tokio::test] + #[serial] + async fn comment_listing_local_user_banned_from_community() -> LemmyResult<()> { + let pool = &build_db_pool_for_tests().await; + let pool = &mut pool.into(); + let data = init_data(pool).await?; + + // Test that comment view shows if local user is blocked from community + let banned_from_comm_person = PersonInsertForm::test_form(data.inserted_instance.id, "jill"); + + let inserted_banned_from_comm_person = Person::create(pool, &banned_from_comm_person).await?; + + let inserted_banned_from_comm_local_user = LocalUser::create( + pool, + &LocalUserInsertForm::test_form(inserted_banned_from_comm_person.id), + vec![], + ) + .await?; + + CommunityPersonBan::ban( + pool, + &CommunityPersonBanForm { + community_id: data.inserted_community.id, + person_id: inserted_banned_from_comm_person.id, + expires: None, + }, + ) + .await?; + + let comment_view = CommentView::read( + pool, + data.inserted_comment_0.id, + Some(&inserted_banned_from_comm_local_user), + ) + .await?; + + assert!(comment_view.banned_from_community); + + Person::delete(pool, inserted_banned_from_comm_person.id).await?; + cleanup(data, pool).await + } + + #[tokio::test] + #[serial] + async fn comment_listing_local_user_not_banned_from_community() -> LemmyResult<()> { + let pool = &build_db_pool_for_tests().await; + let pool = &mut pool.into(); + let data = init_data(pool).await?; + + let comment_view = CommentView::read( + pool, + data.inserted_comment_0.id, + Some(&data.timmy_local_user_view.local_user), + ) + .await?; + + assert!(!comment_view.banned_from_community); + + cleanup(data, pool).await + } + + #[tokio::test] + #[serial] + async fn comment_listings_hide_nsfw() -> LemmyResult<()> { + let pool = &build_db_pool_for_tests().await; + let pool = &mut pool.into(); + let data = init_data(pool).await?; + + // Mark a post as nsfw + let update_form = PostUpdateForm { + nsfw: Some(true), + ..Default::default() + }; + Post::update(pool, data.inserted_post.id, &update_form).await?; + + // Make sure comments of this post are not returned + let comments = CommentQuery::default().list(&data.site, pool).await?; + assert_eq!(0, comments.len()); + + // Mark site as nsfw + let mut site = data.site.clone(); + site.content_warning = Some("nsfw".to_string()); + + // Now comments of nsfw post are returned + let comments = CommentQuery::default().list(&site, pool).await?; + assert_eq!(6, comments.len()); + + cleanup(data, pool).await } } diff --git a/crates/db_views/src/custom_emoji_view.rs b/crates/db_views/src/custom_emoji_view.rs index d83fa9912..606e807e9 100644 --- a/crates/db_views/src/custom_emoji_view.rs +++ b/crates/db_views/src/custom_emoji_view.rs @@ -2,10 +2,10 @@ use crate::structs::CustomEmojiView; use diesel::{result::Error, ExpressionMethods, JoinOnDsl, 
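// CustomEmojiView::get_all is renamed to list() and rebuilt below as a boxed query: the
// category filter and the limit/offset from limit_and_offset(page, limit) are applied only
// when requested, and ignore_page_limits lets internal callers fetch every emoji. A hedged
// sketch of two call sites, assuming the category/page/limit parameters are
// Option<String> / Option<i64> as elsewhere in this crate:
//
//   let one_page = CustomEmojiView::list(pool, &Some("reactions".to_string()), Some(1), Some(25), false).await?;
//   let everything = CustomEmojiView::list(pool, &None, None, None, true).await?;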
NullableExpressionMethods, QueryDsl}; use diesel_async::RunQueryDsl; use lemmy_db_schema::{ - newtypes::{CustomEmojiId, LocalSiteId}, + newtypes::CustomEmojiId, schema::{custom_emoji, custom_emoji_keyword}, source::{custom_emoji::CustomEmoji, custom_emoji_keyword::CustomEmojiKeyword}, - utils::{get_conn, DbPool}, + utils::{get_conn, limit_and_offset, DbPool}, }; use std::collections::HashMap; @@ -35,18 +35,34 @@ impl CustomEmojiView { } } - pub async fn get_all( + pub async fn list( pool: &mut DbPool<'_>, - for_local_site_id: LocalSiteId, + category: &Option, + page: Option, + limit: Option, + ignore_page_limits: bool, ) -> Result, Error> { let conn = &mut get_conn(pool).await?; - let emojis = custom_emoji::table - .filter(custom_emoji::local_site_id.eq(for_local_site_id)) + + let mut query = custom_emoji::table .left_join( custom_emoji_keyword::table.on(custom_emoji_keyword::custom_emoji_id.eq(custom_emoji::id)), ) .order(custom_emoji::category) - .then_order_by(custom_emoji::id) + .into_boxed(); + + if !ignore_page_limits { + let (limit, offset) = limit_and_offset(page, limit)?; + query = query.limit(limit).offset(offset); + } + + if let Some(category) = category { + query = query.filter(custom_emoji::category.eq(category)) + } + + query = query.then_order_by(custom_emoji::id); + + let emojis = query .select(( custom_emoji::all_columns, custom_emoji_keyword::all_columns.nullable(), // (or all the columns if you want) @@ -60,16 +76,16 @@ impl CustomEmojiView { fn from_tuple_to_vec(items: Vec) -> Vec { let mut result = Vec::new(); let mut hash: HashMap> = HashMap::new(); - for item in &items { - let emoji_id: CustomEmojiId = item.0.id; + for (emoji, keyword) in &items { + let emoji_id: CustomEmojiId = emoji.id; if let std::collections::hash_map::Entry::Vacant(e) = hash.entry(emoji_id) { e.insert(Vec::new()); result.push(CustomEmojiView { - custom_emoji: item.0.clone(), + custom_emoji: emoji.clone(), keywords: Vec::new(), }) } - if let Some(item_keyword) = &item.1 { + if let Some(item_keyword) = &keyword { if let Some(keywords) = hash.get_mut(&emoji_id) { keywords.push(item_keyword.clone()) } @@ -77,7 +93,7 @@ impl CustomEmojiView { } for emoji in &mut result { if let Some(keywords) = hash.get_mut(&emoji.custom_emoji.id) { - emoji.keywords = keywords.clone(); + emoji.keywords.clone_from(keywords); } } result diff --git a/crates/db_views/src/lib.rs b/crates/db_views/src/lib.rs index 8abf776ba..e93c7409d 100644 --- a/crates/db_views/src/lib.rs +++ b/crates/db_views/src/lib.rs @@ -8,6 +8,8 @@ pub mod comment_view; #[cfg(feature = "full")] pub mod custom_emoji_view; #[cfg(feature = "full")] +pub mod local_image_view; +#[cfg(feature = "full")] pub mod local_user_view; #[cfg(feature = "full")] pub mod post_report_view; @@ -22,3 +24,5 @@ pub mod registration_application_view; #[cfg(feature = "full")] pub mod site_view; pub mod structs; +#[cfg(feature = "full")] +pub mod vote_view; diff --git a/crates/db_views/src/local_image_view.rs b/crates/db_views/src/local_image_view.rs new file mode 100644 index 000000000..7b5b97095 --- /dev/null +++ b/crates/db_views/src/local_image_view.rs @@ -0,0 +1,61 @@ +use crate::structs::LocalImageView; +use diesel::{result::Error, ExpressionMethods, JoinOnDsl, QueryDsl}; +use diesel_async::RunQueryDsl; +use lemmy_db_schema::{ + newtypes::LocalUserId, + schema::{local_image, local_user, person}, + utils::{get_conn, limit_and_offset, DbPool}, +}; + +impl LocalImageView { + async fn get_all_helper( + pool: &mut DbPool<'_>, + user_id: Option, + page: Option, + limit: 
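// LocalImageView routes its public listing methods through this single get_all_helper:
// a Some(user_id) narrows the result to one uploader, and ignore_page_limits skips the
// limit_and_offset pagination (used by get_all_by_local_user_id). A sketch of the public
// entry points defined further down in this file:
//
//   let my_page = LocalImageView::get_all_paged_by_local_user_id(pool, user_id, None, None).await?;
//   let all_mine = LocalImageView::get_all_by_local_user_id(pool, user_id).await?;
//   let site_wide = LocalImageView::get_all(pool, Some(1), Some(50)).await?;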
Option, + ignore_page_limits: bool, + ) -> Result, Error> { + let conn = &mut get_conn(pool).await?; + let mut query = local_image::table + .inner_join(local_user::table) + .inner_join(person::table.on(local_user::person_id.eq(person::id))) + .select((local_image::all_columns, person::all_columns)) + .order_by(local_image::published.desc()) + .into_boxed(); + + if let Some(user_id) = user_id { + query = query.filter(local_image::local_user_id.eq(user_id)) + } + + if !ignore_page_limits { + let (limit, offset) = limit_and_offset(page, limit)?; + query = query.limit(limit).offset(offset); + } + + query.load::(conn).await + } + + pub async fn get_all_paged_by_local_user_id( + pool: &mut DbPool<'_>, + user_id: LocalUserId, + page: Option, + limit: Option, + ) -> Result, Error> { + Self::get_all_helper(pool, Some(user_id), page, limit, false).await + } + + pub async fn get_all_by_local_user_id( + pool: &mut DbPool<'_>, + user_id: LocalUserId, + ) -> Result, Error> { + Self::get_all_helper(pool, Some(user_id), None, None, true).await + } + + pub async fn get_all( + pool: &mut DbPool<'_>, + page: Option, + limit: Option, + ) -> Result, Error> { + Self::get_all_helper(pool, None, page, limit, false).await + } +} diff --git a/crates/db_views/src/local_user_view.rs b/crates/db_views/src/local_user_view.rs index dd0e3631b..8d55b96fe 100644 --- a/crates/db_views/src/local_user_view.rs +++ b/crates/db_views/src/local_user_view.rs @@ -3,9 +3,22 @@ use actix_web::{dev::Payload, FromRequest, HttpMessage, HttpRequest}; use diesel::{result::Error, BoolExpressionMethods, ExpressionMethods, JoinOnDsl, QueryDsl}; use diesel_async::RunQueryDsl; use lemmy_db_schema::{ - newtypes::{LocalUserId, PersonId}, - schema::{local_user, person, person_aggregates}, - utils::{functions::lower, DbConn, DbPool, ListFn, Queries, ReadFn}, + newtypes::{LocalUserId, OAuthProviderId, PersonId}, + schema::{local_user, local_user_vote_display_mode, oauth_account, person, person_aggregates}, + source::{ + instance::Instance, + local_user::{LocalUser, LocalUserInsertForm}, + person::{Person, PersonInsertForm}, + }, + traits::Crud, + utils::{ + functions::{coalesce, lower}, + DbConn, + DbPool, + ListFn, + Queries, + ReadFn, + }, }; use lemmy_utils::error::{LemmyError, LemmyErrorType}; use std::future::{ready, Ready}; @@ -16,6 +29,7 @@ enum ReadBy<'a> { Name(&'a str), NameOrEmail(&'a str), Email(&'a str), + OAuthID(OAuthProviderId, &'a str), } enum ListMode { @@ -26,6 +40,7 @@ fn queries<'a>( ) -> Queries>, impl ListFn<'a, LocalUserView, ListMode>> { let selection = ( local_user::all_columns, + local_user_vote_display_mode::all_columns, person::all_columns, person_aggregates::all_columns, ); @@ -34,7 +49,9 @@ fn queries<'a>( let mut query = local_user::table.into_boxed(); query = match search { ReadBy::Id(local_user_id) => query.filter(local_user::id.eq(local_user_id)), - ReadBy::Email(from_email) => query.filter(local_user::email.eq(from_email)), + ReadBy::Email(from_email) => { + query.filter(lower(coalesce(local_user::email, "")).eq(from_email.to_lowercase())) + } _ => query, }; let mut query = query.inner_join(person::table); @@ -43,26 +60,37 @@ fn queries<'a>( ReadBy::Name(name) => query.filter(lower(person::name).eq(name.to_lowercase())), ReadBy::NameOrEmail(name_or_email) => query.filter( lower(person::name) - .eq(lower(name_or_email)) - .or(local_user::email.eq(name_or_email)), + .eq(lower(name_or_email.to_lowercase())) + .or(lower(coalesce(local_user::email, "")).eq(name_or_email.to_lowercase())), ), _ => query, }; - query - 
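// The email and name-or-email lookups become case-insensitive and null-safe here:
// lower(coalesce(local_user::email, "")) is compared against the lowercased input, so a row
// with no email can never match and "User@Example.com" resolves to the same account as
// "user@example.com". The core filter, as introduced above:
//
//   query.filter(lower(coalesce(local_user::email, "")).eq(from_email.to_lowercase()))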
.inner_join(person_aggregates::table.on(person::id.eq(person_aggregates::person_id))) - .select(selection) - .first::(&mut conn) - .await + let query = query + .inner_join(local_user_vote_display_mode::table) + .inner_join(person_aggregates::table.on(person::id.eq(person_aggregates::person_id))); + + if let ReadBy::OAuthID(oauth_provider_id, oauth_user_id) = search { + query + .inner_join(oauth_account::table) + .filter(oauth_account::oauth_provider_id.eq(oauth_provider_id)) + .filter(oauth_account::oauth_user_id.eq(oauth_user_id)) + .select(selection) + .first(&mut conn) + .await + } else { + query.select(selection).first(&mut conn).await + } }; let list = move |mut conn: DbConn<'a>, mode: ListMode| async move { match mode { ListMode::AdminsWithEmails => { local_user::table - .filter(local_user::email.is_not_null()) - .filter(local_user::admin.eq(true)) + .inner_join(local_user_vote_display_mode::table) .inner_join(person::table) .inner_join(person_aggregates::table.on(person::id.eq(person_aggregates::person_id))) + .filter(local_user::email.is_not_null()) + .filter(local_user::admin.eq(true)) .select(selection) .load::(&mut conn) .await @@ -99,9 +127,44 @@ impl LocalUserView { queries().read(pool, ReadBy::Email(from_email)).await } + pub async fn find_by_oauth_id( + pool: &mut DbPool<'_>, + oauth_provider_id: OAuthProviderId, + oauth_user_id: &str, + ) -> Result { + queries() + .read(pool, ReadBy::OAuthID(oauth_provider_id, oauth_user_id)) + .await + } + pub async fn list_admins_with_emails(pool: &mut DbPool<'_>) -> Result, Error> { queries().list(pool, ListMode::AdminsWithEmails).await } + + pub async fn create_test_user( + pool: &mut DbPool<'_>, + name: &str, + bio: &str, + admin: bool, + ) -> Result { + let instance_id = Instance::read_or_create(pool, "example.com".to_string()) + .await? 
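// ReadBy::OAuthID is the new lookup path for external logins: it inner-joins oauth_account
// and filters on the provider id plus the provider-side user id, exposed through
// LocalUserView::find_by_oauth_id. A sketch of a caller, using the signature added above
// (the literal user id string is only an example):
//
//   let user = LocalUserView::find_by_oauth_id(pool, oauth_provider_id, "external-user-id").await?;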
+ .id; + let person_form = PersonInsertForm { + display_name: Some(name.to_owned()), + bio: Some(bio.to_owned()), + ..PersonInsertForm::test_form(instance_id, name) + }; + let person = Person::create(pool, &person_form).await?; + + let user_form = match admin { + true => LocalUserInsertForm::test_form_admin(person.id), + false => LocalUserInsertForm::test_form(person.id), + }; + let local_user = LocalUser::create(pool, &user_form, vec![]).await?; + + LocalUserView::read(pool, local_user.id).await + } } impl FromRequest for LocalUserView { diff --git a/crates/db_views/src/post_report_view.rs b/crates/db_views/src/post_report_view.rs index 324023ecf..82e4c5d5b 100644 --- a/crates/db_views/src/post_report_view.rs +++ b/crates/db_views/src/post_report_view.rs @@ -11,18 +11,34 @@ use diesel::{ use diesel_async::RunQueryDsl; use lemmy_db_schema::{ aliases, - newtypes::{CommunityId, PersonId, PostReportId}, + newtypes::{CommunityId, PersonId, PostId, PostReportId}, schema::{ community, + community_follower, community_moderator, community_person_ban, + local_user, person, + person_block, + person_post_aggregates, post, post_aggregates, + post_hide, post_like, + post_read, post_report, + post_saved, + }, + utils::{ + functions::coalesce, + get_conn, + limit_and_offset, + DbConn, + DbPool, + ListFn, + Queries, + ReadFn, }, - utils::{get_conn, limit_and_offset, DbConn, DbPool, ListFn, Queries, ReadFn}, }; fn queries<'a>() -> Queries< @@ -42,6 +58,67 @@ fn queries<'a>() -> Queries< .and(community_person_ban::person_id.eq(post::creator_id)), ), ) + .left_join( + aliases::community_moderator1.on( + aliases::community_moderator1 + .field(community_moderator::community_id) + .eq(post::community_id) + .and( + aliases::community_moderator1 + .field(community_moderator::person_id) + .eq(my_person_id), + ), + ), + ) + .left_join( + local_user::table.on( + post::creator_id + .eq(local_user::person_id) + .and(local_user::admin.eq(true)), + ), + ) + .left_join( + post_saved::table.on( + post::id + .eq(post_saved::post_id) + .and(post_saved::person_id.eq(my_person_id)), + ), + ) + .left_join( + post_read::table.on( + post::id + .eq(post_read::post_id) + .and(post_read::person_id.eq(my_person_id)), + ), + ) + .left_join( + post_hide::table.on( + post::id + .eq(post_hide::post_id) + .and(post_hide::person_id.eq(my_person_id)), + ), + ) + .left_join( + person_block::table.on( + post::creator_id + .eq(person_block::target_id) + .and(person_block::person_id.eq(my_person_id)), + ), + ) + .left_join( + person_post_aggregates::table.on( + post::id + .eq(person_post_aggregates::post_id) + .and(person_post_aggregates::person_id.eq(my_person_id)), + ), + ) + .left_join( + community_follower::table.on( + post::community_id + .eq(community_follower::community_id) + .and(community_follower::person_id.eq(my_person_id)), + ), + ) .left_join( post_like::table.on( post::id @@ -60,8 +137,22 @@ fn queries<'a>() -> Queries< community::all_columns, person::all_columns, aliases::person1.fields(person::all_columns), - community_person_ban::id.nullable().is_not_null(), + community_person_ban::community_id.nullable().is_not_null(), + aliases::community_moderator1 + .field(community_moderator::community_id) + .nullable() + .is_not_null(), + local_user::admin.nullable().is_not_null(), + community_follower::pending.nullable(), + post_saved::post_id.nullable().is_not_null(), + post_read::post_id.nullable().is_not_null(), + post_hide::post_id.nullable().is_not_null(), + person_block::target_id.nullable().is_not_null(), 
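// The post report query now left-joins the aliased community_moderator, local_user,
// post_saved, post_read, post_hide, person_block, person_post_aggregates and
// community_follower tables against the viewing person's id, so whether the creator is a
// moderator or admin, and whether the viewer has saved, read, hidden or blocked anything
// involved, is all computed in the same select instead of with per-row follow-up queries.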
post_like::score.nullable(), + coalesce( + post_aggregates::comments.nullable() - person_post_aggregates::read_comments.nullable(), + post_aggregates::comments, + ), post_aggregates::all_columns, aliases::person2.fields(person::all_columns.nullable()), )) @@ -72,7 +163,7 @@ fn queries<'a>() -> Queries< post_report::table.find(report_id).into_boxed(), my_person_id, ) - .first::(&mut conn) + .first(&mut conn) .await }; @@ -83,16 +174,23 @@ fn queries<'a>() -> Queries< query = query.filter(post::community_id.eq(community_id)); } + if let Some(post_id) = options.post_id { + query = query.filter(post::id.eq(post_id)); + } + + // If viewing all reports, order by newest, but if viewing unresolved only, show the oldest + // first (FIFO) if options.unresolved_only { - query = query.filter(post_report::resolved.eq(false)); + query = query + .filter(post_report::resolved.eq(false)) + .order_by(post_report::published.asc()); + } else { + query = query.order_by(post_report::published.desc()); } let (limit, offset) = limit_and_offset(options.page, options.limit)?; - query = query - .order_by(post_report::published.desc()) - .limit(limit) - .offset(offset); + query = query.limit(limit).offset(offset); // If its not an admin, get only the ones you mod if !user.local_user.admin { @@ -169,6 +267,7 @@ impl PostReportView { #[derive(Default)] pub struct PostReportQuery { pub community_id: Option, + pub post_id: Option, pub page: Option, pub limit: Option, pub unresolved_only: bool, @@ -185,20 +284,20 @@ impl PostReportQuery { } #[cfg(test)] +#[expect(clippy::indexing_slicing)] mod tests { - #![allow(clippy::unwrap_used)] - #![allow(clippy::indexing_slicing)] use crate::{ post_report_view::{PostReportQuery, PostReportView}, structs::LocalUserView, }; use lemmy_db_schema::{ + assert_length, source::{ community::{Community, CommunityInsertForm, CommunityModerator, CommunityModeratorForm}, instance::Instance, local_user::{LocalUser, LocalUserInsertForm}, - moderator::{ModRemovePost, ModRemovePostForm}, + local_user_vote_display_mode::LocalUserVoteDisplayMode, person::{Person, PersonInsertForm}, post::{Post, PostInsertForm}, post_report::{PostReport, PostReportForm}, @@ -206,62 +305,47 @@ mod tests { traits::{Crud, Joinable, Reportable}, utils::build_db_pool_for_tests, }; + use lemmy_utils::error::LemmyResult; + use pretty_assertions::assert_eq; use serial_test::serial; #[tokio::test] #[serial] - async fn test_crud() { + async fn test_crud() -> LemmyResult<()> { let pool = &build_db_pool_for_tests().await; let pool = &mut pool.into(); - let inserted_instance = Instance::read_or_create(pool, "my_domain.tld".to_string()) - .await - .unwrap(); + let inserted_instance = Instance::read_or_create(pool, "my_domain.tld".to_string()).await?; - let new_person = PersonInsertForm::builder() - .name("timmy_prv".into()) - .public_key("pubkey".to_string()) - .instance_id(inserted_instance.id) - .build(); + let new_person = PersonInsertForm::test_form(inserted_instance.id, "timmy_prv"); - let inserted_timmy = Person::create(pool, &new_person).await.unwrap(); + let inserted_timmy = Person::create(pool, &new_person).await?; - let new_local_user = LocalUserInsertForm::builder() - .person_id(inserted_timmy.id) - .password_encrypted("123".to_string()) - .build(); - let timmy_local_user = LocalUser::create(pool, &new_local_user).await.unwrap(); + let new_local_user = LocalUserInsertForm::test_form(inserted_timmy.id); + let timmy_local_user = LocalUser::create(pool, &new_local_user, vec![]).await?; let timmy_view = LocalUserView { 
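// Two behavioural changes in the report listing: PostReportQuery gains a post_id filter,
// and ordering now depends on the view. Unresolved-only listings come back oldest first
// (published.asc(), FIFO) so moderators can work through the queue in order, while the full
// listing stays newest first. A sketch of a moderation-queue call with the new field, where
// `post_id` stands in for whatever post is being reviewed:
//
//   let queue = PostReportQuery {
//     unresolved_only: true,
//     post_id: Some(post_id),
//     ..Default::default()
//   }
//   .list(pool, &timmy_view)
//   .await?;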
local_user: timmy_local_user, + local_user_vote_display_mode: LocalUserVoteDisplayMode::default(), person: inserted_timmy.clone(), counts: Default::default(), }; - let new_person_2 = PersonInsertForm::builder() - .name("sara_prv".into()) - .public_key("pubkey".to_string()) - .instance_id(inserted_instance.id) - .build(); + let new_person_2 = PersonInsertForm::test_form(inserted_instance.id, "sara_prv"); - let inserted_sara = Person::create(pool, &new_person_2).await.unwrap(); + let inserted_sara = Person::create(pool, &new_person_2).await?; // Add a third person, since new ppl can only report something once. - let new_person_3 = PersonInsertForm::builder() - .name("jessica_prv".into()) - .public_key("pubkey".to_string()) - .instance_id(inserted_instance.id) - .build(); + let new_person_3 = PersonInsertForm::test_form(inserted_instance.id, "jessica_prv"); - let inserted_jessica = Person::create(pool, &new_person_3).await.unwrap(); + let inserted_jessica = Person::create(pool, &new_person_3).await?; - let new_community = CommunityInsertForm::builder() - .name("test community prv".to_string()) - .title("nada".to_owned()) - .public_key("pubkey".to_string()) - .instance_id(inserted_instance.id) - .build(); - - let inserted_community = Community::create(pool, &new_community).await.unwrap(); + let new_community = CommunityInsertForm::new( + inserted_instance.id, + "test community prv".to_string(), + "nada".to_owned(), + "pubkey".to_string(), + ); + let inserted_community = Community::create(pool, &new_community).await?; // Make timmy a mod let timmy_moderator_form = CommunityModeratorForm { @@ -269,17 +353,14 @@ mod tests { person_id: inserted_timmy.id, }; - let _inserted_moderator = CommunityModerator::join(pool, &timmy_moderator_form) - .await - .unwrap(); + let _inserted_moderator = CommunityModerator::join(pool, &timmy_moderator_form).await?; - let new_post = PostInsertForm::builder() - .name("A test post crv".into()) - .creator_id(inserted_timmy.id) - .community_id(inserted_community.id) - .build(); - - let inserted_post = Post::create(pool, &new_post).await.unwrap(); + let new_post = PostInsertForm::new( + "A test post crv".into(), + inserted_timmy.id, + inserted_community.id, + ); + let inserted_post = Post::create(pool, &new_post).await?; // sara reports let sara_report_form = PostReportForm { @@ -291,15 +372,14 @@ mod tests { reason: "from sara".into(), }; - PostReport::report(pool, &sara_report_form).await.unwrap(); + PostReport::report(pool, &sara_report_form).await?; - let new_post_2 = PostInsertForm::builder() - .name("A test post crv 2".into()) - .creator_id(inserted_timmy.id) - .community_id(inserted_community.id) - .build(); - - let inserted_post_2 = Post::create(pool, &new_post_2).await.unwrap(); + let new_post_2 = PostInsertForm::new( + "A test post crv 2".into(), + inserted_timmy.id, + inserted_community.id, + ); + let inserted_post_2 = Post::create(pool, &new_post_2).await?; // jessica reports let jessica_report_form = PostReportForm { @@ -311,14 +391,10 @@ mod tests { reason: "from jessica".into(), }; - let inserted_jessica_report = PostReport::report(pool, &jessica_report_form) - .await - .unwrap(); + let inserted_jessica_report = PostReport::report(pool, &jessica_report_form).await?; let read_jessica_report_view = - PostReportView::read(pool, inserted_jessica_report.id, inserted_timmy.id) - .await - .unwrap(); + PostReportView::read(pool, inserted_jessica_report.id, inserted_timmy.id).await?; assert_eq!( read_jessica_report_view.post_report, @@ -332,33 +408,23 @@ mod tests 
{ assert_eq!(read_jessica_report_view.resolver, None); // Do a batch read of timmys reports - let reports = PostReportQuery::default() - .list(pool, &timmy_view) - .await - .unwrap(); + let reports = PostReportQuery::default().list(pool, &timmy_view).await?; - assert_eq!(reports[0].creator.id, inserted_jessica.id); assert_eq!(reports[1].creator.id, inserted_sara.id); + assert_eq!(reports[0].creator.id, inserted_jessica.id); // Make sure the counts are correct - let report_count = PostReportView::get_report_count(pool, inserted_timmy.id, false, None) - .await - .unwrap(); + let report_count = + PostReportView::get_report_count(pool, inserted_timmy.id, false, None).await?; assert_eq!(2, report_count); - // Writing post removal to mod log should automatically resolve reports - let remove_form = ModRemovePostForm { - mod_person_id: inserted_timmy.id, - post_id: inserted_jessica_report.post_id, - reason: None, - removed: Some(true), - }; - ModRemovePost::create(pool, &remove_form).await.unwrap(); + // Pretend the post was removed, and resolve all reports for that object. + // This is called manually in the API for post removals + PostReport::resolve_all_for_object(pool, inserted_jessica_report.post_id, inserted_timmy.id) + .await?; let read_jessica_report_view_after_resolve = - PostReportView::read(pool, inserted_jessica_report.id, inserted_timmy.id) - .await - .unwrap(); + PostReportView::read(pool, inserted_jessica_report.id, inserted_timmy.id).await?; assert!(read_jessica_report_view_after_resolve.post_report.resolved); assert_eq!( read_jessica_report_view_after_resolve @@ -367,8 +433,10 @@ mod tests { Some(inserted_timmy.id) ); assert_eq!( - read_jessica_report_view_after_resolve.resolver.unwrap().id, - inserted_timmy.id + read_jessica_report_view_after_resolve + .resolver + .map(|r| r.id), + Some(inserted_timmy.id) ); // Do a batch read of timmys reports @@ -378,24 +446,21 @@ mod tests { ..Default::default() } .list(pool, &timmy_view) - .await - .unwrap(); - assert_eq!(reports_after_resolve.len(), 1); + .await?; + assert_length!(1, reports_after_resolve); assert_eq!(reports_after_resolve[0].creator.id, inserted_sara.id); // Make sure the counts are correct let report_count_after_resolved = - PostReportView::get_report_count(pool, inserted_timmy.id, false, None) - .await - .unwrap(); + PostReportView::get_report_count(pool, inserted_timmy.id, false, None).await?; assert_eq!(1, report_count_after_resolved); - Person::delete(pool, inserted_timmy.id).await.unwrap(); - Person::delete(pool, inserted_sara.id).await.unwrap(); - Person::delete(pool, inserted_jessica.id).await.unwrap(); - Community::delete(pool, inserted_community.id) - .await - .unwrap(); - Instance::delete(pool, inserted_instance.id).await.unwrap(); + Person::delete(pool, inserted_timmy.id).await?; + Person::delete(pool, inserted_sara.id).await?; + Person::delete(pool, inserted_jessica.id).await?; + Community::delete(pool, inserted_community.id).await?; + Instance::delete(pool, inserted_instance.id).await?; + + Ok(()) } } diff --git a/crates/db_views/src/post_view.rs b/crates/db_views/src/post_view.rs index 51213abb2..4fa2222ae 100644 --- a/crates/db_views/src/post_view.rs +++ b/crates/db_views/src/post_view.rs @@ -1,15 +1,13 @@ -use crate::structs::{LocalUserView, PaginationCursor, PostView}; +use crate::structs::{PaginationCursor, PostView}; use diesel::{ debug_query, - dsl::{self, exists, not, IntervalDsl}, - expression::AsExpression, + dsl::{exists, not, IntervalDsl}, pg::Pg, + query_builder::AsQuery, result::Error, - 
sql_function, - sql_types::{self, SingleValue, SqlType, Timestamptz}, + sql_types, BoolExpressionMethods, BoxableExpression, - Expression, ExpressionMethods, IntoSql, JoinOnDsl, @@ -19,8 +17,10 @@ use diesel::{ QueryDsl, }; use diesel_async::RunQueryDsl; +use i_love_jesus::PaginatedQueryBuilder; use lemmy_db_schema::{ - aggregates::structs::PostAggregates, + aggregates::structs::{post_aggregates_keys as key, PostAggregates}, + impls::local_user::LocalUserOptionHelper, newtypes::{CommunityId, LocalUserId, PersonId, PostId}, schema::{ community, @@ -28,77 +28,44 @@ use lemmy_db_schema::{ community_follower, community_moderator, community_person_ban, + image_details, instance_block, + local_user, local_user_language, person, person_block, person_post_aggregates, post, - post_aggregates::{self, newest_comment_time}, + post_aggregates, + post_hide, post_like, post_read, post_saved, }, - utils::{fuzzy_search, get_conn, limit_and_offset, DbConn, DbPool, ListFn, Queries, ReadFn}, + source::{local_user::LocalUser, site::Site}, + utils::{ + functions::coalesce, + fuzzy_search, + get_conn, + limit_and_offset, + now, + Commented, + DbConn, + DbPool, + ListFn, + Queries, + ReadFn, + ReverseTimestampKey, + }, ListingType, - SortType, + PostSortType, }; use tracing::debug; - -sql_function!(fn coalesce(x: sql_types::Nullable, y: sql_types::BigInt) -> sql_types::BigInt); - -fn order_and_page_filter_desc( - query: Q, - column: C, - options: &PostQuery, - getter: impl Fn(&PostAggregates) -> T, -) -> Q -where - Q: diesel::query_dsl::methods::ThenOrderDsl, Output = Q> - + diesel::query_dsl::methods::ThenOrderDsl, Output = Q> - + diesel::query_dsl::methods::FilterDsl, Output = Q> - + diesel::query_dsl::methods::FilterDsl, Output = Q>, - C: Expression + Copy, - C::SqlType: SingleValue + SqlType, - T: AsExpression, -{ - let mut query = query.then_order_by(column.desc()); - if let Some(before) = &options.page_before_or_equal { - query = query.filter(column.ge(getter(&before.0))); - } - if let Some(after) = &options.page_after { - query = query.filter(column.le(getter(&after.0))); - } - query -} - -fn order_and_page_filter_asc( - query: Q, - column: C, - options: &PostQuery, - getter: impl Fn(&PostAggregates) -> T, -) -> Q -where - Q: diesel::query_dsl::methods::ThenOrderDsl, Output = Q> - + diesel::query_dsl::methods::FilterDsl, Output = Q> - + diesel::query_dsl::methods::FilterDsl, Output = Q>, - C: Expression + Copy, - C::SqlType: SingleValue + SqlType, - T: AsExpression, -{ - let mut query = query.then_order_by(column.asc()); - if let Some(before) = &options.page_before_or_equal { - query = query.filter(column.le(getter(&before.0))); - } - if let Some(after) = &options.page_after { - query = query.filter(column.ge(getter(&after.0))); - } - query -} +use PostSortType::*; fn queries<'a>() -> Queries< - impl ReadFn<'a, PostView, (PostId, Option, bool)>, - impl ListFn<'a, PostView, PostQuery<'a>>, + impl ReadFn<'a, PostView, (PostId, Option<&'a LocalUser>, bool)>, + impl ListFn<'a, PostView, (PostQuery<'a>, &'a Site)>, > { let is_creator_banned_from_community = exists( community_person_ban::table.filter( @@ -107,6 +74,17 @@ fn queries<'a>() -> Queries< .and(community_person_ban::person_id.eq(post_aggregates::creator_id)), ), ); + + let is_local_user_banned_from_community = |person_id| { + exists( + community_person_ban::table.filter( + post_aggregates::community_id + .eq(community_person_ban::community_id) + .and(community_person_ban::person_id.eq(person_id)), + ), + ) + }; + let creator_is_moderator = exists( 
community_moderator::table.filter( post_aggregates::community_id @@ -115,15 +93,13 @@ fn queries<'a>() -> Queries< ), ); - let is_saved = |person_id| { - exists( - post_saved::table.filter( - post_aggregates::post_id - .eq(post_saved::post_id) - .and(post_saved::person_id.eq(person_id)), - ), - ) - }; + let creator_is_admin = exists( + local_user::table.filter( + post_aggregates::creator_id + .eq(local_user::person_id) + .and(local_user::admin.eq(true)), + ), + ); let is_read = |person_id| { exists( @@ -135,6 +111,16 @@ fn queries<'a>() -> Queries< ) }; + let is_hidden = |person_id| { + exists( + post_hide::table.filter( + post_aggregates::post_id + .eq(post_hide::post_id) + .and(post_hide::person_id.eq(person_id)), + ), + ) + }; + let is_creator_blocked = |person_id| { exists( person_block::table.filter( @@ -156,17 +142,16 @@ fn queries<'a>() -> Queries< .single_value() }; + // TODO maybe this should go to localuser also let all_joins = move |query: post_aggregates::BoxedQuery<'a, Pg>, - my_person_id: Option, - saved_only: bool| { - let is_saved_selection: Box> = - if saved_only { - Box::new(true.into_sql::()) - } else if let Some(person_id) = my_person_id { - Box::new(is_saved(person_id)) - } else { - Box::new(false.into_sql::()) - }; + my_person_id: Option| { + let is_local_user_banned_from_community_selection: Box< + dyn BoxableExpression<_, Pg, SqlType = sql_types::Bool>, + > = if let Some(person_id) = my_person_id { + Box::new(is_local_user_banned_from_community(person_id)) + } else { + Box::new(false.into_sql::()) + }; let is_read_selection: Box> = if let Some(person_id) = my_person_id { @@ -175,6 +160,13 @@ fn queries<'a>() -> Queries< Box::new(false.into_sql::()) }; + let is_hidden_selection: Box> = + if let Some(person_id) = my_person_id { + Box::new(is_hidden(person_id)) + } else { + Box::new(false.into_sql::()) + }; + let is_creator_blocked_selection: Box> = if let Some(person_id) = my_person_id { Box::new(is_creator_blocked(person_id)) @@ -228,16 +220,28 @@ fn queries<'a>() -> Queries< .inner_join(person::table) .inner_join(community::table) .inner_join(post::table) + .left_join(image_details::table.on(post::thumbnail_url.eq(image_details::link.nullable()))) + .left_join( + post_saved::table.on( + post_aggregates::post_id + .eq(post_saved::post_id) + .and(post_saved::person_id.eq(my_person_id.unwrap_or(PersonId(-1)))), + ), + ) .select(( post::all_columns, person::all_columns, community::all_columns, + image_details::all_columns.nullable(), is_creator_banned_from_community, + is_local_user_banned_from_community_selection, creator_is_moderator, + creator_is_admin, post_aggregates::all_columns, subscribed_type_selection, - is_saved_selection, + post_saved::person_id.nullable().is_not_null(), is_read_selection, + is_hidden_selection, is_creator_blocked_selection, score_selection, coalesce( @@ -247,83 +251,93 @@ fn queries<'a>() -> Queries< )) }; - let read = - move |mut conn: DbConn<'a>, - (post_id, my_person_id, is_mod_or_admin): (PostId, Option, bool)| async move { - // The left join below will return None in this case - let person_id_join = my_person_id.unwrap_or(PersonId(-1)); - - let mut query = all_joins( - post_aggregates::table - .filter(post_aggregates::post_id.eq(post_id)) - .into_boxed(), - my_person_id, - false, - ); - - // Hide deleted and removed for non-admins or mods - if !is_mod_or_admin { - query = query - .filter(community::removed.eq(false)) - .filter(post::removed.eq(false)) - // users can see their own deleted posts - .filter( - community::deleted - 
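// Changes to the selected columns in this hunk: thumbnails gain an image_details left join,
// the saved flag comes from a post_saved left join on the viewer's person id (replacing the
// old is_saved subquery), and the select tuple adds whether the viewer is banned from the
// community, whether the creator is an admin, and whether the viewer has hidden the post.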
.eq(false) - .or(post::creator_id.eq(person_id_join)), - ) - .filter( - post::deleted - .eq(false) - .or(post::creator_id.eq(person_id_join)), - ); - } - - query.first::(&mut conn).await - }; - - let list = move |mut conn: DbConn<'a>, options: PostQuery<'a>| async move { - let person_id = options.local_user.map(|l| l.person.id); - let local_user_id = options.local_user.map(|l| l.local_user.id); - + let read = move |mut conn: DbConn<'a>, + (post_id, my_local_user, is_mod_or_admin): ( + PostId, + Option<&'a LocalUser>, + bool, + )| async move { // The left join below will return None in this case - let person_id_join = person_id.unwrap_or(PersonId(-1)); - let local_user_id_join = local_user_id.unwrap_or(LocalUserId(-1)); + let my_person_id = my_local_user.person_id(); + let person_id_join = my_person_id.unwrap_or(PersonId(-1)); + + let mut query = all_joins( + post_aggregates::table + .filter(post_aggregates::post_id.eq(post_id)) + .into_boxed(), + my_person_id, + ); + + // Hide deleted and removed for non-admins or mods + if !is_mod_or_admin { + query = query + .filter( + community::removed + .eq(false) + .or(post::creator_id.eq(person_id_join)), + ) + .filter( + post::removed + .eq(false) + .or(post::creator_id.eq(person_id_join)), + ) + // users can see their own deleted posts + .filter( + community::deleted + .eq(false) + .or(post::creator_id.eq(person_id_join)), + ) + .filter( + post::deleted + .eq(false) + .or(post::creator_id.eq(person_id_join)), + ); + } + + query = my_local_user.visible_communities_only(query); + + Commented::new(query) + .text("PostView::read") + .first(&mut conn) + .await + }; + + let list = move |mut conn: DbConn<'a>, (options, site): (PostQuery<'a>, &'a Site)| async move { + // The left join below will return None in this case + let person_id_join = options.local_user.person_id().unwrap_or(PersonId(-1)); + let local_user_id_join = options + .local_user + .local_user_id() + .unwrap_or(LocalUserId(-1)); let mut query = all_joins( post_aggregates::table.into_boxed(), - person_id, - options.saved_only, + options.local_user.person_id(), ); - let is_creator = options.creator_id == options.local_user.map(|l| l.person.id); - // only show deleted posts to creator - if is_creator { + // hide posts from deleted communities + query = query.filter(community::deleted.eq(false)); + + // only creator can see deleted posts and unpublished scheduled posts + if let Some(person_id) = options.local_user.person_id() { + query = query.filter(post::deleted.eq(false).or(post::creator_id.eq(person_id))); + query = query.filter( + post::scheduled_publish_time + .is_null() + .or(post::creator_id.eq(person_id)), + ); + } else { query = query - .filter(community::deleted.eq(false)) - .filter(post::deleted.eq(false)); + .filter(post::deleted.eq(false)) + .filter(post::scheduled_publish_time.is_null()); } - let is_admin = options - .local_user - .map(|l| l.local_user.admin) - .unwrap_or(false); // only show removed posts to admin when viewing user profile - if !(options.is_profile_view && is_admin) { + if !(options.creator_id.is_some() && options.local_user.is_admin()) { query = query .filter(community::removed.eq(false)) .filter(post::removed.eq(false)); } - if options.community_id.is_none() || options.community_id_just_for_prefetch { - query = order_and_page_filter_desc(query, post_aggregates::featured_local, &options, |e| { - e.featured_local - }); - } else { - query = - order_and_page_filter_desc(query, post_aggregates::featured_community, &options, |e| { - e.featured_community - }); - } 
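// Post listing visibility, as rewritten above: posts in deleted communities are always
// hidden; a deleted post or one with a pending scheduled_publish_time is only visible to
// its own creator; and removed posts/communities are only shown to an admin viewing a user
// profile (creator_id is set). Condensed sketch of the creator rule, using the columns from
// this file:
//
//   if let Some(person_id) = options.local_user.person_id() {
//     query = query.filter(post::deleted.eq(false).or(post::creator_id.eq(person_id)));
//     query = query.filter(
//       post::scheduled_publish_time.is_null().or(post::creator_id.eq(person_id)),
//     );
//   } else {
//     query = query
//       .filter(post::deleted.eq(false))
//       .filter(post::scheduled_publish_time.is_null());
//   }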
if let Some(community_id) = options.community_id { query = query.filter(post_aggregates::community_id.eq(community_id)); } @@ -332,92 +346,101 @@ fn queries<'a>() -> Queries< query = query.filter(post_aggregates::creator_id.eq(creator_id)); } - if let (Some(listing_type), Some(person_id)) = (options.listing_type, person_id) { - let is_subscribed = exists( - community_follower::table.filter( - post_aggregates::community_id - .eq(community_follower::community_id) - .and(community_follower::person_id.eq(person_id)), - ), - ); - match listing_type { - ListingType::Subscribed => query = query.filter(is_subscribed), - ListingType::Local => { - query = query - .filter(community::local.eq(true)) - .filter(community::hidden.eq(false).or(is_subscribed)); - } - ListingType::All => query = query.filter(community::hidden.eq(false).or(is_subscribed)), - ListingType::ModeratorView => { - query = query.filter(exists( - community_moderator::table.filter( - post::community_id - .eq(community_moderator::community_id) - .and(community_moderator::person_id.eq(person_id)), - ), - )); - } + let is_subscribed = exists( + community_follower::table.filter( + post_aggregates::community_id + .eq(community_follower::community_id) + .and(community_follower::person_id.eq(person_id_join)), + ), + ); + match options.listing_type.unwrap_or_default() { + ListingType::Subscribed => query = query.filter(is_subscribed), + ListingType::Local => { + query = query + .filter(community::local.eq(true)) + .filter(community::hidden.eq(false).or(is_subscribed)); + } + ListingType::All => query = query.filter(community::hidden.eq(false).or(is_subscribed)), + ListingType::ModeratorView => { + query = query.filter(exists( + community_moderator::table.filter( + post::community_id + .eq(community_moderator::community_id) + .and(community_moderator::person_id.eq(person_id_join)), + ), + )); } } - if let Some(url_search) = &options.url_search { - query = query.filter(post::url.eq(url_search)); - } - if let Some(search_term) = &options.search_term { - let searcher = fuzzy_search(search_term); - query = query.filter( - post::name - .ilike(searcher.clone()) - .or(post::body.ilike(searcher)), - ); + if options.url_only.unwrap_or_default() { + query = query.filter(post::url.eq(search_term)); + } else { + let searcher = fuzzy_search(search_term); + let name_filter = post::name.ilike(searcher.clone()); + let body_filter = post::body.ilike(searcher.clone()); + query = if options.title_only.unwrap_or_default() { + query.filter(name_filter) + } else { + query.filter(name_filter.or(body_filter)) + } + .filter(not(post::removed.or(post::deleted))); + } } if !options - .local_user - .map(|l| l.local_user.show_nsfw) - .unwrap_or(false) + .show_nsfw + .unwrap_or(options.local_user.show_nsfw(site)) { query = query .filter(post::nsfw.eq(false)) .filter(community::nsfw.eq(false)); }; - if !options - .local_user - .map(|l| l.local_user.show_bot_accounts) - .unwrap_or(true) - { + if !options.local_user.show_bot_accounts() { query = query.filter(person::bot_account.eq(false)); }; - if let (true, Some(person_id)) = (options.saved_only, person_id) { - query = query.filter(is_saved(person_id)); + // If its saved only, then filter, and order by the saved time, not the comment creation time. + if options.saved_only.unwrap_or_default() { + query = query + .filter(post_saved::person_id.is_not_null()) + .then_order_by(post_saved::published.desc()); } // Only hide the read posts, if the saved_only is false. 
Otherwise ppl with the hide_read // setting wont be able to see saved posts. else if !options - .local_user - .map(|l| l.local_user.show_read_posts) - .unwrap_or(true) + .show_read + .unwrap_or(options.local_user.show_read_posts()) { // Do not hide read posts when it is a user profile view - if let (false, Some(person_id)) = (options.is_profile_view, person_id) { + // Or, only hide read posts on non-profile views + if let (None, Some(person_id)) = (options.creator_id, options.local_user.person_id()) { query = query.filter(not(is_read(person_id))); } } - if let Some(person_id) = person_id { - if options.liked_only { - query = query.filter(score(person_id).eq(1)); - } else if options.disliked_only { - query = query.filter(score(person_id).eq(-1)); + if !options.show_hidden.unwrap_or_default() { + // If a creator id isn't given (IE its on home or community pages), hide the hidden posts + if let (None, Some(person_id)) = (options.creator_id, options.local_user.person_id()) { + query = query.filter(not(is_hidden(person_id))); + } + } + + if let Some(my_id) = options.local_user.person_id() { + let not_creator_filter = post_aggregates::creator_id.ne(my_id); + if options.liked_only.unwrap_or_default() { + query = query.filter(not_creator_filter).filter(score(my_id).eq(1)); + } else if options.disliked_only.unwrap_or_default() { + query = query.filter(not_creator_filter).filter(score(my_id).eq(-1)); } }; + query = options.local_user.visible_communities_only(query); + // Dont filter blocks or missing languages for moderator view type if let (Some(person_id), false) = ( - person_id, + options.local_user.person_id(), options.listing_type.unwrap_or_default() == ListingType::ModeratorView, ) { // Filter out the rows with missing languages @@ -446,115 +469,97 @@ fn queries<'a>() -> Queries< ))); query = query.filter(not(is_creator_blocked(person_id))); } - let now = diesel::dsl::now.into_sql::(); - { - use post_aggregates::{ - comments, - controversy_rank, - hot_rank, - hot_rank_active, - published, - scaled_rank, - score, - }; - match options.sort.as_ref().unwrap_or(&SortType::Hot) { - SortType::Active => { - query = - order_and_page_filter_desc(query, hot_rank_active, &options, |e| e.hot_rank_active); - query = order_and_page_filter_desc(query, published, &options, |e| e.published); - } - SortType::Hot => { - query = order_and_page_filter_desc(query, hot_rank, &options, |e| e.hot_rank); - query = order_and_page_filter_desc(query, published, &options, |e| e.published); - } - SortType::Scaled => { - query = order_and_page_filter_desc(query, scaled_rank, &options, |e| e.scaled_rank); - query = order_and_page_filter_desc(query, published, &options, |e| e.published); - } - SortType::Controversial => { - query = - order_and_page_filter_desc(query, controversy_rank, &options, |e| e.controversy_rank); - query = order_and_page_filter_desc(query, published, &options, |e| e.published); - } - SortType::New => { - query = order_and_page_filter_desc(query, published, &options, |e| e.published) - } - SortType::Old => { - query = order_and_page_filter_asc(query, published, &options, |e| e.published) - } - SortType::NewComments => { - query = order_and_page_filter_desc(query, newest_comment_time, &options, |e| { - e.newest_comment_time - }) - } - SortType::MostComments => { - query = order_and_page_filter_desc(query, comments, &options, |e| e.comments); - query = order_and_page_filter_desc(query, published, &options, |e| e.published); - } - SortType::TopAll => { - query = order_and_page_filter_desc(query, score, 
&options, |e| e.score); - query = order_and_page_filter_desc(query, published, &options, |e| e.published); - } - o @ (SortType::TopYear - | SortType::TopMonth - | SortType::TopWeek - | SortType::TopDay - | SortType::TopHour - | SortType::TopSixHour - | SortType::TopTwelveHour - | SortType::TopThreeMonths - | SortType::TopSixMonths - | SortType::TopNineMonths) => { - let interval = match o { - SortType::TopYear => 1.years(), - SortType::TopMonth => 1.months(), - SortType::TopWeek => 1.weeks(), - SortType::TopDay => 1.days(), - SortType::TopHour => 1.hours(), - SortType::TopSixHour => 6.hours(), - SortType::TopTwelveHour => 12.hours(), - SortType::TopThreeMonths => 3.months(), - SortType::TopSixMonths => 6.months(), - SortType::TopNineMonths => 9.months(), - _ => return Err(Error::NotFound), - }; - query = query.filter(post_aggregates::published.gt(now - interval)); - query = order_and_page_filter_desc(query, score, &options, |e| e.score); - query = order_and_page_filter_desc(query, published, &options, |e| e.published); - } - } + let (limit, offset) = limit_and_offset(options.page, options.limit)?; + query = query.limit(limit).offset(offset); + + let mut query = PaginatedQueryBuilder::new(query); + + let page_after = options.page_after.map(|c| c.0); + let page_before_or_equal = options.page_before_or_equal.map(|c| c.0); + + if options.page_back.unwrap_or_default() { + query = query + .before(page_after) + .after_or_equal(page_before_or_equal) + .limit_and_offset_from_end(); + } else { + query = query + .after(page_after) + .before_or_equal(page_before_or_equal); + } + + // featured posts first + query = if options.community_id.is_none() || options.community_id_just_for_prefetch { + query.then_desc(key::featured_local) + } else { + query.then_desc(key::featured_community) }; - let (limit, mut offset) = limit_and_offset(options.page, options.limit)?; - if options.page_after.is_some() { - // always skip exactly one post because that's the last post of the previous page - // fixing the where clause is more difficult because we'd have to change only the last order-by-where clause - // e.g. 
WHERE (featured_local<=, hot_rank<=, published<=) to WHERE (<=, <=, <) - offset = 1; - } - query = query.limit(limit).offset(offset); + let time = |interval| post_aggregates::published.gt(now() - interval); + + // then use the main sort + query = match options.sort.unwrap_or(Hot) { + Active => query.then_desc(key::hot_rank_active), + Hot => query.then_desc(key::hot_rank), + Scaled => query.then_desc(key::scaled_rank), + Controversial => query.then_desc(key::controversy_rank), + New => query.then_desc(key::published), + Old => query.then_desc(ReverseTimestampKey(key::published)), + NewComments => query.then_desc(key::newest_comment_time), + MostComments => query.then_desc(key::comments), + TopAll => query.then_desc(key::score), + TopYear => query.then_desc(key::score).filter(time(1.years())), + TopMonth => query.then_desc(key::score).filter(time(1.months())), + TopWeek => query.then_desc(key::score).filter(time(1.weeks())), + TopDay => query.then_desc(key::score).filter(time(1.days())), + TopHour => query.then_desc(key::score).filter(time(1.hours())), + TopSixHour => query.then_desc(key::score).filter(time(6.hours())), + TopTwelveHour => query.then_desc(key::score).filter(time(12.hours())), + TopThreeMonths => query.then_desc(key::score).filter(time(3.months())), + TopSixMonths => query.then_desc(key::score).filter(time(6.months())), + TopNineMonths => query.then_desc(key::score).filter(time(9.months())), + }; + + // use publish as fallback. especially useful for hot rank which reaches zero after some days. + // necessary because old posts can be fetched over federation and inserted with high post id + query = match options.sort.unwrap_or(Hot) { + // A second time-based sort would not be very useful + New | Old | NewComments => query, + _ => query.then_desc(key::published), + }; + + // finally use unique post id as tie breaker + query = query.then_desc(key::post_id); + + // Not done by debug_query + let query = query.as_query(); debug!("Post View Query: {:?}", debug_query::(&query)); - query.load::(&mut conn).await + Commented::new(query) + .text("PostQuery::list") + .text_if( + "getting upper bound for next query", + options.community_id_just_for_prefetch, + ) + .load::(&mut conn) + .await }; Queries::new(read, list) } impl PostView { - pub async fn read( + pub async fn read<'a>( pool: &mut DbPool<'_>, post_id: PostId, - my_person_id: Option, + my_local_user: Option<&'a LocalUser>, is_mod_or_admin: bool, ) -> Result { - let res = queries() - .read(pool, (post_id, my_person_id, is_mod_or_admin)) - .await?; - - Ok(res) + queries() + .read(pool, (post_id, my_local_user, is_mod_or_admin)) + .await } } @@ -565,63 +570,72 @@ impl PaginationCursor { PaginationCursor(format!("P{:x}", view.counts.post_id.0)) } pub async fn read(&self, pool: &mut DbPool<'_>) -> Result { - Ok(PaginationCursorData( - PostAggregates::read( - pool, - PostId( - self - .0 - .get(1..) - .and_then(|e| i32::from_str_radix(e, 16).ok()) - .ok_or_else(|| Error::QueryBuilderError("Could not parse pagination token".into()))?, - ), - ) - .await?, - )) + let err_msg = || Error::QueryBuilderError("Could not parse pagination token".into()); + let token = PostAggregates::read( + pool, + PostId( + self + .0 + .get(1..) + .and_then(|e| i32::from_str_radix(e, 16).ok()) + .ok_or_else(err_msg)?, + ), + ) + .await?; + + Ok(PaginationCursorData(token)) } } // currently we use a postaggregates struct as the pagination token. 
-// we only use some of the properties of the post aggregates, depending on which sort type we page by +// we only use some of the properties of the post aggregates, depending on which sort type we page +// by #[derive(Clone)] pub struct PaginationCursorData(PostAggregates); -#[derive(Default, Clone)] +#[derive(Clone, Default)] pub struct PostQuery<'a> { pub listing_type: Option, - pub sort: Option, + pub sort: Option, pub creator_id: Option, pub community_id: Option, - // if true, the query should be handled as if community_id was not given except adding the literal filter + // if true, the query should be handled as if community_id was not given except adding the + // literal filter pub community_id_just_for_prefetch: bool, - pub local_user: Option<&'a LocalUserView>, + pub local_user: Option<&'a LocalUser>, pub search_term: Option, - pub url_search: Option, - pub saved_only: bool, - pub liked_only: bool, - pub disliked_only: bool, - pub moderator_view: bool, - pub is_profile_view: bool, + pub url_only: Option, + pub saved_only: Option, + pub liked_only: Option, + pub disliked_only: Option, + pub title_only: Option, pub page: Option, pub limit: Option, pub page_after: Option, pub page_before_or_equal: Option, + pub page_back: Option, + pub show_hidden: Option, + pub show_read: Option, + pub show_nsfw: Option, } impl<'a> PostQuery<'a> { async fn prefetch_upper_bound_for_page_before( &self, + site: &Site, pool: &mut DbPool<'_>, ) -> Result>, Error> { - // first get one page for the most popular community to get an upper bound for the the page end for the real query - // the reason this is needed is that when fetching posts for a single community PostgreSQL can optimize - // the query to use an index on e.g. (=, >=, >=, >=) and fetch only LIMIT rows - // but for the followed-communities query it has to query the index on (IN, >=, >=, >=) - // which it currently can't do at all (as of PG 16). see the discussion here: - // https://github.com/LemmyNet/lemmy/issues/2877#issuecomment-1673597190 + // first get one page for the most popular community to get an upper bound for the page end for + // the real query. the reason this is needed is that when fetching posts for a single + // community PostgreSQL can optimize the query to use an index on e.g. (=, >=, >=, >=) and + // fetch only LIMIT rows but for the followed-communities query it has to query the index on + // (IN, >=, >=, >=) which it currently can't do at all (as of PG 16). see the discussion + // here: https://github.com/LemmyNet/lemmy/issues/2877#issuecomment-1673597190 // - // the results are correct no matter which community we fetch these for, since it basically covers the "worst case" of the whole page consisting of posts from one community - // but using the largest community decreases the pagination-frame so make the real query more efficient. + // the results are correct no matter which community we fetch these for, since it basically + // covers the "worst case" of the whole page consisting of posts from one community + // but using the largest community decreases the pagination-frame so make the real query more + // efficient. 
use lemmy_db_schema::schema::{ community_aggregates::dsl::{community_aggregates, community_id, users_active_month}, community_follower::dsl::{ @@ -636,11 +650,7 @@ impl<'a> PostQuery<'a> { "legacy pagination cannot be combined with v2 pagination".into(), )); } - let self_person_id = self - .local_user - .expect("part of the above if") - .local_user - .person_id; + let self_person_id = self.local_user.expect("part of the above if").person_id; let largest_subscribed = { let conn = &mut get_conn(pool).await?; community_follower @@ -661,140 +671,193 @@ impl<'a> PostQuery<'a> { let mut v = queries() .list( pool, - PostQuery { - community_id: Some(largest_subscribed), - community_id_just_for_prefetch: true, - ..self.clone() - }, + ( + PostQuery { + community_id: Some(largest_subscribed), + community_id_just_for_prefetch: true, + ..self.clone() + }, + site, + ), ) .await?; // take last element of array. if this query returned less than LIMIT elements, - // the heuristic is invalid since we can't guarantee the full query will return >= LIMIT results (return original query) + // the heuristic is invalid since we can't guarantee the full query will return >= LIMIT results + // (return original query) if (v.len() as i64) < limit { Ok(Some(self.clone())) } else { - let page_before_or_equal = Some(PaginationCursorData(v.pop().expect("else case").counts)); + let item = if self.page_back.unwrap_or_default() { + // for backward pagination, get first element instead + v.into_iter().next() + } else { + v.pop() + }; + let limit_cursor = Some(PaginationCursorData(item.expect("else case").counts)); Ok(Some(PostQuery { - page_before_or_equal, + page_before_or_equal: limit_cursor, ..self.clone() })) } } - pub async fn list(self, pool: &mut DbPool<'_>) -> Result, Error> { + pub async fn list(self, site: &Site, pool: &mut DbPool<'_>) -> Result, Error> { if self.listing_type == Some(ListingType::Subscribed) && self.community_id.is_none() && self.local_user.is_some() && self.page_before_or_equal.is_none() { - if let Some(query) = self.prefetch_upper_bound_for_page_before(pool).await? { - queries().list(pool, query).await + if let Some(query) = self + .prefetch_upper_bound_for_page_before(site, pool) + .await? 
+ { + queries().list(pool, (query, site)).await } else { Ok(vec![]) } } else { - queries().list(pool, self).await + queries().list(pool, (self, site)).await } } } #[cfg(test)] mod tests { - #![allow(clippy::unwrap_used)] - #![allow(clippy::indexing_slicing)] - use crate::{ - post_view::{PostQuery, PostView}, + post_view::{PaginationCursorData, PostQuery, PostView}, structs::LocalUserView, }; + use chrono::Utc; use lemmy_db_schema::{ aggregates::structs::PostAggregates, impls::actor_language::UNDETERMINED_ID, newtypes::LanguageId, source::{ actor_language::LocalUserLanguage, - community::{Community, CommunityInsertForm, CommunityModerator, CommunityModeratorForm}, + comment::{Comment, CommentInsertForm}, + community::{ + Community, + CommunityFollower, + CommunityFollowerForm, + CommunityInsertForm, + CommunityModerator, + CommunityModeratorForm, + CommunityPersonBan, + CommunityPersonBanForm, + CommunityUpdateForm, + }, community_block::{CommunityBlock, CommunityBlockForm}, instance::Instance, instance_block::{InstanceBlock, InstanceBlockForm}, language::Language, local_user::{LocalUser, LocalUserInsertForm, LocalUserUpdateForm}, + local_user_vote_display_mode::LocalUserVoteDisplayMode, person::{Person, PersonInsertForm}, person_block::{PersonBlock, PersonBlockForm}, - post::{Post, PostInsertForm, PostLike, PostLikeForm, PostUpdateForm}, + post::{ + Post, + PostHide, + PostInsertForm, + PostLike, + PostLikeForm, + PostRead, + PostSaved, + PostSavedForm, + PostUpdateForm, + }, + site::Site, }, - traits::{Blockable, Crud, Joinable, Likeable}, - utils::{build_db_pool_for_tests, DbPool}, - SortType, + traits::{Bannable, Blockable, Crud, Followable, Joinable, Likeable, Saveable}, + utils::{build_db_pool, build_db_pool_for_tests, DbPool, RANK_DEFAULT}, + CommunityVisibility, + PostSortType, SubscribedType, }; + use lemmy_utils::error::LemmyResult; + use pretty_assertions::assert_eq; use serial_test::serial; + use std::{collections::HashSet, time::Duration}; + use url::Url; + + const POST_WITH_ANOTHER_TITLE: &str = "Another title"; + const POST_BY_BLOCKED_PERSON: &str = "post by blocked person"; + const POST_BY_BOT: &str = "post by bot"; + const POST: &str = "post"; + + fn names(post_views: &[PostView]) -> Vec<&str> { + post_views.iter().map(|i| i.post.name.as_str()).collect() + } struct Data { inserted_instance: Instance, local_user_view: LocalUserView, - inserted_blocked_person: Person, + blocked_local_user_view: LocalUserView, inserted_bot: Person, inserted_community: Community, inserted_post: Post, + inserted_bot_post: Post, + site: Site, } - async fn init_data(pool: &mut DbPool<'_>) -> Data { - let inserted_instance = Instance::read_or_create(pool, "my_domain.tld".to_string()) - .await - .unwrap(); + impl Data { + fn default_post_query(&self) -> PostQuery<'_> { + PostQuery { + sort: Some(PostSortType::New), + local_user: Some(&self.local_user_view.local_user), + ..Default::default() + } + } + } - let person_name = "tegan".to_string(); + async fn init_data(pool: &mut DbPool<'_>) -> LemmyResult { + let inserted_instance = Instance::read_or_create(pool, "my_domain.tld".to_string()).await?; - let new_person = PersonInsertForm::builder() - .name(person_name.clone()) - .public_key("pubkey".to_string()) - .instance_id(inserted_instance.id) - .build(); + let new_person = PersonInsertForm::test_form(inserted_instance.id, "tegan"); - let inserted_person = Person::create(pool, &new_person).await.unwrap(); + let inserted_person = Person::create(pool, &new_person).await?; - let local_user_form = 
LocalUserInsertForm::builder() - .person_id(inserted_person.id) - .password_encrypted(String::new()) - .build(); - let inserted_local_user = LocalUser::create(pool, &local_user_form).await.unwrap(); + let local_user_form = LocalUserInsertForm { + admin: Some(true), + ..LocalUserInsertForm::test_form(inserted_person.id) + }; + let inserted_local_user = LocalUser::create(pool, &local_user_form, vec![]).await?; - let new_bot = PersonInsertForm::builder() - .name("mybot".to_string()) - .bot_account(Some(true)) - .public_key("pubkey".to_string()) - .instance_id(inserted_instance.id) - .build(); + let new_bot = PersonInsertForm { + bot_account: Some(true), + ..PersonInsertForm::test_form(inserted_instance.id, "mybot") + }; - let inserted_bot = Person::create(pool, &new_bot).await.unwrap(); + let inserted_bot = Person::create(pool, &new_bot).await?; - let new_community = CommunityInsertForm::builder() - .name("test_community_3".to_string()) - .title("nada".to_owned()) - .public_key("pubkey".to_string()) - .instance_id(inserted_instance.id) - .build(); - - let inserted_community = Community::create(pool, &new_community).await.unwrap(); + let new_community = CommunityInsertForm::new( + inserted_instance.id, + "test_community_3".to_string(), + "nada".to_owned(), + "pubkey".to_string(), + ); + let inserted_community = Community::create(pool, &new_community).await?; // Test a person block, make sure the post query doesn't include their post - let blocked_person = PersonInsertForm::builder() - .name(person_name) - .public_key("pubkey".to_string()) - .instance_id(inserted_instance.id) - .build(); + let blocked_person = PersonInsertForm::test_form(inserted_instance.id, "john"); - let inserted_blocked_person = Person::create(pool, &blocked_person).await.unwrap(); + let inserted_blocked_person = Person::create(pool, &blocked_person).await?; - let post_from_blocked_person = PostInsertForm::builder() - .name("blocked_person_post".to_string()) - .creator_id(inserted_blocked_person.id) - .community_id(inserted_community.id) - .language_id(Some(LanguageId(1))) - .build(); + let inserted_blocked_local_user = LocalUser::create( + pool, + &LocalUserInsertForm::test_form(inserted_blocked_person.id), + vec![], + ) + .await?; - Post::create(pool, &post_from_blocked_person).await.unwrap(); + let post_from_blocked_person = PostInsertForm { + language_id: Some(LanguageId(1)), + ..PostInsertForm::new( + POST_BY_BLOCKED_PERSON.to_string(), + inserted_blocked_person.id, + inserted_community.id, + ) + }; + Post::create(pool, &post_from_blocked_person).await?; // block that person let person_block = PersonBlockForm { @@ -802,84 +865,101 @@ mod tests { target_id: inserted_blocked_person.id, }; - PersonBlock::block(pool, &person_block).await.unwrap(); + PersonBlock::block(pool, &person_block).await?; // A sample post - let new_post = PostInsertForm::builder() - .name("test post 3".to_string()) - .creator_id(inserted_person.id) - .community_id(inserted_community.id) - .language_id(Some(LanguageId(47))) - .build(); + let new_post = PostInsertForm { + language_id: Some(LanguageId(47)), + ..PostInsertForm::new(POST.to_string(), inserted_person.id, inserted_community.id) + }; + let inserted_post = Post::create(pool, &new_post).await?; - let inserted_post = Post::create(pool, &new_post).await.unwrap(); + let new_bot_post = PostInsertForm::new( + POST_BY_BOT.to_string(), + inserted_bot.id, + inserted_community.id, + ); + let inserted_bot_post = Post::create(pool, &new_bot_post).await?; - let new_bot_post = PostInsertForm::builder() - 
.name("test bot post".to_string()) - .creator_id(inserted_bot.id) - .community_id(inserted_community.id) - .build(); - - let _inserted_bot_post = Post::create(pool, &new_bot_post).await.unwrap(); let local_user_view = LocalUserView { local_user: inserted_local_user, + local_user_vote_display_mode: LocalUserVoteDisplayMode::default(), person: inserted_person, counts: Default::default(), }; + let blocked_local_user_view = LocalUserView { + local_user: inserted_blocked_local_user, + local_user_vote_display_mode: LocalUserVoteDisplayMode::default(), + person: inserted_blocked_person, + counts: Default::default(), + }; - Data { + let site = Site { + id: Default::default(), + name: String::new(), + sidebar: None, + published: Default::default(), + updated: None, + icon: None, + banner: None, + description: None, + actor_id: Url::parse("http://example.com")?.into(), + last_refreshed_at: Default::default(), + inbox_url: Url::parse("http://example.com")?.into(), + private_key: None, + public_key: String::new(), + instance_id: Default::default(), + content_warning: None, + }; + + Ok(Data { inserted_instance, local_user_view, - inserted_blocked_person, + blocked_local_user_view, inserted_bot, inserted_community, inserted_post, - } + inserted_bot_post, + site, + }) } #[tokio::test] #[serial] - async fn post_listing_with_person() { - let pool = &build_db_pool_for_tests().await; + async fn post_listing_with_person() -> LemmyResult<()> { + let pool = &build_db_pool().await?; let pool = &mut pool.into(); - let mut data = init_data(pool).await; + let mut data = init_data(pool).await?; let local_user_form = LocalUserUpdateForm { show_bot_accounts: Some(false), ..Default::default() }; - let inserted_local_user = - LocalUser::update(pool, data.local_user_view.local_user.id, &local_user_form) - .await - .unwrap(); - data.local_user_view.local_user = inserted_local_user; + LocalUser::update(pool, data.local_user_view.local_user.id, &local_user_form).await?; + data.local_user_view.local_user.show_bot_accounts = false; let read_post_listing = PostQuery { - sort: (Some(SortType::New)), - community_id: (Some(data.inserted_community.id)), - local_user: (Some(&data.local_user_view)), - ..Default::default() + community_id: Some(data.inserted_community.id), + ..data.default_post_query() } - .list(pool) - .await - .unwrap(); + .list(&data.site, pool) + .await?; let post_listing_single_with_person = PostView::read( pool, data.inserted_post.id, - Some(data.local_user_view.person.id), + Some(&data.local_user_view.local_user), false, ) - .await - .unwrap(); + .await?; - let mut expected_post_listing_with_user = expected_post_view(&data, pool).await; + let expected_post_listing_with_user = expected_post_view(&data, pool).await?; // Should be only one person, IE the bot post, and blocked should be missing - assert_eq!(1, read_post_listing.len()); - - assert_eq!(expected_post_listing_with_user, read_post_listing[0]); - expected_post_listing_with_user.my_vote = None; + assert_eq!( + vec![post_listing_single_with_person.clone()], + read_post_listing + ); assert_eq!( expected_post_listing_with_user, post_listing_single_with_person @@ -889,102 +969,150 @@ mod tests { show_bot_accounts: Some(true), ..Default::default() }; - let inserted_local_user = - LocalUser::update(pool, data.local_user_view.local_user.id, &local_user_form) - .await - .unwrap(); - data.local_user_view.local_user = inserted_local_user; + LocalUser::update(pool, data.local_user_view.local_user.id, &local_user_form).await?; + 
data.local_user_view.local_user.show_bot_accounts = true; let post_listings_with_bots = PostQuery { - sort: (Some(SortType::New)), - community_id: (Some(data.inserted_community.id)), - local_user: (Some(&data.local_user_view)), - ..Default::default() + community_id: Some(data.inserted_community.id), + ..data.default_post_query() } - .list(pool) - .await - .unwrap(); + .list(&data.site, pool) + .await?; // should include bot post which has "undetermined" language - assert_eq!(2, post_listings_with_bots.len()); + assert_eq!(vec![POST_BY_BOT, POST], names(&post_listings_with_bots)); - cleanup(data, pool).await; + cleanup(data, pool).await } #[tokio::test] #[serial] - async fn post_listing_no_person() { - let pool = &build_db_pool_for_tests().await; + async fn post_listing_no_person() -> LemmyResult<()> { + let pool = &build_db_pool().await?; let pool = &mut pool.into(); - let data = init_data(pool).await; + let data = init_data(pool).await?; let read_post_listing_multiple_no_person = PostQuery { - sort: (Some(SortType::New)), - community_id: (Some(data.inserted_community.id)), - ..Default::default() + community_id: Some(data.inserted_community.id), + local_user: None, + ..data.default_post_query() } - .list(pool) - .await - .unwrap(); + .list(&data.site, pool) + .await?; let read_post_listing_single_no_person = - PostView::read(pool, data.inserted_post.id, None, false) - .await - .unwrap(); + PostView::read(pool, data.inserted_post.id, None, false).await?; - let expected_post_listing_no_person = expected_post_view(&data, pool).await; + let expected_post_listing_no_person = expected_post_view(&data, pool).await?; // Should be 2 posts, with the bot post, and the blocked - assert_eq!(3, read_post_listing_multiple_no_person.len()); + assert_eq!( + vec![POST_BY_BOT, POST, POST_BY_BLOCKED_PERSON], + names(&read_post_listing_multiple_no_person) + ); assert_eq!( - expected_post_listing_no_person, - read_post_listing_multiple_no_person[1] + Some(&expected_post_listing_no_person), + read_post_listing_multiple_no_person.get(1) ); assert_eq!( expected_post_listing_no_person, read_post_listing_single_no_person ); - cleanup(data, pool).await; + cleanup(data, pool).await } #[tokio::test] #[serial] - async fn post_listing_block_community() { - let pool = &build_db_pool_for_tests().await; + async fn post_listing_title_only() -> LemmyResult<()> { + let pool = &build_db_pool().await?; let pool = &mut pool.into(); - let data = init_data(pool).await; + let data = init_data(pool).await?; + + // A post which contains the search them 'Post' not in the title (but in the body) + let new_post = PostInsertForm { + language_id: Some(LanguageId(47)), + body: Some("Post".to_string()), + ..PostInsertForm::new( + POST_WITH_ANOTHER_TITLE.to_string(), + data.local_user_view.person.id, + data.inserted_community.id, + ) + }; + + let inserted_post = Post::create(pool, &new_post).await?; + + let read_post_listing_by_title_only = PostQuery { + community_id: Some(data.inserted_community.id), + local_user: None, + search_term: Some("Post".to_string()), + title_only: Some(true), + ..data.default_post_query() + } + .list(&data.site, pool) + .await?; + + let read_post_listing = PostQuery { + community_id: Some(data.inserted_community.id), + local_user: None, + search_term: Some("Post".to_string()), + ..data.default_post_query() + } + .list(&data.site, pool) + .await?; + + // Should be 4 posts when we do not search for title only + assert_eq!( + vec![ + POST_WITH_ANOTHER_TITLE, + POST_BY_BOT, + POST, + POST_BY_BLOCKED_PERSON + ], + 
names(&read_post_listing) + ); + + // Should be 3 posts when we search for title only + assert_eq!( + vec![POST_BY_BOT, POST, POST_BY_BLOCKED_PERSON], + names(&read_post_listing_by_title_only) + ); + Post::delete(pool, inserted_post.id).await?; + cleanup(data, pool).await + } + + #[tokio::test] + #[serial] + async fn post_listing_block_community() -> LemmyResult<()> { + let pool = &build_db_pool().await?; + let pool = &mut pool.into(); + let data = init_data(pool).await?; let community_block = CommunityBlockForm { person_id: data.local_user_view.person.id, community_id: data.inserted_community.id, }; - CommunityBlock::block(pool, &community_block).await.unwrap(); + CommunityBlock::block(pool, &community_block).await?; let read_post_listings_with_person_after_block = PostQuery { - sort: (Some(SortType::New)), - community_id: (Some(data.inserted_community.id)), - local_user: (Some(&data.local_user_view)), - ..Default::default() + community_id: Some(data.inserted_community.id), + ..data.default_post_query() } - .list(pool) - .await - .unwrap(); + .list(&data.site, pool) + .await?; // Should be 0 posts after the community block - assert_eq!(0, read_post_listings_with_person_after_block.len()); + assert_eq!(read_post_listings_with_person_after_block, vec![]); - CommunityBlock::unblock(pool, &community_block) - .await - .unwrap(); - cleanup(data, pool).await; + CommunityBlock::unblock(pool, &community_block).await?; + cleanup(data, pool).await } #[tokio::test] #[serial] - async fn post_listing_like() { - let pool = &build_db_pool_for_tests().await; + async fn post_listing_like() -> LemmyResult<()> { + let pool = &build_db_pool().await?; let pool = &mut pool.into(); - let mut data = init_data(pool).await; + let mut data = init_data(pool).await?; let post_like_form = PostLikeForm { post_id: data.inserted_post.id, @@ -992,10 +1120,9 @@ mod tests { score: 1, }; - let inserted_post_like = PostLike::like(pool, &post_like_form).await.unwrap(); + let inserted_post_like = PostLike::like(pool, &post_like_form).await?; let expected_post_like = PostLike { - id: inserted_post_like.id, post_id: data.inserted_post.id, person_id: data.local_user_view.person.id, published: inserted_post_like.published, @@ -1006,13 +1133,12 @@ mod tests { let post_listing_single_with_person = PostView::read( pool, data.inserted_post.id, - Some(data.local_user_view.person.id), + Some(&data.local_user_view.local_user), false, ) - .await - .unwrap(); + .await?; - let mut expected_post_with_upvote = expected_post_view(&data, pool).await; + let mut expected_post_with_upvote = expected_post_view(&data, pool).await?; expected_post_with_upvote.my_vote = Some(1); expected_post_with_upvote.counts.score = 1; expected_post_with_upvote.counts.upvotes = 1; @@ -1022,61 +1148,108 @@ mod tests { show_bot_accounts: Some(false), ..Default::default() }; - let inserted_local_user = - LocalUser::update(pool, data.local_user_view.local_user.id, &local_user_form) - .await - .unwrap(); - data.local_user_view.local_user = inserted_local_user; + LocalUser::update(pool, data.local_user_view.local_user.id, &local_user_form).await?; + data.local_user_view.local_user.show_bot_accounts = false; let read_post_listing = PostQuery { - sort: (Some(SortType::New)), - community_id: (Some(data.inserted_community.id)), - local_user: (Some(&data.local_user_view)), - ..Default::default() + community_id: Some(data.inserted_community.id), + ..data.default_post_query() } - .list(pool) - .await - .unwrap(); - assert_eq!(1, read_post_listing.len()); - - 
assert_eq!(expected_post_with_upvote, read_post_listing[0]); - - let read_liked_post_listing = PostQuery { - community_id: (Some(data.inserted_community.id)), - local_user: (Some(&data.local_user_view)), - liked_only: (true), - ..Default::default() - } - .list(pool) - .await - .unwrap(); - assert_eq!(read_post_listing, read_liked_post_listing); - - let read_disliked_post_listing = PostQuery { - community_id: (Some(data.inserted_community.id)), - local_user: (Some(&data.local_user_view)), - disliked_only: (true), - ..Default::default() - } - .list(pool) - .await - .unwrap(); - assert!(read_disliked_post_listing.is_empty()); + .list(&data.site, pool) + .await?; + assert_eq!(vec![expected_post_with_upvote], read_post_listing); let like_removed = - PostLike::remove(pool, data.local_user_view.person.id, data.inserted_post.id) - .await - .unwrap(); + PostLike::remove(pool, data.local_user_view.person.id, data.inserted_post.id).await?; assert_eq!(1, like_removed); - cleanup(data, pool).await; + cleanup(data, pool).await } #[tokio::test] #[serial] - async fn creator_is_moderator() { - let pool = &build_db_pool_for_tests().await; + async fn post_listing_liked_only() -> LemmyResult<()> { + let pool = &build_db_pool().await?; let pool = &mut pool.into(); - let data = init_data(pool).await; + let data = init_data(pool).await?; + + // Like both the bot post, and your own + // The liked_only should not show your own post + let post_like_form = PostLikeForm { + post_id: data.inserted_post.id, + person_id: data.local_user_view.person.id, + score: 1, + }; + PostLike::like(pool, &post_like_form).await?; + + let bot_post_like_form = PostLikeForm { + post_id: data.inserted_bot_post.id, + person_id: data.local_user_view.person.id, + score: 1, + }; + PostLike::like(pool, &bot_post_like_form).await?; + + // Read the liked only + let read_liked_post_listing = PostQuery { + community_id: Some(data.inserted_community.id), + liked_only: Some(true), + ..data.default_post_query() + } + .list(&data.site, pool) + .await?; + + // This should only include the bot post, not the one you created + assert_eq!(vec![POST_BY_BOT], names(&read_liked_post_listing)); + + let read_disliked_post_listing = PostQuery { + community_id: Some(data.inserted_community.id), + disliked_only: Some(true), + ..data.default_post_query() + } + .list(&data.site, pool) + .await?; + + // Should be no posts + assert_eq!(read_disliked_post_listing, vec![]); + + cleanup(data, pool).await + } + + #[tokio::test] + #[serial] + async fn post_listing_saved_only() -> LemmyResult<()> { + let pool = &build_db_pool().await?; + let pool = &mut pool.into(); + let data = init_data(pool).await?; + + // Save only the bot post + // The saved_only should only show the bot post + let post_save_form = PostSavedForm { + post_id: data.inserted_bot_post.id, + person_id: data.local_user_view.person.id, + }; + PostSaved::save(pool, &post_save_form).await?; + + // Read the saved only + let read_saved_post_listing = PostQuery { + community_id: Some(data.inserted_community.id), + saved_only: Some(true), + ..data.default_post_query() + } + .list(&data.site, pool) + .await?; + + // This should only include the bot post, not the one you created + assert_eq!(vec![POST_BY_BOT], names(&read_saved_post_listing)); + + cleanup(data, pool).await + } + + #[tokio::test] + #[serial] + async fn creator_info() -> LemmyResult<()> { + let pool = &build_db_pool().await?; + let pool = &mut pool.into(); + let data = init_data(pool).await?; // Make one of the inserted persons a moderator let 
person_id = data.local_user_view.person.id; @@ -1085,156 +1258,132 @@ mod tests { community_id, person_id, }; - CommunityModerator::join(pool, &form).await.unwrap(); + CommunityModerator::join(pool, &form).await?; let post_listing = PostQuery { - sort: (Some(SortType::New)), - community_id: (Some(data.inserted_community.id)), - local_user: (Some(&data.local_user_view)), - ..Default::default() + community_id: Some(data.inserted_community.id), + ..data.default_post_query() } - .list(pool) - .await - .unwrap(); + .list(&data.site, pool) + .await? + .into_iter() + .map(|p| (p.creator.name, p.creator_is_moderator, p.creator_is_admin)) + .collect::>(); - assert!(post_listing[1].creator_is_moderator); - cleanup(data, pool).await; + let expected_post_listing = vec![ + ("mybot".to_owned(), false, false), + ("tegan".to_owned(), true, true), + ]; + + assert_eq!(expected_post_listing, post_listing); + + cleanup(data, pool).await } #[tokio::test] #[serial] - async fn post_listing_person_language() { - let pool = &build_db_pool_for_tests().await; + async fn post_listing_person_language() -> LemmyResult<()> { + const EL_POSTO: &str = "el posto"; + + let pool = &build_db_pool().await?; let pool = &mut pool.into(); - let data = init_data(pool).await; + let data = init_data(pool).await?; - let spanish_id = Language::read_id_from_code(pool, Some("es")) - .await - .unwrap() - .unwrap(); - let post_spanish = PostInsertForm::builder() - .name("asffgdsc".to_string()) - .creator_id(data.local_user_view.person.id) - .community_id(data.inserted_community.id) - .language_id(Some(spanish_id)) - .build(); + let spanish_id = Language::read_id_from_code(pool, "es").await?; - Post::create(pool, &post_spanish).await.unwrap(); + let french_id = Language::read_id_from_code(pool, "fr").await?; - let post_listings_all = PostQuery { - sort: (Some(SortType::New)), - local_user: (Some(&data.local_user_view)), - ..Default::default() - } - .list(pool) - .await - .unwrap(); + let post_spanish = PostInsertForm { + language_id: Some(spanish_id), + ..PostInsertForm::new( + EL_POSTO.to_string(), + data.local_user_view.person.id, + data.inserted_community.id, + ) + }; + Post::create(pool, &post_spanish).await?; + + let post_listings_all = data.default_post_query().list(&data.site, pool).await?; // no language filters specified, all posts should be returned - assert_eq!(3, post_listings_all.len()); + assert_eq!(vec![EL_POSTO, POST_BY_BOT, POST], names(&post_listings_all)); - let french_id = Language::read_id_from_code(pool, Some("fr")) - .await - .unwrap() - .unwrap(); - LocalUserLanguage::update(pool, vec![french_id], data.local_user_view.local_user.id) - .await - .unwrap(); + LocalUserLanguage::update(pool, vec![french_id], data.local_user_view.local_user.id).await?; - let post_listing_french = PostQuery { - sort: (Some(SortType::New)), - local_user: (Some(&data.local_user_view)), - ..Default::default() - } - .list(pool) - .await - .unwrap(); + let post_listing_french = data.default_post_query().list(&data.site, pool).await?; // only one post in french and one undetermined should be returned - assert_eq!(2, post_listing_french.len()); - assert!(post_listing_french - .iter() - .any(|p| p.post.language_id == french_id)); + assert_eq!(vec![POST_BY_BOT, POST], names(&post_listing_french)); + assert_eq!( + Some(french_id), + post_listing_french.get(1).map(|p| p.post.language_id) + ); LocalUserLanguage::update( pool, vec![french_id, UNDETERMINED_ID], data.local_user_view.local_user.id, ) - .await - .unwrap(); - let post_listings_french_und = 
PostQuery { - sort: (Some(SortType::New)), - local_user: (Some(&data.local_user_view)), - ..Default::default() - } - .list(pool) - .await - .unwrap(); + .await?; + let post_listings_french_und = data + .default_post_query() + .list(&data.site, pool) + .await? + .into_iter() + .map(|p| (p.post.name, p.post.language_id)) + .collect::>(); + let expected_post_listings_french_und = vec![ + (POST_BY_BOT.to_owned(), UNDETERMINED_ID), + (POST.to_owned(), french_id), + ]; // french post and undetermined language post should be returned - assert_eq!(2, post_listings_french_und.len()); - assert_eq!( - UNDETERMINED_ID, - post_listings_french_und[0].post.language_id - ); - assert_eq!(french_id, post_listings_french_und[1].post.language_id); + assert_eq!(expected_post_listings_french_und, post_listings_french_und); - cleanup(data, pool).await; + cleanup(data, pool).await } #[tokio::test] #[serial] - async fn post_listings_removed() { - let pool = &build_db_pool_for_tests().await; + async fn post_listings_removed() -> LemmyResult<()> { + let pool = &build_db_pool().await?; let pool = &mut pool.into(); - let mut data = init_data(pool).await; + let mut data = init_data(pool).await?; // Remove the post Post::update( pool, - data.inserted_post.id, + data.inserted_bot_post.id, &PostUpdateForm { removed: Some(true), ..Default::default() }, ) - .await - .unwrap(); + .await?; // Make sure you don't see the removed post in the results - let post_listings_no_admin = PostQuery { - sort: Some(SortType::New), - local_user: Some(&data.local_user_view), - ..Default::default() - } - .list(pool) - .await - .unwrap(); - assert_eq!(1, post_listings_no_admin.len()); + let post_listings_no_admin = data.default_post_query().list(&data.site, pool).await?; + assert_eq!(vec![POST], names(&post_listings_no_admin)); - // Removed post is shown to admins on profile page + // Removed bot post is shown to admins on its profile page data.local_user_view.local_user.admin = true; let post_listings_is_admin = PostQuery { - sort: Some(SortType::New), - local_user: Some(&data.local_user_view), - is_profile_view: true, - ..Default::default() + creator_id: Some(data.inserted_bot.id), + ..data.default_post_query() } - .list(pool) - .await - .unwrap(); - assert_eq!(2, post_listings_is_admin.len()); + .list(&data.site, pool) + .await?; + assert_eq!(vec![POST_BY_BOT], names(&post_listings_is_admin)); - cleanup(data, pool).await; + cleanup(data, pool).await } #[tokio::test] #[serial] - async fn post_listings_deleted() { - let pool = &build_db_pool_for_tests().await; + async fn post_listings_deleted() -> LemmyResult<()> { + let pool = &build_db_pool().await?; let pool = &mut pool.into(); - let data = init_data(pool).await; + let data = init_data(pool).await?; // Delete the post Post::update( @@ -1245,152 +1394,388 @@ mod tests { ..Default::default() }, ) - .await - .unwrap(); + .await?; - // Make sure you don't see the deleted post in the results - let post_listings_no_creator = PostQuery { - sort: Some(SortType::New), - ..Default::default() - } - .list(pool) - .await - .unwrap(); - let not_contains_deleted = post_listings_no_creator + // Deleted post is only shown to creator + for (local_user, expect_contains_deleted) in [ + (None, false), + (Some(&data.blocked_local_user_view.local_user), false), + (Some(&data.local_user_view.local_user), true), + ] { + let contains_deleted = PostQuery { + local_user, + ..data.default_post_query() + } + .list(&data.site, pool) + .await? 
.iter() - .map(|p| p.post.id) - .all(|p| p != data.inserted_post.id); - assert!(not_contains_deleted); + .any(|p| p.post.id == data.inserted_post.id); - // Deleted post is shown to creator - let post_listings_is_creator = PostQuery { - sort: Some(SortType::New), - local_user: Some(&data.local_user_view), - ..Default::default() + assert_eq!(expect_contains_deleted, contains_deleted); } - .list(pool) - .await - .unwrap(); - let contains_deleted = post_listings_is_creator - .iter() - .map(|p| p.post.id) - .any(|p| p == data.inserted_post.id); - assert!(contains_deleted); - cleanup(data, pool).await; + cleanup(data, pool).await } #[tokio::test] #[serial] - async fn post_listing_instance_block() { - let pool = &build_db_pool_for_tests().await; + async fn post_listings_hidden_community() -> LemmyResult<()> { + let pool = &build_db_pool().await?; let pool = &mut pool.into(); - let data = init_data(pool).await; + let data = init_data(pool).await?; - let blocked_instance = Instance::read_or_create(pool, "another_domain.tld".to_string()) - .await - .unwrap(); + Community::update( + pool, + data.inserted_community.id, + &CommunityUpdateForm { + hidden: Some(true), + ..Default::default() + }, + ) + .await?; - let community_form = CommunityInsertForm::builder() - .name("test_community_4".to_string()) - .title("none".to_owned()) - .public_key("pubkey".to_string()) - .instance_id(blocked_instance.id) - .build(); - let inserted_community = Community::create(pool, &community_form).await.unwrap(); + let posts = PostQuery::default().list(&data.site, pool).await?; + assert!(posts.is_empty()); - let post_form = PostInsertForm::builder() - .name("blocked instance post".to_string()) - .creator_id(data.inserted_bot.id) - .community_id(inserted_community.id) - .language_id(Some(LanguageId(1))) - .build(); + let posts = data.default_post_query().list(&data.site, pool).await?; + assert!(posts.is_empty()); - let post_from_blocked_instance = Post::create(pool, &post_form).await.unwrap(); + // Follow the community + let form = CommunityFollowerForm { + community_id: data.inserted_community.id, + person_id: data.local_user_view.person.id, + pending: false, + }; + CommunityFollower::follow(pool, &form).await?; + + let posts = data.default_post_query().list(&data.site, pool).await?; + assert!(!posts.is_empty()); + + cleanup(data, pool).await + } + + #[tokio::test] + #[serial] + async fn post_listing_instance_block() -> LemmyResult<()> { + const POST_FROM_BLOCKED_INSTANCE: &str = "post on blocked instance"; + + let pool = &build_db_pool().await?; + let pool = &mut pool.into(); + let data = init_data(pool).await?; + + let blocked_instance = Instance::read_or_create(pool, "another_domain.tld".to_string()).await?; + + let community_form = CommunityInsertForm::new( + blocked_instance.id, + "test_community_4".to_string(), + "none".to_owned(), + "pubkey".to_string(), + ); + let inserted_community = Community::create(pool, &community_form).await?; + + let post_form = PostInsertForm { + language_id: Some(LanguageId(1)), + ..PostInsertForm::new( + POST_FROM_BLOCKED_INSTANCE.to_string(), + data.inserted_bot.id, + inserted_community.id, + ) + }; + let post_from_blocked_instance = Post::create(pool, &post_form).await?; // no instance block, should return all posts - let post_listings_all = PostQuery { - local_user: Some(&data.local_user_view), - ..Default::default() - } - .list(pool) - .await - .unwrap(); - assert_eq!(post_listings_all.len(), 3); + let post_listings_all = data.default_post_query().list(&data.site, pool).await?; + 
assert_eq!( + vec![POST_FROM_BLOCKED_INSTANCE, POST_BY_BOT, POST], + names(&post_listings_all) + ); // block the instance let block_form = InstanceBlockForm { person_id: data.local_user_view.person.id, instance_id: blocked_instance.id, }; - InstanceBlock::block(pool, &block_form).await.unwrap(); + InstanceBlock::block(pool, &block_form).await?; // now posts from communities on that instance should be hidden - let post_listings_blocked = PostQuery { - local_user: Some(&data.local_user_view), - ..Default::default() - } - .list(pool) - .await - .unwrap(); - assert_eq!(post_listings_blocked.len(), 2); - assert_ne!( - post_listings_blocked[0].post.id, - post_from_blocked_instance.id - ); - assert_ne!( - post_listings_blocked[1].post.id, - post_from_blocked_instance.id - ); + let post_listings_blocked = data.default_post_query().list(&data.site, pool).await?; + assert_eq!(vec![POST_BY_BOT, POST], names(&post_listings_blocked)); + assert!(post_listings_blocked + .iter() + .all(|p| p.post.id != post_from_blocked_instance.id)); // after unblocking it should return all posts again - InstanceBlock::unblock(pool, &block_form).await.unwrap(); - let post_listings_blocked = PostQuery { - local_user: Some(&data.local_user_view), + InstanceBlock::unblock(pool, &block_form).await?; + let post_listings_blocked = data.default_post_query().list(&data.site, pool).await?; + assert_eq!( + vec![POST_FROM_BLOCKED_INSTANCE, POST_BY_BOT, POST], + names(&post_listings_blocked) + ); + + Instance::delete(pool, blocked_instance.id).await?; + cleanup(data, pool).await + } + + #[tokio::test] + #[serial] + async fn pagination_includes_each_post_once() -> LemmyResult<()> { + let pool = &build_db_pool().await?; + let pool = &mut pool.into(); + let data = init_data(pool).await?; + + let community_form = CommunityInsertForm::new( + data.inserted_instance.id, + "yes".to_string(), + "yes".to_owned(), + "pubkey".to_string(), + ); + let inserted_community = Community::create(pool, &community_form).await?; + + let mut inserted_post_ids = vec![]; + let mut inserted_comment_ids = vec![]; + + // Create 150 posts with varying non-correlating values for publish date, number of comments, + // and featured + for comments in 0..10 { + for _ in 0..15 { + let post_form = PostInsertForm { + featured_local: Some((comments % 2) == 0), + featured_community: Some((comments % 2) == 0), + published: Some(Utc::now() - Duration::from_secs(comments % 3)), + ..PostInsertForm::new( + "keep Christ in Christmas".to_owned(), + data.local_user_view.person.id, + inserted_community.id, + ) + }; + let inserted_post = Post::create(pool, &post_form).await?; + inserted_post_ids.push(inserted_post.id); + + for _ in 0..comments { + let comment_form = CommentInsertForm::new( + data.local_user_view.person.id, + inserted_post.id, + "yes".to_owned(), + ); + let inserted_comment = Comment::create(pool, &comment_form, None).await?; + inserted_comment_ids.push(inserted_comment.id); + } + } + } + + let options = PostQuery { + community_id: Some(inserted_community.id), + sort: Some(PostSortType::MostComments), + limit: Some(10), + ..Default::default() + }; + + let mut listed_post_ids = vec![]; + let mut page_after = None; + loop { + let post_listings = PostQuery { + page_after, + ..options.clone() + } + .list(&data.site, pool) + .await?; + + listed_post_ids.extend(post_listings.iter().map(|p| p.post.id)); + + if let Some(p) = post_listings.into_iter().last() { + page_after = Some(PaginationCursorData(p.counts)); + } else { + break; + } + } + + // Check that backward 
pagination matches forward pagination + let mut listed_post_ids_forward = listed_post_ids.clone(); + let mut page_before = None; + loop { + let post_listings = PostQuery { + page_after: page_before, + page_back: Some(true), + ..options.clone() + } + .list(&data.site, pool) + .await?; + + let listed_post_ids = post_listings.iter().map(|p| p.post.id).collect::>(); + + let index = listed_post_ids_forward.len() - listed_post_ids.len(); + assert_eq!( + listed_post_ids_forward.get(index..), + listed_post_ids.get(..) + ); + listed_post_ids_forward.truncate(index); + + if let Some(p) = post_listings.into_iter().next() { + page_before = Some(PaginationCursorData(p.counts)); + } else { + break; + } + } + + inserted_post_ids.sort_unstable_by_key(|id| id.0); + listed_post_ids.sort_unstable_by_key(|id| id.0); + + assert_eq!(inserted_post_ids, listed_post_ids); + + Community::delete(pool, inserted_community.id).await?; + cleanup(data, pool).await + } + + #[tokio::test] + #[serial] + async fn post_listings_hide_read() -> LemmyResult<()> { + let pool = &build_db_pool().await?; + let pool = &mut pool.into(); + let mut data = init_data(pool).await?; + + // Make sure local user hides read posts + let local_user_form = LocalUserUpdateForm { + show_read_posts: Some(false), + ..Default::default() + }; + LocalUser::update(pool, data.local_user_view.local_user.id, &local_user_form).await?; + data.local_user_view.local_user.show_read_posts = false; + + // Mark a post as read + PostRead::mark_as_read( + pool, + HashSet::from([data.inserted_bot_post.id]), + data.local_user_view.person.id, + ) + .await?; + + // Make sure you don't see the read post in the results + let post_listings_hide_read = data.default_post_query().list(&data.site, pool).await?; + assert_eq!(vec![POST], names(&post_listings_hide_read)); + + // Test with the show_read override as true + let post_listings_show_read_true = PostQuery { + show_read: Some(true), + ..data.default_post_query() + } + .list(&data.site, pool) + .await?; + assert_eq!( + vec![POST_BY_BOT, POST], + names(&post_listings_show_read_true) + ); + + // Test with the show_read override as false + let post_listings_show_read_false = PostQuery { + show_read: Some(false), + ..data.default_post_query() + } + .list(&data.site, pool) + .await?; + assert_eq!(vec![POST], names(&post_listings_show_read_false)); + cleanup(data, pool).await + } + + #[tokio::test] + #[serial] + async fn post_listings_hide_hidden() -> LemmyResult<()> { + let pool = &build_db_pool().await?; + let pool = &mut pool.into(); + let data = init_data(pool).await?; + + // Mark a post as hidden + PostHide::hide( + pool, + HashSet::from([data.inserted_bot_post.id]), + data.local_user_view.person.id, + ) + .await?; + + // Make sure you don't see the hidden post in the results + let post_listings_hide_hidden = data.default_post_query().list(&data.site, pool).await?; + assert_eq!(vec![POST], names(&post_listings_hide_hidden)); + + // Make sure it does come back with the show_hidden option + let post_listings_show_hidden = PostQuery { + sort: Some(PostSortType::New), + local_user: Some(&data.local_user_view.local_user), + show_hidden: Some(true), ..Default::default() } - .list(pool) - .await - .unwrap(); - assert_eq!(post_listings_blocked.len(), 3); + .list(&data.site, pool) + .await?; + assert_eq!(vec![POST_BY_BOT, POST], names(&post_listings_show_hidden)); - Instance::delete(pool, blocked_instance.id).await.unwrap(); - cleanup(data, pool).await; + // Make sure that hidden field is true. 
+ assert!(&post_listings_show_hidden.first().is_some_and(|p| p.hidden)); + + cleanup(data, pool).await } - async fn cleanup(data: Data, pool: &mut DbPool<'_>) { - let num_deleted = Post::delete(pool, data.inserted_post.id).await.unwrap(); - Community::delete(pool, data.inserted_community.id) - .await - .unwrap(); - Person::delete(pool, data.local_user_view.person.id) - .await - .unwrap(); - Person::delete(pool, data.inserted_bot.id).await.unwrap(); - Person::delete(pool, data.inserted_blocked_person.id) - .await - .unwrap(); - Instance::delete(pool, data.inserted_instance.id) - .await - .unwrap(); + #[tokio::test] + #[serial] + async fn post_listings_hide_nsfw() -> LemmyResult<()> { + let pool = &build_db_pool().await?; + let pool = &mut pool.into(); + let data = init_data(pool).await?; + + // Mark a post as nsfw + let update_form = PostUpdateForm { + nsfw: Some(true), + ..Default::default() + }; + + Post::update(pool, data.inserted_bot_post.id, &update_form).await?; + + // Make sure you don't see the nsfw post in the regular results + let post_listings_hide_nsfw = data.default_post_query().list(&data.site, pool).await?; + assert_eq!(vec![POST], names(&post_listings_hide_nsfw)); + + // Make sure it does come back with the show_nsfw option + let post_listings_show_nsfw = PostQuery { + sort: Some(PostSortType::New), + show_nsfw: Some(true), + local_user: Some(&data.local_user_view.local_user), + ..Default::default() + } + .list(&data.site, pool) + .await?; + assert_eq!(vec![POST_BY_BOT, POST], names(&post_listings_show_nsfw)); + + // Make sure that nsfw field is true. + assert!(&post_listings_show_nsfw.first().is_some_and(|p| p.post.nsfw)); + + cleanup(data, pool).await + } + + async fn cleanup(data: Data, pool: &mut DbPool<'_>) -> LemmyResult<()> { + let num_deleted = Post::delete(pool, data.inserted_post.id).await?; + Community::delete(pool, data.inserted_community.id).await?; + Person::delete(pool, data.local_user_view.person.id).await?; + Person::delete(pool, data.inserted_bot.id).await?; + Person::delete(pool, data.blocked_local_user_view.person.id).await?; + Instance::delete(pool, data.inserted_instance.id).await?; assert_eq!(1, num_deleted); + + Ok(()) } - async fn expected_post_view(data: &Data, pool: &mut DbPool<'_>) -> PostView { + async fn expected_post_view(data: &Data, pool: &mut DbPool<'_>) -> LemmyResult { let (inserted_person, inserted_community, inserted_post) = ( &data.local_user_view.person, &data.inserted_community, &data.inserted_post, ); - let agg = PostAggregates::read(pool, inserted_post.id).await.unwrap(); + let agg = PostAggregates::read(pool, inserted_post.id).await?; - PostView { + Ok(PostView { post: Post { id: inserted_post.id, name: inserted_post.name.clone(), creator_id: inserted_person.id, url: None, body: None, + alt_text: None, published: inserted_post.published, updated: None, community_id: inserted_community.id, @@ -1407,6 +1792,8 @@ mod tests { language_id: LanguageId(47), featured_community: false, featured_local: false, + url_content_type: None, + scheduled_publish_time: None, }, my_vote: None, unread_comments: 0, @@ -1425,7 +1812,6 @@ mod tests { banner: None, updated: None, inbox_url: inserted_person.inbox_url.clone(), - shared_inbox_url: None, matrix_user_id: None, ban_expires: None, instance_id: data.inserted_instance.id, @@ -1433,8 +1819,11 @@ mod tests { public_key: inserted_person.public_key.clone(), last_refreshed_at: inserted_person.last_refreshed_at, }, + image_details: None, creator_banned_from_community: false, + banned_from_community: 
false, creator_is_moderator: false, + creator_is_admin: true, community: Community { id: inserted_community.id, name: inserted_community.name.clone(), @@ -1445,6 +1834,7 @@ mod tests { actor_id: inserted_community.actor_id.clone(), local: true, title: "nada".to_owned(), + sidebar: None, description: None, updated: None, banner: None, @@ -1457,12 +1847,11 @@ mod tests { last_refreshed_at: inserted_community.last_refreshed_at, followers_url: inserted_community.followers_url.clone(), inbox_url: inserted_community.inbox_url.clone(), - shared_inbox_url: inserted_community.shared_inbox_url.clone(), moderators_url: inserted_community.moderators_url.clone(), featured_url: inserted_community.featured_url.clone(), + visibility: CommunityVisibility::Public, }, counts: PostAggregates { - id: agg.id, post_id: inserted_post.id, comments: 0, score: 0, @@ -1473,18 +1862,130 @@ mod tests { newest_comment_time: inserted_post.published, featured_community: false, featured_local: false, - hot_rank: 0.1728, - hot_rank_active: 0.1728, + hot_rank: RANK_DEFAULT, + hot_rank_active: RANK_DEFAULT, controversy_rank: 0.0, - scaled_rank: 0.3621, + scaled_rank: RANK_DEFAULT, community_id: inserted_post.community_id, creator_id: inserted_post.creator_id, instance_id: data.inserted_instance.id, }, subscribed: SubscribedType::NotSubscribed, read: false, + hidden: false, saved: false, creator_blocked: false, + }) + } + + #[tokio::test] + #[serial] + async fn local_only_instance() -> LemmyResult<()> { + let pool = &build_db_pool_for_tests().await; + let pool = &mut pool.into(); + let data = init_data(pool).await?; + + Community::update( + pool, + data.inserted_community.id, + &CommunityUpdateForm { + visibility: Some(CommunityVisibility::LocalOnly), + ..Default::default() + }, + ) + .await?; + + let unauthenticated_query = PostQuery { + ..Default::default() } + .list(&data.site, pool) + .await?; + assert_eq!(0, unauthenticated_query.len()); + + let authenticated_query = PostQuery { + local_user: Some(&data.local_user_view.local_user), + ..Default::default() + } + .list(&data.site, pool) + .await?; + assert_eq!(2, authenticated_query.len()); + + let unauthenticated_post = PostView::read(pool, data.inserted_post.id, None, false).await; + assert!(unauthenticated_post.is_err()); + + let authenticated_post = PostView::read( + pool, + data.inserted_post.id, + Some(&data.local_user_view.local_user), + false, + ) + .await; + assert!(authenticated_post.is_ok()); + + cleanup(data, pool).await?; + Ok(()) + } + + #[tokio::test] + #[serial] + async fn post_listing_local_user_banned_from_community() -> LemmyResult<()> { + let pool = &build_db_pool().await?; + let pool = &mut pool.into(); + let data = init_data(pool).await?; + + // Test that post view shows if local user is blocked from community + let banned_from_comm_person = PersonInsertForm::test_form(data.inserted_instance.id, "jill"); + + let inserted_banned_from_comm_person = Person::create(pool, &banned_from_comm_person).await?; + + let inserted_banned_from_comm_local_user = LocalUser::create( + pool, + &LocalUserInsertForm::test_form(inserted_banned_from_comm_person.id), + vec![], + ) + .await?; + + CommunityPersonBan::ban( + pool, + &CommunityPersonBanForm { + community_id: data.inserted_community.id, + person_id: inserted_banned_from_comm_person.id, + expires: None, + }, + ) + .await?; + + let post_view = PostView::read( + pool, + data.inserted_post.id, + Some(&inserted_banned_from_comm_local_user), + false, + ) + .await?; + + assert!(post_view.banned_from_community); + + 
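The local_only_instance test above encodes a simple visibility rule: a community set to LocalOnly is invisible to unauthenticated requests, while a logged-in local user still sees it. A stand-alone sketch of that rule as a hypothetical in-memory filter, not the real PostQuery:

#[derive(Clone, Copy, PartialEq)]
enum CommunityVisibility {
    Public,
    LocalOnly,
}

struct PostRow {
    name: &'static str,
    visibility: CommunityVisibility,
}

// Anonymous viewers only see posts from Public communities; authenticated
// local users see LocalOnly communities as well.
fn visible_posts(posts: &[PostRow], authenticated: bool) -> Vec<&'static str> {
    posts
        .iter()
        .filter(|p| authenticated || p.visibility == CommunityVisibility::Public)
        .map(|p| p.name)
        .collect()
}

fn main() {
    let posts = [
        PostRow { name: "public post", visibility: CommunityVisibility::Public },
        PostRow { name: "local only post", visibility: CommunityVisibility::LocalOnly },
    ];
    assert_eq!(visible_posts(&posts, false), vec!["public post"]);
    assert_eq!(visible_posts(&posts, true).len(), 2);
}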
Person::delete(pool, inserted_banned_from_comm_person.id).await?; + cleanup(data, pool).await + } + + #[tokio::test] + #[serial] + async fn post_listing_local_user_not_banned_from_community() -> LemmyResult<()> { + let pool = &build_db_pool().await?; + let pool = &mut pool.into(); + let data = init_data(pool).await?; + + let post_view = PostView::read( + pool, + data.inserted_post.id, + Some(&data.local_user_view.local_user), + false, + ) + .await?; + + assert!(!post_view.banned_from_community); + + cleanup(data, pool).await } } diff --git a/crates/db_views/src/private_message_report_view.rs b/crates/db_views/src/private_message_report_view.rs index 878d79edb..56d0d6e7b 100644 --- a/crates/db_views/src/private_message_report_view.rs +++ b/crates/db_views/src/private_message_report_view.rs @@ -42,21 +42,26 @@ fn queries<'a>() -> Queries< let read = move |mut conn: DbConn<'a>, report_id: PrivateMessageReportId| async move { all_joins(private_message_report::table.find(report_id).into_boxed()) - .first::(&mut conn) + .first(&mut conn) .await }; let list = move |mut conn: DbConn<'a>, options: PrivateMessageReportQuery| async move { let mut query = all_joins(private_message_report::table.into_boxed()); + // If viewing all reports, order by newest, but if viewing unresolved only, show the oldest + // first (FIFO) if options.unresolved_only { - query = query.filter(private_message_report::resolved.eq(false)); + query = query + .filter(private_message_report::resolved.eq(false)) + .order_by(private_message_report::published.asc()); + } else { + query = query.order_by(private_message_report::published.desc()); } let (limit, offset) = limit_and_offset(options.page, options.limit)?; query - .order_by(private_message::published.desc()) .limit(limit) .offset(offset) .load::(&mut conn) @@ -106,12 +111,12 @@ impl PrivateMessageReportQuery { } #[cfg(test)] +#[expect(clippy::indexing_slicing)] mod tests { - #![allow(clippy::unwrap_used)] - #![allow(clippy::indexing_slicing)] use crate::private_message_report_view::PrivateMessageReportQuery; use lemmy_db_schema::{ + assert_length, source::{ instance::Instance, person::{Person, PersonInsertForm}, @@ -121,39 +126,31 @@ mod tests { traits::{Crud, Reportable}, utils::build_db_pool_for_tests, }; + use lemmy_utils::error::LemmyResult; + use pretty_assertions::assert_eq; use serial_test::serial; #[tokio::test] #[serial] - async fn test_crud() { + async fn test_crud() -> LemmyResult<()> { let pool = &build_db_pool_for_tests().await; let pool = &mut pool.into(); - let inserted_instance = Instance::read_or_create(pool, "my_domain.tld".to_string()) - .await - .unwrap(); + let inserted_instance = Instance::read_or_create(pool, "my_domain.tld".to_string()).await?; - let new_person_1 = PersonInsertForm::builder() - .name("timmy_mrv".into()) - .public_key("pubkey".to_string()) - .instance_id(inserted_instance.id) - .build(); - let inserted_timmy = Person::create(pool, &new_person_1).await.unwrap(); + let new_person_1 = PersonInsertForm::test_form(inserted_instance.id, "timmy_mrv"); + let inserted_timmy = Person::create(pool, &new_person_1).await?; - let new_person_2 = PersonInsertForm::builder() - .name("jessica_mrv".into()) - .public_key("pubkey".to_string()) - .instance_id(inserted_instance.id) - .build(); - let inserted_jessica = Person::create(pool, &new_person_2).await.unwrap(); + let new_person_2 = PersonInsertForm::test_form(inserted_instance.id, "jessica_mrv"); + let inserted_jessica = Person::create(pool, &new_person_2).await?; // timmy sends private message to 
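Throughout these test modules the diff replaces `.unwrap()` with `?` by giving each test a `LemmyResult<()>` return type and ending it with `Ok(())` (or the result of `cleanup`). The same pattern in a self-contained form, assuming tokio as the async test runner as elsewhere in this diff and a standard-library error type instead of LemmyError:

use std::num::ParseIntError;

#[tokio::test]
async fn parses_number() -> Result<(), ParseIntError> {
    // `?` propagates the error and fails the test with its Debug output,
    // so no panicking `.unwrap()` calls are needed.
    let n: i32 = "42".parse()?;
    assert_eq!(n, 42);
    Ok(())
}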
jessica - let pm_form = PrivateMessageInsertForm::builder() - .creator_id(inserted_timmy.id) - .recipient_id(inserted_jessica.id) - .content("something offensive".to_string()) - .build(); - let pm = PrivateMessage::create(pool, &pm_form).await.unwrap(); + let pm_form = PrivateMessageInsertForm::new( + inserted_timmy.id, + inserted_jessica.id, + "something offensive".to_string(), + ); + let pm = PrivateMessage::create(pool, &pm_form).await?; // jessica reports private message let pm_report_form = PrivateMessageReportForm { @@ -162,48 +159,38 @@ mod tests { private_message_id: pm.id, reason: "its offensive".to_string(), }; - let pm_report = PrivateMessageReport::report(pool, &pm_report_form) - .await - .unwrap(); + let pm_report = PrivateMessageReport::report(pool, &pm_report_form).await?; - let reports = PrivateMessageReportQuery::default() - .list(pool) - .await - .unwrap(); - assert_eq!(1, reports.len()); + let reports = PrivateMessageReportQuery::default().list(pool).await?; + assert_length!(1, reports); assert!(!reports[0].private_message_report.resolved); assert_eq!(inserted_timmy.name, reports[0].private_message_creator.name); assert_eq!(inserted_jessica.name, reports[0].creator.name); assert_eq!(pm_report.reason, reports[0].private_message_report.reason); assert_eq!(pm.content, reports[0].private_message.content); - let new_person_3 = PersonInsertForm::builder() - .name("admin_mrv".into()) - .public_key("pubkey".to_string()) - .instance_id(inserted_instance.id) - .build(); - let inserted_admin = Person::create(pool, &new_person_3).await.unwrap(); + let new_person_3 = PersonInsertForm::test_form(inserted_instance.id, "admin_mrv"); + let inserted_admin = Person::create(pool, &new_person_3).await?; // admin resolves the report (after taking appropriate action) - PrivateMessageReport::resolve(pool, pm_report.id, inserted_admin.id) - .await - .unwrap(); + PrivateMessageReport::resolve(pool, pm_report.id, inserted_admin.id).await?; let reports = PrivateMessageReportQuery { unresolved_only: (false), ..Default::default() } .list(pool) - .await - .unwrap(); - assert_eq!(1, reports.len()); + .await?; + assert_length!(1, reports); assert!(reports[0].private_message_report.resolved); assert!(reports[0].resolver.is_some()); assert_eq!( - inserted_admin.name, - reports[0].resolver.as_ref().unwrap().name + Some(&inserted_admin.name), + reports[0].resolver.as_ref().map(|r| &r.name) ); - Instance::delete(pool, inserted_instance.id).await.unwrap(); + Instance::delete(pool, inserted_instance.id).await?; + + Ok(()) } } diff --git a/crates/db_views/src/private_message_view.rs b/crates/db_views/src/private_message_view.rs index b8628ecc5..0fbc0ee16 100644 --- a/crates/db_views/src/private_message_view.rs +++ b/crates/db_views/src/private_message_view.rs @@ -12,7 +12,7 @@ use diesel_async::RunQueryDsl; use lemmy_db_schema::{ aliases, newtypes::{PersonId, PrivateMessageId}, - schema::{person, private_message}, + schema::{instance_block, person, person_block, private_message}, utils::{get_conn, limit_and_offset, DbConn, DbPool, ListFn, Queries, ReadFn}, }; use tracing::debug; @@ -27,6 +27,20 @@ fn queries<'a>() -> Queries< .inner_join( aliases::person1.on(private_message::recipient_id.eq(aliases::person1.field(person::id))), ) + .left_join( + person_block::table.on( + private_message::creator_id + .eq(person_block::target_id) + .and(person_block::person_id.eq(aliases::person1.field(person::id))), + ), + ) + .left_join( + instance_block::table.on( + person::instance_id + .eq(instance_block::instance_id) + 
.and(instance_block::person_id.eq(aliases::person1.field(person::id))), + ), + ) }; let selection = ( @@ -39,13 +53,18 @@ fn queries<'a>() -> Queries< all_joins(private_message::table.find(private_message_id).into_boxed()) .order_by(private_message::published.desc()) .select(selection) - .first::(&mut conn) + .first(&mut conn) .await }; let list = move |mut conn: DbConn<'a>, (options, recipient_id): (PrivateMessageQuery, PersonId)| async move { - let mut query = all_joins(private_message::table.into_boxed()).select(selection); + let mut query = all_joins(private_message::table.into_boxed()) + .select(selection) + // Dont show replies from blocked users + .filter(person_block::person_id.is_null()) + // Dont show replies from blocked instances + .filter(instance_block::person_id.is_null()); // If its unread, I only want the ones to me if options.unread_only { @@ -106,6 +125,26 @@ impl PrivateMessageView { use diesel::dsl::count; let conn = &mut get_conn(pool).await?; private_message::table + // Necessary to get the senders instance_id + .inner_join(person::table.on(private_message::creator_id.eq(person::id))) + .left_join( + person_block::table.on( + private_message::creator_id + .eq(person_block::target_id) + .and(person_block::person_id.eq(my_person_id)), + ), + ) + .left_join( + instance_block::table.on( + person::instance_id + .eq(instance_block::instance_id) + .and(instance_block::person_id.eq(my_person_id)), + ), + ) + // Dont count replies from blocked users + .filter(person_block::person_id.is_null()) + // Dont count replies from blocked instances + .filter(instance_block::person_id.is_null()) .filter(private_message::read.eq(false)) .filter(private_message::recipient_id.eq(my_person_id)) .filter(private_message::deleted.eq(false)) @@ -134,103 +173,102 @@ impl PrivateMessageQuery { } #[cfg(test)] +#[expect(clippy::indexing_slicing)] mod tests { - #![allow(clippy::unwrap_used)] - #![allow(clippy::indexing_slicing)] - use crate::private_message_view::PrivateMessageQuery; + use crate::{private_message_view::PrivateMessageQuery, structs::PrivateMessageView}; use lemmy_db_schema::{ + assert_length, + newtypes::InstanceId, source::{ instance::Instance, + instance_block::{InstanceBlock, InstanceBlockForm}, person::{Person, PersonInsertForm}, + person_block::{PersonBlock, PersonBlockForm}, private_message::{PrivateMessage, PrivateMessageInsertForm}, }, - traits::Crud, - utils::build_db_pool_for_tests, + traits::{Blockable, Crud}, + utils::{build_db_pool_for_tests, DbPool}, }; + use lemmy_utils::error::LemmyResult; + use pretty_assertions::assert_eq; use serial_test::serial; + struct Data { + instance: Instance, + timmy: Person, + jess: Person, + sara: Person, + } + + async fn init_data(pool: &mut DbPool<'_>) -> LemmyResult { + let message_content = String::new(); + + let instance = Instance::read_or_create(pool, "my_domain.tld".to_string()).await?; + + let timmy_form = PersonInsertForm::test_form(instance.id, "timmy_rav"); + + let timmy = Person::create(pool, &timmy_form).await?; + + let sara_form = PersonInsertForm::test_form(instance.id, "sara_rav"); + + let sara = Person::create(pool, &sara_form).await?; + + let jess_form = PersonInsertForm::test_form(instance.id, "jess_rav"); + + let jess = Person::create(pool, &jess_form).await?; + + let sara_timmy_message_form = + PrivateMessageInsertForm::new(sara.id, timmy.id, message_content.clone()); + PrivateMessage::create(pool, &sara_timmy_message_form).await?; + + let sara_jess_message_form = + PrivateMessageInsertForm::new(sara.id, jess.id, 
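The unread-count query above gains the same anti-joins as the list query: left join the block tables against the message sender and keep only rows where nothing matched, i.e. where the joined column is NULL. A trimmed-down sketch of that shape, assuming a hypothetical two-table schema and a synchronous connection (the real code uses diesel-async and also joins instance_block):

use diesel::{dsl::count, prelude::*};

diesel::table! {
    private_message (id) {
        id -> Int4,
        creator_id -> Int4,
        recipient_id -> Int4,
        read -> Bool,
    }
}

diesel::table! {
    person_block (person_id, target_id) {
        person_id -> Int4,
        target_id -> Int4,
    }
}

diesel::allow_tables_to_appear_in_same_query!(private_message, person_block);

fn unread_count(conn: &mut PgConnection, my_person_id: i32) -> QueryResult<i64> {
    private_message::table
        .left_join(
            person_block::table.on(
                private_message::creator_id
                    .eq(person_block::target_id)
                    .and(person_block::person_id.eq(my_person_id)),
            ),
        )
        // Anti-join: a NULL person_block row means the sender is not blocked
        .filter(person_block::person_id.is_null())
        .filter(private_message::recipient_id.eq(my_person_id))
        .filter(private_message::read.eq(false))
        .select(count(private_message::id))
        .first(conn)
}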
message_content.clone()); + PrivateMessage::create(pool, &sara_jess_message_form).await?; + + let timmy_sara_message_form = + PrivateMessageInsertForm::new(timmy.id, sara.id, message_content.clone()); + PrivateMessage::create(pool, &timmy_sara_message_form).await?; + + let jess_timmy_message_form = + PrivateMessageInsertForm::new(jess.id, timmy.id, message_content.clone()); + PrivateMessage::create(pool, &jess_timmy_message_form).await?; + + Ok(Data { + instance, + timmy, + jess, + sara, + }) + } + + async fn cleanup(instance_id: InstanceId, pool: &mut DbPool<'_>) -> LemmyResult<()> { + // This also deletes all persons and private messages thanks to sql `on delete cascade` + Instance::delete(pool, instance_id).await?; + Ok(()) + } + #[tokio::test] #[serial] - async fn test_crud() { - let message_content = String::new(); + async fn read_private_messages() -> LemmyResult<()> { let pool = &build_db_pool_for_tests().await; let pool = &mut pool.into(); - - let instance = Instance::read_or_create(pool, "my_domain.tld".to_string()) - .await - .unwrap(); - - let timmy_form = PersonInsertForm::builder() - .name("timmy_rav".into()) - .public_key("pubkey".to_string()) - .instance_id(instance.id) - .build(); - - let timmy = Person::create(pool, &timmy_form).await.unwrap(); - - let sara_form = PersonInsertForm::builder() - .name("sara_rav".into()) - .public_key("pubkey".to_string()) - .instance_id(instance.id) - .build(); - - let sara = Person::create(pool, &sara_form).await.unwrap(); - - let jess_form = PersonInsertForm::builder() - .name("jess_rav".into()) - .public_key("pubkey".to_string()) - .instance_id(instance.id) - .build(); - - let jess = Person::create(pool, &jess_form).await.unwrap(); - - let sara_timmy_message_form = PrivateMessageInsertForm::builder() - .creator_id(sara.id) - .recipient_id(timmy.id) - .content(message_content.clone()) - .build(); - let _inserted_sara_timmy_message_form = PrivateMessage::create(pool, &sara_timmy_message_form) - .await - .unwrap(); - - let sara_jess_message_form = PrivateMessageInsertForm::builder() - .creator_id(sara.id) - .recipient_id(jess.id) - .content(message_content.clone()) - .build(); - let _inserted_sara_jess_message_form = PrivateMessage::create(pool, &sara_jess_message_form) - .await - .unwrap(); - - let timmy_sara_message_form = PrivateMessageInsertForm::builder() - .creator_id(timmy.id) - .recipient_id(sara.id) - .content(message_content.clone()) - .build(); - let _inserted_timmy_sara_message_form = PrivateMessage::create(pool, &timmy_sara_message_form) - .await - .unwrap(); - - let jess_timmy_message_form = PrivateMessageInsertForm::builder() - .creator_id(jess.id) - .recipient_id(timmy.id) - .content(message_content.clone()) - .build(); - let _inserted_jess_timmy_message_form = PrivateMessage::create(pool, &jess_timmy_message_form) - .await - .unwrap(); + let Data { + timmy, + jess, + sara, + instance, + } = init_data(pool).await?; let timmy_messages = PrivateMessageQuery { unread_only: false, - creator_id: Option::None, + creator_id: None, ..Default::default() } .list(pool, timmy.id) - .await - .unwrap(); + .await?; - assert_eq!(timmy_messages.len(), 3); + assert_length!(3, &timmy_messages); assert_eq!(timmy_messages[0].creator.id, jess.id); assert_eq!(timmy_messages[0].recipient.id, timmy.id); assert_eq!(timmy_messages[1].creator.id, timmy.id); @@ -240,14 +278,13 @@ mod tests { let timmy_unread_messages = PrivateMessageQuery { unread_only: true, - creator_id: Option::None, + creator_id: None, ..Default::default() } .list(pool, timmy.id) - 
.await - .unwrap(); + .await?; - assert_eq!(timmy_unread_messages.len(), 2); + assert_length!(2, &timmy_unread_messages); assert_eq!(timmy_unread_messages[0].creator.id, jess.id); assert_eq!(timmy_unread_messages[0].recipient.id, timmy.id); assert_eq!(timmy_unread_messages[1].creator.id, sara.id); @@ -259,10 +296,9 @@ mod tests { ..Default::default() } .list(pool, timmy.id) - .await - .unwrap(); + .await?; - assert_eq!(timmy_sara_messages.len(), 2); + assert_length!(2, &timmy_sara_messages); assert_eq!(timmy_sara_messages[0].creator.id, timmy.id); assert_eq!(timmy_sara_messages[0].recipient.id, sara.id); assert_eq!(timmy_sara_messages[1].creator.id, sara.id); @@ -274,11 +310,96 @@ mod tests { ..Default::default() } .list(pool, timmy.id) - .await - .unwrap(); + .await?; - assert_eq!(timmy_sara_unread_messages.len(), 1); + assert_length!(1, &timmy_sara_unread_messages); assert_eq!(timmy_sara_unread_messages[0].creator.id, sara.id); assert_eq!(timmy_sara_unread_messages[0].recipient.id, timmy.id); + + cleanup(instance.id, pool).await + } + + #[tokio::test] + #[serial] + async fn ensure_person_block() -> LemmyResult<()> { + let pool = &build_db_pool_for_tests().await; + let pool = &mut pool.into(); + let Data { + timmy, + sara, + instance, + jess: _, + } = init_data(pool).await?; + + // Make sure blocks are working + let timmy_blocks_sara_form = PersonBlockForm { + person_id: timmy.id, + target_id: sara.id, + }; + + let inserted_block = PersonBlock::block(pool, &timmy_blocks_sara_form).await?; + + let expected_block = PersonBlock { + person_id: timmy.id, + target_id: sara.id, + published: inserted_block.published, + }; + assert_eq!(expected_block, inserted_block); + + let timmy_messages = PrivateMessageQuery { + unread_only: true, + creator_id: None, + ..Default::default() + } + .list(pool, timmy.id) + .await?; + + assert_length!(1, &timmy_messages); + + let timmy_unread_messages = PrivateMessageView::get_unread_messages(pool, timmy.id).await?; + assert_eq!(timmy_unread_messages, 1); + + cleanup(instance.id, pool).await + } + + #[tokio::test] + #[serial] + async fn ensure_instance_block() -> LemmyResult<()> { + let pool = &build_db_pool_for_tests().await; + let pool = &mut pool.into(); + let Data { + timmy, + jess: _, + sara, + instance, + } = init_data(pool).await?; + // Make sure instance_blocks are working + let timmy_blocks_instance_form = InstanceBlockForm { + person_id: timmy.id, + instance_id: sara.instance_id, + }; + + let inserted_instance_block = InstanceBlock::block(pool, &timmy_blocks_instance_form).await?; + + let expected_instance_block = InstanceBlock { + person_id: timmy.id, + instance_id: sara.instance_id, + published: inserted_instance_block.published, + }; + assert_eq!(expected_instance_block, inserted_instance_block); + + let timmy_messages = PrivateMessageQuery { + unread_only: true, + creator_id: None, + ..Default::default() + } + .list(pool, timmy.id) + .await?; + + assert_length!(0, &timmy_messages); + + let timmy_unread_messages = PrivateMessageView::get_unread_messages(pool, timmy.id).await?; + assert_eq!(timmy_unread_messages, 0); + cleanup(instance.id, pool).await } } diff --git a/crates/db_views/src/registration_application_view.rs b/crates/db_views/src/registration_application_view.rs index ee109e5da..a830f3d7a 100644 --- a/crates/db_views/src/registration_application_view.rs +++ b/crates/db_views/src/registration_application_view.rs @@ -11,12 +11,18 @@ use diesel::{ use diesel_async::RunQueryDsl; use lemmy_db_schema::{ aliases, + newtypes::{PersonId, 
RegistrationApplicationId}, schema::{local_user, person, registration_application}, utils::{get_conn, limit_and_offset, DbConn, DbPool, ListFn, Queries, ReadFn}, }; +enum ReadBy { + Id(RegistrationApplicationId), + Person(PersonId), +} + fn queries<'a>() -> Queries< - impl ReadFn<'a, RegistrationApplicationView, i32>, + impl ReadFn<'a, RegistrationApplicationView, ReadBy>, impl ListFn<'a, RegistrationApplicationView, RegistrationApplicationQuery>, > { let all_joins = |query: registration_application::BoxedQuery<'a, Pg>| { @@ -36,21 +42,28 @@ fn queries<'a>() -> Queries< )) }; - let read = move |mut conn: DbConn<'a>, registration_application_id: i32| async move { - all_joins( - registration_application::table - .find(registration_application_id) - .into_boxed(), - ) - .first::(&mut conn) - .await + let read = move |mut conn: DbConn<'a>, search: ReadBy| async move { + let mut query = all_joins(registration_application::table.into_boxed()); + + query = match search { + ReadBy::Id(id) => query.filter(registration_application::id.eq(id)), + ReadBy::Person(person_id) => query.filter(person::id.eq(person_id)), + }; + + query.first(&mut conn).await }; let list = move |mut conn: DbConn<'a>, options: RegistrationApplicationQuery| async move { let mut query = all_joins(registration_application::table.into_boxed()); + // If viewing all applications, order by newest, but if viewing unresolved only, show the oldest + // first (FIFO) if options.unread_only { - query = query.filter(registration_application::admin_id.is_null()) + query = query + .filter(registration_application::admin_id.is_null()) + .order_by(registration_application::published.asc()); + } else { + query = query.order_by(registration_application::published.desc()); } if options.verified_email_only { @@ -59,10 +72,7 @@ fn queries<'a>() -> Queries< let (limit, offset) = limit_and_offset(options.page, options.limit)?; - query = query - .limit(limit) - .offset(offset) - .order_by(registration_application::published.desc()); + query = query.limit(limit).offset(offset); query.load::(&mut conn).await }; @@ -71,13 +81,13 @@ fn queries<'a>() -> Queries< } impl RegistrationApplicationView { - pub async fn read( - pool: &mut DbPool<'_>, - registration_application_id: i32, - ) -> Result { - queries().read(pool, registration_application_id).await + pub async fn read(pool: &mut DbPool<'_>, id: RegistrationApplicationId) -> Result { + queries().read(pool, ReadBy::Id(id)).await } + pub async fn read_by_person(pool: &mut DbPool<'_>, person_id: PersonId) -> Result { + queries().read(pool, ReadBy::Person(person_id)).await + } /// Returns the current unread registration_application count pub async fn get_unread_count( pool: &mut DbPool<'_>, @@ -126,8 +136,6 @@ impl RegistrationApplicationQuery { #[cfg(test)] mod tests { - #![allow(clippy::unwrap_used)] - #![allow(clippy::indexing_slicing)] use crate::registration_application_view::{ RegistrationApplicationQuery, @@ -147,52 +155,34 @@ mod tests { traits::Crud, utils::build_db_pool_for_tests, }; + use lemmy_utils::error::LemmyResult; + use pretty_assertions::assert_eq; use serial_test::serial; #[tokio::test] #[serial] - async fn test_crud() { + async fn test_crud() -> LemmyResult<()> { let pool = &build_db_pool_for_tests().await; let pool = &mut pool.into(); - let inserted_instance = Instance::read_or_create(pool, "my_domain.tld".to_string()) - .await - .unwrap(); + let inserted_instance = Instance::read_or_create(pool, "my_domain.tld".to_string()).await?; - let timmy_person_form = PersonInsertForm::builder() 
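The report queries and this registration-application query now share one ordering rule: the full list is newest first, while the unresolved/unread queue is oldest first so it drains FIFO. A single-table sketch of that branch on a boxed query, assuming a hypothetical `report` table rather than the real schema:

use diesel::{pg::Pg, prelude::*};

diesel::table! {
    report (id) {
        id -> Int4,
        resolved -> Bool,
        published -> Timestamptz,
    }
}

fn ordered_reports(unresolved_only: bool) -> report::BoxedQuery<'static, Pg> {
    let mut query = report::table.into_boxed();
    if unresolved_only {
        // Oldest first, so moderators work through the queue in arrival order
        query = query
            .filter(report::resolved.eq(false))
            .order_by(report::published.asc());
    } else {
        // Newest first for the full history view
        query = query.order_by(report::published.desc());
    }
    query
}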
- .name("timmy_rav".into()) - .public_key("pubkey".to_string()) - .instance_id(inserted_instance.id) - .build(); + let timmy_person_form = PersonInsertForm::test_form(inserted_instance.id, "timmy_rav"); - let inserted_timmy_person = Person::create(pool, &timmy_person_form).await.unwrap(); + let inserted_timmy_person = Person::create(pool, &timmy_person_form).await?; - let timmy_local_user_form = LocalUserInsertForm::builder() - .person_id(inserted_timmy_person.id) - .password_encrypted("nada".to_string()) - .admin(Some(true)) - .build(); + let timmy_local_user_form = LocalUserInsertForm::test_form_admin(inserted_timmy_person.id); - let _inserted_timmy_local_user = LocalUser::create(pool, &timmy_local_user_form) - .await - .unwrap(); + let _inserted_timmy_local_user = + LocalUser::create(pool, &timmy_local_user_form, vec![]).await?; - let sara_person_form = PersonInsertForm::builder() - .name("sara_rav".into()) - .public_key("pubkey".to_string()) - .instance_id(inserted_instance.id) - .build(); + let sara_person_form = PersonInsertForm::test_form(inserted_instance.id, "sara_rav"); - let inserted_sara_person = Person::create(pool, &sara_person_form).await.unwrap(); + let inserted_sara_person = Person::create(pool, &sara_person_form).await?; - let sara_local_user_form = LocalUserInsertForm::builder() - .person_id(inserted_sara_person.id) - .password_encrypted("nada".to_string()) - .build(); + let sara_local_user_form = LocalUserInsertForm::test_form(inserted_sara_person.id); - let inserted_sara_local_user = LocalUser::create(pool, &sara_local_user_form) - .await - .unwrap(); + let inserted_sara_local_user = LocalUser::create(pool, &sara_local_user_form, vec![]).await?; // Sara creates an application let sara_app_form = RegistrationApplicationInsertForm { @@ -200,30 +190,17 @@ mod tests { answer: "LET ME IIIIINN".to_string(), }; - let sara_app = RegistrationApplication::create(pool, &sara_app_form) - .await - .unwrap(); + let sara_app = RegistrationApplication::create(pool, &sara_app_form).await?; - let read_sara_app_view = RegistrationApplicationView::read(pool, sara_app.id) - .await - .unwrap(); + let read_sara_app_view = RegistrationApplicationView::read(pool, sara_app.id).await?; - let jess_person_form = PersonInsertForm::builder() - .name("jess_rav".into()) - .public_key("pubkey".to_string()) - .instance_id(inserted_instance.id) - .build(); + let jess_person_form = PersonInsertForm::test_form(inserted_instance.id, "jess_rav"); - let inserted_jess_person = Person::create(pool, &jess_person_form).await.unwrap(); + let inserted_jess_person = Person::create(pool, &jess_person_form).await?; - let jess_local_user_form = LocalUserInsertForm::builder() - .person_id(inserted_jess_person.id) - .password_encrypted("nada".to_string()) - .build(); + let jess_local_user_form = LocalUserInsertForm::test_form(inserted_jess_person.id); - let inserted_jess_local_user = LocalUser::create(pool, &jess_local_user_form) - .await - .unwrap(); + let inserted_jess_local_user = LocalUser::create(pool, &jess_local_user_form, vec![]).await?; // Sara creates an application let jess_app_form = RegistrationApplicationInsertForm { @@ -231,13 +208,9 @@ mod tests { answer: "LET ME IIIIINN".to_string(), }; - let jess_app = RegistrationApplication::create(pool, &jess_app_form) - .await - .unwrap(); + let jess_app = RegistrationApplication::create(pool, &jess_app_form).await?; - let read_jess_app_view = RegistrationApplicationView::read(pool, jess_app.id) - .await - .unwrap(); + let read_jess_app_view = 
RegistrationApplicationView::read(pool, jess_app.id).await?; let mut expected_sara_app_view = RegistrationApplicationView { registration_application: sara_app.clone(), @@ -246,16 +219,15 @@ mod tests { person_id: inserted_sara_local_user.person_id, email: inserted_sara_local_user.email, show_nsfw: inserted_sara_local_user.show_nsfw, - auto_expand: inserted_sara_local_user.auto_expand, blur_nsfw: inserted_sara_local_user.blur_nsfw, theme: inserted_sara_local_user.theme, - default_sort_type: inserted_sara_local_user.default_sort_type, + default_post_sort_type: inserted_sara_local_user.default_post_sort_type, + default_comment_sort_type: inserted_sara_local_user.default_comment_sort_type, default_listing_type: inserted_sara_local_user.default_listing_type, interface_language: inserted_sara_local_user.interface_language, show_avatars: inserted_sara_local_user.show_avatars, send_notifications_to_email: inserted_sara_local_user.send_notifications_to_email, show_bot_accounts: inserted_sara_local_user.show_bot_accounts, - show_scores: inserted_sara_local_user.show_scores, show_read_posts: inserted_sara_local_user.show_read_posts, email_verified: inserted_sara_local_user.email_verified, accepted_application: inserted_sara_local_user.accepted_application, @@ -269,6 +241,7 @@ mod tests { enable_keyboard_navigation: inserted_sara_local_user.enable_keyboard_navigation, enable_animated_images: inserted_sara_local_user.enable_animated_images, enable_private_messages: inserted_sara_local_user.enable_private_messages, + collapse_bot_comments: inserted_sara_local_user.collapse_bot_comments, }, creator: Person { id: inserted_sara_person.id, @@ -286,7 +259,6 @@ mod tests { banner: None, updated: None, inbox_url: inserted_sara_person.inbox_url.clone(), - shared_inbox_url: None, matrix_user_id: None, instance_id: inserted_instance.id, private_key: inserted_sara_person.private_key, @@ -304,18 +276,15 @@ mod tests { ..Default::default() } .list(pool) - .await - .unwrap(); + .await?; assert_eq!( apps, - [read_jess_app_view.clone(), expected_sara_app_view.clone()] + [expected_sara_app_view.clone(), read_jess_app_view.clone()] ); // Make sure the counts are correct - let unread_count = RegistrationApplicationView::get_unread_count(pool, false) - .await - .unwrap(); + let unread_count = RegistrationApplicationView::get_unread_count(pool, false).await?; assert_eq!(unread_count, 2); // Approve the application @@ -324,9 +293,7 @@ mod tests { deny_reason: None, }; - RegistrationApplication::update(pool, sara_app.id, &approve_form) - .await - .unwrap(); + RegistrationApplication::update(pool, sara_app.id, &approve_form).await?; // Update the local_user row let approve_local_user_form = LocalUserUpdateForm { @@ -334,13 +301,10 @@ mod tests { ..Default::default() }; - LocalUser::update(pool, inserted_sara_local_user.id, &approve_local_user_form) - .await - .unwrap(); + LocalUser::update(pool, inserted_sara_local_user.id, &approve_local_user_form).await?; - let read_sara_app_view_after_approve = RegistrationApplicationView::read(pool, sara_app.id) - .await - .unwrap(); + let read_sara_app_view_after_approve = + RegistrationApplicationView::read(pool, sara_app.id).await?; // Make sure the columns changed expected_sara_app_view @@ -364,7 +328,6 @@ mod tests { banner: None, updated: None, inbox_url: inserted_timmy_person.inbox_url.clone(), - shared_inbox_url: None, matrix_user_id: None, instance_id: inserted_instance.id, private_key: inserted_timmy_person.private_key, @@ -380,28 +343,23 @@ mod tests { ..Default::default() } 
.list(pool) - .await - .unwrap(); + .await?; assert_eq!(apps_after_resolve, vec![read_jess_app_view]); // Make sure the counts are correct - let unread_count_after_approve = RegistrationApplicationView::get_unread_count(pool, false) - .await - .unwrap(); + let unread_count_after_approve = + RegistrationApplicationView::get_unread_count(pool, false).await?; assert_eq!(unread_count_after_approve, 1); // Make sure the not undenied_only has all the apps - let all_apps = RegistrationApplicationQuery::default() - .list(pool) - .await - .unwrap(); + let all_apps = RegistrationApplicationQuery::default().list(pool).await?; assert_eq!(all_apps.len(), 2); - Person::delete(pool, inserted_timmy_person.id) - .await - .unwrap(); - Person::delete(pool, inserted_sara_person.id).await.unwrap(); - Person::delete(pool, inserted_jess_person.id).await.unwrap(); - Instance::delete(pool, inserted_instance.id).await.unwrap(); + Person::delete(pool, inserted_timmy_person.id).await?; + Person::delete(pool, inserted_sara_person.id).await?; + Person::delete(pool, inserted_jess_person.id).await?; + Instance::delete(pool, inserted_instance.id).await?; + + Ok(()) } } diff --git a/crates/db_views/src/site_view.rs b/crates/db_views/src/site_view.rs index 17819fdd7..6014ad964 100644 --- a/crates/db_views/src/site_view.rs +++ b/crates/db_views/src/site_view.rs @@ -1,30 +1,32 @@ use crate::structs::SiteView; -use diesel::{result::Error, ExpressionMethods, JoinOnDsl, QueryDsl}; +use diesel::{ExpressionMethods, JoinOnDsl, OptionalExtension, QueryDsl}; use diesel_async::RunQueryDsl; use lemmy_db_schema::{ schema::{local_site, local_site_rate_limit, site, site_aggregates}, utils::{get_conn, DbPool}, }; +use lemmy_utils::{error::LemmyResult, LemmyErrorType}; impl SiteView { - pub async fn read_local(pool: &mut DbPool<'_>) -> Result { + pub async fn read_local(pool: &mut DbPool<'_>) -> LemmyResult { let conn = &mut get_conn(pool).await?; - let mut res = site::table - .inner_join(local_site::table) - .inner_join( - local_site_rate_limit::table.on(local_site::id.eq(local_site_rate_limit::local_site_id)), - ) - .inner_join(site_aggregates::table) - .select(( - site::all_columns, - local_site::all_columns, - local_site_rate_limit::all_columns, - site_aggregates::all_columns, - )) - .first::(conn) - .await?; - - res.site.private_key = None; - Ok(res) + Ok( + site::table + .inner_join(local_site::table) + .inner_join( + local_site_rate_limit::table.on(local_site::id.eq(local_site_rate_limit::local_site_id)), + ) + .inner_join(site_aggregates::table) + .select(( + site::all_columns, + local_site::all_columns, + local_site_rate_limit::all_columns, + site_aggregates::all_columns, + )) + .first(conn) + .await + .optional()? 
+ .ok_or(LemmyErrorType::LocalSiteNotSetup)?, + ) } } diff --git a/crates/db_views/src/structs.rs b/crates/db_views/src/structs.rs index 2f65bb78f..3c219d63f 100644 --- a/crates/db_views/src/structs.rs +++ b/crates/db_views/src/structs.rs @@ -8,9 +8,11 @@ use lemmy_db_schema::{ community::Community, custom_emoji::CustomEmoji, custom_emoji_keyword::CustomEmojiKeyword, + images::{ImageDetails, LocalImage}, local_site::LocalSite, local_site_rate_limit::LocalSiteRateLimit, local_user::LocalUser, + local_user_vote_display_mode::LocalUserVoteDisplayMode, person::Person, post::Post, post_report::PostReport, @@ -29,6 +31,7 @@ use ts_rs::TS; #[skip_serializing_none] #[derive(Debug, PartialEq, Serialize, Deserialize, Clone)] #[cfg_attr(feature = "full", derive(TS, Queryable))] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] #[cfg_attr(feature = "full", ts(export))] /// A comment report view. pub struct CommentReportView { @@ -40,6 +43,11 @@ pub struct CommentReportView { pub comment_creator: Person, pub counts: CommentAggregates, pub creator_banned_from_community: bool, + pub creator_is_moderator: bool, + pub creator_is_admin: bool, + pub creator_blocked: bool, + pub subscribed: SubscribedType, + pub saved: bool, pub my_vote: Option, pub resolver: Option, } @@ -47,6 +55,7 @@ pub struct CommentReportView { #[skip_serializing_none] #[derive(Debug, PartialEq, Serialize, Deserialize, Clone)] #[cfg_attr(feature = "full", derive(TS, Queryable))] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] #[cfg_attr(feature = "full", ts(export))] /// A comment view. pub struct CommentView { @@ -56,7 +65,9 @@ pub struct CommentView { pub community: Community, pub counts: CommentAggregates, pub creator_banned_from_community: bool, + pub banned_from_community: bool, pub creator_is_moderator: bool, + pub creator_is_admin: bool, pub subscribed: SubscribedType, pub saved: bool, pub creator_blocked: bool, @@ -65,10 +76,12 @@ pub struct CommentView { #[derive(Debug, Serialize, Deserialize, Clone)] #[cfg_attr(feature = "full", derive(TS, Queryable))] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] #[cfg_attr(feature = "full", ts(export))] /// A local user view. pub struct LocalUserView { pub local_user: LocalUser, + pub local_user_vote_display_mode: LocalUserVoteDisplayMode, pub person: Person, pub counts: PersonAggregates, } @@ -76,6 +89,7 @@ pub struct LocalUserView { #[skip_serializing_none] #[derive(Debug, PartialEq, Serialize, Deserialize, Clone)] #[cfg_attr(feature = "full", derive(TS, Queryable))] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] #[cfg_attr(feature = "full", ts(export))] /// A post report view. pub struct PostReportView { @@ -85,34 +99,48 @@ pub struct PostReportView { pub creator: Person, pub post_creator: Person, pub creator_banned_from_community: bool, + pub creator_is_moderator: bool, + pub creator_is_admin: bool, + pub subscribed: SubscribedType, + pub saved: bool, + pub read: bool, + pub hidden: bool, + pub creator_blocked: bool, pub my_vote: Option, + pub unread_comments: i64, pub counts: PostAggregates, pub resolver: Option, } -/// currently this is just a wrapper around post id, but should be seen as opaque from the client's perspective -/// stringified since we might want to use arbitrary info later, with a P prepended to prevent ossification -/// (api users love to make assumptions (e.g. 
parse stuff that looks like numbers as numbers) about apis that aren't part of the spec -#[derive(Serialize, Deserialize, Debug, Clone)] +/// currently this is just a wrapper around post id, but should be seen as opaque from the client's +/// perspective. stringified since we might want to use arbitrary info later, with a P prepended to +/// prevent ossification (api users love to make assumptions (e.g. parse stuff that looks like +/// numbers as numbers) about apis that aren't part of the spec +#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, Hash)] #[cfg_attr(feature = "full", derive(ts_rs::TS))] #[cfg_attr(feature = "full", ts(export))] -pub struct PaginationCursor(pub(crate) String); +pub struct PaginationCursor(pub String); #[skip_serializing_none] #[derive(Debug, PartialEq, Serialize, Deserialize, Clone)] #[cfg_attr(feature = "full", derive(TS, Queryable))] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] #[cfg_attr(feature = "full", ts(export))] /// A post view. pub struct PostView { pub post: Post, pub creator: Person, pub community: Community, + pub image_details: Option, pub creator_banned_from_community: bool, + pub banned_from_community: bool, pub creator_is_moderator: bool, + pub creator_is_admin: bool, pub counts: PostAggregates, pub subscribed: SubscribedType, pub saved: bool, pub read: bool, + pub hidden: bool, pub creator_blocked: bool, pub my_vote: Option, pub unread_comments: i64, @@ -120,6 +148,7 @@ pub struct PostView { #[derive(Debug, PartialEq, Eq, Serialize, Deserialize, Clone)] #[cfg_attr(feature = "full", derive(TS, Queryable))] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] #[cfg_attr(feature = "full", ts(export))] /// A private message view. pub struct PrivateMessageView { @@ -131,6 +160,7 @@ pub struct PrivateMessageView { #[skip_serializing_none] #[derive(Debug, PartialEq, Eq, Serialize, Deserialize, Clone)] #[cfg_attr(feature = "full", derive(TS, Queryable))] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] #[cfg_attr(feature = "full", ts(export))] /// A private message report view. pub struct PrivateMessageReportView { @@ -144,6 +174,7 @@ pub struct PrivateMessageReportView { #[skip_serializing_none] #[derive(Debug, PartialEq, Eq, Serialize, Deserialize, Clone)] #[cfg_attr(feature = "full", derive(TS, Queryable))] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] #[cfg_attr(feature = "full", ts(export))] /// A registration application view. pub struct RegistrationApplicationView { @@ -155,6 +186,7 @@ pub struct RegistrationApplicationView { #[derive(Debug, Serialize, Deserialize, Clone)] #[cfg_attr(feature = "full", derive(TS, Queryable))] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] #[cfg_attr(feature = "full", ts(export))] /// A site view. pub struct SiteView { @@ -165,10 +197,34 @@ pub struct SiteView { } #[derive(Debug, Serialize, Deserialize, Clone)] -#[cfg_attr(feature = "full", derive(TS))] +#[cfg_attr(feature = "full", derive(TS, Queryable))] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] #[cfg_attr(feature = "full", ts(export))] /// A custom emoji view. 
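The PaginationCursor doc comment above describes a convention rather than an API: the cursor is a stringified id with a "P" prefix precisely so clients treat it as opaque instead of parsing it as a number. A purely hypothetical illustration of that convention; the actual encoding is defined elsewhere in the crate and may differ:

// Illustrative only; the field name and encoding are assumptions.
struct PaginationCursor(String);

fn encode(post_id: i32) -> PaginationCursor {
    // A "P" prefix plus a non-decimal rendering discourages clients from
    // treating the cursor as a plain number.
    PaginationCursor(format!("P{post_id:x}"))
}

fn decode(cursor: &PaginationCursor) -> Option<i32> {
    cursor
        .0
        .strip_prefix('P')
        .and_then(|hex| i32::from_str_radix(hex, 16).ok())
}

fn main() {
    let cursor = encode(1234);
    assert_eq!(cursor.0, "P4d2");
    assert_eq!(decode(&cursor), Some(1234));
}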
pub struct CustomEmojiView { pub custom_emoji: CustomEmoji, pub keywords: Vec, } + +#[skip_serializing_none] +#[derive(Debug, PartialEq, Eq, Serialize, Deserialize, Clone)] +#[cfg_attr(feature = "full", derive(TS, Queryable))] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] +#[cfg_attr(feature = "full", ts(export))] +/// A vote view for checking a post or comments votes. +pub struct VoteView { + pub creator: Person, + pub creator_banned_from_community: bool, + pub score: i16, +} + +#[skip_serializing_none] +#[derive(Debug, Serialize, Deserialize, Clone)] +#[cfg_attr(feature = "full", derive(TS, Queryable))] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] +#[cfg_attr(feature = "full", ts(export))] +/// A local image view. +pub struct LocalImageView { + pub local_image: LocalImage, + pub person: Person, +} diff --git a/crates/db_views/src/vote_view.rs b/crates/db_views/src/vote_view.rs new file mode 100644 index 000000000..0fd64deca --- /dev/null +++ b/crates/db_views/src/vote_view.rs @@ -0,0 +1,236 @@ +use crate::structs::VoteView; +use diesel::{ + result::Error, + BoolExpressionMethods, + ExpressionMethods, + JoinOnDsl, + NullableExpressionMethods, + QueryDsl, +}; +use diesel_async::RunQueryDsl; +use lemmy_db_schema::{ + newtypes::{CommentId, PostId}, + schema::{comment, comment_like, community_person_ban, person, post, post_like}, + utils::{get_conn, limit_and_offset, DbPool}, +}; + +impl VoteView { + pub async fn list_for_post( + pool: &mut DbPool<'_>, + post_id: PostId, + page: Option, + limit: Option, + ) -> Result, Error> { + let conn = &mut get_conn(pool).await?; + let (limit, offset) = limit_and_offset(page, limit)?; + + post_like::table + .inner_join(person::table) + .inner_join(post::table) + // Join to community_person_ban to get creator_banned_from_community + .left_join( + community_person_ban::table.on( + post::community_id + .eq(community_person_ban::community_id) + .and(community_person_ban::person_id.eq(post_like::person_id)), + ), + ) + .filter(post_like::post_id.eq(post_id)) + .select(( + person::all_columns, + community_person_ban::community_id.nullable().is_not_null(), + post_like::score, + )) + .order_by(post_like::score) + .limit(limit) + .offset(offset) + .load::(conn) + .await + } + + pub async fn list_for_comment( + pool: &mut DbPool<'_>, + comment_id: CommentId, + page: Option, + limit: Option, + ) -> Result, Error> { + let conn = &mut get_conn(pool).await?; + let (limit, offset) = limit_and_offset(page, limit)?; + + comment_like::table + .inner_join(person::table) + .inner_join(comment::table) + .inner_join(post::table.on(comment::post_id.eq(post::id))) + // Join to community_person_ban to get creator_banned_from_community + .left_join( + community_person_ban::table.on( + post::community_id + .eq(community_person_ban::community_id) + .and(community_person_ban::person_id.eq(comment_like::person_id)), + ), + ) + .filter(comment_like::comment_id.eq(comment_id)) + .select(( + person::all_columns, + community_person_ban::community_id.nullable().is_not_null(), + comment_like::score, + )) + .order_by(comment_like::score) + .limit(limit) + .offset(offset) + .load::(conn) + .await + } +} + +#[cfg(test)] +mod tests { + + use crate::structs::VoteView; + use lemmy_db_schema::{ + source::{ + comment::{Comment, CommentInsertForm, CommentLike, CommentLikeForm}, + community::{Community, CommunityInsertForm, CommunityPersonBan, CommunityPersonBanForm}, + instance::Instance, + person::{Person, PersonInsertForm}, + post::{Post, 
PostInsertForm, PostLike, PostLikeForm}, + }, + traits::{Bannable, Crud, Likeable}, + utils::build_db_pool_for_tests, + }; + use lemmy_utils::error::LemmyResult; + use pretty_assertions::assert_eq; + use serial_test::serial; + + #[tokio::test] + #[serial] + async fn post_and_comment_vote_views() -> LemmyResult<()> { + let pool = &build_db_pool_for_tests().await; + let pool = &mut pool.into(); + + let inserted_instance = Instance::read_or_create(pool, "my_domain.tld".to_string()).await?; + + let new_person = PersonInsertForm::test_form(inserted_instance.id, "timmy_vv"); + + let inserted_timmy = Person::create(pool, &new_person).await?; + + let new_person_2 = PersonInsertForm::test_form(inserted_instance.id, "sara_vv"); + + let inserted_sara = Person::create(pool, &new_person_2).await?; + + let new_community = CommunityInsertForm::new( + inserted_instance.id, + "test community vv".to_string(), + "nada".to_owned(), + "pubkey".to_string(), + ); + let inserted_community = Community::create(pool, &new_community).await?; + + let new_post = PostInsertForm::new( + "A test post vv".into(), + inserted_timmy.id, + inserted_community.id, + ); + let inserted_post = Post::create(pool, &new_post).await?; + + let comment_form = CommentInsertForm::new( + inserted_timmy.id, + inserted_post.id, + "A test comment vv".into(), + ); + let inserted_comment = Comment::create(pool, &comment_form, None).await?; + + // Timmy upvotes his own post + let timmy_post_vote_form = PostLikeForm { + post_id: inserted_post.id, + person_id: inserted_timmy.id, + score: 1, + }; + PostLike::like(pool, &timmy_post_vote_form).await?; + + // Sara downvotes timmy's post + let sara_post_vote_form = PostLikeForm { + post_id: inserted_post.id, + person_id: inserted_sara.id, + score: -1, + }; + PostLike::like(pool, &sara_post_vote_form).await?; + + let expected_post_vote_views = [ + VoteView { + creator: inserted_sara.clone(), + creator_banned_from_community: false, + score: -1, + }, + VoteView { + creator: inserted_timmy.clone(), + creator_banned_from_community: false, + score: 1, + }, + ]; + + let read_post_vote_views = VoteView::list_for_post(pool, inserted_post.id, None, None).await?; + assert_eq!(read_post_vote_views, expected_post_vote_views); + + // Timothy votes down his own comment + let timmy_comment_vote_form = CommentLikeForm { + comment_id: inserted_comment.id, + person_id: inserted_timmy.id, + score: -1, + }; + CommentLike::like(pool, &timmy_comment_vote_form).await?; + + // Sara upvotes timmy's comment + let sara_comment_vote_form = CommentLikeForm { + comment_id: inserted_comment.id, + person_id: inserted_sara.id, + score: 1, + }; + CommentLike::like(pool, &sara_comment_vote_form).await?; + + let expected_comment_vote_views = [ + VoteView { + creator: inserted_timmy.clone(), + creator_banned_from_community: false, + score: -1, + }, + VoteView { + creator: inserted_sara.clone(), + creator_banned_from_community: false, + score: 1, + }, + ]; + + let read_comment_vote_views = + VoteView::list_for_comment(pool, inserted_comment.id, None, None).await?; + assert_eq!(read_comment_vote_views, expected_comment_vote_views); + + // Ban timmy from that community + let ban_timmy_form = CommunityPersonBanForm { + community_id: inserted_community.id, + person_id: inserted_timmy.id, + expires: None, + }; + CommunityPersonBan::ban(pool, &ban_timmy_form).await?; + + // Make sure creator_banned_from_community is true + let read_comment_vote_views_after_ban = + VoteView::list_for_comment(pool, inserted_comment.id, None, None).await?; + + 
assert!(read_comment_vote_views_after_ban + .first() + .is_some_and(|c| c.creator_banned_from_community)); + + let read_post_vote_views_after_ban = + VoteView::list_for_post(pool, inserted_post.id, None, None).await?; + + assert!(read_post_vote_views_after_ban + .get(1) + .is_some_and(|p| p.creator_banned_from_community)); + + // Cleanup + Instance::delete(pool, inserted_instance.id).await?; + + Ok(()) + } +} diff --git a/crates/db_views_actor/Cargo.toml b/crates/db_views_actor/Cargo.toml index 93ce0f5b3..d623959d5 100644 --- a/crates/db_views_actor/Cargo.toml +++ b/crates/db_views_actor/Cargo.toml @@ -11,8 +11,17 @@ repository.workspace = true [lib] doctest = false +[lints] +workspace = true + [features] -full = ["lemmy_db_schema/full", "diesel", "diesel-async", "ts-rs"] +full = [ + "lemmy_db_schema/full", + "lemmy_utils/full", + "diesel", + "diesel-async", + "ts-rs", +] [dependencies] lemmy_db_schema = { workspace = true } @@ -30,8 +39,12 @@ serde_with = { workspace = true } ts-rs = { workspace = true, optional = true } chrono.workspace = true strum = { workspace = true } -strum_macros = { workspace = true } +lemmy_utils = { workspace = true, optional = true } [dev-dependencies] serial_test = { workspace = true } tokio = { workspace = true } +pretty_assertions = { workspace = true } +url.workspace = true +lemmy_db_views.workspace = true +lemmy_utils.workspace = true diff --git a/crates/db_views_actor/src/comment_reply_view.rs b/crates/db_views_actor/src/comment_reply_view.rs index a97ad96fb..1b657866a 100644 --- a/crates/db_views_actor/src/comment_reply_view.rs +++ b/crates/db_views_actor/src/comment_reply_view.rs @@ -1,9 +1,13 @@ use crate::structs::CommentReplyView; use diesel::{ + dsl::{exists, not}, pg::Pg, result::Error, + sql_types, BoolExpressionMethods, + BoxableExpression, ExpressionMethods, + IntoSql, JoinOnDsl, NullableExpressionMethods, QueryDsl, @@ -20,12 +24,14 @@ use lemmy_db_schema::{ comment_saved, community, community_follower, + community_moderator, community_person_ban, + local_user, person, person_block, post, }, - source::community::CommunityFollower, + source::local_user::LocalUser, utils::{get_conn, limit_and_offset, DbConn, DbPool, ListFn, Queries, ReadFn}, CommentSortType, }; @@ -34,9 +40,121 @@ fn queries<'a>() -> Queries< impl ReadFn<'a, CommentReplyView, (CommentReplyId, Option)>, impl ListFn<'a, CommentReplyView, CommentReplyQuery>, > { - let all_joins = |query: comment_reply::BoxedQuery<'a, Pg>, my_person_id: Option| { - // The left join below will return None in this case - let person_id_join = my_person_id.unwrap_or(PersonId(-1)); + let is_creator_banned_from_community = exists( + community_person_ban::table.filter( + community::id + .eq(community_person_ban::community_id) + .and(community_person_ban::person_id.eq(comment::creator_id)), + ), + ); + + let is_local_user_banned_from_community = |person_id| { + exists( + community_person_ban::table.filter( + community::id + .eq(community_person_ban::community_id) + .and(community_person_ban::person_id.eq(person_id)), + ), + ) + }; + + let is_saved = |person_id| { + exists( + comment_saved::table.filter( + comment::id + .eq(comment_saved::comment_id) + .and(comment_saved::person_id.eq(person_id)), + ), + ) + }; + + let is_community_followed = |person_id| { + community_follower::table + .filter( + post::community_id + .eq(community_follower::community_id) + .and(community_follower::person_id.eq(person_id)), + ) + .select(community_follower::pending.nullable()) + .single_value() + }; + + let is_creator_blocked = 
|person_id| { + exists( + person_block::table.filter( + comment::creator_id + .eq(person_block::target_id) + .and(person_block::person_id.eq(person_id)), + ), + ) + }; + + let score = |person_id| { + comment_like::table + .filter( + comment::id + .eq(comment_like::comment_id) + .and(comment_like::person_id.eq(person_id)), + ) + .select(comment_like::score.nullable()) + .single_value() + }; + + let creator_is_moderator = exists( + community_moderator::table.filter( + community::id + .eq(community_moderator::community_id) + .and(community_moderator::person_id.eq(comment::creator_id)), + ), + ); + + let creator_is_admin = exists( + local_user::table.filter( + comment::creator_id + .eq(local_user::person_id) + .and(local_user::admin.eq(true)), + ), + ); + + let all_joins = move |query: comment_reply::BoxedQuery<'a, Pg>, + my_person_id: Option| { + let is_local_user_banned_from_community_selection: Box< + dyn BoxableExpression<_, Pg, SqlType = sql_types::Bool>, + > = if let Some(person_id) = my_person_id { + Box::new(is_local_user_banned_from_community(person_id)) + } else { + Box::new(false.into_sql::()) + }; + + let score_selection: Box< + dyn BoxableExpression<_, Pg, SqlType = sql_types::Nullable>, + > = if let Some(person_id) = my_person_id { + Box::new(score(person_id)) + } else { + Box::new(None::.into_sql::>()) + }; + + let subscribed_type_selection: Box< + dyn BoxableExpression<_, Pg, SqlType = sql_types::Nullable>, + > = if let Some(person_id) = my_person_id { + Box::new(is_community_followed(person_id)) + } else { + Box::new(None::.into_sql::>()) + }; + + let is_saved_selection: Box> = + if let Some(person_id) = my_person_id { + Box::new(is_saved(person_id)) + } else { + Box::new(false.into_sql::()) + }; + + let is_creator_blocked_selection: Box> = + if let Some(person_id) = my_person_id { + Box::new(is_creator_blocked(person_id)) + } else { + Box::new(false.into_sql::()) + }; query .inner_join(comment::table) @@ -45,41 +163,6 @@ fn queries<'a>() -> Queries< .inner_join(community::table.on(post::community_id.eq(community::id))) .inner_join(aliases::person1) .inner_join(comment_aggregates::table.on(comment::id.eq(comment_aggregates::comment_id))) - .left_join( - community_person_ban::table.on( - community::id - .eq(community_person_ban::community_id) - .and(community_person_ban::person_id.eq(comment::creator_id)), - ), - ) - .left_join( - community_follower::table.on( - post::community_id - .eq(community_follower::community_id) - .and(community_follower::person_id.eq(person_id_join)), - ), - ) - .left_join( - comment_saved::table.on( - comment::id - .eq(comment_saved::comment_id) - .and(comment_saved::person_id.eq(person_id_join)), - ), - ) - .left_join( - person_block::table.on( - comment::creator_id - .eq(person_block::target_id) - .and(person_block::person_id.eq(person_id_join)), - ), - ) - .left_join( - comment_like::table.on( - comment::id - .eq(comment_like::comment_id) - .and(comment_like::person_id.eq(person_id_join)), - ), - ) .select(( comment_reply::all_columns, comment::all_columns, @@ -88,11 +171,14 @@ fn queries<'a>() -> Queries< community::all_columns, aliases::person1.fields(person::all_columns), comment_aggregates::all_columns, - community_person_ban::id.nullable().is_not_null(), - CommunityFollower::select_subscribed_type(), - comment_saved::id.nullable().is_not_null(), - person_block::id.nullable().is_not_null(), - comment_like::score.nullable(), + is_creator_banned_from_community, + is_local_user_banned_from_community_selection, + creator_is_moderator, + 
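The rewritten all_joins above replaces per-viewer left joins with correlated exists() subqueries, and boxes each viewer-dependent selection so an anonymous request falls back to a constant. A reduced sketch of that pattern, assuming a toy two-table schema instead of the real joined query source:

use diesel::{
    dsl::exists,
    pg::Pg,
    sql_types,
    BoolExpressionMethods,
    BoxableExpression,
    ExpressionMethods,
    IntoSql,
    PgConnection,
    QueryDsl,
    QueryResult,
    RunQueryDsl,
};

diesel::table! {
    comment (id) {
        id -> Int4,
        creator_id -> Int4,
    }
}

diesel::table! {
    person_block (person_id, target_id) {
        person_id -> Int4,
        target_id -> Int4,
    }
}

diesel::allow_tables_to_appear_in_same_query!(comment, person_block);

// Either a real EXISTS subquery (viewer known) or the constant FALSE
// (anonymous viewer), boxed to one common expression type.
fn is_creator_blocked(
    my_person_id: Option<i32>,
) -> Box<dyn BoxableExpression<comment::table, Pg, SqlType = sql_types::Bool>> {
    if let Some(person_id) = my_person_id {
        Box::new(exists(person_block::table.filter(
            comment::creator_id
                .eq(person_block::target_id)
                .and(person_block::person_id.eq(person_id)),
        )))
    } else {
        Box::new(false.into_sql::<sql_types::Bool>())
    }
}

// Selecting the boxed expression alongside a column yields a plain bool flag
// per row, with no join against person_block in the outer query.
fn creator_blocked_flags(
    conn: &mut PgConnection,
    my_person_id: Option<i32>,
) -> QueryResult<Vec<(i32, bool)>> {
    comment::table
        .select((comment::id, is_creator_blocked(my_person_id)))
        .load(conn)
}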
creator_is_admin, + subscribed_type_selection, + is_saved_selection, + is_creator_blocked_selection, + score_selection, )) }; @@ -103,11 +189,13 @@ fn queries<'a>() -> Queries< comment_reply::table.find(comment_reply_id).into_boxed(), my_person_id, ) - .first::(&mut conn) + .first(&mut conn) .await }; let list = move |mut conn: DbConn<'a>, options: CommentReplyQuery| async move { + // These filters need to be kept in sync with the filters in + // CommentReplyView::get_unread_replies() let mut query = all_joins(comment_reply::table.into_boxed(), options.my_person_id); if let Some(recipient_id) = options.recipient_id { @@ -119,7 +207,7 @@ fn queries<'a>() -> Queries< } if !options.show_bot_accounts { - query = query.filter(person::bot_account.eq(false)); + query = query.filter(not(person::bot_account)); }; query = match options.sort.unwrap_or(CommentSortType::New) { @@ -132,6 +220,11 @@ fn queries<'a>() -> Queries< CommentSortType::Top => query.order_by(comment_aggregates::score.desc()), }; + // Don't show replies from blocked persons + if let Some(my_person_id) = options.my_person_id { + query = query.filter(not(is_creator_blocked(my_person_id))); + } + let (limit, offset) = limit_and_offset(options.page, options.limit)?; query @@ -156,15 +249,33 @@ impl CommentReplyView { /// Gets the number of unread replies pub async fn get_unread_replies( pool: &mut DbPool<'_>, - my_person_id: PersonId, + local_user: &LocalUser, ) -> Result { use diesel::dsl::count; let conn = &mut get_conn(pool).await?; - comment_reply::table + let mut query = comment_reply::table .inner_join(comment::table) - .filter(comment_reply::recipient_id.eq(my_person_id)) + .left_join( + person_block::table.on( + comment::creator_id + .eq(person_block::target_id) + .and(person_block::person_id.eq(local_user.person_id)), + ), + ) + .inner_join(person::table.on(comment::creator_id.eq(person::id))) + .into_boxed(); + + // These filters need to be kept in sync with the filters in queries().list() + if !local_user.show_bot_accounts { + query = query.filter(not(person::bot_account)); + } + + query + // Don't count replies from blocked users + .filter(person_block::person_id.is_null()) + .filter(comment_reply::recipient_id.eq(local_user.person_id)) .filter(comment_reply::read.eq(false)) .filter(comment::deleted.eq(false)) .filter(comment::removed.eq(false)) @@ -174,7 +285,7 @@ impl CommentReplyView { } } -#[derive(Default)] +#[derive(Default, Clone)] pub struct CommentReplyQuery { pub my_person_id: Option, pub recipient_id: Option, @@ -190,3 +301,163 @@ impl CommentReplyQuery { queries().list(pool, self).await } } + +#[cfg(test)] +mod tests { + + use crate::{comment_reply_view::CommentReplyQuery, structs::CommentReplyView}; + use lemmy_db_schema::{ + source::{ + comment::{Comment, CommentInsertForm}, + comment_reply::{CommentReply, CommentReplyInsertForm, CommentReplyUpdateForm}, + community::{Community, CommunityInsertForm}, + instance::Instance, + local_user::{LocalUser, LocalUserInsertForm, LocalUserUpdateForm}, + person::{Person, PersonInsertForm, PersonUpdateForm}, + person_block::{PersonBlock, PersonBlockForm}, + post::{Post, PostInsertForm}, + }, + traits::{Blockable, Crud}, + utils::build_db_pool_for_tests, + }; + use lemmy_db_views::structs::LocalUserView; + use lemmy_utils::error::LemmyResult; + use pretty_assertions::assert_eq; + use serial_test::serial; + + #[tokio::test] + #[serial] + async fn test_crud() -> LemmyResult<()> { + let pool = &build_db_pool_for_tests().await; + let pool = &mut pool.into(); + + let 
inserted_instance = Instance::read_or_create(pool, "my_domain.tld".to_string()).await?; + + let terry_form = PersonInsertForm::test_form(inserted_instance.id, "terrylake"); + let inserted_terry = Person::create(pool, &terry_form).await?; + + let recipient_form = PersonInsertForm { + local: Some(true), + ..PersonInsertForm::test_form(inserted_instance.id, "terrylakes recipient") + }; + + let inserted_recipient = Person::create(pool, &recipient_form).await?; + let recipient_id = inserted_recipient.id; + + let recipient_local_user = + LocalUser::create(pool, &LocalUserInsertForm::test_form(recipient_id), vec![]).await?; + + let new_community = CommunityInsertForm::new( + inserted_instance.id, + "test community lake".to_string(), + "nada".to_owned(), + "pubkey".to_string(), + ); + let inserted_community = Community::create(pool, &new_community).await?; + + let new_post = PostInsertForm::new( + "A test post".into(), + inserted_terry.id, + inserted_community.id, + ); + let inserted_post = Post::create(pool, &new_post).await?; + + let comment_form = + CommentInsertForm::new(inserted_terry.id, inserted_post.id, "A test comment".into()); + let inserted_comment = Comment::create(pool, &comment_form, None).await?; + + let comment_reply_form = CommentReplyInsertForm { + recipient_id: inserted_recipient.id, + comment_id: inserted_comment.id, + read: None, + }; + + let inserted_reply = CommentReply::create(pool, &comment_reply_form).await?; + + let expected_reply = CommentReply { + id: inserted_reply.id, + recipient_id: inserted_reply.recipient_id, + comment_id: inserted_reply.comment_id, + read: false, + published: inserted_reply.published, + }; + + let read_reply = CommentReply::read(pool, inserted_reply.id).await?; + + let comment_reply_update_form = CommentReplyUpdateForm { read: Some(false) }; + let updated_reply = + CommentReply::update(pool, inserted_reply.id, &comment_reply_update_form).await?; + + // Test to make sure counts and blocks work correctly + let unread_replies = CommentReplyView::get_unread_replies(pool, &recipient_local_user).await?; + + let query = CommentReplyQuery { + recipient_id: Some(recipient_id), + my_person_id: Some(recipient_id), + sort: None, + unread_only: false, + show_bot_accounts: true, + page: None, + limit: None, + }; + let replies = query.clone().list(pool).await?; + assert_eq!(1, unread_replies); + assert_eq!(1, replies.len()); + + // Block the person, and make sure these counts are now empty + let block_form = PersonBlockForm { + person_id: recipient_id, + target_id: inserted_terry.id, + }; + PersonBlock::block(pool, &block_form).await?; + + let unread_replies_after_block = + CommentReplyView::get_unread_replies(pool, &recipient_local_user).await?; + let replies_after_block = query.clone().list(pool).await?; + assert_eq!(0, unread_replies_after_block); + assert_eq!(0, replies_after_block.len()); + + // Unblock user so we can reuse the same person + PersonBlock::unblock(pool, &block_form).await?; + + // Turn Terry into a bot account + let person_update_form = PersonUpdateForm { + bot_account: Some(true), + ..Default::default() + }; + Person::update(pool, inserted_terry.id, &person_update_form).await?; + + let recipient_local_user_update_form = LocalUserUpdateForm { + show_bot_accounts: Some(false), + ..Default::default() + }; + LocalUser::update( + pool, + recipient_local_user.id, + &recipient_local_user_update_form, + ) + .await?; + let recipient_local_user_view = LocalUserView::read(pool, recipient_local_user.id).await?; + + let unread_replies_after_hide_bots = 
+ CommentReplyView::get_unread_replies(pool, &recipient_local_user_view.local_user).await?; + + let mut query_without_bots = query.clone(); + query_without_bots.show_bot_accounts = false; + let replies_after_hide_bots = query_without_bots.list(pool).await?; + assert_eq!(0, unread_replies_after_hide_bots); + assert_eq!(0, replies_after_hide_bots.len()); + + Comment::delete(pool, inserted_comment.id).await?; + Post::delete(pool, inserted_post.id).await?; + Community::delete(pool, inserted_community.id).await?; + Person::delete(pool, inserted_terry.id).await?; + Person::delete(pool, inserted_recipient.id).await?; + Instance::delete(pool, inserted_instance.id).await?; + + assert_eq!(expected_reply, read_reply); + assert_eq!(expected_reply, inserted_reply); + assert_eq!(expected_reply, updated_reply); + Ok(()) + } +} diff --git a/crates/db_views_actor/src/community_block_view.rs b/crates/db_views_actor/src/community_block_view.rs deleted file mode 100644 index c7d3d1836..000000000 --- a/crates/db_views_actor/src/community_block_view.rs +++ /dev/null @@ -1,24 +0,0 @@ -use crate::structs::CommunityBlockView; -use diesel::{result::Error, ExpressionMethods, QueryDsl}; -use diesel_async::RunQueryDsl; -use lemmy_db_schema::{ - newtypes::PersonId, - schema::{community, community_block, person}, - utils::{get_conn, DbPool}, -}; - -impl CommunityBlockView { - pub async fn for_person(pool: &mut DbPool<'_>, person_id: PersonId) -> Result, Error> { - let conn = &mut get_conn(pool).await?; - community_block::table - .inner_join(person::table) - .inner_join(community::table) - .select((person::all_columns, community::all_columns)) - .filter(community_block::person_id.eq(person_id)) - .filter(community::deleted.eq(false)) - .filter(community::removed.eq(false)) - .order_by(community_block::published) - .load::(conn) - .await - } -} diff --git a/crates/db_views_actor/src/community_follower_view.rs b/crates/db_views_actor/src/community_follower_view.rs index b2eeda29e..92889d12d 100644 --- a/crates/db_views_actor/src/community_follower_view.rs +++ b/crates/db_views_actor/src/community_follower_view.rs @@ -10,11 +10,12 @@ use diesel_async::RunQueryDsl; use lemmy_db_schema::{ newtypes::{CommunityId, DbUrl, InstanceId, PersonId}, schema::{community, community_follower, person}, - utils::{functions::coalesce, get_conn, DbPool}, + utils::{get_conn, DbPool}, }; impl CommunityFollowerView { - /// return a list of local community ids and remote inboxes that at least one user of the given instance has followed + /// return a list of local community ids and remote inboxes that at least one user of the given + /// instance has followed pub async fn get_instance_followed_community_inboxes( pool: &mut DbPool<'_>, instance_id: InstanceId, @@ -22,8 +23,9 @@ impl CommunityFollowerView { ) -> Result, Error> { let conn = &mut get_conn(pool).await?; // In most cases this will fetch the same url many times (the shared inbox url) - // PG will only send a single copy to rust, but it has to scan through all follower rows (same as it was before). - // So on the PG side it would be possible to optimize this further by adding e.g. a new table community_followed_instances (community_id, instance_id) + // PG will only send a single copy to rust, but it has to scan through all follower rows (same + // as it was before). So on the PG side it would be possible to optimize this further by + // adding e.g. 
a new table community_followed_instances (community_id, instance_id) // that would work for all instances that support fully shared inboxes. // It would be a bit more complicated though to keep it in sync. @@ -31,13 +33,11 @@ impl CommunityFollowerView { .inner_join(community::table) .inner_join(person::table) .filter(person::instance_id.eq(instance_id)) - .filter(community::local) // this should be a no-op since community_followers table only has local-person+remote-community or remote-person+local-community + .filter(community::local) // this should be a no-op since community_followers table only has + // local-person+remote-community or remote-person+local-community .filter(not(person::local)) .filter(community_follower::published.gt(published_since.naive_utc())) - .select(( - community::id, - coalesce(person::shared_inbox_url, person::inbox_url), - )) + .select((community::id, person::inbox_url)) .distinct() // only need each community_id, inbox combination once .load::<(CommunityId, DbUrl)>(conn) .await @@ -51,7 +51,7 @@ impl CommunityFollowerView { .filter(community_follower::community_id.eq(community_id)) .filter(not(person::local)) .inner_join(person::table) - .select(coalesce(person::shared_inbox_url, person::inbox_url)) + .select(person::inbox_url) .distinct() .load::(conn) .await?; diff --git a/crates/db_views_actor/src/community_moderator_view.rs b/crates/db_views_actor/src/community_moderator_view.rs index 63e711225..ebcdcbd25 100644 --- a/crates/db_views_actor/src/community_moderator_view.rs +++ b/crates/db_views_actor/src/community_moderator_view.rs @@ -2,17 +2,20 @@ use crate::structs::CommunityModeratorView; use diesel::{dsl::exists, result::Error, select, ExpressionMethods, QueryDsl}; use diesel_async::RunQueryDsl; use lemmy_db_schema::{ + impls::local_user::LocalUserOptionHelper, newtypes::{CommunityId, PersonId}, schema::{community, community_moderator, person}, + source::local_user::LocalUser, utils::{get_conn, DbPool}, }; +use lemmy_utils::{error::LemmyResult, LemmyErrorType}; impl CommunityModeratorView { - pub async fn is_community_moderator( + pub async fn check_is_community_moderator( pool: &mut DbPool<'_>, find_community_id: CommunityId, find_person_id: PersonId, - ) -> Result { + ) -> LemmyResult<()> { use lemmy_db_schema::schema::community_moderator::dsl::{ community_id, community_moderator, @@ -25,8 +28,26 @@ impl CommunityModeratorView { .filter(person_id.eq(find_person_id)), )) .get_result::(conn) - .await + .await? + .then_some(()) + .ok_or(LemmyErrorType::NotAModerator.into()) } + + pub(crate) async fn is_community_moderator_of_any( + pool: &mut DbPool<'_>, + find_person_id: PersonId, + ) -> LemmyResult<()> { + use lemmy_db_schema::schema::community_moderator::dsl::{community_moderator, person_id}; + let conn = &mut get_conn(pool).await?; + select(exists( + community_moderator.filter(person_id.eq(find_person_id)), + )) + .get_result::(conn) + .await? 
+ .then_some(()) + .ok_or(LemmyErrorType::NotAModerator.into()) + } + pub async fn for_community( pool: &mut DbPool<'_>, community_id: CommunityId, @@ -42,17 +63,32 @@ impl CommunityModeratorView { .await } - pub async fn for_person(pool: &mut DbPool<'_>, person_id: PersonId) -> Result, Error> { + pub async fn for_person( + pool: &mut DbPool<'_>, + person_id: PersonId, + local_user: Option<&LocalUser>, + ) -> Result, Error> { let conn = &mut get_conn(pool).await?; - community_moderator::table + let mut query = community_moderator::table .inner_join(community::table) .inner_join(person::table) .filter(community_moderator::person_id.eq(person_id)) - .filter(community::deleted.eq(false)) - .filter(community::removed.eq(false)) .select((community::all_columns, person::all_columns)) - .load::(conn) - .await + .into_boxed(); + + query = local_user.visible_communities_only(query); + + // only show deleted communities to creator + if Some(person_id) != local_user.person_id() { + query = query.filter(community::deleted.eq(false)); + } + + // Show removed communities to admins only + if !local_user.is_admin() { + query = query.filter(community::removed.eq(false)) + } + + query.load::(conn).await } /// Finds all communities first mods / creators @@ -68,7 +104,7 @@ impl CommunityModeratorView { .distinct_on(community_moderator::community_id) .order_by(( community_moderator::community_id, - community_moderator::person_id, + community_moderator::published, )) .load::(conn) .await diff --git a/crates/db_views_actor/src/community_person_ban_view.rs b/crates/db_views_actor/src/community_person_ban_view.rs index 712bb2d3a..5543222f3 100644 --- a/crates/db_views_actor/src/community_person_ban_view.rs +++ b/crates/db_views_actor/src/community_person_ban_view.rs @@ -1,25 +1,33 @@ use crate::structs::CommunityPersonBanView; -use diesel::{dsl::exists, result::Error, select, ExpressionMethods, QueryDsl}; +use diesel::{ + dsl::{exists, not}, + select, + ExpressionMethods, + QueryDsl, +}; use diesel_async::RunQueryDsl; use lemmy_db_schema::{ newtypes::{CommunityId, PersonId}, schema::community_person_ban, utils::{get_conn, DbPool}, }; +use lemmy_utils::{error::LemmyResult, LemmyErrorType}; impl CommunityPersonBanView { - pub async fn get( + pub async fn check( pool: &mut DbPool<'_>, from_person_id: PersonId, from_community_id: CommunityId, - ) -> Result { + ) -> LemmyResult<()> { let conn = &mut get_conn(pool).await?; - select(exists( + select(not(exists( community_person_ban::table .filter(community_person_ban::community_id.eq(from_community_id)) .filter(community_person_ban::person_id.eq(from_person_id)), - )) + ))) .get_result::(conn) - .await + .await? 
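// The not(exists(..)) above yields a plain bool that is true only when no ban row matches;
// the then_some(()) / ok_or chain that follows maps that to Ok(()) for unbanned users and to
// a PersonIsBannedFromCommunity error otherwise, so callers can simply use `?`.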
+ .then_some(()) + .ok_or(LemmyErrorType::PersonIsBannedFromCommunity.into()) } } diff --git a/crates/db_views_actor/src/community_view.rs b/crates/db_views_actor/src/community_view.rs index f646e21f8..9ff6fadce 100644 --- a/crates/db_views_actor/src/community_view.rs +++ b/crates/db_views_actor/src/community_view.rs @@ -1,4 +1,4 @@ -use crate::structs::{CommunityModeratorView, CommunityView, PersonView}; +use crate::structs::{CommunityModeratorView, CommunitySortType, CommunityView, PersonView}; use diesel::{ pg::Pg, result::Error, @@ -11,28 +11,39 @@ use diesel::{ }; use diesel_async::RunQueryDsl; use lemmy_db_schema::{ + impls::local_user::LocalUserOptionHelper, newtypes::{CommunityId, PersonId}, schema::{ community, community_aggregates, community_block, community_follower, + community_person_ban, instance_block, - local_user, }, - source::{community::CommunityFollower, local_user::LocalUser}, - utils::{fuzzy_search, limit_and_offset, DbConn, DbPool, ListFn, Queries, ReadFn}, + source::{community::CommunityFollower, local_user::LocalUser, site::Site}, + utils::{ + functions::lower, + fuzzy_search, + limit_and_offset, + DbConn, + DbPool, + ListFn, + Queries, + ReadFn, + }, ListingType, - SortType, + PostSortType, }; +use lemmy_utils::{error::LemmyResult, LemmyErrorType}; fn queries<'a>() -> Queries< - impl ReadFn<'a, CommunityView, (CommunityId, Option, bool)>, - impl ListFn<'a, CommunityView, CommunityQuery<'a>>, + impl ReadFn<'a, CommunityView, (CommunityId, Option<&'a LocalUser>, bool)>, + impl ListFn<'a, CommunityView, (CommunityQuery<'a>, &'a Site)>, > { - let all_joins = |query: community::BoxedQuery<'a, Pg>, my_person_id: Option| { + let all_joins = |query: community::BoxedQuery<'a, Pg>, my_local_user: Option<&'a LocalUser>| { // The left join below will return None in this case - let person_id_join = my_person_id.unwrap_or(PersonId(-1)); + let person_id_join = my_local_user.person_id().unwrap_or(PersonId(-1)); query .inner_join(community_aggregates::table) @@ -57,13 +68,21 @@ fn queries<'a>() -> Queries< .and(community_block::person_id.eq(person_id_join)), ), ) + .left_join( + community_person_ban::table.on( + community::id + .eq(community_person_ban::community_id) + .and(community_person_ban::person_id.eq(person_id_join)), + ), + ) }; let selection = ( community::all_columns, CommunityFollower::select_subscribed_type(), - community_block::id.nullable().is_not_null(), + community_block::community_id.nullable().is_not_null(), community_aggregates::all_columns, + community_person_ban::person_id.nullable().is_not_null(), ); let not_removed_or_deleted = community::removed @@ -71,14 +90,14 @@ fn queries<'a>() -> Queries< .and(community::deleted.eq(false)); let read = move |mut conn: DbConn<'a>, - (community_id, my_person_id, is_mod_or_admin): ( + (community_id, my_local_user, is_mod_or_admin): ( CommunityId, - Option, + Option<&'a LocalUser>, bool, )| async move { let mut query = all_joins( community::table.find(community_id).into_boxed(), - my_person_id, + my_local_user, ) .select(selection); @@ -87,26 +106,29 @@ fn queries<'a>() -> Queries< query = query.filter(not_removed_or_deleted); } - query.first::(&mut conn).await + query = my_local_user.visible_communities_only(query); + + query.first(&mut conn).await }; - let list = move |mut conn: DbConn<'a>, options: CommunityQuery<'a>| async move { - use SortType::*; - - let my_person_id = options.local_user.map(|l| l.person_id); + let list = move |mut conn: DbConn<'a>, (options, site): (CommunityQuery<'a>, &'a Site)| async move { + use 
CommunitySortType::*; // The left join below will return None in this case - let person_id_join = my_person_id.unwrap_or(PersonId(-1)); + let person_id_join = options.local_user.person_id().unwrap_or(PersonId(-1)); - let mut query = all_joins(community::table.into_boxed(), my_person_id) - .left_join(local_user::table.on(local_user::person_id.eq(person_id_join))) - .select(selection); + let mut query = all_joins(community::table.into_boxed(), options.local_user).select(selection); if let Some(search_term) = options.search_term { let searcher = fuzzy_search(&search_term); - query = query - .filter(community::name.ilike(searcher.clone())) - .or_filter(community::title.ilike(searcher)) + let name_filter = community::name.ilike(searcher.clone()); + let title_filter = community::title.ilike(searcher.clone()); + let description_filter = community::description.ilike(searcher.clone()); + query = if options.title_only.unwrap_or_default() { + query.filter(name_filter.or(title_filter)) + } else { + query.filter(name_filter.or(title_filter.or(description_filter))) + } } // Hide deleted and removed for non-admins or mods @@ -135,11 +157,13 @@ fn queries<'a>() -> Queries< } TopMonth => query = query.order_by(community_aggregates::users_active_month.desc()), TopWeek => query = query.order_by(community_aggregates::users_active_week.desc()), + NameAsc => query = query.order_by(lower(community::name).asc()), + NameDesc => query = query.order_by(lower(community::name).desc()), }; if let Some(listing_type) = options.listing_type { query = match listing_type { - ListingType::Subscribed => query.filter(community_follower::pending.is_not_null()), // TODO could be this: and(community_follower::person_id.eq(person_id_join)), + ListingType::Subscribed => query.filter(community_follower::pending.is_not_null()), /* TODO could be this: and(community_follower::person_id.eq(person_id_join)), */ ListingType::Local => query.filter(community::local.eq(true)), _ => query, }; @@ -147,17 +171,14 @@ fn queries<'a>() -> Queries< // Don't show blocked communities and communities on blocked instances. 
nsfw communities are // also hidden (based on profile setting) - if options.local_user.is_some() { - query = query.filter(instance_block::person_id.is_null()); - query = query.filter(community_block::person_id.is_null()); - query = query.filter(community::nsfw.eq(false).or(local_user::show_nsfw.eq(true))); - } else { - // No person in request, only show nsfw communities if show_nsfw is passed into request - if !options.show_nsfw { - query = query.filter(community::nsfw.eq(false)); - } + query = query.filter(instance_block::person_id.is_null()); + query = query.filter(community_block::person_id.is_null()); + if !(options.local_user.show_nsfw(site) || options.show_nsfw) { + query = query.filter(community::nsfw.eq(false)); } + query = options.local_user.visible_communities_only(query); + let (limit, offset) = limit_and_offset(options.page, options.limit)?; query .limit(limit) @@ -170,38 +191,87 @@ fn queries<'a>() -> Queries< } impl CommunityView { - pub async fn read( + pub async fn read<'a>( pool: &mut DbPool<'_>, community_id: CommunityId, - my_person_id: Option, + my_local_user: Option<&'a LocalUser>, is_mod_or_admin: bool, ) -> Result { queries() - .read(pool, (community_id, my_person_id, is_mod_or_admin)) + .read(pool, (community_id, my_local_user, is_mod_or_admin)) .await } - pub async fn is_mod_or_admin( + pub async fn check_is_mod_or_admin( pool: &mut DbPool<'_>, person_id: PersonId, community_id: CommunityId, - ) -> Result { + ) -> LemmyResult<()> { let is_mod = - CommunityModeratorView::is_community_moderator(pool, community_id, person_id).await?; - if is_mod { - return Ok(true); + CommunityModeratorView::check_is_community_moderator(pool, community_id, person_id).await; + if is_mod.is_ok() + || PersonView::read(pool, person_id) + .await + .is_ok_and(|t| t.is_admin) + { + Ok(()) + } else { + Err(LemmyErrorType::NotAModOrAdmin)? } + } - PersonView::is_admin(pool, person_id).await + /// Checks if a person is an admin, or moderator of any community. + pub async fn check_is_mod_of_any_or_admin( + pool: &mut DbPool<'_>, + person_id: PersonId, + ) -> LemmyResult<()> { + let is_mod_of_any = + CommunityModeratorView::is_community_moderator_of_any(pool, person_id).await; + if is_mod_of_any.is_ok() + || PersonView::read(pool, person_id) + .await + .is_ok_and(|t| t.is_admin) + { + Ok(()) + } else { + Err(LemmyErrorType::NotAModOrAdmin)? 
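Both check_is_mod_or_admin and check_is_mod_of_any_or_admin above rely on the same bool-to-Result idiom as the moderator and ban checks earlier in this diff: an existence query produces a bool, and then_some(()) / ok_or converts it into a LemmyResult<()>. A minimal, self-contained sketch of that idiom in plain Rust (CheckError below is a hypothetical stand-in, not the real LemmyErrorType):

#[derive(Debug, PartialEq)]
enum CheckError {
  NotAModOrAdmin,
}

/// Turns a boolean permission check into a unit Result, mirroring the
/// then_some(()).ok_or(..) chains used by the check_* helpers above.
fn check_is_mod_or_admin(is_mod: bool, is_admin: bool) -> Result<(), CheckError> {
  (is_mod || is_admin)
    .then_some(())
    .ok_or(CheckError::NotAModOrAdmin)
}

fn main() {
  assert_eq!(check_is_mod_or_admin(true, false), Ok(()));
  assert_eq!(check_is_mod_or_admin(false, false), Err(CheckError::NotAModOrAdmin));
}

The payoff is at the call sites: callers can propagate the failure with `?` instead of branching on a returned bool, which is what the renamed check_* signatures enable.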
+ } + } +} + +impl From for CommunitySortType { + fn from(value: PostSortType) -> Self { + match value { + PostSortType::Active => Self::Active, + PostSortType::Hot => Self::Hot, + PostSortType::New => Self::New, + PostSortType::Old => Self::Old, + PostSortType::TopDay => Self::TopDay, + PostSortType::TopWeek => Self::TopWeek, + PostSortType::TopMonth => Self::TopMonth, + PostSortType::TopYear => Self::TopYear, + PostSortType::TopAll => Self::TopAll, + PostSortType::MostComments => Self::MostComments, + PostSortType::NewComments => Self::NewComments, + PostSortType::TopHour => Self::TopHour, + PostSortType::TopSixHour => Self::TopSixHour, + PostSortType::TopTwelveHour => Self::TopTwelveHour, + PostSortType::TopThreeMonths => Self::TopThreeMonths, + PostSortType::TopSixMonths => Self::TopSixMonths, + PostSortType::TopNineMonths => Self::TopNineMonths, + PostSortType::Controversial => Self::Controversial, + PostSortType::Scaled => Self::Scaled, + } } } #[derive(Default)] pub struct CommunityQuery<'a> { pub listing_type: Option, - pub sort: Option, + pub sort: Option, pub local_user: Option<&'a LocalUser>, pub search_term: Option, + pub title_only: Option, pub is_mod_or_admin: bool, pub show_nsfw: bool, pub page: Option, @@ -209,7 +279,201 @@ pub struct CommunityQuery<'a> { } impl<'a> CommunityQuery<'a> { - pub async fn list(self, pool: &mut DbPool<'_>) -> Result, Error> { - queries().list(pool, self).await + pub async fn list(self, site: &Site, pool: &mut DbPool<'_>) -> Result, Error> { + queries().list(pool, (self, site)).await + } +} + +#[cfg(test)] +mod tests { + + use crate::{ + community_view::CommunityQuery, + structs::{CommunitySortType, CommunityView}, + }; + use lemmy_db_schema::{ + source::{ + community::{Community, CommunityInsertForm, CommunityUpdateForm}, + instance::Instance, + local_user::{LocalUser, LocalUserInsertForm}, + person::{Person, PersonInsertForm}, + site::Site, + }, + traits::Crud, + utils::{build_db_pool_for_tests, DbPool}, + CommunityVisibility, + }; + use lemmy_utils::error::LemmyResult; + use serial_test::serial; + use url::Url; + + struct Data { + inserted_instance: Instance, + local_user: LocalUser, + inserted_communities: [Community; 3], + site: Site, + } + + async fn init_data(pool: &mut DbPool<'_>) -> LemmyResult { + let inserted_instance = Instance::read_or_create(pool, "my_domain.tld".to_string()).await?; + + let person_name = "tegan".to_string(); + + let new_person = PersonInsertForm::test_form(inserted_instance.id, &person_name); + + let inserted_person = Person::create(pool, &new_person).await?; + + let local_user_form = LocalUserInsertForm::test_form(inserted_person.id); + let local_user = LocalUser::create(pool, &local_user_form, vec![]).await?; + + let inserted_communities = [ + Community::create( + pool, + &CommunityInsertForm::new( + inserted_instance.id, + "test_community_1".to_string(), + "nada1".to_owned(), + "pubkey".to_string(), + ), + ) + .await?, + Community::create( + pool, + &CommunityInsertForm::new( + inserted_instance.id, + "test_community_2".to_string(), + "nada2".to_owned(), + "pubkey".to_string(), + ), + ) + .await?, + Community::create( + pool, + &CommunityInsertForm::new( + inserted_instance.id, + "test_community_3".to_string(), + "nada3".to_owned(), + "pubkey".to_string(), + ), + ) + .await?, + ]; + + let url = Url::parse("http://example.com")?; + let site = Site { + id: Default::default(), + name: String::new(), + sidebar: None, + published: Default::default(), + updated: None, + icon: None, + banner: None, + description: None, 
+ actor_id: url.clone().into(), + last_refreshed_at: Default::default(), + inbox_url: url.into(), + private_key: None, + public_key: String::new(), + instance_id: Default::default(), + content_warning: None, + }; + + Ok(Data { + inserted_instance, + local_user, + inserted_communities, + site, + }) + } + + async fn cleanup(data: Data, pool: &mut DbPool<'_>) -> LemmyResult<()> { + for Community { id, .. } in data.inserted_communities { + Community::delete(pool, id).await?; + } + Person::delete(pool, data.local_user.person_id).await?; + Instance::delete(pool, data.inserted_instance.id).await?; + + Ok(()) + } + + #[tokio::test] + #[serial] + async fn local_only_community() -> LemmyResult<()> { + let pool = &build_db_pool_for_tests().await; + let pool = &mut pool.into(); + let data = init_data(pool).await?; + + Community::update( + pool, + data.inserted_communities[0].id, + &CommunityUpdateForm { + visibility: Some(CommunityVisibility::LocalOnly), + ..Default::default() + }, + ) + .await?; + + let unauthenticated_query = CommunityQuery { + ..Default::default() + } + .list(&data.site, pool) + .await?; + assert_eq!( + data.inserted_communities.len() - 1, + unauthenticated_query.len() + ); + + let authenticated_query = CommunityQuery { + local_user: Some(&data.local_user), + ..Default::default() + } + .list(&data.site, pool) + .await?; + assert_eq!(data.inserted_communities.len(), authenticated_query.len()); + + let unauthenticated_community = + CommunityView::read(pool, data.inserted_communities[0].id, None, false).await; + assert!(unauthenticated_community.is_err()); + + let authenticated_community = CommunityView::read( + pool, + data.inserted_communities[0].id, + Some(&data.local_user), + false, + ) + .await; + assert!(authenticated_community.is_ok()); + + cleanup(data, pool).await + } + + #[tokio::test] + #[serial] + async fn community_sort_name() -> LemmyResult<()> { + let pool = &build_db_pool_for_tests().await; + let pool = &mut pool.into(); + let data = init_data(pool).await?; + + let query = CommunityQuery { + sort: Some(CommunitySortType::NameAsc), + ..Default::default() + }; + let communities = query.list(&data.site, pool).await?; + for (i, c) in communities.iter().enumerate().skip(1) { + let prev = communities.get(i - 1).expect("No previous community?"); + assert!(c.community.title.cmp(&prev.community.title).is_ge()); + } + + let query = CommunityQuery { + sort: Some(CommunitySortType::NameDesc), + ..Default::default() + }; + let communities = query.list(&data.site, pool).await?; + for (i, c) in communities.iter().enumerate().skip(1) { + let prev = communities.get(i - 1).expect("No previous community?"); + assert!(c.community.title.cmp(&prev.community.title).is_le()); + } + + cleanup(data, pool).await } } diff --git a/crates/db_views_actor/src/instance_block_view.rs b/crates/db_views_actor/src/instance_block_view.rs deleted file mode 100644 index 05820862a..000000000 --- a/crates/db_views_actor/src/instance_block_view.rs +++ /dev/null @@ -1,27 +0,0 @@ -use crate::structs::InstanceBlockView; -use diesel::{result::Error, ExpressionMethods, JoinOnDsl, NullableExpressionMethods, QueryDsl}; -use diesel_async::RunQueryDsl; -use lemmy_db_schema::{ - newtypes::PersonId, - schema::{instance, instance_block, person, site}, - utils::{get_conn, DbPool}, -}; - -impl InstanceBlockView { - pub async fn for_person(pool: &mut DbPool<'_>, person_id: PersonId) -> Result, Error> { - let conn = &mut get_conn(pool).await?; - instance_block::table - .inner_join(person::table) - .inner_join(instance::table) 
- .left_join(site::table.on(site::instance_id.eq(instance::id))) - .select(( - person::all_columns, - instance::all_columns, - site::all_columns.nullable(), - )) - .filter(instance_block::person_id.eq(person_id)) - .order_by(instance_block::published) - .load::(conn) - .await - } -} diff --git a/crates/db_views_actor/src/lib.rs b/crates/db_views_actor/src/lib.rs index e9f8e4189..2ec9652e3 100644 --- a/crates/db_views_actor/src/lib.rs +++ b/crates/db_views_actor/src/lib.rs @@ -1,8 +1,6 @@ #[cfg(feature = "full")] pub mod comment_reply_view; #[cfg(feature = "full")] -pub mod community_block_view; -#[cfg(feature = "full")] pub mod community_follower_view; #[cfg(feature = "full")] pub mod community_moderator_view; @@ -11,10 +9,6 @@ pub mod community_person_ban_view; #[cfg(feature = "full")] pub mod community_view; #[cfg(feature = "full")] -pub mod instance_block_view; -#[cfg(feature = "full")] -pub mod person_block_view; -#[cfg(feature = "full")] pub mod person_mention_view; #[cfg(feature = "full")] pub mod person_view; diff --git a/crates/db_views_actor/src/person_block_view.rs b/crates/db_views_actor/src/person_block_view.rs deleted file mode 100644 index 5f028acd8..000000000 --- a/crates/db_views_actor/src/person_block_view.rs +++ /dev/null @@ -1,30 +0,0 @@ -use crate::structs::PersonBlockView; -use diesel::{result::Error, ExpressionMethods, JoinOnDsl, QueryDsl}; -use diesel_async::RunQueryDsl; -use lemmy_db_schema::{ - newtypes::PersonId, - schema::{person, person_block}, - utils::{get_conn, DbPool}, -}; - -impl PersonBlockView { - pub async fn for_person(pool: &mut DbPool<'_>, person_id: PersonId) -> Result, Error> { - let conn = &mut get_conn(pool).await?; - let target_person_alias = diesel::alias!(person as person1); - - person_block::table - .inner_join(person::table.on(person_block::person_id.eq(person::id))) - .inner_join( - target_person_alias.on(person_block::target_id.eq(target_person_alias.field(person::id))), - ) - .select(( - person::all_columns, - target_person_alias.fields(person::all_columns), - )) - .filter(person_block::person_id.eq(person_id)) - .filter(target_person_alias.field(person::deleted).eq(false)) - .order_by(person_block::published) - .load::(conn) - .await - } -} diff --git a/crates/db_views_actor/src/person_mention_view.rs b/crates/db_views_actor/src/person_mention_view.rs index 8d04f9820..2478c0183 100644 --- a/crates/db_views_actor/src/person_mention_view.rs +++ b/crates/db_views_actor/src/person_mention_view.rs @@ -1,10 +1,13 @@ use crate::structs::PersonMentionView; use diesel::{ - dsl::now, + dsl::{exists, not}, pg::Pg, result::Error, + sql_types, BoolExpressionMethods, + BoxableExpression, ExpressionMethods, + IntoSql, JoinOnDsl, NullableExpressionMethods, QueryDsl, @@ -20,13 +23,15 @@ use lemmy_db_schema::{ comment_saved, community, community_follower, + community_moderator, community_person_ban, + local_user, person, person_block, person_mention, post, }, - source::community::CommunityFollower, + source::local_user::LocalUser, utils::{get_conn, limit_and_offset, DbConn, DbPool, ListFn, Queries, ReadFn}, CommentSortType, }; @@ -35,9 +40,120 @@ fn queries<'a>() -> Queries< impl ReadFn<'a, PersonMentionView, (PersonMentionId, Option)>, impl ListFn<'a, PersonMentionView, PersonMentionQuery>, > { - let all_joins = |query: person_mention::BoxedQuery<'a, Pg>, my_person_id: Option| { - // The left join below will return None in this case - let person_id_join = my_person_id.unwrap_or(PersonId(-1)); + let is_creator_banned_from_community = exists( + 
community_person_ban::table.filter( + community::id + .eq(community_person_ban::community_id) + .and(community_person_ban::person_id.eq(comment::creator_id)), + ), + ); + + let is_local_user_banned_from_community = |person_id| { + exists( + community_person_ban::table.filter( + community::id + .eq(community_person_ban::community_id) + .and(community_person_ban::person_id.eq(person_id)), + ), + ) + }; + + let is_saved = |person_id| { + exists( + comment_saved::table.filter( + comment::id + .eq(comment_saved::comment_id) + .and(comment_saved::person_id.eq(person_id)), + ), + ) + }; + + let is_community_followed = |person_id| { + community_follower::table + .filter( + post::community_id + .eq(community_follower::community_id) + .and(community_follower::person_id.eq(person_id)), + ) + .select(community_follower::pending.nullable()) + .single_value() + }; + + let is_creator_blocked = |person_id| { + exists( + person_block::table.filter( + comment::creator_id + .eq(person_block::target_id) + .and(person_block::person_id.eq(person_id)), + ), + ) + }; + + let score = |person_id| { + comment_like::table + .filter( + comment::id + .eq(comment_like::comment_id) + .and(comment_like::person_id.eq(person_id)), + ) + .select(comment_like::score.nullable()) + .single_value() + }; + + let creator_is_moderator = exists( + community_moderator::table.filter( + community::id + .eq(community_moderator::community_id) + .and(community_moderator::person_id.eq(comment::creator_id)), + ), + ); + + let creator_is_admin = exists( + local_user::table.filter( + comment::creator_id + .eq(local_user::person_id) + .and(local_user::admin.eq(true)), + ), + ); + + let all_joins = move |query: person_mention::BoxedQuery<'a, Pg>, + my_person_id: Option| { + let is_local_user_banned_from_community_selection: Box< + dyn BoxableExpression<_, Pg, SqlType = sql_types::Bool>, + > = if let Some(person_id) = my_person_id { + Box::new(is_local_user_banned_from_community(person_id)) + } else { + Box::new(false.into_sql::()) + }; + let score_selection: Box< + dyn BoxableExpression<_, Pg, SqlType = sql_types::Nullable>, + > = if let Some(person_id) = my_person_id { + Box::new(score(person_id)) + } else { + Box::new(None::.into_sql::>()) + }; + + let subscribed_type_selection: Box< + dyn BoxableExpression<_, Pg, SqlType = sql_types::Nullable>, + > = if let Some(person_id) = my_person_id { + Box::new(is_community_followed(person_id)) + } else { + Box::new(None::.into_sql::>()) + }; + + let is_saved_selection: Box> = + if let Some(person_id) = my_person_id { + Box::new(is_saved(person_id)) + } else { + Box::new(false.into_sql::()) + }; + + let is_creator_blocked_selection: Box> = + if let Some(person_id) = my_person_id { + Box::new(is_creator_blocked(person_id)) + } else { + Box::new(false.into_sql::()) + }; query .inner_join(comment::table) @@ -46,51 +162,25 @@ fn queries<'a>() -> Queries< .inner_join(community::table.on(post::community_id.eq(community::id))) .inner_join(aliases::person1) .inner_join(comment_aggregates::table.on(comment::id.eq(comment_aggregates::comment_id))) - .left_join( - community_follower::table.on( - post::community_id - .eq(community_follower::community_id) - .and(community_follower::person_id.eq(person_id_join)), - ), - ) - .left_join( - comment_saved::table.on( - comment::id - .eq(comment_saved::comment_id) - .and(comment_saved::person_id.eq(person_id_join)), - ), - ) - .left_join( - person_block::table.on( - comment::creator_id - .eq(person_block::target_id) - .and(person_block::person_id.eq(person_id_join)), - ), 
- ) - .left_join( - comment_like::table.on( - comment::id - .eq(comment_like::comment_id) - .and(comment_like::person_id.eq(person_id_join)), - ), - ) + .select(( + person_mention::all_columns, + comment::all_columns, + person::all_columns, + post::all_columns, + community::all_columns, + aliases::person1.fields(person::all_columns), + comment_aggregates::all_columns, + is_creator_banned_from_community, + is_local_user_banned_from_community_selection, + creator_is_moderator, + creator_is_admin, + subscribed_type_selection, + is_saved_selection, + is_creator_blocked_selection, + score_selection, + )) }; - let selection = ( - person_mention::all_columns, - comment::all_columns, - person::all_columns, - post::all_columns, - community::all_columns, - aliases::person1.fields(person::all_columns), - comment_aggregates::all_columns, - community_person_ban::id.nullable().is_not_null(), - CommunityFollower::select_subscribed_type(), - comment_saved::id.nullable().is_not_null(), - person_block::id.nullable().is_not_null(), - comment_like::score.nullable(), - ); - let read = move |mut conn: DbConn<'a>, (person_mention_id, my_person_id): (PersonMentionId, Option)| async move { @@ -98,33 +188,14 @@ fn queries<'a>() -> Queries< person_mention::table.find(person_mention_id).into_boxed(), my_person_id, ) - .left_join( - community_person_ban::table.on( - community::id - .eq(community_person_ban::community_id) - .and(community_person_ban::person_id.eq(comment::creator_id)), - ), - ) - .select(selection) - .first::(&mut conn) + .first(&mut conn) .await }; let list = move |mut conn: DbConn<'a>, options: PersonMentionQuery| async move { - let mut query = all_joins(person_mention::table.into_boxed(), options.my_person_id) - .left_join( - community_person_ban::table.on( - community::id - .eq(community_person_ban::community_id) - .and(community_person_ban::person_id.eq(comment::creator_id)) - .and( - community_person_ban::expires - .is_null() - .or(community_person_ban::expires.gt(now)), - ), - ), - ) - .select(selection); + // These filters need to be kept in sync with the filters in + // PersonMentionView::get_unread_mentions() + let mut query = all_joins(person_mention::table.into_boxed(), options.my_person_id); if let Some(recipient_id) = options.recipient_id { query = query.filter(person_mention::recipient_id.eq(recipient_id)); @@ -135,7 +206,7 @@ fn queries<'a>() -> Queries< } if !options.show_bot_accounts { - query = query.filter(person::bot_account.eq(false)); + query = query.filter(not(person::bot_account)); }; query = match options.sort.unwrap_or(CommentSortType::Hot) { @@ -148,6 +219,11 @@ fn queries<'a>() -> Queries< CommentSortType::Top => query.order_by(comment_aggregates::score.desc()), }; + // Don't show mentions from blocked persons + if let Some(my_person_id) = options.my_person_id { + query = query.filter(not(is_creator_blocked(my_person_id))); + } + let (limit, offset) = limit_and_offset(options.page, options.limit)?; query @@ -174,14 +250,32 @@ impl PersonMentionView { /// Gets the number of unread mentions pub async fn get_unread_mentions( pool: &mut DbPool<'_>, - my_person_id: PersonId, + local_user: &LocalUser, ) -> Result { use diesel::dsl::count; let conn = &mut get_conn(pool).await?; - person_mention::table + let mut query = person_mention::table .inner_join(comment::table) - .filter(person_mention::recipient_id.eq(my_person_id)) + .left_join( + person_block::table.on( + comment::creator_id + .eq(person_block::target_id) + .and(person_block::person_id.eq(local_user.person_id)), + ), + ) 
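// Note: the left join to person_block above, combined with the person_block::person_id.is_null()
// filter applied further down, acts as an anti-join: mentions whose creator the recipient has
// blocked never reach the unread count, matching the not(is_creator_blocked(..)) filter used in
// queries().list().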
+ .inner_join(person::table.on(comment::creator_id.eq(person::id))) + .into_boxed(); + + // These filters need to be kept in sync with the filters in queries().list() + if !local_user.show_bot_accounts { + query = query.filter(not(person::bot_account)); + } + + query + // Don't count replies from blocked users + .filter(person_block::person_id.is_null()) + .filter(person_mention::recipient_id.eq(local_user.person_id)) .filter(person_mention::read.eq(false)) .filter(comment::deleted.eq(false)) .filter(comment::removed.eq(false)) @@ -191,7 +285,7 @@ impl PersonMentionView { } } -#[derive(Default)] +#[derive(Default, Clone)] pub struct PersonMentionQuery { pub my_person_id: Option, pub recipient_id: Option, @@ -207,3 +301,166 @@ impl PersonMentionQuery { queries().list(pool, self).await } } + +#[cfg(test)] +mod tests { + + use crate::{person_mention_view::PersonMentionQuery, structs::PersonMentionView}; + use lemmy_db_schema::{ + source::{ + comment::{Comment, CommentInsertForm}, + community::{Community, CommunityInsertForm}, + instance::Instance, + local_user::{LocalUser, LocalUserInsertForm, LocalUserUpdateForm}, + person::{Person, PersonInsertForm, PersonUpdateForm}, + person_block::{PersonBlock, PersonBlockForm}, + person_mention::{PersonMention, PersonMentionInsertForm, PersonMentionUpdateForm}, + post::{Post, PostInsertForm}, + }, + traits::{Blockable, Crud}, + utils::build_db_pool_for_tests, + }; + use lemmy_db_views::structs::LocalUserView; + use lemmy_utils::error::LemmyResult; + use pretty_assertions::assert_eq; + use serial_test::serial; + + #[tokio::test] + #[serial] + async fn test_crud() -> LemmyResult<()> { + let pool = &build_db_pool_for_tests().await; + let pool = &mut pool.into(); + + let inserted_instance = Instance::read_or_create(pool, "my_domain.tld".to_string()).await?; + + let new_person = PersonInsertForm::test_form(inserted_instance.id, "terrylake"); + + let inserted_person = Person::create(pool, &new_person).await?; + + let recipient_form = PersonInsertForm::test_form(inserted_instance.id, "terrylakes recipient"); + + let inserted_recipient = Person::create(pool, &recipient_form).await?; + let recipient_id = inserted_recipient.id; + + let recipient_local_user = + LocalUser::create(pool, &LocalUserInsertForm::test_form(recipient_id), vec![]).await?; + + let new_community = CommunityInsertForm::new( + inserted_instance.id, + "test community lake".to_string(), + "nada".to_owned(), + "pubkey".to_string(), + ); + let inserted_community = Community::create(pool, &new_community).await?; + + let new_post = PostInsertForm::new( + "A test post".into(), + inserted_person.id, + inserted_community.id, + ); + let inserted_post = Post::create(pool, &new_post).await?; + + let comment_form = CommentInsertForm::new( + inserted_person.id, + inserted_post.id, + "A test comment".into(), + ); + let inserted_comment = Comment::create(pool, &comment_form, None).await?; + + let person_mention_form = PersonMentionInsertForm { + recipient_id: inserted_recipient.id, + comment_id: inserted_comment.id, + read: None, + }; + + let inserted_mention = PersonMention::create(pool, &person_mention_form).await?; + + let expected_mention = PersonMention { + id: inserted_mention.id, + recipient_id: inserted_mention.recipient_id, + comment_id: inserted_mention.comment_id, + read: false, + published: inserted_mention.published, + }; + + let read_mention = PersonMention::read(pool, inserted_mention.id).await?; + + let person_mention_update_form = PersonMentionUpdateForm { read: Some(false) }; + let 
updated_mention = + PersonMention::update(pool, inserted_mention.id, &person_mention_update_form).await?; + + // Test to make sure counts and blocks work correctly + let unread_mentions = + PersonMentionView::get_unread_mentions(pool, &recipient_local_user).await?; + + let query = PersonMentionQuery { + recipient_id: Some(recipient_id), + my_person_id: Some(recipient_id), + sort: None, + unread_only: false, + show_bot_accounts: true, + page: None, + limit: None, + }; + let mentions = query.clone().list(pool).await?; + assert_eq!(1, unread_mentions); + assert_eq!(1, mentions.len()); + + // Block the person, and make sure these counts are now empty + let block_form = PersonBlockForm { + person_id: recipient_id, + target_id: inserted_person.id, + }; + PersonBlock::block(pool, &block_form).await?; + + let unread_mentions_after_block = + PersonMentionView::get_unread_mentions(pool, &recipient_local_user).await?; + let mentions_after_block = query.clone().list(pool).await?; + assert_eq!(0, unread_mentions_after_block); + assert_eq!(0, mentions_after_block.len()); + + // Unblock user so we can reuse the same person + PersonBlock::unblock(pool, &block_form).await?; + + // Turn Terry into a bot account + let person_update_form = PersonUpdateForm { + bot_account: Some(true), + ..Default::default() + }; + Person::update(pool, inserted_person.id, &person_update_form).await?; + + let recipient_local_user_update_form = LocalUserUpdateForm { + show_bot_accounts: Some(false), + ..Default::default() + }; + LocalUser::update( + pool, + recipient_local_user.id, + &recipient_local_user_update_form, + ) + .await?; + let recipient_local_user_view = LocalUserView::read(pool, recipient_local_user.id).await?; + + let unread_mentions_after_hide_bots = + PersonMentionView::get_unread_mentions(pool, &recipient_local_user_view.local_user).await?; + + let mut query_without_bots = query.clone(); + query_without_bots.show_bot_accounts = false; + let replies_after_hide_bots = query_without_bots.list(pool).await?; + assert_eq!(0, unread_mentions_after_hide_bots); + assert_eq!(0, replies_after_hide_bots.len()); + + Comment::delete(pool, inserted_comment.id).await?; + Post::delete(pool, inserted_post.id).await?; + Community::delete(pool, inserted_community.id).await?; + Person::delete(pool, inserted_person.id).await?; + Person::delete(pool, inserted_recipient.id).await?; + Instance::delete(pool, inserted_instance.id).await?; + + assert_eq!(expected_mention, read_mention); + assert_eq!(expected_mention, inserted_mention); + assert_eq!(expected_mention, updated_mention); + + Ok(()) + } +} diff --git a/crates/db_views_actor/src/person_view.rs b/crates/db_views_actor/src/person_view.rs index 9e7c4d7e6..724a700ad 100644 --- a/crates/db_views_actor/src/person_view.rs +++ b/crates/db_views_actor/src/person_view.rs @@ -11,13 +11,23 @@ use diesel::{ use diesel_async::RunQueryDsl; use lemmy_db_schema::{ newtypes::PersonId, - schema, schema::{local_user, person, person_aggregates}, - utils::{fuzzy_search, get_conn, limit_and_offset, now, DbConn, DbPool, ListFn, Queries, ReadFn}, - SortType, + utils::{ + functions::coalesce, + fuzzy_search, + limit_and_offset, + now, + DbConn, + DbPool, + ListFn, + Queries, + ReadFn, + }, + ListingType, + PostSortType, }; use serde::{Deserialize, Serialize}; -use strum_macros::{Display, EnumString}; +use strum::{Display, EnumString}; enum ListMode { Admins, @@ -36,29 +46,34 @@ enum PersonSortType { PostCount, } -fn post_to_person_sort_type(sort: SortType) -> PersonSortType { +fn 
post_to_person_sort_type(sort: PostSortType) -> PersonSortType { + use PostSortType::*; match sort { - SortType::Active | SortType::Hot | SortType::Controversial => PersonSortType::CommentScore, - SortType::New | SortType::NewComments => PersonSortType::New, - SortType::MostComments => PersonSortType::MostComments, - SortType::Old => PersonSortType::Old, + Active | Hot | Controversial => PersonSortType::CommentScore, + New | NewComments => PersonSortType::New, + MostComments => PersonSortType::MostComments, + Old => PersonSortType::Old, _ => PersonSortType::CommentScore, } } fn queries<'a>( ) -> Queries, impl ListFn<'a, PersonView, ListMode>> { - let all_joins = |query: person::BoxedQuery<'a, Pg>| { + let all_joins = move |query: person::BoxedQuery<'a, Pg>| { query .inner_join(person_aggregates::table) .left_join(local_user::table) .filter(person::deleted.eq(false)) - .select((person::all_columns, person_aggregates::all_columns)) + .select(( + person::all_columns, + person_aggregates::all_columns, + coalesce(local_user::admin.nullable(), false), + )) }; let read = move |mut conn: DbConn<'a>, person_id: PersonId| async move { all_joins(person::table.find(person_id).into_boxed()) - .first::(&mut conn) + .first(&mut conn) .await }; @@ -102,6 +117,15 @@ fn queries<'a>( let (limit, offset) = limit_and_offset(options.page, options.limit)?; query = query.limit(limit).offset(offset); + + if let Some(listing_type) = options.listing_type { + query = match listing_type { + // return nothing as its not possible to follow users + ListingType::Subscribed => query.limit(0), + ListingType::Local => query.filter(person::local.eq(true)), + _ => query, + }; + } } } query.load::(&mut conn).await @@ -115,21 +139,6 @@ impl PersonView { queries().read(pool, person_id).await } - pub async fn is_admin(pool: &mut DbPool<'_>, person_id: PersonId) -> Result { - use schema::{ - local_user::dsl::admin, - person::dsl::{id, person}, - }; - let conn = &mut get_conn(pool).await?; - let is_admin = person - .inner_join(local_user::table) - .filter(id.eq(person_id)) - .select(admin) - .first::(conn) - .await?; - Ok(is_admin) - } - pub async fn admins(pool: &mut DbPool<'_>) -> Result, Error> { queries().list(pool, ListMode::Admins).await } @@ -141,8 +150,9 @@ impl PersonView { #[derive(Default)] pub struct PersonQuery { - pub sort: Option, + pub sort: Option, pub search_term: Option, + pub listing_type: Option, pub page: Option, pub limit: Option, } @@ -154,13 +164,12 @@ impl PersonQuery { } #[cfg(test)] +#[expect(clippy::indexing_slicing)] mod tests { - #![allow(clippy::unwrap_used)] - #![allow(clippy::indexing_slicing)] use super::*; - use diesel::NotFound; use lemmy_db_schema::{ + assert_length, source::{ instance::Instance, local_user::{LocalUser, LocalUserInsertForm, LocalUserUpdateForm}, @@ -169,6 +178,8 @@ mod tests { traits::Crud, utils::build_db_pool_for_tests, }; + use lemmy_utils::error::LemmyResult; + use pretty_assertions::assert_eq; use serial_test::serial; struct Data { @@ -178,64 +189,49 @@ mod tests { bob_local_user: LocalUser, } - async fn init_data(pool: &mut DbPool<'_>) -> Data { - let inserted_instance = Instance::read_or_create(pool, "my_domain.tld".to_string()) - .await - .unwrap(); + async fn init_data(pool: &mut DbPool<'_>) -> LemmyResult { + let inserted_instance = Instance::read_or_create(pool, "my_domain.tld".to_string()).await?; - let alice_form = PersonInsertForm::builder() - .name("alice".to_string()) - .public_key("pubkey".to_string()) - .instance_id(inserted_instance.id) - .build(); - let alice 
= Person::create(pool, &alice_form).await.unwrap(); - let alice_local_user_form = LocalUserInsertForm::builder() - .person_id(alice.id) - .password_encrypted(String::new()) - .build(); - let alice_local_user = LocalUser::create(pool, &alice_local_user_form) - .await - .unwrap(); + let alice_form = PersonInsertForm { + local: Some(true), + ..PersonInsertForm::test_form(inserted_instance.id, "alice") + }; + let alice = Person::create(pool, &alice_form).await?; + let alice_local_user_form = LocalUserInsertForm::test_form(alice.id); + let alice_local_user = LocalUser::create(pool, &alice_local_user_form, vec![]).await?; - let bob_form = PersonInsertForm::builder() - .name("bob".to_string()) - .bot_account(Some(true)) - .public_key("pubkey".to_string()) - .instance_id(inserted_instance.id) - .build(); - let bob = Person::create(pool, &bob_form).await.unwrap(); - let bob_local_user_form = LocalUserInsertForm::builder() - .person_id(bob.id) - .password_encrypted(String::new()) - .build(); - let bob_local_user = LocalUser::create(pool, &bob_local_user_form).await.unwrap(); + let bob_form = PersonInsertForm { + bot_account: Some(true), + local: Some(false), + ..PersonInsertForm::test_form(inserted_instance.id, "bob") + }; + let bob = Person::create(pool, &bob_form).await?; + let bob_local_user_form = LocalUserInsertForm::test_form(bob.id); + let bob_local_user = LocalUser::create(pool, &bob_local_user_form, vec![]).await?; - Data { + Ok(Data { alice, alice_local_user, bob, bob_local_user, - } + }) } - async fn cleanup(data: Data, pool: &mut DbPool<'_>) { - LocalUser::delete(pool, data.alice_local_user.id) - .await - .unwrap(); - LocalUser::delete(pool, data.bob_local_user.id) - .await - .unwrap(); - Person::delete(pool, data.alice.id).await.unwrap(); - Person::delete(pool, data.bob.id).await.unwrap(); - Instance::delete(pool, data.bob.instance_id).await.unwrap(); + async fn cleanup(data: Data, pool: &mut DbPool<'_>) -> LemmyResult<()> { + LocalUser::delete(pool, data.alice_local_user.id).await?; + LocalUser::delete(pool, data.bob_local_user.id).await?; + Person::delete(pool, data.alice.id).await?; + Person::delete(pool, data.bob.id).await?; + Instance::delete(pool, data.bob.instance_id).await?; + Ok(()) } #[tokio::test] #[serial] - async fn exclude_deleted() { + async fn exclude_deleted() -> LemmyResult<()> { let pool = &build_db_pool_for_tests().await; let pool = &mut pool.into(); - let data = init_data(pool).await; + let data = init_data(pool).await?; Person::update( pool, @@ -245,25 +241,29 @@ mod tests { ..Default::default() }, ) - .await - .unwrap(); + .await?; let read = PersonView::read(pool, data.alice.id).await; - assert_eq!(read.err(), Some(NotFound)); + assert!(read.is_err()); - let list = PersonQuery::default().list(pool).await.unwrap(); - assert_eq!(list.len(), 1); + let list = PersonQuery { + sort: Some(PostSortType::New), + ..Default::default() + } + .list(pool) + .await?; + assert_length!(1, list); assert_eq!(list[0].person.id, data.bob.id); - cleanup(data, pool).await; + cleanup(data, pool).await } #[tokio::test] #[serial] - async fn list_banned() { + async fn list_banned() -> LemmyResult<()> { let pool = &build_db_pool_for_tests().await; let pool = &mut pool.into(); - let data = init_data(pool).await; + let data = init_data(pool).await?; Person::update( pool, @@ -273,22 +273,21 @@ mod tests { ..Default::default() }, ) - .await - .unwrap(); + .await?; - let list = PersonView::banned(pool).await.unwrap(); - assert_eq!(list.len(), 1); + let list = PersonView::banned(pool).await?; + 
assert_length!(1, list); assert_eq!(list[0].person.id, data.alice.id); - cleanup(data, pool).await; + cleanup(data, pool).await } #[tokio::test] #[serial] - async fn list_admins() { + async fn list_admins() -> LemmyResult<()> { let pool = &build_db_pool_for_tests().await; let pool = &mut pool.into(); - let data = init_data(pool).await; + let data = init_data(pool).await?; LocalUser::update( pool, @@ -298,19 +297,45 @@ mod tests { ..Default::default() }, ) - .await - .unwrap(); + .await?; - let list = PersonView::admins(pool).await.unwrap(); - assert_eq!(list.len(), 1); + let list = PersonView::admins(pool).await?; + assert_length!(1, list); assert_eq!(list[0].person.id, data.alice.id); - let is_admin = PersonView::is_admin(pool, data.alice.id).await.unwrap(); + let is_admin = PersonView::read(pool, data.alice.id).await?.is_admin; assert!(is_admin); - let is_admin = PersonView::is_admin(pool, data.bob.id).await.unwrap(); + let is_admin = PersonView::read(pool, data.bob.id).await?.is_admin; assert!(!is_admin); - cleanup(data, pool).await; + cleanup(data, pool).await + } + + #[tokio::test] + #[serial] + async fn listing_type() -> LemmyResult<()> { + let pool = &build_db_pool_for_tests().await; + let pool = &mut pool.into(); + let data = init_data(pool).await?; + + let list = PersonQuery { + listing_type: Some(ListingType::Local), + ..Default::default() + } + .list(pool) + .await?; + assert_length!(1, list); + assert_eq!(list[0].person.id, data.alice.id); + + let list = PersonQuery { + listing_type: Some(ListingType::All), + ..Default::default() + } + .list(pool) + .await?; + assert_length!(2, list); + + cleanup(data, pool).await } } diff --git a/crates/db_views_actor/src/structs.rs b/crates/db_views_actor/src/structs.rs index bdc9e6bbd..ecf9ba11d 100644 --- a/crates/db_views_actor/src/structs.rs +++ b/crates/db_views_actor/src/structs.rs @@ -6,11 +6,9 @@ use lemmy_db_schema::{ comment::Comment, comment_reply::CommentReply, community::Community, - instance::Instance, person::Person, person_mention::PersonMention, post::Post, - site::Site, }, SubscribedType, }; @@ -21,26 +19,7 @@ use ts_rs::TS; #[derive(Debug, Serialize, Deserialize, Clone)] #[cfg_attr(feature = "full", derive(TS, Queryable))] -#[cfg_attr(feature = "full", ts(export))] -/// A community block. -pub struct CommunityBlockView { - pub person: Person, - pub community: Community, -} - -#[skip_serializing_none] -#[derive(Debug, Serialize, Deserialize, Clone)] -#[cfg_attr(feature = "full", derive(TS, Queryable))] -#[cfg_attr(feature = "full", ts(export))] -/// An instance block by a user. -pub struct InstanceBlockView { - pub person: Person, - pub instance: Instance, - pub site: Option, -} - -#[derive(Debug, Serialize, Deserialize, Clone)] -#[cfg_attr(feature = "full", derive(TS, Queryable))] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] #[cfg_attr(feature = "full", ts(export))] /// A community follower. pub struct CommunityFollowerView { @@ -50,6 +29,7 @@ pub struct CommunityFollowerView { #[derive(Debug, Serialize, Deserialize, Clone)] #[cfg_attr(feature = "full", derive(TS, Queryable))] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] #[cfg_attr(feature = "full", ts(export))] /// A community moderator. 
pub struct CommunityModeratorView { @@ -59,6 +39,7 @@ pub struct CommunityModeratorView { #[derive(Debug, Serialize, Deserialize, Clone)] #[cfg_attr(feature = "full", derive(Queryable))] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] /// A community person ban. pub struct CommunityPersonBanView { pub community: Community, @@ -67,6 +48,7 @@ pub struct CommunityPersonBanView { #[derive(Debug, Serialize, Deserialize, Clone)] #[cfg_attr(feature = "full", derive(TS, Queryable))] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] #[cfg_attr(feature = "full", ts(export))] /// A community view. pub struct CommunityView { @@ -74,20 +56,42 @@ pub struct CommunityView { pub subscribed: SubscribedType, pub blocked: bool, pub counts: CommunityAggregates, + pub banned_from_community: bool, } -#[derive(Debug, Serialize, Deserialize, Clone)] -#[cfg_attr(feature = "full", derive(TS, Queryable))] +/// The community sort types. See here for descriptions: https://join-lemmy.org/docs/en/users/03-votes-and-ranking.html +#[derive(Debug, Serialize, Deserialize, Clone, Copy, Default, PartialEq, Eq, Hash)] +#[cfg_attr(feature = "full", derive(TS))] #[cfg_attr(feature = "full", ts(export))] -/// A person block. -pub struct PersonBlockView { - pub person: Person, - pub target: Person, +pub enum CommunitySortType { + #[default] + Active, + Hot, + New, + Old, + TopDay, + TopWeek, + TopMonth, + TopYear, + TopAll, + MostComments, + NewComments, + TopHour, + TopSixHour, + TopTwelveHour, + TopThreeMonths, + TopSixMonths, + TopNineMonths, + Controversial, + Scaled, + NameAsc, + NameDesc, } #[skip_serializing_none] #[derive(Debug, PartialEq, Serialize, Deserialize, Clone)] #[cfg_attr(feature = "full", derive(TS, Queryable))] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] #[cfg_attr(feature = "full", ts(export))] /// A person mention view. pub struct PersonMentionView { @@ -99,6 +103,9 @@ pub struct PersonMentionView { pub recipient: Person, pub counts: CommentAggregates, pub creator_banned_from_community: bool, + pub banned_from_community: bool, + pub creator_is_moderator: bool, + pub creator_is_admin: bool, pub subscribed: SubscribedType, pub saved: bool, pub creator_blocked: bool, @@ -108,6 +115,7 @@ pub struct PersonMentionView { #[skip_serializing_none] #[derive(Debug, PartialEq, Serialize, Deserialize, Clone)] #[cfg_attr(feature = "full", derive(TS, Queryable))] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] #[cfg_attr(feature = "full", ts(export))] /// A comment reply view. pub struct CommentReplyView { @@ -118,18 +126,23 @@ pub struct CommentReplyView { pub community: Community, pub recipient: Person, pub counts: CommentAggregates, - pub creator_banned_from_community: bool, // Left Join to CommunityPersonBan - pub subscribed: SubscribedType, // Left join to CommunityFollower - pub saved: bool, // Left join to CommentSaved - pub creator_blocked: bool, // Left join to PersonBlock - pub my_vote: Option, // Left join to CommentLike + pub creator_banned_from_community: bool, + pub banned_from_community: bool, + pub creator_is_moderator: bool, + pub creator_is_admin: bool, + pub subscribed: SubscribedType, + pub saved: bool, + pub creator_blocked: bool, + pub my_vote: Option, } #[derive(Debug, Serialize, Deserialize, Clone)] #[cfg_attr(feature = "full", derive(TS, Queryable))] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] #[cfg_attr(feature = "full", ts(export))] /// A person view. 
pub struct PersonView { pub person: Person, pub counts: PersonAggregates, + pub is_admin: bool, } diff --git a/crates/db_views_moderator/Cargo.toml b/crates/db_views_moderator/Cargo.toml index 7efa922e6..df8ec1ea2 100644 --- a/crates/db_views_moderator/Cargo.toml +++ b/crates/db_views_moderator/Cargo.toml @@ -11,6 +11,9 @@ repository.workspace = true [lib] doctest = false +[lints] +workspace = true + [features] full = ["lemmy_db_schema/full", "diesel", "diesel-async", "ts-rs"] diff --git a/crates/db_views_moderator/src/admin_purge_comment_view.rs b/crates/db_views_moderator/src/admin_purge_comment_view.rs index f62fe0f22..4c650b6fa 100644 --- a/crates/db_views_moderator/src/admin_purge_comment_view.rs +++ b/crates/db_views_moderator/src/admin_purge_comment_view.rs @@ -40,6 +40,11 @@ impl AdminPurgeCommentView { query = query.filter(admin_purge_comment::admin_person_id.eq(admin_person_id)); }; + // If a post or comment ID is given, then don't find any results + if params.post_id.is_some() || params.comment_id.is_some() { + return Ok(vec![]); + } + let (limit, offset) = limit_and_offset(params.page, params.limit)?; query diff --git a/crates/db_views_moderator/src/admin_purge_community_view.rs b/crates/db_views_moderator/src/admin_purge_community_view.rs index 23967ee3b..5eadb8985 100644 --- a/crates/db_views_moderator/src/admin_purge_community_view.rs +++ b/crates/db_views_moderator/src/admin_purge_community_view.rs @@ -38,6 +38,11 @@ impl AdminPurgeCommunityView { query = query.filter(admin_purge_community::admin_person_id.eq(admin_person_id)); }; + // If a post or comment ID is given, then don't find any results + if params.post_id.is_some() || params.comment_id.is_some() { + return Ok(vec![]); + } + let (limit, offset) = limit_and_offset(params.page, params.limit)?; query diff --git a/crates/db_views_moderator/src/admin_purge_person_view.rs b/crates/db_views_moderator/src/admin_purge_person_view.rs index 097785d25..b6dd834c5 100644 --- a/crates/db_views_moderator/src/admin_purge_person_view.rs +++ b/crates/db_views_moderator/src/admin_purge_person_view.rs @@ -38,6 +38,11 @@ impl AdminPurgePersonView { query = query.filter(admin_purge_person::admin_person_id.eq(admin_person_id)); }; + // If a post or comment ID is given, then don't find any results + if params.post_id.is_some() || params.comment_id.is_some() { + return Ok(vec![]); + } + let (limit, offset) = limit_and_offset(params.page, params.limit)?; query diff --git a/crates/db_views_moderator/src/admin_purge_post_view.rs b/crates/db_views_moderator/src/admin_purge_post_view.rs index 8f5eb3a14..b77493c25 100644 --- a/crates/db_views_moderator/src/admin_purge_post_view.rs +++ b/crates/db_views_moderator/src/admin_purge_post_view.rs @@ -40,6 +40,11 @@ impl AdminPurgePostView { query = query.filter(admin_purge_post::admin_person_id.eq(admin_person_id)); }; + // If a post or comment ID is given, then don't find any results + if params.post_id.is_some() || params.comment_id.is_some() { + return Ok(vec![]); + } + let (limit, offset) = limit_and_offset(params.page, params.limit)?; query diff --git a/crates/db_views_moderator/src/mod_add_community_view.rs b/crates/db_views_moderator/src/mod_add_community_view.rs index f96a9b80b..1068aba75 100644 --- a/crates/db_views_moderator/src/mod_add_community_view.rs +++ b/crates/db_views_moderator/src/mod_add_community_view.rs @@ -52,6 +52,11 @@ impl ModAddCommunityView { query = query.filter(person_alias_1.field(person::id).eq(other_person_id)); }; + // If a post or comment ID is given, then don't find 
any results + if params.post_id.is_some() || params.comment_id.is_some() { + return Ok(vec![]); + } + let (limit, offset) = limit_and_offset(params.page, params.limit)?; query diff --git a/crates/db_views_moderator/src/mod_add_view.rs b/crates/db_views_moderator/src/mod_add_view.rs index 28fb0a2b6..c5612c4ad 100644 --- a/crates/db_views_moderator/src/mod_add_view.rs +++ b/crates/db_views_moderator/src/mod_add_view.rs @@ -44,6 +44,11 @@ impl ModAddView { query = query.filter(person_alias_1.field(person::id).eq(other_person_id)); }; + // If a post or comment ID is given, then don't find any results + if params.post_id.is_some() || params.comment_id.is_some() { + return Ok(vec![]); + } + let (limit, offset) = limit_and_offset(params.page, params.limit)?; query diff --git a/crates/db_views_moderator/src/mod_ban_from_community_view.rs b/crates/db_views_moderator/src/mod_ban_from_community_view.rs index 02f18099c..d2d6038f3 100644 --- a/crates/db_views_moderator/src/mod_ban_from_community_view.rs +++ b/crates/db_views_moderator/src/mod_ban_from_community_view.rs @@ -54,6 +54,11 @@ impl ModBanFromCommunityView { query = query.filter(mod_ban_from_community::other_person_id.eq(other_person_id)); }; + // If a post or comment ID is given, then don't find any results + if params.post_id.is_some() || params.comment_id.is_some() { + return Ok(vec![]); + } + let (limit, offset) = limit_and_offset(params.page, params.limit)?; query diff --git a/crates/db_views_moderator/src/mod_ban_view.rs b/crates/db_views_moderator/src/mod_ban_view.rs index 94ac360db..ca0723e83 100644 --- a/crates/db_views_moderator/src/mod_ban_view.rs +++ b/crates/db_views_moderator/src/mod_ban_view.rs @@ -44,6 +44,11 @@ impl ModBanView { query = query.filter(person_alias_1.field(person::id).eq(other_person_id)); }; + // If a post or comment ID is given, then don't find any results + if params.post_id.is_some() || params.comment_id.is_some() { + return Ok(vec![]); + } + let (limit, offset) = limit_and_offset(params.page, params.limit)?; query diff --git a/crates/db_views_moderator/src/mod_feature_post_view.rs b/crates/db_views_moderator/src/mod_feature_post_view.rs index 324816178..4c0fdb4f7 100644 --- a/crates/db_views_moderator/src/mod_feature_post_view.rs +++ b/crates/db_views_moderator/src/mod_feature_post_view.rs @@ -51,6 +51,15 @@ impl ModFeaturePostView { query = query.filter(person_alias_1.field(person::id).eq(other_person_id)); }; + if let Some(post_id) = params.post_id { + query = query.filter(post::id.eq(post_id)); + } + + // If a comment ID is given, then don't find any results + if params.comment_id.is_some() { + return Ok(vec![]); + } + let (limit, offset) = limit_and_offset(params.page, params.limit)?; query diff --git a/crates/db_views_moderator/src/mod_hide_community_view.rs b/crates/db_views_moderator/src/mod_hide_community_view.rs index 36b549814..3c8a7e627 100644 --- a/crates/db_views_moderator/src/mod_hide_community_view.rs +++ b/crates/db_views_moderator/src/mod_hide_community_view.rs @@ -45,6 +45,11 @@ impl ModHideCommunityView { query = query.filter(mod_hide_community::mod_person_id.eq(admin_id)); }; + // If a post or comment ID is given, then don't find any results + if params.post_id.is_some() || params.comment_id.is_some() { + return Ok(vec![]); + } + let (limit, offset) = limit_and_offset(params.page, params.limit)?; query diff --git a/crates/db_views_moderator/src/mod_lock_post_view.rs b/crates/db_views_moderator/src/mod_lock_post_view.rs index 7351b4f29..5a6c753d9 100644 --- 
a/crates/db_views_moderator/src/mod_lock_post_view.rs +++ b/crates/db_views_moderator/src/mod_lock_post_view.rs @@ -52,6 +52,15 @@ impl ModLockPostView { query = query.filter(person_alias_1.field(person::id).eq(other_person_id)); }; + if let Some(post_id) = params.post_id { + query = query.filter(post::id.eq(post_id)); + } + + // If a comment ID is given, then don't find any results + if params.comment_id.is_some() { + return Ok(vec![]); + } + let (limit, offset) = limit_and_offset(params.page, params.limit)?; query diff --git a/crates/db_views_moderator/src/mod_remove_comment_view.rs b/crates/db_views_moderator/src/mod_remove_comment_view.rs index 9b8a71a7b..cf0ed325c 100644 --- a/crates/db_views_moderator/src/mod_remove_comment_view.rs +++ b/crates/db_views_moderator/src/mod_remove_comment_view.rs @@ -54,6 +54,15 @@ impl ModRemoveCommentView { query = query.filter(person_alias_1.field(person::id).eq(other_person_id)); }; + if let Some(comment_id) = params.comment_id { + query = query.filter(comment::id.eq(comment_id)); + } + + // If a post ID is given, then don't find any results + if params.post_id.is_some() { + return Ok(vec![]); + } + let (limit, offset) = limit_and_offset(params.page, params.limit)?; query diff --git a/crates/db_views_moderator/src/mod_remove_community_view.rs b/crates/db_views_moderator/src/mod_remove_community_view.rs index 2bc92acc8..ac620ebdb 100644 --- a/crates/db_views_moderator/src/mod_remove_community_view.rs +++ b/crates/db_views_moderator/src/mod_remove_community_view.rs @@ -39,6 +39,11 @@ impl ModRemoveCommunityView { query = query.filter(mod_remove_community::mod_person_id.eq(mod_person_id)); }; + // If a post or comment ID is given, then don't find any results + if params.post_id.is_some() || params.comment_id.is_some() { + return Ok(vec![]); + } + let (limit, offset) = limit_and_offset(params.page, params.limit)?; query diff --git a/crates/db_views_moderator/src/mod_remove_post_view.rs b/crates/db_views_moderator/src/mod_remove_post_view.rs index 60469088c..98504a8e7 100644 --- a/crates/db_views_moderator/src/mod_remove_post_view.rs +++ b/crates/db_views_moderator/src/mod_remove_post_view.rs @@ -52,6 +52,15 @@ impl ModRemovePostView { query = query.filter(person_alias_1.field(person::id).eq(other_person_id)); }; + if let Some(post_id) = params.post_id { + query = query.filter(post::id.eq(post_id)); + } + + // If a comment ID is given, then don't find any results + if params.comment_id.is_some() { + return Ok(vec![]); + } + let (limit, offset) = limit_and_offset(params.page, params.limit)?; query diff --git a/crates/db_views_moderator/src/mod_transfer_community_view.rs b/crates/db_views_moderator/src/mod_transfer_community_view.rs index 3d48b0f67..6d62d347a 100644 --- a/crates/db_views_moderator/src/mod_transfer_community_view.rs +++ b/crates/db_views_moderator/src/mod_transfer_community_view.rs @@ -54,6 +54,11 @@ impl ModTransferCommunityView { query = query.filter(person_alias_1.field(person::id).eq(other_person_id)); }; + // If a post or comment ID is given, then don't find any results + if params.post_id.is_some() || params.comment_id.is_some() { + return Ok(vec![]); + } + let (limit, offset) = limit_and_offset(params.page, params.limit)?; query diff --git a/crates/db_views_moderator/src/structs.rs b/crates/db_views_moderator/src/structs.rs index 549b38796..10ad78942 100644 --- a/crates/db_views_moderator/src/structs.rs +++ b/crates/db_views_moderator/src/structs.rs @@ -1,7 +1,7 @@ #[cfg(feature = "full")] use diesel::Queryable; use 
lemmy_db_schema::{ - newtypes::{CommunityId, PersonId}, + newtypes::{CommentId, CommunityId, PersonId, PostId}, source::{ comment::Comment, community::Community, @@ -34,6 +34,7 @@ use ts_rs::TS; #[skip_serializing_none] #[derive(Debug, Serialize, Deserialize, Clone)] #[cfg_attr(feature = "full", derive(TS, Queryable))] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] #[cfg_attr(feature = "full", ts(export))] /// When someone is added as a community moderator. pub struct ModAddCommunityView { @@ -46,6 +47,7 @@ pub struct ModAddCommunityView { #[skip_serializing_none] #[derive(Debug, Serialize, Deserialize, Clone)] #[cfg_attr(feature = "full", derive(TS, Queryable))] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] #[cfg_attr(feature = "full", ts(export))] /// When someone is added as a site moderator. pub struct ModAddView { @@ -57,6 +59,7 @@ pub struct ModAddView { #[skip_serializing_none] #[derive(Debug, Serialize, Deserialize, Clone)] #[cfg_attr(feature = "full", derive(TS, Queryable))] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] #[cfg_attr(feature = "full", ts(export))] /// When someone is banned from a community. pub struct ModBanFromCommunityView { @@ -69,6 +72,7 @@ pub struct ModBanFromCommunityView { #[skip_serializing_none] #[derive(Debug, Serialize, Deserialize, Clone)] #[cfg_attr(feature = "full", derive(TS, Queryable))] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] #[cfg_attr(feature = "full", ts(export))] /// When someone is banned from the site. pub struct ModBanView { @@ -80,6 +84,7 @@ pub struct ModBanView { #[skip_serializing_none] #[derive(Debug, Serialize, Deserialize, Clone)] #[cfg_attr(feature = "full", derive(TS, Queryable))] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] #[cfg_attr(feature = "full", ts(export))] /// When a community is hidden from public view. pub struct ModHideCommunityView { @@ -91,6 +96,7 @@ pub struct ModHideCommunityView { #[skip_serializing_none] #[derive(Debug, Serialize, Deserialize, Clone)] #[cfg_attr(feature = "full", derive(TS, Queryable))] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] #[cfg_attr(feature = "full", ts(export))] /// When a moderator locks a post (prevents new comments being made). pub struct ModLockPostView { @@ -103,6 +109,7 @@ pub struct ModLockPostView { #[skip_serializing_none] #[derive(Debug, Serialize, Deserialize, Clone)] #[cfg_attr(feature = "full", derive(TS, Queryable))] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] #[cfg_attr(feature = "full", ts(export))] /// When a moderator removes a comment. pub struct ModRemoveCommentView { @@ -117,6 +124,7 @@ pub struct ModRemoveCommentView { #[skip_serializing_none] #[derive(Debug, Serialize, Deserialize, Clone)] #[cfg_attr(feature = "full", derive(TS, Queryable))] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] #[cfg_attr(feature = "full", ts(export))] /// When a moderator removes a community. pub struct ModRemoveCommunityView { @@ -128,6 +136,7 @@ pub struct ModRemoveCommunityView { #[skip_serializing_none] #[derive(Debug, Serialize, Deserialize, Clone)] #[cfg_attr(feature = "full", derive(TS, Queryable))] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] #[cfg_attr(feature = "full", ts(export))] /// When a moderator removes a post. 
pub struct ModRemovePostView { @@ -140,6 +149,7 @@ pub struct ModRemovePostView { #[skip_serializing_none] #[derive(Debug, Serialize, Deserialize, Clone)] #[cfg_attr(feature = "full", derive(TS, Queryable))] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] #[cfg_attr(feature = "full", ts(export))] /// When a moderator features a post on a community (pins it to the top). pub struct ModFeaturePostView { @@ -152,6 +162,7 @@ pub struct ModFeaturePostView { #[skip_serializing_none] #[derive(Debug, Serialize, Deserialize, Clone)] #[cfg_attr(feature = "full", derive(TS, Queryable))] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] #[cfg_attr(feature = "full", ts(export))] /// When a moderator transfers a community to a new owner. pub struct ModTransferCommunityView { @@ -164,6 +175,7 @@ pub struct ModTransferCommunityView { #[skip_serializing_none] #[derive(Debug, Serialize, Deserialize, Clone)] #[cfg_attr(feature = "full", derive(TS, Queryable))] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] #[cfg_attr(feature = "full", ts(export))] /// When an admin purges a comment. pub struct AdminPurgeCommentView { @@ -175,6 +187,7 @@ pub struct AdminPurgeCommentView { #[skip_serializing_none] #[derive(Debug, Serialize, Deserialize, Clone)] #[cfg_attr(feature = "full", derive(TS, Queryable))] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] #[cfg_attr(feature = "full", ts(export))] /// When an admin purges a community. pub struct AdminPurgeCommunityView { @@ -185,6 +198,7 @@ pub struct AdminPurgeCommunityView { #[skip_serializing_none] #[derive(Debug, Serialize, Deserialize, Clone)] #[cfg_attr(feature = "full", derive(TS, Queryable))] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] #[cfg_attr(feature = "full", ts(export))] /// When an admin purges a person. pub struct AdminPurgePersonView { @@ -195,6 +209,7 @@ pub struct AdminPurgePersonView { #[skip_serializing_none] #[derive(Debug, Serialize, Deserialize, Clone)] #[cfg_attr(feature = "full", derive(TS, Queryable))] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] #[cfg_attr(feature = "full", ts(export))] /// When an admin purges a post. pub struct AdminPurgePostView { @@ -206,12 +221,15 @@ pub struct AdminPurgePostView { #[skip_serializing_none] #[derive(Debug, Serialize, Deserialize, Clone, Copy)] #[cfg_attr(feature = "full", derive(TS, Queryable))] +#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] #[cfg_attr(feature = "full", ts(export))] /// Querying / filtering the modlog. 
pub struct ModlogListParams { pub community_id: Option, pub mod_person_id: Option, pub other_person_id: Option, + pub post_id: Option, + pub comment_id: Option, pub page: Option, pub limit: Option, pub hide_modlog_names: bool, diff --git a/crates/federate/Cargo.toml b/crates/federate/Cargo.toml index 0c394d9f4..5d7454276 100644 --- a/crates/federate/Cargo.toml +++ b/crates/federate/Cargo.toml @@ -1,5 +1,6 @@ [package] name = "lemmy_federate" +publish = false version.workspace = true edition.workspace = true description.workspace = true @@ -10,6 +11,9 @@ repository.workspace = true # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html +[lints] +workspace = true + [dependencies] lemmy_api_common.workspace = true lemmy_apub.workspace = true @@ -23,19 +27,19 @@ futures.workspace = true chrono.workspace = true diesel = { workspace = true, features = ["postgres", "chrono", "serde_json"] } diesel-async = { workspace = true, features = ["deadpool", "postgres"] } -once_cell.workspace = true reqwest.workspace = true serde_json.workspace = true -serde.workspace = true tokio = { workspace = true, features = ["full"] } tracing.workspace = true +moka.workspace = true +tokio-util = "0.7.12" +async-trait.workspace = true -async-trait = "0.1.71" -bytes = "1.4.0" -enum_delegate = "0.2.0" -moka = { version = "0.11.2", features = ["future"] } -openssl = "0.10.55" -reqwest-middleware = "0.2.2" -reqwest-tracing = "0.4.5" -tokio-util = "0.7.8" -tracing-subscriber = "0.3.17" +[dev-dependencies] +serial_test = { workspace = true } +url.workspace = true +actix-web.workspace = true +tracing-test = "0.2.5" +uuid.workspace = true +test-context = "0.3.0" +mockall = "0.13.0" diff --git a/crates/federate/src/federation_queue_state.rs b/crates/federate/src/federation_queue_state.rs deleted file mode 100644 index 8a3506121..000000000 --- a/crates/federate/src/federation_queue_state.rs +++ /dev/null @@ -1,63 +0,0 @@ -use crate::util::ActivityId; -use anyhow::Result; -use chrono::{DateTime, TimeZone, Utc}; -use diesel::prelude::*; -use diesel_async::RunQueryDsl; -use lemmy_db_schema::{ - newtypes::InstanceId, - utils::{get_conn, DbPool}, -}; - -#[derive(Queryable, Selectable, Insertable, AsChangeset, Clone)] -#[diesel(table_name = lemmy_db_schema::schema::federation_queue_state)] -#[diesel(check_for_backend(diesel::pg::Pg))] -pub struct FederationQueueState { - pub instance_id: InstanceId, - pub last_successful_id: ActivityId, // todo: i64 - pub fail_count: i32, - pub last_retry: DateTime, -} - -impl FederationQueueState { - /// load state or return a default empty value - pub async fn load( - pool: &mut DbPool<'_>, - instance_id_: InstanceId, - ) -> Result { - use lemmy_db_schema::schema::federation_queue_state::dsl::{ - federation_queue_state, - instance_id, - }; - let conn = &mut get_conn(pool).await?; - Ok( - federation_queue_state - .filter(instance_id.eq(&instance_id_)) - .select(FederationQueueState::as_select()) - .get_result(conn) - .await - .optional()? 
- .unwrap_or(FederationQueueState { - instance_id: instance_id_, - fail_count: 0, - last_retry: Utc.timestamp_nanos(0), - last_successful_id: -1, // this value is set to the most current id for new instances - }), - ) - } - pub async fn upsert(pool: &mut DbPool<'_>, state: &FederationQueueState) -> Result<()> { - use lemmy_db_schema::schema::federation_queue_state::dsl::{ - federation_queue_state, - instance_id, - }; - let conn = &mut get_conn(pool).await?; - - state - .insert_into(federation_queue_state) - .on_conflict(instance_id) - .do_update() - .set(state) - .execute(conn) - .await?; - Ok(()) - } -} diff --git a/crates/federate/src/inboxes.rs b/crates/federate/src/inboxes.rs new file mode 100644 index 000000000..1649e019f --- /dev/null +++ b/crates/federate/src/inboxes.rs @@ -0,0 +1,597 @@ +use crate::util::LEMMY_TEST_FAST_FEDERATION; +use anyhow::Result; +use async_trait::async_trait; +use chrono::{DateTime, TimeZone, Utc}; +use lemmy_db_schema::{ + newtypes::{CommunityId, DbUrl, InstanceId}, + source::{activity::SentActivity, site::Site}, + utils::{ActualDbPool, DbPool}, +}; +use lemmy_db_views_actor::structs::CommunityFollowerView; +use reqwest::Url; +use std::{ + collections::{HashMap, HashSet}, + sync::LazyLock, +}; + +/// interval with which new additions to community_followers are queried. +/// +/// The first time some user on an instance follows a specific remote community (or, more precisely: +/// the first time a (followed_community_id, follower_inbox_url) tuple appears), this delay limits +/// the maximum time until the follow actually results in activities from that community id being +/// sent to that inbox url. This delay currently needs to not be too small because the DB load is +/// currently fairly high because of the current structure of storing inboxes for every person, not +/// having a separate list of shared_inboxes, and the architecture of having every instance queue be +/// fully separate. (see https://github.com/LemmyNet/lemmy/issues/3958) +static FOLLOW_ADDITIONS_RECHECK_DELAY: LazyLock = LazyLock::new(|| { + if *LEMMY_TEST_FAST_FEDERATION { + chrono::TimeDelta::try_seconds(1).expect("TimeDelta out of bounds") + } else { + chrono::TimeDelta::try_minutes(2).expect("TimeDelta out of bounds") + } +}); +/// The same as FOLLOW_ADDITIONS_RECHECK_DELAY, but triggering when the last person on an instance +/// unfollows a specific remote community. This is expected to happen pretty rarely and updating it +/// in a timely manner is not too important. 
+static FOLLOW_REMOVALS_RECHECK_DELAY: LazyLock<chrono::TimeDelta> = + LazyLock::new(|| chrono::TimeDelta::try_hours(1).expect("TimeDelta out of bounds")); + +#[async_trait] +pub trait DataSource: Send + Sync { + async fn read_site_from_instance_id( + &self, + instance_id: InstanceId, + ) -> Result<Option<Site>, diesel::result::Error>; + async fn get_instance_followed_community_inboxes( + &self, + instance_id: InstanceId, + last_fetch: DateTime<Utc>, + ) -> Result<Vec<(CommunityId, DbUrl)>, diesel::result::Error>; +} +pub struct DbDataSource { + pool: ActualDbPool, +} + +impl DbDataSource { + pub fn new(pool: ActualDbPool) -> Self { + Self { pool } + } +} + +#[async_trait] +impl DataSource for DbDataSource { + async fn read_site_from_instance_id( + &self, + instance_id: InstanceId, + ) -> Result<Option<Site>, diesel::result::Error> { + Site::read_from_instance_id(&mut DbPool::Pool(&self.pool), instance_id).await + } + + async fn get_instance_followed_community_inboxes( + &self, + instance_id: InstanceId, + last_fetch: DateTime<Utc>, + ) -> Result<Vec<(CommunityId, DbUrl)>, diesel::result::Error> { + CommunityFollowerView::get_instance_followed_community_inboxes( + &mut DbPool::Pool(&self.pool), + instance_id, + last_fetch, + ) + .await + } +} + +pub(crate) struct CommunityInboxCollector<T: DataSource> { + // load site lazily because if an instance is first seen due to being on allowlist, + // the corresponding row in `site` may not exist yet since that is only added once + // `fetch_instance_actor_for_object` is called. + // (this should be unlikely to be relevant outside of the federation tests) + site_loaded: bool, + site: Option<Site>, + followed_communities: HashMap<CommunityId, HashSet<Url>>, + last_full_communities_fetch: DateTime<Utc>, + last_incremental_communities_fetch: DateTime<Utc>, + instance_id: InstanceId, + domain: String, + pub(crate) data_source: T, +} + +pub type RealCommunityInboxCollector = CommunityInboxCollector<DbDataSource>; + +impl<T: DataSource> CommunityInboxCollector<T> { + pub fn new_real( + pool: ActualDbPool, + instance_id: InstanceId, + domain: String, + ) -> RealCommunityInboxCollector { + CommunityInboxCollector::new(DbDataSource::new(pool), instance_id, domain) + } + pub fn new( + data_source: T, + instance_id: InstanceId, + domain: String, + ) -> CommunityInboxCollector<T> { + CommunityInboxCollector { + data_source, + site_loaded: false, + site: None, + followed_communities: HashMap::new(), + last_full_communities_fetch: Utc.timestamp_nanos(0), + last_incremental_communities_fetch: Utc.timestamp_nanos(0), + instance_id, + domain, + } + } + /// get inbox urls for sending the given activity to the given instance + /// most often this will return 0 values (if instance doesn't care about the activity) + /// or 1 value (the shared inbox) + /// > 1 values only happen for non-lemmy software + pub async fn get_inbox_urls(&mut self, activity: &SentActivity) -> Result<Vec<Url>> { + let mut inbox_urls: HashSet<Url> = HashSet::new(); + + if activity.send_all_instances { + if !self.site_loaded { + self.site = self + .data_source + .read_site_from_instance_id(self.instance_id) + .await?; + self.site_loaded = true; + } + if let Some(site) = &self.site { + // Nutomic: Most non-lemmy software won't have a site row. That means it can't handle these + // activities. So handling it like this is fine.
+ inbox_urls.insert(site.inbox_url.inner().clone()); + } + } + if let Some(t) = &activity.send_community_followers_of { + if let Some(urls) = self.followed_communities.get(t) { + inbox_urls.extend(urls.iter().cloned()); + } + } + inbox_urls.extend( + activity + .send_inboxes + .iter() + .filter_map(std::option::Option::as_ref) + // a similar filter also happens within the activitypub-federation crate, but that filter + // happens much later - by doing it here, we can ensure that in the happy case, this + // function returns 0 urls which means the system doesn't have to create a tokio + // task for sending at all (since that task has a fair amount of overhead) + .filter(|&u| (u.domain() == Some(&self.domain))) + .map(|u| u.inner().clone()), + ); + tracing::trace!( + "get_inbox_urls: {:?}, send_inboxes: {:?}", + inbox_urls, + activity.send_inboxes + ); + Ok(inbox_urls.into_iter().collect()) + } + + pub async fn update_communities(&mut self) -> Result<()> { + if (Utc::now() - self.last_full_communities_fetch) > *FOLLOW_REMOVALS_RECHECK_DELAY { + tracing::debug!("{}: fetching full list of communities", self.domain); + // process removals every hour + (self.followed_communities, self.last_full_communities_fetch) = self + .get_communities(self.instance_id, Utc.timestamp_nanos(0)) + .await?; + self.last_incremental_communities_fetch = self.last_full_communities_fetch; + } + if (Utc::now() - self.last_incremental_communities_fetch) > *FOLLOW_ADDITIONS_RECHECK_DELAY { + // process additions every minute + let (news, time) = self + .get_communities(self.instance_id, self.last_incremental_communities_fetch) + .await?; + if !news.is_empty() { + tracing::debug!( + "{}: fetched {} incremental new followed communities", + self.domain, + news.len() + ); + } + self.followed_communities.extend(news); + self.last_incremental_communities_fetch = time; + } + Ok(()) + } + + /// get a list of local communities with the remote inboxes on the given instance that care about + /// them + async fn get_communities( + &mut self, + instance_id: InstanceId, + last_fetch: DateTime<Utc>, + ) -> Result<(HashMap<CommunityId, HashSet<Url>>, DateTime<Utc>)> { + // update to time before fetch to ensure overlap. subtract some time to ensure overlap even if + // published date is not exact + let new_last_fetch = Utc::now() - *FOLLOW_ADDITIONS_RECHECK_DELAY / 2; + + let inboxes = self + .data_source + .get_instance_followed_community_inboxes(instance_id, last_fetch) + .await?; + + let map: HashMap<CommunityId, HashSet<Url>> = + inboxes.into_iter().fold(HashMap::new(), |mut map, (c, u)| { + map.entry(c).or_default().insert(u.into()); + map + }); + + Ok((map, new_last_fetch)) + } +} + +#[cfg(test)] +#[expect(clippy::indexing_slicing)] +mod tests { + use super::*; + use lemmy_db_schema::{ + newtypes::{ActivityId, CommunityId, InstanceId, SiteId}, + source::activity::{ActorType, SentActivity}, + }; + use lemmy_utils::error::LemmyResult; + use mockall::{mock, predicate::*}; + use serde_json::json; + mock!
{ + DataSource {} + #[async_trait] + impl DataSource for DataSource { + async fn read_site_from_instance_id(&self, instance_id: InstanceId) -> Result, diesel::result::Error>; + async fn get_instance_followed_community_inboxes( + &self, + instance_id: InstanceId, + last_fetch: DateTime, + ) -> Result, diesel::result::Error>; + } + } + + fn setup_collector() -> CommunityInboxCollector { + let mock_data_source = MockDataSource::new(); + let instance_id = InstanceId(1); + let domain = "example.com".to_string(); + CommunityInboxCollector::new(mock_data_source, instance_id, domain) + } + + #[tokio::test] + async fn test_get_inbox_urls_empty() -> LemmyResult<()> { + let mut collector = setup_collector(); + let activity = SentActivity { + id: ActivityId(1), + ap_id: Url::parse("https://example.com/activities/1")?.into(), + data: json!({}), + sensitive: false, + published: Utc::now(), + send_inboxes: vec![], + send_community_followers_of: None, + send_all_instances: false, + actor_type: ActorType::Person, + actor_apub_id: None, + }; + + let result = collector.get_inbox_urls(&activity).await?; + assert!(result.is_empty()); + + Ok(()) + } + + #[tokio::test] + async fn test_get_inbox_urls_send_all_instances() -> LemmyResult<()> { + let mut collector = setup_collector(); + let site_inbox = Url::parse("https://example.com/inbox")?; + let site = Site { + id: SiteId(1), + name: "Test Site".to_string(), + sidebar: None, + published: Utc::now(), + updated: None, + icon: None, + banner: None, + description: None, + actor_id: Url::parse("https://example.com/site")?.into(), + last_refreshed_at: Utc::now(), + inbox_url: site_inbox.clone().into(), + private_key: None, + public_key: "test_key".to_string(), + instance_id: InstanceId(1), + content_warning: None, + }; + + collector + .data_source + .expect_read_site_from_instance_id() + .return_once(move |_| Ok(Some(site))); + + let activity = SentActivity { + id: ActivityId(1), + ap_id: Url::parse("https://example.com/activities/1")?.into(), + data: json!({}), + sensitive: false, + published: Utc::now(), + send_inboxes: vec![], + send_community_followers_of: None, + send_all_instances: true, + actor_type: ActorType::Person, + actor_apub_id: None, + }; + + let result = collector.get_inbox_urls(&activity).await?; + assert_eq!(result.len(), 1); + assert_eq!(result[0], site_inbox); + + Ok(()) + } + + #[tokio::test] + async fn test_get_inbox_urls_community_followers() -> LemmyResult<()> { + let mut collector = setup_collector(); + let community_id = CommunityId(1); + let url1 = "https://follower1.example.com/inbox"; + let url2 = "https://follower2.example.com/inbox"; + + collector + .data_source + .expect_get_instance_followed_community_inboxes() + .return_once(move |_, _| { + Ok(vec![ + ( + community_id, + Url::parse(url1).map_err(|_| diesel::NotFound)?.into(), + ), + ( + community_id, + Url::parse(url2).map_err(|_| diesel::NotFound)?.into(), + ), + ]) + }); + + collector.update_communities().await?; + + let activity = SentActivity { + id: ActivityId(1), + ap_id: Url::parse("https://example.com/activities/1")?.into(), + data: json!({}), + sensitive: false, + published: Utc::now(), + send_inboxes: vec![], + send_community_followers_of: Some(community_id), + send_all_instances: false, + actor_type: ActorType::Person, + actor_apub_id: None, + }; + + let result = collector.get_inbox_urls(&activity).await?; + assert_eq!(result.len(), 2); + assert!(result.contains(&Url::parse(url1)?)); + assert!(result.contains(&Url::parse(url2)?)); + + Ok(()) + } + + #[tokio::test] + async 
fn test_get_inbox_urls_send_inboxes() -> LemmyResult<()> { + let mut collector = setup_collector(); + collector.domain = "example.com".to_string(); + let inbox_user_1 = Url::parse("https://example.com/user1/inbox")?; + let inbox_user_2 = Url::parse("https://example.com/user2/inbox")?; + let other_domain_inbox = Url::parse("https://other-domain.com/user3/inbox")?; + let activity = SentActivity { + id: ActivityId(1), + ap_id: Url::parse("https://example.com/activities/1")?.into(), + data: json!({}), + sensitive: false, + published: Utc::now(), + send_inboxes: vec![ + Some(inbox_user_1.clone().into()), + Some(inbox_user_2.clone().into()), + Some(other_domain_inbox.clone().into()), + ], + send_community_followers_of: None, + send_all_instances: false, + actor_type: ActorType::Person, + actor_apub_id: None, + }; + + let result = collector.get_inbox_urls(&activity).await?; + assert_eq!(result.len(), 2); + assert!(result.contains(&inbox_user_1)); + assert!(result.contains(&inbox_user_2)); + assert!(!result.contains(&other_domain_inbox)); + + Ok(()) + } + + #[tokio::test] + async fn test_get_inbox_urls_combined() -> LemmyResult<()> { + let mut collector = setup_collector(); + collector.domain = "example.com".to_string(); + let community_id = CommunityId(1); + + let site_inbox = Url::parse("https://example.com/site_inbox")?; + let site = Site { + id: SiteId(1), + name: "Test Site".to_string(), + sidebar: None, + published: Utc::now(), + updated: None, + icon: None, + banner: None, + description: None, + actor_id: Url::parse("https://example.com/site")?.into(), + last_refreshed_at: Utc::now(), + inbox_url: site_inbox.clone().into(), + private_key: None, + public_key: "test_key".to_string(), + instance_id: InstanceId(1), + content_warning: None, + }; + + collector + .data_source + .expect_read_site_from_instance_id() + .return_once(move |_| Ok(Some(site))); + + let subdomain_inbox = "https://follower.example.com/inbox"; + collector + .data_source + .expect_get_instance_followed_community_inboxes() + .return_once(move |_, _| { + Ok(vec![( + community_id, + Url::parse(subdomain_inbox) + .map_err(|_| diesel::NotFound)? 
+ .into(), + )]) + }); + + collector.update_communities().await?; + let user1_inbox = Url::parse("https://example.com/user1/inbox")?; + let user2_inbox = Url::parse("https://other-domain.com/user2/inbox")?; + let activity = SentActivity { + id: ActivityId(1), + ap_id: Url::parse("https://example.com/activities/1")?.into(), + data: json!({}), + sensitive: false, + published: Utc::now(), + send_inboxes: vec![ + Some(user1_inbox.clone().into()), + Some(user2_inbox.clone().into()), + ], + send_community_followers_of: Some(community_id), + send_all_instances: true, + actor_type: ActorType::Person, + actor_apub_id: None, + }; + + let result = collector.get_inbox_urls(&activity).await?; + assert_eq!(result.len(), 3); + assert!(result.contains(&site_inbox)); + assert!(result.contains(&Url::parse(subdomain_inbox)?)); + assert!(result.contains(&user1_inbox)); + assert!(!result.contains(&user2_inbox)); + + Ok(()) + } + + #[tokio::test] + async fn test_update_communities() -> LemmyResult<()> { + let mut collector = setup_collector(); + let community_id1 = CommunityId(1); + let community_id2 = CommunityId(2); + let community_id3 = CommunityId(3); + + let user1_inbox_str = "https://follower1.example.com/inbox"; + let user1_inbox = Url::parse(user1_inbox_str)?; + let user2_inbox_str = "https://follower2.example.com/inbox"; + let user2_inbox = Url::parse(user2_inbox_str)?; + let user3_inbox_str = "https://follower3.example.com/inbox"; + let user3_inbox = Url::parse(user3_inbox_str)?; + + collector + .data_source + .expect_get_instance_followed_community_inboxes() + .times(2) + .returning(move |_, last_fetch| { + if last_fetch == Utc.timestamp_nanos(0) { + Ok(vec![ + ( + community_id1, + Url::parse(user1_inbox_str) + .map_err(|_| diesel::NotFound)? + .into(), + ), + ( + community_id2, + Url::parse(user2_inbox_str) + .map_err(|_| diesel::NotFound)? + .into(), + ), + ]) + } else { + Ok(vec![( + community_id3, + Url::parse(user3_inbox_str) + .map_err(|_| diesel::NotFound)? 
+ .into(), + )]) + } + }); + + // First update + collector.update_communities().await?; + assert_eq!(collector.followed_communities.len(), 2); + assert!(collector.followed_communities[&community_id1].contains(&user1_inbox)); + assert!(collector.followed_communities[&community_id2].contains(&user2_inbox)); + + // Simulate time passing + collector.last_full_communities_fetch = + Utc::now() - chrono::TimeDelta::try_minutes(3).expect("TimeDelta out of bounds"); + collector.last_incremental_communities_fetch = + Utc::now() - chrono::TimeDelta::try_minutes(3).expect("TimeDelta out of bounds"); + + // Second update (incremental) + collector.update_communities().await?; + assert_eq!(collector.followed_communities.len(), 3); + assert!(collector.followed_communities[&community_id1].contains(&user1_inbox)); + assert!(collector.followed_communities[&community_id3].contains(&user3_inbox)); + assert!(collector.followed_communities[&community_id2].contains(&user2_inbox)); + + Ok(()) + } + + #[tokio::test] + async fn test_get_inbox_urls_no_duplicates() -> LemmyResult<()> { + let mut collector = setup_collector(); + collector.domain = "example.com".to_string(); + let community_id = CommunityId(1); + let site_inbox = Url::parse("https://example.com/site_inbox")?; + let site_inbox_clone = site_inbox.clone(); + let site = Site { + id: SiteId(1), + name: "Test Site".to_string(), + sidebar: None, + published: Utc::now(), + updated: None, + icon: None, + banner: None, + description: None, + actor_id: Url::parse("https://example.com/site")?.into(), + last_refreshed_at: Utc::now(), + inbox_url: site_inbox.clone().into(), + private_key: None, + public_key: "test_key".to_string(), + instance_id: InstanceId(1), + content_warning: None, + }; + + collector + .data_source + .expect_read_site_from_instance_id() + .return_once(move |_| Ok(Some(site))); + + collector + .data_source + .expect_get_instance_followed_community_inboxes() + .return_once(move |_, _| Ok(vec![(community_id, site_inbox_clone.into())])); + + collector.update_communities().await?; + + let activity = SentActivity { + id: ActivityId(1), + ap_id: Url::parse("https://example.com/activities/1")?.into(), + data: json!({}), + sensitive: false, + published: Utc::now(), + send_inboxes: vec![Some(site_inbox.into())], + send_community_followers_of: Some(community_id), + send_all_instances: true, + actor_type: ActorType::Person, + actor_apub_id: None, + }; + + let result = collector.get_inbox_urls(&activity).await?; + assert_eq!(result.len(), 1); + assert!(result.contains(&Url::parse("https://example.com/site_inbox")?)); + + Ok(()) + } +} diff --git a/crates/federate/src/lib.rs b/crates/federate/src/lib.rs index dad7daad6..983749de3 100644 --- a/crates/federate/src/lib.rs +++ b/crates/federate/src/lib.rs @@ -1,24 +1,25 @@ -use crate::{ - util::{retry_sleep_duration, CancellableTask}, - worker::InstanceWorker, -}; +use crate::{util::CancellableTask, worker::InstanceWorker}; use activitypub_federation::config::FederationConfig; -use chrono::{Local, Timelike}; -use federation_queue_state::FederationQueueState; -use lemmy_api_common::context::LemmyContext; -use lemmy_db_schema::{ - newtypes::InstanceId, - source::instance::Instance, - utils::{ActualDbPool, DbPool}, +use lemmy_api_common::{ + context::LemmyContext, + lemmy_utils::settings::structs::FederationWorkerConfig, }; +use lemmy_db_schema::{newtypes::InstanceId, source::instance::Instance}; +use lemmy_utils::error::LemmyResult; +use stats::receive_print_stats; use std::{collections::HashMap, time::Duration}; 
use tokio::{ - sync::mpsc::{unbounded_channel, UnboundedReceiver}, + sync::mpsc::{unbounded_channel, UnboundedSender}, + task::JoinHandle, time::sleep, }; use tokio_util::sync::CancellationToken; +use tracing::info; +use util::FederationQueueStateWithDomain; -mod federation_queue_state; +mod inboxes; +mod send; +mod stats; mod util; mod worker; @@ -28,6 +29,7 @@ static INSTANCES_RECHECK_DELAY: Duration = Duration::from_secs(5); #[cfg(not(debug_assertions))] static INSTANCES_RECHECK_DELAY: Duration = Duration::from_secs(60); +#[derive(Clone)] pub struct Opts { /// how many processes you are starting in total pub process_count: i32, @@ -35,173 +37,336 @@ pub struct Opts { pub process_index: i32, } -async fn start_stop_federation_workers( +pub struct SendManager { opts: Opts, - pool: ActualDbPool, - federation_config: FederationConfig, - cancel: CancellationToken, -) -> anyhow::Result<()> { - let mut workers = HashMap::>::new(); + workers: HashMap, + context: FederationConfig, + stats_sender: UnboundedSender, + exit_print: JoinHandle<()>, + federation_worker_config: FederationWorkerConfig, +} - let (stats_sender, stats_receiver) = unbounded_channel(); - let exit_print = tokio::spawn(receive_print_stats(pool.clone(), stats_receiver)); - let pool2 = &mut DbPool::Pool(&pool); - let process_index = opts.process_index - 1; - let local_domain = federation_config.settings().get_hostname_without_port()?; - loop { - let mut total_count = 0; - let mut dead_count = 0; - let mut disallowed_count = 0; - for (instance, allowed, is_dead) in Instance::read_all_with_blocked_and_dead(pool2).await? { - if instance.domain == local_domain { - continue; +impl SendManager { + fn new( + opts: Opts, + context: FederationConfig, + federation_worker_config: FederationWorkerConfig, + ) -> Self { + assert!(opts.process_count > 0); + assert!(opts.process_index > 0); + assert!(opts.process_index <= opts.process_count); + + let (stats_sender, stats_receiver) = unbounded_channel(); + Self { + opts, + workers: HashMap::new(), + stats_sender, + exit_print: tokio::spawn(receive_print_stats( + context.inner_pool().clone(), + stats_receiver, + )), + context, + federation_worker_config, + } + } + + pub fn run( + opts: Opts, + context: FederationConfig, + config: FederationWorkerConfig, + ) -> CancellableTask { + CancellableTask::spawn(WORKER_EXIT_TIMEOUT, move |cancel| { + let opts = opts.clone(); + let config = config.clone(); + let context = context.clone(); + let mut manager = Self::new(opts, context, config); + async move { + let result = manager.do_loop(cancel).await; + // the loop function will only return if there is (a) an internal error (e.g. db connection + // failure) or (b) it was cancelled from outside. + if let Err(e) = result { + // don't let this error bubble up, just log it, so the below cancel function will run + // regardless + tracing::error!("SendManager failed: {e}"); + } + // cancel all the dependent workers as well to ensure they don't get orphaned and keep + // running. 
+ manager.cancel().await?; + LemmyResult::Ok(()) + // if the task was not intentionally cancelled, then this whole lambda will be run again by + // CancellableTask after this } - if instance.id.inner() % opts.process_count != process_index { - continue; - } - total_count += 1; - if !allowed { - disallowed_count += 1; - } - if is_dead { - dead_count += 1; - } - let should_federate = allowed && !is_dead; - if should_federate { - if workers.contains_key(&instance.id) { - if workers - .get(&instance.id) - .map(util::CancellableTask::has_ended) - .unwrap_or(false) - { - // task must have errored out, remove and recreated it - let worker = workers - .remove(&instance.id) - .expect("just checked contains_key"); - tracing::error!( - "worker for {} has stopped, recreating: {:?}", - instance.domain, - worker.cancel().await - ); - } else { + }) + } + + async fn do_loop(&mut self, cancel: CancellationToken) -> LemmyResult<()> { + let process_index = self.opts.process_index - 1; + info!( + "Starting federation workers for process count {} and index {}", + self.opts.process_count, process_index + ); + let local_domain = self.context.settings().get_hostname_without_port()?; + let mut pool = self.context.pool(); + loop { + let mut total_count = 0; + let mut dead_count = 0; + let mut disallowed_count = 0; + for (instance, allowed, is_dead) in + Instance::read_federated_with_blocked_and_dead(&mut pool).await? + { + if instance.domain == local_domain { + continue; + } + if instance.id.inner() % self.opts.process_count != process_index { + continue; + } + total_count += 1; + if !allowed { + disallowed_count += 1; + } + if is_dead { + dead_count += 1; + } + let should_federate = allowed && !is_dead; + if should_federate { + if self.workers.contains_key(&instance.id) { + // worker already running continue; } - } - // create new worker - let stats_sender = stats_sender.clone(); - let context = federation_config.to_request_data(); - let pool = pool.clone(); - workers.insert( - instance.id, - CancellableTask::spawn(WORKER_EXIT_TIMEOUT, |stop| async move { - InstanceWorker::init_and_loop( - instance, - context, - &mut DbPool::Pool(&pool), - stop, - stats_sender, - ) - .await?; - Ok(()) - }), - ); - } else if !should_federate { - if let Some(worker) = workers.remove(&instance.id) { - if let Err(e) = worker.cancel().await { - tracing::error!("error stopping worker: {e}"); + // create new worker + let context = self.context.clone(); + let stats_sender = self.stats_sender.clone(); + let federation_worker_config = self.federation_worker_config.clone(); + + self.workers.insert( + instance.id, + CancellableTask::spawn(WORKER_EXIT_TIMEOUT, move |stop| { + // if the instance worker ends unexpectedly due to internal/db errors, this lambda is + // rerun by cancellabletask. + let instance = instance.clone(); + InstanceWorker::init_and_loop( + instance, + context.clone(), + federation_worker_config.clone(), + stop, + stats_sender.clone(), + ) + }), + ); + } else if !should_federate { + if let Some(worker) = self.workers.remove(&instance.id) { + if let Err(e) = worker.cancel().await { + tracing::error!("error stopping worker: {e}"); + } } } } - } - let worker_count = workers.len(); - tracing::info!("Federating to {worker_count}/{total_count} instances ({dead_count} dead, {disallowed_count} disallowed)"); - tokio::select! 
{ - () = sleep(INSTANCES_RECHECK_DELAY) => {}, - _ = cancel.cancelled() => { break; } - } - } - drop(stats_sender); - tracing::warn!( - "Waiting for {} workers ({:.2?} max)", - workers.len(), - WORKER_EXIT_TIMEOUT - ); - // the cancel futures need to be awaited concurrently for the shutdown processes to be triggered concurrently - futures::future::join_all(workers.into_values().map(util::CancellableTask::cancel)).await; - exit_print.await?; - Ok(()) -} - -/// starts and stops federation workers depending on which instances are on db -/// await the returned future to stop/cancel all workers gracefully -pub fn start_stop_federation_workers_cancellable( - opts: Opts, - pool: ActualDbPool, - config: FederationConfig, -) -> CancellableTask<()> { - CancellableTask::spawn(WORKER_EXIT_TIMEOUT, move |c| { - start_stop_federation_workers(opts, pool, config, c) - }) -} - -/// every 60s, print the state for every instance. exits if the receiver is done (all senders dropped) -async fn receive_print_stats( - pool: ActualDbPool, - mut receiver: UnboundedReceiver<(String, FederationQueueState)>, -) { - let pool = &mut DbPool::Pool(&pool); - let mut printerval = tokio::time::interval(Duration::from_secs(60)); - printerval.tick().await; // skip first - let mut stats = HashMap::new(); - loop { - tokio::select! { - ele = receiver.recv() => { - let Some((domain, ele)) = ele else { - tracing::info!("done. quitting"); - print_stats(pool, &stats).await; - return; - }; - stats.insert(domain, ele); - }, - _ = printerval.tick() => { - print_stats(pool, &stats).await; + let worker_count = self.workers.len(); + tracing::info!("Federating to {worker_count}/{total_count} instances ({dead_count} dead, {disallowed_count} disallowed)"); + tokio::select! { + () = sleep(INSTANCES_RECHECK_DELAY) => {}, + _ = cancel.cancelled() => { return Ok(()) } } } } + + pub async fn cancel(self) -> LemmyResult<()> { + drop(self.stats_sender); + tracing::warn!( + "Waiting for {} workers ({:.2?} max)", + self.workers.len(), + WORKER_EXIT_TIMEOUT + ); + // the cancel futures need to be awaited concurrently for the shutdown processes to be triggered + // concurrently + futures::future::join_all( + self + .workers + .into_values() + .map(util::CancellableTask::cancel), + ) + .await; + self.exit_print.await?; + Ok(()) + } } -async fn print_stats(pool: &mut DbPool<'_>, stats: &HashMap) { - let last_id = crate::util::get_latest_activity_id(pool).await; - let Ok(last_id) = last_id else { - tracing::error!("could not get last id"); - return; +#[cfg(test)] +#[expect(clippy::unwrap_used)] +#[expect(clippy::indexing_slicing)] +mod test { + + use super::*; + use activitypub_federation::config::Data; + use chrono::DateTime; + use lemmy_db_schema::source::{ + federation_allowlist::FederationAllowList, + federation_blocklist::FederationBlockList, + instance::InstanceForm, }; - // it's expected that the values are a bit out of date, everything < SAVE_STATE_EVERY should be considered up to date - tracing::info!( - "Federation state as of {}:", - Local::now() - .with_nanosecond(0) - .expect("0 is valid nanos") - .to_rfc3339() - ); - // todo: less noisy output (only output failing instances and summary for successful) - // todo: more stats (act/sec, avg http req duration) - let mut ok_count = 0; - for (domain, stat) in stats { - let behind = last_id - stat.last_successful_id; - if stat.fail_count > 0 { - tracing::info!( - "{}: Warning. 
{} behind, {} consecutive fails, current retry delay {:.2?}", - domain, - behind, - stat.fail_count, - retry_sleep_duration(stat.fail_count) - ); - } else if behind > 0 { - tracing::info!("{}: Ok. {} behind", domain, behind); - } else { - ok_count += 1; + use lemmy_utils::error::LemmyError; + use serial_test::serial; + use std::{ + collections::HashSet, + sync::{Arc, Mutex}, + }; + use tokio::spawn; + + struct TestData { + send_manager: SendManager, + context: Data, + instances: Vec, + } + impl TestData { + async fn init(process_count: i32, process_index: i32) -> LemmyResult { + let context = LemmyContext::init_test_context().await; + let opts = Opts { + process_count, + process_index, + }; + let federation_config = FederationConfig::builder() + .domain("local.com") + .app_data(context.clone()) + .build() + .await?; + let concurrent_sends_per_instance = std::env::var("LEMMY_TEST_FEDERATION_CONCURRENT_SENDS") + .ok() + .and_then(|s| s.parse().ok()) + .unwrap_or(1); + + let federation_worker_config = FederationWorkerConfig { + concurrent_sends_per_instance, + }; + let pool = &mut context.pool(); + let instances = vec![ + Instance::read_or_create(pool, "alpha.com".to_string()).await?, + Instance::read_or_create(pool, "beta.com".to_string()).await?, + Instance::read_or_create(pool, "gamma.com".to_string()).await?, + ]; + + let send_manager = SendManager::new(opts, federation_config, federation_worker_config); + Ok(Self { + send_manager, + context, + instances, + }) + } + + async fn run(&mut self) -> LemmyResult<()> { + // start it and cancel after workers are running + let cancel = CancellationToken::new(); + let cancel_ = cancel.clone(); + spawn(async move { + sleep(Duration::from_millis(100)).await; + cancel_.cancel(); + }); + self.send_manager.do_loop(cancel.clone()).await?; + Ok(()) + } + + async fn cleanup(self) -> LemmyResult<()> { + self.send_manager.cancel().await?; + Instance::delete_all(&mut self.context.pool()).await?; + Ok(()) } } - tracing::info!("{ok_count} others up to date"); + + /// Basic test with default params and only active/allowed instances + #[tokio::test] + #[serial] + async fn test_send_manager() -> LemmyResult<()> { + let mut data = TestData::init(1, 1).await?; + + data.run().await?; + assert_eq!(3, data.send_manager.workers.len()); + let workers: HashSet<_> = data.send_manager.workers.keys().cloned().collect(); + let instances: HashSet<_> = data.instances.iter().map(|i| i.id).collect(); + assert_eq!(instances, workers); + + data.cleanup().await?; + Ok(()) + } + + /// Running with multiple processes should start correct workers + #[tokio::test] + #[serial] + async fn test_send_manager_processes() -> LemmyResult<()> { + let active = Arc::new(Mutex::new(vec![])); + let execute = |count, index, active: Arc>>| async move { + let mut data = TestData::init(count, index).await?; + data.run().await?; + assert_eq!(1, data.send_manager.workers.len()); + for k in data.send_manager.workers.keys() { + active.lock().unwrap().push(*k); + } + data.cleanup().await?; + Ok::<(), LemmyError>(()) + }; + execute(3, 1, active.clone()).await?; + execute(3, 2, active.clone()).await?; + execute(3, 3, active.clone()).await?; + + // Should run exactly three workers + assert_eq!(3, active.lock().unwrap().len()); + + Ok(()) + } + + /// Use blocklist, should not send to blocked instances + #[tokio::test] + #[serial] + async fn test_send_manager_blocked() -> LemmyResult<()> { + let mut data = TestData::init(1, 1).await?; + + let domain = data.instances[0].domain.clone(); + 
FederationBlockList::replace(&mut data.context.pool(), Some(vec![domain])).await?; + data.run().await?; + let workers = &data.send_manager.workers; + assert_eq!(2, workers.len()); + assert!(workers.contains_key(&data.instances[1].id)); + assert!(workers.contains_key(&data.instances[2].id)); + + data.cleanup().await?; + Ok(()) + } + + /// Use allowlist, should only send to allowed instance + #[tokio::test] + #[serial] + async fn test_send_manager_allowed() -> LemmyResult<()> { + let mut data = TestData::init(1, 1).await?; + + let domain = data.instances[0].domain.clone(); + FederationAllowList::replace(&mut data.context.pool(), Some(vec![domain])).await?; + data.run().await?; + let workers = &data.send_manager.workers; + assert_eq!(1, workers.len()); + assert!(workers.contains_key(&data.instances[0].id)); + + data.cleanup().await?; + Ok(()) + } + + /// Mark instance as dead, there should be no worker created for it + #[tokio::test] + #[serial] + async fn test_send_manager_dead() -> LemmyResult<()> { + let mut data = TestData::init(1, 1).await?; + + let instance = &data.instances[0]; + let form = InstanceForm { + updated: DateTime::from_timestamp(0, 0), + ..InstanceForm::new(instance.domain.clone()) + }; + Instance::update(&mut data.context.pool(), instance.id, form).await?; + + data.run().await?; + let workers = &data.send_manager.workers; + assert_eq!(2, workers.len()); + assert!(workers.contains_key(&data.instances[1].id)); + assert!(workers.contains_key(&data.instances[2].id)); + + data.cleanup().await?; + Ok(()) + } } diff --git a/crates/federate/src/send.rs b/crates/federate/src/send.rs new file mode 100644 index 000000000..01d620eb0 --- /dev/null +++ b/crates/federate/src/send.rs @@ -0,0 +1,148 @@ +use crate::util::get_actor_cached; +use activitypub_federation::{ + activity_sending::SendActivityTask, + config::Data, + protocol::context::WithContext, +}; +use anyhow::{Context, Result}; +use chrono::{DateTime, Utc}; +use lemmy_api_common::{context::LemmyContext, federate_retry_sleep_duration}; +use lemmy_apub::{activity_lists::SharedInboxActivities, FEDERATION_CONTEXT}; +use lemmy_db_schema::{newtypes::ActivityId, source::activity::SentActivity}; +use reqwest::Url; +use std::ops::Deref; +use tokio::{sync::mpsc::UnboundedSender, time::sleep}; +use tokio_util::sync::CancellationToken; + +#[derive(Debug, Eq)] +pub(crate) struct SendSuccessInfo { + pub activity_id: ActivityId, + pub published: Option>, + // true if the activity was skipped because the target instance is not interested in this + // activity + pub was_skipped: bool, +} +impl PartialEq for SendSuccessInfo { + fn eq(&self, other: &Self) -> bool { + self.activity_id == other.activity_id + } +} +/// order backwards because the binary heap is a max heap, and we need the smallest element to be on +/// top +impl PartialOrd for SendSuccessInfo { + fn partial_cmp(&self, other: &Self) -> Option { + Some(self.cmp(other)) + } +} +impl Ord for SendSuccessInfo { + fn cmp(&self, other: &Self) -> std::cmp::Ordering { + other.activity_id.cmp(&self.activity_id) + } +} + +/// Represents the result of sending an activity. +/// +/// This enum is used to communicate the outcome of a send operation from a send task +/// to the main instance worker. It's designed to maintain a clean separation between +/// the send task and the main thread, allowing the send.rs file to be self-contained +/// and easier to understand. +/// +/// The use of a channel for communication (rather than shared atomic variables) was chosen +/// because: +/// 1. 
It keeps the send task cleanly separated with no direct interaction with the main thread. +/// 2. The failure event needs to be transferred to the main task for database updates anyway. +/// 3. The main fail_count should only be updated under certain conditions, which are best handled +/// in the main task. +/// 4. It maintains consistency in how data is communicated (all via channels rather than a mix of +/// channels and atomics). +/// 5. It simplifies concurrency management and makes the flow of data more predictable. +pub(crate) enum SendActivityResult { + Success(SendSuccessInfo), + Failure { fail_count: i32 }, +} +/// Represents a task for retrying to send an activity. +/// +/// This struct encapsulates all the necessary information and resources for attempting +/// to send an activity to multiple inbox URLs, with built-in retry logic. +pub(crate) struct SendRetryTask<'a> { + pub activity: &'a SentActivity, + pub object: &'a SharedInboxActivities, + /// Must not be empty at this point + pub inbox_urls: Vec, + /// Channel to report results back to the main instance worker + pub report: &'a mut UnboundedSender, + /// The first request will be sent immediately, but subsequent requests will be delayed + /// according to the number of previous fails + 1 + /// + /// This is a read-only immutable variable that is passed only one way, from the main + /// thread to each send task. It allows the task to determine how long to sleep initially + /// if the request fails. + pub initial_fail_count: i32, + /// For logging purposes + pub domain: String, + pub context: Data, + pub stop: CancellationToken, +} + +impl<'a> SendRetryTask<'a> { + // this function will return successfully when (a) send succeeded or (b) worker cancelled + // and will return an error if an internal error occurred (send errors cause an infinite loop) + pub async fn send_retry_loop(self) -> Result<()> { + let SendRetryTask { + activity, + object, + inbox_urls, + report, + initial_fail_count, + domain, + context, + stop, + } = self; + debug_assert!(!inbox_urls.is_empty()); + + let pool = &mut context.pool(); + let Some(actor_apub_id) = &activity.actor_apub_id else { + return Err(anyhow::anyhow!("activity is from before lemmy 0.19")); + }; + let actor = get_actor_cached(pool, activity.actor_type, actor_apub_id) + .await + .context("failed getting actor instance (was it marked deleted / removed?)")?; + + let object = WithContext::new(object.clone(), FEDERATION_CONTEXT.deref().clone()); + let requests = SendActivityTask::prepare(&object, actor.as_ref(), inbox_urls, &context).await?; + for task in requests { + // usually only one due to shared inbox + tracing::debug!("sending out {}", task); + let mut fail_count = initial_fail_count; + while let Err(e) = task.sign_and_send(&context).await { + fail_count += 1; + report.send(SendActivityResult::Failure { + fail_count, + // activity_id: activity.id, + })?; + let retry_delay = federate_retry_sleep_duration(fail_count); + tracing::info!( + "{}: retrying {:?} attempt {} with delay {retry_delay:.2?}. ({e})", + domain, + activity.id, + fail_count + ); + tokio::select! { + () = sleep(retry_delay) => {}, + () = stop.cancelled() => { + // cancel sending without reporting any result. 
+ // the InstanceWorker needs to be careful to not hang on receive of that + // channel when cancelled (see handle_send_results) + return Ok(()); + } + } + } + } + report.send(SendActivityResult::Success(SendSuccessInfo { + activity_id: activity.id, + published: Some(activity.published), + was_skipped: false, + }))?; + Ok(()) + } +} diff --git a/crates/federate/src/stats.rs b/crates/federate/src/stats.rs new file mode 100644 index 000000000..f927f6ddf --- /dev/null +++ b/crates/federate/src/stats.rs @@ -0,0 +1,83 @@ +use crate::util::{get_latest_activity_id, FederationQueueStateWithDomain}; +use chrono::Local; +use lemmy_api_common::federate_retry_sleep_duration; +use lemmy_db_schema::{ + newtypes::InstanceId, + utils::{ActualDbPool, DbPool}, +}; +use lemmy_utils::error::LemmyResult; +use std::{collections::HashMap, time::Duration}; +use tokio::{sync::mpsc::UnboundedReceiver, time::interval}; +use tracing::{debug, info, warn}; + +/// every 60s, print the state for every instance. exits if the receiver is done (all senders +/// dropped) +pub(crate) async fn receive_print_stats( + pool: ActualDbPool, + mut receiver: UnboundedReceiver, +) { + let pool = &mut DbPool::Pool(&pool); + let mut printerval = interval(Duration::from_secs(60)); + let mut stats = HashMap::new(); + loop { + tokio::select! { + ele = receiver.recv() => { + match ele { + // update stats for instance + Some(ele) => {stats.insert(ele.state.instance_id, ele);}, + // receiver closed, print stats and exit + None => { + print_stats(pool, &stats).await; + return; + } + } + }, + _ = printerval.tick() => { + print_stats(pool, &stats).await; + } + } + } +} + +async fn print_stats( + pool: &mut DbPool<'_>, + stats: &HashMap, +) { + let res = print_stats_with_error(pool, stats).await; + if let Err(e) = res { + warn!("Failed to print stats: {e}"); + } +} + +async fn print_stats_with_error( + pool: &mut DbPool<'_>, + stats: &HashMap, +) -> LemmyResult<()> { + let last_id = get_latest_activity_id(pool).await?; + + // it's expected that the values are a bit out of date, everything < SAVE_STATE_EVERY should be + // considered up to date + info!("Federation state as of {}:", Local::now().to_rfc3339()); + // todo: more stats (act/sec, avg http req duration) + let mut ok_count = 0; + let mut behind_count = 0; + for ele in stats.values() { + let stat = &ele.state; + let domain = &ele.domain; + let behind = last_id.0 - stat.last_successful_id.map(|e| e.0).unwrap_or(0); + if stat.fail_count > 0 { + info!( + "{domain}: Warning. {behind} behind, {} consecutive fails, current retry delay {:.2?}", + stat.fail_count, + federate_retry_sleep_duration(stat.fail_count) + ); + } else if behind > 0 { + debug!("{}: Ok. {} activities behind", domain, behind); + behind_count += 1; + } else { + ok_count += 1; + } + } + info!("{ok_count} others up to date. 
{behind_count} instances behind."); + Ok(()) +} diff --git a/crates/federate/src/util.rs b/crates/federate/src/util.rs index f744d45f4..9473aafa3 100644 --- a/crates/federate/src/util.rs +++ b/crates/federate/src/util.rs @@ -6,9 +6,11 @@ use lemmy_apub::{ fetcher::{site_or_community_or_user::SiteOrCommunityOrUser, user_or_community::UserOrCommunity}, }; use lemmy_db_schema::{ + newtypes::ActivityId, source::{ activity::{ActorType, SentActivity}, community::Community, + federation_queue_state::FederationQueueState, person::Person, site::Site, }, @@ -16,31 +18,43 @@ use lemmy_db_schema::{ utils::{get_conn, DbPool}, }; use moka::future::Cache; -use once_cell::sync::Lazy; use reqwest::Url; use serde_json::Value; use std::{ + fmt::Debug, future::Future, pin::Pin, - sync::{Arc, RwLock}, + sync::{Arc, LazyLock}, time::Duration, }; use tokio::{task::JoinHandle, time::sleep}; use tokio_util::sync::CancellationToken; /// Decrease the delays of the federation queue. -/// Should only be used for federation tests since it significantly increases CPU and DB load of the federation queue. -pub(crate) static LEMMY_TEST_FAST_FEDERATION: Lazy = Lazy::new(|| { +/// Should only be used for federation tests since it significantly increases CPU and DB load of the +/// federation queue. This is intentionally a separate flag from other flags like debug_assertions, +/// since this is a invasive change we only need rarely. +pub(crate) static LEMMY_TEST_FAST_FEDERATION: LazyLock = LazyLock::new(|| { std::env::var("LEMMY_TEST_FAST_FEDERATION") .map(|s| !s.is_empty()) .unwrap_or(false) }); -/// Recheck for new federation work every n seconds. + +/// Recheck for new federation work every n seconds within each InstanceWorker. /// -/// When the queue is processed faster than new activities are added and it reaches the current time with an empty batch, -/// this is the delay the queue waits before it checks if new activities have been added to the sent_activities table. -/// This delay is only applied if no federated activity happens during sending activities of the last batch. -pub(crate) static WORK_FINISHED_RECHECK_DELAY: Lazy = Lazy::new(|| { +/// When the queue is processed faster than new activities are added and it reaches the current time +/// with an empty batch, this is the delay the queue waits before it checks if new activities have +/// been added to the sent_activities table. This delay is only applied if no federated activity +/// happens during sending activities of the last batch, which means on high-activity instances it +/// may never be used. This means that it does not affect the maximum throughput of the queue. +/// +/// +/// This is thus the interval with which tokio wakes up each of the +/// InstanceWorkers to check for new work, if the queue previously was empty. +/// If the delay is too short, the workers (one per federated instance) will wake up too +/// often and consume a lot of CPU. If the delay is long, then activities on low-traffic instances +/// will on average take delay/2 seconds to federate. +pub(crate) static WORK_FINISHED_RECHECK_DELAY: LazyLock = LazyLock::new(|| { if *LEMMY_TEST_FAST_FEDERATION { Duration::from_millis(100) } else { @@ -48,46 +62,62 @@ pub(crate) static WORK_FINISHED_RECHECK_DELAY: Lazy = Lazy::new(|| { } }); -pub struct CancellableTask { - f: Pin> + Send + 'static>>, - ended: Arc>, +/// Cache the latest activity id for a certain duration. 
+/// +/// This cache is common to all the instance workers and prevents there from being more than one +/// call per N seconds between each DB query to find max(activity_id). +pub(crate) static CACHE_DURATION_LATEST_ID: LazyLock = LazyLock::new(|| { + if *LEMMY_TEST_FAST_FEDERATION { + // in test mode, we use the same cache duration as the recheck delay so when recheck happens + // data is fresh, accelerating the time the tests take. + *WORK_FINISHED_RECHECK_DELAY + } else { + // in normal mode, we limit the query to one per second + Duration::from_secs(1) + } +}); + +/// A task that will be run in an infinite loop, unless it is cancelled. +/// If the task exits without being cancelled, an error will be logged and the task will be +/// restarted. +pub struct CancellableTask { + f: Pin> + Send + 'static>>, } -impl CancellableTask { +impl CancellableTask { /// spawn a task but with graceful shutdown - pub fn spawn( + pub fn spawn( timeout: Duration, - task: impl FnOnce(CancellationToken) -> F, - ) -> CancellableTask + task: impl Fn(CancellationToken) -> F + Send + 'static, + ) -> CancellableTask where - F: Future> + Send + 'static, + F: Future + Send + 'static, + R: Send + Debug + 'static, { let stop = CancellationToken::new(); - let task = task(stop.clone()); - let ended = Arc::new(RwLock::new(false)); - let ended_write = ended.clone(); - let task: JoinHandle> = tokio::spawn(async move { - match task.await { - Ok(o) => Ok(o), - Err(e) => { - *ended_write.write().expect("poisoned") = true; - Err(e) + let stop2 = stop.clone(); + let task: JoinHandle<()> = tokio::spawn(async move { + loop { + let res = task(stop2.clone()).await; + if stop2.is_cancelled() { + return; + } else { + tracing::warn!("task exited, restarting: {res:?}"); } } }); let abort = task.abort_handle(); CancellableTask { - ended, f: Box::pin(async move { stop.cancel(); tokio::select! { r = task => { - Ok(r.context("could not join")??) + r.context("CancellableTask failed to cancel cleanly, returned error")?; + Ok(()) }, _ = sleep(timeout) => { abort.abort(); - tracing::warn!("Graceful shutdown timed out, aborting task"); - Err(anyhow!("task aborted due to timeout")) + Err(anyhow!("CancellableTask aborted due to shutdown timeout")) } } }), @@ -95,12 +125,9 @@ impl CancellableTask { } /// cancel the cancel signal, wait for timeout for the task to stop gracefully, otherwise abort it - pub async fn cancel(self) -> Result { + pub async fn cancel(self) -> Result<(), anyhow::Error> { self.f.await } - pub fn has_ended(&self) -> bool { - *self.ended.read().expect("poisoned") - } } /// assuming apub priv key and ids are immutable, then we don't need to have TTL @@ -110,8 +137,8 @@ pub(crate) async fn get_actor_cached( actor_type: ActorType, actor_apub_id: &Url, ) -> Result> { - static CACHE: Lazy>> = - Lazy::new(|| Cache::builder().max_capacity(10000).build()); + static CACHE: LazyLock>> = + LazyLock::new(|| Cache::builder().max_capacity(10000).build()); CACHE .try_get_with(actor_apub_id.clone(), async { let url = actor_apub_id.clone().into(); @@ -141,9 +168,6 @@ pub(crate) async fn get_actor_cached( .map_err(|e| anyhow::anyhow!("err getting actor {actor_type:?} {actor_apub_id}: {e:?}")) } -/// this should maybe be a newtype like all the other PersonId CommunityId etc. -pub(crate) type ActivityId = i64; - type CachedActivityInfo = Option>; /// activities are immutable so cache does not need to have TTL /// May return None if the corresponding id does not exist or is a received activity. 
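The actor and activity lookups in this file share one pattern: a process-wide moka cache held in a static LazyLock, with try_get_with coalescing concurrent loads for the same key. A minimal, self-contained sketch of that pattern (illustrative only; the key/value types, capacity, and loader are assumptions, not Lemmy's exact code), assuming the moka crate with its "future" feature and anyhow, both of which appear in the imports above:

use std::sync::{Arc, LazyLock};
use moka::future::Cache;

// stand-in for an expensive lookup such as a database read
async fn load_from_db(key: i64) -> Result<Arc<String>, anyhow::Error> {
  Ok(Arc::new(format!("row {key}")))
}

// memoized wrapper: the static cache lives for the whole process, and
// try_get_with runs the loader at most once per key even under concurrent calls
pub async fn get_cached(key: i64) -> Result<Arc<String>, anyhow::Error> {
  static CACHE: LazyLock<Cache<i64, Arc<String>>> =
    LazyLock::new(|| Cache::builder().max_capacity(10_000).build());
  CACHE
    .try_get_with(key, load_from_db(key))
    .await
    .map_err(|e| anyhow::anyhow!("cache load failed for {key}: {e:?}"))
}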
@@ -153,15 +177,14 @@ pub(crate) async fn get_activity_cached( pool: &mut DbPool<'_>, activity_id: ActivityId, ) -> Result { - static ACTIVITIES: Lazy> = - Lazy::new(|| Cache::builder().max_capacity(10000).build()); + static ACTIVITIES: LazyLock> = + LazyLock::new(|| Cache::builder().max_capacity(10000).build()); ACTIVITIES .try_get_with(activity_id, async { let row = SentActivity::read(pool, activity_id) .await - .optional() - .context("could not read activity")?; - let Some(mut row) = row else { + .context("could not read activity"); + let Ok(mut row) = row else { return anyhow::Result::<_, anyhow::Error>::Ok(None); }; // swap to avoid cloning @@ -177,13 +200,9 @@ pub(crate) async fn get_activity_cached( /// return the most current activity id (with 1 second cache) pub(crate) async fn get_latest_activity_id(pool: &mut DbPool<'_>) -> Result { - static CACHE: Lazy> = Lazy::new(|| { + static CACHE: LazyLock> = LazyLock::new(|| { Cache::builder() - .time_to_live(if *LEMMY_TEST_FAST_FEDERATION { - *WORK_FINISHED_RECHECK_DELAY - } else { - Duration::from_secs(1) - }) + .time_to_live(*CACHE_DURATION_LATEST_ID) .build() }); CACHE @@ -192,14 +211,16 @@ pub(crate) async fn get_latest_activity_id(pool: &mut DbPool<'_>) -> Result = sent_activity.select(max(id)).get_result(conn).await?; - let latest_id = seq.unwrap_or(0); + let latest_id = seq.unwrap_or(ActivityId(0)); anyhow::Result::<_, anyhow::Error>::Ok(latest_id as ActivityId) }) .await .map_err(|e| anyhow::anyhow!("err getting id: {e:?}")) } -/// how long to sleep based on how many retries have already happened -pub(crate) fn retry_sleep_duration(retry_count: i32) -> Duration { - Duration::from_secs_f64(10.0 * 2.0_f64.powf(f64::from(retry_count))) +/// the domain name is needed for logging, pass it to the stats printer so it doesn't need to look +/// up the domain itself +pub(crate) struct FederationQueueStateWithDomain { + pub domain: String, + pub state: FederationQueueState, } diff --git a/crates/federate/src/worker.rs b/crates/federate/src/worker.rs index 3eda2e746..b0254ba0b 100644 --- a/crates/federate/src/worker.rs +++ b/crates/federate/src/worker.rs @@ -1,328 +1,771 @@ use crate::{ - federation_queue_state::FederationQueueState, + inboxes::RealCommunityInboxCollector, + send::{SendActivityResult, SendRetryTask, SendSuccessInfo}, util::{ get_activity_cached, - get_actor_cached, get_latest_activity_id, - retry_sleep_duration, - LEMMY_TEST_FAST_FEDERATION, + FederationQueueStateWithDomain, WORK_FINISHED_RECHECK_DELAY, }, }; -use activitypub_federation::{activity_sending::SendActivityTask, config::Data}; +use activitypub_federation::config::FederationConfig; use anyhow::{Context, Result}; -use chrono::{DateTime, TimeZone, Utc}; -use lemmy_api_common::context::LemmyContext; -use lemmy_apub::activity_lists::SharedInboxActivities; +use chrono::{DateTime, Days, TimeZone, Utc}; +use lemmy_api_common::{ + context::LemmyContext, + federate_retry_sleep_duration, + lemmy_utils::settings::structs::FederationWorkerConfig, +}; use lemmy_db_schema::{ - newtypes::{CommunityId, InstanceId}, - source::{activity::SentActivity, instance::Instance, site::Site}, - utils::DbPool, + newtypes::ActivityId, + source::{ + federation_queue_state::FederationQueueState, + instance::{Instance, InstanceForm}, + }, + utils::{naive_now, ActualDbPool, DbPool}, }; -use lemmy_db_views_actor::structs::CommunityFollowerView; -use lemmy_utils::error::LemmyErrorExt2; -use once_cell::sync::Lazy; -use reqwest::Url; -use std::{ - collections::{HashMap, HashSet}, - time::Duration, 
+use std::{collections::BinaryHeap, ops::Add, time::Duration}; +use tokio::{ + sync::mpsc::{self, UnboundedSender}, + time::sleep, }; -use tokio::{sync::mpsc::UnboundedSender, time::sleep}; use tokio_util::sync::CancellationToken; -/// Check whether to save state to db every n sends if there's no failures (during failures state is saved after every attempt) -/// This determines the batch size for loop_batch. After a batch ends and SAVE_STATE_EVERY_TIME has passed, the federation_queue_state is updated in the DB. -static CHECK_SAVE_STATE_EVERY_IT: i64 = 100; -/// Save state to db after this time has passed since the last state (so if the server crashes or is SIGKILLed, less than X seconds of activities are resent) +/// Save state to db after this time has passed since the last state (so if the server crashes or is +/// SIGKILLed, less than X seconds of activities are resent) +#[cfg(not(test))] static SAVE_STATE_EVERY_TIME: Duration = Duration::from_secs(60); -/// interval with which new additions to community_followers are queried. +#[cfg(test)] +/// in test mode, we want it to save state and send it to print_stats after every send +static SAVE_STATE_EVERY_TIME: Duration = Duration::from_secs(0); +/// Maximum number of successful sends to allow out of order +const MAX_SUCCESSFULS: usize = 1000; + +/// in prod mode, try to collect multiple send results at the same time to reduce load +#[cfg(not(test))] +static MIN_ACTIVITY_SEND_RESULTS_TO_HANDLE: usize = 4; +#[cfg(test)] +static MIN_ACTIVITY_SEND_RESULTS_TO_HANDLE: usize = 0; + /// -/// The first time some user on an instance follows a specific remote community (or, more precisely: the first time a (followed_community_id, follower_inbox_url) tuple appears), -/// this delay limits the maximum time until the follow actually results in activities from that community id being sent to that inbox url. -/// This delay currently needs to not be too small because the DB load is currently fairly high because of the current structure of storing inboxes for every person, not having a separate list of shared_inboxes, and the architecture of having every instance queue be fully separate. -/// (see https://github.com/LemmyNet/lemmy/issues/3958) -static FOLLOW_ADDITIONS_RECHECK_DELAY: Lazy = Lazy::new(|| { - if *LEMMY_TEST_FAST_FEDERATION { - chrono::Duration::seconds(1) - } else { - chrono::Duration::minutes(2) - } -}); -/// The same as FOLLOW_ADDITIONS_RECHECK_DELAY, but triggering when the last person on an instance unfollows a specific remote community. -/// This is expected to happen pretty rarely and updating it in a timely manner is not too important. -static FOLLOW_REMOVALS_RECHECK_DELAY: Lazy = - Lazy::new(|| chrono::Duration::hours(1)); +/// SendManager --(has many)--> InstanceWorker --(has many)--> SendRetryTask +/// | | | +/// -----|------create worker -> loop activities--create task-> send activity +/// | | vvvv +/// | | fail or success +/// | | <-report result-- | +/// | <---order and aggrate results--- | +/// | <---send stats--- | | +/// filter and print stats | | pub(crate) struct InstanceWorker { instance: Instance, - // load site lazily because if an instance is first seen due to being on allowlist, - // the corresponding row in `site` may not exist yet since that is only added once - // `fetch_instance_actor_for_object` is called. 
- // (this should be unlikely to be relevant outside of the federation tests) - site_loaded: bool, - site: Option, - followed_communities: HashMap>, stop: CancellationToken, - context: Data, - stats_sender: UnboundedSender<(String, FederationQueueState)>, - last_full_communities_fetch: DateTime, - last_incremental_communities_fetch: DateTime, + federation_lib_config: FederationConfig, + federation_worker_config: FederationWorkerConfig, state: FederationQueueState, last_state_insert: DateTime, + pool: ActualDbPool, + inbox_collector: RealCommunityInboxCollector, + // regularily send stats back to the SendManager + stats_sender: UnboundedSender, + // each HTTP send will report back to this channel concurrently + receive_send_result: mpsc::UnboundedReceiver, + // this part of the channel is cloned and passed to the SendRetryTasks + report_send_result: mpsc::UnboundedSender, + // activities that have been successfully sent but + // that are not the lowest number and thus can't be written to the database yet + successfuls: BinaryHeap, + // number of activities that currently have a task spawned to send it + in_flight: i8, } impl InstanceWorker { pub(crate) async fn init_and_loop( instance: Instance, - context: Data, - pool: &mut DbPool<'_>, // in theory there's a ref to the pool in context, but i couldn't get that to work wrt lifetimes + config: FederationConfig, + federation_worker_config: FederationWorkerConfig, stop: CancellationToken, - stats_sender: UnboundedSender<(String, FederationQueueState)>, + stats_sender: UnboundedSender, ) -> Result<(), anyhow::Error> { - let state = FederationQueueState::load(pool, instance.id).await?; + let pool = config.to_request_data().inner_pool().clone(); + let state = FederationQueueState::load(&mut DbPool::Pool(&pool), instance.id).await?; + let (report_send_result, receive_send_result) = + tokio::sync::mpsc::unbounded_channel::(); let mut worker = InstanceWorker { + inbox_collector: RealCommunityInboxCollector::new_real( + pool.clone(), + instance.id, + instance.domain.clone(), + ), + federation_worker_config, instance, - site_loaded: false, - site: None, - followed_communities: HashMap::new(), stop, - context, + federation_lib_config: config, stats_sender, - last_full_communities_fetch: Utc.timestamp_nanos(0), - last_incremental_communities_fetch: Utc.timestamp_nanos(0), state, last_state_insert: Utc.timestamp_nanos(0), + pool, + receive_send_result, + report_send_result, + successfuls: BinaryHeap::::new(), + in_flight: 0, }; - worker.loop_until_stopped(pool).await + + worker.loop_until_stopped().await } /// loop fetch new activities from db and send them to the inboxes of the given instances - /// this worker only returns if (a) there is an internal error or (b) the cancellation token is cancelled (graceful exit) - pub(crate) async fn loop_until_stopped( - &mut self, - pool: &mut DbPool<'_>, - ) -> Result<(), anyhow::Error> { - let save_state_every = chrono::Duration::from_std(SAVE_STATE_EVERY_TIME).expect("not negative"); - - self.update_communities(pool).await?; + /// this worker only returns if (a) there is an internal error or (b) the cancellation token is + /// cancelled (graceful exit) + async fn loop_until_stopped(&mut self) -> Result<()> { self.initial_fail_sleep().await?; + let (mut last_sent_id, mut newest_id) = self.get_latest_ids().await?; + while !self.stop.is_cancelled() { - self.loop_batch(pool).await?; - if self.stop.is_cancelled() { - break; + // check if we need to wait for a send to finish before sending the next one + // we wait if 
(a) the last request failed, only if a request is already in flight (not at the + // start of the loop) or (b) if we have too many successfuls in memory or (c) if we have + // too many in flight + let need_wait_for_event = (self.in_flight != 0 && self.state.fail_count > 0) + || self.successfuls.len() >= MAX_SUCCESSFULS + || self.in_flight >= self.federation_worker_config.concurrent_sends_per_instance; + if need_wait_for_event || self.receive_send_result.len() > MIN_ACTIVITY_SEND_RESULTS_TO_HANDLE + { + // if len() > 0 then this does not block and allows us to write to db more often + // if len is 0 then this means we wait for something to change our above conditions, + // which can only happen by an event sent into the channel + self.handle_send_results().await?; + // handle_send_results does not guarantee that we are now in a condition where we want to + // send a new one, so repeat this check until the if no longer applies + continue; } - if (Utc::now() - self.last_state_insert) > save_state_every { - self.save_and_send_state(pool).await?; + + // send a new activity if there is one + self.inbox_collector.update_communities().await?; + let next_id_to_send = ActivityId(last_sent_id.0 + 1); + { + // sanity check: calculate next id to send based on the last id and the in flight requests + let expected_next_id = self.state.last_successful_id.map(|last_successful_id| { + last_successful_id.0 + (self.successfuls.len() as i64) + i64::from(self.in_flight) + 1 + }); + // compare to next id based on incrementing + if expected_next_id != Some(next_id_to_send.0) { + anyhow::bail!( + "{}: next id to send is not as expected: {:?} != {:?}", + self.instance.domain, + expected_next_id, + next_id_to_send + ) + } } - self.update_communities(pool).await?; + + if next_id_to_send > newest_id { + // lazily fetch latest id only if we have cought up + newest_id = self.get_latest_ids().await?.1; + if next_id_to_send > newest_id { + if next_id_to_send > ActivityId(newest_id.0 + 1) { + tracing::error!( + "{}: next send id {} is higher than latest id {}+1 in database (did the db get cleared?)", + self.instance.domain, + next_id_to_send.0, + newest_id.0 + ); + } + // no more work to be done, wait before rechecking + tokio::select! { + () = sleep(*WORK_FINISHED_RECHECK_DELAY) => {}, + () = self.stop.cancelled() => { + tracing::debug!("cancelled worker loop while waiting for new work") + } + } + continue; + } + } + self.in_flight += 1; + last_sent_id = next_id_to_send; + self.spawn_send_if_needed(next_id_to_send).await?; } - // final update of state in db - self.save_and_send_state(pool).await?; + tracing::debug!("cancelled worker loop after send"); + + // final update of state in db on shutdown + self.save_and_send_state().await?; Ok(()) } async fn initial_fail_sleep(&mut self) -> Result<()> { // before starting queue, sleep remaining duration if last request failed if self.state.fail_count > 0 { - let elapsed = (Utc::now() - self.state.last_retry).to_std()?; - let required = retry_sleep_duration(self.state.fail_count); + let last_retry = self + .state + .last_retry + .context("impossible: if fail count set last retry also set")?; + let elapsed = (Utc::now() - last_retry).to_std()?; + let required = federate_retry_sleep_duration(self.state.fail_count); if elapsed >= required { return Ok(()); } let remaining = required - elapsed; + tracing::debug!( + "{}: fail-sleeping for {:?} before starting queue", + self.instance.domain, + remaining + ); tokio::select! 
{ () = sleep(remaining) => {}, - () = self.stop.cancelled() => {} - } - } - Ok(()) - } - /// send out a batch of CHECK_SAVE_STATE_EVERY_IT activities - async fn loop_batch(&mut self, pool: &mut DbPool<'_>) -> Result<()> { - let latest_id = get_latest_activity_id(pool).await?; - if self.state.last_successful_id == -1 { - // this is the initial creation (instance first seen) of the federation queue for this instance - // skip all past activities: - self.state.last_successful_id = latest_id; - // save here to ensure it's not read as 0 again later if no activities have happened - self.save_and_send_state(pool).await?; - } - let mut id = self.state.last_successful_id; - if id == latest_id { - // no more work to be done, wait before rechecking - tokio::select! { - () = sleep(*WORK_FINISHED_RECHECK_DELAY) => {}, - () = self.stop.cancelled() => {} - } - return Ok(()); - } - let mut processed_activities = 0; - while id < latest_id - && processed_activities < CHECK_SAVE_STATE_EVERY_IT - && !self.stop.is_cancelled() - { - id += 1; - processed_activities += 1; - let Some(ele) = get_activity_cached(pool, id) - .await - .context("failed reading activity from db")? - else { - self.state.last_successful_id = id; - continue; - }; - if let Err(e) = self.send_retry_loop(pool, &ele.0, &ele.1).await { - tracing::warn!( - "sending {} errored internally, skipping activity: {:?}", - ele.0.ap_id, - e - ); - } - if self.stop.is_cancelled() { - return Ok(()); - } - // send success! - self.state.last_successful_id = id; - self.state.fail_count = 0; - } - Ok(()) - } - - // this function will return successfully when (a) send succeeded or (b) worker cancelled - // and will return an error if an internal error occurred (send errors cause an infinite loop) - async fn send_retry_loop( - &mut self, - pool: &mut DbPool<'_>, - activity: &SentActivity, - object: &SharedInboxActivities, - ) -> Result<()> { - let inbox_urls = self - .get_inbox_urls(pool, activity) - .await - .context("failed figuring out inbox urls")?; - if inbox_urls.is_empty() { - self.state.last_successful_id = activity.id; - return Ok(()); - } - let Some(actor_apub_id) = &activity.actor_apub_id else { - return Ok(()); // activity was inserted before persistent queue was activated - }; - let actor = get_actor_cached(pool, activity.actor_type, actor_apub_id) - .await - .context("failed getting actor instance (was it marked deleted / removed?)")?; - - let inbox_urls = inbox_urls.into_iter().collect(); - let requests = SendActivityTask::prepare(object, actor.as_ref(), inbox_urls, &self.context) - .await - .into_anyhow()?; - for task in requests { - // usually only one due to shared inbox - tracing::info!("sending out {}", task); - while let Err(e) = task.sign_and_send(&self.context).await { - self.state.fail_count += 1; - self.state.last_retry = Utc::now(); - let retry_delay: Duration = retry_sleep_duration(self.state.fail_count); - tracing::info!( - "{}: retrying {} attempt {} with delay {retry_delay:.2?}. ({e})", - self.instance.domain, - activity.id, - self.state.fail_count - ); - self.save_and_send_state(pool).await?; - tokio::select! 
{ - () = sleep(retry_delay) => {}, - () = self.stop.cancelled() => { - // save state to db and exit - return Ok(()); - } + () = self.stop.cancelled() => { + tracing::debug!("cancelled worker loop during initial fail sleep") } } } Ok(()) } - /// get inbox urls of sending the given activity to the given instance - /// most often this will return 0 values (if instance doesn't care about the activity) - /// or 1 value (the shared inbox) - /// > 1 values only happens for non-lemmy software - async fn get_inbox_urls( - &mut self, - pool: &mut DbPool<'_>, - activity: &SentActivity, - ) -> Result> { - let mut inbox_urls: HashSet = HashSet::new(); + /// return the last successfully sent id and the newest activity id in the database + /// sets last_successful_id in database if it's the first time this instance is seen + async fn get_latest_ids(&mut self) -> Result<(ActivityId, ActivityId)> { + let latest_id = get_latest_activity_id(&mut self.pool()).await?; + let last = if let Some(last) = self.state.last_successful_id { + last + } else { + // this is the initial creation (instance first seen) of the federation queue for this + // instance - if activity.send_all_instances { - if !self.site_loaded { - self.site = Site::read_from_instance_id(pool, self.instance.id).await?; - self.site_loaded = true; - } - if let Some(site) = &self.site { - // Nutomic: Most non-lemmy software wont have a site row. That means it cant handle these activities. So handling it like this is fine. - inbox_urls.insert(site.inbox_url.inner().clone()); - } - } - if let Some(t) = &activity.send_community_followers_of { - if let Some(urls) = self.followed_communities.get(t) { - inbox_urls.extend(urls.iter().map(std::clone::Clone::clone)); - } - } - inbox_urls.extend( - activity - .send_inboxes - .iter() - .filter_map(std::option::Option::as_ref) - .filter(|&u| (u.domain() == Some(&self.instance.domain))) - .map(|u| u.inner().clone()), - ); - Ok(inbox_urls) + // skip all past activities: + self.state.last_successful_id = Some(latest_id); + // save here to ensure it's not read as 0 again later if no activities have happened + self.save_and_send_state().await?; + latest_id + }; + Ok((last, latest_id)) } - async fn update_communities(&mut self, pool: &mut DbPool<'_>) -> Result<()> { - if (Utc::now() - self.last_full_communities_fetch) > *FOLLOW_REMOVALS_RECHECK_DELAY { - // process removals every hour - (self.followed_communities, self.last_full_communities_fetch) = self - .get_communities(pool, self.instance.id, Utc.timestamp_nanos(0)) - .await?; - self.last_incremental_communities_fetch = self.last_full_communities_fetch; + async fn handle_send_results(&mut self) -> Result<(), anyhow::Error> { + let mut force_write = false; + let mut events = Vec::new(); + // Wait for at least one event but if there's multiple handle them all. + // We need to listen to the cancel event here as well in order to prevent a hang on shutdown: + // If the SendRetryTask gets cancelled, it immediately exits without reporting any state. + // So if the worker is waiting for a send result and all SendRetryTask gets cancelled, this recv + // could hang indefinitely otherwise. The tasks will also drop their handle of + // report_send_result which would cause the recv_many method to return 0 elements, but since + // InstanceWorker holds a copy of the send result channel as well, that won't happen. + tokio::select! 
{ + _ = self.receive_send_result.recv_many(&mut events, 1000) => {}, + () = self.stop.cancelled() => { + tracing::debug!("cancelled worker loop while waiting for send results"); + return Ok(()); + } } - if (Utc::now() - self.last_incremental_communities_fetch) > *FOLLOW_ADDITIONS_RECHECK_DELAY { - // process additions every minute - let (news, time) = self - .get_communities( - pool, - self.instance.id, - self.last_incremental_communities_fetch, - ) - .await?; - self.followed_communities.extend(news); - self.last_incremental_communities_fetch = time; + for event in events { + match event { + SendActivityResult::Success(s) => { + self.in_flight -= 1; + if !s.was_skipped { + self.state.fail_count = 0; + self.mark_instance_alive().await?; + } + self.successfuls.push(s); + } + SendActivityResult::Failure { fail_count, .. } => { + if fail_count > self.state.fail_count { + // override fail count - if multiple activities are currently sending this value may get + // conflicting info but that's fine. + // This needs to be this way, all alternatives would be worse. The reason is that if 10 + // simultaneous requests fail within a 1s period, we don't want the next retry to be + // exponentially 2**10 s later. Any amount of failures within a fail-sleep period should + // only count as one failure. + + self.state.fail_count = fail_count; + self.state.last_retry = Some(Utc::now()); + force_write = true; + } + } + } + } + self.pop_successfuls_and_write(force_write).await?; + Ok(()) + } + async fn mark_instance_alive(&mut self) -> Result<()> { + // Activity send successful, mark instance as alive if it hasn't been updated in a while. + let updated = self.instance.updated.unwrap_or(self.instance.published); + if updated.add(Days::new(1)) < Utc::now() { + self.instance.updated = Some(Utc::now()); + + let form = InstanceForm { + updated: Some(naive_now()), + ..InstanceForm::new(self.instance.domain.clone()) + }; + Instance::update(&mut self.pool(), self.instance.id, form).await?; + } + Ok(()) + } + /// Checks that sequential activities `last_successful_id + 1`, `last_successful_id + 2` etc have + /// been sent successfully. In that case updates `last_successful_id` and saves the state to the + /// database if the time since the last save is greater than `SAVE_STATE_EVERY_TIME`. 
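The helper documented above relies on the reversed Ord of SendSuccessInfo (defined in send.rs) so that std::collections::BinaryHeap, a max-heap, yields the smallest outstanding activity id first. A stripped-down sketch of that watermark technique on plain integers, using std::cmp::Reverse instead of a hand-written Ord (illustrative only; the real code also tracks published timestamps and decides when to persist state):

use std::cmp::Reverse;
use std::collections::BinaryHeap;

// advance `last_successful` as long as the heap's smallest entry is the next
// sequential id; out-of-order completions stay parked in the heap until the
// gap below them is filled
fn pop_contiguous(successfuls: &mut BinaryHeap<Reverse<i64>>, last_successful: &mut i64) {
  while successfuls
    .peek()
    .map(|r| r.0 == *last_successful + 1)
    .unwrap_or(false)
  {
    let Reverse(id) = successfuls.pop().expect("peek above ensures pop has a value");
    *last_successful = id;
  }
}

fn main() {
  let mut heap = BinaryHeap::new();
  let mut last: i64 = 10;
  // ids 12 and 13 finish before 11, so nothing can be committed yet
  heap.push(Reverse(12));
  heap.push(Reverse(13));
  pop_contiguous(&mut heap, &mut last);
  assert_eq!(last, 10);
  // once 11 arrives, the run 11..=13 is contiguous and the watermark advances
  heap.push(Reverse(11));
  pop_contiguous(&mut heap, &mut last);
  assert_eq!(last, 13);
}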
+ async fn pop_successfuls_and_write(&mut self, force_write: bool) -> Result<()> { + let Some(mut last_id) = self.state.last_successful_id else { + tracing::warn!( + "{} should be impossible: last successful id is None", + self.instance.domain + ); + return Ok(()); + }; + tracing::debug!( + "{} last: {:?}, next: {:?}, currently in successfuls: {:?}", + self.instance.domain, + last_id, + self.successfuls.peek(), + self.successfuls.iter() + ); + while self + .successfuls + .peek() + .map(|a| a.activity_id == ActivityId(last_id.0 + 1)) + .unwrap_or(false) + { + let next = self + .successfuls + .pop() + .context("peek above ensures pop has value")?; + last_id = next.activity_id; + self.state.last_successful_id = Some(next.activity_id); + self.state.last_successful_published_time = next.published; + } + + let save_state_every = chrono::Duration::from_std(SAVE_STATE_EVERY_TIME).expect("not negative"); + if force_write || (Utc::now() - self.last_state_insert) > save_state_every { + self.save_and_send_state().await?; } Ok(()) } - /// get a list of local communities with the remote inboxes on the given instance that cares about them - async fn get_communities( - &mut self, - pool: &mut DbPool<'_>, - instance_id: InstanceId, - last_fetch: DateTime, - ) -> Result<(HashMap>, DateTime)> { - let new_last_fetch = Utc::now() - chrono::Duration::seconds(10); // update to time before fetch to ensure overlap. subtract 10s to ensure overlap even if published date is not exact - Ok(( - CommunityFollowerView::get_instance_followed_community_inboxes(pool, instance_id, last_fetch) - .await? - .into_iter() - .fold(HashMap::new(), |mut map, (c, u)| { - map.entry(c).or_default().insert(u.into()); - map - }), - new_last_fetch, - )) + /// we collect the relevant inboxes in the main instance worker task, and only spawn the send task + /// if we have inboxes to send to this limits CPU usage and reduces overhead for the (many) + /// cases where we don't have any inboxes + async fn spawn_send_if_needed(&mut self, activity_id: ActivityId) -> Result<()> { + let Some(ele) = get_activity_cached(&mut self.pool(), activity_id) + .await + .context("failed reading activity from db")? + else { + tracing::debug!("{}: {:?} does not exist", self.instance.domain, activity_id); + self + .report_send_result + .send(SendActivityResult::Success(SendSuccessInfo { + activity_id, + published: None, + was_skipped: true, + }))?; + return Ok(()); + }; + let activity = &ele.0; + let inbox_urls = self + .inbox_collector + .get_inbox_urls(activity) + .await + .context("failed figuring out inbox urls")?; + if inbox_urls.is_empty() { + // this is the case when the activity is not relevant to this receiving instance (e.g. no user + // subscribed to the relevant community) + tracing::debug!("{}: {:?} no inboxes", self.instance.domain, activity.id); + self + .report_send_result + .send(SendActivityResult::Success(SendSuccessInfo { + activity_id, + // it would be valid here to either return None or Some(activity.published). The published + // time is only used for stats pages that track federation delay. None can be a bit + // misleading because if you look at / chart the published time for federation from a + // large to a small instance that's only subscribed to a few small communities, + // then it will show the last published time as a days ago even though + // federation is up to date. 
+ published: Some(activity.published), + was_skipped: true, + }))?; + return Ok(()); + } + let initial_fail_count = self.state.fail_count; + let data = self.federation_lib_config.to_request_data(); + let stop = self.stop.clone(); + let domain = self.instance.domain.clone(); + let mut report = self.report_send_result.clone(); + tokio::spawn(async move { + let res = SendRetryTask { + activity: &ele.0, + object: &ele.1, + inbox_urls, + report: &mut report, + initial_fail_count, + domain, + context: data, + stop, + } + .send_retry_loop() + .await; + if let Err(e) = res { + tracing::warn!( + "sending {} errored internally, skipping activity: {:?}", + ele.0.ap_id, + e + ); + // An error in this location means there is some deeper internal issue with the activity, + // for example the actor can't be loaded or similar. These issues are probably not + // solveable by retrying and would cause the federation for this instance to permanently be + // stuck in a retry loop. So we log the error and skip the activity (by reporting success to + // the worker) + report + .send(SendActivityResult::Success(SendSuccessInfo { + activity_id, + published: None, + was_skipped: true, + })) + .ok(); + } + }); + Ok(()) } - async fn save_and_send_state(&mut self, pool: &mut DbPool<'_>) -> Result<()> { + + async fn save_and_send_state(&mut self) -> Result<()> { + tracing::debug!("{}: saving and sending state", self.instance.domain); self.last_state_insert = Utc::now(); - FederationQueueState::upsert(pool, &self.state).await?; - self - .stats_sender - .send((self.instance.domain.clone(), self.state.clone()))?; + FederationQueueState::upsert(&mut self.pool(), &self.state).await?; + self.stats_sender.send(FederationQueueStateWithDomain { + state: self.state.clone(), + domain: self.instance.domain.clone(), + })?; + Ok(()) + } + + fn pool(&self) -> DbPool<'_> { + DbPool::Pool(&self.pool) + } +} + +#[cfg(test)] +#[expect(clippy::unwrap_used)] +#[expect(clippy::indexing_slicing)] +mod test { + + use super::*; + use activitypub_federation::{ + http_signatures::generate_actor_keypair, + protocol::context::WithContext, + }; + use actix_web::{dev::ServerHandle, web, App, HttpResponse, HttpServer}; + use lemmy_api_common::utils::generate_inbox_url; + use lemmy_db_schema::{ + newtypes::DbUrl, + source::{ + activity::{ActorType, SentActivity, SentActivityForm}, + person::{Person, PersonInsertForm}, + }, + traits::Crud, + }; + use lemmy_utils::error::LemmyResult; + use serde_json::{json, Value}; + use serial_test::serial; + use test_context::{test_context, AsyncTestContext}; + use tokio::{ + spawn, + sync::mpsc::{error::TryRecvError, unbounded_channel, UnboundedReceiver}, + }; + use tracing_test::traced_test; + use url::Url; + + struct Data { + context: activitypub_federation::config::Data, + instance: Instance, + person: Person, + stats_receiver: UnboundedReceiver, + inbox_receiver: UnboundedReceiver, + cancel: CancellationToken, + cleaned_up: bool, + wait_stop_server: ServerHandle, + is_concurrent: bool, + } + + impl Data { + async fn init() -> LemmyResult { + let context = LemmyContext::init_test_federation_config().await; + let instance = Instance::read_or_create(&mut context.pool(), "localhost".to_string()).await?; + + let actor_keypair = generate_actor_keypair()?; + let actor_id: DbUrl = Url::parse("http://local.com/u/alice")?.into(); + let person_form = PersonInsertForm { + actor_id: Some(actor_id.clone()), + private_key: (Some(actor_keypair.private_key)), + inbox_url: Some(generate_inbox_url()?), + 
..PersonInsertForm::new("alice".to_string(), actor_keypair.public_key, instance.id) + }; + let person = Person::create(&mut context.pool(), &person_form).await?; + + let cancel = CancellationToken::new(); + let (stats_sender, stats_receiver) = unbounded_channel(); + let (inbox_sender, inbox_receiver) = unbounded_channel(); + + // listen for received activities in background + let wait_stop_server = listen_activities(inbox_sender)?; + + let concurrent_sends_per_instance = std::env::var("LEMMY_TEST_FEDERATION_CONCURRENT_SENDS") + .ok() + .and_then(|s| s.parse().ok()) + .unwrap_or(10); + + let fed_config = FederationWorkerConfig { + concurrent_sends_per_instance, + }; + spawn(InstanceWorker::init_and_loop( + instance.clone(), + context.clone(), + fed_config, + cancel.clone(), + stats_sender, + )); + // wait for startup + sleep(*WORK_FINISHED_RECHECK_DELAY).await; + + Ok(Self { + context: context.to_request_data(), + instance, + person, + stats_receiver, + inbox_receiver, + cancel, + wait_stop_server, + cleaned_up: false, + is_concurrent: concurrent_sends_per_instance > 1, + }) + } + + async fn cleanup(&mut self) -> LemmyResult<()> { + if self.cleaned_up { + return Ok(()); + } + self.cleaned_up = true; + self.cancel.cancel(); + sleep(*WORK_FINISHED_RECHECK_DELAY).await; + Instance::delete_all(&mut self.context.pool()).await?; + Person::delete(&mut self.context.pool(), self.person.id).await?; + self.wait_stop_server.stop(true).await; + Ok(()) + } + } + + /// In order to guarantee that the webserver is stopped via the cleanup function, + /// we implement a test context. + impl AsyncTestContext for Data { + async fn setup() -> Data { + Data::init().await.unwrap() + } + async fn teardown(mut self) { + self.cleanup().await.unwrap() + } + } + + #[test_context(Data)] + #[tokio::test] + #[traced_test] + #[serial] + async fn test_stats(data: &mut Data) -> LemmyResult<()> { + tracing::debug!("hello world"); + + // first receive at startup + let rcv = data.stats_receiver.recv().await.unwrap(); + tracing::debug!("received first stats"); + assert_eq!(data.instance.id, rcv.state.instance_id); + + let sent = send_activity(data.person.actor_id.clone(), &data.context, true).await?; + tracing::debug!("sent activity"); + // receive for successfully sent activity + let inbox_rcv = data.inbox_receiver.recv().await.unwrap(); + let parsed_activity = serde_json::from_str::>(&inbox_rcv)?; + assert_eq!(&sent.data, parsed_activity.inner()); + tracing::debug!("received activity"); + + let rcv = data.stats_receiver.recv().await.unwrap(); + assert_eq!(data.instance.id, rcv.state.instance_id); + assert_eq!(Some(sent.id), rcv.state.last_successful_id); + tracing::debug!("received second stats"); + + data.cleanup().await?; + + // it also sends state on shutdown + let rcv = data.stats_receiver.try_recv(); + assert!(rcv.is_ok()); + + // nothing further received + let rcv = data.stats_receiver.try_recv(); + assert_eq!(Some(TryRecvError::Disconnected), rcv.err()); + let inbox_rcv = data.inbox_receiver.try_recv(); + assert_eq!(Some(TryRecvError::Disconnected), inbox_rcv.err()); + + Ok(()) + } + + #[test_context(Data)] + #[tokio::test] + #[traced_test] + #[serial] + async fn test_send_40(data: &mut Data) -> LemmyResult<()> { + tracing::debug!("hello world"); + + // first receive at startup + let rcv = data.stats_receiver.recv().await.unwrap(); + tracing::debug!("received first stats"); + assert_eq!(data.instance.id, rcv.state.instance_id); + // assert_eq!(Some(ActivityId(0)), rcv.state.last_successful_id); + // let last_id_before = 
rcv.state.last_successful_id.unwrap(); + let mut sent = Vec::new(); + for _ in 0..40 { + sent.push(send_activity(data.person.actor_id.clone(), &data.context, false).await?); + } + sleep(2 * *WORK_FINISHED_RECHECK_DELAY).await; + tracing::debug!("sent activity"); + compare_sent_with_receive(data, sent).await?; + + Ok(()) + } + + #[test_context(Data)] + #[tokio::test] + #[traced_test] + #[serial] + /// this test sends 15 activities, waits and checks they have all been received, then sends 50, + /// etc + async fn test_send_15_20_30(data: &mut Data) -> LemmyResult<()> { + tracing::debug!("hello world"); + + // first receive at startup + let rcv = data.stats_receiver.recv().await.unwrap(); + tracing::debug!("received first stats"); + assert_eq!(data.instance.id, rcv.state.instance_id); + // assert_eq!(Some(ActivityId(0)), rcv.state.last_successful_id); + // let last_id_before = rcv.state.last_successful_id.unwrap(); + let counts = vec![15, 20, 35]; + for count in counts { + tracing::debug!("sending {} activities", count); + let mut sent = Vec::new(); + for _ in 0..count { + sent.push(send_activity(data.person.actor_id.clone(), &data.context, false).await?); + } + sleep(2 * *WORK_FINISHED_RECHECK_DELAY).await; + tracing::debug!("sent activity"); + compare_sent_with_receive(data, sent).await?; + } + + Ok(()) + } + + #[test_context(Data)] + #[tokio::test] + #[serial] + async fn test_update_instance(data: &mut Data) -> LemmyResult<()> { + let form = InstanceForm::new(data.instance.domain.clone()); + Instance::update(&mut data.context.pool(), data.instance.id, form).await?; + + send_activity(data.person.actor_id.clone(), &data.context, true).await?; + data.inbox_receiver.recv().await.unwrap(); + + let instance = + Instance::read_or_create(&mut data.context.pool(), data.instance.domain.clone()).await?; + + assert!(instance.updated.is_some()); + + data.cleanup().await?; + + Ok(()) + } + + fn listen_activities(inbox_sender: UnboundedSender) -> LemmyResult { + let run = HttpServer::new(move || { + App::new() + .app_data(actix_web::web::Data::new(inbox_sender.clone())) + .route( + "/inbox", + web::post().to( + |inbox_sender: actix_web::web::Data>, body: String| async move { + tracing::debug!("received activity: {:?}", body); + inbox_sender.send(body.clone()).unwrap(); + HttpResponse::new(actix_web::http::StatusCode::OK) + }, + ), + ) + }) + .bind(("127.0.0.1", 8085))? + .run(); + let handle = run.handle(); + tokio::spawn(async move { + run.await.unwrap(); + /*select! { + _ = run => {}, + _ = cancel.cancelled() => { } + }*/ + }); + Ok(handle) + } + + async fn send_activity( + actor_id: DbUrl, + context: &LemmyContext, + wait: bool, + ) -> LemmyResult { + // create outgoing activity + let data = json!({ + "actor": "http://ds9.lemmy.ml/u/lemmy_alpha", + "object": "http://ds9.lemmy.ml/comment/1", + "audience": "https://enterprise.lemmy.ml/c/tenforward", + "type": "Like", + "id": format!("http://ds9.lemmy.ml/activities/like/{}", uuid::Uuid::new_v4()), + }); + let form = SentActivityForm { + ap_id: Url::parse(&format!( + "http://local.com/activity/{}", + uuid::Uuid::new_v4() + ))? 
+ .into(), + data, + sensitive: false, + send_inboxes: vec![Some(Url::parse("http://localhost:8085/inbox")?.into())], + send_all_instances: false, + send_community_followers_of: None, + actor_type: ActorType::Person, + actor_apub_id: actor_id, + }; + let sent = SentActivity::create(&mut context.pool(), form).await?; + + if wait { + sleep(*WORK_FINISHED_RECHECK_DELAY * 2).await; + } + + Ok(sent) + } + async fn compare_sent_with_receive(data: &mut Data, mut sent: Vec) -> Result<()> { + let check_order = !data.is_concurrent; // allow out-of order receiving when running parallel + let mut received = Vec::new(); + for _ in 0..sent.len() { + let inbox_rcv = data.inbox_receiver.recv().await.unwrap(); + let parsed_activity = serde_json::from_str::>(&inbox_rcv)?; + received.push(parsed_activity); + } + if !check_order { + // sort by id + received.sort_by(|a, b| { + a.inner()["id"] + .as_str() + .unwrap() + .cmp(b.inner()["id"].as_str().unwrap()) + }); + sent.sort_by(|a, b| { + a.data["id"] + .as_str() + .unwrap() + .cmp(b.data["id"].as_str().unwrap()) + }); + } + // receive for successfully sent activity + for i in 0..sent.len() { + let sent_activity = &sent[i]; + let received_activity = received[i].inner(); + assert_eq!(&sent_activity.data, received_activity); + tracing::debug!("received activity"); + } Ok(()) } } diff --git a/crates/routes/Cargo.toml b/crates/routes/Cargo.toml index 88672a1ec..4a8c53dea 100644 --- a/crates/routes/Cargo.toml +++ b/crates/routes/Cargo.toml @@ -1,5 +1,6 @@ [package] name = "lemmy_routes" +publish = false version.workspace = true edition.workspace = true description.workspace = true @@ -11,8 +12,11 @@ repository.workspace = true [lib] doctest = false +[lints] +workspace = true + [dependencies] -lemmy_utils = { workspace = true } +lemmy_utils = { workspace = true, features = ["full"] } lemmy_db_views = { workspace = true } lemmy_db_views_actor = { workspace = true } lemmy_db_schema = { workspace = true } @@ -26,8 +30,7 @@ reqwest = { workspace = true, features = ["stream"] } reqwest-middleware = { workspace = true } serde = { workspace = true } url = { workspace = true } -strum = { workspace = true } -once_cell = { workspace = true } tracing = { workspace = true } tokio = { workspace = true } -rss = "2.0.4" +http.workspace = true +rss = "2.0.9" diff --git a/crates/routes/src/feeds.rs b/crates/routes/src/feeds.rs index 3ef760a51..00518032d 100644 --- a/crates/routes/src/feeds.rs +++ b/crates/routes/src/feeds.rs @@ -2,13 +2,14 @@ use crate::local_user_view_from_jwt; use actix_web::{error::ErrorBadRequest, web, Error, HttpRequest, HttpResponse, Result}; use anyhow::anyhow; use chrono::{DateTime, Utc}; -use lemmy_api_common::context::LemmyContext; +use lemmy_api_common::{context::LemmyContext, utils::check_private_instance}; use lemmy_db_schema::{ source::{community::Community, person::Person}, traits::ApubActor, CommentSortType, + CommunityVisibility, ListingType, - SortType, + PostSortType, }; use lemmy_db_views::{ post_view::PostQuery, @@ -21,19 +22,19 @@ use lemmy_db_views_actor::{ }; use lemmy_utils::{ cache_header::cache_1hour, - error::LemmyError, + error::{LemmyError, LemmyErrorType, LemmyResult}, utils::markdown::{markdown_to_html, sanitize_html}, }; -use once_cell::sync::Lazy; use rss::{ - extension::dublincore::DublinCoreExtensionBuilder, - ChannelBuilder, - GuidBuilder, + extension::{dublincore::DublinCoreExtension, ExtensionBuilder, ExtensionMap}, + Category, + Channel, + EnclosureBuilder, + Guid, Item, - ItemBuilder, }; use serde::Deserialize; -use 
std::{collections::BTreeMap, str::FromStr}; +use std::{collections::BTreeMap, str::FromStr, sync::LazyLock}; const RSS_FETCH_LIMIT: i64 = 20; @@ -45,12 +46,12 @@ struct Params { } impl Params { - fn sort_type(&self) -> Result { + fn sort_type(&self) -> Result { let sort_query = self .sort .clone() - .unwrap_or_else(|| SortType::Hot.to_string()); - SortType::from_str(&sort_query).map_err(ErrorBadRequest) + .unwrap_or_else(|| PostSortType::Hot.to_string()); + PostSortType::from_str(&sort_query).map_err(ErrorBadRequest) } fn get_limit(&self) -> i64 { self.limit.unwrap_or(RSS_FETCH_LIMIT) @@ -79,15 +80,36 @@ pub fn config(cfg: &mut web::ServiceConfig) { ); } -static RSS_NAMESPACE: Lazy> = Lazy::new(|| { +static RSS_NAMESPACE: LazyLock> = LazyLock::new(|| { let mut h = BTreeMap::new(); h.insert( "dc".to_string(), rss::extension::dublincore::NAMESPACE.to_string(), ); + h.insert( + "media".to_string(), + "http://search.yahoo.com/mrss/".to_string(), + ); h }); +/// Removes any characters disallowed by the XML grammar. +/// See https://www.w3.org/TR/xml/#NT-Char for details. +fn sanitize_xml(input: String) -> String { + input + .chars() + .filter(|&c| { + matches!(c, + '\u{09}' + | '\u{0A}' + | '\u{0D}' + | '\u{20}'..='\u{D7FF}' + | '\u{E000}'..='\u{FFFD}' + | '\u{10000}'..='\u{10FFFF}') + }) + .collect() +} + #[tracing::instrument(skip_all)] async fn get_all_feed( info: web::Query, @@ -126,12 +148,14 @@ async fn get_local_feed( async fn get_feed_data( context: &LemmyContext, listing_type: ListingType, - sort_type: SortType, + sort_type: PostSortType, limit: i64, page: i64, -) -> Result { +) -> LemmyResult { let site_view = SiteView::read_local(&mut context.pool()).await?; + check_private_instance(&None, &site_view.local_site)?; + let posts = PostQuery { listing_type: (Some(listing_type)), sort: (Some(sort_type)), @@ -139,23 +163,24 @@ async fn get_feed_data( page: (Some(page)), ..Default::default() } - .list(&mut context.pool()) + .list(&site_view.site, &mut context.pool()) .await?; let items = create_post_items(posts, &context.settings().get_protocol_and_hostname())?; - let mut channel_builder = ChannelBuilder::default(); - channel_builder - .namespaces(RSS_NAMESPACE.clone()) - .title(&format!("{} - {}", site_view.site.name, listing_type)) - .link(context.settings().get_protocol_and_hostname()) - .items(items); + let mut channel = Channel { + namespaces: RSS_NAMESPACE.clone(), + title: format!("{} - {}", site_view.site.name, listing_type), + link: context.settings().get_protocol_and_hostname(), + items, + ..Default::default() + }; if let Some(site_desc) = site_view.site.description { - channel_builder.description(&site_desc); + channel.set_description(&site_desc); } - let rss = channel_builder.build().to_string(); + let rss = channel.to_string(); Ok( HttpResponse::Ok() .content_type("application/rss+xml") @@ -215,7 +240,7 @@ async fn get_feed( } .map_err(ErrorBadRequest)?; - let rss = builder.build().to_string(); + let rss = builder.to_string(); Ok( HttpResponse::Ok() @@ -227,13 +252,17 @@ async fn get_feed( #[tracing::instrument(skip_all)] async fn get_feed_user( context: &LemmyContext, - sort_type: &SortType, + sort_type: &PostSortType, limit: &i64, page: &i64, user_name: &str, -) -> Result { +) -> LemmyResult { let site_view = SiteView::read_local(&mut context.pool()).await?; - let person = Person::read_from_name(&mut context.pool(), user_name, false).await?; + let person = Person::read_from_name(&mut context.pool(), user_name, false) + .await? 
+ .ok_or(LemmyErrorType::NotFound)?; + + check_private_instance(&None, &site_view.local_site)?; let posts = PostQuery { listing_type: (Some(ListingType::All)), @@ -243,31 +272,38 @@ async fn get_feed_user( page: (Some(*page)), ..Default::default() } - .list(&mut context.pool()) + .list(&site_view.site, &mut context.pool()) .await?; let items = create_post_items(posts, &context.settings().get_protocol_and_hostname())?; + let channel = Channel { + namespaces: RSS_NAMESPACE.clone(), + title: format!("{} - {}", sanitize_xml(site_view.site.name), person.name), + link: person.actor_id.to_string(), + items, + ..Default::default() + }; - let mut channel_builder = ChannelBuilder::default(); - channel_builder - .namespaces(RSS_NAMESPACE.clone()) - .title(&format!("{} - {}", site_view.site.name, person.name)) - .link(person.actor_id.to_string()) - .items(items); - - Ok(channel_builder) + Ok(channel) } #[tracing::instrument(skip_all)] async fn get_feed_community( context: &LemmyContext, - sort_type: &SortType, + sort_type: &PostSortType, limit: &i64, page: &i64, community_name: &str, -) -> Result { +) -> LemmyResult { let site_view = SiteView::read_local(&mut context.pool()).await?; - let community = Community::read_from_name(&mut context.pool(), community_name, false).await?; + let community = Community::read_from_name(&mut context.pool(), community_name, false) + .await? + .ok_or(LemmyErrorType::NotFound)?; + if community.visibility != CommunityVisibility::Public { + return Err(LemmyErrorType::NotFound.into()); + } + + check_private_instance(&None, &site_view.local_site)?; let posts = PostQuery { sort: (Some(*sort_type)), @@ -276,66 +312,69 @@ async fn get_feed_community( page: (Some(*page)), ..Default::default() } - .list(&mut context.pool()) + .list(&site_view.site, &mut context.pool()) .await?; let items = create_post_items(posts, &context.settings().get_protocol_and_hostname())?; - let mut channel_builder = ChannelBuilder::default(); - channel_builder - .namespaces(RSS_NAMESPACE.clone()) - .title(&format!("{} - {}", site_view.site.name, community.name)) - .link(community.actor_id.to_string()) - .items(items); + let mut channel = Channel { + namespaces: RSS_NAMESPACE.clone(), + title: format!("{} - {}", sanitize_xml(site_view.site.name), community.name), + link: community.actor_id.to_string(), + items, + ..Default::default() + }; if let Some(community_desc) = community.description { - channel_builder.description(markdown_to_html(&community_desc)); + channel.set_description(markdown_to_html(&community_desc)); } - Ok(channel_builder) + Ok(channel) } #[tracing::instrument(skip_all)] async fn get_feed_front( context: &LemmyContext, - sort_type: &SortType, + sort_type: &PostSortType, limit: &i64, page: &i64, jwt: &str, -) -> Result { +) -> LemmyResult { let site_view = SiteView::read_local(&mut context.pool()).await?; let local_user = local_user_view_from_jwt(jwt, context).await?; + check_private_instance(&Some(local_user.clone()), &site_view.local_site)?; + let posts = PostQuery { listing_type: (Some(ListingType::Subscribed)), - local_user: (Some(&local_user)), + local_user: (Some(&local_user.local_user)), sort: (Some(*sort_type)), limit: (Some(*limit)), page: (Some(*page)), ..Default::default() } - .list(&mut context.pool()) + .list(&site_view.site, &mut context.pool()) .await?; let protocol_and_hostname = context.settings().get_protocol_and_hostname(); let items = create_post_items(posts, &protocol_and_hostname)?; - - let mut channel_builder = ChannelBuilder::default(); - channel_builder - 
.namespaces(RSS_NAMESPACE.clone()) - .title(&format!("{} - Subscribed", site_view.site.name)) - .link(protocol_and_hostname) - .items(items); + let mut channel = Channel { + namespaces: RSS_NAMESPACE.clone(), + title: format!("{} - Subscribed", sanitize_xml(site_view.site.name)), + link: protocol_and_hostname, + items, + ..Default::default() + }; if let Some(site_desc) = site_view.site.description { - channel_builder.description(markdown_to_html(&site_desc)); + channel.set_description(markdown_to_html(&site_desc)); } - Ok(channel_builder) + Ok(channel) } #[tracing::instrument(skip_all)] -async fn get_feed_inbox(context: &LemmyContext, jwt: &str) -> Result { +async fn get_feed_inbox(context: &LemmyContext, jwt: &str) -> LemmyResult { let site_view = SiteView::read_local(&mut context.pool()).await?; let local_user = local_user_view_from_jwt(jwt, context).await?; let person_id = local_user.local_user.person_id; @@ -343,6 +382,8 @@ async fn get_feed_inbox(context: &LemmyContext, jwt: &str) -> Result Result, mentions: Vec, protocol_and_hostname: &str, -) -> Result, LemmyError> { +) -> LemmyResult> { let mut reply_items: Vec = replies .iter() .map(|r| { @@ -400,7 +442,7 @@ fn create_reply_and_mention_items( protocol_and_hostname, ) }) - .collect::, LemmyError>>()?; + .collect::>>()?; let mut mention_items: Vec = mentions .iter() @@ -414,7 +456,7 @@ fn create_reply_and_mention_items( protocol_and_hostname, ) }) - .collect::, LemmyError>>()?; + .collect::>>()?; reply_items.append(&mut mention_items); Ok(reply_items) @@ -427,58 +469,48 @@ fn build_item( url: &str, content: &str, protocol_and_hostname: &str, -) -> Result { - let mut i = ItemBuilder::default(); - i.title(format!("Reply from {creator_name}")); - let author_url = format!("{protocol_and_hostname}/u/{creator_name}"); - i.author(format!( - "/u/{creator_name} (link)" - )); - let dt = published; - i.pub_date(dt.to_rfc2822()); - i.comments(url.to_owned()); - let guid = GuidBuilder::default().permalink(true).value(url).build(); - i.guid(guid); - i.link(url.to_owned()); +) -> LemmyResult { // TODO add images - let html = markdown_to_html(content); - i.description(html); - Ok(i.build()) + let author_url = format!("{protocol_and_hostname}/u/{creator_name}"); + let guid = Some(Guid { + permalink: true, + value: url.to_owned(), + }); + let description = Some(markdown_to_html(content)); + + Ok(Item { + title: Some(format!("Reply from {creator_name}")), + author: Some(format!( + "/u/{creator_name} (link)" + )), + pub_date: Some(published.to_rfc2822()), + comments: Some(url.to_owned()), + link: Some(url.to_owned()), + guid, + description, + ..Default::default() + }) } #[tracing::instrument(skip_all)] -fn create_post_items( - posts: Vec, - protocol_and_hostname: &str, -) -> Result, LemmyError> { +fn create_post_items(posts: Vec, protocol_and_hostname: &str) -> LemmyResult> { let mut items: Vec = Vec::new(); for p in posts { - let mut i = ItemBuilder::default(); - let mut dc_extension = DublinCoreExtensionBuilder::default(); - - i.title(sanitize_html(&p.post.name)); - - dc_extension.creators(vec![p.creator.actor_id.to_string()]); - - let dt = p.post.published; - i.pub_date(dt.to_rfc2822()); - let post_url = format!("{}/post/{}", protocol_and_hostname, p.post.id); - i.comments(post_url.clone()); - let guid = GuidBuilder::default() - .permalink(true) - .value(&post_url) - .build(); - i.guid(guid); - let community_url = format!( "{}/c/{}", protocol_and_hostname, sanitize_html(&p.community.name) ); - - // TODO add images + let dublin_core_ext = 
Some(DublinCoreExtension { + creators: vec![p.creator.actor_id.to_string()], + ..DublinCoreExtension::default() + }); + let guid = Some(Guid { + permalink: true, + value: post_url.clone(), + }); let mut description = format!("submitted by {} to {}
{} points | {} comments", p.creator.actor_id, sanitize_html(&p.creator.name), @@ -489,23 +521,65 @@ fn create_post_items( p.counts.comments); // If its a url post, add it to the description - if let Some(url) = p.post.url { + // and see if we can parse it as a media enclosure. + let enclosure_opt = p.post.url.map(|url| { let link_html = format!("
{url}"); description.push_str(&link_html); - i.link(url.to_string()); - } else { - i.link(post_url.clone()); - } + + let mime_type = p + .post + .url_content_type + .unwrap_or_else(|| "application/octet-stream".to_string()); + let mut enclosure_bld = EnclosureBuilder::default(); + + enclosure_bld.url(url.as_str().to_string()); + enclosure_bld.mime_type(mime_type); + enclosure_bld.length("0".to_string()); + enclosure_bld.build() + }); if let Some(body) = p.post.body { let html = markdown_to_html(&body); description.push_str(&html); } - i.description(description); + let mut extensions = ExtensionMap::new(); - i.dublin_core_ext(dc_extension.build()); - items.push(i.build()); + // If there's a thumbnail URL, add a media:content tag to display it. + // See https://www.rssboard.org/media-rss#media-content for details. + if let Some(url) = p.post.thumbnail_url { + let mut thumbnail_ext = ExtensionBuilder::default(); + thumbnail_ext.name("media:content".to_string()); + thumbnail_ext.attrs(BTreeMap::from([ + ("url".to_string(), url.to_string()), + ("medium".to_string(), "image".to_string()), + ])); + + extensions.insert( + "media".to_string(), + BTreeMap::from([("content".to_string(), vec![thumbnail_ext.build()])]), + ); + } + let category = Category { + name: p.community.title, + domain: Some(p.community.actor_id.to_string()), + }; + + let i = Item { + title: Some(sanitize_html(sanitize_xml(p.post.name).as_str())), + pub_date: Some(p.post.published.to_rfc2822()), + comments: Some(post_url.clone()), + guid, + description: Some(sanitize_xml(description)), + dublin_core_ext, + link: Some(post_url.clone()), + extensions, + enclosure: enclosure_opt, + categories: vec![category], + ..Default::default() + }; + + items.push(i); } Ok(items) diff --git a/crates/routes/src/images.rs b/crates/routes/src/images.rs index a537300d2..a0f804b6b 100644 --- a/crates/routes/src/images.rs +++ b/crates/routes/src/images.rs @@ -1,27 +1,28 @@ use actix_web::{ body::BodyStream, - error, http::{ header::{HeaderName, ACCEPT_ENCODING, HOST}, + Method, StatusCode, }, - web, - Error, + web::{self, Query}, HttpRequest, HttpResponse, }; use futures::stream::{Stream, StreamExt}; -use lemmy_api_common::context::LemmyContext; +use http::HeaderValue; +use lemmy_api_common::{context::LemmyContext, request::PictrsResponse}; use lemmy_db_schema::source::{ - image_upload::{ImageUpload, ImageUploadForm}, + images::{LocalImage, LocalImageForm, RemoteImage}, local_site::LocalSite, }; use lemmy_db_views::structs::LocalUserView; -use lemmy_utils::{rate_limit::RateLimitCell, REQWEST_TIMEOUT}; +use lemmy_utils::{error::LemmyResult, rate_limit::RateLimitCell, REQWEST_TIMEOUT}; use reqwest::Body; use reqwest_middleware::{ClientWithMiddleware, RequestBuilder}; -use serde::{Deserialize, Serialize}; +use serde::Deserialize; use std::time::Duration; +use url::Url; pub fn config( cfg: &mut web::ServiceConfig, @@ -40,42 +41,76 @@ pub fn config( .service(web::resource("/pictrs/image/delete/{token}/{filename}").route(web::get().to(delete))); } -#[derive(Debug, Serialize, Deserialize)] -struct Image { - file: String, - delete_token: String, +trait ProcessUrl { + /// If thumbnail or format is given, this uses the pictrs process endpoint. + /// Otherwise, it uses the normal pictrs url (IE image/original). 
+ fn process_url(&self, image_url: &str, pictrs_url: &Url) -> String; } -#[derive(Debug, Serialize, Deserialize)] -struct Images { - msg: String, - files: Option>, -} - -#[derive(Deserialize)] -struct PictrsParams { +#[derive(Deserialize, Clone)] +struct PictrsGetParams { format: Option, thumbnail: Option, } -#[derive(Deserialize)] -enum PictrsPurgeParams { - #[serde(rename = "file")] - File(String), - #[serde(rename = "alias")] - Alias(String), +impl ProcessUrl for PictrsGetParams { + fn process_url(&self, src: &str, pictrs_url: &Url) -> String { + if self.format.is_none() && self.thumbnail.is_none() { + format!("{}image/original/{}", pictrs_url, src) + } else { + // Take file type from name, or jpg if nothing is given + let format = self + .clone() + .format + .unwrap_or_else(|| src.split('.').last().unwrap_or("jpg").to_string()); + + let mut url = format!("{}image/process.{}?src={}", pictrs_url, format, src); + + if let Some(size) = self.thumbnail { + url = format!("{url}&thumbnail={size}",); + } + url + } + } } +#[derive(Deserialize, Clone)] +pub struct ImageProxyParams { + url: String, + format: Option, + thumbnail: Option, +} + +impl ProcessUrl for ImageProxyParams { + fn process_url(&self, proxy_url: &str, pictrs_url: &Url) -> String { + if self.format.is_none() && self.thumbnail.is_none() { + format!("{}image/original?proxy={}", pictrs_url, proxy_url) + } else { + // Take file type from name, or jpg if nothing is given + let format = self + .clone() + .format + .unwrap_or_else(|| proxy_url.split('.').last().unwrap_or("jpg").to_string()); + + let mut url = format!("{}image/process.{}?proxy={}", pictrs_url, format, proxy_url); + + if let Some(size) = self.thumbnail { + url = format!("{url}&thumbnail={size}",); + } + url + } + } +} fn adapt_request( request: &HttpRequest, client: &ClientWithMiddleware, url: String, ) -> RequestBuilder { - // remove accept-encoding header so that pictrs doesnt compress the response + // remove accept-encoding header so that pictrs doesn't compress the response const INVALID_HEADERS: &[HeaderName] = &[ACCEPT_ENCODING, HOST]; let client_request = client - .request(request.method().clone(), url) + .request(convert_method(request.method()), url) .timeout(REQWEST_TIMEOUT); request @@ -85,7 +120,8 @@ fn adapt_request( if INVALID_HEADERS.contains(key) { client_req } else { - client_req.header(key, value) + // TODO: remove as_str and as_bytes conversions after actix-web upgrades to http 1.0 + client_req.header(key.as_str(), value.as_bytes()) } }) } @@ -95,54 +131,56 @@ async fn upload( body: web::Payload, // require login local_user_view: LocalUserView, + client: web::Data, context: web::Data, -) -> Result { +) -> LemmyResult { // TODO: check rate limit here let pictrs_config = context.settings().pictrs_config()?; let image_url = format!("{}image", pictrs_config.url); - let mut client_req = adapt_request(&req, context.client(), image_url); + let mut client_req = adapt_request(&req, &client, image_url); if let Some(addr) = req.head().peer_addr { client_req = client_req.header("X-Forwarded-For", addr.to_string()) }; let res = client_req - .timeout(Duration::from_secs(30)) + .timeout(Duration::from_secs(pictrs_config.upload_timeout)) .body(Body::wrap_stream(make_send(body))) .send() - .await - .map_err(error::ErrorBadRequest)?; + .await?; let status = res.status(); - let images = res.json::().await.map_err(error::ErrorBadRequest)?; + let images = res.json::().await?; if let Some(images) = &images.files { - for uploaded_image in images { - let form = 
ImageUploadForm { - local_user_id: local_user_view.local_user.id, - pictrs_alias: uploaded_image.file.to_string(), - pictrs_delete_token: uploaded_image.delete_token.to_string(), + for image in images { + let form = LocalImageForm { + local_user_id: Some(local_user_view.local_user.id), + pictrs_alias: image.file.to_string(), + pictrs_delete_token: image.delete_token.to_string(), }; - ImageUpload::create(&mut context.pool(), &form) - .await - .map_err(error::ErrorBadRequest)?; + + let protocol_and_hostname = context.settings().get_protocol_and_hostname(); + let thumbnail_url = image.thumbnail_url(&protocol_and_hostname)?; + + // Also store the details for the image + let details_form = image.details.build_image_details_form(&thumbnail_url); + LocalImage::create(&mut context.pool(), &form, &details_form).await?; } } - Ok(HttpResponse::build(status).json(images)) + Ok(HttpResponse::build(convert_status(status)).json(images)) } async fn full_res( filename: web::Path, - web::Query(params): web::Query, + web::Query(params): web::Query, req: HttpRequest, client: web::Data, context: web::Data, local_user_view: Option, -) -> Result { +) -> LemmyResult { // block access to images if instance is private and unauthorized, public - let local_site = LocalSite::read(&mut context.pool()) - .await - .map_err(error::ErrorBadRequest)?; + let local_site = LocalSite::read(&mut context.pool()).await?; if local_site.private_instance && local_user_view.is_none() { return Ok(HttpResponse::Unauthorized().finish()); } @@ -150,31 +188,18 @@ async fn full_res( // If there are no query params, the URL is original let pictrs_config = context.settings().pictrs_config()?; - let url = if params.format.is_none() && params.thumbnail.is_none() { - format!("{}image/original/{}", pictrs_config.url, name,) - } else { - // Take file type from name, or jpg if nothing is given - let format = params - .format - .unwrap_or_else(|| name.split('.').last().unwrap_or("jpg").to_string()); - let mut url = format!("{}image/process.{}?src={}", pictrs_config.url, format, name,); + let processed_url = params.process_url(name, &pictrs_config.url); - if let Some(size) = params.thumbnail { - url = format!("{url}&thumbnail={size}",); - } - url - }; - - image(url, req, client).await + image(processed_url, req, &client).await } async fn image( url: String, req: HttpRequest, - client: web::Data, -) -> Result { - let mut client_req = adapt_request(&req, &client, url); + client: &ClientWithMiddleware, +) -> LemmyResult { + let mut client_req = adapt_request(&req, client, url); if let Some(addr) = req.head().peer_addr { client_req = client_req.header("X-Forwarded-For", addr.to_string()); @@ -184,16 +209,16 @@ async fn image( client_req = client_req.header("X-Forwarded-For", addr.to_string()); } - let res = client_req.send().await.map_err(error::ErrorBadRequest)?; + let res = client_req.send().await?; - if res.status() == StatusCode::NOT_FOUND { + if res.status() == http::StatusCode::NOT_FOUND { return Ok(HttpResponse::NotFound().finish()); } - let mut client_res = HttpResponse::build(res.status()); + let mut client_res = HttpResponse::build(StatusCode::from_u16(res.status().as_u16())?); for (name, value) in res.headers().iter().filter(|(h, _)| *h != "connection") { - client_res.insert_header((name.clone(), value.clone())); + client_res.insert_header(convert_header(name, value)); } Ok(client_res.body(BodyStream::new(res.bytes_stream()))) @@ -206,7 +231,7 @@ async fn delete( context: web::Data, // require login _local_user_view: LocalUserView, -) -> 
Result { +) -> LemmyResult { let (token, file) = components.into_inner(); let pictrs_config = context.settings().pictrs_config()?; @@ -218,13 +243,30 @@ async fn delete( client_req = client_req.header("X-Forwarded-For", addr.to_string()); } - let res = client_req.send().await.map_err(error::ErrorBadRequest)?; + let res = client_req.send().await?; - ImageUpload::delete_by_alias(&mut context.pool(), &file) - .await - .map_err(error::ErrorBadRequest)?; + LocalImage::delete_by_alias(&mut context.pool(), &file).await?; - Ok(HttpResponse::build(res.status()).body(BodyStream::new(res.bytes_stream()))) + Ok(HttpResponse::build(convert_status(res.status())).body(BodyStream::new(res.bytes_stream()))) +} + +pub async fn image_proxy( + Query(params): Query, + req: HttpRequest, + client: web::Data, + context: web::Data, +) -> LemmyResult { + let url = Url::parse(¶ms.url)?; + + // Check that url corresponds to a federated image so that this can't be abused as a proxy + // for arbitrary purposes. + RemoteImage::validate(&mut context.pool(), url.clone().into()).await?; + + let pictrs_config = context.settings().pictrs_config()?; + + let processed_url = params.process_url(¶ms.url, &pictrs_config.url); + + image(processed_url, req, &client).await } fn make_send(mut stream: S) -> impl Stream + Send + Unpin + 'static @@ -268,3 +310,14 @@ where std::pin::Pin::new(&mut self.rx).poll_recv(cx) } } + +// TODO: remove these conversions after actix-web upgrades to http 1.0 +fn convert_status(status: http::StatusCode) -> StatusCode { + StatusCode::from_u16(status.as_u16()).expect("status can be converted") +} +fn convert_method(method: &Method) -> http::Method { + http::Method::from_bytes(method.as_str().as_bytes()).expect("method can be converted") +} +fn convert_header<'a>(name: &'a http::HeaderName, value: &'a HeaderValue) -> (&'a str, &'a [u8]) { + (name.as_str(), value.as_bytes()) +} diff --git a/crates/routes/src/lib.rs b/crates/routes/src/lib.rs index ec28fda45..a88225622 100644 --- a/crates/routes/src/lib.rs +++ b/crates/routes/src/lib.rs @@ -1,6 +1,6 @@ use lemmy_api_common::{claims::Claims, context::LemmyContext, utils::check_user_valid}; use lemmy_db_views::structs::LocalUserView; -use lemmy_utils::error::LemmyError; +use lemmy_utils::error::LemmyResult; pub mod feeds; pub mod images; @@ -8,10 +8,7 @@ pub mod nodeinfo; pub mod webfinger; #[tracing::instrument(skip_all)] -async fn local_user_view_from_jwt( - jwt: &str, - context: &LemmyContext, -) -> Result { +async fn local_user_view_from_jwt(jwt: &str, context: &LemmyContext) -> LemmyResult { let local_user_id = Claims::validate(jwt, context).await?; let local_user_view = LocalUserView::read(&mut context.pool(), local_user_id).await?; check_user_valid(&local_user_view.person)?; diff --git a/crates/routes/src/nodeinfo.rs b/crates/routes/src/nodeinfo.rs index 06d4c2dd6..e5b183a0b 100644 --- a/crates/routes/src/nodeinfo.rs +++ b/crates/routes/src/nodeinfo.rs @@ -1,37 +1,40 @@ -use actix_web::{error::ErrorBadRequest, web, Error, HttpResponse, Result}; -use anyhow::anyhow; +use actix_web::{web, Error, HttpResponse, Result}; use lemmy_api_common::context::LemmyContext; use lemmy_db_schema::RegistrationMode; use lemmy_db_views::structs::SiteView; use lemmy_utils::{ cache_header::{cache_1hour, cache_3days}, - error::LemmyError, - version, + error::LemmyResult, + VERSION, }; use serde::{Deserialize, Serialize}; +use std::collections::HashMap; use url::Url; +/// A description of the nodeinfo endpoint is here: +/// 
https://github.com/jhass/nodeinfo/blob/main/PROTOCOL.md pub fn config(cfg: &mut web::ServiceConfig) { cfg .route( - "/nodeinfo/2.0.json", + "/nodeinfo/2.1", web::get().to(node_info).wrap(cache_1hour()), ) - .service(web::redirect("/version", "/nodeinfo/2.0.json")) + .service(web::redirect("/version", "/nodeinfo/2.1")) + // For backwards compatibility, can be removed after Lemmy 0.20 + .service(web::redirect("/nodeinfo/2.0.json", "/nodeinfo/2.1")) + .service(web::redirect("/nodeinfo/2.1.json", "/nodeinfo/2.1")) .route( "/.well-known/nodeinfo", web::get().to(node_info_well_known).wrap(cache_3days()), ); } -async fn node_info_well_known( - context: web::Data, -) -> Result { +async fn node_info_well_known(context: web::Data) -> LemmyResult { let node_info = NodeInfoWellKnown { links: vec![NodeInfoWellKnownLinks { - rel: Url::parse("http://nodeinfo.diaspora.software/ns/schema/2.0")?, + rel: Url::parse("http://nodeinfo.diaspora.software/ns/schema/2.1")?, href: Url::parse(&format!( - "{}/nodeinfo/2.0.json", + "{}/nodeinfo/2.1", &context.settings().get_protocol_and_hostname(), ))?, }], @@ -40,25 +43,20 @@ async fn node_info_well_known( } async fn node_info(context: web::Data) -> Result { - let site_view = SiteView::read_local(&mut context.pool()) - .await - .map_err(|_| ErrorBadRequest(LemmyError::from(anyhow!("not_found"))))?; + let site_view = SiteView::read_local(&mut context.pool()).await?; - let protocols = if site_view.local_site.federation_enabled { - Some(vec!["activitypub".to_string()]) - } else { - None - }; // Since there are 3 registration options, // we need to set open_registrations as true if RegistrationMode is not Closed. let open_registrations = Some(site_view.local_site.registration_mode != RegistrationMode::Closed); let json = NodeInfo { - version: Some("2.0".to_string()), + version: Some("2.1".to_string()), software: Some(NodeInfoSoftware { name: Some("lemmy".to_string()), - version: Some(version::VERSION.to_string()), + version: Some(VERSION.to_string()), + repository: Some("https://github.com/LemmyNet/lemmy".to_string()), + homepage: Some("https://join-lemmy.org/".to_string()), }), - protocols, + protocols: Some(vec!["activitypub".to_string()]), usage: Some(NodeInfoUsage { users: Some(NodeInfoUsers { total: Some(site_view.counts.users), @@ -69,22 +67,28 @@ async fn node_info(context: web::Data) -> Result, } #[derive(Serialize, Deserialize, Debug)] -struct NodeInfoWellKnownLinks { +pub struct NodeInfoWellKnownLinks { pub rel: Url, pub href: Url, } +/// Nodeinfo spec: http://nodeinfo.diaspora.software/docson/index.html#/ns/schema/2.1 #[derive(Serialize, Deserialize, Debug, Default)] #[serde(rename_all = "camelCase", default)] pub struct NodeInfo { @@ -93,6 +97,9 @@ pub struct NodeInfo { pub protocols: Option>, pub usage: Option, pub open_registrations: Option, + /// These fields are required by the spec for no reason + pub services: Option, + pub metadata: Option>, } #[derive(Serialize, Deserialize, Debug, Default)] @@ -100,6 +107,8 @@ pub struct NodeInfo { pub struct NodeInfoSoftware { pub name: Option, pub version: Option, + pub repository: Option, + pub homepage: Option, } #[derive(Serialize, Deserialize, Debug, Default)] @@ -117,3 +126,10 @@ pub struct NodeInfoUsers { pub active_halfyear: Option, pub active_month: Option, } + +#[derive(Serialize, Deserialize, Debug, Default)] +#[serde(rename_all = "camelCase", default)] +pub struct NodeInfoServices { + pub inbound: Option>, + pub outbound: Option>, +} diff --git a/crates/routes/src/webfinger.rs 
b/crates/routes/src/webfinger.rs index afb2d33ae..c5b7024cd 100644 --- a/crates/routes/src/webfinger.rs +++ b/crates/routes/src/webfinger.rs @@ -1,14 +1,15 @@ use activitypub_federation::{ config::Data, - fetch::webfinger::{extract_webfinger_name, Webfinger, WebfingerLink}, + fetch::webfinger::{extract_webfinger_name, Webfinger, WebfingerLink, WEBFINGER_CONTENT_TYPE}, }; use actix_web::{web, web::Query, HttpResponse}; use lemmy_api_common::context::LemmyContext; use lemmy_db_schema::{ source::{community::Community, person::Person}, traits::ApubActor, + CommunityVisibility, }; -use lemmy_utils::{cache_header::cache_3days, error::LemmyError}; +use lemmy_utils::{cache_header::cache_3days, error::LemmyResult}; use serde::Deserialize; use std::collections::HashMap; use url::Url; @@ -34,36 +35,59 @@ pub fn config(cfg: &mut web::ServiceConfig) { async fn get_webfinger_response( info: Query, context: Data, -) -> Result { +) -> LemmyResult { let name = extract_webfinger_name(&info.resource, &context)?; - let name_ = name.clone(); - let user_id: Option = Person::read_from_name(&mut context.pool(), &name_, false) - .await - .ok() - .map(|c| c.actor_id.into()); - let community_id: Option = Community::read_from_name(&mut context.pool(), &name, false) - .await - .ok() - .map(|c| c.actor_id.into()); + let links = if name == context.settings().hostname { + // webfinger response for instance actor (required for mastodon authorized fetch) + let url = Url::parse(&context.settings().get_protocol_and_hostname())?; + vec![webfinger_link_for_actor(Some(url), "none", &context)] + } else { + // webfinger response for user/community + let user_id: Option = Person::read_from_name(&mut context.pool(), name, false) + .await + .ok() + .flatten() + .map(|c| c.actor_id.into()); + let community_id: Option = Community::read_from_name(&mut context.pool(), name, false) + .await + .ok() + .flatten() + .and_then(|c| { + if c.visibility == CommunityVisibility::Public { + let id: Url = c.actor_id.into(); + Some(id) + } else { + None + } + }); - // Mastodon seems to prioritize the last webfinger item in case of duplicates. Put - // community last so that it gets prioritized. For Lemmy the order doesnt matter. - let links = vec![ - webfinger_link_for_actor(user_id, "Person", &context), - webfinger_link_for_actor(community_id, "Group", &context), - ] + // Mastodon seems to prioritize the last webfinger item in case of duplicates. Put + // community last so that it gets prioritized. For Lemmy the order doesn't matter. 
+ vec![ + webfinger_link_for_actor(user_id, "Person", &context), + webfinger_link_for_actor(community_id, "Group", &context), + ] + } .into_iter() .flatten() - .collect(); + .collect::>(); - let json = Webfinger { - subject: info.resource.clone(), - links, - ..Default::default() - }; + if links.is_empty() { + Ok(HttpResponse::NotFound().finish()) + } else { + let json = Webfinger { + subject: info.resource.clone(), + links, + ..Default::default() + }; - Ok(HttpResponse::Ok().json(json)) + Ok( + HttpResponse::Ok() + .content_type(WEBFINGER_CONTENT_TYPE.as_bytes()) + .json(json), + ) + } } fn webfinger_link_for_actor( diff --git a/crates/utils/Cargo.toml b/crates/utils/Cargo.toml index dc9714b0d..c22f863c1 100644 --- a/crates/utils/Cargo.toml +++ b/crates/utils/Cargo.toml @@ -13,44 +13,85 @@ name = "lemmy_utils" path = "src/lib.rs" doctest = false +[[bin]] +name = "lemmy_util_bin" +path = "src/main.rs" +required-features = ["full"] + +[lints] +workspace = true + [features] -full = ["ts-rs"] +full = [ + "dep:ts-rs", + "dep:diesel", + "dep:rosetta-i18n", + "dep:actix-web", + "dep:reqwest-middleware", + "dep:tracing", + "dep:actix-web", + "dep:serde_json", + "dep:anyhow", + "dep:http", + "dep:deser-hjson", + "dep:regex", + "dep:urlencoding", + "dep:doku", + "dep:url", + "dep:smart-default", + "dep:enum-map", + "dep:futures", + "dep:tokio", + "dep:html2text", + "dep:lettre", + "dep:uuid", + "dep:itertools", + "dep:markdown-it", +] + +[package.metadata.cargo-shear] +ignored = ["http"] [dependencies] -regex = { workspace = true } -chrono = { workspace = true } -tracing = { workspace = true } -tracing-error = { workspace = true } -itertools = { workspace = true } +regex = { workspace = true, optional = true } +tracing = { workspace = true, optional = true } +itertools = { workspace = true, optional = true } serde = { workspace = true } -serde_json = { workspace = true } -once_cell = { workspace = true } -url = { workspace = true } -actix-web = { workspace = true } -anyhow = { workspace = true } -reqwest-middleware = { workspace = true } +serde_json = { workspace = true, optional = true } +url = { workspace = true, optional = true } +actix-web = { workspace = true, optional = true } +anyhow = { workspace = true, optional = true } +reqwest-middleware = { workspace = true, optional = true } strum = { workspace = true } -strum_macros = { workspace = true } -futures = { workspace = true } -diesel = { workspace = true, features = ["chrono"] } -http = { workspace = true } -doku = { workspace = true, features = ["url-2"] } -uuid = { workspace = true, features = ["serde", "v4"] } -rosetta-i18n = { workspace = true } -typed-builder = { workspace = true } -percent-encoding = { workspace = true } -tokio = { workspace = true } -openssl = "0.10.55" -html2text = "0.6.0" -deser-hjson = "1.2.0" -smart-default = "0.7.1" -lettre = { version = "0.10.4", features = ["tokio1", "tokio1-native-tls"] } -markdown-it = "0.5.1" +futures = { workspace = true, optional = true } +diesel = { workspace = true, features = ["chrono"], optional = true } +http = { workspace = true, optional = true } +doku = { workspace = true, features = ["url-2"], optional = true } +uuid = { workspace = true, features = ["serde", "v4"], optional = true } +rosetta-i18n = { workspace = true, optional = true } +tokio = { workspace = true, optional = true } +urlencoding = { workspace = true, optional = true } +html2text = { version = "0.12.5", optional = true } +deser-hjson = { version = "2.2.4", optional = true } +smart-default = { version = "0.7.1", 
optional = true } +lettre = { version = "0.11.8", default-features = false, features = [ + "builder", + "tokio1", + "tokio1-rustls-tls", + "smtp-transport", +], optional = true } +markdown-it = { version = "0.6.1", optional = true } ts-rs = { workspace = true, optional = true } -enum-map = { workspace = true } +enum-map = { workspace = true, optional = true } +cfg-if = "1" +clearurls = { version = "0.0.4", features = ["linkify"] } +markdown-it-block-spoiler = "1.0.0" +markdown-it-sub = "1.0.0" +markdown-it-sup = "1.0.0" +markdown-it-ruby = "1.0.0" [dev-dependencies] -reqwest = { workspace = true } +pretty_assertions = { workspace = true } [build-dependencies] rosetta-build = { version = "0.1.3", default-features = false } diff --git a/crates/utils/src/apub.rs b/crates/utils/src/apub.rs deleted file mode 100644 index 53e069d77..000000000 --- a/crates/utils/src/apub.rs +++ /dev/null @@ -1,26 +0,0 @@ -use openssl::{pkey::PKey, rsa::Rsa}; -use std::io::{Error, ErrorKind}; - -pub struct Keypair { - pub private_key: String, - pub public_key: String, -} - -/// Generate the asymmetric keypair for ActivityPub HTTP signatures. -pub fn generate_actor_keypair() -> Result { - let rsa = Rsa::generate(2048)?; - let pkey = PKey::from_rsa(rsa)?; - let public_key = pkey.public_key_to_pem()?; - let private_key = pkey.private_key_to_pem_pkcs8()?; - let key_to_string = |key| match String::from_utf8(key) { - Ok(s) => Ok(s), - Err(e) => Err(Error::new( - ErrorKind::Other, - format!("Failed converting key to string: {e}"), - )), - }; - Ok(Keypair { - private_key: key_to_string(private_key)?, - public_key: key_to_string(public_key)?, - }) -} diff --git a/crates/utils/src/email.rs b/crates/utils/src/email.rs index fdff19033..7bac7ad67 100644 --- a/crates/utils/src/email.rs +++ b/crates/utils/src/email.rs @@ -1,5 +1,5 @@ use crate::{ - error::{LemmyError, LemmyErrorExt, LemmyErrorType}, + error::{LemmyErrorExt, LemmyErrorType, LemmyResult}, settings::structs::Settings, }; use html2text; @@ -25,7 +25,7 @@ pub async fn send_email( to_username: &str, html: &str, settings: &Settings, -) -> Result<(), LemmyError> { +) -> LemmyResult<()> { let email_config = settings.email.clone().ok_or(LemmyErrorType::NoEmailSetup)?; let domain = settings.hostname.clone(); @@ -75,10 +75,7 @@ pub async fn send_email( }; // Set the creds if they exist - let smtp_password = std::env::var("LEMMY_SMTP_PASSWORD") - .ok() - .or(email_config.smtp_password); - + let smtp_password = email_config.smtp_password(); if let (Some(username), Some(password)) = (email_config.smtp_login, smtp_password) { builder = builder.credentials(Credentials::new(username, password)); } diff --git a/crates/utils/src/error.rs b/crates/utils/src/error.rs index 9fa6bc508..c95af03e2 100644 --- a/crates/utils/src/error.rs +++ b/crates/utils/src/error.rs @@ -1,79 +1,13 @@ +use cfg_if::cfg_if; use serde::{Deserialize, Serialize}; -use std::{ - fmt, - fmt::{Debug, Display}, -}; -use tracing_error::SpanTrace; -#[cfg(feature = "full")] -use ts_rs::TS; +use std::{backtrace::Backtrace, fmt::Debug}; +use strum::{Display, EnumIter}; -pub type LemmyResult = Result; - -pub struct LemmyError { - pub error_type: LemmyErrorType, - pub inner: anyhow::Error, - pub context: SpanTrace, -} - -/// Maximum number of items in an array passed as API parameter. 
See [[LemmyErrorType::TooManyItems]] -pub const MAX_API_PARAM_ELEMENTS: usize = 1000; - -impl From for LemmyError -where - T: Into, -{ - fn from(t: T) -> Self { - let cause = t.into(); - LemmyError { - error_type: LemmyErrorType::Unknown(format!("{}", &cause)), - inner: cause, - context: SpanTrace::capture(), - } - } -} - -impl Debug for LemmyError { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.debug_struct("LemmyError") - .field("message", &self.error_type) - .field("inner", &self.inner) - .field("context", &self.context) - .finish() - } -} - -impl Display for LemmyError { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!(f, "{}: ", &self.error_type)?; - // print anyhow including trace - // https://docs.rs/anyhow/latest/anyhow/struct.Error.html#display-representations - // this will print the anyhow trace (only if it exists) - // and if RUST_BACKTRACE=1, also a full backtrace - writeln!(f, "{:?}", self.inner)?; - fmt::Display::fmt(&self.context, f) - } -} - -impl actix_web::error::ResponseError for LemmyError { - fn status_code(&self) -> http::StatusCode { - if self.error_type == LemmyErrorType::IncorrectLogin { - return http::StatusCode::UNAUTHORIZED; - } - match self.inner.downcast_ref::() { - Some(diesel::result::Error::NotFound) => http::StatusCode::NOT_FOUND, - _ => http::StatusCode::BAD_REQUEST, - } - } - - fn error_response(&self) -> actix_web::HttpResponse { - actix_web::HttpResponse::build(self.status_code()).json(&self.error_type) - } -} - -#[derive(Display, Debug, Serialize, Deserialize, Clone, PartialEq, EnumIter)] -#[cfg_attr(feature = "full", derive(TS))] +#[derive(Display, Debug, Serialize, Deserialize, Clone, PartialEq, Eq, EnumIter, Hash)] +#[cfg_attr(feature = "full", derive(ts_rs::TS))] #[cfg_attr(feature = "full", ts(export))] #[serde(tag = "error", content = "message", rename_all = "snake_case")] +#[non_exhaustive] // TODO: order these based on the crate they belong to (utils, federation, db, api) pub enum LemmyErrorType { ReportReasonRequired, @@ -89,28 +23,24 @@ pub enum LemmyErrorType { CouldntUpdateComment, CouldntUpdatePrivateMessage, CannotLeaveAdmin, - NoLinesInHtml, - SiteMetadataPageIsNotDoctypeHtml, + // TODO: also remove the translations of unused errors PictrsResponseError(String), PictrsPurgeResponseError(String), - PictrsCachingDisabled, ImageUrlMissingPathSegments, ImageUrlMissingLastPathSegment, PictrsApiKeyNotProvided, NoContentTypeHeader, NotAnImageType, NotAModOrAdmin, - NoAdmins, - NotTopAdmin, NotTopMod, NotLoggedIn, + NotHigherMod, + NotHigherAdmin, SiteBan, Deleted, - BannedFromCommunity, - CouldntFindCommunity, - CouldntFindPerson, PersonIsBlocked, - DownvotesAreDisabled, + CommunityIsBlocked, + InstanceIsBlocked, InstanceIsPrivate, /// Password must be between 10 and 60 characters InvalidPassword, @@ -125,37 +55,23 @@ pub enum LemmyErrorType { OnlyAdminsCanCreateCommunities, CommunityAlreadyExists, LanguageNotAllowed, - OnlyModsCanPostInCommunity, CouldntUpdatePost, NoPostEditAllowed, - CouldntFindPost, EditPrivateMessageNotAllowed, SiteAlreadyExists, ApplicationQuestionRequired, InvalidDefaultPostListingType, RegistrationClosed, RegistrationApplicationAnswerRequired, + RegistrationUsernameRequired, EmailAlreadyExists, - FederationForbiddenByStrictAllowList, + UsernameAlreadyExists, PersonIsBannedFromCommunity, - ObjectIsNotPublic, - InvalidCommunity, - CannotCreatePostOrCommentInDeletedOrRemovedCommunity, - CannotReceivePage, - NewPostCannotBeLocked, - OnlyLocalAdminCanRemoveCommunity, - 
OnlyLocalAdminCanRestoreCommunity, NoIdGiven, IncorrectLogin, - InvalidQuery, ObjectNotLocal, - PostIsLocked, - PersonIsBannedFromSite(String), - InvalidVoteValue, - PageDoesNotSpecifyCreator, - PageDoesNotSpecifyGroup, - NoCommunityFoundInCc, NoEmailSetup, + LocalSiteNotSetup, EmailSmtpServerNeedsAPort, MissingAnEmail, RateLimitError, @@ -165,6 +81,7 @@ pub enum LemmyErrorType { InvalidPostTitle, InvalidBodyField, BioLengthOverflow, + AltTextLengthOverflow, MissingTotpToken, MissingTotpSecret, IncorrectTotpToken, @@ -182,14 +99,13 @@ pub enum LemmyErrorType { CouldntUpdateCommunityHiddenStatus, PersonBlockAlreadyExists, UserAlreadyExists, - TokenNotFound, CouldntLikePost, CouldntSavePost, CouldntMarkPostAsRead, + CouldntHidePost, CouldntUpdateCommunity, CouldntUpdateReplies, CouldntUpdatePersonMentions, - PostTitleTooLong, CouldntCreatePost, CouldntCreatePrivateMessage, CouldntUpdatePrivate, @@ -197,122 +113,255 @@ pub enum LemmyErrorType { CouldntSetAllRegistrationsAccepted, CouldntSetAllEmailVerified, Banned, + BlockedUrl, CouldntGetComments, CouldntGetPosts, InvalidUrl, EmailSendFailed, Slurs, - CouldntFindObject, RegistrationDenied(Option), - FederationDisabled, - DomainBlocked(String), - DomainNotInAllowList(String), - FederationDisabledByStrictAllowList, SiteNameRequired, SiteNameLengthOverflow, PermissiveRegex, InvalidRegex, CaptchaIncorrect, - PasswordResetLimitReached, CouldntCreateAudioCaptcha, InvalidUrlScheme, CouldntSendWebmention, ContradictingFilters, InstanceBlockAlreadyExists, - /// `jwt` cookie must be marked secure and httponly - AuthCookieInsecure, - /// Thrown when an API call is submitted with more than 1000 array elements, see [[MAX_API_PARAM_ELEMENTS]] + /// Thrown when an API call is submitted with more than 1000 array elements, see + /// [[MAX_API_PARAM_ELEMENTS]] TooManyItems, - CommunityHasNoFollowers, BanExpirationInPast, InvalidUnixTime, + InvalidBotAction, + CantBlockLocalInstance, Unknown(String), + UrlLengthOverflow, + OauthAuthorizationInvalid, + OauthLoginFailed, + OauthRegistrationClosed, + CouldntDeleteOauthProvider, + NotFound, + CommunityHasNoFollowers, + PostScheduleTimeMustBeInFuture, + TooManyScheduledPosts, + FederationError(Option), } -impl From for LemmyError { - fn from(error_type: LemmyErrorType) -> Self { - let inner = anyhow::anyhow!("{}", error_type); - LemmyError { - error_type, - inner, - context: SpanTrace::capture(), +/// Federation related errors, these dont need to be translated. +#[derive(Display, Debug, Serialize, Deserialize, Clone, PartialEq, Eq, EnumIter, Hash)] +#[cfg_attr(feature = "full", derive(ts_rs::TS))] +#[cfg_attr(feature = "full", ts(export))] +#[non_exhaustive] +pub enum FederationError { + // TODO: merge into a single NotFound error + CouldntFindActivity, + InvalidCommunity, + CannotCreatePostOrCommentInDeletedOrRemovedCommunity, + CannotReceivePage, + OnlyLocalAdminCanRemoveCommunity, + OnlyLocalAdminCanRestoreCommunity, + PostIsLocked, + PersonIsBannedFromSite(String), + InvalidVoteValue, + PageDoesNotSpecifyCreator, + CouldntGetComments, + CouldntGetPosts, + FederationDisabled, + DomainBlocked(String), + DomainNotInAllowList(String), + FederationDisabledByStrictAllowList, + ContradictingFilters, + UrlWithoutDomain, + InboxTimeout, + CantDeleteSite, + ObjectIsNotPublic, +} + +cfg_if! 
{ + if #[cfg(feature = "full")] { + + use std::fmt; + pub type LemmyResult = Result; + + pub struct LemmyError { + pub error_type: LemmyErrorType, + pub inner: anyhow::Error, + pub context: Backtrace, + } + + /// Maximum number of items in an array passed as API parameter. See [[LemmyErrorType::TooManyItems]] + pub const MAX_API_PARAM_ELEMENTS: usize = 10_000; + + impl From for LemmyError + where + T: Into, + { + fn from(t: T) -> Self { + let cause = t.into(); + let error_type = match cause.downcast_ref::() { + Some(&diesel::NotFound) => LemmyErrorType::NotFound, + _ => LemmyErrorType::Unknown(format!("{}", &cause)) + }; + LemmyError { + error_type, + inner: cause, + context: Backtrace::capture(), + } + } + } + + impl Debug for LemmyError { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_struct("LemmyError") + .field("message", &self.error_type) + .field("inner", &self.inner) + .field("context", &self.context) + .finish() + } + } + + impl fmt::Display for LemmyError { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "{}: ", &self.error_type)?; + writeln!(f, "{}", self.inner)?; + fmt::Display::fmt(&self.context, f) + } + } + + impl actix_web::error::ResponseError for LemmyError { + fn status_code(&self) -> actix_web::http::StatusCode { + if self.error_type == LemmyErrorType::IncorrectLogin { + return actix_web::http::StatusCode::UNAUTHORIZED; + } + match self.inner.downcast_ref::() { + Some(diesel::result::Error::NotFound) => actix_web::http::StatusCode::NOT_FOUND, + _ => actix_web::http::StatusCode::BAD_REQUEST, + } + } + + fn error_response(&self) -> actix_web::HttpResponse { + actix_web::HttpResponse::build(self.status_code()).json(&self.error_type) + } + } + + impl From for LemmyError { + fn from(error_type: LemmyErrorType) -> Self { + let inner = anyhow::anyhow!("{}", error_type); + LemmyError { + error_type, + inner, + context: Backtrace::capture(), + } + } + } + + impl From for LemmyError { + fn from(error_type: FederationError) -> Self { + let inner = anyhow::anyhow!("{}", error_type); + LemmyError { + error_type: LemmyErrorType::FederationError(Some(error_type)), + inner, + context: Backtrace::capture(), + } + } + } + + pub trait LemmyErrorExt> { + fn with_lemmy_type(self, error_type: LemmyErrorType) -> LemmyResult; + } + + impl> LemmyErrorExt for Result { + fn with_lemmy_type(self, error_type: LemmyErrorType) -> LemmyResult { + self.map_err(|error| LemmyError { + error_type, + inner: error.into(), + context: Backtrace::capture(), + }) + } + } + pub trait LemmyErrorExt2 { + fn with_lemmy_type(self, error_type: LemmyErrorType) -> LemmyResult; + fn into_anyhow(self) -> Result; + } + + impl LemmyErrorExt2 for LemmyResult { + fn with_lemmy_type(self, error_type: LemmyErrorType) -> LemmyResult { + self.map_err(|mut e| { + e.error_type = error_type; + e + }) + } + // this function can't be an impl From or similar because it would conflict with one of the other broad Into<> implementations + fn into_anyhow(self) -> Result { + self.map_err(|e| e.inner) + } + } + + #[cfg(test)] + mod tests { + #![allow(clippy::indexing_slicing)] + use super::*; + use actix_web::{body::MessageBody, ResponseError}; + use pretty_assertions::assert_eq; + use std::fs::read_to_string; + use strum::IntoEnumIterator; + + #[test] + fn deserializes_no_message() -> LemmyResult<()> { + let err = LemmyError::from(LemmyErrorType::Banned).error_response(); + let json = String::from_utf8(err.into_body().try_into_bytes().unwrap_or_default().to_vec())?; + assert_eq!(&json, 
"{\"error\":\"banned\"}"); + + Ok(()) + } + + #[test] + fn deserializes_with_message() -> LemmyResult<()> { + let reg_banned = LemmyErrorType::PictrsResponseError(String::from("reason")); + let err = LemmyError::from(reg_banned).error_response(); + let json = String::from_utf8(err.into_body().try_into_bytes().unwrap_or_default().to_vec())?; + assert_eq!( + &json, + "{\"error\":\"pictrs_response_error\",\"message\":\"reason\"}" + ); + + Ok(()) + } + + #[test] + fn test_convert_diesel_errors() { + let not_found_error = LemmyError::from(diesel::NotFound); + assert_eq!(LemmyErrorType::NotFound, not_found_error.error_type); + assert_eq!(404, not_found_error.status_code()); + + let other_error = LemmyError::from(diesel::result::Error::NotInTransaction); + assert!(matches!(other_error.error_type, LemmyErrorType::Unknown{..})); + assert_eq!(400, other_error.status_code()); + } + + /// Check if errors match translations. Disabled because many are not translated at all. + #[test] + #[ignore] + fn test_translations_match() -> LemmyResult<()> { + #[derive(Deserialize)] + struct Err { + error: String, + } + + let translations = read_to_string("translations/translations/en.json")?; + + for e in LemmyErrorType::iter() { + let msg = serde_json::to_string(&e)?; + let msg: Err = serde_json::from_str(&msg)?; + let msg = msg.error; + assert!(translations.contains(&format!("\"{msg}\"")), "{msg}"); + } + + Ok(()) + } } } } - -pub trait LemmyErrorExt> { - fn with_lemmy_type(self, error_type: LemmyErrorType) -> Result; -} - -impl> LemmyErrorExt for Result { - fn with_lemmy_type(self, error_type: LemmyErrorType) -> Result { - self.map_err(|error| LemmyError { - error_type, - inner: error.into(), - context: SpanTrace::capture(), - }) - } -} -pub trait LemmyErrorExt2 { - fn with_lemmy_type(self, error_type: LemmyErrorType) -> Result; - fn into_anyhow(self) -> Result; -} - -impl LemmyErrorExt2 for Result { - fn with_lemmy_type(self, error_type: LemmyErrorType) -> Result { - self.map_err(|mut e| { - e.error_type = error_type; - e - }) - } - // this function can't be an impl From or similar because it would conflict with one of the other broad Into<> implementations - fn into_anyhow(self) -> Result { - self.map_err(|e| e.inner) - } -} - -#[cfg(test)] -mod tests { - #![allow(clippy::unwrap_used)] - #![allow(clippy::indexing_slicing)] - use super::*; - use actix_web::{body::MessageBody, ResponseError}; - use std::fs::read_to_string; - use strum::IntoEnumIterator; - - #[test] - fn deserializes_no_message() { - let err = LemmyError::from(LemmyErrorType::Banned).error_response(); - let json = String::from_utf8(err.into_body().try_into_bytes().unwrap().to_vec()).unwrap(); - assert_eq!(&json, "{\"error\":\"banned\"}") - } - - #[test] - fn deserializes_with_message() { - let reg_banned = LemmyErrorType::PersonIsBannedFromSite(String::from("reason")); - let err = LemmyError::from(reg_banned).error_response(); - let json = String::from_utf8(err.into_body().try_into_bytes().unwrap().to_vec()).unwrap(); - assert_eq!( - &json, - "{\"error\":\"person_is_banned_from_site\",\"message\":\"reason\"}" - ) - } - - /// Check if errors match translations. Disabled because many are not translated at all. 
- #[test] - #[ignore] - fn test_translations_match() { - #[derive(Deserialize)] - struct Err { - error: String, - } - - let translations = read_to_string("translations/translations/en.json").unwrap(); - LemmyErrorType::iter().for_each(|e| { - let msg = serde_json::to_string(&e).unwrap(); - let msg: Err = serde_json::from_str(&msg).unwrap(); - let msg = msg.error; - assert!(translations.contains(&format!("\"{msg}\"")), "{msg}"); - }); - } -} diff --git a/crates/utils/src/lib.rs b/crates/utils/src/lib.rs index 6f261febd..7f0691496 100644 --- a/crates/utils/src/lib.rs +++ b/crates/utils/src/lib.rs @@ -1,28 +1,36 @@ -#[macro_use] -extern crate strum_macros; -#[macro_use] -extern crate smart_default; +use cfg_if::cfg_if; + +cfg_if! { + if #[cfg(feature = "full")] { + pub mod cache_header; + pub mod email; + pub mod rate_limit; + pub mod request; + pub mod response; + pub mod settings; + pub mod utils; + } +} -pub mod apub; -pub mod cache_header; -pub mod email; pub mod error; -pub mod rate_limit; -pub mod request; -pub mod response; -pub mod settings; -pub mod utils; -pub mod version; - -use error::LemmyError; -use futures::Future; +pub use error::LemmyErrorType; use std::time::Duration; -use tracing::Instrument; pub type ConnectionId = usize; +pub const VERSION: &str = env!("CARGO_PKG_VERSION"); + pub const REQWEST_TIMEOUT: Duration = Duration::from_secs(10); +#[cfg(debug_assertions)] +pub const CACHE_DURATION_FEDERATION: Duration = Duration::from_millis(500); +#[cfg(not(debug_assertions))] +pub const CACHE_DURATION_FEDERATION: Duration = Duration::from_secs(60); + +pub const CACHE_DURATION_API: Duration = Duration::from_secs(1); + +pub const MAX_COMMENT_DEPTH_LIMIT: usize = 50; + #[macro_export] macro_rules! location_info { () => { @@ -35,16 +43,21 @@ macro_rules! location_info { }; } +#[cfg(feature = "full")] /// tokio::spawn, but accepts a future that may fail and also /// * logs errors /// * attaches the spawned task to the tracing span of the caller for better logging -pub fn spawn_try_task(task: impl Future> + Send + 'static) { +pub fn spawn_try_task( + task: impl futures::Future> + Send + 'static, +) { + use tracing::Instrument; tokio::spawn( async { if let Err(e) = task.await { tracing::warn!("error in spawn: {e}"); } } - .in_current_span(), // this makes sure the inner tracing gets the same context as where spawn was called + .in_current_span(), /* this makes sure the inner tracing gets the same context as where + * spawn was called */ ); } diff --git a/crates/utils/src/main.rs b/crates/utils/src/main.rs index c2365f233..ed658b097 100644 --- a/crates/utils/src/main.rs +++ b/crates/utils/src/main.rs @@ -1,16 +1,24 @@ -use doku::json::{AutoComments, CommentsStyle, Formatting, ObjectsStyle}; -use lemmy_utils::settings::structs::Settings; +use cfg_if::cfg_if; + fn main() { - let fmt = Formatting { - auto_comments: AutoComments::none(), - comments_style: CommentsStyle { - separator: "#".to_owned(), - }, - objects_style: ObjectsStyle { - surround_keys_with_quotes: false, - use_comma_as_separator: false, - }, - ..Default::default() - }; - println!("{}", doku::to_json_fmt_val(&fmt, &Settings::default())); + cfg_if! 
{ + if #[cfg(feature = "full")] { + use doku::json::{AutoComments, CommentsStyle, Formatting, ObjectsStyle}; + use lemmy_utils::settings::structs::Settings; + let fmt = Formatting { + auto_comments: AutoComments::none(), + comments_style: CommentsStyle { + separator: "#".to_owned(), + }, + objects_style: ObjectsStyle { + surround_keys_with_quotes: false, + use_comma_as_separator: false, + }, + ..Default::default() + }; + println!("{}", doku::to_json_fmt_val(&fmt, &Settings::default())); + } else { + + } + } } diff --git a/crates/utils/src/rate_limit/mod.rs b/crates/utils/src/rate_limit/mod.rs index 63090749b..a6cf92150 100644 --- a/crates/utils/src/rate_limit/mod.rs +++ b/crates/utils/src/rate_limit/mod.rs @@ -36,11 +36,11 @@ impl RateLimitCell { let state_weak_ref = Arc::downgrade(&state); tokio::spawn(async move { - let hour = Duration::from_secs(3600); + let interval = Duration::from_secs(120); // This loop stops when all other references to `state` are dropped while let Some(state) = state_weak_ref.upgrade() { - tokio::time::sleep(hour).await; + tokio::time::sleep(interval).await; state .lock() .expect("Failed to lock rate limit mutex for reading") @@ -222,8 +222,6 @@ fn parse_ip(addr: &str) -> Option { #[cfg(test)] mod tests { - #![allow(clippy::unwrap_used)] - #![allow(clippy::indexing_slicing)] #[test] fn test_parse_ip() { diff --git a/crates/utils/src/rate_limit/rate_limiter.rs b/crates/utils/src/rate_limit/rate_limiter.rs index d0dad5df2..01d379986 100644 --- a/crates/utils/src/rate_limit/rate_limiter.rs +++ b/crates/utils/src/rate_limit/rate_limiter.rs @@ -1,14 +1,15 @@ use enum_map::EnumMap; -use once_cell::sync::Lazy; use std::{ collections::HashMap, hash::Hash, net::{IpAddr, Ipv4Addr, Ipv6Addr}, + sync::LazyLock, time::Instant, }; +use strum::{AsRefStr, Display}; use tracing::debug; -static START_TIME: Lazy = Lazy::new(Instant::now); +static START_TIME: LazyLock = LazyLock::new(Instant::now); /// Smaller than `std::time::Instant` because it uses a smaller integer for seconds and doesn't /// store nanoseconds @@ -45,9 +46,10 @@ impl Bucket { fn update(self, now: InstantSecs, config: BucketConfig) -> Self { let secs_since_last_checked = now.secs.saturating_sub(self.last_checked.secs); - // For `secs_since_last_checked` seconds, the amount of tokens increases by `capacity` every `secs_to_refill` seconds. - // The amount of tokens added per second is `capacity / secs_to_refill`. - // The expression below is like `secs_since_last_checked * (capacity / secs_to_refill)` but with precision and non-overflowing multiplication. + // For `secs_since_last_checked` seconds, the amount of tokens increases by `capacity` every + // `secs_to_refill` seconds. The amount of tokens added per second is `capacity / + // secs_to_refill`. The expression below is like `secs_since_last_checked * (capacity / + // secs_to_refill)` but with precision and non-overflowing multiplication. let added_tokens = u64::from(secs_since_last_checked) * u64::from(config.capacity) / u64::from(config.secs_to_refill); @@ -64,7 +66,7 @@ impl Bucket { } } -#[derive(Debug, enum_map::Enum, Copy, Clone, AsRefStr)] +#[derive(Debug, enum_map::Enum, Copy, Clone, Display, AsRefStr)] pub enum ActionType { Message, Register, @@ -123,8 +125,9 @@ impl MapLevel for Map { ..config }); - // Remove groups that are no longer needed if the hash map's existing allocation has no space for new groups. - // This is done before calling `HashMap::entry` because that immediately allocates just like `HashMap::insert`. 
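The refill arithmetic described in the `Bucket::update` comment above is easiest to follow with concrete numbers; a self-contained check (all values illustrative):

```rust
#[test]
fn bucket_refill_example() {
  // capacity 6 refilled over 3 seconds => 2 tokens per second, capped at capacity
  let (capacity, secs_to_refill): (u32, u32) = (6, 3);
  let tokens: u32 = 1; // tokens left over from earlier requests
  let secs_since_last_checked: u32 = 2;

  let added_tokens =
    u64::from(secs_since_last_checked) * u64::from(capacity) / u64::from(secs_to_refill);
  let refilled = std::cmp::min(capacity, tokens + added_tokens as u32);

  assert_eq!(added_tokens, 4);
  assert_eq!(refilled, 5);
}
```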
+ // Remove groups that are no longer needed if the hash map's existing allocation has no space + // for new groups. This is done before calling `HashMap::entry` because that immediately + // allocates just like `HashMap::insert`. if (self.capacity() == self.len()) && !self.contains_key(&addr_part) { self.remove_full_buckets(now, configs); } @@ -133,7 +136,6 @@ impl MapLevel for Map { .entry(addr_part) .or_insert(RateLimitedGroup::new(now, adjusted_configs)); - #[allow(clippy::indexing_slicing)] let total_passes = group.check_total(action_type, now, adjusted_configs[action_type]); let children_pass = group.children.check( @@ -157,8 +159,7 @@ impl MapLevel for Map { // Evaluated if `some_children_remaining` is false let total_has_refill_in_future = || { - group.total.into_iter().all(|(action_type, bucket)| { - #[allow(clippy::indexing_slicing)] + group.total.into_iter().any(|(action_type, bucket)| { let config = configs[action_type]; bucket.update(now, config).tokens != config.capacity }) @@ -211,14 +212,14 @@ impl RateLimitedGroup { now: InstantSecs, config: BucketConfig, ) -> bool { - #[allow(clippy::indexing_slicing)] // `EnumMap` has no `get` funciton let bucket = &mut self.total[action_type]; let new_bucket = bucket.update(now, config); if new_bucket.tokens == 0 { // Not enough tokens yet - // Setting `bucket` to `new_bucket` here is useless and would cause the bucket to start over at 0 tokens because of rounding + // Setting `bucket` to `new_bucket` here is useless and would cause the bucket to start over + // at 0 tokens because of rounding false } else { // Consume 1 token @@ -238,10 +239,12 @@ pub struct RateLimitState { /// /// The same thing happens for the first 48 and 56 bits, but with increased capacity. /// - /// This is done because all users can easily switch to any other IPv6 address that has the same first 64 bits. - /// It could be as low as 48 bits for some networks, which is the reason for 48 and 56 bit address groups. + /// This is done because all users can easily switch to any other IPv6 address that has the same + /// first 64 bits. It could be as low as 48 bits for some networks, which is the reason for 48 + /// and 56 bit address groups. ipv6_buckets: Map<[u8; 6], Map>>, - /// This stores a `BucketConfig` for each `ActionType`. `EnumMap` makes it impossible to have a missing `BucketConfig`. + /// This stores a `BucketConfig` for each `ActionType`. `EnumMap` makes it impossible to have a + /// missing `BucketConfig`. bucket_configs: EnumMap, } @@ -306,10 +309,10 @@ fn split_ipv6(ip: Ipv6Addr) -> ([u8; 6], u8, u8) { #[cfg(test)] mod tests { - #![allow(clippy::unwrap_used)] - #![allow(clippy::indexing_slicing)] use super::{ActionType, BucketConfig, InstantSecs, RateLimitState, RateLimitedGroup}; + use crate::error::LemmyResult; + use pretty_assertions::assert_eq; #[test] fn test_split_ipv6() { @@ -323,7 +326,7 @@ mod tests { } #[test] - fn test_rate_limiter() { + fn test_rate_limiter() -> LemmyResult<()> { let bucket_configs = enum_map::enum_map! 
{ ActionType::Message => BucketConfig { capacity: 2, @@ -337,7 +340,8 @@ mod tests { let mut rate_limiter = RateLimitState::new(bucket_configs); let mut now = InstantSecs::now(); - // Do 1 `Message` and 1 `Post` action for each IP address, and expect the limit to not be reached + // Do 1 `Message` and 1 `Post` action for each IP address, and expect the limit to not be + // reached let ips = [ "123.123.123.123", "1:2:3::", @@ -346,14 +350,13 @@ mod tests { "1:2:3:0405:6::", ]; for ip in ips { - let ip = ip.parse().unwrap(); + let ip = ip.parse()?; let message_passed = rate_limiter.check(ActionType::Message, ip, now); let post_passed = rate_limiter.check(ActionType::Post, ip, now); assert!(message_passed); assert!(post_passed); } - #[allow(clippy::indexing_slicing)] let expected_buckets = |factor: u32, tokens_consumed: u32| { let adjusted_configs = bucket_configs.map(|_, config| BucketConfig { capacity: config.capacity.saturating_mul(factor), @@ -404,7 +407,7 @@ mod tests { // Do 2 `Message` actions for 1 IP address and expect only the 2nd one to fail for expected_to_pass in [true, false] { - let ip = "1:2:3:0400::".parse().unwrap(); + let ip = "1:2:3:0400::".parse()?; let passed = rate_limiter.check(ActionType::Message, ip, now); assert_eq!(passed, expected_to_pass); } @@ -414,5 +417,25 @@ mod tests { rate_limiter.remove_full_buckets(now); assert!(rate_limiter.ipv4_buckets.is_empty()); assert!(rate_limiter.ipv6_buckets.is_empty()); + + // `remove full buckets` should not remove empty buckets + let ip = "1.1.1.1".parse()?; + // empty the bucket with 2 requests + assert!(rate_limiter.check(ActionType::Post, ip, now)); + assert!(rate_limiter.check(ActionType::Post, ip, now)); + + rate_limiter.remove_full_buckets(now); + assert!(!rate_limiter.ipv4_buckets.is_empty()); + + // `remove full buckets` should not remove partial buckets + now.secs += 2; + let ip = "1.1.1.1".parse()?; + // Only make one request, so bucket still has 1 token + assert!(rate_limiter.check(ActionType::Post, ip, now)); + + rate_limiter.remove_full_buckets(now); + assert!(!rate_limiter.ipv4_buckets.is_empty()); + + Ok(()) } } diff --git a/crates/utils/src/response.rs b/crates/utils/src/response.rs index b521c0963..f37c15dd7 100644 --- a/crates/utils/src/response.rs +++ b/crates/utils/src/response.rs @@ -37,6 +37,7 @@ mod tests { use crate::error::{LemmyError, LemmyErrorType}; use actix_web::{ error::ErrorInternalServerError, + http::StatusCode, middleware::ErrorHandlers, test, web, @@ -45,7 +46,7 @@ mod tests { Handler, Responder, }; - use http::StatusCode; + use pretty_assertions::assert_eq; #[actix_web::test] async fn test_non_error_responses_are_not_modified() { diff --git a/crates/utils/src/settings/mod.rs b/crates/utils/src/settings/mod.rs index 6b8982a11..aba1a4fb1 100644 --- a/crates/utils/src/settings/mod.rs +++ b/crates/utils/src/settings/mod.rs @@ -1,25 +1,29 @@ -use crate::{ - error::LemmyError, - location_info, - settings::structs::{PictrsConfig, Settings}, -}; +use crate::{error::LemmyResult, location_info}; use anyhow::{anyhow, Context}; use deser_hjson::from_str; -use once_cell::sync::Lazy; -use percent_encoding::{utf8_percent_encode, NON_ALPHANUMERIC}; use regex::Regex; -use std::{env, fs, io::Error}; +use std::{env, fs, io::Error, sync::LazyLock}; +use urlencoding::encode; pub mod structs; -use structs::DatabaseConnection; +use structs::{DatabaseConnection, PictrsConfig, PictrsImageMode, Settings}; static DEFAULT_CONFIG_FILE: &str = "config/config.hjson"; -pub static SETTINGS: Lazy = Lazy::new(|| { - 
Settings::init().expect("Failed to load settings file, see documentation (https://join-lemmy.org/docs/en/administration/configuration.html)") +pub static SETTINGS: LazyLock = LazyLock::new(|| { + if env::var("LEMMY_INITIALIZE_WITH_DEFAULT_SETTINGS").is_ok() { + println!( + "LEMMY_INITIALIZE_WITH_DEFAULT_SETTINGS was set, any configuration file has been ignored." + ); + println!("Use with other environment variables to configure this instance further; e.g. LEMMY_DATABASE_URL."); + Settings::default() + } else { + Settings::init().expect("Failed to load settings file, see documentation (https://join-lemmy.org/docs/en/administration/configuration.html).") + } }); -static WEBFINGER_REGEX: Lazy = Lazy::new(|| { + +static WEBFINGER_REGEX: LazyLock = LazyLock::new(|| { Regex::new(&format!( "^acct:([a-zA-Z0-9_]{{3,}})@{}$", SETTINGS.hostname @@ -33,10 +37,8 @@ impl Settings { /// Note: The env var `LEMMY_DATABASE_URL` is parsed in /// `lemmy_db_schema/src/lib.rs::get_database_url_from_env()` /// Warning: Only call this once. - pub(crate) fn init() -> Result { - // Read the config file + pub(crate) fn init() -> LemmyResult { let config = from_str::(&Self::read_config_file()?)?; - if config.hostname == "unset" { Err(anyhow!("Hostname variable is not set!").into()) } else { @@ -45,16 +47,19 @@ impl Settings { } pub fn get_database_url(&self) -> String { + if let Ok(url) = env::var("LEMMY_DATABASE_URL") { + return url; + } match &self.database.connection { DatabaseConnection::Uri { uri } => uri.clone(), DatabaseConnection::Parts(parts) => { format!( "postgres://{}:{}@{}:{}/{}", - utf8_percent_encode(&parts.user, NON_ALPHANUMERIC), - utf8_percent_encode(&parts.password, NON_ALPHANUMERIC), + encode(&parts.user), + encode(&parts.password), parts.host, parts.port, - utf8_percent_encode(&parts.database, NON_ALPHANUMERIC), + encode(&parts.database), ) } } @@ -102,10 +107,24 @@ impl Settings { WEBFINGER_REGEX.clone() } - pub fn pictrs_config(&self) -> Result { + pub fn pictrs_config(&self) -> LemmyResult { self .pictrs .clone() .ok_or_else(|| anyhow!("images_disabled").into()) } } + +impl PictrsConfig { + pub fn image_mode(&self) -> PictrsImageMode { + if let Some(cache_external_link_previews) = self.cache_external_link_previews { + if cache_external_link_previews { + PictrsImageMode::StoreLinkPreviews + } else { + PictrsImageMode::None + } + } else { + self.image_mode.clone() + } + } +} diff --git a/crates/utils/src/settings/structs.rs b/crates/utils/src/settings/structs.rs index aa3f852ce..8c28d908a 100644 --- a/crates/utils/src/settings/structs.rs +++ b/crates/utils/src/settings/structs.rs @@ -1,6 +1,10 @@ use doku::Document; use serde::{Deserialize, Serialize}; -use std::net::{IpAddr, Ipv4Addr}; +use smart_default::SmartDefault; +use std::{ + env, + net::{IpAddr, Ipv4Addr}, +}; use url::Url; #[derive(Debug, Deserialize, Serialize, Clone, SmartDefault, Document)] @@ -9,7 +13,6 @@ pub struct Settings { /// settings related to the postgresql database #[default(Default::default())] pub database: DatabaseConfig, - /// Settings related to activitypub federation /// Pictrs image server configuration. #[default(Some(Default::default()))] pub(crate) pictrs: Option, @@ -35,20 +38,30 @@ pub struct Settings { /// Whether the site is available over TLS. Needs to be true for federation to work. #[default(true)] pub tls_enabled: bool, - /// Set the URL for opentelemetry exports. If you do not have an opentelemetry collector, do not set this option + /// Set the URL for opentelemetry exports. 
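`get_database_url` now short-circuits on the `LEMMY_DATABASE_URL` environment variable and builds the parts-based URL with `urlencoding::encode` instead of `percent_encoding`; a sketch of the resulting connection string (credentials are made-up examples):

```rust
#[test]
fn database_url_encoding_example() {
  // Mirrors the parts-based branch of get_database_url: special characters in
  // credentials are percent-encoded so they cannot break the URL syntax.
  let (user, password, host, port, db) = ("lemmy", "p@ss word", "localhost", 5432, "lemmy");
  let url = format!(
    "postgres://{}:{}@{}:{}/{}",
    urlencoding::encode(user),
    urlencoding::encode(password),
    host,
    port,
    urlencoding::encode(db),
  );
  assert_eq!(url, "postgres://lemmy:p%40ss%20word@localhost:5432/lemmy");
}
```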
If you do not have an opentelemetry collector, do not + /// set this option #[default(None)] #[doku(skip)] pub opentelemetry_url: Option, - /// The number of activitypub federation workers that can be in-flight concurrently - #[default(0)] - pub worker_count: usize, - /// The number of activitypub federation retry workers that can be in-flight concurrently - #[default(0)] - pub retry_count: usize, + #[default(Default::default())] + pub federation: FederationWorkerConfig, // Prometheus configuration. #[default(None)] #[doku(example = "Some(Default::default())")] pub prometheus: Option, + /// Sets a response Access-Control-Allow-Origin CORS header + /// https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Access-Control-Allow-Origin + #[default(None)] + #[doku(example = "*")] + cors_origin: Option, +} + +impl Settings { + pub fn cors_origin(&self) -> Option { + env::var("LEMMY_CORS_ORIGIN") + .ok() + .or(self.cors_origin.clone()) + } } #[derive(Debug, Deserialize, Serialize, Clone, SmartDefault, Document)] @@ -63,19 +76,57 @@ pub struct PictrsConfig { #[default(None)] pub api_key: Option, - /// Cache remote images - #[default(true)] - pub cache_remote_images: bool, + /// Backwards compatibility with 0.18.1. False is equivalent to `image_mode: None`, true is + /// equivalent to `image_mode: StoreLinkPreviews`. + /// + /// To be removed in 0.20 + pub(super) cache_external_link_previews: Option, + + /// Specifies how to handle remote images, so that users don't have to connect directly to remote + /// servers. + #[default(PictrsImageMode::StoreLinkPreviews)] + pub(super) image_mode: PictrsImageMode, + + /// Timeout for uploading images to pictrs (in seconds) + #[default(30)] + pub upload_timeout: u64, + + /// Resize post thumbnails to this maximum width/height. + #[default(256)] + pub max_thumbnail_size: u32, +} + +#[derive(Debug, Deserialize, Serialize, Clone, SmartDefault, Document, PartialEq)] +#[serde(deny_unknown_fields)] +pub enum PictrsImageMode { + /// Leave images unchanged, don't generate any local thumbnails for post urls. Instead the + /// Opengraph image is directly returned as thumbnail + None, + /// Generate thumbnails for external post urls and store them persistently in pict-rs. This + /// ensures that they can be reliably retrieved and can be resized using pict-rs APIs. However + /// it also increases storage usage. + /// + /// This is the default behaviour, and also matches Lemmy 0.18. + #[default] + StoreLinkPreviews, + /// If enabled, all images from remote domains are rewritten to pass through + /// `/api/v3/image_proxy`, including embedded images in markdown. Images are stored temporarily + /// in pict-rs for caching. This improves privacy as users don't expose their IP to untrusted + /// servers, and decreases load on other servers. However it increases bandwidth use for the + /// local server. 
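To make the cache_external_link_previews / image_mode compatibility rule concrete, here is a minimal sketch under simplified types (not the real PictrsConfig): when the deprecated boolean is present it overrides the new enum, with true mapping to StoreLinkPreviews and false to None.

#[derive(Clone, Debug)]
enum PictrsImageMode {
    None,
    StoreLinkPreviews,
    ProxyAllImages,
}

struct PictrsOpts {
    // Deprecated 0.18.x flag, kept only so old config files keep working.
    cache_external_link_previews: Option<bool>,
    image_mode: PictrsImageMode,
}

impl PictrsOpts {
    fn effective_image_mode(&self) -> PictrsImageMode {
        match self.cache_external_link_previews {
            // Old flag set: translate it to the new enum.
            Some(true) => PictrsImageMode::StoreLinkPreviews,
            Some(false) => PictrsImageMode::None,
            // Old flag absent: the new field applies as-is.
            None => self.image_mode.clone(),
        }
    }
}

fn main() {
    let opts = PictrsOpts {
        cache_external_link_previews: Some(false),
        image_mode: PictrsImageMode::ProxyAllImages,
    };
    // The deprecated flag wins, so this prints "None".
    println!("{:?}", opts.effective_image_mode());
}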
+ /// + /// Requires pict-rs 0.5 + ProxyAllImages, } #[derive(Debug, Deserialize, Serialize, Clone, SmartDefault, Document)] #[serde(default)] pub struct DatabaseConfig { #[serde(flatten, default)] - pub connection: DatabaseConnection, + pub(crate) connection: DatabaseConnection, /// Maximum number of active sql connections - #[default(95)] + #[default(30)] pub pool_size: usize, } @@ -117,10 +168,10 @@ pub struct DatabaseConnectionParts { pub(super) user: String, /// Password to connect to postgres #[default("password")] - pub password: String, + pub(super) password: String, #[default("localhost")] /// Host where postgres is running - pub host: String, + pub(super) host: String, /// Port where postgres can be accessed #[default(5432)] pub(super) port: i32, @@ -138,7 +189,7 @@ pub struct EmailConfig { /// Login name for smtp server pub smtp_login: Option, /// Password to login to the smtp server - pub smtp_password: Option, + smtp_password: Option, #[doku(example = "noreply@example.com")] /// Address to send emails from, eg "noreply@your-instance.com" pub smtp_from_address: String, @@ -148,6 +199,14 @@ pub struct EmailConfig { pub tls_type: String, } +impl EmailConfig { + pub fn smtp_password(&self) -> Option { + std::env::var("LEMMY_SMTP_PASSWORD") + .ok() + .or(self.smtp_password.clone()) + } +} + #[derive(Debug, Deserialize, Serialize, Clone, SmartDefault, Document)] #[serde(deny_unknown_fields)] pub struct SetupConfig { @@ -170,11 +229,22 @@ pub struct SetupConfig { #[serde(deny_unknown_fields)] pub struct PrometheusConfig { // Address that the Prometheus metrics will be served on. - #[default(Some(IpAddr::V4(Ipv4Addr::new(127, 0, 0, 1))))] + #[default(IpAddr::V4(Ipv4Addr::new(127, 0, 0, 1)))] #[doku(example = "127.0.0.1")] - pub bind: Option, + pub bind: IpAddr, // Port that the Prometheus metrics will be served on. - #[default(Some(10002))] + #[default(10002)] #[doku(example = "10002")] - pub port: Option, + pub port: i32, +} + +#[derive(Debug, Deserialize, Serialize, Clone, SmartDefault, Document)] +#[serde(default)] +// named federation"worker"config to disambiguate from the activitypub library configuration +pub struct FederationWorkerConfig { + /// Limit to the number of concurrent outgoing federation requests per target instance. + /// Set this to a higher value than 1 (e.g. 6) only if you have a huge instance (>10 activities + /// per second) and if a receiving instance is not keeping up. + #[default(1)] + pub concurrent_sends_per_instance: i8, } diff --git a/crates/utils/src/utils/markdown.rs b/crates/utils/src/utils/markdown.rs deleted file mode 100644 index a051310ca..000000000 --- a/crates/utils/src/utils/markdown.rs +++ /dev/null @@ -1,112 +0,0 @@ -use markdown_it::MarkdownIt; -use once_cell::sync::Lazy; - -mod spoiler_rule; - -static MARKDOWN_PARSER: Lazy = Lazy::new(|| { - let mut parser = MarkdownIt::new(); - markdown_it::plugins::cmark::add(&mut parser); - markdown_it::plugins::extra::add(&mut parser); - spoiler_rule::add(&mut parser); - - parser -}); - -/// Replace special HTML characters in API parameters to prevent XSS attacks. -/// -/// Taken from https://github.com/OWASP/CheatSheetSeries/blob/master/cheatsheets/Cross_Site_Scripting_Prevention_Cheat_Sheet.md#output-encoding-for-html-contexts -/// -/// `>` is left in place because it is interpreted as markdown quote. 
-pub fn sanitize_html(text: &str) -> String { - text - .replace('&', "&") - .replace('<', "<") - .replace('\"', """) - .replace('\'', "'") -} - -/// Converts text from markdown to HTML, while escaping special characters. -pub fn markdown_to_html(text: &str) -> String { - MARKDOWN_PARSER.parse(text).xrender() -} - -#[cfg(test)] -mod tests { - #![allow(clippy::unwrap_used)] - #![allow(clippy::indexing_slicing)] - - use super::*; - - #[test] - fn test_basic_markdown() { - let tests: Vec<_> = vec![ - ( - "headings", - "# h1\n## h2\n### h3\n#### h4\n##### h5\n###### h6", - "

<h1>h1</h1>\n<h2>h2</h2>\n<h3>h3</h3>\n<h4>h4</h4>\n<h5>h5</h5>\n<h6>h6</h6>\n" - ), - ( - "line breaks", - "First\rSecond", - "<p>First\nSecond</p>\n"), - ( - "emphasis", - "__bold__ **bold** *italic* ***bold+italic***", - "<p><strong>bold</strong> <strong>bold</strong> <em>italic</em> <em><strong>bold+italic</strong></em></p>\n" - ), - (
- "blockquotes", - "> #### Hello\n > \n > - Hola\n > - 안영 \n>> Goodbye\n", - "<blockquote>\n<h4>Hello</h4>\n<ul>\n<li>Hola</li>\n<li>안영</li>\n</ul>\n<blockquote>\n<p>Goodbye</p>\n</blockquote>\n</blockquote>\n" - ), - ( - "lists (ordered, unordered)", - "1. pen\n2. apple\n3. apple pen\n- pen\n- pineapple\n- pineapple pen", - "<ol>\n<li>pen</li>\n<li>apple</li>\n<li>apple pen</li>\n</ol>\n<ul>\n<li>pen</li>\n<li>pineapple</li>\n<li>pineapple pen</li>\n</ul>\n" - ), - (
- "code and code blocks", - "this is my amazing `code snippet` and my amazing ```code block```", - "<p>this is my amazing <code>code snippet</code> and my amazing <code>code block</code></p>\n" - ), - ( - "links", - "[Lemmy](https://join-lemmy.org/ \"Join Lemmy!\")", - "<p><a href=\"https://join-lemmy.org/\" title=\"Join Lemmy!\">Lemmy</a></p>\n" - ), - ( - "images", - "![My linked image](https://image.com \"image alt text\")", - "<p><img src=\"https://image.com\" alt=\"My linked image\" title=\"image alt text\" /></p>\n" - ), - // Ensure any custom plugins are added to 'MARKDOWN_PARSER' implementation. - (
- "basic spoiler", - "::: spoiler click to see more\nhow spicy!\n:::\n", - "<details><summary>click to see more</summary><p>how spicy!\n</p></details>\n" - ), - ( - "escape html special chars", - "<script>alert('xss');</script> hello &\"", - "<p>&lt;script&gt;alert(‘xss’);&lt;/script&gt; hello &amp;&quot;</p>
\n" - ) - ]; - - tests.iter().for_each(|&(msg, input, expected)| { - let result = markdown_to_html(input); - - assert_eq!( - result, expected, - "Testing {}, with original input '{}'", - msg, input - ); - }); - } - - #[test] - fn test_sanitize_html() { - let sanitized = sanitize_html(" hello &\"'"); - let expected = "<script>alert('xss');</script> hello &"'"; - assert_eq!(expected, sanitized) - } -} diff --git a/crates/utils/src/utils/markdown/image_links.rs b/crates/utils/src/utils/markdown/image_links.rs new file mode 100644 index 000000000..7456190e4 --- /dev/null +++ b/crates/utils/src/utils/markdown/image_links.rs @@ -0,0 +1,165 @@ +use super::{link_rule::Link, MARKDOWN_PARSER}; +use crate::settings::SETTINGS; +use markdown_it::{plugins::cmark::inline::image::Image, NodeValue}; +use url::Url; +use urlencoding::encode; + +/// Rewrites all links to remote domains in markdown, so they go through `/api/v3/image_proxy`. +pub fn markdown_rewrite_image_links(mut src: String) -> (String, Vec) { + let links_offsets = find_urls::(&src); + + let mut links = vec![]; + // Go through the collected links in reverse order + for (start, end) in links_offsets.into_iter().rev() { + let (url, extra) = markdown_handle_title(&src, start, end); + match Url::parse(url) { + Ok(parsed) => { + links.push(parsed.clone()); + // If link points to remote domain, replace with proxied link + if parsed.domain() != Some(&SETTINGS.hostname) { + let mut proxied = format!( + "{}/api/v3/image_proxy?url={}", + SETTINGS.get_protocol_and_hostname(), + encode(url), + ); + // restore custom emoji format + if let Some(extra) = extra { + proxied = format!("{proxied} {extra}"); + } + src.replace_range(start..end, &proxied); + } + } + Err(_) => { + // If its not a valid url, replace with empty text + src.replace_range(start..end, ""); + } + } + } + + (src, links) +} + +pub fn markdown_handle_title(src: &str, start: usize, end: usize) -> (&str, Option<&str>) { + let content = src.get(start..end).unwrap_or_default(); + // necessary for custom emojis which look like `![name](url "title")` + match content.split_once(' ') { + Some((a, b)) => (a, Some(b)), + _ => (content, None), + } +} + +pub fn markdown_find_links(src: &str) -> Vec<(usize, usize)> { + find_urls::(src) +} + +// Walk the syntax tree to find positions of image or link urls +fn find_urls(src: &str) -> Vec<(usize, usize)> { + let ast = MARKDOWN_PARSER.parse(src); + let mut links_offsets = vec![]; + ast.walk(|node, _depth| { + if let Some(image) = node.cast::() { + let (_, node_offset) = node.srcmap.expect("srcmap is none").get_byte_offsets(); + let start_offset = node_offset - image.url_len() - 1 - image.title_len(); + let end_offset = node_offset - 1; + + links_offsets.push((start_offset, end_offset)); + } + }); + links_offsets +} + +pub trait UrlAndTitle { + fn url_len(&self) -> usize; + fn title_len(&self) -> usize; +} + +impl UrlAndTitle for Image { + fn url_len(&self) -> usize { + self.url.len() + } + + fn title_len(&self) -> usize { + self.title.as_ref().map(|t| t.len() + 3).unwrap_or_default() + } +} +impl UrlAndTitle for Link { + fn url_len(&self) -> usize { + self.url.len() + } + fn title_len(&self) -> usize { + self.title.as_ref().map(|t| t.len() + 3).unwrap_or_default() + } +} + +#[cfg(test)] +mod tests { + + use super::*; + use pretty_assertions::assert_eq; + + #[test] + fn test_find_links() { + let links = markdown_find_links("[test](https://example.com)"); + assert_eq!(vec![(7, 26)], links); + + let links = find_urls::("![test](https://example.com)"); + 
assert_eq!(vec![(8, 27)], links); + } + + #[test] + fn test_markdown_proxy_images() { + let tests: Vec<_> = + vec![ + ( + "remote image proxied", + "![link](http://example.com/image.jpg)", + "![link](https://lemmy-alpha/api/v3/image_proxy?url=http%3A%2F%2Fexample.com%2Fimage.jpg)", + ), + ( + "local image unproxied", + "![link](http://lemmy-alpha/image.jpg)", + "![link](http://lemmy-alpha/image.jpg)", + ), + ( + "multiple image links", + "![link](http://example.com/image1.jpg) ![link](http://example.com/image2.jpg)", + "![link](https://lemmy-alpha/api/v3/image_proxy?url=http%3A%2F%2Fexample.com%2Fimage1.jpg) ![link](https://lemmy-alpha/api/v3/image_proxy?url=http%3A%2F%2Fexample.com%2Fimage2.jpg)", + ), + ( + "empty link handled", + "![image]()", + "![image]()" + ), + ( + "empty label handled", + "![](http://example.com/image.jpg)", + "![](https://lemmy-alpha/api/v3/image_proxy?url=http%3A%2F%2Fexample.com%2Fimage.jpg)" + ), + ( + "invalid image link removed", + "![image](http-not-a-link)", + "![image]()" + ), + ( + "label with nested markdown handled", + "![a *b* c](http://example.com/image.jpg)", + "![a *b* c](https://lemmy-alpha/api/v3/image_proxy?url=http%3A%2F%2Fexample.com%2Fimage.jpg)" + ), + ( + "custom emoji support", + r#"![party-blob](https://www.hexbear.net/pictrs/image/83405746-0620-4728-9358-5f51b040ffee.gif "emoji party-blob")"#, + r#"![party-blob](https://lemmy-alpha/api/v3/image_proxy?url=https%3A%2F%2Fwww.hexbear.net%2Fpictrs%2Fimage%2F83405746-0620-4728-9358-5f51b040ffee.gif "emoji party-blob")"# + ) + ]; + + tests.iter().for_each(|&(msg, input, expected)| { + let result = markdown_rewrite_image_links(input.to_string()); + + assert_eq!( + result.0, expected, + "Testing {}, with original input '{}'", + msg, input + ); + }); + } +} diff --git a/crates/utils/src/utils/markdown/link_rule.rs b/crates/utils/src/utils/markdown/link_rule.rs new file mode 100644 index 000000000..15edcd7b1 --- /dev/null +++ b/crates/utils/src/utils/markdown/link_rule.rs @@ -0,0 +1,38 @@ +use markdown_it::{generics::inline::full_link, MarkdownIt, Node, NodeValue, Renderer}; + +/// Renders markdown links. Copied directly from markdown-it source, unlike original code it also +/// sets `rel=nofollow` attribute. +/// +/// TODO: We can set nofollow only if post was not made by mod/admin, but then we have to construct +/// new parser for every invocation which might have performance implications. 
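As a side note on the image_links.rs hunk above, here is a minimal sketch of the proxy rewrite for a single URL, assuming a hypothetical LOCAL_HOST constant in place of the real SETTINGS lookup: remote URLs are percent-encoded into the /api/v3/image_proxy endpoint, local ones are left alone.

use urlencoding::encode;

// Hypothetical local hostname; the real code reads it from SETTINGS.
const LOCAL_HOST: &str = "lemmy-alpha";

/// Returns a proxied variant of `url` unless it already points at this instance.
fn proxy_image_url(url: &url::Url) -> String {
    if url.domain() == Some(LOCAL_HOST) {
        // Local images are served directly.
        url.to_string()
    } else {
        // Remote images go through the image proxy endpoint, with the original
        // URL percent-encoded into the query string.
        format!(
            "https://{LOCAL_HOST}/api/v3/image_proxy?url={}",
            encode(url.as_str())
        )
    }
}

fn main() -> Result<(), url::ParseError> {
    let remote = url::Url::parse("http://example.com/image.jpg")?;
    // Prints https://lemmy-alpha/api/v3/image_proxy?url=http%3A%2F%2Fexample.com%2Fimage.jpg
    println!("{}", proxy_image_url(&remote));
    Ok(())
}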
+/// https://github.com/markdown-it-rust/markdown-it/blob/master/src/plugins/cmark/inline/link.rs +#[derive(Debug)] +pub struct Link { + pub url: String, + pub title: Option, +} + +impl NodeValue for Link { + fn render(&self, node: &Node, fmt: &mut dyn Renderer) { + let mut attrs = node.attrs.clone(); + attrs.push(("href", self.url.clone())); + attrs.push(("rel", "nofollow".to_string())); + + if let Some(title) = &self.title { + attrs.push(("title", title.clone())); + } + + fmt.open("a", &attrs); + fmt.contents(&node.children); + fmt.close("a"); + } +} + +pub fn add(md: &mut MarkdownIt) { + full_link::add::(md, |href, title| { + Node::new(Link { + url: href.unwrap_or_default(), + title, + }) + }); +} diff --git a/crates/utils/src/utils/markdown/mod.rs b/crates/utils/src/utils/markdown/mod.rs new file mode 100644 index 000000000..a51b507ce --- /dev/null +++ b/crates/utils/src/utils/markdown/mod.rs @@ -0,0 +1,264 @@ +use crate::{error::LemmyResult, LemmyErrorType}; +use markdown_it::MarkdownIt; +use regex::RegexSet; +use std::sync::LazyLock; + +pub mod image_links; +mod link_rule; + +static MARKDOWN_PARSER: LazyLock = LazyLock::new(|| { + let mut parser = MarkdownIt::new(); + markdown_it::plugins::cmark::add(&mut parser); + markdown_it::plugins::extra::add(&mut parser); + markdown_it_block_spoiler::add(&mut parser); + markdown_it_sub::add(&mut parser); + markdown_it_sup::add(&mut parser); + markdown_it_ruby::add(&mut parser); + link_rule::add(&mut parser); + + parser +}); + +/// Replace special HTML characters in API parameters to prevent XSS attacks. +/// +/// Taken from https://github.com/OWASP/CheatSheetSeries/blob/master/cheatsheets/Cross_Site_Scripting_Prevention_Cheat_Sheet.md#output-encoding-for-html-contexts +/// +/// `>` is left in place because it is interpreted as markdown quote. +pub fn sanitize_html(text: &str) -> String { + text + .replace('&', "&") + .replace('<', "<") + .replace('\"', """) + .replace('\'', "'") +} + +pub fn markdown_to_html(text: &str) -> String { + MARKDOWN_PARSER.parse(text).xrender() +} + +pub fn markdown_check_for_blocked_urls(text: &str, blocklist: &RegexSet) -> LemmyResult<()> { + if blocklist.is_match(text) { + Err(LemmyErrorType::BlockedUrl)? + } + Ok(()) +} + +#[cfg(test)] +mod tests { + + use super::*; + use image_links::markdown_rewrite_image_links; + use pretty_assertions::assert_eq; + + #[test] + fn test_basic_markdown() { + let tests: Vec<_> = vec![ + ( + "headings", + "# h1\n## h2\n### h3\n#### h4\n##### h5\n###### h6", + "

<h1>h1</h1>\n<h2>h2</h2>\n<h3>h3</h3>\n<h4>h4</h4>\n<h5>h5</h5>\n<h6>h6</h6>\n" + ), + ( + "line breaks", + "First\rSecond", + "<p>First\nSecond</p>\n"), + ( + "emphasis", + "__bold__ **bold** *italic* ***bold+italic***", + "<p><strong>bold</strong> <strong>bold</strong> <em>italic</em> <em><strong>bold+italic</strong></em></p>\n" + ), + (
+ "blockquotes", + "> #### Hello\n > \n > - Hola\n > - 안영 \n>> Goodbye\n", + "<blockquote>\n<h4>Hello</h4>\n<ul>\n<li>Hola</li>\n<li>안영</li>\n</ul>\n<blockquote>\n<p>Goodbye</p>\n</blockquote>\n</blockquote>\n" + ), + ( + "lists (ordered, unordered)", + "1. pen\n2. apple\n3. apple pen\n- pen\n- pineapple\n- pineapple pen", + "<ol>\n<li>pen</li>\n<li>apple</li>\n<li>apple pen</li>\n</ol>\n<ul>\n<li>pen</li>\n<li>pineapple</li>\n<li>pineapple pen</li>\n</ul>\n" + ), + (
+ "code and code blocks", + "this is my amazing `code snippet` and my amazing ```code block```", + "<p>this is my amazing <code>code snippet</code> and my amazing <code>code block</code></p>\n" + ), + // Links with added nofollow attribute + ( + "links", + "[Lemmy](https://join-lemmy.org/ \"Join Lemmy!\")", + "<p><a href=\"https://join-lemmy.org/\" title=\"Join Lemmy!\" rel=\"nofollow\">Lemmy</a></p>\n" + ), + // Remote images with proxy + ( + "images", + "![My linked image](https://example.com/image.png \"image alt text\")", + "<p><img src=\"https://example.com/image.png\" alt=\"My linked image\" title=\"image alt text\" /></p>\n" + ), + // Local images without proxy + (
+ "images", + "![My linked image](https://lemmy-alpha/image.png \"image alt text\")", + "<p><img src=\"https://lemmy-alpha/image.png\" alt=\"My linked image\" title=\"image alt text\" /></p>\n" + ), + // Ensure spoiler plugin is added + ( + "basic spoiler", + "::: spoiler click to see more\nhow spicy!\n:::\n", + "<details><summary>click to see more</summary>how spicy!\n</details>\n" + ), + ( + "escape html special chars", + "<script>alert('xss');</script> hello &\"", + "<p>&lt;script&gt;alert(‘xss’);&lt;/script&gt; hello &amp;&quot;</p>\n" + ),("subscript","log~2~(a)","<p>log<sub>2</sub>(a)</p>\n"), + (
+ "superscript", + "Markdown^TM^", + "<p>Markdown<sup>TM</sup></p>\n" + ), + ( + "ruby text", + "{漢|Kan}{字|ji}", + "<p><ruby>漢<rp>(</rp><rt>Kan</rt><rp>)</rp></ruby><ruby>字<rp>(</rp><rt>ji</rt><rp>)</rp></ruby></p>
\n" + ) + ]; + + tests.iter().for_each(|&(msg, input, expected)| { + let result = markdown_to_html(input); + + assert_eq!( + result, expected, + "Testing {}, with original input '{}'", + msg, input + ); + }); + } + + #[test] + fn test_markdown_proxy_images() { + let tests: Vec<_> = + vec![ + ( + "remote image proxied", + "![link](http://example.com/image.jpg)", + "![link](https://lemmy-alpha/api/v3/image_proxy?url=http%3A%2F%2Fexample.com%2Fimage.jpg)", + ), + ( + "local image unproxied", + "![link](http://lemmy-alpha/image.jpg)", + "![link](http://lemmy-alpha/image.jpg)", + ), + ( + "multiple image links", + "![link](http://example.com/image1.jpg) ![link](http://example.com/image2.jpg)", + "![link](https://lemmy-alpha/api/v3/image_proxy?url=http%3A%2F%2Fexample.com%2Fimage1.jpg) ![link](https://lemmy-alpha/api/v3/image_proxy?url=http%3A%2F%2Fexample.com%2Fimage2.jpg)", + ), + ( + "empty link handled", + "![image]()", + "![image]()" + ), + ( + "empty label handled", + "![](http://example.com/image.jpg)", + "![](https://lemmy-alpha/api/v3/image_proxy?url=http%3A%2F%2Fexample.com%2Fimage.jpg)" + ), + ( + "invalid image link removed", + "![image](http-not-a-link)", + "![image]()" + ), + ( + "label with nested markdown handled", + "![a *b* c](http://example.com/image.jpg)", + "![a *b* c](https://lemmy-alpha/api/v3/image_proxy?url=http%3A%2F%2Fexample.com%2Fimage.jpg)" + ), + ( + "custom emoji support", + r#"![party-blob](https://www.hexbear.net/pictrs/image/83405746-0620-4728-9358-5f51b040ffee.gif "emoji party-blob")"#, + r#"![party-blob](https://lemmy-alpha/api/v3/image_proxy?url=https%3A%2F%2Fwww.hexbear.net%2Fpictrs%2Fimage%2F83405746-0620-4728-9358-5f51b040ffee.gif "emoji party-blob")"# + ) + ]; + + tests.iter().for_each(|&(msg, input, expected)| { + let result = markdown_rewrite_image_links(input.to_string()); + + assert_eq!( + result.0, expected, + "Testing {}, with original input '{}'", + msg, input + ); + }); + } + + #[test] + fn test_url_blocking() -> LemmyResult<()> { + let set = RegexSet::new(vec![r"(https://)?example\.com/?"])?; + + assert!( + markdown_check_for_blocked_urls(&String::from("[](https://example.com)"), &set).is_err() + ); + + assert!(markdown_check_for_blocked_urls( + &String::from("Go to https://example.com to get free Robux"), + &set + ) + .is_err()); + + assert!( + markdown_check_for_blocked_urls(&String::from("[](https://example.blog)"), &set).is_ok() + ); + + assert!(markdown_check_for_blocked_urls(&String::from("example.com"), &set).is_err()); + + assert!(markdown_check_for_blocked_urls( + "Odio exercitationem culpa sed sunt + et. Sit et similique tempora deserunt doloremque. Cupiditate iusto + repellat et quis qui. Cum veritatis facere quasi repellendus sunt + eveniet nemo sint. Cumque sit unde est. 
https://example.com Alias + repellendus at quos.", + &set + ) + .is_err()); + + let set = RegexSet::new(vec![r"(https://)?example\.com/spam\.jpg"])?; + assert!(markdown_check_for_blocked_urls( + &String::from("![](https://example.com/spam.jpg)"), + &set + ) + .is_err()); + + let set = RegexSet::new(vec![ + r"(https://)?quo\.example\.com/?", + r"(https://)?foo\.example\.com/?", + r"(https://)?bar\.example\.com/?", + ])?; + + assert!( + markdown_check_for_blocked_urls(&String::from("https://baz.example.com"), &set).is_ok() + ); + + assert!( + markdown_check_for_blocked_urls(&String::from("https://bar.example.com"), &set).is_err() + ); + + let set = RegexSet::new(vec![r"(https://)?example\.com/banned_page"])?; + + assert!( + markdown_check_for_blocked_urls(&String::from("https://example.com/page"), &set).is_ok() + ); + + let set = RegexSet::new(vec![r"(https://)?ex\.mple\.com/?"])?; + + assert!(markdown_check_for_blocked_urls("example.com", &set).is_ok()); + + Ok(()) + } + + #[test] + fn test_sanitize_html() { + let sanitized = sanitize_html(" hello &\"'"); + let expected = "<script>alert('xss');</script> hello &"'"; + assert_eq!(expected, sanitized) + } +} diff --git a/crates/utils/src/utils/markdown/spoiler_rule.rs b/crates/utils/src/utils/markdown/spoiler_rule.rs deleted file mode 100644 index bae858bfd..000000000 --- a/crates/utils/src/utils/markdown/spoiler_rule.rs +++ /dev/null @@ -1,203 +0,0 @@ -// Custom Markdown plugin to manage spoilers. -// -// Matches the capability described in Lemmy UI: -// https://github.com/LemmyNet/lemmy-ui/blob/main/src/shared/utils.ts#L159 -// that is based off of: -// https://github.com/markdown-it/markdown-it-container/tree/master#example -// -// FORMAT: -// Input Markdown: ::: spoiler VISIBLE_TEXT\nHIDDEN_SPOILER\n:::\n -// Output HTML:
<details><summary>VISIBLE_TEXT</summary><p>\nHIDDEN_SPOILER\n</p></details>
-// -// Anatomy of a spoiler: -// keyword -// ^ -// ::: spoiler VISIBLE_HINT -// ^ ^ -// begin fence visible text -// -// HIDDEN_SPOILER -// ^ -// hidden text -// -// ::: -// ^ -// end fence - -use markdown_it::{ - parser::{ - block::{BlockRule, BlockState}, - inline::InlineRoot, - }, - MarkdownIt, - Node, - NodeValue, - Renderer, -}; -use once_cell::sync::Lazy; -use regex::Regex; - -#[derive(Debug)] -struct SpoilerBlock { - visible_text: String, -} - -const SPOILER_PREFIX: &str = "::: spoiler "; -const SPOILER_SUFFIX: &str = ":::"; -const SPOILER_SUFFIX_NEWLINE: &str = ":::\n"; - -static SPOILER_REGEX: Lazy = - Lazy::new(|| Regex::new(r"^::: spoiler .*$").expect("compile spoiler markdown regex.")); - -impl NodeValue for SpoilerBlock { - // Formats any node marked as a 'SpoilerBlock' into HTML. - // See the SpoilerBlockScanner#run implementation to see how these nodes get added to the tree. - fn render(&self, node: &Node, fmt: &mut dyn Renderer) { - fmt.cr(); - fmt.open("details", &node.attrs); - fmt.open("summary", &[]); - // Not allowing special styling to the visible text to keep it simple. - // If allowed, would need to parse the child nodes to assign to visible vs hidden text sections. - fmt.text(&self.visible_text); - fmt.close("summary"); - fmt.open("p", &[]); - fmt.contents(&node.children); - fmt.close("p"); - fmt.close("details"); - fmt.cr(); - } -} - -struct SpoilerBlockScanner; - -impl BlockRule for SpoilerBlockScanner { - // Invoked on every line in the provided Markdown text to check if the BlockRule applies. - // - // NOTE: This does NOT support nested spoilers at this time. - fn run(state: &mut BlockState) -> Option<(Node, usize)> { - let first_line: &str = state.get_line(state.line).trim(); - - // 1. Check if the first line contains the spoiler syntax... - if !SPOILER_REGEX.is_match(first_line) { - return None; - } - - let begin_spoiler_line_idx: usize = state.line + 1; - let mut end_fence_line_idx: usize = begin_spoiler_line_idx; - let mut has_end_fence: bool = false; - - // 2. Search for the end of the spoiler and find the index of the last line of the spoiler. - // There could potentially be multiple lines between the beginning and end of the block. - // - // Block ends with a line with ':::' or ':::\n'; it must be isolated from other markdown. - while end_fence_line_idx < state.line_max && !has_end_fence { - let next_line: &str = state.get_line(end_fence_line_idx).trim(); - - if next_line.eq(SPOILER_SUFFIX) || next_line.eq(SPOILER_SUFFIX_NEWLINE) { - has_end_fence = true; - break; - } - - end_fence_line_idx += 1; - } - - // 3. If available, construct and return the spoiler node to add to the tree. - if has_end_fence { - let (spoiler_content, mapping) = state.get_lines( - begin_spoiler_line_idx, - end_fence_line_idx, - state.blk_indent, - true, - ); - - let mut node = Node::new(SpoilerBlock { - visible_text: String::from(first_line.replace(SPOILER_PREFIX, "").trim()), - }); - - // Add the spoiler content as children; marking as a child tells the tree to process the - // node again, which means other Markdown syntax (ex: emphasis, links) can be rendered. - node - .children - .push(Node::new(InlineRoot::new(spoiler_content, mapping))); - - // NOTE: Not using begin_spoiler_line_idx here because of incorrect results when - // state.line == 0 (subtracts an idx) vs the expected correct result (adds an idx). 
- Some((node, end_fence_line_idx - state.line + 1)) - } else { - None - } - } -} - -pub fn add(markdown_parser: &mut MarkdownIt) { - markdown_parser.block.add_rule::(); -} - -#[cfg(test)] -mod tests { - #![allow(clippy::unwrap_used)] - #![allow(clippy::indexing_slicing)] - - use crate::utils::markdown::spoiler_rule::add; - use markdown_it::MarkdownIt; - - #[test] - fn test_spoiler_markdown() { - let tests: Vec<_> = vec![ - ( - "invalid spoiler", - "::: spoiler click to see more\nbut I never finished", - "

<p>::: spoiler click to see more\nbut I never finished</p>\n", - ), - ( - "another invalid spoiler", - "::: spoiler\nnever added the lead in\n:::", - "<p>::: spoiler\nnever added the lead in\n:::</p>\n", - ), - ( - "basic spoiler, but no newline at the end", - "::: spoiler click to see more\nhow spicy!\n:::", - "<details><summary>click to see more</summary><p>how spicy!\n</p></details>\n" - ), - (
- "basic spoiler with a newline at the end", - "::: spoiler click to see more\nhow spicy!\n:::\n", - "<details><summary>click to see more</summary><p>how spicy!\n</p></details>\n" - ), - ( - "spoiler with extra markdown on the call to action (no extra parsing)", - "::: spoiler _click to see more_\nhow spicy!\n:::\n", - "<details><summary>_click to see more_</summary><p>how spicy!\n</p></details>\n" - ), - (
- "spoiler with extra markdown in the fenced spoiler block", - "::: spoiler click to see more\n**how spicy!**\n*i have many lines*\n:::\n", - "<details><summary>click to see more</summary><p><strong>how spicy!</strong>\n<em>i have many lines</em>\n</p></details>\n" - ), - ( - "spoiler mixed with other content", - "hey you\npsst, wanna hear a secret?\n::: spoiler lean in and i'll tell you\n**you are breathtaking!**\n:::\nwhatcha think about that?", - "<p>hey you\npsst, wanna hear a secret?</p>\n<details><summary>lean in and i'll tell you</summary><p><strong>you are breathtaking!</strong>\n</p></details>\n<p>whatcha think about that?</p>\n" - ), - (
- "spoiler mixed with indented content", - "- did you know that\n::: spoiler the call was\n***coming from inside the house!***\n:::\n - crazy, right?", - "<ul>\n<li>did you know that</li>\n</ul>\n<details><summary>the call was</summary><p><em><strong>coming from inside the house!</strong></em>\n</p></details>\n<ul>\n<li>crazy, right?</li>\n</ul>
\n" - ) - ]; - - tests.iter().for_each(|&(msg, input, expected)| { - let md = &mut MarkdownIt::new(); - markdown_it::plugins::cmark::add(md); - add(md); - - assert_eq!( - md.parse(input).xrender(), - expected, - "Testing {}, with original input '{}'", - msg, - input - ); - }); - } -} diff --git a/crates/utils/src/utils/mention.rs b/crates/utils/src/utils/mention.rs index a2958e499..13762ed27 100644 --- a/crates/utils/src/utils/mention.rs +++ b/crates/utils/src/utils/mention.rs @@ -1,8 +1,8 @@ use itertools::Itertools; -use once_cell::sync::Lazy; use regex::Regex; +use std::sync::LazyLock; -static MENTIONS_REGEX: Lazy = Lazy::new(|| { +static MENTIONS_REGEX: LazyLock = LazyLock::new(|| { Regex::new(r"@(?P[\w.]+)@(?P[a-zA-Z0-9._:-]+)").expect("compile regex") }); // TODO nothing is done with community / group webfingers yet, so just ignore those for now @@ -34,11 +34,11 @@ pub fn scrape_text_for_mentions(text: &str) -> Vec { } #[cfg(test)] +#[expect(clippy::indexing_slicing)] mod test { - #![allow(clippy::unwrap_used)] - #![allow(clippy::indexing_slicing)] use crate::utils::mention::scrape_text_for_mentions; + use pretty_assertions::assert_eq; #[test] fn test_mentions_regex() { diff --git a/crates/utils/src/utils/slurs.rs b/crates/utils/src/utils/slurs.rs index cc2d6a3e6..2350822eb 100644 --- a/crates/utils/src/utils/slurs.rs +++ b/crates/utils/src/utils/slurs.rs @@ -1,4 +1,4 @@ -use crate::error::{LemmyError, LemmyErrorExt, LemmyErrorType}; +use crate::error::{LemmyErrorExt, LemmyErrorType, LemmyResult}; use regex::{Regex, RegexBuilder}; pub fn remove_slurs(test: &str, slur_regex: &Option) -> String { @@ -39,7 +39,7 @@ pub fn build_slur_regex(regex_str: Option<&str>) -> Option { }) } -pub fn check_slurs(text: &str, slur_regex: &Option) -> Result<(), LemmyError> { +pub fn check_slurs(text: &str, slur_regex: &Option) -> LemmyResult<()> { if let Err(slurs) = slur_check(text, slur_regex) { Err(anyhow::anyhow!("{}", slurs_vec_to_str(&slurs))).with_lemmy_type(LemmyErrorType::Slurs) } else { @@ -47,10 +47,7 @@ pub fn check_slurs(text: &str, slur_regex: &Option) -> Result<(), LemmyEr } } -pub fn check_slurs_opt( - text: &Option, - slur_regex: &Option, -) -> Result<(), LemmyError> { +pub fn check_slurs_opt(text: &Option, slur_regex: &Option) -> LemmyResult<()> { match text { Some(t) => check_slurs(t, slur_regex), None => Ok(()), @@ -65,15 +62,17 @@ pub(crate) fn slurs_vec_to_str(slurs: &[&str]) -> String { #[cfg(test)] mod test { - #![allow(clippy::unwrap_used)] - #![allow(clippy::indexing_slicing)] - use crate::utils::slurs::{remove_slurs, slur_check, slurs_vec_to_str}; + use crate::{ + error::LemmyResult, + utils::slurs::{remove_slurs, slur_check, slurs_vec_to_str}, + }; + use pretty_assertions::assert_eq; use regex::RegexBuilder; #[test] - fn test_slur_filter() { - let slur_regex = Some(RegexBuilder::new(r"(fag(g|got|tard)?\b|cock\s?sucker(s|ing)?|ni((g{2,}|q)+|[gq]{2,})[e3r]+(s|z)?|mudslime?s?|kikes?|\bspi(c|k)s?\b|\bchinks?|gooks?|bitch(es|ing|y)?|whor(es?|ing)|\btr(a|@)nn?(y|ies?)|\b(b|re|r)tard(ed)?s?)").case_insensitive(true).build().unwrap()); + fn test_slur_filter() -> LemmyResult<()> { + let slur_regex = Some(RegexBuilder::new(r"(fag(g|got|tard)?\b|cock\s?sucker(s|ing)?|ni((g{2,}|q)+|[gq]{2,})[e3r]+(s|z)?|mudslime?s?|kikes?|\bspi(c|k)s?\b|\bchinks?|gooks?|bitch(es|ing|y)?|whor(es?|ing)|\btr(a|@)nn?(y|ies?)|\b(b|re|r)tard(ed)?s?)").case_insensitive(true).build()?); let test = "faggot test kike tranny cocksucker retardeds. Capitalized Niggerz. 
This is a bunch of other safe text."; let slur_free = "No slurs here"; @@ -98,12 +97,14 @@ mod test { if let Err(slur_vec) = slur_check(test, &slur_regex) { assert_eq!(&slurs_vec_to_str(&slur_vec), has_slurs_err_str); } + + Ok(()) } // These helped with testing // #[test] // fn test_send_email() { - // let result = send_email("not a subject", "test_email@gmail.com", "ur user", "

<h1>HI there</h1>"); - //  assert!(result.is_ok()); + //   let result = send_email("not a subject", "test_email@gmail.com", "ur user", "<h1>HI + //   there</h1>
"); assert!(result.is_ok()); // } } diff --git a/crates/utils/src/utils/validation.rs b/crates/utils/src/utils/validation.rs index 4d8ed8543..f8da6f609 100644 --- a/crates/utils/src/utils/validation.rs +++ b/crates/utils/src/utils/validation.rs @@ -1,25 +1,25 @@ use crate::error::{LemmyErrorExt, LemmyErrorType, LemmyResult}; +use clearurls::UrlCleaner; use itertools::Itertools; -use once_cell::sync::Lazy; -use regex::{Regex, RegexBuilder}; -use url::Url; +use regex::{Regex, RegexBuilder, RegexSet}; +use std::sync::LazyLock; +use url::{ParseError, Url}; -static VALID_ACTOR_NAME_REGEX: Lazy = - Lazy::new(|| Regex::new(r"^[a-zA-Z0-9_]{3,}$").expect("compile regex")); -static VALID_POST_TITLE_REGEX: Lazy = - Lazy::new(|| Regex::new(r".*\S{3,200}.*").expect("compile regex")); -static VALID_MATRIX_ID_REGEX: Lazy = Lazy::new(|| { - Regex::new(r"^@[A-Za-z0-9._=-]+:[A-Za-z0-9.-]+\.[A-Za-z]{2,}$").expect("compile regex") -}); -// taken from https://en.wikipedia.org/wiki/UTM_parameters -static CLEAN_URL_PARAMS_REGEX: Lazy = Lazy::new(|| { - Regex::new(r"^utm_source|utm_medium|utm_campaign|utm_term|utm_content|gclid|gclsrc|dclid|fbclid$") +// From here: https://github.com/vector-im/element-android/blob/develop/matrix-sdk-android/src/main/java/org/matrix/android/sdk/api/MatrixPatterns.kt#L35 +static VALID_MATRIX_ID_REGEX: LazyLock = LazyLock::new(|| { + Regex::new(r"^@[A-Za-z0-9\x21-\x39\x3B-\x7F]+:[A-Za-z0-9.-]+(:[0-9]{2,5})?$") .expect("compile regex") }); +// taken from https://en.wikipedia.org/wiki/UTM_parameters +static URL_CLEANER: LazyLock = + LazyLock::new(|| UrlCleaner::from_embedded_rules().expect("compile clearurls")); +const ALLOWED_POST_URL_SCHEMES: [&str; 3] = ["http", "https", "magnet"]; const BODY_MAX_LENGTH: usize = 10000; const POST_BODY_MAX_LENGTH: usize = 50000; -const BIO_MAX_LENGTH: usize = 300; +const BIO_MAX_LENGTH: usize = 1000; +const URL_MAX_LENGTH: usize = 2000; +const ALT_TEXT_MAX_LENGTH: usize = 1500; const SITE_NAME_MAX_LENGTH: usize = 20; const SITE_NAME_MIN_LENGTH: usize = 1; const SITE_DESCRIPTION_MAX_LENGTH: usize = 150; @@ -85,23 +85,52 @@ fn has_newline(name: &str) -> bool { } pub fn is_valid_actor_name(name: &str, actor_name_max_length: usize) -> LemmyResult<()> { - let check = name.chars().count() <= actor_name_max_length - && VALID_ACTOR_NAME_REGEX.is_match(name) - && !has_newline(name); - if !check { + static VALID_ACTOR_NAME_REGEX_EN: LazyLock = + LazyLock::new(|| Regex::new(r"^[a-zA-Z0-9_]{3,}$").expect("compile regex")); + static VALID_ACTOR_NAME_REGEX_AR: LazyLock = + LazyLock::new(|| Regex::new(r"^[\p{Arabic}0-9_]{3,}$").expect("compile regex")); + static VALID_ACTOR_NAME_REGEX_RU: LazyLock = + LazyLock::new(|| Regex::new(r"^[\p{Cyrillic}0-9_]{3,}$").expect("compile regex")); + + let check = name.chars().count() <= actor_name_max_length && !has_newline(name); + + // Only allow characters from a single alphabet per username. This avoids problems with lookalike + // characters like `o` which looks identical in Latin and Cyrillic, and can be used to imitate + // other users. Checks for additional alphabets can be added in the same way. 
+ let lang_check = VALID_ACTOR_NAME_REGEX_EN.is_match(name) + || VALID_ACTOR_NAME_REGEX_AR.is_match(name) + || VALID_ACTOR_NAME_REGEX_RU.is_match(name); + + if !check || !lang_check { Err(LemmyErrorType::InvalidName.into()) } else { Ok(()) } } +fn has_3_permitted_display_chars(name: &str) -> bool { + let mut num_non_fdc: i8 = 0; + for c in name.chars() { + if !FORBIDDEN_DISPLAY_CHARS.contains(&c) { + num_non_fdc += 1; + if num_non_fdc >= 3 { + break; + } + } + } + if num_non_fdc >= 3 { + return true; + } + false +} + // Can't do a regex here, reverse lookarounds not supported pub fn is_valid_display_name(name: &str, actor_name_max_length: usize) -> LemmyResult<()> { - let check = !name.contains(FORBIDDEN_DISPLAY_CHARS) - && !name.starts_with('@') - && name.chars().count() >= 3 + let check = !name.starts_with('@') + && !name.starts_with(FORBIDDEN_DISPLAY_CHARS) && name.chars().count() <= actor_name_max_length - && !has_newline(name); + && !has_newline(name) + && has_3_permitted_display_chars(name); if !check { Err(LemmyErrorType::InvalidDisplayName.into()) } else { @@ -119,7 +148,8 @@ pub fn is_valid_matrix_id(matrix_id: &str) -> LemmyResult<()> { } pub fn is_valid_post_title(title: &str) -> LemmyResult<()> { - let check = VALID_POST_TITLE_REGEX.is_match(title) && !has_newline(title); + let length = title.trim().chars().count(); + let check = (3..=200).contains(&length) && !has_newline(title); if !check { Err(LemmyErrorType::InvalidPostTitle.into()) } else { @@ -128,14 +158,12 @@ pub fn is_valid_post_title(title: &str) -> LemmyResult<()> { } /// This could be post bodies, comments, or any description field -pub fn is_valid_body_field(body: &Option, post: bool) -> LemmyResult<()> { - if let Some(body) = body { - if post { - max_length_check(body, POST_BODY_MAX_LENGTH, LemmyErrorType::InvalidBodyField)?; - } else { - max_length_check(body, BODY_MAX_LENGTH, LemmyErrorType::InvalidBodyField)?; - }; - } +pub fn is_valid_body_field(body: &str, post: bool) -> LemmyResult<()> { + if post { + max_length_check(body, POST_BODY_MAX_LENGTH, LemmyErrorType::InvalidBodyField)?; + } else { + max_length_check(body, BODY_MAX_LENGTH, LemmyErrorType::InvalidBodyField)?; + }; Ok(()) } @@ -143,6 +171,16 @@ pub fn is_valid_bio_field(bio: &str) -> LemmyResult<()> { max_length_check(bio, BIO_MAX_LENGTH, LemmyErrorType::BioLengthOverflow) } +pub fn is_valid_alt_text_field(alt_text: &str) -> LemmyResult<()> { + max_length_check( + alt_text, + ALT_TEXT_MAX_LENGTH, + LemmyErrorType::AltTextLengthOverflow, + )?; + + Ok(()) +} + /// Checks the site name length, the limit as defined in the DB. pub fn site_name_length_check(name: &str) -> LemmyResult<()> { min_length_check(name, SITE_NAME_MIN_LENGTH, LemmyErrorType::SiteNameRequired)?; @@ -153,8 +191,8 @@ pub fn site_name_length_check(name: &str) -> LemmyResult<()> { ) } -/// Checks the site description length, the limit as defined in the DB. -pub fn site_description_length_check(description: &str) -> LemmyResult<()> { +/// Checks the site / community description length, the limit as defined in the DB. +pub fn site_or_community_description_length_check(description: &str) -> LemmyResult<()> { max_length_check( description, SITE_DESCRIPTION_MAX_LENGTH, @@ -162,7 +200,7 @@ pub fn site_description_length_check(description: &str) -> LemmyResult<()> { ) } -/// Check minumum and maximum length of input string. If the string is too short or too long, the +/// Check minimum and maximum length of input string. 
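Since the comment on the single-alphabet actor-name check above invites adding further alphabets, here is a hedged sketch of what a Greek variant could look like, following the same one-script-per-name pattern; the regex and names below are purely illustrative and not part of this diff.

use regex::Regex;
use std::sync::LazyLock;

// Hypothetical extra alphabet, mirroring the EN/AR/RU statics in the diff above.
static VALID_ACTOR_NAME_REGEX_EL: LazyLock<Regex> =
    LazyLock::new(|| Regex::new(r"^[\p{Greek}0-9_]{3,}$").expect("compile regex"));

fn main() {
    // A name must stay within one alphabet, so mixed-script names are rejected.
    assert!(VALID_ACTOR_NAME_REGEX_EL.is_match("αθηνα_42"));
    assert!(!VALID_ACTOR_NAME_REGEX_EL.is_match("athenα"));
}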
If the string is too short or too long, the /// corresponding error is returned. /// /// HTML frontends specify maximum input length using `maxlength` attribute. @@ -216,17 +254,22 @@ pub fn build_and_check_regex(regex_str_opt: &Option<&str>) -> LemmyResult
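As a closing illustration of the validation changes above, a hedged sketch of the reworked post-title check (plain booleans instead of the real LemmyResult error types), showing the trimmed character count that replaced the old VALID_POST_TITLE_REGEX.

fn has_newline(s: &str) -> bool {
    s.contains('\n')
}

// Mirrors is_valid_post_title: 3..=200 characters after trimming, and no line breaks.
fn valid_post_title(title: &str) -> bool {
    let length = title.trim().chars().count();
    (3..=200).contains(&length) && !has_newline(title)
}

fn main() {
    assert!(valid_post_title("Hello world"));
    assert!(!valid_post_title("  a  "));          // too short once trimmed
    assert!(!valid_post_title("line\nbreak"));    // newlines rejected
    assert!(!valid_post_title(&"x".repeat(201))); // too long
}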