Merge branch 'main' into image-api-rework

Felix Ableitner 2025-01-09 15:33:45 +01:00
commit 6f91754f99
50 changed files with 2080 additions and 439 deletions

View file

@ -104,6 +104,18 @@ steps:
- cargo clippy --workspace --tests --all-targets -- -D warnings
when: *slow_check_paths
# `DROP OWNED` doesn't work for default user
create_database_user:
image: postgres:16-alpine
environment:
PGUSER: postgres
PGPASSWORD: password
PGHOST: database
PGDATABASE: lemmy
commands:
- psql -c "CREATE USER lemmy WITH PASSWORD 'password' SUPERUSER;"
when: *slow_check_paths
cargo_test:
image: *rust_image
environment:
@ -113,6 +125,12 @@ steps:
LEMMY_TEST_FAST_FEDERATION: "1"
LEMMY_CONFIG_LOCATION: ../../config/config.hjson
commands:
# Install pg_dump for the schema setup test (must match server version)
- apt update && apt install -y lsb-release
- sh -c 'echo "deb https://apt.postgresql.org/pub/repos/apt $(lsb_release -cs)-pgdg main" > /etc/apt/sources.list.d/pgdg.list'
- wget --quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc | apt-key add -
- apt update && apt install -y postgresql-client-16
# Run tests
- cargo test --workspace --no-fail-fast
when: *slow_check_paths
@ -160,18 +178,6 @@ steps:
- diff config/defaults.hjson config/defaults_current.hjson
when: *slow_check_paths
check_diesel_schema:
image: *rust_image
environment:
CARGO_HOME: .cargo_home
DATABASE_URL: postgres://lemmy:password@database:5432/lemmy
commands:
- <<: *install_diesel_cli
- cp crates/db_schema/src/schema.rs tmp.schema
- diesel migration run
- diff tmp.schema crates/db_schema/src/schema.rs
when: *slow_check_paths
cargo_build:
image: *rust_image
environment:
@ -181,37 +187,19 @@ steps:
- mv target/debug/lemmy_server target/lemmy_server
when: *slow_check_paths
check_diesel_migration:
# TODO: use willsquire/diesel-cli image when shared libraries become optional in lemmy_server
check_diesel_schema:
image: *rust_image
environment:
LEMMY_DATABASE_URL: postgres://lemmy:password@database:5432/lemmy
DATABASE_URL: postgres://lemmy:password@database:5432/lemmy
RUST_BACKTRACE: "1"
CARGO_HOME: .cargo_home
DATABASE_URL: postgres://lemmy:password@database:5432/lemmy
PGUSER: lemmy
PGPASSWORD: password
PGHOST: database
PGDATABASE: lemmy
commands:
# Install diesel_cli
- cp crates/db_schema/src/schema.rs tmp.schema
- target/lemmy_server migration --all run
- <<: *install_diesel_cli
# Run all migrations
- diesel migration run
- psql -c "DROP SCHEMA IF EXISTS r CASCADE;"
- pg_dump --no-owner --no-privileges --no-table-access-method --schema-only --no-sync -f before.sqldump
# Make sure that the newest migration is revertible without the `r` schema
- diesel migration redo
# Run schema setup twice, which fails on the 2nd time if `DROP SCHEMA IF EXISTS r CASCADE` drops the wrong things
- alias lemmy_schema_setup="target/lemmy_server --disable-scheduled-tasks --disable-http-server --disable-activity-sending"
- lemmy_schema_setup
- lemmy_schema_setup
# Make sure that the newest migration is revertible with the `r` schema
- diesel migration redo
# Check for changes in the schema, which would be caused by an incorrect migration
- psql -c "DROP SCHEMA IF EXISTS r CASCADE;"
- pg_dump --no-owner --no-privileges --no-table-access-method --schema-only --no-sync -f after.sqldump
- diff before.sqldump after.sqldump
- diesel print-schema
- diff tmp.schema crates/db_schema/src/schema.rs
when: *slow_check_paths
check_db_perf_tool:
@ -318,5 +306,6 @@ services:
# 15-alpine image necessary because of diesel tests
image: pgautoupgrade/pgautoupgrade:15-alpine
environment:
POSTGRES_USER: lemmy
POSTGRES_DB: lemmy
POSTGRES_USER: postgres
POSTGRES_PASSWORD: password

Cargo.lock (generated)
View file

@ -1359,6 +1359,19 @@ version = "0.1.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "56254986775e3233ffa9c4d7d3faaf6d36a2c09d30b20687e9f88bc8bafc16c8"
[[package]]
name = "diffutils"
version = "0.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c8d7ce619b5c0e13f7543dc2c203a7e6fa37e0111d876339aada7ec9540a58d5"
dependencies = [
"chrono",
"diff",
"regex",
"same-file",
"unicode-width",
]
[[package]]
name = "digest"
version = "0.10.7"
@ -2649,6 +2662,7 @@ dependencies = [
"diesel-derive-newtype",
"diesel_ltree",
"diesel_migrations",
"diffutils",
"futures-util",
"i-love-jesus",
"lemmy_utils",
@ -2676,6 +2690,7 @@ version = "0.19.6-beta.7"
dependencies = [
"actix-web",
"chrono",
"derive-new",
"diesel",
"diesel-async",
"diesel_ltree",
@ -2837,6 +2852,7 @@ dependencies = [
"lettre",
"markdown-it",
"markdown-it-block-spoiler",
"markdown-it-footnote",
"markdown-it-ruby",
"markdown-it-sub",
"markdown-it-sup",
@ -2901,7 +2917,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fc2f4eb4bc735547cfed7c0a4922cbd04a4655978c09b54f1f7b228750664c34"
dependencies = [
"cfg-if",
"windows-targets 0.48.5",
"windows-targets 0.52.6",
]
[[package]]
@ -3021,6 +3037,15 @@ dependencies = [
"markdown-it",
]
[[package]]
name = "markdown-it-footnote"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e6679ab967fbc45b290b25fb477af7556bf6825feec208a970585e6bb9aae3be"
dependencies = [
"markdown-it",
]
[[package]]
name = "markdown-it-ruby"
version = "1.0.0"

View file

@ -26,7 +26,6 @@ workspace = true
# See https://github.com/johnthagen/min-sized-rust for additional optimizations
[profile.release]
debug = 0
lto = "fat"
opt-level = 3 # Optimize for speed, not size.
codegen-units = 1 # Reduce parallel code generation.

View file

@ -16,10 +16,7 @@ pub async fn save_comment(
context: Data<LemmyContext>,
local_user_view: LocalUserView,
) -> LemmyResult<Json<CommentResponse>> {
let comment_saved_form = CommentSavedForm {
comment_id: data.comment_id,
person_id: local_user_view.person.id,
};
let comment_saved_form = CommentSavedForm::new(data.comment_id, local_user_view.person.id);
if data.save {
CommentSaved::save(&mut context.pool(), &comment_saved_form)

View file

@ -0,0 +1,42 @@
use activitypub_federation::config::Data;
use actix_web::web::{Json, Query};
use lemmy_api_common::{
context::LemmyContext,
person::{ListPersonSaved, ListPersonSavedResponse},
utils::check_private_instance,
};
use lemmy_db_views::{
person_saved_combined_view::PersonSavedCombinedQuery,
structs::{LocalUserView, SiteView},
};
use lemmy_utils::error::LemmyResult;
#[tracing::instrument(skip(context))]
pub async fn list_person_saved(
data: Query<ListPersonSaved>,
context: Data<LemmyContext>,
local_user_view: LocalUserView,
) -> LemmyResult<Json<ListPersonSavedResponse>> {
let local_site = SiteView::read_local(&mut context.pool()).await?;
check_private_instance(&Some(local_user_view.clone()), &local_site.local_site)?;
// parse pagination token
let page_after = if let Some(pa) = &data.page_cursor {
Some(pa.read(&mut context.pool()).await?)
} else {
None
};
let page_back = data.page_back;
let type_ = data.type_;
let saved = PersonSavedCombinedQuery {
type_,
page_after,
page_back,
}
.list(&mut context.pool(), &local_user_view)
.await?;
Ok(Json(ListPersonSavedResponse { saved }))
}

View file

@ -8,6 +8,7 @@ pub mod get_captcha;
pub mod list_banned;
pub mod list_logins;
pub mod list_media;
pub mod list_saved;
pub mod login;
pub mod logout;
pub mod notifications;

View file

@ -131,8 +131,6 @@ pub struct GetComments {
#[cfg_attr(feature = "full", ts(optional))]
pub parent_id: Option<CommentId>,
#[cfg_attr(feature = "full", ts(optional))]
pub saved_only: Option<bool>,
#[cfg_attr(feature = "full", ts(optional))]
pub liked_only: Option<bool>,
#[cfg_attr(feature = "full", ts(optional))]
pub disliked_only: Option<bool>,

View file

@ -4,10 +4,16 @@ use lemmy_db_schema::{
source::{login_token::LoginToken, site::Site},
CommentSortType,
ListingType,
PersonContentType,
PostListingMode,
PostSortType,
};
use lemmy_db_views::structs::{CommentView, LocalImageView, PostView};
use lemmy_db_views::structs::{
LocalImageView,
PersonContentCombinedPaginationCursor,
PersonContentCombinedView,
PersonSavedCombinedPaginationCursor,
};
use lemmy_db_views_actor::structs::{
CommentReplyView,
CommunityModeratorView,
@ -216,16 +222,6 @@ pub struct GetPersonDetails {
/// Example: dessalines, or dessalines@xyz.tld
#[cfg_attr(feature = "full", ts(optional))]
pub username: Option<String>,
#[cfg_attr(feature = "full", ts(optional))]
pub sort: Option<PostSortType>,
#[cfg_attr(feature = "full", ts(optional))]
pub page: Option<i64>,
#[cfg_attr(feature = "full", ts(optional))]
pub limit: Option<i64>,
#[cfg_attr(feature = "full", ts(optional))]
pub community_id: Option<CommunityId>,
#[cfg_attr(feature = "full", ts(optional))]
pub saved_only: Option<bool>,
}
#[skip_serializing_none]
@ -237,11 +233,62 @@ pub struct GetPersonDetailsResponse {
pub person_view: PersonView,
#[cfg_attr(feature = "full", ts(optional))]
pub site: Option<Site>,
pub comments: Vec<CommentView>,
pub posts: Vec<PostView>,
pub moderates: Vec<CommunityModeratorView>,
}
#[skip_serializing_none]
#[derive(Debug, Serialize, Deserialize, Clone, Default, PartialEq, Eq, Hash)]
#[cfg_attr(feature = "full", derive(TS))]
#[cfg_attr(feature = "full", ts(export))]
/// Gets a person's content (posts and comments)
///
/// Either person_id or username is required.
pub struct ListPersonContent {
#[cfg_attr(feature = "full", ts(optional))]
pub type_: Option<PersonContentType>,
#[cfg_attr(feature = "full", ts(optional))]
pub person_id: Option<PersonId>,
/// Example: dessalines, or dessalines@xyz.tld
#[cfg_attr(feature = "full", ts(optional))]
pub username: Option<String>,
#[cfg_attr(feature = "full", ts(optional))]
pub page_cursor: Option<PersonContentCombinedPaginationCursor>,
#[cfg_attr(feature = "full", ts(optional))]
pub page_back: Option<bool>,
}
#[skip_serializing_none]
#[derive(Debug, Serialize, Deserialize, Clone)]
#[cfg_attr(feature = "full", derive(TS))]
#[cfg_attr(feature = "full", ts(export))]
/// A person's content response.
pub struct ListPersonContentResponse {
pub content: Vec<PersonContentCombinedView>,
}
#[skip_serializing_none]
#[derive(Debug, Serialize, Deserialize, Clone, Default, PartialEq, Eq, Hash)]
#[cfg_attr(feature = "full", derive(TS))]
#[cfg_attr(feature = "full", ts(export))]
/// Gets your saved posts and comments
pub struct ListPersonSaved {
#[cfg_attr(feature = "full", ts(optional))]
pub type_: Option<PersonContentType>,
#[cfg_attr(feature = "full", ts(optional))]
pub page_cursor: Option<PersonSavedCombinedPaginationCursor>,
#[cfg_attr(feature = "full", ts(optional))]
pub page_back: Option<bool>,
}
#[skip_serializing_none]
#[derive(Debug, Serialize, Deserialize, Clone)]
#[cfg_attr(feature = "full", derive(TS))]
#[cfg_attr(feature = "full", ts(export))]
/// A person's saved content response.
pub struct ListPersonSavedResponse {
pub saved: Vec<PersonContentCombinedView>,
}
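// Editor's illustration (not part of the diff): with `skip_serializing_none`,
// `None` fields are omitted from the serialized JSON entirely. A minimal
// sketch, assuming serde_json is available as a dev-dependency:
#[cfg(test)]
mod list_person_saved_serialization_sketch {
  use super::*;

  #[test]
  fn omits_none_fields() {
    let q = ListPersonSaved {
      type_: Some(PersonContentType::Comments),
      ..Default::default()
    };
    // Only the populated field appears; `page_cursor` and `page_back` are dropped.
    assert_eq!(serde_json::to_string(&q).unwrap(), r#"{"type_":"Comments"}"#);
  }
}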
#[derive(Debug, Serialize, Deserialize, Clone, Copy, Default, PartialEq, Eq)]
#[cfg_attr(feature = "full", derive(TS))]
#[cfg_attr(feature = "full", ts(export))]

View file

@ -29,7 +29,7 @@ use reqwest::{
};
use reqwest_middleware::ClientWithMiddleware;
use serde::{Deserialize, Serialize};
use tracing::info;
use tracing::{info, warn};
use url::Url;
use urlencoding::encode;
use webpage::HTML;
@ -170,15 +170,23 @@ pub async fn generate_post_link_metadata(
metadata.opengraph_data.image.clone()
};
// Attempt to generate a thumbnail depending on the instance settings: either by proxying,
// storing the image persistently in pict-rs, or returning the remote url directly as the thumbnail.
let thumbnail_url = if let (false, Some(url)) = (is_image_post, custom_thumbnail) {
proxy_image_link(url, &context).await.ok()
} else if let (true, Some(url)) = (allow_generate_thumbnail, image_url) {
proxy_image_link(url.clone(), &context)
.await
.map_err(|e| warn!("Failed to proxy thumbnail: {e}"))
.ok()
.or(Some(url.into()))
} else if let (true, Some(url)) = (allow_generate_thumbnail, image_url.clone()) {
generate_pictrs_thumbnail(&url, &context)
.await
.map_err(|e| warn!("Failed to generate thumbnail: {e}"))
.ok()
.map(Into::into)
.or(image_url)
} else {
metadata.opengraph_data.image.clone()
image_url.clone()
};
let form = PostUpdateForm {

View file

@ -94,8 +94,6 @@ pub struct Search {
#[cfg_attr(feature = "full", ts(optional))]
pub post_url_only: Option<bool>,
#[cfg_attr(feature = "full", ts(optional))]
pub saved_only: Option<bool>,
#[cfg_attr(feature = "full", ts(optional))]
pub liked_only: Option<bool>,
#[cfg_attr(feature = "full", ts(optional))]
pub disliked_only: Option<bool>,
@ -201,7 +199,6 @@ pub struct CreateSite {
#[cfg_attr(feature = "full", ts(optional))]
pub description: Option<String>,
#[cfg_attr(feature = "full", ts(optional))]
pub enable_nsfw: Option<bool>,
#[cfg_attr(feature = "full", ts(optional))]
pub community_creation_admin_only: Option<bool>,
#[cfg_attr(feature = "full", ts(optional))]
@ -259,8 +256,6 @@ pub struct CreateSite {
#[cfg_attr(feature = "full", ts(optional))]
pub federation_enabled: Option<bool>,
#[cfg_attr(feature = "full", ts(optional))]
pub federation_debug: Option<bool>,
#[cfg_attr(feature = "full", ts(optional))]
pub captcha_enabled: Option<bool>,
#[cfg_attr(feature = "full", ts(optional))]
pub captcha_difficulty: Option<String>,
@ -294,9 +289,6 @@ pub struct EditSite {
/// A shorter, one line description of your site.
#[cfg_attr(feature = "full", ts(optional))]
pub description: Option<String>,
/// Whether to enable NSFW.
#[cfg_attr(feature = "full", ts(optional))]
pub enable_nsfw: Option<bool>,
/// Limits community creation to admins only.
#[cfg_attr(feature = "full", ts(optional))]
pub community_creation_admin_only: Option<bool>,
@ -375,9 +367,6 @@ pub struct EditSite {
/// Whether to enable federation.
#[cfg_attr(feature = "full", ts(optional))]
pub federation_enabled: Option<bool>,
/// Enables federation debugging.
#[cfg_attr(feature = "full", ts(optional))]
pub federation_debug: Option<bool>,
/// Whether to enable captchas for signups.
#[cfg_attr(feature = "full", ts(optional))]
pub captcha_enabled: Option<bool>,

View file

@ -86,6 +86,9 @@ pub async fn create_post(
let community = Community::read(&mut context.pool(), data.community_id).await?;
check_community_user_action(&local_user_view.person, &community, &mut context.pool()).await?;
// If it's an NSFW community, then use that as a default
let nsfw = data.nsfw.or(Some(community.nsfw));
if community.posting_restricted_to_mods {
let community_id = data.community_id;
CommunityModeratorView::check_is_community_moderator(
@ -110,7 +113,7 @@ pub async fn create_post(
url,
body,
alt_text: data.alt_text.clone(),
nsfw: data.nsfw,
nsfw,
language_id: Some(language_id),
scheduled_publish_time,
..PostInsertForm::new(

View file

@ -46,7 +46,6 @@ pub async fn list_comments(
&site_view.local_site,
));
let max_depth = data.max_depth;
let saved_only = data.saved_only;
let liked_only = data.liked_only;
let disliked_only = data.disliked_only;
@ -78,7 +77,6 @@ pub async fn list_comments(
listing_type,
sort,
max_depth,
saved_only,
liked_only,
disliked_only,
community_id,

View file

@ -0,0 +1,52 @@
use super::resolve_person_id_from_id_or_username;
use activitypub_federation::config::Data;
use actix_web::web::{Json, Query};
use lemmy_api_common::{
context::LemmyContext,
person::{ListPersonContent, ListPersonContentResponse},
utils::check_private_instance,
};
use lemmy_db_views::{
person_content_combined_view::PersonContentCombinedQuery,
structs::{LocalUserView, SiteView},
};
use lemmy_utils::error::LemmyResult;
#[tracing::instrument(skip(context))]
pub async fn list_person_content(
data: Query<ListPersonContent>,
context: Data<LemmyContext>,
local_user_view: Option<LocalUserView>,
) -> LemmyResult<Json<ListPersonContentResponse>> {
let local_site = SiteView::read_local(&mut context.pool()).await?;
check_private_instance(&local_user_view, &local_site.local_site)?;
let person_details_id = resolve_person_id_from_id_or_username(
&data.person_id,
&data.username,
&context,
&local_user_view,
)
.await?;
// parse pagination token
let page_after = if let Some(pa) = &data.page_cursor {
Some(pa.read(&mut context.pool()).await?)
} else {
None
};
let page_back = data.page_back;
let type_ = data.type_;
let content = PersonContentCombinedQuery {
creator_id: person_details_id,
type_,
page_after,
page_back,
}
.list(&mut context.pool(), &local_user_view)
.await?;
Ok(Json(ListPersonContentResponse { content }))
}
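// Editor's illustration (not part of the diff): a hypothetical follow-up page
// request. A client takes the cursor for the last item of the previous page
// (built server-side via `PersonContentCombinedPaginationCursor::after_post`)
// and passes it back as `page_cursor`:
//
//   let next_page_query = ListPersonContent {
//     person_id: Some(person_id),
//     page_cursor: Some(cursor),
//     ..Default::default()
//   };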

View file

@ -41,7 +41,6 @@ pub async fn list_posts(
} else {
data.community_id
};
let saved_only = data.saved_only;
let read_only = data.read_only;
let show_hidden = data.show_hidden;
let show_read = data.show_read;
@ -78,7 +77,6 @@ pub async fn list_posts(
listing_type,
sort,
community_id,
saved_only,
read_only,
liked_only,
disliked_only,

View file

@ -1,12 +1,18 @@
use crate::{fetcher::resolve_actor_identifier, objects::person::ApubPerson};
use activitypub_federation::config::Data;
use lemmy_api_common::{context::LemmyContext, LemmyErrorType};
use lemmy_db_schema::{
newtypes::CommunityId,
source::{local_site::LocalSite, local_user::LocalUser},
newtypes::{CommunityId, PersonId},
source::{local_site::LocalSite, local_user::LocalUser, person::Person},
CommentSortType,
ListingType,
PostSortType,
};
use lemmy_db_views::structs::LocalUserView;
use lemmy_utils::error::LemmyResult;
pub mod list_comments;
pub mod list_person_content;
pub mod list_posts;
pub mod read_community;
pub mod read_person;
@ -61,3 +67,28 @@ fn comment_sort_type_with_default(
.unwrap_or(local_site.default_comment_sort_type),
)
}
async fn resolve_person_id_from_id_or_username(
person_id: &Option<PersonId>,
username: &Option<String>,
context: &Data<LemmyContext>,
local_user_view: &Option<LocalUserView>,
) -> LemmyResult<PersonId> {
// Check to make sure a person name or an id is given
if username.is_none() && person_id.is_none() {
Err(LemmyErrorType::NoIdGiven)?
}
Ok(match person_id {
Some(id) => *id,
None => {
if let Some(username) = username {
resolve_actor_identifier::<ApubPerson, Person>(username, context, local_user_view, true)
.await?
.id
} else {
Err(LemmyErrorType::NotFound)?
}
}
})
}

View file

@ -1,4 +1,4 @@
use crate::{fetcher::resolve_actor_identifier, objects::person::ApubPerson};
use super::resolve_person_id_from_id_or_username;
use activitypub_federation::config::Data;
use actix_web::web::{Json, Query};
use lemmy_api_common::{
@ -6,14 +6,9 @@ use lemmy_api_common::{
person::{GetPersonDetails, GetPersonDetailsResponse},
utils::{check_private_instance, is_admin, read_site_for_actor},
};
use lemmy_db_schema::{source::person::Person, utils::post_to_comment_sort_type};
use lemmy_db_views::{
comment_view::CommentQuery,
post_view::PostQuery,
structs::{LocalUserView, SiteView},
};
use lemmy_db_views::structs::{LocalUserView, SiteView};
use lemmy_db_views_actor::structs::{CommunityModeratorView, PersonView};
use lemmy_utils::error::{LemmyErrorType, LemmyResult};
use lemmy_utils::error::LemmyResult;
#[tracing::instrument(skip(context))]
pub async fn read_person(
@ -21,27 +16,17 @@ pub async fn read_person(
context: Data<LemmyContext>,
local_user_view: Option<LocalUserView>,
) -> LemmyResult<Json<GetPersonDetailsResponse>> {
// Check to make sure a person name or an id is given
if data.username.is_none() && data.person_id.is_none() {
Err(LemmyErrorType::NoIdGiven)?
}
let local_site = SiteView::read_local(&mut context.pool()).await?;
check_private_instance(&local_user_view, &local_site.local_site)?;
let person_details_id = match data.person_id {
Some(id) => id,
None => {
if let Some(username) = &data.username {
resolve_actor_identifier::<ApubPerson, Person>(username, &context, &local_user_view, true)
.await?
.id
} else {
Err(LemmyErrorType::NotFound)?
}
}
};
let person_details_id = resolve_person_id_from_id_or_username(
&data.person_id,
&data.username,
&context,
&local_user_view,
)
.await?;
// You don't need to return settings for the user, since this comes back with GetSite
// `my_user`
@ -50,48 +35,6 @@ pub async fn read_person(
.map(|l| is_admin(l).is_ok())
.unwrap_or_default();
let person_view = PersonView::read(&mut context.pool(), person_details_id, is_admin).await?;
let sort = data.sort;
let page = data.page;
let limit = data.limit;
let saved_only = data.saved_only;
let community_id = data.community_id;
// If its saved only, you don't care what creator it was
// Or, if its not saved, then you only want it for that specific creator
let creator_id = if !saved_only.unwrap_or_default() {
Some(person_details_id)
} else {
None
};
let local_user = local_user_view.as_ref().map(|l| &l.local_user);
let posts = PostQuery {
sort,
saved_only,
local_user,
community_id,
page,
limit,
creator_id,
..Default::default()
}
.list(&local_site.site, &mut context.pool())
.await?;
let comments = CommentQuery {
local_user,
sort: sort.map(post_to_comment_sort_type),
saved_only,
community_id,
page,
limit,
creator_id,
..Default::default()
}
.list(&local_site.site, &mut context.pool())
.await?;
let moderates = CommunityModeratorView::for_person(
&mut context.pool(),
person_details_id,
@ -101,12 +44,9 @@ pub async fn read_person(
let site = read_site_for_actor(person_view.person.actor_id.clone(), &context).await?;
// Return the jwt
Ok(Json(GetPersonDetailsResponse {
person_view,
site,
moderates,
comments,
posts,
}))
}

View file

@ -53,7 +53,6 @@ pub async fn search(
limit,
title_only,
post_url_only,
saved_only,
liked_only,
disliked_only,
}) = data;
@ -86,7 +85,6 @@ pub async fn search(
url_only: post_url_only,
liked_only,
disliked_only,
saved_only,
..Default::default()
};
@ -101,7 +99,6 @@ pub async fn search(
limit,
liked_only,
disliked_only,
saved_only,
..Default::default()
};

View file

@ -212,10 +212,7 @@ pub async fn import_settings(
&context,
|(saved, context)| async move {
let comment = saved.dereference(&context).await?;
let form = CommentSavedForm {
person_id,
comment_id: comment.id,
};
let form = CommentSavedForm::new(comment.id, person_id);
CommentSaved::save(&mut context.pool(), &form).await?;
LemmyResult::Ok(())
},

View file

@ -85,3 +85,4 @@ tuplex = { workspace = true, optional = true }
[dev-dependencies]
serial_test = { workspace = true }
pretty_assertions = { workspace = true }
diffutils = "0.4.2"

View file

@ -685,3 +685,79 @@ CALL r.create_report_combined_trigger ('comment_report');
CALL r.create_report_combined_trigger ('private_message_report');
-- person_content (comment, post)
CREATE PROCEDURE r.create_person_content_combined_trigger (table_name text)
LANGUAGE plpgsql
AS $a$
BEGIN
EXECUTE replace($b$ CREATE FUNCTION r.person_content_combined_thing_insert ( )
RETURNS TRIGGER
LANGUAGE plpgsql
AS $$
BEGIN
INSERT INTO person_content_combined (published, thing_id)
VALUES (NEW.published, NEW.id);
RETURN NEW;
END $$;
CREATE TRIGGER person_content_combined
AFTER INSERT ON thing
FOR EACH ROW
EXECUTE FUNCTION r.person_content_combined_thing_insert ( );
$b$,
'thing',
table_name);
END;
$a$;
CALL r.create_person_content_combined_trigger ('post');
CALL r.create_person_content_combined_trigger ('comment');
-- person_saved (comment, post)
-- This one is a little different, because it triggers on x_actions.saved,
-- rather than on any row insert
CREATE PROCEDURE r.create_person_saved_combined_trigger (table_name text)
LANGUAGE plpgsql
AS $a$
BEGIN
EXECUTE replace($b$ CREATE FUNCTION r.person_saved_combined_change_values_thing ( )
RETURNS TRIGGER
LANGUAGE plpgsql
AS $$
BEGIN
IF (TG_OP = 'DELETE') THEN
DELETE FROM person_saved_combined AS p
WHERE p.person_id = OLD.person_id
AND p.thing_id = OLD.thing_id;
ELSIF (TG_OP = 'INSERT') THEN
IF NEW.saved IS NOT NULL THEN
INSERT INTO person_saved_combined (saved, person_id, thing_id)
VALUES (NEW.saved, NEW.person_id, NEW.thing_id);
END IF;
ELSIF (TG_OP = 'UPDATE') THEN
IF NEW.saved IS NOT NULL THEN
INSERT INTO person_saved_combined (saved, person_id, thing_id)
VALUES (NEW.saved, NEW.person_id, NEW.thing_id);
-- If saved gets set as null, delete the row
ELSE
DELETE FROM person_saved_combined AS p
WHERE p.person_id = NEW.person_id
AND p.thing_id = NEW.thing_id;
END IF;
END IF;
RETURN NULL;
END $$;
CREATE TRIGGER person_saved_combined
AFTER INSERT OR DELETE OR UPDATE OF saved ON thing_actions
FOR EACH ROW
EXECUTE FUNCTION r.person_saved_combined_change_values_thing ( );
$b$,
'thing',
table_name);
END;
$a$;
CALL r.create_person_saved_combined_trigger ('post');
CALL r.create_person_saved_combined_trigger ('comment');

View file

@ -184,10 +184,6 @@ impl Saveable for CommentSaved {
comment_saved_form: &CommentSavedForm,
) -> Result<Self, Error> {
let conn = &mut get_conn(pool).await?;
let comment_saved_form = (
comment_saved_form,
comment_actions::saved.eq(now().nullable()),
);
insert_into(comment_actions::table)
.values(comment_saved_form)
.on_conflict((comment_actions::comment_id, comment_actions::person_id))
@ -319,11 +315,7 @@ mod tests {
};
// Comment Saved
let comment_saved_form = CommentSavedForm {
comment_id: inserted_comment.id,
person_id: inserted_person.id,
};
let comment_saved_form = CommentSavedForm::new(inserted_comment.id, inserted_person.id);
let inserted_comment_saved = CommentSaved::save(pool, &comment_saved_form).await?;
let expected_comment_saved = CommentSaved {

View file

@ -11,11 +11,6 @@ extern crate diesel_derive_newtype;
#[macro_use]
extern crate diesel_derive_enum;
// this is used in tests
#[cfg(feature = "full")]
#[macro_use]
extern crate diesel_migrations;
#[cfg(feature = "full")]
#[macro_use]
extern crate async_trait;
@ -44,7 +39,7 @@ pub mod traits;
pub mod utils;
#[cfg(feature = "full")]
mod schema_setup;
pub mod schema_setup;
use serde::{Deserialize, Serialize};
use strum::{Display, EnumString};
@ -219,6 +214,16 @@ pub enum ModlogActionType {
AdminAllowInstance,
}
#[derive(EnumString, Display, Debug, Serialize, Deserialize, Clone, Copy, PartialEq, Eq, Hash)]
#[cfg_attr(feature = "full", derive(TS))]
#[cfg_attr(feature = "full", ts(export))]
/// A list of possible types for a person's content (posts and comments).
pub enum PersonContentType {
All,
Comments,
Posts,
}
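// Editor's illustration (not part of the diff): `EnumString` and `Display`
// from strum give this enum round-trip string conversions. A minimal sketch:
#[cfg(test)]
mod person_content_type_sketch {
  use super::PersonContentType;
  use std::str::FromStr;

  #[test]
  fn round_trips_through_strings() {
    assert_eq!(
      PersonContentType::from_str("Posts"),
      Ok(PersonContentType::Posts)
    );
    assert_eq!(PersonContentType::All.to_string(), "All");
  }
}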
#[derive(
EnumString, Display, Debug, Serialize, Deserialize, Clone, Copy, Default, PartialEq, Eq, Hash,
)]

View file

@ -184,6 +184,16 @@ pub struct DbUrl(pub(crate) Box<Url>);
/// The report combined id
pub struct ReportCombinedId(i32);
#[derive(Debug, Copy, Clone, Hash, Eq, PartialEq, Default)]
#[cfg_attr(feature = "full", derive(DieselNewType))]
/// The person content combined id
pub struct PersonContentCombinedId(i32);
#[derive(Debug, Copy, Clone, Hash, Eq, PartialEq, Default)]
#[cfg_attr(feature = "full", derive(DieselNewType))]
/// The person saved combined id
pub struct PersonSavedCombinedId(i32);
impl DbUrl {
pub fn inner(&self) -> &Url {
&self.0

View file

@ -729,6 +729,15 @@ diesel::table! {
}
}
diesel::table! {
person_content_combined (id) {
id -> Int4,
published -> Timestamptz,
post_id -> Nullable<Int4>,
comment_id -> Nullable<Int4>,
}
}
diesel::table! {
person_mention (id) {
id -> Int4,
@ -739,6 +748,16 @@ diesel::table! {
}
}
diesel::table! {
person_saved_combined (id) {
id -> Int4,
saved -> Timestamptz,
person_id -> Int4,
post_id -> Nullable<Int4>,
comment_id -> Nullable<Int4>,
}
}
diesel::table! {
post (id) {
id -> Int4,
@ -834,6 +853,13 @@ diesel::table! {
}
}
diesel::table! {
previously_run_sql (id) {
id -> Bool,
content -> Text,
}
}
diesel::table! {
private_message (id) {
id -> Int4,
@ -1050,8 +1076,13 @@ diesel::joinable!(password_reset_request -> local_user (local_user_id));
diesel::joinable!(person -> instance (instance_id));
diesel::joinable!(person_aggregates -> person (person_id));
diesel::joinable!(person_ban -> person (person_id));
diesel::joinable!(person_content_combined -> comment (comment_id));
diesel::joinable!(person_content_combined -> post (post_id));
diesel::joinable!(person_mention -> comment (comment_id));
diesel::joinable!(person_mention -> person (recipient_id));
diesel::joinable!(person_saved_combined -> comment (comment_id));
diesel::joinable!(person_saved_combined -> person (person_id));
diesel::joinable!(person_saved_combined -> post (post_id));
diesel::joinable!(post -> community (community_id));
diesel::joinable!(post -> language (language_id));
diesel::joinable!(post -> person (creator_id));
@ -1129,12 +1160,15 @@ diesel::allow_tables_to_appear_in_same_query!(
person_actions,
person_aggregates,
person_ban,
person_content_combined,
person_mention,
person_saved_combined,
post,
post_actions,
post_aggregates,
post_report,
post_tag,
previously_run_sql,
private_message,
private_message_report,
received_activity,

View file

@ -1,65 +1,358 @@
use anyhow::Context;
use diesel::{connection::SimpleConnection, Connection, PgConnection};
use diesel_migrations::{EmbeddedMigrations, MigrationHarness};
use lemmy_utils::error::LemmyError;
mod diff_check;
const MIGRATIONS: EmbeddedMigrations = embed_migrations!();
use crate::schema::previously_run_sql;
use anyhow::{anyhow, Context};
use chrono::TimeDelta;
use diesel::{
connection::SimpleConnection,
dsl::exists,
migration::{Migration, MigrationVersion},
pg::Pg,
select,
update,
BoolExpressionMethods,
Connection,
ExpressionMethods,
PgConnection,
QueryDsl,
RunQueryDsl,
};
use diesel_migrations::MigrationHarness;
use lemmy_utils::{error::LemmyResult, settings::SETTINGS};
use std::time::Instant;
diesel::table! {
pg_namespace (nspname) {
nspname -> Text,
}
}
fn migrations() -> diesel_migrations::EmbeddedMigrations {
// Using `const` here is required by the borrow checker
const MIGRATIONS: diesel_migrations::EmbeddedMigrations = diesel_migrations::embed_migrations!();
MIGRATIONS
}
/// This SQL code sets up the `r` schema, which contains things that can be safely dropped and
/// replaced instead of being changed using migrations. It may not create or modify things outside
/// of the `r` schema (indicated by `r.` before the name), unless a comment says otherwise.
///
/// Currently, this code is only run after the server starts and there's at least 1 pending
/// migration to run. This means every time you change something here, you must also create a
/// migration (a blank up.sql file works fine). This behavior will be removed when we implement a
/// better way to avoid useless schema updates and locks.
///
/// If you add something that depends on something (such as a table) created in a new migration,
/// then down.sql must use `CASCADE` when dropping it. This doesn't need to be fixed in old
/// migrations because the "replaceable-schema" migration runs `DROP SCHEMA IF EXISTS r CASCADE` in
/// down.sql.
const REPLACEABLE_SCHEMA: &[&str] = &[
"DROP SCHEMA IF EXISTS r CASCADE;",
"CREATE SCHEMA r;",
include_str!("../replaceable_schema/utils.sql"),
include_str!("../replaceable_schema/triggers.sql"),
];
fn replaceable_schema() -> String {
[
"CREATE SCHEMA r;",
include_str!("../replaceable_schema/utils.sql"),
include_str!("../replaceable_schema/triggers.sql"),
]
.join("\n")
}
pub fn run(db_url: &str) -> Result<(), LemmyError> {
// Migrations don't support async connection
let mut conn = PgConnection::establish(db_url).with_context(|| "Error connecting to database")?;
const REPLACEABLE_SCHEMA_PATH: &str = "crates/db_schema/replaceable_schema";
// Run all pending migrations except for the newest one, then run the newest one in the same
// transaction as `REPLACEABLE_SCHEMA`. This code will become less hacky when the conditional
// setup of things in `REPLACEABLE_SCHEMA` is done without using the number of pending
// migrations.
println!("Running Database migrations (This may take a long time)...");
let migrations = conn
.pending_migrations(MIGRATIONS)
.map_err(|e| anyhow::anyhow!("Couldn't determine pending migrations: {e}"))?;
for migration in migrations.iter().rev().skip(1).rev() {
conn
.run_migration(migration)
.map_err(|e| anyhow::anyhow!("Couldn't run DB Migrations: {e}"))?;
struct MigrationHarnessWrapper<'a> {
conn: &'a mut PgConnection,
#[cfg(test)]
diff_checked_migration_name: Option<String>,
}
impl MigrationHarnessWrapper<'_> {
fn run_migration_inner(
&mut self,
migration: &dyn Migration<Pg>,
) -> diesel::migration::Result<MigrationVersion<'static>> {
let start_time = Instant::now();
let result = self.conn.run_migration(migration);
let duration = TimeDelta::from_std(start_time.elapsed())
.map(|d| d.to_string())
.unwrap_or_default();
let name = migration.name();
println!("{duration} run {name}");
result
}
conn.transaction::<_, LemmyError, _>(|conn| {
if let Some(migration) = migrations.last() {
// Migration is run with a savepoint since there's already a transaction
conn
.run_migration(migration)
.map_err(|e| anyhow::anyhow!("Couldn't run DB Migrations: {e}"))?;
} else if !cfg!(debug_assertions) {
// In production, skip running `REPLACEABLE_SCHEMA` to avoid locking things in the schema. In
// CI, always run it because `diesel migration` commands would otherwise prevent it.
return Ok(());
}
impl MigrationHarness<Pg> for MigrationHarnessWrapper<'_> {
fn run_migration(
&mut self,
migration: &dyn Migration<Pg>,
) -> diesel::migration::Result<MigrationVersion<'static>> {
#[cfg(test)]
if self.diff_checked_migration_name == Some(migration.name().to_string()) {
let before = diff_check::get_dump();
self.run_migration_inner(migration)?;
self.revert_migration(migration)?;
let after = diff_check::get_dump();
diff_check::check_dump_diff(
after,
before,
&format!(
"These changes need to be applied in migrations/{}/down.sql:",
migration.name()
),
);
}
self.run_migration_inner(migration)
}
fn revert_migration(
&mut self,
migration: &dyn Migration<Pg>,
) -> diesel::migration::Result<MigrationVersion<'static>> {
let start_time = Instant::now();
let result = self.conn.revert_migration(migration);
let duration = TimeDelta::from_std(start_time.elapsed())
.map(|d| d.to_string())
.unwrap_or_default();
let name = migration.name();
println!("{duration} revert {name}");
result
}
fn applied_migrations(&mut self) -> diesel::migration::Result<Vec<MigrationVersion<'static>>> {
self.conn.applied_migrations()
}
}
#[derive(Default, Clone, Copy)]
pub struct Options {
#[cfg(test)]
enable_diff_check: bool,
revert: bool,
run: bool,
limit: Option<u64>,
}
impl Options {
#[cfg(test)]
fn enable_diff_check(mut self) -> Self {
self.enable_diff_check = true;
self
}
pub fn run(mut self) -> Self {
self.run = true;
self
}
pub fn revert(mut self) -> Self {
self.revert = true;
self
}
pub fn limit(mut self, limit: u64) -> Self {
self.limit = Some(limit);
self
}
}
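// Editor's illustration (not part of the diff): how the builder composes at a
// call site, matching `build_db_pool` and the tests below:
//
//   // Run all pending migrations (the normal server startup path):
//   schema_setup::run(Options::default().run())?;
//   // Revert only the newest migration:
//   schema_setup::run(Options::default().revert().limit(1))?;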
/// Checked by tests
#[derive(PartialEq, Eq, Debug)]
pub enum Branch {
EarlyReturn,
ReplaceableSchemaRebuilt,
ReplaceableSchemaNotRebuilt,
}
pub fn run(options: Options) -> LemmyResult<Branch> {
let db_url = SETTINGS.get_database_url();
// Migrations don't support async connection, and this function doesn't need to be async
let mut conn = PgConnection::establish(&db_url)?;
// If possible, skip getting a lock and recreating the "r" schema, so
// lemmy_server processes in a horizontally scaled setup can start without causing locks
if !options.revert
&& options.run
&& options.limit.is_none()
&& !conn
.has_pending_migration(migrations())
.map_err(convert_err)?
{
// The condition above implies that the migration that creates the previously_run_sql table was
// already run
let sql_unchanged = exists(
previously_run_sql::table.filter(previously_run_sql::content.eq(replaceable_schema())),
);
let schema_exists = exists(pg_namespace::table.find("r"));
if select(sql_unchanged.and(schema_exists)).get_result(&mut conn)? {
return Ok(Branch::EarlyReturn);
}
}
// Block concurrent attempts to run migrations until `conn` is closed, and disable the
// trigger that prevents the Diesel CLI from running migrations
println!("Waiting for lock...");
conn.batch_execute("SELECT pg_advisory_lock(0);")?;
println!("Running Database migrations (This may take a long time)...");
// Drop `r` schema, so migrations don't need to be made to work both with and without things in
// it existing
revert_replaceable_schema(&mut conn)?;
run_selected_migrations(&mut conn, &options).map_err(convert_err)?;
// Only run replaceable_schema if newest migration was applied
let output = if (options.run && options.limit.is_none())
|| !conn
.has_pending_migration(migrations())
.map_err(convert_err)?
{
#[cfg(test)]
if options.enable_diff_check {
let before = diff_check::get_dump();
run_replaceable_schema(&mut conn)?;
revert_replaceable_schema(&mut conn)?;
let after = diff_check::get_dump();
diff_check::check_dump_diff(before, after, "The code in crates/db_schema/replaceable_schema incorrectly created or modified things outside of the `r` schema, causing these changes to be left behind after dropping the schema:");
}
run_replaceable_schema(&mut conn)?;
Branch::ReplaceableSchemaRebuilt
} else {
Branch::ReplaceableSchemaNotRebuilt
};
println!("Database migrations complete.");
Ok(output)
}
fn run_replaceable_schema(conn: &mut PgConnection) -> LemmyResult<()> {
conn.transaction(|conn| {
conn
.batch_execute(&REPLACEABLE_SCHEMA.join("\n"))
.context("Couldn't run SQL files in crates/db_schema/replaceable_schema")?;
.batch_execute(&replaceable_schema())
.with_context(|| format!("Failed to run SQL files in {REPLACEABLE_SCHEMA_PATH}"))?;
let num_rows_updated = update(previously_run_sql::table)
.set(previously_run_sql::content.eq(replaceable_schema()))
.execute(conn)?;
debug_assert_eq!(num_rows_updated, 1);
Ok(())
})?;
println!("Database migrations complete.");
})
}
fn revert_replaceable_schema(conn: &mut PgConnection) -> LemmyResult<()> {
conn
.batch_execute("DROP SCHEMA IF EXISTS r CASCADE;")
.with_context(|| format!("Failed to revert SQL files in {REPLACEABLE_SCHEMA_PATH}"))?;
// Value in `previously_run_sql` table is not set here because the table might not exist,
// and that's fine because the existence of the `r` schema is also checked
Ok(())
}
fn run_selected_migrations(
conn: &mut PgConnection,
options: &Options,
) -> diesel::migration::Result<()> {
let mut wrapper = MigrationHarnessWrapper {
conn,
#[cfg(test)]
diff_checked_migration_name: options
.enable_diff_check
.then(|| diesel::migration::MigrationSource::<Pg>::migrations(&migrations()))
.transpose()?
// Get the migration with the highest version
.and_then(|migrations| {
migrations
.into_iter()
.map(|migration| migration.name().to_string())
.max()
}),
};
if options.revert {
if let Some(limit) = options.limit {
for _ in 0..limit {
wrapper.revert_last_migration(migrations())?;
}
} else {
wrapper.revert_all_migrations(migrations())?;
}
}
if options.run {
if let Some(limit) = options.limit {
for _ in 0..limit {
wrapper.run_next_migration(migrations())?;
}
} else {
wrapper.run_pending_migrations(migrations())?;
}
}
Ok(())
}
/// Makes `diesel::migration::Result` work with `anyhow` and `LemmyError`
fn convert_err(e: Box<dyn std::error::Error + Send + Sync>) -> anyhow::Error {
anyhow!(e)
}
#[cfg(test)]
mod tests {
use super::{
Branch::{EarlyReturn, ReplaceableSchemaNotRebuilt, ReplaceableSchemaRebuilt},
*,
};
use lemmy_utils::{error::LemmyResult, settings::SETTINGS};
use serial_test::serial;
#[test]
#[serial]
fn test_schema_setup() -> LemmyResult<()> {
let o = Options::default();
let db_url = SETTINGS.get_database_url();
let mut conn = PgConnection::establish(&db_url)?;
// Start with consistent state by dropping everything
conn.batch_execute("DROP OWNED BY CURRENT_USER;")?;
// Run all migrations, make sure the newest migration can be redone, and check the newest
// down.sql file
assert_eq!(run(o.run().enable_diff_check())?, ReplaceableSchemaRebuilt);
// Check for early return
assert_eq!(run(o.run())?, EarlyReturn);
// Test `limit`
assert_eq!(run(o.revert().limit(1))?, ReplaceableSchemaNotRebuilt);
assert_eq!(
conn
.pending_migrations(migrations())
.map_err(convert_err)?
.len(),
1
);
assert_eq!(run(o.run().limit(1))?, ReplaceableSchemaRebuilt);
// This should throw an error saying to use lemmy_server instead of diesel CLI
conn.batch_execute("DROP OWNED BY CURRENT_USER;")?;
assert!(matches!(
conn.run_pending_migrations(migrations()),
Err(e) if e.to_string().contains("lemmy_server")
));
// Diesel CLI's way of running migrations shouldn't break the custom migration runner
assert_eq!(run(o.run())?, ReplaceableSchemaRebuilt);
Ok(())
}
}

View file

@ -0,0 +1,41 @@
#![cfg(test)]
#![expect(clippy::expect_used)]
use lemmy_utils::settings::SETTINGS;
use std::process::{Command, Stdio};
// It's not possible to call `export_snapshot()` for each dump and run the dumps in parallel with
// the `--snapshot` flag. Don't waste your time!!!!
pub fn get_dump() -> String {
let db_url = SETTINGS.get_database_url();
let output = Command::new("pg_dump")
.args([
// Specify database URL
"--dbname",
&db_url,
// Disable some things
"--no-owner",
"--no-privileges",
"--no-table-access-method",
"--schema-only",
"--no-sync",
])
.stderr(Stdio::inherit())
.output()
.expect("failed to start pg_dump process");
// TODO: use exit_ok method when it's stable
assert!(output.status.success());
String::from_utf8(output.stdout).expect("pg_dump output is not valid UTF-8 text")
}
pub fn check_dump_diff(before: String, after: String, label: &str) {
if before != after {
let diff_bytes =
diffutilslib::unified_diff(before.as_bytes(), after.as_bytes(), &Default::default());
let diff = String::from_utf8_lossy(&diff_bytes);
panic!("{label}\n\n{diff}");
}
}
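// Editor's illustration (not part of the diff): the intended usage pattern,
// as exercised by the migration harness and schema-setup tests above:
//
//   let before = diff_check::get_dump();
//   /* apply and then revert the change under test */
//   let after = diff_check::get_dump();
//   diff_check::check_dump_diff(before, after, "leftover schema changes:");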

View file

@ -1 +1,3 @@
pub mod person_content;
pub mod person_saved;
pub mod report;

View file

@ -0,0 +1,22 @@
use crate::newtypes::{CommentId, PersonContentCombinedId, PostId};
#[cfg(feature = "full")]
use crate::schema::person_content_combined;
use chrono::{DateTime, Utc};
#[cfg(feature = "full")]
use i_love_jesus::CursorKeysModule;
#[derive(PartialEq, Eq, Debug, Clone)]
#[cfg_attr(
feature = "full",
derive(Identifiable, Queryable, Selectable, CursorKeysModule)
)]
#[cfg_attr(feature = "full", diesel(table_name = person_content_combined))]
#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))]
#[cfg_attr(feature = "full", cursor_keys_module(name = person_content_combined_keys))]
/// A combined table for a person's content (posts and comments)
pub struct PersonContentCombined {
pub id: PersonContentCombinedId,
pub published: DateTime<Utc>,
pub post_id: Option<PostId>,
pub comment_id: Option<CommentId>,
}

View file

@ -0,0 +1,23 @@
use crate::newtypes::{CommentId, PersonId, PersonSavedCombinedId, PostId};
#[cfg(feature = "full")]
use crate::schema::person_saved_combined;
use chrono::{DateTime, Utc};
#[cfg(feature = "full")]
use i_love_jesus::CursorKeysModule;
#[derive(PartialEq, Eq, Debug, Clone)]
#[cfg_attr(
feature = "full",
derive(Identifiable, Queryable, Selectable, CursorKeysModule)
)]
#[cfg_attr(feature = "full", diesel(table_name = person_saved_combined))]
#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))]
#[cfg_attr(feature = "full", cursor_keys_module(name = person_saved_combined_keys))]
/// A combined person_saved table.
pub struct PersonSavedCombined {
pub id: PersonSavedCombinedId,
pub saved: DateTime<Utc>,
pub person_id: PersonId,
pub post_id: Option<PostId>,
pub comment_id: Option<CommentId>,
}

View file

@ -142,7 +142,10 @@ pub struct CommentSaved {
#[cfg_attr(feature = "full", derive(Insertable, AsChangeset))]
#[cfg_attr(feature = "full", diesel(table_name = comment_actions))]
#[derive(derive_new::new)]
pub struct CommentSavedForm {
pub comment_id: CommentId,
pub person_id: PersonId,
#[new(value = "Utc::now()")]
pub saved: DateTime<Utc>,
}
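// Editor's illustration (not part of the diff): `derive_new::new` together
// with `#[new(value = "Utc::now()")]` generates a constructor roughly like
// the sketch below, which is why callers now write
// `CommentSavedForm::new(comment_id, person_id)` instead of setting `saved`:
//
//   impl CommentSavedForm {
//     pub fn new(comment_id: CommentId, person_id: PersonId) -> Self {
//       Self { comment_id, person_id, saved: Utc::now() }
//     }
//   }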

View file

@ -1,6 +1,6 @@
pub mod uplete;
use crate::{newtypes::DbUrl, CommentSortType, PostSortType};
use crate::{newtypes::DbUrl, schema_setup, CommentSortType, PostSortType};
use chrono::TimeDelta;
use deadpool::Runtime;
use diesel::{
@ -475,7 +475,7 @@ pub fn build_db_pool() -> LemmyResult<ActualDbPool> {
// provide a setup function which handles creating the connection
let mut config = ManagerConfig::default();
config.custom_setup = Box::new(establish_connection);
let manager = AsyncDieselConnectionManager::<AsyncPgConnection>::new_with_config(&db_url, config);
let manager = AsyncDieselConnectionManager::<AsyncPgConnection>::new_with_config(db_url, config);
let pool = Pool::builder(manager)
.max_size(SETTINGS.database.pool_size)
.runtime(Runtime::Tokio1)
@ -493,7 +493,7 @@ pub fn build_db_pool() -> LemmyResult<ActualDbPool> {
}))
.build()?;
crate::schema_setup::run(&db_url)?;
schema_setup::run(schema_setup::Options::default().run())?;
Ok(pool)
}

View file

@ -41,6 +41,7 @@ ts-rs = { workspace = true, optional = true }
actix-web = { workspace = true, optional = true }
i-love-jesus = { workspace = true, optional = true }
chrono = { workspace = true }
derive-new.workspace = true
[dev-dependencies]
serial_test = { workspace = true }

View file

@ -186,13 +186,6 @@ fn queries<'a>() -> Queries<
}
}
// If its saved only, then filter, and order by the saved time, not the comment creation time.
if o.saved_only.unwrap_or_default() {
query = query
.filter(comment_actions::saved.is_not_null())
.then_order_by(comment_actions::saved.desc());
}
if let Some(my_id) = o.local_user.person_id() {
let not_creator_filter = comment::creator_id.ne(my_id);
if o.liked_only.unwrap_or_default() {
@ -332,7 +325,6 @@ pub struct CommentQuery<'a> {
pub creator_id: Option<PersonId>,
pub local_user: Option<&'a LocalUser>,
pub search_term: Option<String>,
pub saved_only: Option<bool>,
pub liked_only: Option<bool>,
pub disliked_only: Option<bool>,
pub page: Option<i64>,
@ -376,15 +368,7 @@ mod tests {
newtypes::LanguageId,
source::{
actor_language::LocalUserLanguage,
comment::{
Comment,
CommentInsertForm,
CommentLike,
CommentLikeForm,
CommentSaved,
CommentSavedForm,
CommentUpdateForm,
},
comment::{Comment, CommentInsertForm, CommentLike, CommentLikeForm, CommentUpdateForm},
community::{
Community,
CommunityFollower,
@ -406,7 +390,7 @@ mod tests {
post::{Post, PostInsertForm, PostUpdateForm},
site::{Site, SiteInsertForm},
},
traits::{Bannable, Blockable, Crud, Followable, Joinable, Likeable, Saveable},
traits::{Bannable, Blockable, Crud, Followable, Joinable, Likeable},
utils::{build_db_pool_for_tests, RANK_DEFAULT},
CommunityVisibility,
SubscribedType,
@ -892,47 +876,6 @@ mod tests {
cleanup(data, pool).await
}
#[tokio::test]
#[serial]
async fn test_saved_order() -> LemmyResult<()> {
let pool = &build_db_pool_for_tests();
let pool = &mut pool.into();
let data = init_data(pool).await?;
// Save two comments
let save_comment_0_form = CommentSavedForm {
person_id: data.timmy_local_user_view.person.id,
comment_id: data.inserted_comment_0.id,
};
CommentSaved::save(pool, &save_comment_0_form).await?;
let save_comment_2_form = CommentSavedForm {
person_id: data.timmy_local_user_view.person.id,
comment_id: data.inserted_comment_2.id,
};
CommentSaved::save(pool, &save_comment_2_form).await?;
// Fetch the saved comments
let comments = CommentQuery {
local_user: Some(&data.timmy_local_user_view.local_user),
saved_only: Some(true),
..Default::default()
}
.list(&data.site, pool)
.await?;
// There should only be two comments
assert_eq!(2, comments.len());
// The first comment, should be the last one saved (descending order)
assert_eq!(comments[0].comment.id, data.inserted_comment_2.id);
// The second comment, should be the first one saved
assert_eq!(comments[1].comment.id, data.inserted_comment_0.id);
cleanup(data, pool).await
}
async fn cleanup(data: Data, pool: &mut DbPool<'_>) -> LemmyResult<()> {
CommentLike::remove(
pool,

View file

@ -12,6 +12,10 @@ pub mod local_image_view;
#[cfg(feature = "full")]
pub mod local_user_view;
#[cfg(feature = "full")]
pub mod person_content_combined_view;
#[cfg(feature = "full")]
pub mod person_saved_combined_view;
#[cfg(feature = "full")]
pub mod post_report_view;
#[cfg(feature = "full")]
pub mod post_tags_view;
@ -30,3 +34,10 @@ pub mod site_view;
pub mod structs;
#[cfg(feature = "full")]
pub mod vote_view;
pub trait InternalToCombinedView {
type CombinedView;
/// Maps the combined DB row to an enum
fn map_to_enum(&self) -> Option<Self::CombinedView>;
}

View file

@ -0,0 +1,456 @@
use crate::{
structs::{
CommentView,
LocalUserView,
PersonContentCombinedPaginationCursor,
PersonContentCombinedView,
PersonContentViewInternal,
PostView,
},
InternalToCombinedView,
};
use diesel::{
result::Error,
BoolExpressionMethods,
ExpressionMethods,
JoinOnDsl,
NullableExpressionMethods,
QueryDsl,
SelectableHelper,
};
use diesel_async::RunQueryDsl;
use i_love_jesus::PaginatedQueryBuilder;
use lemmy_db_schema::{
aliases::creator_community_actions,
newtypes::PersonId,
schema::{
comment,
comment_actions,
comment_aggregates,
community,
community_actions,
image_details,
local_user,
person,
person_actions,
person_content_combined,
post,
post_actions,
post_aggregates,
post_tag,
tag,
},
source::{
combined::person_content::{person_content_combined_keys as key, PersonContentCombined},
community::CommunityFollower,
},
utils::{actions, actions_alias, functions::coalesce, get_conn, DbPool},
PersonContentType,
};
use lemmy_utils::error::LemmyResult;
impl PersonContentCombinedPaginationCursor {
// Get the cursor for the page that starts immediately after the given item (post or comment)
pub fn after_post(view: &PersonContentCombinedView) -> PersonContentCombinedPaginationCursor {
let (prefix, id) = match view {
PersonContentCombinedView::Comment(v) => ('C', v.comment.id.0),
PersonContentCombinedView::Post(v) => ('P', v.post.id.0),
};
// hex encoding to prevent ossification
PersonContentCombinedPaginationCursor(format!("{prefix}{id:x}"))
}
pub async fn read(&self, pool: &mut DbPool<'_>) -> Result<PaginationCursorData, Error> {
let err_msg = || Error::QueryBuilderError("Could not parse pagination token".into());
let mut query = person_content_combined::table
.select(PersonContentCombined::as_select())
.into_boxed();
let (prefix, id_str) = self.0.split_at_checked(1).ok_or_else(err_msg)?;
let id = i32::from_str_radix(id_str, 16).map_err(|_err| err_msg())?;
query = match prefix {
"C" => query.filter(person_content_combined::comment_id.eq(id)),
"P" => query.filter(person_content_combined::post_id.eq(id)),
_ => return Err(err_msg()),
};
let token = query.first(&mut get_conn(pool).await?).await?;
Ok(PaginationCursorData(token))
}
}
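// Editor's illustration (not part of the diff): the cursor is a one-letter
// type prefix ('C' or 'P') followed by the row id in lowercase hex, so it
// round-trips like this minimal sketch:
#[cfg(test)]
mod cursor_format_sketch {
  #[test]
  fn round_trips() {
    let id: i32 = 26;
    let cursor = format!("C{id:x}"); // what `after_post` produces for comment 26
    let (prefix, id_str) = cursor.split_at(1);
    assert_eq!(prefix, "C");
    assert_eq!(i32::from_str_radix(id_str, 16), Ok(26));
  }
}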
#[derive(Clone)]
pub struct PaginationCursorData(PersonContentCombined);
#[derive(derive_new::new)]
pub struct PersonContentCombinedQuery {
pub creator_id: PersonId,
#[new(default)]
pub type_: Option<PersonContentType>,
#[new(default)]
pub page_after: Option<PaginationCursorData>,
#[new(default)]
pub page_back: Option<bool>,
}
impl PersonContentCombinedQuery {
pub async fn list(
self,
pool: &mut DbPool<'_>,
user: &Option<LocalUserView>,
) -> LemmyResult<Vec<PersonContentCombinedView>> {
let my_person_id = user.as_ref().map(|u| u.local_user.person_id);
let item_creator = person::id;
let conn = &mut get_conn(pool).await?;
let post_tags = post_tag::table
.inner_join(tag::table)
.select(diesel::dsl::sql::<diesel::sql_types::Json>(
"json_agg(tag.*)",
))
.filter(post_tag::post_id.eq(post::id))
.filter(tag::deleted.eq(false))
.single_value();
// Notes: since the post_id and comment_id are optional columns,
// many joins must use an OR condition.
// For example, the creator must be the person table joined to either:
// - post.creator_id
// - comment.creator_id
let query = person_content_combined::table
// The comment
.left_join(comment::table.on(person_content_combined::comment_id.eq(comment::id.nullable())))
// The post
// It gets a bit complicated here: both comment and post rows in the combined table
// have a post attached, so you can use an inner join.
.inner_join(
post::table.on(
person_content_combined::post_id
.eq(post::id.nullable())
.or(comment::post_id.eq(post::id)),
),
)
// The item creator
.inner_join(
person::table.on(
comment::creator_id
.eq(item_creator)
// Need to filter out the post rows where the given post_id is null,
// otherwise you'll get duplicated post rows
.or(
post::creator_id
.eq(item_creator)
.and(person_content_combined::post_id.is_not_null()),
),
),
)
// The community
.inner_join(community::table.on(post::community_id.eq(community::id)))
.left_join(actions_alias(
creator_community_actions,
item_creator,
post::community_id,
))
.left_join(
local_user::table.on(
item_creator
.eq(local_user::person_id)
.and(local_user::admin.eq(true)),
),
)
.left_join(actions(
community_actions::table,
my_person_id,
post::community_id,
))
.left_join(actions(post_actions::table, my_person_id, post::id))
.left_join(actions(person_actions::table, my_person_id, item_creator))
.inner_join(post_aggregates::table.on(post::id.eq(post_aggregates::post_id)))
.left_join(
comment_aggregates::table
.on(person_content_combined::comment_id.eq(comment_aggregates::comment_id.nullable())),
)
.left_join(actions(comment_actions::table, my_person_id, comment::id))
.left_join(image_details::table.on(post::thumbnail_url.eq(image_details::link.nullable())))
// The creator id filter
.filter(item_creator.eq(self.creator_id))
.select((
// Post-specific
post_aggregates::all_columns,
coalesce(
post_aggregates::comments.nullable() - post_actions::read_comments_amount.nullable(),
post_aggregates::comments,
),
post_actions::saved.nullable().is_not_null(),
post_actions::read.nullable().is_not_null(),
post_actions::hidden.nullable().is_not_null(),
post_actions::like_score.nullable(),
image_details::all_columns.nullable(),
post_tags,
// Comment-specific
comment::all_columns.nullable(),
comment_aggregates::all_columns.nullable(),
comment_actions::saved.nullable().is_not_null(),
comment_actions::like_score.nullable(),
// Shared
post::all_columns,
community::all_columns,
person::all_columns,
CommunityFollower::select_subscribed_type(),
local_user::admin.nullable().is_not_null(),
creator_community_actions
.field(community_actions::became_moderator)
.nullable()
.is_not_null(),
creator_community_actions
.field(community_actions::received_ban)
.nullable()
.is_not_null(),
person_actions::blocked.nullable().is_not_null(),
community_actions::received_ban.nullable().is_not_null(),
))
.into_boxed();
let mut query = PaginatedQueryBuilder::new(query);
if let Some(type_) = self.type_ {
query = match type_ {
PersonContentType::All => query,
PersonContentType::Comments => {
query.filter(person_content_combined::comment_id.is_not_null())
}
PersonContentType::Posts => query.filter(person_content_combined::post_id.is_not_null()),
}
}
let page_after = self.page_after.map(|c| c.0);
if self.page_back.unwrap_or_default() {
query = query.before(page_after).limit_and_offset_from_end();
} else {
query = query.after(page_after);
}
// Sorting by published
query = query
.then_desc(key::published)
// Tie breaker
.then_desc(key::id);
let res = query.load::<PersonContentViewInternal>(conn).await?;
// Map the query results to the enum
let out = res.into_iter().filter_map(|u| u.map_to_enum()).collect();
Ok(out)
}
}
impl InternalToCombinedView for PersonContentViewInternal {
type CombinedView = PersonContentCombinedView;
fn map_to_enum(&self) -> Option<Self::CombinedView> {
// Use a short alias
let v = self.clone();
if let (Some(comment), Some(counts)) = (v.comment, v.comment_counts) {
Some(PersonContentCombinedView::Comment(CommentView {
comment,
counts,
post: v.post,
community: v.community,
creator: v.item_creator,
creator_banned_from_community: v.item_creator_banned_from_community,
creator_is_moderator: v.item_creator_is_moderator,
creator_is_admin: v.item_creator_is_admin,
creator_blocked: v.item_creator_blocked,
subscribed: v.subscribed,
saved: v.comment_saved,
my_vote: v.my_comment_vote,
banned_from_community: v.banned_from_community,
}))
} else {
Some(PersonContentCombinedView::Post(PostView {
post: v.post,
community: v.community,
unread_comments: v.post_unread_comments,
counts: v.post_counts,
creator: v.item_creator,
creator_banned_from_community: v.item_creator_banned_from_community,
creator_is_moderator: v.item_creator_is_moderator,
creator_is_admin: v.item_creator_is_admin,
creator_blocked: v.item_creator_blocked,
subscribed: v.subscribed,
saved: v.post_saved,
read: v.post_read,
hidden: v.post_hidden,
my_vote: v.my_post_vote,
image_details: v.image_details,
banned_from_community: v.banned_from_community,
tags: v.post_tags,
}))
}
}
}
#[cfg(test)]
#[expect(clippy::indexing_slicing)]
mod tests {
use crate::{
person_content_combined_view::PersonContentCombinedQuery,
structs::PersonContentCombinedView,
};
use lemmy_db_schema::{
source::{
comment::{Comment, CommentInsertForm},
community::{Community, CommunityInsertForm},
instance::Instance,
person::{Person, PersonInsertForm},
post::{Post, PostInsertForm},
},
traits::Crud,
utils::{build_db_pool_for_tests, DbPool},
};
use lemmy_utils::error::LemmyResult;
use pretty_assertions::assert_eq;
use serial_test::serial;
struct Data {
instance: Instance,
timmy: Person,
sara: Person,
timmy_post: Post,
timmy_post_2: Post,
sara_post: Post,
timmy_comment: Comment,
sara_comment: Comment,
sara_comment_2: Comment,
}
async fn init_data(pool: &mut DbPool<'_>) -> LemmyResult<Data> {
let instance = Instance::read_or_create(pool, "my_domain.tld".to_string()).await?;
let timmy_form = PersonInsertForm::test_form(instance.id, "timmy_pcv");
let timmy = Person::create(pool, &timmy_form).await?;
let sara_form = PersonInsertForm::test_form(instance.id, "sara_pcv");
let sara = Person::create(pool, &sara_form).await?;
let community_form = CommunityInsertForm::new(
instance.id,
"test community pcv".to_string(),
"nada".to_owned(),
"pubkey".to_string(),
);
let community = Community::create(pool, &community_form).await?;
let timmy_post_form = PostInsertForm::new("timmy post prv".into(), timmy.id, community.id);
let timmy_post = Post::create(pool, &timmy_post_form).await?;
let timmy_post_form_2 = PostInsertForm::new("timmy post prv 2".into(), timmy.id, community.id);
let timmy_post_2 = Post::create(pool, &timmy_post_form_2).await?;
let sara_post_form = PostInsertForm::new("sara post prv".into(), sara.id, community.id);
let sara_post = Post::create(pool, &sara_post_form).await?;
let timmy_comment_form =
CommentInsertForm::new(timmy.id, timmy_post.id, "timmy comment prv".into());
let timmy_comment = Comment::create(pool, &timmy_comment_form, None).await?;
let sara_comment_form =
CommentInsertForm::new(sara.id, timmy_post.id, "sara comment prv".into());
let sara_comment = Comment::create(pool, &sara_comment_form, None).await?;
let sara_comment_form_2 =
CommentInsertForm::new(sara.id, timmy_post_2.id, "sara comment prv 2".into());
let sara_comment_2 = Comment::create(pool, &sara_comment_form_2, None).await?;
Ok(Data {
instance,
timmy,
sara,
timmy_post,
timmy_post_2,
sara_post,
timmy_comment,
sara_comment,
sara_comment_2,
})
}
async fn cleanup(data: Data, pool: &mut DbPool<'_>) -> LemmyResult<()> {
Instance::delete(pool, data.instance.id).await?;
Ok(())
}
#[tokio::test]
#[serial]
async fn test_combined() -> LemmyResult<()> {
let pool = &build_db_pool_for_tests();
let pool = &mut pool.into();
let data = init_data(pool).await?;
// Do a batch read of timmy
let timmy_content = PersonContentCombinedQuery::new(data.timmy.id)
.list(pool, &None)
.await?;
assert_eq!(3, timmy_content.len());
// Make sure the types are correct
if let PersonContentCombinedView::Comment(v) = &timmy_content[0] {
assert_eq!(data.timmy_comment.id, v.comment.id);
assert_eq!(data.timmy.id, v.creator.id);
} else {
panic!("wrong type");
}
if let PersonContentCombinedView::Post(v) = &timmy_content[1] {
assert_eq!(data.timmy_post_2.id, v.post.id);
assert_eq!(data.timmy.id, v.post.creator_id);
} else {
panic!("wrong type");
}
if let PersonContentCombinedView::Post(v) = &timmy_content[2] {
assert_eq!(data.timmy_post.id, v.post.id);
assert_eq!(data.timmy.id, v.post.creator_id);
} else {
panic!("wrong type");
}
// Do a batch read of sara
let sara_content = PersonContentCombinedQuery::new(data.sara.id)
.list(pool, &None)
.await?;
assert_eq!(3, sara_content.len());
// Make sure the types are correct
if let PersonContentCombinedView::Comment(v) = &sara_content[0] {
assert_eq!(data.sara_comment_2.id, v.comment.id);
assert_eq!(data.sara.id, v.creator.id);
// This comment was made on timmy_post_2
assert_eq!(data.timmy_post_2.id, v.post.id);
assert_eq!(data.timmy.id, v.post.creator_id);
} else {
panic!("wrong type");
}
if let PersonContentCombinedView::Comment(v) = &sara_content[1] {
assert_eq!(data.sara_comment.id, v.comment.id);
assert_eq!(data.sara.id, v.creator.id);
assert_eq!(data.timmy_post.id, v.post.id);
assert_eq!(data.timmy.id, v.post.creator_id);
} else {
panic!("wrong type");
}
if let PersonContentCombinedView::Post(v) = &sara_content[2] {
assert_eq!(data.sara_post.id, v.post.id);
assert_eq!(data.sara.id, v.post.creator_id);
} else {
panic!("wrong type");
}
cleanup(data, pool).await?;
Ok(())
}
}

View file

@ -0,0 +1,417 @@
use crate::{
structs::{
LocalUserView,
PersonContentCombinedView,
PersonContentViewInternal,
PersonSavedCombinedPaginationCursor,
},
InternalToCombinedView,
};
use diesel::{
result::Error,
BoolExpressionMethods,
ExpressionMethods,
JoinOnDsl,
NullableExpressionMethods,
QueryDsl,
SelectableHelper,
};
use diesel_async::RunQueryDsl;
use i_love_jesus::PaginatedQueryBuilder;
use lemmy_db_schema::{
aliases::creator_community_actions,
schema::{
comment,
comment_actions,
comment_aggregates,
community,
community_actions,
image_details,
local_user,
person,
person_actions,
person_saved_combined,
post,
post_actions,
post_aggregates,
post_tag,
tag,
},
source::{
combined::person_saved::{person_saved_combined_keys as key, PersonSavedCombined},
community::CommunityFollower,
},
utils::{actions, actions_alias, functions::coalesce, get_conn, DbPool},
PersonContentType,
};
use lemmy_utils::error::LemmyResult;
impl PersonSavedCombinedPaginationCursor {
// get cursor for the page that starts immediately after the given item (post or comment)
pub fn after_post(view: &PersonContentCombinedView) -> PersonSavedCombinedPaginationCursor {
let (prefix, id) = match view {
PersonContentCombinedView::Comment(v) => ('C', v.comment.id.0),
PersonContentCombinedView::Post(v) => ('P', v.post.id.0),
};
// hex encoding keeps the cursor opaque, so clients don't start depending on its internal format (ossification)
PersonSavedCombinedPaginationCursor(format!("{prefix}{id:x}"))
}
pub async fn read(&self, pool: &mut DbPool<'_>) -> Result<PaginationCursorData, Error> {
let err_msg = || Error::QueryBuilderError("Could not parse pagination token".into());
let mut query = person_saved_combined::table
.select(PersonSavedCombined::as_select())
.into_boxed();
let (prefix, id_str) = self.0.split_at_checked(1).ok_or_else(err_msg)?;
let id = i32::from_str_radix(id_str, 16).map_err(|_err| err_msg())?;
query = match prefix {
"C" => query.filter(person_saved_combined::comment_id.eq(id)),
"P" => query.filter(person_saved_combined::post_id.eq(id)),
_ => return Err(err_msg()),
};
let token = query.first(&mut get_conn(pool).await?).await?;
Ok(PaginationCursorData(token))
}
}
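// A round-trip sketch of the cursor format (hypothetical id): the first byte
// is 'C' or 'P', the rest is the item id in lowercase hex.
//
//   let cursor = PersonSavedCombinedPaginationCursor(format!("{}{:x}", 'P', 26));
//   assert_eq!(cursor.0, "P1a");
//   let (prefix, id_str) = cursor.0.split_at(1);
//   assert_eq!((prefix, i32::from_str_radix(id_str, 16)), ("P", Ok(26)));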
#[derive(Clone)]
pub struct PaginationCursorData(PersonSavedCombined);
#[derive(Default)]
pub struct PersonSavedCombinedQuery {
pub type_: Option<PersonContentType>,
pub page_after: Option<PaginationCursorData>,
pub page_back: Option<bool>,
}
impl PersonSavedCombinedQuery {
pub async fn list(
self,
pool: &mut DbPool<'_>,
user: &LocalUserView,
) -> LemmyResult<Vec<PersonContentCombinedView>> {
let my_person_id = user.local_user.person_id;
let item_creator = person::id;
let conn = &mut get_conn(pool).await?;
let post_tags = post_tag::table
.inner_join(tag::table)
.select(diesel::dsl::sql::<diesel::sql_types::Json>(
"json_agg(tag.*)",
))
.filter(post_tag::post_id.eq(post::id))
.filter(tag::deleted.eq(false))
.single_value();
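// Roughly the correlated subquery this builds (a sketch of the generated SQL,
// assuming the declared post_tag -> tag association joins on tag_id):
//
//   (SELECT json_agg(tag.*)
//    FROM post_tag INNER JOIN tag ON post_tag.tag_id = tag.id
//    WHERE post_tag.post_id = post.id AND tag.deleted = false)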
// Notes: since the post_id and comment_id are optional columns,
// many joins must use an OR condition.
// For example, the creator must be the person table joined to either:
// - post.creator_id
// - comment.creator_id
let query = person_saved_combined::table
// The comment
.left_join(comment::table.on(person_saved_combined::comment_id.eq(comment::id.nullable())))
// The post
// It gets a bit complicated here: since both comments and posts have a post
// attached, you can use an inner join.
.inner_join(
post::table.on(
person_saved_combined::post_id
.eq(post::id.nullable())
.or(comment::post_id.eq(post::id)),
),
)
// The item creator
.inner_join(
person::table.on(
comment::creator_id
.eq(item_creator)
// Need to filter out the post rows where the given post_id is null,
// otherwise you'll get duplicate post rows
.or(
post::creator_id
.eq(item_creator)
.and(person_saved_combined::post_id.is_not_null()),
),
),
)
// The community
.inner_join(community::table.on(post::community_id.eq(community::id)))
.left_join(actions_alias(
creator_community_actions,
item_creator,
post::community_id,
))
.left_join(
local_user::table.on(
item_creator
.eq(local_user::person_id)
.and(local_user::admin.eq(true)),
),
)
.left_join(actions(
community_actions::table,
Some(my_person_id),
post::community_id,
))
.left_join(actions(post_actions::table, Some(my_person_id), post::id))
.left_join(actions(
person_actions::table,
Some(my_person_id),
item_creator,
))
.inner_join(post_aggregates::table.on(post::id.eq(post_aggregates::post_id)))
.left_join(
comment_aggregates::table
.on(person_saved_combined::comment_id.eq(comment_aggregates::comment_id.nullable())),
)
.left_join(actions(
comment_actions::table,
Some(my_person_id),
comment::id,
))
.left_join(image_details::table.on(post::thumbnail_url.eq(image_details::link.nullable())))
// The person id filter
.filter(person_saved_combined::person_id.eq(my_person_id))
.select((
// Post-specific
post_aggregates::all_columns,
coalesce(
post_aggregates::comments.nullable() - post_actions::read_comments_amount.nullable(),
post_aggregates::comments,
),
post_actions::saved.nullable().is_not_null(),
post_actions::read.nullable().is_not_null(),
post_actions::hidden.nullable().is_not_null(),
post_actions::like_score.nullable(),
image_details::all_columns.nullable(),
post_tags,
// Comment-specific
comment::all_columns.nullable(),
comment_aggregates::all_columns.nullable(),
comment_actions::saved.nullable().is_not_null(),
comment_actions::like_score.nullable(),
// Shared
post::all_columns,
community::all_columns,
person::all_columns,
CommunityFollower::select_subscribed_type(),
local_user::admin.nullable().is_not_null(),
creator_community_actions
.field(community_actions::became_moderator)
.nullable()
.is_not_null(),
creator_community_actions
.field(community_actions::received_ban)
.nullable()
.is_not_null(),
person_actions::blocked.nullable().is_not_null(),
community_actions::received_ban.nullable().is_not_null(),
))
.into_boxed();
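// In SQL terms, the item-creator join above is roughly (a sketch; names per
// the schema imports in this file):
//
//   INNER JOIN person
//     ON comment.creator_id = person.id
//     OR (post.creator_id = person.id
//         AND person_saved_combined.post_id IS NOT NULL)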
let mut query = PaginatedQueryBuilder::new(query);
if let Some(type_) = self.type_ {
query = match type_ {
PersonContentType::All => query,
PersonContentType::Comments => {
query.filter(person_saved_combined::comment_id.is_not_null())
}
PersonContentType::Posts => query.filter(person_saved_combined::post_id.is_not_null()),
}
}
let page_after = self.page_after.map(|c| c.0);
if self.page_back.unwrap_or_default() {
query = query.before(page_after).limit_and_offset_from_end();
} else {
query = query.after(page_after);
}
// Sorting by saved desc
query = query
.then_desc(key::saved)
// Tie breaker
.then_desc(key::id);
let res = query.load::<PersonContentViewInternal>(conn).await?;
// Map the query results to the enum
let out = res.into_iter().filter_map(|u| u.map_to_enum()).collect();
Ok(out)
}
}
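// A paging sketch using the cursor API defined above (assumes a non-empty
// first page; error handling elided):
//
//   let page1 = PersonSavedCombinedQuery::default().list(pool, &user).await?;
//   if let Some(last) = page1.last() {
//     let cursor = PersonSavedCombinedPaginationCursor::after_post(last);
//     let page2 = PersonSavedCombinedQuery {
//       page_after: Some(cursor.read(pool).await?),
//       ..Default::default()
//     }
//     .list(pool, &user)
//     .await?;
//   }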
#[cfg(test)]
#[expect(clippy::indexing_slicing)]
mod tests {
use crate::{
person_saved_combined_view::PersonSavedCombinedQuery,
structs::{LocalUserView, PersonContentCombinedView},
};
use lemmy_db_schema::{
source::{
comment::{Comment, CommentInsertForm, CommentSaved, CommentSavedForm},
community::{Community, CommunityInsertForm},
instance::Instance,
local_user::{LocalUser, LocalUserInsertForm},
local_user_vote_display_mode::LocalUserVoteDisplayMode,
person::{Person, PersonInsertForm},
post::{Post, PostInsertForm, PostSaved, PostSavedForm},
},
traits::{Crud, Saveable},
utils::{build_db_pool_for_tests, DbPool},
};
use lemmy_utils::error::LemmyResult;
use pretty_assertions::assert_eq;
use serial_test::serial;
struct Data {
instance: Instance,
timmy: Person,
timmy_view: LocalUserView,
sara: Person,
timmy_post: Post,
sara_comment: Comment,
sara_comment_2: Comment,
}
async fn init_data(pool: &mut DbPool<'_>) -> LemmyResult<Data> {
let instance = Instance::read_or_create(pool, "my_domain.tld".to_string()).await?;
let timmy_form = PersonInsertForm::test_form(instance.id, "timmy_pcv");
let timmy = Person::create(pool, &timmy_form).await?;
let timmy_local_user_form = LocalUserInsertForm::test_form(timmy.id);
let timmy_local_user = LocalUser::create(pool, &timmy_local_user_form, vec![]).await?;
let timmy_view = LocalUserView {
local_user: timmy_local_user,
local_user_vote_display_mode: LocalUserVoteDisplayMode::default(),
person: timmy.clone(),
counts: Default::default(),
};
let sara_form = PersonInsertForm::test_form(instance.id, "sara_pcv");
let sara = Person::create(pool, &sara_form).await?;
let community_form = CommunityInsertForm::new(
instance.id,
"test community pcv".to_string(),
"nada".to_owned(),
"pubkey".to_string(),
);
let community = Community::create(pool, &community_form).await?;
let timmy_post_form = PostInsertForm::new("timmy post prv".into(), timmy.id, community.id);
let timmy_post = Post::create(pool, &timmy_post_form).await?;
let timmy_post_form_2 = PostInsertForm::new("timmy post prv 2".into(), timmy.id, community.id);
let timmy_post_2 = Post::create(pool, &timmy_post_form_2).await?;
let sara_post_form = PostInsertForm::new("sara post prv".into(), sara.id, community.id);
let _sara_post = Post::create(pool, &sara_post_form).await?;
let timmy_comment_form =
CommentInsertForm::new(timmy.id, timmy_post.id, "timmy comment prv".into());
let _timmy_comment = Comment::create(pool, &timmy_comment_form, None).await?;
let sara_comment_form =
CommentInsertForm::new(sara.id, timmy_post.id, "sara comment prv".into());
let sara_comment = Comment::create(pool, &sara_comment_form, None).await?;
let sara_comment_form_2 =
CommentInsertForm::new(sara.id, timmy_post_2.id, "sara comment prv 2".into());
let sara_comment_2 = Comment::create(pool, &sara_comment_form_2, None).await?;
Ok(Data {
instance,
timmy,
timmy_view,
sara,
timmy_post,
sara_comment,
sara_comment_2,
})
}
async fn cleanup(data: Data, pool: &mut DbPool<'_>) -> LemmyResult<()> {
Instance::delete(pool, data.instance.id).await?;
Ok(())
}
#[tokio::test]
#[serial]
async fn test_combined() -> LemmyResult<()> {
let pool = &build_db_pool_for_tests();
let pool = &mut pool.into();
let data = init_data(pool).await?;
// Do a batch read of timmy saved
let timmy_saved = PersonSavedCombinedQuery::default()
.list(pool, &data.timmy_view)
.await?;
assert_eq!(0, timmy_saved.len());
// Save a few things
let save_sara_comment_2 =
CommentSavedForm::new(data.sara_comment_2.id, data.timmy_view.person.id);
CommentSaved::save(pool, &save_sara_comment_2).await?;
let save_sara_comment = CommentSavedForm::new(data.sara_comment.id, data.timmy_view.person.id);
CommentSaved::save(pool, &save_sara_comment).await?;
let post_save_form = PostSavedForm::new(data.timmy_post.id, data.timmy.id);
PostSaved::save(pool, &post_save_form).await?;
let timmy_saved = PersonSavedCombinedQuery::default()
.list(pool, &data.timmy_view)
.await?;
assert_eq!(3, timmy_saved.len());
// Make sure the types and order are correct
if let PersonContentCombinedView::Post(v) = &timmy_saved[0] {
assert_eq!(data.timmy_post.id, v.post.id);
assert_eq!(data.timmy.id, v.post.creator_id);
} else {
panic!("wrong type");
}
if let PersonContentCombinedView::Comment(v) = &timmy_saved[1] {
assert_eq!(data.sara_comment.id, v.comment.id);
assert_eq!(data.sara.id, v.comment.creator_id);
} else {
panic!("wrong type");
}
if let PersonContentCombinedView::Comment(v) = &timmy_saved[2] {
assert_eq!(data.sara_comment_2.id, v.comment.id);
assert_eq!(data.sara.id, v.comment.creator_id);
} else {
panic!("wrong type");
}
// Try unsaving 2 things
CommentSaved::unsave(pool, &save_sara_comment).await?;
PostSaved::unsave(pool, &post_save_form).await?;
let timmy_saved = PersonSavedCombinedQuery::default()
.list(pool, &data.timmy_view)
.await?;
assert_eq!(1, timmy_saved.len());
if let PersonContentCombinedView::Comment(v) = &timmy_saved[0] {
assert_eq!(data.sara_comment_2.id, v.comment.id);
assert_eq!(data.sara.id, v.comment.creator_id);
} else {
panic!("wrong type");
}
cleanup(data, pool).await?;
Ok(())
}
}

View file

@ -5,9 +5,7 @@ use diesel::{
pg::Pg,
query_builder::AsQuery,
result::Error,
sql_types,
BoolExpressionMethods,
BoxableExpression,
ExpressionMethods,
JoinOnDsl,
NullableExpressionMethods,
@ -97,18 +95,15 @@ fn queries<'a>() -> Queries<
// If we want to filter by post tag we will have to add
// separate logic below since this subquery can't affect filtering, but it is simple (`WHERE
// exists (select 1 from post_community_post_tags where community_post_tag_id in (1,2,3,4))`).
let post_tags: Box<
dyn BoxableExpression<_, Pg, SqlType = sql_types::Nullable<sql_types::Json>>,
> = Box::new(
post_tag::table
.inner_join(tag::table)
.select(diesel::dsl::sql::<diesel::sql_types::Json>(
"json_agg(tag.*)",
))
.filter(post_tag::post_id.eq(post_aggregates::post_id))
.filter(tag::deleted.eq(false))
.single_value(),
);
let post_tags = post_tag::table
.inner_join(tag::table)
.select(diesel::dsl::sql::<diesel::sql_types::Json>(
"json_agg(tag.*)",
))
.filter(post_tag::post_id.eq(post_aggregates::post_id))
.filter(tag::deleted.eq(false))
.single_value();
query
.inner_join(person::table)
.inner_join(community::table)
@ -311,21 +306,13 @@ fn queries<'a>() -> Queries<
query = query.filter(post_aggregates::comments.eq(0));
};
// If it's saved only, then filter, and order by the saved time, not the comment creation time.
if o.saved_only.unwrap_or_default() {
query = query
.filter(post_actions::saved.is_not_null())
.then_order_by(post_actions::saved.desc());
}
if o.read_only.unwrap_or_default() {
query = query
.filter(post_actions::read.is_not_null())
.then_order_by(post_actions::read.desc())
}
// Only hide the read posts if saved_only is false. Otherwise people with the hide_read
// setting won't be able to see saved posts.
else if !o.show_read.unwrap_or(o.local_user.show_read_posts()) {
if !o.show_read.unwrap_or(o.local_user.show_read_posts()) {
// Do not hide read posts when it is a user profile view
// Or, only hide read posts on non-profile views
if o.creator_id.is_none() {
@ -515,7 +502,6 @@ pub struct PostQuery<'a> {
pub local_user: Option<&'a LocalUser>,
pub search_term: Option<String>,
pub url_only: Option<bool>,
pub saved_only: Option<bool>,
pub read_only: Option<bool>,
pub liked_only: Option<bool>,
pub disliked_only: Option<bool>,
@ -676,14 +662,12 @@ mod tests {
PostLikeForm,
PostRead,
PostReadForm,
PostSaved,
PostSavedForm,
PostUpdateForm,
},
site::Site,
tag::{PostTagInsertForm, Tag, TagInsertForm},
},
traits::{Bannable, Blockable, Crud, Followable, Joinable, Likeable, Saveable},
traits::{Bannable, Blockable, Crud, Followable, Joinable, Likeable},
utils::{build_db_pool, get_conn, uplete, ActualDbPool, DbPool, RANK_DEFAULT},
CommunityVisibility,
PostSortType,
@ -1215,34 +1199,6 @@ mod tests {
Ok(())
}
#[test_context(Data)]
#[tokio::test]
#[serial]
async fn post_listing_saved_only(data: &mut Data) -> LemmyResult<()> {
let pool = &data.pool();
let pool = &mut pool.into();
// Save only the bot post
// The saved_only should only show the bot post
let post_save_form =
PostSavedForm::new(data.inserted_bot_post.id, data.local_user_view.person.id);
PostSaved::save(pool, &post_save_form).await?;
// Read the saved only
let read_saved_post_listing = PostQuery {
community_id: Some(data.inserted_community.id),
saved_only: Some(true),
..data.default_post_query()
}
.list(&data.site, pool)
.await?;
// This should only include the bot post, not the one you created
assert_eq!(vec![POST_BY_BOT], names(&read_saved_post_listing));
Ok(())
}
#[test_context(Data)]
#[tokio::test]
#[serial]

View file

@ -1,11 +1,14 @@
use crate::structs::{
CommentReportView,
LocalUserView,
PostReportView,
PrivateMessageReportView,
ReportCombinedPaginationCursor,
ReportCombinedView,
ReportCombinedViewInternal,
use crate::{
structs::{
CommentReportView,
LocalUserView,
PostReportView,
PrivateMessageReportView,
ReportCombinedPaginationCursor,
ReportCombinedView,
ReportCombinedViewInternal,
},
InternalToCombinedView,
};
use diesel::{
result::Error,
@ -153,9 +156,10 @@ impl ReportCombinedQuery {
user: &LocalUserView,
) -> LemmyResult<Vec<ReportCombinedView>> {
let my_person_id = user.local_user.person_id;
let report_creator = person::id;
let item_creator = aliases::person1.field(person::id);
let resolver = aliases::person2.field(person::id).nullable();
let conn = &mut get_conn(pool).await?;
// Notes: since the post_report_id and comment_report_id are optional columns,
@ -171,9 +175,9 @@ impl ReportCombinedQuery {
.inner_join(
person::table.on(
post_report::creator_id
.eq(person::id)
.or(comment_report::creator_id.eq(person::id))
.or(private_message_report::creator_id.eq(person::id)),
.eq(report_creator)
.or(comment_report::creator_id.eq(report_creator))
.or(private_message_report::creator_id.eq(report_creator)),
),
)
// The comment
@ -327,81 +331,84 @@ impl ReportCombinedQuery {
let res = query.load::<ReportCombinedViewInternal>(conn).await?;
// Map the query results to the enum
let out = res.into_iter().filter_map(map_to_enum).collect();
let out = res.into_iter().filter_map(|u| u.map_to_enum()).collect();
Ok(out)
}
}
/// Maps the combined DB row to an enum
fn map_to_enum(view: ReportCombinedViewInternal) -> Option<ReportCombinedView> {
// Use a short alias
let v = view;
impl InternalToCombinedView for ReportCombinedViewInternal {
type CombinedView = ReportCombinedView;
if let (Some(post_report), Some(post), Some(community), Some(unread_comments), Some(counts)) = (
v.post_report,
v.post.clone(),
v.community.clone(),
v.post_unread_comments,
v.post_counts,
) {
Some(ReportCombinedView::Post(PostReportView {
post_report,
post,
community,
unread_comments,
counts,
creator: v.report_creator,
post_creator: v.item_creator,
creator_banned_from_community: v.item_creator_banned_from_community,
creator_is_moderator: v.item_creator_is_moderator,
creator_is_admin: v.item_creator_is_admin,
creator_blocked: v.item_creator_blocked,
subscribed: v.subscribed,
saved: v.post_saved,
read: v.post_read,
hidden: v.post_hidden,
my_vote: v.my_post_vote,
resolver: v.resolver,
}))
} else if let (Some(comment_report), Some(comment), Some(counts), Some(post), Some(community)) = (
v.comment_report,
v.comment,
v.comment_counts,
v.post.clone(),
v.community.clone(),
) {
Some(ReportCombinedView::Comment(CommentReportView {
comment_report,
comment,
counts,
post,
community,
creator: v.report_creator,
comment_creator: v.item_creator,
creator_banned_from_community: v.item_creator_banned_from_community,
creator_is_moderator: v.item_creator_is_moderator,
creator_is_admin: v.item_creator_is_admin,
creator_blocked: v.item_creator_blocked,
subscribed: v.subscribed,
saved: v.comment_saved,
my_vote: v.my_comment_vote,
resolver: v.resolver,
}))
} else if let (Some(private_message_report), Some(private_message)) =
(v.private_message_report, v.private_message)
{
Some(ReportCombinedView::PrivateMessage(
PrivateMessageReportView {
private_message_report,
private_message,
fn map_to_enum(&self) -> Option<Self::CombinedView> {
// Use a short alias
let v = self.clone();
if let (Some(post_report), Some(post), Some(community), Some(unread_comments), Some(counts)) = (
v.post_report,
v.post.clone(),
v.community.clone(),
v.post_unread_comments,
v.post_counts,
) {
Some(ReportCombinedView::Post(PostReportView {
post_report,
post,
community,
unread_comments,
counts,
creator: v.report_creator,
private_message_creator: v.item_creator,
post_creator: v.item_creator,
creator_banned_from_community: v.item_creator_banned_from_community,
creator_is_moderator: v.item_creator_is_moderator,
creator_is_admin: v.item_creator_is_admin,
creator_blocked: v.item_creator_blocked,
subscribed: v.subscribed,
saved: v.post_saved,
read: v.post_read,
hidden: v.post_hidden,
my_vote: v.my_post_vote,
resolver: v.resolver,
},
))
} else {
None
}))
} else if let (Some(comment_report), Some(comment), Some(counts), Some(post), Some(community)) = (
v.comment_report,
v.comment,
v.comment_counts,
v.post,
v.community,
) {
Some(ReportCombinedView::Comment(CommentReportView {
comment_report,
comment,
counts,
post,
community,
creator: v.report_creator,
comment_creator: v.item_creator,
creator_banned_from_community: v.item_creator_banned_from_community,
creator_is_moderator: v.item_creator_is_moderator,
creator_is_admin: v.item_creator_is_admin,
creator_blocked: v.item_creator_blocked,
subscribed: v.subscribed,
saved: v.comment_saved,
my_vote: v.my_comment_vote,
resolver: v.resolver,
}))
} else if let (Some(private_message_report), Some(private_message)) =
(v.private_message_report, v.private_message)
{
Some(ReportCombinedView::PrivateMessage(
PrivateMessageReportView {
private_message_report,
private_message,
creator: v.report_creator,
private_message_creator: v.item_creator,
resolver: v.resolver,
},
))
} else {
None
}
}
}

View file

@ -135,6 +135,18 @@ pub struct PaginationCursor(pub String);
#[cfg_attr(feature = "full", ts(export))]
pub struct ReportCombinedPaginationCursor(pub String);
/// like PaginationCursor but for the person_content_combined table
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, Hash)]
#[cfg_attr(feature = "full", derive(ts_rs::TS))]
#[cfg_attr(feature = "full", ts(export))]
pub struct PersonContentCombinedPaginationCursor(pub String);
/// like PaginationCursor but for the person_saved_combined table
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, Hash)]
#[cfg_attr(feature = "full", derive(ts_rs::TS))]
#[cfg_attr(feature = "full", ts(export))]
pub struct PersonSavedCombinedPaginationCursor(pub String);
#[skip_serializing_none]
#[derive(Debug, PartialEq, Serialize, Deserialize, Clone)]
#[cfg_attr(feature = "full", derive(TS, Queryable))]
@ -294,6 +306,47 @@ pub enum ReportCombinedView {
PrivateMessage(PrivateMessageReportView),
}
#[derive(Debug, PartialEq, Serialize, Deserialize, Clone)]
#[cfg_attr(feature = "full", derive(Queryable))]
#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))]
/// A combined person_content view
pub struct PersonContentViewInternal {
// Post-specific
pub post_counts: PostAggregates,
pub post_unread_comments: i64,
pub post_saved: bool,
pub post_read: bool,
pub post_hidden: bool,
pub my_post_vote: Option<i16>,
pub image_details: Option<ImageDetails>,
pub post_tags: PostTags,
// Comment-specific
pub comment: Option<Comment>,
pub comment_counts: Option<CommentAggregates>,
pub comment_saved: bool,
pub my_comment_vote: Option<i16>,
// Shared
pub post: Post,
pub community: Community,
pub item_creator: Person,
pub subscribed: SubscribedType,
pub item_creator_is_admin: bool,
pub item_creator_is_moderator: bool,
pub item_creator_banned_from_community: bool,
pub item_creator_blocked: bool,
pub banned_from_community: bool,
}
#[derive(Debug, PartialEq, Serialize, Deserialize, Clone)]
#[cfg_attr(feature = "full", derive(TS))]
#[cfg_attr(feature = "full", ts(export))]
// Use serde's internal tagging, to make it easier to work with JavaScript libraries
#[serde(tag = "type_")]
pub enum PersonContentCombinedView {
Post(PostView),
Comment(CommentView),
}
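// With internal tagging, a serialized Post variant looks roughly like this
// (field list abridged):
//
//   { "type_": "Post", "post": { ... }, "community": { ... }, ... }
//
// and a Comment variant carries "type_": "Comment".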
#[derive(Clone, serde::Serialize, serde::Deserialize, Debug, PartialEq, Default)]
#[cfg_attr(feature = "full", derive(TS, FromSqlRow, AsExpression))]
#[serde(transparent)]

View file

@ -89,6 +89,7 @@ markdown-it-block-spoiler = "1.0.0"
markdown-it-sub = "1.0.0"
markdown-it-sup = "1.0.0"
markdown-it-ruby = "1.0.0"
markdown-it-footnote = "0.2.0"
moka = { workspace = true, optional = true }
[dev-dependencies]

View file

@ -14,6 +14,7 @@ static MARKDOWN_PARSER: LazyLock<MarkdownIt> = LazyLock::new(|| {
markdown_it_sub::add(&mut parser);
markdown_it_sup::add(&mut parser);
markdown_it_ruby::add(&mut parser);
markdown_it_footnote::add(&mut parser);
link_rule::add(&mut parser);
parser
@ -122,6 +123,17 @@ mod tests {
"ruby text",
"{漢|Kan}{字|ji}",
"<p><ruby>漢<rp>(</rp><rt>Kan</rt><rp>)</rp></ruby><ruby>字<rp>(</rp><rt>ji</rt><rp>)</rp></ruby></p>\n"
),
(
"footnotes",
"Bold claim.[^1]\n\n[^1]: example.com",
"<p>Bold claim.<sup class=\"footnote-ref\"><a href=\"#fn1\" id=\"fnref1\">[1]</a></sup></p>\n\
<hr class=\"footnotes-sep\" />\n\
<section class=\"footnotes\">\n\
<ol class=\"footnotes-list\">\n\
<li id=\"fn1\" class=\"footnote-item\">\n\
<p>example.com <a href=\"#fnref1\" class=\"footnote-backref\">↩︎</a></p>\n\
</li>\n</ol>\n</section>\n"
)
];

View file

@ -0,0 +1,2 @@
DROP FUNCTION forbid_diesel_cli CASCADE;

View file

@ -0,0 +1,33 @@
-- This trigger prevents using the Diesel CLI to run or revert migrations, so the custom migration runner
-- can drop and recreate the `r` schema for new migrations.
--
-- This migration being separate from the next migration (created in the same PR) guarantees that the
-- Diesel CLI will fail to bring the number of pending migrations to 0, which is one of the conditions
-- required to skip running replaceable_schema.
--
-- If the Diesel CLI could run or revert migrations, this scenario would be possible:
--
-- Run `diesel migration redo` when the newest migration has a new table with triggers. End up with triggers
-- being dropped and not replaced because triggers are created outside of up.sql. The custom migration runner
-- sees that there are no pending migrations and the value in the `previously_run_sql` trigger is correct, so
-- it doesn't rebuild the `r` schema. There is now incorrect behavior but no error messages.
CREATE FUNCTION forbid_diesel_cli ()
RETURNS TRIGGER
LANGUAGE plpgsql
AS $$
BEGIN
IF NOT EXISTS (
SELECT
FROM
pg_locks
WHERE (locktype, pid, objid) = ('advisory', pg_backend_pid(), 0)) THEN
RAISE 'migrations must be managed using lemmy_server instead of diesel CLI';
END IF;
RETURN NULL;
END;
$$;
CREATE TRIGGER forbid_diesel_cli
BEFORE INSERT OR UPDATE OR DELETE OR TRUNCATE ON __diesel_schema_migrations
EXECUTE FUNCTION forbid_diesel_cli ();
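-- A sketch of a session the trigger permits (assumption: the custom migration
-- runner takes the session-level advisory lock with key 0 before touching
-- __diesel_schema_migrations):
--
--   SELECT pg_advisory_lock(0);
--   -- migrations may now modify __diesel_schema_migrations
--   SELECT pg_advisory_unlock(0);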

View file

@ -0,0 +1,2 @@
DROP TABLE previously_run_sql;

View file

@ -0,0 +1,12 @@
DROP SCHEMA IF EXISTS r CASCADE;
CREATE TABLE previously_run_sql (
-- For compatibility with Diesel
id boolean PRIMARY KEY,
-- Too big to be used as primary key
content text NOT NULL
);
INSERT INTO previously_run_sql (id, content)
VALUES (TRUE, '');

View file

@ -0,0 +1,4 @@
DROP TABLE person_content_combined;
DROP TABLE person_saved_combined;

View file

@ -0,0 +1,67 @@
-- Creates combined tables for
-- person_content: (comment, post)
-- person_saved: (comment, post)
CREATE TABLE person_content_combined (
id serial PRIMARY KEY,
published timestamptz NOT NULL,
post_id int UNIQUE REFERENCES post ON UPDATE CASCADE ON DELETE CASCADE,
comment_id int UNIQUE REFERENCES COMMENT ON UPDATE CASCADE ON DELETE CASCADE,
-- Make sure only one of the columns is not null
CHECK (num_nonnulls (post_id, comment_id) = 1)
);
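-- For example (hypothetical ids), the CHECK above rejects a row that sets
-- both columns, and one that sets neither:
--
--   INSERT INTO person_content_combined (published, post_id, comment_id)
--       VALUES (now(), 1, 1); -- fails: num_nonnulls = 2
--   INSERT INTO person_content_combined (published, post_id, comment_id)
--       VALUES (now(), NULL, NULL); -- fails: num_nonnulls = 0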
CREATE INDEX idx_person_content_combined_published ON person_content_combined (published DESC, id DESC);
-- Updating the history
INSERT INTO person_content_combined (published, post_id, comment_id)
SELECT
published,
id,
NULL::int
FROM
post
UNION ALL
SELECT
published,
NULL::int,
id
FROM
comment;
-- This one is special, because it uses the saved date, not the ordinary published date
CREATE TABLE person_saved_combined (
id serial PRIMARY KEY,
saved timestamptz NOT NULL,
person_id int NOT NULL REFERENCES person ON UPDATE CASCADE ON DELETE CASCADE,
post_id int UNIQUE REFERENCES post ON UPDATE CASCADE ON DELETE CASCADE,
comment_id int UNIQUE REFERENCES COMMENT ON UPDATE CASCADE ON DELETE CASCADE,
-- Make sure only one of the columns is not null
CHECK (num_nonnulls (post_id, comment_id) = 1)
);
CREATE INDEX idx_person_saved_combined_published ON person_saved_combined (saved DESC, id DESC);
CREATE INDEX idx_person_saved_combined ON person_saved_combined (person_id);
-- Updating the history
INSERT INTO person_saved_combined (saved, person_id, post_id, comment_id)
SELECT
saved,
person_id,
post_id,
NULL::int
FROM
post_actions
WHERE
saved IS NOT NULL
UNION ALL
SELECT
saved,
person_id,
NULL::int,
comment_id
FROM
comment_actions
WHERE
saved IS NOT NULL;

View file

@ -9,8 +9,8 @@ cd "$CWD/../"
source scripts/start_dev_db.sh
diesel migration run
pg_dump --no-owner --no-privileges --no-table-access-method --schema-only --no-sync -f schema.sqldump
cargo run --package lemmy_server -- migration --all run
pg_dump --no-owner --no-privileges --no-table-access-method --schema-only --exclude-schema=r --no-sync -f schema.sqldump
pg_ctl stop
rm -rf $PGDATA

View file

@ -31,6 +31,7 @@ use lemmy_api::{
list_banned::list_banned_users,
list_logins::list_logins,
list_media::list_media,
list_saved::list_person_saved,
login::login,
logout::logout,
notifications::{
@ -142,6 +143,7 @@ use lemmy_api_crud::{
};
use lemmy_apub::api::{
list_comments::list_comments,
list_person_content::list_person_content,
list_posts::list_posts,
read_community::get_community,
read_person::read_person,
@ -309,7 +311,8 @@ pub fn config(cfg: &mut ServiceConfig, rate_limit: &RateLimitCell) {
.route("/change_password", put().to(change_password))
.route("/totp/generate", post().to(generate_totp_secret))
.route("/totp/update", post().to(update_totp))
.route("/verify_email", post().to(verify_email)),
.route("/verify_email", post().to(verify_email))
.route("/saved", get().to(list_person_saved)),
)
.route("/account/settings/save", put().to(save_user_settings))
.service(
@ -349,7 +352,11 @@ pub fn config(cfg: &mut ServiceConfig, rate_limit: &RateLimitCell) {
),
)
// User actions
.route("/person", get().to(read_person))
.service(
scope("/person")
.route("", get().to(read_person))
.route("/content", get().to(list_person_content)),
)
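// Example requests against the routes above (a sketch; the API prefix comes
// from the enclosing scope, assumed here to be /api/v3, and the query
// parameters are illustrative):
//
//   GET /api/v3/person?username=timmy        -> read_person
//   GET /api/v3/person/content?type_=Posts   -> list_person_content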
// Admin Actions
.service(
scope("/admin")

View file

@ -17,7 +17,7 @@ use actix_web::{
HttpServer,
};
use actix_web_prom::PrometheusMetricsBuilder;
use clap::Parser;
use clap::{Parser, Subcommand};
use lemmy_api::sitemap::get_sitemap;
use lemmy_api_common::{
context::LemmyContext,
@ -35,7 +35,7 @@ use lemmy_apub::{
VerifyUrlData,
FEDERATION_HTTP_FETCH_LIMIT,
};
use lemmy_db_schema::{source::secret::Secret, utils::build_db_pool};
use lemmy_db_schema::{schema_setup, source::secret::Secret, utils::build_db_pool};
use lemmy_federate::{Opts, SendManager};
use lemmy_routes::{feeds, nodeinfo, webfinger};
use lemmy_utils::{
@ -104,6 +104,31 @@ pub struct CmdArgs {
/// If set, make sure to set --federate-process-index differently for each.
#[arg(long, default_value_t = 1, env = "LEMMY_FEDERATE_PROCESS_COUNT")]
federate_process_count: i32,
#[command(subcommand)]
subcommand: Option<CmdSubcommand>,
}
#[derive(Subcommand, Debug)]
enum CmdSubcommand {
/// Run or revert database migrations, then exit.
Migration {
#[command(subcommand)]
subcommand: MigrationSubcommand,
/// Stop after there are no remaining migrations.
#[arg(long, default_value_t = false)]
all: bool,
/// Stop after the given number of migrations.
#[arg(long, default_value_t = 1)]
number: u64,
},
}
#[derive(Subcommand, Debug)]
enum MigrationSubcommand {
/// Run up.sql for pending migrations, oldest to newest.
Run,
/// Run down.sql for non-pending migrations, newest to oldest.
Revert,
}
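// Usage sketch (flag spelling per the definitions above; `lemmy_server` is
// the built binary):
//
//   lemmy_server migration run                # run the oldest pending migration
//   lemmy_server migration --all run          # run every pending migration
//   lemmy_server migration --number 2 revert  # revert the 2 newest migrations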
/// Placing the main function in lib.rs allows other crates to import it and embed Lemmy
@ -111,6 +136,26 @@ pub async fn start_lemmy_server(args: CmdArgs) -> LemmyResult<()> {
// Print version number to log
println!("Starting Lemmy v{VERSION}");
if let Some(CmdSubcommand::Migration {
subcommand,
all,
number,
}) = args.subcommand
{
let mut options = match subcommand {
MigrationSubcommand::Run => schema_setup::Options::default().run(),
MigrationSubcommand::Revert => schema_setup::Options::default().revert(),
};
if !all {
options = options.limit(number);
}
schema_setup::run(options)?;
return Ok(());
}
// return error 503 while running db migrations and startup tasks
let mut startup_server_handle = None;
if !args.disable_http_server {
@ -187,10 +232,11 @@ pub async fn start_lemmy_server(args: CmdArgs) -> LemmyResult<()> {
request_data.reset_request_count(),
));
let scheduled_tasks = (!args.disable_scheduled_tasks).then(|| {
if !args.disable_scheduled_tasks {
// Schedules various cleanup tasks for the DB
tokio::task::spawn(scheduled_tasks::setup(request_data.reset_request_count()))
});
let _scheduled_tasks =
tokio::task::spawn(scheduled_tasks::setup(request_data.reset_request_count()));
}
let server = if !args.disable_http_server {
if let Some(startup_server_handle) = startup_server_handle {
@ -228,17 +274,15 @@ pub async fn start_lemmy_server(args: CmdArgs) -> LemmyResult<()> {
let mut interrupt = tokio::signal::unix::signal(SignalKind::interrupt())?;
let mut terminate = tokio::signal::unix::signal(SignalKind::terminate())?;
if server.is_some() || federate.is_some() || scheduled_tasks.is_some() {
tokio::select! {
_ = tokio::signal::ctrl_c() => {
tracing::warn!("Received ctrl-c, shutting down gracefully...");
}
_ = interrupt.recv() => {
tracing::warn!("Received interrupt, shutting down gracefully...");
}
_ = terminate.recv() => {
tracing::warn!("Received terminate, shutting down gracefully...");
}
tokio::select! {
_ = tokio::signal::ctrl_c() => {
tracing::warn!("Received ctrl-c, shutting down gracefully...");
}
_ = interrupt.recv() => {
tracing::warn!("Received interrupt, shutting down gracefully...");
}
_ = terminate.recv() => {
tracing::warn!("Received terminate, shutting down gracefully...");
}
}
if let Some(server) = server {