Merge remote-tracking branch 'upstream/main' into optimize-get-random

This commit is contained in:
Dull Bananas 2024-12-18 15:04:14 -07:00
commit 9bedf28bdf
38 changed files with 730 additions and 368 deletions

2
Cargo.lock generated
View file

@ -2685,8 +2685,10 @@ dependencies = [
"lemmy_utils", "lemmy_utils",
"pretty_assertions", "pretty_assertions",
"serde", "serde",
"serde_json",
"serde_with", "serde_with",
"serial_test", "serial_test",
"test-context",
"tokio", "tokio",
"tracing", "tracing",
"ts-rs", "ts-rs",

View file

@ -74,6 +74,9 @@ test("Set some user settings, check that they are federated", async () => {
test("Delete user", async () => { test("Delete user", async () => {
let user = await registerUser(alpha, alphaUrl); let user = await registerUser(alpha, alphaUrl);
let user_profile = await getMyUser(user);
let person_id = user_profile.local_user_view.person.id;
let actor_id = user_profile.local_user_view.person.actor_id;
// make a local post and comment // make a local post and comment
let alphaCommunity = (await resolveCommunity(user, "main@lemmy-alpha:8541")) let alphaCommunity = (await resolveCommunity(user, "main@lemmy-alpha:8541"))
@ -101,6 +104,10 @@ test("Delete user", async () => {
expect(remoteComment).toBeDefined(); expect(remoteComment).toBeDefined();
await deleteUser(user); await deleteUser(user);
await expect(getMyUser(user)).rejects.toStrictEqual(Error("incorrect_login"));
await expect(getPersonDetails(user, person_id)).rejects.toStrictEqual(
Error("not_found"),
);
// check that posts and comments are marked as deleted on other instances. // check that posts and comments are marked as deleted on other instances.
// use get methods to avoid refetching from origin instance // use get methods to avoid refetching from origin instance
@ -118,6 +125,9 @@ test("Delete user", async () => {
(await getComments(alpha, remoteComment.post_id)).comments[0].comment (await getComments(alpha, remoteComment.post_id)).comments[0].comment
.deleted, .deleted,
).toBe(true); ).toBe(true);
await expect(
getPersonDetails(user, remoteComment.creator_id),
).rejects.toStrictEqual(Error("not_found"));
}); });
test("Requests with invalid auth should be treated as unauthenticated", async () => { test("Requests with invalid auth should be treated as unauthenticated", async () => {

View file

@ -1,11 +1,7 @@
{ {
# settings related to the postgresql database # settings related to the postgresql database
database: { database: {
# Configure the database by specifying a URI # Configure the database by specifying URI pointing to a postgres instance
#
# This is the preferred method to specify database connection details since
# it is the most flexible.
# Connection URI pointing to a postgres instance
# #
# This example uses peer authentication to obviate the need for creating, # This example uses peer authentication to obviate the need for creating,
# configuring, and managing passwords. # configuring, and managing passwords.
@ -14,25 +10,7 @@
# PostgreSQL's documentation. # PostgreSQL's documentation.
# #
# [0]: https://www.postgresql.org/docs/current/libpq-connect.html#id-1.7.3.8.3.6 # [0]: https://www.postgresql.org/docs/current/libpq-connect.html#id-1.7.3.8.3.6
uri: "postgresql:///lemmy?user=lemmy&host=/var/run/postgresql" connection: "postgres://lemmy:password@localhost:5432/lemmy"
# or
# Configure the database by specifying parts of a URI
#
# Note that specifying the `uri` field should be preferred since it provides
# greater control over how the connection is made. This merely exists for
# backwards-compatibility.
# Username to connect to postgres
user: "string"
# Password to connect to postgres
password: "string"
# Host where postgres is running
host: "string"
# Port where postgres can be accessed
port: 123
# Name of the postgres database for lemmy
database: "string"
# Maximum number of active sql connections # Maximum number of active sql connections
pool_size: 30 pool_size: 30
} }

View file

@ -110,7 +110,7 @@ pub async fn ban_from_community(
ModBanFromCommunity::create(&mut context.pool(), &form).await?; ModBanFromCommunity::create(&mut context.pool(), &form).await?;
let person_view = PersonView::read(&mut context.pool(), data.person_id).await?; let person_view = PersonView::read(&mut context.pool(), data.person_id, false).await?;
ActivityChannel::submit_activity( ActivityChannel::submit_activity(
SendActivityData::BanFromCommunity { SendActivityData::BanFromCommunity {

View file

@ -88,7 +88,7 @@ pub async fn ban_from_site(
ModBan::create(&mut context.pool(), &form).await?; ModBan::create(&mut context.pool(), &form).await?;
let person_view = PersonView::read(&mut context.pool(), person.id).await?; let person_view = PersonView::read(&mut context.pool(), person.id, false).await?;
ban_nonlocal_user_from_local_communities( ban_nonlocal_user_from_local_communities(
&local_user_view, &local_user_view,

View file

@ -48,7 +48,7 @@ pub async fn user_block_person(
.with_lemmy_type(LemmyErrorType::PersonBlockAlreadyExists)?; .with_lemmy_type(LemmyErrorType::PersonBlockAlreadyExists)?;
} }
let person_view = PersonView::read(&mut context.pool(), target_id).await?; let person_view = PersonView::read(&mut context.pool(), target_id, false).await?;
Ok(Json(BlockPersonResponse { Ok(Json(BlockPersonResponse {
person_view, person_view,
blocked: data.block, blocked: data.block,

View file

@ -1,5 +1,5 @@
use lemmy_db_schema::{ use lemmy_db_schema::{
newtypes::{CommentId, CommunityId, DbUrl, LanguageId, PostId, PostReportId}, newtypes::{CommentId, CommunityId, DbUrl, LanguageId, PostId, PostReportId, TagId},
ListingType, ListingType,
PostFeatureType, PostFeatureType,
PostSortType, PostSortType,
@ -37,6 +37,8 @@ pub struct CreatePost {
/// Instead of fetching a thumbnail, use a custom one. /// Instead of fetching a thumbnail, use a custom one.
#[cfg_attr(feature = "full", ts(optional))] #[cfg_attr(feature = "full", ts(optional))]
pub custom_thumbnail: Option<String>, pub custom_thumbnail: Option<String>,
#[cfg_attr(feature = "full", ts(optional))]
pub tags: Option<Vec<TagId>>,
/// Time when this post should be scheduled. Null means publish immediately. /// Time when this post should be scheduled. Null means publish immediately.
#[cfg_attr(feature = "full", ts(optional))] #[cfg_attr(feature = "full", ts(optional))]
pub scheduled_publish_time: Option<i64>, pub scheduled_publish_time: Option<i64>,
@ -164,6 +166,8 @@ pub struct EditPost {
/// Instead of fetching a thumbnail, use a custom one. /// Instead of fetching a thumbnail, use a custom one.
#[cfg_attr(feature = "full", ts(optional))] #[cfg_attr(feature = "full", ts(optional))]
pub custom_thumbnail: Option<String>, pub custom_thumbnail: Option<String>,
#[cfg_attr(feature = "full", ts(optional))]
pub tags: Option<Vec<TagId>>,
/// Time when this post should be scheduled. Null means publish immediately. /// Time when this post should be scheduled. Null means publish immediately.
#[cfg_attr(feature = "full", ts(optional))] #[cfg_attr(feature = "full", ts(optional))]
pub scheduled_publish_time: Option<i64>, pub scheduled_publish_time: Option<i64>,

View file

@ -51,9 +51,11 @@ pub fn client_builder(settings: &Settings) -> ClientBuilder {
#[tracing::instrument(skip_all)] #[tracing::instrument(skip_all)]
pub async fn fetch_link_metadata(url: &Url, context: &LemmyContext) -> LemmyResult<LinkMetadata> { pub async fn fetch_link_metadata(url: &Url, context: &LemmyContext) -> LemmyResult<LinkMetadata> {
info!("Fetching site metadata for url: {}", url); info!("Fetching site metadata for url: {}", url);
// We only fetch the first 64kB of data in order to not waste bandwidth especially for large // We only fetch the first MB of data in order to not waste bandwidth especially for large
// binary files // binary files. This high limit is particularly needed for youtube, which includes a lot of
let bytes_to_fetch = 64 * 1024; // javascript code before the opengraph tags. Mastodon also uses a 1 MB limit:
// https://github.com/mastodon/mastodon/blob/295ad6f19a016b3f16e1201ffcbb1b3ad6b455a2/app/lib/request.rb#L213
let bytes_to_fetch = 1024 * 1024;
let response = context let response = context
.client() .client()
.get(url.as_str()) .get(url.as_str())

View file

@ -123,8 +123,6 @@ pub fn is_admin(local_user_view: &LocalUserView) -> LemmyResult<()> {
check_user_valid(&local_user_view.person)?; check_user_valid(&local_user_view.person)?;
if !local_user_view.local_user.admin { if !local_user_view.local_user.admin {
Err(LemmyErrorType::NotAnAdmin)? Err(LemmyErrorType::NotAnAdmin)?
} else if local_user_view.person.banned {
Err(LemmyErrorType::Banned)?
} else { } else {
Ok(()) Ok(())
} }

View file

@ -1,5 +1,5 @@
use actix_web::web::{Data, Json}; use actix_web::web::{Data, Json};
use lemmy_api_common::{context::LemmyContext, site::MyUserInfo}; use lemmy_api_common::{context::LemmyContext, site::MyUserInfo, utils::check_user_valid};
use lemmy_db_schema::source::{ use lemmy_db_schema::source::{
actor_language::LocalUserLanguage, actor_language::LocalUserLanguage,
community_block::CommunityBlock, community_block::CommunityBlock,
@ -15,6 +15,8 @@ pub async fn get_my_user(
local_user_view: LocalUserView, local_user_view: LocalUserView,
context: Data<LemmyContext>, context: Data<LemmyContext>,
) -> LemmyResult<Json<MyUserInfo>> { ) -> LemmyResult<Json<MyUserInfo>> {
check_user_valid(&local_user_view.person)?;
// Build the local user with parallel queries and add it to site response // Build the local user with parallel queries and add it to site response
let person_id = local_user_view.person.id; let person_id = local_user_view.person.id;
let local_user_id = local_user_view.local_user.id; let local_user_id = local_user_view.local_user.id;

View file

@ -4,7 +4,7 @@ use actix_web::web::{Json, Query};
use lemmy_api_common::{ use lemmy_api_common::{
context::LemmyContext, context::LemmyContext,
person::{GetPersonDetails, GetPersonDetailsResponse}, person::{GetPersonDetails, GetPersonDetailsResponse},
utils::{check_private_instance, read_site_for_actor}, utils::{check_private_instance, is_admin, read_site_for_actor},
}; };
use lemmy_db_schema::{source::person::Person, utils::post_to_comment_sort_type}; use lemmy_db_schema::{source::person::Person, utils::post_to_comment_sort_type};
use lemmy_db_views::{ use lemmy_db_views::{
@ -45,7 +45,11 @@ pub async fn read_person(
// You don't need to return settings for the user, since this comes back with GetSite // You don't need to return settings for the user, since this comes back with GetSite
// `my_user` // `my_user`
let person_view = PersonView::read(&mut context.pool(), person_details_id).await?; let is_admin = local_user_view
.as_ref()
.map(|l| is_admin(l).is_ok())
.unwrap_or_default();
let person_view = PersonView::read(&mut context.pool(), person_details_id, is_admin).await?;
let sort = data.sort; let sort = data.sort;
let page = data.page; let page = data.page;

View file

@ -60,7 +60,7 @@ async fn convert_response(
} }
}, },
SearchableObjects::PersonOrCommunity(pc) => match *pc { SearchableObjects::PersonOrCommunity(pc) => match *pc {
UserOrCommunity::User(u) => res.person = Some(PersonView::read(pool, u.id).await?), UserOrCommunity::User(u) => res.person = Some(PersonView::read(pool, u.id, is_admin).await?),
UserOrCommunity::Community(c) => { UserOrCommunity::Community(c) => {
res.community = Some(CommunityView::read(pool, c.id, local_user.as_ref(), is_admin).await?) res.community = Some(CommunityView::read(pool, c.id, local_user.as_ref(), is_admin).await?)
} }

View file

@ -35,4 +35,5 @@ pub mod private_message_report;
pub mod registration_application; pub mod registration_application;
pub mod secret; pub mod secret;
pub mod site; pub mod site;
pub mod tag;
pub mod tagline; pub mod tagline;

View file

@ -0,0 +1,53 @@
use crate::{
newtypes::TagId,
schema::{post_tag, tag},
source::tag::{PostTagInsertForm, Tag, TagInsertForm},
traits::Crud,
utils::{get_conn, DbPool},
};
use diesel::{insert_into, result::Error, QueryDsl};
use diesel_async::RunQueryDsl;
use lemmy_utils::error::LemmyResult;
// Standard CRUD wiring for community post tags. Only `create` and `update` are
// implemented here; `read`/`delete` presumably come from `Crud` defaults or are
// provided elsewhere — TODO confirm against the trait definition.
#[async_trait]
impl Crud for Tag {
  type InsertForm = TagInsertForm;
  // Updates reuse the insert form: every tag column may be rewritten in place.
  type UpdateForm = TagInsertForm;
  type IdType = TagId;

  /// Insert a new tag row and return the stored record (with generated id).
  async fn create(pool: &mut DbPool<'_>, form: &Self::InsertForm) -> Result<Self, Error> {
    let conn = &mut get_conn(pool).await?;
    insert_into(tag::table)
      .values(form)
      .get_result::<Self>(conn)
      .await
  }

  /// Overwrite the tag identified by `pid` with the values in `form` and
  /// return the updated row.
  async fn update(
    pool: &mut DbPool<'_>,
    pid: TagId,
    form: &Self::UpdateForm,
  ) -> Result<Self, Error> {
    let conn = &mut get_conn(pool).await?;
    diesel::update(tag::table.find(pid))
      .set(form)
      .get_result::<Self>(conn)
      .await
  }
}
impl PostTagInsertForm {
  /// Bulk-inserts the given post/tag association rows into `post_tag`.
  ///
  /// Accepts the full slice so all associations for a post are written in a
  /// single statement rather than one insert per tag.
  pub async fn insert_tag_associations(
    pool: &mut DbPool<'_>,
    tags: &[PostTagInsertForm],
  ) -> LemmyResult<()> {
    let mut conn = get_conn(pool).await?;
    let query = insert_into(post_tag::table).values(tags);
    query.execute(&mut conn).await?;
    Ok(())
  }
}

View file

@ -283,3 +283,9 @@ impl InstanceId {
self.0 self.0
} }
} }
#[derive(Debug, Copy, Clone, Hash, Eq, PartialEq, Default, Serialize, Deserialize)]
#[cfg_attr(feature = "full", derive(DieselNewType, TS))]
#[cfg_attr(feature = "full", ts(export))]
/// The internal tag id.
///
/// Newtype over the `tag.id` integer column so tag ids cannot be confused
/// with other `i32` ids at the type level.
pub struct TagId(pub i32);

View file

@ -827,6 +827,14 @@ diesel::table! {
} }
} }
diesel::table! {
  /// Association table linking a post to the community tags assigned to it.
  /// Composite primary key: one row per (post, tag) pair.
  post_tag (post_id, tag_id) {
    post_id -> Int4,
    tag_id -> Int4,
    published -> Timestamptz,
  }
}
diesel::table! { diesel::table! {
private_message (id) { private_message (id) {
id -> Int4, id -> Int4,
@ -952,6 +960,18 @@ diesel::table! {
} }
} }
diesel::table! {
  /// Community-scoped post tags; `ap_id` is the federated identifier and
  /// `deleted` soft-deletes a tag without breaking existing associations.
  tag (id) {
    id -> Int4,
    ap_id -> Text,
    name -> Text,
    community_id -> Int4,
    published -> Timestamptz,
    updated -> Nullable<Timestamptz>,
    deleted -> Bool,
  }
}
diesel::table! { diesel::table! {
tagline (id) { tagline (id) {
id -> Int4, id -> Int4,
@ -1033,6 +1053,8 @@ diesel::joinable!(post_aggregates -> instance (instance_id));
diesel::joinable!(post_aggregates -> person (creator_id)); diesel::joinable!(post_aggregates -> person (creator_id));
diesel::joinable!(post_aggregates -> post (post_id)); diesel::joinable!(post_aggregates -> post (post_id));
diesel::joinable!(post_report -> post (post_id)); diesel::joinable!(post_report -> post (post_id));
diesel::joinable!(post_tag -> post (post_id));
diesel::joinable!(post_tag -> tag (tag_id));
diesel::joinable!(private_message_report -> private_message (private_message_id)); diesel::joinable!(private_message_report -> private_message (private_message_id));
diesel::joinable!(registration_application -> local_user (local_user_id)); diesel::joinable!(registration_application -> local_user (local_user_id));
diesel::joinable!(registration_application -> person (admin_id)); diesel::joinable!(registration_application -> person (admin_id));
@ -1040,6 +1062,7 @@ diesel::joinable!(site -> instance (instance_id));
diesel::joinable!(site_aggregates -> site (site_id)); diesel::joinable!(site_aggregates -> site (site_id));
diesel::joinable!(site_language -> language (language_id)); diesel::joinable!(site_language -> language (language_id));
diesel::joinable!(site_language -> site (site_id)); diesel::joinable!(site_language -> site (site_id));
diesel::joinable!(tag -> community (community_id));
diesel::allow_tables_to_appear_in_same_query!( diesel::allow_tables_to_appear_in_same_query!(
admin_allow_instance, admin_allow_instance,
@ -1099,6 +1122,7 @@ diesel::allow_tables_to_appear_in_same_query!(
post_actions, post_actions,
post_aggregates, post_aggregates,
post_report, post_report,
post_tag,
private_message, private_message,
private_message_report, private_message_report,
received_activity, received_activity,
@ -1109,5 +1133,6 @@ diesel::allow_tables_to_appear_in_same_query!(
site, site,
site_aggregates, site_aggregates,
site_language, site_language,
tag,
tagline, tagline,
); );

View file

@ -40,6 +40,7 @@ pub mod private_message_report;
pub mod registration_application; pub mod registration_application;
pub mod secret; pub mod secret;
pub mod site; pub mod site;
pub mod tag;
pub mod tagline; pub mod tagline;
/// Default value for columns like [community::Community.inbox_url] which are marked as serde(skip). /// Default value for columns like [community::Community.inbox_url] which are marked as serde(skip).

View file

@ -0,0 +1,57 @@
use crate::newtypes::{CommunityId, DbUrl, PostId, TagId};
#[cfg(feature = "full")]
use crate::schema::{post_tag, tag};
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use serde_with::skip_serializing_none;
#[cfg(feature = "full")]
use ts_rs::TS;
/// A tag that can be assigned to a post within a community.
/// The tag object is created by the community moderators.
/// The assignment happens by the post creator and can be updated by the community moderators.
///
/// A tag is a federatable object that gives additional context to another object, which can be
/// displayed and filtered on. Currently we only have community post tags: a tag that is
/// created by post authors as well as mods of a community to categorize a post. In the future we
/// may add more tag types; depending on the requirements, this will lead either to an expansion of
/// this table (`community_id` becoming optional, addition of a `tag_type` enum) or to a split of
/// this table / creation of new tables.
#[skip_serializing_none]
#[derive(Debug, PartialEq, Serialize, Deserialize, Clone)]
#[cfg_attr(feature = "full", derive(TS, Queryable, Selectable, Identifiable))]
#[cfg_attr(feature = "full", diesel(table_name = tag))]
#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))]
#[cfg_attr(feature = "full", ts(export))]
pub struct Tag {
  pub id: TagId,
  // Federated (ActivityPub) identifier of the tag.
  pub ap_id: DbUrl,
  pub name: String,
  /// the community that owns this tag
  pub community_id: CommunityId,
  pub published: DateTime<Utc>,
  #[cfg_attr(feature = "full", ts(optional))]
  pub updated: Option<DateTime<Utc>>,
  // Soft-delete flag; deleted tags are filtered out of queries rather than
  // removed, so existing post associations stay intact.
  pub deleted: bool,
}
/// Insert/update form for the `tag` table. Doubles as the `Crud::UpdateForm`
/// since every column may be rewritten.
#[derive(Debug, Clone)]
#[cfg_attr(feature = "full", derive(Insertable, AsChangeset))]
#[cfg_attr(feature = "full", diesel(table_name = tag))]
pub struct TagInsertForm {
  pub ap_id: DbUrl,
  pub name: String,
  pub community_id: CommunityId,
  // default now
  pub published: Option<DateTime<Utc>>,
  pub updated: Option<DateTime<Utc>>,
  pub deleted: bool,
}
/// Insert form for the `post_tag` association table; the `published`
/// timestamp is left to its database default.
#[derive(Debug, Clone)]
#[cfg_attr(feature = "full", derive(Insertable, AsChangeset))]
#[cfg_attr(feature = "full", diesel(table_name = post_tag))]
pub struct PostTagInsertForm {
  pub post_id: PostId,
  pub tag_id: TagId,
}

View file

@ -550,6 +550,11 @@ pub mod functions {
// really this function is variadic, this just adds the two-argument version // really this function is variadic, this just adds the two-argument version
define_sql_function!(fn coalesce<T: diesel::sql_types::SqlType + diesel::sql_types::SingleValue>(x: diesel::sql_types::Nullable<T>, y: T) -> T); define_sql_function!(fn coalesce<T: diesel::sql_types::SqlType + diesel::sql_types::SingleValue>(x: diesel::sql_types::Nullable<T>, y: T) -> T);
// Binds the postgres `json_agg` aggregate, used to collapse joined rows
// (e.g. a post's tags) into a single JSON array column.
// NOTE(review): `json_agg` yields SQL NULL when aggregating zero rows even
// though the return type here is declared non-nullable `Json`; callers appear
// to use it through a `.single_value()` subquery (which re-wraps it as
// Nullable) — confirm there is no direct non-nullable use.
define_sql_function! {
  #[aggregate]
  fn json_agg<T: diesel::sql_types::SqlType + diesel::sql_types::SingleValue>(obj: T) -> Json
}
define_sql_function!(#[sql_name = "coalesce"] fn coalesce_2_nullable<T: diesel::sql_types::SqlType + diesel::sql_types::SingleValue>(x: diesel::sql_types::Nullable<T>, y: diesel::sql_types::Nullable<T>) -> diesel::sql_types::Nullable<T>); define_sql_function!(#[sql_name = "coalesce"] fn coalesce_2_nullable<T: diesel::sql_types::SqlType + diesel::sql_types::SingleValue>(x: diesel::sql_types::Nullable<T>, y: diesel::sql_types::Nullable<T>) -> diesel::sql_types::Nullable<T>);
} }

View file

@ -35,6 +35,7 @@ diesel-async = { workspace = true, optional = true }
diesel_ltree = { workspace = true, optional = true } diesel_ltree = { workspace = true, optional = true }
serde = { workspace = true } serde = { workspace = true }
serde_with = { workspace = true } serde_with = { workspace = true }
serde_json = { workspace = true }
tracing = { workspace = true, optional = true } tracing = { workspace = true, optional = true }
ts-rs = { workspace = true, optional = true } ts-rs = { workspace = true, optional = true }
actix-web = { workspace = true, optional = true } actix-web = { workspace = true, optional = true }
@ -46,3 +47,4 @@ serial_test = { workspace = true }
tokio = { workspace = true } tokio = { workspace = true }
pretty_assertions = { workspace = true } pretty_assertions = { workspace = true }
url = { workspace = true } url = { workspace = true }
test-context = "0.3.0"

View file

@ -14,6 +14,8 @@ pub mod local_user_view;
#[cfg(feature = "full")] #[cfg(feature = "full")]
pub mod post_report_view; pub mod post_report_view;
#[cfg(feature = "full")] #[cfg(feature = "full")]
pub mod post_tags_view;
#[cfg(feature = "full")]
pub mod post_view; pub mod post_view;
#[cfg(feature = "full")] #[cfg(feature = "full")]
pub mod private_message_report_view; pub mod private_message_report_view;

View file

@ -0,0 +1,30 @@
//! see post_view.rs for the reason for this json decoding
use crate::structs::PostTags;
use diesel::{
deserialize::FromSql,
pg::{Pg, PgValue},
serialize::ToSql,
sql_types::{self, Nullable},
};
impl FromSql<Nullable<sql_types::Json>, Pg> for PostTags {
fn from_sql(bytes: PgValue) -> diesel::deserialize::Result<Self> {
let value = <serde_json::Value as FromSql<sql_types::Json, Pg>>::from_sql(bytes)?;
Ok(serde_json::from_value::<PostTags>(value)?)
}
fn from_nullable_sql(
bytes: Option<<Pg as diesel::backend::Backend>::RawValue<'_>>,
) -> diesel::deserialize::Result<Self> {
match bytes {
Some(bytes) => Self::from_sql(bytes),
None => Ok(Self { tags: vec![] }),
}
}
}
impl ToSql<Nullable<sql_types::Json>, Pg> for PostTags {
  /// Serialize `PostTags` to JSON and delegate to the `serde_json::Value`
  /// implementation. The intermediate `value` binding is required so the
  /// borrow passed to `to_sql` outlives the reborrowed output.
  fn to_sql(&self, out: &mut diesel::serialize::Output<Pg>) -> diesel::serialize::Result {
    let value = serde_json::to_value(self)?;
    <serde_json::Value as ToSql<sql_types::Json, Pg>>::to_sql(&value, &mut out.reborrow())
  }
}

View file

@ -5,7 +5,9 @@ use diesel::{
pg::Pg, pg::Pg,
query_builder::AsQuery, query_builder::AsQuery,
result::Error, result::Error,
sql_types,
BoolExpressionMethods, BoolExpressionMethods,
BoxableExpression,
ExpressionMethods, ExpressionMethods,
JoinOnDsl, JoinOnDsl,
NullableExpressionMethods, NullableExpressionMethods,
@ -32,6 +34,8 @@ use lemmy_db_schema::{
post, post,
post_actions, post_actions,
post_aggregates, post_aggregates,
post_tag,
tag,
}, },
source::{ source::{
community::{CommunityFollower, CommunityFollowerState}, community::{CommunityFollower, CommunityFollowerState},
@ -80,6 +84,31 @@ fn queries<'a>() -> Queries<
// TODO maybe this should go to localuser also // TODO maybe this should go to localuser also
let all_joins = move |query: post_aggregates::BoxedQuery<'a, Pg>, let all_joins = move |query: post_aggregates::BoxedQuery<'a, Pg>,
my_person_id: Option<PersonId>| { my_person_id: Option<PersonId>| {
// We fetch post tags by letting postgresql aggregate them internally in a subquery into JSON.
// This is a simple way to join m rows into n rows without duplicating the data and getting
// complex diesel types. In pure SQL you would usually do this either using a LEFT JOIN + then
// aggregating the results in the application code. But this results in a lot of duplicate
// data transferred (since each post will be returned once per tag that it has) and more
// complicated application code. The diesel docs suggest doing three separate sequential queries
// in this case (see https://diesel.rs/guides/relations.html#many-to-many-or-mn ): First fetch
// the posts, then fetch all relevant post-tag-association tuples from the db, and then fetch
// all the relevant tag objects.
//
// If we want to filter by post tag we will have to add
// separate logic below since this subquery can't affect filtering, but it is simple (`WHERE
// exists (select 1 from post_community_post_tags where community_post_tag_id in (1,2,3,4)`).
let post_tags: Box<
dyn BoxableExpression<_, Pg, SqlType = sql_types::Nullable<sql_types::Json>>,
> = Box::new(
post_tag::table
.inner_join(tag::table)
.select(diesel::dsl::sql::<diesel::sql_types::Json>(
"json_agg(tag.*)",
))
.filter(post_tag::post_id.eq(post_aggregates::post_id))
.filter(tag::deleted.eq(false))
.single_value(),
);
query query
.inner_join(person::table) .inner_join(person::table)
.inner_join(community::table) .inner_join(community::table)
@ -136,6 +165,7 @@ fn queries<'a>() -> Queries<
post_aggregates::comments.nullable() - post_actions::read_comments_amount.nullable(), post_aggregates::comments.nullable() - post_actions::read_comments_amount.nullable(),
post_aggregates::comments, post_aggregates::comments,
), ),
post_tags,
)) ))
}; };
@ -603,11 +633,13 @@ impl<'a> PostQuery<'a> {
} }
} }
#[allow(clippy::indexing_slicing)]
#[expect(clippy::expect_used)]
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use crate::{ use crate::{
post_view::{PaginationCursorData, PostQuery, PostView}, post_view::{PaginationCursorData, PostQuery, PostView},
structs::LocalUserView, structs::{LocalUserView, PostTags},
}; };
use chrono::Utc; use chrono::Utc;
use diesel_async::SimpleAsyncConnection; use diesel_async::SimpleAsyncConnection;
@ -651,29 +683,33 @@ mod tests {
PostUpdateForm, PostUpdateForm,
}, },
site::Site, site::Site,
tag::{PostTagInsertForm, Tag, TagInsertForm},
}, },
traits::{Bannable, Blockable, Crud, Followable, Joinable, Likeable, Saveable}, traits::{Bannable, Blockable, Crud, Followable, Joinable, Likeable, Saveable},
utils::{build_db_pool, build_db_pool_for_tests, get_conn, uplete, DbPool, RANK_DEFAULT}, utils::{build_db_pool, get_conn, uplete, ActualDbPool, DbPool, RANK_DEFAULT},
CommunityVisibility, CommunityVisibility,
PostSortType, PostSortType,
SubscribedType, SubscribedType,
}; };
use lemmy_utils::error::LemmyResult; use lemmy_utils::error::{LemmyErrorType, LemmyResult};
use pretty_assertions::assert_eq; use pretty_assertions::assert_eq;
use serial_test::serial; use serial_test::serial;
use std::time::{Duration, Instant}; use std::time::{Duration, Instant};
use test_context::{test_context, AsyncTestContext};
use url::Url; use url::Url;
const POST_WITH_ANOTHER_TITLE: &str = "Another title"; const POST_WITH_ANOTHER_TITLE: &str = "Another title";
const POST_BY_BLOCKED_PERSON: &str = "post by blocked person"; const POST_BY_BLOCKED_PERSON: &str = "post by blocked person";
const POST_BY_BOT: &str = "post by bot"; const POST_BY_BOT: &str = "post by bot";
const POST: &str = "post"; const POST: &str = "post";
const POST_WITH_TAGS: &str = "post with tags";
fn names(post_views: &[PostView]) -> Vec<&str> { fn names(post_views: &[PostView]) -> Vec<&str> {
post_views.iter().map(|i| i.post.name.as_str()).collect() post_views.iter().map(|i| i.post.name.as_str()).collect()
} }
struct Data { struct Data {
pool: ActualDbPool,
inserted_instance: Instance, inserted_instance: Instance,
local_user_view: LocalUserView, local_user_view: LocalUserView,
blocked_local_user_view: LocalUserView, blocked_local_user_view: LocalUserView,
@ -681,10 +717,19 @@ mod tests {
inserted_community: Community, inserted_community: Community,
inserted_post: Post, inserted_post: Post,
inserted_bot_post: Post, inserted_bot_post: Post,
inserted_post_with_tags: Post,
tag_1: Tag,
tag_2: Tag,
site: Site, site: Site,
} }
impl Data { impl Data {
fn pool(&self) -> ActualDbPool {
self.pool.clone()
}
pub fn pool2(&self) -> DbPool<'_> {
DbPool::Pool(&self.pool)
}
fn default_post_query(&self) -> PostQuery<'_> { fn default_post_query(&self) -> PostQuery<'_> {
PostQuery { PostQuery {
sort: Some(PostSortType::New), sort: Some(PostSortType::New),
@ -692,9 +737,10 @@ mod tests {
..Default::default() ..Default::default()
} }
} }
}
async fn init_data(pool: &mut DbPool<'_>) -> LemmyResult<Data> { async fn setup() -> LemmyResult<Data> {
let actual_pool = build_db_pool()?;
let pool = &mut (&actual_pool).into();
let inserted_instance = Instance::read_or_create(pool, "my_domain.tld".to_string()).await?; let inserted_instance = Instance::read_or_create(pool, "my_domain.tld".to_string()).await?;
let new_person = PersonInsertForm::test_form(inserted_instance.id, "tegan"); let new_person = PersonInsertForm::test_form(inserted_instance.id, "tegan");
@ -752,11 +798,38 @@ mod tests {
PersonBlock::block(pool, &person_block).await?; PersonBlock::block(pool, &person_block).await?;
// Two community post tags
let tag_1 = Tag::create(
pool,
&TagInsertForm {
ap_id: Url::parse(&format!("{}/tags/test_tag1", inserted_community.actor_id))?.into(),
name: "Test Tag 1".into(),
community_id: inserted_community.id,
published: None,
updated: None,
deleted: false,
},
)
.await?;
let tag_2 = Tag::create(
pool,
&TagInsertForm {
ap_id: Url::parse(&format!("{}/tags/test_tag2", inserted_community.actor_id))?.into(),
name: "Test Tag 2".into(),
community_id: inserted_community.id,
published: None,
updated: None,
deleted: false,
},
)
.await?;
// A sample post // A sample post
let new_post = PostInsertForm { let new_post = PostInsertForm {
language_id: Some(LanguageId(47)), language_id: Some(LanguageId(47)),
..PostInsertForm::new(POST.to_string(), inserted_person.id, inserted_community.id) ..PostInsertForm::new(POST.to_string(), inserted_person.id, inserted_community.id)
}; };
let inserted_post = Post::create(pool, &new_post).await?; let inserted_post = Post::create(pool, &new_post).await?;
let new_bot_post = PostInsertForm::new( let new_bot_post = PostInsertForm::new(
@ -766,6 +839,29 @@ mod tests {
); );
let inserted_bot_post = Post::create(pool, &new_bot_post).await?; let inserted_bot_post = Post::create(pool, &new_bot_post).await?;
// A sample post with tags
let new_post = PostInsertForm {
language_id: Some(LanguageId(47)),
..PostInsertForm::new(
POST_WITH_TAGS.to_string(),
inserted_person.id,
inserted_community.id,
)
};
let inserted_post_with_tags = Post::create(pool, &new_post).await?;
let inserted_tags = vec![
PostTagInsertForm {
post_id: inserted_post_with_tags.id,
tag_id: tag_1.id,
},
PostTagInsertForm {
post_id: inserted_post_with_tags.id,
tag_id: tag_2.id,
},
];
PostTagInsertForm::insert_tag_associations(pool, &inserted_tags).await?;
let local_user_view = LocalUserView { let local_user_view = LocalUserView {
local_user: inserted_local_user, local_user: inserted_local_user,
local_user_vote_display_mode: LocalUserVoteDisplayMode::default(), local_user_vote_display_mode: LocalUserVoteDisplayMode::default(),
@ -798,6 +894,7 @@ mod tests {
}; };
Ok(Data { Ok(Data {
pool: actual_pool,
inserted_instance, inserted_instance,
local_user_view, local_user_view,
blocked_local_user_view, blocked_local_user_view,
@ -805,16 +902,41 @@ mod tests {
inserted_community, inserted_community,
inserted_post, inserted_post,
inserted_bot_post, inserted_bot_post,
inserted_post_with_tags,
tag_1,
tag_2,
site, site,
}) })
} }
/// Removes everything created by `setup`. Only the first post's deletion
/// count is asserted; the remaining posts and tags are presumably removed
/// by FK cascades when their community/person/instance rows are deleted —
/// TODO confirm against the schema's ON DELETE rules.
async fn teardown(data: Data) -> LemmyResult<()> {
  let pool = &mut data.pool2();
  let num_deleted = Post::delete(pool, data.inserted_post.id).await?;
  Community::delete(pool, data.inserted_community.id).await?;
  Person::delete(pool, data.local_user_view.person.id).await?;
  Person::delete(pool, data.inserted_bot.id).await?;
  Person::delete(pool, data.blocked_local_user_view.person.id).await?;
  Instance::delete(pool, data.inserted_instance.id).await?;
  // Exactly one row should have been deleted for the explicit post delete.
  assert_eq!(1, num_deleted);

  Ok(())
}
}
// Hooks `Data` into the `test-context` crate: `setup` runs before and
// `teardown` after each `#[test_context(Data)]` test. Panicking via `expect`
// is acceptable here since these only run in the test harness.
impl AsyncTestContext for Data {
  async fn setup() -> Self {
    Data::setup().await.expect("setup failed")
  }
  async fn teardown(self) {
    Data::teardown(self).await.expect("teardown failed")
  }
}
#[test_context(Data)]
#[tokio::test] #[tokio::test]
#[serial] #[serial]
async fn post_listing_with_person() -> LemmyResult<()> { async fn post_listing_with_person(data: &mut Data) -> LemmyResult<()> {
let pool = &build_db_pool()?; let pool = &data.pool();
let pool = &mut pool.into(); let pool = &mut pool.into();
let mut data = init_data(pool).await?;
let local_user_form = LocalUserUpdateForm { let local_user_form = LocalUserUpdateForm {
show_bot_accounts: Some(false), show_bot_accounts: Some(false),
@ -823,12 +945,14 @@ mod tests {
LocalUser::update(pool, data.local_user_view.local_user.id, &local_user_form).await?; LocalUser::update(pool, data.local_user_view.local_user.id, &local_user_form).await?;
data.local_user_view.local_user.show_bot_accounts = false; data.local_user_view.local_user.show_bot_accounts = false;
let read_post_listing = PostQuery { let mut read_post_listing = PostQuery {
community_id: Some(data.inserted_community.id), community_id: Some(data.inserted_community.id),
..data.default_post_query() ..data.default_post_query()
} }
.list(&data.site, pool) .list(&data.site, pool)
.await?; .await?;
// remove tags post
read_post_listing.remove(0);
let post_listing_single_with_person = PostView::read( let post_listing_single_with_person = PostView::read(
pool, pool,
@ -838,7 +962,7 @@ mod tests {
) )
.await?; .await?;
let expected_post_listing_with_user = expected_post_view(&data, pool).await?; let expected_post_listing_with_user = expected_post_view(data, pool).await?;
// Should be only one person, IE the bot post, and blocked should be missing // Should be only one person, IE the bot post, and blocked should be missing
assert_eq!( assert_eq!(
@ -864,17 +988,19 @@ mod tests {
.list(&data.site, pool) .list(&data.site, pool)
.await?; .await?;
// should include bot post which has "undetermined" language // should include bot post which has "undetermined" language
assert_eq!(vec![POST_BY_BOT, POST], names(&post_listings_with_bots)); assert_eq!(
vec![POST_WITH_TAGS, POST_BY_BOT, POST],
cleanup(data, pool).await names(&post_listings_with_bots)
);
Ok(())
} }
#[test_context(Data)]
#[tokio::test] #[tokio::test]
#[serial] #[serial]
async fn post_listing_no_person() -> LemmyResult<()> { async fn post_listing_no_person(data: &mut Data) -> LemmyResult<()> {
let pool = &build_db_pool()?; let pool = &data.pool();
let pool = &mut pool.into(); let pool = &mut pool.into();
let data = init_data(pool).await?;
let read_post_listing_multiple_no_person = PostQuery { let read_post_listing_multiple_no_person = PostQuery {
community_id: Some(data.inserted_community.id), community_id: Some(data.inserted_community.id),
@ -887,32 +1013,31 @@ mod tests {
let read_post_listing_single_no_person = let read_post_listing_single_no_person =
PostView::read(pool, data.inserted_post.id, None, false).await?; PostView::read(pool, data.inserted_post.id, None, false).await?;
let expected_post_listing_no_person = expected_post_view(&data, pool).await?; let expected_post_listing_no_person = expected_post_view(data, pool).await?;
// Should be 2 posts, with the bot post, and the blocked // Should be 2 posts, with the bot post, and the blocked
assert_eq!( assert_eq!(
vec![POST_BY_BOT, POST, POST_BY_BLOCKED_PERSON], vec![POST_WITH_TAGS, POST_BY_BOT, POST, POST_BY_BLOCKED_PERSON],
names(&read_post_listing_multiple_no_person) names(&read_post_listing_multiple_no_person)
); );
assert_eq!( assert_eq!(
Some(&expected_post_listing_no_person), Some(&expected_post_listing_no_person),
read_post_listing_multiple_no_person.get(1) read_post_listing_multiple_no_person.get(2)
); );
assert_eq!( assert_eq!(
expected_post_listing_no_person, expected_post_listing_no_person,
read_post_listing_single_no_person read_post_listing_single_no_person
); );
Ok(())
cleanup(data, pool).await
} }
#[test_context(Data)]
#[tokio::test] #[tokio::test]
#[serial] #[serial]
async fn post_listing_title_only() -> LemmyResult<()> { async fn post_listing_title_only(data: &mut Data) -> LemmyResult<()> {
let pool = &build_db_pool()?; let pool = &data.pool();
let pool = &mut pool.into(); let pool = &mut pool.into();
let data = init_data(pool).await?;
// A post which contains the search them 'Post' not in the title (but in the body) // A post which contains the search them 'Post' not in the title (but in the body)
let new_post = PostInsertForm { let new_post = PostInsertForm {
@ -950,6 +1075,7 @@ mod tests {
assert_eq!( assert_eq!(
vec![ vec![
POST_WITH_ANOTHER_TITLE, POST_WITH_ANOTHER_TITLE,
POST_WITH_TAGS,
POST_BY_BOT, POST_BY_BOT,
POST, POST,
POST_BY_BLOCKED_PERSON POST_BY_BLOCKED_PERSON
@ -959,19 +1085,19 @@ mod tests {
// Should be 3 posts when we search for title only // Should be 3 posts when we search for title only
assert_eq!( assert_eq!(
vec![POST_BY_BOT, POST, POST_BY_BLOCKED_PERSON], vec![POST_WITH_TAGS, POST_BY_BOT, POST, POST_BY_BLOCKED_PERSON],
names(&read_post_listing_by_title_only) names(&read_post_listing_by_title_only)
); );
Post::delete(pool, inserted_post.id).await?; Post::delete(pool, inserted_post.id).await?;
cleanup(data, pool).await Ok(())
} }
#[test_context(Data)]
#[tokio::test] #[tokio::test]
#[serial] #[serial]
async fn post_listing_block_community() -> LemmyResult<()> { async fn post_listing_block_community(data: &mut Data) -> LemmyResult<()> {
let pool = &build_db_pool()?; let pool = &data.pool();
let pool = &mut pool.into(); let pool = &mut pool.into();
let data = init_data(pool).await?;
let community_block = CommunityBlockForm { let community_block = CommunityBlockForm {
person_id: data.local_user_view.person.id, person_id: data.local_user_view.person.id,
@ -989,15 +1115,15 @@ mod tests {
assert_eq!(read_post_listings_with_person_after_block, vec![]); assert_eq!(read_post_listings_with_person_after_block, vec![]);
CommunityBlock::unblock(pool, &community_block).await?; CommunityBlock::unblock(pool, &community_block).await?;
cleanup(data, pool).await Ok(())
} }
#[test_context(Data)]
#[tokio::test] #[tokio::test]
#[serial] #[serial]
async fn post_listing_like() -> LemmyResult<()> { async fn post_listing_like(data: &mut Data) -> LemmyResult<()> {
let pool = &build_db_pool()?; let pool = &data.pool();
let pool = &mut pool.into(); let pool = &mut pool.into();
let mut data = init_data(pool).await?;
let post_like_form = let post_like_form =
PostLikeForm::new(data.inserted_post.id, data.local_user_view.person.id, 1); PostLikeForm::new(data.inserted_post.id, data.local_user_view.person.id, 1);
@ -1020,7 +1146,7 @@ mod tests {
) )
.await?; .await?;
let mut expected_post_with_upvote = expected_post_view(&data, pool).await?; let mut expected_post_with_upvote = expected_post_view(data, pool).await?;
expected_post_with_upvote.my_vote = Some(1); expected_post_with_upvote.my_vote = Some(1);
expected_post_with_upvote.counts.score = 1; expected_post_with_upvote.counts.score = 1;
expected_post_with_upvote.counts.upvotes = 1; expected_post_with_upvote.counts.upvotes = 1;
@ -1033,26 +1159,27 @@ mod tests {
LocalUser::update(pool, data.local_user_view.local_user.id, &local_user_form).await?; LocalUser::update(pool, data.local_user_view.local_user.id, &local_user_form).await?;
data.local_user_view.local_user.show_bot_accounts = false; data.local_user_view.local_user.show_bot_accounts = false;
let read_post_listing = PostQuery { let mut read_post_listing = PostQuery {
community_id: Some(data.inserted_community.id), community_id: Some(data.inserted_community.id),
..data.default_post_query() ..data.default_post_query()
} }
.list(&data.site, pool) .list(&data.site, pool)
.await?; .await?;
read_post_listing.remove(0);
assert_eq!(vec![expected_post_with_upvote], read_post_listing); assert_eq!(vec![expected_post_with_upvote], read_post_listing);
let like_removed = let like_removed =
PostLike::remove(pool, data.local_user_view.person.id, data.inserted_post.id).await?; PostLike::remove(pool, data.local_user_view.person.id, data.inserted_post.id).await?;
assert_eq!(uplete::Count::only_deleted(1), like_removed); assert_eq!(uplete::Count::only_deleted(1), like_removed);
cleanup(data, pool).await Ok(())
} }
#[test_context(Data)]
#[tokio::test] #[tokio::test]
#[serial] #[serial]
async fn post_listing_liked_only() -> LemmyResult<()> { async fn post_listing_liked_only(data: &mut Data) -> LemmyResult<()> {
let pool = &build_db_pool()?; let pool = &data.pool();
let pool = &mut pool.into(); let pool = &mut pool.into();
let data = init_data(pool).await?;
// Like both the bot post, and your own // Like both the bot post, and your own
// The liked_only should not show your own post // The liked_only should not show your own post
@ -1087,15 +1214,15 @@ mod tests {
// Should be no posts // Should be no posts
assert_eq!(read_disliked_post_listing, vec![]); assert_eq!(read_disliked_post_listing, vec![]);
cleanup(data, pool).await Ok(())
} }
#[test_context(Data)]
#[tokio::test] #[tokio::test]
#[serial] #[serial]
async fn post_listing_saved_only() -> LemmyResult<()> { async fn post_listing_saved_only(data: &mut Data) -> LemmyResult<()> {
let pool = &build_db_pool()?; let pool = &data.pool();
let pool = &mut pool.into(); let pool = &mut pool.into();
let data = init_data(pool).await?;
// Save only the bot post // Save only the bot post
// The saved_only should only show the bot post // The saved_only should only show the bot post
@ -1115,15 +1242,15 @@ mod tests {
// This should only include the bot post, not the one you created // This should only include the bot post, not the one you created
assert_eq!(vec![POST_BY_BOT], names(&read_saved_post_listing)); assert_eq!(vec![POST_BY_BOT], names(&read_saved_post_listing));
cleanup(data, pool).await Ok(())
} }
#[test_context(Data)]
#[tokio::test] #[tokio::test]
#[serial] #[serial]
async fn creator_info() -> LemmyResult<()> { async fn creator_info(data: &mut Data) -> LemmyResult<()> {
let pool = &build_db_pool()?; let pool = &data.pool();
let pool = &mut pool.into(); let pool = &mut pool.into();
let data = init_data(pool).await?;
// Make one of the inserted persons a moderator // Make one of the inserted persons a moderator
let person_id = data.local_user_view.person.id; let person_id = data.local_user_view.person.id;
@ -1145,23 +1272,24 @@ mod tests {
.collect::<Vec<_>>(); .collect::<Vec<_>>();
let expected_post_listing = vec![ let expected_post_listing = vec![
("tegan".to_owned(), true, true),
("mybot".to_owned(), false, false), ("mybot".to_owned(), false, false),
("tegan".to_owned(), true, true), ("tegan".to_owned(), true, true),
]; ];
assert_eq!(expected_post_listing, post_listing); assert_eq!(expected_post_listing, post_listing);
cleanup(data, pool).await Ok(())
} }
#[test_context(Data)]
#[tokio::test] #[tokio::test]
#[serial] #[serial]
async fn post_listing_person_language() -> LemmyResult<()> { async fn post_listing_person_language(data: &mut Data) -> LemmyResult<()> {
const EL_POSTO: &str = "el posto"; const EL_POSTO: &str = "el posto";
let pool = &build_db_pool()?; let pool = &data.pool();
let pool = &mut pool.into(); let pool = &mut pool.into();
let data = init_data(pool).await?;
let spanish_id = Language::read_id_from_code(pool, "es").await?; let spanish_id = Language::read_id_from_code(pool, "es").await?;
@ -1180,17 +1308,23 @@ mod tests {
let post_listings_all = data.default_post_query().list(&data.site, pool).await?; let post_listings_all = data.default_post_query().list(&data.site, pool).await?;
// no language filters specified, all posts should be returned // no language filters specified, all posts should be returned
assert_eq!(vec![EL_POSTO, POST_BY_BOT, POST], names(&post_listings_all)); assert_eq!(
vec![EL_POSTO, POST_WITH_TAGS, POST_BY_BOT, POST],
names(&post_listings_all)
);
LocalUserLanguage::update(pool, vec![french_id], data.local_user_view.local_user.id).await?; LocalUserLanguage::update(pool, vec![french_id], data.local_user_view.local_user.id).await?;
let post_listing_french = data.default_post_query().list(&data.site, pool).await?; let post_listing_french = data.default_post_query().list(&data.site, pool).await?;
// only one post in french and one undetermined should be returned // only one post in french and one undetermined should be returned
assert_eq!(vec![POST_BY_BOT, POST], names(&post_listing_french)); assert_eq!(
vec![POST_WITH_TAGS, POST_BY_BOT, POST],
names(&post_listing_french)
);
assert_eq!( assert_eq!(
Some(french_id), Some(french_id),
post_listing_french.get(1).map(|p| p.post.language_id) post_listing_french.get(2).map(|p| p.post.language_id)
); );
LocalUserLanguage::update( LocalUserLanguage::update(
@ -1207,6 +1341,7 @@ mod tests {
.map(|p| (p.post.name, p.post.language_id)) .map(|p| (p.post.name, p.post.language_id))
.collect::<Vec<_>>(); .collect::<Vec<_>>();
let expected_post_listings_french_und = vec![ let expected_post_listings_french_und = vec![
(POST_WITH_TAGS.to_owned(), french_id),
(POST_BY_BOT.to_owned(), UNDETERMINED_ID), (POST_BY_BOT.to_owned(), UNDETERMINED_ID),
(POST.to_owned(), french_id), (POST.to_owned(), french_id),
]; ];
@ -1214,15 +1349,15 @@ mod tests {
// french post and undetermined language post should be returned // french post and undetermined language post should be returned
assert_eq!(expected_post_listings_french_und, post_listings_french_und); assert_eq!(expected_post_listings_french_und, post_listings_french_und);
cleanup(data, pool).await Ok(())
} }
#[test_context(Data)]
#[tokio::test] #[tokio::test]
#[serial] #[serial]
async fn post_listings_removed() -> LemmyResult<()> { async fn post_listings_removed(data: &mut Data) -> LemmyResult<()> {
let pool = &build_db_pool()?; let pool = &data.pool();
let pool = &mut pool.into(); let pool = &mut pool.into();
let mut data = init_data(pool).await?;
// Remove the post // Remove the post
Post::update( Post::update(
@ -1237,7 +1372,7 @@ mod tests {
// Make sure you don't see the removed post in the results // Make sure you don't see the removed post in the results
let post_listings_no_admin = data.default_post_query().list(&data.site, pool).await?; let post_listings_no_admin = data.default_post_query().list(&data.site, pool).await?;
assert_eq!(vec![POST], names(&post_listings_no_admin)); assert_eq!(vec![POST_WITH_TAGS, POST], names(&post_listings_no_admin));
// Removed bot post is shown to admins on its profile page // Removed bot post is shown to admins on its profile page
data.local_user_view.local_user.admin = true; data.local_user_view.local_user.admin = true;
@ -1249,15 +1384,15 @@ mod tests {
.await?; .await?;
assert_eq!(vec![POST_BY_BOT], names(&post_listings_is_admin)); assert_eq!(vec![POST_BY_BOT], names(&post_listings_is_admin));
cleanup(data, pool).await Ok(())
} }
#[test_context(Data)]
#[tokio::test] #[tokio::test]
#[serial] #[serial]
async fn post_listings_deleted() -> LemmyResult<()> { async fn post_listings_deleted(data: &mut Data) -> LemmyResult<()> {
let pool = &build_db_pool()?; let pool = &data.pool();
let pool = &mut pool.into(); let pool = &mut pool.into();
let data = init_data(pool).await?;
// Delete the post // Delete the post
Post::update( Post::update(
@ -1288,15 +1423,15 @@ mod tests {
assert_eq!(expect_contains_deleted, contains_deleted); assert_eq!(expect_contains_deleted, contains_deleted);
} }
cleanup(data, pool).await Ok(())
} }
#[test_context(Data)]
#[tokio::test] #[tokio::test]
#[serial] #[serial]
async fn post_listings_hidden_community() -> LemmyResult<()> { async fn post_listings_hidden_community(data: &mut Data) -> LemmyResult<()> {
let pool = &build_db_pool()?; let pool = &data.pool();
let pool = &mut pool.into(); let pool = &mut pool.into();
let data = init_data(pool).await?;
Community::update( Community::update(
pool, pool,
@ -1324,17 +1459,17 @@ mod tests {
let posts = data.default_post_query().list(&data.site, pool).await?; let posts = data.default_post_query().list(&data.site, pool).await?;
assert!(!posts.is_empty()); assert!(!posts.is_empty());
cleanup(data, pool).await Ok(())
} }
#[test_context(Data)]
#[tokio::test] #[tokio::test]
#[serial] #[serial]
async fn post_listing_instance_block() -> LemmyResult<()> { async fn post_listing_instance_block(data: &mut Data) -> LemmyResult<()> {
const POST_FROM_BLOCKED_INSTANCE: &str = "post on blocked instance"; const POST_FROM_BLOCKED_INSTANCE: &str = "post on blocked instance";
let pool = &build_db_pool()?; let pool = &data.pool();
let pool = &mut pool.into(); let pool = &mut pool.into();
let data = init_data(pool).await?;
let blocked_instance = Instance::read_or_create(pool, "another_domain.tld".to_string()).await?; let blocked_instance = Instance::read_or_create(pool, "another_domain.tld".to_string()).await?;
@ -1359,7 +1494,12 @@ mod tests {
// no instance block, should return all posts // no instance block, should return all posts
let post_listings_all = data.default_post_query().list(&data.site, pool).await?; let post_listings_all = data.default_post_query().list(&data.site, pool).await?;
assert_eq!( assert_eq!(
vec![POST_FROM_BLOCKED_INSTANCE, POST_BY_BOT, POST], vec![
POST_FROM_BLOCKED_INSTANCE,
POST_WITH_TAGS,
POST_BY_BOT,
POST
],
names(&post_listings_all) names(&post_listings_all)
); );
@ -1372,7 +1512,10 @@ mod tests {
// now posts from communities on that instance should be hidden // now posts from communities on that instance should be hidden
let post_listings_blocked = data.default_post_query().list(&data.site, pool).await?; let post_listings_blocked = data.default_post_query().list(&data.site, pool).await?;
assert_eq!(vec![POST_BY_BOT, POST], names(&post_listings_blocked)); assert_eq!(
vec![POST_WITH_TAGS, POST_BY_BOT, POST],
names(&post_listings_blocked)
);
assert!(post_listings_blocked assert!(post_listings_blocked
.iter() .iter()
.all(|p| p.post.id != post_from_blocked_instance.id)); .all(|p| p.post.id != post_from_blocked_instance.id));
@ -1381,20 +1524,25 @@ mod tests {
InstanceBlock::unblock(pool, &block_form).await?; InstanceBlock::unblock(pool, &block_form).await?;
let post_listings_blocked = data.default_post_query().list(&data.site, pool).await?; let post_listings_blocked = data.default_post_query().list(&data.site, pool).await?;
assert_eq!( assert_eq!(
vec![POST_FROM_BLOCKED_INSTANCE, POST_BY_BOT, POST], vec![
POST_FROM_BLOCKED_INSTANCE,
POST_WITH_TAGS,
POST_BY_BOT,
POST
],
names(&post_listings_blocked) names(&post_listings_blocked)
); );
Instance::delete(pool, blocked_instance.id).await?; Instance::delete(pool, blocked_instance.id).await?;
cleanup(data, pool).await Ok(())
} }
#[test_context(Data)]
#[tokio::test] #[tokio::test]
#[serial] #[serial]
async fn pagination_includes_each_post_once() -> LemmyResult<()> { async fn pagination_includes_each_post_once(data: &mut Data) -> LemmyResult<()> {
let pool = &build_db_pool()?; let pool = &data.pool();
let pool = &mut pool.into(); let pool = &mut pool.into();
let data = init_data(pool).await?;
let community_form = CommunityInsertForm::new( let community_form = CommunityInsertForm::new(
data.inserted_instance.id, data.inserted_instance.id,
@ -1496,15 +1644,15 @@ mod tests {
assert_eq!(inserted_post_ids, listed_post_ids); assert_eq!(inserted_post_ids, listed_post_ids);
Community::delete(pool, inserted_community.id).await?; Community::delete(pool, inserted_community.id).await?;
cleanup(data, pool).await Ok(())
} }
#[test_context(Data)]
#[tokio::test] #[tokio::test]
#[serial] #[serial]
async fn post_listings_hide_read() -> LemmyResult<()> { async fn post_listings_hide_read(data: &mut Data) -> LemmyResult<()> {
let pool = &build_db_pool()?; let pool = &data.pool();
let pool = &mut pool.into(); let pool = &mut pool.into();
let mut data = init_data(pool).await?;
// Make sure local user hides read posts // Make sure local user hides read posts
let local_user_form = LocalUserUpdateForm { let local_user_form = LocalUserUpdateForm {
@ -1520,7 +1668,7 @@ mod tests {
// Make sure you don't see the read post in the results // Make sure you don't see the read post in the results
let post_listings_hide_read = data.default_post_query().list(&data.site, pool).await?; let post_listings_hide_read = data.default_post_query().list(&data.site, pool).await?;
assert_eq!(vec![POST], names(&post_listings_hide_read)); assert_eq!(vec![POST_WITH_TAGS, POST], names(&post_listings_hide_read));
// Test with the show_read override as true // Test with the show_read override as true
let post_listings_show_read_true = PostQuery { let post_listings_show_read_true = PostQuery {
@ -1530,7 +1678,7 @@ mod tests {
.list(&data.site, pool) .list(&data.site, pool)
.await?; .await?;
assert_eq!( assert_eq!(
vec![POST_BY_BOT, POST], vec![POST_WITH_TAGS, POST_BY_BOT, POST],
names(&post_listings_show_read_true) names(&post_listings_show_read_true)
); );
@ -1541,16 +1689,19 @@ mod tests {
} }
.list(&data.site, pool) .list(&data.site, pool)
.await?; .await?;
assert_eq!(vec![POST], names(&post_listings_show_read_false)); assert_eq!(
cleanup(data, pool).await vec![POST_WITH_TAGS, POST],
names(&post_listings_show_read_false)
);
Ok(())
} }
#[test_context(Data)]
#[tokio::test] #[tokio::test]
#[serial] #[serial]
async fn post_listings_hide_hidden() -> LemmyResult<()> { async fn post_listings_hide_hidden(data: &mut Data) -> LemmyResult<()> {
let pool = &build_db_pool()?; let pool = &data.pool();
let pool = &mut pool.into(); let pool = &mut pool.into();
let data = init_data(pool).await?;
// Mark a post as hidden // Mark a post as hidden
PostHide::hide( PostHide::hide(
@ -1562,7 +1713,10 @@ mod tests {
// Make sure you don't see the hidden post in the results // Make sure you don't see the hidden post in the results
let post_listings_hide_hidden = data.default_post_query().list(&data.site, pool).await?; let post_listings_hide_hidden = data.default_post_query().list(&data.site, pool).await?;
assert_eq!(vec![POST], names(&post_listings_hide_hidden)); assert_eq!(
vec![POST_WITH_TAGS, POST],
names(&post_listings_hide_hidden)
);
// Make sure it does come back with the show_hidden option // Make sure it does come back with the show_hidden option
let post_listings_show_hidden = PostQuery { let post_listings_show_hidden = PostQuery {
@ -1573,20 +1727,23 @@ mod tests {
} }
.list(&data.site, pool) .list(&data.site, pool)
.await?; .await?;
assert_eq!(vec![POST_BY_BOT, POST], names(&post_listings_show_hidden)); assert_eq!(
vec![POST_WITH_TAGS, POST_BY_BOT, POST],
names(&post_listings_show_hidden)
);
// Make sure that hidden field is true. // Make sure that hidden field is true.
assert!(&post_listings_show_hidden.first().is_some_and(|p| p.hidden)); assert!(&post_listings_show_hidden.get(1).is_some_and(|p| p.hidden));
cleanup(data, pool).await Ok(())
} }
#[test_context(Data)]
#[tokio::test] #[tokio::test]
#[serial] #[serial]
async fn post_listings_hide_nsfw() -> LemmyResult<()> { async fn post_listings_hide_nsfw(data: &mut Data) -> LemmyResult<()> {
let pool = &build_db_pool()?; let pool = &data.pool();
let pool = &mut pool.into(); let pool = &mut pool.into();
let data = init_data(pool).await?;
// Mark a post as nsfw // Mark a post as nsfw
let update_form = PostUpdateForm { let update_form = PostUpdateForm {
@ -1594,11 +1751,11 @@ mod tests {
..Default::default() ..Default::default()
}; };
Post::update(pool, data.inserted_bot_post.id, &update_form).await?; Post::update(pool, data.inserted_post_with_tags.id, &update_form).await?;
// Make sure you don't see the nsfw post in the regular results // Make sure you don't see the nsfw post in the regular results
let post_listings_hide_nsfw = data.default_post_query().list(&data.site, pool).await?; let post_listings_hide_nsfw = data.default_post_query().list(&data.site, pool).await?;
assert_eq!(vec![POST], names(&post_listings_hide_nsfw)); assert_eq!(vec![POST_BY_BOT, POST], names(&post_listings_hide_nsfw));
// Make sure it does come back with the show_nsfw option // Make sure it does come back with the show_nsfw option
let post_listings_show_nsfw = PostQuery { let post_listings_show_nsfw = PostQuery {
@ -1609,22 +1766,19 @@ mod tests {
} }
.list(&data.site, pool) .list(&data.site, pool)
.await?; .await?;
assert_eq!(vec![POST_BY_BOT, POST], names(&post_listings_show_nsfw)); assert_eq!(
vec![POST_WITH_TAGS, POST_BY_BOT, POST],
names(&post_listings_show_nsfw)
);
// Make sure that nsfw field is true. // Make sure that nsfw field is true.
assert!(&post_listings_show_nsfw.first().is_some_and(|p| p.post.nsfw)); assert!(
&post_listings_show_nsfw
cleanup(data, pool).await .first()
} .ok_or(LemmyErrorType::NotFound)?
.post
async fn cleanup(data: Data, pool: &mut DbPool<'_>) -> LemmyResult<()> { .nsfw
let num_deleted = Post::delete(pool, data.inserted_post.id).await?; );
Community::delete(pool, data.inserted_community.id).await?;
Person::delete(pool, data.local_user_view.person.id).await?;
Person::delete(pool, data.inserted_bot.id).await?;
Person::delete(pool, data.blocked_local_user_view.person.id).await?;
Instance::delete(pool, data.inserted_instance.id).await?;
assert_eq!(1, num_deleted);
Ok(()) Ok(())
} }
@ -1747,15 +1901,16 @@ mod tests {
hidden: false, hidden: false,
saved: false, saved: false,
creator_blocked: false, creator_blocked: false,
tags: PostTags::default(),
}) })
} }
#[test_context(Data)]
#[tokio::test] #[tokio::test]
#[serial] #[serial]
async fn local_only_instance() -> LemmyResult<()> { async fn local_only_instance(data: &mut Data) -> LemmyResult<()> {
let pool = &build_db_pool_for_tests(); let pool = &data.pool();
let pool = &mut pool.into(); let pool = &mut pool.into();
let data = init_data(pool).await?;
Community::update( Community::update(
pool, pool,
@ -1780,7 +1935,7 @@ mod tests {
} }
.list(&data.site, pool) .list(&data.site, pool)
.await?; .await?;
assert_eq!(2, authenticated_query.len()); assert_eq!(3, authenticated_query.len());
let unauthenticated_post = PostView::read(pool, data.inserted_post.id, None, false).await; let unauthenticated_post = PostView::read(pool, data.inserted_post.id, None, false).await;
assert!(unauthenticated_post.is_err()); assert!(unauthenticated_post.is_err());
@ -1794,16 +1949,15 @@ mod tests {
.await; .await;
assert!(authenticated_post.is_ok()); assert!(authenticated_post.is_ok());
cleanup(data, pool).await?;
Ok(()) Ok(())
} }
#[test_context(Data)]
#[tokio::test] #[tokio::test]
#[serial] #[serial]
async fn post_listing_local_user_banned_from_community() -> LemmyResult<()> { async fn post_listing_local_user_banned_from_community(data: &mut Data) -> LemmyResult<()> {
let pool = &build_db_pool()?; let pool = &data.pool();
let pool = &mut pool.into(); let pool = &mut pool.into();
let data = init_data(pool).await?;
// Test that post view shows if local user is blocked from community // Test that post view shows if local user is blocked from community
let banned_from_comm_person = PersonInsertForm::test_form(data.inserted_instance.id, "jill"); let banned_from_comm_person = PersonInsertForm::test_form(data.inserted_instance.id, "jill");
@ -1838,15 +1992,15 @@ mod tests {
assert!(post_view.banned_from_community); assert!(post_view.banned_from_community);
Person::delete(pool, inserted_banned_from_comm_person.id).await?; Person::delete(pool, inserted_banned_from_comm_person.id).await?;
cleanup(data, pool).await Ok(())
} }
#[test_context(Data)]
#[tokio::test] #[tokio::test]
#[serial] #[serial]
async fn post_listing_local_user_not_banned_from_community() -> LemmyResult<()> { async fn post_listing_local_user_not_banned_from_community(data: &mut Data) -> LemmyResult<()> {
let pool = &build_db_pool()?; let pool = &data.pool();
let pool = &mut pool.into(); let pool = &mut pool.into();
let data = init_data(pool).await?;
let post_view = PostView::read( let post_view = PostView::read(
pool, pool,
@ -1858,15 +2012,15 @@ mod tests {
assert!(!post_view.banned_from_community); assert!(!post_view.banned_from_community);
cleanup(data, pool).await Ok(())
} }
#[test_context(Data)]
#[tokio::test] #[tokio::test]
#[serial] #[serial]
async fn speed_check() -> LemmyResult<()> { async fn speed_check(data: &mut Data) -> LemmyResult<()> {
let pool = &build_db_pool()?; let pool = &data.pool();
let pool = &mut pool.into(); let pool = &mut pool.into();
let data = init_data(pool).await?;
// Make sure the post_view query is less than this time // Make sure the post_view query is less than this time
let duration_max = Duration::from_millis(80); let duration_max = Duration::from_millis(80);
@ -1914,15 +2068,15 @@ mod tests {
duration_max duration_max
); );
cleanup(data, pool).await Ok(())
} }
#[test_context(Data)]
#[tokio::test] #[tokio::test]
#[serial] #[serial]
async fn post_listings_no_comments_only() -> LemmyResult<()> { async fn post_listings_no_comments_only(data: &mut Data) -> LemmyResult<()> {
let pool = &build_db_pool()?; let pool = &data.pool();
let pool = &mut pool.into(); let pool = &mut pool.into();
let data = init_data(pool).await?;
// Create a comment for a post // Create a comment for a post
let comment_form = CommentInsertForm::new( let comment_form = CommentInsertForm::new(
@ -1942,17 +2096,20 @@ mod tests {
.list(&data.site, pool) .list(&data.site, pool)
.await?; .await?;
assert_eq!(vec![POST_BY_BOT], names(&post_listings_no_comments)); assert_eq!(
vec![POST_WITH_TAGS, POST_BY_BOT],
names(&post_listings_no_comments)
);
cleanup(data, pool).await Ok(())
} }
#[test_context(Data)]
#[tokio::test] #[tokio::test]
#[serial] #[serial]
async fn post_listing_private_community() -> LemmyResult<()> { async fn post_listing_private_community(data: &mut Data) -> LemmyResult<()> {
let pool = &build_db_pool()?; let pool = &data.pool();
let pool = &mut pool.into(); let pool = &mut pool.into();
let mut data = init_data(pool).await?;
// Mark community as private // Mark community as private
Community::update( Community::update(
@ -2004,7 +2161,7 @@ mod tests {
} }
.list(&data.site, pool) .list(&data.site, pool)
.await?; .await?;
assert_eq!(2, read_post_listing.len()); assert_eq!(3, read_post_listing.len());
let post_view = PostView::read( let post_view = PostView::read(
pool, pool,
data.inserted_post.id, data.inserted_post.id,
@ -2031,7 +2188,7 @@ mod tests {
} }
.list(&data.site, pool) .list(&data.site, pool)
.await?; .await?;
assert_eq!(2, read_post_listing.len()); assert_eq!(3, read_post_listing.len());
let post_view = PostView::read( let post_view = PostView::read(
pool, pool,
data.inserted_post.id, data.inserted_post.id,
@ -2041,6 +2198,33 @@ mod tests {
.await; .await;
assert!(post_view.is_ok()); assert!(post_view.is_ok());
cleanup(data, pool).await Ok(())
}
#[test_context(Data)]
#[tokio::test]
#[serial]
async fn post_tags_present(data: &mut Data) -> LemmyResult<()> {
let pool = &data.pool();
let pool = &mut pool.into();
let post_view = PostView::read(
pool,
data.inserted_post_with_tags.id,
Some(&data.local_user_view.local_user),
false,
)
.await?;
assert_eq!(2, post_view.tags.tags.len());
assert_eq!(data.tag_1.name, post_view.tags.tags[0].name);
assert_eq!(data.tag_2.name, post_view.tags.tags[1].name);
let all_posts = data.default_post_query().list(&data.site, pool).await?;
assert_eq!(2, all_posts[0].tags.tags.len()); // post with tags
assert_eq!(0, all_posts[1].tags.tags.len()); // bot post
assert_eq!(0, all_posts[2].tags.tags.len()); // normal post
Ok(())
} }
} }

View file

@ -1,5 +1,7 @@
#[cfg(feature = "full")] #[cfg(feature = "full")]
use diesel::Queryable; use diesel::Queryable;
#[cfg(feature = "full")]
use diesel::{deserialize::FromSqlRow, expression::AsExpression, sql_types};
use lemmy_db_schema::{ use lemmy_db_schema::{
aggregates::structs::{CommentAggregates, PersonAggregates, PostAggregates, SiteAggregates}, aggregates::structs::{CommentAggregates, PersonAggregates, PostAggregates, SiteAggregates},
source::{ source::{
@ -20,6 +22,7 @@ use lemmy_db_schema::{
private_message_report::PrivateMessageReport, private_message_report::PrivateMessageReport,
registration_application::RegistrationApplication, registration_application::RegistrationApplication,
site::Site, site::Site,
tag::Tag,
}, },
SubscribedType, SubscribedType,
}; };
@ -151,6 +154,7 @@ pub struct PostView {
#[cfg_attr(feature = "full", ts(optional))] #[cfg_attr(feature = "full", ts(optional))]
pub my_vote: Option<i16>, pub my_vote: Option<i16>,
pub unread_comments: i64, pub unread_comments: i64,
pub tags: PostTags,
} }
#[derive(Debug, PartialEq, Eq, Serialize, Deserialize, Clone)] #[derive(Debug, PartialEq, Eq, Serialize, Deserialize, Clone)]
@ -237,3 +241,12 @@ pub struct LocalImageView {
pub local_image: LocalImage, pub local_image: LocalImage,
pub person: Person, pub person: Person,
} }
#[derive(Clone, serde::Serialize, serde::Deserialize, Debug, PartialEq, Default)]
#[cfg_attr(feature = "full", derive(TS, FromSqlRow, AsExpression))]
#[serde(transparent)]
#[cfg_attr(feature = "full", diesel(sql_type = Nullable<sql_types::Json>))]
/// we wrap this in a struct so we can implement FromSqlRow<Json> for it
pub struct PostTags {
pub tags: Vec<Tag>,
}

View file

@ -188,7 +188,7 @@ impl CommunityView {
let is_mod = let is_mod =
CommunityModeratorView::check_is_community_moderator(pool, community_id, person_id).await; CommunityModeratorView::check_is_community_moderator(pool, community_id, person_id).await;
if is_mod.is_ok() if is_mod.is_ok()
|| PersonView::read(pool, person_id) || PersonView::read(pool, person_id, false)
.await .await
.is_ok_and(|t| t.is_admin) .is_ok_and(|t| t.is_admin)
{ {
@ -206,7 +206,7 @@ impl CommunityView {
let is_mod_of_any = let is_mod_of_any =
CommunityModeratorView::is_community_moderator_of_any(pool, person_id).await; CommunityModeratorView::is_community_moderator_of_any(pool, person_id).await;
if is_mod_of_any.is_ok() if is_mod_of_any.is_ok()
|| PersonView::read(pool, person_id) || PersonView::read(pool, person_id, false)
.await .await
.is_ok_and(|t| t.is_admin) .is_ok_and(|t| t.is_admin)
{ {

View file

@ -58,12 +58,11 @@ fn post_to_person_sort_type(sort: PostSortType) -> PersonSortType {
} }
fn queries<'a>( fn queries<'a>(
) -> Queries<impl ReadFn<'a, PersonView, PersonId>, impl ListFn<'a, PersonView, ListMode>> { ) -> Queries<impl ReadFn<'a, PersonView, (PersonId, bool)>, impl ListFn<'a, PersonView, ListMode>> {
let all_joins = move |query: person::BoxedQuery<'a, Pg>| { let all_joins = move |query: person::BoxedQuery<'a, Pg>| {
query query
.inner_join(person_aggregates::table) .inner_join(person_aggregates::table)
.left_join(local_user::table) .left_join(local_user::table)
.filter(person::deleted.eq(false))
.select(( .select((
person::all_columns, person::all_columns,
person_aggregates::all_columns, person_aggregates::all_columns,
@ -71,14 +70,17 @@ fn queries<'a>(
)) ))
}; };
let read = move |mut conn: DbConn<'a>, person_id: PersonId| async move { let read = move |mut conn: DbConn<'a>, params: (PersonId, bool)| async move {
all_joins(person::table.find(person_id).into_boxed()) let (person_id, is_admin) = params;
.first(&mut conn) let mut query = all_joins(person::table.find(person_id).into_boxed());
.await if !is_admin {
query = query.filter(person::deleted.eq(false));
}
query.first(&mut conn).await
}; };
let list = move |mut conn: DbConn<'a>, mode: ListMode| async move { let list = move |mut conn: DbConn<'a>, mode: ListMode| async move {
let mut query = all_joins(person::table.into_boxed()); let mut query = all_joins(person::table.into_boxed()).filter(person::deleted.eq(false));
match mode { match mode {
ListMode::Admins => { ListMode::Admins => {
query = query query = query
@ -135,8 +137,12 @@ fn queries<'a>(
} }
impl PersonView { impl PersonView {
pub async fn read(pool: &mut DbPool<'_>, person_id: PersonId) -> Result<Self, Error> { pub async fn read(
queries().read(pool, person_id).await pool: &mut DbPool<'_>,
person_id: PersonId,
is_admin: bool,
) -> Result<Self, Error> {
queries().read(pool, (person_id, is_admin)).await
} }
pub async fn admins(pool: &mut DbPool<'_>) -> Result<Vec<Self>, Error> { pub async fn admins(pool: &mut DbPool<'_>) -> Result<Vec<Self>, Error> {
@ -243,9 +249,13 @@ mod tests {
) )
.await?; .await?;
let read = PersonView::read(pool, data.alice.id).await; let read = PersonView::read(pool, data.alice.id, false).await;
assert!(read.is_err()); assert!(read.is_err());
// only admin can view deleted users
let read = PersonView::read(pool, data.alice.id, true).await;
assert!(read.is_ok());
let list = PersonQuery { let list = PersonQuery {
sort: Some(PostSortType::New), sort: Some(PostSortType::New),
..Default::default() ..Default::default()
@ -303,10 +313,10 @@ mod tests {
assert_length!(1, list); assert_length!(1, list);
assert_eq!(list[0].person.id, data.alice.id); assert_eq!(list[0].person.id, data.alice.id);
let is_admin = PersonView::read(pool, data.alice.id).await?.is_admin; let is_admin = PersonView::read(pool, data.alice.id, false).await?.is_admin;
assert!(is_admin); assert!(is_admin);
let is_admin = PersonView::read(pool, data.bob.id).await?.is_admin; let is_admin = PersonView::read(pool, data.bob.id, false).await?.is_admin;
assert!(!is_admin); assert!(!is_admin);
cleanup(data, pool).await cleanup(data, pool).await

View file

@ -113,7 +113,6 @@ pub enum LemmyErrorType {
SystemErrLogin, SystemErrLogin,
CouldntSetAllRegistrationsAccepted, CouldntSetAllRegistrationsAccepted,
CouldntSetAllEmailVerified, CouldntSetAllEmailVerified,
Banned,
BlockedUrl, BlockedUrl,
CouldntGetComments, CouldntGetComments,
CouldntGetPosts, CouldntGetPosts,
@ -328,9 +327,9 @@ cfg_if! {
#[test] #[test]
fn deserializes_no_message() -> LemmyResult<()> { fn deserializes_no_message() -> LemmyResult<()> {
let err = LemmyError::from(LemmyErrorType::Banned).error_response(); let err = LemmyError::from(LemmyErrorType::BlockedUrl).error_response();
let json = String::from_utf8(err.into_body().try_into_bytes().unwrap_or_default().to_vec())?; let json = String::from_utf8(err.into_body().try_into_bytes().unwrap_or_default().to_vec())?;
assert_eq!(&json, "{\"error\":\"banned\"}"); assert_eq!(&json, "{\"error\":\"blocked_url\"}");
Ok(()) Ok(())
} }

View file

@ -3,13 +3,11 @@ use anyhow::{anyhow, Context};
use deser_hjson::from_str; use deser_hjson::from_str;
use regex::Regex; use regex::Regex;
use std::{env, fs, io::Error, sync::LazyLock}; use std::{env, fs, io::Error, sync::LazyLock};
use structs::{PictrsConfig, PictrsImageMode, Settings};
use url::Url; use url::Url;
use urlencoding::encode;
pub mod structs; pub mod structs;
use structs::{DatabaseConnection, PictrsConfig, PictrsImageMode, Settings};
const DEFAULT_CONFIG_FILE: &str = "config/config.hjson"; const DEFAULT_CONFIG_FILE: &str = "config/config.hjson";
#[allow(clippy::expect_used)] #[allow(clippy::expect_used)]
@ -51,20 +49,9 @@ impl Settings {
pub fn get_database_url(&self) -> String { pub fn get_database_url(&self) -> String {
if let Ok(url) = env::var("LEMMY_DATABASE_URL") { if let Ok(url) = env::var("LEMMY_DATABASE_URL") {
return url; url
} } else {
match &self.database.connection { self.database.connection.clone()
DatabaseConnection::Uri { uri } => uri.clone(),
DatabaseConnection::Parts(parts) => {
format!(
"postgres://{}:{}@{}:{}/{}",
encode(&parts.user),
encode(&parts.password),
parts.host,
parts.port,
encode(&parts.database),
)
}
} }
} }

View file

@ -132,23 +132,7 @@ pub enum PictrsImageMode {
#[derive(Debug, Deserialize, Serialize, Clone, SmartDefault, Document)] #[derive(Debug, Deserialize, Serialize, Clone, SmartDefault, Document)]
#[serde(default)] #[serde(default)]
pub struct DatabaseConfig { pub struct DatabaseConfig {
#[serde(flatten, default)] /// Configure the database by specifying URI pointing to a postgres instance
pub(crate) connection: DatabaseConnection,
/// Maximum number of active sql connections
#[default(30)]
pub pool_size: usize,
}
#[derive(Debug, Deserialize, Serialize, Clone, SmartDefault, Document)]
#[serde(untagged)]
pub enum DatabaseConnection {
/// Configure the database by specifying a URI
///
/// This is the preferred method to specify database connection details since
/// it is the most flexible.
Uri {
/// Connection URI pointing to a postgres instance
/// ///
/// This example uses peer authentication to obviate the need for creating, /// This example uses peer authentication to obviate the need for creating,
/// configuring, and managing passwords. /// configuring, and managing passwords.
@ -157,37 +141,13 @@ pub enum DatabaseConnection {
/// PostgreSQL's documentation. /// PostgreSQL's documentation.
/// ///
/// [0]: https://www.postgresql.org/docs/current/libpq-connect.html#id-1.7.3.8.3.6 /// [0]: https://www.postgresql.org/docs/current/libpq-connect.html#id-1.7.3.8.3.6
#[default("postgres://lemmy:password@localhost:5432/lemmy")]
#[doku(example = "postgresql:///lemmy?user=lemmy&host=/var/run/postgresql")] #[doku(example = "postgresql:///lemmy?user=lemmy&host=/var/run/postgresql")]
uri: String, pub(crate) connection: String,
},
/// Configure the database by specifying parts of a URI /// Maximum number of active sql connections
/// #[default(30)]
/// Note that specifying the `uri` field should be preferred since it provides pub pool_size: usize,
/// greater control over how the connection is made. This merely exists for
/// backwards-compatibility.
#[default]
Parts(DatabaseConnectionParts),
}
#[derive(Debug, Deserialize, Serialize, Clone, SmartDefault, Document)]
#[serde(default)]
pub struct DatabaseConnectionParts {
/// Username to connect to postgres
#[default("lemmy")]
pub(super) user: String,
/// Password to connect to postgres
#[default("password")]
pub(super) password: String,
#[default("localhost")]
/// Host where postgres is running
pub(super) host: String,
/// Port where postgres can be accessed
#[default(5432)]
pub(super) port: i32,
/// Name of the postgres database for lemmy
#[default("lemmy")]
pub(super) database: String,
} }
#[derive(Debug, Deserialize, Serialize, Clone, Document, SmartDefault)] #[derive(Debug, Deserialize, Serialize, Clone, Document, SmartDefault)]

View file

@ -8,7 +8,7 @@
site_name: lemmy-alpha site_name: lemmy-alpha
} }
database: { database: {
host: postgres_alpha connection: "postgres://lemmy:password@postgres_alpha:5432/lemmy"
} }
pictrs: { pictrs: {
api_key: "my-pictrs-key" api_key: "my-pictrs-key"

View file

@ -8,7 +8,7 @@
site_name: lemmy-beta site_name: lemmy-beta
} }
database: { database: {
host: postgres_beta connection: "postgres://lemmy:password@postgres_beta:5432/lemmy"
} }
pictrs: { pictrs: {
api_key: "my-pictrs-key" api_key: "my-pictrs-key"

View file

@ -8,6 +8,6 @@
site_name: lemmy-delta site_name: lemmy-delta
} }
database: { database: {
host: postgres_delta connection: "postgres://lemmy:password@postgres_delta:5432/lemmy"
} }
} }

View file

@ -8,7 +8,7 @@
site_name: lemmy-epsilon site_name: lemmy-epsilon
} }
database: { database: {
host: postgres_epsilon connection: "postgres://lemmy:password@postgres_epsilon:5432/lemmy"
} }
pictrs: { pictrs: {
api_key: "my-pictrs-key" api_key: "my-pictrs-key"

View file

@ -8,7 +8,7 @@
site_name: lemmy-gamma site_name: lemmy-gamma
} }
database: { database: {
host: postgres_gamma connection: "postgres://lemmy:password@postgres_gamma:5432/lemmy"
} }
pictrs: { pictrs: {
api_key: "my-pictrs-key" api_key: "my-pictrs-key"

View file

@ -11,7 +11,7 @@
site_name: "lemmy-dev" site_name: "lemmy-dev"
} }
database: { database: {
host: postgres connection: "postgres://lemmy:password@postgres:5432/lemmy"
} }
hostname: "localhost" hostname: "localhost"

View file

@ -0,0 +1,4 @@
DROP TABLE post_tag;
DROP TABLE tag;

View file

@ -0,0 +1,23 @@
-- a tag is a federatable object that gives additional context to another object, which can be displayed and filtered on
-- currently, we only have community post tags, which is a tag that is created by post authors as well as mods of a community,
-- to categorize a post. in the future we may add more tag types, depending on the requirements,
-- this will lead to either expansion of this table (community_id optional, addition of tag_type enum)
-- or split of this table / creation of new tables.
CREATE TABLE tag (
id serial PRIMARY KEY,
ap_id text NOT NULL UNIQUE,
name text NOT NULL,
community_id int NOT NULL REFERENCES community (id) ON UPDATE CASCADE ON DELETE CASCADE,
published timestamptz NOT NULL DEFAULT now(),
updated timestamptz,
deleted boolean NOT NULL DEFAULT FALSE
);
-- an association between a post and a tag. created/updated by the post author or mods of a community
CREATE TABLE post_tag (
post_id int NOT NULL REFERENCES post (id) ON UPDATE CASCADE ON DELETE CASCADE,
tag_id int NOT NULL REFERENCES tag (id) ON UPDATE CASCADE ON DELETE CASCADE,
published timestamptz NOT NULL DEFAULT now(),
PRIMARY KEY (post_id, tag_id)
);

View file

@ -579,13 +579,13 @@ async fn build_update_instance_form(
// This is the only kind of error that means the instance is dead // This is the only kind of error that means the instance is dead
return None; return None;
}; };
let status = res.status();
// In this block, returning `None` is ignored, and only means not writing nodeinfo to db if status.is_client_error() || status.is_server_error() {
async {
if res.status().is_client_error() {
return None; return None;
} }
// In this block, returning `None` is ignored, and only means not writing nodeinfo to db
async {
let node_info_url = res let node_info_url = res
.json::<NodeInfoWellKnown>() .json::<NodeInfoWellKnown>()
.await .await