Merge remote-tracking branch 'upstream/main' into optimize-get-random

Dull Bananas 2024-12-18 15:04:14 -07:00
commit 9bedf28bdf
38 changed files with 730 additions and 368 deletions

Cargo.lock generated
View file

@@ -2685,8 +2685,10 @@ dependencies = [
  "lemmy_utils",
  "pretty_assertions",
  "serde",
+ "serde_json",
  "serde_with",
  "serial_test",
+ "test-context",
  "tokio",
  "tracing",
  "ts-rs",

View file

@@ -74,6 +74,9 @@ test("Set some user settings, check that they are federated", async () => {
 test("Delete user", async () => {
   let user = await registerUser(alpha, alphaUrl);
+  let user_profile = await getMyUser(user);
+  let person_id = user_profile.local_user_view.person.id;
+  let actor_id = user_profile.local_user_view.person.actor_id;

   // make a local post and comment
   let alphaCommunity = (await resolveCommunity(user, "main@lemmy-alpha:8541"))
@@ -101,6 +104,10 @@ test("Delete user", async () => {
   expect(remoteComment).toBeDefined();

   await deleteUser(user);
+  await expect(getMyUser(user)).rejects.toStrictEqual(Error("incorrect_login"));
+  await expect(getPersonDetails(user, person_id)).rejects.toStrictEqual(
+    Error("not_found"),
+  );

   // check that posts and comments are marked as deleted on other instances.
   // use get methods to avoid refetching from origin instance
@@ -118,6 +125,9 @@ test("Delete user", async () => {
     (await getComments(alpha, remoteComment.post_id)).comments[0].comment
       .deleted,
   ).toBe(true);
+  await expect(
+    getPersonDetails(user, remoteComment.creator_id),
+  ).rejects.toStrictEqual(Error("not_found"));
 });

 test("Requests with invalid auth should be treated as unauthenticated", async () => {

View file

@@ -1,11 +1,7 @@
 {
   # settings related to the postgresql database
   database: {
-    # Configure the database by specifying a URI
-    #
-    # This is the preferred method to specify database connection details since
-    # it is the most flexible.
-    # Connection URI pointing to a postgres instance
+    # Configure the database by specifying URI pointing to a postgres instance
     #
     # This example uses peer authentication to obviate the need for creating,
     # configuring, and managing passwords.
@@ -14,25 +10,7 @@
     # PostgreSQL's documentation.
     #
     # [0]: https://www.postgresql.org/docs/current/libpq-connect.html#id-1.7.3.8.3.6
-    uri: "postgresql:///lemmy?user=lemmy&host=/var/run/postgresql"
-    # or
-    # Configure the database by specifying parts of a URI
-    #
-    # Note that specifying the `uri` field should be preferred since it provides
-    # greater control over how the connection is made. This merely exists for
-    # backwards-compatibility.
-    # Username to connect to postgres
-    user: "string"
-    # Password to connect to postgres
-    password: "string"
-    # Host where postgres is running
-    host: "string"
-    # Port where postgres can be accessed
-    port: 123
-    # Name of the postgres database for lemmy
-    database: "string"
+    connection: "postgres://lemmy:password@localhost:5432/lemmy"
     # Maximum number of active sql connections
     pool_size: 30
   }

View file

@@ -110,7 +110,7 @@ pub async fn ban_from_community(
   ModBanFromCommunity::create(&mut context.pool(), &form).await?;

-  let person_view = PersonView::read(&mut context.pool(), data.person_id).await?;
+  let person_view = PersonView::read(&mut context.pool(), data.person_id, false).await?;

   ActivityChannel::submit_activity(
     SendActivityData::BanFromCommunity {

View file

@@ -88,7 +88,7 @@ pub async fn ban_from_site(
   ModBan::create(&mut context.pool(), &form).await?;

-  let person_view = PersonView::read(&mut context.pool(), person.id).await?;
+  let person_view = PersonView::read(&mut context.pool(), person.id, false).await?;

   ban_nonlocal_user_from_local_communities(
     &local_user_view,

View file

@@ -48,7 +48,7 @@ pub async fn user_block_person(
       .with_lemmy_type(LemmyErrorType::PersonBlockAlreadyExists)?;
   }

-  let person_view = PersonView::read(&mut context.pool(), target_id).await?;
+  let person_view = PersonView::read(&mut context.pool(), target_id, false).await?;
   Ok(Json(BlockPersonResponse {
     person_view,
     blocked: data.block,

View file

@@ -1,5 +1,5 @@
 use lemmy_db_schema::{
-  newtypes::{CommentId, CommunityId, DbUrl, LanguageId, PostId, PostReportId},
+  newtypes::{CommentId, CommunityId, DbUrl, LanguageId, PostId, PostReportId, TagId},
   ListingType,
   PostFeatureType,
   PostSortType,
@@ -37,6 +37,8 @@ pub struct CreatePost {
   /// Instead of fetching a thumbnail, use a custom one.
   #[cfg_attr(feature = "full", ts(optional))]
   pub custom_thumbnail: Option<String>,
+  #[cfg_attr(feature = "full", ts(optional))]
+  pub tags: Option<Vec<TagId>>,
   /// Time when this post should be scheduled. Null means publish immediately.
   #[cfg_attr(feature = "full", ts(optional))]
   pub scheduled_publish_time: Option<i64>,
@@ -164,6 +166,8 @@ pub struct EditPost {
   /// Instead of fetching a thumbnail, use a custom one.
   #[cfg_attr(feature = "full", ts(optional))]
   pub custom_thumbnail: Option<String>,
+  #[cfg_attr(feature = "full", ts(optional))]
+  pub tags: Option<Vec<TagId>>,
   /// Time when this post should be scheduled. Null means publish immediately.
   #[cfg_attr(feature = "full", ts(optional))]
   pub scheduled_publish_time: Option<i64>,

View file

@@ -51,9 +51,11 @@ pub fn client_builder(settings: &Settings) -> ClientBuilder {
 #[tracing::instrument(skip_all)]
 pub async fn fetch_link_metadata(url: &Url, context: &LemmyContext) -> LemmyResult<LinkMetadata> {
   info!("Fetching site metadata for url: {}", url);
-  // We only fetch the first 64kB of data in order to not waste bandwidth especially for large
-  // binary files
-  let bytes_to_fetch = 64 * 1024;
+  // We only fetch the first MB of data in order to not waste bandwidth especially for large
+  // binary files. This high limit is particularly needed for youtube, which includes a lot of
+  // javascript code before the opengraph tags. Mastodon also uses a 1 MB limit:
+  // https://github.com/mastodon/mastodon/blob/295ad6f19a016b3f16e1201ffcbb1b3ad6b455a2/app/lib/request.rb#L213
+  let bytes_to_fetch = 1024 * 1024;
   let response = context
     .client()
     .get(url.as_str())
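
Note (not part of this commit): the hunk above only raises the byte cap from 64 kB to 1 MB. For readers unfamiliar with how such a cap is enforced, a minimal sketch follows; it assumes reqwest's `stream` feature and the futures-util crate, and the function name is hypothetical.

use futures_util::StreamExt;

// Hedged sketch: accumulate at most `limit` bytes of the response body, then stop
// reading, so large binary files do not waste bandwidth.
async fn read_capped(response: reqwest::Response, limit: usize) -> reqwest::Result<Vec<u8>> {
  let mut buf = Vec::new();
  let mut stream = response.bytes_stream();
  while let Some(chunk) = stream.next().await {
    buf.extend_from_slice(&chunk?);
    if buf.len() >= limit {
      buf.truncate(limit);
      break;
    }
  }
  Ok(buf)
}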

View file

@@ -123,8 +123,6 @@ pub fn is_admin(local_user_view: &LocalUserView) -> LemmyResult<()> {
   check_user_valid(&local_user_view.person)?;
   if !local_user_view.local_user.admin {
     Err(LemmyErrorType::NotAnAdmin)?
-  } else if local_user_view.person.banned {
-    Err(LemmyErrorType::Banned)?
   } else {
     Ok(())
   }

View file

@@ -1,5 +1,5 @@
 use actix_web::web::{Data, Json};
-use lemmy_api_common::{context::LemmyContext, site::MyUserInfo};
+use lemmy_api_common::{context::LemmyContext, site::MyUserInfo, utils::check_user_valid};
 use lemmy_db_schema::source::{
   actor_language::LocalUserLanguage,
   community_block::CommunityBlock,
@@ -15,6 +15,8 @@ pub async fn get_my_user(
   local_user_view: LocalUserView,
   context: Data<LemmyContext>,
 ) -> LemmyResult<Json<MyUserInfo>> {
+  check_user_valid(&local_user_view.person)?;
+
   // Build the local user with parallel queries and add it to site response
   let person_id = local_user_view.person.id;
   let local_user_id = local_user_view.local_user.id;

View file

@@ -4,7 +4,7 @@ use actix_web::web::{Json, Query};
 use lemmy_api_common::{
   context::LemmyContext,
   person::{GetPersonDetails, GetPersonDetailsResponse},
-  utils::{check_private_instance, read_site_for_actor},
+  utils::{check_private_instance, is_admin, read_site_for_actor},
 };
 use lemmy_db_schema::{source::person::Person, utils::post_to_comment_sort_type};
 use lemmy_db_views::{
@@ -45,7 +45,11 @@ pub async fn read_person(
   // You don't need to return settings for the user, since this comes back with GetSite
   // `my_user`
-  let person_view = PersonView::read(&mut context.pool(), person_details_id).await?;
+  let is_admin = local_user_view
+    .as_ref()
+    .map(|l| is_admin(l).is_ok())
+    .unwrap_or_default();
+  let person_view = PersonView::read(&mut context.pool(), person_details_id, is_admin).await?;

   let sort = data.sort;
   let page = data.page;

View file

@@ -60,7 +60,7 @@ async fn convert_response(
       }
     },
     SearchableObjects::PersonOrCommunity(pc) => match *pc {
-      UserOrCommunity::User(u) => res.person = Some(PersonView::read(pool, u.id).await?),
+      UserOrCommunity::User(u) => res.person = Some(PersonView::read(pool, u.id, is_admin).await?),
       UserOrCommunity::Community(c) => {
         res.community = Some(CommunityView::read(pool, c.id, local_user.as_ref(), is_admin).await?)
       }

View file

@@ -35,4 +35,5 @@ pub mod private_message_report;
 pub mod registration_application;
 pub mod secret;
 pub mod site;
+pub mod tag;
 pub mod tagline;

View file

@@ -0,0 +1,53 @@
use crate::{
newtypes::TagId,
schema::{post_tag, tag},
source::tag::{PostTagInsertForm, Tag, TagInsertForm},
traits::Crud,
utils::{get_conn, DbPool},
};
use diesel::{insert_into, result::Error, QueryDsl};
use diesel_async::RunQueryDsl;
use lemmy_utils::error::LemmyResult;
#[async_trait]
impl Crud for Tag {
type InsertForm = TagInsertForm;
type UpdateForm = TagInsertForm;
type IdType = TagId;
async fn create(pool: &mut DbPool<'_>, form: &Self::InsertForm) -> Result<Self, Error> {
let conn = &mut get_conn(pool).await?;
insert_into(tag::table)
.values(form)
.get_result::<Self>(conn)
.await
}
async fn update(
pool: &mut DbPool<'_>,
pid: TagId,
form: &Self::UpdateForm,
) -> Result<Self, Error> {
let conn = &mut get_conn(pool).await?;
diesel::update(tag::table.find(pid))
.set(form)
.get_result::<Self>(conn)
.await
}
}
impl PostTagInsertForm {
pub async fn insert_tag_associations(
pool: &mut DbPool<'_>,
tags: &[PostTagInsertForm],
) -> LemmyResult<()> {
let conn = &mut get_conn(pool).await?;
insert_into(post_tag::table)
.values(tags)
.execute(conn)
.await?;
Ok(())
}
}
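
Note (not part of this commit): a hedged sketch of how the Crud impl above might be called. The URL, tag name, and helper name are placeholders, and it assumes DbUrl converts from url::Url as elsewhere in the codebase.

use lemmy_db_schema::{
  newtypes::CommunityId,
  source::tag::{Tag, TagInsertForm},
  traits::Crud,
  utils::DbPool,
};
use lemmy_utils::error::LemmyResult;
use url::Url;

// Hypothetical helper: create a community-owned tag via the Crud impl above.
async fn create_example_tag(pool: &mut DbPool<'_>, community_id: CommunityId) -> LemmyResult<Tag> {
  let form = TagInsertForm {
    ap_id: Url::parse("https://example.com/tag/cats")?.into(), // assumes DbUrl: From<Url>
    name: "cats".to_string(),
    community_id,
    published: None, // defaults to now()
    updated: None,
    deleted: false,
  };
  Ok(Tag::create(pool, &form).await?)
}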

View file

@@ -283,3 +283,9 @@ impl InstanceId {
     self.0
   }
 }
+
+#[derive(Debug, Copy, Clone, Hash, Eq, PartialEq, Default, Serialize, Deserialize)]
+#[cfg_attr(feature = "full", derive(DieselNewType, TS))]
+#[cfg_attr(feature = "full", ts(export))]
+/// The internal tag id.
+pub struct TagId(pub i32);

View file

@@ -827,6 +827,14 @@ diesel::table! {
   }
 }

+diesel::table! {
+  post_tag (post_id, tag_id) {
+    post_id -> Int4,
+    tag_id -> Int4,
+    published -> Timestamptz,
+  }
+}
+
 diesel::table! {
   private_message (id) {
     id -> Int4,
@@ -952,6 +960,18 @@
   }
 }

+diesel::table! {
+  tag (id) {
+    id -> Int4,
+    ap_id -> Text,
+    name -> Text,
+    community_id -> Int4,
+    published -> Timestamptz,
+    updated -> Nullable<Timestamptz>,
+    deleted -> Bool,
+  }
+}
+
 diesel::table! {
   tagline (id) {
     id -> Int4,
@@ -1033,6 +1053,8 @@ diesel::joinable!(post_aggregates -> instance (instance_id));
 diesel::joinable!(post_aggregates -> person (creator_id));
 diesel::joinable!(post_aggregates -> post (post_id));
 diesel::joinable!(post_report -> post (post_id));
+diesel::joinable!(post_tag -> post (post_id));
+diesel::joinable!(post_tag -> tag (tag_id));
 diesel::joinable!(private_message_report -> private_message (private_message_id));
 diesel::joinable!(registration_application -> local_user (local_user_id));
 diesel::joinable!(registration_application -> person (admin_id));
@@ -1040,6 +1062,7 @@ diesel::joinable!(site -> instance (instance_id));
 diesel::joinable!(site_aggregates -> site (site_id));
 diesel::joinable!(site_language -> language (language_id));
 diesel::joinable!(site_language -> site (site_id));
+diesel::joinable!(tag -> community (community_id));

 diesel::allow_tables_to_appear_in_same_query!(
   admin_allow_instance,
@@ -1099,6 +1122,7 @@ diesel::allow_tables_to_appear_in_same_query!(
   post_actions,
   post_aggregates,
   post_report,
+  post_tag,
   private_message,
   private_message_report,
   received_activity,
@@ -1109,5 +1133,6 @@
   site,
   site_aggregates,
   site_language,
+  tag,
   tagline,
 );
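
Note (not part of this commit): a hypothetical query sketch showing what the joinable! declarations above enable — loading the non-deleted tags attached to one post through the post_tag join table. The helper name and the plain i32 id are assumptions.

use diesel::{ExpressionMethods, QueryDsl, QueryResult};
use diesel_async::{AsyncPgConnection, RunQueryDsl};
use lemmy_db_schema::{
  schema::{post_tag, tag},
  source::tag::Tag,
};

// Hedged sketch: join post_tag to tag for a single post and skip deleted tags.
async fn tags_for_post(conn: &mut AsyncPgConnection, post_id: i32) -> QueryResult<Vec<Tag>> {
  post_tag::table
    .inner_join(tag::table)
    .filter(post_tag::post_id.eq(post_id))
    .filter(tag::deleted.eq(false))
    .select(tag::all_columns)
    .load(conn)
    .await
}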

View file

@@ -40,6 +40,7 @@ pub mod private_message_report;
 pub mod registration_application;
 pub mod secret;
 pub mod site;
+pub mod tag;
 pub mod tagline;

 /// Default value for columns like [community::Community.inbox_url] which are marked as serde(skip).

View file

@@ -0,0 +1,57 @@
use crate::newtypes::{CommunityId, DbUrl, PostId, TagId};
#[cfg(feature = "full")]
use crate::schema::{post_tag, tag};
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use serde_with::skip_serializing_none;
#[cfg(feature = "full")]
use ts_rs::TS;
/// A tag that can be assigned to a post within a community.
/// The tag object is created by the community moderators.
/// The assignment happens by the post creator and can be updated by the community moderators.
///
/// A tag is a federatable object that gives additional context to another object, which can be
/// displayed and filtered on currently, we only have community post tags, which is a tag that is
/// created by post authors as well as mods of a community, to categorize a post. in the future we
/// may add more tag types, depending on the requirements, this will lead to either expansion of
/// this table (community_id optional, addition of tag_type enum) or split of this table / creation
/// of new tables.
#[skip_serializing_none]
#[derive(Debug, PartialEq, Serialize, Deserialize, Clone)]
#[cfg_attr(feature = "full", derive(TS, Queryable, Selectable, Identifiable))]
#[cfg_attr(feature = "full", diesel(table_name = tag))]
#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))]
#[cfg_attr(feature = "full", ts(export))]
pub struct Tag {
pub id: TagId,
pub ap_id: DbUrl,
pub name: String,
/// the community that owns this tag
pub community_id: CommunityId,
pub published: DateTime<Utc>,
#[cfg_attr(feature = "full", ts(optional))]
pub updated: Option<DateTime<Utc>>,
pub deleted: bool,
}
#[derive(Debug, Clone)]
#[cfg_attr(feature = "full", derive(Insertable, AsChangeset))]
#[cfg_attr(feature = "full", diesel(table_name = tag))]
pub struct TagInsertForm {
pub ap_id: DbUrl,
pub name: String,
pub community_id: CommunityId,
// default now
pub published: Option<DateTime<Utc>>,
pub updated: Option<DateTime<Utc>>,
pub deleted: bool,
}
#[derive(Debug, Clone)]
#[cfg_attr(feature = "full", derive(Insertable, AsChangeset))]
#[cfg_attr(feature = "full", diesel(table_name = post_tag))]
pub struct PostTagInsertForm {
pub post_id: PostId,
pub tag_id: TagId,
}

View file

@@ -550,6 +550,11 @@ pub mod functions {
   // really this function is variadic, this just adds the two-argument version
   define_sql_function!(fn coalesce<T: diesel::sql_types::SqlType + diesel::sql_types::SingleValue>(x: diesel::sql_types::Nullable<T>, y: T) -> T);

+  define_sql_function! {
+    #[aggregate]
+    fn json_agg<T: diesel::sql_types::SqlType + diesel::sql_types::SingleValue>(obj: T) -> Json
+  }
+
   define_sql_function!(#[sql_name = "coalesce"] fn coalesce_2_nullable<T: diesel::sql_types::SqlType + diesel::sql_types::SingleValue>(x: diesel::sql_types::Nullable<T>, y: diesel::sql_types::Nullable<T>) -> diesel::sql_types::Nullable<T>);
 }
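
Note (not part of this commit): a hedged sketch of using the json_agg helper defined above in a grouped query, here aggregating tag names per community. It assumes diesel's serde_json feature so serde_json::Value can be loaded from a Json column; the function name is hypothetical.

use diesel::{QueryDsl, QueryResult};
use diesel_async::{AsyncPgConnection, RunQueryDsl};
use lemmy_db_schema::{schema::tag, utils::functions::json_agg};

// Hedged sketch: one JSON array of tag names per community_id.
async fn tag_names_by_community(
  conn: &mut AsyncPgConnection,
) -> QueryResult<Vec<(i32, serde_json::Value)>> {
  tag::table
    .group_by(tag::community_id)
    .select((tag::community_id, json_agg(tag::name)))
    .load(conn)
    .await
}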

View file

@@ -35,6 +35,7 @@ diesel-async = { workspace = true, optional = true }
 diesel_ltree = { workspace = true, optional = true }
 serde = { workspace = true }
 serde_with = { workspace = true }
+serde_json = { workspace = true }
 tracing = { workspace = true, optional = true }
 ts-rs = { workspace = true, optional = true }
 actix-web = { workspace = true, optional = true }
@@ -46,3 +47,4 @@ serial_test = { workspace = true }
 tokio = { workspace = true }
 pretty_assertions = { workspace = true }
 url = { workspace = true }
+test-context = "0.3.0"

View file

@@ -14,6 +14,8 @@ pub mod local_user_view;
 #[cfg(feature = "full")]
 pub mod post_report_view;
 #[cfg(feature = "full")]
+pub mod post_tags_view;
+#[cfg(feature = "full")]
 pub mod post_view;
 #[cfg(feature = "full")]
 pub mod private_message_report_view;

View file

@@ -0,0 +1,30 @@
//! see post_view.rs for the reason for this json decoding
use crate::structs::PostTags;
use diesel::{
deserialize::FromSql,
pg::{Pg, PgValue},
serialize::ToSql,
sql_types::{self, Nullable},
};
impl FromSql<Nullable<sql_types::Json>, Pg> for PostTags {
fn from_sql(bytes: PgValue) -> diesel::deserialize::Result<Self> {
let value = <serde_json::Value as FromSql<sql_types::Json, Pg>>::from_sql(bytes)?;
Ok(serde_json::from_value::<PostTags>(value)?)
}
fn from_nullable_sql(
bytes: Option<<Pg as diesel::backend::Backend>::RawValue<'_>>,
) -> diesel::deserialize::Result<Self> {
match bytes {
Some(bytes) => Self::from_sql(bytes),
None => Ok(Self { tags: vec![] }),
}
}
}
impl ToSql<Nullable<sql_types::Json>, Pg> for PostTags {
fn to_sql(&self, out: &mut diesel::serialize::Output<Pg>) -> diesel::serialize::Result {
let value = serde_json::to_value(self)?;
<serde_json::Value as ToSql<sql_types::Json, Pg>>::to_sql(&value, &mut out.reborrow())
}
}

File diff suppressed because it is too large

View file

@@ -1,5 +1,7 @@
 #[cfg(feature = "full")]
 use diesel::Queryable;
+#[cfg(feature = "full")]
+use diesel::{deserialize::FromSqlRow, expression::AsExpression, sql_types};
 use lemmy_db_schema::{
   aggregates::structs::{CommentAggregates, PersonAggregates, PostAggregates, SiteAggregates},
   source::{
@@ -20,6 +22,7 @@ use lemmy_db_schema::{
     private_message_report::PrivateMessageReport,
     registration_application::RegistrationApplication,
     site::Site,
+    tag::Tag,
   },
   SubscribedType,
 };
@@ -151,6 +154,7 @@ pub struct PostView {
   #[cfg_attr(feature = "full", ts(optional))]
   pub my_vote: Option<i16>,
   pub unread_comments: i64,
+  pub tags: PostTags,
 }

 #[derive(Debug, PartialEq, Eq, Serialize, Deserialize, Clone)]
@@ -237,3 +241,12 @@ pub struct LocalImageView {
   pub local_image: LocalImage,
   pub person: Person,
 }
+
+#[derive(Clone, serde::Serialize, serde::Deserialize, Debug, PartialEq, Default)]
+#[cfg_attr(feature = "full", derive(TS, FromSqlRow, AsExpression))]
+#[serde(transparent)]
+#[cfg_attr(feature = "full", diesel(sql_type = Nullable<sql_types::Json>))]
+/// we wrap this in a struct so we can implement FromSqlRow<Json> for it
+pub struct PostTags {
+  pub tags: Vec<Tag>,
+}
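
Note (not part of this commit): a small illustration of the #[serde(transparent)] choice above — PostTags serializes as a bare JSON array, the same shape json_agg produces on the SQL side. It assumes PostTags and serde_json are in scope; the test name is hypothetical.

#[test]
fn post_tags_serialize_as_bare_array() {
  // transparent: the wrapper struct serializes as its single Vec<Tag> field
  let empty = PostTags::default();
  assert_eq!(serde_json::to_string(&empty).unwrap(), "[]");
}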

View file

@@ -188,7 +188,7 @@ impl CommunityView {
     let is_mod =
       CommunityModeratorView::check_is_community_moderator(pool, community_id, person_id).await;
     if is_mod.is_ok()
-      || PersonView::read(pool, person_id)
+      || PersonView::read(pool, person_id, false)
         .await
         .is_ok_and(|t| t.is_admin)
     {
@@ -206,7 +206,7 @@ impl CommunityView {
     let is_mod_of_any =
       CommunityModeratorView::is_community_moderator_of_any(pool, person_id).await;
     if is_mod_of_any.is_ok()
-      || PersonView::read(pool, person_id)
+      || PersonView::read(pool, person_id, false)
         .await
         .is_ok_and(|t| t.is_admin)
     {

View file

@@ -58,12 +58,11 @@ fn post_to_person_sort_type(sort: PostSortType) -> PersonSortType {
 }

 fn queries<'a>(
-) -> Queries<impl ReadFn<'a, PersonView, PersonId>, impl ListFn<'a, PersonView, ListMode>> {
+) -> Queries<impl ReadFn<'a, PersonView, (PersonId, bool)>, impl ListFn<'a, PersonView, ListMode>> {
   let all_joins = move |query: person::BoxedQuery<'a, Pg>| {
     query
       .inner_join(person_aggregates::table)
       .left_join(local_user::table)
-      .filter(person::deleted.eq(false))
       .select((
         person::all_columns,
         person_aggregates::all_columns,
@@ -71,14 +70,17 @@
       ))
   };

-  let read = move |mut conn: DbConn<'a>, person_id: PersonId| async move {
-    all_joins(person::table.find(person_id).into_boxed())
-      .first(&mut conn)
-      .await
+  let read = move |mut conn: DbConn<'a>, params: (PersonId, bool)| async move {
+    let (person_id, is_admin) = params;
+    let mut query = all_joins(person::table.find(person_id).into_boxed());
+    if !is_admin {
+      query = query.filter(person::deleted.eq(false));
+    }
+    query.first(&mut conn).await
   };

   let list = move |mut conn: DbConn<'a>, mode: ListMode| async move {
-    let mut query = all_joins(person::table.into_boxed());
+    let mut query = all_joins(person::table.into_boxed()).filter(person::deleted.eq(false));
     match mode {
       ListMode::Admins => {
         query = query
@@ -135,8 +137,12 @@
 }

 impl PersonView {
-  pub async fn read(pool: &mut DbPool<'_>, person_id: PersonId) -> Result<Self, Error> {
-    queries().read(pool, person_id).await
+  pub async fn read(
+    pool: &mut DbPool<'_>,
+    person_id: PersonId,
+    is_admin: bool,
+  ) -> Result<Self, Error> {
+    queries().read(pool, (person_id, is_admin)).await
   }

   pub async fn admins(pool: &mut DbPool<'_>) -> Result<Vec<Self>, Error> {
@@ -243,9 +249,13 @@
     )
     .await?;

-    let read = PersonView::read(pool, data.alice.id).await;
+    let read = PersonView::read(pool, data.alice.id, false).await;
     assert!(read.is_err());

+    // only admin can view deleted users
+    let read = PersonView::read(pool, data.alice.id, true).await;
+    assert!(read.is_ok());
+
     let list = PersonQuery {
       sort: Some(PostSortType::New),
       ..Default::default()
@@ -303,10 +313,10 @@
     assert_length!(1, list);
     assert_eq!(list[0].person.id, data.alice.id);

-    let is_admin = PersonView::read(pool, data.alice.id).await?.is_admin;
+    let is_admin = PersonView::read(pool, data.alice.id, false).await?.is_admin;
     assert!(is_admin);

-    let is_admin = PersonView::read(pool, data.bob.id).await?.is_admin;
+    let is_admin = PersonView::read(pool, data.bob.id, false).await?.is_admin;
     assert!(!is_admin);

     cleanup(data, pool).await

View file

@@ -113,7 +113,6 @@ pub enum LemmyErrorType {
   SystemErrLogin,
   CouldntSetAllRegistrationsAccepted,
   CouldntSetAllEmailVerified,
-  Banned,
   BlockedUrl,
   CouldntGetComments,
   CouldntGetPosts,
@@ -328,9 +327,9 @@
   #[test]
   fn deserializes_no_message() -> LemmyResult<()> {
-    let err = LemmyError::from(LemmyErrorType::Banned).error_response();
+    let err = LemmyError::from(LemmyErrorType::BlockedUrl).error_response();
     let json = String::from_utf8(err.into_body().try_into_bytes().unwrap_or_default().to_vec())?;
-    assert_eq!(&json, "{\"error\":\"banned\"}");
+    assert_eq!(&json, "{\"error\":\"blocked_url\"}");

     Ok(())
   }

View file

@@ -3,13 +3,11 @@ use anyhow::{anyhow, Context};
 use deser_hjson::from_str;
 use regex::Regex;
 use std::{env, fs, io::Error, sync::LazyLock};
+use structs::{PictrsConfig, PictrsImageMode, Settings};
 use url::Url;
-use urlencoding::encode;

 pub mod structs;

-use structs::{DatabaseConnection, PictrsConfig, PictrsImageMode, Settings};
-
 const DEFAULT_CONFIG_FILE: &str = "config/config.hjson";

 #[allow(clippy::expect_used)]
@@ -51,20 +49,9 @@ impl Settings {
   pub fn get_database_url(&self) -> String {
     if let Ok(url) = env::var("LEMMY_DATABASE_URL") {
-      return url;
-    }
-    match &self.database.connection {
-      DatabaseConnection::Uri { uri } => uri.clone(),
-      DatabaseConnection::Parts(parts) => {
-        format!(
-          "postgres://{}:{}@{}:{}/{}",
-          encode(&parts.user),
-          encode(&parts.password),
-          parts.host,
-          parts.port,
-          encode(&parts.database),
-        )
-      }
+      url
+    } else {
+      self.database.connection.clone()
     }
   }
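
Note (not part of this commit): the net effect of the simplified method above, sketched as a standalone helper with a hypothetical name — the LEMMY_DATABASE_URL environment variable, when set, takes precedence over database.connection from the config file.

use std::env;

// Hedged sketch mirroring get_database_url(): env var first, then the configured URI.
fn effective_db_url(config_connection: &str) -> String {
  env::var("LEMMY_DATABASE_URL").unwrap_or_else(|_| config_connection.to_string())
}

// e.g. effective_db_url("postgres://lemmy:password@localhost:5432/lemmy")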

View file

@@ -132,64 +132,24 @@ pub enum PictrsImageMode {
 #[derive(Debug, Deserialize, Serialize, Clone, SmartDefault, Document)]
 #[serde(default)]
 pub struct DatabaseConfig {
-  #[serde(flatten, default)]
-  pub(crate) connection: DatabaseConnection,
+  /// Configure the database by specifying URI pointing to a postgres instance
+  ///
+  /// This example uses peer authentication to obviate the need for creating,
+  /// configuring, and managing passwords.
+  ///
+  /// For an explanation of how to use connection URIs, see [here][0] in
+  /// PostgreSQL's documentation.
+  ///
+  /// [0]: https://www.postgresql.org/docs/current/libpq-connect.html#id-1.7.3.8.3.6
+  #[default("postgres://lemmy:password@localhost:5432/lemmy")]
+  #[doku(example = "postgresql:///lemmy?user=lemmy&host=/var/run/postgresql")]
+  pub(crate) connection: String,
   /// Maximum number of active sql connections
   #[default(30)]
   pub pool_size: usize,
 }

-#[derive(Debug, Deserialize, Serialize, Clone, SmartDefault, Document)]
-#[serde(untagged)]
-pub enum DatabaseConnection {
-  /// Configure the database by specifying a URI
-  ///
-  /// This is the preferred method to specify database connection details since
-  /// it is the most flexible.
-  Uri {
-    /// Connection URI pointing to a postgres instance
-    ///
-    /// This example uses peer authentication to obviate the need for creating,
-    /// configuring, and managing passwords.
-    ///
-    /// For an explanation of how to use connection URIs, see [here][0] in
-    /// PostgreSQL's documentation.
-    ///
-    /// [0]: https://www.postgresql.org/docs/current/libpq-connect.html#id-1.7.3.8.3.6
-    #[doku(example = "postgresql:///lemmy?user=lemmy&host=/var/run/postgresql")]
-    uri: String,
-  },
-
-  /// Configure the database by specifying parts of a URI
-  ///
-  /// Note that specifying the `uri` field should be preferred since it provides
-  /// greater control over how the connection is made. This merely exists for
-  /// backwards-compatibility.
-  #[default]
-  Parts(DatabaseConnectionParts),
-}
-
-#[derive(Debug, Deserialize, Serialize, Clone, SmartDefault, Document)]
-#[serde(default)]
-pub struct DatabaseConnectionParts {
-  /// Username to connect to postgres
-  #[default("lemmy")]
-  pub(super) user: String,
-  /// Password to connect to postgres
-  #[default("password")]
-  pub(super) password: String,
-  #[default("localhost")]
-  /// Host where postgres is running
-  pub(super) host: String,
-  /// Port where postgres can be accessed
-  #[default(5432)]
-  pub(super) port: i32,
-  /// Name of the postgres database for lemmy
-  #[default("lemmy")]
-  pub(super) database: String,
-}
-
 #[derive(Debug, Deserialize, Serialize, Clone, Document, SmartDefault)]
 #[serde(deny_unknown_fields)]
 pub struct EmailConfig {

View file

@@ -8,7 +8,7 @@
     site_name: lemmy-alpha
   }
   database: {
-    host: postgres_alpha
+    connection: "postgres://lemmy:password@postgres_alpha:5432/lemmy"
   }
   pictrs: {
     api_key: "my-pictrs-key"

View file

@@ -8,7 +8,7 @@
     site_name: lemmy-beta
   }
   database: {
-    host: postgres_beta
+    connection: "postgres://lemmy:password@postgres_beta:5432/lemmy"
  }
  pictrs: {
    api_key: "my-pictrs-key"

View file

@@ -8,6 +8,6 @@
     site_name: lemmy-delta
   }
   database: {
-    host: postgres_delta
+    connection: "postgres://lemmy:password@postgres_delta:5432/lemmy"
   }
 }

View file

@@ -8,7 +8,7 @@
     site_name: lemmy-epsilon
   }
   database: {
-    host: postgres_epsilon
+    connection: "postgres://lemmy:password@postgres_epsilon:5432/lemmy"
   }
   pictrs: {
     api_key: "my-pictrs-key"

View file

@@ -8,7 +8,7 @@
     site_name: lemmy-gamma
   }
   database: {
-    host: postgres_gamma
+    connection: "postgres://lemmy:password@postgres_gamma:5432/lemmy"
   }
   pictrs: {
     api_key: "my-pictrs-key"

View file

@@ -11,7 +11,7 @@
     site_name: "lemmy-dev"
   }
   database: {
-    host: postgres
+    connection: "postgres://lemmy:password@postgres:5432/lemmy"
   }

   hostname: "localhost"

View file

@@ -0,0 +1,4 @@
DROP TABLE post_tag;
DROP TABLE tag;

View file

@@ -0,0 +1,23 @@
-- a tag is a federatable object that gives additional context to another object, which can be displayed and filtered on
-- currently, we only have community post tags, which is a tag that is created by post authors as well as mods of a community,
-- to categorize a post. in the future we may add more tag types, depending on the requirements,
-- this will lead to either expansion of this table (community_id optional, addition of tag_type enum)
-- or split of this table / creation of new tables.
CREATE TABLE tag (
id serial PRIMARY KEY,
ap_id text NOT NULL UNIQUE,
name text NOT NULL,
community_id int NOT NULL REFERENCES community (id) ON UPDATE CASCADE ON DELETE CASCADE,
published timestamptz NOT NULL DEFAULT now(),
updated timestamptz,
deleted boolean NOT NULL DEFAULT FALSE
);
-- an association between a post and a tag. created/updated by the post author or mods of a community
CREATE TABLE post_tag (
post_id int NOT NULL REFERENCES post (id) ON UPDATE CASCADE ON DELETE CASCADE,
tag_id int NOT NULL REFERENCES tag (id) ON UPDATE CASCADE ON DELETE CASCADE,
published timestamptz NOT NULL DEFAULT now(),
PRIMARY KEY (post_id, tag_id)
);
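
Note (not part of this migration): a hedged Rust-side sketch of how rows land in the post_tag table above, using the PostTagInsertForm helper added elsewhere in this commit; the function name and ids are placeholders.

use lemmy_db_schema::{
  newtypes::{PostId, TagId},
  source::tag::PostTagInsertForm,
  utils::DbPool,
};
use lemmy_utils::error::LemmyResult;

// Hypothetical helper: attach a set of existing tags to a post.
async fn tag_post(pool: &mut DbPool<'_>, post_id: PostId, tag_ids: &[TagId]) -> LemmyResult<()> {
  let forms: Vec<PostTagInsertForm> = tag_ids
    .iter()
    .map(|tag_id| PostTagInsertForm { post_id, tag_id: *tag_id })
    .collect();
  PostTagInsertForm::insert_tag_associations(pool, &forms).await
}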

View file

@@ -579,13 +579,13 @@ async fn build_update_instance_form(
     // This is the only kind of error that means the instance is dead
     return None;
   };
+  let status = res.status();
+  if status.is_client_error() || status.is_server_error() {
+    return None;
+  }

   // In this block, returning `None` is ignored, and only means not writing nodeinfo to db
   async {
-    if res.status().is_client_error() {
-      return None;
-    }
-
     let node_info_url = res
       .json::<NodeInfoWellKnown>()
       .await