Mirror of https://github.com/LemmyNet/lemmy.git, synced 2024-12-23 03:11:32 +00:00

Merge remote-tracking branch 'origin/main' into combined_tables_2

Commit 154cea9366
21 changed files with 661 additions and 247 deletions
Cargo.lock (generated; 2 changes)

@@ -2685,8 +2685,10 @@ dependencies = [
"lemmy_utils",
"pretty_assertions",
"serde",
"serde_json",
"serde_with",
"serial_test",
"test-context",
"tokio",
"tracing",
"ts-rs",
@@ -1,5 +1,5 @@
use lemmy_db_schema::{
newtypes::{CommentId, CommunityId, DbUrl, LanguageId, PostId},
newtypes::{CommentId, CommunityId, DbUrl, LanguageId, PostId, TagId},
ListingType,
PostFeatureType,
PostSortType,

@@ -37,6 +37,8 @@ pub struct CreatePost {
/// Instead of fetching a thumbnail, use a custom one.
#[cfg_attr(feature = "full", ts(optional))]
pub custom_thumbnail: Option<String>,
#[cfg_attr(feature = "full", ts(optional))]
pub tags: Option<Vec<TagId>>,
/// Time when this post should be scheduled. Null means publish immediately.
#[cfg_attr(feature = "full", ts(optional))]
pub scheduled_publish_time: Option<i64>,

@@ -166,6 +168,8 @@ pub struct EditPost {
/// Instead of fetching a thumbnail, use a custom one.
#[cfg_attr(feature = "full", ts(optional))]
pub custom_thumbnail: Option<String>,
#[cfg_attr(feature = "full", ts(optional))]
pub tags: Option<Vec<TagId>>,
/// Time when this post should be scheduled. Null means publish immediately.
#[cfg_attr(feature = "full", ts(optional))]
pub scheduled_publish_time: Option<i64>,
@@ -51,9 +51,11 @@ pub fn client_builder(settings: &Settings) -> ClientBuilder {
#[tracing::instrument(skip_all)]
pub async fn fetch_link_metadata(url: &Url, context: &LemmyContext) -> LemmyResult<LinkMetadata> {
info!("Fetching site metadata for url: {}", url);
// We only fetch the first 64kB of data in order to not waste bandwidth especially for large
// binary files
let bytes_to_fetch = 64 * 1024;
// We only fetch the first MB of data in order to not waste bandwidth especially for large
// binary files. This high limit is particularly needed for youtube, which includes a lot of
// javascript code before the opengraph tags. Mastodon also uses a 1 MB limit:
// https://github.com/mastodon/mastodon/blob/295ad6f19a016b3f16e1201ffcbb1b3ad6b455a2/app/lib/request.rb#L213
let bytes_to_fetch = 1024 * 1024;
let response = context
.client()
.get(url.as_str())
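(Aside: capping the downloaded body is usually done by reading the response as a stream and stopping once the byte budget is exhausted. The snippet below is only an illustrative sketch of that idea, assuming reqwest with its "stream" feature plus futures-util; it is not the helper this function actually uses.)

use futures_util::StreamExt;

/// Illustrative sketch, not part of this PR: read at most `limit` bytes of a response body.
async fn read_body_capped(
  response: reqwest::Response,
  limit: usize,
) -> Result<Vec<u8>, reqwest::Error> {
  let mut buf = Vec::new();
  let mut stream = response.bytes_stream();
  while let Some(chunk) = stream.next().await {
    let chunk = chunk?;
    let remaining = limit - buf.len();
    // Keep only as much of this chunk as still fits into the budget.
    buf.extend_from_slice(&chunk[..chunk.len().min(remaining)]);
    if buf.len() >= limit {
      break;
    }
  }
  Ok(buf)
}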
@@ -42,7 +42,8 @@ pub async fn markdown_rewrite_remote_links(
let mut local_url = local_url.to_string();
// restore title
if let Some(extra) = extra {
local_url = format!("{local_url} {extra}");
local_url.push(' ');
local_url.push_str(extra);
}
src.replace_range(start..end, local_url.as_str());
}
@@ -35,4 +35,5 @@ pub mod private_message_report;
pub mod registration_application;
pub mod secret;
pub mod site;
pub mod tag;
pub mod tagline;
crates/db_schema/src/impls/tag.rs (new file, 53 lines)

@@ -0,0 +1,53 @@
use crate::{
  newtypes::TagId,
  schema::{post_tag, tag},
  source::tag::{PostTagInsertForm, Tag, TagInsertForm},
  traits::Crud,
  utils::{get_conn, DbPool},
};
use diesel::{insert_into, result::Error, QueryDsl};
use diesel_async::RunQueryDsl;
use lemmy_utils::error::LemmyResult;

#[async_trait]
impl Crud for Tag {
  type InsertForm = TagInsertForm;

  type UpdateForm = TagInsertForm;

  type IdType = TagId;

  async fn create(pool: &mut DbPool<'_>, form: &Self::InsertForm) -> Result<Self, Error> {
    let conn = &mut get_conn(pool).await?;
    insert_into(tag::table)
      .values(form)
      .get_result::<Self>(conn)
      .await
  }

  async fn update(
    pool: &mut DbPool<'_>,
    pid: TagId,
    form: &Self::UpdateForm,
  ) -> Result<Self, Error> {
    let conn = &mut get_conn(pool).await?;
    diesel::update(tag::table.find(pid))
      .set(form)
      .get_result::<Self>(conn)
      .await
  }
}

impl PostTagInsertForm {
  pub async fn insert_tag_associations(
    pool: &mut DbPool<'_>,
    tags: &[PostTagInsertForm],
  ) -> LemmyResult<()> {
    let conn = &mut get_conn(pool).await?;
    insert_into(post_tag::table)
      .values(tags)
      .execute(conn)
      .await?;
    Ok(())
  }
}
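As a rough usage sketch of the impls above (mirroring what the test setup later in this diff does; `pool`, `community` and `post` are assumed to exist elsewhere), a tag is created through the Crud impl and then linked to a post:

// Sketch only, not part of this PR.
let tag = Tag::create(
  pool,
  &TagInsertForm {
    ap_id: Url::parse(&format!("{}/tags/example", community.actor_id))?.into(),
    name: "Example".into(),
    community_id: community.id,
    published: None, // defaults to now()
    updated: None,
    deleted: false,
  },
)
.await?;
PostTagInsertForm::insert_tag_associations(
  pool,
  &[PostTagInsertForm { post_id: post.id, tag_id: tag.id }],
)
.await?;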
@@ -288,3 +288,9 @@ impl InstanceId {
self.0
}
}

#[derive(Debug, Copy, Clone, Hash, Eq, PartialEq, Default, Serialize, Deserialize)]
#[cfg_attr(feature = "full", derive(DieselNewType, TS))]
#[cfg_attr(feature = "full", ts(export))]
/// The internal tag id.
pub struct TagId(pub i32);
@@ -826,6 +826,14 @@ diesel::table! {
}
}

diesel::table! {
post_tag (post_id, tag_id) {
post_id -> Int4,
tag_id -> Int4,
published -> Timestamptz,
}
}

diesel::table! {
private_message (id) {
id -> Int4,

@@ -961,6 +969,18 @@ diesel::table! {
}
}

diesel::table! {
tag (id) {
id -> Int4,
ap_id -> Text,
name -> Text,
community_id -> Int4,
published -> Timestamptz,
updated -> Nullable<Timestamptz>,
deleted -> Bool,
}
}

diesel::table! {
tagline (id) {
id -> Int4,

@@ -1042,6 +1062,8 @@ diesel::joinable!(post_aggregates -> instance (instance_id));
diesel::joinable!(post_aggregates -> person (creator_id));
diesel::joinable!(post_aggregates -> post (post_id));
diesel::joinable!(post_report -> post (post_id));
diesel::joinable!(post_tag -> post (post_id));
diesel::joinable!(post_tag -> tag (tag_id));
diesel::joinable!(private_message_report -> private_message (private_message_id));
diesel::joinable!(registration_application -> local_user (local_user_id));
diesel::joinable!(registration_application -> person (admin_id));

@@ -1052,6 +1074,7 @@ diesel::joinable!(site -> instance (instance_id));
diesel::joinable!(site_aggregates -> site (site_id));
diesel::joinable!(site_language -> language (language_id));
diesel::joinable!(site_language -> site (site_id));
diesel::joinable!(tag -> community (community_id));

diesel::allow_tables_to_appear_in_same_query!(
admin_allow_instance,

@@ -1111,6 +1134,7 @@ diesel::allow_tables_to_appear_in_same_query!(
post_actions,
post_aggregates,
post_report,
post_tag,
private_message,
private_message_report,
received_activity,

@@ -1122,5 +1146,6 @@ diesel::allow_tables_to_appear_in_same_query!(
site,
site_aggregates,
site_language,
tag,
tagline,
);
@@ -41,6 +41,7 @@ pub mod private_message_report;
pub mod registration_application;
pub mod secret;
pub mod site;
pub mod tag;
pub mod tagline;

/// Default value for columns like [community::Community.inbox_url] which are marked as serde(skip).
crates/db_schema/src/source/tag.rs (new file, 57 lines)

@@ -0,0 +1,57 @@
use crate::newtypes::{CommunityId, DbUrl, PostId, TagId};
#[cfg(feature = "full")]
use crate::schema::{post_tag, tag};
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use serde_with::skip_serializing_none;
#[cfg(feature = "full")]
use ts_rs::TS;

/// A tag that can be assigned to a post within a community.
/// The tag object is created by the community moderators.
/// The assignment happens by the post creator and can be updated by the community moderators.
///
/// A tag is a federatable object that gives additional context to another object, which can be
/// displayed and filtered on currently, we only have community post tags, which is a tag that is
/// created by post authors as well as mods of a community, to categorize a post. in the future we
/// may add more tag types, depending on the requirements, this will lead to either expansion of
/// this table (community_id optional, addition of tag_type enum) or split of this table / creation
/// of new tables.
#[skip_serializing_none]
#[derive(Debug, PartialEq, Serialize, Deserialize, Clone)]
#[cfg_attr(feature = "full", derive(TS, Queryable, Selectable, Identifiable))]
#[cfg_attr(feature = "full", diesel(table_name = tag))]
#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))]
#[cfg_attr(feature = "full", ts(export))]
pub struct Tag {
  pub id: TagId,
  pub ap_id: DbUrl,
  pub name: String,
  /// the community that owns this tag
  pub community_id: CommunityId,
  pub published: DateTime<Utc>,
  #[cfg_attr(feature = "full", ts(optional))]
  pub updated: Option<DateTime<Utc>>,
  pub deleted: bool,
}

#[derive(Debug, Clone)]
#[cfg_attr(feature = "full", derive(Insertable, AsChangeset))]
#[cfg_attr(feature = "full", diesel(table_name = tag))]
pub struct TagInsertForm {
  pub ap_id: DbUrl,
  pub name: String,
  pub community_id: CommunityId,
  // default now
  pub published: Option<DateTime<Utc>>,
  pub updated: Option<DateTime<Utc>>,
  pub deleted: bool,
}

#[derive(Debug, Clone)]
#[cfg_attr(feature = "full", derive(Insertable, AsChangeset))]
#[cfg_attr(feature = "full", diesel(table_name = post_tag))]
pub struct PostTagInsertForm {
  pub post_id: PostId,
  pub tag_id: TagId,
}
@@ -547,6 +547,11 @@ pub mod functions {

// really this function is variadic, this just adds the two-argument version
define_sql_function!(fn coalesce<T: diesel::sql_types::SqlType + diesel::sql_types::SingleValue>(x: diesel::sql_types::Nullable<T>, y: T) -> T);

define_sql_function! {
#[aggregate]
fn json_agg<T: diesel::sql_types::SqlType + diesel::sql_types::SingleValue>(obj: T) -> Json
}
}

pub const DELETED_REPLACEMENT_TEXT: &str = "*Permanently Deleted*";
@@ -35,6 +35,7 @@ diesel-async = { workspace = true, optional = true }
diesel_ltree = { workspace = true, optional = true }
serde = { workspace = true }
serde_with = { workspace = true }
serde_json = { workspace = true }
tracing = { workspace = true, optional = true }
ts-rs = { workspace = true, optional = true }
actix-web = { workspace = true, optional = true }

@@ -46,3 +47,4 @@ serial_test = { workspace = true }
tokio = { workspace = true }
pretty_assertions = { workspace = true }
url = { workspace = true }
test-context = "0.3.0"
@@ -14,6 +14,8 @@ pub mod local_user_view;
#[cfg(feature = "full")]
pub mod post_report_view;
#[cfg(feature = "full")]
pub mod post_tags_view;
#[cfg(feature = "full")]
pub mod post_view;
#[cfg(feature = "full")]
pub mod private_message_report_view;
crates/db_views/src/post_tags_view.rs (new file, 30 lines)

@@ -0,0 +1,30 @@
//! see post_view.rs for the reason for this json decoding
use crate::structs::PostTags;
use diesel::{
  deserialize::FromSql,
  pg::{Pg, PgValue},
  serialize::ToSql,
  sql_types::{self, Nullable},
};

impl FromSql<Nullable<sql_types::Json>, Pg> for PostTags {
  fn from_sql(bytes: PgValue) -> diesel::deserialize::Result<Self> {
    let value = <serde_json::Value as FromSql<sql_types::Json, Pg>>::from_sql(bytes)?;
    Ok(serde_json::from_value::<PostTags>(value)?)
  }
  fn from_nullable_sql(
    bytes: Option<<Pg as diesel::backend::Backend>::RawValue<'_>>,
  ) -> diesel::deserialize::Result<Self> {
    match bytes {
      Some(bytes) => Self::from_sql(bytes),
      None => Ok(Self { tags: vec![] }),
    }
  }
}

impl ToSql<Nullable<sql_types::Json>, Pg> for PostTags {
  fn to_sql(&self, out: &mut diesel::serialize::Output<Pg>) -> diesel::serialize::Result {
    let value = serde_json::to_value(self)?;
    <serde_json::Value as ToSql<sql_types::Json, Pg>>::to_sql(&value, &mut out.reborrow())
  }
}
@@ -5,7 +5,9 @@ use diesel::{
pg::Pg,
query_builder::AsQuery,
result::Error,
sql_types,
BoolExpressionMethods,
BoxableExpression,
ExpressionMethods,
JoinOnDsl,
NullableExpressionMethods,

@@ -32,6 +34,8 @@ use lemmy_db_schema::{
post,
post_actions,
post_aggregates,
post_tag,
tag,
},
source::{
community::{CommunityFollower, CommunityFollowerState},
@@ -80,6 +84,31 @@ fn queries<'a>() -> Queries<
// TODO maybe this should go to localuser also
let all_joins = move |query: post_aggregates::BoxedQuery<'a, Pg>,
my_person_id: Option<PersonId>| {
// We fetch post tags by letting postgresql aggregate them internally in a subquery into JSON.
// This is a simple way to join m rows into n rows without duplicating the data and getting
// complex diesel types. In pure SQL you would usually do this either using a LEFT JOIN + then
// aggregating the results in the application code. But this results in a lot of duplicate
// data transferred (since each post will be returned once per tag that it has) and more
// complicated application code. The diesel docs suggest doing three separate sequential queries
// in this case (see https://diesel.rs/guides/relations.html#many-to-many-or-mn ): First fetch
// the posts, then fetch all relevant post-tag-association tuples from the db, and then fetch
// all the relevant tag objects.
//
// If we want to filter by post tag we will have to add
// separate logic below since this subquery can't affect filtering, but it is simple (`WHERE
// exists (select 1 from post_community_post_tags where community_post_tag_id in (1,2,3,4)`).
let post_tags: Box<
dyn BoxableExpression<_, Pg, SqlType = sql_types::Nullable<sql_types::Json>>,
> = Box::new(
post_tag::table
.inner_join(tag::table)
.select(diesel::dsl::sql::<diesel::sql_types::Json>(
"json_agg(tag.*)",
))
.filter(post_tag::post_id.eq(post_aggregates::post_id))
.filter(tag::deleted.eq(false))
.single_value(),
);
query
.inner_join(person::table)
.inner_join(community::table)
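(For orientation: the boxed expression above corresponds roughly to a correlated scalar subquery of the following shape. This is a sketch of the intent, not the exact SQL diesel generates.)

SELECT json_agg(tag.*)
FROM post_tag
INNER JOIN tag ON tag.id = post_tag.tag_id
WHERE post_tag.post_id = post_aggregates.post_id
  AND tag.deleted = false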
@@ -136,6 +165,7 @@ fn queries<'a>() -> Queries<
post_aggregates::comments.nullable() - post_actions::read_comments_amount.nullable(),
post_aggregates::comments,
),
post_tags,
))
};
@@ -603,11 +633,13 @@ impl<'a> PostQuery<'a> {
}
}

#[allow(clippy::indexing_slicing)]
#[expect(clippy::expect_used)]
#[cfg(test)]
mod tests {
use crate::{
post_view::{PaginationCursorData, PostQuery, PostView},
structs::LocalUserView,
structs::{LocalUserView, PostTags},
};
use chrono::Utc;
use diesel_async::SimpleAsyncConnection;

@@ -651,29 +683,33 @@
PostUpdateForm,
},
site::Site,
tag::{PostTagInsertForm, Tag, TagInsertForm},
},
traits::{Bannable, Blockable, Crud, Followable, Joinable, Likeable, Saveable},
utils::{build_db_pool, build_db_pool_for_tests, get_conn, uplete, DbPool, RANK_DEFAULT},
utils::{build_db_pool, get_conn, uplete, ActualDbPool, DbPool, RANK_DEFAULT},
CommunityVisibility,
PostSortType,
SubscribedType,
};
use lemmy_utils::error::LemmyResult;
use lemmy_utils::error::{LemmyErrorType, LemmyResult};
use pretty_assertions::assert_eq;
use serial_test::serial;
use std::time::{Duration, Instant};
use test_context::{test_context, AsyncTestContext};
use url::Url;

const POST_WITH_ANOTHER_TITLE: &str = "Another title";
const POST_BY_BLOCKED_PERSON: &str = "post by blocked person";
const POST_BY_BOT: &str = "post by bot";
const POST: &str = "post";
const POST_WITH_TAGS: &str = "post with tags";

fn names(post_views: &[PostView]) -> Vec<&str> {
post_views.iter().map(|i| i.post.name.as_str()).collect()
}

struct Data {
pool: ActualDbPool,
inserted_instance: Instance,
local_user_view: LocalUserView,
blocked_local_user_view: LocalUserView,

@@ -681,10 +717,19 @@
inserted_community: Community,
inserted_post: Post,
inserted_bot_post: Post,
inserted_post_with_tags: Post,
tag_1: Tag,
tag_2: Tag,
site: Site,
}

impl Data {
fn pool(&self) -> ActualDbPool {
self.pool.clone()
}
pub fn pool2(&self) -> DbPool<'_> {
DbPool::Pool(&self.pool)
}
fn default_post_query(&self) -> PostQuery<'_> {
PostQuery {
sort: Some(PostSortType::New),

@@ -692,9 +737,10 @@
..Default::default()
}
}
}

async fn init_data(pool: &mut DbPool<'_>) -> LemmyResult<Data> {
async fn setup() -> LemmyResult<Data> {
let actual_pool = build_db_pool()?;
let pool = &mut (&actual_pool).into();
let inserted_instance = Instance::read_or_create(pool, "my_domain.tld".to_string()).await?;

let new_person = PersonInsertForm::test_form(inserted_instance.id, "tegan");
@@ -752,11 +798,38 @@

PersonBlock::block(pool, &person_block).await?;

// Two community post tags
let tag_1 = Tag::create(
pool,
&TagInsertForm {
ap_id: Url::parse(&format!("{}/tags/test_tag1", inserted_community.actor_id))?.into(),
name: "Test Tag 1".into(),
community_id: inserted_community.id,
published: None,
updated: None,
deleted: false,
},
)
.await?;
let tag_2 = Tag::create(
pool,
&TagInsertForm {
ap_id: Url::parse(&format!("{}/tags/test_tag2", inserted_community.actor_id))?.into(),
name: "Test Tag 2".into(),
community_id: inserted_community.id,
published: None,
updated: None,
deleted: false,
},
)
.await?;

// A sample post
let new_post = PostInsertForm {
language_id: Some(LanguageId(47)),
..PostInsertForm::new(POST.to_string(), inserted_person.id, inserted_community.id)
};

let inserted_post = Post::create(pool, &new_post).await?;

let new_bot_post = PostInsertForm::new(

@@ -766,6 +839,29 @@
);
let inserted_bot_post = Post::create(pool, &new_bot_post).await?;

// A sample post with tags
let new_post = PostInsertForm {
language_id: Some(LanguageId(47)),
..PostInsertForm::new(
POST_WITH_TAGS.to_string(),
inserted_person.id,
inserted_community.id,
)
};

let inserted_post_with_tags = Post::create(pool, &new_post).await?;
let inserted_tags = vec![
PostTagInsertForm {
post_id: inserted_post_with_tags.id,
tag_id: tag_1.id,
},
PostTagInsertForm {
post_id: inserted_post_with_tags.id,
tag_id: tag_2.id,
},
];
PostTagInsertForm::insert_tag_associations(pool, &inserted_tags).await?;

let local_user_view = LocalUserView {
local_user: inserted_local_user,
local_user_vote_display_mode: LocalUserVoteDisplayMode::default(),

@@ -798,6 +894,7 @@
};

Ok(Data {
pool: actual_pool,
inserted_instance,
local_user_view,
blocked_local_user_view,

@@ -805,16 +902,41 @@
inserted_community,
inserted_post,
inserted_bot_post,
inserted_post_with_tags,
tag_1,
tag_2,
site,
})
}
async fn teardown(data: Data) -> LemmyResult<()> {
let pool = &mut data.pool2();
// let pool = &mut (&pool).into();
let num_deleted = Post::delete(pool, data.inserted_post.id).await?;
Community::delete(pool, data.inserted_community.id).await?;
Person::delete(pool, data.local_user_view.person.id).await?;
Person::delete(pool, data.inserted_bot.id).await?;
Person::delete(pool, data.blocked_local_user_view.person.id).await?;
Instance::delete(pool, data.inserted_instance.id).await?;
assert_eq!(1, num_deleted);

Ok(())
}
}
impl AsyncTestContext for Data {
async fn setup() -> Self {
Data::setup().await.expect("setup failed")
}
async fn teardown(self) {
Data::teardown(self).await.expect("teardown failed")
}
}

#[test_context(Data)]
#[tokio::test]
#[serial]
async fn post_listing_with_person() -> LemmyResult<()> {
let pool = &build_db_pool()?;
async fn post_listing_with_person(data: &mut Data) -> LemmyResult<()> {
let pool = &data.pool();
let pool = &mut pool.into();
let mut data = init_data(pool).await?;

let local_user_form = LocalUserUpdateForm {
show_bot_accounts: Some(false),
@@ -823,12 +945,14 @@
LocalUser::update(pool, data.local_user_view.local_user.id, &local_user_form).await?;
data.local_user_view.local_user.show_bot_accounts = false;

let read_post_listing = PostQuery {
let mut read_post_listing = PostQuery {
community_id: Some(data.inserted_community.id),
..data.default_post_query()
}
.list(&data.site, pool)
.await?;
// remove tags post
read_post_listing.remove(0);

let post_listing_single_with_person = PostView::read(
pool,

@@ -838,7 +962,7 @@
)
.await?;

let expected_post_listing_with_user = expected_post_view(&data, pool).await?;
let expected_post_listing_with_user = expected_post_view(data, pool).await?;

// Should be only one person, IE the bot post, and blocked should be missing
assert_eq!(

@@ -864,17 +988,19 @@
.list(&data.site, pool)
.await?;
// should include bot post which has "undetermined" language
assert_eq!(vec![POST_BY_BOT, POST], names(&post_listings_with_bots));

cleanup(data, pool).await
assert_eq!(
vec![POST_WITH_TAGS, POST_BY_BOT, POST],
names(&post_listings_with_bots)
);
Ok(())
}

#[test_context(Data)]
#[tokio::test]
#[serial]
async fn post_listing_no_person() -> LemmyResult<()> {
let pool = &build_db_pool()?;
async fn post_listing_no_person(data: &mut Data) -> LemmyResult<()> {
let pool = &data.pool();
let pool = &mut pool.into();
let data = init_data(pool).await?;

let read_post_listing_multiple_no_person = PostQuery {
community_id: Some(data.inserted_community.id),

@@ -887,32 +1013,31 @@
let read_post_listing_single_no_person =
PostView::read(pool, data.inserted_post.id, None, false).await?;

let expected_post_listing_no_person = expected_post_view(&data, pool).await?;
let expected_post_listing_no_person = expected_post_view(data, pool).await?;

// Should be 2 posts, with the bot post, and the blocked
assert_eq!(
vec![POST_BY_BOT, POST, POST_BY_BLOCKED_PERSON],
vec![POST_WITH_TAGS, POST_BY_BOT, POST, POST_BY_BLOCKED_PERSON],
names(&read_post_listing_multiple_no_person)
);

assert_eq!(
Some(&expected_post_listing_no_person),
read_post_listing_multiple_no_person.get(1)
read_post_listing_multiple_no_person.get(2)
);
assert_eq!(
expected_post_listing_no_person,
read_post_listing_single_no_person
);

cleanup(data, pool).await
Ok(())
}

#[test_context(Data)]
#[tokio::test]
#[serial]
async fn post_listing_title_only() -> LemmyResult<()> {
let pool = &build_db_pool()?;
async fn post_listing_title_only(data: &mut Data) -> LemmyResult<()> {
let pool = &data.pool();
let pool = &mut pool.into();
let data = init_data(pool).await?;

// A post which contains the search them 'Post' not in the title (but in the body)
let new_post = PostInsertForm {
@@ -950,6 +1075,7 @@
assert_eq!(
vec![
POST_WITH_ANOTHER_TITLE,
POST_WITH_TAGS,
POST_BY_BOT,
POST,
POST_BY_BLOCKED_PERSON

@@ -959,19 +1085,19 @@

// Should be 3 posts when we search for title only
assert_eq!(
vec![POST_BY_BOT, POST, POST_BY_BLOCKED_PERSON],
vec![POST_WITH_TAGS, POST_BY_BOT, POST, POST_BY_BLOCKED_PERSON],
names(&read_post_listing_by_title_only)
);
Post::delete(pool, inserted_post.id).await?;
cleanup(data, pool).await
Ok(())
}

#[test_context(Data)]
#[tokio::test]
#[serial]
async fn post_listing_block_community() -> LemmyResult<()> {
let pool = &build_db_pool()?;
async fn post_listing_block_community(data: &mut Data) -> LemmyResult<()> {
let pool = &data.pool();
let pool = &mut pool.into();
let data = init_data(pool).await?;

let community_block = CommunityBlockForm {
person_id: data.local_user_view.person.id,

@@ -989,15 +1115,15 @@
assert_eq!(read_post_listings_with_person_after_block, vec![]);

CommunityBlock::unblock(pool, &community_block).await?;
cleanup(data, pool).await
Ok(())
}

#[test_context(Data)]
#[tokio::test]
#[serial]
async fn post_listing_like() -> LemmyResult<()> {
let pool = &build_db_pool()?;
async fn post_listing_like(data: &mut Data) -> LemmyResult<()> {
let pool = &data.pool();
let pool = &mut pool.into();
let mut data = init_data(pool).await?;

let post_like_form =
PostLikeForm::new(data.inserted_post.id, data.local_user_view.person.id, 1);

@@ -1020,7 +1146,7 @@
)
.await?;

let mut expected_post_with_upvote = expected_post_view(&data, pool).await?;
let mut expected_post_with_upvote = expected_post_view(data, pool).await?;
expected_post_with_upvote.my_vote = Some(1);
expected_post_with_upvote.counts.score = 1;
expected_post_with_upvote.counts.upvotes = 1;

@@ -1033,26 +1159,27 @@
LocalUser::update(pool, data.local_user_view.local_user.id, &local_user_form).await?;
data.local_user_view.local_user.show_bot_accounts = false;

let read_post_listing = PostQuery {
let mut read_post_listing = PostQuery {
community_id: Some(data.inserted_community.id),
..data.default_post_query()
}
.list(&data.site, pool)
.await?;
read_post_listing.remove(0);
assert_eq!(vec![expected_post_with_upvote], read_post_listing);

let like_removed =
PostLike::remove(pool, data.local_user_view.person.id, data.inserted_post.id).await?;
assert_eq!(uplete::Count::only_deleted(1), like_removed);
cleanup(data, pool).await
Ok(())
}

#[test_context(Data)]
#[tokio::test]
#[serial]
async fn post_listing_liked_only() -> LemmyResult<()> {
let pool = &build_db_pool()?;
async fn post_listing_liked_only(data: &mut Data) -> LemmyResult<()> {
let pool = &data.pool();
let pool = &mut pool.into();
let data = init_data(pool).await?;

// Like both the bot post, and your own
// The liked_only should not show your own post

@@ -1087,15 +1214,15 @@
// Should be no posts
assert_eq!(read_disliked_post_listing, vec![]);

cleanup(data, pool).await
Ok(())
}

#[test_context(Data)]
#[tokio::test]
#[serial]
async fn post_listing_saved_only() -> LemmyResult<()> {
let pool = &build_db_pool()?;
async fn post_listing_saved_only(data: &mut Data) -> LemmyResult<()> {
let pool = &data.pool();
let pool = &mut pool.into();
let data = init_data(pool).await?;

// Save only the bot post
// The saved_only should only show the bot post

@@ -1115,15 +1242,15 @@
// This should only include the bot post, not the one you created
assert_eq!(vec![POST_BY_BOT], names(&read_saved_post_listing));

cleanup(data, pool).await
Ok(())
}

#[test_context(Data)]
#[tokio::test]
#[serial]
async fn creator_info() -> LemmyResult<()> {
let pool = &build_db_pool()?;
async fn creator_info(data: &mut Data) -> LemmyResult<()> {
let pool = &data.pool();
let pool = &mut pool.into();
let data = init_data(pool).await?;

// Make one of the inserted persons a moderator
let person_id = data.local_user_view.person.id;
@@ -1145,23 +1272,24 @@
.collect::<Vec<_>>();

let expected_post_listing = vec![
("tegan".to_owned(), true, true),
("mybot".to_owned(), false, false),
("tegan".to_owned(), true, true),
];

assert_eq!(expected_post_listing, post_listing);

cleanup(data, pool).await
Ok(())
}

#[test_context(Data)]
#[tokio::test]
#[serial]
async fn post_listing_person_language() -> LemmyResult<()> {
async fn post_listing_person_language(data: &mut Data) -> LemmyResult<()> {
const EL_POSTO: &str = "el posto";

let pool = &build_db_pool()?;
let pool = &data.pool();
let pool = &mut pool.into();
let data = init_data(pool).await?;

let spanish_id = Language::read_id_from_code(pool, "es").await?;

@@ -1180,17 +1308,23 @@
let post_listings_all = data.default_post_query().list(&data.site, pool).await?;

// no language filters specified, all posts should be returned
assert_eq!(vec![EL_POSTO, POST_BY_BOT, POST], names(&post_listings_all));
assert_eq!(
vec![EL_POSTO, POST_WITH_TAGS, POST_BY_BOT, POST],
names(&post_listings_all)
);

LocalUserLanguage::update(pool, vec![french_id], data.local_user_view.local_user.id).await?;

let post_listing_french = data.default_post_query().list(&data.site, pool).await?;

// only one post in french and one undetermined should be returned
assert_eq!(vec![POST_BY_BOT, POST], names(&post_listing_french));
assert_eq!(
vec![POST_WITH_TAGS, POST_BY_BOT, POST],
names(&post_listing_french)
);
assert_eq!(
Some(french_id),
post_listing_french.get(1).map(|p| p.post.language_id)
post_listing_french.get(2).map(|p| p.post.language_id)
);

LocalUserLanguage::update(

@@ -1207,6 +1341,7 @@
.map(|p| (p.post.name, p.post.language_id))
.collect::<Vec<_>>();
let expected_post_listings_french_und = vec![
(POST_WITH_TAGS.to_owned(), french_id),
(POST_BY_BOT.to_owned(), UNDETERMINED_ID),
(POST.to_owned(), french_id),
];

@@ -1214,15 +1349,15 @@
// french post and undetermined language post should be returned
assert_eq!(expected_post_listings_french_und, post_listings_french_und);

cleanup(data, pool).await
Ok(())
}

#[test_context(Data)]
#[tokio::test]
#[serial]
async fn post_listings_removed() -> LemmyResult<()> {
let pool = &build_db_pool()?;
async fn post_listings_removed(data: &mut Data) -> LemmyResult<()> {
let pool = &data.pool();
let pool = &mut pool.into();
let mut data = init_data(pool).await?;

// Remove the post
Post::update(

@@ -1237,7 +1372,7 @@

// Make sure you don't see the removed post in the results
let post_listings_no_admin = data.default_post_query().list(&data.site, pool).await?;
assert_eq!(vec![POST], names(&post_listings_no_admin));
assert_eq!(vec![POST_WITH_TAGS, POST], names(&post_listings_no_admin));

// Removed bot post is shown to admins on its profile page
data.local_user_view.local_user.admin = true;

@@ -1249,15 +1384,15 @@
.await?;
assert_eq!(vec![POST_BY_BOT], names(&post_listings_is_admin));

cleanup(data, pool).await
Ok(())
}

#[test_context(Data)]
#[tokio::test]
#[serial]
async fn post_listings_deleted() -> LemmyResult<()> {
let pool = &build_db_pool()?;
async fn post_listings_deleted(data: &mut Data) -> LemmyResult<()> {
let pool = &data.pool();
let pool = &mut pool.into();
let data = init_data(pool).await?;

// Delete the post
Post::update(

@@ -1288,15 +1423,15 @@
assert_eq!(expect_contains_deleted, contains_deleted);
}

cleanup(data, pool).await
Ok(())
}

#[test_context(Data)]
#[tokio::test]
#[serial]
async fn post_listings_hidden_community() -> LemmyResult<()> {
let pool = &build_db_pool()?;
async fn post_listings_hidden_community(data: &mut Data) -> LemmyResult<()> {
let pool = &data.pool();
let pool = &mut pool.into();
let data = init_data(pool).await?;

Community::update(
pool,
@@ -1324,17 +1459,17 @@
let posts = data.default_post_query().list(&data.site, pool).await?;
assert!(!posts.is_empty());

cleanup(data, pool).await
Ok(())
}

#[test_context(Data)]
#[tokio::test]
#[serial]
async fn post_listing_instance_block() -> LemmyResult<()> {
async fn post_listing_instance_block(data: &mut Data) -> LemmyResult<()> {
const POST_FROM_BLOCKED_INSTANCE: &str = "post on blocked instance";

let pool = &build_db_pool()?;
let pool = &data.pool();
let pool = &mut pool.into();
let data = init_data(pool).await?;

let blocked_instance = Instance::read_or_create(pool, "another_domain.tld".to_string()).await?;

@@ -1359,7 +1494,12 @@
// no instance block, should return all posts
let post_listings_all = data.default_post_query().list(&data.site, pool).await?;
assert_eq!(
vec![POST_FROM_BLOCKED_INSTANCE, POST_BY_BOT, POST],
vec![
POST_FROM_BLOCKED_INSTANCE,
POST_WITH_TAGS,
POST_BY_BOT,
POST
],
names(&post_listings_all)
);

@@ -1372,7 +1512,10 @@

// now posts from communities on that instance should be hidden
let post_listings_blocked = data.default_post_query().list(&data.site, pool).await?;
assert_eq!(vec![POST_BY_BOT, POST], names(&post_listings_blocked));
assert_eq!(
vec![POST_WITH_TAGS, POST_BY_BOT, POST],
names(&post_listings_blocked)
);
assert!(post_listings_blocked
.iter()
.all(|p| p.post.id != post_from_blocked_instance.id));

@@ -1381,20 +1524,25 @@
InstanceBlock::unblock(pool, &block_form).await?;
let post_listings_blocked = data.default_post_query().list(&data.site, pool).await?;
assert_eq!(
vec![POST_FROM_BLOCKED_INSTANCE, POST_BY_BOT, POST],
vec![
POST_FROM_BLOCKED_INSTANCE,
POST_WITH_TAGS,
POST_BY_BOT,
POST
],
names(&post_listings_blocked)
);

Instance::delete(pool, blocked_instance.id).await?;
cleanup(data, pool).await
Ok(())
}

#[test_context(Data)]
#[tokio::test]
#[serial]
async fn pagination_includes_each_post_once() -> LemmyResult<()> {
let pool = &build_db_pool()?;
async fn pagination_includes_each_post_once(data: &mut Data) -> LemmyResult<()> {
let pool = &data.pool();
let pool = &mut pool.into();
let data = init_data(pool).await?;

let community_form = CommunityInsertForm::new(
data.inserted_instance.id,

@@ -1496,15 +1644,15 @@
assert_eq!(inserted_post_ids, listed_post_ids);

Community::delete(pool, inserted_community.id).await?;
cleanup(data, pool).await
Ok(())
}

#[test_context(Data)]
#[tokio::test]
#[serial]
async fn post_listings_hide_read() -> LemmyResult<()> {
let pool = &build_db_pool()?;
async fn post_listings_hide_read(data: &mut Data) -> LemmyResult<()> {
let pool = &data.pool();
let pool = &mut pool.into();
let mut data = init_data(pool).await?;

// Make sure local user hides read posts
let local_user_form = LocalUserUpdateForm {

@@ -1520,7 +1668,7 @@

// Make sure you don't see the read post in the results
let post_listings_hide_read = data.default_post_query().list(&data.site, pool).await?;
assert_eq!(vec![POST], names(&post_listings_hide_read));
assert_eq!(vec![POST_WITH_TAGS, POST], names(&post_listings_hide_read));

// Test with the show_read override as true
let post_listings_show_read_true = PostQuery {

@@ -1530,7 +1678,7 @@
.list(&data.site, pool)
.await?;
assert_eq!(
vec![POST_BY_BOT, POST],
vec![POST_WITH_TAGS, POST_BY_BOT, POST],
names(&post_listings_show_read_true)
);
@@ -1541,16 +1689,19 @@
}
.list(&data.site, pool)
.await?;
assert_eq!(vec![POST], names(&post_listings_show_read_false));
cleanup(data, pool).await
assert_eq!(
vec![POST_WITH_TAGS, POST],
names(&post_listings_show_read_false)
);
Ok(())
}

#[test_context(Data)]
#[tokio::test]
#[serial]
async fn post_listings_hide_hidden() -> LemmyResult<()> {
let pool = &build_db_pool()?;
async fn post_listings_hide_hidden(data: &mut Data) -> LemmyResult<()> {
let pool = &data.pool();
let pool = &mut pool.into();
let data = init_data(pool).await?;

// Mark a post as hidden
PostHide::hide(

@@ -1562,7 +1713,10 @@

// Make sure you don't see the hidden post in the results
let post_listings_hide_hidden = data.default_post_query().list(&data.site, pool).await?;
assert_eq!(vec![POST], names(&post_listings_hide_hidden));
assert_eq!(
vec![POST_WITH_TAGS, POST],
names(&post_listings_hide_hidden)
);

// Make sure it does come back with the show_hidden option
let post_listings_show_hidden = PostQuery {

@@ -1573,20 +1727,23 @@
}
.list(&data.site, pool)
.await?;
assert_eq!(vec![POST_BY_BOT, POST], names(&post_listings_show_hidden));
assert_eq!(
vec![POST_WITH_TAGS, POST_BY_BOT, POST],
names(&post_listings_show_hidden)
);

// Make sure that hidden field is true.
assert!(&post_listings_show_hidden.first().is_some_and(|p| p.hidden));
assert!(&post_listings_show_hidden.get(1).is_some_and(|p| p.hidden));

cleanup(data, pool).await
Ok(())
}

#[test_context(Data)]
#[tokio::test]
#[serial]
async fn post_listings_hide_nsfw() -> LemmyResult<()> {
let pool = &build_db_pool()?;
async fn post_listings_hide_nsfw(data: &mut Data) -> LemmyResult<()> {
let pool = &data.pool();
let pool = &mut pool.into();
let data = init_data(pool).await?;

// Mark a post as nsfw
let update_form = PostUpdateForm {

@@ -1594,11 +1751,11 @@
..Default::default()
};

Post::update(pool, data.inserted_bot_post.id, &update_form).await?;
Post::update(pool, data.inserted_post_with_tags.id, &update_form).await?;

// Make sure you don't see the nsfw post in the regular results
let post_listings_hide_nsfw = data.default_post_query().list(&data.site, pool).await?;
assert_eq!(vec![POST], names(&post_listings_hide_nsfw));
assert_eq!(vec![POST_BY_BOT, POST], names(&post_listings_hide_nsfw));

// Make sure it does come back with the show_nsfw option
let post_listings_show_nsfw = PostQuery {

@@ -1609,22 +1766,19 @@
}
.list(&data.site, pool)
.await?;
assert_eq!(vec![POST_BY_BOT, POST], names(&post_listings_show_nsfw));
assert_eq!(
vec![POST_WITH_TAGS, POST_BY_BOT, POST],
names(&post_listings_show_nsfw)
);

// Make sure that nsfw field is true.
assert!(&post_listings_show_nsfw.first().is_some_and(|p| p.post.nsfw));

cleanup(data, pool).await
}

async fn cleanup(data: Data, pool: &mut DbPool<'_>) -> LemmyResult<()> {
let num_deleted = Post::delete(pool, data.inserted_post.id).await?;
Community::delete(pool, data.inserted_community.id).await?;
Person::delete(pool, data.local_user_view.person.id).await?;
Person::delete(pool, data.inserted_bot.id).await?;
Person::delete(pool, data.blocked_local_user_view.person.id).await?;
Instance::delete(pool, data.inserted_instance.id).await?;
assert_eq!(1, num_deleted);
assert!(
&post_listings_show_nsfw
.first()
.ok_or(LemmyErrorType::NotFound)?
.post
.nsfw
);

Ok(())
}

@@ -1746,15 +1900,16 @@
hidden: false,
saved: false,
creator_blocked: false,
tags: PostTags::default(),
})
}

#[test_context(Data)]
#[tokio::test]
#[serial]
async fn local_only_instance() -> LemmyResult<()> {
let pool = &build_db_pool_for_tests();
async fn local_only_instance(data: &mut Data) -> LemmyResult<()> {
let pool = &data.pool();
let pool = &mut pool.into();
let data = init_data(pool).await?;

Community::update(
pool,
@@ -1779,7 +1934,7 @@
}
.list(&data.site, pool)
.await?;
assert_eq!(2, authenticated_query.len());
assert_eq!(3, authenticated_query.len());

let unauthenticated_post = PostView::read(pool, data.inserted_post.id, None, false).await;
assert!(unauthenticated_post.is_err());

@@ -1793,16 +1948,15 @@
.await;
assert!(authenticated_post.is_ok());

cleanup(data, pool).await?;
Ok(())
}

#[test_context(Data)]
#[tokio::test]
#[serial]
async fn post_listing_local_user_banned_from_community() -> LemmyResult<()> {
let pool = &build_db_pool()?;
async fn post_listing_local_user_banned_from_community(data: &mut Data) -> LemmyResult<()> {
let pool = &data.pool();
let pool = &mut pool.into();
let data = init_data(pool).await?;

// Test that post view shows if local user is blocked from community
let banned_from_comm_person = PersonInsertForm::test_form(data.inserted_instance.id, "jill");

@@ -1837,15 +1991,15 @@
assert!(post_view.banned_from_community);

Person::delete(pool, inserted_banned_from_comm_person.id).await?;
cleanup(data, pool).await
Ok(())
}

#[test_context(Data)]
#[tokio::test]
#[serial]
async fn post_listing_local_user_not_banned_from_community() -> LemmyResult<()> {
let pool = &build_db_pool()?;
async fn post_listing_local_user_not_banned_from_community(data: &mut Data) -> LemmyResult<()> {
let pool = &data.pool();
let pool = &mut pool.into();
let data = init_data(pool).await?;

let post_view = PostView::read(
pool,

@@ -1857,15 +2011,15 @@

assert!(!post_view.banned_from_community);

cleanup(data, pool).await
Ok(())
}

#[test_context(Data)]
#[tokio::test]
#[serial]
async fn speed_check() -> LemmyResult<()> {
let pool = &build_db_pool()?;
async fn speed_check(data: &mut Data) -> LemmyResult<()> {
let pool = &data.pool();
let pool = &mut pool.into();
let data = init_data(pool).await?;

// Make sure the post_view query is less than this time
let duration_max = Duration::from_millis(80);

@@ -1913,15 +2067,15 @@
duration_max
);

cleanup(data, pool).await
Ok(())
}

#[test_context(Data)]
#[tokio::test]
#[serial]
async fn post_listings_no_comments_only() -> LemmyResult<()> {
let pool = &build_db_pool()?;
async fn post_listings_no_comments_only(data: &mut Data) -> LemmyResult<()> {
let pool = &data.pool();
let pool = &mut pool.into();
let data = init_data(pool).await?;

// Create a comment for a post
let comment_form = CommentInsertForm::new(

@@ -1941,17 +2095,20 @@
.list(&data.site, pool)
.await?;

assert_eq!(vec![POST_BY_BOT], names(&post_listings_no_comments));
assert_eq!(
vec![POST_WITH_TAGS, POST_BY_BOT],
names(&post_listings_no_comments)
);

cleanup(data, pool).await
Ok(())
}

#[test_context(Data)]
#[tokio::test]
#[serial]
async fn post_listing_private_community() -> LemmyResult<()> {
let pool = &build_db_pool()?;
async fn post_listing_private_community(data: &mut Data) -> LemmyResult<()> {
let pool = &data.pool();
let pool = &mut pool.into();
let mut data = init_data(pool).await?;

// Mark community as private
Community::update(

@@ -2003,7 +2160,7 @@
}
.list(&data.site, pool)
.await?;
assert_eq!(2, read_post_listing.len());
assert_eq!(3, read_post_listing.len());
let post_view = PostView::read(
pool,
data.inserted_post.id,

@@ -2030,7 +2187,7 @@
}
.list(&data.site, pool)
.await?;
assert_eq!(2, read_post_listing.len());
assert_eq!(3, read_post_listing.len());
let post_view = PostView::read(
pool,
data.inserted_post.id,

@@ -2040,6 +2197,33 @@
.await;
assert!(post_view.is_ok());

cleanup(data, pool).await
Ok(())
}

#[test_context(Data)]
#[tokio::test]
#[serial]
async fn post_tags_present(data: &mut Data) -> LemmyResult<()> {
let pool = &data.pool();
let pool = &mut pool.into();

let post_view = PostView::read(
pool,
data.inserted_post_with_tags.id,
Some(&data.local_user_view.local_user),
false,
)
.await?;

assert_eq!(2, post_view.tags.tags.len());
assert_eq!(data.tag_1.name, post_view.tags.tags[0].name);
assert_eq!(data.tag_2.name, post_view.tags.tags[1].name);

let all_posts = data.default_post_query().list(&data.site, pool).await?;
assert_eq!(2, all_posts[0].tags.tags.len()); // post with tags
assert_eq!(0, all_posts[1].tags.tags.len()); // bot post
assert_eq!(0, all_posts[2].tags.tags.len()); // normal post

Ok(())
}
}
@@ -1,5 +1,7 @@
#[cfg(feature = "full")]
use diesel::Queryable;
#[cfg(feature = "full")]
use diesel::{deserialize::FromSqlRow, expression::AsExpression, sql_types};
use lemmy_db_schema::{
aggregates::structs::{CommentAggregates, PersonAggregates, PostAggregates, SiteAggregates},
source::{

@@ -20,6 +22,7 @@ use lemmy_db_schema::{
private_message_report::PrivateMessageReport,
registration_application::RegistrationApplication,
site::Site,
tag::Tag,
},
SubscribedType,
};

@@ -157,6 +160,7 @@ pub struct PostView {
#[cfg_attr(feature = "full", ts(optional))]
pub my_vote: Option<i16>,
pub unread_comments: i64,
pub tags: PostTags,
}

#[derive(Debug, PartialEq, Eq, Serialize, Deserialize, Clone)]

@@ -289,3 +293,12 @@ pub enum ReportCombinedView {
Comment(CommentReportView),
PrivateMessage(PrivateMessageReportView),
}

#[derive(Clone, serde::Serialize, serde::Deserialize, Debug, PartialEq, Default)]
#[cfg_attr(feature = "full", derive(TS, FromSqlRow, AsExpression))]
#[serde(transparent)]
#[cfg_attr(feature = "full", diesel(sql_type = Nullable<sql_types::Json>))]
/// we wrap this in a struct so we can implement FromSqlRow<Json> for it
pub struct PostTags {
pub tags: Vec<Tag>,
}
@@ -454,7 +454,6 @@ fn build_item(
protocol_and_hostname: &str,
) -> LemmyResult<Item> {
// TODO add images
let author_url = format!("{protocol_and_hostname}/u/{creator_name}");
let guid = Some(Guid {
permalink: true,
value: url.to_owned(),

@@ -464,7 +463,8 @@ fn build_item(
Ok(Item {
title: Some(format!("Reply from {creator_name}")),
author: Some(format!(
"/u/{creator_name} <a href=\"{author_url}\">(link)</a>"
"/u/{creator_name} <a href=\"{}\">(link)</a>",
format_args!("{protocol_and_hostname}/u/{creator_name}")
)),
pub_date: Some(published.to_rfc2822()),
comments: Some(url.to_owned()),
@@ -24,7 +24,8 @@ pub fn markdown_rewrite_image_links(mut src: String) -> (String, Vec<Url>) {
);
// restore custom emoji format
if let Some(extra) = extra {
proxied = format!("{proxied} {extra}");
proxied.push(' ');
proxied.push_str(extra);
}
src.replace_range(start..end, &proxied);
}
@@ -0,0 +1,4 @@
DROP TABLE post_tag;

DROP TABLE tag;
migrations/2024-12-17-144959_community-post-tags/up.sql (new file, 23 lines)

@@ -0,0 +1,23 @@
-- a tag is a federatable object that gives additional context to another object, which can be displayed and filtered on
-- currently, we only have community post tags, which is a tag that is created by post authors as well as mods of a community,
-- to categorize a post. in the future we may add more tag types, depending on the requirements,
-- this will lead to either expansion of this table (community_id optional, addition of tag_type enum)
-- or split of this table / creation of new tables.
CREATE TABLE tag (
    id serial PRIMARY KEY,
    ap_id text NOT NULL UNIQUE,
    name text NOT NULL,
    community_id int NOT NULL REFERENCES community (id) ON UPDATE CASCADE ON DELETE CASCADE,
    published timestamptz NOT NULL DEFAULT now(),
    updated timestamptz,
    deleted boolean NOT NULL DEFAULT FALSE
);

-- an association between a post and a tag. created/updated by the post author or mods of a community
CREATE TABLE post_tag (
    post_id int NOT NULL REFERENCES post (id) ON UPDATE CASCADE ON DELETE CASCADE,
    tag_id int NOT NULL REFERENCES tag (id) ON UPDATE CASCADE ON DELETE CASCADE,
    published timestamptz NOT NULL DEFAULT now(),
    PRIMARY KEY (post_id, tag_id)
);
@@ -190,10 +190,8 @@ async fn process_ranks_in_batches(
UPDATE {aggregates_table} a {set_clause}
FROM batch WHERE a.{id_column} = batch.{id_column} RETURNING a.published;
"#,
id_column = format!("{table_name}_id"),
aggregates_table = format!("{table_name}_aggregates"),
set_clause = set_clause,
where_clause = where_clause
id_column = format_args!("{table_name}_id"),
aggregates_table = format_args!("{table_name}_aggregates"),
))
.bind::<Timestamptz, _>(previous_batch_last_published)
.bind::<Integer, _>(update_batch_size)