Merge branch 'main' into split_user_table

This commit is contained in:
Dessalines 2021-02-25 12:34:00 -05:00
commit aba32917bd
68 changed files with 559 additions and 701 deletions

View file

@ -1,5 +1,5 @@
tab_spaces = 2 tab_spaces = 2
edition="2018" edition="2018"
imports_layout="HorizontalVertical" imports_layout="HorizontalVertical"
merge_imports=true imports_granularity="Crate"
reorder_imports=true reorder_imports=true

View file

@ -1,3 +1,23 @@
# Lemmy v0.9.9 Release (2021-02-19)
## Changes
### Lemmy backend
- Added an federated activity query sorting order.
- Explicitly marking posts and comments as public.
- Added a `NewComment` / forum sort for posts.
- Fixed an issue with not setting correct published time for fetched posts.
- Fixed an issue with an open docker port on lemmy-ui.
- Using lemmy post link for RSS link.
- Fixed reason and display name lengths to use char counts instead.
### Lemmy-ui
- Updated translations.
- Made websocket host configurable.
- Added some accessibility features.
- Always showing password reset link.
# Lemmy v0.9.7 Release (2021-02-08) # Lemmy v0.9.7 Release (2021-02-08)
## Changes ## Changes

View file

@ -1 +1 @@
0.9.7 0.9.9

View file

@ -64,6 +64,14 @@
- src: '../docker/iframely.config.local.js' - src: '../docker/iframely.config.local.js'
dest: '{{lemmy_base_dir}}/iframely.config.local.js' dest: '{{lemmy_base_dir}}/iframely.config.local.js'
mode: '0600' mode: '0600'
vars:
lemmy_docker_image: "dessalines/lemmy:dev"
lemmy_docker_ui_image: "dessalines/lemmy-ui:{{ lookup('file', 'VERSION') }}"
lemmy_port: "8536"
lemmy_ui_port: "1235"
pictshare_port: "8537"
iframely_port: "8538"
postgres_password: "{{ lookup('password', 'passwords/{{ inventory_hostname }}/postgres chars=ascii_letters,digits') }}"
- name: add config file (only during initial setup) - name: add config file (only during initial setup)
template: template:

View file

@ -27,7 +27,7 @@ use lemmy_db_views::{
use lemmy_structs::{blocking, comment::*, send_local_notifs}; use lemmy_structs::{blocking, comment::*, send_local_notifs};
use lemmy_utils::{ use lemmy_utils::{
utils::{remove_slurs, scrape_text_for_mentions}, utils::{remove_slurs, scrape_text_for_mentions},
APIError, ApiError,
ConnectionId, ConnectionId,
LemmyError, LemmyError,
}; };
@ -60,7 +60,7 @@ impl Perform for CreateComment {
// Check if post is locked, no new comments // Check if post is locked, no new comments
if post.locked { if post.locked {
return Err(APIError::err("locked").into()); return Err(ApiError::err("locked").into());
} }
// If there's a parent_id, check to make sure that comment is in that post // If there's a parent_id, check to make sure that comment is in that post
@ -69,10 +69,10 @@ impl Perform for CreateComment {
let parent = let parent =
match blocking(context.pool(), move |conn| Comment::read(&conn, parent_id)).await? { match blocking(context.pool(), move |conn| Comment::read(&conn, parent_id)).await? {
Ok(comment) => comment, Ok(comment) => comment,
Err(_e) => return Err(APIError::err("couldnt_create_comment").into()), Err(_e) => return Err(ApiError::err("couldnt_create_comment").into()),
}; };
if parent.post_id != post_id { if parent.post_id != post_id {
return Err(APIError::err("couldnt_create_comment").into()); return Err(ApiError::err("couldnt_create_comment").into());
} }
} }
@ -98,7 +98,7 @@ impl Perform for CreateComment {
.await? .await?
{ {
Ok(comment) => comment, Ok(comment) => comment,
Err(_e) => return Err(APIError::err("couldnt_create_comment").into()), Err(_e) => return Err(ApiError::err("couldnt_create_comment").into()),
}; };
// Necessary to update the ap_id // Necessary to update the ap_id
@ -112,7 +112,7 @@ impl Perform for CreateComment {
.await? .await?
{ {
Ok(comment) => comment, Ok(comment) => comment,
Err(_e) => return Err(APIError::err("couldnt_create_comment").into()), Err(_e) => return Err(ApiError::err("couldnt_create_comment").into()),
}; };
updated_comment.send_create(&user, context).await?; updated_comment.send_create(&user, context).await?;
@ -140,7 +140,7 @@ impl Perform for CreateComment {
let like = move |conn: &'_ _| CommentLike::like(&conn, &like_form); let like = move |conn: &'_ _| CommentLike::like(&conn, &like_form);
if blocking(context.pool(), like).await?.is_err() { if blocking(context.pool(), like).await?.is_err() {
return Err(APIError::err("couldnt_like_comment").into()); return Err(ApiError::err("couldnt_like_comment").into());
} }
updated_comment.send_like(&user, context).await?; updated_comment.send_like(&user, context).await?;
@ -160,7 +160,7 @@ impl Perform for CreateComment {
.await? .await?
{ {
Ok(comment) => comment, Ok(comment) => comment,
Err(_e) => return Err(APIError::err("couldnt_update_comment").into()), Err(_e) => return Err(ApiError::err("couldnt_update_comment").into()),
}; };
comment_view.comment.read = true; comment_view.comment.read = true;
} }
@ -205,7 +205,7 @@ impl Perform for EditComment {
// Verify that only the creator can edit // Verify that only the creator can edit
if user.id != orig_comment.creator.id { if user.id != orig_comment.creator.id {
return Err(APIError::err("no_comment_edit_allowed").into()); return Err(ApiError::err("no_comment_edit_allowed").into());
} }
// Do the update // Do the update
@ -217,7 +217,7 @@ impl Perform for EditComment {
.await? .await?
{ {
Ok(comment) => comment, Ok(comment) => comment,
Err(_e) => return Err(APIError::err("couldnt_update_comment").into()), Err(_e) => return Err(ApiError::err("couldnt_update_comment").into()),
}; };
// Send the apub update // Send the apub update
@ -281,7 +281,7 @@ impl Perform for DeleteComment {
// Verify that only the creator can delete // Verify that only the creator can delete
if user.id != orig_comment.creator.id { if user.id != orig_comment.creator.id {
return Err(APIError::err("no_comment_edit_allowed").into()); return Err(ApiError::err("no_comment_edit_allowed").into());
} }
// Do the delete // Do the delete
@ -292,7 +292,7 @@ impl Perform for DeleteComment {
.await? .await?
{ {
Ok(comment) => comment, Ok(comment) => comment,
Err(_e) => return Err(APIError::err("couldnt_update_comment").into()), Err(_e) => return Err(ApiError::err("couldnt_update_comment").into()),
}; };
// Send the apub message // Send the apub message
@ -370,7 +370,7 @@ impl Perform for RemoveComment {
.await? .await?
{ {
Ok(comment) => comment, Ok(comment) => comment,
Err(_e) => return Err(APIError::err("couldnt_update_comment").into()), Err(_e) => return Err(ApiError::err("couldnt_update_comment").into()),
}; };
// Mod tables // Mod tables
@ -452,7 +452,7 @@ impl Perform for MarkCommentAsRead {
// Verify that only the recipient can mark as read // Verify that only the recipient can mark as read
if user.id != orig_comment.get_recipient_id() { if user.id != orig_comment.get_recipient_id() {
return Err(APIError::err("no_comment_edit_allowed").into()); return Err(ApiError::err("no_comment_edit_allowed").into());
} }
// Do the mark as read // Do the mark as read
@ -463,7 +463,7 @@ impl Perform for MarkCommentAsRead {
.await? .await?
{ {
Ok(comment) => comment, Ok(comment) => comment,
Err(_e) => return Err(APIError::err("couldnt_update_comment").into()), Err(_e) => return Err(ApiError::err("couldnt_update_comment").into()),
}; };
// Refetch it // Refetch it
@ -504,12 +504,12 @@ impl Perform for SaveComment {
if data.save { if data.save {
let save_comment = move |conn: &'_ _| CommentSaved::save(conn, &comment_saved_form); let save_comment = move |conn: &'_ _| CommentSaved::save(conn, &comment_saved_form);
if blocking(context.pool(), save_comment).await?.is_err() { if blocking(context.pool(), save_comment).await?.is_err() {
return Err(APIError::err("couldnt_save_comment").into()); return Err(ApiError::err("couldnt_save_comment").into());
} }
} else { } else {
let unsave_comment = move |conn: &'_ _| CommentSaved::unsave(conn, &comment_saved_form); let unsave_comment = move |conn: &'_ _| CommentSaved::unsave(conn, &comment_saved_form);
if blocking(context.pool(), unsave_comment).await?.is_err() { if blocking(context.pool(), unsave_comment).await?.is_err() {
return Err(APIError::err("couldnt_save_comment").into()); return Err(ApiError::err("couldnt_save_comment").into());
} }
} }
@ -577,7 +577,7 @@ impl Perform for CreateCommentLike {
let like_form2 = like_form.clone(); let like_form2 = like_form.clone();
let like = move |conn: &'_ _| CommentLike::like(conn, &like_form2); let like = move |conn: &'_ _| CommentLike::like(conn, &like_form2);
if blocking(context.pool(), like).await?.is_err() { if blocking(context.pool(), like).await?.is_err() {
return Err(APIError::err("couldnt_like_comment").into()); return Err(ApiError::err("couldnt_like_comment").into());
} }
if like_form.score == 1 { if like_form.score == 1 {
@ -647,7 +647,7 @@ impl Perform for GetComments {
.await?; .await?;
let comments = match comments { let comments = match comments {
Ok(comments) => comments, Ok(comments) => comments,
Err(_) => return Err(APIError::err("couldnt_get_comments").into()), Err(_) => return Err(ApiError::err("couldnt_get_comments").into()),
}; };
Ok(GetCommentsResponse { comments }) Ok(GetCommentsResponse { comments })
@ -670,10 +670,10 @@ impl Perform for CreateCommentReport {
// check size of report and check for whitespace // check size of report and check for whitespace
let reason = data.reason.trim(); let reason = data.reason.trim();
if reason.is_empty() { if reason.is_empty() {
return Err(APIError::err("report_reason_required").into()); return Err(ApiError::err("report_reason_required").into());
} }
if reason.len() > 1000 { if reason.chars().count() > 1000 {
return Err(APIError::err("report_too_long").into()); return Err(ApiError::err("report_too_long").into());
} }
let user_id = user.id; let user_id = user.id;
@ -698,7 +698,7 @@ impl Perform for CreateCommentReport {
.await? .await?
{ {
Ok(report) => report, Ok(report) => report,
Err(_e) => return Err(APIError::err("couldnt_create_report").into()), Err(_e) => return Err(ApiError::err("couldnt_create_report").into()),
}; };
let res = CreateCommentReportResponse { success: true }; let res = CreateCommentReportResponse { success: true };
@ -753,7 +753,7 @@ impl Perform for ResolveCommentReport {
}; };
if blocking(context.pool(), resolve_fun).await?.is_err() { if blocking(context.pool(), resolve_fun).await?.is_err() {
return Err(APIError::err("couldnt_resolve_report").into()); return Err(ApiError::err("couldnt_resolve_report").into());
}; };
let report_id = data.report_id; let report_id = data.report_id;

View file

@ -48,7 +48,7 @@ use lemmy_utils::{
apub::generate_actor_keypair, apub::generate_actor_keypair,
location_info, location_info,
utils::{check_slurs, check_slurs_opt, is_valid_community_name, naive_from_unix}, utils::{check_slurs, check_slurs_opt, is_valid_community_name, naive_from_unix},
APIError, ApiError,
ConnectionId, ConnectionId,
LemmyError, LemmyError,
}; };
@ -82,7 +82,7 @@ impl Perform for GetCommunity {
.await? .await?
{ {
Ok(community) => community, Ok(community) => community,
Err(_e) => return Err(APIError::err("couldnt_find_community").into()), Err(_e) => return Err(ApiError::err("couldnt_find_community").into()),
} }
.id .id
} }
@ -94,7 +94,7 @@ impl Perform for GetCommunity {
.await? .await?
{ {
Ok(community) => community, Ok(community) => community,
Err(_e) => return Err(APIError::err("couldnt_find_community").into()), Err(_e) => return Err(ApiError::err("couldnt_find_community").into()),
}; };
let moderators: Vec<CommunityModeratorView> = match blocking(context.pool(), move |conn| { let moderators: Vec<CommunityModeratorView> = match blocking(context.pool(), move |conn| {
@ -103,7 +103,7 @@ impl Perform for GetCommunity {
.await? .await?
{ {
Ok(moderators) => moderators, Ok(moderators) => moderators,
Err(_e) => return Err(APIError::err("couldnt_find_community").into()), Err(_e) => return Err(ApiError::err("couldnt_find_community").into()),
}; };
let online = context let online = context
@ -140,7 +140,7 @@ impl Perform for CreateCommunity {
check_slurs_opt(&data.description)?; check_slurs_opt(&data.description)?;
if !is_valid_community_name(&data.name) { if !is_valid_community_name(&data.name) {
return Err(APIError::err("invalid_community_name").into()); return Err(ApiError::err("invalid_community_name").into());
} }
// Double check for duplicate community actor_ids // Double check for duplicate community actor_ids
@ -151,7 +151,7 @@ impl Perform for CreateCommunity {
}) })
.await?; .await?;
if community_dupe.is_ok() { if community_dupe.is_ok() {
return Err(APIError::err("community_already_exists").into()); return Err(ApiError::err("community_already_exists").into());
} }
// Check to make sure the icon and banners are urls // Check to make sure the icon and banners are urls
@ -170,7 +170,6 @@ impl Perform for CreateCommunity {
description: data.description.to_owned(), description: data.description.to_owned(),
icon, icon,
banner, banner,
category_id: data.category_id,
creator_id: user.id, creator_id: user.id,
removed: None, removed: None,
deleted: None, deleted: None,
@ -193,7 +192,7 @@ impl Perform for CreateCommunity {
.await? .await?
{ {
Ok(community) => community, Ok(community) => community,
Err(_e) => return Err(APIError::err("community_already_exists").into()), Err(_e) => return Err(ApiError::err("community_already_exists").into()),
}; };
// The community creator becomes a moderator // The community creator becomes a moderator
@ -204,7 +203,7 @@ impl Perform for CreateCommunity {
let join = move |conn: &'_ _| CommunityModerator::join(conn, &community_moderator_form); let join = move |conn: &'_ _| CommunityModerator::join(conn, &community_moderator_form);
if blocking(context.pool(), join).await?.is_err() { if blocking(context.pool(), join).await?.is_err() {
return Err(APIError::err("community_moderator_already_exists").into()); return Err(ApiError::err("community_moderator_already_exists").into());
} }
// Follow your own community // Follow your own community
@ -216,7 +215,7 @@ impl Perform for CreateCommunity {
let follow = move |conn: &'_ _| CommunityFollower::follow(conn, &community_follower_form); let follow = move |conn: &'_ _| CommunityFollower::follow(conn, &community_follower_form);
if blocking(context.pool(), follow).await?.is_err() { if blocking(context.pool(), follow).await?.is_err() {
return Err(APIError::err("community_follower_already_exists").into()); return Err(ApiError::err("community_follower_already_exists").into());
} }
let user_id = user.id; let user_id = user.id;
@ -252,7 +251,7 @@ impl Perform for EditCommunity {
}) })
.await??; .await??;
if !mods.contains(&user.id) { if !mods.contains(&user.id) {
return Err(APIError::err("not_a_moderator").into()); return Err(ApiError::err("not_a_moderator").into());
} }
let community_id = data.community_id; let community_id = data.community_id;
@ -273,7 +272,6 @@ impl Perform for EditCommunity {
description: data.description.to_owned(), description: data.description.to_owned(),
icon, icon,
banner, banner,
category_id: data.category_id.to_owned(),
creator_id: read_community.creator_id, creator_id: read_community.creator_id,
removed: Some(read_community.removed), removed: Some(read_community.removed),
deleted: Some(read_community.deleted), deleted: Some(read_community.deleted),
@ -297,7 +295,7 @@ impl Perform for EditCommunity {
.await? .await?
{ {
Ok(community) => community, Ok(community) => community,
Err(_e) => return Err(APIError::err("couldnt_update_community").into()), Err(_e) => return Err(ApiError::err("couldnt_update_community").into()),
}; };
// TODO there needs to be some kind of an apub update // TODO there needs to be some kind of an apub update
@ -337,7 +335,7 @@ impl Perform for DeleteCommunity {
}) })
.await??; .await??;
if read_community.creator_id != user.id { if read_community.creator_id != user.id {
return Err(APIError::err("no_community_edit_allowed").into()); return Err(ApiError::err("no_community_edit_allowed").into());
} }
// Do the delete // Do the delete
@ -349,7 +347,7 @@ impl Perform for DeleteCommunity {
.await? .await?
{ {
Ok(community) => community, Ok(community) => community,
Err(_e) => return Err(APIError::err("couldnt_update_community").into()), Err(_e) => return Err(ApiError::err("couldnt_update_community").into()),
}; };
// Send apub messages // Send apub messages
@ -398,7 +396,7 @@ impl Perform for RemoveCommunity {
.await? .await?
{ {
Ok(community) => community, Ok(community) => community,
Err(_e) => return Err(APIError::err("couldnt_update_community").into()), Err(_e) => return Err(ApiError::err("couldnt_update_community").into()),
}; };
// Mod tables // Mod tables
@ -513,13 +511,13 @@ impl Perform for FollowCommunity {
let follow = move |conn: &'_ _| CommunityFollower::follow(conn, &community_follower_form); let follow = move |conn: &'_ _| CommunityFollower::follow(conn, &community_follower_form);
if blocking(context.pool(), follow).await?.is_err() { if blocking(context.pool(), follow).await?.is_err() {
return Err(APIError::err("community_follower_already_exists").into()); return Err(ApiError::err("community_follower_already_exists").into());
} }
} else { } else {
let unfollow = let unfollow =
move |conn: &'_ _| CommunityFollower::unfollow(conn, &community_follower_form); move |conn: &'_ _| CommunityFollower::unfollow(conn, &community_follower_form);
if blocking(context.pool(), unfollow).await?.is_err() { if blocking(context.pool(), unfollow).await?.is_err() {
return Err(APIError::err("community_follower_already_exists").into()); return Err(ApiError::err("community_follower_already_exists").into());
} }
} }
} else if data.follow { } else if data.follow {
@ -530,7 +528,7 @@ impl Perform for FollowCommunity {
user.send_unfollow(&community.actor_id(), context).await?; user.send_unfollow(&community.actor_id(), context).await?;
let unfollow = move |conn: &'_ _| CommunityFollower::unfollow(conn, &community_follower_form); let unfollow = move |conn: &'_ _| CommunityFollower::unfollow(conn, &community_follower_form);
if blocking(context.pool(), unfollow).await?.is_err() { if blocking(context.pool(), unfollow).await?.is_err() {
return Err(APIError::err("community_follower_already_exists").into()); return Err(ApiError::err("community_follower_already_exists").into());
} }
} }
@ -571,7 +569,7 @@ impl Perform for GetFollowedCommunities {
.await? .await?
{ {
Ok(communities) => communities, Ok(communities) => communities,
_ => return Err(APIError::err("system_err_login").into()), _ => return Err(ApiError::err("system_err_login").into()),
}; };
// Return the jwt // Return the jwt
@ -605,7 +603,7 @@ impl Perform for BanFromCommunity {
if data.ban { if data.ban {
let ban = move |conn: &'_ _| CommunityUserBan::ban(conn, &community_user_ban_form); let ban = move |conn: &'_ _| CommunityUserBan::ban(conn, &community_user_ban_form);
if blocking(context.pool(), ban).await?.is_err() { if blocking(context.pool(), ban).await?.is_err() {
return Err(APIError::err("community_user_already_banned").into()); return Err(ApiError::err("community_user_already_banned").into());
} }
// Also unsubscribe them from the community, if they are subscribed // Also unsubscribe them from the community, if they are subscribed
@ -622,7 +620,7 @@ impl Perform for BanFromCommunity {
} else { } else {
let unban = move |conn: &'_ _| CommunityUserBan::unban(conn, &community_user_ban_form); let unban = move |conn: &'_ _| CommunityUserBan::unban(conn, &community_user_ban_form);
if blocking(context.pool(), unban).await?.is_err() { if blocking(context.pool(), unban).await?.is_err() {
return Err(APIError::err("community_user_already_banned").into()); return Err(ApiError::err("community_user_already_banned").into());
} }
} }
@ -721,12 +719,12 @@ impl Perform for AddModToCommunity {
if data.added { if data.added {
let join = move |conn: &'_ _| CommunityModerator::join(conn, &community_moderator_form); let join = move |conn: &'_ _| CommunityModerator::join(conn, &community_moderator_form);
if blocking(context.pool(), join).await?.is_err() { if blocking(context.pool(), join).await?.is_err() {
return Err(APIError::err("community_moderator_already_exists").into()); return Err(ApiError::err("community_moderator_already_exists").into());
} }
} else { } else {
let leave = move |conn: &'_ _| CommunityModerator::leave(conn, &community_moderator_form); let leave = move |conn: &'_ _| CommunityModerator::leave(conn, &community_moderator_form);
if blocking(context.pool(), leave).await?.is_err() { if blocking(context.pool(), leave).await?.is_err() {
return Err(APIError::err("community_moderator_already_exists").into()); return Err(ApiError::err("community_moderator_already_exists").into());
} }
} }
@ -798,14 +796,14 @@ impl Perform for TransferCommunity {
if user.id != read_community.creator_id if user.id != read_community.creator_id
&& !admins.iter().map(|a| a.user.id).any(|x| x == user.id) && !admins.iter().map(|a| a.user.id).any(|x| x == user.id)
{ {
return Err(APIError::err("not_an_admin").into()); return Err(ApiError::err("not_an_admin").into());
} }
let community_id = data.community_id; let community_id = data.community_id;
let new_creator = data.user_id; let new_creator = data.user_id;
let update = move |conn: &'_ _| Community::update_creator(conn, community_id, new_creator); let update = move |conn: &'_ _| Community::update_creator(conn, community_id, new_creator);
if blocking(context.pool(), update).await?.is_err() { if blocking(context.pool(), update).await?.is_err() {
return Err(APIError::err("couldnt_update_community").into()); return Err(ApiError::err("couldnt_update_community").into());
}; };
// You also have to re-do the community_moderator table, reordering it. // You also have to re-do the community_moderator table, reordering it.
@ -836,7 +834,7 @@ impl Perform for TransferCommunity {
let join = move |conn: &'_ _| CommunityModerator::join(conn, &community_moderator_form); let join = move |conn: &'_ _| CommunityModerator::join(conn, &community_moderator_form);
if blocking(context.pool(), join).await?.is_err() { if blocking(context.pool(), join).await?.is_err() {
return Err(APIError::err("community_moderator_already_exists").into()); return Err(ApiError::err("community_moderator_already_exists").into());
} }
} }
@ -860,7 +858,7 @@ impl Perform for TransferCommunity {
.await? .await?
{ {
Ok(community) => community, Ok(community) => community,
Err(_e) => return Err(APIError::err("couldnt_find_community").into()), Err(_e) => return Err(ApiError::err("couldnt_find_community").into()),
}; };
let community_id = data.community_id; let community_id = data.community_id;
@ -870,7 +868,7 @@ impl Perform for TransferCommunity {
.await? .await?
{ {
Ok(moderators) => moderators, Ok(moderators) => moderators,
Err(_e) => return Err(APIError::err("couldnt_find_community").into()), Err(_e) => return Err(ApiError::err("couldnt_find_community").into()),
}; };
// Return the jwt // Return the jwt

View file

@ -19,7 +19,7 @@ use lemmy_db_views_actor::{
community_view::CommunityView, community_view::CommunityView,
}; };
use lemmy_structs::{blocking, comment::*, community::*, post::*, site::*, user::*, websocket::*}; use lemmy_structs::{blocking, comment::*, community::*, post::*, site::*, user::*, websocket::*};
use lemmy_utils::{claims::Claims, settings::Settings, APIError, ConnectionId, LemmyError}; use lemmy_utils::{claims::Claims, settings::Settings, ApiError, ConnectionId, LemmyError};
use lemmy_websocket::{serialize_websocket_message, LemmyContext, UserOperation}; use lemmy_websocket::{serialize_websocket_message, LemmyContext, UserOperation};
use serde::Deserialize; use serde::Deserialize;
use std::process::Command; use std::process::Command;
@ -54,14 +54,14 @@ pub(crate) async fn is_mod_or_admin(
}) })
.await?; .await?;
if !is_mod_or_admin { if !is_mod_or_admin {
return Err(APIError::err("not_a_mod_or_admin").into()); return Err(ApiError::err("not_a_mod_or_admin").into());
} }
Ok(()) Ok(())
} }
pub async fn is_admin(pool: &DbPool, user_id: i32) -> Result<(), LemmyError> { pub async fn is_admin(pool: &DbPool, user_id: i32) -> Result<(), LemmyError> {
let user = blocking(pool, move |conn| User_::read(conn, user_id)).await??; let user = blocking(pool, move |conn| User_::read(conn, user_id)).await??;
if !user.admin { if !user.admin {
return Err(APIError::err("not_an_admin").into()); return Err(ApiError::err("not_an_admin").into());
} }
Ok(()) Ok(())
} }
@ -69,20 +69,20 @@ pub async fn is_admin(pool: &DbPool, user_id: i32) -> Result<(), LemmyError> {
pub(crate) async fn get_post(post_id: i32, pool: &DbPool) -> Result<Post, LemmyError> { pub(crate) async fn get_post(post_id: i32, pool: &DbPool) -> Result<Post, LemmyError> {
match blocking(pool, move |conn| Post::read(conn, post_id)).await? { match blocking(pool, move |conn| Post::read(conn, post_id)).await? {
Ok(post) => Ok(post), Ok(post) => Ok(post),
Err(_e) => Err(APIError::err("couldnt_find_post").into()), Err(_e) => Err(ApiError::err("couldnt_find_post").into()),
} }
} }
pub(crate) async fn get_user_from_jwt(jwt: &str, pool: &DbPool) -> Result<User_, LemmyError> { pub(crate) async fn get_user_from_jwt(jwt: &str, pool: &DbPool) -> Result<User_, LemmyError> {
let claims = match Claims::decode(&jwt) { let claims = match Claims::decode(&jwt) {
Ok(claims) => claims.claims, Ok(claims) => claims.claims,
Err(_e) => return Err(APIError::err("not_logged_in").into()), Err(_e) => return Err(ApiError::err("not_logged_in").into()),
}; };
let user_id = claims.id; let user_id = claims.id;
let user = blocking(pool, move |conn| User_::read(conn, user_id)).await??; let user = blocking(pool, move |conn| User_::read(conn, user_id)).await??;
// Check for a site ban // Check for a site ban
if user.banned { if user.banned {
return Err(APIError::err("site_ban").into()); return Err(ApiError::err("site_ban").into());
} }
Ok(user) Ok(user)
} }
@ -103,13 +103,13 @@ pub(crate) async fn get_user_safe_settings_from_jwt(
) -> Result<UserSafeSettings, LemmyError> { ) -> Result<UserSafeSettings, LemmyError> {
let claims = match Claims::decode(&jwt) { let claims = match Claims::decode(&jwt) {
Ok(claims) => claims.claims, Ok(claims) => claims.claims,
Err(_e) => return Err(APIError::err("not_logged_in").into()), Err(_e) => return Err(ApiError::err("not_logged_in").into()),
}; };
let user_id = claims.id; let user_id = claims.id;
let user = blocking(pool, move |conn| UserSafeSettings::read(conn, user_id)).await??; let user = blocking(pool, move |conn| UserSafeSettings::read(conn, user_id)).await??;
// Check for a site ban // Check for a site ban
if user.banned { if user.banned {
return Err(APIError::err("site_ban").into()); return Err(ApiError::err("site_ban").into());
} }
Ok(user) Ok(user)
} }
@ -131,7 +131,7 @@ pub(crate) async fn check_community_ban(
) -> Result<(), LemmyError> { ) -> Result<(), LemmyError> {
let is_banned = move |conn: &'_ _| CommunityUserBanView::get(conn, user_id, community_id).is_ok(); let is_banned = move |conn: &'_ _| CommunityUserBanView::get(conn, user_id, community_id).is_ok();
if blocking(pool, is_banned).await? { if blocking(pool, is_banned).await? {
Err(APIError::err("community_ban").into()) Err(ApiError::err("community_ban").into())
} else { } else {
Ok(()) Ok(())
} }
@ -141,7 +141,7 @@ pub(crate) async fn check_downvotes_enabled(score: i16, pool: &DbPool) -> Result
if score == -1 { if score == -1 {
let site = blocking(pool, move |conn| Site::read_simple(conn)).await??; let site = blocking(pool, move |conn| Site::read_simple(conn)).await??;
if !site.enable_downvotes { if !site.enable_downvotes {
return Err(APIError::err("downvotes_disabled").into()); return Err(ApiError::err("downvotes_disabled").into());
} }
} }
Ok(()) Ok(())
@ -175,7 +175,7 @@ pub(crate) async fn collect_moderated_communities(
pub(crate) fn check_optional_url(item: &Option<Option<String>>) -> Result<(), LemmyError> { pub(crate) fn check_optional_url(item: &Option<Option<String>>) -> Result<(), LemmyError> {
if let Some(Some(item)) = &item { if let Some(Some(item)) = &item {
if Url::parse(item).is_err() { if Url::parse(item).is_err() {
return Err(APIError::err("invalid_url").into()); return Err(ApiError::err("invalid_url").into());
} }
} }
Ok(()) Ok(())
@ -298,9 +298,6 @@ pub async fn match_websocket_operation(
UserOperation::TransferSite => { UserOperation::TransferSite => {
do_websocket_operation::<TransferSite>(context, id, op, data).await do_websocket_operation::<TransferSite>(context, id, op, data).await
} }
UserOperation::ListCategories => {
do_websocket_operation::<ListCategories>(context, id, op, data).await
}
// Community ops // Community ops
UserOperation::GetCommunity => { UserOperation::GetCommunity => {

View file

@ -40,7 +40,7 @@ use lemmy_structs::{blocking, post::*};
use lemmy_utils::{ use lemmy_utils::{
request::fetch_iframely_and_pictrs_data, request::fetch_iframely_and_pictrs_data,
utils::{check_slurs, check_slurs_opt, is_valid_post_title}, utils::{check_slurs, check_slurs_opt, is_valid_post_title},
APIError, ApiError,
ConnectionId, ConnectionId,
LemmyError, LemmyError,
}; };
@ -67,7 +67,7 @@ impl Perform for CreatePost {
check_slurs_opt(&data.body)?; check_slurs_opt(&data.body)?;
if !is_valid_post_title(&data.name) { if !is_valid_post_title(&data.name) {
return Err(APIError::err("invalid_post_title").into()); return Err(ApiError::err("invalid_post_title").into());
} }
check_community_ban(user.id, data.community_id, context.pool()).await?; check_community_ban(user.id, data.community_id, context.pool()).await?;
@ -109,7 +109,7 @@ impl Perform for CreatePost {
"couldnt_create_post" "couldnt_create_post"
}; };
return Err(APIError::err(err_type).into()); return Err(ApiError::err(err_type).into());
} }
}; };
@ -121,7 +121,7 @@ impl Perform for CreatePost {
.await? .await?
{ {
Ok(post) => post, Ok(post) => post,
Err(_e) => return Err(APIError::err("couldnt_create_post").into()), Err(_e) => return Err(ApiError::err("couldnt_create_post").into()),
}; };
updated_post.send_create(&user, context).await?; updated_post.send_create(&user, context).await?;
@ -135,7 +135,7 @@ impl Perform for CreatePost {
let like = move |conn: &'_ _| PostLike::like(conn, &like_form); let like = move |conn: &'_ _| PostLike::like(conn, &like_form);
if blocking(context.pool(), like).await?.is_err() { if blocking(context.pool(), like).await?.is_err() {
return Err(APIError::err("couldnt_like_post").into()); return Err(ApiError::err("couldnt_like_post").into());
} }
updated_post.send_like(&user, context).await?; updated_post.send_like(&user, context).await?;
@ -148,7 +148,7 @@ impl Perform for CreatePost {
.await? .await?
{ {
Ok(post) => post, Ok(post) => post,
Err(_e) => return Err(APIError::err("couldnt_find_post").into()), Err(_e) => return Err(ApiError::err("couldnt_find_post").into()),
}; };
let res = PostResponse { post_view }; let res = PostResponse { post_view };
@ -183,7 +183,7 @@ impl Perform for GetPost {
.await? .await?
{ {
Ok(post) => post, Ok(post) => post,
Err(_e) => return Err(APIError::err("couldnt_find_post").into()), Err(_e) => return Err(ApiError::err("couldnt_find_post").into()),
}; };
let id = data.id; let id = data.id;
@ -209,7 +209,7 @@ impl Perform for GetPost {
.await? .await?
{ {
Ok(community) => community, Ok(community) => community,
Err(_e) => return Err(APIError::err("couldnt_find_community").into()), Err(_e) => return Err(ApiError::err("couldnt_find_community").into()),
}; };
let online = context let online = context
@ -273,7 +273,7 @@ impl Perform for GetPosts {
.await? .await?
{ {
Ok(posts) => posts, Ok(posts) => posts,
Err(_e) => return Err(APIError::err("couldnt_get_posts").into()), Err(_e) => return Err(ApiError::err("couldnt_get_posts").into()),
}; };
Ok(GetPostsResponse { posts }) Ok(GetPostsResponse { posts })
@ -320,7 +320,7 @@ impl Perform for CreatePostLike {
let like_form2 = like_form.clone(); let like_form2 = like_form.clone();
let like = move |conn: &'_ _| PostLike::like(conn, &like_form2); let like = move |conn: &'_ _| PostLike::like(conn, &like_form2);
if blocking(context.pool(), like).await?.is_err() { if blocking(context.pool(), like).await?.is_err() {
return Err(APIError::err("couldnt_like_post").into()); return Err(ApiError::err("couldnt_like_post").into());
} }
if like_form.score == 1 { if like_form.score == 1 {
@ -340,7 +340,7 @@ impl Perform for CreatePostLike {
.await? .await?
{ {
Ok(post) => post, Ok(post) => post,
Err(_e) => return Err(APIError::err("couldnt_find_post").into()), Err(_e) => return Err(ApiError::err("couldnt_find_post").into()),
}; };
let res = PostResponse { post_view }; let res = PostResponse { post_view };
@ -371,7 +371,7 @@ impl Perform for EditPost {
check_slurs_opt(&data.body)?; check_slurs_opt(&data.body)?;
if !is_valid_post_title(&data.name) { if !is_valid_post_title(&data.name) {
return Err(APIError::err("invalid_post_title").into()); return Err(ApiError::err("invalid_post_title").into());
} }
let post_id = data.post_id; let post_id = data.post_id;
@ -381,7 +381,7 @@ impl Perform for EditPost {
// Verify that only the creator can edit // Verify that only the creator can edit
if !Post::is_post_creator(user.id, orig_post.creator_id) { if !Post::is_post_creator(user.id, orig_post.creator_id) {
return Err(APIError::err("no_post_edit_allowed").into()); return Err(ApiError::err("no_post_edit_allowed").into());
} }
// Fetch Iframely and Pictrs cached image // Fetch Iframely and Pictrs cached image
@ -423,7 +423,7 @@ impl Perform for EditPost {
"couldnt_update_post" "couldnt_update_post"
}; };
return Err(APIError::err(err_type).into()); return Err(ApiError::err(err_type).into());
} }
}; };
@ -467,7 +467,7 @@ impl Perform for DeletePost {
// Verify that only the creator can delete // Verify that only the creator can delete
if !Post::is_post_creator(user.id, orig_post.creator_id) { if !Post::is_post_creator(user.id, orig_post.creator_id) {
return Err(APIError::err("no_post_edit_allowed").into()); return Err(ApiError::err("no_post_edit_allowed").into());
} }
// Update the post // Update the post
@ -711,12 +711,12 @@ impl Perform for SavePost {
if data.save { if data.save {
let save = move |conn: &'_ _| PostSaved::save(conn, &post_saved_form); let save = move |conn: &'_ _| PostSaved::save(conn, &post_saved_form);
if blocking(context.pool(), save).await?.is_err() { if blocking(context.pool(), save).await?.is_err() {
return Err(APIError::err("couldnt_save_post").into()); return Err(ApiError::err("couldnt_save_post").into());
} }
} else { } else {
let unsave = move |conn: &'_ _| PostSaved::unsave(conn, &post_saved_form); let unsave = move |conn: &'_ _| PostSaved::unsave(conn, &post_saved_form);
if blocking(context.pool(), unsave).await?.is_err() { if blocking(context.pool(), unsave).await?.is_err() {
return Err(APIError::err("couldnt_save_post").into()); return Err(ApiError::err("couldnt_save_post").into());
} }
} }
@ -747,10 +747,10 @@ impl Perform for CreatePostReport {
// check size of report and check for whitespace // check size of report and check for whitespace
let reason = data.reason.trim(); let reason = data.reason.trim();
if reason.is_empty() { if reason.is_empty() {
return Err(APIError::err("report_reason_required").into()); return Err(ApiError::err("report_reason_required").into());
} }
if reason.len() > 1000 { if reason.chars().count() > 1000 {
return Err(APIError::err("report_too_long").into()); return Err(ApiError::err("report_too_long").into());
} }
let user_id = user.id; let user_id = user.id;
@ -777,7 +777,7 @@ impl Perform for CreatePostReport {
.await? .await?
{ {
Ok(report) => report, Ok(report) => report,
Err(_e) => return Err(APIError::err("couldnt_create_report").into()), Err(_e) => return Err(ApiError::err("couldnt_create_report").into()),
}; };
let res = CreatePostReportResponse { success: true }; let res = CreatePostReportResponse { success: true };
@ -837,7 +837,7 @@ impl Perform for ResolvePostReport {
}; };
if blocking(context.pool(), resolve_fun).await?.is_err() { if blocking(context.pool(), resolve_fun).await?.is_err() {
return Err(APIError::err("couldnt_resolve_report").into()); return Err(ApiError::err("couldnt_resolve_report").into());
}; };
context.chat_server().do_send(SendModRoomMessage { context.chat_server().do_send(SendModRoomMessage {

View file

@ -22,11 +22,6 @@ pub fn config(cfg: &mut web::ServiceConfig, rate_limit: &RateLimit) {
.route("/config", web::get().to(route_get::<GetSiteConfig>)) .route("/config", web::get().to(route_get::<GetSiteConfig>))
.route("/config", web::put().to(route_post::<SaveSiteConfig>)), .route("/config", web::put().to(route_post::<SaveSiteConfig>)),
) )
.service(
web::resource("/categories")
.wrap(rate_limit.message())
.route(web::get().to(route_get::<ListCategories>)),
)
.service( .service(
web::resource("/modlog") web::resource("/modlog")
.wrap(rate_limit.message()) .wrap(rate_limit.message())

View file

@ -10,17 +10,10 @@ use crate::{
use actix_web::web::Data; use actix_web::web::Data;
use anyhow::Context; use anyhow::Context;
use lemmy_apub::fetcher::search::search_by_apub_id; use lemmy_apub::fetcher::search::search_by_apub_id;
use lemmy_db_queries::{ use lemmy_db_queries::{diesel_option_overwrite, source::site::Site_, Crud, SearchType, SortType};
diesel_option_overwrite,
source::{category::Category_, site::Site_},
Crud,
SearchType,
SortType,
};
use lemmy_db_schema::{ use lemmy_db_schema::{
naive_now, naive_now,
source::{ source::{
category::Category,
moderator::*, moderator::*,
site::{Site, *}, site::{Site, *},
}, },
@ -51,7 +44,7 @@ use lemmy_utils::{
settings::Settings, settings::Settings,
utils::{check_slurs, check_slurs_opt}, utils::{check_slurs, check_slurs_opt},
version, version,
APIError, ApiError,
ConnectionId, ConnectionId,
LemmyError, LemmyError,
}; };
@ -63,24 +56,6 @@ use lemmy_websocket::{
use log::{debug, info}; use log::{debug, info};
use std::str::FromStr; use std::str::FromStr;
#[async_trait::async_trait(?Send)]
impl Perform for ListCategories {
type Response = ListCategoriesResponse;
async fn perform(
&self,
context: &Data<LemmyContext>,
_websocket_id: Option<ConnectionId>,
) -> Result<ListCategoriesResponse, LemmyError> {
let _data: &ListCategories = &self;
let categories = blocking(context.pool(), move |conn| Category::list_all(conn)).await??;
// Return the jwt
Ok(ListCategoriesResponse { categories })
}
}
#[async_trait::async_trait(?Send)] #[async_trait::async_trait(?Send)]
impl Perform for GetModlog { impl Perform for GetModlog {
type Response = GetModlogResponse; type Response = GetModlogResponse;
@ -168,7 +143,7 @@ impl Perform for CreateSite {
let read_site = move |conn: &'_ _| Site::read_simple(conn); let read_site = move |conn: &'_ _| Site::read_simple(conn);
if blocking(context.pool(), read_site).await?.is_ok() { if blocking(context.pool(), read_site).await?.is_ok() {
return Err(APIError::err("site_already_exists").into()); return Err(ApiError::err("site_already_exists").into());
}; };
let user = get_user_from_jwt(&data.auth, context.pool()).await?; let user = get_user_from_jwt(&data.auth, context.pool()).await?;
@ -193,7 +168,7 @@ impl Perform for CreateSite {
let create_site = move |conn: &'_ _| Site::create(conn, &site_form); let create_site = move |conn: &'_ _| Site::create(conn, &site_form);
if blocking(context.pool(), create_site).await?.is_err() { if blocking(context.pool(), create_site).await?.is_err() {
return Err(APIError::err("site_already_exists").into()); return Err(ApiError::err("site_already_exists").into());
} }
let site_view = blocking(context.pool(), move |conn| SiteView::read(conn)).await??; let site_view = blocking(context.pool(), move |conn| SiteView::read(conn)).await??;
@ -238,7 +213,7 @@ impl Perform for EditSite {
let update_site = move |conn: &'_ _| Site::update(conn, 1, &site_form); let update_site = move |conn: &'_ _| Site::update(conn, 1, &site_form);
if blocking(context.pool(), update_site).await?.is_err() { if blocking(context.pool(), update_site).await?.is_err() {
return Err(APIError::err("couldnt_update_site").into()); return Err(ApiError::err("couldnt_update_site").into());
} }
let site_view = blocking(context.pool(), move |conn| SiteView::read(conn)).await??; let site_view = blocking(context.pool(), move |conn| SiteView::read(conn)).await??;
@ -525,13 +500,13 @@ impl Perform for TransferSite {
// Make sure user is the creator // Make sure user is the creator
if read_site.creator_id != user.id { if read_site.creator_id != user.id {
return Err(APIError::err("not_an_admin").into()); return Err(ApiError::err("not_an_admin").into());
} }
let new_creator_id = data.user_id; let new_creator_id = data.user_id;
let transfer_site = move |conn: &'_ _| Site::transfer(conn, new_creator_id); let transfer_site = move |conn: &'_ _| Site::transfer(conn, new_creator_id);
if blocking(context.pool(), transfer_site).await?.is_err() { if blocking(context.pool(), transfer_site).await?.is_err() {
return Err(APIError::err("couldnt_update_site").into()); return Err(ApiError::err("couldnt_update_site").into());
}; };
// Mod tables // Mod tables
@ -608,7 +583,7 @@ impl Perform for SaveSiteConfig {
// Make sure docker doesn't have :ro at the end of the volume, so its not a read-only filesystem // Make sure docker doesn't have :ro at the end of the volume, so its not a read-only filesystem
let config_hjson = match Settings::save_config_file(&data.config_hjson) { let config_hjson = match Settings::save_config_file(&data.config_hjson) {
Ok(config_hjson) => config_hjson, Ok(config_hjson) => config_hjson,
Err(_e) => return Err(APIError::err("couldnt_update_site").into()), Err(_e) => return Err(ApiError::err("couldnt_update_site").into()),
}; };
Ok(GetSiteConfigResponse { config_hjson }) Ok(GetSiteConfigResponse { config_hjson })

View file

@ -80,7 +80,7 @@ use lemmy_utils::{
naive_from_unix, naive_from_unix,
remove_slurs, remove_slurs,
}, },
APIError, ApiError,
ConnectionId, ConnectionId,
LemmyError, LemmyError,
}; };
@ -110,13 +110,13 @@ impl Perform for Login {
.await? .await?
{ {
Ok(user) => user, Ok(user) => user,
Err(_e) => return Err(APIError::err("couldnt_find_that_username_or_email").into()), Err(_e) => return Err(ApiError::err("couldnt_find_that_username_or_email").into()),
}; };
// Verify the password // Verify the password
let valid: bool = verify(&data.password, &user.password_encrypted).unwrap_or(false); let valid: bool = verify(&data.password, &user.password_encrypted).unwrap_or(false);
if !valid { if !valid {
return Err(APIError::err("password_incorrect").into()); return Err(ApiError::err("password_incorrect").into());
} }
// Return the jwt // Return the jwt
@ -140,18 +140,18 @@ impl Perform for Register {
// Make sure site has open registration // Make sure site has open registration
if let Ok(site) = blocking(context.pool(), move |conn| Site::read_simple(conn)).await? { if let Ok(site) = blocking(context.pool(), move |conn| Site::read_simple(conn)).await? {
if !site.open_registration { if !site.open_registration {
return Err(APIError::err("registration_closed").into()); return Err(ApiError::err("registration_closed").into());
} }
} }
// Password length check // Password length check
if data.password.len() > 60 { if data.password.len() > 60 {
return Err(APIError::err("invalid_password").into()); return Err(ApiError::err("invalid_password").into());
} }
// Make sure passwords match // Make sure passwords match
if data.password != data.password_verify { if data.password != data.password_verify {
return Err(APIError::err("passwords_dont_match").into()); return Err(ApiError::err("passwords_dont_match").into());
} }
// Check if there are admins. False if admins exist // Check if there are admins. False if admins exist
@ -176,7 +176,7 @@ impl Perform for Register {
}) })
.await?; .await?;
if !check { if !check {
return Err(APIError::err("captcha_incorrect").into()); return Err(ApiError::err("captcha_incorrect").into());
} }
} }
@ -184,7 +184,7 @@ impl Perform for Register {
let user_keypair = generate_actor_keypair()?; let user_keypair = generate_actor_keypair()?;
if !is_valid_username(&data.username) { if !is_valid_username(&data.username) {
return Err(APIError::err("invalid_username").into()); return Err(ApiError::err("invalid_username").into());
} }
let user_actor_id = generate_apub_endpoint(EndpointType::User, &data.username)?; let user_actor_id = generate_apub_endpoint(EndpointType::User, &data.username)?;
@ -234,7 +234,7 @@ impl Perform for Register {
"user_already_exists" "user_already_exists"
}; };
return Err(APIError::err(err_type).into()); return Err(ApiError::err(err_type).into());
} }
}; };
@ -251,7 +251,6 @@ impl Perform for Register {
name: default_community_name.to_string(), name: default_community_name.to_string(),
title: "The Default Community".to_string(), title: "The Default Community".to_string(),
description: Some("The Default Community".to_string()), description: Some("The Default Community".to_string()),
category_id: 1,
nsfw: false, nsfw: false,
creator_id: inserted_user.id, creator_id: inserted_user.id,
removed: None, removed: None,
@ -285,7 +284,7 @@ impl Perform for Register {
let follow = move |conn: &'_ _| CommunityFollower::follow(conn, &community_follower_form); let follow = move |conn: &'_ _| CommunityFollower::follow(conn, &community_follower_form);
if blocking(context.pool(), follow).await?.is_err() { if blocking(context.pool(), follow).await?.is_err() {
return Err(APIError::err("community_follower_already_exists").into()); return Err(ApiError::err("community_follower_already_exists").into());
}; };
// If its an admin, add them as a mod and follower to main // If its an admin, add them as a mod and follower to main
@ -297,7 +296,7 @@ impl Perform for Register {
let join = move |conn: &'_ _| CommunityModerator::join(conn, &community_moderator_form); let join = move |conn: &'_ _| CommunityModerator::join(conn, &community_moderator_form);
if blocking(context.pool(), join).await?.is_err() { if blocking(context.pool(), join).await?.is_err() {
return Err(APIError::err("community_moderator_already_exists").into()); return Err(ApiError::err("community_moderator_already_exists").into());
} }
} }
@ -380,13 +379,13 @@ impl Perform for SaveUserSettings {
if let Some(Some(bio)) = &bio { if let Some(Some(bio)) = &bio {
if bio.chars().count() > 300 { if bio.chars().count() > 300 {
return Err(APIError::err("bio_length_overflow").into()); return Err(ApiError::err("bio_length_overflow").into());
} }
} }
if let Some(Some(preferred_username)) = &preferred_username { if let Some(Some(preferred_username)) = &preferred_username {
if !is_valid_preferred_username(preferred_username.trim()) { if !is_valid_preferred_username(preferred_username.trim()) {
return Err(APIError::err("invalid_username").into()); return Err(ApiError::err("invalid_username").into());
} }
} }
@ -397,7 +396,7 @@ impl Perform for SaveUserSettings {
Some(new_password_verify) => { Some(new_password_verify) => {
// Make sure passwords match // Make sure passwords match
if new_password != new_password_verify { if new_password != new_password_verify {
return Err(APIError::err("passwords_dont_match").into()); return Err(ApiError::err("passwords_dont_match").into());
} }
// Check the old password // Check the old password
@ -405,7 +404,7 @@ impl Perform for SaveUserSettings {
Some(old_password) => { Some(old_password) => {
let valid: bool = verify(old_password, &user.password_encrypted).unwrap_or(false); let valid: bool = verify(old_password, &user.password_encrypted).unwrap_or(false);
if !valid { if !valid {
return Err(APIError::err("password_incorrect").into()); return Err(ApiError::err("password_incorrect").into());
} }
let new_password = new_password.to_owned(); let new_password = new_password.to_owned();
let user = blocking(context.pool(), move |conn| { let user = blocking(context.pool(), move |conn| {
@ -414,10 +413,10 @@ impl Perform for SaveUserSettings {
.await??; .await??;
user.password_encrypted user.password_encrypted
} }
None => return Err(APIError::err("password_incorrect").into()), None => return Err(ApiError::err("password_incorrect").into()),
} }
} }
None => return Err(APIError::err("passwords_dont_match").into()), None => return Err(ApiError::err("passwords_dont_match").into()),
} }
} }
None => user.password_encrypted, None => user.password_encrypted,
@ -470,7 +469,7 @@ impl Perform for SaveUserSettings {
"user_already_exists" "user_already_exists"
}; };
return Err(APIError::err(err_type).into()); return Err(ApiError::err(err_type).into());
} }
}; };
@ -513,7 +512,7 @@ impl Perform for GetUserDetails {
.await?; .await?;
match user { match user {
Ok(user) => user.id, Ok(user) => user.id,
Err(_e) => return Err(APIError::err("couldnt_find_that_username_or_email").into()), Err(_e) => return Err(ApiError::err("couldnt_find_that_username_or_email").into()),
} }
} }
}; };
@ -563,10 +562,15 @@ impl Perform for GetUserDetails {
}) })
.await??; .await??;
let follows = blocking(context.pool(), move |conn| { let mut follows = vec![];
CommunityFollowerView::for_user(conn, user_details_id) if let Some(uid) = user_id {
}) if uid == user_details_id {
.await??; follows = blocking(context.pool(), move |conn| {
CommunityFollowerView::for_user(conn, user_details_id)
})
.await??;
}
};
let moderates = blocking(context.pool(), move |conn| { let moderates = blocking(context.pool(), move |conn| {
CommunityModeratorView::for_user(conn, user_details_id) CommunityModeratorView::for_user(conn, user_details_id)
}) })
@ -602,7 +606,7 @@ impl Perform for AddAdmin {
let added_user_id = data.user_id; let added_user_id = data.user_id;
let add_admin = move |conn: &'_ _| User_::add_admin(conn, added_user_id, added); let add_admin = move |conn: &'_ _| User_::add_admin(conn, added_user_id, added);
if blocking(context.pool(), add_admin).await?.is_err() { if blocking(context.pool(), add_admin).await?.is_err() {
return Err(APIError::err("couldnt_update_user").into()); return Err(ApiError::err("couldnt_update_user").into());
} }
// Mod tables // Mod tables
@ -658,7 +662,7 @@ impl Perform for BanUser {
let banned_user_id = data.user_id; let banned_user_id = data.user_id;
let ban_user = move |conn: &'_ _| User_::ban_user(conn, banned_user_id, ban); let ban_user = move |conn: &'_ _| User_::ban_user(conn, banned_user_id, ban);
if blocking(context.pool(), ban_user).await?.is_err() { if blocking(context.pool(), ban_user).await?.is_err() {
return Err(APIError::err("couldnt_update_user").into()); return Err(ApiError::err("couldnt_update_user").into());
} }
// Remove their data if that's desired // Remove their data if that's desired
@ -806,14 +810,14 @@ impl Perform for MarkUserMentionAsRead {
.await??; .await??;
if user.id != read_user_mention.recipient_id { if user.id != read_user_mention.recipient_id {
return Err(APIError::err("couldnt_update_comment").into()); return Err(ApiError::err("couldnt_update_comment").into());
} }
let user_mention_id = read_user_mention.id; let user_mention_id = read_user_mention.id;
let read = data.read; let read = data.read;
let update_mention = move |conn: &'_ _| UserMention::update_read(conn, user_mention_id, read); let update_mention = move |conn: &'_ _| UserMention::update_read(conn, user_mention_id, read);
if blocking(context.pool(), update_mention).await?.is_err() { if blocking(context.pool(), update_mention).await?.is_err() {
return Err(APIError::err("couldnt_update_comment").into()); return Err(ApiError::err("couldnt_update_comment").into());
}; };
let user_mention_id = read_user_mention.id; let user_mention_id = read_user_mention.id;
@ -858,7 +862,7 @@ impl Perform for MarkAllAsRead {
let reply_id = comment_view.comment.id; let reply_id = comment_view.comment.id;
let mark_as_read = move |conn: &'_ _| Comment::update_read(conn, reply_id, true); let mark_as_read = move |conn: &'_ _| Comment::update_read(conn, reply_id, true);
if blocking(context.pool(), mark_as_read).await?.is_err() { if blocking(context.pool(), mark_as_read).await?.is_err() {
return Err(APIError::err("couldnt_update_comment").into()); return Err(ApiError::err("couldnt_update_comment").into());
} }
} }
@ -868,13 +872,13 @@ impl Perform for MarkAllAsRead {
.await? .await?
.is_err() .is_err()
{ {
return Err(APIError::err("couldnt_update_comment").into()); return Err(ApiError::err("couldnt_update_comment").into());
} }
// Mark all private_messages as read // Mark all private_messages as read
let update_pm = move |conn: &'_ _| PrivateMessage::mark_all_as_read(conn, user_id); let update_pm = move |conn: &'_ _| PrivateMessage::mark_all_as_read(conn, user_id);
if blocking(context.pool(), update_pm).await?.is_err() { if blocking(context.pool(), update_pm).await?.is_err() {
return Err(APIError::err("couldnt_update_private_message").into()); return Err(ApiError::err("couldnt_update_private_message").into());
} }
Ok(GetRepliesResponse { replies: vec![] }) Ok(GetRepliesResponse { replies: vec![] })
@ -896,20 +900,20 @@ impl Perform for DeleteAccount {
// Verify the password // Verify the password
let valid: bool = verify(&data.password, &user.password_encrypted).unwrap_or(false); let valid: bool = verify(&data.password, &user.password_encrypted).unwrap_or(false);
if !valid { if !valid {
return Err(APIError::err("password_incorrect").into()); return Err(ApiError::err("password_incorrect").into());
} }
// Comments // Comments
let user_id = user.id; let user_id = user.id;
let permadelete = move |conn: &'_ _| Comment::permadelete_for_creator(conn, user_id); let permadelete = move |conn: &'_ _| Comment::permadelete_for_creator(conn, user_id);
if blocking(context.pool(), permadelete).await?.is_err() { if blocking(context.pool(), permadelete).await?.is_err() {
return Err(APIError::err("couldnt_update_comment").into()); return Err(ApiError::err("couldnt_update_comment").into());
} }
// Posts // Posts
let permadelete = move |conn: &'_ _| Post::permadelete_for_creator(conn, user_id); let permadelete = move |conn: &'_ _| Post::permadelete_for_creator(conn, user_id);
if blocking(context.pool(), permadelete).await?.is_err() { if blocking(context.pool(), permadelete).await?.is_err() {
return Err(APIError::err("couldnt_update_post").into()); return Err(ApiError::err("couldnt_update_post").into());
} }
blocking(context.pool(), move |conn| { blocking(context.pool(), move |conn| {
@ -942,7 +946,7 @@ impl Perform for PasswordReset {
.await? .await?
{ {
Ok(user) => user, Ok(user) => user,
Err(_e) => return Err(APIError::err("couldnt_find_that_username_or_email").into()), Err(_e) => return Err(ApiError::err("couldnt_find_that_username_or_email").into()),
}; };
// Generate a random token // Generate a random token
@ -964,7 +968,7 @@ impl Perform for PasswordReset {
let html = &format!("<h1>Password Reset Request for {}</h1><br><a href={}/password_change/{}>Click here to reset your password</a>", user.name, hostname, &token); let html = &format!("<h1>Password Reset Request for {}</h1><br><a href={}/password_change/{}>Click here to reset your password</a>", user.name, hostname, &token);
match send_email(subject, user_email, &user.name, html) { match send_email(subject, user_email, &user.name, html) {
Ok(_o) => _o, Ok(_o) => _o,
Err(_e) => return Err(APIError::err(&_e).into()), Err(_e) => return Err(ApiError::err(&_e).into()),
}; };
Ok(PasswordResetResponse {}) Ok(PasswordResetResponse {})
@ -991,7 +995,7 @@ impl Perform for PasswordChange {
// Make sure passwords match // Make sure passwords match
if data.password != data.password_verify { if data.password != data.password_verify {
return Err(APIError::err("passwords_dont_match").into()); return Err(ApiError::err("passwords_dont_match").into());
} }
// Update the user with the new password // Update the user with the new password
@ -1002,7 +1006,7 @@ impl Perform for PasswordChange {
.await? .await?
{ {
Ok(user) => user, Ok(user) => user,
Err(_e) => return Err(APIError::err("couldnt_update_user").into()), Err(_e) => return Err(ApiError::err("couldnt_update_user").into()),
}; };
// Return the jwt // Return the jwt
@ -1045,7 +1049,7 @@ impl Perform for CreatePrivateMessage {
{ {
Ok(private_message) => private_message, Ok(private_message) => private_message,
Err(_e) => { Err(_e) => {
return Err(APIError::err("couldnt_create_private_message").into()); return Err(ApiError::err("couldnt_create_private_message").into());
} }
}; };
@ -1067,7 +1071,7 @@ impl Perform for CreatePrivateMessage {
.await? .await?
{ {
Ok(private_message) => private_message, Ok(private_message) => private_message,
Err(_e) => return Err(APIError::err("couldnt_create_private_message").into()), Err(_e) => return Err(ApiError::err("couldnt_create_private_message").into()),
}; };
updated_private_message.send_create(&user, context).await?; updated_private_message.send_create(&user, context).await?;
@ -1124,7 +1128,7 @@ impl Perform for EditPrivateMessage {
}) })
.await??; .await??;
if user.id != orig_private_message.creator_id { if user.id != orig_private_message.creator_id {
return Err(APIError::err("no_private_message_edit_allowed").into()); return Err(ApiError::err("no_private_message_edit_allowed").into());
} }
// Doing the update // Doing the update
@ -1136,7 +1140,7 @@ impl Perform for EditPrivateMessage {
.await? .await?
{ {
Ok(private_message) => private_message, Ok(private_message) => private_message,
Err(_e) => return Err(APIError::err("couldnt_update_private_message").into()), Err(_e) => return Err(ApiError::err("couldnt_update_private_message").into()),
}; };
// Send the apub update // Send the apub update
@ -1183,7 +1187,7 @@ impl Perform for DeletePrivateMessage {
}) })
.await??; .await??;
if user.id != orig_private_message.creator_id { if user.id != orig_private_message.creator_id {
return Err(APIError::err("no_private_message_edit_allowed").into()); return Err(ApiError::err("no_private_message_edit_allowed").into());
} }
// Doing the update // Doing the update
@ -1195,7 +1199,7 @@ impl Perform for DeletePrivateMessage {
.await? .await?
{ {
Ok(private_message) => private_message, Ok(private_message) => private_message,
Err(_e) => return Err(APIError::err("couldnt_update_private_message").into()), Err(_e) => return Err(ApiError::err("couldnt_update_private_message").into()),
}; };
// Send the apub update // Send the apub update
@ -1248,7 +1252,7 @@ impl Perform for MarkPrivateMessageAsRead {
}) })
.await??; .await??;
if user.id != orig_private_message.recipient_id { if user.id != orig_private_message.recipient_id {
return Err(APIError::err("couldnt_update_private_message").into()); return Err(ApiError::err("couldnt_update_private_message").into());
} }
// Doing the update // Doing the update
@ -1260,7 +1264,7 @@ impl Perform for MarkPrivateMessageAsRead {
.await? .await?
{ {
Ok(private_message) => private_message, Ok(private_message) => private_message,
Err(_e) => return Err(APIError::err("couldnt_update_private_message").into()), Err(_e) => return Err(ApiError::err("couldnt_update_private_message").into()),
}; };
// No need to send an apub update // No need to send an apub update


@ -1,41 +1,19 @@
use activitystreams::unparsed::UnparsedMutExt; use activitystreams::unparsed::UnparsedMutExt;
use activitystreams_ext::UnparsedExtension; use activitystreams_ext::UnparsedExtension;
use diesel::PgConnection;
use lemmy_db_queries::Crud;
use lemmy_db_schema::source::category::Category;
use lemmy_utils::LemmyError; use lemmy_utils::LemmyError;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
/// Activitystreams extension to allow (de)serializing additional Community fields `category` and /// Activitystreams extension to allow (de)serializing additional Community field
/// `sensitive` (called 'nsfw' in Lemmy). /// `sensitive` (called 'nsfw' in Lemmy).
#[derive(Clone, Debug, Default, Deserialize, Serialize)] #[derive(Clone, Debug, Default, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub struct GroupExtension { pub struct GroupExtension {
pub category: Option<GroupCategory>,
pub sensitive: Option<bool>, pub sensitive: Option<bool>,
} }
#[derive(Clone, Debug, Default, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct GroupCategory {
// Using a string because that's how Peertube does it.
pub identifier: String,
pub name: String,
}
impl GroupExtension { impl GroupExtension {
pub fn new( pub fn new(sensitive: bool) -> Result<GroupExtension, LemmyError> {
conn: &PgConnection,
category_id: i32,
sensitive: bool,
) -> Result<GroupExtension, LemmyError> {
let category = Category::read(conn, category_id)?;
let group_category = GroupCategory {
identifier: category_id.to_string(),
name: category.name,
};
Ok(GroupExtension { Ok(GroupExtension {
category: Some(group_category),
sensitive: Some(sensitive), sensitive: Some(sensitive),
}) })
} }
@ -49,13 +27,11 @@ where
fn try_from_unparsed(unparsed_mut: &mut U) -> Result<Self, Self::Error> { fn try_from_unparsed(unparsed_mut: &mut U) -> Result<Self, Self::Error> {
Ok(GroupExtension { Ok(GroupExtension {
category: unparsed_mut.remove("category")?,
sensitive: unparsed_mut.remove("sensitive")?, sensitive: unparsed_mut.remove("sensitive")?,
}) })
} }
fn try_into_unparsed(self, unparsed_mut: &mut U) -> Result<(), Self::Error> { fn try_into_unparsed(self, unparsed_mut: &mut U) -> Result<(), Self::Error> {
unparsed_mut.insert("category", self.category)?;
unparsed_mut.insert("sensitive", self.sensitive)?; unparsed_mut.insert("sensitive", self.sensitive)?;
Ok(()) Ok(())
} }
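
For illustration, a minimal standalone sketch (assuming the serde and serde_json crates) of what the slimmed-down extension serializes to now that `category` is gone: only the optional `sensitive` flag remains, in camelCase. The type below mirrors the shape above but is an example, not the crate's own struct.

use serde::{Deserialize, Serialize};

#[derive(Clone, Debug, Default, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
struct GroupExtensionSketch {
    sensitive: Option<bool>,
}

fn main() -> Result<(), serde_json::Error> {
    // Serializes to {"sensitive":true}; a missing field deserializes to None.
    let json = serde_json::to_string(&GroupExtensionSketch { sensitive: Some(true) })?;
    assert_eq!(json, r#"{"sensitive":true}"#);

    let parsed: GroupExtensionSketch = serde_json::from_str("{}")?;
    assert_eq!(parsed.sensitive, None);
    Ok(())
}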


@ -41,8 +41,8 @@ use log::debug;
use url::Url; use url::Url;
/// The types of ActivityPub objects that can be fetched directly by searching for their ID. /// The types of ActivityPub objects that can be fetched directly by searching for their ID.
#[serde(untagged)]
#[derive(serde::Deserialize, Debug)] #[derive(serde::Deserialize, Debug)]
#[serde(untagged)]
enum SearchAcceptedObjects { enum SearchAcceptedObjects {
Person(Box<PersonExt>), Person(Box<PersonExt>),
Group(Box<GroupExt>), Group(Box<GroupExt>),
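
For illustration, a small sketch of how `#[serde(untagged)]` resolves a fetched object by its JSON shape rather than an explicit tag, trying variants top to bottom; the types and fields below are hypothetical stand-ins for `PersonExt`/`GroupExt`, and serde/serde_json are assumed as dependencies.

use serde::Deserialize;

#[derive(Deserialize, Debug)]
struct Person { preferred_username: String }

#[derive(Deserialize, Debug)]
struct Group { name: String, sensitive: bool }

// Variants are tried in order; the first one whose shape matches the JSON wins.
#[derive(Deserialize, Debug)]
#[serde(untagged)]
enum SearchObject {
    Person(Person),
    Group(Group),
}

fn main() -> Result<(), serde_json::Error> {
    let doc = r#"{ "name": "memes", "sensitive": false }"#;
    match serde_json::from_str::<SearchObject>(doc)? {
        SearchObject::Person(p) => println!("person: {}", p.preferred_username),
        SearchObject::Group(g) => println!("group: {} (nsfw: {})", g.name, g.sensitive),
    }
    Ok(())
}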


@ -120,7 +120,7 @@ pub(crate) async fn community_receive_message(
User_::read_from_apub_id(&conn, &actor_id.into()) User_::read_from_apub_id(&conn, &actor_id.into())
}) })
.await??; .await??;
check_community_or_site_ban(&user, &to_community, context.pool()).await?; check_community_or_site_ban(&user, to_community.id, context.pool()).await?;
let any_base = activity.clone().into_any_base()?; let any_base = activity.clone().into_any_base()?;
let actor_url = actor.actor_id(); let actor_url = actor.actor_id();
@ -261,14 +261,13 @@ async fn handle_undo_follow(
pub(crate) async fn check_community_or_site_ban( pub(crate) async fn check_community_or_site_ban(
user: &User_, user: &User_,
community: &Community, community_id: i32,
pool: &DbPool, pool: &DbPool,
) -> Result<(), LemmyError> { ) -> Result<(), LemmyError> {
if user.banned { if user.banned {
return Err(anyhow!("User is banned from site").into()); return Err(anyhow!("User is banned from site").into());
} }
let user_id = user.id; let user_id = user.id;
let community_id = community.id;
let is_banned = move |conn: &'_ _| CommunityUserBanView::get(conn, user_id, community_id).is_ok(); let is_banned = move |conn: &'_ _| CommunityUserBanView::get(conn, user_id, community_id).is_ok();
if blocking(pool, is_banned).await? { if blocking(pool, is_banned).await? {
return Err(anyhow!("User is banned from community").into()); return Err(anyhow!("User is banned from community").into());


@ -48,8 +48,15 @@ use lemmy_db_schema::source::site::Site;
use lemmy_structs::blocking; use lemmy_structs::blocking;
use lemmy_utils::{location_info, LemmyError}; use lemmy_utils::{location_info, LemmyError};
use lemmy_websocket::LemmyContext; use lemmy_websocket::LemmyContext;
use strum_macros::EnumString;
use url::Url; use url::Url;
#[derive(EnumString)]
enum PageOrNote {
Page,
Note,
}
/// This file is for post/comment activities received by the community, and for post/comment /// This file is for post/comment activities received by the community, and for post/comment
/// activities announced by the community and received by the user. /// activities announced by the community and received by the user.
@ -64,9 +71,13 @@ pub(in crate::inbox) async fn receive_create_for_community(
verify_activity_domains_valid(&create, &expected_domain, true)?; verify_activity_domains_valid(&create, &expected_domain, true)?;
is_addressed_to_public(&create)?; is_addressed_to_public(&create)?;
match create.object().as_single_kind_str() { let kind = create
Some("Page") => receive_create_post(create, context, request_counter).await, .object()
Some("Note") => receive_create_comment(create, context, request_counter).await, .as_single_kind_str()
.and_then(|s| s.parse().ok());
match kind {
Some(PageOrNote::Page) => receive_create_post(create, context, request_counter).await,
Some(PageOrNote::Note) => receive_create_comment(create, context, request_counter).await,
_ => receive_unhandled_activity(create), _ => receive_unhandled_activity(create),
} }
} }
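
For illustration, a minimal sketch of the dispatch pattern introduced here: `strum_macros::EnumString` derives `FromStr`, so the activity's kind string can be parsed into an enum and matched, instead of matching raw string literals (assumes the strum and strum_macros crates; the input strings are made up).

use std::str::FromStr;
use strum_macros::EnumString;

#[derive(EnumString, Debug, PartialEq)]
enum PageOrNote {
    Page,
    Note,
}

fn main() {
    // `parse` comes from the derived FromStr; unknown kinds fall through as None.
    let kind: Option<PageOrNote> = "Page".parse().ok();
    assert_eq!(kind, Some(PageOrNote::Page));

    let unknown = PageOrNote::from_str("Video");
    assert!(unknown.is_err());
}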
@ -82,9 +93,13 @@ pub(in crate::inbox) async fn receive_update_for_community(
verify_activity_domains_valid(&update, &expected_domain, true)?; verify_activity_domains_valid(&update, &expected_domain, true)?;
is_addressed_to_public(&update)?; is_addressed_to_public(&update)?;
match update.object().as_single_kind_str() { let kind = update
Some("Page") => receive_update_post(update, context, request_counter).await, .object()
Some("Note") => receive_update_comment(update, context, request_counter).await, .as_single_kind_str()
.and_then(|s| s.parse().ok());
match kind {
Some(PageOrNote::Page) => receive_update_post(update, context, request_counter).await,
Some(PageOrNote::Note) => receive_update_comment(update, context, request_counter).await,
_ => receive_unhandled_activity(update), _ => receive_unhandled_activity(update),
} }
} }
@ -201,6 +216,14 @@ pub(in crate::inbox) async fn receive_remove_for_community(
} }
} }
#[derive(EnumString)]
enum UndoableActivities {
Delete,
Remove,
Like,
Dislike,
}
/// A post/comment action being reverted (either a delete, remove, upvote or downvote) /// A post/comment action being reverted (either a delete, remove, upvote or downvote)
pub(in crate::inbox) async fn receive_undo_for_community( pub(in crate::inbox) async fn receive_undo_for_community(
context: &LemmyContext, context: &LemmyContext,
@ -212,13 +235,18 @@ pub(in crate::inbox) async fn receive_undo_for_community(
verify_activity_domains_valid(&undo, &expected_domain.to_owned(), true)?; verify_activity_domains_valid(&undo, &expected_domain.to_owned(), true)?;
is_addressed_to_public(&undo)?; is_addressed_to_public(&undo)?;
match undo.object().as_single_kind_str() { use UndoableActivities::*;
Some("Delete") => receive_undo_delete_for_community(context, undo, expected_domain).await, match undo
Some("Remove") => receive_undo_remove_for_community(context, undo, expected_domain).await, .object()
Some("Like") => { .as_single_kind_str()
.and_then(|s| s.parse().ok())
{
Some(Delete) => receive_undo_delete_for_community(context, undo, expected_domain).await,
Some(Remove) => receive_undo_remove_for_community(context, undo, expected_domain).await,
Some(Like) => {
receive_undo_like_for_community(context, undo, expected_domain, request_counter).await receive_undo_like_for_community(context, undo, expected_domain, request_counter).await
} }
Some("Dislike") => { Some(Dislike) => {
receive_undo_dislike_for_community(context, undo, expected_domain, request_counter).await receive_undo_dislike_for_community(context, undo, expected_domain, request_counter).await
} }
_ => receive_unhandled_activity(undo), _ => receive_unhandled_activity(undo),


@ -60,6 +60,7 @@ use lemmy_websocket::LemmyContext;
use log::debug; use log::debug;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use std::fmt::Debug; use std::fmt::Debug;
use strum_macros::EnumString;
use url::Url; use url::Url;
/// Allowed activities for user inbox. /// Allowed activities for user inbox.
@ -235,6 +236,17 @@ async fn receive_accept(
Ok(()) Ok(())
} }
#[derive(EnumString)]
enum AnnouncableActivities {
Create,
Update,
Like,
Dislike,
Delete,
Remove,
Undo,
}
/// Takes an announce and passes the inner activity to the appropriate handler. /// Takes an announce and passes the inner activity to the appropriate handler.
pub async fn receive_announce( pub async fn receive_announce(
context: &LemmyContext, context: &LemmyContext,
@ -246,7 +258,10 @@ pub async fn receive_announce(
verify_activity_domains_valid(&announce, &actor.actor_id(), false)?; verify_activity_domains_valid(&announce, &actor.actor_id(), false)?;
is_addressed_to_public(&announce)?; is_addressed_to_public(&announce)?;
let kind = announce.object().as_single_kind_str(); let kind = announce
.object()
.as_single_kind_str()
.and_then(|s| s.parse().ok());
let inner_activity = announce let inner_activity = announce
.object() .object()
.to_owned() .to_owned()
@ -259,22 +274,23 @@ pub async fn receive_announce(
return Ok(()); return Ok(());
} }
use AnnouncableActivities::*;
match kind { match kind {
Some("Create") => { Some(Create) => {
receive_create_for_community(context, inner_activity, &inner_id, request_counter).await receive_create_for_community(context, inner_activity, &inner_id, request_counter).await
} }
Some("Update") => { Some(Update) => {
receive_update_for_community(context, inner_activity, &inner_id, request_counter).await receive_update_for_community(context, inner_activity, &inner_id, request_counter).await
} }
Some("Like") => { Some(Like) => {
receive_like_for_community(context, inner_activity, &inner_id, request_counter).await receive_like_for_community(context, inner_activity, &inner_id, request_counter).await
} }
Some("Dislike") => { Some(Dislike) => {
receive_dislike_for_community(context, inner_activity, &inner_id, request_counter).await receive_dislike_for_community(context, inner_activity, &inner_id, request_counter).await
} }
Some("Delete") => receive_delete_for_community(context, inner_activity, &inner_id).await, Some(Delete) => receive_delete_for_community(context, inner_activity, &inner_id).await,
Some("Remove") => receive_remove_for_community(context, inner_activity, &inner_id).await, Some(Remove) => receive_remove_for_community(context, inner_activity, &inner_id).await,
Some("Undo") => { Some(Undo) => {
receive_undo_for_community(context, inner_activity, &inner_id, request_counter).await receive_undo_for_community(context, inner_activity, &inner_id, request_counter).await
} }
_ => receive_unhandled_activity(inner_activity), _ => receive_unhandled_activity(inner_activity),


@ -18,12 +18,12 @@ use crate::{
use activitystreams::{ use activitystreams::{
object::{kind::NoteType, ApObject, Note, Tombstone}, object::{kind::NoteType, ApObject, Note, Tombstone},
prelude::*, prelude::*,
public,
}; };
use anyhow::{anyhow, Context}; use anyhow::{anyhow, Context};
use lemmy_db_queries::{Crud, DbPool}; use lemmy_db_queries::{Crud, DbPool};
use lemmy_db_schema::source::{ use lemmy_db_schema::source::{
comment::{Comment, CommentForm}, comment::{Comment, CommentForm},
community::Community,
post::Post, post::Post,
user::User_, user::User_,
}; };
@ -49,9 +49,6 @@ impl ToApub for Comment {
let post_id = self.post_id; let post_id = self.post_id;
let post = blocking(pool, move |conn| Post::read(conn, post_id)).await??; let post = blocking(pool, move |conn| Post::read(conn, post_id)).await??;
let community_id = post.community_id;
let community = blocking(pool, move |conn| Community::read(conn, community_id)).await??;
// Add a vector containing some important info to the "in_reply_to" field // Add a vector containing some important info to the "in_reply_to" field
// [post_ap_id, Option(parent_comment_ap_id)] // [post_ap_id, Option(parent_comment_ap_id)]
let mut in_reply_to_vec = vec![post.ap_id.into_inner()]; let mut in_reply_to_vec = vec![post.ap_id.into_inner()];
@ -67,7 +64,7 @@ impl ToApub for Comment {
.set_many_contexts(lemmy_context()?) .set_many_contexts(lemmy_context()?)
.set_id(self.ap_id.to_owned().into_inner()) .set_id(self.ap_id.to_owned().into_inner())
.set_published(convert_datetime(self.published)) .set_published(convert_datetime(self.published))
.set_to(community.actor_id.into_inner()) .set_to(public())
.set_many_in_reply_tos(in_reply_to_vec) .set_many_in_reply_tos(in_reply_to_vec)
.set_attributed_to(creator.actor_id.into_inner()); .set_attributed_to(creator.actor_id.into_inner());
@ -103,13 +100,13 @@ impl FromApub for Comment {
expected_domain: Url, expected_domain: Url,
request_counter: &mut i32, request_counter: &mut i32,
) -> Result<Comment, LemmyError> { ) -> Result<Comment, LemmyError> {
check_object_for_community_or_site_ban(note, context, request_counter).await?;
let comment: Comment = let comment: Comment =
get_object_from_apub(note, context, expected_domain, request_counter).await?; get_object_from_apub(note, context, expected_domain, request_counter).await?;
let post_id = comment.post_id; let post_id = comment.post_id;
let post = blocking(context.pool(), move |conn| Post::read(conn, post_id)).await??; let post = blocking(context.pool(), move |conn| Post::read(conn, post_id)).await??;
check_object_for_community_or_site_ban(note, post.community_id, context, request_counter)
.await?;
if post.locked { if post.locked {
// This is not very efficient because a comment gets inserted just to be deleted right // This is not very efficient because a comment gets inserted just to be deleted right
// afterwards, but it seems to be the easiest way to implement it. // afterwards, but it seems to be the easiest way to implement it.


@ -93,16 +93,9 @@ impl ToApub for Community {
..Default::default() ..Default::default()
}); });
let nsfw = self.nsfw;
let category_id = self.category_id;
let group_extension = blocking(pool, move |conn| {
GroupExtension::new(conn, category_id, nsfw)
})
.await??;
Ok(Ext2::new( Ok(Ext2::new(
ap_actor, ap_actor,
group_extension, GroupExtension::new(self.nsfw)?,
self.get_public_key_ext()?, self.get_public_key_ext()?,
)) ))
} }
@ -207,13 +200,6 @@ impl FromApubToForm<GroupExt> for CommunityForm {
name, name,
title, title,
description, description,
category_id: group
.ext_one
.category
.clone()
.map(|c| c.identifier.parse::<i32>().ok())
.flatten()
.unwrap_or(1),
creator_id: creator.id, creator_id: creator.id,
removed: None, removed: None,
published: group.inner.published().map(|u| u.to_owned().naive_local()), published: group.inner.published().map(|u| u.to_owned().naive_local()),


@ -11,7 +11,9 @@ use activitystreams::{
}; };
use anyhow::{anyhow, Context}; use anyhow::{anyhow, Context};
use chrono::NaiveDateTime; use chrono::NaiveDateTime;
use diesel::result::Error::NotFound;
use lemmy_db_queries::{ApubObject, Crud, DbPool}; use lemmy_db_queries::{ApubObject, Crud, DbPool};
use lemmy_db_schema::source::community::Community;
use lemmy_structs::blocking; use lemmy_structs::blocking;
use lemmy_utils::{location_info, settings::Settings, utils::convert_datetime, LemmyError}; use lemmy_utils::{location_info, settings::Settings, utils::convert_datetime, LemmyError};
use lemmy_websocket::LemmyContext; use lemmy_websocket::LemmyContext;
@ -205,6 +207,7 @@ where
pub(in crate::objects) async fn check_object_for_community_or_site_ban<T, Kind>( pub(in crate::objects) async fn check_object_for_community_or_site_ban<T, Kind>(
object: &T, object: &T,
community_id: i32,
context: &LemmyContext, context: &LemmyContext,
request_counter: &mut i32, request_counter: &mut i32,
) -> Result<(), LemmyError> ) -> Result<(), LemmyError>
@ -217,11 +220,30 @@ where
.as_single_xsd_any_uri() .as_single_xsd_any_uri()
.context(location_info!())?; .context(location_info!())?;
let user = get_or_fetch_and_upsert_user(user_id, context, request_counter).await?; let user = get_or_fetch_and_upsert_user(user_id, context, request_counter).await?;
let community_id = object check_community_or_site_ban(&user, community_id, context.pool()).await
}
pub(in crate::objects) async fn get_to_community<T, Kind>(
object: &T,
context: &LemmyContext,
request_counter: &mut i32,
) -> Result<Community, LemmyError>
where
T: ObjectExt<Kind>,
{
let community_ids = object
.to() .to()
.context(location_info!())? .context(location_info!())?
.as_single_xsd_any_uri() .as_many()
.context(location_info!())?; .context(location_info!())?
let community = get_or_fetch_and_upsert_community(community_id, context, request_counter).await?; .iter()
check_community_or_site_ban(&user, &community, context.pool()).await .map(|a| a.as_xsd_any_uri().context(location_info!()))
.collect::<Result<Vec<&Url>, anyhow::Error>>()?;
for cid in community_ids {
let community = get_or_fetch_and_upsert_community(&cid, context, request_counter).await;
if community.is_ok() {
return community;
}
}
Err(NotFound.into())
} }
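
For illustration, the control flow of `get_to_community` is essentially "try each URL in the `to` field and return the first that resolves to a community". A dependency-free sketch of that pattern, with a made-up resolver standing in for `get_or_fetch_and_upsert_community`:

// Hypothetical stand-in for the real fetcher: only one of the `to` URLs
// actually points at a community the instance knows or can fetch.
fn resolve_community(url: &str) -> Result<String, String> {
    if url.ends_with("/c/memes") {
        Ok("memes".to_string())
    } else {
        Err(format!("not a community: {}", url))
    }
}

fn first_resolvable(candidates: &[&str]) -> Result<String, String> {
    for url in candidates {
        if let Ok(community) = resolve_community(url) {
            return Ok(community);
        }
    }
    Err("no community found in `to` field".to_string())
}

fn main() {
    let to = [
        "https://www.w3.org/ns/activitystreams#Public",
        "https://example.org/c/memes",
    ];
    assert_eq!(first_resolvable(&to), Ok("memes".to_string()));
}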


@ -1,12 +1,13 @@
use crate::{ use crate::{
extensions::{context::lemmy_context, page_extension::PageExtension}, extensions::{context::lemmy_context, page_extension::PageExtension},
fetcher::{community::get_or_fetch_and_upsert_community, user::get_or_fetch_and_upsert_user}, fetcher::user::get_or_fetch_and_upsert_user,
objects::{ objects::{
check_object_domain, check_object_domain,
check_object_for_community_or_site_ban, check_object_for_community_or_site_ban,
create_tombstone, create_tombstone,
get_object_from_apub, get_object_from_apub,
get_source_markdown_value, get_source_markdown_value,
get_to_community,
set_content_and_source, set_content_and_source,
FromApub, FromApub,
FromApubToForm, FromApubToForm,
@ -17,6 +18,7 @@ use crate::{
use activitystreams::{ use activitystreams::{
object::{kind::PageType, ApObject, Image, Page, Tombstone}, object::{kind::PageType, ApObject, Image, Page, Tombstone},
prelude::*, prelude::*,
public,
}; };
use activitystreams_ext::Ext1; use activitystreams_ext::Ext1;
use anyhow::Context; use anyhow::Context;
@ -56,11 +58,12 @@ impl ToApub for Post {
// https://git.asonix.dog/Aardwolf/activitystreams/issues/5 // https://git.asonix.dog/Aardwolf/activitystreams/issues/5
.set_many_contexts(lemmy_context()?) .set_many_contexts(lemmy_context()?)
.set_id(self.ap_id.to_owned().into_inner()) .set_id(self.ap_id.to_owned().into_inner())
// Use summary field to be consistent with mastodon content warning. .set_name(self.name.to_owned())
// https://mastodon.xyz/@Louisa/103987265222901387.json // `summary` field for compatibility with lemmy v0.9.9 and older,
// TODO: remove this after some time
.set_summary(self.name.to_owned()) .set_summary(self.name.to_owned())
.set_published(convert_datetime(self.published)) .set_published(convert_datetime(self.published))
.set_to(community.actor_id.into_inner()) .set_many_tos(vec![community.actor_id.into_inner(), public()])
.set_attributed_to(creator.actor_id.into_inner()); .set_attributed_to(creator.actor_id.into_inner());
if let Some(body) = &self.body { if let Some(body) = &self.body {
@ -115,8 +118,10 @@ impl FromApub for Post {
expected_domain: Url, expected_domain: Url,
request_counter: &mut i32, request_counter: &mut i32,
) -> Result<Post, LemmyError> { ) -> Result<Post, LemmyError> {
check_object_for_community_or_site_ban(page, context, request_counter).await?; let post: Post = get_object_from_apub(page, context, expected_domain, request_counter).await?;
get_object_from_apub(page, context, expected_domain, request_counter).await check_object_for_community_or_site_ban(page, post.community_id, context, request_counter)
.await?;
Ok(post)
} }
} }
@ -139,16 +144,7 @@ impl FromApubToForm<PageExt> for PostForm {
let creator = get_or_fetch_and_upsert_user(creator_actor_id, context, request_counter).await?; let creator = get_or_fetch_and_upsert_user(creator_actor_id, context, request_counter).await?;
let community_actor_id = page let community = get_to_community(page, context, request_counter).await?;
.inner
.to()
.as_ref()
.context(location_info!())?
.as_single_xsd_any_uri()
.context(location_info!())?;
let community =
get_or_fetch_and_upsert_community(community_actor_id, context, request_counter).await?;
let thumbnail_url = match &page.inner.image() { let thumbnail_url = match &page.inner.image() {
Some(any_image) => Image::from_any_base( Some(any_image) => Image::from_any_base(
@ -181,8 +177,11 @@ impl FromApubToForm<PageExt> for PostForm {
let name = page let name = page
.inner .inner
.summary() .name()
.as_ref() .map(|s| s.map(|s2| s2.to_owned()))
// The following is for compatibility with lemmy v0.9.9 and older
// TODO: remove it after some time (along with the map above)
.or_else(|| page.inner.summary().map(|s| s.to_owned()))
.context(location_info!())? .context(location_info!())?
.as_single_xsd_string() .as_single_xsd_string()
.context(location_info!())? .context(location_info!())?
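
For illustration, the compatibility fallback above is an `Option::or_else` chain: prefer the new `name` field and fall back to the legacy `summary` used by lemmy v0.9.9 and older. A dependency-free sketch of just that pattern (function and variable names are made up):

fn post_title(name: Option<&str>, summary: Option<&str>) -> Option<String> {
    // Prefer `name` (the current field); fall back to `summary`
    // for objects produced by lemmy v0.9.9 and older.
    name
        .map(|s| s.to_owned())
        .or_else(|| summary.map(|s| s.to_owned()))
}

fn main() {
    assert_eq!(post_title(Some("A title"), None), Some("A title".to_string()));
    assert_eq!(post_title(None, Some("Legacy title")), Some("Legacy title".to_string()));
    assert_eq!(post_title(None, None), None);
}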


@ -109,7 +109,6 @@ mod tests {
creator_id: inserted_user.id, creator_id: inserted_user.id,
title: "nada".to_owned(), title: "nada".to_owned(),
description: None, description: None,
category_id: 1,
nsfw: false, nsfw: false,
removed: None, removed: None,
deleted: None, deleted: None,


@ -113,7 +113,6 @@ mod tests {
creator_id: inserted_user.id, creator_id: inserted_user.id,
title: "nada".to_owned(), title: "nada".to_owned(),
description: None, description: None,
category_id: 1,
nsfw: false, nsfw: false,
removed: None, removed: None,
deleted: None, deleted: None,
@ -138,7 +137,6 @@ mod tests {
creator_id: inserted_user.id, creator_id: inserted_user.id,
title: "nada".to_owned(), title: "nada".to_owned(),
description: None, description: None,
category_id: 1,
nsfw: false, nsfw: false,
removed: None, removed: None,
deleted: None, deleted: None,


@ -13,6 +13,7 @@ pub struct PostAggregates {
pub downvotes: i64, pub downvotes: i64,
pub stickied: bool, pub stickied: bool,
pub published: chrono::NaiveDateTime, pub published: chrono::NaiveDateTime,
pub newest_comment_time_necro: chrono::NaiveDateTime, // A newest comment time, limited to 2 days, to prevent necrobumping
pub newest_comment_time: chrono::NaiveDateTime, pub newest_comment_time: chrono::NaiveDateTime,
} }
@ -112,7 +113,6 @@ mod tests {
creator_id: inserted_user.id, creator_id: inserted_user.id,
title: "nada".to_owned(), title: "nada".to_owned(),
description: None, description: None,
category_id: 1,
nsfw: false, nsfw: false,
removed: None, removed: None,
deleted: None, deleted: None,


@ -94,7 +94,6 @@ mod tests {
creator_id: inserted_user.id, creator_id: inserted_user.id,
title: "nada".to_owned(), title: "nada".to_owned(),
description: None, description: None,
category_id: 1,
nsfw: false, nsfw: false,
removed: None, removed: None,
deleted: None, deleted: None,


@ -109,7 +109,6 @@ mod tests {
creator_id: inserted_user.id, creator_id: inserted_user.id,
title: "nada".to_owned(), title: "nada".to_owned(),
description: None, description: None,
category_id: 1,
nsfw: false, nsfw: false,
removed: None, removed: None,
deleted: None, deleted: None,


@ -165,6 +165,7 @@ pub enum SortType {
TopYear, TopYear,
TopAll, TopAll,
MostComments, MostComments,
NewComments,
} }
#[derive(EnumString, ToString, Debug, Serialize, Deserialize, Clone)] #[derive(EnumString, ToString, Debug, Serialize, Deserialize, Clone)]


@ -110,7 +110,8 @@ impl Activity_ for Activity {
.sql(" AND activity.data -> 'object' ->> 'type' = 'Create'") .sql(" AND activity.data -> 'object' ->> 'type' = 'Create'")
.sql(" AND activity.data -> 'object' -> 'object' ->> 'type' = 'Page'") .sql(" AND activity.data -> 'object' -> 'object' ->> 'type' = 'Page'")
.sql(" AND activity.data ->> 'actor' = ") .sql(" AND activity.data ->> 'actor' = ")
.bind::<Text, _>(community_actor_id), .bind::<Text, _>(community_actor_id)
.sql(" ORDER BY activity.published DESC"),
) )
.limit(20) .limit(20)
.get_results(conn)?; .get_results(conn)?;


@ -1,54 +0,0 @@
use crate::Crud;
use diesel::{dsl::*, result::Error, *};
use lemmy_db_schema::{schema::category::dsl::*, source::category::*};
impl Crud<CategoryForm> for Category {
fn read(conn: &PgConnection, category_id: i32) -> Result<Self, Error> {
category.find(category_id).first::<Self>(conn)
}
fn create(conn: &PgConnection, new_category: &CategoryForm) -> Result<Self, Error> {
insert_into(category)
.values(new_category)
.get_result::<Self>(conn)
}
fn update(
conn: &PgConnection,
category_id: i32,
new_category: &CategoryForm,
) -> Result<Self, Error> {
diesel::update(category.find(category_id))
.set(new_category)
.get_result::<Self>(conn)
}
}
pub trait Category_ {
fn list_all(conn: &PgConnection) -> Result<Vec<Category>, Error>;
}
impl Category_ for Category {
fn list_all(conn: &PgConnection) -> Result<Vec<Category>, Error> {
category.load::<Self>(conn)
}
}
#[cfg(test)]
mod tests {
use crate::{establish_unpooled_connection, source::category::Category_};
use lemmy_db_schema::source::category::Category;
#[test]
fn test_crud() {
let conn = establish_unpooled_connection();
let categories = Category::list_all(&conn).unwrap();
let expected_first_category = Category {
id: 1,
name: "Discussion".into(),
};
assert_eq!(expected_first_category, categories[0]);
}
}


@ -252,7 +252,6 @@ mod tests {
name: "test community".to_string(), name: "test community".to_string(),
title: "nada".to_owned(), title: "nada".to_owned(),
description: None, description: None,
category_id: 1,
creator_id: inserted_user.id, creator_id: inserted_user.id,
removed: None, removed: None,
deleted: None, deleted: None,


@ -24,7 +24,6 @@ mod safe_type {
name, name,
title, title,
description, description,
category_id,
creator_id, creator_id,
removed, removed,
published, published,
@ -45,7 +44,6 @@ mod safe_type {
name, name,
title, title,
description, description,
category_id,
creator_id, creator_id,
removed, removed,
published, published,
@ -383,7 +381,6 @@ mod tests {
creator_id: inserted_user.id, creator_id: inserted_user.id,
title: "nada".to_owned(), title: "nada".to_owned(),
description: None, description: None,
category_id: 1,
nsfw: false, nsfw: false,
removed: None, removed: None,
deleted: None, deleted: None,
@ -409,7 +406,6 @@ mod tests {
name: "TIL".into(), name: "TIL".into(),
title: "nada".to_owned(), title: "nada".to_owned(),
description: None, description: None,
category_id: 1,
nsfw: false, nsfw: false,
removed: false, removed: false,
deleted: false, deleted: false,


@ -1,5 +1,4 @@
pub mod activity; pub mod activity;
pub mod category;
pub mod comment; pub mod comment;
pub mod comment_report; pub mod comment_report;
pub mod community; pub mod community;


@ -271,7 +271,6 @@ mod tests {
name: "mod_community".to_string(), name: "mod_community".to_string(),
title: "nada".to_owned(), title: "nada".to_owned(),
description: None, description: None,
category_id: 1,
creator_id: inserted_user.id, creator_id: inserted_user.id,
removed: None, removed: None,
deleted: None, deleted: None,


@ -271,7 +271,6 @@ mod tests {
name: "test community_3".to_string(), name: "test community_3".to_string(),
title: "nada".to_owned(), title: "nada".to_owned(),
description: None, description: None,
category_id: 1,
creator_id: inserted_user.id, creator_id: inserted_user.id,
removed: None, removed: None,
deleted: None, deleted: None,


@ -152,7 +152,6 @@ mod tests {
name: "test community lake".to_string(), name: "test community lake".to_string(),
title: "nada".to_owned(), title: "nada".to_owned(),
description: None, description: None,
category_id: 1,
creator_id: inserted_user.id, creator_id: inserted_user.id,
removed: None, removed: None,
deleted: None, deleted: None,


@ -10,13 +10,6 @@ table! {
} }
} }
table! {
category (id) {
id -> Int4,
name -> Varchar,
}
}
table! { table! {
comment (id) { comment (id) {
id -> Int4, id -> Int4,
@ -85,7 +78,6 @@ table! {
name -> Varchar, name -> Varchar,
title -> Varchar, title -> Varchar,
description -> Nullable<Text>, description -> Nullable<Text>,
category_id -> Int4,
creator_id -> Int4, creator_id -> Int4,
removed -> Bool, removed -> Bool,
published -> Timestamp, published -> Timestamp,
@ -291,6 +283,7 @@ table! {
downvotes -> Int8, downvotes -> Int8,
stickied -> Bool, stickied -> Bool,
published -> Timestamp, published -> Timestamp,
newest_comment_time_necro -> Timestamp,
newest_comment_time -> Timestamp, newest_comment_time -> Timestamp,
} }
} }
@ -545,7 +538,6 @@ joinable!(comment_like -> user_ (user_id));
joinable!(comment_report -> comment (comment_id)); joinable!(comment_report -> comment (comment_id));
joinable!(comment_saved -> comment (comment_id)); joinable!(comment_saved -> comment (comment_id));
joinable!(comment_saved -> user_ (user_id)); joinable!(comment_saved -> user_ (user_id));
joinable!(community -> category (category_id));
joinable!(community -> user_ (creator_id)); joinable!(community -> user_ (creator_id));
joinable!(community_aggregates -> community (community_id)); joinable!(community_aggregates -> community (community_id));
joinable!(community_follower -> community (community_id)); joinable!(community_follower -> community (community_id));
@ -586,7 +578,6 @@ joinable!(user_mention -> user_ (recipient_id));
allow_tables_to_appear_in_same_query!( allow_tables_to_appear_in_same_query!(
activity, activity,
category,
comment, comment,
comment_aggregates, comment_aggregates,
comment_like, comment_like,


@ -1,15 +0,0 @@
use crate::schema::category;
use serde::Serialize;
#[derive(Queryable, Identifiable, PartialEq, Debug, Serialize, Clone)]
#[table_name = "category"]
pub struct Category {
pub id: i32,
pub name: String,
}
#[derive(Insertable, AsChangeset)]
#[table_name = "category"]
pub struct CategoryForm {
pub name: String,
}


@ -11,7 +11,6 @@ pub struct Community {
pub name: String, pub name: String,
pub title: String, pub title: String,
pub description: Option<String>, pub description: Option<String>,
pub category_id: i32,
pub creator_id: i32, pub creator_id: i32,
pub removed: bool, pub removed: bool,
pub published: chrono::NaiveDateTime, pub published: chrono::NaiveDateTime,
@ -38,7 +37,6 @@ pub struct CommunitySafe {
pub name: String, pub name: String,
pub title: String, pub title: String,
pub description: Option<String>, pub description: Option<String>,
pub category_id: i32,
pub creator_id: i32, pub creator_id: i32,
pub removed: bool, pub removed: bool,
pub published: chrono::NaiveDateTime, pub published: chrono::NaiveDateTime,
@ -57,7 +55,6 @@ pub struct CommunityForm {
pub name: String, pub name: String,
pub title: String, pub title: String,
pub description: Option<String>, pub description: Option<String>,
pub category_id: i32,
pub creator_id: i32, pub creator_id: i32,
pub removed: Option<bool>, pub removed: Option<bool>,
pub published: Option<chrono::NaiveDateTime>, pub published: Option<chrono::NaiveDateTime>,


@ -1,5 +1,4 @@
pub mod activity; pub mod activity;
pub mod category;
pub mod comment; pub mod comment;
pub mod comment_report; pub mod comment_report;
pub mod community; pub mod community;


@ -380,7 +380,9 @@ impl<'a> CommentQueryBuilder<'a> {
SortType::Hot | SortType::Active => query SortType::Hot | SortType::Active => query
.order_by(hot_rank(comment_aggregates::score, comment_aggregates::published).desc()) .order_by(hot_rank(comment_aggregates::score, comment_aggregates::published).desc())
.then_order_by(comment_aggregates::published.desc()), .then_order_by(comment_aggregates::published.desc()),
SortType::New | SortType::MostComments => query.order_by(comment::published.desc()), SortType::New | SortType::MostComments | SortType::NewComments => {
query.order_by(comment::published.desc())
}
SortType::TopAll => query.order_by(comment_aggregates::score.desc()), SortType::TopAll => query.order_by(comment_aggregates::score.desc()),
SortType::TopYear => query SortType::TopYear => query
.filter(comment::published.gt(now - 1.years())) .filter(comment::published.gt(now - 1.years()))
@ -481,7 +483,6 @@ mod tests {
name: "test community 5".to_string(), name: "test community 5".to_string(),
title: "nada".to_owned(), title: "nada".to_owned(),
description: None, description: None,
category_id: 1,
creator_id: inserted_user.id, creator_id: inserted_user.id,
removed: None, removed: None,
deleted: None, deleted: None,
@ -623,7 +624,6 @@ mod tests {
title: "nada".to_owned(), title: "nada".to_owned(),
description: None, description: None,
creator_id: inserted_user.id, creator_id: inserted_user.id,
category_id: 1,
updated: None, updated: None,
banner: None, banner: None,
published: inserted_community.published, published: inserted_community.published,


@ -356,14 +356,19 @@ impl<'a> PostQueryBuilder<'a> {
query = match self.sort { query = match self.sort {
SortType::Active => query SortType::Active => query
.then_order_by( .then_order_by(
hot_rank(post_aggregates::score, post_aggregates::newest_comment_time).desc(), hot_rank(
post_aggregates::score,
post_aggregates::newest_comment_time_necro,
)
.desc(),
) )
.then_order_by(post_aggregates::newest_comment_time.desc()), .then_order_by(post_aggregates::newest_comment_time_necro.desc()),
SortType::Hot => query SortType::Hot => query
.then_order_by(hot_rank(post_aggregates::score, post_aggregates::published).desc()) .then_order_by(hot_rank(post_aggregates::score, post_aggregates::published).desc())
.then_order_by(post_aggregates::published.desc()), .then_order_by(post_aggregates::published.desc()),
SortType::New => query.then_order_by(post_aggregates::published.desc()), SortType::New => query.then_order_by(post_aggregates::published.desc()),
SortType::MostComments => query.then_order_by(post_aggregates::comments.desc()), SortType::MostComments => query.then_order_by(post_aggregates::comments.desc()),
SortType::NewComments => query.then_order_by(post_aggregates::newest_comment_time.desc()),
SortType::TopAll => query.then_order_by(post_aggregates::score.desc()), SortType::TopAll => query.then_order_by(post_aggregates::score.desc()),
SortType::TopYear => query SortType::TopYear => query
.filter(post::published.gt(now - 1.years())) .filter(post::published.gt(now - 1.years()))
@ -474,7 +479,6 @@ mod tests {
title: "nada".to_owned(), title: "nada".to_owned(),
description: None, description: None,
creator_id: inserted_user.id, creator_id: inserted_user.id,
category_id: 1,
removed: None, removed: None,
deleted: None, deleted: None,
updated: None, updated: None,
@ -609,7 +613,6 @@ mod tests {
title: "nada".to_owned(), title: "nada".to_owned(),
description: None, description: None,
creator_id: inserted_user.id, creator_id: inserted_user.id,
category_id: 1,
updated: None, updated: None,
banner: None, banner: None,
published: inserted_community.published, published: inserted_community.published,
@ -623,6 +626,7 @@ mod tests {
downvotes: 0, downvotes: 0,
stickied: false, stickied: false,
published: agg.published, published: agg.published,
newest_comment_time_necro: inserted_post.published,
newest_comment_time: inserted_post.published, newest_comment_time: inserted_post.published,
}, },
subscribed: false, subscribed: false,


@ -12,9 +12,8 @@ use lemmy_db_queries::{
ViewToVec, ViewToVec,
}; };
use lemmy_db_schema::{ use lemmy_db_schema::{
schema::{category, community, community_aggregates, community_follower, user_}, schema::{community, community_aggregates, community_follower, user_},
source::{ source::{
category::Category,
community::{Community, CommunityFollower, CommunitySafe}, community::{Community, CommunityFollower, CommunitySafe},
user::{UserSafe, User_}, user::{UserSafe, User_},
}, },
@ -25,7 +24,6 @@ use serde::Serialize;
pub struct CommunityView { pub struct CommunityView {
pub community: CommunitySafe, pub community: CommunitySafe,
pub creator: UserSafe, pub creator: UserSafe,
pub category: Category,
pub subscribed: bool, pub subscribed: bool,
pub counts: CommunityAggregates, pub counts: CommunityAggregates,
} }
@ -33,7 +31,6 @@ pub struct CommunityView {
type CommunityViewTuple = ( type CommunityViewTuple = (
CommunitySafe, CommunitySafe,
UserSafe, UserSafe,
Category,
CommunityAggregates, CommunityAggregates,
Option<CommunityFollower>, Option<CommunityFollower>,
); );
@ -47,10 +44,9 @@ impl CommunityView {
// The left join below will return None in this case // The left join below will return None in this case
let user_id_join = my_user_id.unwrap_or(-1); let user_id_join = my_user_id.unwrap_or(-1);
let (community, creator, category, counts, follower) = community::table let (community, creator, counts, follower) = community::table
.find(community_id) .find(community_id)
.inner_join(user_::table) .inner_join(user_::table)
.inner_join(category::table)
.inner_join(community_aggregates::table) .inner_join(community_aggregates::table)
.left_join( .left_join(
community_follower::table.on( community_follower::table.on(
@ -62,7 +58,6 @@ impl CommunityView {
.select(( .select((
Community::safe_columns_tuple(), Community::safe_columns_tuple(),
User_::safe_columns_tuple(), User_::safe_columns_tuple(),
category::all_columns,
community_aggregates::all_columns, community_aggregates::all_columns,
community_follower::all_columns.nullable(), community_follower::all_columns.nullable(),
)) ))
@ -71,7 +66,6 @@ impl CommunityView {
Ok(CommunityView { Ok(CommunityView {
community, community,
creator, creator,
category,
subscribed: follower.is_some(), subscribed: follower.is_some(),
counts, counts,
}) })
@ -162,7 +156,6 @@ impl<'a> CommunityQueryBuilder<'a> {
let mut query = community::table let mut query = community::table
.inner_join(user_::table) .inner_join(user_::table)
.inner_join(category::table)
.inner_join(community_aggregates::table) .inner_join(community_aggregates::table)
.left_join( .left_join(
community_follower::table.on( community_follower::table.on(
@ -174,7 +167,6 @@ impl<'a> CommunityQueryBuilder<'a> {
.select(( .select((
Community::safe_columns_tuple(), Community::safe_columns_tuple(),
User_::safe_columns_tuple(), User_::safe_columns_tuple(),
category::all_columns,
community_aggregates::all_columns, community_aggregates::all_columns,
community_follower::all_columns.nullable(), community_follower::all_columns.nullable(),
)) ))
@ -235,9 +227,8 @@ impl ViewToVec for CommunityView {
.map(|a| Self { .map(|a| Self {
community: a.0.to_owned(), community: a.0.to_owned(),
creator: a.1.to_owned(), creator: a.1.to_owned(),
category: a.2.to_owned(), counts: a.2.to_owned(),
counts: a.3.to_owned(), subscribed: a.3.is_some(),
subscribed: a.4.is_some(),
}) })
.collect::<Vec<Self>>() .collect::<Vec<Self>>()
} }


@ -270,7 +270,9 @@ impl<'a> UserMentionQueryBuilder<'a> {
SortType::Hot | SortType::Active => query SortType::Hot | SortType::Active => query
.order_by(hot_rank(comment_aggregates::score, comment_aggregates::published).desc()) .order_by(hot_rank(comment_aggregates::score, comment_aggregates::published).desc())
.then_order_by(comment_aggregates::published.desc()), .then_order_by(comment_aggregates::published.desc()),
SortType::New | SortType::MostComments => query.order_by(comment::published.desc()), SortType::New | SortType::MostComments | SortType::NewComments => {
query.order_by(comment::published.desc())
}
SortType::TopAll => query.order_by(comment_aggregates::score.desc()), SortType::TopAll => query.order_by(comment_aggregates::score.desc()),
SortType::TopYear => query SortType::TopYear => query
.filter(comment::published.gt(now - 1.years())) .filter(comment::published.gt(now - 1.years()))


@ -110,7 +110,9 @@ impl<'a> UserQueryBuilder<'a> {
SortType::Active => query SortType::Active => query
.order_by(user_aggregates::comment_score.desc()) .order_by(user_aggregates::comment_score.desc())
.then_order_by(user_::published.desc()), .then_order_by(user_::published.desc()),
SortType::New | SortType::MostComments => query.order_by(user_::published.desc()), SortType::New | SortType::MostComments | SortType::NewComments => {
query.order_by(user_::published.desc())
}
SortType::TopAll => query.order_by(user_aggregates::comment_score.desc()), SortType::TopAll => query.order_by(user_aggregates::comment_score.desc()),
SortType::TopYear => query SortType::TopYear => query
.filter(user_::published.gt(now - 1.years())) .filter(user_::published.gt(now - 1.years()))


@ -376,6 +376,7 @@ fn create_post_items(posts: Vec<PostView>) -> Result<Vec<Item>, LemmyError> {
Settings::get().get_protocol_and_hostname(), Settings::get().get_protocol_and_hostname(),
p.post.id p.post.id
); );
i.link(post_url.to_owned());
i.comments(post_url.to_owned()); i.comments(post_url.to_owned());
let guid = GuidBuilder::default() let guid = GuidBuilder::default()
.permalink(true) .permalink(true)
@ -393,10 +394,6 @@ fn create_post_items(posts: Vec<PostView>) -> Result<Vec<Item>, LemmyError> {
// TODO: for category we should just put the name of the category, but then we would have // TODO: for category we should just put the name of the category, but then we would have
// to read each community from the db // to read each community from the db
if let Some(url) = p.post.url {
i.link(url);
}
// TODO add images // TODO add images
let mut description = format!("submitted by <a href=\"{}\">{}</a> to <a href=\"{}\">{}</a><br>{} points | <a href=\"{}\">{} comments</a>", let mut description = format!("submitted by <a href=\"{}\">{}</a> to <a href=\"{}\">{}</a><br>{} points | <a href=\"{}\">{} comments</a>",
p.creator.actor_id, p.creator.actor_id,
@ -407,6 +404,12 @@ fn create_post_items(posts: Vec<PostView>) -> Result<Vec<Item>, LemmyError> {
post_url, post_url,
p.counts.comments); p.counts.comments);
// If it's a URL post, add it to the description
if let Some(url) = p.post.url {
let link_html = format!("<br><a href=\"{url}\">{url}</a>", url = url);
description.push_str(&link_html);
}
if let Some(body) = p.post.body { if let Some(body) = p.post.body {
let html = markdown_to_html(&body); let html = markdown_to_html(&body);
description.push_str(&html); description.push_str(&html);
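
For illustration, a small standalone sketch of the resulting feed item: the `<link>` now always points at the Lemmy post, while any external URL is appended to the HTML description instead. All values below are made up.

fn main() {
    let post_url = "https://lemmy.example/post/123";
    let external_url: Option<&str> = Some("https://blog.example/article");

    let mut description = format!(
        "submitted by <a href=\"{}\">{}</a>",
        "https://lemmy.example/u/alice", "alice"
    );

    // If it is a URL post, the external link goes into the description body.
    if let Some(url) = external_url {
        description.push_str(&format!("<br><a href=\"{url}\">{url}</a>", url = url));
    }

    println!("link: {}", post_url);
    println!("description: {}", description);
}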


@ -1,4 +1,4 @@
use lemmy_db_schema::source::{category::*, user::UserSafeSettings}; use lemmy_db_schema::source::user::UserSafeSettings;
use lemmy_db_views::{comment_view::CommentView, post_view::PostView, site_view::SiteView}; use lemmy_db_views::{comment_view::CommentView, post_view::PostView, site_view::SiteView};
use lemmy_db_views_actor::{community_view::CommunityView, user_view::UserViewSafe}; use lemmy_db_views_actor::{community_view::CommunityView, user_view::UserViewSafe};
use lemmy_db_views_moderator::{ use lemmy_db_views_moderator::{
@ -14,14 +14,6 @@ use lemmy_db_views_moderator::{
}; };
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
#[derive(Deserialize)]
pub struct ListCategories {}
#[derive(Serialize)]
pub struct ListCategoriesResponse {
pub categories: Vec<Category>,
}
#[derive(Deserialize, Debug)] #[derive(Deserialize, Debug)]
pub struct Search { pub struct Search {
pub q: String, pub q: String,


@ -23,7 +23,7 @@ pub type ConnectionId = usize;
pub type PostId = i32; pub type PostId = i32;
pub type CommunityId = i32; pub type CommunityId = i32;
pub type UserId = i32; pub type UserId = i32;
pub type IPAddr = String; pub type IpAddr = String;
#[macro_export] #[macro_export]
macro_rules! location_info { macro_rules! location_info {
@ -39,13 +39,13 @@ macro_rules! location_info {
#[derive(Debug, Error)] #[derive(Debug, Error)]
#[error("{{\"error\":\"{message}\"}}")] #[error("{{\"error\":\"{message}\"}}")]
pub struct APIError { pub struct ApiError {
pub message: String, pub message: String,
} }
impl APIError { impl ApiError {
pub fn err(msg: &str) -> Self { pub fn err(msg: &str) -> Self {
APIError { ApiError {
message: msg.to_string(), message: msg.to_string(),
} }
} }
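
For illustration, a standalone sketch (assuming the thiserror crate) of how the `#[error(...)]` attribute shown above makes the renamed `ApiError` render as a JSON error envelope through its `Display` impl; the type below mirrors the struct above but is a separate example type.

use thiserror::Error;

#[derive(Debug, Error)]
#[error("{{\"error\":\"{message}\"}}")]
struct ApiErrorSketch {
    message: String,
}

fn main() {
    let err = ApiErrorSketch { message: "couldnt_update_comment".to_string() };
    // The escaped braces render literally, giving a JSON body clients can parse.
    assert_eq!(err.to_string(), r#"{"error":"couldnt_update_comment"}"#);
}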


@ -1,4 +1,4 @@
use crate::{APIError, IPAddr, LemmyError}; use crate::{ApiError, IpAddr, LemmyError};
use log::debug; use log::debug;
use std::{collections::HashMap, time::SystemTime}; use std::{collections::HashMap, time::SystemTime};
use strum::IntoEnumIterator; use strum::IntoEnumIterator;
@ -20,13 +20,13 @@ pub(crate) enum RateLimitType {
/// Rate limiting based on rate type and IP addr /// Rate limiting based on rate type and IP addr
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct RateLimiter { pub struct RateLimiter {
buckets: HashMap<RateLimitType, HashMap<IPAddr, RateLimitBucket>>, buckets: HashMap<RateLimitType, HashMap<IpAddr, RateLimitBucket>>,
} }
impl Default for RateLimiter { impl Default for RateLimiter {
fn default() -> Self { fn default() -> Self {
Self { Self {
buckets: HashMap::<RateLimitType, HashMap<IPAddr, RateLimitBucket>>::new(), buckets: HashMap::<RateLimitType, HashMap<IpAddr, RateLimitBucket>>::new(),
} }
} }
} }
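
For illustration, a heavily simplified, dependency-free sketch of the per-type, per-IP bucket idea behind `buckets: HashMap<RateLimitType, HashMap<IpAddr, RateLimitBucket>>`. The field names and refill logic below are assumptions for the example, not Lemmy's actual rate-limit algorithm.

use std::collections::HashMap;
use std::time::{Duration, Instant};

// Hypothetical bucket: a fixed allowance that refills after `per` has elapsed.
struct Bucket {
    allowance: u32,
    last_reset: Instant,
}

struct SimpleRateLimiter {
    // message-type name -> IP -> bucket
    buckets: HashMap<&'static str, HashMap<String, Bucket>>,
    limit: u32,
    per: Duration,
}

impl SimpleRateLimiter {
    fn new(limit: u32, per: Duration) -> Self {
        Self { buckets: HashMap::new(), limit, per }
    }

    fn check(&mut self, kind: &'static str, ip: &str) -> bool {
        let limit = self.limit;
        let per = self.per;
        let bucket = self
            .buckets
            .entry(kind)
            .or_default()
            .entry(ip.to_string())
            .or_insert_with(|| Bucket { allowance: limit, last_reset: Instant::now() });

        if bucket.last_reset.elapsed() >= per {
            bucket.allowance = limit;
            bucket.last_reset = Instant::now();
        }
        if bucket.allowance == 0 {
            return false; // would map to the "Too many requests" ApiError above
        }
        bucket.allowance -= 1;
        true
    }
}

fn main() {
    let mut limiter = SimpleRateLimiter::new(2, Duration::from_secs(60));
    assert!(limiter.check("message", "10.0.0.1"));
    assert!(limiter.check("message", "10.0.0.1"));
    assert!(!limiter.check("message", "10.0.0.1"));
}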
@ -87,7 +87,7 @@ impl RateLimiter {
rate_limit.allowance rate_limit.allowance
); );
Err( Err(
APIError { ApiError {
message: format!( message: format!(
"Too many requests. type: {}, IP: {}, {} per {} seconds", "Too many requests. type: {}, IP: {}, {} per {} seconds",
type_.as_ref(), type_.as_ref(),


@ -187,21 +187,6 @@ async fn is_image_content_type(client: &Client, test: &str) -> Result<(), LemmyE
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use crate::request::is_image_content_type;
#[test]
fn test_image() {
actix_rt::System::new("tset_image").block_on(async move {
let client = reqwest::Client::default();
assert!(is_image_content_type(&client, "https://1734811051.rsc.cdn77.org/data/images/full/365645/as-virus-kills-navajos-in-their-homes-tribal-women-provide-lifeline.jpg?w=600?w=650").await.is_ok());
assert!(is_image_content_type(&client,
"https://twitter.com/BenjaminNorton/status/1259922424272957440?s=20"
)
.await.is_err()
);
});
}
// These helped with testing // These helped with testing
// #[test] // #[test]
// fn test_iframely() { // fn test_iframely() {


@ -1,4 +1,4 @@
use crate::{settings::Settings, APIError}; use crate::{settings::Settings, ApiError};
use actix_web::dev::ConnectionInfo; use actix_web::dev::ConnectionInfo;
use chrono::{DateTime, FixedOffset, NaiveDateTime}; use chrono::{DateTime, FixedOffset, NaiveDateTime};
use itertools::Itertools; use itertools::Itertools;
@ -43,15 +43,15 @@ pub(crate) fn slur_check(test: &str) -> Result<(), Vec<&str>> {
} }
} }
pub fn check_slurs(text: &str) -> Result<(), APIError> { pub fn check_slurs(text: &str) -> Result<(), ApiError> {
if let Err(slurs) = slur_check(text) { if let Err(slurs) = slur_check(text) {
Err(APIError::err(&slurs_vec_to_str(slurs))) Err(ApiError::err(&slurs_vec_to_str(slurs)))
} else { } else {
Ok(()) Ok(())
} }
} }
pub fn check_slurs_opt(text: &Option<String>) -> Result<(), APIError> { pub fn check_slurs_opt(text: &Option<String>) -> Result<(), ApiError> {
match text { match text {
Some(t) => check_slurs(t), Some(t) => check_slurs(t),
None => Ok(()), None => Ok(()),
@ -110,8 +110,8 @@ pub fn is_valid_username(name: &str) -> bool {
// Can't do a regex here, reverse lookarounds not supported // Can't do a regex here, reverse lookarounds not supported
pub fn is_valid_preferred_username(preferred_username: &str) -> bool { pub fn is_valid_preferred_username(preferred_username: &str) -> bool {
!preferred_username.starts_with('@') !preferred_username.starts_with('@')
&& preferred_username.len() >= 3 && preferred_username.chars().count() >= 3
&& preferred_username.len() <= 20 && preferred_username.chars().count() <= 20
} }
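
For illustration, the switch from `len()` to `chars().count()` matters because `len()` counts UTF-8 bytes, not characters, so non-ASCII display names were measured against the wrong limits. A small dependency-free sketch:

fn main() {
    // `len()` counts UTF-8 bytes; `chars().count()` counts characters.
    let name = "héllo";
    assert_eq!(name.len(), 6); // é is 2 bytes in UTF-8
    assert_eq!(name.chars().count(), 5);

    // A perfectly reasonable 8-character display name:
    let display_name = "Дмитрий!"; // Cyrillic letters are 2 bytes each
    assert_eq!(display_name.chars().count(), 8);
    assert_eq!(display_name.len(), 15);
    // With the old byte-based check, longer non-ASCII names hit the
    // 20 "character" limit far too early; counting chars fixes that.
}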
pub fn is_valid_community_name(name: &str) -> bool { pub fn is_valid_community_name(name: &str) -> bool {


@ -1 +1 @@
pub const VERSION: &str = "0.9.7"; pub const VERSION: &str = "0.9.9";


@ -10,10 +10,10 @@ use lemmy_structs::{comment::*, post::*};
use lemmy_utils::{ use lemmy_utils::{
location_info, location_info,
rate_limit::RateLimit, rate_limit::RateLimit,
APIError, ApiError,
CommunityId, CommunityId,
ConnectionId, ConnectionId,
IPAddr, IpAddr,
LemmyError, LemmyError,
PostId, PostId,
UserId, UserId,
@ -73,8 +73,8 @@ pub struct ChatServer {
} }
pub struct SessionInfo { pub struct SessionInfo {
pub addr: Recipient<WSMessage>, pub addr: Recipient<WsMessage>,
pub ip: IPAddr, pub ip: IpAddr,
} }
/// `ChatServer` is an actor. It maintains list of connection client session. /// `ChatServer` is an actor. It maintains list of connection client session.
@ -395,7 +395,7 @@ impl ChatServer {
fn sendit(&self, message: &str, id: ConnectionId) { fn sendit(&self, message: &str, id: ConnectionId) {
if let Some(info) = self.sessions.get(&id) { if let Some(info) = self.sessions.get(&id) {
let _ = info.addr.do_send(WSMessage(message.to_owned())); let _ = info.addr.do_send(WsMessage(message.to_owned()));
} }
} }
@ -406,7 +406,7 @@ impl ChatServer {
) -> impl Future<Output = Result<String, LemmyError>> { ) -> impl Future<Output = Result<String, LemmyError>> {
let rate_limiter = self.rate_limiter.clone(); let rate_limiter = self.rate_limiter.clone();
let ip: IPAddr = match self.sessions.get(&msg.id) { let ip: IpAddr = match self.sessions.get(&msg.id) {
Some(info) => info.ip.to_owned(), Some(info) => info.ip.to_owned(),
None => "blank_ip".to_string(), None => "blank_ip".to_string(),
}; };
@ -421,7 +421,7 @@ impl ChatServer {
async move { async move {
let json: Value = serde_json::from_str(&msg.msg)?; let json: Value = serde_json::from_str(&msg.msg)?;
let data = &json["data"].to_string(); let data = &json["data"].to_string();
let op = &json["op"].as_str().ok_or(APIError { let op = &json["op"].as_str().ok_or(ApiError {
message: "Unknown op type".to_string(), message: "Unknown op type".to_string(),
})?; })?;


@ -88,7 +88,6 @@ pub enum UserOperation {
CreateCommunity, CreateCommunity,
CreatePost, CreatePost,
ListCommunities, ListCommunities,
ListCategories,
GetPost, GetPost,
GetCommunity, GetCommunity,
CreateComment, CreateComment,


@ -1,13 +1,13 @@
use crate::UserOperation; use crate::UserOperation;
use actix::{prelude::*, Recipient}; use actix::{prelude::*, Recipient};
use lemmy_structs::{comment::CommentResponse, post::PostResponse}; use lemmy_structs::{comment::CommentResponse, post::PostResponse};
use lemmy_utils::{CommunityId, ConnectionId, IPAddr, PostId, UserId}; use lemmy_utils::{CommunityId, ConnectionId, IpAddr, PostId, UserId};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
/// Chat server sends these messages to the session /// Chat server sends these messages to the session
#[derive(Message)] #[derive(Message)]
#[rtype(result = "()")] #[rtype(result = "()")]
pub struct WSMessage(pub String); pub struct WsMessage(pub String);
/// Message for chat server communications /// Message for chat server communications
@ -15,8 +15,8 @@ pub struct WSMessage(pub String);
#[derive(Message)] #[derive(Message)]
#[rtype(usize)] #[rtype(usize)]
pub struct Connect { pub struct Connect {
pub addr: Recipient<WSMessage>, pub addr: Recipient<WsMessage>,
pub ip: IPAddr, pub ip: IpAddr,
} }
/// Session is disconnected /// Session is disconnected
@ -24,7 +24,7 @@ pub struct Connect {
#[rtype(result = "()")] #[rtype(result = "()")]
pub struct Disconnect { pub struct Disconnect {
pub id: ConnectionId, pub id: ConnectionId,
pub ip: IPAddr, pub ip: IpAddr,
} }
/// The messages sent to websocket clients /// The messages sent to websocket clients


@ -1,6 +1,6 @@
use crate::{ use crate::{
chat_server::ChatServer, chat_server::ChatServer,
messages::{Connect, Disconnect, StandardMessage, WSMessage}, messages::{Connect, Disconnect, StandardMessage, WsMessage},
LemmyContext, LemmyContext,
}; };
use actix::prelude::*; use actix::prelude::*;
@ -22,7 +22,7 @@ pub async fn chat_route(
context: web::Data<LemmyContext>, context: web::Data<LemmyContext>,
) -> Result<HttpResponse, Error> { ) -> Result<HttpResponse, Error> {
ws::start( ws::start(
WSSession { WsSession {
cs_addr: context.chat_server().to_owned(), cs_addr: context.chat_server().to_owned(),
id: 0, id: 0,
hb: Instant::now(), hb: Instant::now(),
@ -33,7 +33,7 @@ pub async fn chat_route(
) )
} }
struct WSSession { struct WsSession {
cs_addr: Addr<ChatServer>, cs_addr: Addr<ChatServer>,
/// unique session id /// unique session id
id: usize, id: usize,
@ -43,7 +43,7 @@ struct WSSession {
hb: Instant, hb: Instant,
} }
impl Actor for WSSession { impl Actor for WsSession {
type Context = ws::WebsocketContext<Self>; type Context = ws::WebsocketContext<Self>;
/// Method is called on actor start. /// Method is called on actor start.
@ -87,16 +87,16 @@ impl Actor for WSSession {
/// Handle messages from the chat server; we simply forward them to the peer websocket /// Handle messages from the chat server; we simply forward them to the peer websocket
/// These are room messages, i.e. sent to others in the room /// These are room messages, i.e. sent to others in the room
impl Handler<WSMessage> for WSSession { impl Handler<WsMessage> for WsSession {
type Result = (); type Result = ();
fn handle(&mut self, msg: WSMessage, ctx: &mut Self::Context) { fn handle(&mut self, msg: WsMessage, ctx: &mut Self::Context) {
ctx.text(msg.0); ctx.text(msg.0);
} }
} }
/// WebSocket message handler /// WebSocket message handler
impl StreamHandler<Result<ws::Message, ws::ProtocolError>> for WSSession { impl StreamHandler<Result<ws::Message, ws::ProtocolError>> for WsSession {
fn handle(&mut self, result: Result<ws::Message, ws::ProtocolError>, ctx: &mut Self::Context) { fn handle(&mut self, result: Result<ws::Message, ws::ProtocolError>, ctx: &mut Self::Context) {
let message = match result { let message = match result {
Ok(m) => m, Ok(m) => m,
@ -143,7 +143,7 @@ impl StreamHandler<Result<ws::Message, ws::ProtocolError>> for WSSession {
} }
} }
impl WSSession { impl WsSession {
/// helper method that sends ping to client every second. /// helper method that sends ping to client every second.
/// ///
/// also this method checks heartbeats from client /// also this method checks heartbeats from client


@ -3,7 +3,7 @@ ARG RUST_BUILDER_IMAGE=ekidd/rust-musl-builder:1.47.0
# Cargo chef plan # Cargo chef plan
FROM $RUST_BUILDER_IMAGE as planner FROM $RUST_BUILDER_IMAGE as planner
WORKDIR /app WORKDIR /app
RUN cargo install cargo-chef --version 0.1.6 RUN cargo install cargo-chef
# Copy dirs # Copy dirs
COPY ./ ./ COPY ./ ./
@ -15,7 +15,7 @@ RUN cargo chef prepare --recipe-path recipe.json
FROM $RUST_BUILDER_IMAGE as cacher FROM $RUST_BUILDER_IMAGE as cacher
ARG CARGO_BUILD_TARGET=x86_64-unknown-linux-musl ARG CARGO_BUILD_TARGET=x86_64-unknown-linux-musl
WORKDIR /app WORKDIR /app
RUN cargo install cargo-chef --version 0.1.6 RUN cargo install cargo-chef
COPY --from=planner /app/recipe.json ./recipe.json COPY --from=planner /app/recipe.json ./recipe.json
RUN sudo chown -R rust:rust . RUN sudo chown -R rust:rust .
RUN cargo chef cook --target ${CARGO_BUILD_TARGET} --recipe-path recipe.json RUN cargo chef cook --target ${CARGO_BUILD_TARGET} --recipe-path recipe.json

View file

@ -17,7 +17,7 @@ services:
- iframely - iframely
lemmy-ui: lemmy-ui:
image: dessalines/lemmy-ui:0.9.7 image: dessalines/lemmy-ui:0.9.9
ports: ports:
- "1235:1234" - "1235:1234"
restart: always restart: always

View file

@ -29,7 +29,7 @@ services:
- ./volumes/pictrs_alpha:/mnt - ./volumes/pictrs_alpha:/mnt
lemmy-alpha-ui: lemmy-alpha-ui:
image: dessalines/lemmy-ui:0.9.7 image: dessalines/lemmy-ui:0.9.9
environment: environment:
- LEMMY_INTERNAL_HOST=lemmy-alpha:8541 - LEMMY_INTERNAL_HOST=lemmy-alpha:8541
- LEMMY_EXTERNAL_HOST=localhost:8541 - LEMMY_EXTERNAL_HOST=localhost:8541
@ -69,7 +69,7 @@ services:
- ./volumes/postgres_alpha:/var/lib/postgresql/data - ./volumes/postgres_alpha:/var/lib/postgresql/data
lemmy-beta-ui: lemmy-beta-ui:
image: dessalines/lemmy-ui:0.9.7 image: dessalines/lemmy-ui:0.9.9
environment: environment:
- LEMMY_INTERNAL_HOST=lemmy-beta:8551 - LEMMY_INTERNAL_HOST=lemmy-beta:8551
- LEMMY_EXTERNAL_HOST=localhost:8551 - LEMMY_EXTERNAL_HOST=localhost:8551
@ -109,7 +109,7 @@ services:
- ./volumes/postgres_beta:/var/lib/postgresql/data - ./volumes/postgres_beta:/var/lib/postgresql/data
lemmy-gamma-ui: lemmy-gamma-ui:
image: dessalines/lemmy-ui:0.9.7 image: dessalines/lemmy-ui:0.9.9
environment: environment:
- LEMMY_INTERNAL_HOST=lemmy-gamma:8561 - LEMMY_INTERNAL_HOST=lemmy-gamma:8561
- LEMMY_EXTERNAL_HOST=localhost:8561 - LEMMY_EXTERNAL_HOST=localhost:8561
@ -150,7 +150,7 @@ services:
# An instance with only an allowlist for beta # An instance with only an allowlist for beta
lemmy-delta-ui: lemmy-delta-ui:
image: dessalines/lemmy-ui:0.9.7 image: dessalines/lemmy-ui:0.9.9
environment: environment:
- LEMMY_INTERNAL_HOST=lemmy-delta:8571 - LEMMY_INTERNAL_HOST=lemmy-delta:8571
- LEMMY_EXTERNAL_HOST=localhost:8571 - LEMMY_EXTERNAL_HOST=localhost:8571
@ -191,7 +191,7 @@ services:
# An instance who has a blocklist, with lemmy-alpha blocked # An instance who has a blocklist, with lemmy-alpha blocked
lemmy-epsilon-ui: lemmy-epsilon-ui:
image: dessalines/lemmy-ui:0.9.7 image: dessalines/lemmy-ui:0.9.9
environment: environment:
- LEMMY_INTERNAL_HOST=lemmy-epsilon:8581 - LEMMY_INTERNAL_HOST=lemmy-epsilon:8581
- LEMMY_EXTERNAL_HOST=localhost:8581 - LEMMY_EXTERNAL_HOST=localhost:8581

View file

@ -3,7 +3,7 @@ ARG RUST_BUILDER_IMAGE=ekidd/rust-musl-builder:1.47.0
# Cargo chef plan # Cargo chef plan
FROM $RUST_BUILDER_IMAGE as planner FROM $RUST_BUILDER_IMAGE as planner
WORKDIR /app WORKDIR /app
RUN cargo install cargo-chef --version 0.1.6 RUN cargo install cargo-chef
# Copy dirs # Copy dirs
COPY ./ ./ COPY ./ ./
@ -15,7 +15,7 @@ RUN cargo chef prepare --recipe-path recipe.json
FROM $RUST_BUILDER_IMAGE as cacher FROM $RUST_BUILDER_IMAGE as cacher
ARG CARGO_BUILD_TARGET=x86_64-unknown-linux-musl ARG CARGO_BUILD_TARGET=x86_64-unknown-linux-musl
WORKDIR /app WORKDIR /app
RUN cargo install cargo-chef --version 0.1.6 RUN cargo install cargo-chef
COPY --from=planner /app/recipe.json ./recipe.json COPY --from=planner /app/recipe.json ./recipe.json
RUN sudo chown -R rust:rust . RUN sudo chown -R rust:rust .
RUN cargo chef cook --release --target ${CARGO_BUILD_TARGET} --recipe-path recipe.json RUN cargo chef cook --release --target ${CARGO_BUILD_TARGET} --recipe-path recipe.json

View file

@ -9,8 +9,8 @@ new_tag="$1"
# Setting the version on the front end # Setting the version on the front end
cd ../../ cd ../../
# Setting the version on the backend # Setting the version on the backend
echo "pub const VERSION: &str = \"$new_tag\";" > "crates/api/src/version.rs" echo "pub const VERSION: &str = \"$new_tag\";" > "crates/utils/src/version.rs"
git add "crates/api/src/version.rs" git add "crates/utils/src/version.rs"
# Setting the version for Ansible # Setting the version for Ansible
echo $new_tag > "ansible/VERSION" echo $new_tag > "ansible/VERSION"
git add "ansible/VERSION" git add "ansible/VERSION"

View file

@ -12,7 +12,7 @@ services:
restart: always restart: always
lemmy: lemmy:
image: dessalines/lemmy:0.9.7 image: dessalines/lemmy:0.9.9
ports: ports:
- "127.0.0.1:8536:8536" - "127.0.0.1:8536:8536"
restart: always restart: always
@ -26,9 +26,9 @@ services:
- iframely - iframely
lemmy-ui: lemmy-ui:
image: dessalines/lemmy-ui:0.9.7 image: dessalines/lemmy-ui:0.9.9
ports: ports:
- "1235:1234" - "127.0.0.1:1235:1234"
restart: always restart: always
environment: environment:
- LEMMY_INTERNAL_HOST=lemmy:8536 - LEMMY_INTERNAL_HOST=lemmy:8536

View file

@ -0,0 +1,33 @@
drop index idx_post_aggregates_newest_comment_time,
idx_post_aggregates_stickied_newest_comment_time,
idx_post_aggregates_stickied_comments;
alter table post_aggregates drop column newest_comment_time;
alter table post_aggregates rename column newest_comment_time_necro to newest_comment_time;
create or replace function post_aggregates_comment_count()
returns trigger language plpgsql
as $$
begin
IF (TG_OP = 'INSERT') THEN
update post_aggregates pa
set comments = comments + 1
where pa.post_id = NEW.post_id;
-- A 2 day necro-bump limit
update post_aggregates pa
set newest_comment_time = NEW.published
where pa.post_id = NEW.post_id
and published > ('now'::timestamp - '2 days'::interval);
ELSIF (TG_OP = 'DELETE') THEN
-- Join to post because that post may not exist anymore
update post_aggregates pa
set comments = comments - 1
from post p
where pa.post_id = p.id
and pa.post_id = OLD.post_id;
END IF;
return null;
end $$;

View file

@ -0,0 +1,43 @@
-- First rename the current newest_comment_time column to newest_comment_time_necro
-- "necro" means the time is only bumped within a 2 day window, whereas newest_comment_time ignores that limit.
alter table post_aggregates rename column newest_comment_time to newest_comment_time_necro;
-- Add the newest_comment_time column
alter table post_aggregates add column newest_comment_time timestamp not null default now();
-- Set the current newest_comment_time based on the old ones
update post_aggregates set newest_comment_time = newest_comment_time_necro;
-- Add the indexes for this new column
create index idx_post_aggregates_newest_comment_time on post_aggregates (newest_comment_time desc);
create index idx_post_aggregates_stickied_newest_comment_time on post_aggregates (stickied desc, newest_comment_time desc);
-- Also add the previously missing index with stickied first, for the most-comments sort:
create index idx_post_aggregates_stickied_comments on post_aggregates (stickied desc, comments desc);
-- Alter the comment trigger to set the newest_comment_time, and newest_comment_time_necro
create or replace function post_aggregates_comment_count()
returns trigger language plpgsql
as $$
begin
IF (TG_OP = 'INSERT') THEN
update post_aggregates pa
set comments = comments + 1,
newest_comment_time = NEW.published
where pa.post_id = NEW.post_id;
-- A 2 day necro-bump limit
update post_aggregates pa
set newest_comment_time_necro = NEW.published
where pa.post_id = NEW.post_id
and published > ('now'::timestamp - '2 days'::interval);
ELSIF (TG_OP = 'DELETE') THEN
-- Join to post because that post may not exist anymore
update post_aggregates pa
set comments = comments - 1
from post p
where pa.post_id = p.id
and pa.post_id = OLD.post_id;
END IF;
return null;
end $$;
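
The two timestamp columns this migration introduces are easiest to read through the sort they support. The sketch below is illustrative only (it is not a query taken from the Lemmy codebase) and uses just the columns visible in this migration: stickied posts first, then the posts whose comments are newest, which is the shape idx_post_aggregates_stickied_newest_comment_time serves.

-- Sketch only: a "newest comment" style sort served by the new indexes.
select pa.post_id, pa.comments, pa.newest_comment_time
from post_aggregates pa
order by pa.stickied desc, pa.newest_comment_time desc
limit 20;

Sorting by newest_comment_time_necro instead gives the same ordering, but without letting comments on posts older than two days bump them back up.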

View file

@ -0,0 +1,35 @@
create or replace function comment_aggregates_comment()
returns trigger language plpgsql
as $$
begin
IF (TG_OP = 'INSERT') THEN
insert into comment_aggregates (comment_id) values (NEW.id);
ELSIF (TG_OP = 'DELETE') THEN
delete from comment_aggregates where comment_id = OLD.id;
END IF;
return null;
end $$;
create or replace function post_aggregates_post()
returns trigger language plpgsql
as $$
begin
IF (TG_OP = 'INSERT') THEN
insert into post_aggregates (post_id) values (NEW.id);
ELSIF (TG_OP = 'DELETE') THEN
delete from post_aggregates where post_id = OLD.id;
END IF;
return null;
end $$;
create or replace function community_aggregates_community()
returns trigger language plpgsql
as $$
begin
IF (TG_OP = 'INSERT') THEN
insert into community_aggregates (community_id) values (NEW.id);
ELSIF (TG_OP = 'DELETE') THEN
delete from community_aggregates where community_id = OLD.id;
END IF;
return null;
end $$;

View file

@ -0,0 +1,39 @@
-- The published and updated columns on the aggregates tables are set with now(),
-- when they should use the published or updated values from the source rows.
-- This is mainly a problem for posts fetched over federation.
create or replace function comment_aggregates_comment()
returns trigger language plpgsql
as $$
begin
IF (TG_OP = 'INSERT') THEN
insert into comment_aggregates (comment_id, published) values (NEW.id, NEW.published);
ELSIF (TG_OP = 'DELETE') THEN
delete from comment_aggregates where comment_id = OLD.id;
END IF;
return null;
end $$;
create or replace function post_aggregates_post()
returns trigger language plpgsql
as $$
begin
IF (TG_OP = 'INSERT') THEN
insert into post_aggregates (post_id, published, newest_comment_time, newest_comment_time_necro) values (NEW.id, NEW.published, NEW.published, NEW.published);
ELSIF (TG_OP = 'DELETE') THEN
delete from post_aggregates where post_id = OLD.id;
END IF;
return null;
end $$;
create or replace function community_aggregates_community()
returns trigger language plpgsql
as $$
begin
IF (TG_OP = 'INSERT') THEN
insert into community_aggregates (community_id, published) values (NEW.id, NEW.published);
ELSIF (TG_OP = 'DELETE') THEN
delete from community_aggregates where community_id = OLD.id;
END IF;
return null;
end $$;
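
Note that only the trigger functions are replaced here; the triggers that call them were created in earlier migrations and are not part of this diff. As a hedged sketch of how such a function is typically attached in PostgreSQL (the trigger name below is illustrative, not taken from the repository):

-- Sketch only, assuming a row-level trigger on the post table.
create trigger post_aggregates_post
after insert or delete on post
for each row execute procedure post_aggregates_post();

Because create or replace function swaps the function body in place, the existing triggers pick up the corrected published handling without having to be dropped and recreated.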

View file

@ -0,0 +1,34 @@
create table category (
id serial primary key,
name varchar(100) not null unique
);
insert into category (name) values
('Discussion'),
('Humor/Memes'),
('Gaming'),
('Movies'),
('TV'),
('Music'),
('Literature'),
('Comics'),
('Photography'),
('Art'),
('Learning'),
('DIY'),
('Lifestyle'),
('News'),
('Politics'),
('Society'),
('Gender/Identity/Sexuality'),
('Race/Colonisation'),
('Religion'),
('Science/Technology'),
('Programming/Software'),
('Health/Sports/Fitness'),
('Porn'),
('Places'),
('Meta'),
('Other');
ALTER TABLE community ADD category_id int references category on update cascade on delete cascade not null default 1;

View file

@ -0,0 +1,2 @@
ALTER TABLE community DROP COLUMN category_id;
DROP TABLE category;
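
Together these two files remove the hard-coded category system: one drops community.category_id and the category table, the other recreates them so the change can be rolled back, with every community defaulting back to category 1 ('Discussion'). A quick way to confirm the upgrade applied is shown below; this check is illustrative and not part of the migration.

-- Sketch only: confirm the column was removed after upgrading.
select column_name
from information_schema.columns
where table_name = 'community' and column_name = 'category_id';
-- expected result: zero rows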

View file

@ -109,7 +109,6 @@ fn community_updates_2020_04_02(conn: &PgConnection) -> Result<(), LemmyError> {
name: ccommunity.name.to_owned(), name: ccommunity.name.to_owned(),
title: ccommunity.title.to_owned(), title: ccommunity.title.to_owned(),
description: ccommunity.description.to_owned(), description: ccommunity.description.to_owned(),
category_id: ccommunity.category_id,
creator_id: ccommunity.creator_id, creator_id: ccommunity.creator_id,
removed: None, removed: None,
deleted: None, deleted: None,

View file

@ -1,231 +0,0 @@
extern crate lemmy_server;
#[macro_use]
extern crate diesel_migrations;
use activitystreams::{
activity::{
kind::{CreateType, FollowType},
ActorAndObject,
},
base::{BaseExt, ExtendsExt},
object::{Note, ObjectExt},
};
use actix::prelude::*;
use actix_web::{test::TestRequest, web, web::Path, HttpRequest};
use chrono::Utc;
use diesel::{
r2d2::{ConnectionManager, Pool},
PgConnection,
};
use http_signature_normalization_actix::PrepareVerifyError;
use lemmy_api::match_websocket_operation;
use lemmy_apub::{
activity_queue::create_activity_queue,
inbox::{
community_inbox,
community_inbox::community_inbox,
shared_inbox,
shared_inbox::shared_inbox,
user_inbox,
user_inbox::user_inbox,
},
};
use lemmy_db_queries::{get_database_url_from_env, Crud, ListingType, SortType};
use lemmy_db_schema::source::{
community::{Community, CommunityForm},
user::{UserForm, User_},
};
use lemmy_server::code_migrations::run_advanced_migrations;
use lemmy_utils::{
apub::generate_actor_keypair,
rate_limit::{rate_limiter::RateLimiter, RateLimit},
settings::Settings,
};
use lemmy_websocket::{chat_server::ChatServer, LemmyContext};
use reqwest::Client;
use serde::{Deserialize, Serialize};
use std::{ops::Deref, sync::Arc};
use tokio::sync::Mutex;
use url::Url;
embed_migrations!();
fn create_context() -> LemmyContext {
let settings = Settings::get();
let db_url = match get_database_url_from_env() {
Ok(url) => url,
Err(_) => settings.get_database_url(),
};
let manager = ConnectionManager::<PgConnection>::new(&db_url);
let pool = Pool::builder()
.max_size(settings.database.pool_size)
.build(manager)
.unwrap();
embedded_migrations::run(&pool.get().unwrap()).unwrap();
run_advanced_migrations(pool.get().unwrap().deref()).unwrap();
let rate_limiter = RateLimit {
rate_limiter: Arc::new(Mutex::new(RateLimiter::default())),
};
let activity_queue = create_activity_queue();
let chat_server = ChatServer::startup(
pool.clone(),
rate_limiter,
|c, i, o, d| Box::pin(match_websocket_operation(c, i, o, d)),
Client::default(),
activity_queue,
)
.start();
LemmyContext::create(
pool,
chat_server,
Client::default(),
create_activity_queue(),
)
}
fn create_user(conn: &PgConnection, name: &str) -> User_ {
let user_keypair = generate_actor_keypair().unwrap();
let new_user = UserForm {
name: name.into(),
preferred_username: None,
password_encrypted: "nope".into(),
email: None,
matrix_user_id: None,
avatar: None,
banner: None,
admin: false,
banned: Some(false),
updated: None,
published: None,
show_nsfw: false,
theme: "browser".into(),
default_sort_type: SortType::Hot as i16,
default_listing_type: ListingType::Subscribed as i16,
lang: "browser".into(),
show_avatars: true,
send_notifications_to_email: false,
actor_id: Some(
Url::parse(&format!("http://localhost:8536/u/{}", name))
.unwrap()
.into(),
),
bio: None,
local: true,
private_key: Some(user_keypair.private_key),
public_key: Some(user_keypair.public_key),
last_refreshed_at: None,
inbox_url: None,
shared_inbox_url: None,
};
User_::create(&conn, &new_user).unwrap()
}
fn create_community(conn: &PgConnection, creator_id: i32) -> Community {
let new_community = CommunityForm {
name: "test_community".into(),
creator_id,
title: "test_community".to_owned(),
description: None,
category_id: 1,
nsfw: false,
removed: None,
deleted: None,
updated: None,
actor_id: None,
local: true,
private_key: None,
public_key: None,
last_refreshed_at: None,
published: None,
icon: None,
banner: None,
followers_url: None,
inbox_url: None,
shared_inbox_url: None,
};
Community::create(&conn, &new_community).unwrap()
}
fn create_activity<'a, Activity, Return>(user_id: Url) -> web::Json<Return>
where
for<'de> Return: Deserialize<'de> + 'a,
Activity: std::default::Default + Serialize,
{
let mut activity = ActorAndObject::<Activity>::new(user_id, Note::new().into_any_base().unwrap());
activity
.set_id(Url::parse("http://localhost:8536/create/1").unwrap())
.set_many_ccs(vec![Url::parse("http://localhost:8536/c/main").unwrap()]);
let activity = serde_json::to_value(&activity).unwrap();
let activity: Return = serde_json::from_value(activity).unwrap();
web::Json(activity)
}
fn create_http_request() -> HttpRequest {
let time1 = Utc::now().timestamp();
let time2 = Utc::now().timestamp();
let signature = format!(
r#"keyId="my-key-id",algorithm="hs2019",created="{}",expires="{}",headers="(request-target) (created) (expires) date content-type",signature="blah blah blah""#,
time1, time2
);
TestRequest::post()
.uri("http://localhost:8536/")
.header("Signature", signature)
.to_http_request()
}
// TODO: this fails with a stack overflow for some reason
#[actix_rt::test]
#[ignore]
async fn test_shared_inbox_expired_signature() {
let request = create_http_request();
let context = create_context();
let connection = &context.pool().get().unwrap();
let user = create_user(connection, "shared_inbox_rvgfd");
let activity =
create_activity::<CreateType, ActorAndObject<shared_inbox::ValidTypes>>(user.actor_id.into());
let response = shared_inbox(request, activity, web::Data::new(context)).await;
assert_eq!(
format!("{}", response.err().unwrap()),
format!("{}", PrepareVerifyError::Expired)
);
User_::delete(connection, user.id).unwrap();
}
#[actix_rt::test]
async fn test_user_inbox_expired_signature() {
let request = create_http_request();
let context = create_context();
let connection = &context.pool().get().unwrap();
let user = create_user(connection, "user_inbox_cgsax");
let activity =
create_activity::<CreateType, ActorAndObject<user_inbox::UserValidTypes>>(user.actor_id.into());
let path = Path::<String> {
0: "username".to_string(),
};
let response = user_inbox(request, activity, path, web::Data::new(context)).await;
assert_eq!(
format!("{}", response.err().unwrap()),
format!("{}", PrepareVerifyError::Expired)
);
User_::delete(connection, user.id).unwrap();
}
#[actix_rt::test]
async fn test_community_inbox_expired_signature() {
let context = create_context();
let connection = &context.pool().get().unwrap();
let user = create_user(connection, "community_inbox_hrxa");
let community = create_community(connection, user.id);
let request = create_http_request();
let activity = create_activity::<FollowType, ActorAndObject<community_inbox::CommunityValidTypes>>(
user.actor_id.into(),
);
let path = Path::<String> { 0: community.name };
let response = community_inbox(request, activity, path, web::Data::new(context)).await;
assert_eq!(
format!("{}", response.err().unwrap()),
format!("{}", PrepareVerifyError::Expired)
);
User_::delete(connection, user.id).unwrap();
Community::delete(connection, community.id).unwrap();
}