Merge branch 'main' into rewrite-remaining-activities

Felix Ableitner 2021-08-19 20:18:48 +02:00
commit a430ea98c1
59 changed files with 1086 additions and 768 deletions

Cargo.lock (generated): 805 lines changed; diff suppressed because it is too large.


@@ -47,24 +47,27 @@ lemmy_routes = { version = "=0.11.3", path = "./crates/routes" }
 diesel = "1.4.7"
 diesel_migrations = "1.4.0"
 chrono = { version = "0.4.19", features = ["serde"] }
-serde = { version = "1.0.126", features = ["derive"] }
+serde = { version = "1.0.127", features = ["derive"] }
 actix = "0.12.0"
 actix-web = { version = "4.0.0-beta.8", default-features = false, features = ["rustls"] }
 log = "0.4.14"
-env_logger = "0.8.4"
+env_logger = "0.9.0"
 strum = "0.21.0"
 url = { version = "2.2.2", features = ["serde"] }
-openssl = "0.10.35"
+openssl = "0.10.36"
 http-signature-normalization-actix = { version = "0.5.0-beta.7", default-features = false, features = ["sha-2"] }
-tokio = { version = "1.8.0", features = ["sync"] }
+tokio = { version = "1.10.0", features = ["sync"] }
-anyhow = "1.0.41"
+anyhow = "1.0.43"
 reqwest = { version = "0.11.4", features = ["json"] }
 activitystreams = "0.7.0-alpha.11"
 actix-rt = { version = "2.2.0", default-features = false }
-serde_json = { version = "1.0.64", features = ["preserve_order"] }
+serde_json = { version = "1.0.66", features = ["preserve_order"] }
 clokwerk = "0.3.5"
 [dev-dependencies.cargo-husky]
 version = "1.5.0"
 default-features = false # Disable features which are enabled by default
 features = ["precommit-hook", "run-cargo-fmt", "run-cargo-clippy"]
+[package.metadata.cargo-udeps.ignore]
+development = ["cargo-husky"]


@@ -101,7 +101,6 @@ Each Lemmy server can set its own moderation policy; appointing site-wide admins
 - Can transfer site and communities to others.
 - Can fully erase your data, replacing all posts and comments.
 - NSFW post / community support.
-- OEmbed support via Iframely.
 - High performance.
 - Server is written in rust.
 - Front end is `~80kB` gzipped.
@@ -124,7 +123,7 @@ Each Lemmy server can set its own moderation policy; appointing site-wide admins
 - [lemmy-js-client](https://github.com/LemmyNet/lemmy-js-client)
 - [Kotlin API ( under development )](https://github.com/eiknat/lemmy-client)
-- [Dart API client ( under development )](https://github.com/krawieck/lemmy_api_client)
+- [Dart API client](https://github.com/krawieck/lemmy_api_client)
 ## Support / Donate


@@ -72,16 +72,12 @@
 - src: 'templates/nginx.conf'
 dest: '/etc/nginx/sites-enabled/lemmy.conf'
 mode: '0644'
-- src: '../docker/iframely.config.local.js'
-dest: '{{lemmy_base_dir}}/iframely.config.local.js'
-mode: '0600'
 vars:
 lemmy_docker_image: "dessalines/lemmy:{{ lookup('file', 'VERSION') }}"
 lemmy_docker_ui_image: "dessalines/lemmy-ui:{{ lookup('file', 'VERSION') }}"
 lemmy_port: "8536"
 lemmy_ui_port: "1235"
 pictshare_port: "8537"
-iframely_port: "8538"
 - name: add config file (only during initial setup)
 template:


@@ -61,16 +61,12 @@
 - src: 'templates/nginx.conf'
 dest: '/etc/nginx/sites-enabled/lemmy.conf'
 mode: '0644'
-- src: '../docker/iframely.config.local.js'
-dest: '{{lemmy_base_dir}}/iframely.config.local.js'
-mode: '0600'
 vars:
 lemmy_docker_image: "dessalines/lemmy:dev"
 lemmy_docker_ui_image: "dessalines/lemmy-ui:{{ lookup('file', 'VERSION') }}"
 lemmy_port: "8536"
 lemmy_ui_port: "1235"
 pictshare_port: "8537"
-iframely_port: "8538"
 postgres_password: "{{ lookup('password', 'passwords/{{ inventory_hostname }}/postgres chars=ascii_letters,digits') }}"
 - name: add config file (only during initial setup)


@@ -13,7 +13,6 @@ services:
 depends_on:
 - postgres
 - pictrs
-- iframely
 lemmy-ui:
 image: {{ lemmy_docker_ui_image }}
@@ -47,15 +46,6 @@ services:
 restart: always
 mem_limit: 200m
-iframely:
-image: dogbin/iframely:latest
-ports:
-- "127.0.0.1:8061:80"
-volumes:
-- ./iframely.config.local.js:/iframely/config.local.js:ro
-restart: always
-mem_limit: 200m
 postfix:
 image: mwader/postfix-relay
 environment:


@@ -101,12 +101,6 @@ server {
 return 301 /pictrs/image/$1;
 }
-location /iframely/ {
-proxy_pass http://0.0.0.0:8061/;
-proxy_set_header X-Real-IP $remote_addr;
-proxy_set_header Host $host;
-proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
-}
 }
 # Anonymize IP addresses


@@ -37,8 +37,6 @@
 jwt_secret: "changeme"
 # address where pictrs is available
 pictrs_url: "http://pictrs:8080"
-# address where iframely is available
-iframely_url: "http://iframely"
 # maximum length of local community and user names
 actor_name_max_length: 20
 # rate limits for various user actions, by user ip


@@ -21,10 +21,10 @@ lemmy_db_views_actor = { version = "=0.11.3", path = "../db_views_actor" }
 lemmy_api_common = { version = "=0.11.3", path = "../api_common" }
 lemmy_websocket = { version = "=0.11.3", path = "../websocket" }
 diesel = "1.4.7"
-bcrypt = "0.10.0"
+bcrypt = "0.10.1"
 chrono = { version = "0.4.19", features = ["serde"] }
-serde_json = { version = "1.0.64", features = ["preserve_order"] }
+serde_json = { version = "1.0.66", features = ["preserve_order"] }
-serde = { version = "1.0.126", features = ["derive"] }
+serde = { version = "1.0.127", features = ["derive"] }
 actix = "0.12.0"
 actix-web = { version = "4.0.0-beta.8", default-features = false }
 actix-rt = { version = "2.2.0", default-features = false }
@@ -35,18 +35,18 @@ strum = "0.21.0"
 strum_macros = "0.21.1"
 lazy_static = "1.4.0"
 url = { version = "2.2.2", features = ["serde"] }
-openssl = "0.10.35"
+openssl = "0.10.36"
 http = "0.2.4"
 http-signature-normalization-actix = { version = "0.5.0-beta.7", default-features = false, features = ["sha-2"] }
 base64 = "0.13.0"
-tokio = "1.8.0"
+tokio = "1.10.0"
-futures = "0.3.15"
+futures = "0.3.16"
 itertools = "0.10.1"
 uuid = { version = "0.8.2", features = ["serde", "v4"] }
 sha2 = "0.9.5"
-async-trait = "0.1.50"
+async-trait = "0.1.51"
 captcha = "0.0.8"
-anyhow = "1.0.41"
+anyhow = "1.0.43"
 thiserror = "1.0.26"
 background-jobs = "0.9.0"
 reqwest = { version = "0.11.4", features = ["json"] }


@@ -400,16 +400,14 @@ impl Perform for TransferCommunity {
 }
 // Mod tables
-// TODO there should probably be another table for transfer community
-// Right now, it will just look like it modded them twice
-let form = ModAddCommunityForm {
+let form = ModTransferCommunityForm {
 mod_person_id: local_user_view.person.id,
 other_person_id: data.person_id,
 community_id: data.community_id,
 removed: Some(false),
 };
 blocking(context.pool(), move |conn| {
-ModAddCommunity::create(conn, &form)
+ModTransferCommunity::create(conn, &form)
 })
 .await??;


@@ -121,6 +121,9 @@ pub async fn match_websocket_operation(
 UserOperation::ResolvePostReport => {
 do_websocket_operation::<ResolvePostReport>(context, id, op, data).await
 }
+UserOperation::GetSiteMetadata => {
+do_websocket_operation::<GetSiteMetadata>(context, id, op, data).await
+}
 // Comment ops
 UserOperation::MarkCommentAsRead => {


@@ -19,6 +19,7 @@ use lemmy_db_queries::{
 from_opt_str_to_opt_enum,
 source::{
 comment::Comment_,
+community::Community_,
 local_user::LocalUser_,
 password_reset_request::PasswordResetRequest_,
 person::Person_,
@@ -33,6 +34,7 @@ use lemmy_db_schema::{
 naive_now,
 source::{
 comment::Comment,
+community::Community,
 local_user::{LocalUser, LocalUserForm},
 moderator::*,
 password_reset_request::*,
@@ -51,6 +53,7 @@ use lemmy_db_views::{
 };
 use lemmy_db_views_actor::{
 community_follower_view::CommunityFollowerView,
+community_moderator_view::CommunityModeratorView,
 person_mention_view::{PersonMentionQueryBuilder, PersonMentionView},
 person_view::PersonViewSafe,
 };
@@ -408,8 +411,24 @@ impl Perform for BanPerson {
 // Communities
 // Remove all communities where they're the top mod
-// TODO couldn't get group by's working in diesel,
 // for now, remove the communities manually
+let first_mod_communities = blocking(context.pool(), move |conn: &'_ _| {
+CommunityModeratorView::get_community_first_mods(conn)
+})
+.await??;
+// Filter to only this banned users top communities
+let banned_user_first_communities: Vec<CommunityModeratorView> = first_mod_communities
+.into_iter()
+.filter(|fmc| fmc.moderator.id == banned_person_id)
+.collect();
+for first_mod_community in banned_user_first_communities {
+blocking(context.pool(), move |conn: &'_ _| {
+Community::update_removed(conn, first_mod_community.community.id, true)
+})
+.await??;
+}
 // Comments
 blocking(context.pool(), move |conn: &'_ _| {


@@ -23,7 +23,7 @@ use lemmy_apub::{
 use lemmy_db_queries::{source::post::Post_, Crud, Likeable, Saveable};
 use lemmy_db_schema::source::{moderator::*, post::*};
 use lemmy_db_views::post_view::PostView;
-use lemmy_utils::{ApiError, ConnectionId, LemmyError};
+use lemmy_utils::{request::fetch_site_metadata, ApiError, ConnectionId, LemmyError};
 use lemmy_websocket::{send::send_post_ws_message, LemmyContext, UserOperation};
 use std::convert::TryInto;
@@ -285,3 +285,20 @@ impl Perform for SavePost {
 Ok(PostResponse { post_view })
 }
 }
+#[async_trait::async_trait(?Send)]
+impl Perform for GetSiteMetadata {
+type Response = GetSiteMetadataResponse;
+async fn perform(
+&self,
+context: &Data<LemmyContext>,
+_websocket_id: Option<ConnectionId>,
+) -> Result<GetSiteMetadataResponse, LemmyError> {
+let data: &Self = self;
+let metadata = fetch_site_metadata(context.client(), &data.url).await?;
+Ok(GetSiteMetadataResponse { metadata })
+}
+}


@@ -40,6 +40,7 @@ use lemmy_db_views_moderator::{
 mod_remove_community_view::ModRemoveCommunityView,
 mod_remove_post_view::ModRemovePostView,
 mod_sticky_post_view::ModStickyPostView,
+mod_transfer_community_view::ModTransferCommunityView,
 };
 use lemmy_utils::{
 location_info,
@@ -97,6 +98,11 @@ impl Perform for GetModlog {
 })
 .await??;
+let transferred_to_community = blocking(context.pool(), move |conn| {
+ModTransferCommunityView::list(conn, community_id, mod_person_id, page, limit)
+})
+.await??;
 // These arrays are only for the full modlog, when a community isn't given
 let (removed_communities, banned, added) = if data.community_id.is_none() {
 blocking(context.pool(), move |conn| {
@@ -122,6 +128,7 @@ impl Perform for GetModlog {
 banned,
 added_to_community,
 added,
+transferred_to_community,
 })
 }
 }


@@ -17,10 +17,10 @@ lemmy_db_views_moderator = { version = "=0.11.3", path = "../db_views_moderator"
 lemmy_db_views_actor = { version = "=0.11.3", path = "../db_views_actor" }
 lemmy_db_schema = { version = "=0.11.3", path = "../db_schema" }
 lemmy_utils = { version = "=0.11.3", path = "../utils" }
-serde = { version = "1.0.126", features = ["derive"] }
+serde = { version = "1.0.127", features = ["derive"] }
 log = "0.4.14"
 diesel = "1.4.7"
 actix-web = { version = "4.0.0-beta.8", default-features = false, features = ["cookies"] }
 chrono = { version = "0.4.19", features = ["serde"] }
-serde_json = { version = "1.0.64", features = ["preserve_order"] }
+serde_json = { version = "1.0.66", features = ["preserve_order"] }
 url = "2.2.2"


@@ -272,6 +272,11 @@ pub async fn get_local_user_view_from_jwt(
 return Err(ApiError::err("site_ban").into());
 }
+// Check for user deletion
+if local_user_view.person.deleted {
+return Err(ApiError::err("deleted").into());
+}
 check_validator_time(&local_user_view.local_user.validator_time, &claims)?;
 Ok(local_user_view)


@@ -8,6 +8,7 @@ use lemmy_db_views_actor::{
 community_moderator_view::CommunityModeratorView,
 community_view::CommunityView,
 };
+use lemmy_utils::request::SiteMetadata;
 use serde::{Deserialize, Serialize};
 use url::Url;
@@ -148,3 +149,13 @@ pub struct ListPostReports {
 pub struct ListPostReportsResponse {
 pub posts: Vec<PostReportView>,
 }
+#[derive(Deserialize, Debug)]
+pub struct GetSiteMetadata {
+pub url: Url,
+}
+#[derive(Serialize, Clone, Debug)]
+pub struct GetSiteMetadataResponse {
+pub metadata: SiteMetadata,
+}
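For context, the new `GetSiteMetadata` request carries only a `url` field, so a client payload deserializes into it directly. A minimal sketch, not part of this commit, assuming `serde_json` and the `url` crate are in scope as elsewhere in these crates:

// Hypothetical round-trip check for the new request struct.
let data: GetSiteMetadata =
  serde_json::from_str(r#"{ "url": "https://example.com/article" }"#).unwrap();
assert_eq!(data.url.host_str(), Some("example.com"));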


@@ -16,6 +16,7 @@ use lemmy_db_views_moderator::{
 mod_remove_community_view::ModRemoveCommunityView,
 mod_remove_post_view::ModRemovePostView,
 mod_sticky_post_view::ModStickyPostView,
+mod_transfer_community_view::ModTransferCommunityView,
 };
 use serde::{Deserialize, Serialize};
@@ -60,6 +61,7 @@ pub struct GetModlogResponse {
 pub banned_from_community: Vec<ModBanFromCommunityView>,
 pub banned: Vec<ModBanView>,
 pub added_to_community: Vec<ModAddCommunityView>,
+pub transferred_to_community: Vec<ModTransferCommunityView>,
 pub added: Vec<ModAddView>,
 }


@@ -16,10 +16,10 @@ lemmy_db_views_actor = { version = "=0.11.3", path = "../db_views_actor" }
 lemmy_api_common = { version = "=0.11.3", path = "../api_common" }
 lemmy_websocket = { version = "=0.11.3", path = "../websocket" }
 diesel = "1.4.7"
-bcrypt = "0.10.0"
+bcrypt = "0.10.1"
 chrono = { version = "0.4.19", features = ["serde"] }
-serde_json = { version = "1.0.64", features = ["preserve_order"] }
+serde_json = { version = "1.0.66", features = ["preserve_order"] }
-serde = { version = "1.0.126", features = ["derive"] }
+serde = { version = "1.0.127", features = ["derive"] }
 actix = "0.12.0"
 actix-web = { version = "4.0.0-beta.8", default-features = false }
 actix-rt = { version = "2.2.0", default-features = false }
@@ -30,17 +30,17 @@ strum = "0.21.0"
 strum_macros = "0.21.1"
 lazy_static = "1.4.0"
 url = { version = "2.2.2", features = ["serde"] }
-openssl = "0.10.35"
+openssl = "0.10.36"
 http = "0.2.4"
 http-signature-normalization-actix = { version = "0.5.0-beta.7", default-features = false, features = ["sha-2"] }
 base64 = "0.13.0"
-tokio = "1.8.0"
+tokio = "1.10.0"
-futures = "0.3.15"
+futures = "0.3.16"
 itertools = "0.10.1"
 uuid = { version = "0.8.2", features = ["serde", "v4"] }
 sha2 = "0.9.5"
-async-trait = "0.1.50"
+async-trait = "0.1.51"
-anyhow = "1.0.41"
+anyhow = "1.0.43"
 thiserror = "1.0.26"
 background-jobs = "0.9.0"
 reqwest = { version = "0.11.4", features = ["json"] }


@@ -20,7 +20,7 @@ use lemmy_apub::{
 use lemmy_db_queries::{source::post::Post_, Crud, Likeable};
 use lemmy_db_schema::source::post::*;
 use lemmy_utils::{
-request::fetch_iframely_and_pictrs_data,
+request::fetch_site_data,
 utils::{check_slurs, check_slurs_opt, clean_url_params, is_valid_post_title},
 ApiError,
 ConnectionId,
@@ -49,11 +49,10 @@ impl PerformCrud for CreatePost {
 check_community_ban(local_user_view.person.id, data.community_id, context.pool()).await?;
-// Fetch Iframely and pictrs cached image
+// Fetch post links and pictrs cached image
 let data_url = data.url.as_ref();
-let (iframely_response, pictrs_thumbnail) =
-fetch_iframely_and_pictrs_data(context.client(), data_url).await?;
-let (embed_title, embed_description, embed_html) = iframely_response
+let (metadata_res, pictrs_thumbnail) = fetch_site_data(context.client(), data_url).await;
+let (embed_title, embed_description, embed_html) = metadata_res
 .map(|u| (u.title, u.description, u.html))
 .unwrap_or((None, None, None));


@@ -5,7 +5,7 @@ use lemmy_apub::activities::{post::create_or_update::CreateOrUpdatePost, CreateO
 use lemmy_db_queries::{source::post::Post_, Crud};
 use lemmy_db_schema::{naive_now, source::post::*};
 use lemmy_utils::{
-request::fetch_iframely_and_pictrs_data,
+request::fetch_site_data,
 utils::{check_slurs_opt, clean_url_params, is_valid_post_title},
 ApiError,
 ConnectionId,
@@ -49,11 +49,10 @@ impl PerformCrud for EditPost {
 return Err(ApiError::err("no_post_edit_allowed").into());
 }
-// Fetch Iframely and Pictrs cached image
+// Fetch post links and Pictrs cached image
 let data_url = data.url.as_ref();
-let (iframely_response, pictrs_thumbnail) =
-fetch_iframely_and_pictrs_data(context.client(), data_url).await?;
-let (embed_title, embed_description, embed_html) = iframely_response
+let (metadata_res, pictrs_thumbnail) = fetch_site_data(context.client(), data_url).await;
+let (embed_title, embed_description, embed_html) = metadata_res
 .map(|u| (u.title, u.description, u.html))
 .unwrap_or((None, None, None));


@@ -21,10 +21,10 @@ lemmy_api_common = { version = "=0.11.3", path = "../api_common" }
 lemmy_websocket = { version = "=0.11.3", path = "../websocket" }
 diesel = "1.4.7"
 activitystreams = "0.7.0-alpha.11"
-bcrypt = "0.10.0"
+bcrypt = "0.10.1"
 chrono = { version = "0.4.19", features = ["serde"] }
-serde_json = { version = "1.0.64", features = ["preserve_order"] }
+serde_json = { version = "1.0.66", features = ["preserve_order"] }
-serde = { version = "1.0.126", features = ["derive"] }
+serde = { version = "1.0.127", features = ["derive"] }
 serde_with = "1.9.4"
 actix = "0.12.0"
 actix-web = { version = "4.0.0-beta.8", default-features = false }
@@ -36,21 +36,20 @@ strum = "0.21.0"
 strum_macros = "0.21.1"
 url = { version = "2.2.2", features = ["serde"] }
 percent-encoding = "2.1.0"
-openssl = "0.10.35"
+openssl = "0.10.36"
 http = "0.2.4"
 http-signature-normalization-actix = { version = "0.5.0-beta.7", default-features = false, features = ["sha-2"] }
 http-signature-normalization-reqwest = { version = "0.2.0", default-features = false, features = ["sha-2"] }
 base64 = "0.13.0"
-tokio = "1.8.0"
+tokio = "1.10.0"
-futures = "0.3.15"
+futures = "0.3.16"
 itertools = "0.10.1"
 uuid = { version = "0.8.2", features = ["serde", "v4"] }
 sha2 = "0.9.5"
-async-trait = "0.1.50"
+async-trait = "0.1.51"
-anyhow = "1.0.41"
+anyhow = "1.0.43"
 thiserror = "1.0.26"
 background-jobs = "0.9.0"
 reqwest = { version = "0.11.4", features = ["json"] }
-backtrace = "0.3.60"
 lazy_static = "1.4.0"


@@ -65,7 +65,7 @@ pub struct Delete {
 actor: Url,
 to: PublicUrl,
 pub(in crate::activities::deletion) object: Url,
-cc: [Url; 1],
+pub(in crate::activities::deletion) cc: [Url; 1],
 #[serde(rename = "type")]
 kind: DeleteType,
 /// If summary is present, this is a mod action (Remove in Lemmy terms). Otherwise, its a user


@@ -31,7 +31,7 @@ use lemmy_db_schema::{
 },
 };
 use lemmy_utils::{
-request::fetch_iframely_and_pictrs_data,
+request::fetch_site_data,
 utils::{check_slurs, convert_datetime, markdown_to_html, remove_slurs},
 LemmyError,
 };
@@ -188,12 +188,12 @@ impl FromApub for Post {
 let community = extract_community(&page.to, context, request_counter).await?;
 let thumbnail_url: Option<Url> = page.image.clone().map(|i| i.url);
-let (iframely_response, pictrs_thumbnail) = if let Some(url) = &page.url {
-fetch_iframely_and_pictrs_data(context.client(), Some(url)).await?
+let (metadata_res, pictrs_thumbnail) = if let Some(url) = &page.url {
+fetch_site_data(context.client(), Some(url)).await
 } else {
 (None, thumbnail_url)
 };
-let (embed_title, embed_description, embed_html) = iframely_response
+let (embed_title, embed_description, embed_html) = metadata_res
 .map(|u| (u.title, u.description, u.html))
 .unwrap_or((None, None, None));


@@ -10,8 +10,7 @@ lemmy_utils = { version = "=0.11.3", path = "../utils" }
 lemmy_websocket = { version = "=0.11.3", path = "../websocket" }
 lemmy_apub_lib_derive = { version = "=0.11.3", path = "../apub_lib_derive" }
 activitystreams = "0.7.0-alpha.11"
-activitystreams-ext = "0.1.0-alpha.2"
-serde = { version = "1.0.123", features = ["derive"] }
-async-trait = "0.1.42"
-url = { version = "2.2.1", features = ["serde"] }
-serde_json = { version = "1.0.64", features = ["preserve_order"] }
+serde = { version = "1.0.127", features = ["derive"] }
+async-trait = "0.1.51"
+url = { version = "2.2.2", features = ["serde"] }
+serde_json = { version = "1.0.66", features = ["preserve_order"] }


@@ -9,9 +9,9 @@ license = "AGPL-3.0"
 proc-macro = true
 [dev-dependencies]
-trybuild = { version = "1.0", features = ["diff"] }
+trybuild = { version = "1.0.45", features = ["diff"] }
 [dependencies]
-proc-macro2 = "1.0"
+proc-macro2 = "1.0.28"
-syn = "1.0"
+syn = "1.0.74"
-quote = "1.0"
+quote = "1.0.9"


@@ -16,8 +16,8 @@ lemmy_db_schema = { version = "=0.11.3", path = "../db_schema" }
 diesel = { version = "1.4.7", features = ["postgres","chrono","r2d2","serde_json"] }
 diesel_migrations = "1.4.0"
 chrono = { version = "0.4.19", features = ["serde"] }
-serde = { version = "1.0.126", features = ["derive"] }
+serde = { version = "1.0.127", features = ["derive"] }
-serde_json = { version = "1.0.64", features = ["preserve_order"] }
+serde_json = { version = "1.0.66", features = ["preserve_order"] }
 strum = "0.21.0"
 strum_macros = "0.21.1"
 log = "0.4.14"
@@ -25,7 +25,7 @@ sha2 = "0.9.5"
 url = { version = "2.2.2", features = ["serde"] }
 lazy_static = "1.4.0"
 regex = "1.5.4"
-bcrypt = "0.10.0"
+bcrypt = "0.10.1"
 [dev-dependencies]
 serial_test = "0.5.1"


@@ -192,6 +192,33 @@ impl Crud for ModAddCommunity {
 }
 }
+impl Crud for ModTransferCommunity {
+type Form = ModTransferCommunityForm;
+type IdType = i32;
+fn read(conn: &PgConnection, from_id: i32) -> Result<Self, Error> {
+use lemmy_db_schema::schema::mod_transfer_community::dsl::*;
+mod_transfer_community.find(from_id).first::<Self>(conn)
+}
+fn create(conn: &PgConnection, form: &ModTransferCommunityForm) -> Result<Self, Error> {
+use lemmy_db_schema::schema::mod_transfer_community::dsl::*;
+insert_into(mod_transfer_community)
+.values(form)
+.get_result::<Self>(conn)
+}
+fn update(
+conn: &PgConnection,
+from_id: i32,
+form: &ModTransferCommunityForm,
+) -> Result<Self, Error> {
+use lemmy_db_schema::schema::mod_transfer_community::dsl::*;
+diesel::update(mod_transfer_community.find(from_id))
+.set(form)
+.get_result::<Self>(conn)
+}
+}
 impl Crud for ModAdd {
 type Form = ModAddForm;
 type IdType = i32;


@@ -11,8 +11,8 @@ doctest = false
 [dependencies]
 diesel = { version = "1.4.7", features = ["postgres","chrono","r2d2","serde_json"] }
 chrono = { version = "0.4.19", features = ["serde"] }
-serde = { version = "1.0.126", features = ["derive"] }
+serde = { version = "1.0.127", features = ["derive"] }
-serde_json = { version = "1.0.64", features = ["preserve_order"] }
+serde_json = { version = "1.0.66", features = ["preserve_order"] }
 log = "0.4.14"
 url = { version = "2.2.2", features = ["serde"] }
 diesel-derive-newtype = "0.1.2"


@@ -181,6 +181,17 @@ table! {
 }
 }
+table! {
+mod_transfer_community (id) {
+id -> Int4,
+mod_person_id -> Int4,
+other_person_id -> Int4,
+community_id -> Int4,
+removed -> Nullable<Bool>,
+when_ -> Timestamp,
+}
+}
 table! {
 mod_ban (id) {
 id -> Int4,
@@ -549,6 +560,7 @@ joinable!(community_person_ban -> community (community_id));
 joinable!(community_person_ban -> person (person_id));
 joinable!(local_user -> person (person_id));
 joinable!(mod_add_community -> community (community_id));
+joinable!(mod_transfer_community -> community (community_id));
 joinable!(mod_ban_from_community -> community (community_id));
 joinable!(mod_lock_post -> person (mod_person_id));
 joinable!(mod_lock_post -> post (post_id));
@@ -593,6 +605,7 @@ allow_tables_to_appear_in_same_query!(
 local_user,
 mod_add,
 mod_add_community,
+mod_transfer_community,
 mod_ban,
 mod_ban_from_community,
 mod_lock_post,


@@ -9,6 +9,7 @@ use crate::{
 mod_remove_community,
 mod_remove_post,
 mod_sticky_post,
+mod_transfer_community,
 },
 CommentId,
 CommunityId,
@@ -181,6 +182,26 @@ pub struct ModAddCommunityForm {
 pub removed: Option<bool>,
 }
+#[derive(Clone, Queryable, Identifiable, PartialEq, Debug, Serialize)]
+#[table_name = "mod_transfer_community"]
+pub struct ModTransferCommunity {
+pub id: i32,
+pub mod_person_id: PersonId,
+pub other_person_id: PersonId,
+pub community_id: CommunityId,
+pub removed: Option<bool>,
+pub when_: chrono::NaiveDateTime,
+}
+#[derive(Insertable, AsChangeset)]
+#[table_name = "mod_transfer_community"]
+pub struct ModTransferCommunityForm {
+pub mod_person_id: PersonId,
+pub other_person_id: PersonId,
+pub community_id: CommunityId,
+pub removed: Option<bool>,
+}
 #[derive(Clone, Queryable, Identifiable, PartialEq, Debug, Serialize)]
 #[table_name = "mod_add"]
 pub struct ModAdd {


@@ -12,7 +12,7 @@ doctest = false
 lemmy_db_queries = { version = "=0.11.3", path = "../db_queries" }
 lemmy_db_schema = { version = "=0.11.3", path = "../db_schema" }
 diesel = { version = "1.4.7", features = ["postgres","chrono","r2d2","serde_json"] }
-serde = { version = "1.0.126", features = ["derive"] }
+serde = { version = "1.0.127", features = ["derive"] }
 log = "0.4.14"
 url = "2.2.2"


@@ -12,4 +12,4 @@ doctest = false
 lemmy_db_queries = { version = "=0.11.3", path = "../db_queries" }
 lemmy_db_schema = { version = "=0.11.3", path = "../db_schema" }
 diesel = { version = "1.4.7", features = ["postgres","chrono","r2d2","serde_json"] }
-serde = { version = "1.0.126", features = ["derive"] }
+serde = { version = "1.0.127", features = ["derive"] }


@@ -49,6 +49,28 @@ impl CommunityModeratorView {
 Ok(Self::from_tuple_to_vec(res))
 }
+/// Finds all communities first mods / creators
+/// Ideally this should be a group by, but diesel doesn't support it yet
+pub fn get_community_first_mods(conn: &PgConnection) -> Result<Vec<Self>, Error> {
+let res = community_moderator::table
+.inner_join(community::table)
+.inner_join(person::table)
+.select((
+Community::safe_columns_tuple(),
+Person::safe_columns_tuple(),
+))
+// A hacky workaround instead of group_bys
+// https://stackoverflow.com/questions/24042359/how-to-join-only-one-row-in-joined-table-with-postgres
+.distinct_on(community_moderator::community_id)
+.order_by((
+community_moderator::community_id,
+community_moderator::person_id,
+))
+.load::<CommunityModeratorViewTuple>(conn)?;
+Ok(Self::from_tuple_to_vec(res))
+}
 }
 impl ViewToVec for CommunityModeratorView {


@@ -12,4 +12,4 @@ doctest = false
 lemmy_db_queries = { version = "=0.11.3", path = "../db_queries" }
 lemmy_db_schema = { version = "=0.11.3", path = "../db_schema" }
 diesel = { version = "1.4.7", features = ["postgres","chrono","r2d2","serde_json"] }
-serde = { version = "1.0.126", features = ["derive"] }
+serde = { version = "1.0.127", features = ["derive"] }


@@ -7,3 +7,4 @@ pub mod mod_remove_comment_view;
 pub mod mod_remove_community_view;
 pub mod mod_remove_post_view;
 pub mod mod_sticky_post_view;
+pub mod mod_transfer_community_view;


@@ -0,0 +1,85 @@
use diesel::{result::Error, *};
use lemmy_db_queries::{limit_and_offset, ToSafe, ViewToVec};
use lemmy_db_schema::{
schema::{community, mod_transfer_community, person, person_alias_1},
source::{
community::{Community, CommunitySafe},
moderator::ModTransferCommunity,
person::{Person, PersonAlias1, PersonSafe, PersonSafeAlias1},
},
CommunityId,
PersonId,
};
use serde::Serialize;
#[derive(Debug, Serialize, Clone)]
pub struct ModTransferCommunityView {
pub mod_transfer_community: ModTransferCommunity,
pub moderator: PersonSafe,
pub community: CommunitySafe,
pub modded_person: PersonSafeAlias1,
}
type ModTransferCommunityViewTuple = (
ModTransferCommunity,
PersonSafe,
CommunitySafe,
PersonSafeAlias1,
);
impl ModTransferCommunityView {
pub fn list(
conn: &PgConnection,
community_id: Option<CommunityId>,
mod_person_id: Option<PersonId>,
page: Option<i64>,
limit: Option<i64>,
) -> Result<Vec<Self>, Error> {
let mut query = mod_transfer_community::table
.inner_join(person::table.on(mod_transfer_community::mod_person_id.eq(person::id)))
.inner_join(community::table)
.inner_join(
person_alias_1::table.on(mod_transfer_community::other_person_id.eq(person_alias_1::id)),
)
.select((
mod_transfer_community::all_columns,
Person::safe_columns_tuple(),
Community::safe_columns_tuple(),
PersonAlias1::safe_columns_tuple(),
))
.into_boxed();
if let Some(mod_person_id) = mod_person_id {
query = query.filter(mod_transfer_community::mod_person_id.eq(mod_person_id));
};
if let Some(community_id) = community_id {
query = query.filter(mod_transfer_community::community_id.eq(community_id));
};
let (limit, offset) = limit_and_offset(page, limit);
let res = query
.limit(limit)
.offset(offset)
.order_by(mod_transfer_community::when_.desc())
.load::<ModTransferCommunityViewTuple>(conn)?;
Ok(Self::from_tuple_to_vec(res))
}
}
impl ViewToVec for ModTransferCommunityView {
type DbTuple = ModTransferCommunityViewTuple;
fn from_tuple_to_vec(items: Vec<Self::DbTuple>) -> Vec<Self> {
items
.iter()
.map(|a| Self {
mod_transfer_community: a.0.to_owned(),
moderator: a.1.to_owned(),
community: a.2.to_owned(),
modded_person: a.3.to_owned(),
})
.collect::<Vec<Self>>()
}
}


@@ -22,10 +22,10 @@ actix-web = { version = "4.0.0-beta.8", default-features = false, features = ["r
 actix-web-actors = { version = "4.0.0-beta.6", default-features = false }
 sha2 = "0.9.5"
 log = "0.4.14"
-anyhow = "1.0.41"
+anyhow = "1.0.43"
 chrono = { version = "0.4.19", features = ["serde"] }
 rss = "1.10.0"
-serde = { version = "1.0.126", features = ["derive"] }
+serde = { version = "1.0.127", features = ["derive"] }
 awc = { version = "3.0.0-beta.7", default-features = false }
 url = { version = "2.2.2", features = ["serde"] }
 strum = "0.21.0"


@@ -18,23 +18,24 @@ log = "0.4.14"
 itertools = "0.10.1"
 rand = "0.8.4"
 percent-encoding = "2.1.0"
-serde = { version = "1.0.126", features = ["derive"] }
+serde = { version = "1.0.127", features = ["derive"] }
-serde_json = { version = "1.0.64", features = ["preserve_order"] }
+serde_json = { version = "1.0.66", features = ["preserve_order"] }
 thiserror = "1.0.26"
-comrak = { version = "0.10.1", default-features = false }
+comrak = { version = "0.11.0", default-features = false }
 lazy_static = "1.4.0"
-openssl = "0.10.35"
+openssl = "0.10.36"
 url = { version = "2.2.2", features = ["serde"] }
 actix-web = { version = "4.0.0-beta.8", default-features = false, features = ["rustls"] }
 actix-rt = { version = "2.2.0", default-features = false }
-anyhow = "1.0.41"
+anyhow = "1.0.43"
 reqwest = { version = "0.11.4", features = ["json"] }
-tokio = { version = "1.8.0", features = ["sync"] }
+tokio = { version = "1.10.0", features = ["sync"] }
 strum = "0.21.0"
 strum_macros = "0.21.1"
-futures = "0.3.15"
+futures = "0.3.16"
 diesel = "1.4.7"
 http = "0.2.4"
 jsonwebtoken = "7.2.0"
-deser-hjson = "1.0.1"
+deser-hjson = "1.0.2"
 smart-default = "0.6.0"
+webpage = { version = "1.1", default-features = false, features = ["serde"] }


@@ -3,10 +3,11 @@ use anyhow::anyhow;
 use log::error;
 use percent_encoding::{utf8_percent_encode, NON_ALPHANUMERIC};
 use reqwest::Client;
-use serde::Deserialize;
+use serde::{Deserialize, Serialize};
 use std::future::Future;
 use thiserror::Error;
 use url::Url;
+use webpage::HTML;
 #[derive(Clone, Debug, Error)]
 #[error("Error sending request, {0}")]
@@ -47,31 +48,61 @@
 response.expect("retry http request")
 }
-#[derive(Deserialize, Debug)]
-pub struct IframelyResponse {
+#[derive(Deserialize, Serialize, Debug, PartialEq, Clone)]
+pub struct SiteMetadata {
 pub title: Option<String>,
 pub description: Option<String>,
-thumbnail_url: Option<Url>,
+image: Option<Url>,
 pub html: Option<String>,
 }
-pub(crate) async fn fetch_iframely(
-client: &Client,
-url: &Url,
-) -> Result<IframelyResponse, LemmyError> {
-if let Some(iframely_url) = Settings::get().iframely_url {
-let fetch_url = format!("{}/oembed?url={}", iframely_url, url);
-let response = retry(|| client.get(&fetch_url).send()).await?;
-let res: IframelyResponse = response
-.json()
+/// Fetches the post link html tags (like title, description, image, etc)
+pub async fn fetch_site_metadata(client: &Client, url: &Url) -> Result<SiteMetadata, LemmyError> {
+let response = retry(|| client.get(url.as_str()).send()).await?;
+let html = response
+.text()
 .await
 .map_err(|e| RecvError(e.to_string()))?;
-Ok(res)
-} else {
-Err(anyhow!("Missing Iframely URL in config.").into())
+let tags = html_to_site_metadata(&html)?;
+Ok(tags)
 }
+fn html_to_site_metadata(html: &str) -> Result<SiteMetadata, LemmyError> {
+let page = HTML::from_string(html.to_string(), None)?;
+let page_title = page.title;
+let page_description = page.description;
+let og_description = page
+.opengraph
+.properties
+.get("description")
+.map(|t| t.to_string());
+let og_title = page
+.opengraph
+.properties
+.get("title")
+.map(|t| t.to_string());
+let og_image = page
+.opengraph
+.images
+.get(0)
+.map(|ogo| Url::parse(&ogo.url).ok())
+.flatten();
+let title = og_title.or(page_title);
+let description = og_description.or(page_description);
+let image = og_image;
+Ok(SiteMetadata {
+title,
+description,
+image,
+html: None,
+})
 }
 #[derive(Deserialize, Debug, Clone)]
@@ -89,7 +120,7 @@ pub(crate) struct PictrsFile {
 pub(crate) async fn fetch_pictrs(
 client: &Client,
 image_url: &Url,
-) -> Result<Option<PictrsResponse>, LemmyError> {
+) -> Result<PictrsResponse, LemmyError> {
 if let Some(pictrs_url) = Settings::get().pictrs_url {
 is_image_content_type(client, image_url).await?;
@@ -107,37 +138,44 @@ pub(crate) async fn fetch_pictrs(
 .map_err(|e| RecvError(e.to_string()))?;
 if response.msg == "ok" {
-Ok(Some(response))
+Ok(response)
 } else {
 Err(anyhow!("{}", &response.msg).into())
 }
 } else {
-Ok(None)
+Err(anyhow!("pictrs_url not set up in config").into())
 }
 }
-pub async fn fetch_iframely_and_pictrs_data(
+/// Both are options, since the URL might be either an html page, or an image
+/// Returns the SiteMetadata, and a Pictrs URL, if there is a picture associated
+pub async fn fetch_site_data(
 client: &Client,
 url: Option<&Url>,
-) -> Result<(Option<IframelyResponse>, Option<Url>), LemmyError> {
+) -> (Option<SiteMetadata>, Option<Url>) {
 match &url {
 Some(url) => {
-// Fetch iframely data
-let iframely_res_option = fetch_iframely(client, url).await.ok();
+// Fetch metadata
+// Ignore errors, since it may be an image, or not have the data.
+// Warning, this may ignore SSL errors
+let metadata_option = fetch_site_metadata(client, url).await.ok();
 // Fetch pictrs thumbnail
-let pictrs_hash = match &iframely_res_option {
+let pictrs_hash = match &metadata_option {
-Some(iframely_res) => match &iframely_res.thumbnail_url {
+Some(metadata_res) => match &metadata_res.image {
-Some(iframely_thumbnail_url) => fetch_pictrs(client, iframely_thumbnail_url)
-.await?
+// Metadata, with image
+// Try to generate a small thumbnail if there's a full sized one from post-links
+Some(metadata_image) => fetch_pictrs(client, metadata_image)
+.await
 .map(|r| r.files[0].file.to_owned()),
-// Try to generate a small thumbnail if iframely is not supported
+// Metadata, but no image
 None => fetch_pictrs(client, url)
-.await?
+.await
 .map(|r| r.files[0].file.to_owned()),
 },
+// No metadata, try to fetch the URL as an image
 None => fetch_pictrs(client, url)
-.await?
+.await
 .map(|r| r.files[0].file.to_owned()),
 };
@@ -151,11 +189,12 @@ pub async fn fetch_iframely_and_pictrs_data(
 ))
 .ok()
 })
+.ok()
 .flatten();
-Ok((iframely_res_option, pictrs_thumbnail))
+(metadata_option, pictrs_thumbnail)
 }
-None => Ok((None, None)),
+None => (None, None),
 }
 }
@@ -176,12 +215,35 @@ async fn is_image_content_type(client: &Client, test: &Url) -> Result<(), LemmyE
 #[cfg(test)]
 mod tests {
+use crate::request::fetch_site_metadata;
+use url::Url;
+use super::SiteMetadata;
 // These helped with testing
-// #[test]
-// fn test_iframely() {
-// let res = fetch_iframely(client, "https://www.redspark.nu/?p=15341").await;
-// assert!(res.is_ok());
-// }
+#[actix_rt::test]
+async fn test_site_metadata() {
+let client = reqwest::Client::default();
+let sample_url = Url::parse("https://www.redspark.nu/en/peoples-war/district-leader-of-chand-led-cpn-arrested-in-bhojpur/").unwrap();
+let sample_res = fetch_site_metadata(&client, &sample_url).await.unwrap();
+assert_eq!(
+SiteMetadata {
+title: Some("District Leader Of Chand Led CPN Arrested In Bhojpur - Redspark".to_string()),
+description: Some("BHOJPUR: A district leader of the outlawed Netra Bikram Chand alias Biplav-led outfit has been arrested. According to District Police".to_string()),
+image: Some(Url::parse("https://www.redspark.nu/wp-content/uploads/2020/03/netra-bikram-chand-attends-program-1272019033653-1000x0-845x653-1.jpg").unwrap()),
+html: None,
+}, sample_res);
+let youtube_url = Url::parse("https://www.youtube.com/watch?v=IquO_TcMZIQ").unwrap();
+let youtube_res = fetch_site_metadata(&client, &youtube_url).await.unwrap();
+assert_eq!(
+SiteMetadata {
+title: Some("A Hard Look at Rent and Rent Seeking with Michael Hudson & Pepe Escobar".to_string()),
+description: Some("An interactive discussion on wealth inequality and the “Great Game” on the control of natural resources.In this webinar organized jointly by the Henry George...".to_string()),
+image: Some(Url::parse("https://i.ytimg.com/vi/IquO_TcMZIQ/maxresdefault.jpg").unwrap()),
+html: None,
+}, youtube_res);
+}
 // #[test]
 // fn test_pictshare() {
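The new html_to_site_metadata helper prefers OpenGraph tags over the plain page title and description. A minimal offline check of that fallback order might look like the sketch below (an illustration only, not part of this commit; it presumes the sketch sits inside this same tests module and that the webpage crate extracts og: properties from inline HTML the same way it does for fetched pages):

#[test]
fn test_html_to_site_metadata_prefers_opengraph() {
  // Hypothetical input; the og:* values should win over the plain <title>.
  let html = r#"<html><head>
    <title>Plain title</title>
    <meta property="og:title" content="OG title">
    <meta property="og:description" content="OG description">
  </head><body></body></html>"#;
  let meta = super::html_to_site_metadata(html).unwrap();
  assert_eq!(meta.title.as_deref(), Some("OG title"));
  assert_eq!(meta.description.as_deref(), Some("OG description"));
}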


@@ -29,8 +29,6 @@ pub struct Settings {
 #[default(None)]
 pub pictrs_url: Option<String>,
 #[default(None)]
-pub iframely_url: Option<String>,
-#[default(None)]
 pub additional_slurs: Option<String>,
 #[default(20)]
 pub actor_name_max_length: usize,


@@ -20,13 +20,13 @@ lemmy_db_views_actor = { version = "=0.11.3", path = "../db_views_actor" }
 reqwest = { version = "0.11.4", features = ["json"] }
 log = "0.4.14"
 rand = "0.8.4"
-serde = { version = "1.0.126", features = ["derive"] }
+serde = { version = "1.0.127", features = ["derive"] }
-serde_json = { version = "1.0.64", features = ["preserve_order"] }
+serde_json = { version = "1.0.66", features = ["preserve_order"] }
 actix = "0.12.0"
-anyhow = "1.0.41"
+anyhow = "1.0.43"
 diesel = "1.4.7"
 background-jobs = "0.9.0"
-tokio = "1.8.0"
+tokio = "1.10.0"
 strum = "0.21.0"
 strum_macros = "0.21.1"
 chrono = { version = "0.4.19", features = ["serde"] }


@@ -125,6 +125,7 @@ pub enum UserOperation {
 CommunityJoin,
 ModJoin,
 ChangePassword,
+GetSiteMetadata,
 }
 #[derive(EnumString, ToString, Debug, Clone)]


@@ -45,7 +45,7 @@ RUN strip ./target/$CARGO_BUILD_TARGET/$RUSTRELEASEDIR/lemmy_server
 RUN cp ./target/$CARGO_BUILD_TARGET/$RUSTRELEASEDIR/lemmy_server /app/lemmy_server
 # The alpine runner
-FROM alpine:3.12 as lemmy
+FROM alpine:3.14 as lemmy
 # Install libpq for postgres
 RUN apk add libpq


@@ -14,7 +14,6 @@ services:
 depends_on:
 - pictrs
 - postgres
-- iframely
 lemmy-ui:
 image: dessalines/lemmy-ui:dev
@@ -49,12 +48,3 @@ services:
 volumes:
 - ./volumes/pictrs:/mnt
 restart: always
-iframely:
-image: dogbin/iframely:latest
-ports:
-- "8061:80"
-volumes:
-- ../iframely.config.local.js:/iframely/config.local.js:ro
-restart: always
-mem_limit: 200m


@@ -22,7 +22,7 @@ FROM ubuntu:20.10
 # Install libpq for postgres
 RUN apt-get update -y
-RUN apt-get install -y libpq-dev
+RUN apt-get install -y libpq-dev ca-certificates
 # Copy resources
 COPY --from=rust /app/lemmy_server /app/lemmy


@@ -14,7 +14,6 @@ services:
 restart: on-failure
 depends_on:
 - pictrs
-- iframely
 - lemmy-alpha-ui
 - lemmy-beta-ui
 - lemmy-gamma-ui
@@ -174,9 +173,3 @@ services:
 - POSTGRES_DB=lemmy
 volumes:
 - ./volumes/postgres_epsilon:/var/lib/postgresql/data
-iframely:
-image: dogbin/iframely:latest
-volumes:
-- ../iframely.config.local.js:/iframely/config.local.js:ro
-restart: always


@@ -40,12 +40,6 @@ http {
 # Cuts off the trailing slash on URLs to make them valid
 rewrite ^(.+)/+$ $1 permanent;
 }
-location /iframely/ {
-proxy_pass http://iframely:80/;
-proxy_set_header X-Real-IP $remote_addr;
-proxy_set_header Host $host;
-proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
-}
 }
 upstream lemmy-beta {
@@ -85,12 +79,6 @@ http {
 # Cuts off the trailing slash on URLs to make them valid
 rewrite ^(.+)/+$ $1 permanent;
 }
-location /iframely/ {
-proxy_pass http://iframely:80/;
-proxy_set_header X-Real-IP $remote_addr;
-proxy_set_header Host $host;
-proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
-}
 }
 upstream lemmy-gamma {
@@ -130,12 +118,6 @@ http {
 # Cuts off the trailing slash on URLs to make them valid
 rewrite ^(.+)/+$ $1 permanent;
 }
-location /iframely/ {
-proxy_pass http://iframely:80/;
-proxy_set_header X-Real-IP $remote_addr;
-proxy_set_header Host $host;
-proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
-}
 }
 upstream lemmy-delta {
@@ -175,12 +157,6 @@ http {
 # Cuts off the trailing slash on URLs to make them valid
 rewrite ^(.+)/+$ $1 permanent;
 }
-location /iframely/ {
-proxy_pass http://iframely:80/;
-proxy_set_header X-Real-IP $remote_addr;
-proxy_set_header Host $host;
-proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
-}
 }
 upstream lemmy-epsilon {
@@ -220,11 +196,5 @@ http {
 # Cuts off the trailing slash on URLs to make them valid
 rewrite ^(.+)/+$ $1 permanent;
 }
-location /iframely/ {
-proxy_pass http://iframely:80/;
-proxy_set_header X-Real-IP $remote_addr;
-proxy_set_header Host $host;
-proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
-}
 }
 }


@@ -1,283 +0,0 @@
(function() {
var config = {
// Specify a path for custom plugins. Custom plugins will override core plugins.
// CUSTOM_PLUGINS_PATH: __dirname + '/yourcustom-plugin-folder',
DEBUG: false,
RICH_LOG_ENABLED: false,
// For embeds that require render, baseAppUrl will be used as the host.
baseAppUrl: "http://yourdomain.com",
relativeStaticUrl: "/r",
// Or just skip built-in renders altogether
SKIP_IFRAMELY_RENDERS: true,
// For legacy reasons the response format of Iframely open-source is
// different by default as it does not group the links array by rel.
// In order to get the same grouped response as in Cloud API,
// add `&group=true` to your request to change response per request
// or set `GROUP_LINKS` in your config to `true` for a global change.
GROUP_LINKS: true,
// Number of maximum redirects to follow before aborting the page
// request with `redirect loop` error.
MAX_REDIRECTS: 4,
SKIP_OEMBED_RE_LIST: [
// /^https?:\/\/yourdomain\.com\//,
],
/*
// Used to pass parameters to the generate functions when creating HTML elements
// disableSizeWrapper: Don't wrap element (iframe, video, etc) in a positioned div
GENERATE_LINK_PARAMS: {
disableSizeWrapper: true
},
*/
port: 80, //can be overridden by PORT env var
host: '0.0.0.0', // Dockers beware. See https://github.com/itteco/iframely/issues/132#issuecomment-242991246
//can be overridden by HOST env var
// Optional SSL cert, if you serve under HTTPS.
/*
ssl: {
key: require('fs').readFileSync(__dirname + '/key.pem'),
cert: require('fs').readFileSync(__dirname + '/cert.pem'),
port: 443
},
*/
/*
Supported cache engines:
- no-cache - no caching will be used.
- node-cache - good for debug, node memory will be used (https://github.com/tcs-de/nodecache).
- redis - https://github.com/mranney/node_redis.
- memcached - https://github.com/3rd-Eden/node-memcached
*/
CACHE_ENGINE: 'node-cache',
CACHE_TTL: 0, // In seconds.
// 0 = 'never expire' for memcached & node-cache to let cache engine decide itself when to evict the record
// 0 = 'no cache' for redis. Use high enough (e.g. 365*24*60*60*1000) ttl for similar 'never expire' approach instead
/*
// Redis cache options.
REDIS_OPTIONS: {
host: '127.0.0.1',
port: 6379
},
*/
/*
// Memcached options. See https://github.com/3rd-Eden/node-memcached#server-locations
MEMCACHED_OPTIONS: {
locations: "127.0.0.1:11211"
}
*/
/*
// Access-Control-Allow-Origin list.
allowedOrigins: [
"*",
"http://another_domain.com"
],
*/
/*
// Uncomment to enable plugin testing framework.
tests: {
mongodb: 'mongodb://localhost:27017/iframely-tests',
single_test_timeout: 10 * 1000,
plugin_test_period: 2 * 60 * 60 * 1000,
relaunch_script_period: 5 * 60 * 1000
},
*/
// If there's no response from remote server, the timeout will occur after
RESPONSE_TIMEOUT: 5 * 1000, //ms
/* From v1.4.0, Iframely supports HTTP/2 by default. Disable it, if you'd rather not.
Alternatively, you can also disable per origin. See `proxy` option below.
*/
// DISABLE_HTTP2: true,
// Customize API calls to oembed endpoints.
ADD_OEMBED_PARAMS: [{
// Endpoint url regexp array.
re: [/^http:\/\/api\.instagram\.com\/oembed/],
// Custom get params object.
params: {
hidecaption: true
}
}, {
re: [/^https:\/\/www\.facebook\.com\/plugins\/page\/oembed\.json/i],
params: {
show_posts: 0,
show_facepile: 0,
maxwidth: 600
}
}, {
// match i=user or i=moment or i=timeline to configure these types individually
// see params spec at https://dev.twitter.com/web/embedded-timelines/oembed
re: [/^https?:\/\/publish\.twitter\.com\/oembed\?i=user/i],
params: {
limit: 1,
maxwidth: 600
}
/*
}, {
// Facebook https://developers.facebook.com/docs/plugins/oembed-endpoints
re: [/^https:\/\/www\.facebook\.com\/plugins\/\w+\/oembed\.json/i],
params: {
// Skip script tag and fb-root div.
omitscript: true
}
*/
}],
/*
// Configure use of HTTP proxies as needed.
// You don't have to specify all options per regex - just what you need to override
PROXY: [{
re: [/^https?:\/\/www\.domain\.com/],
proxy_server: 'http://1.2.3.4:8080',
user_agent: 'CHANGE YOUR AGENT',
headers: {
// HTTP headers
// Overrides previous params if overlapped.
},
request_options: {
// Refer to: https://github.com/request/request
// Overrides previous params if overlapped.
},
disable_http2: true
}],
*/
// Customize API calls to 3rd parties. At the very least - configure required keys.
providerOptions: {
locale: "en_US", // ISO 639-1 two-letter language code, e.g. en_CA or fr_CH.
// Will be added with the highest priority in the accept-language header with each request.
// Plus, it is used in the FB, YouTube and perhaps other plugins.
"twitter": {
"max-width": 550,
"min-width": 250,
hide_media: false,
hide_thread: false,
omit_script: false,
center: false,
// dnt: true,
cache_ttl: 100 * 365 * 24 * 3600 // 100 Years.
},
readability: {
enabled: false
// allowPTagDescription: true // to enable description fallback to first paragraph
},
images: {
loadSize: false, // if true, will try and load the first bytes of all images to get/confirm the sizes
checkFavicon: false // if true, will verify all favicons
},
tumblr: {
consumer_key: "INSERT YOUR VALUE"
// media_only: true // disables status embeds for images and videos - will return plain media
},
google: {
// https://developers.google.com/maps/documentation/embed/guide#api_key
maps_key: "INSERT YOUR VALUE"
},
/*
// Optional Camo Proxy to wrap all images: https://github.com/atmos/camo
camoProxy: {
camo_proxy_key: "INSERT YOUR VALUE",
camo_proxy_host: "INSERT YOUR VALUE"
// ssl_only: true // will only proxy non-ssl images
},
*/
// List of query parameters to add to YouTube and Vimeo frames
// Start it with a leading "?". Or omit altogether for the default values
// API key is optional, youtube will work without it too.
// It is probably the same API key you use for Google Maps.
youtube: {
// api_key: "INSERT YOUR VALUE",
get_params: "?rel=0&showinfo=1" // https://developers.google.com/youtube/player_parameters
},
vimeo: {
get_params: "?byline=0&badge=0" // https://developer.vimeo.com/player/embedding
},
/*
soundcloud: {
old_player: true // enables classic player
},
giphy: {
media_only: true // disables branded player for gifs and returns just the image
}
*/
/*
bandcamp: {
get_params: '/size=large/bgcol=333333/linkcol=ffffff/artwork=small/transparent=true/',
media: {
album: {
height: 472,
'max-width': 700
},
track: {
height: 120,
'max-width': 700
}
}
}
*/
},
// WHITELIST_WILDCARD, if present, will be added to the whitelist as a record for the top-level domain: "*"
// With it, you can define what parsers do when they run across an unknown publisher.
// If absent or empty, all generic media parsers will be disabled except for known domains
// More about format: https://iframely.com/docs/qa-format
/*
WHITELIST_WILDCARD: {
"twitter": {
"player": "allow",
"photo": "deny"
},
"oembed": {
"video": "allow",
"photo": "allow",
"rich": "deny",
"link": "deny"
},
"og": {
"video": ["allow", "ssl", "responsive"]
},
"iframely": {
"survey": "allow",
"reader": "allow",
"player": "allow",
"image": "allow"
},
"html-meta": {
"video": ["allow", "responsive"],
"promo": "allow"
}
}
*/
// Black-list any inappropriate domains; Iframely will return 417 for them.
// At minimum, keep your localhosts blacklisted to avoid SSRF
BLACKLIST_DOMAINS_RE: [
/^https?:\/\/127\.0\.0\.1/i,
/^https?:\/\/localhost/i,
// And this is AWS metadata service
// https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ec2-instance-metadata.html
/^https?:\/\/169\.254\.169\.254/
]
};
module.exports = config;
})();

View file

@ -20,6 +20,8 @@
# json web token for authorization between server and client # json web token for authorization between server and client
jwt_secret: "changeme" jwt_secret: "changeme"
# settings related to the postgresql database # settings related to the postgresql database
# address where pictrs is available
pictrs_url: "http://pictrs:8080"
database: { database: {
# name of the postgres database for lemmy # name of the postgres database for lemmy
database: "lemmy" database: "lemmy"

View file

@ -18,7 +18,7 @@ RUN strip ./target/$CARGO_BUILD_TARGET/$RUSTRELEASEDIR/lemmy_server
RUN cp ./target/$CARGO_BUILD_TARGET/$RUSTRELEASEDIR/lemmy_server /app/lemmy_server RUN cp ./target/$CARGO_BUILD_TARGET/$RUSTRELEASEDIR/lemmy_server /app/lemmy_server
# The alpine runner # The alpine runner
FROM alpine:3.12 as lemmy FROM alpine:3.14 as lemmy
# Install libpq for postgres # Install libpq for postgres
RUN apk add libpq RUN apk add libpq

View file

@ -23,7 +23,6 @@ services:
depends_on: depends_on:
- postgres - postgres
- pictrs - pictrs
- iframely
lemmy-ui: lemmy-ui:
image: dessalines/lemmy-ui:0.11.3 image: dessalines/lemmy-ui:0.11.3
@ -46,11 +45,3 @@ services:
- ./volumes/pictrs:/mnt - ./volumes/pictrs:/mnt
restart: always restart: always
iframely:
image: dogbin/iframely:latest
ports:
- "127.0.0.1:8061:80"
volumes:
- ./iframely.config.local.js:/iframely/config.local.js:ro
restart: always
mem_limit: 200m

View file

@ -0,0 +1,42 @@
create or replace function community_aggregates_activity(i text)
returns table(count_ bigint, community_id_ integer)
language plpgsql
as
$$
begin
return query
select count(*), community_id
from (
select c.creator_id, p.community_id from comment c
inner join post p on c.post_id = p.id
where c.published > ('now'::timestamp - i::interval)
union
select p.creator_id, p.community_id from post p
where p.published > ('now'::timestamp - i::interval)
) a
group by community_id;
end;
$$;
create or replace function site_aggregates_activity(i text) returns integer
language plpgsql
as $$
declare
count_ integer;
begin
select count(*)
into count_
from (
select c.creator_id from comment c
inner join person u on c.creator_id = u.id
where c.published > ('now'::timestamp - i::interval)
and u.local = true
union
select p.creator_id from post p
inner join person u on p.creator_id = u.id
where p.published > ('now'::timestamp - i::interval)
and u.local = true
) a;
return count_;
end;
$$;

View file

@ -0,0 +1,52 @@
-- Make sure bots aren't included in aggregate counts
create or replace function community_aggregates_activity(i text)
returns table(count_ bigint, community_id_ integer)
language plpgsql
as
$$
begin
return query
select count(*), community_id
from (
select c.creator_id, p.community_id from comment c
inner join post p on c.post_id = p.id
inner join person pe on c.creator_id = pe.id
where c.published > ('now'::timestamp - i::interval)
and pe.bot_account = false
union
select p.creator_id, p.community_id from post p
inner join person pe on p.creator_id = pe.id
where p.published > ('now'::timestamp - i::interval)
and pe.bot_account = false
) a
group by community_id;
end;
$$;
create or replace function site_aggregates_activity(i text) returns integer
language plpgsql
as $$
declare
count_ integer;
begin
select count(*)
into count_
from (
select c.creator_id from comment c
inner join person u on c.creator_id = u.id
inner join person pe on c.creator_id = pe.id
where c.published > ('now'::timestamp - i::interval)
and u.local = true
and pe.bot_account = false
union
select p.creator_id from post p
inner join person u on p.creator_id = u.id
inner join person pe on p.creator_id = pe.id
where p.published > ('now'::timestamp - i::interval)
and u.local = true
and pe.bot_account = false
) a;
return count_;
end;
$$;
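To sanity-check the updated functions after the migration runs, they can be called directly in psql; a minimal sketch, where the interval strings ('1 week', '6 months') are illustrative assumptions rather than the exact values the server passes:
-- Illustrative invocations only; the interval arguments are assumptions.
select * from community_aggregates_activity('1 week');
select site_aggregates_activity('6 months');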

View file

@ -0,0 +1 @@
drop table mod_transfer_community;

View file

@ -0,0 +1,9 @@
-- Add the mod_transfer_community log table
create table mod_transfer_community (
id serial primary key,
mod_person_id int references person on update cascade on delete cascade not null,
other_person_id int references person on update cascade on delete cascade not null,
community_id int references community on update cascade on delete cascade not null,
removed boolean default false,
when_ timestamp not null default now()
);
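For illustration, a row in the new log table might look like the following; the ids are hypothetical, and in practice the server writes this record itself when a community transfer is performed:
-- Hypothetical ids, for illustration only; removed and when_ fall back to their defaults.
insert into mod_transfer_community (mod_person_id, other_person_id, community_id)
values (1, 2, 3);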


scripts/upgrade_deps.sh Executable file
View file

@ -0,0 +1,14 @@
#!/bin/bash
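# Assumes the cargo-udeps and cargo-edit subcommands are installed
# (e.g. `cargo install cargo-udeps cargo-edit`); `cargo udeps` needs a nightly toolchain.
# Run from the scripts/ directory: pushd moves up to the workspace root.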
pushd ../
# Check unused deps
cargo udeps --all-targets
# Upgrade deps
cargo upgrade --workspace
# Run check
cargo check
popd

View file

@ -88,7 +88,11 @@ pub fn config(cfg: &mut web::ServiceConfig, rate_limit: &RateLimit) {
"/report/resolve", "/report/resolve",
web::put().to(route_post::<ResolvePostReport>), web::put().to(route_post::<ResolvePostReport>),
) )
.route("/report/list", web::get().to(route_get::<ListPostReports>)), .route("/report/list", web::get().to(route_get::<ListPostReports>))
.route(
"/site_metadata",
web::get().to(route_get::<GetSiteMetadata>),
),
) )
// Comment // Comment
.service( .service(

View file

@ -66,6 +66,7 @@ async fn main() -> Result<(), LemmyError> {
); );
let activity_queue = create_activity_queue(); let activity_queue = create_activity_queue();
let chat_server = ChatServer::startup( let chat_server = ChatServer::startup(
pool.clone(), pool.clone(),
rate_limiter.clone(), rate_limiter.clone(),