Commit 1aab92cbfa: Merge remote-tracking branch 'upstream/main' into migration-runner
(mirror of https://github.com/LemmyNet/lemmy.git, synced 2024-12-28 13:51:32 +00:00)
53 changed files with 2922 additions and 2532 deletions
@@ -278,8 +278,7 @@ steps:
   commands:
     - cargo install cargo-workspaces
     - cp -r migrations crates/db_schema/
-    - cargo login "$CARGO_API_TOKEN"
-    - cargo workspaces publish --from-git --allow-dirty --no-verify --allow-branch "${CI_COMMIT_TAG}" --yes custom "${CI_COMMIT_TAG}"
+    - cargo workspaces publish --token "$CARGO_API_TOKEN" --from-git --allow-dirty --no-verify --allow-branch "${CI_COMMIT_TAG}" --yes custom "${CI_COMMIT_TAG}"
   secrets: [cargo_api_token]
   when:
     - event: tag
Cargo.lock (generated, 881 lines changed)
File diff suppressed because it is too large.
Cargo.toml (36 lines changed)
@@ -1,5 +1,5 @@
 [workspace.package]
-version = "0.19.4-beta.5"
+version = "0.19.4-beta.6"
 edition = "2021"
 description = "A link aggregator for the fediverse"
 license = "AGPL-3.0"
@@ -88,25 +88,25 @@ unused_self = "deny"
 unwrap_used = "deny"

 [workspace.dependencies]
-lemmy_api = { version = "=0.19.4-beta.5", path = "./crates/api" }
-lemmy_api_crud = { version = "=0.19.4-beta.5", path = "./crates/api_crud" }
-lemmy_apub = { version = "=0.19.4-beta.5", path = "./crates/apub" }
-lemmy_utils = { version = "=0.19.4-beta.5", path = "./crates/utils", default-features = false }
-lemmy_db_schema = { version = "=0.19.4-beta.5", path = "./crates/db_schema" }
-lemmy_api_common = { version = "=0.19.4-beta.5", path = "./crates/api_common" }
-lemmy_routes = { version = "=0.19.4-beta.5", path = "./crates/routes" }
-lemmy_db_views = { version = "=0.19.4-beta.5", path = "./crates/db_views" }
-lemmy_db_views_actor = { version = "=0.19.4-beta.5", path = "./crates/db_views_actor" }
-lemmy_db_views_moderator = { version = "=0.19.4-beta.5", path = "./crates/db_views_moderator" }
-lemmy_federate = { version = "=0.19.4-beta.5", path = "./crates/federate" }
-activitypub_federation = { version = "0.5.4", default-features = false, features = [
+lemmy_api = { version = "=0.19.4-beta.6", path = "./crates/api" }
+lemmy_api_crud = { version = "=0.19.4-beta.6", path = "./crates/api_crud" }
+lemmy_apub = { version = "=0.19.4-beta.6", path = "./crates/apub" }
+lemmy_utils = { version = "=0.19.4-beta.6", path = "./crates/utils", default-features = false }
+lemmy_db_schema = { version = "=0.19.4-beta.6", path = "./crates/db_schema" }
+lemmy_api_common = { version = "=0.19.4-beta.6", path = "./crates/api_common" }
+lemmy_routes = { version = "=0.19.4-beta.6", path = "./crates/routes" }
+lemmy_db_views = { version = "=0.19.4-beta.6", path = "./crates/db_views" }
+lemmy_db_views_actor = { version = "=0.19.4-beta.6", path = "./crates/db_views_actor" }
+lemmy_db_views_moderator = { version = "=0.19.4-beta.6", path = "./crates/db_views_moderator" }
+lemmy_federate = { version = "=0.19.4-beta.6", path = "./crates/federate" }
+activitypub_federation = { version = "0.5.6", default-features = false, features = [
   "actix-web",
 ] }
 diesel = "2.1.6"
 diesel_migrations = "2.1.0"
 diesel-async = "0.4.1"
-serde = { version = "1.0.198", features = ["derive"] }
-serde_with = "3.7.0"
+serde = { version = "1.0.199", features = ["derive"] }
+serde_with = "3.8.1"
 actix-web = { version = "4.5.1", default-features = false, features = [
   "macros",
   "rustls",
@@ -129,7 +129,7 @@ doku = { version = "0.21.1", features = ["url-2"] }
 bcrypt = "0.15.1"
 chrono = { version = "0.4.38", features = ["serde"], default-features = false }
 serde_json = { version = "1.0.116", features = ["preserve_order"] }
-base64 = "0.22.0"
+base64 = "0.22.1"
 uuid = { version = "1.8.0", features = ["serde", "v4"] }
 async-trait = "0.1.80"
 captcha = "0.0.9"
@@ -157,10 +157,10 @@ ts-rs = { version = "7.1.1", features = [
   "chrono-impl",
   "no-serde-warnings",
 ] }
-rustls = { version = "0.21.11", features = ["dangerous_configuration"] }
+rustls = { version = "0.23.5", features = ["ring"] }
 futures-util = "0.3.30"
 tokio-postgres = "0.7.10"
-tokio-postgres-rustls = "0.10.0"
+tokio-postgres-rustls = "0.12.0"
 urlencoding = "2.1.3"
 enum-map = "2.7"
 moka = { version = "0.12.7", features = ["future"] }
@@ -6,7 +6,7 @@
   "repository": "https://github.com/LemmyNet/lemmy",
   "author": "Dessalines",
   "license": "AGPL-3.0",
-  "packageManager": "pnpm@9.0.4",
+  "packageManager": "pnpm@9.0.6",
   "scripts": {
     "lint": "tsc --noEmit && eslint --report-unused-disable-directives --ext .js,.ts,.tsx src && prettier --check 'src/**/*.ts'",
     "fix": "prettier --write src && eslint --fix src",

File diff suppressed because it is too large.
@@ -25,7 +25,7 @@ full = [
   "lemmy_db_views_moderator/full",
   "lemmy_utils/full",
   "activitypub_federation",
-  "encoding",
+  "encoding_rs",
   "reqwest-middleware",
   "webpage",
   "ts-rs",
@@ -69,7 +69,7 @@ mime = { version = "0.3.17", optional = true }
 webpage = { version = "1.6", default-features = false, features = [
   "serde",
 ], optional = true }
-encoding = { version = "0.2.33", optional = true }
+encoding_rs = { version = "0.8.34", optional = true }
 jsonwebtoken = { version = "8.3.0", optional = true }
 # necessary for wasmt compilation
 getrandom = { version = "0.2.14", features = ["js"] }
@@ -40,7 +40,7 @@ pub struct Register {
   pub username: String,
   pub password: Sensitive<String>,
   pub password_verify: Sensitive<String>,
-  pub show_nsfw: bool,
+  pub show_nsfw: Option<bool>,
   /// email is mandatory if email verification is enabled on the server
   pub email: Option<Sensitive<String>>,
   /// The UUID of the captcha item.
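Aside: the reason Option<bool> is the more forgiving API shape is that serde fills an absent field with None instead of rejecting the request. A minimal sketch outside the Lemmy codebase (RegisterExample is an illustrative struct, not the real one):

use serde::Deserialize;

#[derive(Deserialize, Debug)]
struct RegisterExample {
    username: String,
    // Optional: older clients that never send this field still deserialize fine.
    show_nsfw: Option<bool>,
}

fn main() -> Result<(), serde_json::Error> {
    let r: RegisterExample = serde_json::from_str(r#"{ "username": "alice" }"#)?;
    println!("username = {}", r.username);
    assert_eq!(r.show_nsfw, None);
    Ok(())
}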
@@ -6,7 +6,7 @@ use crate::{
   utils::{local_site_opt_to_sensitive, proxy_image_link, proxy_image_link_opt_apub},
 };
 use activitypub_federation::config::Data;
-use encoding::{all::encodings, DecoderTrap};
+use encoding_rs::{Encoding, UTF_8};
 use lemmy_db_schema::{
   newtypes::DbUrl,
   source::{
@@ -160,11 +160,9 @@ fn extract_opengraph_data(html_bytes: &[u8], url: &Url) -> LemmyResult<OpenGraph
   // proper encoding. If the specified encoding cannot be found, fall back to the original UTF-8
   // version.
   if let Some(charset) = page.meta.get("charset") {
-    if charset.to_lowercase() != "utf-8" {
-      if let Some(encoding_ref) = encodings().iter().find(|e| e.name() == charset) {
-        if let Ok(html_with_encoding) = encoding_ref.decode(html_bytes, DecoderTrap::Replace) {
-          page = HTML::from_string(html_with_encoding, None)?;
-        }
+    if charset != UTF_8.name() {
+      if let Some(encoding) = Encoding::for_label(charset.as_bytes()) {
+        page = HTML::from_string(encoding.decode(html_bytes).0.into(), None)?;
       }
     }
   }
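For reference, a small standalone sketch of the encoding_rs calls the new code path uses (the bytes and charset label below are made-up examples, not Lemmy code):

use encoding_rs::{Encoding, UTF_8};

fn decode_html(html_bytes: &[u8], charset: &str) -> String {
    if charset != UTF_8.name() {
        // Look up an encoding by its WHATWG label, e.g. "windows-1252".
        if let Some(encoding) = Encoding::for_label(charset.as_bytes()) {
            // decode() returns (Cow<str>, encoding used, had_errors); malformed
            // sequences are replaced instead of failing.
            return encoding.decode(html_bytes).0.into_owned();
        }
    }
    String::from_utf8_lossy(html_bytes).into_owned()
}

fn main() {
    let bytes = b"caf\xe9"; // "café" encoded as windows-1252, an illustrative input
    println!("{}", decode_html(bytes, "windows-1252"));
}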
@@ -142,12 +142,17 @@ pub async fn register(
     .map(|lang_str| lang_str.split('-').next().unwrap_or_default().to_string())
     .collect();

+  // Show nsfw content if param is true, or if content_warning exists
+  let show_nsfw = data
+    .show_nsfw
+    .unwrap_or(site_view.site.content_warning.is_some());
+
   // Create the local user
   let local_user_form = LocalUserInsertForm::builder()
     .person_id(inserted_person.id)
     .email(data.email.as_deref().map(str::to_lowercase))
     .password_encrypted(data.password.to_string())
-    .show_nsfw(Some(data.show_nsfw))
+    .show_nsfw(Some(show_nsfw))
     .accepted_application(accepted_application)
     .default_listing_type(Some(local_site.default_post_listing_type))
     .post_listing_mode(Some(local_site.default_post_listing_mode))
crates/apub/assets/discourse/objects/group.json (new file, 22 lines)
{
  "id": "https://socialhub.activitypub.rocks/ap/actor/797217cf18c0e819dfafc52425590146",
  "type": "Group",
  "updated": "2024-04-05T12:49:51Z",
  "url": "https://socialhub.activitypub.rocks/c/meeting/threadiverse-wg/88",
  "name": "Threadiverse Working Group (SocialHub)",
  "inbox": "https://socialhub.activitypub.rocks/ap/actor/797217cf18c0e819dfafc52425590146/inbox",
  "outbox": "https://socialhub.activitypub.rocks/ap/actor/797217cf18c0e819dfafc52425590146/outbox",
  "followers": "https://socialhub.activitypub.rocks/ap/actor/797217cf18c0e819dfafc52425590146/followers",
  "preferredUsername": "threadiverse-wg",
  "publicKey": {
    "id": "https://socialhub.activitypub.rocks/ap/actor/797217cf18c0e819dfafc52425590146#main-key",
    "owner": "https://socialhub.activitypub.rocks/ap/actor/797217cf18c0e819dfafc52425590146",
"publicKeyPem": "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEApJi4iAcW6bPiHVCxT9p0\n8DVnrDDO4QtLNy7bpRFdMFifmmmXprsuAi9D2MSwbhH49V54HtIkxBpKd2IR/UD8\nmhMDY4CNI9FHpjqLw0wtkzxcqF9urSqhn0/vWX+9oxyhIgQS5KMiIkYDMJiAc691\niEcZ8LCran23xIGl6Dk54Nr3TqTMLcjDhzQYUJbxMrLq5/knWqOKG3IF5OxK+9ZZ\n1wxDF872eJTxJLkmpag+WYNtHzvB2SGTp8j5IF1/pZ9J1c3cpYfaeolTch/B/GQn\najCB4l27U52rIIObxJqFXSY8wHyd0aAmNmxzPZ7cduRlBDhmI40cAmnCV1YQPvpk\nDwIDAQAB\n-----END PUBLIC KEY-----\n"
  },
  "icon": {
    "type": "Image",
    "mediaType": "image/png",
    "url": "https://socialhub.activitypub.rocks/uploads/default/original/1X/8faac84234dc73d074dadaa2bcf24dc746b8647f.png"
  },
  "@context": "https://www.w3.org/ns/activitystreams"
}
crates/apub/assets/discourse/objects/page.json (new file, 13 lines)
{
  "id": "https://socialhub.activitypub.rocks/ap/object/1899f65c062200daec50a4c89ed76dc9",
  "type": "Note",
  "audience": "https://socialhub.activitypub.rocks/ap/actor/797217cf18c0e819dfafc52425590146",
  "published": "2024-04-13T14:36:19Z",
  "updated": "2024-04-13T14:36:19Z",
  "url": "https://socialhub.activitypub.rocks/t/our-next-meeting/4079/1",
  "attributedTo": "https://socialhub.activitypub.rocks/ap/actor/495843076e9e469fbd35ccf467ae9fb1",
  "name": "Our next meeting",
  "context": "https://socialhub.activitypub.rocks/ap/collection/8850f6e85b57c490da915a5dfbbd5045",
"content": "<h3>Last Meeting</h3>\n<h4>Recording</h4>\n<a href=\"https://us06web.zoom.us/rec/share/4hGBTvgXJPlu8UkjkkxVARypNg5DH0eeaKlIBv71D4G3lokYyrCrg7cqBCJmL109.FsHYTZDlVvZXrgcn?startTime=1712254114000\">https://us06web.zoom.us/rec/share/4hGBTvgXJPlu8UkjkkxVARypNg5DH0eeaKlIBv71D4G3lokYyrCrg7cqBCJmL109.FsHYTZDlVvZXrgcn?startTime=1712254114000</a>\nPasscode: z+1*4pUB\n<h4>Minutes</h4>\nTo refresh your memory, you can read the minutes of last week's meeting <a href=\"https://community.nodebb.org/topic/17949/minutes…",
  "@context": "https://www.w3.org/ns/activitystreams"
}
crates/apub/assets/discourse/objects/person.json (new file, 23 lines)
{
  "id": "https://socialhub.activitypub.rocks/ap/actor/495843076e9e469fbd35ccf467ae9fb1",
  "type": "Person",
  "updated": "2024-01-15T12:27:03Z",
  "url": "https://socialhub.activitypub.rocks/u/angus",
  "name": "Angus McLeod",
  "inbox": "https://socialhub.activitypub.rocks/ap/actor/495843076e9e469fbd35ccf467ae9fb1/inbox",
  "outbox": "https://socialhub.activitypub.rocks/ap/actor/495843076e9e469fbd35ccf467ae9fb1/outbox",
  "sharedInbox": "https://socialhub.activitypub.rocks/ap/users/inbox",
  "followers": "https://socialhub.activitypub.rocks/ap/actor/495843076e9e469fbd35ccf467ae9fb1/followers",
  "preferredUsername": "angus",
  "publicKey": {
    "id": "https://socialhub.activitypub.rocks/ap/actor/495843076e9e469fbd35ccf467ae9fb1#main-key",
    "owner": "https://socialhub.activitypub.rocks/ap/actor/495843076e9e469fbd35ccf467ae9fb1",
"publicKeyPem": "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA3RpuFDuwXZzOeHO5fO2O\nHmP7Flc5JDXJ8OOEJYq5T/dzUKqREOF1ZT0WMww8/E3P6w+gfFsjzThraJb8nHuW\nP6798SUD35CWBclfhw9DapjVn99JyFcAWcH3b9fr6LYshc4y1BoeJagk1kcro2Dc\n+pX0vVXgNjwWnGfyucAgGIUWrNUjcvIvXmyVCBSQfXG3nCALV1JbI4KSgf/5KyBn\nza/QefaetxYiFV8wAisPKLsz3XQAaITsQmbSi+8gmwXt/9U808PK1KphCiClDOWg\noi0HPzJn0rn+mwFCfgNWenvribfeG40AHLG33OkWKvslufjifdWDCOcBYYzyCEV6\n+wIDAQAB\n-----END PUBLIC KEY-----\n"
  },
  "icon": {
    "type": "Image",
    "mediaType": "image/png",
    "url": "https://socialhub.activitypub.rocks/user_avatar/socialhub.activitypub.rocks/angus/96/2295_2.png"
  },
  "@context": "https://www.w3.org/ns/activitystreams"
}
crates/apub/assets/nodebb/objects/group.json (new file, 22 lines)
{
  "@context": "https://www.w3.org/ns/activitystreams",
  "id": "https://community.nodebb.org/category/31",
  "url": "https://community.nodebb.org/category/31/threadiverse-working-group",
  "inbox": "https://community.nodebb.org/category/31/inbox",
  "outbox": "https://community.nodebb.org/category/31/outbox",
  "sharedInbox": "https://community.nodebb.org/inbox",
  "type": "Group",
  "name": "Threadiverse Working Group",
  "preferredUsername": "swicg-threadiverse-wg",
  "summary": "Discussion and announcements related to the SWICG Threadiverse task force",
  "icon": {
    "type": "Image",
    "mediaType": "image/png",
    "url": "https://community.nodebb.org/assets/uploads/system/site-logo.png"
  },
  "publicKey": {
    "id": "https://community.nodebb.org/category/31#key",
    "owner": "https://community.nodebb.org/category/31",
"publicKeyPem": "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA0/Or3Ox2/jbhBZzF8W0Y\nWuS/4lgm5O5rxQk2nDRBXU/qNaZnMPkW2FxFPuPetndUVKSD2+vWF3SUlFyZ/vhT\nITzLkbRSILMiZCUg+0mvqi6va1WMBglMe5jLkc7wdfgNsosqBzKMdyMxqDZr++mJ\n8DjuqzWHENcjWcbMfSfAa9nkZHBIQUsHGGIwxEbKNlPqF0JIB66py7xmXbboDxpD\nPVF3EMkgZNnbmDGtlkZCKbztradyNRVl/u6KJpV3fbi+m/8CZ+POc4I5sKCQY1Hr\ndslHlm6tCkJQxIIKQtz0ZJ5yCUYmk48C2gFCndfJtYoEy9iR62xSemky6y04gWVc\naQIDAQAB\n-----END PUBLIC KEY-----\n"
  }
}
crates/apub/assets/nodebb/objects/page.json (new file, 38 lines)
{
  "@context": "https://www.w3.org/ns/activitystreams",
  "id": "https://community.nodebb.org/topic/17908",
  "type": "Page",
  "to": ["https://www.w3.org/ns/activitystreams#Public"],
  "cc": ["https://community.nodebb.org/uid/2/followers"],
  "inReplyTo": null,
  "published": "2024-03-19T20:25:39.462Z",
  "url": "https://community.nodebb.org/topic/17908/threadiverse-working-group",
  "attributedTo": "https://community.nodebb.org/uid/2",
  "audience": "https://community.nodebb.org/category/31/threadiverse-working-group",
  "sensitive": false,
  "summary": null,
  "name": "Threadiverse Working Group",
"content": "<p dir=\"auto\">NodeBB is at this year's FediForum, and one of the breakout sessions centred around <strong>the Theadiverse</strong>, the subset of ActivityPub-enabled applications built around a topic-centric model of content representation.</p>\n<p dir=\"auto\">Some of the topic touched upon included:</p>\n<ul>\n<li>Aligning on a standard representation for collections of Notes</li>\n<li>FEP-1b12 — Group federation and implementation thereof by Lemmy, et al.</li>\n<li>Offering a comparatively more feature-rich experience vis-a-vis restrictions re: microblogging</li>\n<li>Going forward: collaborating on building compatible threadiverse implementations</li>\n</ul>\n<p dir=\"auto\">The main action item involved <strong>the genesis of an informal working group for the threadiverse</strong>, in order to align our disparate implementations toward a common path.</p>\n<p dir=\"auto\">We intend to meet monthly at first, with the first meeting likely sometime early-to-mid April.</p>\n<p dir=\"auto\">The topic of the first WG call is: <strong>Representation of the higherlevel collection of Notes (posts, etc.) — Article vs. Page, etc?</strong></p>\n<p dir=\"auto\">Interested?</p>\n<ul>\n<li>Publicly reply to this post (NodeBB does not support non-public posts at this time) if you'd like to join the list</li>\n<li>If you prefer to remain private, please email <a href=\"mailto:julian@nodebb.org\" rel=\"nofollow ugc\">julian@nodebb.org</a></li>\n</ul>\n<hr />\n<p dir=\"auto\">As an aside, I'd love to try something new and attempt tokeep as much of this as I can on the social web. Can you do me a favour and boost this to your followers?</p>\n",
  "source": {
"content": "NodeBB is at this year's FediForum, and one of the breakout sessions centred around **the Theadiverse**, the subset of ActivityPub-enabled applications built around a topic-centric model of content representation.\n\nSome of the topic touched upon included:\n\n* Aligning on a standard representation for collections of Notes\n* FEP-1b12 — Group federation and implementation thereof by Lemmy, et al.\n* Offering a comparatively more feature-rich experience vis-a-vis restrictions re: microblogging\n* Going forward: collaborating on building compatible threadiverse implementations\n\nThe main action item involved **the genesis of an informal working group for the threadiverse**, in order to align our disparate implementations toward a common path.\n\nWe intend to meet monthly at first, with the first meeting likely sometime early-to-mid April.\n\nThe topic of the first WG call is: **Representation of the higher level collection of Notes (posts, etc.) — Article vs. Page, etc?**\n\nInterested?\n\n* Publicly reply to this post (NodeBB does not support non-public postsat this time) if you'd like to join the list\n* If you prefer to remain private, please email julian@nodebb.org\n\n----\n\nAs an aside, I'd love to try something new and attempt to keep as much of this as I can on the social web. Can you do me a favour and boost this to your followers?",
    "mediaType": "text/markdown"
  },
  "tag": [
    {
      "type": "Hashtag",
      "href": "https://community.nodebb.org/tags/fediforum",
      "name": "#fediforum"
    },
    {
      "type": "Hashtag",
      "href": "https://community.nodebb.org/tags/activitypub",
      "name": "#activitypub"
    },
    {
      "type": "Hashtag",
      "href": "https://community.nodebb.org/tags/threadiverse",
      "name": "#threadiverse"
    }
  ],
  "attachment": []
}
crates/apub/assets/nodebb/objects/person.json (new file, 29 lines)
{
  "@context": "https://www.w3.org/ns/activitystreams",
  "id": "https://community.nodebb.org/uid/2",
  "url": "https://community.nodebb.org/user/julian",
  "followers": "https://community.nodebb.org/uid/2/followers",
  "following": "https://community.nodebb.org/uid/2/following",
  "inbox": "https://community.nodebb.org/uid/2/inbox",
  "outbox": "https://community.nodebb.org/uid/2/outbox",
  "sharedInbox": "https://community.nodebb.org/inbox",
  "type": "Person",
  "name": "julian",
  "preferredUsername": "julian",
"summary": "Hi! I'm Julian, one of the co-founders of NodeBB, the forum software you are using right now.\r\n\r\nI started this company with two colleagues, Baris and Andrew, in 2013, and have been doing the startup thing since (although I think at some point along the way we stopped being a startup and just became a boring ol' small business).\r\n\r\nIn my free time I rock climb, cycle, and lift weights. I live just outside Toronto, Canada, with my wife and three children.",
  "icon": {
    "type": "Image",
    "mediaType": "image/jpeg",
    "url": "https://community.nodebb.org/assets/uploads/profile/uid-2/2-profileavatar-1701457270279.jpeg"
  },
  "image": {
    "type": "Image",
    "mediaType": "image/jpeg",
    "url": "https://community.nodebb.org/assets/uploads/profile/uid-2/2-profilecover-1649468285913.jpeg"
  },
  "publicKey": {
    "id": "https://community.nodebb.org/uid/2#key",
    "owner": "https://community.nodebb.org/uid/2",
"publicKeyPem": "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAzEr0sFdATahQzprS4EOT\nZq+KMc6UTbt2GDP20OrQi/P5AXAbMaQiRCRdGWhYGjnH0jicn5NnozNxRo+HchJT\nV6NOHxpsxqPCoaLeoBkhfhbSCLr2Gzil6mmfqf9TjnI7A7ZTtCc0G+n0ztyL9HwL\nkEAI178l2gckk4XKKYnEd+dyiIevExrq/ROLgwW1o428FZvlF5amKxhpVUEygRU8\nCd1hqWYs+xYDOJURCP5qEx/MmRPpV/yGMTMyF+/gcQc0TUZnhWAM2E4M+aq3aKh6\nJP/vsry+5YZPUaPWfopbT5Ijyt6ZSElp6Avkg56eTz0a5SRcjCVS6IFVPwiLlzOe\nYwIDAQAB\n-----END PUBLIC KEY-----\n"
  }
}
crates/apub/assets/wordpress/activities/announce.json (new file, 49 lines)
{
  "@context": ["https://www.w3.org/ns/activitystreams"],
  "id": "https://pfefferle.org/lemmy-part-4/#activity#activity",
  "type": "Announce",
  "audience": "https://pfefferle.org/@pfefferle.org",
  "published": "2024-05-03T12:32:29Z",
  "updated": "2024-05-06T08:20:33Z",
  "to": [
    "https://www.w3.org/ns/activitystreams#Public",
    "https://pfefferle.org/wp-json/activitypub/1.0/actors/1/followers"
  ],
  "cc": [],
  "object": {
    "id": "https://pfefferle.org/lemmy-part-4/#activity",
    "type": "Update",
    "audience": "https://pfefferle.org/@pfefferle.org",
    "published": "2024-05-03T12:32:29Z",
    "updated": "2024-05-06T08:20:33Z",
    "to": [
      "https://www.w3.org/ns/activitystreams#Public",
      "https://pfefferle.org/wp-json/activitypub/1.0/actors/1/followers"
    ],
    "cc": [],
    "object": {
      "id": "https://pfefferle.org/lemmy-part-4/",
      "type": "Article",
      "attachment": [],
      "attributedTo": "https://pfefferle.org/author/pfefferle/",
      "audience": "https://pfefferle.org/@pfefferle.org",
"content": "\u003Cp\u003EIdentifies one or more entities that represent the total population of entities for which the object can considered to be relevant. Identifies one or more entities that represent the total population of entities for which the object can considered to be relevant.Identifies one or more entities that represent the total population of entities for which the object can considered to be relevant.Identifies one or more entities that represent the total population of entities for which the object can considered to be relevant.Identifies one or more entities that represent the total population of entities for which the object can considered to be relevant.Identifies one or more entities that represent the total population of entities for which the object can considered to be relevant.Identifies one or more entities that represent the total population of entities for which the object can considered to be relevant. \u003C/p\u003E",
      "contentMap": {
"en": "\u003Cp\u003EIdentifies one or more entities that represent the total population of entities for which the object can considered to be relevant. Identifies one or more entities that represent the total population of entities for which the object can considered to be relevant.Identifies one or more entities that represent the total population of entities for which the object can considered to be relevant.Identifies one or more entities that represent the total population of entities for which the object can considered to be relevant.Identifies one or more entities that represent the total population of entities for which the object can considered to be relevant.Identifies one or more entities that represent the total population of entities for which the object can considered to be relevant.Identifies one or more entities that represent the total population of entities for which the object can considered to be relevant. \u003C/p\u003E"
      },
      "name": "Lemmy (Part 4)",
      "published": "2024-05-03T12:32:29Z",
"summary": "Identifies one or more entities that represent the total population of entities for which the object can considered to be relevant. Identifies one or more entities that represent the total population of entities for which the object can considered to be relevant.Identifies one or more entities that represent the total population of entities for which the object can considered to be relevant.Identifies one or more entities that represent the total population of entities for which the object [...]",
      "tag": [],
      "updated": "2024-05-06T08:20:33Z",
      "url": "https://pfefferle.org/lemmy-part-4/",
      "to": [
        "https://www.w3.org/ns/activitystreams#Public",
        "https://pfefferle.org/wp-json/activitypub/1.0/actors/1/followers"
      ],
      "cc": []
    },
    "actor": "https://pfefferle.org/author/pfefferle/"
  },
  "actor": "https://pfefferle.org/@pfefferle.org"
}
crates/apub/assets/wordpress/objects/group.json (new file, 66 lines)
{
  "@context": [
    "https://www.w3.org/ns/activitystreams",
    "https://w3id.org/security/v1",
    "https://purl.archive.org/socialweb/webfinger",
    {
      "schema": "http://schema.org#",
      "toot": "http://joinmastodon.org/ns#",
      "webfinger": "https://webfinger.net/#",
      "lemmy": "https://join-lemmy.org/ns#",
      "manuallyApprovesFollowers": "as:manuallyApprovesFollowers",
      "PropertyValue": "schema:PropertyValue",
      "value": "schema:value",
      "Hashtag": "as:Hashtag",
      "featured": {
        "@id": "toot:featured",
        "@type": "@id"
      },
      "featuredTags": {
        "@id": "toot:featuredTags",
        "@type": "@id"
      },
      "moderators": {
        "@id": "lemmy:moderators",
        "@type": "@id"
      },
      "postingRestrictedToMods": "lemmy:postingRestrictedToMods",
      "discoverable": "toot:discoverable",
      "indexable": "toot:indexable",
      "resource": "webfinger:resource"
    }
  ],
  "id": "https://pfefferle.org/@pfefferle.org",
  "type": "Group",
  "attachment": [],
  "attributedTo": "https://pfefferle.org/wp-json/activitypub/1.0/collections/moderators",
  "name": "Matthias Pfefferle",
  "icon": {
    "type": "Image",
    "url": "https://pfefferle.org/wp-content/uploads/2023/06/cropped-BeLItBV-_400x400.jpg"
  },
  "published": "2024-04-03T16:58:22Z",
  "summary": "<p>Webworker, blogger und podcaster</p>\n",
  "tag": [],
  "url": "https://pfefferle.org/@pfefferle.org",
  "inbox": "https://pfefferle.org/wp-json/activitypub/1.0/users/0/inbox",
  "outbox": "https://pfefferle.org/wp-json/activitypub/1.0/users/0/outbox",
  "following": "https://pfefferle.org/wp-json/activitypub/1.0/users/0/following",
  "followers": "https://pfefferle.org/wp-json/activitypub/1.0/users/0/followers",
  "preferredUsername": "pfefferle.org",
  "endpoints": {
    "sharedInbox": "https://pfefferle.org/wp-json/activitypub/1.0/inbox"
  },
  "publicKey": {
    "id": "https://pfefferle.org/@pfefferle.org#main-key",
    "owner": "https://pfefferle.org/@pfefferle.org",
"publicKeyPem": "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAuq8xeLMFcaCwPFBhgMRE\n/dDh2XKoNXFXnixctmK8BXSuuLMxucm3I/8NyhIvb3LqU+uP1fO8F0ecUbk2sN+x\nKag5vIV6yKXzJ8ILMWQ9AaELpXDmMZqL0zal0LUJRAOkDgPDovDAoq6tx++yDoV0\njdVbf9CoZKit1cz2ZrEuE5dswq3J/z9+c6POkhCkWEX5TPJzkOrmnjkvrXxGHUJ2\nA3+P+VaZhd5cmvqYosSpYNJshxCdev12pIF78OnYLiYiyXlgGHU+7uQR0M4tTcij\n6cUdLkms9m+b6H3ctXntPn410e5YLFPldjAYzQB5wHVdFZsWtyrbqfYdCa+KkKpA\nvwIDAQAB\n-----END PUBLIC KEY-----\n"
  },
  "manuallyApprovesFollowers": false,
  "featured": "https://pfefferle.org/wp-json/activitypub/1.0/users/0/collections/featured",
  "moderators": "https://pfefferle.org/wp-json/activitypub/1.0/collections/moderators",
  "discoverable": true,
  "indexable": true,
  "webfinger": "pfefferle.org@pfefferle.org",
  "postingRestrictedToMods": true
}
crates/apub/assets/wordpress/objects/note.json (new file, 24 lines)
{
  "@context": [
    "https://www.w3.org/ns/activitystreams",
    {
      "Hashtag": "as:Hashtag"
    }
  ],
  "id": "https://pfefferle.org?c=148",
  "type": "Note",
  "attributedTo": "https://pfefferle.org/author/pfefferle/",
  "content": "<p>Nice! Hello from WordPress!</p>",
  "contentMap": {
    "en": "<p>Nice! Hello from WordPress!</p>"
  },
  "inReplyTo": "https://socialhub.activitypub.rocks/ap/object/ce040f1ead95964f6dbbf1084b81432d",
  "published": "2024-04-30T15:21:13Z",
  "tag": [],
  "url": "https://pfefferle.org?c=148",
  "to": [
    "https://www.w3.org/ns/activitystreams#Public",
    "https://pfefferle.org/wp-json/activitypub/1.0/users/0/followers"
  ],
  "cc": []
}
crates/apub/assets/wordpress/objects/page.json (new file, 26 lines)
{
  "@context": [
    "https://www.w3.org/ns/activitystreams",
    {
      "Hashtag": "as:Hashtag"
    }
  ],
  "id": "https://pfefferle.org/this-is-a-test-federation/",
  "type": "Article",
  "attachment": [],
  "attributedTo": "https://pfefferle.org/author/pfefferle/",
  "content": "<p>with Discource!</p>",
  "contentMap": {
    "en": "<p>with Discource!</p>"
  },
  "name": "This is a test-federation",
  "published": "2024-04-30T15:16:41Z",
  "summary": "with Discource! [...]",
  "tag": [],
  "url": "https://pfefferle.org/this-is-a-test-federation/",
  "to": [
    "https://www.w3.org/ns/activitystreams#Public",
    "https://pfefferle.org/wp-json/activitypub/1.0/users/1/followers"
  ],
  "cc": []
}
crates/apub/assets/wordpress/objects/person.json (new file, 74 lines)
{
  "@context": [
    "https://www.w3.org/ns/activitystreams",
    "https://w3id.org/security/v1",
    "https://purl.archive.org/socialweb/webfinger",
    {
      "schema": "http://schema.org#",
      "toot": "http://joinmastodon.org/ns#",
      "webfinger": "https://webfinger.net/#",
      "lemmy": "https://join-lemmy.org/ns#",
      "manuallyApprovesFollowers": "as:manuallyApprovesFollowers",
      "PropertyValue": "schema:PropertyValue",
      "value": "schema:value",
      "Hashtag": "as:Hashtag",
      "featured": {
        "@id": "toot:featured",
        "@type": "@id"
      },
      "featuredTags": {
        "@id": "toot:featuredTags",
        "@type": "@id"
      },
      "moderators": {
        "@id": "lemmy:moderators",
        "@type": "@id"
      },
      "postingRestrictedToMods": "lemmy:postingRestrictedToMods",
      "discoverable": "toot:discoverable",
      "indexable": "toot:indexable",
      "resource": "webfinger:resource"
    }
  ],
  "id": "https://pfefferle.org/author/pfefferle/",
  "type": "Person",
  "attachment": [
    {
      "type": "PropertyValue",
      "name": "Blog",
      "value": "<a rel=\"me\" title=\"https://pfefferle.org/\" target=\"_blank\" href=\"https://pfefferle.org/\">pfefferle.org</a>"
    },
    {
      "type": "PropertyValue",
      "name": "Profile",
      "value": "<a rel=\"me\" title=\"https://pfefferle.org/author/pfefferle/\" target=\"_blank\" href=\"https://pfefferle.org/author/pfefferle/\">pfefferle.org</a>"
    }
  ],
  "name": "Matthias Pfefferle",
  "icon": {
    "type": "Image",
    "url": "https://secure.gravatar.com/avatar/a2bdca7870e859658cece96c044b3be5?s=120&d=mm&r=g"
  },
  "published": "2014-02-10T15:23:08Z",
  "summary": "<p>Ich arbeite als Open Web Lead für Automattic.</p>\n",
  "tag": [],
  "url": "https://pfefferle.org/author/pfefferle/",
  "inbox": "https://pfefferle.org/wp-json/activitypub/1.0/users/1/inbox",
  "outbox": "https://pfefferle.org/wp-json/activitypub/1.0/users/1/outbox",
  "following": "https://pfefferle.org/wp-json/activitypub/1.0/users/1/following",
  "followers": "https://pfefferle.org/wp-json/activitypub/1.0/users/1/followers",
  "preferredUsername": "matthias",
  "endpoints": {
    "sharedInbox": "https://pfefferle.org/wp-json/activitypub/1.0/inbox"
  },
  "publicKey": {
    "id": "https://pfefferle.org/author/pfefferle/#main-key",
    "owner": "https://pfefferle.org/author/pfefferle/",
"publicKeyPem": "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAvTA5RA40nOsso04RSwyX\nHXTojRPUMlIlArDcSy3M5GUJp9/xbxSUOdBjqd31KKB1GIi3vrLmD1Qi/ZqS95Qy\nw2Zd3xOsCg+o9bsyOG+O6Y8Lu+HEB5JKLUbNHdiSviakJ8wGadH9Wm4WIiN20y+q\n/u6lgxgiWfZ2CFCN6SOc28fUKi9NmKvXK+M12BhFfy1tC5KWXKDm0UbfI1+dmqhR\n3Ffe6vEsCI/YIVVdWxQ9kouOd0XSHOGdslktkepRO7IP9i9TdwyeCa0WWRoeO5Wa\ntVpc1Y0WuNbTM2ksIXTg0G+rO1/6KO/hrHnGu3RCfb/ZIHK5L/aWYb9B3PG3LyKV\n+wIDAQAB\n-----END PUBLIC KEY-----\n"
  },
  "manuallyApprovesFollowers": false,
  "featured": "https://pfefferle.org/wp-json/activitypub/1.0/users/1/collections/featured",
  "discoverable": true,
  "indexable": true,
  "webfinger": "matthias@pfefferle.org"
}
@@ -39,7 +39,10 @@ use lemmy_db_schema::{
   },
   traits::{Bannable, Crud, Followable},
 };
-use lemmy_utils::error::{LemmyError, LemmyResult};
+use lemmy_utils::{
+  error::{LemmyError, LemmyResult},
+  LemmyErrorType,
+};
 use url::Url;

 impl BlockUser {
@@ -129,7 +132,11 @@ impl ActivityHandler for BlockUser {
     verify_is_public(&self.to, &self.cc)?;
     match self.target.dereference(context).await? {
       SiteOrCommunity::Site(site) => {
-        let domain = self.object.inner().domain().expect("url needs domain");
+        let domain = self
+          .object
+          .inner()
+          .domain()
+          .ok_or(LemmyErrorType::UrlWithoutDomain)?;
         if context.settings().hostname == domain {
           return Err(
             anyhow!("Site bans from remote instance can't affect user's home instance").into(),
@@ -94,7 +94,12 @@ impl AnnounceActivity {
       actor: community.id().into(),
       to: vec![public()],
       object: IdOrNestedObject::NestedObject(object),
-      cc: vec![community.followers_url.clone().into()],
+      cc: community
+        .followers_url
+        .clone()
+        .map(Into::into)
+        .into_iter()
+        .collect(),
       kind: AnnounceType::Announce,
       id,
     })
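The new cc value leans on Option implementing IntoIterator: Some yields a one-element list and None an empty one. A tiny illustration with plain Strings rather than the real DbUrl types:

fn main() {
    let followers_url: Option<String> = Some("https://example.org/c/x/followers".into());
    // Some(url) collects into a single-element Vec.
    let cc: Vec<String> = followers_url.into_iter().collect();
    assert_eq!(cc.len(), 1);

    // None collects into an empty Vec, so the activity simply has no cc entry.
    let missing: Option<String> = None;
    let empty: Vec<String> = missing.into_iter().collect();
    assert!(empty.is_empty());
}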
@@ -105,7 +105,7 @@ impl ActivityHandler for UpdateCommunity {
       last_refreshed_at: Some(naive_now()),
       icon: Some(self.object.icon.map(|i| i.url.into())),
       banner: Some(self.object.image.map(|i| i.url.into())),
-      followers_url: Some(self.object.followers.into()),
+      followers_url: self.object.followers.map(Into::into),
       inbox_url: Some(self.object.inbox.into()),
       shared_inbox_url: Some(self.object.endpoints.map(|e| e.shared_inbox.into())),
       moderators_url: self.object.attributed_to.map(Into::into),
@@ -19,7 +19,7 @@ use activitypub_federation::{
   config::Data,
   fetch::object_id::ObjectId,
   kinds::public,
-  protocol::verification::verify_domains_match,
+  protocol::verification::{verify_domains_match, verify_urls_match},
   traits::{ActivityHandler, Actor, Object},
 };
 use lemmy_api_common::{
@@ -133,6 +133,7 @@ impl ActivityHandler for CreateOrUpdateNote {
     verify_domains_match(self.actor.inner(), self.object.id.inner())?;
     check_community_deleted_or_removed(&community)?;
     check_post_deleted_or_removed(&post)?;
+    verify_urls_match(self.actor.inner(), self.object.attributed_to.inner())?;

     ApubComment::verify(&self.object, self.actor.inner(), context).await?;
     Ok(())
@@ -66,7 +66,6 @@ impl CreateOrUpdatePage {
     kind: CreateOrUpdateType,
     context: Data<LemmyContext>,
   ) -> LemmyResult<()> {
-    let post = ApubPost(post);
     let community_id = post.community_id;
     let person: ApubPerson = Person::read(&mut context.pool(), person_id)
       .await?
@@ -78,7 +77,7 @@ impl CreateOrUpdatePage {
       .into();

     let create_or_update =
-      CreateOrUpdatePage::new(post, &person, &community, kind, &context).await?;
+      CreateOrUpdatePage::new(post.into(), &person, &community, kind, &context).await?;
     let is_mod_action = create_or_update.object.is_mod_action(&context).await?;
     let activity = AnnouncableActivities::CreateOrUpdatePost(create_or_update);
     send_activity_in_community(
@@ -9,7 +9,7 @@ use crate::{
 };
 use activitypub_federation::{
   config::Data,
-  protocol::verification::verify_domains_match,
+  protocol::verification::{verify_domains_match, verify_urls_match},
   traits::{ActivityHandler, Actor, Object},
 };
 use lemmy_api_common::context::LemmyContext;
@@ -61,6 +61,7 @@ impl ActivityHandler for CreateOrUpdateChatMessage {
     verify_person(&self.actor, context).await?;
     verify_domains_match(self.actor.inner(), self.object.id.inner())?;
     verify_domains_match(self.to[0].inner(), self.object.to[0].inner())?;
+    verify_urls_match(self.actor.inner(), self.object.attributed_to.inner())?;
     ApubPrivateMessage::verify(&self.object, self.actor.inner(), context).await?;
     Ok(())
   }
@@ -1,6 +1,6 @@
 use crate::{
   activity_lists::AnnouncableActivities,
-  objects::{community::ApubCommunity, post::ApubPost},
+  objects::community::ApubCommunity,
   protocol::{
     activities::{
       community::announce::AnnounceActivity,
@@ -18,11 +18,8 @@ use activitypub_federation::{
 };
 use futures::future::join_all;
 use lemmy_api_common::{context::LemmyContext, utils::generate_outbox_url};
-use lemmy_db_schema::{
-  source::{person::Person, post::Post},
-  traits::Crud,
-  utils::FETCH_LIMIT_MAX,
-};
+use lemmy_db_schema::{utils::FETCH_LIMIT_MAX, SortType};
+use lemmy_db_views::{post_view::PostQuery, structs::SiteView};
 use lemmy_utils::{
   error::{LemmyError, LemmyResult},
   LemmyErrorType,
@@ -41,19 +38,30 @@ impl Collection for ApubCommunityOutbox {

   #[tracing::instrument(skip_all)]
   async fn read_local(owner: &Self::Owner, data: &Data<Self::DataType>) -> LemmyResult<Self::Kind> {
-    let post_list: Vec<ApubPost> = Post::list_for_community(&mut data.pool(), owner.id)
+    let site = SiteView::read_local(&mut data.pool())
       .await?
-      .into_iter()
-      .map(Into::into)
-      .collect();
+      .ok_or(LemmyErrorType::LocalSiteNotSetup)?
+      .site;
+
+    let post_views = PostQuery {
+      community_id: Some(owner.id),
+      sort: Some(SortType::New),
+      limit: Some(FETCH_LIMIT_MAX),
+      ..Default::default()
+    }
+    .list(&site, &mut data.pool())
+    .await?;

     let mut ordered_items = vec![];
-    for post in post_list {
-      let person = Person::read(&mut data.pool(), post.creator_id)
-        .await?
-        .ok_or(LemmyErrorType::CouldntFindPerson)?
-        .into();
-      let create =
-        CreateOrUpdatePage::new(post, &person, owner, CreateOrUpdateType::Create, data).await?;
+    for post_view in post_views {
+      let create = CreateOrUpdatePage::new(
+        post_view.post.into(),
+        &post_view.creator.into(),
+        owner,
+        CreateOrUpdateType::Create,
+        data,
+      )
+      .await?;
       let announcable = AnnouncableActivities::CreateOrUpdatePost(create);
       let announce = AnnounceActivity::new(announcable.try_into()?, owner, data)?;
       ordered_items.push(announce);
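The rewritten outbox query relies on struct-update syntax: only the fields that matter are spelled out and everything else comes from Default. A generic sketch of the pattern (Query here is illustrative, not the actual PostQuery):

#[derive(Default, Debug)]
struct Query {
    community_id: Option<i32>,
    sort: Option<&'static str>,
    limit: Option<i64>,
    page: Option<i64>,
}

fn main() {
    // Fields not named fall back to their Default value (None here).
    let q = Query {
        community_id: Some(31),
        sort: Some("New"),
        limit: Some(50),
        ..Default::default()
    };
    println!("{:?}", q);
}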
@@ -128,7 +128,14 @@ pub(crate) mod tests {
   use crate::protocol::objects::{group::Group, tombstone::Tombstone};
   use actix_web::body::to_bytes;
   use lemmy_db_schema::{
-    source::{community::CommunityInsertForm, instance::Instance},
+    newtypes::InstanceId,
+    source::{
+      community::CommunityInsertForm,
+      instance::Instance,
+      local_site::{LocalSite, LocalSiteInsertForm},
+      local_site_rate_limit::{LocalSiteRateLimit, LocalSiteRateLimitInsertForm},
+      site::{Site, SiteInsertForm},
+    },
     traits::Crud,
     CommunityVisibility,
   };
@@ -142,6 +149,8 @@
   ) -> LemmyResult<(Instance, Community)> {
     let instance =
       Instance::read_or_create(&mut context.pool(), "my_domain.tld".to_string()).await?;
+    create_local_site(context, instance.id).await?;
+
     let community_form = CommunityInsertForm::builder()
       .name("testcom6".to_string())
       .title("nada".to_owned())
@@ -154,6 +163,28 @@
     Ok((instance, community))
   }

+  /// Necessary for the community outbox fetching
+  async fn create_local_site(
+    context: &Data<LemmyContext>,
+    instance_id: InstanceId,
+  ) -> LemmyResult<()> {
+    // Create a local site, since this is necessary for community fetching.
+    let site_form = SiteInsertForm::builder()
+      .name("test site".to_string())
+      .instance_id(instance_id)
+      .build();
+    let site = Site::create(&mut context.pool(), &site_form).await?;
+
+    let local_site_form = LocalSiteInsertForm::builder().site_id(site.id).build();
+    let local_site = LocalSite::create(&mut context.pool(), &local_site_form).await?;
+    let local_site_rate_limit_form = LocalSiteRateLimitInsertForm::builder()
+      .local_site_id(local_site.id)
+      .build();
+
+    LocalSiteRateLimit::create(&mut context.pool(), &local_site_rate_limit_form).await?;
+    Ok(())
+  }
+
   async fn decode_response<T: DeserializeOwned>(res: HttpResponse) -> LemmyResult<T> {
     let body = to_bytes(res.into_body()).await.unwrap();
     let body = std::str::from_utf8(&body)?;
@@ -164,6 +195,7 @@
   #[serial]
   async fn test_get_community() -> LemmyResult<()> {
     let context = LemmyContext::init_test_context().await;
+    let (instance, community) = init(false, CommunityVisibility::Public, &context).await?;

     // fetch invalid community
     let query = CommunityQuery {
@@ -172,8 +204,6 @@
     let res = get_apub_community_http(query.into(), context.reset_request_count()).await;
     assert!(res.is_err());

-    let (instance, community) = init(false, CommunityVisibility::Public, &context).await?;
-
     // fetch valid community
     let query = CommunityQuery {
       community_name: community.name.clone(),
@@ -20,7 +20,8 @@ use lemmy_db_schema::{
 };
 use lemmy_utils::error::{LemmyErrorType, LemmyResult};
 use serde::{Deserialize, Serialize};
-use std::ops::Deref;
+use std::{ops::Deref, time::Duration};
+use tokio::time::timeout;
 use url::Url;

 mod comment;
@@ -30,13 +31,22 @@ mod post;
 pub mod routes;
 pub mod site;

+const INCOMING_ACTIVITY_TIMEOUT: Duration = Duration::from_secs(9);
+
 pub async fn shared_inbox(
   request: HttpRequest,
   body: Bytes,
   data: Data<LemmyContext>,
 ) -> LemmyResult<HttpResponse> {
-  receive_activity::<SharedInboxActivities, UserOrCommunity, LemmyContext>(request, body, &data)
+  let receive_fut =
+    receive_activity::<SharedInboxActivities, UserOrCommunity, LemmyContext>(request, body, &data);
+  // Set a timeout shorter than `REQWEST_TIMEOUT` for processing incoming activities. This is to
+  // avoid taking a long time to process an incoming activity when a required data fetch times out.
+  // In this case our own instance would timeout and be marked as dead by the sender. Better to
+  // consider the activity broken and move on.
+  timeout(INCOMING_ACTIVITY_TIMEOUT, receive_fut)
+    .await
+    .map_err(|_| LemmyErrorType::InboxTimeout)?
 }

 /// Convert the data to json and turn it into an HTTP Response with the correct ActivityPub
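As a standalone sketch of the wrapping pattern introduced here (handle_activity below stands in for receive_activity; it is not the real Lemmy function):

use std::time::Duration;
use tokio::time::timeout;

async fn handle_activity() -> Result<&'static str, &'static str> {
    // Pretend this does slow network fetches while processing an activity.
    tokio::time::sleep(Duration::from_millis(50)).await;
    Ok("processed")
}

#[tokio::main]
async fn main() {
    const INCOMING_ACTIVITY_TIMEOUT: Duration = Duration::from_secs(9);
    // timeout() races the future against the deadline and returns Err(Elapsed)
    // if it runs too long; on success the inner Result is still available.
    match timeout(INCOMING_ACTIVITY_TIMEOUT, handle_activity()).await {
        Ok(inner) => println!("{:?}", inner),
        Err(_elapsed) => eprintln!("activity processing timed out"),
    }
}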
@@ -78,7 +78,10 @@ impl UrlVerifier for VerifyUrlData {
 /// - URL not being in the blocklist (if it is active)
 #[tracing::instrument(skip(local_site_data))]
 fn check_apub_id_valid(apub_id: &Url, local_site_data: &LocalSiteData) -> LemmyResult<()> {
-  let domain = apub_id.domain().expect("apud id has domain").to_string();
+  let domain = apub_id
+    .domain()
+    .ok_or(LemmyErrorType::UrlWithoutDomain)?
+    .to_string();

   if !local_site_data
     .local_site
@@ -158,7 +161,10 @@ pub(crate) async fn check_apub_id_valid_with_strictness(
   is_strict: bool,
   context: &LemmyContext,
 ) -> LemmyResult<()> {
-  let domain = apub_id.domain().expect("apud id has domain").to_string();
+  let domain = apub_id
+    .domain()
+    .ok_or(LemmyErrorType::UrlWithoutDomain)?
+    .to_string();
   let local_instance = context
     .settings()
     .get_hostname_without_port()
@@ -185,7 +191,10 @@ pub(crate) async fn check_apub_id_valid_with_strictness(
     .expect("local hostname is valid");
   allowed_and_local.push(local_instance);

-  let domain = apub_id.domain().expect("apud id has domain").to_string();
+  let domain = apub_id
+    .domain()
+    .ok_or(LemmyErrorType::UrlWithoutDomain)?
+    .to_string();
   if !allowed_and_local.contains(&domain) {
     Err(LemmyErrorType::FederationDisabledByStrictAllowList)?
   }
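The recurring expect-to-ok_or change works because url::Url::domain() returns an Option, which is None for URLs such as mailto: addresses, so returning an error is safer than panicking on federation input. A minimal sketch with an illustrative error enum (not the real LemmyErrorType):

use url::Url;

#[derive(Debug)]
enum ExampleError {
    UrlWithoutDomain,
}

fn domain_of(apub_id: &Url) -> Result<String, ExampleError> {
    // domain() is None for URLs without a host, e.g. "mailto:user@example.com".
    apub_id
        .domain()
        .map(str::to_string)
        .ok_or(ExampleError::UrlWithoutDomain)
}

fn main() {
    let ok = Url::parse("https://lemmy.ml/c/rust").unwrap();
    assert_eq!(domain_of(&ok).unwrap(), "lemmy.ml");

    let no_domain = Url::parse("mailto:user@example.com").unwrap();
    assert!(domain_of(&no_domain).is_err());
}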
@@ -54,7 +54,10 @@ pub async fn collect_non_local_mentions(
     name: Some(format!(
       "@{}@{}",
       &parent_creator.name,
-      &parent_creator.id().domain().expect("has domain")
+      &parent_creator
+        .id()
+        .domain()
+        .ok_or(LemmyErrorType::UrlWithoutDomain)?
     )),
     kind: MentionType::Mention,
   };
@@ -113,7 +113,7 @@ impl Object for ApubCommunity {
       featured: Some(generate_featured_url(&self.actor_id)?.into()),
       inbox: self.inbox_url.clone().into(),
       outbox: generate_outbox_url(&self.actor_id)?.into(),
-      followers: self.followers_url.clone().into(),
+      followers: self.followers_url.clone().map(Into::into),
       endpoints: self.shared_inbox_url.clone().map(|s| Endpoints {
         shared_inbox: s.into(),
       }),
@@ -164,7 +164,7 @@ impl Object for ApubCommunity {
       last_refreshed_at: Some(naive_now()),
       icon,
       banner,
-      followers_url: Some(group.followers.clone().into()),
+      followers_url: group.followers.clone().map(Into::into),
       inbox_url: Some(group.inbox.into()),
       shared_inbox_url: group.endpoints.map(|e| e.shared_inbox.into()),
       moderators_url: group.attributed_to.clone().map(Into::into),
@@ -187,11 +187,9 @@ impl Object for ApubCommunity {
     let context_ = context.reset_request_count();
     spawn_try_task(async move {
       group.outbox.dereference(&community_, &context_).await.ok();
-      group
-        .followers
-        .dereference(&community_, &context_)
-        .await
-        .ok();
+      if let Some(followers) = group.followers {
+        followers.dereference(&community_, &context_).await.ok();
+      }
       if let Some(featured) = group.featured {
         featured.dereference(&community_, &context_).await.ok();
       }
@@ -275,7 +273,9 @@ pub(crate) mod tests {
     // change these links so they dont fetch over the network
     json.attributed_to = None;
     json.outbox = CollectionId::parse("https://enterprise.lemmy.ml/c/tenforward/not_outbox")?;
-    json.followers = CollectionId::parse("https://enterprise.lemmy.ml/c/tenforward/not_followers")?;
+    json.followers = Some(CollectionId::parse(
+      "https://enterprise.lemmy.ml/c/tenforward/not_followers",
+    )?);

     let url = Url::parse("https://enterprise.lemmy.ml/c/tenforward")?;
     ApubCommunity::verify(&json, &url, &context2).await?;
@@ -45,6 +45,7 @@ use lemmy_utils::{
     markdown::markdown_to_html,
     slurs::{check_slurs, check_slurs_opt},
   },
+  LemmyErrorType,
 };
 use std::ops::Deref;
 use tracing::debug;
@@ -99,7 +100,7 @@ impl Object for ApubSite {
       kind: ApplicationType::Application,
       id: self.id().into(),
       name: self.name.clone(),
-      preferred_username: data.domain().to_string(),
+      preferred_username: Some(data.domain().to_string()),
       content: self.sidebar.as_ref().map(|d| markdown_to_html(d)),
       source: self.sidebar.clone().map(Source::new),
       summary: self.description.clone(),
@@ -137,7 +138,11 @@ impl Object for ApubSite {

   #[tracing::instrument(skip_all)]
   async fn from_json(apub: Self::Kind, context: &Data<Self::DataType>) -> LemmyResult<Self> {
-    let domain = apub.id.inner().domain().expect("group id has domain");
+    let domain = apub
+      .id
+      .inner()
+      .domain()
+      .ok_or(LemmyErrorType::UrlWithoutDomain)?;
     let instance = DbInstance::read_or_create(&mut context.pool(), domain.to_string()).await?;

     let local_site = LocalSite::read(&mut context.pool()).await.ok();
@@ -210,7 +215,9 @@ pub(in crate::objects) async fn fetch_instance_actor_for_object<T: Into<Url> + C
     Err(e) => {
       // Failed to fetch instance actor, its probably not a lemmy instance
       debug!("Failed to dereference site for {}: {}", &instance_id, e);
-      let domain = instance_id.domain().expect("has domain");
+      let domain = instance_id
+        .domain()
+        .ok_or(LemmyErrorType::UrlWithoutDomain)?;
       Ok(
         DbInstance::read_or_create(&mut context.pool(), domain.to_string())
           .await?
@@ -96,4 +96,10 @@ mod tests {
     test_json::<Report>("assets/mbin/activities/flag.json")?;
     Ok(())
   }
+
+  #[test]
+  fn test_parse_wordpress_activities() -> LemmyResult<()> {
+    test_json::<AnnounceActivity>("assets/wordpress/activities/announce.json")?;
+    Ok(())
+  }
 }
@@ -45,7 +45,7 @@ pub struct Group {
   /// username, set at account creation and usually fixed after that
   pub(crate) preferred_username: String,
   pub(crate) inbox: Url,
-  pub(crate) followers: CollectionId<ApubCommunityFollower>,
+  pub(crate) followers: Option<CollectionId<ApubCommunityFollower>>,
   pub(crate) public_key: PublicKey,

   /// title
@@ -22,7 +22,7 @@ pub struct Instance {
   /// site name
   pub(crate) name: String,
   /// instance domain, necessary for mastodon authorized fetch
-  pub(crate) preferred_username: String,
+  pub(crate) preferred_username: Option<String>,
   pub(crate) inbox: Url,
   /// mandatory field in activitypub, lemmy currently serves an empty outbox
   pub(crate) outbox: Url,
@@ -190,4 +190,29 @@ mod tests {
     test_json::<Person>("assets/mobilizon/objects/person.json")?;
     Ok(())
   }
+
+  #[test]
+  fn test_parse_object_discourse() -> LemmyResult<()> {
+    test_json::<Group>("assets/discourse/objects/group.json")?;
+    test_json::<Page>("assets/discourse/objects/page.json")?;
+    test_json::<Person>("assets/discourse/objects/person.json")?;
+    Ok(())
+  }
+
+  #[test]
+  fn test_parse_object_nodebb() -> LemmyResult<()> {
+    test_json::<Group>("assets/nodebb/objects/group.json")?;
+    test_json::<Page>("assets/nodebb/objects/page.json")?;
+    test_json::<Person>("assets/nodebb/objects/person.json")?;
+    Ok(())
+  }
+
+  #[test]
+  fn test_parse_object_wordpress() -> LemmyResult<()> {
+    test_json::<Group>("assets/wordpress/objects/group.json")?;
+    test_json::<Page>("assets/wordpress/objects/page.json")?;
+    test_json::<Person>("assets/wordpress/objects/person.json")?;
+    test_json::<Note>("assets/wordpress/objects/note.json")?;
+    Ok(())
+  }
 }
@@ -42,7 +42,7 @@ pub struct Page {
   pub(crate) kind: PageType,
   pub(crate) id: ObjectId<ApubPost>,
   pub(crate) attributed_to: AttributedTo,
-  #[serde(deserialize_with = "deserialize_one_or_many")]
+  #[serde(deserialize_with = "deserialize_one_or_many", default)]
   pub(crate) to: Vec<Url>,
   // If there is inReplyTo field this is actually a comment and must not be parsed
   #[serde(deserialize_with = "deserialize_not_present", default)]
@@ -233,6 +233,10 @@ impl ActivityHandler for Page {
 #[async_trait::async_trait]
 impl InCommunity for Page {
   async fn community(&self, context: &Data<LemmyContext>) -> LemmyResult<ApubCommunity> {
+    if let Some(audience) = &self.audience {
+      return audience.dereference(context).await;
+    }
+
     let community = match &self.attributed_to {
       AttributedTo::Lemmy(_) => {
         let mut iter = self.to.iter().merge(self.cc.iter());
@@ -243,7 +247,7 @@ impl InCommunity for Page {
             break c;
           }
         } else {
-          Err(LemmyErrorType::NoCommunityFoundInCc)?
+          Err(LemmyErrorType::CouldntFindCommunity)?;
         }
       }
     }
@@ -251,11 +255,12 @@ impl InCommunity for Page {
         p.iter()
           .find(|a| a.kind == PersonOrGroupType::Group)
          .map(|a| ObjectId::<ApubCommunity>::from(a.id.clone().into_inner()))
-         .ok_or(LemmyErrorType::PageDoesNotSpecifyGroup)?
+         .ok_or(LemmyErrorType::CouldntFindCommunity)?
          .dereference(context)
          .await?
       }
     };

     if let Some(audience) = &self.audience {
       verify_community_matches(audience, community.actor_id.clone())?;
     }
@@ -1,5 +1,6 @@
 [package]
 name = "lemmy_db_perf"
+publish = false
 version.workspace = true
 edition.workspace = true
 description.workspace = true
@@ -84,22 +84,6 @@ impl Post {
     .await
   }

-  pub async fn list_for_community(
-    pool: &mut DbPool<'_>,
-    the_community_id: CommunityId,
-  ) -> Result<Vec<Self>, Error> {
-    let conn = &mut get_conn(pool).await?;
-    post::table
-      .filter(post::community_id.eq(the_community_id))
-      .filter(post::deleted.eq(false))
-      .filter(post::removed.eq(false))
-      .then_order_by(post::featured_community.desc())
-      .then_order_by(post::published.desc())
-      .limit(FETCH_LIMIT_MAX)
-      .load::<Self>(conn)
-      .await
-  }
-
   pub async fn list_featured_for_community(
     pool: &mut DbPool<'_>,
     the_community_id: CommunityId,
@@ -178,7 +178,7 @@ diesel::table! {
     icon -> Nullable<Text>,
     banner -> Nullable<Text>,
     #[max_length = 255]
-    followers_url -> Varchar,
+    followers_url -> Nullable<Varchar>,
     #[max_length = 255]
     inbox_url -> Varchar,
     #[max_length = 255]
@@ -49,8 +49,8 @@ pub struct Community {
   /// A URL for a banner.
   pub banner: Option<DbUrl>,
   #[cfg_attr(feature = "full", ts(skip))]
-  #[serde(skip, default = "placeholder_apub_url")]
-  pub followers_url: DbUrl,
+  #[serde(skip)]
+  pub followers_url: Option<DbUrl>,
   #[cfg_attr(feature = "full", ts(skip))]
   #[serde(skip, default = "placeholder_apub_url")]
   pub inbox_url: DbUrl,
@@ -33,13 +33,22 @@ use lemmy_utils::{
use once_cell::sync::Lazy;
use regex::Regex;
use rustls::{
client::{ServerCertVerified, ServerCertVerifier},
ServerName,
client::danger::{
DangerousClientConfigBuilder,
HandshakeSignatureValid,
ServerCertVerified,
ServerCertVerifier,
},
crypto::{self, verify_tls12_signature, verify_tls13_signature},
pki_types::{CertificateDer, ServerName, UnixTime},
ClientConfig,
DigitallySignedStruct,
SignatureScheme,
};
use std::{
ops::{Deref, DerefMut},
sync::Arc,
time::{Duration, SystemTime},
time::Duration,
};
use tracing::error;
use url::Url;

@@ -312,10 +321,11 @@ pub fn diesel_option_overwrite_to_url_create(opt: &Option<String>) -> LemmyResul

fn establish_connection(config: &str) -> BoxFuture<ConnectionResult<AsyncPgConnection>> {
let fut = async {
let rustls_config = rustls::ClientConfig::builder()
.with_safe_defaults()
.with_custom_certificate_verifier(Arc::new(NoCertVerifier {}))
.with_no_client_auth();
let rustls_config = DangerousClientConfigBuilder {
cfg: ClientConfig::builder(),
}
.with_custom_certificate_verifier(Arc::new(NoCertVerifier {}))
.with_no_client_auth();

let tls = tokio_postgres_rustls::MakeRustlsConnect::new(rustls_config);
let (client, conn) = tokio_postgres::connect(config, tls)

@@ -338,21 +348,55 @@ fn establish_connection(config: &str) -> BoxFuture<ConnectionResult<AsyncPgConne
fut.boxed()
}

#[derive(Debug)]
struct NoCertVerifier {}

impl ServerCertVerifier for NoCertVerifier {
fn verify_server_cert(
&self,
_end_entity: &rustls::Certificate,
_intermediates: &[rustls::Certificate],
_end_entity: &CertificateDer,
_intermediates: &[CertificateDer],
_server_name: &ServerName,
_scts: &mut dyn Iterator<Item = &[u8]>,
_ocsp_response: &[u8],
_now: SystemTime,
_ocsp: &[u8],
_now: UnixTime,
) -> Result<ServerCertVerified, rustls::Error> {
// Will verify all (even invalid) certs without any checks (sslmode=require)
Ok(ServerCertVerified::assertion())
}

fn verify_tls12_signature(
&self,
message: &[u8],
cert: &CertificateDer,
dss: &DigitallySignedStruct,
) -> Result<HandshakeSignatureValid, rustls::Error> {
verify_tls12_signature(
message,
cert,
dss,
&crypto::ring::default_provider().signature_verification_algorithms,
)
}

fn verify_tls13_signature(
&self,
message: &[u8],
cert: &CertificateDer,
dss: &DigitallySignedStruct,
) -> Result<HandshakeSignatureValid, rustls::Error> {
verify_tls13_signature(
message,
cert,
dss,
&crypto::ring::default_provider().signature_verification_algorithms,
)
}

fn supported_verify_schemes(&self) -> Vec<SignatureScheme> {
crypto::ring::default_provider()
.signature_verification_algorithms
.supported_schemes()
}
}

pub async fn build_db_pool() -> LemmyResult<ActualDbPool> {
@@ -220,8 +220,7 @@ fn queries<'a>() -> Queries<
query = query.filter(
comment::content
.ilike(fuzzy_search(&search_term))
.and(comment::removed.eq(false))
.and(comment::deleted.eq(false)),
.and(not(comment::removed.or(comment::deleted))),
);
};

@@ -265,10 +264,13 @@ fn queries<'a>() -> Queries<
.then_order_by(is_saved(person_id_join).desc());
}

if options.liked_only {
query = query.filter(score(person_id_join).eq(1));
} else if options.disliked_only {
query = query.filter(score(person_id_join).eq(-1));
if let Some(my_id) = my_person_id {
let not_creator_filter = comment::creator_id.ne(my_id);
if options.liked_only {
query = query.filter(not_creator_filter).filter(score(my_id).eq(1));
} else if options.disliked_only {
query = query.filter(not_creator_filter).filter(score(my_id).eq(-1));
}
}

if !options
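A small sketch of the new rule in plain Rust (not Lemmy or Diesel code; the names are made up): liked_only and disliked_only now also require that the viewer is not the creator, so your votes on your own items never show up in these listings.

// The viewer's id and per-row vote are stand-ins for the joined Diesel columns.
struct CommentRow {
    creator_id: i32,
    my_score: i32, // the viewing user's vote on this comment
}

fn liked_only(rows: &[CommentRow], my_id: i32) -> Vec<&CommentRow> {
    rows.iter()
        .filter(|r| r.creator_id != my_id) // never list things you created yourself
        .filter(|r| r.my_score == 1)       // only items you upvoted
        .collect()
}

fn main() {
    let rows = vec![
        CommentRow { creator_id: 1, my_score: 1 },  // my own comment, upvoted
        CommentRow { creator_id: 2, my_score: 1 },  // someone else's, upvoted
        CommentRow { creator_id: 3, my_score: -1 }, // someone else's, downvoted
    ];
    // Viewing as person 1: only the second row qualifies.
    assert_eq!(liked_only(&rows, 1).len(), 1);
}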
@@ -683,8 +685,10 @@ mod tests {
.await?;

assert_eq!(
expected_comment_view_no_person,
read_comment_views_no_person[0]
&expected_comment_view_no_person,
read_comment_views_no_person
.first()
.ok_or(LemmyErrorType::CouldntFindComment)?
);

let read_comment_views_with_person = CommentQuery {
@@ -715,18 +719,45 @@ mod tests {
// Make sure block set the creator blocked
assert!(read_comment_from_blocked_person.creator_blocked);

cleanup(data, pool).await
}

#[tokio::test]
#[serial]
async fn test_liked_only() -> LemmyResult<()> {
let pool = &build_db_pool_for_tests().await;
let pool = &mut pool.into();
let data = init_data(pool).await;

// Unblock sara first
let timmy_unblocks_sara_form = PersonBlockForm {
person_id: data.timmy_local_user_view.person.id,
target_id: data.inserted_sara_person.id,
};
PersonBlock::unblock(pool, &timmy_unblocks_sara_form).await?;

// Like a new comment
let comment_like_form = CommentLikeForm {
comment_id: data.inserted_comment_1.id,
post_id: data.inserted_post.id,
person_id: data.timmy_local_user_view.person.id,
score: 1,
};
CommentLike::like(pool, &comment_like_form).await.unwrap();

let read_liked_comment_views = CommentQuery {
local_user: (Some(&data.timmy_local_user_view)),
liked_only: (true),
..Default::default()
}
.list(pool)
.await?;
.await?
.into_iter()
.map(|c| c.comment.content)
.collect::<Vec<String>>();

assert_eq!(
expected_comment_view_with_person,
read_liked_comment_views[0]
);
// Shouldn't include your own post, only other peoples
assert_eq!(data.inserted_comment_1.content, read_liked_comment_views[0]);

assert_length!(1, read_liked_comment_views);
@@ -836,7 +867,7 @@ mod tests {
// change user lang to finnish, should only show one post in finnish and one undetermined
let finnish_id = Language::read_id_from_code(pool, Some("fi"))
.await?
.unwrap();
.ok_or(LemmyErrorType::LanguageNotAllowed)?;
LocalUserLanguage::update(
pool,
vec![finnish_id],
@@ -856,7 +887,10 @@ mod tests {
assert!(finnish_comment.is_some());
assert_eq!(
data.inserted_comment_2.content,
finnish_comment.unwrap().comment.content
finnish_comment
.ok_or(LemmyErrorType::CouldntFindComment)?
.comment
.content
);

// now show all comments with undetermined language (which is the default value)
@@ -396,11 +396,13 @@ fn queries<'a>() -> Queries<

if let Some(search_term) = &options.search_term {
let searcher = fuzzy_search(search_term);
query = query.filter(
post::name
.ilike(searcher.clone())
.or(post::body.ilike(searcher)),
);
query = query
.filter(
post::name
.ilike(searcher.clone())
.or(post::body.ilike(searcher)),
)
.filter(not(post::removed.or(post::deleted)));
}

// If there is a content warning, show nsfw content by default.
@@ -450,11 +452,12 @@ fn queries<'a>() -> Queries<
}
}

if let Some(person_id) = my_person_id {
if let Some(my_id) = my_person_id {
let not_creator_filter = post_aggregates::creator_id.ne(my_id);
if options.liked_only {
query = query.filter(score(person_id).eq(1));
query = query.filter(not_creator_filter).filter(score(my_id).eq(1));
} else if options.disliked_only {
query = query.filter(score(person_id).eq(-1));
query = query.filter(not_creator_filter).filter(score(my_id).eq(-1));
}
};
@@ -1119,6 +1122,36 @@ mod tests {
.await?;
assert_eq!(vec![expected_post_with_upvote], read_post_listing);

let like_removed =
PostLike::remove(pool, data.local_user_view.person.id, data.inserted_post.id).await?;
assert_eq!(1, like_removed);
cleanup(data, pool).await
}

#[tokio::test]
#[serial]
async fn post_listing_liked_only() -> LemmyResult<()> {
let pool = &build_db_pool().await?;
let pool = &mut pool.into();
let data = init_data(pool).await?;

// Like both the bot post, and your own
// The liked_only should not show your own post
let post_like_form = PostLikeForm {
post_id: data.inserted_post.id,
person_id: data.local_user_view.person.id,
score: 1,
};
PostLike::like(pool, &post_like_form).await?;

let bot_post_like_form = PostLikeForm {
post_id: data.inserted_bot_post.id,
person_id: data.local_user_view.person.id,
score: 1,
};
PostLike::like(pool, &bot_post_like_form).await?;

// Read the liked only
let read_liked_post_listing = PostQuery {
community_id: Some(data.inserted_community.id),
liked_only: true,
@@ -1126,7 +1159,9 @@ mod tests {
}
.list(&data.site, pool)
.await?;
assert_eq!(read_post_listing, read_liked_post_listing);

// This should only include the bot post, not the one you created
assert_eq!(vec![POST_BY_BOT], names(&read_liked_post_listing));

let read_disliked_post_listing = PostQuery {
community_id: Some(data.inserted_community.id),
@@ -1135,11 +1170,10 @@ mod tests {
}
.list(&data.site, pool)
.await?;

// Should be no posts
assert_eq!(read_disliked_post_listing, vec![]);

let like_removed =
PostLike::remove(pool, data.local_user_view.person.id, data.inserted_post.id).await?;
assert_eq!(1, like_removed);
cleanup(data, pool).await
}
@@ -1552,7 +1586,7 @@ mod tests {
assert!(
&post_listings_show_hidden
.first()
.expect("first post should exist")
.ok_or(LemmyErrorType::CouldntFindPost)?
.hidden
);
@@ -78,7 +78,7 @@ openssl = { version = "0.10.64", optional = true }
html2text = { version = "0.6.0", optional = true }
deser-hjson = { version = "2.2.4", optional = true }
smart-default = { version = "0.7.1", optional = true }
lettre = { version = "0.11.6", features = [
lettre = { version = "0.11.7", features = [
"tokio1",
"tokio1-native-tls",
], optional = true }
@@ -99,8 +99,6 @@ pub enum LemmyErrorType {
PersonIsBannedFromSite(String),
InvalidVoteValue,
PageDoesNotSpecifyCreator,
PageDoesNotSpecifyGroup,
NoCommunityFoundInCc,
NoEmailSetup,
LocalSiteNotSetup,
EmailSmtpServerNeedsAPort,

@@ -176,6 +174,8 @@ pub enum LemmyErrorType {
InvalidUnixTime,
InvalidBotAction,
CantBlockLocalInstance,
UrlWithoutDomain,
InboxTimeout,
Unknown(String),
}
@@ -1 +1 @@
Subproject commit 866e4056656755f7b31e20094b46391e6931e3e7
Subproject commit a4681f70a4ddf077951ed2dcc8cf90bb243d4828
@@ -114,6 +114,8 @@ services:
"-c",
"auto_explain.log_analyze=true",
"-c",
"auto_explain.log_triggers=true",
"-c",
"track_activity_query_size=1048576",
]
ports:
@@ -0,0 +1,3 @@
ALTER TABLE community
ALTER COLUMN followers_url SET NOT NULL;

@@ -0,0 +1,3 @@
ALTER TABLE community
ALTER COLUMN followers_url DROP NOT NULL;
@@ -4,39 +4,39 @@ set -e
echo "Do not stop in the middle of this upgrade, wait until you see the message: Upgrade complete."

echo "Stopping lemmy and all services..."
sudo docker-compose stop
sudo docker compose stop

echo "Make sure postgres is started..."
sudo docker-compose up -d postgres
sudo docker compose up -d postgres
echo "Waiting..."
sleep 20s

echo "Exporting the Database to 15_16.dump.sql ..."
sudo docker-compose exec -T postgres pg_dumpall -c -U lemmy > 15_16_dump.sql
sudo docker compose exec -T postgres pg_dumpall -c -U lemmy | sudo tee 15_16_dump.sql > /dev/null
echo "Done."

echo "Stopping postgres..."
sudo docker-compose stop postgres
sudo docker compose stop postgres
echo "Waiting..."
sleep 20s

echo "Removing the old postgres folder"
sudo rm -rf volumes/postgres

echo "Updating docker-compose to use postgres version 16."
sed -i "s/image: postgres:.*/image: postgres:16-alpine/" ./docker-compose.yml
echo "Updating docker compose to use postgres version 16."
sudo sed -i "s/image: .*postgres:.*/image: docker.io\/postgres:16-alpine/" ./docker-compose.yml

echo "Starting up new postgres..."
sudo docker-compose up -d postgres
sudo docker compose up -d postgres
echo "Waiting..."
sleep 20s

echo "Importing the database...."
cat 15_16_dump.sql | sudo docker-compose exec -T postgres psql -U lemmy
sudo cat 15_16_dump.sql | sudo docker compose exec -T postgres psql -U lemmy
echo "Done."

echo "Starting up lemmy..."
sudo docker-compose up -d
sudo docker compose up -d

echo "A copy of your old database is at 15_16.dump.sql . You can delete this file if the upgrade went smoothly."
echo "Upgrade complete."
@@ -160,10 +160,10 @@ pub async fn start_lemmy_server(args: CmdArgs) -> LemmyResult<()> {
rate_limit_cell.clone(),
);

if !args.disable_scheduled_tasks {
let scheduled_tasks = (!args.disable_scheduled_tasks).then(|| {
// Schedules various cleanup tasks for the DB
let _scheduled_tasks = tokio::task::spawn(scheduled_tasks::setup(context.clone()));
}
tokio::task::spawn(scheduled_tasks::setup(context.clone()))
});

if let Some(prometheus) = SETTINGS.prometheus.clone() {
serve_prometheus(prometheus, context.clone())?;
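A minimal sketch of the `bool::then` pattern used above (the flag name and task body are placeholders, not Lemmy code): the closure only runs when the bool is true, so the join handle becomes an Option that the shutdown path can later check alongside the other optional services.

#[tokio::main]
async fn main() {
    let disable_scheduled_tasks = false; // stand-in for the CLI flag

    // `bool::then` evaluates the closure only when the bool is true,
    // so the JoinHandle exists only when the feature is enabled.
    let scheduled_tasks = (!disable_scheduled_tasks).then(|| {
        tokio::task::spawn(async {
            // periodic cleanup work would run here
        })
    });

    // Later, shutdown handling can include it in the `is_some()` checks.
    assert!(scheduled_tasks.is_some());
}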
|
@ -218,7 +218,7 @@ pub async fn start_lemmy_server(args: CmdArgs) -> LemmyResult<()> {
|
|||
let mut interrupt = tokio::signal::unix::signal(SignalKind::interrupt())?;
|
||||
let mut terminate = tokio::signal::unix::signal(SignalKind::terminate())?;
|
||||
|
||||
if server.is_some() || federate.is_some() {
|
||||
if server.is_some() || federate.is_some() || scheduled_tasks.is_some() {
|
||||
tokio::select! {
|
||||
_ = tokio::signal::ctrl_c() => {
|
||||
tracing::warn!("Received ctrl-c, shutting down gracefully...");
|
||||
|
|