diff --git a/Cargo.lock b/Cargo.lock
index d951d3bbd..a34496113 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1072,20 +1072,6 @@ dependencies = [
  "url",
 ]
 
-[[package]]
-name = "clearurls"
-version = "0.0.4"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e291c00af89ac0a5b400d9ba46a682e38015ae3cd8926dbbe85b3b864d550be3"
-dependencies = [
- "linkify",
- "percent-encoding",
- "regex",
- "serde",
- "serde_json",
- "url",
-]
-
 [[package]]
 name = "clokwerk"
 version = "0.4.0"
diff --git a/api_tests/src/comment.spec.ts b/api_tests/src/comment.spec.ts
index 8c3a23ab5..54c3c8bc4 100644
--- a/api_tests/src/comment.spec.ts
+++ b/api_tests/src/comment.spec.ts
@@ -858,3 +858,26 @@ test("Dont send a comment reply to a blocked community", async () => {
   blockRes = await blockCommunity(beta, newCommunityId, false);
   expect(blockRes.blocked).toBe(false);
 });
+
+/// Fetching a deeply nested comment can lead to a stack overflow, as all parent comments are also
+/// fetched recursively. Ensure that it works properly.
+test("Fetch a deeply nested comment", async () => {
+  let lastComment;
+  for (let i = 0; i < 50; i++) {
+    let commentRes = await createComment(
+      alpha,
+      postOnAlphaRes.post_view.post.id,
+      lastComment?.comment_view.comment.id,
+    );
+    expect(commentRes.comment_view.comment).toBeDefined();
+    lastComment = commentRes;
+  }
+
+  let betaComment = await resolveComment(
+    beta,
+    lastComment!.comment_view.comment,
+  );
+
+  expect(betaComment!.comment!.comment).toBeDefined();
+  expect(betaComment?.comment?.post).toBeDefined();
+});
diff --git a/crates/api_crud/src/comment/create.rs b/crates/api_crud/src/comment/create.rs
index 49de9d5de..ed9172c7b 100644
--- a/crates/api_crud/src/comment/create.rs
+++ b/crates/api_crud/src/comment/create.rs
@@ -30,10 +30,9 @@ use lemmy_db_views::structs::{LocalUserView, PostView};
 use lemmy_utils::{
   error::{LemmyErrorExt, LemmyErrorType, LemmyResult},
   utils::{mention::scrape_text_for_mentions, validation::is_valid_body_field},
+  MAX_COMMENT_DEPTH_LIMIT,
 };
 
-const MAX_COMMENT_DEPTH_LIMIT: usize = 100;
-
 #[tracing::instrument(skip(context))]
 pub async fn create_comment(
   data: Json<CreateComment>,
diff --git a/crates/apub/src/protocol/objects/note.rs b/crates/apub/src/protocol/objects/note.rs
index b0ae00037..f4cc506cc 100644
--- a/crates/apub/src/protocol/objects/note.rs
+++ b/crates/apub/src/protocol/objects/note.rs
@@ -20,10 +20,9 @@ use lemmy_db_schema::{
   source::{community::Community, post::Post},
   traits::Crud,
 };
-use lemmy_utils::{error::LemmyResult, LemmyErrorType};
+use lemmy_utils::{error::LemmyResult, LemmyErrorType, MAX_COMMENT_DEPTH_LIMIT};
 use serde::{Deserialize, Serialize};
 use serde_with::skip_serializing_none;
-use std::ops::Deref;
 use url::Url;
 
 #[skip_serializing_none]
@@ -58,9 +57,19 @@ impl Note {
     &self,
     context: &Data<LemmyContext>,
   ) -> LemmyResult<(ApubPost, Option<ApubComment>)> {
-    // Fetch parent comment chain in a box, otherwise it can cause a stack overflow.
-    let parent = Box::pin(self.in_reply_to.dereference(context).await?);
-    match parent.deref() {
+    // We use recursion here to fetch the entire comment chain up to the top-level parent. This is
+    // necessary because we need to know the post and parent comment in order to insert a new
+    // comment. However, it can also lead to a stack overflow when fetching many comments
+    // recursively. To avoid this, we check the request count against the max comment depth, which
+    // based on testing can be handled without risking a stack overflow. This is not a perfect
+    // solution, because in some cases we also have to fetch user profiles and then reach the limit
+    // after only 25 comments or so.
+    // A cleaner solution would be converting the recursion into a loop, but that is tricky.
+    if context.request_count() > MAX_COMMENT_DEPTH_LIMIT as u32 {
+      Err(LemmyErrorType::MaxCommentDepthReached)?;
+    }
+    let parent = self.in_reply_to.dereference(context).await?;
+    match parent {
       PostOrComment::Post(p) => Ok((p.clone(), None)),
       PostOrComment::Comment(c) => {
         let post_id = c.post_id;
diff --git a/crates/utils/src/lib.rs b/crates/utils/src/lib.rs
index 5a5e76d2a..7f0691496 100644
--- a/crates/utils/src/lib.rs
+++ b/crates/utils/src/lib.rs
@@ -29,6 +29,8 @@ pub const CACHE_DURATION_FEDERATION: Duration = Duration::from_secs(60);
 
 pub const CACHE_DURATION_API: Duration = Duration::from_secs(1);
 
+pub const MAX_COMMENT_DEPTH_LIMIT: usize = 50;
+
 #[macro_export]
 macro_rules! location_info {
   () => {
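
Note on the approach (not part of the patch): the fix keeps the recursion and simply bounds its depth rather than converting the traversal into a loop. Below is a minimal, self-contained Rust sketch of that idea; `Comment`, `load_comment`, `fetch_ancestors`, and the in-memory `store` are hypothetical stand-ins for the real federation fetch, where the depth is instead tracked implicitly via `context.request_count()` and compared against `MAX_COMMENT_DEPTH_LIMIT`.

```rust
// Sketch only: bound recursive parent fetching by an explicit depth counter so a
// maliciously deep comment chain returns an error instead of overflowing the stack.
const MAX_COMMENT_DEPTH_LIMIT: usize = 50;

struct Comment {
    id: u32,
    parent_id: Option<u32>,
}

#[derive(Debug)]
enum FetchError {
    MaxCommentDepthReached,
}

// Hypothetical lookup standing in for a remote ActivityPub fetch.
fn load_comment(store: &[Comment], id: u32) -> Option<&Comment> {
    store.iter().find(|c| c.id == id)
}

// Recursively walk up the parent chain, failing once the depth limit is exceeded.
fn fetch_ancestors(store: &[Comment], id: u32, depth: usize) -> Result<Vec<u32>, FetchError> {
    if depth > MAX_COMMENT_DEPTH_LIMIT {
        return Err(FetchError::MaxCommentDepthReached);
    }
    let comment = match load_comment(store, id) {
        Some(c) => c,
        None => return Ok(vec![]),
    };
    let mut chain = vec![comment.id];
    if let Some(parent) = comment.parent_id {
        chain.extend(fetch_ancestors(store, parent, depth + 1)?);
    }
    Ok(chain)
}

fn main() {
    // Build a chain of 60 comments, each replying to the previous one.
    let store: Vec<Comment> = (0..60)
        .map(|i| Comment { id: i, parent_id: if i > 0 { Some(i - 1) } else { None } })
        .collect();
    // Resolving from the deepest comment trips the limit instead of recursing without bound.
    assert!(fetch_ancestors(&store, 59, 0).is_err());
    // A shallow comment still resolves its full ancestor chain (11 comments: ids 10..=0).
    assert_eq!(fetch_ancestors(&store, 10, 0).unwrap().len(), 11);
}
```

The trade-off noted in the code comment applies here too: the limit caps how deep a chain can be resolved, but it prevents unbounded stack growth while keeping the existing recursive structure intact.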