use crate::{
  fetcher::object_id::ObjectId,
  objects::{comment::Note, community::Group, person::Person as ApubPerson, post::Page, FromApub},
};
use activitystreams::chrono::NaiveDateTime;
use anyhow::anyhow;
use diesel::PgConnection;
use itertools::Itertools;
use lemmy_api_common::blocking;
use lemmy_apub_lib::{
  traits::ApubObject,
  webfinger::{webfinger_resolve_actor, WebfingerType},
};
use lemmy_db_queries::{
  source::{community::Community_, person::Person_},
  DbPool,
};
use lemmy_db_schema::source::{comment::Comment, community::Community, person::Person, post::Post};
use lemmy_utils::LemmyError;
use lemmy_websocket::LemmyContext;
use serde::Deserialize;
use url::Url;

/// Attempt to parse the query as URL, and fetch an ActivityPub object from it.
///
/// Some working examples for use with the `docker/federation/` setup:
/// http://lemmy_alpha:8541/c/main, or !main@lemmy_alpha:8541
/// http://lemmy_beta:8551/u/lemmy_alpha, or @lemmy_alpha@lemmy_beta:8551
/// http://lemmy_gamma:8561/post/3
/// http://lemmy_delta:8571/comment/2
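///
/// A hypothetical caller (assuming a `LemmyContext` named `context` is in scope) might use it
/// like this; only a sketch, not an example taken from the real request handlers:
/// ```ignore
/// let found = search_by_apub_id("!main@lemmy_alpha:8541", &context).await?;
/// if let SearchableObjects::Community(community) = found {
///   // use the locally stored community
/// }
/// ```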
pub async fn search_by_apub_id(
  query: &str,
  context: &LemmyContext,
) -> Result<SearchableObjects, LemmyError> {
  let query_url = match Url::parse(query) {
    Ok(u) => u,
    Err(_) => {
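      // Not a URL: treat the query as a webfinger-style identifier, where the first
      // character selects the actor type ("@" for a person, "!" for a community).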
      let (kind, name) = query.split_at(1);
      let kind = match kind {
        "@" => WebfingerType::Person,
        "!" => WebfingerType::Group,
        _ => return Err(anyhow!("invalid query").into()),
      };
      // remote actor, use webfinger to resolve url
      if name.contains('@') {
        let (name, domain) = name.splitn(2, '@').collect_tuple().expect("invalid query");
        webfinger_resolve_actor(
          name,
          domain,
          kind,
          context.client(),
          context.settings().get_protocol_string(),
        )
        .await?
      }
      // local actor, read from database and return
      else {
        return find_local_actor_by_name(name, kind, context.pool()).await;
      }
    }
  };

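  // At this point the query is a URL (given directly or resolved via webfinger).
  // Dereference it as an ActivityPub object; the counter guards against triggering an
  // excessive number of HTTP fetches for a single search.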
  let request_counter = &mut 0;
  ObjectId::new(query_url)
    .dereference(context, request_counter)
    .await
}

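/// Look up a local community or person by name, reading it directly from the database
/// without making any federation request.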
async fn find_local_actor_by_name(
  name: &str,
  kind: WebfingerType,
  pool: &DbPool,
) -> Result<SearchableObjects, LemmyError> {
  let name: String = name.into();
  Ok(match kind {
    WebfingerType::Group => SearchableObjects::Community(
      blocking(pool, move |conn| Community::read_from_name(conn, &name)).await??,
    ),
    WebfingerType::Person => SearchableObjects::Person(
      blocking(pool, move |conn| Person::find_by_name(conn, &name)).await??,
    ),
  })
}

/// The types of ActivityPub objects that can be fetched directly by searching for their ID.
#[derive(Debug)]
pub enum SearchableObjects {
  Person(Person),
  Community(Community),
  Post(Post),
  Comment(Comment),
}

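/// The raw ActivityPub representations of the searchable objects. With `#[serde(untagged)]`,
/// deserialization tries the variants in declaration order (Group, Person, Page, Note) and
/// uses the first one that matches the incoming JSON.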
#[derive(Deserialize)]
#[serde(untagged)]
pub enum SearchableApubTypes {
  Group(Group),
  Person(ApubPerson),
  Page(Page),
  Note(Note),
}

impl ApubObject for SearchableObjects {
  type DataType = PgConnection;

  fn last_refreshed_at(&self) -> Option<NaiveDateTime> {
    match self {
      SearchableObjects::Person(p) => p.last_refreshed_at(),
      SearchableObjects::Community(c) => c.last_refreshed_at(),
      SearchableObjects::Post(p) => p.last_refreshed_at(),
      SearchableObjects::Comment(c) => c.last_refreshed_at(),
    }
  }

  // TODO: this is inefficient, because if the object is not in local db, it will run 4 db queries
  //       before finally returning an error. it would be nice if we could check all 4 tables in
  //       a single query.
  //       we could skip this and always return an error, but then it would always fetch objects
  //       over http, and not be able to mark objects as deleted that were deleted by remote server.
  fn read_from_apub_id(conn: &PgConnection, object_id: Url) -> Result<Option<Self>, LemmyError> {
    let c = Community::read_from_apub_id(conn, object_id.clone())?;
    if let Some(c) = c {
      return Ok(Some(SearchableObjects::Community(c)));
    }
    let p = Person::read_from_apub_id(conn, object_id.clone())?;
    if let Some(p) = p {
      return Ok(Some(SearchableObjects::Person(p)));
    }
    let p = Post::read_from_apub_id(conn, object_id.clone())?;
    if let Some(p) = p {
      return Ok(Some(SearchableObjects::Post(p)));
    }
    let c = Comment::read_from_apub_id(conn, object_id)?;
    if let Some(c) = c {
      return Ok(Some(SearchableObjects::Comment(c)));
    }
    Ok(None)
  }

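  // Deletion just delegates to the wrapped object's own delete implementation.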
  fn delete(self, conn: &Self::DataType) -> Result<(), LemmyError> {
    match self {
      SearchableObjects::Person(p) => p.delete(conn),
      SearchableObjects::Community(c) => c.delete(conn),
      SearchableObjects::Post(p) => p.delete(conn),
      SearchableObjects::Comment(c) => c.delete(conn),
    }
  }
}

#[async_trait::async_trait(?Send)]
impl FromApub for SearchableObjects {
  type ApubType = SearchableApubTypes;

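  // Convert the fetched ActivityPub object into the matching local type, dispatching on
  // whichever apub variant was deserialized.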
  async fn from_apub(
    apub: &Self::ApubType,
    context: &LemmyContext,
    ed: &Url,
    rc: &mut i32,
  ) -> Result<Self, LemmyError> {
    use SearchableApubTypes as SAT;
    use SearchableObjects as SO;
    Ok(match apub {
      SAT::Group(g) => SO::Community(Community::from_apub(g, context, ed, rc).await?),
      SAT::Person(p) => SO::Person(Person::from_apub(p, context, ed, rc).await?),
      SAT::Page(p) => SO::Post(Post::from_apub(p, context, ed, rc).await?),
      SAT::Note(n) => SO::Comment(Comment::from_apub(n, context, ed, rc).await?),
    })
  }
}