Fixing search result, running clippy.

Dessalines 2025-01-16 15:01:11 -05:00
parent 66a8823068
commit dd66ef57b8
6 changed files with 45 additions and 161 deletions

@@ -40,6 +40,8 @@ use lemmy_db_views::structs::{
PersonView,
PostView,
RegistrationApplicationView,
SearchCombinedPaginationCursor,
SearchCombinedView,
SiteView,
};
use serde::{Deserialize, Serialize};
@@ -51,9 +53,9 @@ use ts_rs::TS;
#[derive(Debug, Serialize, Deserialize, Clone, Default, PartialEq, Eq, Hash)]
#[cfg_attr(feature = "full", derive(TS))]
#[cfg_attr(feature = "full", ts(export))]
/// Searches the site, given a query string, and some optional filters.
/// Searches the site, given a search term, and some optional filters.
pub struct Search {
pub q: String,
pub search_term: Option<String>,
#[cfg_attr(feature = "full", ts(optional))]
pub community_id: Option<CommunityId>,
#[cfg_attr(feature = "full", ts(optional))]
@@ -63,14 +65,11 @@ pub struct Search {
#[cfg_attr(feature = "full", ts(optional))]
pub type_: Option<SearchType>,
#[cfg_attr(feature = "full", ts(optional))]
// TODO
pub sort: Option<PostSortType>,
#[cfg_attr(feature = "full", ts(optional))]
pub listing_type: Option<ListingType>,
#[cfg_attr(feature = "full", ts(optional))]
pub page: Option<i64>,
#[cfg_attr(feature = "full", ts(optional))]
pub limit: Option<i64>,
#[cfg_attr(feature = "full", ts(optional))]
pub title_only: Option<bool>,
#[cfg_attr(feature = "full", ts(optional))]
pub post_url_only: Option<bool>,
@@ -78,19 +77,18 @@ pub struct Search {
pub liked_only: Option<bool>,
#[cfg_attr(feature = "full", ts(optional))]
pub disliked_only: Option<bool>,
#[cfg_attr(feature = "full", ts(optional))]
pub page_cursor: Option<SearchCombinedPaginationCursor>,
#[cfg_attr(feature = "full", ts(optional))]
pub page_back: Option<bool>,
}
#[derive(Debug, Serialize, Deserialize, Clone)]
#[cfg_attr(feature = "full", derive(TS))]
#[cfg_attr(feature = "full", ts(export))]
/// The search response, containing lists of the return type possibilities
// TODO this should be redone as a list of tagged enums
pub struct SearchResponse {
pub type_: SearchType,
pub comments: Vec<CommentView>,
pub posts: Vec<PostView>,
pub communities: Vec<CommunityView>,
pub users: Vec<PersonView>,
pub results: Vec<SearchCombinedView>,
}
#[derive(Debug, Serialize, Deserialize, Clone, Default, PartialEq, Eq, Hash)]
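For orientation, a minimal sketch of building a request against the reworked Search struct above, assuming the remaining fields keep their Default values (import paths follow the ones visible in this diff; the helper function name is illustrative):

use lemmy_api_common::site::Search;
use lemmy_db_schema::{ListingType, SearchType};

// Hypothetical caller of the reworked API: q, page, and limit are gone;
// the first page is requested by leaving the cursor fields at their defaults.
fn first_page_request() -> Search {
    Search {
        search_term: Some("rust".to_string()),
        type_: Some(SearchType::Posts),
        listing_type: Some(ListingType::Local),
        ..Default::default()
    }
}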

@@ -4,15 +4,12 @@ use actix_web::web::{Json, Query};
use lemmy_api_common::{
context::LemmyContext,
site::{Search, SearchResponse},
utils::{check_conflicting_like_filters, check_private_instance, is_admin},
utils::{check_conflicting_like_filters, check_private_instance},
};
use lemmy_db_schema::{source::community::Community, utils::post_to_comment_sort_type, SearchType};
use lemmy_db_schema::source::community::Community;
use lemmy_db_views::{
comment::comment_view::CommentQuery,
community::community_view::CommunityQuery,
person::person_view::PersonQuery,
post::post_view::PostQuery,
structs::{CommunitySortType, LocalUserView, SiteView},
combined::search_combined_view::SearchCombinedQuery,
structs::{LocalUserView, SiteView},
};
use lemmy_utils::error::LemmyResult;
@@ -25,154 +22,43 @@ pub async fn search(
let local_site = SiteView::read_local(&mut context.pool()).await?;
check_private_instance(&local_user_view, &local_site.local_site)?;
check_conflicting_like_filters(data.liked_only, data.disliked_only)?;
let is_admin = local_user_view
.as_ref()
.map(|luv| is_admin(luv).is_ok())
.unwrap_or_default();
let mut posts = Vec::new();
let mut comments = Vec::new();
let mut communities = Vec::new();
let mut users = Vec::new();
// TODO no clean / non-nsfw searching rn
let Query(Search {
q,
community_id,
community_name,
creator_id,
type_,
sort,
listing_type,
page,
limit,
title_only,
post_url_only,
liked_only,
disliked_only,
}) = data;
let q = q.clone();
let search_type = type_.unwrap_or(SearchType::All);
let community_id = if let Some(name) = &community_name {
let community_id = if let Some(name) = &data.community_name {
Some(
resolve_actor_identifier::<ApubCommunity, Community>(name, &context, &local_user_view, false)
.await?,
)
.map(|c| c.id)
} else {
community_id
data.community_id
};
let local_user = local_user_view.as_ref().map(|l| &l.local_user);
let search_term = data.search_term.clone();
check_conflicting_like_filters(liked_only, disliked_only)?;
// parse pagination token
let page_after = if let Some(pa) = &data.page_cursor {
Some(pa.read(&mut context.pool()).await?)
} else {
None
};
let page_back = data.page_back;
let posts_query = PostQuery {
sort,
listing_type,
let results = SearchCombinedQuery {
search_term,
community_id,
creator_id,
local_user,
search_term: Some(q.clone()),
page,
limit,
title_only,
url_only: post_url_only,
liked_only,
disliked_only,
..Default::default()
};
creator_id: data.creator_id,
type_: data.type_,
// TODO add sorts
listing_type: data.listing_type,
title_only: data.title_only,
post_url_only: data.post_url_only,
liked_only: data.liked_only,
disliked_only: data.disliked_only,
page_after,
page_back,
}
.list(&mut context.pool(), &local_user_view)
.await?;
let comment_query = CommentQuery {
sort: sort.map(post_to_comment_sort_type),
listing_type,
search_term: Some(q.clone()),
community_id,
creator_id,
local_user,
page,
limit,
liked_only,
disliked_only,
..Default::default()
};
let community_query = CommunityQuery {
sort: sort.map(CommunitySortType::from),
listing_type,
search_term: Some(q.clone()),
title_only,
local_user,
is_mod_or_admin: is_admin,
page,
limit,
..Default::default()
};
let person_query = PersonQuery {
sort,
search_term: Some(q.clone()),
listing_type,
page,
limit,
};
match search_type {
SearchType::Posts => {
posts = posts_query
.list(&local_site.site, &mut context.pool())
.await?;
}
SearchType::Comments => {
comments = comment_query
.list(&local_site.site, &mut context.pool())
.await?;
}
SearchType::Communities => {
communities = community_query
.list(&local_site.site, &mut context.pool())
.await?;
}
SearchType::Users => {
users = person_query.list(&mut context.pool()).await?;
}
SearchType::All => {
// If the community or creator is included, dont search communities or users
let community_or_creator_included =
community_id.is_some() || community_name.is_some() || creator_id.is_some();
posts = posts_query
.list(&local_site.site, &mut context.pool())
.await?;
comments = comment_query
.list(&local_site.site, &mut context.pool())
.await?;
communities = if community_or_creator_included {
vec![]
} else {
community_query
.list(&local_site.site, &mut context.pool())
.await?
};
users = if community_or_creator_included {
vec![]
} else {
person_query.list(&mut context.pool()).await?
};
}
};
// Return the jwt
Ok(Json(SearchResponse {
type_: search_type,
comments,
posts,
communities,
users,
}))
Ok(Json(SearchResponse { results }))
}
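With the per-type vectors gone, callers branch on the combined view instead. A rough sketch of consuming the new response, assuming SearchCombinedView is an enum with one variant per former result list (the variant name below is a guess, not taken from this diff):

use lemmy_db_views::structs::SearchCombinedView;

// Assumed variant name; the diff only shows that the response is now a single
// Vec<SearchCombinedView> rather than four typed vectors.
fn count_posts(results: &[SearchCombinedView]) -> usize {
    results
        .iter()
        .filter(|r| matches!(r, SearchCombinedView::Post(_)))
        .count()
}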

@@ -160,7 +160,7 @@ async fn try_main() -> LemmyResult<()> {
.list(&site()?, &mut conn.into())
.await?;
if let Some(post_view) = post_views.into_iter().last() {
if let Some(post_view) = post_views.into_iter().next_back() {
println!("👀 getting pagination cursor data for next page");
let cursor_data = PaginationCursor::after_post(&post_view)
.read(&mut conn.into(), None)

@@ -66,7 +66,7 @@ impl LocalImage {
}
pub async fn delete_by_url(pool: &mut DbPool<'_>, url: &DbUrl) -> Result<Self, Error> {
let alias = url.as_str().split('/').last().ok_or(NotFound)?;
let alias = url.as_str().split('/').next_back().ok_or(NotFound)?;
Self::delete_by_alias(pool, alias).await
}
}
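The .last() to .next_back() swaps in this and the surrounding hunks are presumably the clippy part of the commit: on a double-ended iterator such as str::split, .next_back() returns the same element without walking the whole iterator. A small standalone illustration (the URL is made up):

fn main() {
    let url = "https://example.com/pictrs/image/abcd.jpg";
    // Split is a DoubleEndedIterator, so the final segment can be taken
    // directly from the back instead of consuming the rest of the iterator.
    let alias = url.split('/').next_back().unwrap_or_default();
    assert_eq!(alias, url.split('/').last().unwrap_or_default());
    assert_eq!(alias, "abcd.jpg");
}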

@@ -1604,7 +1604,7 @@ mod tests {
listed_post_ids.extend(post_listings.iter().map(|p| p.post.id));
if let Some(p) = post_listings.into_iter().last() {
if let Some(p) = post_listings.into_iter().next_back() {
page_after = Some(p.counts);
} else {
break;

@@ -125,5 +125,5 @@ pub(super) async fn do_get_image(
pub(super) fn file_type(file_type: Option<String>, name: &str) -> String {
file_type
.clone()
.unwrap_or_else(|| name.split('.').last().unwrap_or("jpg").to_string())
.unwrap_or_else(|| name.split('.').next_back().unwrap_or("jpg").to_string())
}