refactor: destructure tuples to enhance readability (#5151)

Integral 2024-10-31 20:12:24 +08:00 committed by GitHub
parent df07d8e31c
commit 8f88dda28f
7 changed files with 32 additions and 32 deletions
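
The change is the same across all seven files: positional tuple access (.0, .1, and so on) is replaced by a destructuring pattern at the point where the tuple is bound. A rough standalone sketch of the idea, with illustrative names that are not taken from the diff:

    fn main() {
      let pairs = vec![("1 week", 7u32), ("1 month", 30)];

      // Before: positional access forces the reader to remember what .0 and .1 mean.
      for p in &pairs {
        println!("{} is {} days", p.0, p.1);
      }

      // After: the pattern names each element right where the tuple is bound.
      for (name, days) in &pairs {
        println!("{name} is {days} days");
      }
    }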


@@ -14,9 +14,9 @@ async fn generate_urlset(
 ) -> LemmyResult<UrlSet> {
   let urls = posts
     .into_iter()
-    .map_while(|post| {
-      Url::builder(post.0.to_string())
-        .last_modified(post.1.into())
+    .map_while(|(url, date_time)| {
+      Url::builder(url.to_string())
+        .last_modified(date_time.into())
         .build()
         .ok()
     })
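
In this hunk the tuple is unpacked directly in the closure's parameter list, so the body never needs an intermediate binding. A minimal sketch of the same shape, assuming a plain Vec of (url, timestamp) pairs instead of the actual post and Url builder types:

    // Hypothetical stand-in for the sitemap case: the closure parameter is a
    // tuple pattern, and filter_map plays the role of the builder + .ok() chain.
    fn collect_urls(posts: Vec<(String, i64)>) -> Vec<String> {
      posts
        .into_iter()
        .filter_map(|(url, last_modified)| {
          if url.is_empty() {
            None
          } else {
            Some(format!("{url}?lastmod={last_modified}"))
          }
        })
        .collect()
    }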


@@ -247,13 +247,13 @@ pub(crate) mod tests {
   }
 
   async fn cleanup(
-    data: (ApubPerson, ApubCommunity, ApubPost, ApubSite),
+    (person, community, post, site): (ApubPerson, ApubCommunity, ApubPost, ApubSite),
     context: &LemmyContext,
   ) -> LemmyResult<()> {
-    Post::delete(&mut context.pool(), data.2.id).await?;
-    Community::delete(&mut context.pool(), data.1.id).await?;
-    Person::delete(&mut context.pool(), data.0.id).await?;
-    Site::delete(&mut context.pool(), data.3.id).await?;
+    Post::delete(&mut context.pool(), post.id).await?;
+    Community::delete(&mut context.pool(), community.id).await?;
+    Person::delete(&mut context.pool(), person.id).await?;
+    Site::delete(&mut context.pool(), site.id).await?;
     LocalSite::delete(&mut context.pool()).await?;
     Ok(())
   }
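
The rewritten signature relies on the fact that Rust accepts any irrefutable pattern as a function parameter, including a tuple pattern, so the binding names replace data.0 through data.3. A small self-contained sketch with placeholder types rather than the real ApubPerson and ApubSite:

    struct Person { id: u64 }
    struct Site { id: u64 }

    // The parameter itself is a tuple pattern, so the body can say person.id
    // and site.id instead of data.0.id and data.1.id.
    fn cleanup((person, site): (Person, Site)) {
      println!("deleting person {} and site {}", person.id, site.id);
    }

    fn main() {
      cleanup((Person { id: 1 }, Site { id: 2 }));
    }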


@@ -285,9 +285,12 @@ pub(crate) mod tests {
     Ok(())
   }
 
-  async fn cleanup(data: (ApubPerson, ApubSite), context: &LemmyContext) -> LemmyResult<()> {
-    DbPerson::delete(&mut context.pool(), data.0.id).await?;
-    Site::delete(&mut context.pool(), data.1.id).await?;
+  async fn cleanup(
+    (person, site): (ApubPerson, ApubSite),
+    context: &LemmyContext,
+  ) -> LemmyResult<()> {
+    DbPerson::delete(&mut context.pool(), person.id).await?;
+    Site::delete(&mut context.pool(), site.id).await?;
     Ok(())
   }
 }


@@ -186,12 +186,12 @@ mod tests {
   }
 
   async fn cleanup(
-    data: (ApubPerson, ApubPerson, ApubSite),
+    (person1, person2, site): (ApubPerson, ApubPerson, ApubSite),
     context: &Data<LemmyContext>,
   ) -> LemmyResult<()> {
-    Person::delete(&mut context.pool(), data.0.id).await?;
-    Person::delete(&mut context.pool(), data.1.id).await?;
-    Site::delete(&mut context.pool(), data.2.id).await?;
+    Person::delete(&mut context.pool(), person1.id).await?;
+    Person::delete(&mut context.pool(), person2.id).await?;
+    Site::delete(&mut context.pool(), site.id).await?;
     Ok(())
   }
 


@@ -76,16 +76,16 @@ impl CustomEmojiView {
   fn from_tuple_to_vec(items: Vec<CustomEmojiTuple>) -> Vec<Self> {
     let mut result = Vec::new();
     let mut hash: HashMap<CustomEmojiId, Vec<CustomEmojiKeyword>> = HashMap::new();
-    for item in &items {
-      let emoji_id: CustomEmojiId = item.0.id;
+    for (emoji, keyword) in &items {
+      let emoji_id: CustomEmojiId = emoji.id;
       if let std::collections::hash_map::Entry::Vacant(e) = hash.entry(emoji_id) {
         e.insert(Vec::new());
         result.push(CustomEmojiView {
-          custom_emoji: item.0.clone(),
+          custom_emoji: emoji.clone(),
           keywords: Vec::new(),
         })
       }
-      if let Some(item_keyword) = &item.1 {
+      if let Some(item_keyword) = &keyword {
         if let Some(keywords) = hash.get_mut(&emoji_id) {
           keywords.push(item_keyword.clone())
         }
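
Because the loop runs over &items, each element is a reference to a tuple; the (emoji, keyword) pattern still applies through match ergonomics and binds references to the two fields. A rough standalone version with placeholder types instead of the emoji structs, and entry().or_default() standing in for the explicit Vacant handling:

    use std::collections::HashMap;

    fn main() {
      // Stand-ins for (CustomEmoji, Option<CustomEmojiKeyword>) tuples.
      let items: Vec<(u32, Option<String>)> =
        vec![(1, Some("smile".into())), (1, Some("grin".into())), (2, None)];

      let mut hash: HashMap<u32, Vec<String>> = HashMap::new();
      // &items yields &(u32, Option<String>); the tuple pattern binds
      // emoji as &u32 and keyword as &Option<String>.
      for (emoji, keyword) in &items {
        if let Some(k) = keyword {
          hash.entry(*emoji).or_default().push(k.clone());
        }
      }
      println!("{hash:?}");
    }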


@@ -42,13 +42,10 @@ pub fn markdown_rewrite_image_links(mut src: String) -> (String, Vec<Url>) {
 pub fn markdown_handle_title(src: &str, start: usize, end: usize) -> (&str, Option<&str>) {
   let content = src.get(start..end).unwrap_or_default();
   // necessary for custom emojis which look like `![name](url "title")`
-  let (url, extra) = if content.contains(' ') {
-    let split = content.split_once(' ').expect("split is valid");
-    (split.0, Some(split.1))
-  } else {
-    (content, None)
-  };
-  (url, extra)
+  match content.split_once(' ') {
+    Some((a, b)) => (a, Some(b)),
+    _ => (content, None),
+  }
 }
 
 pub fn markdown_find_links(src: &str) -> Vec<(usize, usize)> {
@@ -61,9 +58,9 @@ fn find_urls<T: NodeValue + UrlAndTitle>(src: &str) -> Vec<(usize, usize)> {
   let mut links_offsets = vec![];
   ast.walk(|node, _depth| {
     if let Some(image) = node.cast::<T>() {
-      let node_offsets = node.srcmap.expect("srcmap is none").get_byte_offsets();
-      let start_offset = node_offsets.1 - image.url_len() - 1 - image.title_len();
-      let end_offset = node_offsets.1 - 1;
+      let (_, node_offset) = node.srcmap.expect("srcmap is none").get_byte_offsets();
+      let start_offset = node_offset - image.url_len() - 1 - image.title_len();
+      let end_offset = node_offset - 1;
       links_offsets.push((start_offset, end_offset));
     }
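
The contains() check followed by expect() is replaced by a single match on the Option<(&str, &str)> that split_once returns, destructuring the tuple in the Some arm; the get_byte_offsets change applies the same idea to a plain let, discarding the unused first element with _. A standalone illustration of the split_once half, not the Lemmy function itself:

    // split_once returns Option<(&str, &str)>, so one match covers both the
    // "url plus title" and "url only" cases without a separate contains() check.
    fn handle_title(content: &str) -> (&str, Option<&str>) {
      match content.split_once(' ') {
        Some((url, title)) => (url, Some(title)),
        None => (content, None),
      }
    }

    fn main() {
      assert_eq!(
        handle_title("https://example.com \"title\""),
        ("https://example.com", Some("\"title\""))
      );
      assert_eq!(handle_title("https://example.com"), ("https://example.com", None));
    }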


@@ -393,10 +393,10 @@ async fn active_counts(pool: &mut DbPool<'_>) {
     ("6 months", "half_year"),
   ];
 
-  for i in &intervals {
+  for (full_form, abbr) in &intervals {
     let update_site_stmt = format!(
       "update site_aggregates set users_active_{} = (select * from site_aggregates_activity('{}')) where site_id = 1",
-      i.1, i.0
+      abbr, full_form
     );
     sql_query(update_site_stmt)
       .execute(&mut conn)
@@ -404,7 +404,7 @@ async fn active_counts(pool: &mut DbPool<'_>) {
       .inspect_err(|e| error!("Failed to update site stats: {e}"))
       .ok();
 
-    let update_community_stmt = format!("update community_aggregates ca set users_active_{} = mv.count_ from community_aggregates_activity('{}') mv where ca.community_id = mv.community_id_", i.1, i.0);
+    let update_community_stmt = format!("update community_aggregates ca set users_active_{} = mv.count_ from community_aggregates_activity('{}') mv where ca.community_id = mv.community_id_", abbr, full_form);
     sql_query(update_community_stmt)
       .execute(&mut conn)
       .await
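
With the elements named in the loop header, it is obvious which value fills which placeholder in the SQL string, where the old code passed i.1 before i.0. A trimmed-down sketch; the SQL below is a placeholder, not the real statement:

    fn main() {
      let intervals = [("1 week", "week"), ("1 month", "month"), ("6 months", "half_year")];
      for (full_form, abbr) in &intervals {
        // Named bindings make it clear which value fills which placeholder.
        let stmt = format!(
          "update site_aggregates set users_active_{abbr} = activity('{full_form}')"
        );
        println!("{stmt}");
      }
    }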