mirror of https://github.com/Nutomic/ibis.git synced 2024-11-21 20:31:09 +00:00

Get rid of unwrap usage in backend

Felix Ableitner 2024-02-27 18:04:39 +01:00
parent d22654261f
commit 6dd1711e11
17 changed files with 48 additions and 40 deletions

View file

@@ -23,6 +23,7 @@ hydrate = ["leptos/hydrate", "leptos_meta/hydrate", "leptos_router/hydrate"]
[lints.clippy]
dbg_macro = "deny"
+unwrap_used = "deny"
[dependencies]
activitypub_federation = { git = "https://github.com/LemmyNet/activitypub-federation-rust.git", branch = "optional-activity-queue", features = [
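
For context on the new lint: with unwrap_used = "deny" in the [lints.clippy] table, every call to Option::unwrap or Result::unwrap becomes a hard cargo clippy error, which is what drives the changes in the rest of this commit. A minimal sketch of the kind of rewrite the lint forces; the function names below are illustrative, not taken from ibis:

    use std::num::ParseIntError;

    // Rejected under unwrap_used = "deny": a bad value panics at runtime.
    // fn bind_port(raw: &str) -> u16 {
    //     raw.parse().unwrap()
    // }

    // Accepted: the failure is handed to the caller instead of panicking.
    fn bind_port(raw: &str) -> Result<u16, ParseIntError> {
        raw.parse()
    }

    // Also accepted: expect is governed by the separate expect_used lint, which
    // this commit leaves disabled, so it remains available where the panic is
    // documented as unreachable.
    fn default_port() -> u16 {
        "8081".parse().expect("literal is a valid port")
    }

    fn main() {
        assert_eq!(bind_port("8081"), Ok(8081));
        assert_eq!(default_port(), 8081);
    }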

View file

@@ -6,6 +6,7 @@ use crate::backend::error::MyResult;
use crate::backend::federation::activities::create_article::CreateArticle;
use crate::backend::federation::activities::submit_article_update;
use crate::backend::utils::generate_article_version;
+use crate::common::utils::extract_domain;
use crate::common::utils::http_protocol_str;
use crate::common::validation::can_edit_article;
use crate::common::LocalUserView;
@@ -38,10 +39,9 @@ pub(in crate::backend::api) async fn create_article(
let local_instance = DbInstance::read_local_instance(&data)?;
let ap_id = ObjectId::parse(&format!(
-"{}://{}:{}/article/{}",
+"{}://{}/article/{}",
http_protocol_str(),
-local_instance.ap_id.inner().host_str().unwrap(),
-local_instance.ap_id.inner().port().unwrap(),
+extract_domain(&local_instance.ap_id),
create_article.title
))?;
let form = DbArticleForm {
@@ -177,10 +177,9 @@ pub(in crate::backend::api) async fn fork_article(
let local_instance = DbInstance::read_local_instance(&data)?;
let ap_id = ObjectId::parse(&format!(
-"{}://{}:{}/article/{}",
+"{}://{}/article/{}",
http_protocol_str(),
-local_instance.ap_id.inner().domain().unwrap(),
-local_instance.ap_id.inner().port().unwrap(),
+extract_domain(&local_instance.ap_id),
&fork_form.new_title
))?;
let form = DbArticleForm {
@@ -227,7 +226,12 @@ pub(super) async fn resolve_article(
) -> MyResult<Json<ArticleView>> {
let article: DbArticle = ObjectId::from(query.id).dereference(&data).await?;
let edits = DbEdit::read_for_article(&article, &data)?;
-let latest_version = edits.last().unwrap().edit.hash.clone();
+let latest_version = edits
+.last()
+.expect("has at least one edit")
+.edit
+.hash
+.clone();
Ok(Json(ArticleView {
article,
edits,
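
The expect("has at least one edit") still panics if an article ever has no edits; it only documents the invariant instead of hiding it behind unwrap. Where such an invariant is not guaranteed, the same lint-friendly style can propagate an error instead. A hedged sketch using anyhow for brevity (ibis's own MyResult type is not reproduced here):

    use anyhow::{anyhow, Result};

    struct EditView {
        hash: String,
    }

    /// Returns the newest edit's hash, surfacing an empty edit list as an error
    /// rather than panicking (an alternative to .expect("has at least one edit")).
    fn latest_version(edits: &[EditView]) -> Result<String> {
        let last = edits
            .last()
            .ok_or_else(|| anyhow!("article has no edits"))?;
        Ok(last.hash.clone())
    }

    fn main() -> Result<()> {
        let edits = vec![EditView { hash: "e0f1".into() }];
        println!("latest version: {}", latest_version(&edits)?);
        assert!(latest_version(&[]).is_err());
        Ok(())
    }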

View file

@@ -1,9 +1,9 @@
use crate::backend::error::MyResult;
use config::Config;
use doku::Document;
use serde::Deserialize;
use smart_default::SmartDefault;
use std::net::SocketAddr;
use crate::backend::error::MyResult;
#[derive(Debug, Deserialize, PartialEq, Eq, Clone, Document, SmartDefault)]
#[serde(default)]

View file

@@ -16,7 +16,6 @@ use diesel::{
};
use std::ops::DerefMut;
#[derive(Debug, Clone, Insertable, AsChangeset)]
#[diesel(table_name = article, check_for_backend(diesel::pg::Pg))]
pub struct DbArticleForm {

View file

@@ -10,14 +10,12 @@ use crate::common::{ApiConflict, DbArticle};
use activitypub_federation::config::Data;
use diesel::ExpressionMethods;
use diesel::{
-delete, insert_into, Identifiable, Insertable, QueryDsl, Queryable, RunQueryDsl,
-Selectable,
+delete, insert_into, Identifiable, Insertable, QueryDsl, Queryable, RunQueryDsl, Selectable,
};
use diffy::{apply, merge, Patch};
use serde::{Deserialize, Serialize};
use std::ops::DerefMut;
/// A local only object which represents a merge conflict. It is created
/// when a local user edit conflicts with another concurrent edit.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Queryable, Selectable, Identifiable)]

View file

@@ -10,7 +10,6 @@ use diesel::{insert_into, AsChangeset, Insertable, QueryDsl, RunQueryDsl};
use diffy::create_patch;
use std::ops::DerefMut;
#[derive(Debug, Clone, Insertable, AsChangeset)]
#[diesel(table_name = edit, check_for_backend(diesel::pg::Pg))]
pub struct DbEditForm {

View file

@@ -8,13 +8,10 @@ use activitypub_federation::fetch::collection_id::CollectionId;
use activitypub_federation::fetch::object_id::ObjectId;
use chrono::{DateTime, Utc};
use diesel::ExpressionMethods;
-use diesel::{
-insert_into, AsChangeset, Insertable, JoinOnDsl, QueryDsl, RunQueryDsl,
-};
+use diesel::{insert_into, AsChangeset, Insertable, JoinOnDsl, QueryDsl, RunQueryDsl};
use std::fmt::Debug;
use std::ops::DerefMut;
#[derive(Debug, Clone, Insertable, AsChangeset)]
#[diesel(table_name = instance, check_for_backend(diesel::pg::Pg))]
pub struct DbInstanceForm {

View file

@@ -5,9 +5,8 @@ use diesel::r2d2::ConnectionManager;
use diesel::r2d2::Pool;
use diesel::PgConnection;
use diesel::{QueryDsl, RunQueryDsl};
use std::ops::Deref;
use std::ops::DerefMut;
use std::sync::{Arc, Mutex};
pub mod article;
pub mod conflict;

View file

@@ -15,7 +15,6 @@ use diesel::{ExpressionMethods, JoinOnDsl};
use diesel::{PgTextExpressionMethods, QueryDsl};
use std::ops::DerefMut;
#[derive(Debug, Clone, Insertable, AsChangeset)]
#[diesel(table_name = local_user, check_for_backend(diesel::pg::Pg))]
pub struct DbLocalUserForm {

View file

@@ -36,7 +36,7 @@ pub struct VerifyUrlData(pub IbisConfig);
impl UrlVerifier for VerifyUrlData {
/// Check domain against allowlist and blocklist from config file.
async fn verify(&self, url: &Url) -> Result<(), ActivityPubError> {
-let domain = url.domain().unwrap();
+let domain = url.domain().expect("url has domain");
if let Some(allowlist) = &self.0.federation.allowlist {
let allowlist = allowlist.split(',').collect::<Vec<_>>();
if !allowlist.contains(&domain) {
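
One caveat on expect("url has domain"): Url::domain() returns None when the host is an IP address or missing, so the verifier could still panic for an actor URL such as http://127.0.0.1:8081/. A small standalone sketch against the url crate showing that edge case:

    use url::Url;

    fn main() {
        let named = Url::parse("https://example.org/article/Main_Page").expect("valid url");
        let ip_host = Url::parse("http://127.0.0.1:8081/article/Main_Page").expect("valid url");

        // A registered name is reported as a domain...
        assert_eq!(named.domain(), Some("example.org"));
        // ...but an IP-address host is not, so expect("url has domain") would panic here.
        assert_eq!(ip_host.domain(), None);
    }

Returning a verification error for such URLs would be the fully non-panicking alternative.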

View file

@@ -57,13 +57,9 @@ impl DbInstance {
{
let mut inboxes: Vec<_> = DbInstance::read_followers(self.id, data)?
.iter()
-.map(|f| Url::parse(&f.inbox_url).unwrap())
+.map(|f| f.inbox_url())
.collect();
-inboxes.extend(
-extra_recipients
-.into_iter()
-.map(|i| Url::parse(&i.inbox_url).unwrap()),
-);
+inboxes.extend(extra_recipients.into_iter().map(|i| i.inbox_url()));
send_activity(self, activity, inboxes, data).await?;
Ok(())
}
@@ -140,6 +136,6 @@ impl Actor for DbInstance {
}
fn inbox(&self) -> Url {
-Url::parse(&self.inbox_url).unwrap()
+self.inbox_url()
}
}
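
Actor::inbox() returns a bare Url, so the new helper either has to re-parse the stored string (as above, with an expect) or keep an already-parsed Url around. A hedged sketch of the second option, with illustrative types that are not the real diesel-backed DbInstance:

    use url::Url;

    /// Alternative sketch: keep the inbox as an already-parsed Url so that an
    /// inbox() accessor never needs a fallible re-parse. The struct here is
    /// illustrative and not the actual DbInstance definition.
    struct Instance {
        inbox_url: Url,
    }

    impl Instance {
        fn new(inbox_url: &str) -> Result<Self, url::ParseError> {
            Ok(Self {
                // Validate once, at the edge, instead of on every inbox() call.
                inbox_url: Url::parse(inbox_url)?,
            })
        }

        fn inbox(&self) -> Url {
            // Infallible after construction.
            self.inbox_url.clone()
        }
    }

    fn main() -> Result<(), url::ParseError> {
        let instance = Instance::new("https://ibis.example/inbox")?;
        assert_eq!(instance.inbox().as_str(), "https://ibis.example/inbox");
        Ok(())
    }

Re-parsing on demand, as the diff does, keeps the database-mapped structs unchanged; the expect is reasonable because the stored inbox URLs were presumably valid when they were first written.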

View file

@@ -89,6 +89,6 @@ impl Actor for DbPerson {
}
fn inbox(&self) -> Url {
-Url::parse(&self.inbox_url).unwrap()
+self.inbox_url()
}
}

View file

@@ -23,7 +23,7 @@ use axum::{middleware::Next, response::Response, Router};
use chrono::Local;
use diesel::r2d2::ConnectionManager;
use diesel::r2d2::Pool;
use diesel::Connection;
use diesel::PgConnection;
use diesel_migrations::embed_migrations;
use diesel_migrations::EmbeddedMigrations;
@@ -31,7 +31,7 @@ use diesel_migrations::MigrationHarness;
use leptos::*;
use leptos_axum::{generate_route_list, LeptosRoutes};
use log::info;
use std::sync::{Arc, Mutex};
use tower::Layer;
use tower_http::cors::CorsLayer;
use tower_http::services::{ServeDir, ServeFile};
@@ -57,10 +57,7 @@ pub async fn start(config: IbisConfig) -> MyResult<()> {
.get()?
.run_pending_migrations(MIGRATIONS)
.expect("run migrations");
-let data = IbisData {
-db_pool,
-config,
-};
+let data = IbisData { db_pool, config };
let data = FederationConfig::builder()
.domain(data.config.federation.domain.clone())
.url_verifier(Box::new(VerifyUrlData(data.config.clone())))
@@ -74,7 +71,7 @@ pub async fn start(config: IbisConfig) -> MyResult<()> {
setup(&data.to_request_data()).await?;
}
-let conf = get_configuration(Some("Cargo.toml")).await.unwrap();
+let conf = get_configuration(Some("Cargo.toml")).await?;
let mut leptos_options = conf.leptos_options;
leptos_options.site_addr = data.config.bind;
let routes = generate_route_list(App);
@@ -186,7 +183,9 @@ async fn federation_routes_middleware<B>(request: Request<B>, next: Next<B>) ->
if uri.ends_with('/') && uri.len() > 1 {
uri.pop();
}
-parts.uri = uri.parse().unwrap();
+parts.uri = uri
+.parse()
+.expect("can parse uri after dropping trailing slash");
let request = Request::from_parts(parts, body);
next.run(request).await
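
The expect("can parse uri after dropping trailing slash") documents the assumption that removing a single trailing slash from an already-valid request URI cannot make it unparseable. The normalization itself is small enough to show on its own; a sketch of the same logic outside axum:

    /// Drops a single trailing slash so "/article/Foo/" and "/article/Foo" hit
    /// the same federation route, while "/" itself is left alone (mirrors the
    /// middleware above, minus the axum request plumbing).
    fn normalize_path(path: &str) -> String {
        let mut uri = path.to_string();
        if uri.ends_with('/') && uri.len() > 1 {
            uri.pop();
        }
        uri
    }

    fn main() {
        assert_eq!(normalize_path("/article/Main_Page/"), "/article/Main_Page");
        assert_eq!(normalize_path("/article/Main_Page"), "/article/Main_Page");
        assert_eq!(normalize_path("/"), "/");
    }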

View file

@@ -98,7 +98,8 @@ impl EditVersion {
let mut sha256 = Sha256::new();
sha256.update(diff);
let hash_bytes = sha256.finalize();
-let uuid = Uuid::from_slice(&hash_bytes.as_slice()[..16]).unwrap();
+let uuid =
+Uuid::from_slice(&hash_bytes.as_slice()[..16]).expect("hash is correct size for uuid");
EditVersion(uuid)
}
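
This expect is effectively unreachable: a SHA-256 digest is always 32 bytes, so the 16-byte prefix handed to Uuid::from_slice always has exactly the length it requires. A self-contained sketch of the same construction using the sha2 and uuid crates:

    use sha2::{Digest, Sha256};
    use uuid::Uuid;

    fn main() {
        let diff = b"@@ -1 +1 @@\n-old\n+new\n";
        let mut sha256 = Sha256::new();
        sha256.update(diff);
        let hash_bytes = sha256.finalize();

        // A SHA-256 digest is 32 bytes, so the first 16 always form a slice of
        // exactly the length Uuid::from_slice requires; the error case cannot
        // occur, which is what the expect message documents.
        let uuid = Uuid::from_slice(&hash_bytes.as_slice()[..16])
            .expect("hash is correct size for uuid");
        println!("{uuid}");
    }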
@@ -167,6 +168,12 @@ pub struct DbPerson {
pub local: bool,
}
+impl DbPerson {
+pub fn inbox_url(&self) -> Url {
+Url::parse(&self.inbox_url).expect("can parse inbox url")
+}
+}
#[derive(Deserialize, Serialize)]
pub struct CreateArticleData {
pub title: String,
@@ -246,6 +253,12 @@ pub struct DbInstance {
pub local: bool,
}
+impl DbInstance {
+pub fn inbox_url(&self) -> Url {
+Url::parse(&self.inbox_url).expect("can parse inbox url")
+}
+}
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)]
#[cfg_attr(feature = "ssr", derive(Queryable))]
#[cfg_attr(feature = "ssr", diesel(table_name = article, check_for_backend(diesel::pg::Pg)))]

View file

@ -8,7 +8,7 @@ where
if let Some(port_) = url.inner().port() {
port = format!(":{port_}");
}
format!("{}{port}", url.inner().host_str().unwrap())
format!("{}{port}", url.inner().host_str().expect("has domain"))
}
#[cfg(not(feature = "ssr"))]
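
For reference, this is roughly what a host-plus-optional-port helper like extract_domain reduces to. The sketch below takes a plain url::Url instead of the ObjectId wrapper (url.inner()) used by the real function:

    use url::Url;

    /// Renders "host" or "host:port", mirroring the helper above; this
    /// standalone version takes a plain Url rather than an ObjectId<T>.
    fn host_with_port(url: &Url) -> String {
        let mut port = String::new();
        if let Some(port_) = url.port() {
            port = format!(":{port_}");
        }
        format!("{}{port}", url.host_str().expect("has domain"))
    }

    fn main() {
        let url = Url::parse("http://ibis.example:8081/article/Main_Page").expect("valid url");
        assert_eq!(host_with_port(&url), "ibis.example:8081");
    }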

View file

@@ -57,7 +57,10 @@ fn backend_hostname() -> String {
}
#[cfg(feature = "ssr")]
{
-backend_hostname = crate::backend::config::IbisConfig::read().unwrap().bind.to_string();
+backend_hostname = crate::backend::config::IbisConfig::read()
+.unwrap()
+.bind
+.to_string();
}
backend_hostname
}

View file

@@ -1,4 +1,5 @@
#[cfg(feature = "ssr")]
pub mod backend;
pub mod common;
+#[allow(clippy::unwrap_used)]
pub mod frontend;
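
The module-level allow scopes the new workspace lint: unwrap_used = "deny" from Cargo.toml stays in force for the backend, while everything under pub mod frontend may keep calling unwrap(). A minimal sketch of that scoping, with made-up module names:

    mod backend_like {
        // The crate-wide [lints.clippy] unwrap_used = "deny" applies here, so
        // this module must avoid .unwrap().
        pub fn parse_port(raw: &str) -> Option<u16> {
            raw.parse().ok()
        }
    }

    #[allow(clippy::unwrap_used)]
    mod frontend_like {
        // The attribute re-enables .unwrap() for this module only, which is
        // what the diff does for pub mod frontend.
        pub fn parse_port(raw: &str) -> u16 {
            raw.parse().unwrap()
        }
    }

    fn main() {
        assert_eq!(backend_like::parse_port("8081"), Some(8081));
        assert_eq!(frontend_like::parse_port("8081"), 8081);
    }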