Mirror of https://git.asonix.dog/asonix/pict-rs

Add error codes

asonix 2023-09-01 20:50:10 -05:00
parent 3c09aad5e8
commit ba3a23ed43
15 changed files with 380 additions and 46 deletions

View file

@@ -1,6 +1,8 @@
 use actix_web::{http::StatusCode, HttpResponse, ResponseError};
 use color_eyre::Report;

+use crate::error_code::ErrorCode;
+
 pub(crate) struct Error {
     inner: color_eyre::Report,
 }
@@ -13,6 +15,12 @@ impl Error {
     pub(crate) fn root_cause(&self) -> &(dyn std::error::Error + 'static) {
         self.inner.root_cause()
     }
+
+    pub(crate) fn error_code(&self) -> ErrorCode {
+        self.kind()
+            .map(|e| e.error_code())
+            .unwrap_or(ErrorCode::UNKNOWN_ERROR)
+    }
 }

 impl std::fmt::Debug for Error {
@@ -55,21 +63,12 @@ pub(crate) enum UploadError {
     #[error("Error in old repo")]
     OldRepo(#[from] crate::repo_04::RepoError),

-    #[error("Error parsing string")]
-    ParseString(#[from] std::string::FromUtf8Error),
-
     #[error("Error interacting with filesystem")]
     Io(#[from] std::io::Error),

     #[error("Error validating upload")]
     Validation(#[from] crate::validate::ValidationError),

-    #[error("Error generating path")]
-    PathGenerator(#[from] storage_path_generator::PathError),
-
-    #[error("Error stripping prefix")]
-    StripPrefix(#[from] std::path::StripPrefixError),
-
     #[error("Error in store")]
     Store(#[source] crate::store::StoreError),
@@ -127,11 +126,8 @@ pub(crate) enum UploadError {
     #[error("Tried to save an image with an already-taken name")]
     DuplicateAlias,

-    #[error("Error in json")]
-    Json(#[from] serde_json::Error),
-
-    #[error("Error in cbor")]
-    Cbor(#[from] serde_cbor::Error),
+    #[error("Failed to serialize job")]
+    PushJob(#[source] serde_json::Error),

     #[error("Range header not satisfiable")]
     Range,
@@ -143,6 +139,41 @@ pub(crate) enum UploadError {
     Timeout(#[from] crate::stream::TimeoutError),
 }

+impl UploadError {
+    const fn error_code(&self) -> ErrorCode {
+        match self {
+            Self::Upload(_) => ErrorCode::FILE_UPLOAD_ERROR,
+            Self::Repo(e) => e.error_code(),
+            Self::OldRepo(_) => ErrorCode::OLD_REPO_ERROR,
+            Self::Io(_) => ErrorCode::IO_ERROR,
+            Self::Validation(e) => e.error_code(),
+            Self::Store(e) => e.error_code(),
+            Self::Ffmpeg(e) => e.error_code(),
+            Self::Magick(e) => e.error_code(),
+            Self::Exiftool(e) => e.error_code(),
+            Self::BuildClient(_) | Self::RequestMiddleware(_) | Self::Request(_) => {
+                ErrorCode::HTTP_CLIENT_ERROR
+            }
+            Self::Download(_) => ErrorCode::DOWNLOAD_FILE_ERROR,
+            Self::ReadOnly => ErrorCode::READ_ONLY,
+            Self::InvalidProcessExtension => ErrorCode::INVALID_FILE_EXTENSION,
+            Self::ParsePath => ErrorCode::INVALID_PROCESS_PATH,
+            Self::Semaphore => ErrorCode::PROCESS_SEMAPHORE_CLOSED,
+            Self::Canceled => ErrorCode::PANIC,
+            Self::NoFiles => ErrorCode::VALIDATE_NO_FILES,
+            Self::MissingAlias => ErrorCode::ALIAS_NOT_FOUND,
+            Self::MissingIdentifier => ErrorCode::LOST_FILE,
+            Self::InvalidToken => ErrorCode::INVALID_DELETE_TOKEN,
+            Self::UnsupportedProcessExtension => ErrorCode::INVALID_FILE_EXTENSION,
+            Self::DuplicateAlias => ErrorCode::DUPLICATE_ALIAS,
+            Self::PushJob(_) => todo!(),
+            Self::Range => ErrorCode::RANGE_NOT_SATISFIABLE,
+            Self::Limit(_) => ErrorCode::VALIDATE_FILE_SIZE,
+            Self::Timeout(_) => ErrorCode::STREAM_TOO_SLOW,
+        }
+    }
+}
+
 impl From<actix_web::error::BlockingError> for UploadError {
     fn from(_: actix_web::error::BlockingError) -> Self {
         UploadError::Canceled
@@ -196,8 +227,13 @@ impl ResponseError for Error {
         HttpResponse::build(self.status_code())
             .content_type("application/json")
             .body(
-                serde_json::to_string(&serde_json::json!({ "msg": self.root_cause().to_string() }))
-                    .unwrap_or_else(|_| r#"{"msg":"Request failed"}"#.to_string()),
+                serde_json::to_string(&serde_json::json!({
+                    "msg": self.root_cause().to_string(),
+                    "code": self.error_code()
+                }))
+                .unwrap_or_else(|_| {
+                    r#"{"msg":"Request failed","code":"unknown-error"}"#.to_string()
+                }),
             )
     }
 }
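With this change every JSON error body carries a machine-readable code alongside the human-readable message. A hedged client-side sketch of how a consumer might branch on it (the `ApiError` type and the specific codes matched on are illustrative, not part of this commit):

```rust
// Illustrative only: the error body shape is {"msg": "...", "code": "..."}.
use serde::Deserialize;

#[derive(Debug, Deserialize)]
struct ApiError {
    msg: String,
    code: String, // e.g. "validate-file-size", "unknown-error"
}

fn report_upload_failure(body: &str) -> Result<(), serde_json::Error> {
    let err: ApiError = serde_json::from_str(body)?;
    match err.code.as_str() {
        "validate-file-size" => eprintln!("file too large: {}", err.msg),
        "range-not-satisfiable" => eprintln!("bad Range header: {}", err.msg),
        other => eprintln!("request failed ({other}): {}", err.msg),
    }
    Ok(())
}
```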

src/error_code.rs (new file, 137 lines)
View file

@@ -0,0 +1,137 @@
#[derive(Debug, serde::Serialize)]
#[serde(transparent)]
pub(crate) struct ErrorCode {
    code: &'static str,
}

#[derive(Debug, serde::Serialize, serde::Deserialize)]
#[serde(transparent)]
pub(crate) struct OwnedErrorCode {
    code: String,
}

impl ErrorCode {
    pub(crate) fn into_owned(self) -> OwnedErrorCode {
        OwnedErrorCode {
            code: self.code.to_string(),
        }
    }

    pub(crate) const COMMAND_TIMEOUT: ErrorCode = ErrorCode {
        code: "command-timeout",
    };
    pub(crate) const COMMAND_ERROR: ErrorCode = ErrorCode {
        code: "command-error",
    };
    pub(crate) const COMMAND_FAILURE: ErrorCode = ErrorCode {
        code: "command-failure",
    };
    pub(crate) const OLD_REPO_ERROR: ErrorCode = ErrorCode {
        code: "old-repo-error",
    };
    pub(crate) const NOT_FOUND: ErrorCode = ErrorCode { code: "not-found" };
    pub(crate) const FILE_IO_ERROR: ErrorCode = ErrorCode {
        code: "file-io-error",
    };
    pub(crate) const PARSE_PATH_ERROR: ErrorCode = ErrorCode {
        code: "parse-path-error",
    };
    pub(crate) const FILE_EXISTS: ErrorCode = ErrorCode {
        code: "file-exists",
    };
    pub(crate) const FORMAT_FILE_ID_ERROR: ErrorCode = ErrorCode {
        code: "format-file-id-error",
    };
    pub(crate) const OBJECT_REQUEST_ERROR: ErrorCode = ErrorCode {
        code: "object-request-error",
    };
    pub(crate) const OBJECT_IO_ERROR: ErrorCode = ErrorCode {
        code: "object-io-error",
    };
    pub(crate) const PARSE_OBJECT_ID_ERROR: ErrorCode = ErrorCode {
        code: "parse-object-id-error",
    };
    pub(crate) const PANIC: ErrorCode = ErrorCode { code: "panic" };
    pub(crate) const ALREADY_CLAIMED: ErrorCode = ErrorCode {
        code: "already-claimed",
    };
    pub(crate) const SLED_ERROR: ErrorCode = ErrorCode { code: "sled-error" };
    pub(crate) const EXTRACT_DETAILS: ErrorCode = ErrorCode {
        code: "extract-details",
    };
    pub(crate) const EXTRACT_UPLOAD_RESULT: ErrorCode = ErrorCode {
        code: "extract-upload-result",
    };
    pub(crate) const CONFLICTED_RECORD: ErrorCode = ErrorCode {
        code: "conflicted-record",
    };
    pub(crate) const COMMAND_NOT_FOUND: ErrorCode = ErrorCode {
        code: "command-not-found",
    };
    pub(crate) const COMMAND_PERMISSION_DENIED: ErrorCode = ErrorCode {
        code: "command-permission-denied",
    };
    pub(crate) const FILE_UPLOAD_ERROR: ErrorCode = ErrorCode {
        code: "file-upload-error",
    };
    pub(crate) const IO_ERROR: ErrorCode = ErrorCode { code: "io-error" };
    pub(crate) const VALIDATE_WIDTH: ErrorCode = ErrorCode {
        code: "validate-width",
    };
    pub(crate) const VALIDATE_HEIGHT: ErrorCode = ErrorCode {
        code: "validate-height",
    };
    pub(crate) const VALIDATE_AREA: ErrorCode = ErrorCode {
        code: "validate-area",
    };
    pub(crate) const VALIDATE_FRAMES: ErrorCode = ErrorCode {
        code: "validate-frames",
    };
    pub(crate) const VALIDATE_FILE_EMPTY: ErrorCode = ErrorCode {
        code: "validate-file-empty",
    };
    pub(crate) const VALIDATE_FILE_SIZE: ErrorCode = ErrorCode {
        code: "validate-file-size",
    };
    pub(crate) const VIDEO_DISABLED: ErrorCode = ErrorCode {
        code: "video-disabled",
    };
    pub(crate) const HTTP_CLIENT_ERROR: ErrorCode = ErrorCode {
        code: "http-client-error",
    };
    pub(crate) const DOWNLOAD_FILE_ERROR: ErrorCode = ErrorCode {
        code: "download-file-error",
    };
    pub(crate) const READ_ONLY: ErrorCode = ErrorCode { code: "read-only" };
    pub(crate) const INVALID_FILE_EXTENSION: ErrorCode = ErrorCode {
        code: "invalid-file-extension",
    };
    pub(crate) const INVALID_PROCESS_PATH: ErrorCode = ErrorCode {
        code: "invalid-process-path",
    };
    pub(crate) const PROCESS_SEMAPHORE_CLOSED: ErrorCode = ErrorCode {
        code: "process-semaphore-closed",
    };
    pub(crate) const VALIDATE_NO_FILES: ErrorCode = ErrorCode {
        code: "validate-no-files",
    };
    pub(crate) const ALIAS_NOT_FOUND: ErrorCode = ErrorCode {
        code: "alias-not-found",
    };
    pub(crate) const LOST_FILE: ErrorCode = ErrorCode { code: "lost-file" };
    pub(crate) const INVALID_DELETE_TOKEN: ErrorCode = ErrorCode {
        code: "invalid-delete-token",
    };
    pub(crate) const DUPLICATE_ALIAS: ErrorCode = ErrorCode {
        code: "duplicate-alias",
    };
    pub(crate) const RANGE_NOT_SATISFIABLE: ErrorCode = ErrorCode {
        code: "range-not-satisfiable",
    };
    pub(crate) const STREAM_TOO_SLOW: ErrorCode = ErrorCode {
        code: "stream-too-slow",
    };
    pub(crate) const UNKNOWN_ERROR: ErrorCode = ErrorCode {
        code: "unknown-error",
    };
}
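Because of `#[serde(transparent)]`, an `ErrorCode` serializes as the bare string rather than as a nested object, so response bodies read `"code": "unknown-error"`. A minimal standalone sketch of that pattern (the `Code` type below is an illustrative stand-in, not part of the crate):

```rust
// Standalone sketch mirroring the ErrorCode pattern above.
#[derive(Debug, serde::Serialize)]
#[serde(transparent)]
struct Code {
    code: &'static str,
}

fn main() {
    let code = Code { code: "unknown-error" };
    // Serializes to the inner string, not to {"code":"unknown-error"}.
    assert_eq!(serde_json::to_string(&code).unwrap(), r#""unknown-error""#);
}
```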

View file

@@ -1,4 +1,7 @@
-use crate::process::{Process, ProcessError};
+use crate::{
+    error_code::ErrorCode,
+    process::{Process, ProcessError},
+};
 use actix_web::web::Bytes;
 use tokio::io::{AsyncRead, AsyncReadExt};
@@ -24,6 +27,13 @@ impl From<ProcessError> for ExifError {
 }

 impl ExifError {
+    pub(crate) const fn error_code(&self) -> ErrorCode {
+        match self {
+            Self::Process(e) => e.error_code(),
+            Self::Read(_) => ErrorCode::COMMAND_ERROR,
+            Self::CommandFailed(_) => ErrorCode::COMMAND_FAILURE,
+        }
+    }
     pub(crate) fn is_client_error(&self) -> bool {
         // if exiftool bails we probably have bad input
         matches!(

View file

@@ -1,4 +1,5 @@
 use crate::{
+    error_code::ErrorCode,
     formats::InternalVideoFormat,
     process::{Process, ProcessError},
     store::{Store, StoreError},
@@ -63,6 +64,24 @@ impl From<ProcessError> for FfMpegError {
 }

 impl FfMpegError {
+    pub(crate) const fn error_code(&self) -> ErrorCode {
+        match self {
+            Self::CommandFailed(_) => ErrorCode::COMMAND_FAILURE,
+            Self::Store(s) => s.error_code(),
+            Self::Process(e) => e.error_code(),
+            Self::Read(_)
+            | Self::Write(_)
+            | Self::Json(_)
+            | Self::CreateDir(_)
+            | Self::ReadFile(_)
+            | Self::OpenFile(_)
+            | Self::CreateFile(_)
+            | Self::CloseFile(_)
+            | Self::RemoveFile(_)
+            | Self::Path => ErrorCode::COMMAND_ERROR,
+        }
+    }
+
     pub(crate) fn is_client_error(&self) -> bool {
         // Failing validation or ffmpeg bailing probably means bad input
         matches!(

View file

@@ -6,6 +6,7 @@ mod details;
 mod discover;
 mod either;
 mod error;
+mod error_code;
 mod exiftool;
 mod ffmpeg;
 mod file;
@@ -450,11 +451,11 @@ async fn claim_upload<S: Store + 'static>(
                         }]
                     })))
                 }
-                UploadResult::Failure { message } => Ok(HttpResponse::UnprocessableEntity().json(
-                    &serde_json::json!({
+                UploadResult::Failure { message, code } => Ok(HttpResponse::UnprocessableEntity()
+                    .json(&serde_json::json!({
                         "msg": message,
-                    }),
-                )),
+                        "code": code,
+                    }))),
             }
         }
         Err(_) => Ok(HttpResponse::NoContent().finish()),

View file

@@ -1,4 +1,5 @@
 use crate::{
+    error_code::ErrorCode,
     formats::ProcessableFormat,
     process::{Process, ProcessError},
     store::Store,
@@ -57,6 +58,24 @@ impl From<ProcessError> for MagickError {
 }

 impl MagickError {
+    pub(crate) const fn error_code(&self) -> ErrorCode {
+        match self {
+            Self::CommandFailed(_) => ErrorCode::COMMAND_FAILURE,
+            Self::Store(e) => e.error_code(),
+            Self::Process(e) => e.error_code(),
+            Self::Json(_)
+            | Self::Read(_)
+            | Self::Write(_)
+            | Self::CreateFile(_)
+            | Self::CreateDir(_)
+            | Self::CloseFile(_)
+            | Self::RemoveFile(_)
+            | Self::Discover(_)
+            | Self::Empty
+            | Self::Path => ErrorCode::COMMAND_ERROR,
+        }
+    }
+
     pub(crate) fn is_client_error(&self) -> bool {
         // Failing validation or imagemagick bailing probably means bad input
         matches!(

View file

@@ -14,6 +14,8 @@ use tokio::{
 };
 use tracing::{Instrument, Span};

+use crate::error_code::ErrorCode;
+
 struct MetricsGuard {
     start: Instant,
     armed: bool,
@@ -100,6 +102,18 @@ pub(crate) enum ProcessError {
     Other(#[source] std::io::Error),
 }

+impl ProcessError {
+    pub(crate) const fn error_code(&self) -> ErrorCode {
+        match self {
+            Self::NotFound(_) => ErrorCode::COMMAND_NOT_FOUND,
+            Self::PermissionDenied(_) => ErrorCode::COMMAND_PERMISSION_DENIED,
+            Self::LimitReached | Self::Other(_) => ErrorCode::COMMAND_ERROR,
+            Self::Timeout(_) => ErrorCode::COMMAND_TIMEOUT,
+            Self::Status(_, _) => ErrorCode::COMMAND_FAILURE,
+        }
+    }
+}
+
 impl Process {
     pub(crate) fn run(command: &str, args: &[&str], timeout: u64) -> Result<Self, ProcessError> {
         let res = tracing::trace_span!(parent: None, "Create command", %command)

View file

@@ -1,7 +1,7 @@
 use crate::{
     concurrent_processor::ProcessMap,
     config::Configuration,
-    error::Error,
+    error::{Error, UploadError},
     formats::InputProcessableFormat,
     repo::{Alias, DeleteToken, FullRepo, Hash, JobId, UploadId},
     serde_str::Serde,
@@ -94,13 +94,14 @@ pub(crate) async fn cleanup_alias(
     let job = serde_json::to_vec(&Cleanup::Alias {
         alias: Serde::new(alias),
         token: Serde::new(token),
-    })?;
+    })
+    .map_err(UploadError::PushJob)?;
     repo.push(CLEANUP_QUEUE, job.into()).await?;
     Ok(())
 }

 pub(crate) async fn cleanup_hash(repo: &Arc<dyn FullRepo>, hash: Hash) -> Result<(), Error> {
-    let job = serde_json::to_vec(&Cleanup::Hash { hash })?;
+    let job = serde_json::to_vec(&Cleanup::Hash { hash }).map_err(UploadError::PushJob)?;
     repo.push(CLEANUP_QUEUE, job.into()).await?;
     Ok(())
 }
@@ -111,7 +112,8 @@ pub(crate) async fn cleanup_identifier<I: Identifier>(
 ) -> Result<(), Error> {
     let job = serde_json::to_vec(&Cleanup::Identifier {
         identifier: Base64Bytes(identifier.to_bytes()?),
-    })?;
+    })
+    .map_err(UploadError::PushJob)?;
     repo.push(CLEANUP_QUEUE, job.into()).await?;
     Ok(())
 }
@@ -121,25 +123,26 @@ async fn cleanup_variants(
     hash: Hash,
     variant: Option<String>,
 ) -> Result<(), Error> {
-    let job = serde_json::to_vec(&Cleanup::Variant { hash, variant })?;
+    let job =
+        serde_json::to_vec(&Cleanup::Variant { hash, variant }).map_err(UploadError::PushJob)?;
     repo.push(CLEANUP_QUEUE, job.into()).await?;
     Ok(())
 }

 pub(crate) async fn cleanup_outdated_proxies(repo: &Arc<dyn FullRepo>) -> Result<(), Error> {
-    let job = serde_json::to_vec(&Cleanup::OutdatedProxies)?;
+    let job = serde_json::to_vec(&Cleanup::OutdatedProxies).map_err(UploadError::PushJob)?;
     repo.push(CLEANUP_QUEUE, job.into()).await?;
     Ok(())
 }

 pub(crate) async fn cleanup_outdated_variants(repo: &Arc<dyn FullRepo>) -> Result<(), Error> {
-    let job = serde_json::to_vec(&Cleanup::OutdatedVariants)?;
+    let job = serde_json::to_vec(&Cleanup::OutdatedVariants).map_err(UploadError::PushJob)?;
     repo.push(CLEANUP_QUEUE, job.into()).await?;
     Ok(())
 }

 pub(crate) async fn cleanup_all_variants(repo: &Arc<dyn FullRepo>) -> Result<(), Error> {
-    let job = serde_json::to_vec(&Cleanup::AllVariants)?;
+    let job = serde_json::to_vec(&Cleanup::AllVariants).map_err(UploadError::PushJob)?;
     repo.push(CLEANUP_QUEUE, job.into()).await?;
     Ok(())
 }
@@ -154,7 +157,8 @@ pub(crate) async fn queue_ingest(
         identifier: Base64Bytes(identifier),
         declared_alias: declared_alias.map(Serde::new),
         upload_id: Serde::new(upload_id),
-    })?;
+    })
+    .map_err(UploadError::PushJob)?;
     repo.push(PROCESS_QUEUE, job.into()).await?;
     Ok(())
 }
@@ -171,7 +175,8 @@ pub(crate) async fn queue_generate(
         source: Serde::new(source),
         process_path,
         process_args,
-    })?;
+    })
+    .map_err(UploadError::PushJob)?;
     repo.push(PROCESS_QUEUE, job.into()).await?;
     Ok(())
 }
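The queue changes above all follow one pattern: instead of relying on a blanket `#[from] serde_json::Error` conversion, each serialization call site maps its failure into the new `PushJob` variant, so the failure carries a specific error code. A small standalone sketch of that pattern (the `JobError`/`enqueue` names are illustrative, not from the crate):

```rust
// Standalone sketch: map serde_json failures into a dedicated variant at the
// call site, rather than converting them with a blanket `#[from]`.
#[derive(Debug, thiserror::Error)]
enum JobError {
    #[error("Failed to serialize job")]
    PushJob(#[source] serde_json::Error),
}

fn enqueue(payload: &serde_json::Value) -> Result<Vec<u8>, JobError> {
    // Any serialization failure here is tagged as a push-job failure,
    // which can then be mapped to a specific error code.
    serde_json::to_vec(payload).map_err(JobError::PushJob)
}
```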

View file

@@ -117,6 +117,7 @@
             UploadResult::Failure {
                 message: e.root_cause().to_string(),
+                code: e.error_code().into_owned(),
             }
         }
     };

View file

@@ -1,6 +1,7 @@
 use crate::{
     config,
     details::Details,
+    error_code::{ErrorCode, OwnedErrorCode},
     store::{Identifier, StoreError},
     stream::LocalBoxStream,
 };
@@ -52,9 +53,16 @@ pub(crate) struct UploadId {
     id: Uuid,
 }

+#[derive(Debug)]
 pub(crate) enum UploadResult {
-    Success { alias: Alias, token: DeleteToken },
-    Failure { message: String },
+    Success {
+        alias: Alias,
+        token: DeleteToken,
+    },
+    Failure {
+        message: String,
+        code: OwnedErrorCode,
+    },
 }

 #[derive(Debug, thiserror::Error)]
@@ -69,6 +77,16 @@ pub(crate) enum RepoError {
     Canceled,
 }

+impl RepoError {
+    pub(crate) const fn error_code(&self) -> ErrorCode {
+        match self {
+            Self::SledError(e) => e.error_code(),
+            Self::AlreadyClaimed => ErrorCode::ALREADY_CLAIMED,
+            Self::Canceled => ErrorCode::PANIC,
+        }
+    }
+}
+
 #[async_trait::async_trait(?Send)]
 pub(crate) trait FullRepo:
     UploadRepo

View file

@@ -1,5 +1,6 @@
 use crate::{
     details::HumanDate,
+    error_code::{ErrorCode, OwnedErrorCode},
     serde_str::Serde,
     store::StoreError,
     stream::{from_iterator, LocalBoxStream},
@@ -46,7 +47,10 @@ pub(crate) enum SledError {
     Sled(#[from] sled::Error),

     #[error("Invalid details json")]
-    Details(#[from] serde_json::Error),
+    Details(serde_json::Error),
+
+    #[error("Invalid upload result json")]
+    UploadResult(serde_json::Error),

     #[error("Error parsing variant key")]
     VariantKey(#[from] VariantKeyError),
@@ -58,6 +62,18 @@ pub(crate) enum SledError {
     Conflict,
 }

+impl SledError {
+    pub(super) const fn error_code(&self) -> ErrorCode {
+        match self {
+            Self::Sled(_) | Self::VariantKey(_) => ErrorCode::SLED_ERROR,
+            Self::Details(_) => ErrorCode::EXTRACT_DETAILS,
+            Self::UploadResult(_) => ErrorCode::EXTRACT_UPLOAD_RESULT,
+            Self::Panic => ErrorCode::PANIC,
+            Self::Conflict => ErrorCode::CONFLICTED_RECORD,
+        }
+    }
+}
+
 #[derive(Clone)]
 pub(crate) struct SledRepo {
     healthz_count: Arc<AtomicU64>,
@@ -442,6 +458,7 @@ enum InnerUploadResult {
     },
     Failure {
         message: String,
+        code: OwnedErrorCode,
     },
 }
@@ -452,7 +469,7 @@ impl From<UploadResult> for InnerUploadResult {
                 alias: Serde::new(alias),
                 token: Serde::new(token),
             },
-            UploadResult::Failure { message } => InnerUploadResult::Failure { message },
+            UploadResult::Failure { message, code } => InnerUploadResult::Failure { message, code },
         }
     }
 }
@@ -464,7 +481,7 @@ impl From<InnerUploadResult> for UploadResult {
                 alias: Serde::into_inner(alias),
                 token: Serde::into_inner(token),
             },
-            InnerUploadResult::Failure { message } => UploadResult::Failure { message },
+            InnerUploadResult::Failure { message, code } => UploadResult::Failure { message, code },
         }
     }
 }
@@ -538,7 +555,7 @@ impl UploadRepo for SledRepo {
         if let Some(bytes) = opt {
             if bytes != b"1" {
                 let result: InnerUploadResult =
-                    serde_json::from_slice(&bytes).map_err(SledError::from)?;
+                    serde_json::from_slice(&bytes).map_err(SledError::UploadResult)?;
                 return Ok(result.into());
             }
         } else {
@@ -553,7 +570,7 @@
                 sled::Event::Insert { value, .. } => {
                     if value != b"1" {
                         let result: InnerUploadResult =
-                            serde_json::from_slice(&value).map_err(SledError::from)?;
+                            serde_json::from_slice(&value).map_err(SledError::UploadResult)?;
                         return Ok(result.into());
                     }
                 }
@@ -576,7 +593,7 @@
         result: UploadResult,
     ) -> Result<(), RepoError> {
         let result: InnerUploadResult = result.into();
-        let result = serde_json::to_vec(&result).map_err(SledError::from)?;
+        let result = serde_json::to_vec(&result).map_err(SledError::UploadResult)?;

         b!(self.uploads, uploads.insert(upload_id.as_bytes(), result));
@@ -940,7 +957,7 @@ impl DetailsRepo for SledRepo {
     ) -> Result<(), StoreError> {
         let key = identifier.to_bytes()?;
         let details = serde_json::to_vec(&details.inner)
-            .map_err(SledError::from)
+            .map_err(SledError::Details)
            .map_err(RepoError::from)?;

         b!(
@@ -959,7 +976,7 @@
         opt.map(|ivec| serde_json::from_slice(&ivec).map(|inner| Details { inner }))
             .transpose()
-            .map_err(SledError::from)
+            .map_err(SledError::Details)
             .map_err(RepoError::from)
             .map_err(StoreError::from)
     }
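For completeness, a hedged sketch of the round trip this enables: the upload result JSON persisted in sled now carries the code and restores it on read. `StoredResult`/`OwnedCode` below are simplified stand-ins for `InnerUploadResult`/`OwnedErrorCode`, not the crate's actual types.

```rust
// Simplified stand-ins showing that the failure code survives the
// serde_json round trip used when the result is stored in sled.
use serde::{Deserialize, Serialize};

#[derive(Debug, Serialize, Deserialize, PartialEq)]
#[serde(transparent)]
struct OwnedCode {
    code: String,
}

#[derive(Debug, Serialize, Deserialize, PartialEq)]
enum StoredResult {
    Failure { message: String, code: OwnedCode },
}

fn main() {
    let original = StoredResult::Failure {
        message: "file too large".into(),
        code: OwnedCode { code: "validate-file-size".into() },
    };
    let bytes = serde_json::to_vec(&original).unwrap();
    let restored: StoredResult = serde_json::from_slice(&bytes).unwrap();
    assert_eq!(original, restored);
}
```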

View file

@@ -4,6 +4,8 @@ use futures_core::Stream;
 use std::{fmt::Debug, sync::Arc};
 use tokio::io::{AsyncRead, AsyncWrite};

+use crate::error_code::ErrorCode;
+
 pub(crate) mod file_store;
 pub(crate) mod object_store;
@@ -29,6 +31,15 @@
 }

 impl StoreError {
+    pub(crate) const fn error_code(&self) -> ErrorCode {
+        match self {
+            Self::FileStore(e) => e.error_code(),
+            Self::ObjectStore(e) => e.error_code(),
+            Self::Repo(e) => e.error_code(),
+            Self::Repo04(_) => ErrorCode::OLD_REPO_ERROR,
+            Self::FileNotFound(_) | Self::ObjectNotFound(_) => ErrorCode::NOT_FOUND,
+        }
+    }
     pub(crate) const fn is_not_found(&self) -> bool {
         matches!(self, Self::FileNotFound(_)) || matches!(self, Self::ObjectNotFound(_))
     }

View file

@@ -1,4 +1,5 @@
 use crate::{
+    error_code::ErrorCode,
     file::File,
     repo::{Repo, SettingsRepo},
     store::Store,
@@ -32,16 +33,27 @@ pub(crate) enum FileError {
     #[error("Failed to generate path")]
     PathGenerator(#[from] storage_path_generator::PathError),

-    #[error("Error formatting file store identifier")]
+    #[error("Error formatting file store ID")]
     IdError,

-    #[error("Mailformed file store identifier")]
+    #[error("Malformed file store ID")]
     PrefixError,

     #[error("Tried to save over existing file")]
     FileExists,
 }

+impl FileError {
+    pub(super) const fn error_code(&self) -> ErrorCode {
+        match self {
+            Self::Io(_) => ErrorCode::FILE_IO_ERROR,
+            Self::PathGenerator(_) => ErrorCode::PARSE_PATH_ERROR,
+            Self::FileExists => ErrorCode::FILE_EXISTS,
+            Self::IdError | Self::PrefixError => ErrorCode::FORMAT_FILE_ID_ERROR,
+        }
+    }
+}
+
 #[derive(Clone)]
 pub(crate) struct FileStore {
     path_gen: Generator,

View file

@@ -1,5 +1,6 @@
 use crate::{
     bytes_stream::BytesStream,
+    error_code::ErrorCode,
     repo::{Repo, SettingsRepo},
     store::Store,
     stream::{IntoStreamer, StreamMap},
@@ -67,21 +68,39 @@ pub(crate) enum ObjectError {
     Etag,

     #[error("Task cancelled")]
-    Cancelled,
+    Canceled,

     #[error("Invalid status: {0}\n{1}")]
     Status(StatusCode, String),
 }

+impl ObjectError {
+    pub(super) const fn error_code(&self) -> ErrorCode {
+        match self {
+            Self::PathGenerator(_) => ErrorCode::PARSE_PATH_ERROR,
+            Self::S3(_)
+            | Self::RequestMiddleware(_)
+            | Self::Request(_)
+            | Self::Xml(_)
+            | Self::Length
+            | Self::Etag
+            | Self::Status(_, _) => ErrorCode::OBJECT_REQUEST_ERROR,
+            Self::IO(_) => ErrorCode::OBJECT_IO_ERROR,
+            Self::Utf8(_) => ErrorCode::PARSE_OBJECT_ID_ERROR,
+            Self::Canceled => ErrorCode::PANIC,
+        }
+    }
+}
+
 impl From<JoinError> for ObjectError {
     fn from(_: JoinError) -> Self {
-        Self::Cancelled
+        Self::Canceled
     }
 }

 impl From<BlockingError> for ObjectError {
     fn from(_: BlockingError) -> Self {
-        Self::Cancelled
+        Self::Canceled
     }
 }

View file

@@ -6,6 +6,7 @@ use crate::{
     discover::Discovery,
     either::Either,
     error::Error,
+    error_code::ErrorCode,
     formats::{
         AnimationFormat, AnimationOutput, ImageInput, ImageOutput, InputFile, InputVideoFormat,
         InternalFormat, Validations,
@@ -38,6 +39,20 @@ pub(crate) enum ValidationError {
     VideoDisabled,
 }

+impl ValidationError {
+    pub(crate) const fn error_code(&self) -> ErrorCode {
+        match self {
+            Self::Width => ErrorCode::VALIDATE_WIDTH,
+            Self::Height => ErrorCode::VALIDATE_HEIGHT,
+            Self::Area => ErrorCode::VALIDATE_AREA,
+            Self::Frames => ErrorCode::VALIDATE_FRAMES,
+            Self::Empty => ErrorCode::VALIDATE_FILE_EMPTY,
+            Self::Filesize => ErrorCode::VALIDATE_FILE_SIZE,
+            Self::VideoDisabled => ErrorCode::VIDEO_DISABLED,
+        }
+    }
+}
+
 const MEGABYTES: usize = 1024 * 1024;

 #[tracing::instrument(skip_all)]