Instrument better with Tracing

parent e31cbb581b
commit 5d3e6f50b3

15 changed files with 567 additions and 345 deletions

142  Cargo.lock  (generated)

@@ -20,9 +20,9 @@ dependencies = [

 [[package]]
 name = "actix-form-data"
-version = "0.6.0-beta.6"
+version = "0.6.0-beta.8"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c6552c90f3283caa08a8114d49f82cb3eacd6038168bc0ffb199b9304f615be2"
+checksum = "304d237617d707993b9210dfaa2c5243ac8bcda5ed1b7a5b6f3b404a4f31a2f3"
 dependencies = [
  "actix-multipart",
  "actix-rt",
@@ -278,9 +278,9 @@ dependencies = [

 [[package]]
 name = "anyhow"
-version = "1.0.43"
+version = "1.0.44"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "28ae2b3dec75a406790005a200b1bd89785afc02517a00ca99ecfe093ee9e6cf"
+checksum = "61604a8f862e1d5c3229fdd78f8b02c68dcf73a4c4b05fd636d12240aaa242c1"

 [[package]]
 name = "atty"
@@ -401,6 +401,7 @@ dependencies = [
  "libc",
  "num-integer",
  "num-traits",
+ "time 0.1.43",
  "winapi",
 ]

@@ -557,12 +558,54 @@ dependencies = [
  "winapi",
 ]

+[[package]]
+name = "futures"
+version = "0.3.17"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a12aa0eb539080d55c3f2d45a67c3b58b6b0773c1a3ca2dfec66d58c97fd66ca"
+dependencies = [
+ "futures-channel",
+ "futures-core",
+ "futures-executor",
+ "futures-io",
+ "futures-sink",
+ "futures-task",
+ "futures-util",
+]
+
+[[package]]
+name = "futures-channel"
+version = "0.3.17"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5da6ba8c3bb3c165d3c7319fc1cc8304facf1fb8db99c5de877183c08a273888"
+dependencies = [
+ "futures-core",
+ "futures-sink",
+]
+
 [[package]]
 name = "futures-core"
 version = "0.3.17"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "88d1c26957f23603395cd326b0ffe64124b818f4449552f960d815cfba83a53d"

+[[package]]
+name = "futures-executor"
+version = "0.3.17"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "45025be030969d763025784f7f355043dc6bc74093e4ecc5000ca4dc50d8745c"
+dependencies = [
+ "futures-core",
+ "futures-task",
+ "futures-util",
+]
+
+[[package]]
+name = "futures-io"
+version = "0.3.17"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "522de2a0fe3e380f1bc577ba0474108faf3f6b18321dbf60b3b9c39a75073377"
+
 [[package]]
 name = "futures-macro"
 version = "0.3.17"
@@ -595,10 +638,13 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "36568465210a3a6ee45e1f165136d68671471a501e632e9a98d96872222b5481"
 dependencies = [
  "autocfg",
+ "futures-channel",
  "futures-core",
+ "futures-io",
  "futures-macro",
  "futures-sink",
  "futures-task",
+ "memchr",
  "pin-project-lite",
  "pin-utils",
  "proc-macro-hack",
@@ -625,6 +671,16 @@ dependencies = [
  "version_check",
 ]

+[[package]]
+name = "gethostname"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e692e296bfac1d2533ef168d0b60ff5897b8b70a4009276834014dd8924cc028"
+dependencies = [
+ "libc",
+ "winapi",
+]
+
 [[package]]
 name = "getrandom"
 version = "0.2.3"
@@ -951,7 +1007,7 @@ dependencies = [

 [[package]]
 name = "pict-rs"
-version = "0.3.0-alpha.30"
+version = "0.3.0-alpha.31"
 dependencies = [
  "actix-form-data",
  "actix-rt",
@@ -975,9 +1031,12 @@ dependencies = [
  "tokio",
  "tokio-util",
  "tracing",
+ "tracing-actix-web",
+ "tracing-bunyan-formatter",
+ "tracing-error",
  "tracing-futures",
+ "tracing-log",
  "tracing-subscriber",
- "uuid",
 ]

 [[package]]
@@ -1522,6 +1581,16 @@ dependencies = [
  "once_cell",
 ]

+[[package]]
+name = "time"
+version = "0.1.43"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ca8a50ef2360fbd1eeb0ecd46795a87a19024eb4b53c5dc916ca1fd95fe62438"
+dependencies = [
+ "libc",
+ "winapi",
+]
+
 [[package]]
 name = "time"
 version = "0.2.27"
@@ -1630,21 +1699,35 @@ dependencies = [

 [[package]]
 name = "tracing"
-version = "0.1.26"
+version = "0.1.27"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "09adeb8c97449311ccd28a427f96fb563e7fd31aabf994189879d9da2394b89d"
+checksum = "c2ba9ab62b7d6497a8638dfda5e5c4fb3b2d5a7fca4118f2b96151c8ef1a437e"
 dependencies = [
  "cfg-if",
+ "log",
  "pin-project-lite",
  "tracing-attributes",
  "tracing-core",
 ]

 [[package]]
-name = "tracing-attributes"
-version = "0.1.15"
+name = "tracing-actix-web"
+version = "0.4.0-beta.12"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c42e6fa53307c8a17e4ccd4dc81cf5ec38db9209f59b222210375b54ee40d1e2"
+checksum = "aef43d92080b0429626deba48d01dad848ad515777b373d7a18eac3f129be359"
+dependencies = [
+ "actix-web",
+ "futures",
+ "tracing",
+ "tracing-futures",
+ "uuid",
+]
+
+[[package]]
+name = "tracing-attributes"
+version = "0.1.16"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "98863d0dd09fa59a1b79c6750ad80dbda6b75f4e71c437a6a1a8cb91a8bcbd77"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -1652,14 +1735,41 @@ dependencies = [
 ]

 [[package]]
-name = "tracing-core"
-version = "0.1.19"
+name = "tracing-bunyan-formatter"
+version = "0.2.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2ca517f43f0fb96e0c3072ed5c275fe5eece87e8cb52f4a77b69226d3b1c9df8"
+checksum = "c408910c9b7eabc0215fe2b4a89f8ec95581a91cea1f7619f7c78caf14cbc2a1"
+dependencies = [
+ "chrono",
+ "gethostname",
+ "log",
+ "serde",
+ "serde_json",
+ "tracing",
+ "tracing-core",
+ "tracing-log",
+ "tracing-subscriber",
+]
+
+[[package]]
+name = "tracing-core"
+version = "0.1.20"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "46125608c26121c81b0c6d693eab5a420e416da7e43c426d2e8f7df8da8a3acf"
 dependencies = [
  "lazy_static",
 ]

+[[package]]
+name = "tracing-error"
+version = "0.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b4d7c0b83d4a500748fa5879461652b361edf5c9d51ede2a2ac03875ca185e24"
+dependencies = [
+ "tracing",
+ "tracing-subscriber",
+]
+
 [[package]]
 name = "tracing-futures"
 version = "0.2.5"
@@ -1693,9 +1803,9 @@ dependencies = [

 [[package]]
 name = "tracing-subscriber"
-version = "0.2.20"
+version = "0.2.22"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b9cbe87a2fa7e35900ce5de20220a582a9483a7063811defce79d7cbd59d4cfe"
+checksum = "62af966210b88ad5776ee3ba12d5f35b8d6a2b2a12168f3080cf02b814d7376b"
 dependencies = [
  "ansi_term 0.12.1",
  "chrono",

Cargo.toml

@@ -1,7 +1,7 @@
 [package]
 name = "pict-rs"
 description = "A simple image hosting service"
-version = "0.3.0-alpha.30"
+version = "0.3.0-alpha.31"
 authors = ["asonix <asonix@asonix.dog>"]
 license = "AGPL-3.0"
 readme = "README.md"
@@ -33,6 +33,10 @@ time = { version = "0.3.0", features = ["serde"] }
 tokio = { version = "1", default-features = false, features = ["fs", "io-util", "process", "sync"] }
 tokio-util = { version = "0.6", default-features = false, features = ["codec"] }
 tracing = "0.1.15"
+tracing-actix-web = { version = "0.4.0-beta.8" }
+tracing-bunyan-formatter = "0.2.6"
+tracing-error = "0.1.2"
 tracing-futures = "0.2.4"
+tracing-log = "0.1.2"
 tracing-subscriber = { version = "0.2.5", features = ["fmt", "tracing-log"] }
-uuid = { version = "0.8", features = ["v4"] }
+# uuid = { version = "0.8", features = ["v4"] }

src/config.rs

@@ -73,6 +73,9 @@ pub(crate) struct Config {
         help = "An optional string to be checked on requests to privileged endpoints"
     )]
     api_key: Option<String>,
+
+    #[structopt(short, long, help = "Enable json logging for the pict-rs server")]
+    json_logging: bool,
 }

 impl Config {
@@ -113,6 +116,10 @@ impl Config {
     pub(crate) fn api_key(&self) -> Option<&str> {
         self.api_key.as_deref()
     }
+
+    pub(crate) fn json_logging(&self) -> bool {
+        self.json_logging
+    }
 }

 #[derive(Debug, thiserror::Error)]
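The new json_logging flag follows the usual structopt pattern: short, long exposes it as -j / --json-logging, and the getter hands the value to main for choosing the subscriber. A minimal standalone sketch of that pattern (hypothetical program, not part of this commit):

    use structopt::StructOpt;

    #[derive(Debug, StructOpt)]
    struct Opts {
        /// Enable json logging (short/long expand to -j / --json-logging)
        #[structopt(short, long)]
        json_logging: bool,
    }

    fn main() {
        let opts = Opts::from_args();
        println!("json logging: {}", opts.json_logging);
    }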

90  src/error.rs

@@ -1,5 +1,56 @@
-use crate::{ffmpeg::VideoError, magick::MagickError};
 use actix_web::{http::StatusCode, HttpResponse, ResponseError};
+use tracing_error::SpanTrace;
+
+pub(crate) struct Error {
+    context: SpanTrace,
+    kind: UploadError,
+}
+
+impl Error {
+    pub(crate) fn kind(&self) -> &UploadError {
+        &self.kind
+    }
+}
+
+impl std::fmt::Debug for Error {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        write!(f, "{}\n", self.kind)
+    }
+}
+
+impl std::fmt::Display for Error {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        write!(f, "{}\n", self.kind)?;
+        std::fmt::Display::fmt(&self.context, f)
+    }
+}
+
+impl std::error::Error for Error {
+    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
+        self.kind.source()
+    }
+}
+
+impl<T> From<T> for Error
+where
+    UploadError: From<T>,
+{
+    fn from(error: T) -> Self {
+        Error {
+            kind: UploadError::from(error),
+            context: SpanTrace::capture(),
+        }
+    }
+}
+
+impl From<sled::transaction::TransactionError<Error>> for Error {
+    fn from(e: sled::transaction::TransactionError<Error>) -> Self {
+        match e {
+            sled::transaction::TransactionError::Abort(t) => t,
+            sled::transaction::TransactionError::Storage(e) => e.into(),
+        }
+    }
+}

 #[derive(Debug, thiserror::Error)]
 pub(crate) enum UploadError {
@@ -39,6 +90,9 @@ pub(crate) enum UploadError {
     #[error("Unsupported image format")]
     UnsupportedFormat,

+    #[error("Invalid media dimensions")]
+    Dimensions,
+
     #[error("Unable to download image, bad response {0}")]
     Download(actix_web::http::StatusCode),

@@ -66,11 +120,8 @@ pub(crate) enum UploadError {
     #[error("Range header not satisfiable")]
     Range,

-    #[error("{0}")]
-    VideoError(#[from] VideoError),
-
-    #[error("{0}")]
-    MagickError(#[from] MagickError),
+    #[error("Command failed")]
+    Status,
 }

 impl From<awc::error::SendRequestError> for UploadError {
@@ -79,18 +130,13 @@ impl From<awc::error::SendRequestError> for UploadError {
     }
 }

-impl From<sled::transaction::TransactionError<UploadError>> for UploadError {
-    fn from(e: sled::transaction::TransactionError<UploadError>) -> Self {
-        match e {
-            sled::transaction::TransactionError::Abort(t) => t,
-            sled::transaction::TransactionError::Storage(e) => e.into(),
-        }
-    }
-}
-
-impl From<actix_form_data::Error> for UploadError {
-    fn from(e: actix_form_data::Error) -> Self {
-        UploadError::Upload(e.to_string())
+impl From<actix_form_data::Error<Error>> for Error {
+    fn from(e: actix_form_data::Error<Error>) -> Self {
+        if let actix_form_data::Error::FileFn(e) = e {
+            return e;
+        }
+
+        UploadError::Upload(e.to_string()).into()
     }
 }

@@ -106,12 +152,10 @@ impl From<tokio::sync::AcquireError> for UploadError {
     }
 }

-impl ResponseError for UploadError {
+impl ResponseError for Error {
     fn status_code(&self) -> StatusCode {
-        match self {
-            UploadError::VideoError(_)
-            | UploadError::MagickError(_)
-            | UploadError::DuplicateAlias
+        match self.kind {
+            UploadError::DuplicateAlias
             | UploadError::NoFiles
             | UploadError::Upload(_)
             | UploadError::ParseReq(_) => StatusCode::BAD_REQUEST,
@@ -126,8 +170,8 @@ impl ResponseError for UploadError {
         HttpResponse::build(self.status_code())
             .content_type("application/json")
             .body(
-                serde_json::to_string(&serde_json::json!({ "msg": self.to_string() }))
-                    .unwrap_or_else(|_| r#"{"msg":"Internal Server Error"}"#.to_string()),
+                serde_json::to_string(&serde_json::json!({ "msg": self.kind.to_string() }))
+                    .unwrap_or_else(|_| r#"{"msg":"Request failed"}"#.to_string()),
             )
     }
 }
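The new Error type pairs every UploadError with a tracing_error::SpanTrace captured at the moment of conversion, so the Display output includes the span stack that was active when the failure happened. A stripped-down sketch of the same pattern with placeholder types (not the pict-rs code, and it only yields useful traces when an ErrorLayer is installed on the subscriber):

    use tracing_error::SpanTrace;

    #[derive(Debug, thiserror::Error)]
    #[error("upload failed")]
    struct Kind;

    struct Error {
        context: SpanTrace,
        kind: Kind,
    }

    impl<T> From<T> for Error
    where
        Kind: From<T>,
    {
        fn from(t: T) -> Self {
            Error {
                kind: Kind::from(t),
                // records the currently-entered spans; printed via Display on the SpanTrace
                context: SpanTrace::capture(),
            }
        }
    }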
src/exiftool.rs

@@ -1,9 +1,9 @@
 use crate::stream::Process;
 use actix_web::web::Bytes;
-use tokio::{io::AsyncRead, process::Command};
+use tokio::io::AsyncRead;

 pub(crate) fn clear_metadata_bytes_read(input: Bytes) -> std::io::Result<impl AsyncRead + Unpin> {
-    let process = Process::spawn(Command::new("exiftool").args(["-all=", "-", "-out", "-"]))?;
+    let process = Process::run("exiftool", &["-all=", "-", "-out", "-"])?;

     Ok(process.bytes_read(input).unwrap())
 }
src/ffmpeg.rs

@@ -1,21 +1,17 @@
-use crate::stream::Process;
+use crate::{
+    error::{Error, UploadError},
+    stream::Process,
+};
 use actix_web::web::Bytes;
 use tokio::{io::AsyncRead, process::Command};
+use tracing::instrument;

-#[derive(Debug, thiserror::Error)]
-pub(crate) enum VideoError {
-    #[error("Failed to interface with transcode process")]
-    IO(#[from] std::io::Error),
-
-    #[error("Failed to convert file")]
-    Status,
-}
-
 pub(crate) enum InputFormat {
     Gif,
     Mp4,
 }

+#[derive(Debug)]
 pub(crate) enum ThumbnailFormat {
     Jpeg,
     // Webp,
@@ -50,55 +46,72 @@ pub(crate) fn to_mp4_bytes(
     input: Bytes,
     input_format: InputFormat,
 ) -> std::io::Result<impl AsyncRead + Unpin> {
-    let process = Process::spawn(Command::new("ffmpeg").args([
-        "-f",
-        input_format.as_format(),
-        "-i",
-        "pipe:",
-        "-movflags",
-        "faststart+frag_keyframe+empty_moov",
-        "-pix_fmt",
-        "yuv420p",
-        "-vf",
-        "scale=trunc(iw/2)*2:trunc(ih/2)*2",
-        "-an",
-        "-codec",
-        "h264",
-        "-f",
-        "mp4",
-        "pipe:",
-    ]))?;
+    let process = Process::run(
+        "ffmpeg",
+        &[
+            "-f",
+            input_format.as_format(),
+            "-i",
+            "pipe:",
+            "-movflags",
+            "faststart+frag_keyframe+empty_moov",
+            "-pix_fmt",
+            "yuv420p",
+            "-vf",
+            "scale=trunc(iw/2)*2:trunc(ih/2)*2",
+            "-an",
+            "-codec",
+            "h264",
+            "-f",
+            "mp4",
+            "pipe:",
+        ],
+    )?;

     Ok(process.bytes_read(input).unwrap())
 }

+#[instrument(name = "Create video thumbnail", skip(from, to))]
 pub(crate) async fn thumbnail<P1, P2>(
     from: P1,
     to: P2,
     format: ThumbnailFormat,
-) -> Result<(), VideoError>
+) -> Result<(), Error>
 where
     P1: AsRef<std::path::Path>,
     P2: AsRef<std::path::Path>,
 {
-    let mut child = Command::new("ffmpeg")
-        .arg(&"-i")
-        .arg(&from.as_ref())
-        .args([
-            "-vframes",
-            "1",
-            "-codec",
-            format.as_codec(),
-            "-f",
-            format.as_format(),
-        ])
-        .arg(&to.as_ref())
+    let command = "ffmpeg";
+    let first_arg = "-i";
+    let args = [
+        "-vframes",
+        "1",
+        "-codec",
+        format.as_codec(),
+        "-f",
+        format.as_format(),
+    ];
+
+    tracing::info!(
+        "Spawning command: {} {} {:?} {:?} {:?}",
+        command,
+        first_arg,
+        from.as_ref(),
+        args,
+        to.as_ref()
+    );
+
+    let mut child = Command::new(command)
+        .arg(first_arg)
+        .arg(from.as_ref())
+        .args(args)
+        .arg(to.as_ref())
        .spawn()?;

     let status = child.wait().await?;

     if !status.success() {
-        return Err(VideoError::Status);
+        return Err(UploadError::Status.into());
     }

     Ok(())
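Process::run lives in src/stream.rs, which is not shown in this commit view; judging from the call sites it is a thin convenience over the existing Process::spawn that also gives one place to log the spawned command. A rough sketch of what such a helper could look like, purely as an assumption about its shape:

    use tokio::process::Command;

    pub(crate) struct Process {
        child: tokio::process::Child,
    }

    impl Process {
        // hypothetical: mirrors the `Process::run("ffmpeg", &[...])` calls in this diff
        pub(crate) fn run(command: &str, args: &[&str]) -> std::io::Result<Self> {
            tracing::info!("Spawning command: {} {:?}", command, args);
            Self::spawn(Command::new(command).args(args))
        }

        pub(crate) fn spawn(cmd: &mut Command) -> std::io::Result<Self> {
            cmd.stdin(std::process::Stdio::piped())
                .stdout(std::process::Stdio::piped())
                .spawn()
                .map(|child| Process { child })
        }
    }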

124  src/magick.rs

@@ -1,21 +1,14 @@
-use crate::{config::Format, stream::Process};
+use crate::{
+    config::Format,
+    error::{Error, UploadError},
+    stream::Process,
+};
 use actix_web::web::Bytes;
 use tokio::{
     io::{AsyncRead, AsyncReadExt},
     process::Command,
 };
+use tracing::instrument;

-#[derive(Debug, thiserror::Error)]
-pub(crate) enum MagickError {
-    #[error("{0}")]
-    IO(#[from] std::io::Error),
-
-    #[error("Invalid format")]
-    Format,
-
-    #[error("Image too large")]
-    Dimensions,
-}
-
 pub(crate) enum ValidInputType {
     Mp4,
@@ -32,19 +25,16 @@ pub(crate) struct Details {
 }

 pub(crate) fn clear_metadata_bytes_read(input: Bytes) -> std::io::Result<impl AsyncRead + Unpin> {
-    let process = Process::spawn(Command::new("magick").args(["convert", "-", "-strip", "-"]))?;
+    let process = Process::run("magick", &["convert", "-", "-strip", "-"])?;

     Ok(process.bytes_read(input).unwrap())
 }

-pub(crate) async fn details_bytes(input: Bytes) -> Result<Details, MagickError> {
-    let process = Process::spawn(Command::new("magick").args([
-        "identify",
-        "-ping",
-        "-format",
-        "%w %h | %m\n",
-        "-",
-    ]))?;
+pub(crate) async fn details_bytes(input: Bytes) -> Result<Details, Error> {
+    let process = Process::run(
+        "magick",
+        &["identify", "-ping", "-format", "%w %h | %m\n", "-"],
+    )?;

     let mut reader = process.bytes_read(input).unwrap();

@@ -59,22 +49,30 @@ pub(crate) fn convert_bytes_read(
     input: Bytes,
     format: Format,
 ) -> std::io::Result<impl AsyncRead + Unpin> {
-    let process = Process::spawn(Command::new("magick").args([
-        "convert",
-        "-",
-        format!("{}:-", format.to_magick_format()).as_str(),
-    ]))?;
+    let process = Process::run(
+        "magick",
+        &[
+            "convert",
+            "-",
+            format!("{}:-", format.to_magick_format()).as_str(),
+        ],
+    )?;

     Ok(process.bytes_read(input).unwrap())
 }

-pub(crate) async fn details<P>(file: P) -> Result<Details, MagickError>
+pub(crate) async fn details<P>(file: P) -> Result<Details, Error>
 where
     P: AsRef<std::path::Path>,
 {
-    let output = Command::new("magick")
-        .args([&"identify", &"-ping", &"-format", &"%w %h | %m\n"])
-        .arg(&file.as_ref())
+    let command = "magick";
+    let args = ["identify", "-ping", "-format", "%w %h | %m\n"];
+    let last_arg = file.as_ref();
+
+    tracing::info!("Spawning command: {} {:?} {:?}", command, args, last_arg);
+    let output = Command::new(command)
+        .args(args)
+        .arg(last_arg)
         .output()
         .await?;

@@ -83,23 +81,39 @@ where
     parse_details(s)
 }

-fn parse_details(s: std::borrow::Cow<'_, str>) -> Result<Details, MagickError> {
+fn parse_details(s: std::borrow::Cow<'_, str>) -> Result<Details, Error> {
     let mut lines = s.lines();
-    let first = lines.next().ok_or(MagickError::Format)?;
+    let first = lines.next().ok_or(UploadError::UnsupportedFormat)?;

     let mut segments = first.split('|');

-    let dimensions = segments.next().ok_or(MagickError::Format)?.trim();
+    let dimensions = segments
+        .next()
+        .ok_or(UploadError::UnsupportedFormat)?
+        .trim();
     tracing::debug!("dimensions: {}", dimensions);
     let mut dims = dimensions.split(' ');
-    let width = dims.next().ok_or(MagickError::Format)?.trim().parse()?;
-    let height = dims.next().ok_or(MagickError::Format)?.trim().parse()?;
+    let width = dims
+        .next()
+        .ok_or(UploadError::UnsupportedFormat)?
+        .trim()
+        .parse()
+        .map_err(|_| UploadError::UnsupportedFormat)?;
+    let height = dims
+        .next()
+        .ok_or(UploadError::UnsupportedFormat)?
+        .trim()
+        .parse()
+        .map_err(|_| UploadError::UnsupportedFormat)?;

-    let format = segments.next().ok_or(MagickError::Format)?.trim();
+    let format = segments
+        .next()
+        .ok_or(UploadError::UnsupportedFormat)?
+        .trim();
     tracing::debug!("format: {}", format);

     if !lines.all(|item| item.ends_with(format)) {
-        return Err(MagickError::Format);
+        return Err(UploadError::UnsupportedFormat.into());
     }

     let mime_type = match format {
@@ -108,7 +122,7 @@ fn parse_details(s: std::borrow::Cow<'_, str>) -> Result<Details, MagickError> {
         "PNG" => mime::IMAGE_PNG,
         "JPEG" => mime::IMAGE_JPEG,
         "WEBP" => crate::validate::image_webp(),
-        _ => return Err(MagickError::Format),
+        _ => return Err(UploadError::UnsupportedFormat.into()),
     };

     Ok(Details {
@@ -118,29 +132,41 @@ fn parse_details(s: std::borrow::Cow<'_, str>) -> Result<Details, MagickError> {
     })
 }

-pub(crate) async fn input_type_bytes(input: Bytes) -> Result<ValidInputType, MagickError> {
+pub(crate) async fn input_type_bytes(input: Bytes) -> Result<ValidInputType, Error> {
     details_bytes(input).await?.validate_input()
 }

+#[instrument(name = "Spawning process command", skip(input))]
 pub(crate) fn process_image_write_read(
     input: impl AsyncRead + Unpin + 'static,
     args: Vec<String>,
     format: Format,
 ) -> std::io::Result<impl AsyncRead + Unpin> {
+    let command = "magick";
+    let convert_args = ["convert", "-"];
+    let last_arg = format!("{}:-", format.to_magick_format());
+
+    tracing::info!(
+        "Spawning command: {} {:?} {:?} {}",
+        command,
+        convert_args,
+        args,
+        last_arg
+    );
     let process = Process::spawn(
-        Command::new("magick")
-            .args([&"convert", &"-"])
+        Command::new(command)
+            .args(convert_args)
             .args(args)
-            .arg(format!("{}:-", format.to_magick_format())),
+            .arg(last_arg),
     )?;

     Ok(process.write_read(input).unwrap())
 }

 impl Details {
-    fn validate_input(&self) -> Result<ValidInputType, MagickError> {
+    fn validate_input(&self) -> Result<ValidInputType, Error> {
         if self.width > crate::CONFIG.max_width() || self.height > crate::CONFIG.max_height() {
-            return Err(MagickError::Dimensions);
+            return Err(UploadError::Dimensions.into());
         }

         let input_type = match (self.mime_type.type_(), self.mime_type.subtype()) {
@@ -149,15 +175,9 @@ impl Details {
             (mime::IMAGE, mime::PNG) => ValidInputType::Png,
             (mime::IMAGE, mime::JPEG) => ValidInputType::Jpeg,
             (mime::IMAGE, subtype) if subtype.as_str() == "webp" => ValidInputType::Webp,
-            _ => return Err(MagickError::Format),
+            _ => return Err(UploadError::UnsupportedFormat.into()),
         };

         Ok(input_type)
     }
 }
-
-impl From<std::num::ParseIntError> for MagickError {
-    fn from(_: std::num::ParseIntError) -> MagickError {
-        MagickError::Format
-    }
-}
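With MagickError (and its From<ParseIntError> impl) deleted, `?` can no longer auto-convert integer parse failures, so the dimension parsing maps them explicitly to UploadError::UnsupportedFormat. The pattern in isolation (hypothetical error string instead of the real enum):

    fn parse_dimension(field: &str) -> Result<usize, &'static str> {
        // explicit map_err replaces the removed From<ParseIntError> conversion
        field.trim().parse().map_err(|_| "unsupported format")
    }

    fn main() {
        assert_eq!(parse_dimension(" 640 "), Ok(640));
        assert!(parse_dimension("4x3").is_err());
    }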

127  src/main.rs

@@ -2,7 +2,6 @@ use actix_form_data::{Field, Form, Value};
 use actix_web::{
     guard,
     http::header::{CacheControl, CacheDirective, LastModified, ACCEPT_RANGES},
-    middleware::Logger,
     web, App, HttpResponse, HttpResponseBuilder, HttpServer,
 };
 use awc::Client;
@@ -28,8 +27,12 @@ use tokio::{
         Semaphore,
     },
 };
-use tracing::{debug, error, info, instrument, Span};
-use tracing_subscriber::EnvFilter;
+use tracing::{debug, error, info, instrument, subscriber::set_global_default, Span};
+use tracing_actix_web::TracingLogger;
+use tracing_bunyan_formatter::{BunyanFormattingLayer, JsonStorageLayer};
+use tracing_error::ErrorLayer;
+use tracing_log::LogTracer;
+use tracing_subscriber::{layer::SubscriberExt, EnvFilter, Registry};

 mod config;
 mod error;
@@ -40,14 +43,16 @@ mod middleware;
 mod migrate;
 mod processor;
 mod range;
+mod root_span_builder;
 mod stream;
 mod upload_manager;
 mod validate;

 use self::{
     config::{Config, Format},
-    error::UploadError,
-    middleware::{Deadline, Internal, Tracing},
+    error::{Error, UploadError},
+    middleware::{Deadline, Internal},
+    root_span_builder::RootSpanBuilder,
     upload_manager::{Details, UploadManager, UploadManagerSession},
     validate::{image_webp, video_mp4},
 };
@@ -90,7 +95,7 @@ struct CancelSafeProcessor<F> {

 impl<F> CancelSafeProcessor<F>
 where
-    F: Future<Output = Result<(Details, web::Bytes), UploadError>> + Unpin,
+    F: Future<Output = Result<(Details, web::Bytes), Error>> + Unpin,
 {
     pub(crate) fn new(path: PathBuf, fut: F) -> Self {
         let entry = PROCESS_MAP.entry(path.clone());
@@ -117,15 +122,15 @@ where

 impl<F> Future for CancelSafeProcessor<F>
 where
-    F: Future<Output = Result<(Details, web::Bytes), UploadError>> + Unpin,
+    F: Future<Output = Result<(Details, web::Bytes), Error>> + Unpin,
 {
-    type Output = Result<(Details, web::Bytes), UploadError>;
+    type Output = Result<(Details, web::Bytes), Error>;

     fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
         if let Some(ref mut rx) = self.receiver {
             Pin::new(rx)
                 .poll(cx)
-                .map(|res| res.map_err(|_| UploadError::Canceled))
+                .map(|res| res.map_err(|_| UploadError::Canceled.into()))
         } else {
             Pin::new(&mut self.fut).poll(cx).map(|res| {
                 let opt = PROCESS_MAP.remove(&self.path);
@@ -151,8 +156,8 @@ impl<F> Drop for CancelSafeProcessor<F> {
 }

 // try moving a file
-#[instrument]
-async fn safe_move_file(from: PathBuf, to: PathBuf) -> Result<(), UploadError> {
+#[instrument(name = "Moving file")]
+async fn safe_move_file(from: PathBuf, to: PathBuf) -> Result<(), Error> {
     if let Some(path) = to.parent() {
         debug!("Creating directory {:?}", path);
         tokio::fs::create_dir_all(path).await?;
@@ -164,7 +169,7 @@ async fn safe_move_file(from: PathBuf, to: PathBuf) -> Result<(), UploadError> {
             return Err(e.into());
         }
     } else {
-        return Err(UploadError::FileExists);
+        return Err(UploadError::FileExists.into());
     }

     debug!("Moving {:?} to {:?}", from, to);
@@ -173,7 +178,7 @@ async fn safe_move_file(from: PathBuf, to: PathBuf) -> Result<(), UploadError> {
     Ok(())
 }

-async fn safe_create_parent<P>(path: P) -> Result<(), UploadError>
+async fn safe_create_parent<P>(path: P) -> Result<(), Error>
 where
     P: AsRef<std::path::Path>,
 {
@@ -186,8 +191,8 @@ where
 }

 // Try writing to a file
-#[instrument(skip(bytes))]
-async fn safe_save_file(path: PathBuf, mut bytes: web::Bytes) -> Result<(), UploadError> {
+#[instrument(name = "Saving file", skip(bytes))]
+async fn safe_save_file(path: PathBuf, mut bytes: web::Bytes) -> Result<(), Error> {
     if let Some(path) = path.parent() {
         // create the directory for the file
         debug!("Creating directory {:?}", path);
@@ -240,7 +245,7 @@ pub(crate) fn tmp_file() -> PathBuf {
     path
 }

-fn to_ext(mime: mime::Mime) -> Result<&'static str, UploadError> {
+fn to_ext(mime: mime::Mime) -> Result<&'static str, Error> {
     if mime == mime::IMAGE_PNG {
         Ok(".png")
     } else if mime == mime::IMAGE_JPEG {
@@ -250,16 +255,16 @@ fn to_ext(mime: mime::Mime) -> Result<&'static str, UploadError> {
     } else if mime == image_webp() {
         Ok(".webp")
     } else {
-        Err(UploadError::UnsupportedFormat)
+        Err(UploadError::UnsupportedFormat.into())
     }
 }

 /// Handle responding to succesful uploads
-#[instrument(skip(value, manager))]
+#[instrument(name = "Uploaded files", skip(value, manager))]
 async fn upload(
     value: Value<UploadManagerSession>,
     manager: web::Data<UploadManager>,
-) -> Result<HttpResponse, UploadError> {
+) -> Result<HttpResponse, Error> {
     let images = value
         .map()
         .and_then(|mut m| m.remove("images"))
@@ -319,16 +324,16 @@ struct UrlQuery {
 }

 /// download an image from a URL
-#[instrument(skip(client, manager))]
+#[instrument(name = "Downloading file", skip(client, manager))]
 async fn download(
     client: web::Data<Client>,
     manager: web::Data<UploadManager>,
     query: web::Query<UrlQuery>,
-) -> Result<HttpResponse, UploadError> {
+) -> Result<HttpResponse, Error> {
     let mut res = client.get(&query.url).send().await?;

     if !res.status().is_success() {
-        return Err(UploadError::Download(res.status()));
+        return Err(UploadError::Download(res.status()).into());
     }

     let fut = res.body().limit(CONFIG.max_file_size() * MEGABYTES);
@@ -369,11 +374,11 @@ async fn download(
 }

 /// Delete aliases and files
-#[instrument(skip(manager))]
+#[instrument(name = "Deleting file", skip(manager))]
 async fn delete(
     manager: web::Data<UploadManager>,
     path_entries: web::Path<(String, String)>,
-) -> Result<HttpResponse, UploadError> {
+) -> Result<HttpResponse, Error> {
     let (alias, token) = path_entries.into_inner();

     manager.delete(token, alias).await?;
@@ -388,7 +393,7 @@ async fn prepare_process(
     ext: &str,
     manager: &UploadManager,
     whitelist: &Option<HashSet<String>>,
-) -> Result<(Format, String, PathBuf, Vec<String>), UploadError> {
+) -> Result<(Format, String, PathBuf, Vec<String>), Error> {
     let (alias, operations) =
         query
             .into_inner()
@@ -403,7 +408,7 @@ async fn prepare_process(
         });

     if alias.is_empty() {
-        return Err(UploadError::MissingFilename);
+        return Err(UploadError::MissingFilename.into());
     }

     let name = manager.from_alias(alias).await?;
@@ -430,12 +435,13 @@ async fn prepare_process(
     Ok((format, name, thumbnail_path, thumbnail_args))
 }

+#[instrument(name = "Fetching derived details", skip(manager, whitelist))]
 async fn process_details(
     query: web::Query<ProcessQuery>,
     ext: web::Path<String>,
     manager: web::Data<UploadManager>,
     whitelist: web::Data<Option<HashSet<String>>>,
-) -> Result<HttpResponse, UploadError> {
+) -> Result<HttpResponse, Error> {
     let (_, name, thumbnail_path, _) =
         prepare_process(query, ext.as_str(), &manager, &whitelist).await?;

@@ -447,14 +453,14 @@ async fn process_details(
 }

 /// Process files
-#[instrument(skip(manager, whitelist))]
+#[instrument(name = "Processing image", skip(manager, whitelist))]
 async fn process(
     range: Option<range::RangeHeader>,
     query: web::Query<ProcessQuery>,
     ext: web::Path<String>,
     manager: web::Data<UploadManager>,
     whitelist: web::Data<Option<HashSet<String>>>,
-) -> Result<HttpResponse, UploadError> {
+) -> Result<HttpResponse, Error> {
     let (format, name, thumbnail_path, thumbnail_args) =
         prepare_process(query, ext.as_str(), &manager, &whitelist).await?;

@@ -544,7 +550,7 @@ async fn process(
             drop(entered);
         });

-        Ok((details, bytes)) as Result<(Details, web::Bytes), UploadError>
+        Ok((details, bytes)) as Result<(Details, web::Bytes), Error>
     };

     let (details, bytes) =
@@ -553,11 +559,11 @@ async fn process(
     return match range {
         Some(range_header) => {
             if !range_header.is_bytes() {
-                return Err(UploadError::Range);
+                return Err(UploadError::Range.into());
             }

             if range_header.is_empty() {
-                Err(UploadError::Range)
+                Err(UploadError::Range.into())
             } else if range_header.len() == 1 {
                 let range = range_header.ranges().next().unwrap();
                 let content_range = range.to_content_range(bytes.len() as u64);
@@ -573,12 +579,12 @@ async fn process(
                     details.system_time(),
                 ))
             } else {
-                Err(UploadError::Range)
+                Err(UploadError::Range.into())
             }
         }
         None => Ok(srv_response(
             HttpResponse::Ok(),
-            once(ready(Ok(bytes) as Result<_, UploadError>)),
+            once(ready(Ok(bytes) as Result<_, Error>)),
             details.content_type(),
             7 * DAYS,
             details.system_time(),
@@ -600,10 +606,11 @@ async fn process(
 }

 /// Fetch file details
+#[instrument(name = "Fetching details", skip(manager))]
 async fn details(
     alias: web::Path<String>,
     manager: web::Data<UploadManager>,
-) -> Result<HttpResponse, UploadError> {
+) -> Result<HttpResponse, Error> {
     let name = manager.from_alias(alias.into_inner()).await?;
     let mut path = manager.image_dir();
     path.push(name.clone());
@@ -624,12 +631,12 @@ async fn details(
 }

 /// Serve files
-#[instrument(skip(manager))]
+#[instrument(name = "Serving file", skip(manager))]
 async fn serve(
     range: Option<range::RangeHeader>,
     alias: web::Path<String>,
     manager: web::Data<UploadManager>,
-) -> Result<HttpResponse, UploadError> {
+) -> Result<HttpResponse, Error> {
     let name = manager.from_alias(alias.into_inner()).await?;
     let mut path = manager.image_dir();
     path.push(name.clone());
@@ -653,16 +660,16 @@ async fn ranged_file_resp(
     path: PathBuf,
     range: Option<range::RangeHeader>,
     details: Details,
-) -> Result<HttpResponse, UploadError> {
+) -> Result<HttpResponse, Error> {
     let (builder, stream) = match range {
         //Range header exists - return as ranged
         Some(range_header) => {
             if !range_header.is_bytes() {
-                return Err(UploadError::Range);
+                return Err(UploadError::Range.into());
             }

             if range_header.is_empty() {
-                return Err(UploadError::Range);
+                return Err(UploadError::Range.into());
             } else if range_header.len() == 1 {
                 let file = tokio::fs::File::open(path).await?;

@@ -675,7 +682,7 @@ async fn ranged_file_resp(

                 (builder, range.chop_file(file).await?)
             } else {
-                return Err(UploadError::Range);
+                return Err(UploadError::Range.into());
             }
         }
         //No Range header in the request - return the entire document
@@ -727,10 +734,11 @@ enum FileOrAlias {
     Alias { alias: String },
 }

+#[instrument(name = "Purging file", skip(upload_manager))]
 async fn purge(
     query: web::Query<FileOrAlias>,
     upload_manager: web::Data<UploadManager>,
-) -> Result<HttpResponse, UploadError> {
+) -> Result<HttpResponse, Error> {
     let aliases = match query.into_inner() {
         FileOrAlias::File { file } => upload_manager.aliases_by_filename(file).await?,
         FileOrAlias::Alias { alias } => upload_manager.aliases_by_alias(alias).await?,
@@ -748,10 +756,11 @@ async fn purge(
     })))
 }

+#[instrument(name = "Fetching aliases", skip(upload_manager))]
 async fn aliases(
     query: web::Query<FileOrAlias>,
     upload_manager: web::Data<UploadManager>,
-) -> Result<HttpResponse, UploadError> {
+) -> Result<HttpResponse, Error> {
     let aliases = match query.into_inner() {
         FileOrAlias::File { file } => upload_manager.aliases_by_filename(file).await?,
         FileOrAlias::Alias { alias } => upload_manager.aliases_by_alias(alias).await?,
@@ -768,10 +777,11 @@ struct ByAlias {
     alias: String,
 }

+#[instrument(name = "Fetching filename", skip(upload_manager))]
 async fn filename_by_alias(
     query: web::Query<ByAlias>,
     upload_manager: web::Data<UploadManager>,
-) -> Result<HttpResponse, UploadError> {
+) -> Result<HttpResponse, Error> {
     let filename = upload_manager.from_alias(query.into_inner().alias).await?;

     Ok(HttpResponse::Ok().json(&serde_json::json!({
@@ -782,13 +792,23 @@ async fn filename_by_alias(

 #[actix_rt::main]
 async fn main() -> Result<(), anyhow::Error> {
-    if std::env::var("RUST_LOG").is_err() {
-        std::env::set_var("RUST_LOG", "info");
-    }
+    let env_filter = EnvFilter::try_from_default_env().unwrap_or_else(|_| EnvFilter::new("info"));
+    LogTracer::init()?;

-    tracing_subscriber::fmt()
-        .with_env_filter(EnvFilter::from_default_env())
-        .init();
+    let subscriber = Registry::default()
+        .with(env_filter)
+        .with(ErrorLayer::default());
+
+    if CONFIG.json_logging() {
+        let formatting_layer = BunyanFormattingLayer::new("pict-rs".into(), std::io::stdout);
+
+        let subscriber = subscriber.with(JsonStorageLayer).with(formatting_layer);
+
+        set_global_default(subscriber)?;
+    } else {
+        let subscriber = subscriber.with(tracing_subscriber::fmt::layer());
+        set_global_default(subscriber)?;
+    };

     let manager = UploadManager::new(CONFIG.data_dir(), CONFIG.format()).await?;

@@ -799,7 +819,7 @@ async fn main() -> Result<(), anyhow::Error> {
     let form = Form::new()
         .max_files(10)
         .max_file_size(CONFIG.max_file_size() * MEGABYTES)
-        .transform_error(|e| UploadError::from(e).into())
+        .transform_error(|e| Error::from(e).into())
         .field(
             "images",
             Field::array(Field::file(move |filename, _, stream| {
@@ -828,7 +848,7 @@ async fn main() -> Result<(), anyhow::Error> {
     let import_form = Form::new()
         .max_files(10)
         .max_file_size(CONFIG.max_file_size() * MEGABYTES)
-        .transform_error(|e| UploadError::from(e).into())
+        .transform_error(|e| Error::from(e).into())
        .field(
             "images",
             Field::array(Field::file(move |filename, content_type, stream| {
@@ -858,8 +878,7 @@ async fn main() -> Result<(), anyhow::Error> {
             .finish();

         App::new()
-            .wrap(Logger::default())
-            .wrap(Tracing)
+            .wrap(TracingLogger::<RootSpanBuilder>::new())
             .wrap(Deadline)
             .app_data(web::Data::new(manager.clone()))
             .app_data(web::Data::new(client))
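The logger setup switches from a single tracing_subscriber::fmt() builder to a layered Registry, with the Bunyan JSON layers or the plain fmt layer chosen at runtime from the new config flag. The same composition as a self-contained function (names outside the diff, such as init_tracing, are made up here):

    use tracing::subscriber::set_global_default;
    use tracing_bunyan_formatter::{BunyanFormattingLayer, JsonStorageLayer};
    use tracing_error::ErrorLayer;
    use tracing_log::LogTracer;
    use tracing_subscriber::{layer::SubscriberExt, EnvFilter, Registry};

    fn init_tracing(json: bool) -> Result<(), Box<dyn std::error::Error>> {
        // route log-crate records into tracing
        LogTracer::init()?;

        let env_filter =
            EnvFilter::try_from_default_env().unwrap_or_else(|_| EnvFilter::new("info"));

        // ErrorLayer is what makes SpanTrace::capture() in the new Error type useful
        let subscriber = Registry::default()
            .with(env_filter)
            .with(ErrorLayer::default());

        if json {
            let formatting_layer = BunyanFormattingLayer::new("pict-rs".into(), std::io::stdout);
            set_global_default(subscriber.with(JsonStorageLayer).with(formatting_layer))?;
        } else {
            set_global_default(subscriber.with(tracing_subscriber::fmt::layer()))?;
        }

        Ok(())
    }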
src/middleware.rs

@@ -10,8 +10,6 @@ use std::{
     pin::Pin,
     task::{Context, Poll},
 };
-use tracing_futures::{Instrument, Instrumented};
-use uuid::Uuid;

 pub(crate) struct Deadline;
 pub(crate) struct DeadlineMiddleware<S> {
@@ -29,12 +27,6 @@ pub(crate) struct DeadlineFuture<F> {
     inner: DeadlineFutureInner<F>,
 }

-pub(crate) struct Tracing;
-
-pub(crate) struct TracingMiddleware<S> {
-    inner: S,
-}
-
 pub(crate) struct Internal(pub(crate) Option<String>);
 pub(crate) struct InternalMiddleware<S>(Option<String>, S);
 #[derive(Clone, Debug, thiserror::Error)]
@@ -171,44 +163,6 @@ where
     }
 }

-impl<S, Request> Transform<S, Request> for Tracing
-where
-    S: Service<Request>,
-    S::Future: 'static,
-{
-    type Response = S::Response;
-    type Error = S::Error;
-    type InitError = ();
-    type Transform = TracingMiddleware<S>;
-    type Future = Ready<Result<Self::Transform, Self::InitError>>;
-
-    fn new_transform(&self, service: S) -> Self::Future {
-        ready(Ok(TracingMiddleware { inner: service }))
-    }
-}
-
-impl<S, Request> Service<Request> for TracingMiddleware<S>
-where
-    S: Service<Request>,
-    S::Future: 'static,
-{
-    type Response = S::Response;
-    type Error = S::Error;
-    type Future = Instrumented<S::Future>;
-
-    fn poll_ready(&self, cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {
-        self.inner.poll_ready(cx)
-    }
-
-    fn call(&self, req: Request) -> Self::Future {
-        let uuid = Uuid::new_v4();
-
-        self.inner
-            .call(req)
-            .instrument(tracing::info_span!("request", ?uuid))
-    }
-}
-
 impl<S> Transform<S, ServiceRequest> for Internal
 where
     S: Service<ServiceRequest, Error = actix_web::Error>,
@@ -1,8 +1,11 @@
-use crate::{error::UploadError, ffmpeg::ThumbnailFormat};
+use crate::{
+    error::{Error, UploadError},
+    ffmpeg::ThumbnailFormat,
+};
 use std::path::{Path, PathBuf};
 use tracing::{debug, error, instrument};

-fn ptos(path: &Path) -> Result<String, UploadError> {
+fn ptos(path: &Path) -> Result<String, Error> {
     Ok(path.to_str().ok_or(UploadError::Path)?.to_owned())
 }

@@ -297,7 +300,7 @@ impl Exists {

 pub(crate) async fn prepare_image(
     original_file: PathBuf,
-) -> Result<Option<(PathBuf, Exists)>, UploadError> {
+) -> Result<Option<(PathBuf, Exists)>, Error> {
     let original_path_str = ptos(&original_file)?;
     let jpg_path = format!("{}.jpg", original_path_str);
     let jpg_path = PathBuf::from(jpg_path);
@@ -317,11 +320,13 @@ pub(crate) async fn prepare_image(
         if let Err(e) = res {
             error!("transcode error: {:?}", e);
             tokio::fs::remove_file(&tmpfile).await?;
-            return Err(e.into());
+            return Err(e);
         }

         return match crate::safe_move_file(tmpfile, jpg_path.clone()).await {
-            Err(UploadError::FileExists) => Ok(Some((jpg_path, Exists::Exists))),
+            Err(e) if matches!(e.kind(), UploadError::FileExists) => {
+                Ok(Some((jpg_path, Exists::Exists)))
+            }
             Err(e) => Err(e),
             _ => Ok(Some((jpg_path, Exists::New))),
         };
23
src/range.rs
@@ -1,4 +1,7 @@
-use crate::{stream::bytes_stream, UploadError};
+use crate::{
+    error::{Error, UploadError},
+    stream::bytes_stream,
+};
 use actix_web::{
     dev::Payload,
     http::{
@@ -46,7 +49,7 @@ impl Range {
     pub(crate) fn chop_bytes(
         &self,
         bytes: Bytes,
-    ) -> impl Stream<Item = Result<Bytes, UploadError>> + Unpin {
+    ) -> impl Stream<Item = Result<Bytes, Error>> + Unpin {
         match self {
             Range::Start(start) => once(ready(Ok(bytes.slice(*start as usize..)))),
             Range::SuffixLength(from_start) => once(ready(Ok(bytes.slice(..*from_start as usize)))),
@@ -59,7 +62,7 @@ impl Range {
     pub(crate) async fn chop_file(
         &self,
         mut file: tokio::fs::File,
-    ) -> Result<LocalBoxStream<'static, Result<Bytes, UploadError>>, UploadError> {
+    ) -> Result<LocalBoxStream<'static, Result<Bytes, Error>>, Error> {
         match self {
             Range::Start(start) => {
                 file.seek(io::SeekFrom::Start(*start)).await?;
@@ -102,14 +105,14 @@ impl RangeHeader {

 impl FromRequest for RangeHeader {
     type Config = ();
-    type Error = actix_web::Error;
+    type Error = Error;
     type Future = std::future::Ready<Result<Self, Self::Error>>;

     fn from_request(req: &HttpRequest, _: &mut Payload) -> Self::Future {
         if let Some(range_head) = req.headers().get("Range") {
             ready(parse_range_header(range_head).map_err(|e| {
                 tracing::warn!("Failed to parse range header: {}", e);
-                e.into()
+                e
             }))
         } else {
             ready(Err(UploadError::ParseReq(
@@ -120,7 +123,7 @@ impl FromRequest for RangeHeader {
     }
 }

-fn parse_range_header(range_head: &HeaderValue) -> Result<RangeHeader, UploadError> {
+fn parse_range_header(range_head: &HeaderValue) -> Result<RangeHeader, Error> {
     let range_head_str = range_head.to_str().map_err(|_| {
         UploadError::ParseReq("Range header contains non-utf8 characters".to_string())
     })?;
@@ -135,7 +138,7 @@ fn parse_range_header(range_head: &HeaderValue) -> Result<RangeHeader, UploadErr
     let ranges = ranges
         .split(',')
         .map(parse_range)
-        .collect::<Result<Vec<Range>, UploadError>>()?;
+        .collect::<Result<Vec<Range>, Error>>()?;

     Ok(RangeHeader {
         unit: unit.to_owned(),
@@ -143,7 +146,7 @@ fn parse_range_header(range_head: &HeaderValue) -> Result<RangeHeader, UploadErr
     })
 }

-fn parse_range(s: &str) -> Result<Range, UploadError> {
+fn parse_range(s: &str) -> Result<Range, Error> {
     let dash_pos = s
         .find('-')
         .ok_or_else(|| UploadError::ParseReq("Mailformed Range Bound".to_string()))?;
@@ -153,7 +156,7 @@ fn parse_range(s: &str) -> Result<Range, UploadError> {
     let end = end.trim_start_matches('-').trim();

     if start.is_empty() && end.is_empty() {
-        Err(UploadError::ParseReq("Malformed content range".to_string()))
+        Err(UploadError::ParseReq("Malformed content range".to_string()).into())
     } else if start.is_empty() {
         let suffix_length = end.parse().map_err(|_| {
             UploadError::ParseReq("Cannot parse suffix length for range header".to_string())
@@ -175,7 +178,7 @@ fn parse_range(s: &str) -> Result<Range, UploadError> {
     })?;

     if range_start > range_end {
-        return Err(UploadError::Range);
+        return Err(UploadError::Range.into());
     }

     Ok(Range::Segment(range_start, range_end))
46
src/root_span_builder.rs
Normal file
@@ -0,0 +1,46 @@
+use actix_web::{
+    dev::{ServiceRequest, ServiceResponse},
+    Error,
+};
+use tracing::Span;
+use tracing_actix_web::root_span;
+
+pub struct RootSpanBuilder;
+
+impl tracing_actix_web::RootSpanBuilder for RootSpanBuilder {
+    fn on_request_start(request: &ServiceRequest) -> Span {
+        root_span!(request)
+    }
+
+    fn on_request_end<B>(span: Span, outcome: &Result<ServiceResponse<B>, Error>) {
+        match &outcome {
+            Ok(response) => {
+                if let Some(error) = response.response().error() {
+                    handle_error(span, error)
+                } else {
+                    span.record("http.status_code", &response.response().status().as_u16());
+                    span.record("otel.status_code", &"OK");
+                }
+            }
+            Err(error) => handle_error(span, error),
+        }
+    }
+}
+
+fn handle_error(span: Span, error: &Error) {
+    let response_error = error.as_response_error();
+
+    let display = format!("{}", response_error);
+    let debug = format!("{:?}", response_error);
+    span.record("exception.message", &tracing::field::display(display));
+    span.record("exception.details", &tracing::field::display(debug));
+
+    let status_code = response_error.status_code();
+    span.record("http.status_code", &status_code.as_u16());
+
+    if status_code.is_client_error() {
+        span.record("otel.status_code", &"OK");
+    } else {
+        span.record("otel.status_code", &"ERROR");
+    }
+}
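Note that span.record only writes to fields that were declared when the span was created, so the http.status_code, otel.status_code, exception.message and exception.details fields recorded above presumably come pre-declared (and empty) from the root_span! macro. If extra request-scoped fields were ever needed, recent versions of the macro accept additional field declarations; the upload_alias name below is purely hypothetical and is not part of this commit:

fn on_request_start(request: &ServiceRequest) -> Span {
    // Extra fields must be declared up front as Empty so handlers can
    // record into them later; `upload_alias` is an invented example.
    root_span!(request, upload_alias = tracing::field::Empty)
}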
@@ -1,37 +1,51 @@
-use crate::error::UploadError;
+use crate::error::Error;
+use actix_rt::task::JoinHandle;
 use actix_web::web::{Bytes, BytesMut};
 use futures_util::Stream;
 use std::{
     future::Future,
     pin::Pin,
+    process::Stdio,
     task::{Context, Poll},
 };
-use tokio::io::{AsyncRead, AsyncWriteExt, ReadBuf};
+use tokio::{
+    io::{AsyncRead, AsyncWriteExt, ReadBuf},
+    process::{Child, Command},
+    sync::oneshot::{channel, Receiver},
+};
+use tokio_util::codec::{BytesCodec, FramedRead};
+use tracing::instrument;

 #[derive(Debug)]
 struct StatusError;

 pub(crate) struct Process {
-    child: tokio::process::Child,
+    child: Child,
 }

 pub(crate) struct ProcessRead<I> {
     inner: I,
-    err_recv: tokio::sync::oneshot::Receiver<std::io::Error>,
+    err_recv: Receiver<std::io::Error>,
     err_closed: bool,
-    handle: actix_rt::task::JoinHandle<()>,
+    handle: JoinHandle<()>,
 }

 struct BytesFreezer<S>(S);

 impl Process {
-    fn new(child: tokio::process::Child) -> Self {
+    fn new(child: Child) -> Self {
         Process { child }
     }

-    pub(crate) fn spawn(cmd: &mut tokio::process::Command) -> std::io::Result<Self> {
-        cmd.stdin(std::process::Stdio::piped())
-            .stdout(std::process::Stdio::piped())
+    #[instrument(name = "Spawning command")]
+    pub(crate) fn run(command: &str, args: &[&str]) -> std::io::Result<Self> {
+        tracing::info!("Spawning");
+        Self::spawn(Command::new(command).args(args))
+    }
+
+    pub(crate) fn spawn(cmd: &mut Command) -> std::io::Result<Self> {
+        cmd.stdin(Stdio::piped())
+            .stdout(Stdio::piped())
             .spawn()
             .map(Process::new)
     }
@@ -40,7 +54,7 @@ impl Process {
         let mut stdin = self.child.stdin.take()?;
         let stdout = self.child.stdout.take()?;

-        let (tx, rx) = tokio::sync::oneshot::channel();
+        let (tx, rx) = channel();

         let mut child = self.child;

@@ -79,7 +93,7 @@ impl Process {
         let mut stdin = self.child.stdin.take()?;
         let stdout = self.child.stdout.take()?;

-        let (tx, rx) = tokio::sync::oneshot::channel();
+        let (tx, rx) = channel();

         let mut child = self.child;

@@ -114,11 +128,8 @@ impl Process {

 pub(crate) fn bytes_stream(
     input: impl AsyncRead + Unpin,
-) -> impl Stream<Item = Result<Bytes, UploadError>> + Unpin {
-    BytesFreezer(tokio_util::codec::FramedRead::new(
-        input,
-        tokio_util::codec::BytesCodec::new(),
-    ))
+) -> impl Stream<Item = Result<Bytes, Error>> + Unpin {
+    BytesFreezer(FramedRead::new(input, BytesCodec::new()))
 }

 impl<I> AsyncRead for ProcessRead<I>
@@ -157,13 +168,13 @@ impl<S> Stream for BytesFreezer<S>
 where
     S: Stream<Item = std::io::Result<BytesMut>> + Unpin,
 {
-    type Item = Result<Bytes, UploadError>;
+    type Item = Result<Bytes, Error>;

     fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
         Pin::new(&mut self.0)
             .poll_next(cx)
             .map(|opt| opt.map(|res| res.map(|bytes_mut| bytes_mut.freeze())))
-            .map_err(UploadError::from)
+            .map_err(Error::from)
     }
 }

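With the new Process::run helper, call sites hand over a program name and an argument slice instead of assembling a tokio Command themselves, and the #[instrument] attribute gives each spawn its own span. A hypothetical call site (the ffprobe invocation is illustrative only, not a call from this commit):

fn probe_version() -> std::io::Result<Process> {
    // Equivalent to Process::spawn(Command::new("ffprobe").args(&["-version"])),
    // but traced under the "Spawning command" span.
    Process::run("ffprobe", &["-version"])
}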
@@ -1,6 +1,6 @@
 use crate::{
     config::Format,
-    error::UploadError,
+    error::{Error, UploadError},
     migrate::{alias_id_key, alias_key, alias_key_bounds, variant_key_bounds, LatestDb},
     to_ext,
 };
@@ -98,7 +98,7 @@ where
         }
     }

-    async fn finalize_reset(self) -> Result<Hash, UploadError> {
+    async fn finalize_reset(self) -> Result<Hash, Error> {
         let mut hasher = self.hasher;
         let hash = web::block(move || Hash::new(hasher.finalize_reset().to_vec())).await?;
         Ok(hash)
@@ -194,7 +194,7 @@ pub(crate) struct Details {
 }

 impl Details {
-    pub(crate) async fn from_bytes(input: web::Bytes) -> Result<Self, UploadError> {
+    pub(crate) async fn from_bytes(input: web::Bytes) -> Result<Self, Error> {
         let details = crate::magick::details_bytes(input).await?;

         Ok(Details::now(
@@ -204,7 +204,7 @@ impl Details {
         ))
     }

-    pub(crate) async fn from_path<P>(path: P) -> Result<Self, UploadError>
+    pub(crate) async fn from_path<P>(path: P) -> Result<Self, Error>
     where
         P: AsRef<std::path::Path>,
     {
@@ -285,10 +285,7 @@ impl UploadManager {
     }

     /// Create a new UploadManager
-    pub(crate) async fn new(
-        mut root_dir: PathBuf,
-        format: Option<Format>,
-    ) -> Result<Self, UploadError> {
+    pub(crate) async fn new(mut root_dir: PathBuf, format: Option<Format>) -> Result<Self, Error> {
         let root_clone = root_dir.clone();
         // sled automatically creates it's own directories
         let db = web::block(move || LatestDb::exists(root_clone).migrate()).await??;
@@ -313,11 +310,7 @@ impl UploadManager {

     /// Store the path to a generated image variant so we can easily clean it up later
     #[instrument(skip(self))]
-    pub(crate) async fn store_variant(
-        &self,
-        path: PathBuf,
-        filename: String,
-    ) -> Result<(), UploadError> {
+    pub(crate) async fn store_variant(&self, path: PathBuf, filename: String) -> Result<(), Error> {
         let path_string = path.to_str().ok_or(UploadError::Path)?.to_string();

         let fname_tree = self.inner.filename_tree.clone();
@@ -340,7 +333,7 @@ impl UploadManager {
         &self,
         path: PathBuf,
         filename: String,
-    ) -> Result<Option<Details>, UploadError> {
+    ) -> Result<Option<Details>, Error> {
         let path_string = path.to_str().ok_or(UploadError::Path)?.to_string();

         let fname_tree = self.inner.filename_tree.clone();
@@ -369,7 +362,7 @@ impl UploadManager {
         path: PathBuf,
         filename: String,
         details: &Details,
-    ) -> Result<(), UploadError> {
+    ) -> Result<(), Error> {
         let path_string = path.to_str().ok_or(UploadError::Path)?.to_string();

         let fname_tree = self.inner.filename_tree.clone();
@@ -389,10 +382,7 @@ impl UploadManager {
     }

     /// Get a list of aliases for a given file
-    pub(crate) async fn aliases_by_filename(
-        &self,
-        filename: String,
-    ) -> Result<Vec<String>, UploadError> {
+    pub(crate) async fn aliases_by_filename(&self, filename: String) -> Result<Vec<String>, Error> {
         let fname_tree = self.inner.filename_tree.clone();
         let hash = web::block(move || fname_tree.get(filename.as_bytes()))
             .await??
@@ -402,7 +392,7 @@ impl UploadManager {
     }

     /// Get a list of aliases for a given alias
-    pub(crate) async fn aliases_by_alias(&self, alias: String) -> Result<Vec<String>, UploadError> {
+    pub(crate) async fn aliases_by_alias(&self, alias: String) -> Result<Vec<String>, Error> {
         let alias_tree = self.inner.alias_tree.clone();
         let hash = web::block(move || alias_tree.get(alias.as_bytes()))
             .await??
@@ -411,7 +401,7 @@ impl UploadManager {
         self.aliases_by_hash(&hash).await
     }

-    async fn aliases_by_hash(&self, hash: &sled::IVec) -> Result<Vec<String>, UploadError> {
+    async fn aliases_by_hash(&self, hash: &sled::IVec) -> Result<Vec<String>, Error> {
         let (start, end) = alias_key_bounds(hash);
         let main_tree = self.inner.main_tree.clone();
         let aliases = web::block(move || {
@@ -436,7 +426,7 @@ impl UploadManager {
     }

     /// Delete an alias without a delete token
-    pub(crate) async fn delete_without_token(&self, alias: String) -> Result<(), UploadError> {
+    pub(crate) async fn delete_without_token(&self, alias: String) -> Result<(), Error> {
         let token_key = delete_key(&alias);
         let alias_tree = self.inner.alias_tree.clone();
         let token = web::block(move || alias_tree.get(token_key.as_bytes()))
@@ -448,7 +438,7 @@ impl UploadManager {

     /// Delete the alias, and the file & variants if no more aliases exist
     #[instrument(skip(self, alias, token))]
-    pub(crate) async fn delete(&self, alias: String, token: String) -> Result<(), UploadError> {
+    pub(crate) async fn delete(&self, alias: String, token: String) -> Result<(), Error> {
         use sled::Transactional;
         let main_tree = self.inner.main_tree.clone();
         let alias_tree = self.inner.alias_tree.clone();
@@ -478,7 +468,7 @@ impl UploadManager {
             let id = alias_tree
                 .remove(alias_id_key(&alias2).as_bytes())?
                 .ok_or_else(|| trans_err(UploadError::MissingAlias))?;
-            let id = String::from_utf8(id.to_vec()).map_err(|e| trans_err(e.into()))?;
+            let id = String::from_utf8(id.to_vec()).map_err(trans_err)?;

             // -- GET HASH FOR HASH TREE CLEANUP --
             debug!("Deleting alias -> hash mapping");
@@ -498,13 +488,13 @@ impl UploadManager {
         self.check_delete_files(hash).await
     }

-    async fn check_delete_files(&self, hash: sled::IVec) -> Result<(), UploadError> {
+    async fn check_delete_files(&self, hash: sled::IVec) -> Result<(), Error> {
         // -- CHECK IF ANY OTHER ALIASES EXIST --
         let main_tree = self.inner.main_tree.clone();
         let (start, end) = alias_key_bounds(&hash);
         debug!("Checking for additional aliases referencing hash");
         let any_aliases = web::block(move || {
-            Ok(main_tree.range(start..end).next().is_some()) as Result<bool, UploadError>
+            Ok(main_tree.range(start..end).next().is_some()) as Result<bool, Error>
         })
         .await??;

@@ -546,7 +536,7 @@ impl UploadManager {

     /// Fetch the real on-disk filename given an alias
     #[instrument(skip(self))]
-    pub(crate) async fn from_alias(&self, alias: String) -> Result<String, UploadError> {
+    pub(crate) async fn from_alias(&self, alias: String) -> Result<String, Error> {
         let tree = self.inner.alias_tree.clone();
         debug!("Getting hash from alias");
         let hash = web::block(move || tree.get(alias.as_bytes()))
@@ -574,7 +564,7 @@ impl UploadManager {

     // Find image variants and remove them from the DB and the disk
     #[instrument(skip(self))]
-    async fn cleanup_files(&self, filename: FilenameIVec) -> Result<(), UploadError> {
+    async fn cleanup_files(&self, filename: FilenameIVec) -> Result<(), Error> {
         let filename = filename.inner;
         let mut path = self.image_dir();
         let fname = String::from_utf8(filename.to_vec())?;
@@ -601,7 +591,7 @@ impl UploadManager {
                 keys.push(key?.to_owned());
             }

-            Ok(keys) as Result<Vec<sled::IVec>, UploadError>
+            Ok(keys) as Result<Vec<sled::IVec>, Error>
         })
         .await??;

@@ -631,7 +621,7 @@ impl UploadManager {
 impl UploadManagerSession {
     /// Generate a delete token for an alias
     #[instrument(skip(self))]
-    pub(crate) async fn delete_token(&self) -> Result<String, UploadError> {
+    pub(crate) async fn delete_token(&self) -> Result<String, Error> {
         let alias = self.alias.clone().ok_or(UploadError::MissingAlias)?;

         debug!("Generating delete token");
@@ -679,9 +669,9 @@ impl UploadManagerSession {
         content_type: mime::Mime,
         validate: bool,
         mut stream: UploadStream<E>,
-    ) -> Result<Self, UploadError>
+    ) -> Result<Self, Error>
     where
-        UploadError: From<E>,
+        Error: From<E>,
         E: Unpin + 'static,
     {
         let mut bytes_mut = actix_web::web::BytesMut::new();
@@ -718,12 +708,9 @@ impl UploadManagerSession {

     /// Upload the file, discarding bytes if it's already present, or saving if it's new
     #[instrument(skip(self, stream))]
-    pub(crate) async fn upload<E>(
-        mut self,
-        mut stream: UploadStream<E>,
-    ) -> Result<Self, UploadError>
+    pub(crate) async fn upload<E>(mut self, mut stream: UploadStream<E>) -> Result<Self, Error>
     where
-        UploadError: From<E>,
+        Error: From<E>,
     {
         let mut bytes_mut = actix_web::web::BytesMut::new();

@@ -762,7 +749,7 @@ impl UploadManagerSession {
         tmpfile: PathBuf,
         hash: Hash,
         content_type: mime::Mime,
-    ) -> Result<(), UploadError> {
+    ) -> Result<(), Error> {
         let (dup, name) = self.check_duplicate(hash, content_type).await?;

         // bail early with alias to existing file if this is a duplicate
@@ -786,7 +773,7 @@ impl UploadManagerSession {
         &self,
         hash: Hash,
         content_type: mime::Mime,
-    ) -> Result<(Dup, String), UploadError> {
+    ) -> Result<(Dup, String), Error> {
         let main_tree = self.manager.inner.main_tree.clone();

         let filename = self.next_file(content_type).await?;
@@ -822,7 +809,7 @@ impl UploadManagerSession {

     // generate a short filename that isn't already in-use
     #[instrument(skip(self, content_type))]
-    async fn next_file(&self, content_type: mime::Mime) -> Result<String, UploadError> {
+    async fn next_file(&self, content_type: mime::Mime) -> Result<String, Error> {
         let image_dir = self.manager.image_dir();
         use rand::distributions::{Alphanumeric, Distribution};
         let mut limit: usize = 10;
@@ -855,7 +842,7 @@ impl UploadManagerSession {
     }

     #[instrument(skip(self, hash, alias))]
-    async fn add_existing_alias(&self, hash: &Hash, alias: &str) -> Result<(), UploadError> {
+    async fn add_existing_alias(&self, hash: &Hash, alias: &str) -> Result<(), Error> {
         self.save_alias_hash_mapping(hash, alias).await??;

         self.store_hash_id_alias_mapping(hash, alias).await?;
@@ -867,11 +854,7 @@ impl UploadManagerSession {
     //
     // This will help if multiple 'users' upload the same file, and one of them wants to delete it
     #[instrument(skip(self, hash, content_type))]
-    async fn add_alias(
-        &mut self,
-        hash: &Hash,
-        content_type: mime::Mime,
-    ) -> Result<(), UploadError> {
+    async fn add_alias(&mut self, hash: &Hash, content_type: mime::Mime) -> Result<(), Error> {
         let alias = self.next_alias(hash, content_type).await?;

         self.store_hash_id_alias_mapping(hash, &alias).await?;
@@ -883,11 +866,7 @@ impl UploadManagerSession {
     //
     // DANGER: this can cause BAD BAD BAD conflicts if the same alias is used for multiple files
     #[instrument(skip(self, hash))]
-    async fn store_hash_id_alias_mapping(
-        &self,
-        hash: &Hash,
-        alias: &str,
-    ) -> Result<(), UploadError> {
+    async fn store_hash_id_alias_mapping(&self, hash: &Hash, alias: &str) -> Result<(), Error> {
         let alias = alias.to_string();
         loop {
             debug!("hash -> alias save loop");
@@ -921,11 +900,7 @@ impl UploadManagerSession {

     // Generate an alias to the file
     #[instrument(skip(self, hash, content_type))]
-    async fn next_alias(
-        &mut self,
-        hash: &Hash,
-        content_type: mime::Mime,
-    ) -> Result<String, UploadError> {
+    async fn next_alias(&mut self, hash: &Hash, content_type: mime::Mime) -> Result<String, Error> {
         use rand::distributions::{Alphanumeric, Distribution};
         let mut limit: usize = 10;
         let mut rng = rand::thread_rng();
@@ -956,7 +931,7 @@ impl UploadManagerSession {
         &self,
         hash: &Hash,
         alias: &str,
-    ) -> Result<Result<(), UploadError>, UploadError> {
+    ) -> Result<Result<(), Error>, Error> {
         let tree = self.manager.inner.alias_tree.clone();
         let vec = hash.inner.clone();
         let alias = alias.to_string();
@@ -969,7 +944,7 @@ impl UploadManagerSession {

         if res.is_err() {
             warn!("Duplicate alias");
-            return Ok(Err(UploadError::DuplicateAlias));
+            return Ok(Err(UploadError::DuplicateAlias.into()));
         }

         Ok(Ok(()))
@@ -980,7 +955,7 @@ impl UploadManagerSession {
 pub(crate) async fn safe_save_reader(
     to: PathBuf,
     input: &mut (impl AsyncRead + Unpin),
-) -> Result<(), UploadError> {
+) -> Result<(), Error> {
     if let Some(path) = to.parent() {
         debug!("Creating directory {:?}", path);
         tokio::fs::create_dir_all(path.to_owned()).await?;
@@ -992,7 +967,7 @@ pub(crate) async fn safe_save_reader(
             return Err(e.into());
         }
     } else {
-        return Err(UploadError::FileExists);
+        return Err(UploadError::FileExists.into());
     }

     debug!("Writing stream to {:?}", to);
@@ -1008,9 +983,9 @@ pub(crate) async fn safe_save_reader(
 pub(crate) async fn safe_save_stream<E>(
     to: PathBuf,
     mut stream: UploadStream<E>,
-) -> Result<(), UploadError>
+) -> Result<(), Error>
 where
-    UploadError: From<E>,
+    Error: From<E>,
     E: Unpin,
 {
     if let Some(path) = to.parent() {
@@ -1024,7 +999,7 @@ where
             return Err(e.into());
         }
     } else {
-        return Err(UploadError::FileExists);
+        return Err(UploadError::FileExists.into());
     }

     debug!("Writing stream to {:?}", to);
@@ -1050,17 +1025,20 @@ where
     Ok(())
 }

-async fn remove_path(path: sled::IVec) -> Result<(), UploadError> {
+async fn remove_path(path: sled::IVec) -> Result<(), Error> {
     let path_string = String::from_utf8(path.to_vec())?;
     tokio::fs::remove_file(path_string).await?;
     Ok(())
 }

-fn trans_err(e: UploadError) -> sled::transaction::ConflictableTransactionError<UploadError> {
-    sled::transaction::ConflictableTransactionError::Abort(e)
+fn trans_err<E>(e: E) -> sled::transaction::ConflictableTransactionError<Error>
+where
+    Error: From<E>,
+{
+    sled::transaction::ConflictableTransactionError::Abort(e.into())
 }

-fn file_name(name: String, content_type: mime::Mime) -> Result<String, UploadError> {
+fn file_name(name: String, content_type: mime::Mime) -> Result<String, Error> {
     Ok(format!("{}{}", name, to_ext(content_type)?))
 }

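Almost every hunk in this file makes the same move: return Result<_, Error> instead of Result<_, UploadError>, add .into() where an UploadError is built directly, and inspect the underlying variant through e.kind() (as the prepare_image hunk earlier does with UploadError::FileExists). The matching src/error.rs change is not part of this excerpt, and Error also has to convert into actix_web::Error for the extractor and handler signatures to keep compiling. The sketch below is only one plausible shape for such a wrapper, not the actual file:

// Hypothetical shape of the wrapper these hunks assume; the real src/error.rs
// is not shown here, so everything beyond the names Error and UploadError is
// an assumption.
#[derive(Debug, thiserror::Error)]
#[error(transparent)]
pub(crate) struct Error {
    #[from]
    kind: UploadError,
}

impl Error {
    pub(crate) fn kind(&self) -> &UploadError {
        &self.kind
    }
}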
@@ -1,6 +1,7 @@
-use crate::{config::Format, error::UploadError, ffmpeg::InputFormat, magick::ValidInputType};
+use crate::{config::Format, error::Error, ffmpeg::InputFormat, magick::ValidInputType};
 use actix_web::web::Bytes;
 use tokio::io::AsyncRead;
+use tracing::instrument;

 pub(crate) fn image_webp() -> mime::Mime {
     "image/webp".parse().unwrap()
@@ -10,36 +11,43 @@ pub(crate) fn video_mp4() -> mime::Mime {
     "video/mp4".parse().unwrap()
 }

+#[instrument(name = "Validate image", skip(bytes))]
 pub(crate) async fn validate_image_bytes(
     bytes: Bytes,
     prescribed_format: Option<Format>,
-) -> Result<(mime::Mime, Box<dyn AsyncRead + Unpin>), UploadError> {
+) -> Result<(mime::Mime, Box<dyn AsyncRead + Unpin>), Error> {
     let input_type = crate::magick::input_type_bytes(bytes.clone()).await?;

     match (prescribed_format, input_type) {
         (_, ValidInputType::Gif) => Ok((
             video_mp4(),
-            Box::new(crate::ffmpeg::to_mp4_bytes(bytes, InputFormat::Gif)?),
+            Box::new(crate::ffmpeg::to_mp4_bytes(bytes, InputFormat::Gif)?)
+                as Box<dyn AsyncRead + Unpin>,
         )),
         (_, ValidInputType::Mp4) => Ok((
             video_mp4(),
-            Box::new(crate::ffmpeg::to_mp4_bytes(bytes, InputFormat::Mp4)?),
+            Box::new(crate::ffmpeg::to_mp4_bytes(bytes, InputFormat::Mp4)?)
+                as Box<dyn AsyncRead + Unpin>,
         )),
         (Some(Format::Jpeg) | None, ValidInputType::Jpeg) => Ok((
             mime::IMAGE_JPEG,
-            Box::new(crate::exiftool::clear_metadata_bytes_read(bytes)?),
+            Box::new(crate::exiftool::clear_metadata_bytes_read(bytes)?)
+                as Box<dyn AsyncRead + Unpin>,
         )),
         (Some(Format::Png) | None, ValidInputType::Png) => Ok((
             mime::IMAGE_PNG,
-            Box::new(crate::exiftool::clear_metadata_bytes_read(bytes)?),
+            Box::new(crate::exiftool::clear_metadata_bytes_read(bytes)?)
+                as Box<dyn AsyncRead + Unpin>,
         )),
         (Some(Format::Webp) | None, ValidInputType::Webp) => Ok((
             image_webp(),
-            Box::new(crate::magick::clear_metadata_bytes_read(bytes)?),
+            Box::new(crate::magick::clear_metadata_bytes_read(bytes)?)
+                as Box<dyn AsyncRead + Unpin>,
         )),
         (Some(format), _) => Ok((
             format.to_mime(),
-            Box::new(crate::magick::convert_bytes_read(bytes, format)?),
+            Box::new(crate::magick::convert_bytes_read(bytes, format)?)
+                as Box<dyn AsyncRead + Unpin>,
         )),
     }
 }