2022-12-23 18:56:15 +00:00
|
|
|
mod backgrounded;
|
|
|
|
mod bytes_stream;
|
|
|
|
mod concurrent_processor;
|
|
|
|
mod config;
|
|
|
|
mod details;
|
2023-07-13 03:12:21 +00:00
|
|
|
mod discover;
|
2022-12-23 18:56:15 +00:00
|
|
|
mod either;
|
|
|
|
mod error;
|
|
|
|
mod exiftool;
|
|
|
|
mod ffmpeg;
|
|
|
|
mod file;
|
2023-07-12 04:11:23 +00:00
|
|
|
mod formats;
|
2022-12-23 18:56:15 +00:00
|
|
|
mod generate;
|
|
|
|
mod ingest;
|
|
|
|
mod init_tracing;
|
|
|
|
mod magick;
|
|
|
|
mod middleware;
|
2023-07-17 03:07:42 +00:00
|
|
|
mod migrate_store;
|
2022-12-23 18:56:15 +00:00
|
|
|
mod process;
|
|
|
|
mod processor;
|
|
|
|
mod queue;
|
|
|
|
mod range;
|
|
|
|
mod repo;
|
|
|
|
mod serde_str;
|
|
|
|
mod store;
|
|
|
|
mod stream;
|
|
|
|
mod tmp_file;
|
|
|
|
mod validate;
|
|
|
|
|
|
|
|
use actix_form_data::{Field, Form, FormData, Multipart, Value};
|
|
|
|
use actix_web::{
|
|
|
|
guard,
|
|
|
|
http::header::{CacheControl, CacheDirective, LastModified, Range, ACCEPT_RANGES},
|
|
|
|
web, App, HttpRequest, HttpResponse, HttpResponseBuilder, HttpServer,
|
|
|
|
};
|
|
|
|
use futures_util::{
|
|
|
|
stream::{empty, once},
|
|
|
|
Stream, StreamExt, TryStreamExt,
|
|
|
|
};
|
2023-07-22 21:47:59 +00:00
|
|
|
use metrics_exporter_prometheus::PrometheusBuilder;
|
|
|
|
use middleware::Metrics;
|
2023-07-22 17:31:01 +00:00
|
|
|
use once_cell::sync::Lazy;
|
2023-07-21 21:58:31 +00:00
|
|
|
use reqwest_middleware::{ClientBuilder, ClientWithMiddleware};
|
|
|
|
use reqwest_tracing::TracingMiddleware;
|
2022-12-23 18:56:15 +00:00
|
|
|
use rusty_s3::UrlStyle;
|
|
|
|
use std::{
|
|
|
|
future::ready,
|
|
|
|
path::Path,
|
|
|
|
path::PathBuf,
|
|
|
|
sync::atomic::{AtomicU64, Ordering},
|
2023-07-17 03:07:42 +00:00
|
|
|
time::{Duration, SystemTime},
|
2022-12-23 18:56:15 +00:00
|
|
|
};
|
|
|
|
use tokio::sync::Semaphore;
|
|
|
|
use tracing_actix_web::TracingLogger;
|
|
|
|
use tracing_futures::Instrument;
|
|
|
|
|
|
|
|
use self::{
|
|
|
|
backgrounded::Backgrounded,
|
2023-07-22 17:47:32 +00:00
|
|
|
concurrent_processor::ProcessMap,
|
2023-07-13 18:48:59 +00:00
|
|
|
config::{Configuration, Operation},
|
2022-12-23 18:56:15 +00:00
|
|
|
details::Details,
|
|
|
|
either::Either,
|
|
|
|
error::{Error, UploadError},
|
2023-07-22 17:47:32 +00:00
|
|
|
formats::InputProcessableFormat,
|
2022-12-23 18:56:15 +00:00
|
|
|
ingest::Session,
|
|
|
|
init_tracing::init_tracing,
|
|
|
|
middleware::{Deadline, Internal},
|
2023-07-17 03:07:42 +00:00
|
|
|
migrate_store::migrate_store,
|
2022-12-23 18:56:15 +00:00
|
|
|
queue::queue_generate,
|
|
|
|
repo::{
|
2023-07-22 17:47:32 +00:00
|
|
|
sled::SledRepo, Alias, DeleteToken, FullRepo, HashRepo, IdentifierRepo, QueueRepo, Repo,
|
2023-07-22 23:50:04 +00:00
|
|
|
SettingsRepo, UploadId, UploadResult, VariantAccessRepo,
|
2022-12-23 18:56:15 +00:00
|
|
|
},
|
|
|
|
serde_str::Serde,
|
|
|
|
store::{
|
|
|
|
file_store::FileStore,
|
|
|
|
object_store::{ObjectStore, ObjectStoreConfig},
|
2023-06-23 16:20:20 +00:00
|
|
|
Identifier, Store,
|
2022-12-23 18:56:15 +00:00
|
|
|
},
|
|
|
|
stream::{StreamLimit, StreamTimeout},
|
|
|
|
};
|
|
|
|
|
2023-07-22 17:31:01 +00:00
|
|
|
pub use self::config::{ConfigSource, PictRsConfiguration};
|
2022-12-26 22:35:25 +00:00
|
|
|
|
2022-12-23 18:56:15 +00:00
|
|
|
// Number of bytes in one megabyte; used to scale configured size limits.
const MEGABYTES: usize = 1024 * 1024;
// Time-unit helpers in seconds, used for cache-control max-age values.
const MINUTES: u32 = 60;
const HOURS: u32 = 60 * MINUTES;
const DAYS: u32 = 24 * HOURS;

// Settings-repo key under which the "not found" fallback image alias is stored.
const NOT_FOUND_KEY: &str = "404-alias";
|
|
|
|
|
2022-12-23 18:56:15 +00:00
|
|
|
// Global semaphore bounding concurrent media-processing work to
// (cpu_count - 1) permits, with a floor of 1 so a single-core host still works.
static PROCESS_SEMAPHORE: Lazy<Semaphore> = Lazy::new(|| {
    // Detach from any current span so initialization isn't attributed to a request.
    tracing::trace_span!(parent: None, "Initialize semaphore")
        .in_scope(|| Semaphore::new(num_cpus::get().saturating_sub(1).max(1)))
});
|
|
|
|
|
|
|
|
async fn ensure_details<R: FullRepo, S: Store + 'static>(
|
|
|
|
repo: &R,
|
|
|
|
store: &S,
|
2023-07-22 17:31:01 +00:00
|
|
|
config: &Configuration,
|
2022-12-23 18:56:15 +00:00
|
|
|
alias: &Alias,
|
|
|
|
) -> Result<Details, Error> {
|
2023-07-05 21:46:44 +00:00
|
|
|
let Some(identifier) = repo.identifier_from_alias::<S::Identifier>(alias).await? else {
|
|
|
|
return Err(UploadError::MissingAlias.into());
|
|
|
|
};
|
|
|
|
|
2023-07-13 18:48:59 +00:00
|
|
|
let details = repo.details(&identifier).await?.and_then(|details| {
|
|
|
|
if details.internal_format().is_some() {
|
|
|
|
Some(details)
|
|
|
|
} else {
|
|
|
|
None
|
|
|
|
}
|
|
|
|
});
|
2022-12-23 18:56:15 +00:00
|
|
|
|
|
|
|
if let Some(details) = details {
|
|
|
|
tracing::debug!("details exist");
|
|
|
|
Ok(details)
|
|
|
|
} else {
|
2023-07-22 17:31:01 +00:00
|
|
|
if config.server.read_only {
|
2023-07-17 19:24:49 +00:00
|
|
|
return Err(UploadError::ReadOnly.into());
|
|
|
|
}
|
|
|
|
|
2022-12-23 18:56:15 +00:00
|
|
|
tracing::debug!("generating new details from {:?}", identifier);
|
2023-07-13 18:48:59 +00:00
|
|
|
let new_details = Details::from_store(store, &identifier).await?;
|
2022-12-23 18:56:15 +00:00
|
|
|
tracing::debug!("storing details for {:?}", identifier);
|
|
|
|
repo.relate_details(&identifier, &new_details).await?;
|
|
|
|
tracing::debug!("stored");
|
|
|
|
Ok(new_details)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
struct Upload<R: FullRepo + 'static, S: Store + 'static>(Value<Session<R, S>>);
|
|
|
|
|
|
|
|
// FormData impl that ingests each uploaded file inline as it streams in.
impl<R: FullRepo, S: Store + 'static> FormData for Upload<R, S> {
    type Item = Session<R, S>;
    type Error = Error;

    fn form(req: &HttpRequest) -> Form<Self::Item, Self::Error> {
        // Create a new Multipart Form validator
        //
        // This form is expecting a single array field, 'images' with at most 10 files in it
        let repo = req
            .app_data::<web::Data<R>>()
            .expect("No repo in request")
            .clone();
        let store = req
            .app_data::<web::Data<S>>()
            .expect("No store in request")
            .clone();
        let config = req
            .app_data::<web::Data<Configuration>>()
            .expect("No configuration in request")
            .clone();

        Form::new()
            .max_files(config.server.max_file_count)
            .max_file_size(config.media.max_file_size * MEGABYTES)
            .transform_error(transform_error)
            .field(
                "images",
                Field::array(Field::file(move |filename, _, stream| {
                    // Clone the shared handles into this per-file closure.
                    let repo = repo.clone();
                    let store = store.clone();
                    let config = config.clone();

                    metrics::increment_counter!("pict-rs.files", "upload" => "inline");

                    let span = tracing::info_span!("file-upload", ?filename);

                    let stream = stream.map_err(Error::from);

                    Box::pin(
                        async move {
                            // Reject uploads entirely when running read-only.
                            if config.server.read_only {
                                return Err(UploadError::ReadOnly.into());
                            }

                            // No explicit alias: ingest generates one.
                            ingest::ingest(&**repo, &**store, stream, None, &config.media).await
                        }
                        .instrument(span),
                    )
                })),
            )
    }

    fn extract(value: Value<Session<R, S>>) -> Result<Self, Self::Error> {
        Ok(Upload(value))
    }
}
|
|
|
|
|
|
|
|
struct Import<R: FullRepo + 'static, S: Store + 'static>(Value<Session<R, S>>);
|
|
|
|
|
|
|
|
// FormData impl for internal imports: identical flow to `Upload`, except the
// original filename is preserved as the alias.
impl<R: FullRepo, S: Store + 'static> FormData for Import<R, S> {
    type Item = Session<R, S>;
    type Error = Error;

    fn form(req: &actix_web::HttpRequest) -> Form<Self::Item, Self::Error> {
        let repo = req
            .app_data::<web::Data<R>>()
            .expect("No repo in request")
            .clone();
        let store = req
            .app_data::<web::Data<S>>()
            .expect("No store in request")
            .clone();
        let config = req
            .app_data::<web::Data<Configuration>>()
            .expect("No configuration in request")
            .clone();

        // Create a new Multipart Form validator for internal imports
        //
        // This form is expecting a single array field, 'images' with at most 10 files in it
        Form::new()
            .max_files(config.server.max_file_count)
            .max_file_size(config.media.max_file_size * MEGABYTES)
            .transform_error(transform_error)
            .field(
                "images",
                Field::array(Field::file(move |filename, _, stream| {
                    let repo = repo.clone();
                    let store = store.clone();
                    let config = config.clone();

                    metrics::increment_counter!("pict-rs.files", "import" => "inline");

                    let span = tracing::info_span!("file-import", ?filename);

                    let stream = stream.map_err(Error::from);

                    Box::pin(
                        async move {
                            if config.server.read_only {
                                return Err(UploadError::ReadOnly.into());
                            }

                            // Imports reuse the uploaded filename as the alias.
                            ingest::ingest(
                                &**repo,
                                &**store,
                                stream,
                                Some(Alias::from_existing(&filename)),
                                &config.media,
                            )
                            .await
                        }
                        .instrument(span),
                    )
                })),
            )
    }

    fn extract(value: Value<Self::Item>) -> Result<Self, Self::Error>
    where
        Self: Sized,
    {
        Ok(Import(value))
    }
}
|
|
|
|
|
2023-06-29 16:45:06 +00:00
|
|
|
/// Handle responding to successful uploads
#[tracing::instrument(name = "Uploaded files", skip(value, repo, store, config))]
async fn upload<R: FullRepo, S: Store + 'static>(
    Multipart(Upload(value)): Multipart<Upload<R, S>>,
    repo: web::Data<R>,
    store: web::Data<S>,
    config: web::Data<Configuration>,
) -> Result<HttpResponse, Error> {
    // Files were already ingested by the `Upload` FormData extractor; this
    // just builds the JSON response via the shared helper.
    handle_upload(value, repo, store, config).await
}
|
|
|
|
|
2023-06-29 16:45:06 +00:00
|
|
|
/// Handle responding to successful uploads
#[tracing::instrument(name = "Imported files", skip(value, repo, store, config))]
async fn import<R: FullRepo, S: Store + 'static>(
    Multipart(Import(value)): Multipart<Import<R, S>>,
    repo: web::Data<R>,
    store: web::Data<S>,
    config: web::Data<Configuration>,
) -> Result<HttpResponse, Error> {
    // Imports share the same response shape as uploads.
    handle_upload(value, repo, store, config).await
}
|
|
|
|
|
2023-06-29 16:45:06 +00:00
|
|
|
/// Handle responding to successful uploads
#[tracing::instrument(name = "Uploaded files", skip(value, repo, store, config))]
async fn handle_upload<R: FullRepo, S: Store + 'static>(
    value: Value<Session<R, S>>,
    repo: web::Data<R>,
    store: web::Data<S>,
    config: web::Data<Configuration>,
) -> Result<HttpResponse, Error> {
    // Pull the 'images' array out of the parsed multipart value.
    let images = value
        .map()
        .and_then(|mut m| m.remove("images"))
        .and_then(|images| images.array())
        .ok_or(UploadError::NoFiles)?;

    let mut files = Vec::new();
    let images = images
        .into_iter()
        .filter_map(|i| i.file())
        .collect::<Vec<_>>();

    for image in &images {
        if let Some(alias) = image.result.alias() {
            tracing::debug!("Uploaded {} as {:?}", image.filename, alias);
            let delete_token = image.result.delete_token().await?;

            let details = ensure_details(&repo, &store, &config, alias).await?;

            files.push(serde_json::json!({
                "file": alias.to_string(),
                "delete_token": delete_token.to_string(),
                "details": details,
            }));
        }
    }

    // Only disarm after every file's response entry was built successfully;
    // NOTE(review): disarm presumably cancels cleanup-on-drop of the ingested
    // files — confirm against `ingest::Session`.
    for mut image in images {
        image.result.disarm();
    }

    Ok(HttpResponse::Created().json(&serde_json::json!({
        "msg": "ok",
        "files": files
    })))
}
|
|
|
|
|
|
|
|
struct BackgroundedUpload<R: FullRepo + 'static, S: Store + 'static>(Value<Backgrounded<R, S>>);
|
|
|
|
|
|
|
|
// FormData impl for backgrounded uploads: files are proxied into the store via
// `Backgrounded::proxy` and ingested later by a queued job.
impl<R: FullRepo, S: Store + 'static> FormData for BackgroundedUpload<R, S> {
    type Item = Backgrounded<R, S>;
    type Error = Error;

    fn form(req: &actix_web::HttpRequest) -> Form<Self::Item, Self::Error> {
        // Create a new Multipart Form validator for backgrounded uploads
        //
        // This form is expecting a single array field, 'images' with at most 10 files in it
        let repo = req
            .app_data::<web::Data<R>>()
            .expect("No repo in request")
            .clone();
        let store = req
            .app_data::<web::Data<S>>()
            .expect("No store in request")
            .clone();
        let config = req
            .app_data::<web::Data<Configuration>>()
            .expect("No configuration in request")
            .clone();

        // Only the read-only flag is needed inside the per-file closure.
        let read_only = config.server.read_only;

        Form::new()
            .max_files(config.server.max_file_count)
            .max_file_size(config.media.max_file_size * MEGABYTES)
            .transform_error(transform_error)
            .field(
                "images",
                Field::array(Field::file(move |filename, _, stream| {
                    // Unlike the inline impls, these clone the inner values
                    // out of the `web::Data` wrappers.
                    let repo = (**repo).clone();
                    let store = (**store).clone();

                    metrics::increment_counter!("pict-rs.files", "upload" => "background");

                    let span = tracing::info_span!("file-proxy", ?filename);

                    let stream = stream.map_err(Error::from);

                    Box::pin(
                        async move {
                            if read_only {
                                return Err(UploadError::ReadOnly.into());
                            }

                            Backgrounded::proxy(repo, store, stream).await
                        }
                        .instrument(span),
                    )
                })),
            )
    }

    fn extract(value: Value<Self::Item>) -> Result<Self, Self::Error>
    where
        Self: Sized,
    {
        Ok(BackgroundedUpload(value))
    }
}
|
|
|
|
|
|
|
|
/// Respond to backgrounded uploads: queue an ingest job per file and return
/// the upload ids the client can later claim.
#[tracing::instrument(name = "Uploaded files", skip(value, repo))]
async fn upload_backgrounded<R: FullRepo, S: Store>(
    Multipart(BackgroundedUpload(value)): Multipart<BackgroundedUpload<R, S>>,
    repo: web::Data<R>,
) -> Result<HttpResponse, Error> {
    let images = value
        .map()
        .and_then(|mut m| m.remove("images"))
        .and_then(|images| images.array())
        .ok_or(UploadError::NoFiles)?;

    let mut files = Vec::new();
    let images = images
        .into_iter()
        .filter_map(|i| i.file())
        .collect::<Vec<_>>();

    for image in &images {
        let upload_id = image.result.upload_id().expect("Upload ID exists");
        let identifier = image
            .result
            .identifier()
            .expect("Identifier exists")
            .to_bytes()?;

        // Queue the actual ingest; the client polls the claim endpoint
        // with the returned upload_id.
        queue::queue_ingest(&repo, identifier, upload_id, None).await?;

        files.push(serde_json::json!({
            "upload_id": upload_id.to_string(),
        }));
    }

    // Everything queued successfully; keep the proxied files.
    for image in images {
        image.result.disarm();
    }

    Ok(HttpResponse::Accepted().json(&serde_json::json!({
        "msg": "ok",
        "uploads": files
    })))
}
|
|
|
|
|
|
|
|
/// Query parameters for claiming a backgrounded upload.
#[derive(Debug, serde::Deserialize)]
struct ClaimQuery {
    // ID returned by the backgrounded upload/download endpoints.
    upload_id: Serde<UploadId>,
}
|
|
|
|
|
|
|
|
/// Claim a backgrounded upload
#[tracing::instrument(name = "Waiting on upload", skip_all)]
async fn claim_upload<R: FullRepo, S: Store + 'static>(
    repo: web::Data<R>,
    store: web::Data<S>,
    config: web::Data<Configuration>,
    query: web::Query<ClaimQuery>,
) -> Result<HttpResponse, Error> {
    let upload_id = Serde::into_inner(query.into_inner().upload_id);

    // Long-poll for up to 10 seconds; on timeout return 204 so clients retry.
    match actix_rt::time::timeout(Duration::from_secs(10), repo.wait(upload_id)).await {
        Ok(wait_res) => {
            let upload_result = wait_res?;
            repo.claim(upload_id).await?;
            metrics::increment_counter!("pict-rs.background.upload.claim");

            match upload_result {
                UploadResult::Success { alias, token } => {
                    let details = ensure_details(&repo, &store, &config, &alias).await?;

                    Ok(HttpResponse::Ok().json(&serde_json::json!({
                        "msg": "ok",
                        "files": [{
                            "file": alias.to_string(),
                            "delete_token": token.to_string(),
                            "details": details,
                        }]
                    })))
                }
                // Ingest failed in the background: surface the message as 422.
                UploadResult::Failure { message } => Ok(HttpResponse::UnprocessableEntity().json(
                    &serde_json::json!({
                        "msg": message,
                    }),
                )),
            }
        }
        Err(_) => Ok(HttpResponse::NoContent().finish()),
    }
}
|
|
|
|
|
|
|
|
/// Query parameters for the download-from-URL endpoint.
#[derive(Debug, serde::Deserialize)]
struct UrlQuery {
    // Remote URL to fetch the media from.
    url: String,

    // When true, ingest is queued as a background job instead of inline.
    #[serde(default)]
    backgrounded: bool,
}
|
|
|
|
|
|
|
|
/// download an image from a URL
#[tracing::instrument(name = "Downloading file", skip(client, repo, store, config))]
async fn download<R: FullRepo + 'static, S: Store + 'static>(
    client: web::Data<ClientWithMiddleware>,
    repo: web::Data<R>,
    store: web::Data<S>,
    config: web::Data<Configuration>,
    query: web::Query<UrlQuery>,
) -> Result<HttpResponse, Error> {
    if config.server.read_only {
        return Err(UploadError::ReadOnly.into());
    }

    let res = client.get(&query.url).send().await?;

    if !res.status().is_success() {
        return Err(UploadError::Download(res.status()).into());
    }

    // Cap the remote body at the configured maximum file size.
    let stream = res
        .bytes_stream()
        .map_err(Error::from)
        .limit((config.media.max_file_size * MEGABYTES) as u64);

    if query.backgrounded {
        do_download_backgrounded(stream, repo, store).await
    } else {
        do_download_inline(stream, repo, store, config).await
    }
}
|
|
|
|
|
2023-07-22 17:31:01 +00:00
|
|
|
/// Ingest a downloaded stream inline and respond with the new alias, delete
/// token, and details (same response shape as a direct upload).
#[tracing::instrument(name = "Downloading file inline", skip(stream, repo, store, config))]
async fn do_download_inline<R: FullRepo + 'static, S: Store + 'static>(
    stream: impl Stream<Item = Result<web::Bytes, Error>> + Unpin + 'static,
    repo: web::Data<R>,
    store: web::Data<S>,
    config: web::Data<Configuration>,
) -> Result<HttpResponse, Error> {
    metrics::increment_counter!("pict-rs.files", "download" => "inline");

    let mut session = ingest::ingest(&repo, &store, stream, None, &config.media).await?;

    let alias = session.alias().expect("alias should exist").to_owned();
    let delete_token = session.delete_token().await?;

    let details = ensure_details(&repo, &store, &config, &alias).await?;

    // Response fully built; keep the ingested file.
    session.disarm();

    Ok(HttpResponse::Created().json(&serde_json::json!({
        "msg": "ok",
        "files": [{
            "file": alias.to_string(),
            "delete_token": delete_token.to_string(),
            "details": details,
        }]
    })))
}
|
|
|
|
|
|
|
|
/// Proxy a downloaded stream into the store and queue its ingest, responding
/// with the upload id the client can later claim.
#[tracing::instrument(name = "Downloading file in background", skip(stream, repo, store))]
async fn do_download_backgrounded<R: FullRepo + 'static, S: Store + 'static>(
    stream: impl Stream<Item = Result<web::Bytes, Error>> + Unpin + 'static,
    repo: web::Data<R>,
    store: web::Data<S>,
) -> Result<HttpResponse, Error> {
    metrics::increment_counter!("pict-rs.files", "download" => "background");

    let backgrounded = Backgrounded::proxy((**repo).clone(), (**store).clone(), stream).await?;

    let upload_id = backgrounded.upload_id().expect("Upload ID exists");
    let identifier = backgrounded
        .identifier()
        .expect("Identifier exists")
        .to_bytes()?;

    queue::queue_ingest(&repo, identifier, upload_id, None).await?;

    // Ingest queued successfully; keep the proxied file.
    backgrounded.disarm();

    Ok(HttpResponse::Accepted().json(&serde_json::json!({
        "msg": "ok",
        "uploads": [{
            "upload_id": upload_id.to_string(),
        }]
    })))
}
|
|
|
|
|
|
|
|
/// Delete aliases and files
#[tracing::instrument(name = "Deleting file", skip(repo))]
async fn delete<R: FullRepo>(
    repo: web::Data<R>,
    config: web::Data<Configuration>,
    path_entries: web::Path<(String, String)>,
) -> Result<HttpResponse, Error> {
    if config.server.read_only {
        return Err(UploadError::ReadOnly.into());
    }

    // Path is /{delete_token}/{alias}.
    let (token, alias) = path_entries.into_inner();

    let token = DeleteToken::from_existing(&token);
    let alias = Alias::from_existing(&alias);

    // Deletion is queued rather than performed inline, hence 204 immediately.
    queue::cleanup_alias(&repo, alias, token).await?;

    Ok(HttpResponse::NoContent().finish())
}
|
|
|
|
|
|
|
|
type ProcessQuery = Vec<(String, String)>;
|
|
|
|
|
|
|
|
fn prepare_process(
|
2023-07-22 17:31:01 +00:00
|
|
|
config: &Configuration,
|
2022-12-23 18:56:15 +00:00
|
|
|
query: web::Query<ProcessQuery>,
|
|
|
|
ext: &str,
|
2023-07-13 03:12:21 +00:00
|
|
|
) -> Result<(InputProcessableFormat, Alias, PathBuf, Vec<String>), Error> {
|
2022-12-23 18:56:15 +00:00
|
|
|
let (alias, operations) =
|
|
|
|
query
|
|
|
|
.into_inner()
|
|
|
|
.into_iter()
|
|
|
|
.fold((String::new(), Vec::new()), |(s, mut acc), (k, v)| {
|
|
|
|
if k == "src" {
|
|
|
|
(v, acc)
|
|
|
|
} else {
|
|
|
|
acc.push((k, v));
|
|
|
|
(s, acc)
|
|
|
|
}
|
|
|
|
});
|
|
|
|
|
|
|
|
if alias.is_empty() {
|
|
|
|
return Err(UploadError::MissingAlias.into());
|
|
|
|
}
|
|
|
|
|
|
|
|
let alias = Alias::from_existing(&alias);
|
|
|
|
|
|
|
|
let operations = operations
|
|
|
|
.into_iter()
|
2023-07-22 17:31:01 +00:00
|
|
|
.filter(|(k, _)| config.media.filters.contains(&k.to_lowercase()))
|
2022-12-23 18:56:15 +00:00
|
|
|
.collect::<Vec<_>>();
|
|
|
|
|
|
|
|
let format = ext
|
2023-07-13 03:12:21 +00:00
|
|
|
.parse::<InputProcessableFormat>()
|
2023-07-09 20:07:49 +00:00
|
|
|
.map_err(|_| UploadError::UnsupportedProcessExtension)?;
|
2022-12-23 18:56:15 +00:00
|
|
|
|
2023-07-13 03:12:21 +00:00
|
|
|
let (thumbnail_path, thumbnail_args) =
|
|
|
|
self::processor::build_chain(&operations, &format.to_string())?;
|
2022-12-23 18:56:15 +00:00
|
|
|
|
|
|
|
Ok((format, alias, thumbnail_path, thumbnail_args))
|
|
|
|
}
|
|
|
|
|
2023-07-22 17:31:01 +00:00
|
|
|
/// Fetch stored details for an already-generated variant without creating it.
#[tracing::instrument(name = "Fetching derived details", skip(repo, config))]
async fn process_details<R: FullRepo, S: Store>(
    query: web::Query<ProcessQuery>,
    ext: web::Path<String>,
    repo: web::Data<R>,
    config: web::Data<Configuration>,
) -> Result<HttpResponse, Error> {
    let (_, alias, thumbnail_path, _) = prepare_process(&config, query, ext.as_str())?;

    let Some(hash) = repo.hash(&alias).await? else {
        // Invalid alias
        return Ok(HttpResponse::NotFound().json(&serde_json::json!({
            "msg": "No images associated with provided alias",
        })));
    };

    let thumbnail_string = thumbnail_path.to_string_lossy().to_string();

    // Record variant access (used for retention), unless read-only.
    if !config.server.read_only {
        repo.accessed(hash.clone(), thumbnail_string.clone())
            .await?;
    }

    let identifier = repo
        .variant_identifier::<S::Identifier>(hash, thumbnail_string)
        .await?
        .ok_or(UploadError::MissingAlias)?;

    let details = repo.details(&identifier).await?;

    let details = details.ok_or(UploadError::NoFiles)?;

    Ok(HttpResponse::Ok().json(&details))
}
|
|
|
|
|
2023-07-06 00:06:48 +00:00
|
|
|
/// Resolve the configured 404 fallback image, returning its alias and hash if
/// one is set and still resolvable; `None` (with a warning) otherwise.
async fn not_found_hash<R: FullRepo>(repo: &R) -> Result<Option<(Alias, R::Bytes)>, Error> {
    let Some(not_found) = repo.get(NOT_FOUND_KEY).await? else {
        return Ok(None);
    };

    let Some(alias) = Alias::from_slice(not_found.as_ref()) else {
        tracing::warn!("Couldn't parse not-found alias");
        return Ok(None);
    };

    let Some(hash) = repo.hash(&alias).await? else {
        tracing::warn!("No hash found for not-found alias");
        return Ok(None);
    };

    Ok(Some((alias, hash)))
}
|
|
|
|
|
2022-12-23 18:56:15 +00:00
|
|
|
/// Process files
///
/// Serves a processed variant of an image: redirects to a public store URL or
/// streams an existing variant when available, otherwise generates the variant
/// on demand. Falls back to the configured 404 image when the alias is unknown.
#[tracing::instrument(
    name = "Serving processed image",
    skip(repo, store, config, process_map)
)]
async fn process<R: FullRepo, S: Store + 'static>(
    range: Option<web::Header<Range>>,
    query: web::Query<ProcessQuery>,
    ext: web::Path<String>,
    repo: web::Data<R>,
    store: web::Data<S>,
    config: web::Data<Configuration>,
    process_map: web::Data<ProcessMap>,
) -> Result<HttpResponse, Error> {
    let (format, alias, thumbnail_path, thumbnail_args) =
        prepare_process(&config, query, ext.as_str())?;

    let path_string = thumbnail_path.to_string_lossy().to_string();

    // Resolve the alias, or fall back to the configured not-found image.
    let (hash, alias, not_found) = if let Some(hash) = repo.hash(&alias).await? {
        (hash, alias, false)
    } else {
        let Some((alias, hash)) = not_found_hash(&repo).await? else {
            return Ok(HttpResponse::NotFound().finish());
        };

        (hash, alias, true)
    };

    // Record variant access (used for retention), unless read-only.
    if !config.server.read_only {
        repo.accessed(hash.clone(), path_string.clone()).await?;
    }

    let identifier_opt = repo
        .variant_identifier::<S::Identifier>(hash.clone(), path_string)
        .await?;

    // Fast path: the variant already exists in the store.
    if let Some(identifier) = identifier_opt {
        // Only trust stored details that carry an internal format.
        let details = repo.details(&identifier).await?.and_then(|details| {
            if details.internal_format().is_some() {
                Some(details)
            } else {
                None
            }
        });

        let details = if let Some(details) = details {
            tracing::debug!("details exist");
            details
        } else {
            if config.server.read_only {
                return Err(UploadError::ReadOnly.into());
            }

            tracing::debug!("generating new details from {:?}", identifier);
            let new_details = Details::from_store(&store, &identifier).await?;
            tracing::debug!("storing details for {:?}", identifier);
            repo.relate_details(&identifier, &new_details).await?;
            tracing::debug!("stored");
            new_details
        };

        // Stores with public URLs (e.g. object storage) get a redirect
        // instead of proxying bytes through this server.
        if let Some(public_url) = store.public_url(&identifier) {
            return Ok(HttpResponse::SeeOther()
                .insert_header((actix_web::http::header::LOCATION, public_url.as_str()))
                .finish());
        }

        return ranged_file_resp(&store, identifier, range, details, not_found).await;
    }

    // Slow path: the variant must be generated, which writes to the store.
    if config.server.read_only {
        return Err(UploadError::ReadOnly.into());
    }

    let original_details = ensure_details(&repo, &store, &config, &alias).await?;

    let (details, bytes) = generate::generate(
        &repo,
        &store,
        &process_map,
        format,
        alias,
        thumbnail_path,
        thumbnail_args,
        original_details.video_format(),
        None,
        &config.media,
        hash,
    )
    .await?;

    // Build the response body, honoring a single-bytes Range when present.
    let (builder, stream) = if let Some(web::Header(range_header)) = range {
        if let Some(range) = range::single_bytes_range(&range_header) {
            let len = bytes.len() as u64;

            if let Some(content_range) = range::to_content_range(range, len) {
                let mut builder = HttpResponse::PartialContent();
                builder.insert_header(content_range);
                let stream = range::chop_bytes(range, bytes, len)?;

                (builder, Either::left(Either::left(stream)))
            } else {
                (
                    HttpResponse::RangeNotSatisfiable(),
                    Either::left(Either::right(empty())),
                )
            }
        } else {
            // Multi-range requests are not supported.
            return Err(UploadError::Range.into());
        }
    } else if not_found {
        // Serve the fallback image body, but with a 404 status.
        (
            HttpResponse::NotFound(),
            Either::right(once(ready(Ok(bytes)))),
        )
    } else {
        (HttpResponse::Ok(), Either::right(once(ready(Ok(bytes)))))
    };

    Ok(srv_response(
        builder,
        stream,
        details.media_type(),
        7 * DAYS,
        details.system_time(),
    ))
}
|
|
|
|
|
2023-07-22 17:31:01 +00:00
|
|
|
/// HEAD handler for processed (variant) images.
///
/// Resolves the alias to a hash, looks up the already-generated variant for
/// the requested process path, and serves its headers. If the variant exists
/// but its details record is missing or stale, details are regenerated from
/// the store and persisted (unless the server is read-only). Responds 404 if
/// either the alias or the variant is unknown; this handler never triggers
/// variant generation.
#[tracing::instrument(name = "Serving processed image headers", skip(repo, store, config))]
async fn process_head<R: FullRepo, S: Store + 'static>(
    range: Option<web::Header<Range>>,
    query: web::Query<ProcessQuery>,
    ext: web::Path<String>,
    repo: web::Data<R>,
    store: web::Data<S>,
    config: web::Data<Configuration>,
) -> Result<HttpResponse, Error> {
    // Only the alias and thumbnail path matter for HEAD; format/args are ignored.
    let (_, alias, thumbnail_path, _) = prepare_process(&config, query, ext.as_str())?;

    let path_string = thumbnail_path.to_string_lossy().to_string();
    let Some(hash) = repo.hash(&alias).await? else {
        // Invalid alias
        return Ok(HttpResponse::NotFound().finish());
    };

    // Record the access (used for variant expiry) unless read-only.
    if !config.server.read_only {
        repo.accessed(hash.clone(), path_string.clone()).await?;
    }

    let identifier_opt = repo
        .variant_identifier::<S::Identifier>(hash.clone(), path_string)
        .await?;

    if let Some(identifier) = identifier_opt {
        // Treat a details record without an internal format as absent so it
        // gets regenerated below.
        let details = repo.details(&identifier).await?.and_then(|details| {
            if details.internal_format().is_some() {
                Some(details)
            } else {
                None
            }
        });

        let details = if let Some(details) = details {
            tracing::debug!("details exist");
            details
        } else {
            // Regenerating details requires a repo write, which is forbidden
            // in read-only mode.
            if config.server.read_only {
                return Err(UploadError::ReadOnly.into());
            }

            tracing::debug!("generating new details from {:?}", identifier);
            let new_details = Details::from_store(&store, &identifier).await?;
            tracing::debug!("storing details for {:?}", identifier);
            repo.relate_details(&identifier, &new_details).await?;
            tracing::debug!("stored");
            new_details
        };

        // Stores with a public URL (e.g. object storage) redirect instead of
        // proxying the bytes through pict-rs.
        if let Some(public_url) = store.public_url(&identifier) {
            return Ok(HttpResponse::SeeOther()
                .insert_header((actix_web::http::header::LOCATION, public_url.as_str()))
                .finish());
        }

        return ranged_file_head_resp(&store, identifier, range, details).await;
    }

    Ok(HttpResponse::NotFound().finish())
}
|
|
|
|
|
|
|
|
/// Process files
|
|
|
|
#[tracing::instrument(name = "Spawning image process", skip(repo))]
|
|
|
|
async fn process_backgrounded<R: FullRepo, S: Store>(
|
|
|
|
query: web::Query<ProcessQuery>,
|
|
|
|
ext: web::Path<String>,
|
|
|
|
repo: web::Data<R>,
|
2023-07-22 17:31:01 +00:00
|
|
|
config: web::Data<Configuration>,
|
2022-12-23 18:56:15 +00:00
|
|
|
) -> Result<HttpResponse, Error> {
|
2023-07-22 17:31:01 +00:00
|
|
|
let (target_format, source, process_path, process_args) =
|
|
|
|
prepare_process(&config, query, ext.as_str())?;
|
2022-12-23 18:56:15 +00:00
|
|
|
|
|
|
|
let path_string = process_path.to_string_lossy().to_string();
|
2023-07-05 21:46:44 +00:00
|
|
|
let Some(hash) = repo.hash(&source).await? else {
|
|
|
|
// Invalid alias
|
|
|
|
return Ok(HttpResponse::BadRequest().finish());
|
|
|
|
};
|
|
|
|
|
2022-12-23 18:56:15 +00:00
|
|
|
let identifier_opt = repo
|
|
|
|
.variant_identifier::<S::Identifier>(hash.clone(), path_string)
|
|
|
|
.await?;
|
|
|
|
|
|
|
|
if identifier_opt.is_some() {
|
|
|
|
return Ok(HttpResponse::Accepted().finish());
|
|
|
|
}
|
|
|
|
|
2023-07-22 17:31:01 +00:00
|
|
|
if config.server.read_only {
|
2023-07-17 19:24:49 +00:00
|
|
|
return Err(UploadError::ReadOnly.into());
|
|
|
|
}
|
|
|
|
|
2022-12-23 18:56:15 +00:00
|
|
|
queue_generate(&repo, target_format, source, process_path, process_args).await?;
|
|
|
|
|
|
|
|
Ok(HttpResponse::Accepted().finish())
|
|
|
|
}
|
|
|
|
|
|
|
|
/// Fetch file details
|
2023-07-22 17:31:01 +00:00
|
|
|
#[tracing::instrument(name = "Fetching details", skip(repo, store, config))]
|
2022-12-23 18:56:15 +00:00
|
|
|
async fn details<R: FullRepo, S: Store + 'static>(
|
|
|
|
alias: web::Path<Serde<Alias>>,
|
|
|
|
repo: web::Data<R>,
|
|
|
|
store: web::Data<S>,
|
2023-07-22 17:31:01 +00:00
|
|
|
config: web::Data<Configuration>,
|
2022-12-23 18:56:15 +00:00
|
|
|
) -> Result<HttpResponse, Error> {
|
|
|
|
let alias = alias.into_inner();
|
|
|
|
|
2023-07-22 17:31:01 +00:00
|
|
|
let details = ensure_details(&repo, &store, &config, &alias).await?;
|
2022-12-23 18:56:15 +00:00
|
|
|
|
|
|
|
Ok(HttpResponse::Ok().json(&details))
|
|
|
|
}
|
|
|
|
|
|
|
|
/// Serve files
///
/// Streams the original file for an alias. Unknown aliases fall back to the
/// configured 404 image (served with a 404 status) when one is set. Stores
/// exposing a public URL redirect with 303 instead of proxying the bytes.
#[tracing::instrument(name = "Serving file", skip(repo, store, config))]
async fn serve<R: FullRepo, S: Store + 'static>(
    range: Option<web::Header<Range>>,
    alias: web::Path<Serde<Alias>>,
    repo: web::Data<R>,
    store: web::Data<S>,
    config: web::Data<Configuration>,
) -> Result<HttpResponse, Error> {
    let alias = alias.into_inner();

    // Resolve alias -> hash; `not_found` marks that the 404 fallback image is
    // being substituted for an unknown alias.
    let (hash, alias, not_found) = if let Some(hash) = repo.hash(&alias).await? {
        (hash, Serde::into_inner(alias), false)
    } else {
        let Some((alias, hash)) = not_found_hash(&repo).await? else {
            return Ok(HttpResponse::NotFound().finish());
        };

        (hash, alias, true)
    };

    // A hash with no original-file identifier is inconsistent state: schedule
    // cleanup for it and report 404 rather than erroring.
    let Some(identifier) = repo.identifier(hash.clone()).await? else {
        tracing::warn!(
            "Original File identifier for hash {} is missing, queue cleanup task",
            hex::encode(&hash)
        );
        crate::queue::cleanup_hash(&repo, hash).await?;
        return Ok(HttpResponse::NotFound().finish());
    };

    let details = ensure_details(&repo, &store, &config, &alias).await?;

    // Stores with a public URL (e.g. object storage) redirect instead of
    // proxying the bytes through pict-rs.
    if let Some(public_url) = store.public_url(&identifier) {
        return Ok(HttpResponse::SeeOther()
            .insert_header((actix_web::http::header::LOCATION, public_url.as_str()))
            .finish());
    }

    ranged_file_resp(&store, identifier, range, details, not_found).await
}
|
|
|
|
|
2023-07-22 17:31:01 +00:00
|
|
|
#[tracing::instrument(name = "Serving file headers", skip(repo, store, config))]
|
2022-12-23 18:56:15 +00:00
|
|
|
async fn serve_head<R: FullRepo, S: Store + 'static>(
|
|
|
|
range: Option<web::Header<Range>>,
|
|
|
|
alias: web::Path<Serde<Alias>>,
|
|
|
|
repo: web::Data<R>,
|
|
|
|
store: web::Data<S>,
|
2023-07-22 17:31:01 +00:00
|
|
|
config: web::Data<Configuration>,
|
2022-12-23 18:56:15 +00:00
|
|
|
) -> Result<HttpResponse, Error> {
|
|
|
|
let alias = alias.into_inner();
|
|
|
|
|
2023-07-05 21:46:44 +00:00
|
|
|
let Some(identifier) = repo.identifier_from_alias::<S::Identifier>(&alias).await? else {
|
|
|
|
// Invalid alias
|
|
|
|
return Ok(HttpResponse::NotFound().finish());
|
|
|
|
};
|
2022-12-23 18:56:15 +00:00
|
|
|
|
2023-07-22 17:31:01 +00:00
|
|
|
let details = ensure_details(&repo, &store, &config, &alias).await?;
|
2022-12-23 18:56:15 +00:00
|
|
|
|
2023-07-14 19:53:37 +00:00
|
|
|
if let Some(public_url) = store.public_url(&identifier) {
|
|
|
|
return Ok(HttpResponse::SeeOther()
|
|
|
|
.insert_header((actix_web::http::header::LOCATION, public_url.as_str()))
|
|
|
|
.finish());
|
|
|
|
}
|
|
|
|
|
2022-12-23 18:56:15 +00:00
|
|
|
ranged_file_head_resp(&store, identifier, range, details).await
|
|
|
|
}
|
|
|
|
|
|
|
|
async fn ranged_file_head_resp<S: Store + 'static>(
|
|
|
|
store: &S,
|
|
|
|
identifier: S::Identifier,
|
|
|
|
range: Option<web::Header<Range>>,
|
|
|
|
details: Details,
|
|
|
|
) -> Result<HttpResponse, Error> {
|
|
|
|
let builder = if let Some(web::Header(range_header)) = range {
|
|
|
|
//Range header exists - return as ranged
|
|
|
|
if let Some(range) = range::single_bytes_range(&range_header) {
|
|
|
|
let len = store.len(&identifier).await?;
|
|
|
|
|
|
|
|
if let Some(content_range) = range::to_content_range(range, len) {
|
|
|
|
let mut builder = HttpResponse::PartialContent();
|
|
|
|
builder.insert_header(content_range);
|
|
|
|
builder
|
|
|
|
} else {
|
|
|
|
HttpResponse::RangeNotSatisfiable()
|
|
|
|
}
|
|
|
|
} else {
|
|
|
|
return Err(UploadError::Range.into());
|
|
|
|
}
|
|
|
|
} else {
|
|
|
|
// no range header
|
|
|
|
HttpResponse::Ok()
|
|
|
|
};
|
|
|
|
|
|
|
|
Ok(srv_head(
|
|
|
|
builder,
|
2023-07-14 19:53:37 +00:00
|
|
|
details.media_type(),
|
2022-12-23 18:56:15 +00:00
|
|
|
7 * DAYS,
|
|
|
|
details.system_time(),
|
|
|
|
)
|
|
|
|
.finish())
|
|
|
|
}
|
|
|
|
|
|
|
|
/// Stream a stored file, honoring a single-range `Range` header.
///
/// When `not_found` is true the full body is served with a 404 status (the
/// configured 404 fallback image). The nested `Either`s exist only to give
/// the three possible stream shapes (chopped range / empty / full file) a
/// single concrete type.
async fn ranged_file_resp<S: Store + 'static>(
    store: &S,
    identifier: S::Identifier,
    range: Option<web::Header<Range>>,
    details: Details,
    not_found: bool,
) -> Result<HttpResponse, Error> {
    let (builder, stream) = if let Some(web::Header(range_header)) = range {
        //Range header exists - return as ranged
        if let Some(range) = range::single_bytes_range(&range_header) {
            let len = store.len(&identifier).await?;

            if let Some(content_range) = range::to_content_range(range, len) {
                let mut builder = HttpResponse::PartialContent();
                builder.insert_header(content_range);
                (
                    builder,
                    Either::left(Either::left(
                        range::chop_store(range, store, &identifier, len)
                            .await?
                            .map_err(Error::from),
                    )),
                )
            } else {
                // Range was valid syntax but unsatisfiable: 416 with no body.
                (
                    HttpResponse::RangeNotSatisfiable(),
                    Either::left(Either::right(empty())),
                )
            }
        } else {
            // Multi-range (or unparseable) requests are rejected.
            return Err(UploadError::Range.into());
        }
    } else {
        //No Range header in the request - return the entire document
        let stream = store
            .to_stream(&identifier, None, None)
            .await?
            .map_err(Error::from);

        if not_found {
            (HttpResponse::NotFound(), Either::right(stream))
        } else {
            (HttpResponse::Ok(), Either::right(stream))
        }
    };

    Ok(srv_response(
        builder,
        stream,
        details.media_type(),
        7 * DAYS,
        details.system_time(),
    ))
}
|
|
|
|
|
|
|
|
// A helper method to produce responses with proper cache headers
|
|
|
|
fn srv_response<S, E>(
|
|
|
|
builder: HttpResponseBuilder,
|
|
|
|
stream: S,
|
|
|
|
ext: mime::Mime,
|
|
|
|
expires: u32,
|
|
|
|
modified: SystemTime,
|
|
|
|
) -> HttpResponse
|
|
|
|
where
|
|
|
|
S: Stream<Item = Result<web::Bytes, E>> + 'static,
|
|
|
|
E: std::error::Error + 'static,
|
|
|
|
actix_web::Error: From<E>,
|
|
|
|
{
|
|
|
|
let stream = stream.timeout(Duration::from_secs(5)).map(|res| match res {
|
|
|
|
Ok(Ok(item)) => Ok(item),
|
|
|
|
Ok(Err(e)) => Err(actix_web::Error::from(e)),
|
|
|
|
Err(e) => Err(Error::from(e).into()),
|
|
|
|
});
|
|
|
|
|
|
|
|
srv_head(builder, ext, expires, modified).streaming(stream)
|
|
|
|
}
|
|
|
|
|
|
|
|
// A helper method to produce responses with proper cache headers
|
|
|
|
fn srv_head(
|
|
|
|
mut builder: HttpResponseBuilder,
|
|
|
|
ext: mime::Mime,
|
|
|
|
expires: u32,
|
|
|
|
modified: SystemTime,
|
|
|
|
) -> HttpResponseBuilder {
|
|
|
|
builder
|
|
|
|
.insert_header(LastModified(modified.into()))
|
|
|
|
.insert_header(CacheControl(vec![
|
|
|
|
CacheDirective::Public,
|
|
|
|
CacheDirective::MaxAge(expires),
|
|
|
|
CacheDirective::Extension("immutable".to_owned(), None),
|
|
|
|
]))
|
|
|
|
.insert_header((ACCEPT_RANGES, "bytes"))
|
|
|
|
.content_type(ext.to_string());
|
|
|
|
|
|
|
|
builder
|
|
|
|
}
|
|
|
|
|
2023-07-22 17:31:01 +00:00
|
|
|
#[tracing::instrument(name = "Spawning variant cleanup", skip(repo, config))]
|
|
|
|
async fn clean_variants<R: FullRepo>(
|
|
|
|
repo: web::Data<R>,
|
|
|
|
config: web::Data<Configuration>,
|
|
|
|
) -> Result<HttpResponse, Error> {
|
|
|
|
if config.server.read_only {
|
2023-07-17 19:24:49 +00:00
|
|
|
return Err(UploadError::ReadOnly.into());
|
|
|
|
}
|
|
|
|
|
2022-12-23 18:56:15 +00:00
|
|
|
queue::cleanup_all_variants(&repo).await?;
|
|
|
|
Ok(HttpResponse::NoContent().finish())
|
|
|
|
}
|
|
|
|
|
|
|
|
/// Query/body payload identifying a single file by its alias.
#[derive(Debug, serde::Deserialize)]
struct AliasQuery {
    // The alias naming the file to operate on.
    alias: Serde<Alias>,
}
|
|
|
|
|
2023-07-22 17:31:01 +00:00
|
|
|
#[tracing::instrument(name = "Setting 404 Image", skip(repo, config))]
|
2023-07-06 00:06:48 +00:00
|
|
|
async fn set_not_found<R: FullRepo>(
|
|
|
|
json: web::Json<AliasQuery>,
|
|
|
|
repo: web::Data<R>,
|
2023-07-22 17:31:01 +00:00
|
|
|
config: web::Data<Configuration>,
|
2023-07-06 00:06:48 +00:00
|
|
|
) -> Result<HttpResponse, Error> {
|
2023-07-22 17:31:01 +00:00
|
|
|
if config.server.read_only {
|
2023-07-17 19:24:49 +00:00
|
|
|
return Err(UploadError::ReadOnly.into());
|
|
|
|
}
|
|
|
|
|
2023-07-06 00:06:48 +00:00
|
|
|
let alias = json.into_inner().alias;
|
|
|
|
|
2023-07-07 18:33:27 +00:00
|
|
|
if repo.hash(&alias).await?.is_none() {
|
|
|
|
return Ok(HttpResponse::BadRequest().json(serde_json::json!({
|
|
|
|
"msg": "No hash associated with provided alias"
|
|
|
|
})));
|
|
|
|
}
|
|
|
|
|
|
|
|
repo.set(NOT_FOUND_KEY, alias.to_bytes().into()).await?;
|
2023-07-06 00:06:48 +00:00
|
|
|
|
|
|
|
Ok(HttpResponse::Created().json(serde_json::json!({
|
|
|
|
"msg": "ok",
|
|
|
|
})))
|
|
|
|
}
|
|
|
|
|
2023-07-22 17:31:01 +00:00
|
|
|
#[tracing::instrument(name = "Purging file", skip(repo, config))]
|
2022-12-23 18:56:15 +00:00
|
|
|
async fn purge<R: FullRepo>(
|
|
|
|
query: web::Query<AliasQuery>,
|
|
|
|
repo: web::Data<R>,
|
2023-07-22 17:31:01 +00:00
|
|
|
config: web::Data<Configuration>,
|
2022-12-23 18:56:15 +00:00
|
|
|
) -> Result<HttpResponse, Error> {
|
2023-07-22 17:31:01 +00:00
|
|
|
if config.server.read_only {
|
2023-07-17 19:24:49 +00:00
|
|
|
return Err(UploadError::ReadOnly.into());
|
|
|
|
}
|
|
|
|
|
2022-12-23 18:56:15 +00:00
|
|
|
let alias = query.into_inner().alias;
|
|
|
|
let aliases = repo.aliases_from_alias(&alias).await?;
|
|
|
|
|
2023-07-05 21:46:44 +00:00
|
|
|
let Some(hash) = repo.hash(&alias).await? else {
|
|
|
|
return Ok(HttpResponse::BadRequest().json(&serde_json::json!({
|
|
|
|
"msg": "No images associated with provided alias",
|
|
|
|
})));
|
|
|
|
};
|
2022-12-23 18:56:15 +00:00
|
|
|
queue::cleanup_hash(&repo, hash).await?;
|
|
|
|
|
|
|
|
Ok(HttpResponse::Ok().json(&serde_json::json!({
|
|
|
|
"msg": "ok",
|
|
|
|
"aliases": aliases.iter().map(|a| a.to_string()).collect::<Vec<_>>()
|
|
|
|
})))
|
|
|
|
}
|
|
|
|
|
|
|
|
#[tracing::instrument(name = "Fetching aliases", skip(repo))]
|
|
|
|
async fn aliases<R: FullRepo>(
|
|
|
|
query: web::Query<AliasQuery>,
|
|
|
|
repo: web::Data<R>,
|
|
|
|
) -> Result<HttpResponse, Error> {
|
|
|
|
let alias = query.into_inner().alias;
|
|
|
|
let aliases = repo.aliases_from_alias(&alias).await?;
|
|
|
|
|
|
|
|
Ok(HttpResponse::Ok().json(&serde_json::json!({
|
|
|
|
"msg": "ok",
|
|
|
|
"aliases": aliases.iter().map(|a| a.to_string()).collect::<Vec<_>>()
|
|
|
|
})))
|
|
|
|
}
|
|
|
|
|
|
|
|
#[tracing::instrument(name = "Fetching identifier", skip(repo))]
|
|
|
|
async fn identifier<R: FullRepo, S: Store>(
|
|
|
|
query: web::Query<AliasQuery>,
|
|
|
|
repo: web::Data<R>,
|
|
|
|
) -> Result<HttpResponse, Error> {
|
|
|
|
let alias = query.into_inner().alias;
|
2023-07-05 21:46:44 +00:00
|
|
|
let Some(identifier) = repo.identifier_from_alias::<S::Identifier>(&alias).await? else {
|
|
|
|
// Invalid alias
|
|
|
|
return Ok(HttpResponse::NotFound().json(serde_json::json!({
|
|
|
|
"msg": "No identifiers associated with provided alias"
|
|
|
|
})));
|
|
|
|
};
|
2022-12-23 18:56:15 +00:00
|
|
|
|
|
|
|
Ok(HttpResponse::Ok().json(&serde_json::json!({
|
|
|
|
"msg": "ok",
|
|
|
|
"identifier": identifier.string_repr(),
|
|
|
|
})))
|
|
|
|
}
|
|
|
|
|
2023-07-07 17:05:42 +00:00
|
|
|
async fn healthz<R: FullRepo, S: Store>(
|
|
|
|
repo: web::Data<R>,
|
|
|
|
store: web::Data<S>,
|
|
|
|
) -> Result<HttpResponse, Error> {
|
2023-01-29 17:36:09 +00:00
|
|
|
repo.health_check().await?;
|
2023-07-07 17:05:42 +00:00
|
|
|
store.health_check().await?;
|
2023-01-29 17:36:09 +00:00
|
|
|
Ok(HttpResponse::Ok().finish())
|
|
|
|
}
|
|
|
|
|
2022-12-23 18:56:15 +00:00
|
|
|
fn transform_error(error: actix_form_data::Error) -> actix_web::Error {
|
|
|
|
let error: Error = error.into();
|
|
|
|
let error: actix_web::Error = error.into();
|
|
|
|
error
|
|
|
|
}
|
|
|
|
|
2023-07-22 17:31:01 +00:00
|
|
|
fn build_client(config: &Configuration) -> Result<ClientWithMiddleware, Error> {
|
2023-07-21 21:58:31 +00:00
|
|
|
let client = reqwest::Client::builder()
|
|
|
|
.user_agent("pict-rs v0.5.0-main")
|
|
|
|
.use_rustls_tls()
|
2023-07-22 17:31:01 +00:00
|
|
|
.pool_max_idle_per_host(config.client.pool_size)
|
2023-07-21 21:58:31 +00:00
|
|
|
.build()
|
|
|
|
.map_err(UploadError::BuildClient)?;
|
|
|
|
|
|
|
|
Ok(ClientBuilder::new(client)
|
|
|
|
.with(TracingMiddleware::default())
|
|
|
|
.build())
|
2022-12-23 18:56:15 +00:00
|
|
|
}
|
|
|
|
|
2023-07-22 17:31:01 +00:00
|
|
|
fn next_worker_id(config: &Configuration) -> String {
|
2022-12-23 18:56:15 +00:00
|
|
|
static WORKER_ID: AtomicU64 = AtomicU64::new(0);
|
|
|
|
|
|
|
|
let next_id = WORKER_ID.fetch_add(1, Ordering::Relaxed);
|
|
|
|
|
2023-07-22 17:31:01 +00:00
|
|
|
format!("{}-{}", config.server.worker_id, next_id)
|
2022-12-23 18:56:15 +00:00
|
|
|
}
|
|
|
|
|
2023-07-08 22:35:57 +00:00
|
|
|
/// Register all HTTP routes and shared app data on the service config.
///
/// `extra_config` lets the repo backend contribute backend-specific internal
/// routes (e.g. sled's `/export`). The `/internal` scope is gated behind the
/// configured API key via the `Internal` middleware.
fn configure_endpoints<
    R: FullRepo + 'static,
    S: Store + 'static,
    F: Fn(&mut web::ServiceConfig),
>(
    config: &mut web::ServiceConfig,
    repo: R,
    store: S,
    configuration: Configuration,
    client: ClientWithMiddleware,
    extra_config: F,
) {
    config
        .app_data(web::Data::new(repo))
        .app_data(web::Data::new(store))
        .app_data(web::Data::new(client))
        .app_data(web::Data::new(configuration.clone()))
        .route("/healthz", web::get().to(healthz::<R, S>))
        // Public image endpoints.
        .service(
            web::scope("/image")
                // Inline (blocking) upload.
                .service(
                    web::resource("")
                        .guard(guard::Post())
                        .route(web::post().to(upload::<R, S>)),
                )
                // Background upload + claim polling.
                .service(
                    web::scope("/backgrounded")
                        .service(
                            web::resource("")
                                .guard(guard::Post())
                                .route(web::post().to(upload_backgrounded::<R, S>)),
                        )
                        .service(
                            web::resource("/claim").route(web::get().to(claim_upload::<R, S>)),
                        ),
                )
                .service(web::resource("/download").route(web::get().to(download::<R, S>)))
                // Deletion requires the delete token issued at upload time.
                .service(
                    web::resource("/delete/{delete_token}/(unknown)")
                        .route(web::delete().to(delete::<R>))
                        .route(web::get().to(delete::<R>)),
                )
                .service(
                    web::resource("/original/(unknown)")
                        .route(web::get().to(serve::<R, S>))
                        .route(web::head().to(serve_head::<R, S>)),
                )
                // Variant generation/serving.
                .service(
                    web::resource("/process.{ext}")
                        .route(web::get().to(process::<R, S>))
                        .route(web::head().to(process_head::<R, S>)),
                )
                .service(
                    web::resource("/process_backgrounded.{ext}")
                        .route(web::get().to(process_backgrounded::<R, S>)),
                )
                // Details for originals and variants.
                .service(
                    web::scope("/details")
                        .service(
                            web::resource("/original/(unknown)")
                                .route(web::get().to(details::<R, S>)),
                        )
                        .service(
                            web::resource("/process.{ext}")
                                .route(web::get().to(process_details::<R, S>)),
                        ),
                ),
        )
        // Admin endpoints, guarded by the API key middleware.
        .service(
            web::scope("/internal")
                .wrap(Internal(
                    configuration.server.api_key.as_ref().map(|s| s.to_owned()),
                ))
                .service(web::resource("/import").route(web::post().to(import::<R, S>)))
                .service(web::resource("/variants").route(web::delete().to(clean_variants::<R>)))
                .service(web::resource("/purge").route(web::post().to(purge::<R>)))
                .service(web::resource("/aliases").route(web::get().to(aliases::<R>)))
                .service(web::resource("/identifier").route(web::get().to(identifier::<R, S>)))
                .service(web::resource("/set_not_found").route(web::post().to(set_not_found::<R>)))
                .configure(extra_config),
        );
}
|
2022-12-23 18:56:15 +00:00
|
|
|
|
2023-07-22 17:31:01 +00:00
|
|
|
fn spawn_workers<R, S>(repo: R, store: S, config: &Configuration, process_map: ProcessMap)
|
2023-07-05 14:52:19 +00:00
|
|
|
where
|
|
|
|
R: FullRepo + 'static,
|
|
|
|
S: Store + 'static,
|
|
|
|
{
|
|
|
|
tracing::trace_span!(parent: None, "Spawn task").in_scope(|| {
|
|
|
|
actix_rt::spawn(queue::process_cleanup(
|
|
|
|
repo.clone(),
|
|
|
|
store.clone(),
|
2023-07-22 17:31:01 +00:00
|
|
|
next_worker_id(config),
|
2023-07-05 14:52:19 +00:00
|
|
|
))
|
|
|
|
});
|
2023-07-22 16:15:30 +00:00
|
|
|
tracing::trace_span!(parent: None, "Spawn task").in_scope(|| {
|
|
|
|
actix_rt::spawn(queue::process_images(
|
|
|
|
repo,
|
|
|
|
store,
|
|
|
|
process_map,
|
2023-07-22 17:31:01 +00:00
|
|
|
config.clone(),
|
|
|
|
next_worker_id(config),
|
2023-07-22 16:15:30 +00:00
|
|
|
))
|
|
|
|
});
|
2023-07-05 14:52:19 +00:00
|
|
|
}
|
|
|
|
|
2023-07-08 22:35:57 +00:00
|
|
|
/// Run the HTTP server backed by the filesystem store.
///
/// The `HttpServer::new` factory closure runs once per worker thread, so each
/// worker gets its own clones of the repo/store/client/config and spawns its
/// own background queue workers.
async fn launch_file_store<R: FullRepo + 'static, F: Fn(&mut web::ServiceConfig) + Send + Clone>(
    repo: R,
    store: FileStore,
    client: ClientWithMiddleware,
    config: Configuration,
    extra_config: F,
) -> std::io::Result<()> {
    // Shared across workers so concurrent requests for the same variant
    // deduplicate their processing.
    let process_map = ProcessMap::new();

    let address = config.server.address;

    HttpServer::new(move || {
        // Per-worker clones; the originals stay captured by the factory.
        let client = client.clone();
        let store = store.clone();
        let repo = repo.clone();
        let config = config.clone();
        let extra_config = extra_config.clone();

        spawn_workers(repo.clone(), store.clone(), &config, process_map.clone());

        App::new()
            .wrap(TracingLogger::default())
            .wrap(Deadline)
            .wrap(Metrics)
            .app_data(web::Data::new(process_map.clone()))
            .configure(move |sc| configure_endpoints(sc, repo, store, config, client, extra_config))
    })
    .bind(address)?
    .run()
    .await
}
|
2022-12-23 18:56:15 +00:00
|
|
|
|
2023-07-08 22:35:57 +00:00
|
|
|
/// Run the HTTP server backed by object storage.
///
/// Mirrors [`launch_file_store`], except the store is built per worker from
/// `store_config` (each worker gets its own client-bound `ObjectStore`).
async fn launch_object_store<
    R: FullRepo + 'static,
    F: Fn(&mut web::ServiceConfig) + Send + Clone,
>(
    repo: R,
    store_config: ObjectStoreConfig,
    client: ClientWithMiddleware,
    config: Configuration,
    extra_config: F,
) -> std::io::Result<()> {
    // Shared across workers so concurrent requests for the same variant
    // deduplicate their processing.
    let process_map = ProcessMap::new();

    let address = config.server.address;

    HttpServer::new(move || {
        // Per-worker clones; the originals stay captured by the factory.
        let client = client.clone();
        let store = store_config.clone().build(client.clone());
        let repo = repo.clone();
        let config = config.clone();
        let extra_config = extra_config.clone();

        spawn_workers(repo.clone(), store.clone(), &config, process_map.clone());

        App::new()
            .wrap(TracingLogger::default())
            .wrap(Deadline)
            .wrap(Metrics)
            .app_data(web::Data::new(process_map.clone()))
            .configure(move |sc| configure_endpoints(sc, repo, store, config, client, extra_config))
    })
    .bind(address)?
    .run()
    .await
}
|
|
|
|
|
2023-06-20 20:59:08 +00:00
|
|
|
/// Migrate every stored file from `from` into the destination described by
/// `to`, building the destination store first.
///
/// `skip_missing_files` controls whether files present in the repo but absent
/// from the source store abort the migration or are skipped.
async fn migrate_inner<S1>(
    repo: Repo,
    client: ClientWithMiddleware,
    from: S1,
    to: config::primitives::Store,
    skip_missing_files: bool,
) -> color_eyre::Result<()>
where
    S1: Store + 'static,
{
    match to {
        config::primitives::Store::Filesystem(config::Filesystem { path }) => {
            let to = FileStore::build(path.clone(), repo.clone()).await?;

            match repo {
                Repo::Sled(repo) => migrate_store(repo, from, to, skip_missing_files).await?,
            }
        }
        config::primitives::Store::ObjectStorage(config::primitives::ObjectStorage {
            endpoint,
            bucket_name,
            use_path_style,
            region,
            access_key,
            secret_key,
            session_token,
            signature_duration,
            client_timeout,
            public_endpoint,
        }) => {
            let to = ObjectStore::build(
                endpoint.clone(),
                bucket_name,
                if use_path_style {
                    UrlStyle::Path
                } else {
                    UrlStyle::VirtualHost
                },
                region,
                access_key,
                secret_key,
                session_token,
                // Defaults: 15s presigned-URL validity, 30s client timeout.
                signature_duration.unwrap_or(15),
                client_timeout.unwrap_or(30),
                public_endpoint,
                repo.clone(),
            )
            .await?
            .build(client);

            match repo {
                Repo::Sled(repo) => migrate_store(repo, from, to, skip_missing_files).await?,
            }
        }
    }

    Ok(())
}
|
|
|
|
|
2022-12-26 22:35:25 +00:00
|
|
|
impl<P: AsRef<Path>, T: serde::Serialize> ConfigSource<P, T> {
    /// Initialize the pict-rs configuration
    ///
    /// This takes an optional config_file path which is a valid pict-rs configuration file, and an
    /// optional save_to path, which the generated configuration will be saved into. Since many
    /// parameters have defaults, it can be useful to dump a valid configuration with default values to
    /// see what is available for tweaking.
    ///
    /// When running pict-rs as a library, configuration is limited to environment variables and
    /// configuration files. Commandline options are not available.
    ///
    /// ```rust
    /// fn main() -> Result<(), Box<dyn std::error::Error>> {
    ///     pict_rs::ConfigSource::memory(serde_json::json!({
    ///         "server": {
    ///             "address": "127.0.0.1:8080"
    ///         },
    ///         "old_db": {
    ///             "path": "./old"
    ///         },
    ///         "repo": {
    ///             "type": "sled",
    ///             "path": "./sled-repo"
    ///         },
    ///         "store": {
    ///             "type": "filesystem",
    ///             "path": "./files"
    ///         }
    ///     })).init::<&str>(None)?;
    ///
    ///     Ok(())
    /// }
    /// ```
    ///
    /// # Errors
    ///
    /// Returns an error when the combined configuration sources cannot be
    /// assembled into a valid configuration (see `config::configure_without_clap`).
    pub fn init<Q: AsRef<Path>>(
        self,
        save_to: Option<Q>,
    ) -> color_eyre::Result<PictRsConfiguration> {
        config::configure_without_clap(self, save_to)
    }
}
|
|
|
|
|
2023-07-08 22:35:57 +00:00
|
|
|
async fn export_handler(repo: web::Data<SledRepo>) -> Result<HttpResponse, Error> {
|
|
|
|
repo.export().await?;
|
|
|
|
|
|
|
|
Ok(HttpResponse::Created().json(&serde_json::json!({
|
|
|
|
"msg": "ok"
|
|
|
|
})))
|
|
|
|
}
|
|
|
|
|
|
|
|
/// Internal routes that only exist for the sled repo backend.
fn sled_extra_config(sc: &mut web::ServiceConfig) {
    let export = web::resource("/export").route(web::post().to(export_handler));
    sc.service(export);
}
|
|
|
|
|
2023-07-22 17:31:01 +00:00
|
|
|
impl PictRsConfiguration {
|
|
|
|
/// Build the pict-rs configuration from commandline arguments
|
|
|
|
///
|
|
|
|
/// This is probably not useful for 3rd party applications that handle their own commandline
|
|
|
|
pub fn build_default() -> color_eyre::Result<Self> {
|
|
|
|
config::configure()
|
|
|
|
}
|
|
|
|
|
|
|
|
/// Install the default pict-rs tracer
|
|
|
|
///
|
|
|
|
/// This is probably not useful for 3rd party applications that install their own tracing
|
|
|
|
/// subscribers.
|
2023-07-22 17:39:39 +00:00
|
|
|
pub fn install_tracing(self) -> color_eyre::Result<Self> {
|
|
|
|
init_tracing(&self.config.tracing)?;
|
|
|
|
Ok(self)
|
2023-07-22 17:31:01 +00:00
|
|
|
}
|
|
|
|
|
2023-07-22 21:47:59 +00:00
|
|
|
pub fn install_metrics(self) -> color_eyre::Result<Self> {
|
|
|
|
if let Some(addr) = self.config.metrics.prometheus_address {
|
|
|
|
PrometheusBuilder::new()
|
|
|
|
.with_http_listener(addr)
|
|
|
|
.install()?;
|
|
|
|
}
|
|
|
|
|
|
|
|
Ok(self)
|
|
|
|
}
|
|
|
|
|
2023-07-22 17:31:01 +00:00
|
|
|
/// Run the pict-rs application
|
|
|
|
///
|
|
|
|
/// This must be called after `init_config`, or else the default configuration builder will run and
|
|
|
|
/// fail.
|
|
|
|
pub async fn run(self) -> color_eyre::Result<()> {
|
|
|
|
let PictRsConfiguration { config, operation } = self;
|
|
|
|
|
|
|
|
let repo = Repo::open(config.repo.clone())?;
|
|
|
|
repo.migrate_from_db(config.old_db.path.clone()).await?;
|
|
|
|
let client = build_client(&config)?;
|
|
|
|
|
|
|
|
match operation {
|
|
|
|
Operation::Run => (),
|
|
|
|
Operation::MigrateStore {
|
|
|
|
skip_missing_files,
|
|
|
|
from,
|
|
|
|
to,
|
|
|
|
} => {
|
|
|
|
match from {
|
|
|
|
config::primitives::Store::Filesystem(config::Filesystem { path }) => {
|
|
|
|
let from = FileStore::build(path.clone(), repo.clone()).await?;
|
|
|
|
migrate_inner(repo, client, from, to, skip_missing_files).await?;
|
|
|
|
}
|
|
|
|
config::primitives::Store::ObjectStorage(
|
|
|
|
config::primitives::ObjectStorage {
|
|
|
|
endpoint,
|
|
|
|
bucket_name,
|
|
|
|
use_path_style,
|
|
|
|
region,
|
|
|
|
access_key,
|
|
|
|
secret_key,
|
|
|
|
session_token,
|
|
|
|
signature_duration,
|
|
|
|
client_timeout,
|
|
|
|
public_endpoint,
|
2022-12-23 18:56:15 +00:00
|
|
|
},
|
2023-07-22 17:31:01 +00:00
|
|
|
) => {
|
|
|
|
let from = ObjectStore::build(
|
|
|
|
endpoint,
|
|
|
|
bucket_name,
|
|
|
|
if use_path_style {
|
|
|
|
UrlStyle::Path
|
|
|
|
} else {
|
|
|
|
UrlStyle::VirtualHost
|
|
|
|
},
|
|
|
|
region,
|
|
|
|
access_key,
|
|
|
|
secret_key,
|
|
|
|
session_token,
|
|
|
|
signature_duration.unwrap_or(15),
|
|
|
|
client_timeout.unwrap_or(30),
|
|
|
|
public_endpoint,
|
|
|
|
repo.clone(),
|
|
|
|
)
|
|
|
|
.await?
|
|
|
|
.build(client.clone());
|
2022-12-23 18:56:15 +00:00
|
|
|
|
2023-07-22 17:31:01 +00:00
|
|
|
migrate_inner(repo, client, from, to, skip_missing_files).await?;
|
|
|
|
}
|
2022-12-23 18:56:15 +00:00
|
|
|
}
|
|
|
|
|
2023-07-22 17:31:01 +00:00
|
|
|
return Ok(());
|
|
|
|
}
|
2022-12-23 18:56:15 +00:00
|
|
|
}
|
|
|
|
|
2023-07-22 17:31:01 +00:00
|
|
|
if config.server.read_only {
|
|
|
|
tracing::warn!("Launching in READ ONLY mode");
|
|
|
|
}
|
2023-07-17 19:24:49 +00:00
|
|
|
|
2023-07-22 17:31:01 +00:00
|
|
|
match config.store.clone() {
|
|
|
|
config::Store::Filesystem(config::Filesystem { path }) => {
|
|
|
|
repo.migrate_identifiers().await?;
|
2022-12-23 18:56:15 +00:00
|
|
|
|
2023-07-22 17:31:01 +00:00
|
|
|
let store = FileStore::build(path, repo.clone()).await?;
|
|
|
|
match repo {
|
|
|
|
Repo::Sled(sled_repo) => {
|
|
|
|
sled_repo
|
|
|
|
.requeue_in_progress(config.server.worker_id.as_bytes().to_vec())
|
|
|
|
.await?;
|
2023-06-23 16:20:20 +00:00
|
|
|
|
2023-07-22 17:31:01 +00:00
|
|
|
launch_file_store(sled_repo, store, client, config, sled_extra_config)
|
|
|
|
.await?;
|
|
|
|
}
|
2023-06-23 16:20:20 +00:00
|
|
|
}
|
2022-12-23 18:56:15 +00:00
|
|
|
}
|
2023-07-22 17:31:01 +00:00
|
|
|
config::Store::ObjectStorage(config::ObjectStorage {
|
2022-12-23 18:56:15 +00:00
|
|
|
endpoint,
|
|
|
|
bucket_name,
|
2023-07-22 17:31:01 +00:00
|
|
|
use_path_style,
|
2022-12-23 18:56:15 +00:00
|
|
|
region,
|
|
|
|
access_key,
|
|
|
|
secret_key,
|
|
|
|
session_token,
|
2023-07-11 18:01:58 +00:00
|
|
|
signature_duration,
|
|
|
|
client_timeout,
|
2023-07-14 19:53:37 +00:00
|
|
|
public_endpoint,
|
2023-07-22 17:31:01 +00:00
|
|
|
}) => {
|
|
|
|
let store = ObjectStore::build(
|
|
|
|
endpoint,
|
|
|
|
bucket_name,
|
|
|
|
if use_path_style {
|
|
|
|
UrlStyle::Path
|
|
|
|
} else {
|
|
|
|
UrlStyle::VirtualHost
|
|
|
|
},
|
|
|
|
region,
|
|
|
|
access_key,
|
|
|
|
secret_key,
|
|
|
|
session_token,
|
|
|
|
signature_duration,
|
|
|
|
client_timeout,
|
|
|
|
public_endpoint,
|
|
|
|
repo.clone(),
|
|
|
|
)
|
|
|
|
.await?;
|
2022-12-23 18:56:15 +00:00
|
|
|
|
2023-07-22 17:31:01 +00:00
|
|
|
match repo {
|
|
|
|
Repo::Sled(sled_repo) => {
|
|
|
|
sled_repo
|
|
|
|
.requeue_in_progress(config.server.worker_id.as_bytes().to_vec())
|
|
|
|
.await?;
|
2023-06-23 16:20:20 +00:00
|
|
|
|
2023-07-22 17:31:01 +00:00
|
|
|
launch_object_store(sled_repo, store, client, config, sled_extra_config)
|
|
|
|
.await?;
|
|
|
|
}
|
2023-06-23 16:20:20 +00:00
|
|
|
}
|
2022-12-23 18:56:15 +00:00
|
|
|
}
|
|
|
|
}
|
2023-06-23 16:20:20 +00:00
|
|
|
|
2023-07-22 17:31:01 +00:00
|
|
|
self::tmp_file::remove_tmp_dir().await?;
|
2023-06-23 16:20:20 +00:00
|
|
|
|
2023-07-22 17:31:01 +00:00
|
|
|
Ok(())
|
|
|
|
}
|
2022-12-23 18:56:15 +00:00
|
|
|
}
|