
Add backgrounding routes

- Accept backgrounded uploads
- Allow backgrounded processing

Still TODO:
- Endpoint for waiting on/claiming an upload
Aode (lion) 2022-04-02 20:56:29 -05:00
parent c4d014597e
commit 8734dfbdc7
6 changed files with 252 additions and 10 deletions

src/backgrounded.rs (new file, 92 lines)

@@ -0,0 +1,92 @@
+use crate::{
+    error::Error,
+    repo::{FullRepo, UploadId, UploadRepo},
+    store::Store,
+};
+use actix_web::web::Bytes;
+use futures_util::{Stream, TryStreamExt};
+use tokio_util::io::StreamReader;
+
+pub(crate) struct Backgrounded<R, S>
+where
+    R: FullRepo + 'static,
+    S: Store,
+{
+    repo: R,
+    identifier: Option<S::Identifier>,
+    upload_id: Option<UploadId>,
+}
+
+impl<R, S> Backgrounded<R, S>
+where
+    R: FullRepo + 'static,
+    S: Store,
+{
+    pub(crate) fn disarm(mut self) {
+        let _ = self.identifier.take();
+        let _ = self.upload_id.take();
+    }
+
+    pub(crate) fn upload_id(&self) -> Option<UploadId> {
+        self.upload_id
+    }
+
+    pub(crate) fn identifier(&self) -> Option<&S::Identifier> {
+        self.identifier.as_ref()
+    }
+
+    pub(crate) async fn proxy<P>(repo: R, store: S, stream: P) -> Result<Self, Error>
+    where
+        P: Stream<Item = Result<Bytes, Error>>,
+    {
+        let mut this = Self {
+            repo,
+            identifier: None,
+            upload_id: Some(UploadId::generate()),
+        };
+
+        this.do_proxy(store, stream).await?;
+
+        Ok(this)
+    }
+
+    async fn do_proxy<P>(&mut self, store: S, stream: P) -> Result<(), Error>
+    where
+        P: Stream<Item = Result<Bytes, Error>>,
+    {
+        UploadRepo::create(&self.repo, self.upload_id.expect("Upload id exists")).await?;
+
+        let stream = stream.map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, e));
+        let mut reader = StreamReader::new(Box::pin(stream));
+
+        let identifier = store.save_async_read(&mut reader).await?;
+
+        self.identifier = Some(identifier.clone());
+
+        Ok(())
+    }
+}
+
+impl<R, S> Drop for Backgrounded<R, S>
+where
+    R: FullRepo + 'static,
+    S: Store,
+{
+    fn drop(&mut self) {
+        if let Some(identifier) = self.identifier.take() {
+            let repo = self.repo.clone();
+
+            actix_rt::spawn(async move {
+                let _ = crate::queue::cleanup_identifier(&repo, identifier).await;
+            });
+        }
+
+        if let Some(upload_id) = self.upload_id {
+            let repo = self.repo.clone();
+
+            actix_rt::spawn(async move {
+                let _ = repo.claim(upload_id).await;
+            });
+        }
+    }
+}
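
Backgrounded doubles as a cleanup guard: proxy() records a pending upload and writes the bytes to the store, and if the request dies before the job is handed off, Drop spawns tasks that delete the stored bytes and claim the upload id. The happy path calls disarm() once the queue owns the data. A minimal, self-contained sketch of that disarm-able guard idiom (the names here are illustrative, not from this commit):

struct CleanupGuard {
    resource: Option<String>, // stands in for the stored identifier
}

impl CleanupGuard {
    // On the happy path, ownership has moved on: silence the guard.
    fn disarm(mut self) {
        let _ = self.resource.take();
    }
}

impl Drop for CleanupGuard {
    fn drop(&mut self) {
        if let Some(resource) = self.resource.take() {
            // Backgrounded does this with actix_rt::spawn + an async cleanup.
            println!("cleaning up {resource}");
        }
    }
}

fn main() {
    let guard = CleanupGuard {
        resource: Some("file-123".into()),
    };

    // An early `?` return here would drop `guard` and trigger cleanup.

    guard.disarm(); // success: the queue owns the data now
}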

src/main.rs

@@ -22,6 +22,7 @@ use tracing_actix_web::TracingLogger;
 use tracing_awc::Tracing;
 use tracing_futures::Instrument;
 
+mod backgrounded;
 mod concurrent_processor;
 mod config;
 mod details;
@@ -48,6 +49,7 @@ mod tmp_file;
 mod validate;
 
 use self::{
+    backgrounded::Backgrounded,
     config::{Configuration, ImageFormat, Operation},
     details::Details,
     either::Either,
@@ -57,6 +59,7 @@ use self::{
     magick::details_hint,
     middleware::{Deadline, Internal},
     migrate::LatestDb,
+    queue::queue_generate,
     repo::{Alias, DeleteToken, FullRepo, HashRepo, IdentifierRepo, Repo, SettingsRepo},
     serde_str::Serde,
     store::{file_store::FileStore, object_store::ObjectStore, Identifier, Store},
@@ -134,6 +137,48 @@ async fn upload<R: FullRepo, S: Store + 'static>(
     })))
 }
 
+#[instrument(name = "Uploaded files", skip(value))]
+async fn upload_backgrounded<R: FullRepo, S: Store>(
+    value: Value<Backgrounded<R, S>>,
+    repo: web::Data<R>,
+) -> Result<HttpResponse, Error> {
+    let images = value
+        .map()
+        .and_then(|mut m| m.remove("images"))
+        .and_then(|images| images.array())
+        .ok_or(UploadError::NoFiles)?;
+
+    let mut files = Vec::new();
+
+    let images = images
+        .into_iter()
+        .filter_map(|i| i.file())
+        .collect::<Vec<_>>();
+
+    for image in &images {
+        let upload_id = image.result.upload_id().expect("Upload ID exists");
+
+        let identifier = image
+            .result
+            .identifier()
+            .expect("Identifier exists")
+            .to_bytes()?;
+
+        queue::queue_ingest(&**repo, identifier, upload_id, None, true).await?;
+
+        files.push(serde_json::json!({
+            "file": upload_id.to_string(),
+        }));
+    }
+
+    for image in images {
+        image.result.disarm();
+    }
+
+    Ok(HttpResponse::Created().json(&serde_json::json!({
+        "msg": "ok",
+        "files": files
+    })))
+}
+
 #[derive(Debug, serde::Deserialize)]
 struct UrlQuery {
     url: String,
@@ -339,6 +384,30 @@ async fn process<R: FullRepo, S: Store + 'static>(
     ))
 }
 
+/// Process files
+#[instrument(name = "Spawning image process", skip(repo))]
+async fn process_backgrounded<R: FullRepo, S: Store>(
+    query: web::Query<ProcessQuery>,
+    ext: web::Path<String>,
+    repo: web::Data<R>,
+) -> Result<HttpResponse, Error> {
+    let (target_format, source, process_path, process_args) =
+        prepare_process(query, ext.as_str())?;
+
+    let path_string = process_path.to_string_lossy().to_string();
+    let hash = repo.hash(&source).await?;
+    let identifier_opt = repo
+        .variant_identifier::<S::Identifier>(hash.clone(), path_string)
+        .await?;
+
+    if identifier_opt.is_some() {
+        return Ok(HttpResponse::Accepted().finish());
+    }
+
+    queue_generate(&**repo, target_format, source, process_path, process_args).await?;
+
+    Ok(HttpResponse::Accepted().finish())
+}
+
 /// Fetch file details
 #[instrument(name = "Fetching details", skip(repo))]
 async fn details<R: FullRepo, S: Store + 'static>(
@@ -603,6 +672,31 @@ async fn launch<R: FullRepo + Clone + 'static, S: Store + Clone + 'static>(
         })),
     );
 
+    // Create a new Multipart Form validator for backgrounded uploads
+    //
+    // This form is expecting a single array field, 'images' with at most 10 files in it
+    let repo2 = repo.clone();
+    let store2 = store.clone();
+    let backgrounded_form = Form::new()
+        .max_files(10)
+        .max_file_size(CONFIG.media.max_file_size * MEGABYTES)
+        .transform_error(transform_error)
+        .field(
+            "images",
+            Field::array(Field::file(move |filename, _, stream| {
+                let repo = repo2.clone();
+                let store = store2.clone();
+
+                let span = tracing::info_span!("file-proxy", ?filename);
+
+                let stream = stream.map_err(Error::from);
+
+                Box::pin(
+                    async move { Backgrounded::proxy(repo, store, stream).await }
+                        .instrument(span),
+                )
+            })),
+        );
+
     HttpServer::new(move || {
         let store = store.clone();
         let repo = repo.clone();
@@ -632,6 +726,11 @@ async fn launch<R: FullRepo + Clone + 'static, S: Store + Clone + 'static>(
                     .wrap(form.clone())
                     .route(web::post().to(upload::<R, S>)),
             )
+            .service(
+                web::resource("/backgrounded")
+                    .wrap(backgrounded_form.clone())
+                    .route(web::post().to(upload_backgrounded::<R, S>)),
+            )
            .service(web::resource("/download").route(web::get().to(download::<R, S>)))
            .service(
                web::resource("/delete/{delete_token}/{filename}")
@@ -642,6 +741,10 @@ async fn launch<R: FullRepo + Clone + 'static, S: Store + Clone + 'static>(
                web::resource("/original/{filename}").route(web::get().to(serve::<R, S>)),
            )
            .service(web::resource("/process.{ext}").route(web::get().to(process::<R, S>)))
+            .service(
+                web::resource("/process_backgrounded.{ext}")
+                    .route(web::get().to(process_backgrounded::<R, S>)),
+            )
            .service(
                web::scope("/details")
                    .service(
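
The net effect in main.rs is two new fire-and-forget routes next to their synchronous counterparts. A hedged client-side sketch of how they might be exercised, assuming the resources are mounted under the usual /image scope and the multipart field is sent as images[] (both inferred from the existing upload form, not shown in this diff), using reqwest purely for illustration:

// Hypothetical client calls; reqwest (with the "multipart" feature) and
// tokio are illustration-only dependencies, not part of this crate.
use reqwest::multipart::{Form, Part};

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    let client = reqwest::Client::new();

    // POST /image/backgrounded: returns 201 Created immediately, with an
    // upload id per file instead of the final alias.
    let form = Form::new().part(
        "images[]",
        Part::bytes(std::fs::read("cat.png")?).file_name("cat.png"),
    );
    let created = client
        .post("http://localhost:8080/image/backgrounded")
        .multipart(form)
        .send()
        .await?;
    println!("{}", created.text().await?); // {"msg":"ok","files":[{"file":"<upload id>"}]}

    // GET /image/process_backgrounded.{ext}: queues variant generation and
    // returns 202 Accepted whether or not the variant already exists.
    let accepted = client
        .get("http://localhost:8080/image/process_backgrounded.png?src=<alias>&resize=200")
        .send()
        .await?;
    println!("{}", accepted.status()); // 202 Accepted

    Ok(())
}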

src/queue.rs

@@ -1,13 +1,14 @@
 use crate::{
     config::ImageFormat,
     error::Error,
-    repo::{Alias, AliasRepo, DeleteToken, FullRepo, HashRepo, IdentifierRepo, QueueRepo},
+    repo::{
+        Alias, AliasRepo, DeleteToken, FullRepo, HashRepo, IdentifierRepo, QueueRepo, UploadId,
+    },
     serde_str::Serde,
     store::{Identifier, Store},
 };
 use std::{future::Future, path::PathBuf, pin::Pin};
 use tracing::Instrument;
-use uuid::Uuid;
 
 mod cleanup;
 mod process;
@@ -33,7 +34,7 @@ enum Cleanup {
 enum Process {
     Ingest {
         identifier: Vec<u8>,
-        upload_id: Uuid,
+        upload_id: Serde<UploadId>,
         declared_alias: Option<Serde<Alias>>,
         should_validate: bool,
     },
@@ -80,14 +81,14 @@ pub(crate) async fn cleanup_identifier<R: QueueRepo, I: Identifier>(
 pub(crate) async fn queue_ingest<R: QueueRepo>(
     repo: &R,
     identifier: Vec<u8>,
-    upload_id: Uuid,
+    upload_id: UploadId,
     declared_alias: Option<Alias>,
     should_validate: bool,
 ) -> Result<(), Error> {
     let job = serde_json::to_vec(&Process::Ingest {
         identifier,
         declared_alias: declared_alias.map(Serde::new),
-        upload_id,
+        upload_id: Serde::new(upload_id),
         should_validate,
     })?;
     repo.push(PROCESS_QUEUE, job.into()).await?;
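
Queue jobs are serialized with serde_json, so swapping the raw Uuid for Serde<UploadId> changes only how the id is written into the job payload. A sketch of what a Serde<T> wrapper like this plausibly looks like, assuming it round-trips through Display/FromStr (consistent with the impls added to UploadId in repo.rs below); the actual definition lives in the serde_str module and is not part of this diff:

// Assumed shape of serde_str::Serde<T>: serialize any Display + FromStr
// type as a plain string.
use std::{fmt::Display, str::FromStr};

use serde::{de, Deserialize, Deserializer, Serialize, Serializer};

struct Serde<T>(T);

impl<T> Serde<T> {
    fn new(inner: T) -> Self {
        Serde(inner)
    }

    fn into_inner(this: Self) -> T {
        this.0
    }
}

impl<T: Display> Serialize for Serde<T> {
    fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
        serializer.collect_str(&self.0)
    }
}

impl<'de, T> Deserialize<'de> for Serde<T>
where
    T: FromStr,
    T::Err: Display,
{
    fn deserialize<D: Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> {
        let s = String::deserialize(deserializer)?;
        s.parse().map(Serde).map_err(de::Error::custom)
    }
}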

src/queue/process.rs

@@ -32,7 +32,7 @@ where
         repo,
         store,
         identifier,
-        upload_id.into(),
+        Serde::into_inner(upload_id),
         declared_alias.map(Serde::into_inner),
         should_validate,
     )

src/repo.rs

@@ -31,7 +31,7 @@ pub(crate) struct DeleteToken {
 pub(crate) struct AlreadyExists;
 
-#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
 pub(crate) struct UploadId {
     id: Uuid,
 }
@@ -88,6 +88,8 @@ pub(crate) trait BaseRepo {
 #[async_trait::async_trait(?Send)]
 pub(crate) trait UploadRepo: BaseRepo {
+    async fn create(&self, upload_id: UploadId) -> Result<(), Error>;
+
     async fn wait(&self, upload_id: UploadId) -> Result<UploadResult, Error>;
 
     async fn claim(&self, upload_id: UploadId) -> Result<(), Error>;
@@ -439,6 +441,26 @@ impl From<Uuid> for UploadId {
     }
 }
 
+impl From<UploadId> for Uuid {
+    fn from(uid: UploadId) -> Self {
+        uid.id
+    }
+}
+
+impl std::str::FromStr for UploadId {
+    type Err = <Uuid as std::str::FromStr>::Err;
+
+    fn from_str(s: &str) -> Result<Self, Self::Err> {
+        Ok(UploadId { id: s.parse()? })
+    }
+}
+
+impl std::fmt::Display for UploadId {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(&self.id, f)
+    }
+}
+
 impl std::fmt::Display for MaybeUuid {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         match self {
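
With Copy plus the new Display, FromStr, and From conversions, an UploadId can be handed to a client as a string (as upload_backgrounded does in its JSON response) and parsed back later. A small illustrative round trip, relying only on the impls shown above plus the existing UploadId::generate and From<Uuid>:

// Illustrative only; not part of this commit.
fn roundtrip() -> Result<(), uuid::Error> {
    let id = UploadId::generate();        // existing constructor
    let text = id.to_string();            // Display: the inner uuid's text form
    let parsed: UploadId = text.parse()?; // FromStr added above
    assert_eq!(id, parsed);               // derived PartialEq
    let raw: uuid::Uuid = parsed.into();  // From<UploadId> for Uuid
    assert_eq!(id, UploadId::from(raw));  // existing From<Uuid> impl
    Ok(())
}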

src/repo/sled.rs

@@ -125,14 +125,38 @@ impl From<InnerUploadResult> for UploadResult {
 
 #[async_trait::async_trait(?Send)]
 impl UploadRepo for SledRepo {
+    async fn create(&self, upload_id: UploadId) -> Result<(), Error> {
+        b!(self.uploads, uploads.insert(upload_id.as_bytes(), b"1"));
+        Ok(())
+    }
+
     async fn wait(&self, upload_id: UploadId) -> Result<UploadResult, Error> {
         let mut subscriber = self.uploads.watch_prefix(upload_id.as_bytes());
 
-        while let Some(event) = (&mut subscriber).await {
-            if let sled::Event::Insert { value, .. } = event {
-                let result: InnerUploadResult = serde_json::from_slice(&value)?;
-                return Ok(result.into());
-            }
-        }
+        let bytes = upload_id.as_bytes().to_vec();
+        let opt = b!(self.uploads, uploads.get(bytes));
+
+        if let Some(bytes) = opt {
+            if bytes != b"1" {
+                let result: InnerUploadResult = serde_json::from_slice(&bytes)?;
+                return Ok(result.into());
+            }
+        } else {
+            return Err(UploadError::NoFiles.into());
+        }
+
+        while let Some(event) = (&mut subscriber).await {
+            match event {
+                sled::Event::Remove { .. } => {
+                    return Err(UploadError::NoFiles.into());
+                }
+                sled::Event::Insert { value, .. } => {
+                    if value != b"1" {
+                        let result: InnerUploadResult = serde_json::from_slice(&value)?;
+                        return Ok(result.into());
+                    }
+                }
+            }
+        }
 
         Err(UploadError::Canceled.into())
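
The sled implementation uses b"1" as a "pending" sentinel: create() writes it, a worker later overwrites it with a serialized InnerUploadResult, and a Remove event (or a missing key) tells other waiters the upload is gone. Subscribing with watch_prefix before the initial read is what makes wait() race-free: a result written between the two steps still reaches the subscriber. A self-contained sketch of that subscribe-then-read pattern against a temporary sled tree (keys and values here are made up):

// Demonstrates the ordering used by wait() above; run against a throwaway db.
use std::thread;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let db = sled::Config::new().temporary(true).open()?;
    let tree = db.open_tree("uploads")?;

    // create(): mark the job as pending with the sentinel value.
    tree.insert(b"job-1", b"1")?;

    // Subscribe *before* the initial read so no write can slip between them.
    let mut subscriber = tree.watch_prefix(b"job-1");

    // Check the current state first: the result may already be there.
    if let Some(value) = tree.get(b"job-1")? {
        if value != b"1" {
            println!("already complete: {:?}", value);
            return Ok(());
        }
    }

    // Simulate a worker finishing the job in the background.
    let worker_tree = tree.clone();
    thread::spawn(move || {
        worker_tree.insert(b"job-1", b"{\"result\":\"ok\"}").unwrap();
    });

    // Block until an insert with a non-sentinel value (or a removal) arrives.
    while let Some(event) = subscriber.next() {
        match event {
            sled::Event::Remove { .. } => {
                println!("claimed or aborted elsewhere");
                break;
            }
            sled::Event::Insert { value, .. } => {
                if value != b"1" {
                    println!("complete: {:?}", value);
                    break;
                }
            }
        }
    }

    Ok(())
}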