From cf7c753e65cb8e607735e42cb77e7c1850690ddd Mon Sep 17 00:00:00 2001 From: asonix Date: Sat, 3 Feb 2024 13:31:54 -0600 Subject: [PATCH 1/8] Pass a State value around rather than a bunch of arguments --- src/generate.rs | 120 +++--- src/generate/magick.rs | 26 +- src/ingest.rs | 110 ++--- src/lib.rs | 891 ++++++++++++++--------------------------- src/magick.rs | 36 +- src/queue.rs | 140 ++----- src/queue/cleanup.rs | 23 +- src/queue/process.rs | 93 +---- src/state.rs | 13 + src/validate.rs | 108 ++--- src/validate/magick.rs | 34 +- 11 files changed, 548 insertions(+), 1046 deletions(-) create mode 100644 src/state.rs diff --git a/src/generate.rs b/src/generate.rs index a17d22c..79129b7 100644 --- a/src/generate.rs +++ b/src/generate.rs @@ -8,7 +8,8 @@ use crate::{ formats::{ImageFormat, InputProcessableFormat, InternalVideoFormat, ProcessableFormat}, future::{WithMetrics, WithTimeout}, magick::PolicyDir, - repo::{ArcRepo, Hash, VariantAlreadyExists}, + repo::{Hash, VariantAlreadyExists}, + state::State, store::Store, tmp_file::TmpDir, }; @@ -49,47 +50,43 @@ impl Drop for MetricsGuard { } } -#[allow(clippy::too_many_arguments)] -#[tracing::instrument(skip(tmp_dir, policy_dir, repo, store, hash, process_map, config))] +#[tracing::instrument(skip(state, process_map, hash))] pub(crate) async fn generate( - tmp_dir: &TmpDir, - policy_dir: &PolicyDir, - repo: &ArcRepo, - store: &S, + state: &State, process_map: &ProcessMap, format: InputProcessableFormat, thumbnail_path: PathBuf, thumbnail_args: Vec, original_details: &Details, - config: &crate::config::Configuration, hash: Hash, ) -> Result<(Details, Bytes), Error> { - if config.server.danger_dummy_mode { - let identifier = repo + if state.config.server.danger_dummy_mode { + let identifier = state + .repo .identifier(hash) .await? .ok_or(UploadError::MissingIdentifier)?; - let bytes = store.to_bytes(&identifier, None, None).await?.into_bytes(); + let bytes = state + .store + .to_bytes(&identifier, None, None) + .await? 
+ .into_bytes(); Ok((original_details.clone(), bytes)) } else { let process_fut = process( - tmp_dir, - policy_dir, - repo, - store, + state, format, thumbnail_path.clone(), thumbnail_args, original_details, - config, hash.clone(), ); let (details, bytes) = process_map .process(hash, thumbnail_path, process_fut) - .with_timeout(Duration::from_secs(config.media.process_timeout * 4)) + .with_timeout(Duration::from_secs(state.config.media.process_timeout * 4)) .with_metrics("pict-rs.generate.process") .await .map_err(|_| UploadError::ProcessTimeout)??; @@ -99,37 +96,21 @@ pub(crate) async fn generate( } #[allow(clippy::too_many_arguments)] -#[tracing::instrument(skip(tmp_dir, policy_dir, repo, store, hash, config))] +#[tracing::instrument(skip(state, hash))] async fn process( - tmp_dir: &TmpDir, - policy_dir: &PolicyDir, - repo: &ArcRepo, - store: &S, + state: &State, output_format: InputProcessableFormat, thumbnail_path: PathBuf, thumbnail_args: Vec, original_details: &Details, - config: &crate::config::Configuration, hash: Hash, ) -> Result<(Details, Bytes), Error> { let guard = MetricsGuard::guard(); let permit = crate::process_semaphore().acquire().await?; - let identifier = input_identifier( - tmp_dir, - policy_dir, - repo, - store, - output_format, - hash.clone(), - original_details, - &config.media, - ) - .await?; + let identifier = input_identifier(state, output_format, hash.clone(), original_details).await?; - let input_details = - crate::ensure_details_identifier(tmp_dir, policy_dir, repo, store, config, &identifier) - .await?; + let input_details = crate::ensure_details_identifier(state, &identifier).await?; let input_format = input_details .internal_format() @@ -139,21 +120,19 @@ async fn process( let format = input_format.process_to(output_format); let quality = match format { - ProcessableFormat::Image(format) => config.media.image.quality_for(format), - ProcessableFormat::Animation(format) => config.media.animation.quality_for(format), + ProcessableFormat::Image(format) => state.config.media.image.quality_for(format), + ProcessableFormat::Animation(format) => state.config.media.animation.quality_for(format), }; - let stream = store.to_stream(&identifier, None, None).await?; + let stream = state.store.to_stream(&identifier, None, None).await?; let vec = crate::magick::process_image_stream_read( - tmp_dir, - policy_dir, + state, stream, thumbnail_args, input_format, format, quality, - config.media.process_timeout, ) .await? .into_vec() @@ -165,18 +144,20 @@ async fn process( drop(permit); let details = Details::from_bytes( - tmp_dir, - policy_dir, - config.media.process_timeout, + &state.tmp_dir, + &state.policy_dir, + &state.config.media.process_timeout, bytes.clone(), ) .await?; - let identifier = store + let identifier = state + .store .save_bytes(bytes.clone(), details.media_type()) .await?; - if let Err(VariantAlreadyExists) = repo + if let Err(VariantAlreadyExists) = state + .repo .relate_variant_identifier( hash, thumbnail_path.to_string_lossy().to_string(), @@ -184,10 +165,10 @@ async fn process( ) .await? 
{ - store.remove(&identifier).await?; + state.store.remove(&identifier).await?; } - repo.relate_details(&identifier, &details).await?; + state.repo.relate_details(&identifier, &details).await?; guard.disarm(); @@ -197,14 +178,10 @@ async fn process( #[allow(clippy::too_many_arguments)] #[tracing::instrument(skip_all)] async fn input_identifier( - tmp_dir: &TmpDir, - policy_dir: &PolicyDir, - repo: &ArcRepo, - store: &S, + state: &State, output_format: InputProcessableFormat, hash: Hash, original_details: &Details, - media: &crate::config::Media, ) -> Result, Error> where S: Store + 'static, @@ -220,11 +197,12 @@ where }; if should_thumbnail { - if let Some(identifier) = repo.motion_identifier(hash.clone()).await? { + if let Some(identifier) = state.repo.motion_identifier(hash.clone()).await? { return Ok(identifier); }; - let identifier = repo + let identifier = state + .repo .identifier(hash.clone()) .await? .ok_or(UploadError::MissingIdentifier)?; @@ -232,24 +210,22 @@ where let (reader, media_type) = if let Some(processable_format) = original_details.internal_format().processable_format() { - let thumbnail_format = media.image.format.unwrap_or(ImageFormat::Webp); + let thumbnail_format = state.config.media.image.format.unwrap_or(ImageFormat::Webp); - let stream = store.to_stream(&identifier, None, None).await?; + let stream = state.store.to_stream(&identifier, None, None).await?; let reader = magick::thumbnail( - tmp_dir, - policy_dir, + state, stream, processable_format, ProcessableFormat::Image(thumbnail_format), - media.image.quality_for(thumbnail_format), - media.process_timeout, + config.media.image.quality_for(thumbnail_format), ) .await?; (reader, thumbnail_format.media_type()) } else { - let thumbnail_format = match media.image.format { + let thumbnail_format = match state.config.media.image.format { Some(ImageFormat::Webp | ImageFormat::Avif | ImageFormat::Jxl) => { ffmpeg::ThumbnailFormat::Webp } @@ -258,14 +234,14 @@ where }; let reader = ffmpeg::thumbnail( - tmp_dir, - store.clone(), + state.tmp_dir, + state.store.clone(), identifier, original_details .video_format() .unwrap_or(InternalVideoFormat::Mp4), thumbnail_format, - media.process_timeout, + state.config.media.process_timeout, ) .await?; @@ -273,16 +249,20 @@ where }; let motion_identifier = reader - .with_stdout(|stdout| async { store.save_async_read(stdout, media_type).await }) + .with_stdout(|stdout| async { state.store.save_async_read(stdout, media_type).await }) .await??; - repo.relate_motion_identifier(hash, &motion_identifier) + state + .repo + .relate_motion_identifier(hash, &motion_identifier) .await?; return Ok(motion_identifier); } - repo.identifier(hash) + state + .repo + .identifier(hash) .await? 
.ok_or(UploadError::MissingIdentifier) .map_err(From::from) diff --git a/src/generate/magick.rs b/src/generate/magick.rs index e0b1799..d722d57 100644 --- a/src/generate/magick.rs +++ b/src/generate/magick.rs @@ -6,29 +6,29 @@ use crate::{ formats::ProcessableFormat, magick::{MagickError, PolicyDir, MAGICK_CONFIGURE_PATH, MAGICK_TEMPORARY_PATH}, process::{Process, ProcessRead}, + state::State, stream::LocalBoxStream, tmp_file::TmpDir, }; -async fn thumbnail_animation( - tmp_dir: &TmpDir, - policy_dir: &PolicyDir, +async fn thumbnail_animation( + state: &State, input_format: ProcessableFormat, format: ProcessableFormat, quality: Option, - timeout: u64, write_file: F, ) -> Result where F: FnOnce(crate::file::File) -> Fut, Fut: std::future::Future>, { - let temporary_path = tmp_dir + let temporary_path = state + .tmp_dir .tmp_folder() .await .map_err(MagickError::CreateTemporaryDirectory)?; - let input_file = tmp_dir.tmp_file(None); + let input_file = state.tmp_dir.tmp_file(None); crate::store::file_store::safe_create_parent(&input_file) .await .map_err(MagickError::CreateDir)?; @@ -62,10 +62,10 @@ where let envs = [ (MAGICK_TEMPORARY_PATH, temporary_path.as_os_str()), - (MAGICK_CONFIGURE_PATH, policy_dir.as_os_str()), + (MAGICK_CONFIGURE_PATH, state.policy_dir.as_os_str()), ]; - let reader = Process::run("magick", &args, &envs, timeout)? + let reader = Process::run("magick", &args, &envs, state.config.media.process_timeout)? .read() .add_extras(input_file) .add_extras(temporary_path); @@ -73,22 +73,18 @@ where Ok(reader) } -pub(super) async fn thumbnail( - tmp_dir: &TmpDir, - policy_dir: &PolicyDir, +pub(super) async fn thumbnail( + state: &State, stream: LocalBoxStream<'static, std::io::Result>, input_format: ProcessableFormat, format: ProcessableFormat, quality: Option, - timeout: u64, ) -> Result { thumbnail_animation( - tmp_dir, - policy_dir, + state, input_format, format, quality, - timeout, |mut tmp_file| async move { tmp_file .write_from_stream(stream) diff --git a/src/ingest.rs b/src/ingest.rs index 7e4358c..d140fe7 100644 --- a/src/ingest.rs +++ b/src/ingest.rs @@ -8,6 +8,7 @@ use crate::{ future::WithMetrics, magick::PolicyDir, repo::{Alias, ArcRepo, DeleteToken, Hash}, + state::State, store::Store, tmp_file::TmpDir, }; @@ -19,7 +20,7 @@ use streem::IntoStreamer; use tracing::{Instrument, Span}; mod hasher; -use hasher::{Hasher, State}; +use hasher::Hasher; #[derive(Debug)] pub(crate) struct Session { @@ -50,12 +51,17 @@ where } async fn process_ingest( - tmp_dir: &TmpDir, - policy_dir: &PolicyDir, - store: &S, + state: &State, stream: impl Stream> + 'static, - media: &crate::config::Media, -) -> Result<(InternalFormat, Arc, Details, Rc>), Error> +) -> Result< + ( + InternalFormat, + Arc, + Details, + Rc>, + ), + Error, +> where S: Store, { @@ -65,43 +71,30 @@ where let permit = crate::process_semaphore().acquire().await?; - let prescribed = Validations { - image: &media.image, - animation: &media.animation, - video: &media.video, - }; - tracing::trace!("Validating bytes"); - let (input_type, process_read) = crate::validate::validate_bytes( - tmp_dir, - policy_dir, - bytes, - prescribed, - media.process_timeout, - ) - .await?; + let (input_type, process_read) = crate::validate::validate_bytes(state, bytes).await?; - let process_read = if let Some(operations) = media.preprocess_steps() { + let process_read = if let Some(operations) = state.config.media.preprocess_steps() { if let Some(format) = input_type.processable_format() { let (_, magick_args) = 
crate::processor::build_chain(operations, format.file_extension())?; let quality = match format { - crate::formats::ProcessableFormat::Image(format) => media.image.quality_for(format), + crate::formats::ProcessableFormat::Image(format) => { + state.config.media.image.quality_for(format) + } crate::formats::ProcessableFormat::Animation(format) => { - media.animation.quality_for(format) + state.config.media.animation.quality_for(format) } }; crate::magick::process_image_process_read( - tmp_dir, - policy_dir, + state, process_read, magick_args, format, format, quality, - media.process_timeout, ) .await? } else { @@ -111,7 +104,7 @@ where process_read }; - let (state, identifier) = process_read + let (hash_state, identifier) = process_read .with_stdout(|stdout| async move { let hasher_reader = Hasher::new(stdout); let state = hasher_reader.state(); @@ -119,11 +112,11 @@ where store .save_async_read(hasher_reader, input_type.media_type()) .await - .map(move |identifier| (state, identifier)) + .map(move |identifier| (hash_state, identifier)) }) .await??; - let bytes_stream = store.to_bytes(&identifier, None, None).await?; + let bytes_stream = state.store.to_bytes(&identifier, None, None).await?; let details = Details::from_bytes( tmp_dir, policy_dir, @@ -134,13 +127,21 @@ where drop(permit); - Ok((input_type, identifier, details, state)) + Ok((input_type, identifier, details, hash_state)) } async fn dummy_ingest( - store: &S, + state: &State, stream: impl Stream> + 'static, -) -> Result<(InternalFormat, Arc, Details, Rc>), Error> +) -> Result< + ( + InternalFormat, + Arc, + Details, + Rc>, + ), + Error, +> where S: Store, { @@ -156,7 +157,8 @@ where let input_type = InternalFormat::Image(crate::formats::ImageFormat::Png); - let identifier = store + let identifier = state + .store .save_async_read(hasher_reader, input_type.media_type()) .await?; @@ -166,41 +168,37 @@ where } #[allow(clippy::too_many_arguments)] -#[tracing::instrument(skip(tmp_dir, policy_dir, repo, store, client, stream, config))] +#[tracing::instrument(skip(state, stream))] pub(crate) async fn ingest( - tmp_dir: &TmpDir, - policy_dir: &PolicyDir, - repo: &ArcRepo, - store: &S, - client: &ClientWithMiddleware, + state: &State, stream: impl Stream> + 'static, declared_alias: Option, - config: &crate::config::Configuration, ) -> Result where S: Store, { - let (input_type, identifier, details, state) = if config.server.danger_dummy_mode { - dummy_ingest(store, stream).await? + let (input_type, identifier, details, hash_state) = if state.config.server.danger_dummy_mode { + dummy_ingest(state, stream).await? } else { - process_ingest(tmp_dir, policy_dir, store, stream, &config.media).await? + process_ingest(state, stream).await? 
}; let mut session = Session { - repo: repo.clone(), + repo: state.repo.clone(), delete_token: DeleteToken::generate(), hash: None, alias: None, identifier: Some(identifier.clone()), }; - if let Some(endpoint) = &config.media.external_validation { + if let Some(endpoint) = &state.config.media.external_validation { let stream = store.to_stream(&identifier, None, None).await?; - let response = client + let response = state + .client .post(endpoint.as_str()) .timeout(Duration::from_secs( - config.media.external_validation_timeout, + state.config.media.external_validation_timeout, )) .header("Content-Type", input_type.media_type().as_ref()) .body(Body::wrap_stream(crate::stream::make_send(stream))) @@ -214,13 +212,13 @@ where } } - let (hash, size) = state.borrow_mut().finalize_reset(); + let (hash, size) = hash_state.borrow_mut().finalize_reset(); let hash = Hash::new(hash, size, input_type); - save_upload(&mut session, repo, store, hash.clone(), &identifier).await?; + save_upload(&mut session, state, hash.clone(), &identifier).await?; - repo.relate_details(&identifier, &details).await?; + state.repo.relate_details(&identifier, &details).await?; if let Some(alias) = declared_alias { session.add_existing_alias(hash, alias).await? @@ -234,17 +232,21 @@ where #[tracing::instrument(level = "trace", skip_all)] async fn save_upload( session: &mut Session, - repo: &ArcRepo, - store: &S, + state: &State, hash: Hash, identifier: &Arc, ) -> Result<(), Error> where S: Store, { - if repo.create_hash(hash.clone(), identifier).await?.is_err() { + if state + .repo + .create_hash(hash.clone(), identifier) + .await? + .is_err() + { // duplicate upload - store.remove(identifier).await?; + state.store.remove(identifier).await?; session.identifier.take(); return Ok(()); } diff --git a/src/lib.rs b/src/lib.rs index c47717f..2afa6be 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -26,6 +26,7 @@ mod read; mod repo; mod repo_04; mod serde_str; +mod state; mod store; mod stream; mod sync; @@ -49,6 +50,7 @@ use repo::ArcRepo; use reqwest_middleware::{ClientBuilder, ClientWithMiddleware}; use reqwest_tracing::TracingMiddleware; use rusty_s3::UrlStyle; +use state::State; use std::{ marker::PhantomData, path::Path, @@ -106,53 +108,45 @@ fn process_semaphore() -> &'static Semaphore { } async fn ensure_details( - tmp_dir: &TmpDir, - policy_dir: &PolicyDir, - repo: &ArcRepo, - store: &S, - config: &Configuration, + state: &State, alias: &Alias, ) -> Result { - let Some(identifier) = repo.identifier_from_alias(alias).await? else { + let Some(identifier) = state.repo.identifier_from_alias(alias).await? 
else { return Err(UploadError::MissingAlias.into()); }; - ensure_details_identifier(tmp_dir, policy_dir, repo, store, config, &identifier).await + ensure_details_identifier(state, &identifier).await } async fn ensure_details_identifier( - tmp_dir: &TmpDir, - policy_dir: &PolicyDir, - repo: &ArcRepo, - store: &S, - config: &Configuration, + state: &State, identifier: &Arc, ) -> Result { - let details = repo.details(identifier).await?; + let details = state.repo.details(identifier).await?; if let Some(details) = details { tracing::debug!("details exist"); Ok(details) } else { - if config.server.read_only { + if state.config.server.read_only { return Err(UploadError::ReadOnly.into()); - } else if config.server.danger_dummy_mode { + } else if state.config.server.danger_dummy_mode { return Ok(Details::danger_dummy(formats::InternalFormat::Image( formats::ImageFormat::Png, ))); } tracing::debug!("generating new details from {:?}", identifier); - let bytes_stream = store.to_bytes(identifier, None, None).await?; + let bytes_stream = state.store.to_bytes(identifier, None, None).await?; let new_details = Details::from_bytes( - tmp_dir, - policy_dir, - config.media.process_timeout, + &state.tmp_dir, + &state.policy_dir, + state.config.media.process_timeout, bytes_stream.into_bytes(), ) .await?; tracing::debug!("storing details for {:?}", identifier); - repo.relate_details(identifier, &new_details).await?; + state.repo.relate_details(identifier, &new_details).await?; tracing::debug!("stored"); Ok(new_details) } @@ -165,47 +159,22 @@ impl FormData for Upload { type Error = Error; fn form(req: &HttpRequest) -> Form { - let tmp_dir = req - .app_data::>() - .expect("No TmpDir in request") - .clone(); - let policy_dir = req - .app_data::>() - .expect("No TmpDir in request") - .clone(); - let repo = req - .app_data::>() - .expect("No repo in request") - .clone(); - let store = req - .app_data::>() - .expect("No store in request") - .clone(); - let client = req - .app_data::>() - .expect("No client in request") - .clone(); - let config = req - .app_data::>() - .expect("No configuration in request") + let state = req + .app_data::>>() + .expect("No state in request") .clone(); // Create a new Multipart Form validator // // This form is expecting a single array field, 'images' with at most 10 files in it Form::new() - .max_files(config.server.max_file_count) - .max_file_size(config.media.max_file_size * MEGABYTES) + .max_files(state.config.server.max_file_count) + .max_file_size(state.config.media.max_file_size * MEGABYTES) .transform_error(transform_error) .field( "images", Field::array(Field::file(move |filename, _, stream| { - let tmp_dir = tmp_dir.clone(); - let policy_dir = policy_dir.clone(); - let repo = repo.clone(); - let store = store.clone(); - let client = client.clone(); - let config = config.clone(); + let state = state.clone(); metrics::counter!("pict-rs.files", "upload" => "inline").increment(1); @@ -213,23 +182,13 @@ impl FormData for Upload { Box::pin( async move { - if config.server.read_only { + if state.config.server.read_only { return Err(UploadError::ReadOnly.into()); } let stream = crate::stream::from_err(stream); - ingest::ingest( - &tmp_dir, - &policy_dir, - &repo, - &**store, - &client, - stream, - None, - &config, - ) - .await + ingest::ingest(&state, stream, None).await } .instrument(span), ) @@ -249,47 +208,22 @@ impl FormData for Import { type Error = Error; fn form(req: &actix_web::HttpRequest) -> Form { - let tmp_dir = req - .app_data::>() - .expect("No TmpDir in request") - 
.clone(); - let policy_dir = req - .app_data::>() - .expect("No TmpDir in request") - .clone(); - let repo = req - .app_data::>() - .expect("No repo in request") - .clone(); - let store = req - .app_data::>() - .expect("No store in request") - .clone(); - let client = req - .app_data::() - .expect("No client in request") - .clone(); - let config = req - .app_data::>() - .expect("No configuration in request") + let state = req + .app_data::>>() + .expect("No state in request") .clone(); // Create a new Multipart Form validator for internal imports // // This form is expecting a single array field, 'images' with at most 10 files in it Form::new() - .max_files(config.server.max_file_count) - .max_file_size(config.media.max_file_size * MEGABYTES) + .max_files(state.config.server.max_file_count) + .max_file_size(state.config.media.max_file_size * MEGABYTES) .transform_error(transform_error) .field( "images", Field::array(Field::file(move |filename, _, stream| { - let tmp_dir = tmp_dir.clone(); - let policy_dir = policy_dir.clone(); - let repo = repo.clone(); - let store = store.clone(); - let client = client.clone(); - let config = config.clone(); + let state = state.clone(); metrics::counter!("pict-rs.files", "import" => "inline").increment(1); @@ -297,23 +231,14 @@ impl FormData for Import { Box::pin( async move { - if config.server.read_only { + if state.config.server.read_only { return Err(UploadError::ReadOnly.into()); } let stream = crate::stream::from_err(stream); - ingest::ingest( - &tmp_dir, - &policy_dir, - &repo, - &**store, - &client, - stream, - Some(Alias::from_existing(&filename)), - &config, - ) - .await + ingest::ingest(&state, stream, Some(Alias::from_existing(&filename))) + .await } .instrument(span), ) @@ -330,49 +255,28 @@ impl FormData for Import { } /// Handle responding to successful uploads -#[tracing::instrument( - name = "Uploaded files", - skip(value, tmp_dir, policy_dir, repo, store, config) -)] +#[tracing::instrument(name = "Uploaded files", skip(value, state))] async fn upload( Multipart(Upload(value, _)): Multipart>, - tmp_dir: web::Data, - policy_dir: web::Data, - repo: web::Data, - store: web::Data, - config: web::Data, + state: web::Data>, ) -> Result { - handle_upload(value, tmp_dir, policy_dir, repo, store, config).await + handle_upload(value, state).await } /// Handle responding to successful uploads -#[tracing::instrument( - name = "Imported files", - skip(value, tmp_dir, policy_dir, repo, store, config) -)] +#[tracing::instrument(name = "Imported files", skip(value, state))] async fn import( Multipart(Import(value, _)): Multipart>, - tmp_dir: web::Data, - policy_dir: web::Data, - repo: web::Data, - store: web::Data, - config: web::Data, + state: web::Data>, ) -> Result { - handle_upload(value, tmp_dir, policy_dir, repo, store, config).await + handle_upload(value, state).await } /// Handle responding to successful uploads -#[tracing::instrument( - name = "Uploaded files", - skip(value, tmp_dir, policy_dir, repo, store, config) -)] +#[tracing::instrument(name = "Uploaded files", skip(value, state))] async fn handle_upload( value: Value, - tmp_dir: web::Data, - policy_dir: web::Data, - repo: web::Data, - store: web::Data, - config: web::Data, + state: web::Data>, ) -> Result { let images = value .map() @@ -391,8 +295,7 @@ async fn handle_upload( tracing::debug!("Uploaded {} as {:?}", image.filename, alias); let delete_token = image.result.delete_token(); - let details = - ensure_details(&tmp_dir, &policy_dir, &repo, &store, &config, alias).await?; + let 
details = ensure_details(&state, alias).await?; files.push(serde_json::json!({ "file": alias.to_string(), @@ -422,30 +325,19 @@ impl FormData for BackgroundedUpload { // Create a new Multipart Form validator for backgrounded uploads // // This form is expecting a single array field, 'images' with at most 10 files in it - let repo = req - .app_data::>() - .expect("No repo in request") + let state = req + .app_data::>>() + .expect("No state in request") .clone(); - let store = req - .app_data::>() - .expect("No store in request") - .clone(); - let config = req - .app_data::>() - .expect("No configuration in request") - .clone(); - - let read_only = config.server.read_only; Form::new() - .max_files(config.server.max_file_count) - .max_file_size(config.media.max_file_size * MEGABYTES) + .max_files(state.config.server.max_file_count) + .max_file_size(state.config.media.max_file_size * MEGABYTES) .transform_error(transform_error) .field( "images", Field::array(Field::file(move |filename, _, stream| { - let repo = (**repo).clone(); - let store = (**store).clone(); + let state = state.clone(); metrics::counter!("pict-rs.files", "upload" => "background").increment(1); @@ -453,13 +345,13 @@ impl FormData for BackgroundedUpload { Box::pin( async move { - if read_only { + if state.config.server.read_only { return Err(UploadError::ReadOnly.into()); } let stream = crate::stream::from_err(stream); - Backgrounded::proxy(repo, store, stream).await + Backgrounded::proxy(&state.repo, &state.store, stream).await } .instrument(span), ) @@ -475,10 +367,10 @@ impl FormData for BackgroundedUpload { } } -#[tracing::instrument(name = "Uploaded files", skip(value, repo))] +#[tracing::instrument(name = "Uploaded files", skip(value, state))] async fn upload_backgrounded( Multipart(BackgroundedUpload(value, _)): Multipart>, - repo: web::Data, + state: web::Data>, ) -> Result { let images = value .map() @@ -496,7 +388,7 @@ async fn upload_backgrounded( let upload_id = image.result.upload_id().expect("Upload ID exists"); let identifier = image.result.identifier().expect("Identifier exists"); - queue::queue_ingest(&repo, identifier, upload_id, None).await?; + queue::queue_ingest(&state.repo, identifier, upload_id, None).await?; files.push(serde_json::json!({ "upload_id": upload_id.to_string(), @@ -521,30 +413,25 @@ struct ClaimQuery { /// Claim a backgrounded upload #[tracing::instrument(name = "Waiting on upload", skip_all)] async fn claim_upload( - tmp_dir: web::Data, - policy_dir: web::Data, - repo: web::Data, - store: web::Data, - config: web::Data, query: web::Query, + state: web::Data>, ) -> Result { let upload_id = Serde::into_inner(query.into_inner().upload_id); - match repo + match state + .repo .wait(upload_id) .with_timeout(Duration::from_secs(10)) .await { Ok(wait_res) => { let upload_result = wait_res?; - repo.claim(upload_id).await?; + state.repo.claim(upload_id).await?; metrics::counter!("pict-rs.background.upload.claim").increment(1); match upload_result { UploadResult::Success { alias, token } => { - let details = - ensure_details(&tmp_dir, &policy_dir, &repo, &store, &config, &alias) - .await?; + let details = ensure_details(&state, &alias).await?; Ok(HttpResponse::Ok().json(&serde_json::json!({ "msg": "ok", @@ -576,21 +463,13 @@ struct UrlQuery { async fn ingest_inline( stream: impl Stream> + 'static, - tmp_dir: &TmpDir, - policy_dir: &PolicyDir, - repo: &ArcRepo, - store: &S, - client: &ClientWithMiddleware, - config: &Configuration, + state: &State, ) -> Result<(Alias, DeleteToken, Details), Error> { 
- let session = ingest::ingest( - tmp_dir, policy_dir, repo, store, client, stream, None, config, - ) - .await?; + let session = ingest::ingest(state, stream, None).await?; let alias = session.alias().expect("alias should exist").to_owned(); - let details = ensure_details(tmp_dir, policy_dir, repo, store, config, &alias).await?; + let details = ensure_details(state, &alias).await?; let delete_token = session.disarm(); @@ -598,68 +477,50 @@ async fn ingest_inline( } /// download an image from a URL -#[tracing::instrument( - name = "Downloading file", - skip(client, tmp_dir, policy_dir, repo, store, config) -)] +#[tracing::instrument(name = "Downloading file", skip(state))] async fn download( - client: web::Data, - tmp_dir: web::Data, - policy_dir: web::Data, - repo: web::Data, - store: web::Data, - config: web::Data, query: web::Query, + state: web::Data>, ) -> Result { - let stream = download_stream(&client, &query.url, &config).await?; + let stream = download_stream(&query.url, &state).await?; if query.backgrounded { - do_download_backgrounded(stream, repo, store).await + do_download_backgrounded(stream, state).await } else { - do_download_inline(stream, &tmp_dir, &policy_dir, repo, store, &client, config).await + do_download_inline(stream, &state).await } } -async fn download_stream( - client: &ClientWithMiddleware, +async fn download_stream( url: &str, - config: &Configuration, + state: &State, ) -> Result> + 'static, Error> { - if config.server.read_only { + if state.config.server.read_only { return Err(UploadError::ReadOnly.into()); } - let res = client.get(url).send().await?; + let res = state.client.get(url).send().await?; if !res.status().is_success() { return Err(UploadError::Download(res.status()).into()); } let stream = crate::stream::limit( - config.media.max_file_size * MEGABYTES, + state.config.media.max_file_size * MEGABYTES, crate::stream::from_err(res.bytes_stream()), ); Ok(stream) } -#[tracing::instrument( - name = "Downloading file inline", - skip(stream, tmp_dir, policy_dir, repo, store, client, config) -)] +#[tracing::instrument(name = "Downloading file inline", skip(stream, state))] async fn do_download_inline( stream: impl Stream> + 'static, - tmp_dir: &TmpDir, - policy_dir: &PolicyDir, - repo: web::Data, - store: web::Data, - client: &ClientWithMiddleware, - config: web::Data, + state: &State, ) -> Result { metrics::counter!("pict-rs.files", "download" => "inline").increment(1); - let (alias, delete_token, details) = - ingest_inline(stream, tmp_dir, policy_dir, &repo, &store, client, &config).await?; + let (alias, delete_token, details) = ingest_inline(stream, state).await?; Ok(HttpResponse::Created().json(&serde_json::json!({ "msg": "ok", @@ -671,11 +532,10 @@ async fn do_download_inline( }))) } -#[tracing::instrument(name = "Downloading file in background", skip(stream, repo, store))] +#[tracing::instrument(name = "Downloading file in background", skip(stream, state))] async fn do_download_backgrounded( stream: impl Stream> + 'static, - repo: web::Data, - store: web::Data, + state: web::Data>, ) -> Result { metrics::counter!("pict-rs.files", "download" => "background").increment(1); @@ -727,9 +587,9 @@ struct HashJson { } /// Get a page of hashes -#[tracing::instrument(name = "Hash Page", skip(repo))] -async fn page( - repo: web::Data, +#[tracing::instrument(name = "Hash Page", skip(state))] +async fn page( + state: web::Data>, web::Query(PageQuery { slug, timestamp, @@ -739,9 +599,12 @@ async fn page( let limit = limit.unwrap_or(20); let page = if let 
Some(timestamp) = timestamp { - repo.hash_page_by_date(timestamp.timestamp, limit).await? + state + .repo + .hash_page_by_date(timestamp.timestamp, limit) + .await? } else { - repo.hash_page(slug, limit).await? + state.repo.hash_page(slug, limit).await? }; let mut hashes = Vec::with_capacity(page.hashes.len()); @@ -755,9 +618,11 @@ async fn page( .map(|a| a.to_string()) .collect(); - let identifier = repo.identifier(hash.clone()).await?; + let identifier = state.repo.identifier(hash.clone()).await?; let details = if let Some(identifier) = identifier { - repo.details(&identifier) + state + .repo + .details(&identifier) .await? .map(|d| d.into_api_details()) } else { @@ -786,13 +651,12 @@ async fn page( } /// Delete aliases and files -#[tracing::instrument(name = "Deleting file", skip(repo, config))] -async fn delete( - repo: web::Data, - config: web::Data, +#[tracing::instrument(name = "Deleting file", skip(state))] +async fn delete( + state: web::Data>, path_entries: web::Path<(String, String)>, ) -> Result { - if config.server.read_only { + if state.config.server.read_only { return Err(UploadError::ReadOnly.into()); } @@ -802,7 +666,7 @@ async fn delete( let alias = Alias::from_existing(&alias); // delete alias inline - queue::cleanup::alias(&repo, alias, token).await?; + queue::cleanup::alias(&state.repo, alias, token).await?; Ok(HttpResponse::NoContent().finish()) } @@ -844,19 +708,18 @@ fn prepare_process( Ok((format, thumbnail_path, thumbnail_args)) } -#[tracing::instrument(name = "Fetching derived details", skip(repo, config))] +#[tracing::instrument(name = "Fetching derived details", skip(state))] async fn process_details( web::Query(ProcessQuery { source, operations }): web::Query, ext: web::Path, - repo: web::Data, - config: web::Data, + state: web::Data>, ) -> Result { let alias = match source { ProcessSource::Alias { alias } | ProcessSource::Source { src: alias } => { Serde::into_inner(alias) } ProcessSource::Proxy { proxy } => { - let Some(alias) = repo.related(proxy).await? else { + let Some(alias) = state.repo.related(proxy).await? else { return Ok(HttpResponse::NotFound().json(&serde_json::json!({ "msg": "No images associated with provided proxy url" }))); @@ -865,9 +728,9 @@ async fn process_details( } }; - let (_, thumbnail_path, _) = prepare_process(&config, operations, ext.as_str())?; + let (_, thumbnail_path, _) = prepare_process(&state.config, operations, ext.as_str())?; - let Some(hash) = repo.hash(&alias).await? else { + let Some(hash) = state.repo.hash(&alias).await? else { // Invalid alias return Ok(HttpResponse::NotFound().json(&serde_json::json!({ "msg": "No images associated with provided alias", @@ -876,17 +739,20 @@ async fn process_details( let thumbnail_string = thumbnail_path.to_string_lossy().to_string(); - if !config.server.read_only { - repo.accessed_variant(hash.clone(), thumbnail_string.clone()) + if !state.config.server.read_only { + state + .repo + .accessed_variant(hash.clone(), thumbnail_string.clone()) .await?; } - let identifier = repo + let identifier = state + .repo .variant_identifier(hash, thumbnail_string) .await? 
.ok_or(UploadError::MissingAlias)?; - let details = repo.details(&identifier).await?; + let details = state.repo.details(&identifier).await?; let details = details.ok_or(UploadError::NoFiles)?; @@ -913,20 +779,12 @@ async fn not_found_hash(repo: &ArcRepo) -> Result, Error> /// Process files #[allow(clippy::too_many_arguments)] -#[tracing::instrument( - name = "Serving processed image", - skip(tmp_dir, policy_dir, repo, store, client, config, process_map) -)] +#[tracing::instrument(name = "Serving processed image", skip(state, process_map))] async fn process( range: Option>, web::Query(ProcessQuery { source, operations }): web::Query, ext: web::Path, - tmp_dir: web::Data, - policy_dir: web::Data, - repo: web::Data, - store: web::Data, - client: web::Data, - config: web::Data, + state: web::Data>, process_map: web::Data, ) -> Result { let alias = match source { @@ -934,31 +792,22 @@ async fn process( Serde::into_inner(alias) } ProcessSource::Proxy { proxy } => { - let alias = if let Some(alias) = repo.related(proxy.clone()).await? { + let alias = if let Some(alias) = state.repo.related(proxy.clone()).await? { alias } else if !config.server.read_only { - let stream = download_stream(&client, proxy.as_str(), &config).await?; + let stream = download_stream(proxy.as_str(), &state).await?; - let (alias, _, _) = ingest_inline( - stream, - &tmp_dir, - &policy_dir, - &repo, - &store, - &client, - &config, - ) - .await?; + let (alias, _, _) = ingest_inline(stream, &state).await?; - repo.relate_url(proxy, alias.clone()).await?; + state.repo.relate_url(proxy, alias.clone()).await?; alias } else { return Err(UploadError::ReadOnly.into()); }; - if !config.server.read_only { - repo.accessed_alias(alias.clone()).await?; + if !state.config.server.read_only { + state.repo.accessed_alias(alias.clone()).await?; } alias @@ -966,59 +815,54 @@ async fn process( }; let (format, thumbnail_path, thumbnail_args) = - prepare_process(&config, operations, ext.as_str())?; + prepare_process(&state.config, operations, ext.as_str())?; let path_string = thumbnail_path.to_string_lossy().to_string(); - let (hash, alias, not_found) = if let Some(hash) = repo.hash(&alias).await? { + let (hash, alias, not_found) = if let Some(hash) = state.repo.hash(&alias).await? { (hash, alias, false) } else { - let Some((alias, hash)) = not_found_hash(&repo).await? else { + let Some((alias, hash)) = not_found_hash(&state.repo).await? 
else { return Ok(HttpResponse::NotFound().finish()); }; (hash, alias, true) }; - if !config.server.read_only { - repo.accessed_variant(hash.clone(), path_string.clone()) + if !state.config.server.read_only { + state + .repo + .accessed_variant(hash.clone(), path_string.clone()) .await?; } let identifier_opt = repo.variant_identifier(hash.clone(), path_string).await?; if let Some(identifier) = identifier_opt { - let details = - ensure_details_identifier(&tmp_dir, &policy_dir, &repo, &store, &config, &identifier) - .await?; + let details = ensure_details_identifier(&state, &identifier).await?; - if let Some(public_url) = store.public_url(&identifier) { + if let Some(public_url) = state.store.public_url(&identifier) { return Ok(HttpResponse::SeeOther() .insert_header((actix_web::http::header::LOCATION, public_url.as_str())) .finish()); } - return ranged_file_resp(&store, identifier, range, details, not_found).await; + return ranged_file_resp(&state.store, identifier, range, details, not_found).await; } if config.server.read_only { return Err(UploadError::ReadOnly.into()); } - let original_details = - ensure_details(&tmp_dir, &policy_dir, &repo, &store, &config, &alias).await?; + let original_details = ensure_details(&state, &alias).await?; let (details, bytes) = generate::generate( - &tmp_dir, - &policy_dir, - &repo, - &store, + &state, &process_map, format, thumbnail_path, thumbnail_args, &original_details, - &config, hash, ) .await?; @@ -1058,78 +902,71 @@ async fn process( } #[allow(clippy::too_many_arguments)] -#[tracing::instrument( - name = "Serving processed image headers", - skip(tmp_dir, policy_dir, repo, store, config) -)] +#[tracing::instrument(name = "Serving processed image headers", skip(state))] async fn process_head( range: Option>, web::Query(ProcessQuery { source, operations }): web::Query, ext: web::Path, - tmp_dir: web::Data, - policy_dir: web::Data, - repo: web::Data, - store: web::Data, - config: web::Data, + state: web::Data>, ) -> Result { let alias = match source { ProcessSource::Alias { alias } | ProcessSource::Source { src: alias } => { Serde::into_inner(alias) } ProcessSource::Proxy { proxy } => { - let Some(alias) = repo.related(proxy).await? else { + let Some(alias) = state.repo.related(proxy).await? else { return Ok(HttpResponse::NotFound().finish()); }; alias } }; - let (_, thumbnail_path, _) = prepare_process(&config, operations, ext.as_str())?; + let (_, thumbnail_path, _) = prepare_process(&state.config, operations, ext.as_str())?; let path_string = thumbnail_path.to_string_lossy().to_string(); - let Some(hash) = repo.hash(&alias).await? else { + let Some(hash) = state.repo.hash(&alias).await? 
else { // Invalid alias return Ok(HttpResponse::NotFound().finish()); }; - if !config.server.read_only { + if !state.config.server.read_only { repo.accessed_variant(hash.clone(), path_string.clone()) .await?; } - let identifier_opt = repo.variant_identifier(hash.clone(), path_string).await?; + let identifier_opt = state + .repo + .variant_identifier(hash.clone(), path_string) + .await?; if let Some(identifier) = identifier_opt { - let details = - ensure_details_identifier(&tmp_dir, &policy_dir, &repo, &store, &config, &identifier) - .await?; + let details = ensure_details_identifier(&state, &identifier).await?; - if let Some(public_url) = store.public_url(&identifier) { + if let Some(public_url) = state.store.public_url(&identifier) { return Ok(HttpResponse::SeeOther() .insert_header((actix_web::http::header::LOCATION, public_url.as_str())) .finish()); } - return ranged_file_head_resp(&store, identifier, range, details).await; + return ranged_file_head_resp(&state.store, identifier, range, details).await; } Ok(HttpResponse::NotFound().finish()) } /// Process files -#[tracing::instrument(name = "Spawning image process", skip(repo))] +#[tracing::instrument(name = "Spawning image process", skip(state))] async fn process_backgrounded( web::Query(ProcessQuery { source, operations }): web::Query, ext: web::Path, - repo: web::Data, - config: web::Data, + state: web::Data>, ) -> Result { let source = match source { ProcessSource::Alias { alias } | ProcessSource::Source { src: alias } => { Serde::into_inner(alias) } ProcessSource::Proxy { proxy } => { - let Some(alias) = repo.related(proxy).await? else { + let Some(alias) = state.repo.related(proxy).await? else { return Ok(HttpResponse::NotFound().finish()); }; alias @@ -1137,46 +974,49 @@ async fn process_backgrounded( }; let (target_format, process_path, process_args) = - prepare_process(&config, operations, ext.as_str())?; + prepare_process(&state.config, operations, ext.as_str())?; let path_string = process_path.to_string_lossy().to_string(); - let Some(hash) = repo.hash(&source).await? else { + let Some(hash) = state.repo.hash(&source).await? else { // Invalid alias return Ok(HttpResponse::BadRequest().finish()); }; - let identifier_opt = repo.variant_identifier(hash.clone(), path_string).await?; + let identifier_opt = state + .repo + .variant_identifier(hash.clone(), path_string) + .await?; if identifier_opt.is_some() { return Ok(HttpResponse::Accepted().finish()); } - if config.server.read_only { + if state.config.server.read_only { return Err(UploadError::ReadOnly.into()); } - queue_generate(&repo, target_format, source, process_path, process_args).await?; + queue_generate( + &state.repo, + target_format, + source, + process_path, + process_args, + ) + .await?; Ok(HttpResponse::Accepted().finish()) } /// Fetch file details -#[tracing::instrument( - name = "Fetching query details", - skip(tmp_dir, policy_dir, repo, store, config) -)] +#[tracing::instrument(name = "Fetching query details", skip(state))] async fn details_query( web::Query(alias_query): web::Query, - tmp_dir: web::Data, - policy_dir: web::Data, - repo: web::Data, - store: web::Data, - config: web::Data, + state: web::Data>, ) -> Result { let alias = match alias_query { AliasQuery::Alias { alias } => Serde::into_inner(alias), AliasQuery::Proxy { proxy } => { - let Some(alias) = repo.related(proxy).await? else { + let Some(alias) = state.repo.related(proxy).await? 
else { return Ok(HttpResponse::NotFound().json(&serde_json::json!({ "msg": "Provided proxy URL has not been cached", }))); @@ -1185,232 +1025,147 @@ async fn details_query( } }; - do_details(alias, tmp_dir, policy_dir, repo, store, config).await + let details = ensure_details(&state, &alias).await?; + + Ok(HttpResponse::Ok().json(&details.into_api_details())) } /// Fetch file details -#[tracing::instrument( - name = "Fetching details", - skip(tmp_dir, policy_dir, repo, store, config) -)] +#[tracing::instrument(name = "Fetching details", skip(state))] async fn details( alias: web::Path>, - tmp_dir: web::Data, - policy_dir: web::Data, - repo: web::Data, - store: web::Data, - config: web::Data, + state: web::Data>, ) -> Result { - do_details( - Serde::into_inner(alias.into_inner()), - tmp_dir, - policy_dir, - repo, - store, - config, - ) - .await -} - -async fn do_details( - alias: Alias, - tmp_dir: web::Data, - policy_dir: web::Data, - repo: web::Data, - store: web::Data, - config: web::Data, -) -> Result { - let details = ensure_details(&tmp_dir, &policy_dir, &repo, &store, &config, &alias).await?; + let details = ensure_details(&state, &alias).await?; Ok(HttpResponse::Ok().json(&details.into_api_details())) } /// Serve files based on alias query #[allow(clippy::too_many_arguments)] -#[tracing::instrument( - name = "Serving file query", - skip(tmp_dir, policy_dir, repo, store, client, config) -)] +#[tracing::instrument(name = "Serving file query", skip(state))] async fn serve_query( range: Option>, web::Query(alias_query): web::Query, - tmp_dir: web::Data, - policy_dir: web::Data, - repo: web::Data, - store: web::Data, - client: web::Data, - config: web::Data, + state: web::Data>, ) -> Result { let alias = match alias_query { AliasQuery::Alias { alias } => Serde::into_inner(alias), AliasQuery::Proxy { proxy } => { - let alias = if let Some(alias) = repo.related(proxy.clone()).await? { + let alias = if let Some(alias) = state.repo.related(proxy.clone()).await? 
{ alias - } else if !config.server.read_only { - let stream = download_stream(&client, proxy.as_str(), &config).await?; + } else if !state.config.server.read_only { + let stream = download_stream(proxy.as_str(), &state).await?; - let (alias, _, _) = ingest_inline( - stream, - &tmp_dir, - &policy_dir, - &repo, - &store, - &client, - &config, - ) - .await?; + let (alias, _, _) = ingest_inline(stream, &state).await?; - repo.relate_url(proxy, alias.clone()).await?; + state.repo.relate_url(proxy, alias.clone()).await?; alias } else { return Err(UploadError::ReadOnly.into()); }; - if !config.server.read_only { - repo.accessed_alias(alias.clone()).await?; + if !state.config.server.read_only { + state.repo.accessed_alias(alias.clone()).await?; } alias } }; - do_serve(range, alias, tmp_dir, policy_dir, repo, store, config).await + do_serve(range, alias, state).await } /// Serve files -#[tracing::instrument(name = "Serving file", skip(tmp_dir, policy_dir, repo, store, config))] +#[tracing::instrument(name = "Serving file", skip(state))] async fn serve( range: Option>, alias: web::Path>, - tmp_dir: web::Data, - policy_dir: web::Data, - repo: web::Data, - store: web::Data, - config: web::Data, + state: web::Data>, ) -> Result { - do_serve( - range, - Serde::into_inner(alias.into_inner()), - tmp_dir, - policy_dir, - repo, - store, - config, - ) - .await + do_serve(range, Serde::into_inner(alias.into_inner()), state).await } async fn do_serve( range: Option>, alias: Alias, - tmp_dir: web::Data, - policy_dir: web::Data, - repo: web::Data, - store: web::Data, - config: web::Data, + state: web::Data>, ) -> Result { - let (hash, alias, not_found) = if let Some(hash) = repo.hash(&alias).await? { + let (hash, alias, not_found) = if let Some(hash) = state.repo.hash(&alias).await? { (hash, alias, false) } else { - let Some((alias, hash)) = not_found_hash(&repo).await? else { + let Some((alias, hash)) = not_found_hash(&state.repo).await? else { return Ok(HttpResponse::NotFound().finish()); }; (hash, alias, true) }; - let Some(identifier) = repo.identifier(hash.clone()).await? else { + let Some(identifier) = state.repo.identifier(hash.clone()).await? else { tracing::warn!("Original File identifier for hash {hash:?} is missing, queue cleanup task",); - crate::queue::cleanup_hash(&repo, hash).await?; + crate::queue::cleanup_hash(&state.repo, hash).await?; return Ok(HttpResponse::NotFound().finish()); }; - let details = ensure_details(&tmp_dir, &policy_dir, &repo, &store, &config, &alias).await?; + let details = ensure_details(&state, &alias).await?; - if let Some(public_url) = store.public_url(&identifier) { + if let Some(public_url) = state.store.public_url(&identifier) { return Ok(HttpResponse::SeeOther() .insert_header((actix_web::http::header::LOCATION, public_url.as_str())) .finish()); } - ranged_file_resp(&store, identifier, range, details, not_found).await + ranged_file_resp(&state.store, identifier, range, details, not_found).await } -#[tracing::instrument( - name = "Serving query file headers", - skip(repo, tmp_dir, policy_dir, store, config) -)] +#[tracing::instrument(name = "Serving query file headers", skip(state))] async fn serve_query_head( range: Option>, web::Query(alias_query): web::Query, - tmp_dir: web::Data, - policy_dir: web::Data, - repo: web::Data, - store: web::Data, - config: web::Data, + state: web::Data>, ) -> Result { let alias = match alias_query { AliasQuery::Alias { alias } => Serde::into_inner(alias), AliasQuery::Proxy { proxy } => { - let Some(alias) = repo.related(proxy).await? 
else { + let Some(alias) = state.repo.related(proxy).await? else { return Ok(HttpResponse::NotFound().finish()); }; alias } }; - do_serve_head(range, alias, tmp_dir, policy_dir, repo, store, config).await + do_serve_head(range, alias, state).await } -#[tracing::instrument( - name = "Serving file headers", - skip(tmp_dir, policy_dir, repo, store, config) -)] +#[tracing::instrument(name = "Serving file headers", skip(state))] async fn serve_head( range: Option>, alias: web::Path>, - tmp_dir: web::Data, - policy_dir: web::Data, - repo: web::Data, - store: web::Data, - config: web::Data, + state: web::Data>, ) -> Result { - do_serve_head( - range, - Serde::into_inner(alias.into_inner()), - tmp_dir, - policy_dir, - repo, - store, - config, - ) - .await + do_serve_head(range, Serde::into_inner(alias.into_inner()), state).await } async fn do_serve_head( range: Option>, alias: Alias, - tmp_dir: web::Data, - policy_dir: web::Data, - repo: web::Data, - store: web::Data, - config: web::Data, + state: web::Data>, ) -> Result { - let Some(identifier) = repo.identifier_from_alias(&alias).await? else { + let Some(identifier) = state.repo.identifier_from_alias(&alias).await? else { // Invalid alias return Ok(HttpResponse::NotFound().finish()); }; - let details = ensure_details(&tmp_dir, &policy_dir, &repo, &store, &config, &alias).await?; + let details = ensure_details(&state, &alias).await?; - if let Some(public_url) = store.public_url(&identifier) { + if let Some(public_url) = state.store.public_url(&identifier) { return Ok(HttpResponse::SeeOther() .insert_header((actix_web::http::header::LOCATION, public_url.as_str())) .finish()); } - ranged_file_head_resp(&store, identifier, range, details).await + ranged_file_head_resp(&state.store, identifier, range, details).await } async fn ranged_file_head_resp( @@ -1562,14 +1317,18 @@ struct PruneQuery { force: bool, } -#[tracing::instrument(name = "Prune missing identifiers", skip(repo))] -async fn prune_missing( - repo: web::Data, +#[tracing::instrument(name = "Prune missing identifiers", skip(state))] +async fn prune_missing( + state: web::Data>, query: Option>, ) -> Result { - let total = repo.size().await?; + if state.config.server.read_only { + return Err(UploadError::ReadOnly.into()); + } - let progress = if let Some(progress) = repo.get("prune-missing-queued").await? { + let total = state.repo.size().await?; + + let progress = if let Some(progress) = state.repo.get("prune-missing-queued").await? 
{ progress .as_ref() .try_into() @@ -1579,12 +1338,12 @@ async fn prune_missing( 0 }; - let complete = repo.get("prune-missing-complete").await?.is_some(); + let complete = state.repo.get("prune-missing-complete").await?.is_some(); - let started = repo.get("prune-missing-started").await?.is_some(); + let started = state.repo.get("prune-missing-started").await?.is_some(); if !started || query.is_some_and(|q| q.force) { - queue::prune_missing(&repo).await?; + queue::prune_missing(&state.repo).await?; } Ok(HttpResponse::Ok().json(PruneResponse { @@ -1594,16 +1353,13 @@ async fn prune_missing( })) } -#[tracing::instrument(name = "Spawning variant cleanup", skip(repo, config))] -async fn clean_variants( - repo: web::Data, - config: web::Data, -) -> Result { - if config.server.read_only { +#[tracing::instrument(name = "Spawning variant cleanup", skip(state))] +async fn clean_variants(state: web::Data>) -> Result { + if state.config.server.read_only { return Err(UploadError::ReadOnly.into()); } - queue::cleanup_all_variants(&repo).await?; + queue::cleanup_all_variants(&state.repo).await?; Ok(HttpResponse::NoContent().finish()) } @@ -1614,14 +1370,12 @@ enum AliasQuery { Alias { alias: Serde }, } -#[tracing::instrument(name = "Setting 404 Image", skip(repo, config))] -async fn set_not_found( +#[tracing::instrument(name = "Setting 404 Image", skip(state))] +async fn set_not_found( json: web::Json, - repo: web::Data, - client: web::Data, - config: web::Data, + state: web::Data>, ) -> Result { - if config.server.read_only { + if state.config.server.read_only { return Err(UploadError::ReadOnly.into()); } @@ -1634,47 +1388,49 @@ async fn set_not_found( } }; - if repo.hash(&alias).await?.is_none() { + if state.repo.hash(&alias).await?.is_none() { return Ok(HttpResponse::BadRequest().json(serde_json::json!({ "msg": "No hash associated with provided alias" }))); } - repo.set(NOT_FOUND_KEY, alias.to_bytes().into()).await?; + state + .repo + .set(NOT_FOUND_KEY, alias.to_bytes().into()) + .await?; Ok(HttpResponse::Created().json(serde_json::json!({ "msg": "ok", }))) } -#[tracing::instrument(name = "Purging file", skip(repo, config))] -async fn purge( +#[tracing::instrument(name = "Purging file", skip(state))] +async fn purge( web::Query(alias_query): web::Query, - repo: web::Data, - config: web::Data, + state: web::Data>, ) -> Result { - if config.server.read_only { + if state.config.server.read_only { return Err(UploadError::ReadOnly.into()); } let alias = match alias_query { AliasQuery::Alias { alias } => Serde::into_inner(alias), AliasQuery::Proxy { proxy } => { - let Some(alias) = repo.related(proxy).await? else { + let Some(alias) = state.repo.related(proxy).await? else { return Ok(HttpResponse::NotFound().finish()); }; alias } }; - let aliases = repo.aliases_from_alias(&alias).await?; + let aliases = state.repo.aliases_from_alias(&alias).await?; - let Some(hash) = repo.hash(&alias).await? else { + let Some(hash) = state.repo.hash(&alias).await? 
else { return Ok(HttpResponse::BadRequest().json(&serde_json::json!({ "msg": "No images associated with provided alias", }))); }; - queue::cleanup_hash(&repo, hash).await?; + queue::cleanup_hash(&state.repo, hash).await?; Ok(HttpResponse::Ok().json(&serde_json::json!({ "msg": "ok", @@ -1682,28 +1438,27 @@ async fn purge( }))) } -#[tracing::instrument(name = "Deleting alias", skip(repo, config))] -async fn delete_alias( +#[tracing::instrument(name = "Deleting alias", skip(state))] +async fn delete_alias( web::Query(alias_query): web::Query, - repo: web::Data, - config: web::Data, + state: web::Data>, ) -> Result { - if config.server.read_only { + if state.config.server.read_only { return Err(UploadError::ReadOnly.into()); } let alias = match alias_query { AliasQuery::Alias { alias } => Serde::into_inner(alias), AliasQuery::Proxy { proxy } => { - let Some(alias) = repo.related(proxy).await? else { + let Some(alias) = state.repo.related(proxy).await? else { return Ok(HttpResponse::NotFound().finish()); }; alias } }; - if let Some(token) = repo.delete_token(&alias).await? { - queue::cleanup_alias(&repo, alias, token).await?; + if let Some(token) = state.repo.delete_token(&alias).await? { + queue::cleanup_alias(&state.repo, alias, token).await?; } else { return Ok(HttpResponse::NotFound().finish()); } @@ -1713,22 +1468,22 @@ async fn delete_alias( }))) } -#[tracing::instrument(name = "Fetching aliases", skip(repo))] -async fn aliases( +#[tracing::instrument(name = "Fetching aliases", skip(state))] +async fn aliases( web::Query(alias_query): web::Query, - repo: web::Data, + state: web::Data>, ) -> Result { let alias = match alias_query { AliasQuery::Alias { alias } => Serde::into_inner(alias), AliasQuery::Proxy { proxy } => { - let Some(alias) = repo.related(proxy).await? else { + let Some(alias) = state.repo.related(proxy).await? else { return Ok(HttpResponse::NotFound().finish()); }; alias } }; - let aliases = repo.aliases_from_alias(&alias).await?; + let aliases = state.repo.aliases_from_alias(&alias).await?; Ok(HttpResponse::Ok().json(&serde_json::json!({ "msg": "ok", @@ -1736,22 +1491,22 @@ async fn aliases( }))) } -#[tracing::instrument(name = "Fetching identifier", skip(repo))] -async fn identifier( +#[tracing::instrument(name = "Fetching identifier", skip(state))] +async fn identifier( web::Query(alias_query): web::Query, - repo: web::Data, + state: web::Data>, ) -> Result { let alias = match alias_query { AliasQuery::Alias { alias } => Serde::into_inner(alias), AliasQuery::Proxy { proxy } => { - let Some(alias) = repo.related(proxy).await? else { + let Some(alias) = state.repo.related(proxy).await? else { return Ok(HttpResponse::NotFound().finish()); }; alias } }; - let Some(identifier) = repo.identifier_from_alias(&alias).await? else { + let Some(identifier) = state.repo.identifier_from_alias(&alias).await? 
else { // Invalid alias return Ok(HttpResponse::NotFound().json(serde_json::json!({ "msg": "No identifiers associated with provided alias" @@ -1764,13 +1519,10 @@ async fn identifier( }))) } -#[tracing::instrument(skip(repo, store))] -async fn healthz( - repo: web::Data, - store: web::Data, -) -> Result { - repo.health_check().await?; - store.health_check().await?; +#[tracing::instrument(skip(state))] +async fn healthz(state: web::Data>) -> Result { + state.repo.health_check().await?; + state.store.health_check().await?; Ok(HttpResponse::Ok().finish()) } @@ -1794,17 +1546,11 @@ fn build_client() -> Result { fn configure_endpoints( config: &mut web::ServiceConfig, - repo: ArcRepo, - store: S, - configuration: Configuration, - client: ClientWithMiddleware, + state: State, extra_config: F, ) { config - .app_data(web::Data::new(repo)) - .app_data(web::Data::new(store)) - .app_data(web::Data::new(client)) - .app_data(web::Data::new(configuration.clone())) + .app_data(web::Data::new(state)) .route("/healthz", web::get().to(healthz::)) .service( web::scope("/image") @@ -1825,8 +1571,8 @@ fn configure_endpoints( .service(web::resource("/download").route(web::get().to(download::))) .service( web::resource("/delete/{delete_token}/{filename}") - .route(web::delete().to(delete)) - .route(web::get().to(delete)), + .route(web::delete().to(delete::)) + .route(web::get().to(delete::)), ) .service( web::scope("/original") @@ -1868,23 +1614,23 @@ fn configure_endpoints( .service( web::scope("/internal") .wrap(Internal( - configuration.server.api_key.as_ref().map(|s| s.to_owned()), + state.config.server.api_key.as_ref().map(|s| s.to_owned()), )) .service(web::resource("/import").route(web::post().to(import::))) - .service(web::resource("/variants").route(web::delete().to(clean_variants))) - .service(web::resource("/purge").route(web::post().to(purge))) - .service(web::resource("/delete").route(web::post().to(delete_alias))) - .service(web::resource("/aliases").route(web::get().to(aliases))) + .service(web::resource("/variants").route(web::delete().to(clean_variants::))) + .service(web::resource("/purge").route(web::post().to(purge::))) + .service(web::resource("/delete").route(web::post().to(delete_alias::))) + .service(web::resource("/aliases").route(web::get().to(aliases::))) .service(web::resource("/identifier").route(web::get().to(identifier::))) - .service(web::resource("/set_not_found").route(web::post().to(set_not_found))) + .service(web::resource("/set_not_found").route(web::post().to(set_not_found::))) .service(web::resource("/hashes").route(web::get().to(page))) - .service(web::resource("/prune_missing").route(web::post().to(prune_missing))) + .service(web::resource("/prune_missing").route(web::post().to(prune_missing::))) .configure(extra_config), ); } -fn spawn_cleanup(repo: ArcRepo, config: &Configuration) { - if config.server.read_only { +fn spawn_cleanup(state: State) { + if state.config.server.read_only { return; } @@ -1896,14 +1642,14 @@ fn spawn_cleanup(repo: ArcRepo, config: &Configuration) { interval.tick().await; - if let Err(e) = queue::cleanup_outdated_variants(&repo).await { + if let Err(e) = queue::cleanup_outdated_variants(&state.repo).await { tracing::warn!( "Failed to spawn cleanup for outdated variants:{}", format!("\n{e}\n{e:?}") ); } - if let Err(e) = queue::cleanup_outdated_proxies(&repo).await { + if let Err(e) = queue::cleanup_outdated_proxies(&state.repo).await { tracing::warn!( "Failed to spawn cleanup for outdated proxies:{}", format!("\n{e}\n{e:?}") @@ -1913,33 +1659,12 
@@ fn spawn_cleanup(repo: ArcRepo, config: &Configuration) { }); } -fn spawn_workers( - tmp_dir: ArcTmpDir, - policy_dir: ArcPolicyDir, - repo: ArcRepo, - store: S, - client: ClientWithMiddleware, - config: Configuration, - process_map: ProcessMap, -) where +fn spawn_workers(state: State, process_map: ProcessMap) +where S: Store + 'static, { - crate::sync::spawn( - "cleanup-worker", - queue::process_cleanup(repo.clone(), store.clone(), config.clone()), - ); - crate::sync::spawn( - "process-worker", - queue::process_images( - tmp_dir, - policy_dir, - repo, - store, - client, - process_map, - config, - ), - ); + crate::sync::spawn("cleanup-worker", queue::process_cleanup(state.clone())); + crate::sync::spawn("process-worker", queue::process_images(state, process_map)); } async fn launch_file_store( @@ -1955,38 +1680,32 @@ async fn launch_file_store( - tmp_dir: &TmpDir, - policy_dir: &PolicyDir, +async fn process_image( + state: &State, process_args: Vec, input_format: ProcessableFormat, format: ProcessableFormat, quality: Option, - timeout: u64, write_file: F, ) -> Result where F: FnOnce(crate::file::File) -> Fut, Fut: std::future::Future>, { - let temporary_path = tmp_dir + let temporary_path = state + .tmp_dir .tmp_folder() .await .map_err(MagickError::CreateTemporaryDirectory)?; - let input_file = tmp_dir.tmp_file(None); + let input_file = state.tmp_dir.tmp_file(None); crate::store::file_store::safe_create_parent(&input_file) .await .map_err(MagickError::CreateDir)?; @@ -143,10 +141,10 @@ where let envs = [ (MAGICK_TEMPORARY_PATH, temporary_path.as_os_str()), - (MAGICK_CONFIGURE_PATH, policy_dir.as_os_str()), + (MAGICK_CONFIGURE_PATH, state.policy_dir.as_os_str()), ]; - let reader = Process::run("magick", &args, &envs, timeout)? + let reader = Process::run("magick", &args, &envs, state.config.media.process_timeout)? 
.read() .add_extras(input_file) .add_extras(temporary_path); @@ -154,25 +152,20 @@ where Ok(reader) } -#[allow(clippy::too_many_arguments)] -pub(crate) async fn process_image_stream_read( - tmp_dir: &TmpDir, - policy_dir: &PolicyDir, +pub(crate) async fn process_image_stream_read( + state: &State, stream: LocalBoxStream<'static, std::io::Result>, args: Vec, input_format: ProcessableFormat, format: ProcessableFormat, quality: Option, - timeout: u64, ) -> Result { process_image( - tmp_dir, - policy_dir, + state, args, input_format, format, quality, - timeout, |mut tmp_file| async move { tmp_file .write_from_stream(stream) @@ -184,25 +177,20 @@ pub(crate) async fn process_image_stream_read( .await } -#[allow(clippy::too_many_arguments)] pub(crate) async fn process_image_process_read( - tmp_dir: &TmpDir, - policy_dir: &PolicyDir, + state: &State, process_read: ProcessRead, args: Vec, input_format: ProcessableFormat, format: ProcessableFormat, quality: Option, - timeout: u64, ) -> Result { process_image( - tmp_dir, - policy_dir, + state, args, input_format, format, quality, - timeout, |mut tmp_file| async move { process_read .with_stdout(|stdout| async { diff --git a/src/queue.rs b/src/queue.rs index 9107c5e..5c906e0 100644 --- a/src/queue.rs +++ b/src/queue.rs @@ -188,35 +188,12 @@ pub(crate) async fn queue_generate( Ok(()) } -pub(crate) async fn process_cleanup( - repo: ArcRepo, - store: S, - config: Configuration, -) { - process_jobs(&repo, &store, &config, CLEANUP_QUEUE, cleanup::perform).await +pub(crate) async fn process_cleanup(state: State) { + process_jobs(state, CLEANUP_QUEUE, cleanup::perform).await } -pub(crate) async fn process_images( - tmp_dir: ArcTmpDir, - policy_dir: ArcPolicyDir, - repo: ArcRepo, - store: S, - client: ClientWithMiddleware, - process_map: ProcessMap, - config: Configuration, -) { - process_image_jobs( - &tmp_dir, - &policy_dir, - &repo, - &store, - &client, - &process_map, - &config, - PROCESS_QUEUE, - process::perform, - ) - .await +pub(crate) async fn process_images(state: State, process_map: ProcessMap) { + process_image_jobs(state, process_map, PROCESS_QUEUE, process::perform).await } struct MetricsGuard { @@ -250,21 +227,10 @@ impl Drop for MetricsGuard { } } -async fn process_jobs( - repo: &ArcRepo, - store: &S, - config: &Configuration, - queue: &'static str, - callback: F, -) where +async fn process_jobs(state: State, queue: &'static str, callback: F) +where S: Store, - for<'a> F: Fn( - &'a ArcRepo, - &'a S, - &'a Configuration, - serde_json::Value, - ) -> LocalBoxFuture<'a, Result<(), Error>> - + Copy, + for<'a> F: Fn(&'a State, serde_json::Value) -> LocalBoxFuture<'a, Result<(), Error>> + Copy, { let worker_id = uuid::Uuid::new_v4(); @@ -273,7 +239,7 @@ async fn process_jobs( tokio::task::yield_now().await; - let res = job_loop(repo, store, config, worker_id, queue, callback).await; + let res = job_loop(&state, worker_id, queue, callback).await; if let Err(e) = res { tracing::warn!("Error processing jobs: {}", format!("{e}")); @@ -291,22 +257,14 @@ async fn process_jobs( } async fn job_loop( - repo: &ArcRepo, - store: &S, - config: &Configuration, + state: &State, worker_id: uuid::Uuid, queue: &'static str, callback: F, ) -> Result<(), Error> where S: Store, - for<'a> F: Fn( - &'a ArcRepo, - &'a S, - &'a Configuration, - serde_json::Value, - ) -> LocalBoxFuture<'a, Result<(), Error>> - + Copy, + for<'a> F: Fn(&'a State, serde_json::Value) -> LocalBoxFuture<'a, Result<(), Error>> + Copy, { loop { tracing::trace!("job_loop: looping"); @@ -314,20 +272,20 
@@ where tokio::task::yield_now().await; async { - let (job_id, job) = repo.pop(queue, worker_id).await?; + let (job_id, job) = state.repo.pop(queue, worker_id).await?; let guard = MetricsGuard::guard(worker_id, queue); let res = heartbeat( - repo, + &state.repo, queue, worker_id, job_id, - (callback)(repo, store, config, job), + (callback)(state, job), ) .await; - repo.complete_job(queue, worker_id, job_id).await?; + state.repo.complete_job(queue, worker_id, job_id).await?; res?; @@ -340,29 +298,14 @@ where } } -#[allow(clippy::too_many_arguments)] async fn process_image_jobs( - tmp_dir: &ArcTmpDir, - policy_dir: &ArcPolicyDir, - repo: &ArcRepo, - store: &S, - client: &ClientWithMiddleware, - process_map: &ProcessMap, - config: &Configuration, + state: State, + process_map: ProcessMap, queue: &'static str, callback: F, ) where S: Store, - for<'a> F: Fn( - &'a ArcTmpDir, - &'a ArcPolicyDir, - &'a ArcRepo, - &'a S, - &'a ClientWithMiddleware, - &'a ProcessMap, - &'a Configuration, - serde_json::Value, - ) -> LocalBoxFuture<'a, Result<(), Error>> + for<'a> F: Fn(&'a State, &'a ProcessMap, serde_json::Value) -> LocalBoxFuture<'a, Result<(), Error>> + Copy, { let worker_id = uuid::Uuid::new_v4(); @@ -372,19 +315,7 @@ async fn process_image_jobs( tokio::task::yield_now().await; - let res = image_job_loop( - tmp_dir, - policy_dir, - repo, - store, - client, - process_map, - config, - worker_id, - queue, - callback, - ) - .await; + let res = image_job_loop(&state, &process_map, worker_id, queue, callback).await; if let Err(e) = res { tracing::warn!("Error processing jobs: {}", format!("{e}")); @@ -403,29 +334,15 @@ async fn process_image_jobs( #[allow(clippy::too_many_arguments)] async fn image_job_loop( - tmp_dir: &ArcTmpDir, - policy_dir: &ArcPolicyDir, - repo: &ArcRepo, - store: &S, - client: &ClientWithMiddleware, + state: &State, process_map: &ProcessMap, - config: &Configuration, worker_id: uuid::Uuid, queue: &'static str, callback: F, ) -> Result<(), Error> where S: Store, - for<'a> F: Fn( - &'a ArcTmpDir, - &'a ArcPolicyDir, - &'a ArcRepo, - &'a S, - &'a ClientWithMiddleware, - &'a ProcessMap, - &'a Configuration, - serde_json::Value, - ) -> LocalBoxFuture<'a, Result<(), Error>> + for<'a> F: Fn(&'a State, &'a ProcessMap, serde_json::Value) -> LocalBoxFuture<'a, Result<(), Error>> + Copy, { loop { @@ -434,29 +351,20 @@ where tokio::task::yield_now().await; async { - let (job_id, job) = repo.pop(queue, worker_id).await?; + let (job_id, job) = state.repo.pop(queue, worker_id).await?; let guard = MetricsGuard::guard(worker_id, queue); let res = heartbeat( - repo, + &state.repo, queue, worker_id, job_id, - (callback)( - tmp_dir, - policy_dir, - repo, - store, - client, - process_map, - config, - job, - ), + (callback)(state, process_map, job), ) .await; - repo.complete_job(queue, worker_id, job_id).await?; + state.repo.complete_job(queue, worker_id, job_id).await?; res?; diff --git a/src/queue/cleanup.rs b/src/queue/cleanup.rs index e99cad0..473b37c 100644 --- a/src/queue/cleanup.rs +++ b/src/queue/cleanup.rs @@ -10,13 +10,12 @@ use crate::{ queue::Cleanup, repo::{Alias, ArcRepo, DeleteToken, Hash}, serde_str::Serde, + state::State, store::Store, }; pub(super) fn perform<'a, S>( - repo: &'a ArcRepo, - store: &'a S, - configuration: &'a Configuration, + state: &'a State, job: serde_json::Value, ) -> LocalBoxFuture<'a, Result<(), Error>> where @@ -25,26 +24,28 @@ where Box::pin(async move { match serde_json::from_value(job) { Ok(job) => match job { - Cleanup::Hash { hash: in_hash } => hash(repo, 
in_hash).await?, + Cleanup::Hash { hash: in_hash } => hash(&state.repo, in_hash).await?, Cleanup::Identifier { identifier: in_identifier, - } => identifier(repo, store, Arc::from(in_identifier)).await?, + } => identifier(&state.repo, &state.store, Arc::from(in_identifier)).await?, Cleanup::Alias { alias: stored_alias, token, } => { alias( - repo, + &state.repo, Serde::into_inner(stored_alias), Serde::into_inner(token), ) .await? } - Cleanup::Variant { hash, variant } => hash_variant(repo, hash, variant).await?, - Cleanup::AllVariants => all_variants(repo).await?, - Cleanup::OutdatedVariants => outdated_variants(repo, configuration).await?, - Cleanup::OutdatedProxies => outdated_proxies(repo, configuration).await?, - Cleanup::Prune => prune(repo, store).await?, + Cleanup::Variant { hash, variant } => { + hash_variant(&state.repo, hash, variant).await? + } + Cleanup::AllVariants => all_variants(&state.repo).await?, + Cleanup::OutdatedVariants => outdated_variants(&state.repo, &state.config).await?, + Cleanup::OutdatedProxies => outdated_proxies(&state.repo, &state.config).await?, + Cleanup::Prune => prune(&state.repo, &state.store).await?, }, Err(e) => { tracing::warn!("Invalid job: {}", format!("{e}")); diff --git a/src/queue/process.rs b/src/queue/process.rs index 9cfbed7..e2d2c9f 100644 --- a/src/queue/process.rs +++ b/src/queue/process.rs @@ -13,20 +13,15 @@ use crate::{ queue::Process, repo::{Alias, ArcRepo, UploadId, UploadResult}, serde_str::Serde, + state::State, store::Store, tmp_file::{ArcTmpDir, TmpDir}, }; use std::{path::PathBuf, sync::Arc}; -#[allow(clippy::too_many_arguments)] pub(super) fn perform<'a, S>( - tmp_dir: &'a ArcTmpDir, - policy_dir: &'a ArcPolicyDir, - repo: &'a ArcRepo, - store: &'a S, - client: &'a ClientWithMiddleware, + state: &'a State, process_map: &'a ProcessMap, - config: &'a Configuration, job: serde_json::Value, ) -> LocalBoxFuture<'a, Result<(), Error>> where @@ -41,15 +36,10 @@ where declared_alias, } => { process_ingest( - tmp_dir, - policy_dir, - repo, - store, - client, + state, Arc::from(identifier), Serde::into_inner(upload_id), declared_alias.map(Serde::into_inner), - config, ) .await? } @@ -60,16 +50,12 @@ where process_args, } => { generate( - tmp_dir, - policy_dir, - repo, - store, + state, process_map, target_format, Serde::into_inner(source), process_path, process_args, - config, ) .await? 
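// --- Reviewer sketch, not part of this patch ---
// After this refactor a queue callback is just a two-argument function over the shared
// state, matching the `for<'a> Fn(&'a State<S>, serde_json::Value) ->
// LocalBoxFuture<'a, Result<(), Error>>` bound that queue.rs now requires. The function
// below is hypothetical (it does no real work) and only shows the shape; the
// LocalBoxFuture alias is assumed to be the same one queue.rs already imports.
use futures_core::future::LocalBoxFuture; // assumption: the alias used by queue.rs

use crate::{error::Error, state::State, store::Store};

fn example_perform<'a, S>(
    state: &'a State<S>,
    job: serde_json::Value,
) -> LocalBoxFuture<'a, Result<(), Error>>
where
    S: Store,
{
    Box::pin(async move {
        // Everything the job needs hangs off `state`; here we only touch the repo.
        tracing::trace!("example job payload: {}", job);
        state.repo.health_check().await?;

        Ok::<(), Error>(())
    })
}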
} @@ -117,18 +103,12 @@ impl Drop for UploadGuard { } } -#[allow(clippy::too_many_arguments)] -#[tracing::instrument(skip(tmp_dir, policy_dir, repo, store, client, config))] +#[tracing::instrument(skip(state))] async fn process_ingest( - tmp_dir: &ArcTmpDir, - policy_dir: &ArcPolicyDir, - repo: &ArcRepo, - store: &S, - client: &ClientWithMiddleware, + state: &State, unprocessed_identifier: Arc, upload_id: UploadId, declared_alias: Option, - config: &Configuration, ) -> Result<(), Error> where S: Store + 'static, @@ -136,33 +116,18 @@ where let guard = UploadGuard::guard(upload_id); let fut = async { - let tmp_dir = tmp_dir.clone(); - let policy_dir = policy_dir.clone(); - let ident = unprocessed_identifier.clone(); - let store2 = store.clone(); - let repo = repo.clone(); - let client = client.clone(); + let state2 = state.clone(); let current_span = Span::current(); let span = tracing::info_span!(parent: current_span, "error_boundary"); - let config = config.clone(); let error_boundary = crate::sync::abort_on_drop(crate::sync::spawn( "ingest-media", async move { - let stream = crate::stream::from_err(store2.to_stream(&ident, None, None).await?); + let stream = + crate::stream::from_err(state2.store.to_stream(&ident, None, None).await?); - let session = crate::ingest::ingest( - &tmp_dir, - &policy_dir, - &repo, - &store2, - &client, - stream, - declared_alias, - &config, - ) - .await?; + let session = crate::ingest::ingest(&state2, stream, declared_alias).await?; Ok(session) as Result } @@ -170,7 +135,7 @@ where )) .await; - store.remove(&unprocessed_identifier).await?; + state.store.remove(&unprocessed_identifier).await?; error_boundary.map_err(|_| UploadError::Canceled)? }; @@ -191,62 +156,46 @@ where } }; - repo.complete_upload(upload_id, result).await?; + state.repo.complete_upload(upload_id, result).await?; guard.disarm(); Ok(()) } -#[allow(clippy::too_many_arguments)] -#[tracing::instrument(skip( - tmp_dir, - policy_dir, - repo, - store, - process_map, - process_path, - process_args, - config -))] +#[tracing::instrument(skip(state, process_map, process_path, process_args))] async fn generate( - tmp_dir: &TmpDir, - policy_dir: &PolicyDir, - repo: &ArcRepo, - store: &S, + state: &State, process_map: &ProcessMap, target_format: InputProcessableFormat, source: Alias, process_path: PathBuf, process_args: Vec, - config: &Configuration, ) -> Result<(), Error> { - let Some(hash) = repo.hash(&source).await? else { + let Some(hash) = state.repo.hash(&source).await? 
else { // Nothing to do return Ok(()); }; let path_string = process_path.to_string_lossy().to_string(); - let identifier_opt = repo.variant_identifier(hash.clone(), path_string).await?; + let identifier_opt = state + .repo + .variant_identifier(hash.clone(), path_string) + .await?; if identifier_opt.is_some() { return Ok(()); } - let original_details = - crate::ensure_details(tmp_dir, policy_dir, repo, store, config, &source).await?; + let original_details = crate::ensure_details(state, &source).await?; crate::generate::generate( - tmp_dir, - policy_dir, - repo, - store, + state, process_map, target_format, process_path, process_args, &original_details, - config, hash, ) .await?; diff --git a/src/state.rs b/src/state.rs new file mode 100644 index 0000000..1319e09 --- /dev/null +++ b/src/state.rs @@ -0,0 +1,13 @@ +use reqwest_middleware::ClientWithMiddleware; + +use crate::{config::Configuration, magick::ArcPolicyDir, repo::ArcRepo, tmp_file::ArcTmpDir}; + +#[derive(Clone)] +pub(crate) struct State { + pub(super) config: Configuration, + pub(super) tmp_dir: ArcTmpDir, + pub(super) policy_dir: ArcPolicyDir, + pub(super) repo: ArcRepo, + pub(super) store: S, + pub(super) client: ClientWithMiddleware, +} diff --git a/src/validate.rs b/src/validate.rs index 14c9a21..bed66e7 100644 --- a/src/validate.rs +++ b/src/validate.rs @@ -8,10 +8,11 @@ use crate::{ error_code::ErrorCode, formats::{ AnimationFormat, AnimationOutput, ImageInput, ImageOutput, InputFile, InputVideoFormat, - InternalFormat, Validations, + InternalFormat, }, magick::PolicyDir, process::ProcessRead, + state::State, tmp_file::TmpDir, }; use actix_web::web::Bytes; @@ -57,12 +58,9 @@ impl ValidationError { const MEGABYTES: usize = 1024 * 1024; #[tracing::instrument(skip_all)] -pub(crate) async fn validate_bytes( - tmp_dir: &TmpDir, - policy_dir: &PolicyDir, +pub(crate) async fn validate_bytes( + state: &State, bytes: Bytes, - validations: Validations<'_>, - timeout: u64, ) -> Result<(InternalFormat, ProcessRead), Error> { if bytes.is_empty() { return Err(ValidationError::Empty.into()); @@ -77,66 +75,35 @@ pub(crate) async fn validate_bytes( match &input { InputFile::Image(input) => { - let (format, process_read) = process_image( - tmp_dir, - policy_dir, - bytes, - *input, - width, - height, - validations.image, - timeout, - ) - .await?; + let (format, process_read) = process_image(state, bytes, *input, width, height).await?; Ok((format, process_read)) } InputFile::Animation(input) => { - let (format, process_read) = process_animation( - tmp_dir, - policy_dir, - bytes, - *input, - width, - height, - frames.unwrap_or(1), - validations.animation, - timeout, - ) - .await?; + let (format, process_read) = + process_animation(state, bytes, *input, width, height, frames.unwrap_or(1)).await?; Ok((format, process_read)) } InputFile::Video(input) => { - let (format, process_read) = process_video( - tmp_dir, - bytes, - *input, - width, - height, - frames.unwrap_or(1), - validations.video, - timeout, - ) - .await?; + let (format, process_read) = + process_video(state, bytes, *input, width, height, frames.unwrap_or(1)).await?; Ok((format, process_read)) } } } -#[allow(clippy::too_many_arguments)] -#[tracing::instrument(skip(tmp_dir, policy_dir, bytes, validations))] -async fn process_image( - tmp_dir: &TmpDir, - policy_dir: &PolicyDir, +#[tracing::instrument(skip(state, bytes))] +async fn process_image( + state: &State, bytes: Bytes, input: ImageInput, width: u16, height: u16, - validations: &crate::config::Image, - timeout: u64, ) -> 
Result<(InternalFormat, ProcessRead), Error> { + let validations = &state.config.media.image; + if width > validations.max_width { return Err(ValidationError::Width.into()); } @@ -158,16 +125,7 @@ async fn process_image( let process_read = if needs_transcode { let quality = validations.quality_for(format); - magick::convert_image( - tmp_dir, - policy_dir, - input.format, - format, - quality, - timeout, - bytes, - ) - .await? + magick::convert_image(state, input.format, format, quality, bytes).await? } else { exiftool::clear_metadata_bytes_read(bytes, timeout)? }; @@ -201,19 +159,17 @@ fn validate_animation( Ok(()) } -#[allow(clippy::too_many_arguments)] -#[tracing::instrument(skip(tmp_dir, policy_dir, bytes, validations))] +#[tracing::instrument(skip(state, bytes))] async fn process_animation( - tmp_dir: &TmpDir, - policy_dir: &PolicyDir, + state: &State, bytes: Bytes, input: AnimationFormat, width: u16, height: u16, frames: u32, - validations: &crate::config::Animation, - timeout: u64, ) -> Result<(InternalFormat, ProcessRead), Error> { + let validations = &state.config.media.animation; + validate_animation(bytes.len(), width, height, frames, validations)?; let AnimationOutput { @@ -224,10 +180,9 @@ async fn process_animation( let process_read = if needs_transcode { let quality = validations.quality_for(format); - magick::convert_animation(tmp_dir, policy_dir, input, format, quality, timeout, bytes) - .await? + magick::convert_animation(state, input, format, quality, bytes).await? } else { - exiftool::clear_metadata_bytes_read(bytes, timeout)? + exiftool::clear_metadata_bytes_read(bytes, state.config.media.process_timeout)? }; Ok((InternalFormat::Animation(format), process_read)) @@ -262,18 +217,17 @@ fn validate_video( Ok(()) } -#[allow(clippy::too_many_arguments)] -#[tracing::instrument(skip(tmp_dir, bytes, validations))] -async fn process_video( - tmp_dir: &TmpDir, +#[tracing::instrument(skip(state, bytes))] +async fn process_video( + state: &State, bytes: Bytes, input: InputVideoFormat, width: u16, height: u16, frames: u32, - validations: &crate::config::Video, - timeout: u64, ) -> Result<(InternalFormat, ProcessRead), Error> { + let validations = &state.config.media.video; + validate_video(bytes.len(), width, height, frames, validations)?; let output = input.build_output( @@ -284,7 +238,15 @@ async fn process_video( let crf = validations.crf_for(width, height); - let process_read = ffmpeg::transcode_bytes(tmp_dir, input, output, crf, timeout, bytes).await?; + let process_read = ffmpeg::transcode_bytes( + &state.tmp_dir, + input, + output, + crf, + state.config.media.process_timeout, + bytes, + ) + .await?; Ok(( InternalFormat::Video(output.format.internal_format()), diff --git a/src/validate/magick.rs b/src/validate/magick.rs index c1d6131..edb4bbe 100644 --- a/src/validate/magick.rs +++ b/src/validate/magick.rs @@ -9,67 +9,57 @@ use crate::{ tmp_file::TmpDir, }; -pub(super) async fn convert_image( - tmp_dir: &TmpDir, - policy_dir: &PolicyDir, +pub(super) async fn convert_image( + state: &State, input: ImageFormat, output: ImageFormat, quality: Option, - timeout: u64, bytes: Bytes, ) -> Result { convert( - tmp_dir, - policy_dir, + state, input.magick_format(), output.magick_format(), false, quality, - timeout, bytes, ) .await } -pub(super) async fn convert_animation( - tmp_dir: &TmpDir, - policy_dir: &PolicyDir, +pub(super) async fn convert_animation( + state: &State, input: AnimationFormat, output: AnimationFormat, quality: Option, - timeout: u64, bytes: Bytes, ) -> Result { 
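// --- Reviewer sketch, not part of this patch ---
// The new src/state.rs above bundles what used to be six separate arguments. Building
// one looks roughly like this (variable names are hypothetical; fields and types follow
// the State<S> definition shown above):
let state = State {
    config,      // crate::config::Configuration
    tmp_dir,     // ArcTmpDir
    policy_dir,  // ArcPolicyDir
    repo,        // ArcRepo
    store,       // any S: Store, e.g. FileStore or ObjectStore
    client,      // reqwest_middleware::ClientWithMiddleware
};

// Each field is either an Arc or already Clone, which is what lets State derive Clone,
// so workers and actix app_data can each hold their own copy.
let worker_state = state.clone();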
convert( - tmp_dir, - policy_dir, + state, input.magick_format(), output.magick_format(), true, quality, timeout, - bytes, ) .await } -#[allow(clippy::too_many_arguments)] async fn convert( - tmp_dir: &TmpDir, - policy_dir: &PolicyDir, + state: &State, input: &'static str, output: &'static str, coalesce: bool, quality: Option, - timeout: u64, bytes: Bytes, ) -> Result { - let temporary_path = tmp_dir + let temporary_path = state + .tmp_dir .tmp_folder() .await .map_err(MagickError::CreateTemporaryDirectory)?; - let input_file = tmp_dir.tmp_file(None); + let input_file = state.tmp_dir.tmp_file(None); crate::store::file_store::safe_create_parent(&input_file) .await @@ -104,10 +94,10 @@ async fn convert( let envs = [ (MAGICK_TEMPORARY_PATH, temporary_path.as_os_str()), - (MAGICK_CONFIGURE_PATH, policy_dir.as_os_str()), + (MAGICK_CONFIGURE_PATH, state.policy_dir.as_os_str()), ]; - let reader = Process::run("magick", &args, &envs, timeout)?.read(); + let reader = Process::run("magick", &args, &envs, state.config.media.process_timeout)?.read(); let clean_reader = reader.add_extras(input_file).add_extras(temporary_path); From f2410a9283aa8ee0c6edf6d6539ae7b41b1b8732 Mon Sep 17 00:00:00 2001 From: asonix Date: Sat, 3 Feb 2024 13:32:20 -0600 Subject: [PATCH 2/8] use State --- src/queue.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/src/queue.rs b/src/queue.rs index 5c906e0..9227709 100644 --- a/src/queue.rs +++ b/src/queue.rs @@ -7,6 +7,7 @@ use crate::{ magick::ArcPolicyDir, repo::{Alias, ArcRepo, DeleteToken, Hash, JobId, UploadId}, serde_str::Serde, + state::State, store::Store, tmp_file::ArcTmpDir, }; From 50e31f96da69342836fc3dfd661eaf4196b648bc Mon Sep 17 00:00:00 2001 From: asonix Date: Sat, 3 Feb 2024 18:18:13 -0600 Subject: [PATCH 3/8] It compiles again --- src/backgrounded.rs | 9 +- src/details.rs | 10 +- src/discover.rs | 16 ++- src/discover/exiftool.rs | 2 +- src/discover/ffmpeg.rs | 20 ++-- src/discover/magick.rs | 25 ++--- src/generate.rs | 22 +--- src/generate/ffmpeg.rs | 16 +-- src/generate/magick.rs | 14 +-- src/ingest.rs | 19 ++-- src/lib.rs | 226 +++++++++++++++------------------------ src/magick.rs | 3 +- src/migrate_store.rs | 159 +++++++++------------------ src/queue/process.rs | 1 + src/repo/migrate.rs | 138 +++++++----------------- src/validate.rs | 6 +- src/validate/magick.rs | 5 +- 17 files changed, 240 insertions(+), 451 deletions(-) diff --git a/src/backgrounded.rs b/src/backgrounded.rs index eb95556..1585afc 100644 --- a/src/backgrounded.rs +++ b/src/backgrounded.rs @@ -3,6 +3,7 @@ use std::sync::Arc; use crate::{ error::Error, repo::{ArcRepo, UploadId}, + state::State, store::Store, }; use actix_web::web::Bytes; @@ -30,23 +31,23 @@ impl Backgrounded { self.identifier.as_ref() } - pub(crate) async fn proxy(repo: ArcRepo, store: S, stream: P) -> Result + pub(crate) async fn proxy(state: &State, stream: P) -> Result where S: Store, P: Stream> + 'static, { let mut this = Self { - repo, + repo: state.repo.clone(), identifier: None, upload_id: None, }; - this.do_proxy(store, stream).await?; + this.do_proxy(&state.store, stream).await?; Ok(this) } - async fn do_proxy(&mut self, store: S, stream: P) -> Result<(), Error> + async fn do_proxy(&mut self, store: &S, stream: P) -> Result<(), Error> where S: Store, P: Stream> + 'static, diff --git a/src/details.rs b/src/details.rs index 5c0647e..c2fd9fc 100644 --- a/src/details.rs +++ b/src/details.rs @@ -4,6 +4,7 @@ use crate::{ formats::{InternalFormat, InternalVideoFormat}, magick::PolicyDir, serde_str::Serde, + 
state::State, tmp_file::TmpDir, }; use actix_web::web; @@ -81,18 +82,13 @@ impl Details { } #[tracing::instrument(level = "debug", skip_all)] - pub(crate) async fn from_bytes( - tmp_dir: &TmpDir, - policy_dir: &PolicyDir, - timeout: u64, - input: web::Bytes, - ) -> Result { + pub(crate) async fn from_bytes(state: &State, input: web::Bytes) -> Result { let Discovery { input, width, height, frames, - } = crate::discover::discover_bytes(tmp_dir, policy_dir, timeout, input).await?; + } = crate::discover::discover_bytes(state, input).await?; Ok(Details::from_parts( input.internal_format(), diff --git a/src/discover.rs b/src/discover.rs index 1c4736c..005f76e 100644 --- a/src/discover.rs +++ b/src/discover.rs @@ -4,7 +4,7 @@ mod magick; use actix_web::web::Bytes; -use crate::{formats::InputFile, magick::PolicyDir, tmp_file::TmpDir}; +use crate::{formats::InputFile, magick::PolicyDir, state::State, tmp_file::TmpDir}; #[derive(Debug, PartialEq, Eq)] pub(crate) struct Discovery { @@ -27,18 +27,16 @@ pub(crate) enum DiscoverError { } #[tracing::instrument(level = "trace", skip_all)] -pub(crate) async fn discover_bytes( - tmp_dir: &TmpDir, - policy_dir: &PolicyDir, - timeout: u64, +pub(crate) async fn discover_bytes( + state: &State, bytes: Bytes, ) -> Result { - let discovery = ffmpeg::discover_bytes(tmp_dir, timeout, bytes.clone()).await?; + let discovery = ffmpeg::discover_bytes(state, bytes.clone()).await?; + + let discovery = magick::confirm_bytes(state, discovery, bytes.clone()).await?; let discovery = - magick::confirm_bytes(tmp_dir, policy_dir, discovery, timeout, bytes.clone()).await?; - - let discovery = exiftool::check_reorient(discovery, timeout, bytes).await?; + exiftool::check_reorient(discovery, bytes, state.config.media.process_timeout).await?; Ok(discovery) } diff --git a/src/discover/exiftool.rs b/src/discover/exiftool.rs index 253810f..d8ea421 100644 --- a/src/discover/exiftool.rs +++ b/src/discover/exiftool.rs @@ -16,8 +16,8 @@ pub(super) async fn check_reorient( height, frames, }: Discovery, - timeout: u64, bytes: Bytes, + timeout: u64, ) -> Result { let input = match input { InputFile::Image(ImageInput { format, .. }) => { diff --git a/src/discover/ffmpeg.rs b/src/discover/ffmpeg.rs index e92425b..004870e 100644 --- a/src/discover/ffmpeg.rs +++ b/src/discover/ffmpeg.rs @@ -10,6 +10,7 @@ use crate::{ Mp4AudioCodec, Mp4Codec, WebmAlphaCodec, WebmAudioCodec, WebmCodec, }, process::Process, + state::State, tmp_file::TmpDir, }; use actix_web::web::Bytes; @@ -158,12 +159,11 @@ struct Flags { } #[tracing::instrument(skip_all)] -pub(super) async fn discover_bytes( - tmp_dir: &TmpDir, - timeout: u64, +pub(super) async fn discover_bytes( + state: &State, bytes: Bytes, ) -> Result, FfMpegError> { - discover_file(tmp_dir, timeout, move |mut file| { + discover_file(state, move |mut file| { let bytes = bytes.clone(); async move { @@ -191,16 +191,12 @@ async fn allows_alpha(pixel_format: &str, timeout: u64) -> Result( - tmp_dir: &TmpDir, - timeout: u64, - f: F, -) -> Result, FfMpegError> +async fn discover_file(state: &State, f: F) -> Result, FfMpegError> where F: FnOnce(crate::file::File) -> Fut, Fut: std::future::Future>, { - let input_file = tmp_dir.tmp_file(None); + let input_file = state.tmp_dir.tmp_file(None); crate::store::file_store::safe_create_parent(&input_file) .await .map_err(FfMpegError::CreateDir)?; @@ -226,7 +222,7 @@ where input_file.as_os_str(), ], &[], - timeout, + state.config.media.process_timeout, )? .read() .into_vec() @@ -250,7 +246,7 @@ where .. 
}) = &mut discovery.input { - *alpha = allows_alpha(&pixel_format, timeout).await?; + *alpha = allows_alpha(&pixel_format, state.config.media.process_timeout).await?; } } diff --git a/src/discover/magick.rs b/src/discover/magick.rs index 0d1ee7c..92e458b 100644 --- a/src/discover/magick.rs +++ b/src/discover/magick.rs @@ -8,6 +8,7 @@ use crate::{ formats::{AnimationFormat, ImageFormat, ImageInput, InputFile}, magick::{MagickError, PolicyDir, MAGICK_CONFIGURE_PATH, MAGICK_TEMPORARY_PATH}, process::Process, + state::State, tmp_file::TmpDir, }; @@ -31,11 +32,9 @@ struct Geometry { } #[tracing::instrument(skip_all)] -pub(super) async fn confirm_bytes( - tmp_dir: &TmpDir, - policy_dir: &PolicyDir, +pub(super) async fn confirm_bytes( + state: &State, discovery: Option, - timeout: u64, bytes: Bytes, ) -> Result { match discovery { @@ -51,7 +50,7 @@ pub(super) async fn confirm_bytes( } } - discover_file(tmp_dir, policy_dir, timeout, move |mut file| async move { + discover_file(state, move |mut file| async move { file.write_from_bytes(bytes) .await .map_err(MagickError::Write)?; @@ -62,22 +61,18 @@ pub(super) async fn confirm_bytes( } #[tracing::instrument(level = "debug", skip_all)] -async fn discover_file( - tmp_dir: &TmpDir, - policy_dir: &PolicyDir, - timeout: u64, - f: F, -) -> Result +async fn discover_file(state: &State, f: F) -> Result where F: FnOnce(crate::file::File) -> Fut, Fut: std::future::Future>, { - let temporary_path = tmp_dir + let temporary_path = state + .tmp_dir .tmp_folder() .await .map_err(MagickError::CreateTemporaryDirectory)?; - let input_file = tmp_dir.tmp_file(None); + let input_file = state.tmp_dir.tmp_file(None); crate::store::file_store::safe_create_parent(&input_file) .await .map_err(MagickError::CreateDir)?; @@ -90,7 +85,7 @@ where let envs = [ (MAGICK_TEMPORARY_PATH, temporary_path.as_os_str()), - (MAGICK_CONFIGURE_PATH, policy_dir.as_os_str()), + (MAGICK_CONFIGURE_PATH, state.policy_dir.as_os_str()), ]; let res = Process::run( @@ -102,7 +97,7 @@ where "JSON:".as_ref(), ], &envs, - timeout, + state.config.media.process_timeout, )? 
.read() .into_string() diff --git a/src/generate.rs b/src/generate.rs index 79129b7..ee5f2f2 100644 --- a/src/generate.rs +++ b/src/generate.rs @@ -143,13 +143,7 @@ async fn process( drop(permit); - let details = Details::from_bytes( - &state.tmp_dir, - &state.policy_dir, - &state.config.media.process_timeout, - bytes.clone(), - ) - .await?; + let details = Details::from_bytes(state, bytes.clone()).await?; let identifier = state .store @@ -214,14 +208,8 @@ where let stream = state.store.to_stream(&identifier, None, None).await?; - let reader = magick::thumbnail( - state, - stream, - processable_format, - ProcessableFormat::Image(thumbnail_format), - config.media.image.quality_for(thumbnail_format), - ) - .await?; + let reader = + magick::thumbnail(state, stream, processable_format, thumbnail_format).await?; (reader, thumbnail_format.media_type()) } else { @@ -234,14 +222,12 @@ where }; let reader = ffmpeg::thumbnail( - state.tmp_dir, - state.store.clone(), + &state, identifier, original_details .video_format() .unwrap_or(InternalVideoFormat::Mp4), thumbnail_format, - state.config.media.process_timeout, ) .await?; diff --git a/src/generate/ffmpeg.rs b/src/generate/ffmpeg.rs index ab73097..aec518d 100644 --- a/src/generate/ffmpeg.rs +++ b/src/generate/ffmpeg.rs @@ -6,6 +6,7 @@ use crate::{ ffmpeg::FfMpegError, formats::InternalVideoFormat, process::{Process, ProcessRead}, + state::State, store::Store, tmp_file::TmpDir, }; @@ -50,21 +51,19 @@ impl ThumbnailFormat { } } -#[tracing::instrument(skip(tmp_dir, store, timeout))] +#[tracing::instrument(skip(state))] pub(super) async fn thumbnail( - tmp_dir: &TmpDir, - store: S, + state: &State, from: Arc, input_format: InternalVideoFormat, format: ThumbnailFormat, - timeout: u64, ) -> Result { - let input_file = tmp_dir.tmp_file(Some(input_format.file_extension())); + let input_file = state.tmp_dir.tmp_file(Some(input_format.file_extension())); crate::store::file_store::safe_create_parent(&input_file) .await .map_err(FfMpegError::CreateDir)?; - let output_file = tmp_dir.tmp_file(Some(format.to_file_extension())); + let output_file = state.tmp_dir.tmp_file(Some(format.to_file_extension())); crate::store::file_store::safe_create_parent(&output_file) .await .map_err(FfMpegError::CreateDir)?; @@ -72,7 +71,8 @@ pub(super) async fn thumbnail( let mut tmp_one = crate::file::File::create(&input_file) .await .map_err(FfMpegError::CreateFile)?; - let stream = store + let stream = state + .store .to_stream(&from, None, None) .await .map_err(FfMpegError::Store)?; @@ -99,7 +99,7 @@ pub(super) async fn thumbnail( output_file.as_os_str(), ], &[], - timeout, + state.config.media.process_timeout, )?; let res = process.wait().await; diff --git a/src/generate/magick.rs b/src/generate/magick.rs index d722d57..3b03a2a 100644 --- a/src/generate/magick.rs +++ b/src/generate/magick.rs @@ -3,7 +3,7 @@ use std::ffi::OsStr; use actix_web::web::Bytes; use crate::{ - formats::ProcessableFormat, + formats::{ImageFormat, ProcessableFormat}, magick::{MagickError, PolicyDir, MAGICK_CONFIGURE_PATH, MAGICK_TEMPORARY_PATH}, process::{Process, ProcessRead}, state::State, @@ -14,14 +14,16 @@ use crate::{ async fn thumbnail_animation( state: &State, input_format: ProcessableFormat, - format: ProcessableFormat, - quality: Option, + thumbnail_format: ImageFormat, write_file: F, ) -> Result where F: FnOnce(crate::file::File) -> Fut, Fut: std::future::Future>, { + let format = ProcessableFormat::Image(thumbnail_format); + let quality = state.config.media.image.quality_for(thumbnail_format); 
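// Reviewer note (commentary, not part of the patch): deriving `format` and `quality`
// here from `state.config` means the generate.rs call site above only names the target
// ImageFormat; the ProcessableFormat/quality pair no longer has to be threaded through
// every signature.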
+ let temporary_path = state .tmp_dir .tmp_folder() @@ -77,14 +79,12 @@ pub(super) async fn thumbnail( state: &State, stream: LocalBoxStream<'static, std::io::Result>, input_format: ProcessableFormat, - format: ProcessableFormat, - quality: Option, + thumbnail_format: ImageFormat, ) -> Result { thumbnail_animation( state, input_format, - format, - quality, + thumbnail_format, |mut tmp_file| async move { tmp_file .write_from_stream(stream) diff --git a/src/ingest.rs b/src/ingest.rs index d140fe7..12bcf2b 100644 --- a/src/ingest.rs +++ b/src/ingest.rs @@ -107,9 +107,10 @@ where let (hash_state, identifier) = process_read .with_stdout(|stdout| async move { let hasher_reader = Hasher::new(stdout); - let state = hasher_reader.state(); + let hash_state = hasher_reader.state(); - store + state + .store .save_async_read(hasher_reader, input_type.media_type()) .await .map(move |identifier| (hash_state, identifier)) @@ -117,13 +118,7 @@ where .await??; let bytes_stream = state.store.to_bytes(&identifier, None, None).await?; - let details = Details::from_bytes( - tmp_dir, - policy_dir, - media.process_timeout, - bytes_stream.into_bytes(), - ) - .await?; + let details = Details::from_bytes(state, bytes_stream.into_bytes()).await?; drop(permit); @@ -153,7 +148,7 @@ where let reader = Box::pin(tokio_util::io::StreamReader::new(stream)); let hasher_reader = Hasher::new(reader); - let state = hasher_reader.state(); + let hash_state = hasher_reader.state(); let input_type = InternalFormat::Image(crate::formats::ImageFormat::Png); @@ -164,7 +159,7 @@ where let details = Details::danger_dummy(input_type); - Ok((input_type, identifier, details, state)) + Ok((input_type, identifier, details, hash_state)) } #[allow(clippy::too_many_arguments)] @@ -192,7 +187,7 @@ where }; if let Some(endpoint) = &state.config.media.external_validation { - let stream = store.to_stream(&identifier, None, None).await?; + let stream = state.store.to_stream(&identifier, None, None).await?; let response = state .client diff --git a/src/lib.rs b/src/lib.rs index 2afa6be..277c9b2 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -138,13 +138,7 @@ async fn ensure_details_identifier( tracing::debug!("generating new details from {:?}", identifier); let bytes_stream = state.store.to_bytes(identifier, None, None).await?; - let new_details = Details::from_bytes( - &state.tmp_dir, - &state.policy_dir, - state.config.media.process_timeout, - bytes_stream.into_bytes(), - ) - .await?; + let new_details = Details::from_bytes(state, bytes_stream.into_bytes()).await?; tracing::debug!("storing details for {:?}", identifier); state.repo.relate_details(identifier, &new_details).await?; tracing::debug!("stored"); @@ -351,7 +345,7 @@ impl FormData for BackgroundedUpload { let stream = crate::stream::from_err(stream); - Backgrounded::proxy(&state.repo, &state.store, stream).await + Backgrounded::proxy(&state, stream).await } .instrument(span), ) @@ -539,12 +533,12 @@ async fn do_download_backgrounded( ) -> Result { metrics::counter!("pict-rs.files", "download" => "background").increment(1); - let backgrounded = Backgrounded::proxy((**repo).clone(), (**store).clone(), stream).await?; + let backgrounded = Backgrounded::proxy(&state, stream).await?; let upload_id = backgrounded.upload_id().expect("Upload ID exists"); let identifier = backgrounded.identifier().expect("Identifier exists"); - queue::queue_ingest(&repo, identifier, upload_id, None).await?; + queue::queue_ingest(&state.repo, identifier, upload_id, None).await?; backgrounded.disarm(); @@ -611,7 +605,8 
@@ async fn page( for hash in &page.hashes { let hex = hash.to_hex(); - let aliases = repo + let aliases = state + .repo .aliases_for_hash(hash.clone()) .await? .into_iter() @@ -794,7 +789,7 @@ async fn process( ProcessSource::Proxy { proxy } => { let alias = if let Some(alias) = state.repo.related(proxy.clone()).await? { alias - } else if !config.server.read_only { + } else if !state.config.server.read_only { let stream = download_stream(proxy.as_str(), &state).await?; let (alias, _, _) = ingest_inline(stream, &state).await?; @@ -836,7 +831,10 @@ async fn process( .await?; } - let identifier_opt = repo.variant_identifier(hash.clone(), path_string).await?; + let identifier_opt = state + .repo + .variant_identifier(hash.clone(), path_string) + .await?; if let Some(identifier) = identifier_opt { let details = ensure_details_identifier(&state, &identifier).await?; @@ -850,7 +848,7 @@ async fn process( return ranged_file_resp(&state.store, identifier, range, details, not_found).await; } - if config.server.read_only { + if state.config.server.read_only { return Err(UploadError::ReadOnly.into()); } @@ -930,7 +928,9 @@ async fn process_head( }; if !state.config.server.read_only { - repo.accessed_variant(hash.clone(), path_string.clone()) + state + .repo + .accessed_variant(hash.clone(), path_string.clone()) .await?; } @@ -959,7 +959,7 @@ async fn process_head( async fn process_backgrounded( web::Query(ProcessQuery { source, operations }): web::Query, ext: web::Path, - state: web::Data>, + state: web::Data>, ) -> Result { let source = match source { ProcessSource::Alias { alias } | ProcessSource::Source { src: alias } => { @@ -1123,7 +1123,7 @@ async fn do_serve( async fn serve_query_head( range: Option>, web::Query(alias_query): web::Query, - state: web::Data>, + state: web::Data>, ) -> Result { let alias = match alias_query { AliasQuery::Alias { alias } => Serde::into_inner(alias), @@ -1547,10 +1547,12 @@ fn build_client() -> Result { fn configure_endpoints( config: &mut web::ServiceConfig, state: State, + process_map: ProcessMap, extra_config: F, ) { config - .app_data(web::Data::new(state)) + .app_data(web::Data::new(state.clone())) + .app_data(web::Data::new(process_map.clone())) .route("/healthz", web::get().to(healthz::)) .service( web::scope("/image") @@ -1613,9 +1615,7 @@ fn configure_endpoints( ) .service( web::scope("/internal") - .wrap(Internal( - state.config.server.api_key.as_ref().map(|s| s.to_owned()), - )) + .wrap(Internal(state.config.server.api_key.clone())) .service(web::resource("/import").route(web::post().to(import::))) .service(web::resource("/variants").route(web::delete().to(clean_variants::))) .service(web::resource("/purge").route(web::post().to(purge::))) @@ -1623,13 +1623,13 @@ fn configure_endpoints( .service(web::resource("/aliases").route(web::get().to(aliases::))) .service(web::resource("/identifier").route(web::get().to(identifier::))) .service(web::resource("/set_not_found").route(web::post().to(set_not_found::))) - .service(web::resource("/hashes").route(web::get().to(page))) + .service(web::resource("/hashes").route(web::get().to(page::))) .service(web::resource("/prune_missing").route(web::post().to(prune_missing::))) .configure(extra_config), ); } -fn spawn_cleanup(state: State) { +fn spawn_cleanup(state: State) { if state.config.server.read_only { return; } @@ -1668,34 +1668,21 @@ where } async fn launch_file_store( - tmp_dir: ArcTmpDir, - policy_dir: ArcPolicyDir, - repo: ArcRepo, - store: FileStore, - client: ClientWithMiddleware, - config: Configuration, 
+ state: State, extra_config: F, ) -> color_eyre::Result<()> { let process_map = ProcessMap::new(); - let address = config.server.address; + let address = state.config.server.address; + + let tls = Tls::from_config(&state.config); spawn_cleanup(state.clone()); - let tls = Tls::from_config(&config); - - let state = State { - config, - tmp_dir, - policy_dir, - repo, - store, - client, - }; - let server = HttpServer::new(move || { let extra_config = extra_config.clone(); let state = state.clone(); + let process_map = process_map.clone(); spawn_workers(state.clone(), process_map.clone()); @@ -1704,8 +1691,9 @@ async fn launch_file_store( - tmp_dir: ArcTmpDir, - policy_dir: ArcPolicyDir, - repo: ArcRepo, - store: ObjectStore, - client: ClientWithMiddleware, - config: Configuration, + state: State, extra_config: F, ) -> color_eyre::Result<()> { let process_map = ProcessMap::new(); - let address = config.server.address; + let address = state.config.server.address; - let tls = Tls::from_config(&config); - - let state = State { - config: config.clone(), - tmp_dir: tmp_dir.clone(), - policy_dir: policy_dir.clone(), - repo: repo.clone(), - store: store.clone(), - client: client.clone(), - }; + let tls = Tls::from_config(&state.config); spawn_cleanup(state.clone()); let server = HttpServer::new(move || { let extra_config = extra_config.clone(); let state = state.clone(); + let process_map = process_map.clone(); spawn_workers(state.clone(), process_map.clone()); @@ -1784,8 +1759,9 @@ async fn launch_object_store( + config: Configuration, tmp_dir: ArcTmpDir, policy_dir: ArcPolicyDir, repo: ArcRepo, @@ -1832,7 +1809,6 @@ async fn migrate_inner( from: S1, to: config::primitives::Store, skip_missing_files: bool, - timeout: u64, concurrency: usize, ) -> color_eyre::Result<()> where @@ -1840,19 +1816,18 @@ where { match to { config::primitives::Store::Filesystem(config::Filesystem { path }) => { - let to = FileStore::build(path.clone(), repo.clone()).await?; + let store = FileStore::build(path.clone(), repo.clone()).await?; - migrate_store( + let to = State { + config, tmp_dir, policy_dir, repo, - from, - to, - skip_missing_files, - timeout, - concurrency, - ) - .await? + store, + client, + }; + + migrate_store(from, to, skip_missing_files, concurrency).await? } config::primitives::Store::ObjectStorage(config::primitives::ObjectStorage { endpoint, @@ -1866,7 +1841,7 @@ where client_timeout, public_endpoint, }) => { - let to = ObjectStore::build( + let store = ObjectStore::build( endpoint.clone(), bucket_name, if use_path_style { @@ -1884,19 +1859,18 @@ where repo.clone(), ) .await? - .build(client); + .build(client.clone()); - migrate_store( + let to = State { + config, tmp_dir, policy_dir, repo, - from, - to, - skip_missing_files, - timeout, - concurrency, - ) - .await? + store, + client, + }; + + migrate_store(from, to, skip_missing_files, concurrency).await? 
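// Reviewer note (commentary, not part of the patch): in both arms above the migration
// target is now a full State rather than a bare store, so repo, config, and client
// travel with it and migrate_store can drop its separate repo/timeout parameters.
// A call then reduces to, roughly:
//
//     migrate_store(from, to, skip_missing_files, concurrency).await?;
//
// where `from` is the source Store and `to` is the State built just above.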
} } @@ -2066,6 +2040,7 @@ impl PictRsConfiguration { config::primitives::Store::Filesystem(config::Filesystem { path }) => { let from = FileStore::build(path.clone(), repo.clone()).await?; migrate_inner( + config, tmp_dir, policy_dir, repo, @@ -2073,7 +2048,6 @@ impl PictRsConfiguration { from, to, skip_missing_files, - config.media.process_timeout, concurrency, ) .await?; @@ -2113,6 +2087,7 @@ impl PictRsConfiguration { .build(client.clone()); migrate_inner( + config, tmp_dir, policy_dir, repo, @@ -2120,7 +2095,6 @@ impl PictRsConfiguration { from, to, skip_missing_files, - config.media.process_timeout, concurrency, ) .await?; @@ -2150,18 +2124,19 @@ impl PictRsConfiguration { let store = FileStore::build(path, arc_repo.clone()).await?; + let state = State { + tmp_dir: tmp_dir.clone(), + policy_dir: policy_dir.clone(), + repo: arc_repo.clone(), + store: store.clone(), + config: config.clone(), + client: client.clone(), + }; + if arc_repo.get("migrate-0.4").await?.is_none() { if let Some(path) = config.old_repo_path() { if let Some(old_repo) = repo_04::open(path)? { - repo::migrate_04( - tmp_dir.clone(), - policy_dir.clone(), - old_repo, - arc_repo.clone(), - store.clone(), - config.clone(), - ) - .await?; + repo::migrate_04(old_repo, state.clone()).await?; arc_repo .set("migrate-0.4", Arc::from(b"migrated".to_vec())) .await?; @@ -2171,28 +2146,13 @@ impl PictRsConfiguration { match repo { Repo::Sled(sled_repo) => { - launch_file_store( - tmp_dir.clone(), - policy_dir.clone(), - arc_repo, - store, - client, - config, - move |sc| sled_extra_config(sc, sled_repo.clone()), - ) + launch_file_store(state, move |sc| { + sled_extra_config(sc, sled_repo.clone()) + }) .await?; } Repo::Postgres(_) => { - launch_file_store( - tmp_dir.clone(), - policy_dir.clone(), - arc_repo, - store, - client, - config, - |_| {}, - ) - .await?; + launch_file_store(state, |_| {}).await?; } } } @@ -2230,18 +2190,19 @@ impl PictRsConfiguration { .await? .build(client.clone()); + let state = State { + tmp_dir: tmp_dir.clone(), + policy_dir: policy_dir.clone(), + repo: arc_repo.clone(), + store: store.clone(), + config: config.clone(), + client: client.clone(), + }; + if arc_repo.get("migrate-0.4").await?.is_none() { if let Some(path) = config.old_repo_path() { if let Some(old_repo) = repo_04::open(path)? 
{ - repo::migrate_04( - tmp_dir.clone(), - policy_dir.clone(), - old_repo, - arc_repo.clone(), - store.clone(), - config.clone(), - ) - .await?; + repo::migrate_04(old_repo, state.clone()).await?; arc_repo .set("migrate-0.4", Arc::from(b"migrated".to_vec())) .await?; @@ -2251,28 +2212,13 @@ impl PictRsConfiguration { match repo { Repo::Sled(sled_repo) => { - launch_object_store( - tmp_dir.clone(), - policy_dir.clone(), - arc_repo, - store, - client, - config, - move |sc| sled_extra_config(sc, sled_repo.clone()), - ) + launch_object_store(state, move |sc| { + sled_extra_config(sc, sled_repo.clone()) + }) .await?; } Repo::Postgres(_) => { - launch_object_store( - tmp_dir.clone(), - policy_dir.clone(), - arc_repo, - store, - client, - config, - |_| {}, - ) - .await?; + launch_object_store(state, |_| {}).await?; } } } diff --git a/src/magick.rs b/src/magick.rs index 01632e1..05200ff 100644 --- a/src/magick.rs +++ b/src/magick.rs @@ -7,6 +7,7 @@ use crate::{ error_code::ErrorCode, formats::ProcessableFormat, process::{Process, ProcessError, ProcessRead}, + state::State, stream::LocalBoxStream, tmp_file::{TmpDir, TmpFolder}, }; @@ -177,7 +178,7 @@ pub(crate) async fn process_image_stream_read( .await } -pub(crate) async fn process_image_process_read( +pub(crate) async fn process_image_process_read( state: &State, process_read: ProcessRead, args: Vec, diff --git a/src/migrate_store.rs b/src/migrate_store.rs index 1c339c8..c420e88 100644 --- a/src/migrate_store.rs +++ b/src/migrate_store.rs @@ -14,19 +14,16 @@ use crate::{ error::{Error, UploadError}, magick::{ArcPolicyDir, PolicyDir}, repo::{ArcRepo, Hash}, + state::State, store::Store, tmp_file::{ArcTmpDir, TmpDir}, }; #[allow(clippy::too_many_arguments)] pub(super) async fn migrate_store( - tmp_dir: ArcTmpDir, - policy_dir: ArcPolicyDir, - repo: ArcRepo, from: S1, - to: S2, + to: State, skip_missing_files: bool, - timeout: u64, concurrency: usize, ) -> Result<(), Error> where @@ -39,7 +36,7 @@ where tracing::warn!("Old store is not configured correctly"); return Err(e.into()); } - if let Err(e) = to.health_check().await { + if let Err(e) = to.repo.health_check().await { tracing::warn!("New store is not configured correctly"); return Err(e.into()); } @@ -48,17 +45,8 @@ where let mut failure_count = 0; - while let Err(e) = do_migrate_store( - tmp_dir.clone(), - policy_dir.clone(), - repo.clone(), - from.clone(), - to.clone(), - skip_missing_files, - timeout, - concurrency, - ) - .await + while let Err(e) = + do_migrate_store(from.clone(), to.clone(), skip_missing_files, concurrency).await { tracing::error!("Migration failed with {}", format!("{e:?}")); @@ -78,11 +66,8 @@ where } struct MigrateState { - tmp_dir: ArcTmpDir, - policy_dir: ArcPolicyDir, - repo: ArcRepo, from: S1, - to: S2, + to: State, continuing_migration: bool, skip_missing_files: bool, initial_repo_size: u64, @@ -90,26 +75,21 @@ struct MigrateState { pct: AtomicU64, index: AtomicU64, started_at: Instant, - timeout: u64, } #[allow(clippy::too_many_arguments)] async fn do_migrate_store( - tmp_dir: ArcTmpDir, - policy_dir: ArcPolicyDir, - repo: ArcRepo, from: S1, - to: S2, + to: State, skip_missing_files: bool, - timeout: u64, concurrency: usize, ) -> Result<(), Error> where S1: Store + 'static, S2: Store + 'static, { - let continuing_migration = repo.is_continuing_migration().await?; - let initial_repo_size = repo.size().await?; + let continuing_migration = to.repo.is_continuing_migration().await?; + let initial_repo_size = to.repo.size().await?; if continuing_migration { 
tracing::warn!("Continuing previous migration of {initial_repo_size} total hashes"); @@ -122,15 +102,12 @@ where } // Hashes are read in a consistent order - let stream = std::pin::pin!(repo.hashes()); + let stream = std::pin::pin!(to.repo.hashes()); let mut stream = stream.into_streamer(); let state = Rc::new(MigrateState { - tmp_dir: tmp_dir.clone(), - policy_dir: policy_dir.clone(), - repo: repo.clone(), from, - to, + to: to.clone(), continuing_migration, skip_missing_files, initial_repo_size, @@ -138,7 +115,6 @@ where pct: AtomicU64::new(initial_repo_size / 100), index: AtomicU64::new(0), started_at: Instant::now(), - timeout, }); let mut joinset = tokio::task::JoinSet::new(); @@ -165,7 +141,7 @@ where } // clean up the migration table to avoid interfering with future migrations - repo.clear().await?; + to.repo.clear().await?; tracing::warn!("Migration completed successfully"); @@ -179,9 +155,6 @@ where S2: Store, { let MigrateState { - tmp_dir, - policy_dir, - repo, from, to, continuing_migration, @@ -191,24 +164,23 @@ where pct, index, started_at, - timeout, } = state; let current_index = index.fetch_add(1, Ordering::Relaxed); - let original_identifier = match repo.identifier(hash.clone()).await { + let original_identifier = match to.repo.identifier(hash.clone()).await { Ok(Some(identifier)) => identifier, Ok(None) => { tracing::warn!( "Original File identifier for hash {hash:?} is missing, queue cleanup task", ); - crate::queue::cleanup_hash(repo, hash.clone()).await?; + crate::queue::cleanup_hash(&to.repo, hash.clone()).await?; return Ok(()); } Err(e) => return Err(e.into()), }; - if repo.is_migrated(&original_identifier).await? { + if to.repo.is_migrated(&original_identifier).await? { // migrated original for hash - this means we can skip return Ok(()); } @@ -241,26 +213,16 @@ where } } - if let Some(identifier) = repo.motion_identifier(hash.clone()).await? { - if !repo.is_migrated(&identifier).await? { - match migrate_file( - tmp_dir, - policy_dir, - repo, - from, - to, - &identifier, - *skip_missing_files, - *timeout, - ) - .await - { + if let Some(identifier) = to.repo.motion_identifier(hash.clone()).await? { + if !to.repo.is_migrated(&identifier).await? { + match migrate_file(from, to, &identifier, *skip_missing_files).await { Ok(new_identifier) => { - migrate_details(repo, &identifier, &new_identifier).await?; - repo.relate_motion_identifier(hash.clone(), &new_identifier) + migrate_details(&to.repo, &identifier, &new_identifier).await?; + to.repo + .relate_motion_identifier(hash.clone(), &new_identifier) .await?; - repo.mark_migrated(&identifier, &new_identifier).await?; + to.repo.mark_migrated(&identifier, &new_identifier).await?; } Err(MigrateError::From(e)) if e.is_not_found() && *skip_missing_files => { tracing::warn!("Skipping motion file for hash {hash:?}"); @@ -281,28 +243,20 @@ where } } - for (variant, identifier) in repo.variants(hash.clone()).await? { - if !repo.is_migrated(&identifier).await? { - match migrate_file( - tmp_dir, - policy_dir, - repo, - from, - to, - &identifier, - *skip_missing_files, - *timeout, - ) - .await - { + for (variant, identifier) in to.repo.variants(hash.clone()).await? { + if !to.repo.is_migrated(&identifier).await? 
{ + match migrate_file(from, to, &identifier, *skip_missing_files).await { Ok(new_identifier) => { - migrate_details(repo, &identifier, &new_identifier).await?; - repo.remove_variant(hash.clone(), variant.clone()).await?; - let _ = repo + migrate_details(&to.repo, &identifier, &new_identifier).await?; + to.repo + .remove_variant(hash.clone(), variant.clone()) + .await?; + let _ = to + .repo .relate_variant_identifier(hash.clone(), variant, &new_identifier) .await?; - repo.mark_migrated(&identifier, &new_identifier).await?; + to.repo.mark_migrated(&identifier, &new_identifier).await?; } Err(MigrateError::From(e)) if e.is_not_found() && *skip_missing_files => { tracing::warn!("Skipping variant {variant} for hash {hash:?}",); @@ -323,23 +277,14 @@ where } } - match migrate_file( - tmp_dir, - policy_dir, - repo, - from, - to, - &original_identifier, - *skip_missing_files, - *timeout, - ) - .await - { + match migrate_file(from, to, &original_identifier, *skip_missing_files).await { Ok(new_identifier) => { - migrate_details(repo, &original_identifier, &new_identifier).await?; - repo.update_identifier(hash.clone(), &new_identifier) + migrate_details(&to.repo, &original_identifier, &new_identifier).await?; + to.repo + .update_identifier(hash.clone(), &new_identifier) .await?; - repo.mark_migrated(&original_identifier, &new_identifier) + to.repo + .mark_migrated(&original_identifier, &new_identifier) .await?; } Err(MigrateError::From(e)) if e.is_not_found() && *skip_missing_files => { @@ -385,14 +330,10 @@ where #[allow(clippy::too_many_arguments)] async fn migrate_file( - tmp_dir: &TmpDir, - policy_dir: &PolicyDir, - repo: &ArcRepo, from: &S1, - to: &S2, + to: &State, identifier: &Arc, skip_missing_files: bool, - timeout: u64, ) -> Result, MigrateError> where S1: Store, @@ -403,7 +344,7 @@ where loop { tracing::trace!("migrate_file: looping"); - match do_migrate_file(tmp_dir, policy_dir, repo, from, to, identifier, timeout).await { + match do_migrate_file(from, to, identifier).await { Ok(identifier) => return Ok(identifier), Err(MigrateError::From(e)) if e.is_not_found() && skip_missing_files => { return Err(MigrateError::From(e)); @@ -432,13 +373,9 @@ enum MigrateError { } async fn do_migrate_file( - tmp_dir: &TmpDir, - policy_dir: &PolicyDir, - repo: &ArcRepo, from: &S1, - to: &S2, + to: &State, identifier: &Arc, - timeout: u64, ) -> Result, MigrateError> where S1: Store, @@ -449,7 +386,8 @@ where .await .map_err(MigrateError::From)?; - let details_opt = repo + let details_opt = to + .repo .details(identifier) .await .map_err(Error::from) @@ -463,11 +401,11 @@ where .await .map_err(From::from) .map_err(MigrateError::Details)?; - let new_details = - Details::from_bytes(tmp_dir, policy_dir, timeout, bytes_stream.into_bytes()) - .await - .map_err(MigrateError::Details)?; - repo.relate_details(identifier, &new_details) + let new_details = Details::from_bytes(to, bytes_stream.into_bytes()) + .await + .map_err(MigrateError::Details)?; + to.repo + .relate_details(identifier, &new_details) .await .map_err(Error::from) .map_err(MigrateError::Details)?; @@ -475,6 +413,7 @@ where }; let new_identifier = to + .store .save_stream(stream, details.media_type()) .await .map_err(MigrateError::To)?; diff --git a/src/queue/process.rs b/src/queue/process.rs index e2d2c9f..bdef92a 100644 --- a/src/queue/process.rs +++ b/src/queue/process.rs @@ -116,6 +116,7 @@ where let guard = UploadGuard::guard(upload_id); let fut = async { + let ident = unprocessed_identifier.clone(); let state2 = state.clone(); let current_span 
= Span::current(); diff --git a/src/repo/migrate.rs b/src/repo/migrate.rs index 702498e..f2b6efa 100644 --- a/src/repo/migrate.rs +++ b/src/repo/migrate.rs @@ -16,6 +16,7 @@ use crate::{ AliasRepo as _, HashRepo as _, IdentifierRepo as _, SettingsRepo as _, SledRepo as OldSledRepo, }, + state::State, store::Store, tmp_file::{ArcTmpDir, TmpDir}, }; @@ -80,23 +81,19 @@ pub(crate) async fn migrate_repo(old_repo: ArcRepo, new_repo: ArcRepo) -> Result #[tracing::instrument(skip_all)] pub(crate) async fn migrate_04( - tmp_dir: ArcTmpDir, - policy_dir: ArcPolicyDir, old_repo: OldSledRepo, - new_repo: ArcRepo, - store: S, - config: Configuration, + state: State, ) -> Result<(), Error> { tracing::info!("Running checks"); if let Err(e) = old_repo.health_check().await { tracing::warn!("Old repo is not configured correctly"); return Err(e.into()); } - if let Err(e) = new_repo.health_check().await { + if let Err(e) = state.repo.health_check().await { tracing::warn!("New repo is not configured correctly"); return Err(e.into()); } - if let Err(e) = store.health_check().await { + if let Err(e) = state.store.health_check().await { tracing::warn!("Store is not configured correctly"); return Err(e.into()); } @@ -116,19 +113,15 @@ pub(crate) async fn migrate_04( if let Ok(hash) = res { set.spawn_local(migrate_hash_04( - tmp_dir.clone(), - policy_dir.clone(), old_repo.clone(), - new_repo.clone(), - store.clone(), - config.clone(), + state.clone(), hash.clone(), )); } else { tracing::warn!("Failed to read hash, skipping"); } - while set.len() >= config.upgrade.concurrency { + while set.len() >= state.config.upgrade.concurrency { tracing::trace!("migrate_04: join looping"); if set.join_next().await.is_some() { @@ -156,13 +149,15 @@ pub(crate) async fn migrate_04( } if let Some(generator_state) = old_repo.get(GENERATOR_KEY).await? { - new_repo + state + .repo .set(GENERATOR_KEY, generator_state.to_vec().into()) .await?; } if let Some(generator_state) = old_repo.get(crate::NOT_FOUND_KEY).await? { - new_repo + state + .repo .set(crate::NOT_FOUND_KEY, generator_state.to_vec().into()) .await?; } @@ -193,28 +188,10 @@ async fn migrate_hash(old_repo: ArcRepo, new_repo: ArcRepo, hash: Hash) { } } -async fn migrate_hash_04( - tmp_dir: ArcTmpDir, - policy_dir: ArcPolicyDir, - old_repo: OldSledRepo, - new_repo: ArcRepo, - store: S, - config: Configuration, - old_hash: sled::IVec, -) { +async fn migrate_hash_04(old_repo: OldSledRepo, state: State, old_hash: sled::IVec) { let mut hash_failures = 0; - while let Err(e) = timed_migrate_hash_04( - &tmp_dir, - &policy_dir, - &old_repo, - &new_repo, - &store, - &config, - old_hash.clone(), - ) - .await - { + while let Err(e) = timed_migrate_hash_04(&old_repo, &state, old_hash.clone()).await { hash_failures += 1; if hash_failures > 10 { @@ -300,19 +277,13 @@ async fn do_migrate_hash(old_repo: &ArcRepo, new_repo: &ArcRepo, hash: Hash) -> } async fn timed_migrate_hash_04( - tmp_dir: &TmpDir, - policy_dir: &PolicyDir, old_repo: &OldSledRepo, - new_repo: &ArcRepo, - store: &S, - config: &Configuration, + state: &State, old_hash: sled::IVec, ) -> Result<(), Error> { tokio::time::timeout( - Duration::from_secs(config.media.external_validation_timeout * 6), - do_migrate_hash_04( - tmp_dir, policy_dir, old_repo, new_repo, store, config, old_hash, - ), + Duration::from_secs(state.config.media.process_timeout * 6), + do_migrate_hash_04(old_repo, state, old_hash), ) .await .map_err(|_| UploadError::ProcessTimeout)? 
@@ -320,12 +291,8 @@ async fn timed_migrate_hash_04( #[tracing::instrument(skip_all)] async fn do_migrate_hash_04( - tmp_dir: &TmpDir, - policy_dir: &PolicyDir, old_repo: &OldSledRepo, - new_repo: &ArcRepo, - store: &S, - config: &Configuration, + state: &State, old_hash: sled::IVec, ) -> Result<(), Error> { let Some(identifier) = old_repo.identifier(old_hash.clone()).await? else { @@ -333,18 +300,9 @@ async fn do_migrate_hash_04( return Ok(()); }; - let size = store.len(&identifier).await?; + let size = state.store.len(&identifier).await?; - let hash_details = set_details( - tmp_dir, - policy_dir, - old_repo, - new_repo, - store, - config, - &identifier, - ) - .await?; + let hash_details = set_details(old_repo, state, &identifier).await?; let aliases = old_repo.aliases_for_hash(old_hash.clone()).await?; let variants = old_repo.variants(old_hash.clone()).await?; @@ -354,7 +312,8 @@ async fn do_migrate_hash_04( let hash = Hash::new(hash, size, hash_details.internal_format()); - let _ = new_repo + let _ = state + .repo .create_hash_with_timestamp(hash.clone(), &identifier, hash_details.created_at()) .await?; @@ -364,66 +323,45 @@ async fn do_migrate_hash_04( .await? .unwrap_or_else(DeleteToken::generate); - let _ = new_repo + let _ = state + .repo .create_alias(&alias, &delete_token, hash.clone()) .await?; } if let Some(identifier) = motion_identifier { - new_repo + state + .repo .relate_motion_identifier(hash.clone(), &identifier) .await?; - set_details( - tmp_dir, - policy_dir, - old_repo, - new_repo, - store, - config, - &identifier, - ) - .await?; + set_details(old_repo, state, &identifier).await?; } for (variant, identifier) in variants { - let _ = new_repo + let _ = state + .repo .relate_variant_identifier(hash.clone(), variant.clone(), &identifier) .await?; - set_details( - tmp_dir, - policy_dir, - old_repo, - new_repo, - store, - config, - &identifier, - ) - .await?; + set_details(old_repo, state, &identifier).await?; - new_repo.accessed_variant(hash.clone(), variant).await?; + state.repo.accessed_variant(hash.clone(), variant).await?; } Ok(()) } async fn set_details( - tmp_dir: &TmpDir, - policy_dir: &PolicyDir, old_repo: &OldSledRepo, - new_repo: &ArcRepo, - store: &S, - config: &Configuration, + state: &State, identifier: &Arc, ) -> Result { - if let Some(details) = new_repo.details(identifier).await? { + if let Some(details) = state.repo.details(identifier).await? 
{ Ok(details) } else { - let details = - fetch_or_generate_details(tmp_dir, policy_dir, old_repo, store, config, identifier) - .await?; - new_repo.relate_details(identifier, &details).await?; + let details = fetch_or_generate_details(old_repo, state, identifier).await?; + state.repo.relate_details(identifier, &details).await?; Ok(details) } } @@ -442,11 +380,8 @@ fn details_semaphore() -> &'static Semaphore { #[tracing::instrument(skip_all)] async fn fetch_or_generate_details( - tmp_dir: &TmpDir, - policy_dir: &PolicyDir, old_repo: &OldSledRepo, - store: &S, - config: &Configuration, + state: &State, identifier: &Arc, ) -> Result { let details_opt = old_repo.details(identifier.clone()).await?; @@ -454,12 +389,11 @@ async fn fetch_or_generate_details( if let Some(details) = details_opt { Ok(details) } else { - let bytes_stream = store.to_bytes(identifier, None, None).await?; + let bytes_stream = state.store.to_bytes(identifier, None, None).await?; let bytes = bytes_stream.into_bytes(); let guard = details_semaphore().acquire().await?; - let details = - Details::from_bytes(tmp_dir, policy_dir, config.media.process_timeout, bytes).await?; + let details = Details::from_bytes(state, bytes).await?; drop(guard); Ok(details) diff --git a/src/validate.rs b/src/validate.rs index bed66e7..e4a6f29 100644 --- a/src/validate.rs +++ b/src/validate.rs @@ -71,7 +71,7 @@ pub(crate) async fn validate_bytes( width, height, frames, - } = crate::discover::discover_bytes(tmp_dir, policy_dir, timeout, bytes.clone()).await?; + } = crate::discover::discover_bytes(state, bytes.clone()).await?; match &input { InputFile::Image(input) => { @@ -127,7 +127,7 @@ async fn process_image( magick::convert_image(state, input.format, format, quality, bytes).await? } else { - exiftool::clear_metadata_bytes_read(bytes, timeout)? + exiftool::clear_metadata_bytes_read(bytes, state.config.media.process_timeout)? 
}; Ok((InternalFormat::Image(format), process_read)) @@ -160,7 +160,7 @@ fn validate_animation( } #[tracing::instrument(skip(state, bytes))] -async fn process_animation( +async fn process_animation( state: &State, bytes: Bytes, input: AnimationFormat, diff --git a/src/validate/magick.rs b/src/validate/magick.rs index edb4bbe..65fa367 100644 --- a/src/validate/magick.rs +++ b/src/validate/magick.rs @@ -6,6 +6,7 @@ use crate::{ formats::{AnimationFormat, ImageFormat}, magick::{MagickError, PolicyDir, MAGICK_CONFIGURE_PATH, MAGICK_TEMPORARY_PATH}, process::{Process, ProcessRead}, + state::State, tmp_file::TmpDir, }; @@ -40,12 +41,12 @@ pub(super) async fn convert_animation( output.magick_format(), true, quality, - timeout, + bytes, ) .await } -async fn convert( +async fn convert( state: &State, input: &'static str, output: &'static str, From 7eba45f37e428b81b8f5fce288b9f1a1a6fd5de0 Mon Sep 17 00:00:00 2001 From: asonix Date: Sat, 3 Feb 2024 18:19:14 -0600 Subject: [PATCH 4/8] cargo fix & cargo fmt --- src/details.rs | 2 -- src/discover.rs | 2 +- src/discover/ffmpeg.rs | 1 - src/discover/magick.rs | 3 +-- src/generate.rs | 2 -- src/generate/ffmpeg.rs | 1 - src/generate/magick.rs | 3 +-- src/ingest.rs | 6 ++---- src/lib.rs | 2 +- src/migrate_store.rs | 2 -- src/queue.rs | 5 +---- src/queue/process.rs | 6 +----- src/repo/migrate.rs | 3 --- src/validate.rs | 2 -- src/validate/magick.rs | 3 +-- 15 files changed, 9 insertions(+), 34 deletions(-) diff --git a/src/details.rs b/src/details.rs index c2fd9fc..f95c828 100644 --- a/src/details.rs +++ b/src/details.rs @@ -2,10 +2,8 @@ use crate::{ discover::Discovery, error::Error, formats::{InternalFormat, InternalVideoFormat}, - magick::PolicyDir, serde_str::Serde, state::State, - tmp_file::TmpDir, }; use actix_web::web; use time::{format_description::well_known::Rfc3339, OffsetDateTime}; diff --git a/src/discover.rs b/src/discover.rs index 005f76e..f394c52 100644 --- a/src/discover.rs +++ b/src/discover.rs @@ -4,7 +4,7 @@ mod magick; use actix_web::web::Bytes; -use crate::{formats::InputFile, magick::PolicyDir, state::State, tmp_file::TmpDir}; +use crate::{formats::InputFile, state::State}; #[derive(Debug, PartialEq, Eq)] pub(crate) struct Discovery { diff --git a/src/discover/ffmpeg.rs b/src/discover/ffmpeg.rs index 004870e..1597b05 100644 --- a/src/discover/ffmpeg.rs +++ b/src/discover/ffmpeg.rs @@ -11,7 +11,6 @@ use crate::{ }, process::Process, state::State, - tmp_file::TmpDir, }; use actix_web::web::Bytes; diff --git a/src/discover/magick.rs b/src/discover/magick.rs index 92e458b..a280736 100644 --- a/src/discover/magick.rs +++ b/src/discover/magick.rs @@ -6,10 +6,9 @@ use actix_web::web::Bytes; use crate::{ discover::DiscoverError, formats::{AnimationFormat, ImageFormat, ImageInput, InputFile}, - magick::{MagickError, PolicyDir, MAGICK_CONFIGURE_PATH, MAGICK_TEMPORARY_PATH}, + magick::{MagickError, MAGICK_CONFIGURE_PATH, MAGICK_TEMPORARY_PATH}, process::Process, state::State, - tmp_file::TmpDir, }; use super::Discovery; diff --git a/src/generate.rs b/src/generate.rs index ee5f2f2..75966c5 100644 --- a/src/generate.rs +++ b/src/generate.rs @@ -7,11 +7,9 @@ use crate::{ error::{Error, UploadError}, formats::{ImageFormat, InputProcessableFormat, InternalVideoFormat, ProcessableFormat}, future::{WithMetrics, WithTimeout}, - magick::PolicyDir, repo::{Hash, VariantAlreadyExists}, state::State, store::Store, - tmp_file::TmpDir, }; use actix_web::web::Bytes; use std::{ diff --git a/src/generate/ffmpeg.rs b/src/generate/ffmpeg.rs index aec518d..39976be 
100644 --- a/src/generate/ffmpeg.rs +++ b/src/generate/ffmpeg.rs @@ -8,7 +8,6 @@ use crate::{ process::{Process, ProcessRead}, state::State, store::Store, - tmp_file::TmpDir, }; #[derive(Clone, Copy, Debug)] diff --git a/src/generate/magick.rs b/src/generate/magick.rs index 3b03a2a..b753261 100644 --- a/src/generate/magick.rs +++ b/src/generate/magick.rs @@ -4,11 +4,10 @@ use actix_web::web::Bytes; use crate::{ formats::{ImageFormat, ProcessableFormat}, - magick::{MagickError, PolicyDir, MAGICK_CONFIGURE_PATH, MAGICK_TEMPORARY_PATH}, + magick::{MagickError, MAGICK_CONFIGURE_PATH, MAGICK_TEMPORARY_PATH}, process::{Process, ProcessRead}, state::State, stream::LocalBoxStream, - tmp_file::TmpDir, }; async fn thumbnail_animation( diff --git a/src/ingest.rs b/src/ingest.rs index 12bcf2b..a952305 100644 --- a/src/ingest.rs +++ b/src/ingest.rs @@ -4,18 +4,16 @@ use crate::{ bytes_stream::BytesStream, details::Details, error::{Error, UploadError}, - formats::{InternalFormat, Validations}, + formats::InternalFormat, future::WithMetrics, - magick::PolicyDir, repo::{Alias, ArcRepo, DeleteToken, Hash}, state::State, store::Store, - tmp_file::TmpDir, }; use actix_web::web::Bytes; use futures_core::Stream; use reqwest::Body; -use reqwest_middleware::ClientWithMiddleware; + use streem::IntoStreamer; use tracing::{Instrument, Span}; diff --git a/src/lib.rs b/src/lib.rs index 277c9b2..798e384 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -43,7 +43,7 @@ use actix_web::{ use details::{ApiDetails, HumanDate}; use future::WithTimeout; use futures_core::Stream; -use magick::{ArcPolicyDir, PolicyDir}; +use magick::ArcPolicyDir; use metrics_exporter_prometheus::PrometheusBuilder; use middleware::{Metrics, Payload}; use repo::ArcRepo; diff --git a/src/migrate_store.rs b/src/migrate_store.rs index c420e88..9bd3cd8 100644 --- a/src/migrate_store.rs +++ b/src/migrate_store.rs @@ -12,11 +12,9 @@ use streem::IntoStreamer; use crate::{ details::Details, error::{Error, UploadError}, - magick::{ArcPolicyDir, PolicyDir}, repo::{ArcRepo, Hash}, state::State, store::Store, - tmp_file::{ArcTmpDir, TmpDir}, }; #[allow(clippy::too_many_arguments)] diff --git a/src/queue.rs b/src/queue.rs index 9227709..c58b1b8 100644 --- a/src/queue.rs +++ b/src/queue.rs @@ -1,17 +1,14 @@ use crate::{ concurrent_processor::ProcessMap, - config::Configuration, error::{Error, UploadError}, formats::InputProcessableFormat, future::LocalBoxFuture, - magick::ArcPolicyDir, repo::{Alias, ArcRepo, DeleteToken, Hash, JobId, UploadId}, serde_str::Serde, state::State, store::Store, - tmp_file::ArcTmpDir, }; -use reqwest_middleware::ClientWithMiddleware; + use std::{ path::PathBuf, sync::Arc, diff --git a/src/queue/process.rs b/src/queue/process.rs index bdef92a..656e2c9 100644 --- a/src/queue/process.rs +++ b/src/queue/process.rs @@ -1,21 +1,17 @@ -use reqwest_middleware::ClientWithMiddleware; use time::Instant; use tracing::{Instrument, Span}; use crate::{ concurrent_processor::ProcessMap, - config::Configuration, error::{Error, UploadError}, formats::InputProcessableFormat, future::LocalBoxFuture, ingest::Session, - magick::{ArcPolicyDir, PolicyDir}, queue::Process, - repo::{Alias, ArcRepo, UploadId, UploadResult}, + repo::{Alias, UploadId, UploadResult}, serde_str::Serde, state::State, store::Store, - tmp_file::{ArcTmpDir, TmpDir}, }; use std::{path::PathBuf, sync::Arc}; diff --git a/src/repo/migrate.rs b/src/repo/migrate.rs index f2b6efa..9cc803f 100644 --- a/src/repo/migrate.rs +++ b/src/repo/migrate.rs @@ -7,10 +7,8 @@ use streem::IntoStreamer; use 
tokio::{sync::Semaphore, task::JoinSet}; use crate::{ - config::Configuration, details::Details, error::{Error, UploadError}, - magick::{ArcPolicyDir, PolicyDir}, repo::{ArcRepo, DeleteToken, Hash}, repo_04::{ AliasRepo as _, HashRepo as _, IdentifierRepo as _, SettingsRepo as _, @@ -18,7 +16,6 @@ use crate::{ }, state::State, store::Store, - tmp_file::{ArcTmpDir, TmpDir}, }; const GENERATOR_KEY: &str = "last-path"; diff --git a/src/validate.rs b/src/validate.rs index e4a6f29..b69b5df 100644 --- a/src/validate.rs +++ b/src/validate.rs @@ -10,10 +10,8 @@ use crate::{ AnimationFormat, AnimationOutput, ImageInput, ImageOutput, InputFile, InputVideoFormat, InternalFormat, }, - magick::PolicyDir, process::ProcessRead, state::State, - tmp_file::TmpDir, }; use actix_web::web::Bytes; diff --git a/src/validate/magick.rs b/src/validate/magick.rs index 65fa367..ac8b720 100644 --- a/src/validate/magick.rs +++ b/src/validate/magick.rs @@ -4,10 +4,9 @@ use actix_web::web::Bytes; use crate::{ formats::{AnimationFormat, ImageFormat}, - magick::{MagickError, PolicyDir, MAGICK_CONFIGURE_PATH, MAGICK_TEMPORARY_PATH}, + magick::{MagickError, MAGICK_CONFIGURE_PATH, MAGICK_TEMPORARY_PATH}, process::{Process, ProcessRead}, state::State, - tmp_file::TmpDir, }; pub(super) async fn convert_image( From 880dfc20eee39d178de27adc7ae996197a18fa86 Mon Sep 17 00:00:00 2001 From: asonix Date: Sat, 3 Feb 2024 18:20:17 -0600 Subject: [PATCH 5/8] Remove unneeded struct, import --- src/config.rs | 4 ++-- src/formats.rs | 7 ------- 2 files changed, 2 insertions(+), 9 deletions(-) diff --git a/src/config.rs b/src/config.rs index d5ec99d..dea312a 100644 --- a/src/config.rs +++ b/src/config.rs @@ -12,8 +12,8 @@ use defaults::Defaults; pub(crate) use commandline::Operation; pub(crate) use file::{ - Animation, ConfigFile as Configuration, Image, Media, ObjectStorage, OpenTelemetry, Postgres, - Repo, Sled, Store, Tracing, Video, + Animation, ConfigFile as Configuration, Media, ObjectStorage, OpenTelemetry, Postgres, Repo, + Sled, Store, Tracing, Video, }; pub(crate) use primitives::{Filesystem, LogFormat}; diff --git a/src/formats.rs b/src/formats.rs index 076a600..3759e8b 100644 --- a/src/formats.rs +++ b/src/formats.rs @@ -12,13 +12,6 @@ pub(crate) use video::{ OutputVideo, VideoCodec, WebmAlphaCodec, WebmAudioCodec, WebmCodec, }; -#[derive(Clone, Debug)] -pub(crate) struct Validations<'a> { - pub(crate) image: &'a crate::config::Image, - pub(crate) animation: &'a crate::config::Animation, - pub(crate) video: &'a crate::config::Video, -} - #[derive(Clone, Debug, PartialEq, Eq)] pub(crate) enum InputFile { Image(ImageInput), From c176e4c686da2945146f855dc9eff8cf6b512647 Mon Sep 17 00:00:00 2001 From: asonix Date: Sat, 3 Feb 2024 18:30:52 -0600 Subject: [PATCH 6/8] clippy --- src/generate.rs | 4 +--- src/ingest.rs | 1 - src/lib.rs | 3 --- src/migrate_store.rs | 3 --- src/queue.rs | 1 - src/queue/cleanup.rs | 6 +++--- 6 files changed, 4 insertions(+), 14 deletions(-) diff --git a/src/generate.rs b/src/generate.rs index 75966c5..42704fb 100644 --- a/src/generate.rs +++ b/src/generate.rs @@ -93,7 +93,6 @@ pub(crate) async fn generate( } } -#[allow(clippy::too_many_arguments)] #[tracing::instrument(skip(state, hash))] async fn process( state: &State, @@ -167,7 +166,6 @@ async fn process( Ok((details, bytes)) as Result<(Details, Bytes), Error> } -#[allow(clippy::too_many_arguments)] #[tracing::instrument(skip_all)] async fn input_identifier( state: &State, @@ -220,7 +218,7 @@ where }; let reader = ffmpeg::thumbnail( - &state, + state, 
identifier, original_details .video_format() diff --git a/src/ingest.rs b/src/ingest.rs index a952305..5c245ce 100644 --- a/src/ingest.rs +++ b/src/ingest.rs @@ -160,7 +160,6 @@ where Ok((input_type, identifier, details, hash_state)) } -#[allow(clippy::too_many_arguments)] #[tracing::instrument(skip(state, stream))] pub(crate) async fn ingest( state: &State, diff --git a/src/lib.rs b/src/lib.rs index 798e384..5983eb6 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -773,7 +773,6 @@ async fn not_found_hash(repo: &ArcRepo) -> Result, Error> } /// Process files -#[allow(clippy::too_many_arguments)] #[tracing::instrument(name = "Serving processed image", skip(state, process_map))] async fn process( range: Option>, @@ -899,7 +898,6 @@ async fn process( )) } -#[allow(clippy::too_many_arguments)] #[tracing::instrument(name = "Serving processed image headers", skip(state))] async fn process_head( range: Option>, @@ -1042,7 +1040,6 @@ async fn details( } /// Serve files based on alias query -#[allow(clippy::too_many_arguments)] #[tracing::instrument(name = "Serving file query", skip(state))] async fn serve_query( range: Option>, diff --git a/src/migrate_store.rs b/src/migrate_store.rs index 9bd3cd8..0578bbe 100644 --- a/src/migrate_store.rs +++ b/src/migrate_store.rs @@ -17,7 +17,6 @@ use crate::{ store::Store, }; -#[allow(clippy::too_many_arguments)] pub(super) async fn migrate_store( from: S1, to: State, @@ -75,7 +74,6 @@ struct MigrateState { started_at: Instant, } -#[allow(clippy::too_many_arguments)] async fn do_migrate_store( from: S1, to: State, @@ -326,7 +324,6 @@ where Ok(()) } -#[allow(clippy::too_many_arguments)] async fn migrate_file( from: &S1, to: &State, diff --git a/src/queue.rs b/src/queue.rs index c58b1b8..95afe7b 100644 --- a/src/queue.rs +++ b/src/queue.rs @@ -330,7 +330,6 @@ async fn process_image_jobs( } } -#[allow(clippy::too_many_arguments)] async fn image_job_loop( state: &State, process_map: &ProcessMap, diff --git a/src/queue/cleanup.rs b/src/queue/cleanup.rs index 473b37c..6c432a3 100644 --- a/src/queue/cleanup.rs +++ b/src/queue/cleanup.rs @@ -14,10 +14,10 @@ use crate::{ store::Store, }; -pub(super) fn perform<'a, S>( - state: &'a State, +pub(super) fn perform( + state: &State, job: serde_json::Value, -) -> LocalBoxFuture<'a, Result<(), Error>> +) -> LocalBoxFuture<'_, Result<(), Error>> where S: Store + 'static, { From 835647d290230b2ff6b6aaa982c5fe1c23ed0eed Mon Sep 17 00:00:00 2001 From: asonix Date: Sat, 3 Feb 2024 18:42:34 -0600 Subject: [PATCH 7/8] Unite launch-with-store fns --- src/lib.rs | 117 +++++++++++++---------------------------------------- 1 file changed, 28 insertions(+), 89 deletions(-) diff --git a/src/lib.rs b/src/lib.rs index 5983eb6..ecfe0d6 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -49,6 +49,7 @@ use middleware::{Metrics, Payload}; use repo::ArcRepo; use reqwest_middleware::{ClientBuilder, ClientWithMiddleware}; use reqwest_tracing::TracingMiddleware; +use rustls_channel_resolver::ChannelSender; use rusty_s3::UrlStyle; use state::State; use std::{ @@ -59,6 +60,7 @@ use std::{ time::{Duration, SystemTime}, }; use streem::IntoStreamer; +use sync::DropHandle; use tmp_file::{ArcTmpDir, TmpDir}; use tokio::sync::Semaphore; use tracing::Instrument; @@ -1664,8 +1666,27 @@ where crate::sync::spawn("process-worker", queue::process_images(state, process_map)); } -async fn launch_file_store( - state: State, +fn watch_keys(tls: Tls, sender: ChannelSender) -> DropHandle<()> { + crate::sync::abort_on_drop(crate::sync::spawn("cert-reader", async move { + let mut 
interval = tokio::time::interval(Duration::from_secs(30)); + interval.tick().await; + + loop { + interval.tick().await; + + match tls.open_keys().await { + Ok(certified_key) => sender.update(certified_key), + Err(e) => tracing::error!("Failed to open keys {}", format!("{e}")), + } + } + })) +} + +async fn launch< + S: Store + Send + 'static, + F: Fn(&mut web::ServiceConfig) + Send + Clone + 'static, +>( + state: State, extra_config: F, ) -> color_eyre::Result<()> { let process_map = ProcessMap::new(); @@ -1698,19 +1719,7 @@ async fn launch_file_store(certified_key); - let handle = crate::sync::abort_on_drop(crate::sync::spawn("cert-reader", async move { - let mut interval = tokio::time::interval(Duration::from_secs(30)); - interval.tick().await; - - loop { - interval.tick().await; - - match tls.open_keys().await { - Ok(certified_key) => tx.update(certified_key), - Err(e) => tracing::error!("Failed to open keys {}", format!("{e}")), - } - } - })); + let handle = watch_keys(tls, tx); let config = rustls_021::ServerConfig::builder() .with_safe_defaults() @@ -1732,70 +1741,6 @@ async fn launch_file_store( - state: State, - extra_config: F, -) -> color_eyre::Result<()> { - let process_map = ProcessMap::new(); - - let address = state.config.server.address; - - let tls = Tls::from_config(&state.config); - - spawn_cleanup(state.clone()); - - let server = HttpServer::new(move || { - let extra_config = extra_config.clone(); - let state = state.clone(); - let process_map = process_map.clone(); - - spawn_workers(state.clone(), process_map.clone()); - - App::new() - .wrap(TracingLogger::default()) - .wrap(Deadline) - .wrap(Metrics) - .wrap(Payload::new()) - .configure(move |sc| { - configure_endpoints(sc, state.clone(), process_map.clone(), extra_config) - }) - }); - - if let Some(tls) = tls { - let certified_key = tls.open_keys().await?; - - let (tx, rx) = rustls_channel_resolver::channel::<32>(certified_key); - - let handle = crate::sync::abort_on_drop(crate::sync::spawn("cert-reader", async move { - let mut interval = tokio::time::interval(Duration::from_secs(30)); - interval.tick().await; - - loop { - interval.tick().await; - - match tls.open_keys().await { - Ok(certified_key) => tx.update(certified_key), - Err(e) => tracing::error!("Failed to open keys {}", format!("{e}")), - } - } - })); - - let config = rustls_021::ServerConfig::builder() - .with_safe_defaults() - .with_no_client_auth() - .with_cert_resolver(rx); - - server.bind_rustls_021(address, config)?.run().await?; - - handle.abort(); - let _ = handle.await; - } else { - server.bind(address)?.run().await?; - } - - Ok(()) -} - #[allow(clippy::too_many_arguments)] async fn migrate_inner( config: Configuration, @@ -2143,13 +2088,10 @@ impl PictRsConfiguration { match repo { Repo::Sled(sled_repo) => { - launch_file_store(state, move |sc| { - sled_extra_config(sc, sled_repo.clone()) - }) - .await?; + launch(state, move |sc| sled_extra_config(sc, sled_repo.clone())).await?; } Repo::Postgres(_) => { - launch_file_store(state, |_| {}).await?; + launch(state, |_| {}).await?; } } } @@ -2209,13 +2151,10 @@ impl PictRsConfiguration { match repo { Repo::Sled(sled_repo) => { - launch_object_store(state, move |sc| { - sled_extra_config(sc, sled_repo.clone()) - }) - .await?; + launch(state, move |sc| sled_extra_config(sc, sled_repo.clone())).await?; } Repo::Postgres(_) => { - launch_object_store(state, |_| {}).await?; + launch(state, |_| {}).await?; } } } From 85f635602587bfcb9f706b258aff00cd7a2221d1 Mon Sep 17 00:00:00 2001 From: asonix Date: Sat, 3 Feb 
2024 19:50:00 -0600 Subject: [PATCH 8/8] Fix images with trailing bytes failing to upload --- src/process.rs | 28 ++++++++++++++++++++-------- src/validate.rs | 6 +++--- 2 files changed, 23 insertions(+), 11 deletions(-) diff --git a/src/process.rs b/src/process.rs index 270903a..bfcc8e0 100644 --- a/src/process.rs +++ b/src/process.rs @@ -126,8 +126,8 @@ pub(crate) enum ProcessError { #[error("Failed to cleanup for command {0}")] Cleanup(Arc, #[source] std::io::Error), - #[error("Unknown process error")] - Other(#[source] std::io::Error), + #[error("Unknown process error for command {0}")] + Other(Arc, #[source] std::io::Error), } impl ProcessError { @@ -135,7 +135,7 @@ impl ProcessError { match self { Self::NotFound(_) => ErrorCode::COMMAND_NOT_FOUND, Self::PermissionDenied(_) => ErrorCode::COMMAND_PERMISSION_DENIED, - Self::LimitReached | Self::Read(_, _) | Self::Cleanup(_, _) | Self::Other(_) => { + Self::LimitReached | Self::Read(_, _) | Self::Cleanup(_, _) | Self::Other(_, _) => { ErrorCode::COMMAND_ERROR } Self::Timeout(_) => ErrorCode::COMMAND_TIMEOUT, @@ -180,7 +180,7 @@ impl Process { Err(ProcessError::PermissionDenied(command)) } std::io::ErrorKind::WouldBlock => Err(ProcessError::LimitReached), - _ => Err(ProcessError::Other(e)), + _ => Err(ProcessError::Other(command, e)), }, } } @@ -223,7 +223,7 @@ impl Process { Ok(()) } Ok(Ok(status)) => Err(ProcessError::Status(command, status)), - Ok(Err(e)) => Err(ProcessError::Other(e)), + Ok(Err(e)) => Err(ProcessError::Other(command, e)), Err(_) => { let _ = child.kill().await; Err(ProcessError::Timeout(command)) @@ -234,7 +234,16 @@ impl Process { pub(crate) fn bytes_read(self, input: Bytes) -> ProcessRead { self.spawn_fn(move |mut stdin| { let mut input = input; - async move { stdin.write_all_buf(&mut input).await } + async move { + match stdin.write_all_buf(&mut input).await { + Ok(()) => Ok(()), + // BrokenPipe means we finished reading from Stdout, so we don't need to write + // to stdin. We'll still error out if the command failed so treat this as a + // success + Err(e) if e.kind() == std::io::ErrorKind::BrokenPipe => Ok(()), + Err(e) => Err(e), + } + } }) } @@ -275,9 +284,12 @@ impl Process { Ok(()) } Ok(Ok(status)) => Err(ProcessError::Status(command2, status)), - Ok(Err(e)) => Err(ProcessError::Other(e)), + Ok(Err(e)) => Err(ProcessError::Other(command2, e)), Err(_) => { - child.kill().await.map_err(ProcessError::Other)?; + child + .kill() + .await + .map_err(|e| ProcessError::Other(command2.clone(), e))?; Err(ProcessError::Timeout(command2)) } } diff --git a/src/validate.rs b/src/validate.rs index b69b5df..ee857bd 100644 --- a/src/validate.rs +++ b/src/validate.rs @@ -92,7 +92,7 @@ pub(crate) async fn validate_bytes( } } -#[tracing::instrument(skip(state, bytes))] +#[tracing::instrument(skip(state, bytes), fields(len = bytes.len()))] async fn process_image( state: &State, bytes: Bytes, @@ -157,7 +157,7 @@ fn validate_animation( Ok(()) } -#[tracing::instrument(skip(state, bytes))] +#[tracing::instrument(skip(state, bytes), fields(len = bytes.len()))] async fn process_animation( state: &State, bytes: Bytes, @@ -215,7 +215,7 @@ fn validate_video( Ok(()) } -#[tracing::instrument(skip(state, bytes))] +#[tracing::instrument(skip(state, bytes), fields(len = bytes.len()))] async fn process_video( state: &State, bytes: Bytes,