From 75bf425c6edabceff8c0bae41e0bbc158e290ff1 Mon Sep 17 00:00:00 2001
From: asonix
Date: Sun, 23 Jul 2023 11:44:41 -0500
Subject: [PATCH] Update alias query, process query to accept proxy url

---
 Cargo.lock       |  10 ++
 Cargo.toml       |   1 +
 src/lib.rs       | 297 +++++++++++++++++++++++++++++++++++++++--------
 src/serde_str.rs |   2 +-
 4 files changed, 259 insertions(+), 51 deletions(-)

diff --git a/Cargo.lock b/Cargo.lock
index f015057..918a2ff 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1747,6 +1747,7 @@ dependencies = [
  "reqwest-tracing",
  "rusty-s3",
  "serde",
+ "serde-tuple-vec-map",
  "serde_cbor",
  "serde_json",
  "serde_urlencoded",
@@ -2248,6 +2249,15 @@ dependencies = [
  "serde_derive",
 ]

+[[package]]
+name = "serde-tuple-vec-map"
+version = "1.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a04d0ebe0de77d7d445bb729a895dcb0a288854b267ca85f030ce51cdc578c82"
+dependencies = [
+ "serde",
+]
+
 [[package]]
 name = "serde_cbor"
 version = "0.11.2"
diff --git a/Cargo.toml b/Cargo.toml
index 98fe0ef..03f3ba0 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -44,6 +44,7 @@ reqwest-middleware = "0.2.2"
 reqwest-tracing = { version = "0.4.5", features = ["opentelemetry_0_19"] }
 rusty-s3 = "0.4.1"
 serde = { version = "1.0", features = ["derive"] }
+serde-tuple-vec-map = "1.0.1"
 serde_cbor = "0.11.2"
 serde_json = "1.0"
 serde_urlencoded = "0.7.1"
diff --git a/src/lib.rs b/src/lib.rs
index 8b9cb26..cef18a4 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -590,32 +590,28 @@ async fn delete(
     Ok(HttpResponse::NoContent().finish())
 }

-type ProcessQuery = Vec<(String, String)>;
+#[derive(Debug, serde::Deserialize, serde::Serialize, PartialEq, Eq, PartialOrd, Ord)]
+#[serde(untagged)]
+enum ProcessSource {
+    Source { src: Serde<Alias> },
+    Alias { alias: Serde<Alias> },
+    Proxy { proxy: url::Url },
+}
+
+#[derive(Debug, serde::Deserialize, serde::Serialize, PartialEq, Eq, PartialOrd, Ord)]
+struct ProcessQuery {
+    #[serde(flatten)]
+    source: ProcessSource,
+
+    #[serde(with = "tuple_vec_map", flatten)]
+    operations: Vec<(String, String)>,
+}

 fn prepare_process(
     config: &Configuration,
-    query: web::Query<ProcessQuery>,
+    operations: Vec<(String, String)>,
     ext: &str,
-) -> Result<(InputProcessableFormat, Alias, PathBuf, Vec<String>), Error> {
-    let (alias, operations) =
-        query
-            .into_inner()
-            .into_iter()
-            .fold((String::new(), Vec::new()), |(s, mut acc), (k, v)| {
-                if k == "src" {
-                    (v, acc)
-                } else {
-                    acc.push((k, v));
-                    (s, acc)
-                }
-            });
-
-    if alias.is_empty() {
-        return Err(UploadError::MissingAlias.into());
-    }
-
-    let alias = Alias::from_existing(&alias);
-
+) -> Result<(InputProcessableFormat, PathBuf, Vec<String>), Error> {
     let operations = operations
         .into_iter()
         .filter(|(k, _)| config.media.filters.contains(&k.to_lowercase()))
@@ -628,17 +624,24 @@ fn prepare_process(
     let (thumbnail_path, thumbnail_args) =
         self::processor::build_chain(&operations, &format.to_string())?;

-    Ok((format, alias, thumbnail_path, thumbnail_args))
+    Ok((format, thumbnail_path, thumbnail_args))
 }

 #[tracing::instrument(name = "Fetching derived details", skip(repo, config))]
 async fn process_details(
-    query: web::Query<ProcessQuery>,
+    web::Query(ProcessQuery { source, operations }): web::Query<ProcessQuery>,
     ext: web::Path<String>,
     repo: web::Data<R>,
     config: web::Data<Configuration>,
 ) -> Result<HttpResponse, Error> {
-    let (_, alias, thumbnail_path, _) = prepare_process(&config, query, ext.as_str())?;
+    let alias = match source {
+        ProcessSource::Alias { alias } | ProcessSource::Source { src: alias } => {
+            Serde::into_inner(alias)
+        }
+        ProcessSource::Proxy { proxy } => todo!("proxy URL"),
+    };
+
+    let (_, thumbnail_path, _) = prepare_process(&config, operations, ext.as_str())?;

     let Some(hash) = repo.hash(&alias).await? else {
         // Invalid alias
@@ -691,15 +694,22 @@ async fn not_found_hash(repo: &R) -> Result(
     range: Option<web::Header<Range>>,
-    query: web::Query<ProcessQuery>,
+    web::Query(ProcessQuery { source, operations }): web::Query<ProcessQuery>,
     ext: web::Path<String>,
     repo: web::Data<R>,
     store: web::Data<S>,
     config: web::Data<Configuration>,
     process_map: web::Data<ProcessMap>,
 ) -> Result<HttpResponse, Error> {
-    let (format, alias, thumbnail_path, thumbnail_args) =
-        prepare_process(&config, query, ext.as_str())?;
+    let alias = match source {
+        ProcessSource::Alias { alias } | ProcessSource::Source { src: alias } => {
+            Serde::into_inner(alias)
+        }
+        ProcessSource::Proxy { proxy } => todo!("proxy URL"),
+    };
+
+    let (format, thumbnail_path, thumbnail_args) =
+        prepare_process(&config, operations, ext.as_str())?;

     let path_string = thumbnail_path.to_string_lossy().to_string();

@@ -816,13 +826,20 @@ async fn process(
 #[tracing::instrument(name = "Serving processed image headers", skip(repo, store, config))]
 async fn process_head(
     range: Option<web::Header<Range>>,
-    query: web::Query<ProcessQuery>,
+    web::Query(ProcessQuery { source, operations }): web::Query<ProcessQuery>,
     ext: web::Path<String>,
     repo: web::Data<R>,
     store: web::Data<S>,
     config: web::Data<Configuration>,
 ) -> Result<HttpResponse, Error> {
-    let (_, alias, thumbnail_path, _) = prepare_process(&config, query, ext.as_str())?;
+    let alias = match source {
+        ProcessSource::Alias { alias } | ProcessSource::Source { src: alias } => {
+            Serde::into_inner(alias)
+        }
+        ProcessSource::Proxy { proxy } => todo!("proxy URL"),
+    };
+
+    let (_, thumbnail_path, _) = prepare_process(&config, operations, ext.as_str())?;

     let path_string = thumbnail_path.to_string_lossy().to_string();
     let Some(hash) = repo.hash(&alias).await? else {
@@ -878,13 +895,20 @@ async fn process_head(
 /// Process files
 #[tracing::instrument(name = "Spawning image process", skip(repo))]
 async fn process_backgrounded(
-    query: web::Query<ProcessQuery>,
+    web::Query(ProcessQuery { source, operations }): web::Query<ProcessQuery>,
     ext: web::Path<String>,
     repo: web::Data<R>,
     config: web::Data<Configuration>,
 ) -> Result<HttpResponse, Error> {
-    let (target_format, source, process_path, process_args) =
-        prepare_process(&config, query, ext.as_str())?;
+    let source = match source {
+        ProcessSource::Alias { alias } | ProcessSource::Source { src: alias } => {
+            Serde::into_inner(alias)
+        }
+        ProcessSource::Proxy { proxy } => todo!("proxy URL"),
+    };
+
+    let (target_format, process_path, process_args) =
+        prepare_process(&config, operations, ext.as_str())?;

     let path_string = process_path.to_string_lossy().to_string();
     let Some(hash) = repo.hash(&source).await? else {
@@ -909,6 +933,24 @@ async fn process_backgrounded(
     Ok(HttpResponse::Accepted().finish())
 }

+/// Fetch file details
+#[tracing::instrument(name = "Fetching query details", skip(repo, store, config))]
+async fn details_query(
+    web::Query(alias_query): web::Query<AliasQuery>,
+    repo: web::Data<R>,
+    store: web::Data<S>,
+    config: web::Data<Configuration>,
+) -> Result<HttpResponse, Error> {
+    let alias = match alias_query {
+        AliasQuery::Alias { alias } => alias,
+        AliasQuery::Proxy { proxy } => {
+            todo!("Proxy URL")
+        }
+    };
+
+    do_details(alias, repo, store, config).await
+}
+
 /// Fetch file details
 #[tracing::instrument(name = "Fetching details", skip(repo, store, config))]
 async fn details(
@@ -917,13 +959,39 @@ async fn details(
     store: web::Data<S>,
     config: web::Data<Configuration>,
 ) -> Result<HttpResponse, Error> {
-    let alias = alias.into_inner();
+    do_details(alias.into_inner(), repo, store, config).await
+}
+async fn do_details(
+    alias: Serde<Alias>,
+    repo: web::Data<R>,
+    store: web::Data<S>,
+    config: web::Data<Configuration>,
+) -> Result<HttpResponse, Error> {

     let details = ensure_details(&repo, &store, &config, &alias).await?;

     Ok(HttpResponse::Ok().json(&details))
 }

+/// Serve files based on alias query
+#[tracing::instrument(name = "Serving file query", skip(repo, store, config))]
+async fn serve_query(
+    range: Option<web::Header<Range>>,
+    web::Query(alias_query): web::Query<AliasQuery>,
+    repo: web::Data<R>,
+    store: web::Data<S>,
+    config: web::Data<Configuration>,
+) -> Result<HttpResponse, Error> {
+    let alias = match alias_query {
+        AliasQuery::Alias { alias } => alias,
+        AliasQuery::Proxy { proxy } => {
+            todo!("Proxy URL")
+        }
+    };
+
+    do_serve(range, alias, repo, store, config).await
+}
+
 /// Serve files
 #[tracing::instrument(name = "Serving file", skip(repo, store, config))]
 async fn serve(
@@ -933,8 +1001,16 @@ async fn serve(
     store: web::Data<S>,
     config: web::Data<Configuration>,
 ) -> Result<HttpResponse, Error> {
-    let alias = alias.into_inner();
+    do_serve(range, alias.into_inner(), repo, store, config).await
+}
+async fn do_serve(
+    range: Option<web::Header<Range>>,
+    alias: Serde<Alias>,
+    repo: web::Data<R>,
+    store: web::Data<S>,
+    config: web::Data<Configuration>,
+) -> Result<HttpResponse, Error> {

     let (hash, alias, not_found) = if let Some(hash) = repo.hash(&alias).await? {
         (hash, Serde::into_inner(alias), false)
     } else {
@@ -965,6 +1041,24 @@ async fn serve(
     ranged_file_resp(&store, identifier, range, details, not_found).await
 }

+#[tracing::instrument(name = "Serving query file headers", skip(repo, store, config))]
+async fn serve_query_head(
+    range: Option<web::Header<Range>>,
+    web::Query(alias_query): web::Query<AliasQuery>,
+    repo: web::Data<R>,
+    store: web::Data<S>,
+    config: web::Data<Configuration>,
+) -> Result<HttpResponse, Error> {
+    let alias = match alias_query {
+        AliasQuery::Alias { alias } => alias,
+        AliasQuery::Proxy { proxy } => {
+            todo!("Proxy URL")
+        }
+    };
+
+    do_serve_head(range, alias, repo, store, config).await
+}
+
 #[tracing::instrument(name = "Serving file headers", skip(repo, store, config))]
 async fn serve_head(
     range: Option<web::Header<Range>>,
@@ -973,8 +1067,16 @@ async fn serve_head(
     store: web::Data<S>,
     config: web::Data<Configuration>,
 ) -> Result<HttpResponse, Error> {
-    let alias = alias.into_inner();
+    do_serve_head(range, alias.into_inner(), repo, store, config).await
+}
+async fn do_serve_head(
+    range: Option<web::Header<Range>>,
+    alias: Serde<Alias>,
+    repo: web::Data<R>,
+    store: web::Data<S>,
+    config: web::Data<Configuration>,
+) -> Result<HttpResponse, Error> {

     let Some(identifier) = repo.identifier_from_alias::<S::Identifier>(&alias).await? else {
         // Invalid alias
         return Ok(HttpResponse::NotFound().finish());
@@ -1137,8 +1239,9 @@ async fn clean_variants(
 }

 #[derive(Debug, serde::Deserialize)]
-struct AliasQuery {
-    alias: Serde<Alias>,
+enum AliasQuery {
+    Proxy { proxy: url::Url },
+    Alias { alias: Serde<Alias> },
 }

 #[tracing::instrument(name = "Setting 404 Image", skip(repo, config))]
 async fn set_not_found(
@@ -1151,7 +1254,12 @@ async fn set_not_found(
         return Err(UploadError::ReadOnly.into());
     }

-    let alias = json.into_inner().alias;
+    let alias = match json.into_inner() {
+        AliasQuery::Alias { alias } => alias,
+        AliasQuery::Proxy { proxy } => {
+            todo!("Proxy URL")
+        }
+    };

     if repo.hash(&alias).await?.is_none() {
         return Ok(HttpResponse::BadRequest().json(serde_json::json!({
@@ -1168,7 +1276,7 @@ async fn set_not_found(

 #[tracing::instrument(name = "Purging file", skip(repo, config))]
 async fn purge(
-    query: web::Query<AliasQuery>,
+    web::Query(alias_query): web::Query<AliasQuery>,
     repo: web::Data<R>,
     config: web::Data<Configuration>,
 ) -> Result<HttpResponse, Error> {
@@ -1176,7 +1284,13 @@ async fn purge(
         return Err(UploadError::ReadOnly.into());
     }

-    let alias = query.into_inner().alias;
+    let alias = match alias_query {
+        AliasQuery::Alias { alias } => alias,
+        AliasQuery::Proxy { proxy } => {
+            todo!("Proxy URL")
+        }
+    };
+
     let aliases = repo.aliases_from_alias(&alias).await?;

     let Some(hash) = repo.hash(&alias).await? else {
@@ -1194,10 +1308,16 @@ async fn purge(

 #[tracing::instrument(name = "Fetching aliases", skip(repo))]
 async fn aliases(
-    query: web::Query<AliasQuery>,
+    web::Query(alias_query): web::Query<AliasQuery>,
     repo: web::Data<R>,
 ) -> Result<HttpResponse, Error> {
-    let alias = query.into_inner().alias;
+    let alias = match alias_query {
+        AliasQuery::Alias { alias } => alias,
+        AliasQuery::Proxy { proxy } => {
+            todo!("Proxy URL")
+        }
+    };
+
     let aliases = repo.aliases_from_alias(&alias).await?;

     Ok(HttpResponse::Ok().json(&serde_json::json!({
@@ -1208,10 +1328,16 @@ async fn aliases(

 #[tracing::instrument(name = "Fetching identifier", skip(repo))]
 async fn identifier(
-    query: web::Query<AliasQuery>,
+    web::Query(alias_query): web::Query<AliasQuery>,
     repo: web::Data<R>,
 ) -> Result<HttpResponse, Error> {
-    let alias = query.into_inner().alias;
+    let alias = match alias_query {
+        AliasQuery::Alias { alias } => alias,
+        AliasQuery::Proxy { proxy } => {
+            todo!("Proxy URL")
+        }
+    };
+
     let Some(identifier) = repo.identifier_from_alias::<S::Identifier>(&alias).await? else {
         // Invalid alias
         return Ok(HttpResponse::NotFound().json(serde_json::json!({
@@ -1304,9 +1430,17 @@ fn configure_endpoints<
                 .route(web::get().to(delete::)),
         )
         .service(
-            web::resource("/original/{filename}")
-                .route(web::get().to(serve::))
-                .route(web::head().to(serve_head::)),
+            web::scope("/original")
+                .service(
+                    web::resource("")
+                        .route(web::get().to(serve_query::))
+                        .route(web::head().to(serve_query_head::)),
+                )
+                .service(
+                    web::resource("/{filename}")
+                        .route(web::get().to(serve::))
+                        .route(web::head().to(serve_head::)),
+                ),
         )
         .service(
             web::resource("/process.{ext}")
@@ -1320,8 +1454,14 @@ fn configure_endpoints<
         .service(
             web::scope("/details")
                 .service(
-                    web::resource("/original/{filename}")
-                        .route(web::get().to(details::)),
+                    web::scope("/original")
+                        .service(
+                            web::resource("").route(web::get().to(details_query::)),
+                        )
+                        .service(
+                            web::resource("/{filename}")
+                                .route(web::get().to(details::)),
+                        ),
                 )
                 .service(
                     web::resource("/process.{ext}")
@@ -1757,3 +1897,60 @@ impl PictRsConfiguration {
         Ok(())
     }
 }
+
+#[cfg(test)]
+mod tests {
+    #[test]
+    fn source() {
+        let query = super::ProcessQuery {
+            source: super::ProcessSource::Source {
+                src: super::Serde::new(super::Alias::from_existing("example.png")),
+            },
+            operations: vec![("resize".into(), "200".into())],
+        };
+        let encoded = serde_urlencoded::to_string(&query).expect("Encoded");
+        let new_query: super::ProcessQuery = serde_urlencoded::from_str(&encoded).expect("Decoded");
+        // Don't compare entire query - "src" gets deserialized twice
+        assert_eq!(new_query.source, query.source);
+
+        assert!(new_query
+            .operations
+            .contains(&("resize".into(), "200".into())));
+    }
+
+    #[test]
+    fn alias() {
+        let query = super::ProcessQuery {
+            source: super::ProcessSource::Alias {
+                alias: super::Serde::new(super::Alias::from_existing("example.png")),
+            },
+            operations: vec![("resize".into(), "200".into())],
+        };
+        let encoded = serde_urlencoded::to_string(&query).expect("Encoded");
+        let new_query: super::ProcessQuery = serde_urlencoded::from_str(&encoded).expect("Decoded");
+        // Don't compare entire query - "alias" gets deserialized twice
+        assert_eq!(new_query.source, query.source);
+
+        assert!(new_query
+            .operations
+            .contains(&("resize".into(), "200".into())));
+    }
+
+    #[test]
+    fn url() {
+        let query = super::ProcessQuery {
+            source: super::ProcessSource::Proxy {
+                proxy: "http://example.com/image.png".parse().expect("valid url"),
+            },
+            operations: vec![("resize".into(), "200".into())],
+        };
+        let encoded = serde_urlencoded::to_string(&query).expect("Encoded");
+        let new_query: super::ProcessQuery = serde_urlencoded::from_str(&encoded).expect("Decoded");
+        // Don't compare entire query - "proxy" gets deserialized twice
+        assert_eq!(new_query.source, query.source);
+
+        assert!(new_query
+            .operations
+            .contains(&("resize".into(), "200".into())));
+    }
+}
diff --git a/src/serde_str.rs b/src/serde_str.rs
index be311e7..6b7962f 100644
--- a/src/serde_str.rs
+++ b/src/serde_str.rs
@@ -3,7 +3,7 @@ use std::{
     str::FromStr,
 };

-#[derive(Clone, Debug)]
+#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
 pub(crate) struct Serde<T> {
     inner: T,
 }
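
Usage sketch (illustration only, not part of the patch): the routing changes above let a client address an image by src, alias, or proxy through the new query-based endpoints, alongside the existing path-based routes. The snippet below only shows how such URLs could be built with the url crate; the host and the /image scope prefix are assumptions about the deployment, and proxy handling is still a todo!() in this patch.

    use url::Url;

    fn main() -> Result<(), url::ParseError> {
        // Assumed deployment values; adjust the host and scope prefix as needed.
        let base = Url::parse("http://localhost:8080")?;

        // Serve an existing upload through the new query form of /original.
        let mut by_alias = base.join("/image/original")?;
        by_alias.query_pairs_mut().append_pair("alias", "example.png");

        // The proxy form carries a remote URL (handling is still todo!() in this patch).
        let mut by_proxy = base.join("/image/original")?;
        by_proxy
            .query_pairs_mut()
            .append_pair("proxy", "http://example.com/image.png");

        // Process queries combine a source (src, alias, or proxy) with filter
        // operations, matching the flattened ProcessQuery shape defined above.
        let mut process = base.join("/image/process.png")?;
        process
            .query_pairs_mut()
            .append_pair("alias", "example.png")
            .append_pair("resize", "200");

        println!("{by_alias}\n{by_proxy}\n{process}");
        Ok(())
    }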