From a7467931877a7bd9d344827198f3b740e01cdee1 Mon Sep 17 00:00:00 2001 From: asonix Date: Wed, 24 Jun 2020 11:58:46 -0500 Subject: [PATCH] Enable mp4 with ffmpeg --- Cargo.lock | 78 ++++--- Cargo.toml | 9 +- README.md | 16 +- docker/dev/Dockerfile | 11 +- docker/prod/Dockerfile.amd64 | 16 ++ docker/prod/Dockerfile.arm32v7 | 16 ++ docker/prod/Dockerfile.arm64v8 | 16 ++ docker/prod/deploy.sh | 5 +- src/error.rs | 6 +- src/main.rs | 228 +++++++++++++++----- src/processor.rs | 148 ++++++++----- src/upload_manager.rs | 63 ++---- src/{validate.rs => validate/mod.rs} | 61 +++--- src/validate/transcode.rs | 302 +++++++++++++++++++++++++++ 14 files changed, 746 insertions(+), 229 deletions(-) rename src/{validate.rs => validate/mod.rs} (80%) create mode 100644 src/validate/transcode.rs diff --git a/Cargo.lock b/Cargo.lock index 200c146..4330308 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -469,6 +469,30 @@ dependencies = [ "which", ] +[[package]] +name = "bindgen" +version = "0.54.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "66c0bb6167449588ff70803f4127f0684f9063097eca5016f37eb52b92c2cf36" +dependencies = [ + "bitflags", + "cexpr", + "cfg-if", + "clang-sys", + "clap", + "env_logger", + "lazy_static", + "lazycell", + "log", + "peeking_take_while", + "proc-macro2", + "quote", + "regex", + "rustc-hash", + "shlex", + "which", +] + [[package]] name = "bitflags" version = "1.2.1" @@ -631,12 +655,6 @@ dependencies = [ "bitflags", ] -[[package]] -name = "color_quant" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0dbbb57365263e881e805dc77d94697c9118fd94d8da011240555aa7b23445bd" - [[package]] name = "copyless" version = "0.1.5" @@ -765,6 +783,31 @@ version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e88a8acf291dafb59c2d96e8f59828f3838bb1a70398823ade51a84de6a6deed" +[[package]] +name = "ffmpeg-next" +version = "4.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e37dadc7853e912107a8a671b7b22bff1309c09d0ba8851493028f06a36fc9f" +dependencies = [ + "bitflags", + "ffmpeg-sys-next", + "libc", +] + +[[package]] +name = "ffmpeg-sys-next" +version = "4.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "01bba495d04757f7a3e471b3e411759a8968571c4618235bc1c9e1099b4a84d1" +dependencies = [ + "bindgen 0.54.0", + "cc", + "libc", + "num_cpus", + "pkg-config", + "regex", +] + [[package]] name = "flate2" version = "1.0.14" @@ -966,16 +1009,6 @@ dependencies = [ "pkg-config", ] -[[package]] -name = "gif" -version = "0.10.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "471d90201b3b223f3451cd4ad53e34295f16a1df17b1edf3736d47761c3981af" -dependencies = [ - "color_quant", - "lzw", -] - [[package]] name = "gimli" version = "0.21.0" @@ -1206,18 +1239,12 @@ dependencies = [ "linked-hash-map", ] -[[package]] -name = "lzw" -version = "0.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7d947cbb889ed21c2a84be6ffbaebf5b4e0f4340638cba0444907e38b56be084" - [[package]] name = "magick_rust" version = "0.14.0" source = "git+https://git.asonix.dog/asonix/magick-rust#cd25f601629a2af0cffa5e4767720de69fd256dd" dependencies = [ - "bindgen", + "bindgen 0.53.3", "libc", "pkg-config", ] @@ -1438,7 +1465,7 @@ checksum = "d4fd5641d01c8f18a23da7b6fe29298ff4b55afcccdf78973b24cf3175fee32e" [[package]] name = "pict-rs" -version = "0.1.12" +version = "0.2.0-alpha.1" dependencies = [ 
"actix-form-data", "actix-fs", @@ -1447,8 +1474,9 @@ dependencies = [ "anyhow", "base64 0.12.2", "bytes", + "ffmpeg-next", + "ffmpeg-sys-next", "futures", - "gif", "magick_rust", "mime", "once_cell", diff --git a/Cargo.toml b/Cargo.toml index c0e5308..2928488 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "pict-rs" description = "A simple image hosting service" -version = "0.1.12" +version = "0.2.0-alpha.1" authors = ["asonix "] license = "AGPL-3.0" readme = "README.md" @@ -19,7 +19,7 @@ anyhow = "1.0" base64 = "0.12.1" bytes = "0.5" futures = "0.3.4" -gif = "0.10.3" +ffmpeg-sys-next = "4.3.0" magick_rust = { version = "0.14.0", git = "https://git.asonix.dog/asonix/magick-rust" } mime = "0.3.1" once_cell = "1.4.0" @@ -35,3 +35,8 @@ tracing = "0.1.15" tracing-futures = "0.2.4" tracing-subscriber = { version = "0.2.5", features = ["fmt", "tracing-log"] } uuid = { version = "0.8", features = ["v4"] } + +[dependencies.ffmpeg-next] +version = "4.3.0" +default-features = false +features = ["ffmpeg42", "codec", "filter", "device", "format", "resampling", "postprocessing", "software-resampling", "software-scaling"] diff --git a/README.md b/README.md index 27a180c..7e88280 100644 --- a/README.md +++ b/README.md @@ -93,19 +93,19 @@ pict-rs offers four endpoints: response format are the same as the `POST /image` endpoint. - `GET /image/download?url=...` Download an image from a remote server, returning the same JSON payload as the `POST` endpoint -- `GET /image/{file}` for getting a full-resolution image. `file` here is the `file` key from the +- `GET /image/original/{file}` for getting a full-resolution image. `file` here is the `file` key from the `/image` endpoint's JSON -- `GET /image/{transformations...}/{file}` get a file with transformations applied. +- `GET /image/process.{ext}?src={file}&...` get a file with transformations applied. 
existing transformations include - - `identity`: apply no changes - - `blur{float}`: apply a gaussian blur to the file - - `thumbnail{int}`: produce a thumbnail of the image fitting inside an `{int}` by `{int}` square + - `identity=true`: apply no changes + - `blur={float}`: apply a gaussian blur to the file + - `thumbnail={int}`: produce a thumbnail of the image fitting inside an `{int}` by `{int}` square + Supported `ext` file extensions include `png`, `jpg`, and `webp` An example of usage could be ``` - GET /image/thumbnail256/blur3.0/asdf.png + GET /image/process.jpg?src=asdf.png&thumbnail=256&blur=3.0 ``` - which would create a 256x256px - thumbnail and blur it + which would create a 256x256px JPEG thumbnail and blur it - `DELETE /image/delete/{delete_token}/{file}` or `GET /image/delete/{delete_token}/{file}` to delete a file, where `delete_token` and `file` are from the `/image` endpoint's JSON diff --git a/docker/dev/Dockerfile b/docker/dev/Dockerfile index 3ecf1e6..339b789 100644 --- a/docker/dev/Dockerfile +++ b/docker/dev/Dockerfile @@ -132,7 +132,16 @@ RUN \ libwebp6:$ARCH \ libwebpdemux2:$ARCH \ libwebpmux3:$ARCH \ - libgomp1:$ARCH + libgomp1:$ARCH \ + libavcodec-dev:$ARCH \ + libavfilter-dev:$ARCH \ + libavdevice-dev:$ARCH \ + libavformat-dev:$ARCH \ + libavresample-dev:$ARCH \ + libavutil-dev:$ARCH \ + libswscale-dev:$ARCH \ + libswresample-dev:$ARCH \ + ffmpeg ENV \ PATH=$PATH:/opt/build/.cargo/bin \ diff --git a/docker/prod/Dockerfile.amd64 b/docker/prod/Dockerfile.amd64 index cb6296e..ecdf81c 100644 --- a/docker/prod/Dockerfile.amd64 +++ b/docker/prod/Dockerfile.amd64 @@ -128,6 +128,14 @@ RUN \ libgexiv2-dev:$ARCH \ libxml2:$ARCH \ libltdl7:$ARCH \ + libavcodec-dev:$ARCH \ + libavfilter-dev:$ARCH \ + libavdevice-dev:$ARCH \ + libavformat-dev:$ARCH \ + libavresample-dev:$ARCH \ + libavutil-dev:$ARCH \ + libswscale-dev:$ARCH \ + libswresample-dev:$ARCH \ llvm-dev \ libclang-dev \ clang @@ -192,6 +200,14 @@ RUN \ libltdl7 \ libgomp1 \ libxml2 \ + libavcodec58 \ + libavfilter7 \ + libavdevice58 \ + libavformat58 \ + libavresample4 \ + libavutil56 \ + libswscale5 \ + libswresample3 \ tini COPY --from=pict-rs-builder /opt/build/pict-rs/target/$TARGET/$BUILD_MODE/pict-rs /usr/local/bin/pict-rs diff --git a/docker/prod/Dockerfile.arm32v7 b/docker/prod/Dockerfile.arm32v7 index 5dc78a5..7c2cd67 100644 --- a/docker/prod/Dockerfile.arm32v7 +++ b/docker/prod/Dockerfile.arm32v7 @@ -128,6 +128,14 @@ RUN \ libgexiv2-dev:$ARCH \ libxml2:$ARCH \ libltdl7:$ARCH \ + libavcodec-dev:$ARCH \ + libavfilter-dev:$ARCH \ + libavdevice-dev:$ARCH \ + libavformat-dev:$ARCH \ + libavresample-dev:$ARCH \ + libavutil-dev:$ARCH \ + libswscale-dev:$ARCH \ + libswresample-dev:$ARCH \ llvm-dev \ libclang-dev \ clang && \ @@ -193,6 +201,14 @@ RUN \ libltdl7 \ libgomp1 \ libxml2 \ + libavcodec58 \ + libavfilter7 \ + libavdevice58 \ + libavformat58 \ + libavresample4 \ + libavutil56 \ + libswscale5 \ + libswresample3 \ tini COPY --from=pict-rs-builder /opt/build/pict-rs/target/$TARGET/$BUILD_MODE/pict-rs /usr/local/bin/pict-rs diff --git a/docker/prod/Dockerfile.arm64v8 b/docker/prod/Dockerfile.arm64v8 index 5815aa0..4c06232 100644 --- a/docker/prod/Dockerfile.arm64v8 +++ b/docker/prod/Dockerfile.arm64v8 @@ -128,6 +128,14 @@ RUN \ libgexiv2-dev:$ARCH \ libxml2:$ARCH \ libltdl7:$ARCH \ + libavcodec-dev:$ARCH \ + libavfilter-dev:$ARCH \ + libavdevice-dev:$ARCH \ + libavformat-dev:$ARCH \ + libavresample-dev:$ARCH \ + libavutil-dev:$ARCH \ + libswscale-dev:$ARCH \ + libswresample-dev:$ARCH \ llvm-dev \ 
libclang-dev \ clang && \ @@ -193,6 +201,14 @@ RUN \ libltdl7 \ libgomp1 \ libxml2 \ + libavcodec58 \ + libavfilter7 \ + libavdevice58 \ + libavformat58 \ + libavresample4 \ + libavutil56 \ + libswscale5 \ + libswresample3 \ tini COPY --from=pict-rs-builder /opt/build/pict-rs/target/$TARGET/$BUILD_MODE/pict-rs /usr/local/bin/pict-rs diff --git a/docker/prod/deploy.sh b/docker/prod/deploy.sh index 0106091..c98cb83 100755 --- a/docker/prod/deploy.sh +++ b/docker/prod/deploy.sh @@ -16,6 +16,7 @@ function print_help() { echo "" echo "Args:" echo " tag: The git tag to be applied to the repository and docker build" + echo " branch: The git branch to use for tagging and publishing" } function build_image() { @@ -30,8 +31,10 @@ function build_image() { # Creating the new tag new_tag="$1" +branch="$2" require "$new_tag" "tag" +require "$branch" "branch" if ! docker run --rm -it arm64v8/alpine:3.11 /bin/sh -c 'echo "docker is configured correctly"' then @@ -41,7 +44,7 @@ fi set -xe -git checkout master +git checkout $branch # Changing the docker-compose prod sed -i "s/asonix\/pictrs:.*/asonix\/pictrs:$new_tag/" docker-compose.yml diff --git a/src/error.rs b/src/error.rs index 401e4a9..cf80c35 100644 --- a/src/error.rs +++ b/src/error.rs @@ -24,9 +24,6 @@ pub(crate) enum UploadError { #[error("No files present in upload")] NoFiles, - #[error("Uploaded image could not be served, extension is missing")] - MissingExtension, - #[error("Requested a file that doesn't exist")] MissingAlias, @@ -60,6 +57,9 @@ pub(crate) enum UploadError { #[error("Error validating Gif file, {0}")] Gif(#[from] GifError), + #[error("Error transcoding, {0}")] + Transcode(crate::validate::transcode::Error), + #[error("Tried to create file, but file already exists")] FileExists, diff --git a/src/main.rs b/src/main.rs index a132dd3..44de970 100644 --- a/src/main.rs +++ b/src/main.rs @@ -21,8 +21,12 @@ mod upload_manager; mod validate; use self::{ - config::Config, error::UploadError, middleware::Tracing, processor::process_image, - upload_manager::UploadManager, validate::image_webp, + config::Config, + error::UploadError, + middleware::Tracing, + processor::process_image, + upload_manager::UploadManager, + validate::{image_webp, video_mp4}, }; const MEGABYTES: usize = 1024 * 1024; @@ -31,6 +35,29 @@ const HOURS: u32 = 60 * 60; static CONFIG: Lazy = Lazy::new(|| Config::from_args()); static MAGICK_INIT: Once = Once::new(); +// try moving a file +#[instrument] +async fn safe_move_file(from: PathBuf, to: PathBuf) -> Result<(), UploadError> { + if let Some(path) = to.parent() { + debug!("Creating directory {:?}", path); + actix_fs::create_dir_all(path.to_owned()).await?; + } + + debug!("Checking if {:?} already exists", to); + if let Err(e) = actix_fs::metadata(to.clone()).await { + if e.kind() != Some(std::io::ErrorKind::NotFound) { + return Err(e.into()); + } + } else { + return Err(UploadError::FileExists); + } + + debug!("Moving {:?} to {:?}", from, to); + actix_fs::copy(from.clone(), to).await?; + actix_fs::remove_file(from).await?; + Ok(()) +} + // Try writing to a file #[instrument(skip(bytes))] async fn safe_save_file(path: PathBuf, bytes: bytes::Bytes) -> Result<(), UploadError> { @@ -67,24 +94,57 @@ async fn safe_save_file(path: PathBuf, bytes: bytes::Bytes) -> Result<(), Upload Ok(()) } -fn to_ext(mime: mime::Mime) -> &'static str { +pub(crate) fn tmp_file() -> PathBuf { + use rand::distributions::{Alphanumeric, Distribution}; + let limit: usize = 10; + let rng = rand::thread_rng(); + + let s: String = 
Alphanumeric.sample_iter(rng).take(limit).collect(); + + let name = format!("{}.tmp", s); + + let mut path = std::env::temp_dir(); + path.push("pict-rs"); + path.push(&name); + + path +} + +fn to_ext(mime: mime::Mime) -> Result<&'static str, UploadError> { if mime == mime::IMAGE_PNG { - ".png" + Ok(".png") } else if mime == mime::IMAGE_JPEG { - ".jpg" - } else if mime == mime::IMAGE_GIF { - ".gif" + Ok(".jpg") + } else if mime == video_mp4() { + Ok(".mp4") + } else if mime == image_webp() { + Ok(".webp") } else { - ".webp" + Err(UploadError::UnsupportedFormat) } } -fn from_ext(ext: std::ffi::OsString) -> mime::Mime { - match ext.to_str() { - Some("png") => mime::IMAGE_PNG, - Some("jpg") => mime::IMAGE_JPEG, - Some("gif") => mime::IMAGE_GIF, - _ => image_webp(), +fn from_name(name: &str) -> Result { + match name + .rsplit('.') + .next() + .ok_or(UploadError::UnsupportedFormat)? + { + "jpg" => Ok(mime::IMAGE_JPEG), + "webp" => Ok(image_webp()), + "png" => Ok(mime::IMAGE_PNG), + "mp4" => Ok(video_mp4()), + "gif" => Ok(mime::IMAGE_GIF), + _ => Err(UploadError::UnsupportedFormat), + } +} + +fn from_ext(ext: &str) -> Result { + match ext { + "jpg" => Ok(mime::IMAGE_JPEG), + "png" => Ok(mime::IMAGE_PNG), + "webp" => Ok(image_webp()), + _ => Err(UploadError::UnsupportedFormat), } } @@ -123,6 +183,11 @@ async fn upload( }))) } +#[derive(Debug, serde::Deserialize)] +struct UrlQuery { + url: String, +} + /// download an image from a URL #[instrument(skip(client, manager))] async fn download( @@ -165,33 +230,71 @@ async fn delete( Ok(HttpResponse::NoContent().finish()) } -/// Serve files +type ProcessQuery = Vec<(String, String)>; + +/// Process files #[instrument(skip(manager, whitelist))] -async fn serve( - segments: web::Path, +async fn process( + query: web::Query, + ext: web::Path, manager: web::Data, whitelist: web::Data>>, ) -> Result { - let mut segments: Vec = segments - .into_inner() - .split('/') - .map(|s| s.to_string()) - .collect(); - let alias = segments.pop().ok_or(UploadError::MissingFilename)?; + let (alias, operations) = + query + .into_inner() + .into_iter() + .fold((String::new(), Vec::new()), |(s, mut acc), (k, v)| { + if k == "src" { + (v, acc) + } else { + acc.push((k, v)); + (s, acc) + } + }); - debug!("Building chain"); - let chain = self::processor::build_chain(&segments, whitelist.as_ref().as_ref()); - debug!("Chain built"); + if alias == "" { + return Err(UploadError::MissingFilename); + } let name = manager.from_alias(alias).await?; - let base = manager.image_dir(); - let path = self::processor::build_path(base, &chain, name.clone()); - let ext = path - .extension() - .ok_or(UploadError::MissingExtension)? - .to_owned(); - let ext = from_ext(ext); + let operations = if let Some(whitelist) = whitelist.as_ref() { + operations + .into_iter() + .filter(|(k, _)| whitelist.contains(&k.to_lowercase())) + .collect() + } else { + operations + }; + + let chain = self::processor::build_chain(&operations); + + let ext = ext.into_inner(); + let content_type = from_ext(&ext)?; + let processed_name = format!("{}.{}", name, ext); + let base = manager.image_dir(); + let mut path = self::processor::build_path(base, &chain, processed_name); + + if let Some((updated_path, exists)) = self::processor::prepare_image(path.clone()).await? 
{ + path = updated_path.clone(); + + if exists.is_new() { + // Save the transcoded file in another task + debug!("Spawning storage task"); + let span = Span::current(); + let manager2 = manager.clone(); + let name = name.clone(); + actix_rt::spawn(async move { + let entered = span.enter(); + if let Err(e) = manager2.store_variant(updated_path, name).await { + error!("Error storing variant, {}", e); + return; + } + drop(entered); + }); + } + } // If the thumbnail doesn't exist, we need to create it if let Err(e) = actix_fs::metadata(path.clone()).await { @@ -203,15 +306,30 @@ async fn serve( let mut original_path = manager.image_dir(); original_path.push(name.clone()); - // apply chain to the provided image - let img_bytes = match process_image(original_path.clone(), chain).await? { - Some(bytes) => bytes, - None => { - let stream = actix_fs::read_to_stream(original_path).await?; + if let Some((updated_path, exists)) = + self::processor::prepare_image(original_path.clone()).await? + { + original_path = updated_path.clone(); - return Ok(srv_response(stream, ext)); + if exists.is_new() { + // Save the transcoded file in another task + debug!("Spawning storage task"); + let span = Span::current(); + let manager2 = manager.clone(); + let name = name.clone(); + actix_rt::spawn(async move { + let entered = span.enter(); + if let Err(e) = manager2.store_variant(updated_path, name).await { + error!("Error storing variant, {}", e); + return; + } + drop(entered); + }); } - }; + } + + // apply chain to the provided image + let img_bytes = process_image(original_path.clone(), chain).await?; let path2 = path.clone(); let img_bytes2 = img_bytes.clone(); @@ -221,7 +339,7 @@ async fn serve( let span = Span::current(); actix_rt::spawn(async move { let entered = span.enter(); - if let Err(e) = manager.store_variant(path2.clone()).await { + if let Err(e) = manager.store_variant(path2.clone(), name).await { error!("Error storing variant, {}", e); return; } @@ -236,13 +354,29 @@ async fn serve( Box::pin(futures::stream::once(async { Ok(img_bytes) as Result<_, UploadError> })), - ext, + content_type, )); } let stream = actix_fs::read_to_stream(path).await?; - Ok(srv_response(stream, ext)) + Ok(srv_response(stream, content_type)) +} + +/// Serve files +#[instrument(skip(manager))] +async fn serve( + alias: web::Path, + manager: web::Data, +) -> Result { + let name = manager.from_alias(alias.into_inner()).await?; + let content_type = from_name(&name)?; + let mut path = manager.image_dir(); + path.push(name); + + let stream = actix_fs::read_to_stream(path).await?; + + Ok(srv_response(stream, content_type)) } // A helper method to produce responses with proper cache headers @@ -261,11 +395,6 @@ where .streaming(stream.err_into()) } -#[derive(Debug, serde::Deserialize)] -struct UrlQuery { - url: String, -} - #[actix_rt::main] async fn main() -> Result<(), anyhow::Error> { MAGICK_INIT.call_once(|| { @@ -368,7 +497,8 @@ async fn main() -> Result<(), anyhow::Error> { .route(web::delete().to(delete)) .route(web::get().to(delete)), ) - .service(web::resource("/{tail:.*}").route(web::get().to(serve))), + .service(web::resource("/original/{filename}").route(web::get().to(serve))) + .service(web::resource("/process.{ext}").route(web::get().to(process))), ) .service( web::resource("/import") diff --git a/src/processor.rs b/src/processor.rs index 79d1fcc..8c8fd25 100644 --- a/src/processor.rs +++ b/src/processor.rs @@ -5,7 +5,7 @@ use crate::{ use actix_web::web; use bytes::Bytes; use magick_rust::MagickWand; -use 
std::{collections::HashSet, path::PathBuf}; +use std::path::PathBuf; use tracing::{debug, instrument, Span}; pub(crate) trait Processor { @@ -17,21 +17,12 @@ pub(crate) trait Processor { where Self: Sized; - fn parse(s: &str) -> Option> + fn parse(k: &str, v: &str) -> Option> where Self: Sized; fn path(&self, path: PathBuf) -> PathBuf; - fn process(&self, wand: &mut MagickWand) -> Result; - - fn is_whitelisted(whitelist: Option<&HashSet>) -> bool - where - Self: Sized, - { - whitelist - .map(|wl| wl.contains(Self::name())) - .unwrap_or(true) - } + fn process(&self, wand: &mut MagickWand) -> Result<(), UploadError>; } pub(crate) struct Identity; @@ -51,7 +42,7 @@ impl Processor for Identity { s == Self::name() } - fn parse(_: &str) -> Option> + fn parse(_: &str, _: &str) -> Option> where Self: Sized, { @@ -63,8 +54,8 @@ impl Processor for Identity { path } - fn process(&self, _: &mut MagickWand) -> Result { - Ok(false) + fn process(&self, _: &mut MagickWand) -> Result<(), UploadError> { + Ok(()) } } @@ -82,14 +73,14 @@ impl Processor for Thumbnail { where Self: Sized, { - s.starts_with(Self::name()) + s == Self::name() } - fn parse(s: &str) -> Option> + fn parse(_: &str, v: &str) -> Option> where Self: Sized, { - let size = s.trim_start_matches(Self::name()).parse().ok()?; + let size = v.parse().ok()?; Some(Box::new(Thumbnail(size))) } @@ -99,7 +90,7 @@ impl Processor for Thumbnail { path } - fn process(&self, wand: &mut MagickWand) -> Result { + fn process(&self, wand: &mut MagickWand) -> Result<(), UploadError> { debug!("Thumbnail"); let width = wand.get_image_width(); let height = wand.get_image_height(); @@ -115,12 +106,9 @@ impl Processor for Thumbnail { }; wand.op(|w| w.sample_image(new_width as usize, new_height as usize))?; - Ok(true) - } else if wand.op(|w| w.get_image_format())? == "GIF" { - Ok(true) - } else { - Ok(false) } + + Ok(()) } } @@ -135,11 +123,11 @@ impl Processor for Blur { } fn is_processor(s: &str) -> bool { - s.starts_with(Self::name()) + s == Self::name() } - fn parse(s: &str) -> Option> { - let sigma = s.trim_start_matches(Self::name()).parse().ok()?; + fn parse(_: &str, v: &str) -> Option> { + let sigma = v.parse().ok()?; Some(Box::new(Blur(sigma))) } @@ -149,21 +137,20 @@ impl Processor for Blur { path } - fn process(&self, wand: &mut MagickWand) -> Result { + fn process(&self, wand: &mut MagickWand) -> Result<(), UploadError> { debug!("Blur"); if self.0 > 0.0 { wand.op(|w| w.gaussian_blur_image(0.0, self.0))?; - Ok(true) - } else { - Ok(false) } + + Ok(()) } } macro_rules! 
parse { - ($x:ident, $y:expr, $z:expr) => {{ - if $x::is_processor($y) && $x::is_whitelisted($z) { - return $x::parse($y); + ($x:ident, $k:expr, $v:expr) => {{ + if $x::is_processor($k) { + return $x::parse($k, $v); } }}; } @@ -181,15 +168,18 @@ impl std::fmt::Debug for ProcessChain { } #[instrument] -pub(crate) fn build_chain(args: &[String], whitelist: Option<&HashSet>) -> ProcessChain { +pub(crate) fn build_chain(args: &[(String, String)]) -> ProcessChain { let inner = args .into_iter() - .filter_map(|arg| { - parse!(Identity, arg.as_str(), whitelist); - parse!(Thumbnail, arg.as_str(), whitelist); - parse!(Blur, arg.as_str(), whitelist); + .filter_map(|(k, v)| { + let k = k.as_str(); + let v = v.as_str(); - debug!("Skipping {}, invalid or whitelisted", arg); + parse!(Identity, k, v); + parse!(Thumbnail, k, v); + parse!(Blur, k, v); + + debug!("Skipping {}: {}, invalid", k, v); None }) @@ -208,15 +198,72 @@ pub(crate) fn build_path(base: PathBuf, chain: &ProcessChain, filename: String) path } +fn is_motion(s: &str) -> bool { + s.ends_with(".gif") || s.ends_with(".mp4") +} + +pub(crate) enum Exists { + Exists, + New, +} + +impl Exists { + pub(crate) fn is_new(&self) -> bool { + match self { + Exists::New => true, + _ => false, + } + } +} + +pub(crate) async fn prepare_image( + original_file: PathBuf, +) -> Result, UploadError> { + let original_path_str = ptos(&original_file)?; + let jpg_path = format!("{}.jpg", original_path_str); + let jpg_path = PathBuf::from(jpg_path); + + if actix_fs::metadata(jpg_path.clone()).await.is_ok() { + return Ok(Some((jpg_path, Exists::Exists))); + } + + if is_motion(&original_path_str) { + let orig_path = original_path_str.clone(); + + let tmpfile = crate::tmp_file(); + let tmpfile2 = tmpfile.clone(); + + let res = web::block(move || { + use crate::validate::transcode::{transcode, Target}; + + transcode(orig_path, tmpfile, Target::Jpeg).map_err(UploadError::Transcode) + }) + .await; + + if let Err(e) = res { + actix_fs::remove_file(tmpfile2).await?; + return Err(e.into()); + } + + return match crate::safe_move_file(tmpfile2, jpg_path.clone()).await { + Err(UploadError::FileExists) => Ok(Some((jpg_path, Exists::Exists))), + Err(e) => Err(e), + _ => Ok(Some((jpg_path, Exists::New))), + }; + } + + Ok(None) +} + #[instrument] pub(crate) async fn process_image( original_file: PathBuf, chain: ProcessChain, -) -> Result, UploadError> { +) -> Result { let original_path_str = ptos(&original_file)?; - let span = Span::current(); - let opt = web::block(move || { + let span = Span::current(); + let bytes = web::block(move || { let entered = span.enter(); let mut wand = MagickWand::new(); @@ -226,23 +273,16 @@ pub(crate) async fn process_image( let format = wand.op(|w| w.get_image_format())?; debug!("Processing image"); - let mut changed = false; - for processor in chain.inner.into_iter() { debug!("Step"); - changed |= processor.process(&mut wand)?; - debug!("Step complete"); - } - - if changed { - let vec = wand.op(|w| w.write_image_blob(&format))?; - return Ok(Some(Bytes::from(vec))); + processor.process(&mut wand)?; } + let vec = wand.op(|w| w.write_image_blob(&format))?; drop(entered); - Ok(None) as Result, UploadError> + return Ok(Bytes::from(vec)) as Result; }) .await?; - Ok(opt) + Ok(bytes) } diff --git a/src/upload_manager.rs b/src/upload_manager.rs index 35f53b2..4429bd7 100644 --- a/src/upload_manager.rs +++ b/src/upload_manager.rs @@ -108,12 +108,11 @@ impl UploadManager { /// Store the path to a generated image variant so we can easily clean it up later 
#[instrument(skip(self))] - pub(crate) async fn store_variant(&self, path: PathBuf) -> Result<(), UploadError> { - let filename = path - .file_name() - .and_then(|f| f.to_str()) - .map(|s| s.to_string()) - .ok_or(UploadError::Path)?; + pub(crate) async fn store_variant( + &self, + path: PathBuf, + filename: String, + ) -> Result<(), UploadError> { let path_string = path.to_str().ok_or(UploadError::Path)?.to_string(); let fname_tree = self.inner.filename_tree.clone(); @@ -276,7 +275,7 @@ impl UploadManager { { // -- READ IN BYTES FROM CLIENT -- debug!("Reading stream"); - let tmpfile = tmp_file(); + let tmpfile = crate::tmp_file(); safe_save_stream(tmpfile.clone(), stream).await?; let content_type = if validate { @@ -312,7 +311,7 @@ impl UploadManager { { // -- READ IN BYTES FROM CLIENT -- debug!("Reading stream"); - let tmpfile = tmp_file(); + let tmpfile = crate::tmp_file(); safe_save_stream(tmpfile.clone(), stream).await?; // -- VALIDATE IMAGE -- @@ -426,7 +425,7 @@ impl UploadManager { let mut real_path = self.image_dir(); real_path.push(name); - safe_move_file(tmpfile, real_path).await?; + crate::safe_move_file(tmpfile, real_path).await?; Ok(()) } @@ -505,7 +504,7 @@ impl UploadManager { let mut path = image_dir.clone(); let s: String = Alphanumeric.sample_iter(rng).take(limit).collect(); - let filename = file_name(s, content_type.clone()); + let filename = file_name(s, content_type.clone())?; path.push(filename.clone()); @@ -596,7 +595,7 @@ impl UploadManager { loop { debug!("Alias gen loop"); let s: String = Alphanumeric.sample_iter(rng).take(limit).collect(); - let alias = file_name(s, content_type.clone()); + let alias = file_name(s, content_type.clone())?; let res = self.save_alias(hash, &alias).await?; @@ -635,44 +634,6 @@ impl UploadManager { } } -pub(crate) fn tmp_file() -> PathBuf { - use rand::distributions::{Alphanumeric, Distribution}; - let limit: usize = 10; - let rng = rand::thread_rng(); - - let s: String = Alphanumeric.sample_iter(rng).take(limit).collect(); - - let name = format!("{}.tmp", s); - - let mut path = std::env::temp_dir(); - path.push("pict-rs"); - path.push(&name); - - path -} - -#[instrument] -async fn safe_move_file(from: PathBuf, to: PathBuf) -> Result<(), UploadError> { - if let Some(path) = to.parent() { - debug!("Creating directory {:?}", path); - actix_fs::create_dir_all(path.to_owned()).await?; - } - - debug!("Checking if {:?} already exists", to); - if let Err(e) = actix_fs::metadata(to.clone()).await { - if e.kind() != Some(std::io::ErrorKind::NotFound) { - return Err(e.into()); - } - } else { - return Err(UploadError::FileExists); - } - - debug!("Moving {:?} to {:?}", from, to); - actix_fs::copy(from.clone(), to).await?; - actix_fs::remove_file(from).await?; - Ok(()) -} - #[instrument(skip(stream))] async fn safe_save_stream(to: PathBuf, stream: UploadStream) -> Result<(), UploadError> where @@ -710,8 +671,8 @@ fn trans_err(e: UploadError) -> sled::transaction::ConflictableTransactionError< sled::transaction::ConflictableTransactionError::Abort(e) } -fn file_name(name: String, content_type: mime::Mime) -> String { - format!("{}{}", name, to_ext(content_type)) +fn file_name(name: String, content_type: mime::Mime) -> Result { + Ok(format!("{}{}", name, to_ext(content_type)?)) } fn alias_key(hash: &[u8], id: &str) -> Vec { diff --git a/src/validate.rs b/src/validate/mod.rs similarity index 80% rename from src/validate.rs rename to src/validate/mod.rs index 7cbdafd..af7bd9e 100644 --- a/src/validate.rs +++ b/src/validate/mod.rs @@ -1,13 +1,13 @@ 
-use crate::{config::Format, error::UploadError, upload_manager::tmp_file}; +use crate::{config::Format, error::UploadError, tmp_file}; use actix_web::web; use magick_rust::MagickWand; use rexiv2::{MediaType, Metadata}; -use std::{ - fs::File, - io::{BufReader, BufWriter, Write}, - path::PathBuf, -}; -use tracing::{debug, error, instrument, trace, warn, Span}; +use std::path::PathBuf; +use tracing::{debug, error, instrument, warn, Span}; + +pub(crate) mod transcode; + +use self::transcode::{Error as TranscodeError, Target}; pub(crate) trait Op { fn op(&self, f: F) -> Result @@ -57,10 +57,10 @@ impl Op for MagickWand { #[derive(Debug, thiserror::Error)] pub(crate) enum GifError { - #[error("Error decoding gif")] - Decode(#[from] gif::DecodingError), + #[error("{0}")] + Decode(#[from] TranscodeError), - #[error("Error reading bytes")] + #[error("{0}")] Io(#[from] std::io::Error), } @@ -68,6 +68,10 @@ pub(crate) fn image_webp() -> mime::Mime { "image/webp".parse().unwrap() } +pub(crate) fn video_mp4() -> mime::Mime { + "video/mp4".parse().unwrap() +} + pub(crate) fn ptos(p: &PathBuf) -> Result { Ok(p.to_str().ok_or(UploadError::Path)?.to_owned()) } @@ -103,7 +107,7 @@ pub(crate) async fn validate_image( let newfile = tmp_file(); validate_gif(&tmpfile, &newfile)?; - mime::IMAGE_GIF + video_mp4() } (Some(Format::Jpeg), MediaType::Jpeg) | (None, MediaType::Jpeg) => { validate_format(&tmpfile_str, "JPEG")?; @@ -137,7 +141,10 @@ pub(crate) async fn validate_image( return Err(UploadError::UnsupportedFormat); } - wand.op(|w| w.write_image(&newfile_str))?; + if let Err(e) = wand.op(|w| w.write_image(&newfile_str)) { + std::fs::remove_file(&newfile_str)?; + return Err(e.into()); + } } std::fs::rename(&newfile, &tmpfile)?; @@ -156,7 +163,10 @@ pub(crate) async fn validate_image( wand.op_mut(|w| w.set_image_format(format.to_magick_format()))?; debug!("writing: {}", newfile_str); - wand.op(|w| w.write_image(&newfile_str))?; + if let Err(e) = wand.op(|w| w.write_image(&newfile_str)) { + std::fs::remove_file(&newfile_str)?; + return Err(e.into()); + } } std::fs::rename(&newfile, &tmpfile)?; @@ -180,31 +190,12 @@ pub(crate) async fn validate_image( #[instrument] fn validate_gif(from: &PathBuf, to: &PathBuf) -> Result<(), GifError> { debug!("Transmuting GIF"); - use gif::{Parameter, SetParameter}; - let mut decoder = gif::Decoder::new(BufReader::new(File::open(from)?)); - - decoder.set(gif::ColorOutput::Indexed); - - let mut reader = decoder.read_info()?; - - let width = reader.width(); - let height = reader.height(); - let global_palette = reader.global_palette().unwrap_or(&[]); - - let mut writer = BufWriter::new(File::create(to)?); - let mut encoder = gif::Encoder::new(&mut writer, width, height, global_palette)?; - - gif::Repeat::Infinite.set_param(&mut encoder)?; - - while let Some(frame) = reader.read_next_frame()? 
{
-        trace!("Writing frame");
-        encoder.write_frame(frame)?;
+    if let Err(e) = self::transcode::transcode(from, to, Target::Mp4) {
+        std::fs::remove_file(to)?;
+        return Err(e.into());
     }
 
-    drop(encoder);
-    writer.flush()?;
-
-    std::fs::rename(to, from)?;
     Ok(())
 }
diff --git a/src/validate/transcode.rs b/src/validate/transcode.rs
new file mode 100644
index 0000000..b04c4e3
--- /dev/null
+++ b/src/validate/transcode.rs
@@ -0,0 +1,302 @@
+use ffmpeg_next::{
+    self, codec, filter, format, frame, media,
+    util::{format::pixel::Pixel, rational::Rational},
+};
+use std::path::Path;
+
+#[derive(Debug, thiserror::Error)]
+pub(crate) enum Error {
+    #[error("No video stream present")]
+    MissingVideo,
+
+    #[error("Input format is not supported")]
+    UnsupportedFormat,
+
+    #[error("No frame-rate present in input video")]
+    FrameRate,
+
+    #[error("Filter {0} should have been set up by now")]
+    MissingFilter(&'static str),
+
+    #[error("{0}")]
+    Transcode(#[from] ffmpeg_next::Error),
+}
+
+#[derive(Clone, Copy, Debug)]
+pub(crate) enum Target {
+    Mp4,
+    Jpeg,
+    #[allow(dead_code)]
+    Png,
+}
+
+impl Target {
+    fn name(&self) -> &'static str {
+        match self {
+            Target::Mp4 => "mp4",
+            Target::Jpeg => "image2",
+            Target::Png => "image2",
+        }
+    }
+
+    fn codec(&self) -> codec::id::Id {
+        match self {
+            Target::Mp4 => codec::id::Id::H264,
+            Target::Jpeg => codec::id::Id::MJPEG,
+            Target::Png => codec::id::Id::PNG,
+        }
+    }
+
+    fn frames(&self) -> Option<usize> {
+        match self {
+            Target::Mp4 => None,
+            Target::Jpeg => Some(1),
+            Target::Png => Some(1),
+        }
+    }
+}
+
+fn pixel_value(pixel: Pixel) -> i32 {
+    let av_px_fmt: ffmpeg_sys_next::AVPixelFormat = pixel.into();
+    unsafe { std::mem::transmute::<_, i32>(av_px_fmt) }
+}
+
+fn filter(
+    decoder: &codec::decoder::Video,
+    encoder: &codec::encoder::Video,
+) -> Result<filter::Graph, Error> {
+    let mut filter = filter::Graph::new();
+
+    let aspect = Rational::new(decoder.width() as i32, decoder.height() as i32).reduce();
+
+    let av_px_fmt = pixel_value(decoder.format());
+
+    let args = format!(
+        "video_size={width}x{height}:pix_fmt={pix_fmt}:time_base={time_base_num}/{time_base_den}:pixel_aspect={pix_aspect_num}/{pix_aspect_den}",
+        width=decoder.width(),
+        height=decoder.height(),
+        pix_fmt=av_px_fmt,
+        time_base_num=decoder.time_base().numerator(),
+        time_base_den=decoder.time_base().denominator(),
+        pix_aspect_num=aspect.numerator(),
+        pix_aspect_den=aspect.denominator(),
+    );
+
+    let buffer = filter::find("buffer").ok_or(Error::MissingFilter("buffer"))?;
+    filter.add(&buffer, "in", &args)?;
+
+    let buffersink = filter::find("buffersink").ok_or(Error::MissingFilter("buffersink"))?;
+    let mut out = filter.add(&buffersink, "out", "")?;
+    out.set_pixel_format(encoder.format());
+
+    filter.output("in", 0)?.input("out", 0)?.parse("null")?;
+    filter.validate()?;
+
+    println!("{}", filter.dump());
+
+    if let Some(codec) = encoder.codec() {
+        if !codec
+            .capabilities()
+            .contains(codec::capabilities::Capabilities::VARIABLE_FRAME_SIZE)
+        {
+            filter
+                .get("out")
+                .ok_or(Error::MissingFilter("out"))?
+                .sink()
+                .set_frame_size(encoder.frame_size());
+        }
+    }
+
+    Ok(filter)
+}
+
+struct Transcoder {
+    stream: usize,
+    filter: filter::Graph,
+    decoder: codec::decoder::Video,
+    encoder: codec::encoder::Video,
+}
+
+impl Transcoder {
+    fn decode(
+        &mut self,
+        packet: &ffmpeg_next::Packet,
+        decoded: &mut frame::Video,
+    ) -> Result<bool, ffmpeg_next::Error> {
+        self.decoder.decode(packet, decoded)
+    }
+
+    fn add_frame(&mut self, decoded: &frame::Video) -> Result<(), Error> {
+        self.filter
+            .get("in")
+            .ok_or(Error::MissingFilter("out"))?
+            .source()
+            .add(decoded)?;
+        Ok(())
+    }
+
+    fn encode(
+        &mut self,
+        decoded: &mut frame::Video,
+        encoded: &mut ffmpeg_next::Packet,
+        octx: &mut format::context::Output,
+        in_time_base: Rational,
+        out_time_base: Rational,
+    ) -> Result<(), Error> {
+        while let Ok(()) = self
+            .filter
+            .get("out")
+            .ok_or(Error::MissingFilter("out"))?
+            .sink()
+            .frame(decoded)
+        {
+            if let Ok(true) = self.encoder.encode(decoded, encoded) {
+                encoded.set_stream(0);
+                encoded.rescale_ts(in_time_base, out_time_base);
+                encoded.write_interleaved(octx)?;
+            }
+        }
+
+        Ok(())
+    }
+
+    fn flush(
+        &mut self,
+        decoded: &mut frame::Video,
+        encoded: &mut ffmpeg_next::Packet,
+        octx: &mut format::context::Output,
+        in_time_base: Rational,
+        out_time_base: Rational,
+    ) -> Result<(), Error> {
+        self.filter
+            .get("in")
+            .ok_or(Error::MissingFilter("in"))?
+            .source()
+            .flush()?;
+
+        self.encode(decoded, encoded, octx, in_time_base, out_time_base)?;
+
+        while let Ok(true) = self.encoder.flush(encoded) {
+            encoded.set_stream(0);
+            encoded.rescale_ts(in_time_base, out_time_base);
+            encoded.write_interleaved(octx)?;
+        }
+        Ok(())
+    }
+}
+
+fn transcoder(
+    ictx: &mut format::context::Input,
+    octx: &mut format::context::Output,
+    target: Target,
+) -> Result<Transcoder, Error> {
+    let input = ictx
+        .streams()
+        .best(media::Type::Video)
+        .ok_or(Error::MissingVideo)?;
+    let mut decoder = input.codec().decoder().video()?;
+    let codec_id = target.codec();
+    let codec = ffmpeg_next::encoder::find(codec_id)
+        .ok_or(Error::UnsupportedFormat)?
+        .video()?;
+    let global = octx
+        .format()
+        .flags()
+        .contains(format::flag::Flags::GLOBAL_HEADER);
+
+    decoder.set_parameters(input.parameters())?;
+
+    let mut output = octx.add_stream(codec)?;
+    let mut encoder = output.codec().encoder().video()?;
+
+    if global {
+        encoder.set_flags(codec::flag::Flags::GLOBAL_HEADER);
+    }
+
+    encoder.set_format(
+        codec
+            .formats()
+            .ok_or(Error::UnsupportedFormat)?
+            .next()
+            .ok_or(Error::UnsupportedFormat)?,
+    );
+    encoder.set_bit_rate(decoder.bit_rate());
+    encoder.set_max_bit_rate(decoder.max_bit_rate());
+
+    encoder.set_width(decoder.width());
+    encoder.set_height(decoder.height());
+    encoder.set_bit_rate(decoder.bit_rate());
+    encoder.set_max_bit_rate(decoder.max_bit_rate());
+    encoder.set_time_base(decoder.frame_rate().ok_or(Error::FrameRate)?.invert());
+    output.set_time_base(decoder.time_base());
+
+    let encoder = encoder.open_as(codec)?;
+    output.set_parameters(&encoder);
+
+    let filter = filter(&decoder, &encoder)?;
+
+    Ok(Transcoder {
+        stream: input.index(),
+        filter: filter,
+        decoder: decoder,
+        encoder: encoder,
+    })
+}
+
+pub(crate) fn transcode<P, Q>(input: P, output: Q, target: Target) -> Result<(), Error>
+where
+    P: AsRef<Path>,
+    Q: AsRef<Path>,
+{
+    let mut ictx = format::input(&input)?;
+    let mut octx = format::output_as(&output, target.name())?;
+    let mut transcoder = transcoder(&mut ictx, &mut octx, target)?;
+
+    octx.write_header()?;
+
+    let in_time_base = transcoder.decoder.time_base();
+    let out_time_base = octx.stream(0).ok_or(Error::MissingVideo)?.time_base();
+
+    let mut decoded = frame::Video::empty();
+    let mut encoded = ffmpeg_next::Packet::empty();
+    let mut count = 0;
+
+    for (stream, mut packet) in ictx.packets() {
+        if stream.index() == transcoder.stream {
+            packet.rescale_ts(stream.time_base(), in_time_base);
+
+            if let Ok(true) = transcoder.decode(&packet, &mut decoded) {
+                let timestamp = decoded.timestamp();
+                decoded.set_pts(timestamp);
+
+                transcoder.add_frame(&decoded)?;
+
+                transcoder.encode(
+                    &mut decoded,
+                    &mut encoded,
+                    &mut octx,
+                    in_time_base,
+                    out_time_base,
+                )?;
+
+                count += 1;
+            }
+        }
+
+        if target.frames().map(|f| count >= f).unwrap_or(false) {
+            break;
+        }
+    }
+
+    transcoder.flush(
+        &mut decoded,
+        &mut encoded,
+        &mut octx,
+        in_time_base,
+        out_time_base,
+    )?;
+
+    octx.write_trailer()?;
+
+    Ok(())
+}
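Usage note (not part of the diff): the sketch below shows how this patch drives the new crate-internal `validate::transcode` module. The real call sites are `validate_gif` (GIF input is rewritten as MP4, which is why `validate_image` now reports `video/mp4` for GIF uploads) and `processor::prepare_image` (a single still frame is pulled out of a motion file before the MagickWand chain runs). The file paths here are hypothetical.

```rust
// Minimal sketch of the two transcode paths introduced by this patch.
// Only compiles inside the pict-rs crate; paths are illustrative.
use crate::validate::transcode::{transcode, Target};

pub(crate) fn example() -> Result<(), crate::validate::transcode::Error> {
    // Validation rewrites an uploaded GIF as an MP4.
    transcode("upload.gif", "upload.mp4", Target::Mp4)?;

    // Processing extracts one still frame so the MagickWand chain can run on it.
    // Target::frames() is Some(1) for Jpeg/Png, so the packet loop stops after
    // the first encoded frame.
    transcode("upload.mp4", "upload.mp4.jpg", Target::Jpeg)?;

    Ok(())
}
```

Inside the crate both call sites run this under `web::block`, since the ffmpeg work is synchronous.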
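The new `GET /image/process.{ext}?src={file}&...` route replaces the old path-based transformation syntax: the handler folds the query pairs into a source alias plus an ordered operation list that feeds `build_chain`. A standalone sketch of that split, with illustrative query values:

```rust
// Standalone illustration (std only) of how the `process` handler interprets
// GET /image/process.jpg?src=asdf.png&thumbnail=256&blur=3.0
fn split_query(query: Vec<(String, String)>) -> (String, Vec<(String, String)>) {
    query
        .into_iter()
        .fold((String::new(), Vec::new()), |(alias, mut ops), (k, v)| {
            if k == "src" {
                // the `src` pair names the original upload
                (v, ops)
            } else {
                // every other pair becomes a processor step, in order
                ops.push((k, v));
                (alias, ops)
            }
        })
}

fn main() {
    let query = vec![
        ("src".to_string(), "asdf.png".to_string()),
        ("thumbnail".to_string(), "256".to_string()),
        ("blur".to_string(), "3.0".to_string()),
    ];

    let (alias, operations) = split_query(query);

    assert_eq!(alias, "asdf.png");
    assert_eq!(
        operations,
        vec![
            ("thumbnail".to_string(), "256".to_string()),
            ("blur".to_string(), "3.0".to_string()),
        ]
    );
}
```

When a whitelist is configured, non-whitelisted keys are filtered out before the chain is built, and `build_chain` simply skips any pair it cannot parse.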