
Go back to actix-fs

asonix committed 2021-01-18 19:54:39 -06:00
parent 36f5387fff
commit 9797e2e3fc
7 changed files with 99 additions and 216 deletions
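
This commit swaps the async-fs and futures-lite based file I/O for asonix's actix-fs crate throughout. As a quick orientation, here is a minimal sketch of the write path the code moves to, assembled only from calls visible in the hunks below; the helper name save_bytes is illustrative, and the actix-fs signatures are inferred from usage in this diff rather than from the crate's documentation.

use bytes::Bytes;
use std::path::PathBuf;

// Sketch only: write a buffer to a path the way safe_save_file below does after the change.
async fn save_bytes(path: PathBuf, bytes: Bytes) -> Result<(), actix_fs::Error> {
    if let Some(parent) = path.parent() {
        // ensure the parent directory exists
        actix_fs::create_dir_all(parent.to_owned()).await?;
    }

    // actix-fs errors appear to expose an optional io::ErrorKind,
    // hence the comparison against Some(NotFound) seen throughout the diff
    if let Err(e) = actix_fs::metadata(path.clone()).await {
        if e.kind() != Some(std::io::ErrorKind::NotFound) {
            return Err(e);
        }
    } else {
        return Ok(()); // already present, nothing to do
    }

    let file = actix_fs::file::create(path.clone()).await?;
    if let Err(e) = actix_fs::file::write(file, bytes).await {
        // clean up the partial file on failure, mirroring safe_save_file below
        actix_fs::remove_file(path).await?;
        return Err(e);
    }

    Ok(())
}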

Cargo.lock (generated), 129 lines changed

@@ -55,6 +55,18 @@ dependencies = [
  "tracing",
 ]
 
+[[package]]
+name = "actix-fs"
+version = "0.1.0"
+source = "git+https://git.asonix.dog/asonix/actix-fs?branch=main#dc37026b9f1a8016304b71af34802a1ec52a318b"
+dependencies = [
+ "actix-threadpool",
+ "bytes",
+ "futures",
+ "log",
+ "thiserror",
+]
+
 [[package]]
 name = "actix-http"
 version = "2.2.0"
@@ -353,37 +365,6 @@ version = "1.0.37"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "ee67c11feeac938fae061b232e38e0b6d94f97a9df10e6271319325ac4c56a86"
 
-[[package]]
-name = "async-channel"
-version = "1.5.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "59740d83946db6a5af71ae25ddf9562c2b176b2ca42cf99a455f09f4a220d6b9"
-dependencies = [
- "concurrent-queue",
- "event-listener",
- "futures-core",
-]
-
-[[package]]
-name = "async-fs"
-version = "1.5.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8b3ca4f8ff117c37c278a2f7415ce9be55560b846b5bc4412aaa5d29c1c3dae2"
-dependencies = [
- "async-lock",
- "blocking",
- "futures-lite",
-]
-
-[[package]]
-name = "async-lock"
-version = "2.3.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1996609732bde4a9988bc42125f55f2af5f3c36370e27c778d5191a4a1b63bfb"
-dependencies = [
- "event-listener",
-]
-
 [[package]]
 name = "async-stream"
 version = "0.3.0"
@@ -405,12 +386,6 @@ dependencies = [
  "syn",
 ]
 
-[[package]]
-name = "async-task"
-version = "4.0.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e91831deabf0d6d7ec49552e489aed63b7456a7a3c46cff62adad428110b0af0"
-
 [[package]]
 name = "async-trait"
 version = "0.1.42"
@@ -422,12 +397,6 @@ dependencies = [
  "syn",
 ]
 
-[[package]]
-name = "atomic-waker"
-version = "1.0.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "065374052e7df7ee4047b1160cca5e1467a12351a40b3da123c870ba0b8eda2a"
-
 [[package]]
 name = "atty"
 version = "0.2.14"
@@ -561,20 +530,6 @@ dependencies = [
  "generic-array",
 ]
 
-[[package]]
-name = "blocking"
-version = "1.0.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c5e170dbede1f740736619b776d7251cb1b9095c435c34d8ca9f57fcd2f335e9"
-dependencies = [
- "async-channel",
- "async-task",
- "atomic-waker",
- "fastrand",
- "futures-lite",
- "once_cell",
-]
-
 [[package]]
 name = "brotli-sys"
 version = "0.3.2"
@@ -622,12 +577,6 @@ dependencies = [
  "bytes",
 ]
 
-[[package]]
-name = "cache-padded"
-version = "1.1.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "631ae5198c9be5e753e5cc215e1bd73c2b466a3565173db433f52bb9d3e66dba"
-
 [[package]]
 name = "cc"
 version = "1.0.66"
@@ -694,15 +643,6 @@ dependencies = [
  "vec_map",
 ]
 
-[[package]]
-name = "concurrent-queue"
-version = "1.2.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "30ed07550be01594c6026cff2a1d7fe9c8f683caa798e12b68694ac9e88286a3"
-dependencies = [
- "cache-padded",
-]
-
 [[package]]
 name = "const_fn"
 version = "0.4.5"
@@ -832,21 +772,6 @@ dependencies = [
  "termcolor",
 ]
 
-[[package]]
-name = "event-listener"
-version = "2.5.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f7531096570974c3a9dcf9e4b8e1cede1ec26cf5046219fb3b9d897503b9be59"
-
-[[package]]
-name = "fastrand"
-version = "1.4.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ca5faf057445ce5c9d4329e382b2ce7ca38550ef3b73a5348362d5f24e0c7fe3"
-dependencies = [
- "instant",
-]
-
 [[package]]
 name = "ffmpeg-next"
 version = "4.3.8"
@@ -974,21 +899,6 @@ version = "0.3.8"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "611834ce18aaa1bd13c4b374f5d653e1027cf99b6b502584ff8c9a64413b30bb"
 
-[[package]]
-name = "futures-lite"
-version = "1.11.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b4481d0cd0de1d204a4fa55e7d45f07b1d958abcb06714b3446438e2eff695fb"
-dependencies = [
- "fastrand",
- "futures-core",
- "futures-io",
- "memchr",
- "parking",
- "pin-project-lite 0.2.1",
- "waker-fn",
-]
-
 [[package]]
 name = "futures-macro"
 version = "0.3.8"
@@ -1495,12 +1405,6 @@ version = "0.3.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "624a8340c38c1b80fd549087862da4ba43e08858af025b236e509b6649fc13d5"
 
-[[package]]
-name = "parking"
-version = "2.0.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "427c3892f9e783d91cc128285287e70a59e206ca452770ece88a76f7a3eddd72"
-
 [[package]]
 name = "parking_lot"
 version = "0.11.1"
@@ -1543,17 +1447,16 @@ name = "pict-rs"
 version = "0.3.0-alpha.5"
 dependencies = [
  "actix-form-data",
+ "actix-fs",
  "actix-rt",
  "actix-web",
  "anyhow",
- "async-fs",
  "async-stream",
  "base64 0.13.0",
  "bytes",
  "ffmpeg-next",
  "ffmpeg-sys-next",
  "futures",
- "futures-lite",
  "magick_rust",
  "mime",
  "once_cell",
@@ -2524,12 +2427,6 @@ version = "0.9.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "b5a972e5669d67ba988ce3dc826706fb0a8b01471c088cb0b6110b805cc36aed"
 
-[[package]]
-name = "waker-fn"
-version = "1.1.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9d5b2c62b4012a3e1eca5a7e077d13b3bf498c4073e33ccd58626607748ceeca"
-
 [[package]]
 name = "wasi"
 version = "0.9.0+wasi-snapshot-preview1"

Cargo.toml

@@ -12,15 +12,14 @@ edition = "2018"
 
 [dependencies]
 actix-form-data = "0.5.0"
+actix-fs = { git = "https://git.asonix.dog/asonix/actix-fs", branch = "main" }
 actix-rt = "1.1.1"
 actix-web = { version = "3.0.1", default-features = false, features = ["rustls"] }
 anyhow = "1.0"
-async-fs = "1.5.0"
 async-stream = "0.3.0"
 base64 = "0.13.0"
 bytes = "0.5"
 futures = "0.3.4"
-futures-lite = "1.11.3"
 magick_rust = { version = "0.14.0", git = "https://github.com/nlfiedler/magick-rust" }
 mime = "0.3.1"
 once_cell = "1.4.0"


@@ -18,6 +18,9 @@ pub(crate) enum UploadError {
     #[error("Error interacting with filesystem, {0}")]
     Io(#[from] std::io::Error),
 
+    #[error("Error in filesyste, {0}")]
+    Fs(#[from] actix_fs::Error),
+
     #[error("Panic in blocking operation")]
     Canceled,
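
The new Fs variant is what lets the rest of this diff pass actix-fs failures through with `?` or `e.into()`; a minimal illustration, with a hypothetical function name:

// Illustration only: with the #[from] variant above, an actix_fs::Error converts into
// UploadError automatically, so call sites later in this diff can use `?` directly.
async fn remove_upload(path: std::path::PathBuf) -> Result<(), UploadError> {
    actix_fs::remove_file(path).await?; // actix_fs::Error -> UploadError via From
    Ok(())
}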

src/main.rs

@@ -8,11 +8,10 @@ use actix_web::{
     web, App, HttpResponse, HttpServer,
 };
 use bytes::Bytes;
-use futures::stream::{once, Stream};
-use futures_lite::{AsyncReadExt, AsyncWriteExt};
+use futures::stream::{once, Stream, TryStreamExt};
 use once_cell::sync::Lazy;
 use std::{
-    collections::HashSet, future::ready, io, path::PathBuf, pin::Pin, sync::Once, time::SystemTime,
+    collections::HashSet, future::ready, path::PathBuf, pin::Pin, sync::Once, time::SystemTime,
 };
 use structopt::StructOpt;
 use tracing::{debug, error, info, instrument, Span};
@@ -36,7 +35,6 @@ use self::{
     validate::{image_webp, video_mp4},
 };
 
-const CHUNK_SIZE: usize = 65_356;
 const MEGABYTES: usize = 1024 * 1024;
 const MINUTES: u32 = 60;
 const HOURS: u32 = 60 * MINUTES;
@@ -66,12 +64,12 @@ static MAGICK_INIT: Once = Once::new();
 async fn safe_move_file(from: PathBuf, to: PathBuf) -> Result<(), UploadError> {
     if let Some(path) = to.parent() {
         debug!("Creating directory {:?}", path);
-        async_fs::create_dir_all(path.to_owned()).await?;
+        actix_fs::create_dir_all(path.to_owned()).await?;
     }
 
     debug!("Checking if {:?} already exists", to);
-    if let Err(e) = async_fs::metadata(to.clone()).await {
-        if e.kind() != std::io::ErrorKind::NotFound {
+    if let Err(e) = actix_fs::metadata(to.clone()).await {
+        if e.kind() != Some(std::io::ErrorKind::NotFound) {
             return Err(e.into());
         }
     } else {
@@ -79,15 +77,15 @@ async fn safe_move_file(from: PathBuf, to: PathBuf) -> Result<(), UploadError> {
     }
 
     debug!("Moving {:?} to {:?}", from, to);
-    async_fs::copy(from.clone(), to).await?;
-    async_fs::remove_file(from).await?;
+    actix_fs::copy(from.clone(), to).await?;
+    actix_fs::remove_file(from).await?;
 
     Ok(())
 }
 
 async fn safe_create_parent(path: PathBuf) -> Result<(), UploadError> {
     if let Some(path) = path.parent() {
         debug!("Creating directory {:?}", path);
-        async_fs::create_dir_all(path.to_owned()).await?;
+        actix_fs::create_dir_all(path.to_owned()).await?;
     }
 
     Ok(())
@@ -99,13 +97,13 @@ async fn safe_save_file(path: PathBuf, bytes: bytes::Bytes) -> Result<(), Upload
     if let Some(path) = path.parent() {
         // create the directory for the file
         debug!("Creating directory {:?}", path);
-        async_fs::create_dir_all(path.to_owned()).await?;
+        actix_fs::create_dir_all(path.to_owned()).await?;
     }
 
     // Only write the file if it doesn't already exist
     debug!("Checking if {:?} already exists", path);
-    if let Err(e) = async_fs::metadata(path.clone()).await {
-        if e.kind() != std::io::ErrorKind::NotFound {
+    if let Err(e) = actix_fs::metadata(path.clone()).await {
+        if e.kind() != Some(std::io::ErrorKind::NotFound) {
             return Err(e.into());
         }
     } else {
@@ -114,17 +112,16 @@ async fn safe_save_file(path: PathBuf, bytes: bytes::Bytes) -> Result<(), Upload
     // Open the file for writing
     debug!("Creating {:?}", path);
-    let mut file = async_fs::File::create(path.clone()).await?;
+    let file = actix_fs::file::create(path.clone()).await?;
 
     // try writing
     debug!("Writing to {:?}", path);
-    if let Err(e) = file.write_all(&bytes).await {
+    if let Err(e) = actix_fs::file::write(file, bytes).await {
         error!("Error writing {:?}, {}", path, e);
         // remove file if writing failed before completion
-        async_fs::remove_file(path).await?;
+        actix_fs::remove_file(path).await?;
         return Err(e.into());
     }
-    file.flush().await?;
 
     debug!("{:?} written", path);
 
     Ok(())
@@ -353,8 +350,8 @@ async fn process(
         prepare_process(query, ext.as_str(), &manager, &whitelist).await?;
 
     // If the thumbnail doesn't exist, we need to create it
-    let thumbnail_exists = if let Err(e) = async_fs::metadata(thumbnail_path.clone()).await {
-        if e.kind() != std::io::ErrorKind::NotFound {
+    let thumbnail_exists = if let Err(e) = actix_fs::metadata(thumbnail_path.clone()).await {
+        if e.kind() != Some(std::io::ErrorKind::NotFound) {
             error!("Error looking up processed image, {}", e);
             return Err(e.into());
         }
@@ -531,25 +528,6 @@ async fn serve(
     ranged_file_resp(path, range, details).await
 }
 
-fn read_to_stream(mut file: async_fs::File) -> impl Stream<Item = Result<Bytes, io::Error>> {
-    async_stream::stream! {
-        let mut buf = Vec::with_capacity(CHUNK_SIZE);
-
-        while {
-            buf.clear();
-
-            let mut take = (&mut file).take(CHUNK_SIZE as u64);
-            let read_bytes_result = take.read_to_end(&mut buf).await;
-
-            let read_bytes = read_bytes_result.as_ref().map(|num| *num).unwrap_or(0);
-
-            yield read_bytes_result.map(|_| Bytes::copy_from_slice(&buf));
-
-            read_bytes > 0
-        } {}
-    }
-}
-
 async fn ranged_file_resp(
     path: PathBuf,
     range: Option<range::RangeHeader>,
@@ -565,9 +543,9 @@ async fn ranged_file_resp(
             if range_header.is_empty() {
                 return Err(UploadError::Range);
             } else if range_header.len() == 1 {
-                let file = async_fs::File::open(path).await?;
-                let meta = file.metadata().await?;
+                let file = actix_fs::file::open(path).await?;
+                let (file, meta) = actix_fs::file::metadata(file).await?;
 
                 let range = range_header.ranges().next().unwrap();
@@ -581,9 +559,10 @@ async fn ranged_file_resp(
             }
             //No Range header in the request - return the entire document
             None => {
-                let file = async_fs::File::open(path).await?;
-                let stream: Pin<Box<dyn Stream<Item = Result<Bytes, io::Error>>>> =
-                    Box::pin(read_to_stream(file));
+                let stream = actix_fs::read_to_stream(path)
+                    .await?
+                    .map_err(UploadError::from);
+                let stream: Pin<Box<dyn Stream<Item = Result<Bytes, UploadError>>>> = Box::pin(stream);
                 (HttpResponse::Ok(), stream)
             }
         };
@@ -814,8 +793,8 @@ async fn main() -> Result<(), anyhow::Error> {
     .run()
     .await?;
 
-    if async_fs::metadata(&*TMP_DIR).await.is_ok() {
-        async_fs::remove_dir_all(&*TMP_DIR).await?;
+    if actix_fs::metadata(&*TMP_DIR).await.is_ok() {
+        actix_fs::remove_dir_all(&*TMP_DIR).await?;
     }
 
     Ok(())


@@ -356,7 +356,7 @@ pub(crate) async fn prepare_image(
     let jpg_path = format!("{}.jpg", original_path_str);
     let jpg_path = PathBuf::from(jpg_path);
 
-    if async_fs::metadata(jpg_path.clone()).await.is_ok() {
+    if actix_fs::metadata(jpg_path.clone()).await.is_ok() {
         return Ok(Some((jpg_path, Exists::Exists)));
     }
@@ -376,7 +376,7 @@ pub(crate) async fn prepare_image(
     if let Err(e) = res {
         error!("transcode error: {:?}", e);
-        async_fs::remove_file(tmpfile2).await?;
+        actix_fs::remove_file(tmpfile2).await?;
         return Err(e.into());
     }


@@ -1,4 +1,4 @@
-use crate::{UploadError, CHUNK_SIZE};
+use crate::UploadError;
 use actix_web::{
     dev::Payload,
     http::{
@@ -8,9 +8,8 @@ use actix_web::{
     web::Bytes,
     FromRequest, HttpRequest,
 };
-use futures::stream::{once, Once, Stream};
-use futures_lite::{AsyncReadExt, AsyncSeekExt};
-use std::io;
+use futures::stream::{once, Once, Stream, StreamExt, TryStreamExt};
+use std::{fs, io};
 use std::{
     future::{ready, Ready},
     pin::Pin,
@@ -47,7 +46,7 @@ impl Range {
         }
     }
 
-    pub(crate) fn chop_bytes(&self, bytes: Bytes) -> Once<Ready<Result<Bytes, io::Error>>> {
+    pub(crate) fn chop_bytes(&self, bytes: Bytes) -> Once<Ready<Result<Bytes, UploadError>>> {
         match self {
             Range::RangeStart(start) => once(ready(Ok(bytes.slice(*start as usize..)))),
             Range::SuffixLength(from_start) => once(ready(Ok(bytes.slice(..*from_start as usize)))),
@@ -59,26 +58,31 @@ impl Range {
     pub(crate) async fn chop_file(
         &self,
-        mut file: async_fs::File,
-    ) -> Result<Pin<Box<dyn Stream<Item = Result<Bytes, io::Error>>>>, io::Error> {
+        file: fs::File,
+    ) -> Result<Pin<Box<dyn Stream<Item = Result<Bytes, UploadError>>>>, UploadError> {
         match self {
             Range::RangeStart(start) => {
-                file.seek(io::SeekFrom::Start(*start)).await?;
+                let (file, _) = actix_fs::file::seek(file, io::SeekFrom::Start(*start)).await?;
 
-                Ok(Box::pin(crate::read_to_stream(file)))
+                Ok(Box::pin(
+                    actix_fs::file::read_to_stream(file)
+                        .await?
+                        .map_err(UploadError::from),
+                ))
             }
             Range::SuffixLength(from_start) => {
-                file.seek(io::SeekFrom::Start(0)).await?;
+                let (file, _) = actix_fs::file::seek(file, io::SeekFrom::Start(0)).await?;
 
-                Ok(Box::pin(read_num_bytes_to_stream(file, *from_start)))
+                Ok(Box::pin(
+                    read_num_bytes_to_stream(file, *from_start as usize).await?,
+                ))
             }
             Range::Segment(start, end) => {
-                file.seek(io::SeekFrom::Start(*start)).await?;
+                let (file, _) = actix_fs::file::seek(file, io::SeekFrom::Start(*start)).await?;
 
-                Ok(Box::pin(read_num_bytes_to_stream(
-                    file,
-                    end.saturating_sub(*start),
-                )))
+                Ok(Box::pin(
+                    read_num_bytes_to_stream(file, end.saturating_sub(*start) as usize).await?,
+                ))
             }
         }
     }
@@ -184,25 +188,31 @@ fn parse_range(s: &str) -> Result<Range, UploadError> {
     }
 }
 
-fn read_num_bytes_to_stream(
-    mut file: async_fs::File,
-    mut num_bytes: u64,
-) -> impl Stream<Item = Result<Bytes, io::Error>> {
-    async_stream::stream! {
-        let mut buf = Vec::with_capacity((CHUNK_SIZE as u64).min(num_bytes) as usize);
-
-        while {
-            buf.clear();
-
-            let mut take = (&mut file).take((CHUNK_SIZE as u64).min(num_bytes));
-            let read_bytes_result = take.read_to_end(&mut buf).await;
-
-            let read_bytes = read_bytes_result.as_ref().map(|num| *num).unwrap_or(0);
-
-            yield read_bytes_result.map(|_| Bytes::copy_from_slice(&buf));
-
-            num_bytes = num_bytes.saturating_sub(read_bytes as u64);
-            read_bytes > 0 && num_bytes > 0
-        } {}
-    }
+async fn read_num_bytes_to_stream(
+    file: fs::File,
+    mut num_bytes: usize,
+) -> Result<impl Stream<Item = Result<Bytes, UploadError>>, UploadError> {
+    let mut stream = actix_fs::file::read_to_stream(file).await?;
+
+    let stream = async_stream::stream! {
+        while let Some(res) = stream.next().await {
+            let read_bytes = res.as_ref().map(|b| b.len()).unwrap_or(0);
+
+            if read_bytes == 0 {
+                break;
+            }
+
+            yield res.map_err(UploadError::from).map(|bytes| {
+                if bytes.len() > num_bytes {
+                    bytes.slice(0..num_bytes)
+                } else {
+                    bytes
+                }
+            });
+
+            num_bytes = num_bytes.saturating_sub(read_bytes);
+        }
+    };
+
+    Ok(stream)
 }
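
The rewritten read_num_bytes_to_stream keeps a running byte budget and trims whatever the underlying stream yields. The same idea can be shown with a plain futures stream of Bytes, independent of actix-fs; limit_bytes and the executor choice here are illustrative, not part of the commit.

use bytes::Bytes;
use futures::stream::{self, Stream, StreamExt};

// Illustrative helper: cap a stream of chunks at `budget` total bytes,
// truncating the chunk that crosses the limit and dropping the empty tail.
fn limit_bytes(input: impl Stream<Item = Bytes>, mut budget: usize) -> impl Stream<Item = Bytes> {
    input
        .map(move |chunk| {
            let take = budget.min(chunk.len());
            budget -= take;
            chunk.slice(..take)
        })
        .filter(|chunk| futures::future::ready(!chunk.is_empty()))
}

fn main() {
    futures::executor::block_on(async {
        let chunks = stream::iter(vec![Bytes::from_static(b"hello "), Bytes::from_static(b"world")]);
        let limited: Vec<Bytes> = limit_bytes(chunks, 8).collect().await;
        let total: usize = limited.iter().map(|b| b.len()).sum();
        assert_eq!(total, 8); // "hello wo"
    });
}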


@@ -6,7 +6,7 @@ use crate::{
     validate::validate_image,
 };
 use actix_web::web;
-use futures::stream::{Stream, StreamExt};
+use futures::stream::{Stream, StreamExt, TryStreamExt};
 use sha2::Digest;
 use std::{path::PathBuf, pin::Pin, sync::Arc};
 use tracing::{debug, error, info, instrument, warn, Span};
@@ -218,7 +218,7 @@ impl UploadManager {
         root_dir.push("files");
 
         // Ensure file dir exists
-        async_fs::create_dir_all(root_dir.clone()).await?;
+        actix_fs::create_dir_all(root_dir.clone()).await?;
 
         Ok(UploadManager {
             inner: Arc::new(UploadManagerInner {
@@ -603,7 +603,7 @@ impl UploadManager {
         let mut errors = Vec::new();
 
         debug!("Deleting {:?}", path);
-        if let Err(e) = async_fs::remove_file(path).await {
+        if let Err(e) = actix_fs::remove_file(path).await {
             errors.push(e.into());
         }
@@ -676,8 +676,8 @@ impl UploadManager {
     async fn hash(&self, tmpfile: PathBuf) -> Result<Hash, UploadError> {
         let mut hasher = self.inner.hasher.clone();
 
-        let file = async_fs::File::open(tmpfile).await?;
-        let mut stream = Box::pin(crate::read_to_stream(file));
+        let file = actix_fs::file::open(tmpfile).await?;
+        let mut stream = Box::pin(actix_fs::file::read_to_stream(file).await?);
 
         while let Some(res) = stream.next().await {
             let bytes = res?;
@@ -751,8 +751,8 @@ impl UploadManager {
             path.push(filename.clone());
 
-            if let Err(e) = async_fs::metadata(path).await {
-                if e.kind() == std::io::ErrorKind::NotFound {
+            if let Err(e) = actix_fs::metadata(path).await {
+                if e.kind() == Some(std::io::ErrorKind::NotFound) {
                     debug!("Generated unused filename {}", filename);
                     return Ok(filename);
                 }
@@ -878,19 +878,19 @@ impl UploadManager {
 }
 
 #[instrument(skip(stream))]
-async fn safe_save_stream<E>(to: PathBuf, mut stream: UploadStream<E>) -> Result<(), UploadError>
+async fn safe_save_stream<E>(to: PathBuf, stream: UploadStream<E>) -> Result<(), UploadError>
 where
     UploadError: From<E>,
     E: Unpin,
 {
     if let Some(path) = to.parent() {
         debug!("Creating directory {:?}", path);
-        async_fs::create_dir_all(path.to_owned()).await?;
+        actix_fs::create_dir_all(path.to_owned()).await?;
     }
 
     debug!("Checking if {:?} already exists", to);
-    if let Err(e) = async_fs::metadata(to.clone()).await {
-        if e.kind() != std::io::ErrorKind::NotFound {
+    if let Err(e) = actix_fs::metadata(to.clone()).await {
+        if e.kind() != Some(std::io::ErrorKind::NotFound) {
             return Err(e.into());
         }
     } else {
@@ -899,21 +899,16 @@ where
     debug!("Writing stream to {:?}", to);
 
-    let mut file = async_fs::File::create(to).await?;
-
-    use futures_lite::AsyncWriteExt;
-    while let Some(res) = stream.next().await {
-        let bytes = res?;
-        file.write_all(&bytes).await?;
-    }
-    file.flush().await?;
+    let file = actix_fs::file::create(to).await?;
+    actix_fs::file::write_stream(file, stream.map_err(UploadError::from)).await?;
 
     Ok(())
 }
 
 async fn remove_path(path: sled::IVec) -> Result<(), UploadError> {
     let path_string = String::from_utf8(path.to_vec())?;
-    async_fs::remove_file(path_string).await?;
+    actix_fs::remove_file(path_string).await?;
 
     Ok(())
 }