2
0
Fork 0
mirror of https://git.asonix.dog/asonix/pict-rs synced 2024-12-22 03:11:24 +00:00

Simplify object and file path generation

This commit is contained in:
asonix 2024-02-26 15:43:30 -06:00
parent 7c6112e631
commit c17a8722c6
7 changed files with 43 additions and 122 deletions

7
Cargo.lock generated
View file

@ -1875,7 +1875,6 @@ dependencies = [
"serde_urlencoded",
"sha2",
"sled",
"storage-path-generator",
"streem",
"subtle",
"thiserror",
@ -2738,12 +2737,6 @@ dependencies = [
"der",
]
[[package]]
name = "storage-path-generator"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7f11d35dae9818c4313649da4a97c8329e29357a7fe584526c1d78f5b63ef836"
[[package]]
name = "streem"
version = "0.2.0"

View file

@ -60,7 +60,6 @@ serde_json = "1.0"
serde_urlencoded = "0.7.1"
sha2 = "0.10.0"
sled = { version = "0.34.7" }
storage-path-generator = "0.1.0"
streem = "0.2.0"
subtle = { version = "2.5.0", default-features = false }
thiserror = "1.0"

View file

@ -33,9 +33,6 @@ impl ErrorCode {
pub(crate) const FILE_IO_ERROR: ErrorCode = ErrorCode {
code: "file-io-error",
};
pub(crate) const PARSE_PATH_ERROR: ErrorCode = ErrorCode {
code: "parse-path-error",
};
pub(crate) const FILE_EXISTS: ErrorCode = ErrorCode {
code: "file-exists",
};

24
src/file_path.rs Normal file
View file

@ -0,0 +1,24 @@
use std::path::PathBuf;
use uuid::Uuid;
/// Builds an on-disk file path by appending the freshly generated
/// hex-encoded UUID segments (see `generate`) to the given root directory.
pub(crate) fn generate_disk(mut path: PathBuf) -> PathBuf {
    for segment in generate() {
        path.push(segment);
    }
    path
}
/// Builds an object-storage key prefix from the freshly generated
/// hex-encoded UUID segments, joined with `/` separators.
pub(crate) fn generate_object() -> String {
    let segments = generate();
    segments.join("/")
}
fn generate() -> Vec<String> {
Uuid::now_v7()
.into_bytes()
.into_iter()
.map(to_hex)
.collect()
}
/// Formats one byte as lowercase hex WITHOUT zero-padding
/// (0x0a -> "a", 0xab -> "ab"); this variable-width encoding is the
/// established path-segment format, so it must not be changed.
fn to_hex(byte: u8) -> String {
    format!("{:x}", byte)
}

View file

@ -11,6 +11,7 @@ mod error_code;
mod exiftool;
mod ffmpeg;
mod file;
mod file_path;
mod formats;
mod future;
mod generate;
@ -1766,7 +1767,7 @@ where
{
match to {
config::primitives::Store::Filesystem(config::Filesystem { path }) => {
let store = FileStore::build(path.clone(), repo.clone()).await?;
let store = FileStore::build(path.clone()).await?;
let to = State {
config,
@ -1806,7 +1807,6 @@ where
signature_duration.unwrap_or(15),
client_timeout.unwrap_or(30),
public_endpoint,
repo.clone(),
)
.await?
.build(client.clone());
@ -1991,7 +1991,7 @@ impl PictRsConfiguration {
match from {
config::primitives::Store::Filesystem(config::Filesystem { path }) => {
let from = FileStore::build(path.clone(), repo.clone()).await?;
let from = FileStore::build(path.clone()).await?;
migrate_inner(
config,
tmp_dir,
@ -2034,7 +2034,6 @@ impl PictRsConfiguration {
signature_duration.unwrap_or(15),
client_timeout.unwrap_or(30),
public_endpoint,
repo.clone(),
)
.await?
.build(client.clone());
@ -2075,7 +2074,7 @@ impl PictRsConfiguration {
config::Store::Filesystem(config::Filesystem { path }) => {
let arc_repo = repo.to_arc();
let store = FileStore::build(path, arc_repo.clone()).await?;
let store = FileStore::build(path).await?;
let state = State {
tmp_dir: tmp_dir.clone(),
@ -2135,7 +2134,6 @@ impl PictRsConfiguration {
signature_duration,
client_timeout,
public_endpoint,
arc_repo.clone(),
)
.await?
.build(client.clone());

View file

@ -1,32 +1,21 @@
use crate::{
error_code::ErrorCode, file::File, repo::ArcRepo, store::Store, stream::LocalBoxStream,
};
use crate::{error_code::ErrorCode, file::File, store::Store, stream::LocalBoxStream};
use actix_web::web::Bytes;
use futures_core::Stream;
use std::{
path::{Path, PathBuf},
sync::Arc,
};
use storage_path_generator::Generator;
use tokio::io::{AsyncRead, AsyncWrite};
use tokio_util::io::StreamReader;
use tracing::Instrument;
use super::StoreError;
// - Settings Tree
// - last-path -> last generated path
const GENERATOR_KEY: &str = "last-path";
#[derive(Debug, thiserror::Error)]
pub(crate) enum FileError {
#[error("Failed to read or write file")]
Io(#[from] std::io::Error),
#[error("Failed to generate path")]
PathGenerator(#[from] storage_path_generator::PathError),
#[error("Couldn't strip root dir")]
PrefixError,
@ -41,7 +30,6 @@ impl FileError {
pub(super) const fn error_code(&self) -> ErrorCode {
match self {
Self::Io(_) => ErrorCode::FILE_IO_ERROR,
Self::PathGenerator(_) => ErrorCode::PARSE_PATH_ERROR,
Self::FileExists => ErrorCode::FILE_EXISTS,
Self::StringError | Self::PrefixError => ErrorCode::FORMAT_FILE_ID_ERROR,
}
@ -50,9 +38,7 @@ impl FileError {
#[derive(Clone)]
pub(crate) struct FileStore {
path_gen: Generator,
root_dir: PathBuf,
repo: ArcRepo,
}
impl Store for FileStore {
@ -76,7 +62,7 @@ impl Store for FileStore {
{
let mut reader = std::pin::pin!(reader);
let path = self.next_file().await?;
let path = self.next_file();
if let Err(e) = self.safe_save_reader(&path, &mut reader).await {
self.safe_remove_file(&path).await?;
@ -165,17 +151,10 @@ impl Store for FileStore {
}
impl FileStore {
#[tracing::instrument(skip(repo))]
pub(crate) async fn build(root_dir: PathBuf, repo: ArcRepo) -> color_eyre::Result<Self> {
let path_gen = init_generator(&repo).await?;
pub(crate) async fn build(root_dir: PathBuf) -> color_eyre::Result<Self> {
tokio::fs::create_dir_all(&root_dir).await?;
Ok(FileStore {
root_dir,
path_gen,
repo,
})
Ok(FileStore { root_dir })
}
fn file_id_from_path(&self, path: PathBuf) -> Result<Arc<str>, FileError> {
@ -190,26 +169,11 @@ impl FileStore {
self.root_dir.join(file_id.as_ref())
}
async fn next_directory(&self) -> Result<PathBuf, StoreError> {
let path = self.path_gen.next();
self.repo
.set(GENERATOR_KEY, path.to_be_bytes().into())
.await?;
let mut target_path = self.root_dir.clone();
for dir in path.to_strings() {
target_path.push(dir)
}
Ok(target_path)
}
async fn next_file(&self) -> Result<PathBuf, StoreError> {
let target_path = self.next_directory().await?;
fn next_file(&self) -> PathBuf {
let target_path = crate::file_path::generate_disk(self.root_dir.clone());
let filename = uuid::Uuid::new_v4().to_string();
Ok(target_path.join(filename))
target_path.join(filename)
}
#[tracing::instrument(level = "debug", skip(self, path), fields(path = ?path.as_ref()))]
@ -266,20 +230,9 @@ pub(crate) async fn safe_create_parent<P: AsRef<Path>>(path: P) -> Result<(), Fi
Ok(())
}
async fn init_generator(repo: &ArcRepo) -> Result<Generator, StoreError> {
if let Some(ivec) = repo.get(GENERATOR_KEY).await? {
Ok(Generator::from_existing(
storage_path_generator::Path::from_be_bytes(ivec.to_vec()).map_err(FileError::from)?,
))
} else {
Ok(Generator::new())
}
}
impl std::fmt::Debug for FileStore {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_struct("FileStore")
.field("path_gen", &"generator")
.field("root_dir", &self.root_dir)
.finish()
}

View file

@ -1,6 +1,6 @@
use crate::{
bytes_stream::BytesStream, error_code::ErrorCode, future::WithMetrics, repo::ArcRepo,
store::Store, stream::LocalBoxStream, sync::DropHandle,
bytes_stream::BytesStream, error_code::ErrorCode, future::WithMetrics, store::Store,
stream::LocalBoxStream, sync::DropHandle,
};
use actix_web::{
error::BlockingError,
@ -20,7 +20,6 @@ use rusty_s3::{
Bucket, BucketError, Credentials, UrlStyle,
};
use std::{string::FromUtf8Error, sync::Arc, time::Duration};
use storage_path_generator::{Generator, Path};
use streem::IntoStreamer;
use tokio::io::{AsyncRead, AsyncWrite, AsyncWriteExt};
use tokio_util::io::ReaderStream;
@ -31,16 +30,8 @@ use super::StoreError;
const CHUNK_SIZE: usize = 8_388_608; // 8 Mebibytes, min is 5 (5_242_880);
// - Settings Tree
// - last-path -> last generated path
const GENERATOR_KEY: &str = "last-path";
#[derive(Debug, thiserror::Error)]
pub(crate) enum ObjectError {
#[error("Failed to generate path")]
PathGenerator(#[from] storage_path_generator::PathError),
#[error("Failed to generate request")]
S3(#[from] BucketError),
@ -98,7 +89,6 @@ impl std::error::Error for XmlError {
impl ObjectError {
pub(super) const fn error_code(&self) -> ErrorCode {
match self {
Self::PathGenerator(_) => ErrorCode::PARSE_PATH_ERROR,
Self::S3(_)
| Self::RequestMiddleware(_)
| Self::Request(_)
@ -127,8 +117,6 @@ impl From<BlockingError> for ObjectError {
#[derive(Clone)]
pub(crate) struct ObjectStore {
path_gen: Generator,
repo: ArcRepo,
bucket: Bucket,
credentials: Credentials,
client: ClientWithMiddleware,
@ -139,8 +127,6 @@ pub(crate) struct ObjectStore {
#[derive(Clone)]
pub(crate) struct ObjectStoreConfig {
path_gen: Generator,
repo: ArcRepo,
bucket: Bucket,
credentials: Credentials,
signature_expiration: u64,
@ -151,8 +137,6 @@ pub(crate) struct ObjectStoreConfig {
impl ObjectStoreConfig {
pub(crate) fn build(self, client: ClientWithMiddleware) -> ObjectStore {
ObjectStore {
path_gen: self.path_gen,
repo: self.repo,
bucket: self.bucket,
credentials: self.credentials,
client,
@ -431,7 +415,7 @@ enum UploadState {
impl ObjectStore {
#[allow(clippy::too_many_arguments)]
#[tracing::instrument(skip(access_key, secret_key, session_token, repo))]
#[tracing::instrument(skip(access_key, secret_key, session_token))]
pub(crate) async fn build(
endpoint: Url,
bucket_name: String,
@ -443,13 +427,8 @@ impl ObjectStore {
signature_expiration: u64,
client_timeout: u64,
public_endpoint: Option<Url>,
repo: ArcRepo,
) -> Result<ObjectStoreConfig, StoreError> {
let path_gen = init_generator(&repo).await?;
Ok(ObjectStoreConfig {
path_gen,
repo,
bucket: Bucket::new(endpoint, url_style, bucket_name, region)
.map_err(ObjectError::from)?,
credentials: if let Some(token) = session_token {
@ -596,7 +575,7 @@ impl ObjectStore {
length: usize,
content_type: mime::Mime,
) -> Result<(RequestBuilder, Arc<str>), StoreError> {
let path = self.next_file().await?;
let path = self.next_file();
let mut action = self.bucket.put_object(Some(&self.credentials), &path);
@ -614,7 +593,7 @@ impl ObjectStore {
&self,
content_type: mime::Mime,
) -> Result<(RequestBuilder, Arc<str>), StoreError> {
let path = self.next_file().await?;
let path = self.next_file();
let mut action = self
.bucket
@ -784,39 +763,17 @@ impl ObjectStore {
self.build_request(action)
}
async fn next_directory(&self) -> Result<Path, StoreError> {
let path = self.path_gen.next();
self.repo
.set(GENERATOR_KEY, path.to_be_bytes().into())
.await?;
Ok(path)
}
async fn next_file(&self) -> Result<String, StoreError> {
let path = self.next_directory().await?.to_strings().join("/");
fn next_file(&self) -> String {
let path = crate::file_path::generate_object();
let filename = uuid::Uuid::new_v4().to_string();
Ok(format!("{path}/{filename}"))
}
}
async fn init_generator(repo: &ArcRepo) -> Result<Generator, StoreError> {
if let Some(ivec) = repo.get(GENERATOR_KEY).await? {
Ok(Generator::from_existing(
storage_path_generator::Path::from_be_bytes(ivec.to_vec())
.map_err(ObjectError::from)?,
))
} else {
Ok(Generator::new())
format!("{path}/{filename}")
}
}
impl std::fmt::Debug for ObjectStore {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_struct("ObjectStore")
.field("path_gen", &"generator")
.field("bucket", &self.bucket.name())
.field("region", &self.bucket.region())
.finish()