mirror of https://git.asonix.dog/asonix/pict-rs
synced 2024-12-23 03:41:23 +00:00

Remove Identifier trait, Replace with Arc<str>

This commit is contained in:
parent 8eb4cda256
commit 8921f57a21

32 changed files with 799 additions and 630 deletions
Cargo.lock (generated, 3 changed lines)

@@ -730,6 +730,9 @@ dependencies = [
  "byteorder",
  "diesel_derives",
  "itoa",
+ "serde_json",
+ "time",
+ "uuid",
 ]
 
 [[package]]
@@ -28,7 +28,7 @@ config = "0.13.0"
 console-subscriber = "0.1"
 dashmap = "5.1.0"
 deadpool = { version = "0.9.5", features = ["rt_tokio_1"] }
-diesel = "2.1.1"
+diesel = { version = "2.1.1", features = ["postgres_backend", "serde_json", "time", "uuid"] }
 diesel-async = { version = "0.4.1", features = ["postgres", "deadpool"] }
 flume = "0.11.0"
 futures-core = "0.3"
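The new diesel features appear to line up with the Postgres repo work visible elsewhere in this commit: `postgres_backend` enables the Pg backend types, while `serde_json`, `time`, and `uuid` let columns of those kinds deserialize straight into Rust values. The sketch below is illustrative only; the table and column names are hypothetical and not from this commit.

```rust
use diesel::prelude::*;

diesel::table! {
    job_queue (id) {
        id -> Uuid,
        queue -> Text,
        job -> Jsonb,
        queue_time -> Timestamp,
    }
}

// Field types map to SQL types via the enabled cargo features.
#[derive(Queryable)]
struct Job {
    id: uuid::Uuid,                      // needs the "uuid" feature
    queue: String,
    job: serde_json::Value,              // needs the "serde_json" feature
    queue_time: time::PrimitiveDateTime, // needs the "time" feature
}
```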
@@ -1,3 +1,5 @@
+use std::sync::Arc;
+
 use crate::{
     error::Error,
     repo::{ArcRepo, UploadId},
@@ -9,19 +11,13 @@ use futures_core::Stream;
 use mime::APPLICATION_OCTET_STREAM;
 use tracing::{Instrument, Span};
 
-pub(crate) struct Backgrounded<S>
-where
-    S: Store,
-{
+pub(crate) struct Backgrounded {
     repo: ArcRepo,
-    identifier: Option<S::Identifier>,
+    identifier: Option<Arc<str>>,
     upload_id: Option<UploadId>,
 }
 
-impl<S> Backgrounded<S>
-where
-    S: Store,
-{
+impl Backgrounded {
     pub(crate) fn disarm(mut self) {
         let _ = self.identifier.take();
         let _ = self.upload_id.take();
@@ -31,12 +27,13 @@ where
         self.upload_id
     }
 
-    pub(crate) fn identifier(&self) -> Option<&S::Identifier> {
+    pub(crate) fn identifier(&self) -> Option<&Arc<str>> {
         self.identifier.as_ref()
     }
 
-    pub(crate) async fn proxy<P>(repo: ArcRepo, store: S, stream: P) -> Result<Self, Error>
+    pub(crate) async fn proxy<S, P>(repo: ArcRepo, store: S, stream: P) -> Result<Self, Error>
     where
+        S: Store,
         P: Stream<Item = Result<Bytes, Error>> + Unpin + 'static,
     {
         let mut this = Self {
@@ -50,8 +47,9 @@ where
         Ok(this)
     }
 
-    async fn do_proxy<P>(&mut self, store: S, stream: P) -> Result<(), Error>
+    async fn do_proxy<S, P>(&mut self, store: S, stream: P) -> Result<(), Error>
     where
+        S: Store,
         P: Stream<Item = Result<Bytes, Error>> + Unpin + 'static,
     {
         self.upload_id = Some(self.repo.create_upload().await?);
@@ -68,10 +66,7 @@ where
     }
 }
 
-impl<S> Drop for Backgrounded<S>
-where
-    S: Store,
-{
+impl Drop for Backgrounded {
     fn drop(&mut self) {
         let any_items = self.identifier.is_some() || self.upload_id.is_some();
 
@@ -90,7 +85,7 @@ where
         tracing::trace_span!(parent: None, "Spawn task").in_scope(|| {
             actix_rt::spawn(
                 async move {
-                    let _ = crate::queue::cleanup_identifier(&repo, identifier).await;
+                    let _ = crate::queue::cleanup_identifier(&repo, &identifier).await;
                 }
                 .instrument(cleanup_span),
            )
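A minimal sketch (not part of the diff) of the shape `Backgrounded` lands on: because the identifier is a plain `Arc<str>` rather than a store-specific associated type, the struct no longer needs an `S: Store` parameter, and the cleanup task spawned from `Drop` can own a cheap clone of the identifier.

```rust
use std::sync::Arc;

// Simplified stand-in for the real struct; only the identifier field matters here.
struct Backgrounded {
    identifier: Option<Arc<str>>,
}

fn main() {
    // Store backends hand back string keys; Arc::from shares one allocation.
    let identifier: Arc<str> = Arc::from(String::from("files/2023/some-object"));

    let backgrounded = Backgrounded {
        identifier: Some(Arc::clone(&identifier)),
    };

    // Cloning an Arc<str> only bumps a reference count, so a background task
    // can own its copy while the original stays usable.
    let for_cleanup = Arc::clone(&identifier);
    std::thread::spawn(move || {
        println!("would queue cleanup for {for_cleanup}");
    })
    .join()
    .unwrap();

    drop(backgrounded);
}
```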
@@ -1,3 +1,5 @@
+use std::sync::Arc;
+
 use crate::{
     bytes_stream::BytesStream,
     discover::Discovery,
@@ -103,7 +105,7 @@ impl Details {
 
     pub(crate) async fn from_store<S: Store>(
         store: &S,
-        identifier: &S::Identifier,
+        identifier: &Arc<str>,
         timeout: u64,
     ) -> Result<Self, Error> {
         let mut buf = BytesStream::new();
@@ -1,3 +1,5 @@
+use std::sync::Arc;
+
 use crate::{
     error_code::ErrorCode,
     formats::InternalVideoFormat,
@@ -132,7 +134,7 @@ impl ThumbnailFormat {
 #[tracing::instrument(skip(store))]
 pub(crate) async fn thumbnail<S: Store>(
     store: S,
-    from: S::Identifier,
+    from: Arc<str>,
     input_format: InternalVideoFormat,
     format: ThumbnailFormat,
     timeout: u64,
@@ -5,7 +5,7 @@ use crate::{
     ffmpeg::ThumbnailFormat,
     formats::{InputProcessableFormat, InternalVideoFormat},
     repo::{Alias, ArcRepo, Hash, VariantAlreadyExists},
-    store::{Identifier, Store},
+    store::Store,
 };
 use actix_web::web::Bytes;
 use std::{path::PathBuf, time::Instant};
@@ -91,7 +91,7 @@ async fn process<S: Store + 'static>(
     let permit = crate::PROCESS_SEMAPHORE.acquire().await;
 
     let identifier = if let Some(identifier) = repo.still_identifier_from_alias(&alias).await? {
-        S::Identifier::from_arc(identifier)?
+        identifier
     } else {
         let Some(identifier) = repo.identifier(hash.clone()).await? else {
             return Err(UploadError::MissingIdentifier.into());
@@ -101,7 +101,7 @@ async fn process<S: Store + 'static>(
 
     let reader = crate::ffmpeg::thumbnail(
         store.clone(),
-        S::Identifier::from_arc(identifier)?,
+        identifier,
         input_format.unwrap_or(InternalVideoFormat::Mp4),
         thumbnail_format,
         media.process_timeout,
@@ -1,3 +1,5 @@
+use std::sync::Arc;
+
 use crate::{
     bytes_stream::BytesStream,
     either::Either,
@@ -15,15 +17,12 @@ mod hasher;
 use hasher::Hasher;
 
 #[derive(Debug)]
-pub(crate) struct Session<S>
-where
-    S: Store,
-{
+pub(crate) struct Session {
     repo: ArcRepo,
     delete_token: DeleteToken,
     hash: Option<Hash>,
     alias: Option<Alias>,
-    identifier: Option<S::Identifier>,
+    identifier: Option<Arc<str>>,
 }
 
 #[tracing::instrument(skip(stream))]
@@ -49,7 +48,7 @@ pub(crate) async fn ingest<S>(
     stream: impl Stream<Item = Result<Bytes, Error>> + Unpin + 'static,
     declared_alias: Option<Alias>,
     media: &crate::config::Media,
-) -> Result<Session<S>, Error>
+) -> Result<Session, Error>
 where
     S: Store,
 {
@@ -131,11 +130,11 @@ where
 
 #[tracing::instrument(level = "trace", skip_all)]
 async fn save_upload<S>(
-    session: &mut Session<S>,
+    session: &mut Session,
     repo: &ArcRepo,
     store: &S,
     hash: Hash,
-    identifier: &S::Identifier,
+    identifier: &Arc<str>,
 ) -> Result<(), Error>
 where
     S: Store,
@@ -153,10 +152,7 @@ where
     Ok(())
 }
 
-impl<S> Session<S>
-where
-    S: Store,
-{
+impl Session {
     pub(crate) fn disarm(mut self) -> DeleteToken {
         let _ = self.hash.take();
         let _ = self.alias.take();
@@ -206,10 +202,7 @@ where
     }
 }
 
-impl<S> Drop for Session<S>
-where
-    S: Store,
-{
+impl Drop for Session {
     fn drop(&mut self) {
         let any_items = self.hash.is_some() || self.alias.is_some() || self.identifier.is_some();
 
@@ -258,7 +251,7 @@ where
         tracing::trace_span!(parent: None, "Spawn task").in_scope(|| {
             actix_rt::spawn(
                 async move {
-                    let _ = crate::queue::cleanup_identifier(&repo, identifier).await;
+                    let _ = crate::queue::cleanup_identifier(&repo, &identifier).await;
                 }
                 .instrument(cleanup_span),
            )
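`Session` keeps the same disarm-on-success guard pattern it had before this change. A simplified, synchronous sketch of that pattern (not the real pict-rs code, which spawns an async cleanup task) is shown below.

```rust
use std::sync::Arc;

struct Session {
    identifier: Option<Arc<str>>,
}

impl Session {
    // Taking the field leaves nothing for Drop to clean up.
    fn disarm(mut self) {
        let _ = self.identifier.take();
    }
}

impl Drop for Session {
    fn drop(&mut self) {
        if let Some(identifier) = self.identifier.take() {
            // In pict-rs this would queue a cleanup_identifier job instead.
            println!("queueing cleanup for {identifier}");
        }
    }
}

fn main() {
    // Failure path: dropped while still armed, so cleanup is queued.
    let _failed = Session { identifier: Some(Arc::from("tmp-object")) };

    // Success path: disarmed, so Drop has nothing left to do.
    let ok = Session { identifier: Some(Arc::from("stored-object")) };
    ok.disarm();
}
```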
src/lib.rs (74 changed lines)
@@ -45,6 +45,7 @@ use reqwest_middleware::{ClientBuilder, ClientWithMiddleware};
 use reqwest_tracing::TracingMiddleware;
 use rusty_s3::UrlStyle;
 use std::{
+    marker::PhantomData,
     path::Path,
     path::PathBuf,
     sync::Arc,
@@ -69,7 +70,7 @@ use self::{
     queue::queue_generate,
     repo::{sled::SledRepo, Alias, DeleteToken, Hash, Repo, UploadId, UploadResult},
     serde_str::Serde,
-    store::{file_store::FileStore, object_store::ObjectStore, Identifier, Store},
+    store::{file_store::FileStore, object_store::ObjectStore, Store},
     stream::{empty, once, StreamLimit, StreamMap, StreamTimeout},
 };
 
@@ -93,7 +94,7 @@ async fn ensure_details<S: Store + 'static>(
     config: &Configuration,
     alias: &Alias,
 ) -> Result<Details, Error> {
-    let Some(identifier) = repo.identifier_from_alias(alias).await?.map(S::Identifier::from_arc).transpose()? else {
+    let Some(identifier) = repo.identifier_from_alias(alias).await? else {
         return Err(UploadError::MissingAlias.into());
     };
 
@@ -117,10 +118,10 @@ async fn ensure_details<S: Store + 'static>(
     }
 }
 
-struct Upload<S: Store + 'static>(Value<Session<S>>);
+struct Upload<S>(Value<Session>, PhantomData<S>);
 
 impl<S: Store + 'static> FormData for Upload<S> {
-    type Item = Session<S>;
+    type Item = Session;
     type Error = Error;
 
     fn form(req: &HttpRequest) -> Form<Self::Item, Self::Error> {
@@ -172,14 +173,14 @@ impl<S: Store + 'static> FormData for Upload<S> {
     }
 
     fn extract(value: Value<Self::Item>) -> Result<Self, Self::Error> {
-        Ok(Upload(value))
+        Ok(Upload(value, PhantomData))
     }
 }
 
-struct Import<S: Store + 'static>(Value<Session<S>>);
+struct Import<S: Store + 'static>(Value<Session>, PhantomData<S>);
 
 impl<S: Store + 'static> FormData for Import<S> {
-    type Item = Session<S>;
+    type Item = Session;
     type Error = Error;
 
     fn form(req: &actix_web::HttpRequest) -> Form<Self::Item, Self::Error> {
@@ -241,14 +242,14 @@ impl<S: Store + 'static> FormData for Import<S> {
     where
         Self: Sized,
     {
-        Ok(Import(value))
+        Ok(Import(value, PhantomData))
     }
 }
 
 /// Handle responding to successful uploads
 #[tracing::instrument(name = "Uploaded files", skip(value, repo, store, config))]
 async fn upload<S: Store + 'static>(
-    Multipart(Upload(value)): Multipart<Upload<S>>,
+    Multipart(Upload(value, _)): Multipart<Upload<S>>,
     repo: web::Data<ArcRepo>,
     store: web::Data<S>,
     config: web::Data<Configuration>,
@@ -259,7 +260,7 @@ async fn upload<S: Store + 'static>(
 /// Handle responding to successful uploads
 #[tracing::instrument(name = "Imported files", skip(value, repo, store, config))]
 async fn import<S: Store + 'static>(
-    Multipart(Import(value)): Multipart<Import<S>>,
+    Multipart(Import(value, _)): Multipart<Import<S>>,
     repo: web::Data<ArcRepo>,
     store: web::Data<S>,
     config: web::Data<Configuration>,
@@ -270,7 +271,7 @@ async fn import<S: Store + 'static>(
 /// Handle responding to successful uploads
 #[tracing::instrument(name = "Uploaded files", skip(value, repo, store, config))]
 async fn handle_upload<S: Store + 'static>(
-    value: Value<Session<S>>,
+    value: Value<Session>,
     repo: web::Data<ArcRepo>,
     store: web::Data<S>,
     config: web::Data<Configuration>,
@@ -312,10 +313,10 @@ async fn handle_upload<S: Store + 'static>(
     })))
 }
 
-struct BackgroundedUpload<S: Store + 'static>(Value<Backgrounded<S>>);
+struct BackgroundedUpload<S: Store + 'static>(Value<Backgrounded>, PhantomData<S>);
 
 impl<S: Store + 'static> FormData for BackgroundedUpload<S> {
-    type Item = Backgrounded<S>;
+    type Item = Backgrounded;
     type Error = Error;
 
     fn form(req: &actix_web::HttpRequest) -> Form<Self::Item, Self::Error> {
@@ -371,13 +372,13 @@ impl<S: Store + 'static> FormData for BackgroundedUpload<S> {
     where
         Self: Sized,
     {
-        Ok(BackgroundedUpload(value))
+        Ok(BackgroundedUpload(value, PhantomData))
     }
 }
 
 #[tracing::instrument(name = "Uploaded files", skip(value, repo))]
 async fn upload_backgrounded<S: Store>(
-    Multipart(BackgroundedUpload(value)): Multipart<BackgroundedUpload<S>>,
+    Multipart(BackgroundedUpload(value, _)): Multipart<BackgroundedUpload<S>>,
     repo: web::Data<ArcRepo>,
 ) -> Result<HttpResponse, Error> {
     let images = value
@@ -394,11 +395,7 @@ async fn upload_backgrounded<S: Store>(
 
     for image in &images {
         let upload_id = image.result.upload_id().expect("Upload ID exists");
-        let identifier = image
-            .result
-            .identifier()
-            .expect("Identifier exists")
-            .to_bytes()?;
+        let identifier = image.result.identifier().expect("Identifier exists");
 
         queue::queue_ingest(&repo, identifier, upload_id, None).await?;
 
@@ -560,10 +557,7 @@ async fn do_download_backgrounded<S: Store + 'static>(
     let backgrounded = Backgrounded::proxy((**repo).clone(), (**store).clone(), stream).await?;
 
     let upload_id = backgrounded.upload_id().expect("Upload ID exists");
-    let identifier = backgrounded
-        .identifier()
-        .expect("Identifier exists")
-        .to_bytes()?;
+    let identifier = backgrounded.identifier().expect("Identifier exists");
 
     queue::queue_ingest(&repo, identifier, upload_id, None).await?;
 
@@ -764,8 +758,6 @@ async fn process_details<S: Store>(
     let identifier = repo
         .variant_identifier(hash, thumbnail_string)
         .await?
-        .map(S::Identifier::from_arc)
-        .transpose()?
         .ok_or(UploadError::MissingAlias)?;
 
     let details = repo.details(&identifier).await?;
@@ -856,11 +848,7 @@ async fn process<S: Store + 'static>(
         .await?;
     }
 
-    let identifier_opt = repo
-        .variant_identifier(hash.clone(), path_string)
-        .await?
-        .map(S::Identifier::from_arc)
-        .transpose()?;
+    let identifier_opt = repo.variant_identifier(hash.clone(), path_string).await?;
 
     if let Some(identifier) = identifier_opt {
         let details = repo.details(&identifier).await?;
@@ -980,11 +968,7 @@ async fn process_head<S: Store + 'static>(
         .await?;
     }
 
-    let identifier_opt = repo
-        .variant_identifier(hash.clone(), path_string)
-        .await?
-        .map(S::Identifier::from_arc)
-        .transpose()?;
+    let identifier_opt = repo.variant_identifier(hash.clone(), path_string).await?;
 
     if let Some(identifier) = identifier_opt {
         let details = repo.details(&identifier).await?;
@@ -1047,11 +1031,7 @@ async fn process_backgrounded<S: Store>(
         return Ok(HttpResponse::BadRequest().finish());
     };
 
-    let identifier_opt = repo
-        .variant_identifier(hash.clone(), path_string)
-        .await?
-        .map(S::Identifier::from_arc)
-        .transpose()?;
+    let identifier_opt = repo.variant_identifier(hash.clone(), path_string).await?;
 
     if identifier_opt.is_some() {
         return Ok(HttpResponse::Accepted().finish());
@@ -1185,7 +1165,7 @@ async fn do_serve<S: Store + 'static>(
         (hash, alias, true)
     };
 
-    let Some(identifier) = repo.identifier(hash.clone()).await?.map(Identifier::from_arc).transpose()? else {
+    let Some(identifier) = repo.identifier(hash.clone()).await? else {
         tracing::warn!(
             "Original File identifier for hash {hash:?} is missing, queue cleanup task",
         );
@@ -1250,7 +1230,7 @@ async fn do_serve_head<S: Store + 'static>(
     store: web::Data<S>,
     config: web::Data<Configuration>,
 ) -> Result<HttpResponse, Error> {
-    let Some(identifier) = repo.identifier_from_alias(&alias).await?.map(S::Identifier::from_arc).transpose()? else {
+    let Some(identifier) = repo.identifier_from_alias(&alias).await? else {
         // Invalid alias
         return Ok(HttpResponse::NotFound().finish());
     };
@@ -1268,7 +1248,7 @@ async fn do_serve_head<S: Store + 'static>(
 
 async fn ranged_file_head_resp<S: Store + 'static>(
     store: &S,
-    identifier: S::Identifier,
+    identifier: Arc<str>,
     range: Option<web::Header<Range>>,
     details: Details,
 ) -> Result<HttpResponse, Error> {
@@ -1303,7 +1283,7 @@ async fn ranged_file_head_resp<S: Store + 'static>(
 
 async fn ranged_file_resp<S: Store + 'static>(
     store: &S,
-    identifier: S::Identifier,
+    identifier: Arc<str>,
     range: Option<web::Header<Range>>,
     details: Details,
     not_found: bool,
@@ -1555,7 +1535,7 @@ async fn identifier<S: Store>(
         }
     };
 
-    let Some(identifier) = repo.identifier_from_alias(&alias).await?.map(S::Identifier::from_arc).transpose()? else {
+    let Some(identifier) = repo.identifier_from_alias(&alias).await? else {
         // Invalid alias
         return Ok(HttpResponse::NotFound().json(serde_json::json!({
             "msg": "No identifiers associated with provided alias"
@@ -1564,7 +1544,7 @@ async fn identifier<S: Store>(
 
     Ok(HttpResponse::Ok().json(&serde_json::json!({
         "msg": "ok",
-        "identifier": identifier.string_repr(),
+        "identifier": identifier.as_ref(),
     })))
 }
 
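`Session` and `Backgrounded` are no longer generic, but the multipart form extractors above stay generic over the store so each store type keeps its own `FormData` impl; `PhantomData<S>` marks the otherwise-unused parameter. A self-contained sketch of that pattern with simplified stand-in types (not the real extractors):

```rust
use std::marker::PhantomData;

// Stand-ins for the real types; Session is no longer generic after this commit.
struct Session;
struct FileStore;
struct ObjectStore;

// The extractor keeps its store parameter only so the right impl is selected;
// PhantomData<S> records S without storing a value of that type.
struct Upload<S>(Vec<Session>, PhantomData<S>);

impl<S> Upload<S> {
    fn new(sessions: Vec<Session>) -> Self {
        Upload(sessions, PhantomData)
    }
}

fn main() {
    let _file_upload: Upload<FileStore> = Upload::new(vec![Session]);
    let _object_upload: Upload<ObjectStore> = Upload::new(vec![Session]);
}
```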
@@ -1,3 +1,5 @@
+use std::sync::Arc;
+
 use crate::{
     error_code::ErrorCode,
     formats::ProcessableFormat,
@@ -140,7 +142,7 @@ where
 
 pub(crate) async fn process_image_store_read<S: Store + 'static>(
     store: &S,
-    identifier: &S::Identifier,
+    identifier: &Arc<str>,
     args: Vec<String>,
     input_format: ProcessableFormat,
     format: ProcessableFormat,
@@ -1,6 +1,9 @@
 use std::{
     rc::Rc,
-    sync::atomic::{AtomicU64, Ordering},
+    sync::{
+        atomic::{AtomicU64, Ordering},
+        Arc,
+    },
     time::{Duration, Instant},
 };
 
@@ -8,7 +11,7 @@ use crate::{
     details::Details,
     error::{Error, UploadError},
     repo::{ArcRepo, Hash},
-    store::{Identifier, Store},
+    store::Store,
     stream::IntoStreamer,
 };
 
@@ -103,7 +106,7 @@ where
     }
 
     // Hashes are read in a consistent order
-    let mut stream = repo.hashes().await.into_streamer();
+    let mut stream = repo.hashes().into_streamer();
 
     let state = Rc::new(MigrateState {
         repo: repo.clone(),
@@ -169,7 +172,7 @@ where
     let current_index = index.fetch_add(1, Ordering::Relaxed);
 
     let original_identifier = match repo.identifier(hash.clone()).await {
-        Ok(Some(identifier)) => S1::Identifier::from_arc(identifier)?,
+        Ok(Some(identifier)) => identifier,
         Ok(None) => {
             tracing::warn!(
                 "Original File identifier for hash {hash:?} is missing, queue cleanup task",
@@ -214,8 +217,6 @@ where
     }
 
     if let Some(identifier) = repo.motion_identifier(hash.clone()).await? {
-        let identifier = S1::Identifier::from_arc(identifier)?;
-
         if !repo.is_migrated(&identifier).await? {
             match migrate_file(repo, from, to, &identifier, *skip_missing_files, *timeout).await {
                 Ok(new_identifier) => {
@@ -245,8 +246,6 @@ where
     }
 
     for (variant, identifier) in repo.variants(hash.clone()).await? {
-        let identifier = S1::Identifier::from_arc(identifier)?;
-
         if !repo.is_migrated(&identifier).await? {
             match migrate_file(repo, from, to, &identifier, *skip_missing_files, *timeout).await {
                 Ok(new_identifier) => {
@@ -339,10 +338,10 @@ async fn migrate_file<S1, S2>(
     repo: &ArcRepo,
     from: &S1,
     to: &S2,
-    identifier: &S1::Identifier,
+    identifier: &Arc<str>,
     skip_missing_files: bool,
     timeout: u64,
-) -> Result<S2::Identifier, MigrateError>
+) -> Result<Arc<str>, MigrateError>
 where
     S1: Store,
     S2: Store,
@@ -382,9 +381,9 @@ async fn do_migrate_file<S1, S2>(
     repo: &ArcRepo,
     from: &S1,
     to: &S2,
-    identifier: &S1::Identifier,
+    identifier: &Arc<str>,
     timeout: u64,
-) -> Result<S2::Identifier, MigrateError>
+) -> Result<Arc<str>, MigrateError>
 where
     S1: Store,
     S2: Store,
@@ -421,11 +420,7 @@ where
     Ok(new_identifier)
 }
 
-async fn migrate_details<I1, I2>(repo: &ArcRepo, from: &I1, to: &I2) -> Result<(), Error>
-where
-    I1: Identifier,
-    I2: Identifier,
-{
+async fn migrate_details(repo: &ArcRepo, from: &Arc<str>, to: &Arc<str>) -> Result<(), Error> {
     if let Some(details) = repo.details(from).await? {
         repo.relate_details(to, &details).await?;
         repo.cleanup_details(from).await?;
src/queue.rs (50 changed lines)
@@ -5,7 +5,7 @@ use crate::{
     formats::InputProcessableFormat,
     repo::{Alias, DeleteToken, FullRepo, Hash, JobId, UploadId},
     serde_str::Serde,
-    store::{Identifier, Store},
+    store::Store,
 };
 use base64::{prelude::BASE64_STANDARD, Engine};
 use std::{
@@ -55,7 +55,7 @@ enum Cleanup {
         hash: Hash,
     },
     Identifier {
-        identifier: Base64Bytes,
+        identifier: String,
     },
     Alias {
         alias: Serde<Alias>,
@@ -74,7 +74,7 @@ enum Cleanup {
 #[derive(Debug, serde::Deserialize, serde::Serialize)]
 enum Process {
     Ingest {
-        identifier: Base64Bytes,
+        identifier: String,
         upload_id: Serde<UploadId>,
         declared_alias: Option<Serde<Alias>>,
     },
@@ -91,7 +91,7 @@ pub(crate) async fn cleanup_alias(
     alias: Alias,
     token: DeleteToken,
 ) -> Result<(), Error> {
-    let job = serde_json::to_vec(&Cleanup::Alias {
+    let job = serde_json::to_string(&Cleanup::Alias {
         alias: Serde::new(alias),
         token: Serde::new(token),
     })
@@ -101,17 +101,17 @@ pub(crate) async fn cleanup_alias(
 }
 
 pub(crate) async fn cleanup_hash(repo: &Arc<dyn FullRepo>, hash: Hash) -> Result<(), Error> {
-    let job = serde_json::to_vec(&Cleanup::Hash { hash }).map_err(UploadError::PushJob)?;
+    let job = serde_json::to_string(&Cleanup::Hash { hash }).map_err(UploadError::PushJob)?;
     repo.push(CLEANUP_QUEUE, job.into()).await?;
     Ok(())
 }
 
-pub(crate) async fn cleanup_identifier<I: Identifier>(
+pub(crate) async fn cleanup_identifier(
     repo: &Arc<dyn FullRepo>,
-    identifier: I,
+    identifier: &Arc<str>,
 ) -> Result<(), Error> {
-    let job = serde_json::to_vec(&Cleanup::Identifier {
-        identifier: Base64Bytes(identifier.to_bytes()?),
+    let job = serde_json::to_string(&Cleanup::Identifier {
+        identifier: identifier.to_string(),
     })
     .map_err(UploadError::PushJob)?;
     repo.push(CLEANUP_QUEUE, job.into()).await?;
@@ -124,37 +124,37 @@ async fn cleanup_variants(
     variant: Option<String>,
 ) -> Result<(), Error> {
     let job =
-        serde_json::to_vec(&Cleanup::Variant { hash, variant }).map_err(UploadError::PushJob)?;
+        serde_json::to_string(&Cleanup::Variant { hash, variant }).map_err(UploadError::PushJob)?;
     repo.push(CLEANUP_QUEUE, job.into()).await?;
     Ok(())
 }
 
 pub(crate) async fn cleanup_outdated_proxies(repo: &Arc<dyn FullRepo>) -> Result<(), Error> {
-    let job = serde_json::to_vec(&Cleanup::OutdatedProxies).map_err(UploadError::PushJob)?;
+    let job = serde_json::to_string(&Cleanup::OutdatedProxies).map_err(UploadError::PushJob)?;
     repo.push(CLEANUP_QUEUE, job.into()).await?;
     Ok(())
 }
 
 pub(crate) async fn cleanup_outdated_variants(repo: &Arc<dyn FullRepo>) -> Result<(), Error> {
-    let job = serde_json::to_vec(&Cleanup::OutdatedVariants).map_err(UploadError::PushJob)?;
+    let job = serde_json::to_string(&Cleanup::OutdatedVariants).map_err(UploadError::PushJob)?;
     repo.push(CLEANUP_QUEUE, job.into()).await?;
     Ok(())
 }
 
 pub(crate) async fn cleanup_all_variants(repo: &Arc<dyn FullRepo>) -> Result<(), Error> {
-    let job = serde_json::to_vec(&Cleanup::AllVariants).map_err(UploadError::PushJob)?;
+    let job = serde_json::to_string(&Cleanup::AllVariants).map_err(UploadError::PushJob)?;
     repo.push(CLEANUP_QUEUE, job.into()).await?;
     Ok(())
 }
 
 pub(crate) async fn queue_ingest(
     repo: &Arc<dyn FullRepo>,
-    identifier: Vec<u8>,
+    identifier: &Arc<str>,
     upload_id: UploadId,
     declared_alias: Option<Alias>,
 ) -> Result<(), Error> {
-    let job = serde_json::to_vec(&Process::Ingest {
-        identifier: Base64Bytes(identifier),
+    let job = serde_json::to_string(&Process::Ingest {
+        identifier: identifier.to_string(),
         declared_alias: declared_alias.map(Serde::new),
         upload_id: Serde::new(upload_id),
     })
@@ -170,7 +170,7 @@ pub(crate) async fn queue_generate(
     process_path: PathBuf,
     process_args: Vec<String>,
 ) -> Result<(), Error> {
-    let job = serde_json::to_vec(&Process::Generate {
+    let job = serde_json::to_string(&Process::Generate {
         target_format,
         source: Serde::new(source),
         process_path,
@@ -220,7 +220,7 @@ async fn process_jobs<S, F>(
         &'a Arc<dyn FullRepo>,
         &'a S,
         &'a Configuration,
-        &'a [u8],
+        &'a str,
     ) -> LocalBoxFuture<'a, Result<(), Error>>
     + Copy,
 {
@@ -284,13 +284,13 @@ where
         &'a Arc<dyn FullRepo>,
         &'a S,
         &'a Configuration,
-        &'a [u8],
+        &'a str,
     ) -> LocalBoxFuture<'a, Result<(), Error>>
     + Copy,
 {
     loop {
         let fut = async {
-            let (job_id, bytes) = repo.pop(queue, worker_id).await?;
+            let (job_id, string) = repo.pop(queue, worker_id).await?;
 
             let span = tracing::info_span!("Running Job");
 
@@ -303,7 +303,7 @@ where
                 queue,
                 worker_id,
                 job_id,
-                (callback)(repo, store, config, bytes.as_ref()),
+                (callback)(repo, store, config, string.as_ref()),
             )
         })
         .instrument(span)
@@ -337,7 +337,7 @@ async fn process_image_jobs<S, F>(
         &'a S,
         &'a ProcessMap,
         &'a Configuration,
-        &'a [u8],
+        &'a str,
     ) -> LocalBoxFuture<'a, Result<(), Error>>
     + Copy,
 {
@@ -373,13 +373,13 @@ where
         &'a S,
         &'a ProcessMap,
         &'a Configuration,
-        &'a [u8],
+        &'a str,
     ) -> LocalBoxFuture<'a, Result<(), Error>>
     + Copy,
 {
     loop {
         let fut = async {
-            let (job_id, bytes) = repo.pop(queue, worker_id).await?;
+            let (job_id, string) = repo.pop(queue, worker_id).await?;
 
             let span = tracing::info_span!("Running Job");
 
@@ -392,7 +392,7 @@ where
                 queue,
                 worker_id,
                 job_id,
-                (callback)(repo, store, process_map, config, bytes.as_ref()),
+                (callback)(repo, store, process_map, config, string.as_ref()),
             )
         })
         .instrument(span)
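Queue jobs are now serialized with `serde_json::to_string` and carry the identifier as a plain `String` instead of base64-encoded bytes. A small round-trip sketch of the new encoding (the enum is trimmed to one variant for illustration; it is not the full pict-rs job type):

```rust
use std::sync::Arc;

#[derive(Debug, serde::Serialize, serde::Deserialize)]
enum Cleanup {
    Identifier { identifier: String },
}

fn main() -> Result<(), serde_json::Error> {
    let identifier: Arc<str> = Arc::from("some-object-key");

    // Enqueue side: the job body is a JSON string.
    let job = serde_json::to_string(&Cleanup::Identifier {
        identifier: identifier.to_string(),
    })?;

    // Worker side: parse the string back and rebuild the Arc<str>.
    match serde_json::from_str(&job)? {
        Cleanup::Identifier { identifier } => {
            let identifier: Arc<str> = Arc::from(identifier);
            println!("would clean up {identifier}");
        }
    }

    Ok(())
}
```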
@@ -1,10 +1,12 @@
+use std::sync::Arc;
+
 use crate::{
     config::Configuration,
     error::{Error, UploadError},
-    queue::{Base64Bytes, Cleanup, LocalBoxFuture},
+    queue::{Cleanup, LocalBoxFuture},
     repo::{Alias, ArcRepo, DeleteToken, Hash},
     serde_str::Serde,
-    store::{Identifier, Store},
+    store::Store,
     stream::IntoStreamer,
 };
 
@@ -12,18 +14,18 @@ pub(super) fn perform<'a, S>(
     repo: &'a ArcRepo,
     store: &'a S,
     configuration: &'a Configuration,
-    job: &'a [u8],
+    job: &'a str,
 ) -> LocalBoxFuture<'a, Result<(), Error>>
 where
     S: Store,
 {
     Box::pin(async move {
-        match serde_json::from_slice(job) {
+        match serde_json::from_str(job) {
             Ok(job) => match job {
                 Cleanup::Hash { hash: in_hash } => hash(repo, in_hash).await?,
                 Cleanup::Identifier {
-                    identifier: Base64Bytes(in_identifier),
-                } => identifier(repo, store, in_identifier).await?,
+                    identifier: in_identifier,
+                } => identifier(repo, store, Arc::from(in_identifier)).await?,
                 Cleanup::Alias {
                     alias: stored_alias,
                     token,
@@ -50,20 +52,18 @@ where
 }
 
 #[tracing::instrument(skip_all)]
-async fn identifier<S>(repo: &ArcRepo, store: &S, identifier: Vec<u8>) -> Result<(), Error>
+async fn identifier<S>(repo: &ArcRepo, store: &S, identifier: Arc<str>) -> Result<(), Error>
 where
     S: Store,
 {
-    let identifier = S::Identifier::from_bytes(identifier)?;
-
     let mut errors = Vec::new();
 
     if let Err(e) = store.remove(&identifier).await {
-        errors.push(e);
+        errors.push(UploadError::from(e));
     }
 
     if let Err(e) = repo.cleanup_details(&identifier).await {
-        errors.push(e);
+        errors.push(UploadError::from(e));
     }
 
     for error in errors {
@@ -100,7 +100,7 @@ async fn hash(repo: &ArcRepo, hash: Hash) -> Result<(), Error> {
     idents.extend(repo.motion_identifier(hash.clone()).await?);
 
     for identifier in idents {
-        let _ = super::cleanup_identifier(repo, identifier).await;
+        let _ = super::cleanup_identifier(repo, &identifier).await;
     }
 
     repo.cleanup_hash(hash).await?;
@@ -136,7 +136,7 @@ async fn alias(repo: &ArcRepo, alias: Alias, token: DeleteToken) -> Result<(), E
 
 #[tracing::instrument(skip_all)]
 async fn all_variants(repo: &ArcRepo) -> Result<(), Error> {
-    let mut hash_stream = repo.hashes().await.into_streamer();
+    let mut hash_stream = repo.hashes().into_streamer();
 
     while let Some(res) = hash_stream.next().await {
         let hash = res?;
@@ -193,7 +193,7 @@ async fn hash_variant(
         .variant_identifier(hash.clone(), target_variant.clone())
         .await?
     {
-        super::cleanup_identifier(repo, identifier).await?;
+        super::cleanup_identifier(repo, &identifier).await?;
     }
 
     repo.remove_variant(hash.clone(), target_variant.clone())
@@ -203,7 +203,7 @@ async fn hash_variant(
     for (variant, identifier) in repo.variants(hash.clone()).await? {
         repo.remove_variant(hash.clone(), variant.clone()).await?;
         repo.remove_variant_access(hash.clone(), variant).await?;
-        super::cleanup_identifier(repo, identifier).await?;
+        super::cleanup_identifier(repo, &identifier).await?;
     }
 }
 
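The cleanup handler above converts the job's `String` into an `Arc<str>` with `Arc::from`; the reverse direction when enqueueing is `to_string`. A tiny sketch of those conversions (each copies the string bytes once):

```rust
use std::sync::Arc;

fn main() {
    // Worker side: the deserialized job carries a String.
    let from_job: String = String::from("object-key");
    let identifier: Arc<str> = Arc::from(from_job); // String -> Arc<str>

    // Enqueue side: Arc<str> -> String for the serialized job body.
    let back: String = identifier.to_string();
    assert_eq!(&*identifier, back.as_str());
}
```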
|
@ -4,36 +4,36 @@ use crate::{
|
||||||
error::{Error, UploadError},
|
error::{Error, UploadError},
|
||||||
formats::InputProcessableFormat,
|
formats::InputProcessableFormat,
|
||||||
ingest::Session,
|
ingest::Session,
|
||||||
queue::{Base64Bytes, LocalBoxFuture, Process},
|
queue::{LocalBoxFuture, Process},
|
||||||
repo::{Alias, ArcRepo, UploadId, UploadResult},
|
repo::{Alias, ArcRepo, UploadId, UploadResult},
|
||||||
serde_str::Serde,
|
serde_str::Serde,
|
||||||
store::{Identifier, Store},
|
store::Store,
|
||||||
stream::StreamMap,
|
stream::StreamMap,
|
||||||
};
|
};
|
||||||
use std::path::PathBuf;
|
use std::{path::PathBuf, sync::Arc};
|
||||||
|
|
||||||
pub(super) fn perform<'a, S>(
|
pub(super) fn perform<'a, S>(
|
||||||
repo: &'a ArcRepo,
|
repo: &'a ArcRepo,
|
||||||
store: &'a S,
|
store: &'a S,
|
||||||
process_map: &'a ProcessMap,
|
process_map: &'a ProcessMap,
|
||||||
config: &'a Configuration,
|
config: &'a Configuration,
|
||||||
job: &'a [u8],
|
job: &'a str,
|
||||||
) -> LocalBoxFuture<'a, Result<(), Error>>
|
) -> LocalBoxFuture<'a, Result<(), Error>>
|
||||||
where
|
where
|
||||||
S: Store + 'static,
|
S: Store + 'static,
|
||||||
{
|
{
|
||||||
Box::pin(async move {
|
Box::pin(async move {
|
||||||
match serde_json::from_slice(job) {
|
match serde_json::from_str(job) {
|
||||||
Ok(job) => match job {
|
Ok(job) => match job {
|
||||||
Process::Ingest {
|
Process::Ingest {
|
||||||
identifier: Base64Bytes(identifier),
|
identifier,
|
||||||
upload_id,
|
upload_id,
|
||||||
declared_alias,
|
declared_alias,
|
||||||
} => {
|
} => {
|
||||||
process_ingest(
|
process_ingest(
|
||||||
repo,
|
repo,
|
||||||
store,
|
store,
|
||||||
identifier,
|
Arc::from(identifier),
|
||||||
Serde::into_inner(upload_id),
|
Serde::into_inner(upload_id),
|
||||||
declared_alias.map(Serde::into_inner),
|
declared_alias.map(Serde::into_inner),
|
||||||
&config.media,
|
&config.media,
|
||||||
|
@ -72,7 +72,7 @@ where
|
||||||
async fn process_ingest<S>(
|
async fn process_ingest<S>(
|
||||||
repo: &ArcRepo,
|
repo: &ArcRepo,
|
||||||
store: &S,
|
store: &S,
|
||||||
unprocessed_identifier: Vec<u8>,
|
unprocessed_identifier: Arc<str>,
|
||||||
upload_id: UploadId,
|
upload_id: UploadId,
|
||||||
declared_alias: Option<Alias>,
|
declared_alias: Option<Alias>,
|
||||||
media: &crate::config::Media,
|
media: &crate::config::Media,
|
||||||
|
@ -81,8 +81,6 @@ where
|
||||||
S: Store + 'static,
|
S: Store + 'static,
|
||||||
{
|
{
|
||||||
let fut = async {
|
let fut = async {
|
||||||
let unprocessed_identifier = S::Identifier::from_bytes(unprocessed_identifier)?;
|
|
||||||
|
|
||||||
let ident = unprocessed_identifier.clone();
|
let ident = unprocessed_identifier.clone();
|
||||||
let store2 = store.clone();
|
let store2 = store.clone();
|
||||||
let repo = repo.clone();
|
let repo = repo.clone();
|
||||||
|
@ -97,7 +95,7 @@ where
|
||||||
let session =
|
let session =
|
||||||
crate::ingest::ingest(&repo, &store2, stream, declared_alias, &media).await?;
|
crate::ingest::ingest(&repo, &store2, stream, declared_alias, &media).await?;
|
||||||
|
|
||||||
Ok(session) as Result<Session<S>, Error>
|
Ok(session) as Result<Session, Error>
|
||||||
})
|
})
|
||||||
.await;
|
.await;
|
||||||
|
|
||||||
|
|
|
@@ -1,3 +1,5 @@
+use std::sync::Arc;
+
 use crate::{
     error::{Error, UploadError},
     store::Store,
@@ -26,7 +28,7 @@ pub(crate) fn chop_bytes(
 pub(crate) async fn chop_store<S: Store>(
     byte_range: &ByteRangeSpec,
     store: &S,
-    identifier: &S::Identifier,
+    identifier: &Arc<str>,
     length: u64,
 ) -> Result<impl Stream<Item = std::io::Result<Bytes>>, Error> {
     if let Some((start, end)) = byte_range.to_satisfiable_range(length) {
src/repo.rs (185 changed lines)
@ -2,7 +2,6 @@ use crate::{
|
||||||
config,
|
config,
|
||||||
details::Details,
|
details::Details,
|
||||||
error_code::{ErrorCode, OwnedErrorCode},
|
error_code::{ErrorCode, OwnedErrorCode},
|
||||||
store::{Identifier, StoreError},
|
|
||||||
stream::LocalBoxStream,
|
stream::LocalBoxStream,
|
||||||
};
|
};
|
||||||
use base64::Engine;
|
use base64::Engine;
|
||||||
|
@ -86,6 +85,7 @@ impl RepoError {
|
||||||
pub(crate) const fn error_code(&self) -> ErrorCode {
|
pub(crate) const fn error_code(&self) -> ErrorCode {
|
||||||
match self {
|
match self {
|
||||||
Self::SledError(e) => e.error_code(),
|
Self::SledError(e) => e.error_code(),
|
||||||
|
Self::PostgresError(e) => e.error_code(),
|
||||||
Self::AlreadyClaimed => ErrorCode::ALREADY_CLAIMED,
|
Self::AlreadyClaimed => ErrorCode::ALREADY_CLAIMED,
|
||||||
Self::Canceled => ErrorCode::PANIC,
|
Self::Canceled => ErrorCode::PANIC,
|
||||||
}
|
}
|
||||||
|
@ -111,7 +111,7 @@ pub(crate) trait FullRepo:
|
||||||
async fn health_check(&self) -> Result<(), RepoError>;
|
async fn health_check(&self) -> Result<(), RepoError>;
|
||||||
|
|
||||||
#[tracing::instrument(skip(self))]
|
#[tracing::instrument(skip(self))]
|
||||||
async fn identifier_from_alias(&self, alias: &Alias) -> Result<Option<Arc<[u8]>>, RepoError> {
|
async fn identifier_from_alias(&self, alias: &Alias) -> Result<Option<Arc<str>>, RepoError> {
|
||||||
let Some(hash) = self.hash(alias).await? else {
|
let Some(hash) = self.hash(alias).await? else {
|
||||||
return Ok(None);
|
return Ok(None);
|
||||||
};
|
};
|
||||||
|
@ -132,7 +132,7 @@ pub(crate) trait FullRepo:
|
||||||
async fn still_identifier_from_alias(
|
async fn still_identifier_from_alias(
|
||||||
&self,
|
&self,
|
||||||
alias: &Alias,
|
alias: &Alias,
|
||||||
) -> Result<Option<Arc<[u8]>>, StoreError> {
|
) -> Result<Option<Arc<str>>, RepoError> {
|
||||||
let Some(hash) = self.hash(alias).await? else {
|
let Some(hash) = self.hash(alias).await? else {
|
||||||
return Ok(None);
|
return Ok(None);
|
||||||
};
|
};
|
||||||
|
@ -372,13 +372,13 @@ impl JobId {
|
||||||
|
|
||||||
#[async_trait::async_trait(?Send)]
|
#[async_trait::async_trait(?Send)]
|
||||||
pub(crate) trait QueueRepo: BaseRepo {
|
pub(crate) trait QueueRepo: BaseRepo {
|
||||||
async fn push(&self, queue: &'static str, job: Arc<[u8]>) -> Result<JobId, RepoError>;
|
async fn push(&self, queue: &'static str, job: Arc<str>) -> Result<JobId, RepoError>;
|
||||||
|
|
||||||
async fn pop(
|
async fn pop(
|
||||||
&self,
|
&self,
|
||||||
queue: &'static str,
|
queue: &'static str,
|
||||||
worker_id: Uuid,
|
worker_id: Uuid,
|
||||||
) -> Result<(JobId, Arc<[u8]>), RepoError>;
|
) -> Result<(JobId, Arc<str>), RepoError>;
|
||||||
|
|
||||||
async fn heartbeat(
|
async fn heartbeat(
|
||||||
&self,
|
&self,
|
||||||
|
@ -400,7 +400,7 @@ impl<T> QueueRepo for Arc<T>
|
||||||
where
|
where
|
||||||
T: QueueRepo,
|
T: QueueRepo,
|
||||||
{
|
{
|
||||||
async fn push(&self, queue: &'static str, job: Arc<[u8]>) -> Result<JobId, RepoError> {
|
async fn push(&self, queue: &'static str, job: Arc<str>) -> Result<JobId, RepoError> {
|
||||||
T::push(self, queue, job).await
|
T::push(self, queue, job).await
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -408,7 +408,7 @@ where
|
||||||
&self,
|
&self,
|
||||||
queue: &'static str,
|
queue: &'static str,
|
||||||
worker_id: Uuid,
|
worker_id: Uuid,
|
||||||
) -> Result<(JobId, Arc<[u8]>), RepoError> {
|
) -> Result<(JobId, Arc<str>), RepoError> {
|
||||||
T::pop(self, queue, worker_id).await
|
T::pop(self, queue, worker_id).await
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -460,12 +460,12 @@ where
|
||||||
pub(crate) trait DetailsRepo: BaseRepo {
|
pub(crate) trait DetailsRepo: BaseRepo {
|
||||||
async fn relate_details(
|
async fn relate_details(
|
||||||
&self,
|
&self,
|
||||||
identifier: &dyn Identifier,
|
identifier: &Arc<str>,
|
||||||
details: &Details,
|
details: &Details,
|
||||||
) -> Result<(), StoreError>;
|
) -> Result<(), RepoError>;
|
||||||
async fn details(&self, identifier: &dyn Identifier) -> Result<Option<Details>, StoreError>;
|
async fn details(&self, identifier: &Arc<str>) -> Result<Option<Details>, RepoError>;
|
||||||
|
|
||||||
async fn cleanup_details(&self, identifier: &dyn Identifier) -> Result<(), StoreError>;
|
async fn cleanup_details(&self, identifier: &Arc<str>) -> Result<(), RepoError>;
|
||||||
}
|
}
|
||||||
|
|
||||||
#[async_trait::async_trait(?Send)]
|
#[async_trait::async_trait(?Send)]
|
||||||
|
@ -475,17 +475,17 @@ where
|
||||||
{
|
{
|
||||||
async fn relate_details(
|
async fn relate_details(
|
||||||
&self,
|
&self,
|
||||||
identifier: &dyn Identifier,
|
identifier: &Arc<str>,
|
||||||
details: &Details,
|
details: &Details,
|
||||||
) -> Result<(), StoreError> {
|
) -> Result<(), RepoError> {
|
||||||
T::relate_details(self, identifier, details).await
|
T::relate_details(self, identifier, details).await
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn details(&self, identifier: &dyn Identifier) -> Result<Option<Details>, StoreError> {
|
async fn details(&self, identifier: &Arc<str>) -> Result<Option<Details>, RepoError> {
|
||||||
T::details(self, identifier).await
|
T::details(self, identifier).await
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn cleanup_details(&self, identifier: &dyn Identifier) -> Result<(), StoreError> {
|
async fn cleanup_details(&self, identifier: &Arc<str>) -> Result<(), RepoError> {
|
||||||
T::cleanup_details(self, identifier).await
|
T::cleanup_details(self, identifier).await
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -496,11 +496,11 @@ pub(crate) trait StoreMigrationRepo: BaseRepo {
|
||||||
|
|
||||||
async fn mark_migrated(
|
async fn mark_migrated(
|
||||||
&self,
|
&self,
|
||||||
old_identifier: &dyn Identifier,
|
old_identifier: &Arc<str>,
|
||||||
new_identifier: &dyn Identifier,
|
new_identifier: &Arc<str>,
|
||||||
) -> Result<(), StoreError>;
|
) -> Result<(), RepoError>;
|
||||||
|
|
||||||
async fn is_migrated(&self, identifier: &dyn Identifier) -> Result<bool, StoreError>;
|
async fn is_migrated(&self, identifier: &Arc<str>) -> Result<bool, RepoError>;
|
||||||
|
|
||||||
async fn clear(&self) -> Result<(), RepoError>;
|
async fn clear(&self) -> Result<(), RepoError>;
|
||||||
}
|
}
|
||||||
|
@ -516,13 +516,13 @@ where
|
||||||
|
|
||||||
async fn mark_migrated(
|
async fn mark_migrated(
|
||||||
&self,
|
&self,
|
||||||
old_identifier: &dyn Identifier,
|
old_identifier: &Arc<str>,
|
||||||
new_identifier: &dyn Identifier,
|
new_identifier: &Arc<str>,
|
||||||
) -> Result<(), StoreError> {
|
) -> Result<(), RepoError> {
|
||||||
T::mark_migrated(self, old_identifier, new_identifier).await
|
T::mark_migrated(self, old_identifier, new_identifier).await
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn is_migrated(&self, identifier: &dyn Identifier) -> Result<bool, StoreError> {
|
async fn is_migrated(&self, identifier: &Arc<str>) -> Result<bool, RepoError> {
|
||||||
T::is_migrated(self, identifier).await
|
T::is_migrated(self, identifier).await
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -569,12 +569,87 @@ impl HashPage {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub(crate) struct HashStream {
|
||||||
|
repo: Option<ArcRepo>,
|
||||||
|
page_future:
|
||||||
|
Option<std::pin::Pin<Box<dyn std::future::Future<Output = Result<HashPage, RepoError>>>>>,
|
||||||
|
page: Option<HashPage>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl futures_core::Stream for HashStream {
|
||||||
|
type Item = Result<Hash, RepoError>;
|
||||||
|
|
||||||
|
fn poll_next(
|
||||||
|
self: std::pin::Pin<&mut Self>,
|
||||||
|
cx: &mut std::task::Context<'_>,
|
||||||
|
) -> std::task::Poll<Option<Self::Item>> {
|
||||||
|
let this = self.get_mut();
|
||||||
|
|
||||||
|
loop {
|
||||||
|
let Some(repo) = &this.repo else {
|
||||||
|
return std::task::Poll::Ready(None);
|
||||||
|
};
|
||||||
|
|
||||||
|
let slug = if let Some(page) = &mut this.page {
|
||||||
|
// popping last in page is fine - we reversed them
|
||||||
|
if let Some(hash) = page.hashes.pop() {
|
||||||
|
return std::task::Poll::Ready(Some(Ok(hash)));
|
||||||
|
}
|
||||||
|
|
||||||
|
let slug = page.next();
|
||||||
|
this.page.take();
|
||||||
|
|
||||||
|
if let Some(slug) = slug {
|
||||||
|
Some(slug)
|
||||||
|
} else {
|
||||||
|
this.repo.take();
|
||||||
|
return std::task::Poll::Ready(None);
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
};
|
||||||
|
|
||||||
|
if let Some(page_future) = &mut this.page_future {
|
||||||
|
let res = std::task::ready!(page_future.as_mut().poll(cx));
|
||||||
|
|
||||||
|
this.page_future.take();
|
||||||
|
|
||||||
|
match res {
|
||||||
|
Ok(mut page) => {
|
||||||
|
// reverse because we use `pop` to fetch next
|
||||||
|
page.hashes.reverse();
|
||||||
|
|
||||||
|
this.page = Some(page);
|
||||||
|
}
|
||||||
|
Err(e) => {
|
||||||
|
this.repo.take();
|
||||||
|
|
||||||
|
return std::task::Poll::Ready(Some(Err(e)));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
let repo = repo.clone();
|
||||||
|
|
||||||
|
this.page_future = Some(Box::pin(async move { repo.hash_page(slug, 100).await }));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl dyn FullRepo {
|
||||||
|
pub(crate) fn hashes(self: &Arc<Self>) -> HashStream {
|
||||||
|
HashStream {
|
||||||
|
repo: Some(self.clone()),
|
||||||
|
page_future: None,
|
||||||
|
page: None,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
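
The HashStream introduced above replaces the trait-level streaming hashes() method: it buffers one HashPage at a time (reversed so pop yields hashes in order) and only requests the next page once the buffer is drained, calling hash_page with a page size of 100. A minimal consumer sketch, assuming the crate's ArcRepo alias, Hash and RepoError types, and the same into_streamer()/next() adapter that the repo migration code later in this commit uses:

// Sketch only; mirrors how migrate_repo walks the stream elsewhere in this commit.
async fn count_hashes(repo: &ArcRepo) -> Result<u64, RepoError> {
    let mut hash_stream = repo.hashes().into_streamer();

    let mut count = 0;
    while let Some(res) = hash_stream.next().await {
        let _hash: Hash = res?;
        count += 1;
    }

    Ok(count)
}
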
||||||
#[async_trait::async_trait(?Send)]
|
#[async_trait::async_trait(?Send)]
|
||||||
pub(crate) trait HashRepo: BaseRepo {
|
pub(crate) trait HashRepo: BaseRepo {
|
||||||
async fn size(&self) -> Result<u64, RepoError>;
|
async fn size(&self) -> Result<u64, RepoError>;
|
||||||
|
|
||||||
async fn hashes(&self) -> LocalBoxStream<'static, Result<Hash, RepoError>>;
|
|
||||||
|
|
||||||
async fn hash_page(&self, slug: Option<String>, limit: usize) -> Result<HashPage, RepoError> {
|
async fn hash_page(&self, slug: Option<String>, limit: usize) -> Result<HashPage, RepoError> {
|
||||||
let hash = slug.as_deref().and_then(hash_from_slug);
|
let hash = slug.as_deref().and_then(hash_from_slug);
|
||||||
|
|
||||||
|
@ -604,8 +679,8 @@ pub(crate) trait HashRepo: BaseRepo {
|
||||||
async fn create_hash(
|
async fn create_hash(
|
||||||
&self,
|
&self,
|
||||||
hash: Hash,
|
hash: Hash,
|
||||||
identifier: &dyn Identifier,
|
identifier: &Arc<str>,
|
||||||
) -> Result<Result<(), HashAlreadyExists>, StoreError> {
|
) -> Result<Result<(), HashAlreadyExists>, RepoError> {
|
||||||
self.create_hash_with_timestamp(hash, identifier, time::OffsetDateTime::now_utc())
|
self.create_hash_with_timestamp(hash, identifier, time::OffsetDateTime::now_utc())
|
||||||
.await
|
.await
|
||||||
}
|
}
|
||||||
|
@ -613,38 +688,34 @@ pub(crate) trait HashRepo: BaseRepo {
|
||||||
async fn create_hash_with_timestamp(
|
async fn create_hash_with_timestamp(
|
||||||
&self,
|
&self,
|
||||||
hash: Hash,
|
hash: Hash,
|
||||||
identifier: &dyn Identifier,
|
identifier: &Arc<str>,
|
||||||
timestamp: time::OffsetDateTime,
|
timestamp: time::OffsetDateTime,
|
||||||
) -> Result<Result<(), HashAlreadyExists>, StoreError>;
|
) -> Result<Result<(), HashAlreadyExists>, RepoError>;
|
||||||
|
|
||||||
async fn update_identifier(
|
async fn update_identifier(&self, hash: Hash, identifier: &Arc<str>) -> Result<(), RepoError>;
|
||||||
&self,
|
|
||||||
hash: Hash,
|
|
||||||
identifier: &dyn Identifier,
|
|
||||||
) -> Result<(), StoreError>;
|
|
||||||
|
|
||||||
async fn identifier(&self, hash: Hash) -> Result<Option<Arc<[u8]>>, RepoError>;
|
async fn identifier(&self, hash: Hash) -> Result<Option<Arc<str>>, RepoError>;
|
||||||
|
|
||||||
async fn relate_variant_identifier(
|
async fn relate_variant_identifier(
|
||||||
&self,
|
&self,
|
||||||
hash: Hash,
|
hash: Hash,
|
||||||
variant: String,
|
variant: String,
|
||||||
identifier: &dyn Identifier,
|
identifier: &Arc<str>,
|
||||||
) -> Result<Result<(), VariantAlreadyExists>, StoreError>;
|
) -> Result<Result<(), VariantAlreadyExists>, RepoError>;
|
||||||
async fn variant_identifier(
|
async fn variant_identifier(
|
||||||
&self,
|
&self,
|
||||||
hash: Hash,
|
hash: Hash,
|
||||||
variant: String,
|
variant: String,
|
||||||
) -> Result<Option<Arc<[u8]>>, RepoError>;
|
) -> Result<Option<Arc<str>>, RepoError>;
|
||||||
async fn variants(&self, hash: Hash) -> Result<Vec<(String, Arc<[u8]>)>, RepoError>;
|
async fn variants(&self, hash: Hash) -> Result<Vec<(String, Arc<str>)>, RepoError>;
|
||||||
async fn remove_variant(&self, hash: Hash, variant: String) -> Result<(), RepoError>;
|
async fn remove_variant(&self, hash: Hash, variant: String) -> Result<(), RepoError>;
|
||||||
|
|
||||||
async fn relate_motion_identifier(
|
async fn relate_motion_identifier(
|
||||||
&self,
|
&self,
|
||||||
hash: Hash,
|
hash: Hash,
|
||||||
identifier: &dyn Identifier,
|
identifier: &Arc<str>,
|
||||||
) -> Result<(), StoreError>;
|
) -> Result<(), RepoError>;
|
||||||
async fn motion_identifier(&self, hash: Hash) -> Result<Option<Arc<[u8]>>, RepoError>;
|
async fn motion_identifier(&self, hash: Hash) -> Result<Option<Arc<str>>, RepoError>;
|
||||||
|
|
||||||
async fn cleanup_hash(&self, hash: Hash) -> Result<(), RepoError>;
|
async fn cleanup_hash(&self, hash: Hash) -> Result<(), RepoError>;
|
||||||
}
|
}
|
||||||
|
@ -658,10 +729,6 @@ where
|
||||||
T::size(self).await
|
T::size(self).await
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn hashes(&self) -> LocalBoxStream<'static, Result<Hash, RepoError>> {
|
|
||||||
T::hashes(self).await
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn bound(&self, hash: Hash) -> Result<Option<OrderedHash>, RepoError> {
|
async fn bound(&self, hash: Hash) -> Result<Option<OrderedHash>, RepoError> {
|
||||||
T::bound(self, hash).await
|
T::bound(self, hash).await
|
||||||
}
|
}
|
||||||
|
@ -685,21 +752,17 @@ where
|
||||||
async fn create_hash_with_timestamp(
|
async fn create_hash_with_timestamp(
|
||||||
&self,
|
&self,
|
||||||
hash: Hash,
|
hash: Hash,
|
||||||
identifier: &dyn Identifier,
|
identifier: &Arc<str>,
|
||||||
timestamp: time::OffsetDateTime,
|
timestamp: time::OffsetDateTime,
|
||||||
) -> Result<Result<(), HashAlreadyExists>, StoreError> {
|
) -> Result<Result<(), HashAlreadyExists>, RepoError> {
|
||||||
T::create_hash_with_timestamp(self, hash, identifier, timestamp).await
|
T::create_hash_with_timestamp(self, hash, identifier, timestamp).await
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn update_identifier(
|
async fn update_identifier(&self, hash: Hash, identifier: &Arc<str>) -> Result<(), RepoError> {
|
||||||
&self,
|
|
||||||
hash: Hash,
|
|
||||||
identifier: &dyn Identifier,
|
|
||||||
) -> Result<(), StoreError> {
|
|
||||||
T::update_identifier(self, hash, identifier).await
|
T::update_identifier(self, hash, identifier).await
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn identifier(&self, hash: Hash) -> Result<Option<Arc<[u8]>>, RepoError> {
|
async fn identifier(&self, hash: Hash) -> Result<Option<Arc<str>>, RepoError> {
|
||||||
T::identifier(self, hash).await
|
T::identifier(self, hash).await
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -707,8 +770,8 @@ where
|
||||||
&self,
|
&self,
|
||||||
hash: Hash,
|
hash: Hash,
|
||||||
variant: String,
|
variant: String,
|
||||||
identifier: &dyn Identifier,
|
identifier: &Arc<str>,
|
||||||
) -> Result<Result<(), VariantAlreadyExists>, StoreError> {
|
) -> Result<Result<(), VariantAlreadyExists>, RepoError> {
|
||||||
T::relate_variant_identifier(self, hash, variant, identifier).await
|
T::relate_variant_identifier(self, hash, variant, identifier).await
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -716,11 +779,11 @@ where
|
||||||
&self,
|
&self,
|
||||||
hash: Hash,
|
hash: Hash,
|
||||||
variant: String,
|
variant: String,
|
||||||
) -> Result<Option<Arc<[u8]>>, RepoError> {
|
) -> Result<Option<Arc<str>>, RepoError> {
|
||||||
T::variant_identifier(self, hash, variant).await
|
T::variant_identifier(self, hash, variant).await
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn variants(&self, hash: Hash) -> Result<Vec<(String, Arc<[u8]>)>, RepoError> {
|
async fn variants(&self, hash: Hash) -> Result<Vec<(String, Arc<str>)>, RepoError> {
|
||||||
T::variants(self, hash).await
|
T::variants(self, hash).await
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -731,12 +794,12 @@ where
|
||||||
async fn relate_motion_identifier(
|
async fn relate_motion_identifier(
|
||||||
&self,
|
&self,
|
||||||
hash: Hash,
|
hash: Hash,
|
||||||
identifier: &dyn Identifier,
|
identifier: &Arc<str>,
|
||||||
) -> Result<(), StoreError> {
|
) -> Result<(), RepoError> {
|
||||||
T::relate_motion_identifier(self, hash, identifier).await
|
T::relate_motion_identifier(self, hash, identifier).await
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn motion_identifier(&self, hash: Hash) -> Result<Option<Arc<[u8]>>, RepoError> {
|
async fn motion_identifier(&self, hash: Hash) -> Result<Option<Arc<str>>, RepoError> {
|
||||||
T::motion_identifier(self, hash).await
|
T::motion_identifier(self, hash).await
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -1,13 +1,42 @@
|
||||||
|
use diesel::{backend::Backend, sql_types::VarChar, AsExpression, FromSqlRow};
|
||||||
|
|
||||||
use crate::formats::InternalFormat;
|
use crate::formats::InternalFormat;
|
||||||
use std::sync::Arc;
|
use std::sync::Arc;
|
||||||
|
|
||||||
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
|
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, AsExpression, FromSqlRow)]
|
||||||
|
#[diesel(sql_type = VarChar)]
|
||||||
pub(crate) struct Hash {
|
pub(crate) struct Hash {
|
||||||
hash: Arc<[u8; 32]>,
|
hash: Arc<[u8; 32]>,
|
||||||
size: u64,
|
size: u64,
|
||||||
format: InternalFormat,
|
format: InternalFormat,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
impl diesel::serialize::ToSql<VarChar, diesel::pg::Pg> for Hash {
|
||||||
|
fn to_sql<'b>(
|
||||||
|
&'b self,
|
||||||
|
out: &mut diesel::serialize::Output<'b, '_, diesel::pg::Pg>,
|
||||||
|
) -> diesel::serialize::Result {
|
||||||
|
let s = self.to_base64();
|
||||||
|
|
||||||
|
<String as diesel::serialize::ToSql<VarChar, diesel::pg::Pg>>::to_sql(
|
||||||
|
&s,
|
||||||
|
&mut out.reborrow(),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<B> diesel::deserialize::FromSql<VarChar, B> for Hash
|
||||||
|
where
|
||||||
|
B: Backend,
|
||||||
|
String: diesel::deserialize::FromSql<VarChar, B>,
|
||||||
|
{
|
||||||
|
fn from_sql(bytes: <B as Backend>::RawValue<'_>) -> diesel::deserialize::Result<Self> {
|
||||||
|
let s = String::from_sql(bytes)?;
|
||||||
|
|
||||||
|
Self::from_base64(s).ok_or_else(|| "Invalid base64 hash".to_string().into())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
impl Hash {
|
impl Hash {
|
||||||
pub(crate) fn new(hash: [u8; 32], size: u64, format: InternalFormat) -> Self {
|
pub(crate) fn new(hash: [u8; 32], size: u64, format: InternalFormat) -> Self {
|
||||||
Self {
|
Self {
|
||||||
|
@ -30,6 +59,22 @@ impl Hash {
|
||||||
hex::encode(self.to_bytes())
|
hex::encode(self.to_bytes())
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub(crate) fn to_base64(&self) -> String {
|
||||||
|
use base64::Engine;
|
||||||
|
|
||||||
|
base64::engine::general_purpose::STANDARD.encode(self.to_bytes())
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn from_base64(input: String) -> Option<Self> {
|
||||||
|
use base64::Engine;
|
||||||
|
|
||||||
|
let bytes = base64::engine::general_purpose::STANDARD
|
||||||
|
.decode(input)
|
||||||
|
.ok()?;
|
||||||
|
|
||||||
|
Self::from_bytes(&bytes)
|
||||||
|
}
|
||||||
|
|
||||||
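
Because the hashes table's primary key switches from Bytea to Text in the migrations and schema further down, Hash now crosses the Postgres boundary as base64 text: ToSql encodes with to_base64 and FromSql decodes with from_base64. A small sketch of the round-trip property those two impls rely on, assuming only the methods shown in this file:

// Sketch: encoding then decoding a Hash must give the value back.
fn base64_round_trip(hash: &Hash) {
    let encoded: String = hash.to_base64();

    // from_base64 returns None for anything that is not valid base64 or not a
    // valid serialized hash, so a freshly encoded value must decode successfully.
    let decoded = Hash::from_base64(encoded).expect("freshly encoded hash decodes");

    assert!(decoded == *hash);
}
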
pub(super) fn to_bytes(&self) -> Vec<u8> {
|
pub(super) fn to_bytes(&self) -> Vec<u8> {
|
||||||
let format_byte = self.format.to_byte();
|
let format_byte = self.format.to_byte();
|
||||||
|
|
||||||
|
|
|
@ -1,3 +1,5 @@
|
||||||
|
use std::sync::Arc;
|
||||||
|
|
||||||
use tokio::task::JoinSet;
|
use tokio::task::JoinSet;
|
||||||
|
|
||||||
use crate::{
|
use crate::{
|
||||||
|
@ -33,7 +35,7 @@ pub(crate) async fn migrate_repo(old_repo: ArcRepo, new_repo: ArcRepo) -> Result
|
||||||
tracing::warn!("Checks complete, migrating repo");
|
tracing::warn!("Checks complete, migrating repo");
|
||||||
tracing::warn!("{total_size} hashes will be migrated");
|
tracing::warn!("{total_size} hashes will be migrated");
|
||||||
|
|
||||||
let mut hash_stream = old_repo.hashes().await.into_streamer();
|
let mut hash_stream = old_repo.hashes().into_streamer();
|
||||||
|
|
||||||
let mut index = 0;
|
let mut index = 0;
|
||||||
while let Some(res) = hash_stream.next().await {
|
while let Some(res) = hash_stream.next().await {
|
||||||
|
@ -266,7 +268,7 @@ async fn do_migrate_hash_04<S: Store>(
|
||||||
config: &Configuration,
|
config: &Configuration,
|
||||||
old_hash: sled::IVec,
|
old_hash: sled::IVec,
|
||||||
) -> Result<(), Error> {
|
) -> Result<(), Error> {
|
||||||
let Some(identifier) = old_repo.identifier::<S::Identifier>(old_hash.clone()).await? else {
|
let Some(identifier) = old_repo.identifier(old_hash.clone()).await? else {
|
||||||
tracing::warn!("Skipping hash {}, no identifier", hex::encode(&old_hash));
|
tracing::warn!("Skipping hash {}, no identifier", hex::encode(&old_hash));
|
||||||
return Ok(());
|
return Ok(());
|
||||||
};
|
};
|
||||||
|
@ -276,10 +278,8 @@ async fn do_migrate_hash_04<S: Store>(
|
||||||
let hash_details = set_details(old_repo, new_repo, store, config, &identifier).await?;
|
let hash_details = set_details(old_repo, new_repo, store, config, &identifier).await?;
|
||||||
|
|
||||||
let aliases = old_repo.aliases_for_hash(old_hash.clone()).await?;
|
let aliases = old_repo.aliases_for_hash(old_hash.clone()).await?;
|
||||||
let variants = old_repo.variants::<S::Identifier>(old_hash.clone()).await?;
|
let variants = old_repo.variants(old_hash.clone()).await?;
|
||||||
let motion_identifier = old_repo
|
let motion_identifier = old_repo.motion_identifier(old_hash.clone()).await?;
|
||||||
.motion_identifier::<S::Identifier>(old_hash.clone())
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
let hash = old_hash[..].try_into().expect("Invalid hash size");
|
let hash = old_hash[..].try_into().expect("Invalid hash size");
|
||||||
|
|
||||||
|
@ -326,7 +326,7 @@ async fn set_details<S: Store>(
|
||||||
new_repo: &ArcRepo,
|
new_repo: &ArcRepo,
|
||||||
store: &S,
|
store: &S,
|
||||||
config: &Configuration,
|
config: &Configuration,
|
||||||
identifier: &S::Identifier,
|
identifier: &Arc<str>,
|
||||||
) -> Result<Details, Error> {
|
) -> Result<Details, Error> {
|
||||||
if let Some(details) = new_repo.details(identifier).await? {
|
if let Some(details) = new_repo.details(identifier).await? {
|
||||||
Ok(details)
|
Ok(details)
|
||||||
|
@ -342,9 +342,9 @@ async fn fetch_or_generate_details<S: Store>(
|
||||||
old_repo: &OldSledRepo,
|
old_repo: &OldSledRepo,
|
||||||
store: &S,
|
store: &S,
|
||||||
config: &Configuration,
|
config: &Configuration,
|
||||||
identifier: &S::Identifier,
|
identifier: &Arc<str>,
|
||||||
) -> Result<Details, Error> {
|
) -> Result<Details, Error> {
|
||||||
let details_opt = old_repo.details(identifier).await?;
|
let details_opt = old_repo.details(identifier.clone()).await?;
|
||||||
|
|
||||||
if let Some(details) = details_opt {
|
if let Some(details) = details_opt {
|
||||||
Ok(details)
|
Ok(details)
|
||||||
|
|
|
@ -1,6 +1,8 @@
|
||||||
mod embedded;
|
mod embedded;
|
||||||
mod schema;
|
mod schema;
|
||||||
|
|
||||||
|
use std::sync::Arc;
|
||||||
|
|
||||||
use diesel::prelude::*;
|
use diesel::prelude::*;
|
||||||
use diesel_async::{
|
use diesel_async::{
|
||||||
pooled_connection::{
|
pooled_connection::{
|
||||||
|
@ -11,7 +13,12 @@ use diesel_async::{
|
||||||
};
|
};
|
||||||
use url::Url;
|
use url::Url;
|
||||||
|
|
||||||
use super::{BaseRepo, HashRepo, RepoError};
|
use crate::error_code::ErrorCode;
|
||||||
|
|
||||||
|
use super::{
|
||||||
|
BaseRepo, Hash, HashAlreadyExists, HashPage, HashRepo, OrderedHash, RepoError,
|
||||||
|
VariantAlreadyExists,
|
||||||
|
};
|
||||||
|
|
||||||
#[derive(Clone)]
|
#[derive(Clone)]
|
||||||
pub(crate) struct PostgresRepo {
|
pub(crate) struct PostgresRepo {
|
||||||
|
@ -39,6 +46,12 @@ pub(crate) enum PostgresError {
|
||||||
Diesel(#[source] diesel::result::Error),
|
Diesel(#[source] diesel::result::Error),
|
||||||
}
|
}
|
||||||
|
|
||||||
|
impl PostgresError {
|
||||||
|
pub(super) const fn error_code(&self) -> ErrorCode {
|
||||||
|
todo!()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
impl PostgresRepo {
|
impl PostgresRepo {
|
||||||
pub(crate) async fn connect(postgres_url: Url) -> Result<Self, ConnectPostgresError> {
|
pub(crate) async fn connect(postgres_url: Url) -> Result<Self, ConnectPostgresError> {
|
||||||
let (mut client, conn) =
|
let (mut client, conn) =
|
||||||
|
@ -65,6 +78,11 @@ impl PostgresRepo {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn to_primitive(timestamp: time::OffsetDateTime) -> time::PrimitiveDateTime {
|
||||||
|
let timestamp = timestamp.to_offset(time::UtcOffset::UTC);
|
||||||
|
time::PrimitiveDateTime::new(timestamp.date(), timestamp.time())
|
||||||
|
}
|
||||||
|
|
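
to_primitive exists because the timestamp columns added by the migrations (created_at, queue_time, heartbeat) are plain Timestamp without a time zone, while the repo traits pass time::OffsetDateTime around. Normalizing to UTC before dropping the offset keeps the later assume_utc calls (in bound and hash_page_by_date) lossless. A sketch of the intended round-trip, using only the time crate:

// Sketch: OffsetDateTime -> PrimitiveDateTime (assumed UTC) -> OffsetDateTime.
fn timestamp_round_trip(ts: time::OffsetDateTime) -> time::OffsetDateTime {
    let utc = ts.to_offset(time::UtcOffset::UTC);
    let primitive = time::PrimitiveDateTime::new(utc.date(), utc.time());

    // Reading back does exactly what bound() does with the stored value.
    primitive.assume_utc()
}
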
||||||
impl BaseRepo for PostgresRepo {}
|
impl BaseRepo for PostgresRepo {}
|
||||||
|
|
||||||
#[async_trait::async_trait(?Send)]
|
#[async_trait::async_trait(?Send)]
|
||||||
|
@ -82,6 +100,187 @@ impl HashRepo for PostgresRepo {
|
||||||
|
|
||||||
Ok(count.try_into().expect("non-negative count"))
|
Ok(count.try_into().expect("non-negative count"))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
async fn bound(&self, input_hash: Hash) -> Result<Option<OrderedHash>, RepoError> {
|
||||||
|
use schema::hashes::dsl::*;
|
||||||
|
|
||||||
|
let mut conn = self.pool.get().await.map_err(PostgresError::Pool)?;
|
||||||
|
|
||||||
|
let timestamp = hashes
|
||||||
|
.select(created_at)
|
||||||
|
.filter(hash.eq(&input_hash))
|
||||||
|
.first(&mut conn)
|
||||||
|
.await
|
||||||
|
.map(time::PrimitiveDateTime::assume_utc)
|
||||||
|
.optional()
|
||||||
|
.map_err(PostgresError::Diesel)?;
|
||||||
|
|
||||||
|
Ok(timestamp.map(|timestamp| OrderedHash {
|
||||||
|
timestamp,
|
||||||
|
hash: input_hash,
|
||||||
|
}))
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn hash_page_by_date(
|
||||||
|
&self,
|
||||||
|
date: time::OffsetDateTime,
|
||||||
|
limit: usize,
|
||||||
|
) -> Result<HashPage, RepoError> {
|
||||||
|
use schema::hashes::dsl::*;
|
||||||
|
|
||||||
|
let mut conn = self.pool.get().await.map_err(PostgresError::Pool)?;
|
||||||
|
|
||||||
|
let timestamp = to_primitive(date);
|
||||||
|
|
||||||
|
let ordered_hash = hashes
|
||||||
|
.select((created_at, hash))
|
||||||
|
.filter(created_at.lt(timestamp))
|
||||||
|
.order(created_at.desc())
|
||||||
|
.first::<(time::PrimitiveDateTime, Hash)>(&mut conn)
|
||||||
|
.await
|
||||||
|
.optional()
|
||||||
|
.map_err(PostgresError::Diesel)?
|
||||||
|
.map(|tup| OrderedHash {
|
||||||
|
timestamp: tup.0.assume_utc(),
|
||||||
|
hash: tup.1,
|
||||||
|
});
|
||||||
|
|
||||||
|
self.hashes_ordered(ordered_hash, limit).await
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn hashes_ordered(
|
||||||
|
&self,
|
||||||
|
bound: Option<OrderedHash>,
|
||||||
|
limit: usize,
|
||||||
|
) -> Result<HashPage, RepoError> {
|
||||||
|
use schema::hashes::dsl::*;
|
||||||
|
|
||||||
|
let mut conn = self.pool.get().await.map_err(PostgresError::Pool)?;
|
||||||
|
|
||||||
|
let (mut page, prev) = if let Some(OrderedHash {
|
||||||
|
timestamp,
|
||||||
|
hash: bound_hash,
|
||||||
|
}) = bound
|
||||||
|
{
|
||||||
|
let timestamp = to_primitive(timestamp);
|
||||||
|
|
||||||
|
let page = hashes
|
||||||
|
.select(hash)
|
||||||
|
.filter(created_at.lt(timestamp))
|
||||||
|
.or_filter(created_at.eq(timestamp).and(hash.le(&bound_hash)))
|
||||||
|
.order(created_at.desc())
|
||||||
|
.then_order_by(hash.desc())
|
||||||
|
.limit(limit as i64 + 1)
|
||||||
|
.load::<Hash>(&mut conn)
|
||||||
|
.await
|
||||||
|
.map_err(PostgresError::Diesel)?;
|
||||||
|
|
||||||
|
let prev = hashes
|
||||||
|
.select(hash)
|
||||||
|
.filter(created_at.gt(timestamp))
|
||||||
|
.or_filter(created_at.eq(timestamp).and(hash.gt(&bound_hash)))
|
||||||
|
.order(created_at)
|
||||||
|
.then_order_by(hash)
|
||||||
|
.offset(limit.saturating_sub(1) as i64)
|
||||||
|
.first::<Hash>(&mut conn)
|
||||||
|
.await
|
||||||
|
.optional()
|
||||||
|
.map_err(PostgresError::Diesel)?;
|
||||||
|
|
||||||
|
(page, prev)
|
||||||
|
} else {
|
||||||
|
let page = hashes
|
||||||
|
.select(hash)
|
||||||
|
.order(created_at.desc())
|
||||||
|
.then_order_by(hash.desc())
|
||||||
|
.limit(limit as i64 + 1)
|
||||||
|
.load::<Hash>(&mut conn)
|
||||||
|
.await
|
||||||
|
.map_err(PostgresError::Diesel)?;
|
||||||
|
|
||||||
|
(page, None)
|
||||||
|
};
|
||||||
|
|
||||||
|
let next = if page.len() > limit { page.pop() } else { None };
|
||||||
|
|
||||||
|
Ok(HashPage {
|
||||||
|
limit,
|
||||||
|
prev,
|
||||||
|
next,
|
||||||
|
hashes: page,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
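
hashes_ordered is keyset pagination over (created_at DESC, hash DESC): it loads limit + 1 rows so the extra row, if present, becomes the next-page cursor, and a mirrored ascending query offset by limit - 1 recovers the previous-page cursor. The limit + 1 trick on its own, as a diesel-free sketch:

// Sketch of the "fetch one extra row" pattern used above.
fn split_page<T>(mut rows: Vec<T>, limit: usize) -> (Vec<T>, Option<T>) {
    // rows was loaded with .limit(limit as i64 + 1); if the extra row arrived,
    // it is the cursor for the following page rather than part of this one.
    let next = if rows.len() > limit { rows.pop() } else { None };

    (rows, next)
}
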
||||||
|
async fn create_hash_with_timestamp(
|
||||||
|
&self,
|
||||||
|
input_hash: Hash,
|
||||||
|
input_identifier: &Arc<str>,
|
||||||
|
timestamp: time::OffsetDateTime,
|
||||||
|
) -> Result<Result<(), HashAlreadyExists>, RepoError> {
|
||||||
|
use schema::hashes::dsl::*;
|
||||||
|
|
||||||
|
let mut conn = self.pool.get().await.map_err(PostgresError::Pool)?;
|
||||||
|
|
||||||
|
let timestamp = to_primitive(timestamp);
|
||||||
|
|
||||||
|
/*
|
||||||
|
insert_into(hashes).values((
|
||||||
|
hash.eq(&input_hash),
|
||||||
|
identifier.eq(&input_identifier)
|
||||||
|
))
|
||||||
|
*/
|
||||||
|
|
||||||
|
todo!()
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn update_identifier(&self, hash: Hash, identifier: &Arc<str>) -> Result<(), RepoError> {
|
||||||
|
todo!()
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn identifier(&self, hash: Hash) -> Result<Option<Arc<str>>, RepoError> {
|
||||||
|
todo!()
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn relate_variant_identifier(
|
||||||
|
&self,
|
||||||
|
hash: Hash,
|
||||||
|
variant: String,
|
||||||
|
identifier: &Arc<str>,
|
||||||
|
) -> Result<Result<(), VariantAlreadyExists>, RepoError> {
|
||||||
|
todo!()
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn variant_identifier(
|
||||||
|
&self,
|
||||||
|
hash: Hash,
|
||||||
|
variant: String,
|
||||||
|
) -> Result<Option<Arc<str>>, RepoError> {
|
||||||
|
todo!()
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn variants(&self, hash: Hash) -> Result<Vec<(String, Arc<str>)>, RepoError> {
|
||||||
|
todo!()
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn remove_variant(&self, hash: Hash, variant: String) -> Result<(), RepoError> {
|
||||||
|
todo!()
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn relate_motion_identifier(
|
||||||
|
&self,
|
||||||
|
hash: Hash,
|
||||||
|
identifier: &Arc<str>,
|
||||||
|
) -> Result<(), RepoError> {
|
||||||
|
todo!()
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn motion_identifier(&self, hash: Hash) -> Result<Option<Arc<str>>, RepoError> {
|
||||||
|
todo!()
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn cleanup_hash(&self, hash: Hash) -> Result<(), RepoError> {
|
||||||
|
todo!()
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl std::fmt::Debug for PostgresRepo {
|
impl std::fmt::Debug for PostgresRepo {
|
||||||
|
|
|
@ -8,7 +8,7 @@ pub(crate) fn migration() -> String {
|
||||||
m.create_table("hashes", |t| {
|
m.create_table("hashes", |t| {
|
||||||
t.add_column(
|
t.add_column(
|
||||||
"hash",
|
"hash",
|
||||||
types::binary()
|
types::text()
|
||||||
.primary(true)
|
.primary(true)
|
||||||
.unique(true)
|
.unique(true)
|
||||||
.nullable(false)
|
.nullable(false)
|
||||||
|
|
|
@ -7,7 +7,7 @@ pub(crate) fn migration() -> String {
|
||||||
|
|
||||||
m.create_table("variants", |t| {
|
m.create_table("variants", |t| {
|
||||||
t.inject_custom(r#""id" UUID PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL UNIQUE"#);
|
t.inject_custom(r#""id" UUID PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL UNIQUE"#);
|
||||||
t.add_column("hash", types::binary().nullable(false));
|
t.add_column("hash", types::text().nullable(false));
|
||||||
t.add_column("variant", types::text().nullable(false));
|
t.add_column("variant", types::text().nullable(false));
|
||||||
t.add_column("identifier", types::text().nullable(false));
|
t.add_column("identifier", types::text().nullable(false));
|
||||||
t.add_column(
|
t.add_column(
|
||||||
|
|
|
@ -14,7 +14,7 @@ pub(crate) fn migration() -> String {
|
||||||
.unique(true)
|
.unique(true)
|
||||||
.nullable(false),
|
.nullable(false),
|
||||||
);
|
);
|
||||||
t.add_column("hash", types::binary().nullable(false));
|
t.add_column("hash", types::text().nullable(false));
|
||||||
t.add_column("token", types::text().size(60).nullable(false));
|
t.add_column("token", types::text().size(60).nullable(false));
|
||||||
|
|
||||||
t.add_foreign_key(&["hash"], "hashes", &["hash"]);
|
t.add_foreign_key(&["hash"], "hashes", &["hash"]);
|
||||||
|
|
|
@ -10,7 +10,7 @@ pub(crate) fn migration() -> String {
|
||||||
"identifier",
|
"identifier",
|
||||||
types::text().primary(true).unique(true).nullable(false),
|
types::text().primary(true).unique(true).nullable(false),
|
||||||
);
|
);
|
||||||
t.add_column("details", types::custom("jsonb").nullable(false));
|
t.add_column("json", types::custom("jsonb").nullable(false));
|
||||||
});
|
});
|
||||||
|
|
||||||
m.make::<Pg>().to_string()
|
m.make::<Pg>().to_string()
|
||||||
|
|
|
@ -7,7 +7,7 @@ pub(crate) fn migration() -> String {
|
||||||
|
|
||||||
m.inject_custom("CREATE TYPE job_status AS ENUM ('new', 'running');");
|
m.inject_custom("CREATE TYPE job_status AS ENUM ('new', 'running');");
|
||||||
|
|
||||||
m.create_table("queue", |t| {
|
m.create_table("job_queue", |t| {
|
||||||
t.inject_custom(r#""id" UUID PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL UNIQUE"#);
|
t.inject_custom(r#""id" UUID PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL UNIQUE"#);
|
||||||
t.add_column("queue", types::text().size(50).nullable(false));
|
t.add_column("queue", types::text().size(50).nullable(false));
|
||||||
t.add_column("job", types::custom("jsonb").nullable(false));
|
t.add_column("job", types::custom("jsonb").nullable(false));
|
||||||
|
@ -42,7 +42,7 @@ $$ LANGUAGE plpgsql;
|
||||||
r#"
|
r#"
|
||||||
CREATE TRIGGER queue_status
|
CREATE TRIGGER queue_status
|
||||||
AFTER INSERT OR UPDATE OF status
|
AFTER INSERT OR UPDATE OF status
|
||||||
ON queue
|
ON job_queue
|
||||||
FOR EACH ROW
|
FOR EACH ROW
|
||||||
EXECUTE PROCEDURE queue_status_notify();
|
EXECUTE PROCEDURE queue_status_notify();
|
||||||
"#
|
"#
|
||||||
|
|
|
@ -9,7 +9,7 @@ pub mod sql_types {
|
||||||
diesel::table! {
|
diesel::table! {
|
||||||
aliases (alias) {
|
aliases (alias) {
|
||||||
alias -> Text,
|
alias -> Text,
|
||||||
hash -> Bytea,
|
hash -> Text,
|
||||||
token -> Text,
|
token -> Text,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -17,20 +17,33 @@ diesel::table! {
|
||||||
diesel::table! {
|
diesel::table! {
|
||||||
details (identifier) {
|
details (identifier) {
|
||||||
identifier -> Text,
|
identifier -> Text,
|
||||||
#[sql_name = "details"]
|
json -> Jsonb,
|
||||||
details_json -> Jsonb,
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
diesel::table! {
|
diesel::table! {
|
||||||
hashes (hash) {
|
hashes (hash) {
|
||||||
hash -> Bytea,
|
hash -> Text,
|
||||||
identifier -> Text,
|
identifier -> Text,
|
||||||
motion_identifier -> Nullable<Text>,
|
motion_identifier -> Nullable<Text>,
|
||||||
created_at -> Timestamp,
|
created_at -> Timestamp,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
diesel::table! {
|
||||||
|
use diesel::sql_types::*;
|
||||||
|
use super::sql_types::JobStatus;
|
||||||
|
|
||||||
|
job_queue (id) {
|
||||||
|
id -> Uuid,
|
||||||
|
queue -> Text,
|
||||||
|
job -> Jsonb,
|
||||||
|
status -> JobStatus,
|
||||||
|
queue_time -> Timestamp,
|
||||||
|
heartbeat -> Timestamp,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
diesel::table! {
|
diesel::table! {
|
||||||
proxies (url) {
|
proxies (url) {
|
||||||
url -> Text,
|
url -> Text,
|
||||||
|
@ -39,21 +52,6 @@ diesel::table! {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
diesel::table! {
|
|
||||||
use diesel::sql_types::*;
|
|
||||||
use super::sql_types::JobStatus;
|
|
||||||
|
|
||||||
queue (id) {
|
|
||||||
id -> Uuid,
|
|
||||||
#[sql_name = "queue"]
|
|
||||||
queue_name -> Text,
|
|
||||||
job -> Jsonb,
|
|
||||||
status -> JobStatus,
|
|
||||||
queue_time -> Timestamp,
|
|
||||||
heartbeat -> Timestamp,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
diesel::table! {
|
diesel::table! {
|
||||||
refinery_schema_history (version) {
|
refinery_schema_history (version) {
|
||||||
version -> Int4,
|
version -> Int4,
|
||||||
|
@ -89,7 +87,7 @@ diesel::table! {
|
||||||
diesel::table! {
|
diesel::table! {
|
||||||
variants (id) {
|
variants (id) {
|
||||||
id -> Uuid,
|
id -> Uuid,
|
||||||
hash -> Bytea,
|
hash -> Text,
|
||||||
variant -> Text,
|
variant -> Text,
|
||||||
identifier -> Text,
|
identifier -> Text,
|
||||||
accessed -> Timestamp,
|
accessed -> Timestamp,
|
||||||
|
@ -104,8 +102,8 @@ diesel::allow_tables_to_appear_in_same_query!(
|
||||||
aliases,
|
aliases,
|
||||||
details,
|
details,
|
||||||
hashes,
|
hashes,
|
||||||
|
job_queue,
|
||||||
proxies,
|
proxies,
|
||||||
queue,
|
|
||||||
refinery_schema_history,
|
refinery_schema_history,
|
||||||
settings,
|
settings,
|
||||||
store_migrations,
|
store_migrations,
|
||||||
|
|
163
src/repo/sled.rs
|
@ -2,7 +2,6 @@ use crate::{
|
||||||
details::HumanDate,
|
details::HumanDate,
|
||||||
error_code::{ErrorCode, OwnedErrorCode},
|
error_code::{ErrorCode, OwnedErrorCode},
|
||||||
serde_str::Serde,
|
serde_str::Serde,
|
||||||
store::StoreError,
|
|
||||||
stream::{from_iterator, LocalBoxStream},
|
stream::{from_iterator, LocalBoxStream},
|
||||||
};
|
};
|
||||||
use sled::{transaction::TransactionError, Db, IVec, Transactional, Tree};
|
use sled::{transaction::TransactionError, Db, IVec, Transactional, Tree};
|
||||||
|
@ -21,9 +20,9 @@ use uuid::Uuid;
|
||||||
|
|
||||||
use super::{
|
use super::{
|
||||||
hash::Hash, Alias, AliasAccessRepo, AliasAlreadyExists, AliasRepo, BaseRepo, DeleteToken,
|
hash::Hash, Alias, AliasAccessRepo, AliasAlreadyExists, AliasRepo, BaseRepo, DeleteToken,
|
||||||
Details, DetailsRepo, FullRepo, HashAlreadyExists, HashPage, HashRepo, Identifier, JobId,
|
Details, DetailsRepo, FullRepo, HashAlreadyExists, HashPage, HashRepo, JobId, OrderedHash,
|
||||||
OrderedHash, ProxyRepo, QueueRepo, RepoError, SettingsRepo, StoreMigrationRepo, UploadId,
|
ProxyRepo, QueueRepo, RepoError, SettingsRepo, StoreMigrationRepo, UploadId, UploadRepo,
|
||||||
UploadRepo, UploadResult, VariantAccessRepo, VariantAlreadyExists,
|
UploadResult, VariantAccessRepo, VariantAlreadyExists,
|
||||||
};
|
};
|
||||||
|
|
||||||
macro_rules! b {
|
macro_rules! b {
|
||||||
|
@ -55,6 +54,9 @@ pub(crate) enum SledError {
|
||||||
#[error("Error parsing variant key")]
|
#[error("Error parsing variant key")]
|
||||||
VariantKey(#[from] VariantKeyError),
|
VariantKey(#[from] VariantKeyError),
|
||||||
|
|
||||||
|
#[error("Invalid string data in db")]
|
||||||
|
Utf8(#[source] std::str::Utf8Error),
|
||||||
|
|
||||||
#[error("Operation panicked")]
|
#[error("Operation panicked")]
|
||||||
Panic,
|
Panic,
|
||||||
|
|
||||||
|
@ -65,7 +67,7 @@ pub(crate) enum SledError {
|
||||||
impl SledError {
|
impl SledError {
|
||||||
pub(super) const fn error_code(&self) -> ErrorCode {
|
pub(super) const fn error_code(&self) -> ErrorCode {
|
||||||
match self {
|
match self {
|
||||||
Self::Sled(_) | Self::VariantKey(_) => ErrorCode::SLED_ERROR,
|
Self::Sled(_) | Self::VariantKey(_) | Self::Utf8(_) => ErrorCode::SLED_ERROR,
|
||||||
Self::Details(_) => ErrorCode::EXTRACT_DETAILS,
|
Self::Details(_) => ErrorCode::EXTRACT_DETAILS,
|
||||||
Self::UploadResult(_) => ErrorCode::EXTRACT_UPLOAD_RESULT,
|
Self::UploadResult(_) => ErrorCode::EXTRACT_UPLOAD_RESULT,
|
||||||
Self::Panic => ErrorCode::PANIC,
|
Self::Panic => ErrorCode::PANIC,
|
||||||
|
@ -648,10 +650,17 @@ fn job_key(queue: &'static str, job_id: JobId) -> Arc<[u8]> {
|
||||||
Arc::from(key)
|
Arc::from(key)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn try_into_arc_str(ivec: IVec) -> Result<Arc<str>, SledError> {
|
||||||
|
std::str::from_utf8(&ivec[..])
|
||||||
|
.map_err(SledError::Utf8)
|
||||||
|
.map(String::from)
|
||||||
|
.map(Arc::from)
|
||||||
|
}
|
||||||
|
|
||||||
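
try_into_arc_str is what lets the sled backend hand out Arc<str> identifiers: it validates the stored bytes as UTF-8 and reports failures through the new SledError::Utf8 variant instead of panicking. The String hop is only for ownership; Arc<str> also implements From<&str>, so an equivalent single-allocation version could convert directly. A sketch of that variant, behaviour unchanged:

// Sketch: same result, building the Arc<str> straight from the borrowed &str.
fn try_into_arc_str(ivec: sled::IVec) -> Result<std::sync::Arc<str>, SledError> {
    std::str::from_utf8(&ivec[..])
        .map(std::sync::Arc::from) // Arc<str>: From<&str> copies the bytes once
        .map_err(SledError::Utf8)
}
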
#[async_trait::async_trait(?Send)]
|
#[async_trait::async_trait(?Send)]
|
||||||
impl QueueRepo for SledRepo {
|
impl QueueRepo for SledRepo {
|
||||||
#[tracing::instrument(skip(self, job), fields(job = %String::from_utf8_lossy(&job)))]
|
#[tracing::instrument(skip(self))]
|
||||||
async fn push(&self, queue_name: &'static str, job: Arc<[u8]>) -> Result<JobId, RepoError> {
|
async fn push(&self, queue_name: &'static str, job: Arc<str>) -> Result<JobId, RepoError> {
|
||||||
let metrics_guard = PushMetricsGuard::guard(queue_name);
|
let metrics_guard = PushMetricsGuard::guard(queue_name);
|
||||||
|
|
||||||
let id = JobId::gen();
|
let id = JobId::gen();
|
||||||
|
@ -700,7 +709,7 @@ impl QueueRepo for SledRepo {
|
||||||
&self,
|
&self,
|
||||||
queue_name: &'static str,
|
queue_name: &'static str,
|
||||||
worker_id: Uuid,
|
worker_id: Uuid,
|
||||||
) -> Result<(JobId, Arc<[u8]>), RepoError> {
|
) -> Result<(JobId, Arc<str>), RepoError> {
|
||||||
let metrics_guard = PopMetricsGuard::guard(queue_name);
|
let metrics_guard = PopMetricsGuard::guard(queue_name);
|
||||||
|
|
||||||
let now = time::OffsetDateTime::now_utc();
|
let now = time::OffsetDateTime::now_utc();
|
||||||
|
@ -753,11 +762,10 @@ impl QueueRepo for SledRepo {
|
||||||
|
|
||||||
tracing::Span::current().record("job_id", &format!("{job_id:?}"));
|
tracing::Span::current().record("job_id", &format!("{job_id:?}"));
|
||||||
|
|
||||||
let opt = queue
|
let opt = queue.get(&key)?.map(try_into_arc_str).transpose()?;
|
||||||
.get(&key)?
|
|
||||||
.map(|job_bytes| (job_id, Arc::from(job_bytes.to_vec())));
|
|
||||||
|
|
||||||
return Ok(opt) as Result<Option<(JobId, Arc<[u8]>)>, SledError>;
|
return Ok(opt.map(|job| (job_id, job)))
|
||||||
|
as Result<Option<(JobId, Arc<str>)>, SledError>;
|
||||||
}
|
}
|
||||||
|
|
||||||
Ok(None)
|
Ok(None)
|
||||||
|
@ -949,43 +957,46 @@ fn variant_from_key(hash: &[u8], key: &[u8]) -> Option<String> {
|
||||||
|
|
||||||
#[async_trait::async_trait(?Send)]
|
#[async_trait::async_trait(?Send)]
|
||||||
impl DetailsRepo for SledRepo {
|
impl DetailsRepo for SledRepo {
|
||||||
#[tracing::instrument(level = "trace", skip(self, identifier), fields(identifier = identifier.string_repr()))]
|
#[tracing::instrument(level = "trace", skip(self))]
|
||||||
async fn relate_details(
|
async fn relate_details(
|
||||||
&self,
|
&self,
|
||||||
identifier: &dyn Identifier,
|
identifier: &Arc<str>,
|
||||||
details: &Details,
|
details: &Details,
|
||||||
) -> Result<(), StoreError> {
|
) -> Result<(), RepoError> {
|
||||||
let key = identifier.to_bytes()?;
|
let key = identifier.clone();
|
||||||
let details = serde_json::to_vec(&details.inner)
|
let details = serde_json::to_vec(&details.inner).map_err(SledError::Details)?;
|
||||||
.map_err(SledError::Details)
|
|
||||||
.map_err(RepoError::from)?;
|
|
||||||
|
|
||||||
b!(
|
b!(
|
||||||
self.identifier_details,
|
self.identifier_details,
|
||||||
identifier_details.insert(key, details)
|
identifier_details.insert(key.as_bytes(), details)
|
||||||
);
|
);
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
#[tracing::instrument(level = "trace", skip(self, identifier), fields(identifier = identifier.string_repr()))]
|
#[tracing::instrument(level = "trace", skip(self))]
|
||||||
async fn details(&self, identifier: &dyn Identifier) -> Result<Option<Details>, StoreError> {
|
async fn details(&self, identifier: &Arc<str>) -> Result<Option<Details>, RepoError> {
|
||||||
let key = identifier.to_bytes()?;
|
let key = identifier.clone();
|
||||||
|
|
||||||
let opt = b!(self.identifier_details, identifier_details.get(key));
|
let opt = b!(
|
||||||
|
self.identifier_details,
|
||||||
|
identifier_details.get(key.as_bytes())
|
||||||
|
);
|
||||||
|
|
||||||
opt.map(|ivec| serde_json::from_slice(&ivec).map(|inner| Details { inner }))
|
opt.map(|ivec| serde_json::from_slice(&ivec).map(|inner| Details { inner }))
|
||||||
.transpose()
|
.transpose()
|
||||||
.map_err(SledError::Details)
|
.map_err(SledError::Details)
|
||||||
.map_err(RepoError::from)
|
.map_err(RepoError::from)
|
||||||
.map_err(StoreError::from)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[tracing::instrument(level = "trace", skip(self, identifier), fields(identifier = identifier.string_repr()))]
|
#[tracing::instrument(level = "trace", skip(self))]
|
||||||
async fn cleanup_details(&self, identifier: &dyn Identifier) -> Result<(), StoreError> {
|
async fn cleanup_details(&self, identifier: &Arc<str>) -> Result<(), RepoError> {
|
||||||
let key = identifier.to_bytes()?;
|
let key = identifier.clone();
|
||||||
|
|
||||||
b!(self.identifier_details, identifier_details.remove(key));
|
b!(
|
||||||
|
self.identifier_details,
|
||||||
|
identifier_details.remove(key.as_bytes())
|
||||||
|
);
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
@ -999,24 +1010,28 @@ impl StoreMigrationRepo for SledRepo {
|
||||||
|
|
||||||
async fn mark_migrated(
|
async fn mark_migrated(
|
||||||
&self,
|
&self,
|
||||||
old_identifier: &dyn Identifier,
|
old_identifier: &Arc<str>,
|
||||||
new_identifier: &dyn Identifier,
|
new_identifier: &Arc<str>,
|
||||||
) -> Result<(), StoreError> {
|
) -> Result<(), RepoError> {
|
||||||
let key = new_identifier.to_bytes()?;
|
let key = new_identifier.clone();
|
||||||
let value = old_identifier.to_bytes()?;
|
let value = old_identifier.clone();
|
||||||
|
|
||||||
b!(
|
b!(
|
||||||
self.migration_identifiers,
|
self.migration_identifiers,
|
||||||
migration_identifiers.insert(key, value)
|
migration_identifiers.insert(key.as_bytes(), value.as_bytes())
|
||||||
);
|
);
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn is_migrated(&self, identifier: &dyn Identifier) -> Result<bool, StoreError> {
|
async fn is_migrated(&self, identifier: &Arc<str>) -> Result<bool, RepoError> {
|
||||||
let key = identifier.to_bytes()?;
|
let key = identifier.clone();
|
||||||
|
|
||||||
Ok(b!(self.migration_identifiers, migration_identifiers.get(key)).is_some())
|
Ok(b!(
|
||||||
|
self.migration_identifiers,
|
||||||
|
migration_identifiers.get(key.as_bytes())
|
||||||
|
)
|
||||||
|
.is_some())
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn clear(&self) -> Result<(), RepoError> {
|
async fn clear(&self) -> Result<(), RepoError> {
|
||||||
|
@ -1062,17 +1077,6 @@ impl HashRepo for SledRepo {
|
||||||
))
|
))
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn hashes(&self) -> LocalBoxStream<'static, Result<Hash, RepoError>> {
|
|
||||||
let iter = self.hashes.iter().keys().filter_map(|res| {
|
|
||||||
res.map_err(SledError::from)
|
|
||||||
.map_err(RepoError::from)
|
|
||||||
.map(Hash::from_ivec)
|
|
||||||
.transpose()
|
|
||||||
});
|
|
||||||
|
|
||||||
Box::pin(from_iterator(iter, 8))
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn bound(&self, hash: Hash) -> Result<Option<OrderedHash>, RepoError> {
|
async fn bound(&self, hash: Hash) -> Result<Option<OrderedHash>, RepoError> {
|
||||||
let opt = b!(self.hashes, hashes.get(hash.to_ivec()));
|
let opt = b!(self.hashes, hashes.get(hash.to_ivec()));
|
||||||
|
|
||||||
|
@ -1197,10 +1201,10 @@ impl HashRepo for SledRepo {
|
||||||
async fn create_hash_with_timestamp(
|
async fn create_hash_with_timestamp(
|
||||||
&self,
|
&self,
|
||||||
hash: Hash,
|
hash: Hash,
|
||||||
identifier: &dyn Identifier,
|
identifier: &Arc<str>,
|
||||||
timestamp: time::OffsetDateTime,
|
timestamp: time::OffsetDateTime,
|
||||||
) -> Result<Result<(), HashAlreadyExists>, StoreError> {
|
) -> Result<Result<(), HashAlreadyExists>, RepoError> {
|
||||||
let identifier: sled::IVec = identifier.to_bytes()?.into();
|
let identifier: sled::IVec = identifier.as_bytes().to_vec().into();
|
||||||
|
|
||||||
let hashes = self.hashes.clone();
|
let hashes = self.hashes.clone();
|
||||||
let hashes_inverse = self.hashes_inverse.clone();
|
let hashes_inverse = self.hashes_inverse.clone();
|
||||||
|
@ -1234,63 +1238,56 @@ impl HashRepo for SledRepo {
|
||||||
match res {
|
match res {
|
||||||
Ok(res) => Ok(res),
|
Ok(res) => Ok(res),
|
||||||
Err(TransactionError::Abort(e) | TransactionError::Storage(e)) => {
|
Err(TransactionError::Abort(e) | TransactionError::Storage(e)) => {
|
||||||
Err(StoreError::from(RepoError::from(SledError::from(e))))
|
Err(RepoError::from(SledError::from(e)))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn update_identifier(
|
async fn update_identifier(&self, hash: Hash, identifier: &Arc<str>) -> Result<(), RepoError> {
|
||||||
&self,
|
let identifier = identifier.clone();
|
||||||
hash: Hash,
|
|
||||||
identifier: &dyn Identifier,
|
|
||||||
) -> Result<(), StoreError> {
|
|
||||||
let identifier = identifier.to_bytes()?;
|
|
||||||
|
|
||||||
let hash = hash.to_ivec();
|
let hash = hash.to_ivec();
|
||||||
|
|
||||||
b!(
|
b!(
|
||||||
self.hash_identifiers,
|
self.hash_identifiers,
|
||||||
hash_identifiers.insert(hash, identifier)
|
hash_identifiers.insert(hash, identifier.as_bytes())
|
||||||
);
|
);
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
#[tracing::instrument(level = "trace", skip(self))]
|
#[tracing::instrument(level = "trace", skip(self))]
|
||||||
async fn identifier(&self, hash: Hash) -> Result<Option<Arc<[u8]>>, RepoError> {
|
async fn identifier(&self, hash: Hash) -> Result<Option<Arc<str>>, RepoError> {
|
||||||
let hash = hash.to_ivec();
|
let hash = hash.to_ivec();
|
||||||
|
|
||||||
let Some(ivec) = b!(self.hash_identifiers, hash_identifiers.get(hash)) else {
|
let opt = b!(self.hash_identifiers, hash_identifiers.get(hash));
|
||||||
return Ok(None);
|
|
||||||
};
|
|
||||||
|
|
||||||
Ok(Some(Arc::from(ivec.to_vec())))
|
Ok(opt.map(try_into_arc_str).transpose()?)
|
||||||
}
|
}
|
||||||
|
|
||||||
#[tracing::instrument(level = "trace", skip(self, identifier), fields(identifier = identifier.string_repr()))]
|
#[tracing::instrument(level = "trace", skip(self))]
|
||||||
async fn relate_variant_identifier(
|
async fn relate_variant_identifier(
|
||||||
&self,
|
&self,
|
||||||
hash: Hash,
|
hash: Hash,
|
||||||
variant: String,
|
variant: String,
|
||||||
identifier: &dyn Identifier,
|
identifier: &Arc<str>,
|
||||||
) -> Result<Result<(), VariantAlreadyExists>, StoreError> {
|
) -> Result<Result<(), VariantAlreadyExists>, RepoError> {
|
||||||
let hash = hash.to_bytes();
|
let hash = hash.to_bytes();
|
||||||
|
|
||||||
let key = variant_key(&hash, &variant);
|
let key = variant_key(&hash, &variant);
|
||||||
let value = identifier.to_bytes()?;
|
let value = identifier.clone();
|
||||||
|
|
||||||
let hash_variant_identifiers = self.hash_variant_identifiers.clone();
|
let hash_variant_identifiers = self.hash_variant_identifiers.clone();
|
||||||
|
|
||||||
actix_rt::task::spawn_blocking(move || {
|
actix_rt::task::spawn_blocking(move || {
|
||||||
hash_variant_identifiers
|
hash_variant_identifiers
|
||||||
.compare_and_swap(key, Option::<&[u8]>::None, Some(value))
|
.compare_and_swap(key, Option::<&[u8]>::None, Some(value.as_bytes()))
|
||||||
.map(|res| res.map_err(|_| VariantAlreadyExists))
|
.map(|res| res.map_err(|_| VariantAlreadyExists))
|
||||||
})
|
})
|
||||||
.await
|
.await
|
||||||
.map_err(|_| RepoError::Canceled)?
|
.map_err(|_| RepoError::Canceled)?
|
||||||
.map_err(SledError::from)
|
.map_err(SledError::from)
|
||||||
.map_err(RepoError::from)
|
.map_err(RepoError::from)
|
||||||
.map_err(StoreError::from)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[tracing::instrument(level = "trace", skip(self))]
|
#[tracing::instrument(level = "trace", skip(self))]
|
||||||
|
@ -1298,7 +1295,7 @@ impl HashRepo for SledRepo {
|
||||||
&self,
|
&self,
|
||||||
hash: Hash,
|
hash: Hash,
|
||||||
variant: String,
|
variant: String,
|
||||||
) -> Result<Option<Arc<[u8]>>, RepoError> {
|
) -> Result<Option<Arc<str>>, RepoError> {
|
||||||
let hash = hash.to_bytes();
|
let hash = hash.to_bytes();
|
||||||
|
|
||||||
let key = variant_key(&hash, &variant);
|
let key = variant_key(&hash, &variant);
|
||||||
|
@ -1308,11 +1305,11 @@ impl HashRepo for SledRepo {
|
||||||
hash_variant_identifiers.get(key)
|
hash_variant_identifiers.get(key)
|
||||||
);
|
);
|
||||||
|
|
||||||
Ok(opt.map(|ivec| Arc::from(ivec.to_vec())))
|
Ok(opt.map(try_into_arc_str).transpose()?)
|
||||||
}
|
}
|
||||||
|
|
||||||
#[tracing::instrument(level = "debug", skip(self))]
|
#[tracing::instrument(level = "debug", skip(self))]
|
||||||
async fn variants(&self, hash: Hash) -> Result<Vec<(String, Arc<[u8]>)>, RepoError> {
|
async fn variants(&self, hash: Hash) -> Result<Vec<(String, Arc<str>)>, RepoError> {
|
||||||
let hash = hash.to_ivec();
|
let hash = hash.to_ivec();
|
||||||
|
|
||||||
let vec = b!(
|
let vec = b!(
|
||||||
|
@ -1321,14 +1318,14 @@ impl HashRepo for SledRepo {
|
||||||
.scan_prefix(hash.clone())
|
.scan_prefix(hash.clone())
|
||||||
.filter_map(|res| res.ok())
|
.filter_map(|res| res.ok())
|
||||||
.filter_map(|(key, ivec)| {
|
.filter_map(|(key, ivec)| {
|
||||||
let identifier = Arc::from(ivec.to_vec());
|
let identifier = try_into_arc_str(ivec).ok();
|
||||||
|
|
||||||
let variant = variant_from_key(&hash, &key);
|
let variant = variant_from_key(&hash, &key);
|
||||||
if variant.is_none() {
|
if variant.is_none() {
|
||||||
tracing::warn!("Skipping a variant: {}", String::from_utf8_lossy(&key));
|
tracing::warn!("Skipping a variant: {}", String::from_utf8_lossy(&key));
|
||||||
}
|
}
|
||||||
|
|
||||||
Some((variant?, identifier))
|
Some((variant?, identifier?))
|
||||||
})
|
})
|
||||||
.collect::<Vec<_>>()) as Result<Vec<_>, SledError>
|
.collect::<Vec<_>>()) as Result<Vec<_>, SledError>
|
||||||
);
|
);
|
||||||
|
@ -1350,25 +1347,25 @@ impl HashRepo for SledRepo {
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
#[tracing::instrument(level = "trace", skip(self, identifier), fields(identifier = identifier.string_repr()))]
|
#[tracing::instrument(level = "trace", skip(self))]
|
||||||
async fn relate_motion_identifier(
|
async fn relate_motion_identifier(
|
||||||
&self,
|
&self,
|
||||||
hash: Hash,
|
hash: Hash,
|
||||||
identifier: &dyn Identifier,
|
identifier: &Arc<str>,
|
||||||
) -> Result<(), StoreError> {
|
) -> Result<(), RepoError> {
|
||||||
let hash = hash.to_ivec();
|
let hash = hash.to_ivec();
|
||||||
let bytes = identifier.to_bytes()?;
|
let bytes = identifier.clone();
|
||||||
|
|
||||||
b!(
|
b!(
|
||||||
self.hash_motion_identifiers,
|
self.hash_motion_identifiers,
|
||||||
hash_motion_identifiers.insert(hash, bytes)
|
hash_motion_identifiers.insert(hash, bytes.as_bytes())
|
||||||
);
|
);
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
#[tracing::instrument(level = "trace", skip(self))]
|
#[tracing::instrument(level = "trace", skip(self))]
|
||||||
async fn motion_identifier(&self, hash: Hash) -> Result<Option<Arc<[u8]>>, RepoError> {
|
async fn motion_identifier(&self, hash: Hash) -> Result<Option<Arc<str>>, RepoError> {
|
||||||
let hash = hash.to_ivec();
|
let hash = hash.to_ivec();
|
||||||
|
|
||||||
let opt = b!(
|
let opt = b!(
|
||||||
|
@ -1376,7 +1373,7 @@ impl HashRepo for SledRepo {
|
||||||
hash_motion_identifiers.get(hash)
|
hash_motion_identifiers.get(hash)
|
||||||
);
|
);
|
||||||
|
|
||||||
Ok(opt.map(|ivec| Arc::from(ivec.to_vec())))
|
Ok(opt.map(try_into_arc_str).transpose()?)
|
||||||
}
|
}
|
||||||
|
|
||||||
#[tracing::instrument(skip(self))]
|
#[tracing::instrument(skip(self))]
|
||||||
|
|
|
@ -2,10 +2,9 @@ use crate::{
|
||||||
config,
|
config,
|
||||||
details::Details,
|
details::Details,
|
||||||
repo::{Alias, DeleteToken},
|
repo::{Alias, DeleteToken},
|
||||||
store::{Identifier, StoreError},
|
|
||||||
};
|
};
|
||||||
use futures_core::Stream;
|
use futures_core::Stream;
|
||||||
use std::fmt::Debug;
|
use std::{fmt::Debug, sync::Arc};
|
||||||
|
|
||||||
pub(crate) use self::sled::SledRepo;
|
pub(crate) use self::sled::SledRepo;
|
||||||
|
|
||||||
|
@ -46,7 +45,7 @@ pub(crate) trait SettingsRepo: BaseRepo {
|
||||||
|
|
||||||
#[async_trait::async_trait(?Send)]
|
#[async_trait::async_trait(?Send)]
|
||||||
pub(crate) trait IdentifierRepo: BaseRepo {
|
pub(crate) trait IdentifierRepo: BaseRepo {
|
||||||
async fn details<I: Identifier>(&self, identifier: &I) -> Result<Option<Details>, StoreError>;
|
async fn details(&self, identifier: Arc<str>) -> Result<Option<Details>, RepoError>;
|
||||||
}
|
}
|
||||||
|
|
||||||
#[async_trait::async_trait(?Send)]
|
#[async_trait::async_trait(?Send)]
|
||||||
|
@ -57,20 +56,11 @@ pub(crate) trait HashRepo: BaseRepo {
|
||||||
|
|
||||||
async fn hashes(&self) -> Self::Stream;
|
async fn hashes(&self) -> Self::Stream;
|
||||||
|
|
||||||
async fn identifier<I: Identifier + 'static>(
|
async fn identifier(&self, hash: Self::Bytes) -> Result<Option<Arc<str>>, RepoError>;
|
||||||
&self,
|
|
||||||
hash: Self::Bytes,
|
|
||||||
) -> Result<Option<I>, StoreError>;
|
|
||||||
|
|
||||||
async fn variants<I: Identifier + 'static>(
|
async fn variants(&self, hash: Self::Bytes) -> Result<Vec<(String, Arc<str>)>, RepoError>;
|
||||||
&self,
|
|
||||||
hash: Self::Bytes,
|
|
||||||
) -> Result<Vec<(String, I)>, StoreError>;
|
|
||||||
|
|
||||||
async fn motion_identifier<I: Identifier + 'static>(
|
async fn motion_identifier(&self, hash: Self::Bytes) -> Result<Option<Arc<str>>, RepoError>;
|
||||||
&self,
|
|
||||||
hash: Self::Bytes,
|
|
||||||
) -> Result<Option<I>, StoreError>;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[async_trait::async_trait(?Send)]
|
#[async_trait::async_trait(?Send)]
|
||||||
|
|
|
@ -1,10 +1,9 @@
|
||||||
use crate::{
|
use crate::{
|
||||||
details::HumanDate,
|
details::HumanDate,
|
||||||
repo_04::{
|
repo_04::{
|
||||||
Alias, AliasRepo, BaseRepo, DeleteToken, Details, HashRepo, Identifier, IdentifierRepo,
|
Alias, AliasRepo, BaseRepo, DeleteToken, Details, HashRepo, IdentifierRepo, RepoError,
|
||||||
RepoError, SettingsRepo,
|
SettingsRepo,
|
||||||
},
|
},
|
||||||
store::StoreError,
|
|
||||||
stream::{from_iterator, LocalBoxStream},
|
stream::{from_iterator, LocalBoxStream},
|
||||||
};
|
};
|
||||||
use sled::{Db, IVec, Tree};
|
use sled::{Db, IVec, Tree};
|
||||||
|
@ -56,6 +55,9 @@ pub(crate) enum SledError {
|
||||||
|
|
||||||
#[error("Operation panicked")]
|
#[error("Operation panicked")]
|
||||||
Panic,
|
Panic,
|
||||||
|
|
||||||
|
#[error("Error reading string")]
|
||||||
|
Utf8(#[from] std::str::Utf8Error),
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Clone)]
|
#[derive(Clone)]
|
||||||
|
@ -179,17 +181,17 @@ fn variant_from_key(hash: &[u8], key: &[u8]) -> Option<String> {
|
||||||
|
|
||||||
#[async_trait::async_trait(?Send)]
|
#[async_trait::async_trait(?Send)]
|
||||||
impl IdentifierRepo for SledRepo {
|
impl IdentifierRepo for SledRepo {
|
||||||
#[tracing::instrument(level = "trace", skip(self, identifier), fields(identifier = identifier.string_repr()))]
|
#[tracing::instrument(level = "trace", skip(self))]
|
||||||
async fn details<I: Identifier>(&self, identifier: &I) -> Result<Option<Details>, StoreError> {
|
async fn details(&self, key: Arc<str>) -> Result<Option<Details>, RepoError> {
|
||||||
let key = identifier.to_bytes()?;
|
let opt = b!(
|
||||||
|
self.identifier_details,
|
||||||
let opt = b!(self.identifier_details, identifier_details.get(key));
|
identifier_details.get(key.as_bytes())
|
||||||
|
);
|
||||||
|
|
||||||
opt.map(|ivec| serde_json::from_slice::<OldDetails>(&ivec))
|
opt.map(|ivec| serde_json::from_slice::<OldDetails>(&ivec))
|
||||||
.transpose()
|
.transpose()
|
||||||
.map_err(SledError::from)
|
.map_err(SledError::from)
|
||||||
.map_err(RepoError::from)
|
.map_err(RepoError::from)
|
||||||
.map_err(StoreError::from)
|
|
||||||
.map(|opt| opt.and_then(OldDetails::into_details))
|
.map(|opt| opt.and_then(OldDetails::into_details))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -219,29 +221,27 @@ impl HashRepo for SledRepo {
|
||||||
}
|
}
|
||||||
|
|
||||||
#[tracing::instrument(level = "trace", skip(self, hash), fields(hash = hex::encode(&hash)))]
|
#[tracing::instrument(level = "trace", skip(self, hash), fields(hash = hex::encode(&hash)))]
|
||||||
async fn identifier<I: Identifier + 'static>(
|
async fn identifier(&self, hash: Self::Bytes) -> Result<Option<Arc<str>>, RepoError> {
|
||||||
&self,
|
|
||||||
hash: Self::Bytes,
|
|
||||||
) -> Result<Option<I>, StoreError> {
|
|
||||||
let Some(ivec) = b!(self.hash_identifiers, hash_identifiers.get(hash)) else {
|
let Some(ivec) = b!(self.hash_identifiers, hash_identifiers.get(hash)) else {
|
||||||
return Ok(None);
|
return Ok(None);
|
||||||
};
|
};
|
||||||
|
|
||||||
Ok(Some(I::from_bytes(ivec.to_vec())?))
|
Ok(Some(Arc::from(
|
||||||
|
std::str::from_utf8(&ivec[..])
|
||||||
|
.map_err(SledError::from)?
|
||||||
|
.to_string(),
|
||||||
|
)))
|
||||||
}
|
}
|
||||||
|
|
||||||
#[tracing::instrument(level = "debug", skip(self, hash), fields(hash = hex::encode(&hash)))]
|
#[tracing::instrument(level = "debug", skip(self, hash), fields(hash = hex::encode(&hash)))]
|
||||||
async fn variants<I: Identifier + 'static>(
|
async fn variants(&self, hash: Self::Bytes) -> Result<Vec<(String, Arc<str>)>, RepoError> {
|
||||||
&self,
|
|
||||||
hash: Self::Bytes,
|
|
||||||
) -> Result<Vec<(String, I)>, StoreError> {
|
|
||||||
let vec = b!(
|
let vec = b!(
|
||||||
self.hash_variant_identifiers,
|
self.hash_variant_identifiers,
|
||||||
Ok(hash_variant_identifiers
|
Ok(hash_variant_identifiers
|
||||||
.scan_prefix(&hash)
|
.scan_prefix(&hash)
|
||||||
.filter_map(|res| res.ok())
|
.filter_map(|res| res.ok())
|
||||||
.filter_map(|(key, ivec)| {
|
.filter_map(|(key, ivec)| {
|
||||||
let identifier = I::from_bytes(ivec.to_vec()).ok();
|
let identifier = String::from_utf8(ivec.to_vec()).ok();
|
||||||
if identifier.is_none() {
|
if identifier.is_none() {
|
||||||
tracing::warn!(
|
tracing::warn!(
|
||||||
"Skipping an identifier: {}",
|
"Skipping an identifier: {}",
|
||||||
|
@ -254,7 +254,7 @@ impl HashRepo for SledRepo {
|
||||||
tracing::warn!("Skipping a variant: {}", String::from_utf8_lossy(&key));
|
tracing::warn!("Skipping a variant: {}", String::from_utf8_lossy(&key));
|
||||||
}
|
}
|
||||||
|
|
||||||
Some((variant?, identifier?))
|
Some((variant?, Arc::from(identifier?)))
|
||||||
})
|
})
|
||||||
.collect::<Vec<_>>()) as Result<Vec<_>, SledError>
|
.collect::<Vec<_>>()) as Result<Vec<_>, SledError>
|
||||||
);
|
);
|
||||||
|
@ -263,16 +263,20 @@ impl HashRepo for SledRepo {
|
||||||
}
|
}
|
||||||
|
|
||||||
#[tracing::instrument(level = "trace", skip(self, hash), fields(hash = hex::encode(&hash)))]
|
#[tracing::instrument(level = "trace", skip(self, hash), fields(hash = hex::encode(&hash)))]
|
||||||
async fn motion_identifier<I: Identifier + 'static>(
|
async fn motion_identifier(&self, hash: Self::Bytes) -> Result<Option<Arc<str>>, RepoError> {
|
||||||
&self,
|
|
||||||
hash: Self::Bytes,
|
|
||||||
) -> Result<Option<I>, StoreError> {
|
|
||||||
let opt = b!(
|
let opt = b!(
|
||||||
self.hash_motion_identifiers,
|
self.hash_motion_identifiers,
|
||||||
hash_motion_identifiers.get(hash)
|
hash_motion_identifiers.get(hash)
|
||||||
);
|
);
|
||||||
|
|
||||||
opt.map(|ivec| I::from_bytes(ivec.to_vec())).transpose()
|
opt.map(|ivec| {
|
||||||
|
Ok(Arc::from(
|
||||||
|
std::str::from_utf8(&ivec[..])
|
||||||
|
.map_err(SledError::from)?
|
||||||
|
.to_string(),
|
||||||
|
))
|
||||||
|
})
|
||||||
|
.transpose()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
200
src/store.rs
|
@@ -1,10 +1,9 @@
 use actix_web::web::Bytes;
-use base64::{prelude::BASE64_STANDARD, Engine};
 use futures_core::Stream;
 use std::{fmt::Debug, sync::Arc};
 use tokio::io::{AsyncRead, AsyncWrite};

-use crate::error_code::ErrorCode;
+use crate::{error_code::ErrorCode, stream::LocalBoxStream};

 pub(crate) mod file_store;
 pub(crate) mod object_store;
@@ -70,32 +69,15 @@ impl From<crate::store::object_store::ObjectError> for StoreError
     }
 }

-pub(crate) trait Identifier: Send + Sync + Debug {
-    fn to_bytes(&self) -> Result<Vec<u8>, StoreError>;
-
-    fn from_bytes(bytes: Vec<u8>) -> Result<Self, StoreError>
-    where
-        Self: Sized;
-
-    fn from_arc(arc: Arc<[u8]>) -> Result<Self, StoreError>
-    where
-        Self: Sized;
-
-    fn string_repr(&self) -> String;
-}
-
 #[async_trait::async_trait(?Send)]
 pub(crate) trait Store: Clone + Debug {
-    type Identifier: Identifier + Clone + 'static;
-    type Stream: Stream<Item = std::io::Result<Bytes>> + Unpin + 'static;

     async fn health_check(&self) -> Result<(), StoreError>;

     async fn save_async_read<Reader>(
         &self,
         reader: Reader,
         content_type: mime::Mime,
-    ) -> Result<Self::Identifier, StoreError>
+    ) -> Result<Arc<str>, StoreError>
     where
         Reader: AsyncRead + Unpin + 'static;
@@ -103,7 +85,7 @@ pub(crate) trait Store: Clone + Debug {
         &self,
         stream: S,
         content_type: mime::Mime,
-    ) -> Result<Self::Identifier, StoreError>
+    ) -> Result<Arc<str>, StoreError>
     where
         S: Stream<Item = std::io::Result<Bytes>> + Unpin + 'static;

@@ -111,28 +93,28 @@ pub(crate) trait Store: Clone + Debug {
         &self,
         bytes: Bytes,
         content_type: mime::Mime,
-    ) -> Result<Self::Identifier, StoreError>;
+    ) -> Result<Arc<str>, StoreError>;

-    fn public_url(&self, _: &Self::Identifier) -> Option<url::Url>;
+    fn public_url(&self, _: &Arc<str>) -> Option<url::Url>;

     async fn to_stream(
         &self,
-        identifier: &Self::Identifier,
+        identifier: &Arc<str>,
         from_start: Option<u64>,
         len: Option<u64>,
-    ) -> Result<Self::Stream, StoreError>;
+    ) -> Result<LocalBoxStream<'static, std::io::Result<Bytes>>, StoreError>;

     async fn read_into<Writer>(
         &self,
-        identifier: &Self::Identifier,
+        identifier: &Arc<str>,
         writer: &mut Writer,
     ) -> Result<(), std::io::Error>
     where
         Writer: AsyncWrite + Unpin;

-    async fn len(&self, identifier: &Self::Identifier) -> Result<u64, StoreError>;
+    async fn len(&self, identifier: &Arc<str>) -> Result<u64, StoreError>;

-    async fn remove(&self, identifier: &Self::Identifier) -> Result<(), StoreError>;
+    async fn remove(&self, identifier: &Arc<str>) -> Result<(), StoreError>;
 }

 #[async_trait::async_trait(?Send)]
@@ -140,9 +122,6 @@ impl<T> Store for actix_web::web::Data<T>
 where
     T: Store,
 {
-    type Identifier = T::Identifier;
-    type Stream = T::Stream;
-
     async fn health_check(&self) -> Result<(), StoreError> {
         T::health_check(self).await
     }
@@ -151,7 +130,7 @@ where
         &self,
         reader: Reader,
         content_type: mime::Mime,
-    ) -> Result<Self::Identifier, StoreError>
+    ) -> Result<Arc<str>, StoreError>
     where
         Reader: AsyncRead + Unpin + 'static,
     {
@@ -162,7 +141,7 @@ where
         &self,
         stream: S,
         content_type: mime::Mime,
-    ) -> Result<Self::Identifier, StoreError>
+    ) -> Result<Arc<str>, StoreError>
     where
         S: Stream<Item = std::io::Result<Bytes>> + Unpin + 'static,
     {
@@ -173,26 +152,26 @@ where
         &self,
         bytes: Bytes,
         content_type: mime::Mime,
-    ) -> Result<Self::Identifier, StoreError> {
+    ) -> Result<Arc<str>, StoreError> {
         T::save_bytes(self, bytes, content_type).await
     }

-    fn public_url(&self, identifier: &Self::Identifier) -> Option<url::Url> {
+    fn public_url(&self, identifier: &Arc<str>) -> Option<url::Url> {
         T::public_url(self, identifier)
     }

     async fn to_stream(
         &self,
-        identifier: &Self::Identifier,
+        identifier: &Arc<str>,
         from_start: Option<u64>,
         len: Option<u64>,
-    ) -> Result<Self::Stream, StoreError> {
+    ) -> Result<LocalBoxStream<'static, std::io::Result<Bytes>>, StoreError> {
         T::to_stream(self, identifier, from_start, len).await
     }

     async fn read_into<Writer>(
         &self,
-        identifier: &Self::Identifier,
+        identifier: &Arc<str>,
         writer: &mut Writer,
     ) -> Result<(), std::io::Error>
     where
@@ -201,11 +180,83 @@ where
         T::read_into(self, identifier, writer).await
     }

-    async fn len(&self, identifier: &Self::Identifier) -> Result<u64, StoreError> {
+    async fn len(&self, identifier: &Arc<str>) -> Result<u64, StoreError> {
         T::len(self, identifier).await
     }

-    async fn remove(&self, identifier: &Self::Identifier) -> Result<(), StoreError> {
+    async fn remove(&self, identifier: &Arc<str>) -> Result<(), StoreError> {
+        T::remove(self, identifier).await
+    }
+}
+
+#[async_trait::async_trait(?Send)]
+impl<T> Store for Arc<T>
+where
+    T: Store,
+{
+    async fn health_check(&self) -> Result<(), StoreError> {
+        T::health_check(self).await
+    }
+
+    async fn save_async_read<Reader>(
+        &self,
+        reader: Reader,
+        content_type: mime::Mime,
+    ) -> Result<Arc<str>, StoreError>
+    where
+        Reader: AsyncRead + Unpin + 'static,
+    {
+        T::save_async_read(self, reader, content_type).await
+    }
+
+    async fn save_stream<S>(
+        &self,
+        stream: S,
+        content_type: mime::Mime,
+    ) -> Result<Arc<str>, StoreError>
+    where
+        S: Stream<Item = std::io::Result<Bytes>> + Unpin + 'static,
+    {
+        T::save_stream(self, stream, content_type).await
+    }
+
+    async fn save_bytes(
+        &self,
+        bytes: Bytes,
+        content_type: mime::Mime,
+    ) -> Result<Arc<str>, StoreError> {
+        T::save_bytes(self, bytes, content_type).await
+    }
+
+    fn public_url(&self, identifier: &Arc<str>) -> Option<url::Url> {
+        T::public_url(self, identifier)
+    }
+
+    async fn to_stream(
+        &self,
+        identifier: &Arc<str>,
+        from_start: Option<u64>,
+        len: Option<u64>,
+    ) -> Result<LocalBoxStream<'static, std::io::Result<Bytes>>, StoreError> {
+        T::to_stream(self, identifier, from_start, len).await
+    }
+
+    async fn read_into<Writer>(
+        &self,
+        identifier: &Arc<str>,
+        writer: &mut Writer,
+    ) -> Result<(), std::io::Error>
+    where
+        Writer: AsyncWrite + Unpin,
+    {
+        T::read_into(self, identifier, writer).await
+    }
+
+    async fn len(&self, identifier: &Arc<str>) -> Result<u64, StoreError> {
+        T::len(self, identifier).await
+    }
+
+    async fn remove(&self, identifier: &Arc<str>) -> Result<(), StoreError> {
         T::remove(self, identifier).await
     }
 }
@@ -215,9 +266,6 @@ impl<'a, T> Store for &'a T
 where
     T: Store,
 {
-    type Identifier = T::Identifier;
-    type Stream = T::Stream;
-
     async fn health_check(&self) -> Result<(), StoreError> {
         T::health_check(self).await
     }
@@ -226,7 +274,7 @@ where
         &self,
         reader: Reader,
         content_type: mime::Mime,
-    ) -> Result<Self::Identifier, StoreError>
+    ) -> Result<Arc<str>, StoreError>
     where
         Reader: AsyncRead + Unpin + 'static,
     {
@@ -237,7 +285,7 @@ where
         &self,
         stream: S,
         content_type: mime::Mime,
-    ) -> Result<Self::Identifier, StoreError>
+    ) -> Result<Arc<str>, StoreError>
     where
         S: Stream<Item = std::io::Result<Bytes>> + Unpin + 'static,
     {
@@ -248,26 +296,26 @@ where
         &self,
         bytes: Bytes,
         content_type: mime::Mime,
-    ) -> Result<Self::Identifier, StoreError> {
+    ) -> Result<Arc<str>, StoreError> {
         T::save_bytes(self, bytes, content_type).await
     }

-    fn public_url(&self, identifier: &Self::Identifier) -> Option<url::Url> {
+    fn public_url(&self, identifier: &Arc<str>) -> Option<url::Url> {
         T::public_url(self, identifier)
     }

     async fn to_stream(
         &self,
-        identifier: &Self::Identifier,
+        identifier: &Arc<str>,
         from_start: Option<u64>,
         len: Option<u64>,
-    ) -> Result<Self::Stream, StoreError> {
+    ) -> Result<LocalBoxStream<'static, std::io::Result<Bytes>>, StoreError> {
         T::to_stream(self, identifier, from_start, len).await
     }

     async fn read_into<Writer>(
         &self,
-        identifier: &Self::Identifier,
+        identifier: &Arc<str>,
         writer: &mut Writer,
     ) -> Result<(), std::io::Error>
     where
@@ -276,59 +324,11 @@ where
         T::read_into(self, identifier, writer).await
     }

-    async fn len(&self, identifier: &Self::Identifier) -> Result<u64, StoreError> {
+    async fn len(&self, identifier: &Arc<str>) -> Result<u64, StoreError> {
         T::len(self, identifier).await
     }

-    async fn remove(&self, identifier: &Self::Identifier) -> Result<(), StoreError> {
+    async fn remove(&self, identifier: &Arc<str>) -> Result<(), StoreError> {
         T::remove(self, identifier).await
     }
 }
-
-impl Identifier for Vec<u8> {
-    fn from_bytes(bytes: Vec<u8>) -> Result<Self, StoreError>
-    where
-        Self: Sized,
-    {
-        Ok(bytes)
-    }
-
-    fn from_arc(arc: Arc<[u8]>) -> Result<Self, StoreError>
-    where
-        Self: Sized,
-    {
-        Ok(Vec::from(&arc[..]))
-    }
-
-    fn to_bytes(&self) -> Result<Vec<u8>, StoreError> {
-        Ok(self.clone())
-    }
-
-    fn string_repr(&self) -> String {
-        BASE64_STANDARD.encode(self.as_slice())
-    }
-}
-
-impl Identifier for Arc<[u8]> {
-    fn from_bytes(bytes: Vec<u8>) -> Result<Self, StoreError>
-    where
-        Self: Sized,
-    {
-        Ok(Arc::from(bytes))
-    }
-
-    fn from_arc(arc: Arc<[u8]>) -> Result<Self, StoreError>
-    where
-        Self: Sized,
-    {
-        Ok(arc)
-    }
-
-    fn to_bytes(&self) -> Result<Vec<u8>, StoreError> {
-        Ok(Vec::from(&self[..]))
-    }
-
-    fn string_repr(&self) -> String {
-        BASE64_STANDARD.encode(&self[..])
-    }
-}
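
With the associated Identifier and Stream types removed, every store now speaks in terms of Arc<str> and LocalBoxStream directly, which is why the blanket impls for Data<T>, Arc<T>, and &T can simply forward every call. A rough sketch of how calling code sees the reworked trait (the helper name and the octet-stream content type are illustrative, not part of this commit):

async fn save_and_len<S: Store>(store: &S, bytes: Bytes) -> Result<(Arc<str>, u64), StoreError> {
    // save_bytes now yields the identifier as a plain Arc<str>
    let identifier = store
        .save_bytes(bytes, mime::APPLICATION_OCTET_STREAM)
        .await?;

    // the same Arc<str> is passed back in for every follow-up call
    let len = store.len(&identifier).await?;

    Ok((identifier, len))
}
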
@@ -1,18 +1,17 @@
-use crate::{error_code::ErrorCode, file::File, repo::ArcRepo, store::Store};
+use crate::{
+    error_code::ErrorCode, file::File, repo::ArcRepo, store::Store, stream::LocalBoxStream,
+};
 use actix_web::web::Bytes;
 use futures_core::Stream;
 use std::{
     path::{Path, PathBuf},
-    pin::Pin,
+    sync::Arc,
 };
 use storage_path_generator::Generator;
 use tokio::io::{AsyncRead, AsyncWrite};
 use tokio_util::io::StreamReader;
 use tracing::Instrument;

-mod file_id;
-pub(crate) use file_id::FileId;
-
 use super::StoreError;

 // - Settings Tree
@@ -28,11 +27,8 @@ pub(crate) enum FileError {
     #[error("Failed to generate path")]
     PathGenerator(#[from] storage_path_generator::PathError),

-    #[error("Error formatting file store ID")]
-    IdError,
-
-    #[error("Malformed file store ID")]
-    PrefixError,
+    #[error("Couldn't convert Path to String")]
+    StringError,

     #[error("Tried to save over existing file")]
     FileExists,
@@ -44,7 +40,7 @@ impl FileError {
             Self::Io(_) => ErrorCode::FILE_IO_ERROR,
             Self::PathGenerator(_) => ErrorCode::PARSE_PATH_ERROR,
             Self::FileExists => ErrorCode::FILE_EXISTS,
-            Self::IdError | Self::PrefixError => ErrorCode::FORMAT_FILE_ID_ERROR,
+            Self::StringError => ErrorCode::FORMAT_FILE_ID_ERROR,
         }
     }
 }
@@ -58,9 +54,6 @@ pub(crate) struct FileStore {

 #[async_trait::async_trait(?Send)]
 impl Store for FileStore {
-    type Identifier = FileId;
-    type Stream = Pin<Box<dyn Stream<Item = std::io::Result<Bytes>>>>;
-
     async fn health_check(&self) -> Result<(), StoreError> {
         tokio::fs::metadata(&self.root_dir)
             .await
@@ -74,7 +67,7 @@ impl Store for FileStore {
         &self,
         mut reader: Reader,
         _content_type: mime::Mime,
-    ) -> Result<Self::Identifier, StoreError>
+    ) -> Result<Arc<str>, StoreError>
     where
         Reader: AsyncRead + Unpin + 'static,
     {
@@ -92,7 +85,7 @@ impl Store for FileStore {
         &self,
         stream: S,
         content_type: mime::Mime,
-    ) -> Result<Self::Identifier, StoreError>
+    ) -> Result<Arc<str>, StoreError>
     where
         S: Stream<Item = std::io::Result<Bytes>> + Unpin + 'static,
     {
@@ -105,7 +98,7 @@ impl Store for FileStore {
         &self,
         bytes: Bytes,
         _content_type: mime::Mime,
-    ) -> Result<Self::Identifier, StoreError> {
+    ) -> Result<Arc<str>, StoreError> {
         let path = self.next_file().await?;

         if let Err(e) = self.safe_save_bytes(&path, bytes).await {
@@ -116,17 +109,17 @@ impl Store for FileStore {
         Ok(self.file_id_from_path(path)?)
     }

-    fn public_url(&self, _identifier: &Self::Identifier) -> Option<url::Url> {
+    fn public_url(&self, _identifier: &Arc<str>) -> Option<url::Url> {
         None
     }

     #[tracing::instrument]
     async fn to_stream(
         &self,
-        identifier: &Self::Identifier,
+        identifier: &Arc<str>,
         from_start: Option<u64>,
         len: Option<u64>,
-    ) -> Result<Self::Stream, StoreError> {
+    ) -> Result<LocalBoxStream<'static, std::io::Result<Bytes>>, StoreError> {
         let path = self.path_from_file_id(identifier);

         let file_span = tracing::trace_span!(parent: None, "File Stream");
@@ -147,7 +140,7 @@ impl Store for FileStore {
     #[tracing::instrument(skip(writer))]
     async fn read_into<Writer>(
         &self,
-        identifier: &Self::Identifier,
+        identifier: &Arc<str>,
         writer: &mut Writer,
     ) -> Result<(), std::io::Error>
     where
@@ -161,7 +154,7 @@ impl Store for FileStore {
     }

     #[tracing::instrument]
-    async fn len(&self, identifier: &Self::Identifier) -> Result<u64, StoreError> {
+    async fn len(&self, identifier: &Arc<str>) -> Result<u64, StoreError> {
         let path = self.path_from_file_id(identifier);

         let len = tokio::fs::metadata(path)
@@ -173,7 +166,7 @@ impl Store for FileStore {
     }

     #[tracing::instrument]
-    async fn remove(&self, identifier: &Self::Identifier) -> Result<(), StoreError> {
+    async fn remove(&self, identifier: &Arc<str>) -> Result<(), StoreError> {
         let path = self.path_from_file_id(identifier);

         self.safe_remove_file(path).await?;
@@ -196,6 +189,14 @@ impl FileStore {
         })
     }

+    fn file_id_from_path(&self, path: PathBuf) -> Result<Arc<str>, FileError> {
+        path.to_str().ok_or(FileError::StringError).map(Into::into)
+    }
+
+    fn path_from_file_id(&self, file_id: &Arc<str>) -> PathBuf {
+        self.root_dir.join(file_id.as_ref())
+    }
+
     async fn next_directory(&self) -> Result<PathBuf, StoreError> {
         let path = self.path_gen.next();

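
In the file store the FileId newtype goes away as well: the generated file path, rendered as a string, now serves as the identifier, converted by the two helpers added above. A small sketch of that conversion (illustrative; the paths are made up):

fn file_identifier_round_trip() {
    use std::{path::PathBuf, sync::Arc};

    // path -> identifier, mirroring file_id_from_path
    let path = PathBuf::from("files/00/01/abcdef");
    let identifier: Arc<str> = Arc::from(path.to_str().expect("UTF-8 path"));

    // identifier -> path, mirroring path_from_file_id
    let root_dir = PathBuf::from("/var/lib/pict-rs");
    let _full_path = root_dir.join(identifier.as_ref());
}
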
@@ -1,57 +0,0 @@
-use crate::store::{
-    file_store::{FileError, FileStore},
-    Identifier, StoreError,
-};
-use std::path::PathBuf;
-
-#[derive(Clone, Debug, PartialEq, Eq)]
-pub(crate) struct FileId(PathBuf);
-
-impl Identifier for FileId {
-    fn to_bytes(&self) -> Result<Vec<u8>, StoreError> {
-        let vec = self
-            .0
-            .to_str()
-            .ok_or(FileError::IdError)?
-            .as_bytes()
-            .to_vec();
-
-        Ok(vec)
-    }
-
-    fn from_bytes(bytes: Vec<u8>) -> Result<Self, StoreError>
-    where
-        Self: Sized,
-    {
-        let string = String::from_utf8(bytes).map_err(|_| FileError::IdError)?;
-
-        let id = FileId(PathBuf::from(string));
-
-        Ok(id)
-    }
-
-    fn from_arc(arc: std::sync::Arc<[u8]>) -> Result<Self, StoreError>
-    where
-        Self: Sized,
-    {
-        Self::from_bytes(Vec::from(&arc[..]))
-    }
-
-    fn string_repr(&self) -> String {
-        self.0.to_string_lossy().into_owned()
-    }
-}
-
-impl FileStore {
-    pub(super) fn file_id_from_path(&self, path: PathBuf) -> Result<FileId, FileError> {
-        let stripped = path
-            .strip_prefix(&self.root_dir)
-            .map_err(|_| FileError::PrefixError)?;
-
-        Ok(FileId(stripped.to_path_buf()))
-    }
-
-    pub(super) fn path_from_file_id(&self, file_id: &FileId) -> PathBuf {
-        self.root_dir.join(&file_id.0)
-    }
-}
@@ -3,7 +3,7 @@ use crate::{
     error_code::ErrorCode,
     repo::ArcRepo,
     store::Store,
-    stream::{IntoStreamer, StreamMap},
+    stream::{IntoStreamer, LocalBoxStream, StreamMap},
 };
 use actix_rt::task::JoinError;
 use actix_web::{
@@ -19,16 +19,13 @@ use futures_core::Stream;
 use reqwest::{header::RANGE, Body, Response};
 use reqwest_middleware::{ClientWithMiddleware, RequestBuilder};
 use rusty_s3::{actions::S3Action, Bucket, BucketError, Credentials, UrlStyle};
-use std::{pin::Pin, string::FromUtf8Error, time::Duration};
+use std::{string::FromUtf8Error, sync::Arc, time::Duration};
 use storage_path_generator::{Generator, Path};
 use tokio::io::{AsyncRead, AsyncWrite, AsyncWriteExt};
 use tokio_util::io::ReaderStream;
 use tracing::Instrument;
 use url::Url;

-mod object_id;
-pub(crate) use object_id::ObjectId;
-
 use super::StoreError;

 const CHUNK_SIZE: usize = 8_388_608; // 8 Mebibytes, min is 5 (5_242_880);
@@ -189,9 +186,6 @@ async fn status_error(response: Response) -> StoreError {

 #[async_trait::async_trait(?Send)]
 impl Store for ObjectStore {
-    type Identifier = ObjectId;
-    type Stream = Pin<Box<dyn Stream<Item = std::io::Result<Bytes>>>>;
-
     async fn health_check(&self) -> Result<(), StoreError> {
         let response = self
             .head_bucket_request()
@@ -211,7 +205,7 @@ impl Store for ObjectStore {
         &self,
         reader: Reader,
         content_type: mime::Mime,
-    ) -> Result<Self::Identifier, StoreError>
+    ) -> Result<Arc<str>, StoreError>
     where
         Reader: AsyncRead + Unpin + 'static,
     {
@@ -224,7 +218,7 @@ impl Store for ObjectStore {
         &self,
         mut stream: S,
         content_type: mime::Mime,
-    ) -> Result<Self::Identifier, StoreError>
+    ) -> Result<Arc<str>, StoreError>
     where
         S: Stream<Item = std::io::Result<Bytes>> + Unpin + 'static,
     {
@@ -363,7 +357,7 @@ impl Store for ObjectStore {
         &self,
         bytes: Bytes,
         content_type: mime::Mime,
-    ) -> Result<Self::Identifier, StoreError> {
+    ) -> Result<Arc<str>, StoreError> {
         let (req, object_id) = self.put_object_request(bytes.len(), content_type).await?;

         let response = req.body(bytes).send().await.map_err(ObjectError::from)?;
@@ -375,9 +369,9 @@ impl Store for ObjectStore {
         Ok(object_id)
     }

-    fn public_url(&self, identifier: &Self::Identifier) -> Option<url::Url> {
+    fn public_url(&self, identifier: &Arc<str>) -> Option<url::Url> {
         self.public_endpoint.clone().map(|mut endpoint| {
-            endpoint.set_path(identifier.as_str());
+            endpoint.set_path(identifier.as_ref());
             endpoint
         })
     }
@@ -385,10 +379,10 @@ impl Store for ObjectStore {
     #[tracing::instrument(skip(self))]
     async fn to_stream(
         &self,
-        identifier: &Self::Identifier,
+        identifier: &Arc<str>,
         from_start: Option<u64>,
         len: Option<u64>,
-    ) -> Result<Self::Stream, StoreError> {
+    ) -> Result<LocalBoxStream<'static, std::io::Result<Bytes>>, StoreError> {
         let response = self
             .get_object_request(identifier, from_start, len)
             .send()
@@ -409,7 +403,7 @@ impl Store for ObjectStore {
     #[tracing::instrument(skip(self, writer))]
     async fn read_into<Writer>(
         &self,
-        identifier: &Self::Identifier,
+        identifier: &Arc<str>,
         writer: &mut Writer,
     ) -> Result<(), std::io::Error>
     where
@@ -440,7 +434,7 @@ impl Store for ObjectStore {
     }

     #[tracing::instrument(skip(self))]
-    async fn len(&self, identifier: &Self::Identifier) -> Result<u64, StoreError> {
+    async fn len(&self, identifier: &Arc<str>) -> Result<u64, StoreError> {
         let response = self
             .head_object_request(identifier)
             .send()
@@ -464,7 +458,7 @@ impl Store for ObjectStore {
     }

     #[tracing::instrument(skip(self))]
-    async fn remove(&self, identifier: &Self::Identifier) -> Result<(), StoreError> {
+    async fn remove(&self, identifier: &Arc<str>) -> Result<(), StoreError> {
         let response = self
             .delete_object_request(identifier)
             .send()
@@ -523,7 +517,7 @@ impl ObjectStore {
         &self,
         length: usize,
         content_type: mime::Mime,
-    ) -> Result<(RequestBuilder, ObjectId), StoreError> {
+    ) -> Result<(RequestBuilder, Arc<str>), StoreError> {
         let path = self.next_file().await?;

         let mut action = self.bucket.put_object(Some(&self.credentials), &path);
@@ -535,13 +529,13 @@ impl ObjectStore {
             .headers_mut()
             .insert("content-length", length.to_string());

-        Ok((self.build_request(action), ObjectId::from_string(path)))
+        Ok((self.build_request(action), Arc::from(path)))
     }

     async fn create_multipart_request(
         &self,
         content_type: mime::Mime,
-    ) -> Result<(RequestBuilder, ObjectId), StoreError> {
+    ) -> Result<(RequestBuilder, Arc<str>), StoreError> {
         let path = self.next_file().await?;

         let mut action = self
@@ -552,13 +546,13 @@ impl ObjectStore {
             .headers_mut()
             .insert("content-type", content_type.as_ref());

-        Ok((self.build_request(action), ObjectId::from_string(path)))
+        Ok((self.build_request(action), Arc::from(path)))
     }

     async fn create_upload_part_request(
         &self,
         buf: BytesStream,
-        object_id: &ObjectId,
+        object_id: &Arc<str>,
         part_number: u16,
         upload_id: &str,
     ) -> Result<RequestBuilder, ObjectError> {
@@ -566,7 +560,7 @@ impl ObjectStore {

         let mut action = self.bucket.upload_part(
             Some(&self.credentials),
-            object_id.as_str(),
+            object_id.as_ref(),
             part_number,
             upload_id,
         );
@@ -601,13 +595,13 @@ impl ObjectStore {

     async fn send_complete_multipart_request<'a, I: Iterator<Item = &'a str>>(
         &'a self,
-        object_id: &'a ObjectId,
+        object_id: &'a Arc<str>,
         upload_id: &'a str,
         etags: I,
     ) -> Result<Response, reqwest_middleware::Error> {
         let mut action = self.bucket.complete_multipart_upload(
             Some(&self.credentials),
-            object_id.as_str(),
+            object_id.as_ref(),
             upload_id,
             etags,
         );
@@ -628,12 +622,12 @@ impl ObjectStore {

     fn create_abort_multipart_request(
         &self,
-        object_id: &ObjectId,
+        object_id: &Arc<str>,
         upload_id: &str,
     ) -> RequestBuilder {
         let action = self.bucket.abort_multipart_upload(
             Some(&self.credentials),
-            object_id.as_str(),
+            object_id.as_ref(),
             upload_id,
         );

@@ -671,13 +665,13 @@ impl ObjectStore {

     fn get_object_request(
         &self,
-        identifier: &ObjectId,
+        identifier: &Arc<str>,
         from_start: Option<u64>,
         len: Option<u64>,
     ) -> RequestBuilder {
         let action = self
             .bucket
-            .get_object(Some(&self.credentials), identifier.as_str());
+            .get_object(Some(&self.credentials), identifier.as_ref());

         let req = self.build_request(action);

@@ -695,18 +689,18 @@ impl ObjectStore {
         )
     }

-    fn head_object_request(&self, identifier: &ObjectId) -> RequestBuilder {
+    fn head_object_request(&self, identifier: &Arc<str>) -> RequestBuilder {
         let action = self
             .bucket
-            .head_object(Some(&self.credentials), identifier.as_str());
+            .head_object(Some(&self.credentials), identifier.as_ref());

         self.build_request(action)
     }

-    fn delete_object_request(&self, identifier: &ObjectId) -> RequestBuilder {
+    fn delete_object_request(&self, identifier: &Arc<str>) -> RequestBuilder {
         let action = self
             .bucket
-            .delete_object(Some(&self.credentials), identifier.as_str());
+            .delete_object(Some(&self.credentials), identifier.as_ref());

         self.build_request(action)
     }
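
The object store follows the same pattern: the generated object key is used directly as the identifier, so the ObjectId wrapper and its byte conversions can be dropped entirely (the file removed below). A rough sketch of the substitution (illustrative only):

// before: keys were wrapped in ObjectId and read back out with as_str()
// after: the String key becomes the identifier itself
fn object_identifier(key: String) -> std::sync::Arc<str> {
    std::sync::Arc::from(key)
}
// request builders now take identifier.as_ref() where they previously took object_id.as_str()
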
@@ -1,37 +0,0 @@
-use crate::store::{object_store::ObjectError, Identifier, StoreError};
-
-#[derive(Debug, Clone)]
-pub(crate) struct ObjectId(String);
-
-impl Identifier for ObjectId {
-    fn to_bytes(&self) -> Result<Vec<u8>, StoreError> {
-        Ok(self.0.as_bytes().to_vec())
-    }
-
-    fn from_bytes(bytes: Vec<u8>) -> Result<Self, StoreError> {
-        Ok(ObjectId(
-            String::from_utf8(bytes).map_err(ObjectError::from)?,
-        ))
-    }
-
-    fn from_arc(arc: std::sync::Arc<[u8]>) -> Result<Self, StoreError>
-    where
-        Self: Sized,
-    {
-        Self::from_bytes(Vec::from(&arc[..]))
-    }
-
-    fn string_repr(&self) -> String {
-        self.0.clone()
-    }
-}
-
-impl ObjectId {
-    pub(super) fn from_string(string: String) -> Self {
-        ObjectId(string)
-    }
-
-    pub(super) fn as_str(&self) -> &str {
-        &self.0
-    }
-}