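//! Media ingest: buffer an upload stream, validate (and optionally
//! preprocess) it, write it to the store, and record its hash and alias,
//! with drop-based cleanup for sessions that don't complete.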
use std::{sync::Arc, time::Duration};

use crate::{
    bytes_stream::BytesStream,
    either::Either,
    error::{Error, UploadError},
    formats::{InternalFormat, Validations},
    future::WithMetrics,
    repo::{Alias, ArcRepo, DeleteToken, Hash},
    store::Store,
};
use actix_web::web::Bytes;
use futures_core::Stream;
use reqwest::Body;
use reqwest_middleware::ClientWithMiddleware;
use streem::IntoStreamer;
use tracing::{Instrument, Span};

mod hasher;
use hasher::Hasher;
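/// State for an in-flight upload. If a `Session` is dropped before `disarm`
/// is called, any hash, alias, or stored file it still tracks is queued for
/// cleanup.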
#[derive(Debug)]
pub(crate) struct Session {
    repo: ArcRepo,
    delete_token: DeleteToken,
    hash: Option<Hash>,
    alias: Option<Alias>,
    identifier: Option<Arc<str>>,
}
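/// Buffers the entire upload stream into memory, since validation operates on
/// the complete set of bytes.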
#[tracing::instrument(skip(stream))]
async fn aggregate<S>(stream: S) -> Result<Bytes, Error>
where
    S: Stream<Item = Result<Bytes, Error>>,
{
    let mut buf = BytesStream::new();

    let stream = std::pin::pin!(stream);
    let mut stream = stream.into_streamer();

    while let Some(res) = stream.next().await {
        buf.add_bytes(res?);
    }

    Ok(buf.into_bytes())
}
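/// Ingests one upload: validates (and optionally preprocesses) the bytes,
/// writes them to the store, optionally runs external validation, then
/// records the hash and an alias in the repo.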
#[tracing::instrument(skip(repo, store, client, stream, media))]
pub(crate) async fn ingest<S>(
    repo: &ArcRepo,
    store: &S,
    client: &ClientWithMiddleware,
    stream: impl Stream<Item = Result<Bytes, Error>> + 'static,
    declared_alias: Option<Alias>,
    media: &crate::config::Media,
) -> Result<Session, Error>
where
    S: Store,
{
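    // Hold a processing permit so concurrent uploads don't oversubscribe the
    // media processors; it is dropped once the file has been saved.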
    let permit = crate::PROCESS_SEMAPHORE.acquire().await;

    let bytes = aggregate(stream).await?;

    let prescribed = Validations {
        image: &media.image,
        animation: &media.animation,
        video: &media.video,
    };

    tracing::trace!("Validating bytes");
    let (input_type, validated_reader) =
        crate::validate::validate_bytes(bytes, prescribed, media.process_timeout).await?;
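    // Apply any configured preprocess steps via imagemagick when the input
    // format supports processing; otherwise pass the validated reader through
    // unchanged.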
    let processed_reader = if let Some(operations) = media.preprocess_steps() {
        if let Some(format) = input_type.processable_format() {
            let (_, magick_args) =
                crate::processor::build_chain(operations, format.file_extension())?;

            let quality = match format {
                crate::formats::ProcessableFormat::Image(format) => media.image.quality_for(format),
                crate::formats::ProcessableFormat::Animation(format) => {
                    media.animation.quality_for(format)
                }
            };

            let processed_reader = crate::magick::process_image_async_read(
                validated_reader,
                magick_args,
                format,
                format,
                quality,
                media.process_timeout,
            )
            .await?;

            Either::left(processed_reader)
        } else {
            Either::right(validated_reader)
        }
    } else {
        Either::right(validated_reader)
    };
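    // Hash the bytes as they stream into the store so the file only needs to
    // be read once; the digest is finalized after the save completes.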
    let hasher_reader = Hasher::new(processed_reader);
    let state = hasher_reader.state();

    let identifier = store
        .save_async_read(hasher_reader, input_type.media_type())
        .await?;

    drop(permit);

    let mut session = Session {
        repo: repo.clone(),
        delete_token: DeleteToken::generate(),
        hash: None,
        alias: None,
        identifier: Some(identifier.clone()),
    };
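    // When an external validation endpoint is configured, POST the stored
    // file to it and reject the upload on any non-success response.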
    if let Some(endpoint) = &media.external_validation {
        let stream = store.to_stream(&identifier, None, None).await?;

        let response = client
            .post(endpoint.as_str())
            .timeout(Duration::from_secs(media.external_validation_timeout))
            .header("Content-Type", input_type.media_type().as_ref())
            .body(Body::wrap_stream(crate::stream::make_send(stream)))
            .send()
            .instrument(tracing::info_span!("external-validation"))
            .with_metrics("pict-rs.ingest.external-validation")
            .await?;

        if !response.status().is_success() {
            return Err(UploadError::FailedExternalValidation.into());
        }
    }
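    // The hasher state was shared with the reader above, so finalizing it now
    // yields the digest and size of exactly the bytes that were stored.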
    let (hash, size) = state.borrow_mut().finalize_reset();

    let hash = Hash::new(hash, size, input_type);

    save_upload(&mut session, repo, store, hash.clone(), &identifier).await?;

    if let Some(alias) = declared_alias {
        session.add_existing_alias(hash, alias).await?
    } else {
        session.create_alias(hash, input_type).await?
    };

    Ok(session)
}
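/// Records the hash-to-identifier mapping, treating an existing hash as a
/// duplicate upload whose newly stored file can be removed.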
#[tracing::instrument(level = "trace", skip_all)]
async fn save_upload<S>(
    session: &mut Session,
    repo: &ArcRepo,
    store: &S,
    hash: Hash,
    identifier: &Arc<str>,
) -> Result<(), Error>
where
    S: Store,
{
    if repo.create_hash(hash.clone(), identifier).await?.is_err() {
        // duplicate upload
        store.remove(identifier).await?;
        session.identifier.take();
        return Ok(());
    }

    // Set hash after the upload uniqueness check so we don't clean existing files on failure
    session.hash = Some(hash);

    Ok(())
}
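// A Session cleans up after itself on Drop unless `disarm` is called once the
// upload has fully succeeded.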
impl Session {
    pub(crate) fn disarm(mut self) -> DeleteToken {
        let _ = self.hash.take();
        let _ = self.alias.take();
        let _ = self.identifier.take();

        self.delete_token.clone()
    }

    pub(crate) fn alias(&self) -> Option<&Alias> {
        self.alias.as_ref()
    }

    pub(crate) fn delete_token(&self) -> &DeleteToken {
        &self.delete_token
    }
    #[tracing::instrument(skip(self, hash))]
    async fn add_existing_alias(&mut self, hash: Hash, alias: Alias) -> Result<(), Error> {
        self.repo
            .create_alias(&alias, &self.delete_token, hash)
            .await?
            .map_err(|_| UploadError::DuplicateAlias)?;

        self.alias = Some(alias);

        Ok(())
    }
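    // Generated aliases can collide with existing ones, so retry with a fresh
    // alias until the repo accepts it.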
    #[tracing::instrument(level = "debug", skip(self, hash))]
    async fn create_alias(&mut self, hash: Hash, input_type: InternalFormat) -> Result<(), Error> {
        loop {
            let alias = Alias::generate(input_type.file_extension().to_string());

            if self
                .repo
                .create_alias(&alias, &self.delete_token, hash.clone())
                .await?
                .is_ok()
            {
                self.alias = Some(alias);

                return Ok(());
            }

            tracing::trace!("Alias exists, regenerating");
        }
    }
}
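// If the session still holds state when dropped, the upload didn't complete;
// spawn background tasks to clean up whatever was already persisted.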
impl Drop for Session {
    fn drop(&mut self) {
        let any_items = self.hash.is_some() || self.alias.is_some() || self.identifier.is_some();

        metrics::increment_counter!("pict-rs.ingest.end", "completed" => (!any_items).to_string());

        if any_items {
            let cleanup_parent_span = tracing::info_span!(parent: None, "Dropped session cleanup");
            cleanup_parent_span.follows_from(Span::current());

            if let Some(hash) = self.hash.take() {
                let repo = self.repo.clone();

                let cleanup_span = tracing::info_span!(parent: &cleanup_parent_span, "Session cleanup hash", hash = ?hash);

                crate::sync::spawn(
                    async move {
                        let _ = crate::queue::cleanup_hash(&repo, hash).await;
                    }
                    .instrument(cleanup_span),
                );
            }

            if let Some(alias) = self.alias.take() {
                let repo = self.repo.clone();
                let token = self.delete_token.clone();

                let cleanup_span = tracing::info_span!(parent: &cleanup_parent_span, "Session cleanup alias", alias = ?alias);

                crate::sync::spawn(
                    async move {
                        let _ = crate::queue::cleanup_alias(&repo, alias, token).await;
                    }
                    .instrument(cleanup_span),
                );
            }

            if let Some(identifier) = self.identifier.take() {
                let repo = self.repo.clone();

                let cleanup_span = tracing::info_span!(parent: &cleanup_parent_span, "Session cleanup identifier", identifier = ?identifier);

                crate::sync::spawn(
                    async move {
                        let _ = crate::queue::cleanup_identifier(&repo, &identifier).await;
                    }
                    .instrument(cleanup_span),
                );
            }
        }
    }
}