use std::sync::Arc;
use streem::IntoStreamer;

use crate::{
    config::Configuration,
    error::{Error, UploadError},
    future::LocalBoxFuture,
    queue::Cleanup,
    repo::{Alias, ArcRepo, DeleteToken, Hash},
    serde_str::Serde,
    store::Store,
};
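
/// Deserializes a cleanup job and dispatches it to the matching handler.
///
/// Jobs that fail to deserialize are logged and skipped.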
pub(super) fn perform<'a, S>(
    repo: &'a ArcRepo,
    store: &'a S,
    configuration: &'a Configuration,
    job: serde_json::Value,
) -> LocalBoxFuture<'a, Result<(), Error>>
where
    S: Store,
{
    Box::pin(async move {
        match serde_json::from_value(job) {
            Ok(job) => match job {
                Cleanup::Hash { hash: in_hash } => hash(repo, in_hash).await?,
                Cleanup::Identifier {
                    identifier: in_identifier,
                } => identifier(repo, store, Arc::from(in_identifier)).await?,
                Cleanup::Alias {
                    alias: stored_alias,
                    token,
                } => {
                    alias(
                        repo,
                        Serde::into_inner(stored_alias),
                        Serde::into_inner(token),
                    )
                    .await?
                }
                Cleanup::Variant { hash, variant } => hash_variant(repo, hash, variant).await?,
                Cleanup::AllVariants => all_variants(repo).await?,
                Cleanup::OutdatedVariants => outdated_variants(repo, configuration).await?,
                Cleanup::OutdatedProxies => outdated_proxies(repo, configuration).await?,
            },
            Err(e) => {
                tracing::warn!("Invalid job: {e}");
            }
        }

        Ok(())
    })
}
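
/// Removes a stored object from the store and its cached details from the repo.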
#[tracing::instrument(skip_all)]
async fn identifier<S>(repo: &ArcRepo, store: &S, identifier: Arc<str>) -> Result<(), Error>
where
    S: Store,
{
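    // Attempt both removals, collecting errors so one failure doesn't block the other.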
    let mut errors = Vec::new();

    if let Err(e) = store.remove(&identifier).await {
        errors.push(UploadError::from(e));
    }

    if let Err(e) = repo.cleanup_details(&identifier).await {
        errors.push(UploadError::from(e));
    }

    for error in errors {
        tracing::error!("{error:?}");
    }

    Ok(())
}
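
/// Cleans up all records associated with a hash.
///
/// If aliases still reference the hash, alias cleanup is queued instead and
/// this job runs again once the last alias is removed.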
#[tracing::instrument(skip_all)]
async fn hash(repo: &ArcRepo, hash: Hash) -> Result<(), Error> {
    let aliases = repo.aliases_for_hash(hash.clone()).await?;

    if !aliases.is_empty() {
        for alias in aliases {
            // TODO: decide if it is okay to skip aliases without tokens
            if let Some(token) = repo.delete_token(&alias).await? {
                super::cleanup_alias(repo, alias, token).await?;
            } else {
                tracing::warn!("Not cleaning alias!");
            }
        }
        // Return after queueing alias cleanup, since we will be requeued when the last alias is cleaned
        return Ok(());
    }
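
    // Gather every identifier attached to this hash: all variants, the
    // original upload, and the motion identifier if one exists.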
    let mut idents = repo
        .variants(hash.clone())
        .await?
        .into_iter()
        .map(|(_, v)| v)
        .collect::<Vec<_>>();
    idents.extend(repo.identifier(hash.clone()).await?);
    idents.extend(repo.motion_identifier(hash.clone()).await?);

    for identifier in idents {
        let _ = super::cleanup_identifier(repo, &identifier).await;
    }

    repo.cleanup_hash(hash).await?;

    Ok(())
}
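
/// Removes an alias after validating the provided delete token, clearing the
/// alias' related repo records along the way.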
#[tracing::instrument(skip_all)]
async fn alias(repo: &ArcRepo, alias: Alias, token: DeleteToken) -> Result<(), Error> {
    let saved_delete_token = repo.delete_token(&alias).await?;

    if saved_delete_token.is_some() && saved_delete_token != Some(token) {
        return Err(UploadError::InvalidToken.into());
    }

    let hash = repo.hash(&alias).await?;

    repo.cleanup_alias(&alias).await?;
    repo.remove_relation(alias.clone()).await?;
    repo.remove_alias_access(alias.clone()).await?;

    let Some(hash) = hash else {
        // hash doesn't exist, nothing to do
        return Ok(());
    };
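
    // If no other aliases reference the hash, clean up the hash as well.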
    if repo.aliases_for_hash(hash.clone()).await?.is_empty() {
        super::cleanup_hash(repo, hash).await?;
    }

    Ok(())
}
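
/// Queues variant cleanup for every hash in the repo.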
#[tracing::instrument(skip_all)]
async fn all_variants(repo: &ArcRepo) -> Result<(), Error> {
    let hash_stream = std::pin::pin!(repo.hashes());
    let mut hash_stream = hash_stream.into_streamer();

    while let Some(res) = hash_stream.next().await {
        let hash = res?;
        super::cleanup_variants(repo, hash, None).await?;
    }

    Ok(())
}
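
/// Queues cleanup for variants that haven't been accessed within the
/// configured variant retention window.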
#[tracing::instrument(skip_all)]
async fn outdated_variants(repo: &ArcRepo, config: &Configuration) -> Result<(), Error> {
    let now = time::OffsetDateTime::now_utc();
    let since = now.saturating_sub(config.media.retention.variants.to_duration());

    let mut variant_stream = repo.older_variants(since).await?.into_streamer();

    while let Some(res) = variant_stream.next().await {
        let (hash, variant) = res?;
        super::cleanup_variants(repo, hash, Some(variant)).await?;
    }

    Ok(())
}
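
/// Removes proxied aliases that have outlived the configured proxy retention
/// period, queueing full alias cleanup when a delete token is present.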
#[tracing::instrument(skip_all)]
async fn outdated_proxies(repo: &ArcRepo, config: &Configuration) -> Result<(), Error> {
    let now = time::OffsetDateTime::now_utc();
    let since = now.saturating_sub(config.media.retention.proxy.to_duration());

    let mut alias_stream = repo.older_aliases(since).await?.into_streamer();

    while let Some(res) = alias_stream.next().await {
        let alias = res?;
        if let Some(token) = repo.delete_token(&alias).await? {
            super::cleanup_alias(repo, alias, token).await?;
        } else {
            tracing::warn!("Skipping alias cleanup - no delete token");
            repo.remove_relation(alias.clone()).await?;
            repo.remove_alias_access(alias).await?;
        }
    }

    Ok(())
}
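
/// Removes a single variant of a hash, or every variant when no target is
/// given, cleaning up each removed variant's identifier.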
#[tracing::instrument(skip_all)]
async fn hash_variant(
    repo: &ArcRepo,
    hash: Hash,
    target_variant: Option<String>,
) -> Result<(), Error> {
    if let Some(target_variant) = target_variant {
        if let Some(identifier) = repo
            .variant_identifier(hash.clone(), target_variant.clone())
            .await?
        {
            super::cleanup_identifier(repo, &identifier).await?;
        }

        repo.remove_variant(hash.clone(), target_variant.clone())
            .await?;
        repo.remove_variant_access(hash, target_variant).await?;
    } else {
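        // No target variant specified: remove every variant for the hash.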
        for (variant, identifier) in repo.variants(hash.clone()).await? {
            repo.remove_variant(hash.clone(), variant.clone()).await?;
            repo.remove_variant_access(hash.clone(), variant).await?;
            super::cleanup_identifier(repo, &identifier).await?;
        }
    }

    Ok(())
}