use crate::{
    error::{Error, UploadError},
    queue::{Cleanup, LocalBoxFuture},
    repo::{Alias, AliasRepo, DeleteToken, FullRepo, HashRepo, IdentifierRepo},
    serde_str::Serde,
    store::{Identifier, Store},
};
use futures_util::StreamExt;
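
// Entry point for the cleanup job runner: deserializes a queued `Cleanup` job
// and dispatches it to the matching handler below. Invalid payloads are logged
// and skipped rather than failing the worker.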
pub(super) fn perform<'a, R, S>(
    repo: &'a R,
    store: &'a S,
    job: &'a [u8],
) -> LocalBoxFuture<'a, Result<(), Error>>
where
    R: FullRepo,
    S: Store,
{
    Box::pin(async move {
        match serde_json::from_slice(job) {
            Ok(job) => match job {
                Cleanup::Hash { hash: in_hash } => hash::<R, S>(repo, in_hash).await?,
                Cleanup::Identifier {
                    identifier: in_identifier,
                } => identifier(repo, store, in_identifier).await?,
                Cleanup::Alias {
                    alias: stored_alias,
                    token,
                } => {
                    alias(
                        repo,
                        Serde::into_inner(stored_alias),
                        Serde::into_inner(token),
                    )
                    .await?
                }
                Cleanup::Variant { hash } => variant::<R, S>(repo, hash).await?,
                Cleanup::AllVariants => all_variants::<R, S>(repo).await?,
            },
            Err(e) => {
                tracing::warn!("Invalid job: {}", format!("{}", e));
            }
        }

        Ok(())
    })
}
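
// Remove a single file identifier from the store and drop its repo metadata.
// Errors are collected rather than returned early so both cleanup steps are
// always attempted; any failures are logged together afterwards.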
#[tracing::instrument(skip(repo, store))]
async fn identifier<R, S>(repo: &R, store: &S, identifier: Vec<u8>) -> Result<(), Error>
where
    R: FullRepo,
    S: Store,
{
    let identifier = S::Identifier::from_bytes(identifier)?;

    let mut errors = Vec::new();

    if let Err(e) = store.remove(&identifier).await {
        errors.push(e);
    }

    if let Err(e) = IdentifierRepo::cleanup(repo, &identifier).await {
        errors.push(e);
    }

    if !errors.is_empty() {
        let span = tracing::error_span!("Error deleting files");
        span.in_scope(|| {
            for error in errors {
                tracing::error!("{}", format!("{}", error));
            }
        });
    }

    Ok(())
}
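
// Clean up everything associated with a hash. If aliases still exist, alias
// cleanup is queued instead and the job returns early; otherwise all related
// identifiers (variants, original, motion) are queued for cleanup and the hash
// record itself is removed.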
#[tracing::instrument(skip(repo))]
async fn hash<R, S>(repo: &R, hash: Vec<u8>) -> Result<(), Error>
where
    R: FullRepo,
    S: Store,
{
    let hash: R::Bytes = hash.into();

    let aliases = repo.aliases(hash.clone()).await?;

    if !aliases.is_empty() {
        for alias in aliases {
            let token = repo.delete_token(&alias).await?;
            super::cleanup_alias(repo, alias, token).await?;
        }
        // Return after queueing alias cleanup, since we will be requeued when the last alias is cleaned
        return Ok(());
    }

    let mut idents = repo
        .variants::<S::Identifier>(hash.clone())
        .await?
        .into_iter()
        .map(|(_, v)| v)
        .collect::<Vec<_>>();
    idents.push(repo.identifier(hash.clone()).await?);
    idents.extend(repo.motion_identifier(hash.clone()).await?);

    for identifier in idents {
        let _ = super::cleanup_identifier(repo, identifier).await;
    }

    HashRepo::cleanup(repo, hash).await?;

    Ok(())
}
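
// Remove an alias after verifying the provided delete token, and queue hash
// cleanup when the last alias for that hash is gone.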
async fn alias<R>(repo: &R, alias: Alias, token: DeleteToken) -> Result<(), Error>
where
    R: FullRepo,
{
    let saved_delete_token = repo.delete_token(&alias).await?;
    if saved_delete_token != token {
        return Err(UploadError::InvalidToken.into());
    }

    let hash = repo.hash(&alias).await?;

    AliasRepo::cleanup(repo, &alias).await?;
    repo.remove_alias(hash.clone(), &alias).await?;

    if repo.aliases(hash.clone()).await?.is_empty() {
        super::cleanup_hash(repo, hash).await?;
    }

    Ok(())
}
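
// Walk every hash in the repo and queue variant cleanup for each one via
// `super::cleanup_variants`.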
async fn all_variants<R, S>(repo: &R) -> Result<(), Error>
where
    R: FullRepo,
    S: Store,
{
    let mut hash_stream = Box::pin(repo.hashes().await);

    while let Some(res) = hash_stream.next().await {
        let hash = res?;
        super::cleanup_variants(repo, hash).await?;
    }

    Ok(())
}
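
// Remove all generated variants for a hash, queueing identifier cleanup for
// each removed variant's file.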
async fn variant<R, S>(repo: &R, hash: Vec<u8>) -> Result<(), Error>
where
    R: FullRepo,
    S: Store,
{
    let hash: R::Bytes = hash.into();

    for (variant, identifier) in repo.variants::<S::Identifier>(hash.clone()).await? {
        repo.remove_variant(hash.clone(), variant).await?;
        super::cleanup_identifier(repo, identifier).await?;
    }

    Ok(())
}