From 7ba2fc9b264e46c31fc6b9d018ac0f8a52a049f7 Mon Sep 17 00:00:00 2001
From: asonix
Date: Fri, 23 Jun 2023 11:39:43 -0500
Subject: [PATCH] Fix duplicate uploads potentially purging existing uploads on failure

---
 src/ingest.rs | 9 ++++++---
 1 file changed, 6 insertions(+), 3 deletions(-)

diff --git a/src/ingest.rs b/src/ingest.rs
index f5615f8..fa34cbd 100644
--- a/src/ingest.rs
+++ b/src/ingest.rs
@@ -92,9 +92,7 @@ where
 
     let hash = hasher.borrow_mut().finalize_reset().to_vec();
 
-    session.hash = Some(hash.clone());
-
-    save_upload(repo, store, &hash, &identifier).await?;
+    save_upload(&mut session, repo, store, &hash, &identifier).await?;
 
     if let Some(alias) = declared_alias {
         session.add_existing_alias(&hash, alias).await?
@@ -107,6 +105,7 @@ where
 
 #[tracing::instrument(level = "trace", skip_all)]
 async fn save_upload<R, S>(
+    session: &mut Session,
     repo: &R,
     store: &S,
     hash: &[u8],
@@ -117,10 +116,14 @@ where
     R: FullRepo,
 {
     if HashRepo::create(repo, hash.to_vec().into()).await?.is_err() {
+        // duplicate upload
         store.remove(identifier).await?;
 
         return Ok(());
     }
 
+    // Set hash after upload uniqueness check so we don't clean existing files on failure
+    session.hash = Some(Vec::from(hash));
+
     repo.relate_identifier(hash.to_vec().into(), identifier)
         .await?;