
Fix duplicate uploads potentially purging existing uploads on failure

asonix 2023-06-23 11:39:43 -05:00
parent 58f0c328d9
commit 7ba2fc9b26


@@ -92,9 +92,7 @@ where
     let hash = hasher.borrow_mut().finalize_reset().to_vec();
 
-    session.hash = Some(hash.clone());
-
-    save_upload(repo, store, &hash, &identifier).await?;
+    save_upload(&mut session, repo, store, &hash, &identifier).await?;
 
     if let Some(alias) = declared_alias {
         session.add_existing_alias(&hash, alias).await?
@@ -107,6 +105,7 @@ where
 #[tracing::instrument(level = "trace", skip_all)]
 async fn save_upload<R, S>(
+    session: &mut Session<R, S>,
     repo: &R,
     store: &S,
     hash: &[u8],
@@ -117,10 +116,14 @@ where
     R: FullRepo,
 {
     if HashRepo::create(repo, hash.to_vec().into()).await?.is_err() {
+        // duplicate upload
         store.remove(identifier).await?;
         return Ok(());
     }
 
+    // Set hash after upload uniqueness check so we don't clean existing files on failure
+    session.hash = Some(Vec::from(hash));
+
     repo.relate_identifier(hash.to_vec().into(), identifier)
         .await?;
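
To illustrate the ordering issue this commit fixes, here is a minimal, self-contained Rust sketch. The Session, Repo, and Store types and the synchronous save_upload signature below are simplified stand-ins invented for illustration; they are not pict-rs's real types (the actual code is async and generic over FullRepo and Store). The point mirrored here is that ownership of the hash, and therefore cleanup responsibility, transfers to the session only after the uniqueness check confirms the hash is new, so a failed duplicate upload can never purge records belonging to the original.

use std::collections::HashSet;

// Simplified stand-in for the repo's hash table.
#[derive(Default)]
struct Repo {
    hashes: HashSet<Vec<u8>>,
}

impl Repo {
    // Returns false when the hash already exists, i.e. a duplicate upload.
    fn create(&mut self, hash: Vec<u8>) -> bool {
        self.hashes.insert(hash)
    }
}

// Simplified stand-in for the file store.
#[derive(Default)]
struct Store {
    files: HashSet<String>,
}

// Tracks an in-flight upload; cleanup() removes whatever the session
// believes it owns. Setting `hash` eagerly (the pre-fix behavior) meant a
// failing duplicate upload would purge the EXISTING upload's hash record.
#[derive(Default)]
struct Session {
    hash: Option<Vec<u8>>,
}

impl Session {
    fn cleanup(&self, repo: &mut Repo) {
        if let Some(hash) = &self.hash {
            repo.hashes.remove(hash);
        }
    }
}

fn save_upload(
    session: &mut Session,
    repo: &mut Repo,
    store: &mut Store,
    hash: &[u8],
    identifier: &str,
) {
    if !repo.create(hash.to_vec()) {
        // duplicate upload: drop the redundant file, keep the original
        store.files.remove(identifier);
        return;
    }

    // Set hash only after the uniqueness check, mirroring the fix: a later
    // failure now cleans up only state this session actually created.
    session.hash = Some(hash.to_vec());
}

fn main() {
    let (mut repo, mut store) = (Repo::default(), Store::default());

    // First upload succeeds and takes ownership of its hash.
    let mut first = Session::default();
    store.files.insert("file-1".into());
    save_upload(&mut first, &mut repo, &mut store, b"abc", "file-1");

    // A second upload of identical content is detected as a duplicate, so
    // its session never takes ownership of the hash...
    let mut second = Session::default();
    store.files.insert("file-2".into());
    save_upload(&mut second, &mut repo, &mut store, b"abc", "file-2");

    // ...and even if it fails and cleans up, the original is untouched.
    second.cleanup(&mut repo);
    assert!(repo.hashes.contains(b"abc".as_slice()));
    println!("original upload preserved");
}

With the pre-fix ordering, session.hash would already be set when save_upload detected the duplicate, so the failing session's cleanup would remove the hash record owned by the earlier upload.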