Mirror of https://git.asonix.dog/asonix/pict-rs.git
Synced 2024-11-25 02:51:17 +00:00
Fix duplicate uploads potentially purging existing uploads on failure
commit 7ba2fc9b26
parent 58f0c328d9
1 changed file with 6 additions and 3 deletions
@@ -92,9 +92,7 @@ where

     let hash = hasher.borrow_mut().finalize_reset().to_vec();

-    session.hash = Some(hash.clone());
-
-    save_upload(repo, store, &hash, &identifier).await?;
+    save_upload(&mut session, repo, store, &hash, &identifier).await?;

     if let Some(alias) = declared_alias {
         session.add_existing_alias(&hash, alias).await?
@@ -107,6 +105,7 @@ where

 #[tracing::instrument(level = "trace", skip_all)]
 async fn save_upload<R, S>(
+    session: &mut Session<R, S>,
     repo: &R,
     store: &S,
     hash: &[u8],
@@ -117,10 +116,14 @@ where
     R: FullRepo,
 {
     if HashRepo::create(repo, hash.to_vec().into()).await?.is_err() {
         // duplicate upload
         store.remove(identifier).await?;
         return Ok(());
     }
+
+    // Set hash after upload uniqueness check so we don't clean existing files on failure
+    session.hash = Some(Vec::from(hash));
+
     repo.relate_identifier(hash.to_vec().into(), identifier)
         .await?;

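
Why the ordering matters: when an upload fails part-way through, the ingest session tears down whatever it has claimed so far. Before this change the session took ownership of the content hash before the duplicate check, so a duplicate upload that bailed out would also purge the records of the upload that already existed. The sketch below illustrates the ordering problem with simplified, synchronous stand-ins; `Repo`, `Session`, and the two `ingest_*` functions are illustrative assumptions, not the real pict-rs types (the real session tracks more state, such as aliases and a store identifier, and cleans up asynchronously).

// Simplified stand-ins, not the real pict-rs API: a Repo that only tracks
// content hashes and a Session that purges its claimed hash on teardown.
use std::collections::HashSet;

#[derive(Default)]
struct Repo {
    hashes: HashSet<Vec<u8>>,
}

impl Repo {
    /// Ok(()) if the hash was not known before, Err(()) for a duplicate.
    fn create(&mut self, hash: Vec<u8>) -> Result<(), ()> {
        if self.hashes.insert(hash) {
            Ok(())
        } else {
            Err(())
        }
    }

    fn remove(&mut self, hash: &[u8]) {
        self.hashes.remove(hash);
    }
}

/// Whatever hash the session owns at teardown time gets purged, which is
/// why the position of `session.hash = ...` relative to the duplicate
/// check matters.
struct Session<'a> {
    repo: &'a mut Repo,
    hash: Option<Vec<u8>>,
}

impl Session<'_> {
    /// Called when the upload does not complete normally.
    fn cleanup(self) {
        if let Some(hash) = self.hash {
            self.repo.remove(&hash);
        }
    }
}

/// Old ordering: the session claims the hash before the duplicate check.
fn ingest_before_fix(repo: &mut Repo, hash: &[u8]) {
    let mut session = Session { repo, hash: None };
    session.hash = Some(hash.to_vec()); // claimed too early
    if session.repo.create(hash.to_vec()).is_err() {
        // Duplicate upload: tearing the session down now purges the
        // existing upload's record, because the session already owns the hash.
        session.cleanup();
    }
    // success path elided in this sketch
}

/// New ordering (this commit): only claim the hash once `create` succeeds.
fn ingest_after_fix(repo: &mut Repo, hash: &[u8]) {
    let mut session = Session { repo, hash: None };
    if session.repo.create(hash.to_vec()).is_err() {
        // Duplicate upload: the session never owned the hash, so cleanup
        // leaves the existing upload untouched.
        session.cleanup();
        return;
    }
    session.hash = Some(hash.to_vec());
    // ... relate the store identifier, add aliases, mark the session complete ...
}

fn main() {
    let hash = b"cafebabe".to_vec();

    let mut repo = Repo::default();
    repo.create(hash.clone()).unwrap(); // the existing upload
    ingest_before_fix(&mut repo, &hash); // a duplicate arrives and fails
    assert!(!repo.hashes.contains(hash.as_slice())); // existing upload was purged

    let mut repo = Repo::default();
    repo.create(hash.clone()).unwrap(); // the existing upload
    ingest_after_fix(&mut repo, &hash); // a duplicate arrives and fails
    assert!(repo.hashes.contains(hash.as_slice())); // existing upload survives
}

Running the sketch, the ingest_before_fix path leaves the repo empty after a duplicate upload, while the ingest_after_fix path, which mirrors this commit by assigning session.hash only after the uniqueness check succeeds, leaves the existing record in place.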