Make stores take filename into account when saving files

Aode (lion) 2021-11-16 09:45:16 -06:00
parent 334e508d88
commit 0d153ca239
6 changed files with 49 additions and 36 deletions

View file

@@ -411,7 +411,7 @@ where
     let bytes2 = bytes.clone();
     actix_rt::spawn(
         async move {
-            let identifier = match store.save_bytes(bytes2).await {
+            let identifier = match store.save_bytes(bytes2, &name).await {
                Ok(identifier) => identifier,
                Err(e) => {
                    tracing::warn!("Failed to generate directory path: {}", e);

View file

@@ -27,11 +27,16 @@ pub(crate) trait Store: Send + Sync + Clone + Debug + 'static {
     async fn save_async_read<Reader>(
         &self,
         reader: &mut Reader,
+        filename: &str,
     ) -> Result<Self::Identifier, Self::Error>
     where
         Reader: AsyncRead + Unpin;
 
-    async fn save_bytes(&self, bytes: Bytes) -> Result<Self::Identifier, Self::Error>;
+    async fn save_bytes(
+        &self,
+        bytes: Bytes,
+        filename: &str,
+    ) -> Result<Self::Identifier, Self::Error>;
 
     async fn to_stream(
         &self,

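Every call site now has to supply the filename it wants the store to use; the store derives only the directory. A minimal sketch of a caller against the updated trait, assuming the crate's `Store` trait and `Bytes` type as shown above (the helper itself is hypothetical, not part of the diff):

    // Hypothetical helper: persist bytes under a caller-chosen filename
    // using the updated trait methods.
    async fn persist_bytes<S: Store>(
        store: &S,
        bytes: Bytes,
        filename: &str,
    ) -> Result<S::Identifier, S::Error> {
        // The store joins `filename` onto its generated directory path
        // instead of minting a UUID internally.
        store.save_bytes(bytes, filename).await
    }
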
View file

@@ -8,7 +8,6 @@ use std::{
 use storage_path_generator::Generator;
 use tokio::io::{AsyncRead, AsyncWrite};
 use tracing::{debug, error, instrument};
-use uuid::Uuid;
 
 mod file_id;
 mod restructure;
@@ -58,11 +57,12 @@ impl Store for FileStore {
     async fn save_async_read<Reader>(
         &self,
         reader: &mut Reader,
+        filename: &str,
     ) -> Result<Self::Identifier, Self::Error>
     where
         Reader: AsyncRead + Unpin,
     {
-        let path = self.next_file()?;
+        let path = self.next_file(filename)?;
 
         if let Err(e) = self.safe_save_reader(&path, reader).await {
             self.safe_remove_file(&path).await?;
@ -73,8 +73,12 @@ impl Store for FileStore {
} }
#[tracing::instrument(skip(bytes))] #[tracing::instrument(skip(bytes))]
async fn save_bytes(&self, bytes: Bytes) -> Result<Self::Identifier, Self::Error> { async fn save_bytes(
let path = self.next_file()?; &self,
bytes: Bytes,
filename: &str,
) -> Result<Self::Identifier, Self::Error> {
let path = self.next_file(filename)?;
if let Err(e) = self.safe_save_bytes(&path, bytes).await { if let Err(e) = self.safe_save_bytes(&path, bytes).await {
self.safe_remove_file(&path).await?; self.safe_remove_file(&path).await?;
@@ -163,11 +167,9 @@ impl FileStore {
         Ok(target_path)
     }
 
-    fn next_file(&self) -> Result<PathBuf, FileError> {
+    fn next_file(&self, filename: &str) -> Result<PathBuf, FileError> {
         let target_path = self.next_directory()?;
-        let filename = Uuid::new_v4().to_string();
 
         Ok(target_path.join(filename))
     }

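The effect on the file store: the on-disk path is now the generated directory joined with the caller's filename rather than a fresh UUID. A standalone sketch of the same path-building logic, with the `Generator`-based directory simplified to a plain `PathBuf` argument:

    use std::path::{Path, PathBuf};

    // Sketch of the new next_file behaviour: the store only picks the
    // directory; the filename comes from the caller.
    fn next_file(target_dir: &Path, filename: &str) -> PathBuf {
        // Previously: target_dir.join(Uuid::new_v4().to_string())
        target_dir.join(filename)
    }

    fn main() {
        let dir = PathBuf::from("files/ab/cd");
        assert_eq!(next_file(&dir, "photo.png"), PathBuf::from("files/ab/cd/photo.png"));
    }
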
View file

@@ -11,7 +11,6 @@ use std::{
 };
 use storage_path_generator::{Generator, Path};
 use tokio::io::{AsyncRead, AsyncWrite};
-use uuid::Uuid;
 
 mod object_id;
 pub(crate) use object_id::ObjectId;
@@ -64,11 +63,12 @@ impl Store for ObjectStore {
     async fn save_async_read<Reader>(
         &self,
         reader: &mut Reader,
+        filename: &str,
     ) -> Result<Self::Identifier, Self::Error>
     where
         Reader: AsyncRead + Unpin,
     {
-        let path = self.next_file()?;
+        let path = self.next_file(filename)?;
 
         self.bucket
             .put_object_stream(&self.client, reader, &path)
@ -78,8 +78,12 @@ impl Store for ObjectStore {
} }
#[tracing::instrument(skip(bytes))] #[tracing::instrument(skip(bytes))]
async fn save_bytes(&self, bytes: Bytes) -> Result<Self::Identifier, Self::Error> { async fn save_bytes(
let path = self.next_file()?; &self,
bytes: Bytes,
filename: &str,
) -> Result<Self::Identifier, Self::Error> {
let path = self.next_file(filename)?;
self.bucket.put_object(&self.client, &path, &bytes).await?; self.bucket.put_object(&self.client, &path, &bytes).await?;
@@ -195,8 +199,7 @@ impl ObjectStore {
         Ok(path)
     }
 
-    fn next_file(&self) -> Result<String, ObjectError> {
-        let filename = Uuid::new_v4().to_string();
+    fn next_file(&self, filename: &str) -> Result<String, ObjectError> {
         let path = self.next_directory()?.to_strings().join("/");
 
         Ok(format!("{}/{}", path, filename))

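The object store is analogous: the object key now ends in the caller-supplied filename instead of a UUID. A self-contained sketch of the key construction, with the path-generator output stubbed as a slice of directory segments:

    // Sketch of the new object key layout: directory segments from the
    // path generator, then the caller-supplied filename.
    fn next_file(dir_segments: &[String], filename: &str) -> String {
        let path = dir_segments.join("/");
        format!("{}/{}", path, filename)
    }

    fn main() {
        let segments = vec!["0".to_string(), "3".to_string(), "f".to_string()];
        assert_eq!(next_file(&segments, "photo.png"), "0/3/f/photo.png");
    }
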
View file

@@ -120,7 +120,7 @@ impl UploadManager {
         futures_util::pin_mut!(stream);
         let mut reader = tokio_util::io::StreamReader::new(stream);
 
-        let new_identifier = to.save_async_read(&mut reader).await?;
+        let new_identifier = to.save_async_read(&mut reader, &filename).await?;
 
         let details_key = self.details_key(&identifier, &filename)?;
@@ -194,7 +194,7 @@ impl UploadManager {
                 ThumbnailFormat::Jpeg,
             )
             .await?;
-            let motion_identifier = store.save_async_read(&mut reader).await?;
+            let motion_identifier = store.save_async_read(&mut reader, &filename).await?;
 
             drop(permit);
             self.store_motion_path(&filename, &motion_identifier)

View file

@@ -167,7 +167,12 @@ where
         let mut hasher_reader = Hasher::new(validated_reader, self.manager.inner.hasher.clone());
 
-        let identifier = self.store.save_async_read(&mut hasher_reader).await?;
+        let filename = self.next_file(content_type).await?;
+        let identifier = self
+            .store
+            .save_async_read(&mut hasher_reader, &filename)
+            .await?;
         let hash = hasher_reader.finalize_reset().await?;
 
         debug!("Storing alias");
@@ -175,7 +180,7 @@ where
         self.add_existing_alias(&hash, &alias).await?;
 
         debug!("Saving file");
-        self.save_upload(&identifier, hash, content_type).await?;
+        self.save_upload(&identifier, hash, filename).await?;
 
         // Return alias to file
         Ok(self)
@@ -205,14 +210,19 @@ where
         let mut hasher_reader = Hasher::new(validated_reader, self.manager.inner.hasher.clone());
 
-        let identifier = self.store.save_async_read(&mut hasher_reader).await?;
+        let filename = self.next_file(input_type).await?;
+        let identifier = self
+            .store
+            .save_async_read(&mut hasher_reader, &filename)
+            .await?;
         let hash = hasher_reader.finalize_reset().await?;
 
         debug!("Adding alias");
         self.add_alias(&hash, input_type).await?;
 
         debug!("Saving file");
-        self.save_upload(&identifier, hash, input_type).await?;
+        self.save_upload(&identifier, hash, filename).await?;
 
         // Return alias to file
         Ok(self)
@@ -223,9 +233,9 @@ where
         &self,
         identifier: &S::Identifier,
         hash: Hash,
-        input_type: ValidInputType,
+        filename: String,
     ) -> Result<(), Error> {
-        let (dup, name) = self.check_duplicate(hash, input_type).await?;
+        let dup = self.check_duplicate(hash, filename.clone()).await?;
 
         // bail early with alias to existing file if this is a duplicate
         if dup.exists() {
@@ -235,22 +245,16 @@ where
             return Ok(());
         }
 
-        self.manager.store_identifier(name, identifier).await?;
+        self.manager.store_identifier(filename, identifier).await?;
 
         Ok(())
     }
 
     // check for an already-uploaded image with this hash, returning the path to the target file
-    #[instrument(skip(self, hash, input_type))]
-    async fn check_duplicate(
-        &self,
-        hash: Hash,
-        input_type: ValidInputType,
-    ) -> Result<(Dup, String), Error> {
+    #[instrument(skip(self, hash))]
+    async fn check_duplicate(&self, hash: Hash, filename: String) -> Result<Dup, Error> {
         let main_tree = self.manager.inner.main_tree.clone();
-        let filename = self.next_file(input_type).await?;
         let filename2 = filename.clone();
         let hash2 = hash.as_slice().to_vec();
 
         debug!("Inserting filename for hash");
@@ -270,15 +274,14 @@ where
         {
             let name = String::from_utf8(ivec.to_vec())?;
             debug!("Filename exists for hash, {}", name);
-            return Ok((Dup::Exists, name));
+            return Ok(Dup::Exists);
         }
 
         let fname_tree = self.manager.inner.filename_tree.clone();
-        let filename2 = filename.clone();
 
         debug!("Saving filename -> hash relation");
-        web::block(move || fname_tree.insert(filename2, hash.into_inner())).await??;
+        web::block(move || fname_tree.insert(filename, hash.into_inner())).await??;
 
-        Ok((Dup::New, filename))
+        Ok(Dup::New)
     }
 
     // generate a short filename that isn't already in-use
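
Taken together, the upload session now picks the filename up front and threads it through saving and deduplication. A rough sketch of the resulting flow, with the sled trees reduced to in-memory maps; everything here is illustrative, only the ordering and the `Dup`-only return mirror the diff:

    use std::collections::HashMap;

    #[derive(Debug, PartialEq)]
    enum Dup { Exists, New }

    // Illustrative stand-ins for the "main" (hash -> filename) and
    // "filename" (filename -> hash) trees.
    struct Trees {
        main: HashMap<Vec<u8>, String>,
        filenames: HashMap<String, Vec<u8>>,
    }

    impl Trees {
        // Mirrors the new check_duplicate: it takes the already-chosen
        // filename and reports only whether the hash was seen before.
        fn check_duplicate(&mut self, hash: Vec<u8>, filename: String) -> Dup {
            if self.main.contains_key(&hash) {
                return Dup::Exists;
            }
            self.main.insert(hash.clone(), filename.clone());
            self.filenames.insert(filename, hash);
            Dup::New
        }
    }

    fn main() {
        let mut trees = Trees { main: HashMap::new(), filenames: HashMap::new() };
        assert_eq!(trees.check_duplicate(vec![1, 2, 3], "a.png".into()), Dup::New);
        assert_eq!(trees.check_duplicate(vec![1, 2, 3], "b.png".into()), Dup::Exists);
    }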