Merge pull request 'Add per-upload limits and per-upload preprocess steps' (#55) from asonix/per-upload-limits-and-operations into main

Reviewed-on: https://git.asonix.dog/asonix/pict-rs/pulls/55
This commit is contained in:
asonix 2024-03-28 01:17:32 +00:00
commit 525deffd8d
14 changed files with 299 additions and 96 deletions

100
Cargo.lock generated
View file

@ -21,9 +21,9 @@ dependencies = [
[[package]] [[package]]
name = "actix-form-data" name = "actix-form-data"
version = "0.7.0-beta.6" version = "0.7.0-beta.7"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a0588d156cb871d8c237d55ce398e2a65727370fb98352ba5b65c15a2f834b0f" checksum = "6c2355a841a5d9a6c616d6a4f31336064116d206e6c1830de22730f983613a05"
dependencies = [ dependencies = [
"actix-multipart", "actix-multipart",
"actix-web", "actix-web",
@ -340,18 +340,18 @@ checksum = "16e62a023e7c117e27523144c5d2459f4397fcc3cab0085af8e2224f643a0193"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.53", "syn 2.0.55",
] ]
[[package]] [[package]]
name = "async-trait" name = "async-trait"
version = "0.1.78" version = "0.1.79"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "461abc97219de0eaaf81fe3ef974a540158f3d079c2ab200f891f1a2ef201e85" checksum = "a507401cad91ec6a857ed5513a2073c82a9b9048762b885bb98655b306964681"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.53", "syn 2.0.55",
] ]
[[package]] [[package]]
@ -362,9 +362,9 @@ checksum = "c59bdb34bc650a32731b31bd8f0829cc15d24a708ee31559e0bb34f2bc320cba"
[[package]] [[package]]
name = "autocfg" name = "autocfg"
version = "1.1.0" version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" checksum = "f1fdabc7756949593fe60f30ec81974b613357de856987752631dea1e3394c80"
[[package]] [[package]]
name = "axum" name = "axum"
@ -538,9 +538,9 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
[[package]] [[package]]
name = "chrono" name = "chrono"
version = "0.4.35" version = "0.4.37"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8eaf5903dcbc0a39312feb77df2ff4c76387d591b9fc7b04a238dcf8bb62639a" checksum = "8a0d04d43504c61aa6c7531f1871dd0d418d91130162063b789da00fd7057a5e"
dependencies = [ dependencies = [
"android-tzdata", "android-tzdata",
"iana-time-zone", "iana-time-zone",
@ -550,9 +550,9 @@ dependencies = [
[[package]] [[package]]
name = "clap" name = "clap"
version = "4.5.3" version = "4.5.4"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "949626d00e063efc93b6dca932419ceb5432f99769911c0b995f7e884c778813" checksum = "90bc066a67923782aa8515dbaea16946c5bcc5addbd668bb80af688e53e548a0"
dependencies = [ dependencies = [
"clap_builder", "clap_builder",
"clap_derive", "clap_derive",
@ -572,14 +572,14 @@ dependencies = [
[[package]] [[package]]
name = "clap_derive" name = "clap_derive"
version = "4.5.3" version = "4.5.4"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "90239a040c80f5e14809ca132ddc4176ab33d5e17e49691793296e3fcb34d72f" checksum = "528131438037fd55894f62d6e9f068b8f45ac57ffa77517819645d10aed04f64"
dependencies = [ dependencies = [
"heck 0.5.0", "heck 0.5.0",
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.53", "syn 2.0.55",
] ]
[[package]] [[package]]
@ -839,7 +839,7 @@ dependencies = [
"heck 0.4.1", "heck 0.4.1",
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.53", "syn 2.0.55",
] ]
[[package]] [[package]]
@ -851,7 +851,7 @@ dependencies = [
"diesel_table_macro_syntax", "diesel_table_macro_syntax",
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.53", "syn 2.0.55",
] ]
[[package]] [[package]]
@ -860,7 +860,7 @@ version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fc5557efc453706fed5e4fa85006fe9817c224c3f480a34c7e5959fd700921c5" checksum = "fc5557efc453706fed5e4fa85006fe9817c224c3f480a34c7e5959fd700921c5"
dependencies = [ dependencies = [
"syn 2.0.53", "syn 2.0.55",
] ]
[[package]] [[package]]
@ -1019,7 +1019,7 @@ checksum = "87750cf4b7a4c0625b1529e4c543c2182106e4dedc60a2a6455e00d212c489ac"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.53", "syn 2.0.55",
] ]
[[package]] [[package]]
@ -1367,9 +1367,9 @@ dependencies = [
[[package]] [[package]]
name = "itoa" name = "itoa"
version = "1.0.10" version = "1.0.11"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b1a46d1a171d865aa5f83f92695765caa047a9b4cbae2cbf37dbd613a793fd4c" checksum = "49f1f14873335454500d59611f1cf4a4b0f786f9ac11f4312a78e4cf2566695b"
[[package]] [[package]]
name = "js-sys" name = "js-sys"
@ -1464,9 +1464,9 @@ dependencies = [
[[package]] [[package]]
name = "memchr" name = "memchr"
version = "2.7.1" version = "2.7.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "523dc4f511e55ab87b694dc30d0f820d60906ef06413f93d4d7a1385599cc149" checksum = "6c8640c5d730cb13ebd907d8d04b52f55ac9a2eec55b440c8892f40d56c76c1d"
[[package]] [[package]]
name = "metrics" name = "metrics"
@ -1852,7 +1852,7 @@ dependencies = [
"reqwest", "reqwest",
"reqwest-middleware", "reqwest-middleware",
"reqwest-tracing", "reqwest-tracing",
"rustls 0.22.2", "rustls 0.22.3",
"rustls-channel-resolver", "rustls-channel-resolver",
"rustls-pemfile 2.1.1", "rustls-pemfile 2.1.1",
"rusty-s3", "rusty-s3",
@ -1900,7 +1900,7 @@ checksum = "2f38a4412a78282e09a2cf38d195ea5420d15ba0602cb375210efbc877243965"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.53", "syn 2.0.55",
] ]
[[package]] [[package]]
@ -2009,7 +2009,7 @@ dependencies = [
"itertools", "itertools",
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.53", "syn 2.0.55",
] ]
[[package]] [[package]]
@ -2154,7 +2154,7 @@ dependencies = [
"quote", "quote",
"refinery-core", "refinery-core",
"regex", "regex",
"syn 2.0.53", "syn 2.0.55",
] ]
[[package]] [[package]]
@ -2166,7 +2166,7 @@ dependencies = [
"aho-corasick", "aho-corasick",
"memchr", "memchr",
"regex-automata 0.4.6", "regex-automata 0.4.6",
"regex-syntax 0.8.2", "regex-syntax 0.8.3",
] ]
[[package]] [[package]]
@ -2186,7 +2186,7 @@ checksum = "86b83b8b9847f9bf95ef68afb0b8e6cdb80f498442f5179a29fad448fcc1eaea"
dependencies = [ dependencies = [
"aho-corasick", "aho-corasick",
"memchr", "memchr",
"regex-syntax 0.8.2", "regex-syntax 0.8.3",
] ]
[[package]] [[package]]
@ -2197,9 +2197,9 @@ checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1"
[[package]] [[package]]
name = "regex-syntax" name = "regex-syntax"
version = "0.8.2" version = "0.8.3"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c08c74e62047bb2de4ff487b251e4a92e24f48745648451635cec7d591162d9f" checksum = "adad44e29e4c806119491a7f06f03de4d1af22c3a680dd47f1e6e179439d1f56"
[[package]] [[package]]
name = "reqwest" name = "reqwest"
@ -2341,9 +2341,9 @@ dependencies = [
[[package]] [[package]]
name = "rustls" name = "rustls"
version = "0.22.2" version = "0.22.3"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e87c9956bd9807afa1f77e0f7594af32566e830e088a5576d27c5b6f30f49d41" checksum = "99008d7ad0bbbea527ec27bddbc0e432c5b87d8175178cee68d2eec9c4a1813c"
dependencies = [ dependencies = [
"log", "log",
"ring", "ring",
@ -2360,7 +2360,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ffbd1941204442f051576a9a7ea8e8db074ad7fd43db1eb3378c3633f9f9e166" checksum = "ffbd1941204442f051576a9a7ea8e8db074ad7fd43db1eb3378c3633f9f9e166"
dependencies = [ dependencies = [
"nanorand", "nanorand",
"rustls 0.22.2", "rustls 0.22.3",
] ]
[[package]] [[package]]
@ -2384,9 +2384,9 @@ dependencies = [
[[package]] [[package]]
name = "rustls-pki-types" name = "rustls-pki-types"
version = "1.4.0" version = "1.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "868e20fada228fefaf6b652e00cc73623d54f8171e7352c18bb281571f2d92da" checksum = "ecd36cc4259e3e4514335c4a138c6b43171a8d61d8f5c9348f9fc7529416f247"
[[package]] [[package]]
name = "rustls-webpki" name = "rustls-webpki"
@ -2513,14 +2513,14 @@ checksum = "7eb0b34b42edc17f6b7cac84a52a1c5f0e1bb2227e997ca9011ea3dd34e8610b"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.53", "syn 2.0.55",
] ]
[[package]] [[package]]
name = "serde_json" name = "serde_json"
version = "1.0.114" version = "1.0.115"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c5f09b1bd632ef549eaa9f60a1f8de742bdbc698e6cee2095fc84dde5f549ae0" checksum = "12dc5c46daa8e9fdf4f5e71b6cf9a53f2487da0e86e55808e2d35539666497dd"
dependencies = [ dependencies = [
"itoa", "itoa",
"ryu", "ryu",
@ -2741,9 +2741,9 @@ dependencies = [
[[package]] [[package]]
name = "syn" name = "syn"
version = "2.0.53" version = "2.0.55"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7383cd0e49fff4b6b90ca5670bfd3e9d6a733b3f90c686605aa7eec8c4996032" checksum = "002a1b3dbf967edfafc32655d0f377ab0bb7b994aa1d32c8cc7e9b8bf3ebb8f0"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
@ -2803,7 +2803,7 @@ checksum = "c61f3ba182994efc43764a46c018c347bc492c79f024e705f46567b418f6d4f7"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.53", "syn 2.0.55",
] ]
[[package]] [[package]]
@ -2900,7 +2900,7 @@ checksum = "5b8a1e28f2deaa14e508979454cb3a223b10b938b45af148bc0986de36f1923b"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.53", "syn 2.0.55",
] ]
[[package]] [[package]]
@ -2937,7 +2937,7 @@ checksum = "0ea13f22eda7127c827983bdaf0d7fff9df21c8817bab02815ac277a21143677"
dependencies = [ dependencies = [
"futures", "futures",
"ring", "ring",
"rustls 0.22.2", "rustls 0.22.3",
"tokio", "tokio",
"tokio-postgres", "tokio-postgres",
"tokio-rustls 0.25.0", "tokio-rustls 0.25.0",
@ -2960,7 +2960,7 @@ version = "0.25.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "775e0c0f0adb3a2f22a00c4745d728b479985fc15ee7ca6a2608388c5569860f" checksum = "775e0c0f0adb3a2f22a00c4745d728b479985fc15ee7ca6a2608388c5569860f"
dependencies = [ dependencies = [
"rustls 0.22.2", "rustls 0.22.3",
"rustls-pki-types", "rustls-pki-types",
"tokio", "tokio",
] ]
@ -3160,7 +3160,7 @@ checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.53", "syn 2.0.55",
] ]
[[package]] [[package]]
@ -3390,7 +3390,7 @@ dependencies = [
"once_cell", "once_cell",
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.53", "syn 2.0.55",
"wasm-bindgen-shared", "wasm-bindgen-shared",
] ]
@ -3424,7 +3424,7 @@ checksum = "e94f17b526d0a461a191c78ea52bbce64071ed5c04c9ffe424dcb38f74171bb7"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.53", "syn 2.0.55",
"wasm-bindgen-backend", "wasm-bindgen-backend",
"wasm-bindgen-shared", "wasm-bindgen-shared",
] ]
@ -3730,7 +3730,7 @@ checksum = "9ce1b18ccd8e73a9321186f97e46f9f04b778851177567b1975109d26a08d2a6"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.53", "syn 2.0.55",
] ]
[[package]] [[package]]
@ -3750,5 +3750,5 @@ checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.53", "syn 2.0.55",
] ]

View file

@ -19,7 +19,7 @@ poll-timer-warnings = []
random-errors = ["dep:nanorand"] random-errors = ["dep:nanorand"]
[dependencies] [dependencies]
actix-form-data = "0.7.0-beta.6" actix-form-data = "0.7.0-beta.7"
actix-web = { version = "4.0.0", default-features = false, features = ["rustls-0_22"] } actix-web = { version = "4.0.0", default-features = false, features = ["rustls-0_22"] }
async-trait = "0.1.51" async-trait = "0.1.51"
barrel = { version = "0.7.0", features = ["pg"] } barrel = { version = "0.7.0", features = ["pg"] }
@ -58,8 +58,8 @@ rustls-channel-resolver = "0.2.0"
rustls-pemfile = "2.0.0" rustls-pemfile = "2.0.0"
rusty-s3 = "0.5.0" rusty-s3 = "0.5.0"
serde = { version = "1.0", features = ["derive"] } serde = { version = "1.0", features = ["derive"] }
serde-tuple-vec-map = "1.0.1"
serde_json = "1.0" serde_json = "1.0"
serde-tuple-vec-map = "1.0.1"
serde_urlencoded = "0.7.1" serde_urlencoded = "0.7.1"
sha2 = "0.10.0" sha2 = "0.10.0"
sled = { version = "0.34.7" } sled = { version = "0.34.7" }

View file

@ -33,11 +33,13 @@
cargo-outdated cargo-outdated
certstrap certstrap
clippy clippy
curl
diesel-cli diesel-cli
exiftool exiftool
ffmpeg_6-full ffmpeg_6-full
garage garage
imagemagick imagemagick
jq
minio-client minio-client
rust-analyzer rust-analyzer
rustc rustc

View file

@ -82,7 +82,7 @@ pub(crate) enum UploadError {
Io(#[from] std::io::Error), Io(#[from] std::io::Error),
#[error("Error validating upload")] #[error("Error validating upload")]
Validation(#[from] crate::validate::ValidationError), Validation(#[from] crate::ingest::ValidationError),
#[error("Error in store")] #[error("Error in store")]
Store(#[source] crate::store::StoreError), Store(#[source] crate::store::StoreError),
@ -111,6 +111,12 @@ pub(crate) enum UploadError {
#[error("Invalid job popped from job queue: {1}")] #[error("Invalid job popped from job queue: {1}")]
InvalidJob(#[source] serde_json::Error, String), InvalidJob(#[source] serde_json::Error, String),
#[error("Invalid query supplied")]
InvalidQuery(#[source] actix_web::error::QueryPayloadError),
#[error("Invalid json supplied")]
InvalidJson(#[source] actix_web::error::JsonPayloadError),
#[error("pict-rs is in read-only mode")] #[error("pict-rs is in read-only mode")]
ReadOnly, ReadOnly,
@ -209,6 +215,8 @@ impl UploadError {
Self::ProcessTimeout => ErrorCode::COMMAND_TIMEOUT, Self::ProcessTimeout => ErrorCode::COMMAND_TIMEOUT,
Self::FailedExternalValidation => ErrorCode::FAILED_EXTERNAL_VALIDATION, Self::FailedExternalValidation => ErrorCode::FAILED_EXTERNAL_VALIDATION,
Self::InvalidJob(_, _) => ErrorCode::INVALID_JOB, Self::InvalidJob(_, _) => ErrorCode::INVALID_JOB,
Self::InvalidQuery(_) => ErrorCode::INVALID_QUERY,
Self::InvalidJson(_) => ErrorCode::INVALID_JSON,
#[cfg(feature = "random-errors")] #[cfg(feature = "random-errors")]
Self::RandomError => ErrorCode::RANDOM_ERROR, Self::RandomError => ErrorCode::RANDOM_ERROR,
} }
@ -248,7 +256,7 @@ impl ResponseError for Error {
fn status_code(&self) -> StatusCode { fn status_code(&self) -> StatusCode {
match self.kind() { match self.kind() {
Some(UploadError::Upload(actix_form_data::Error::FileSize)) Some(UploadError::Upload(actix_form_data::Error::FileSize))
| Some(UploadError::Validation(crate::validate::ValidationError::Filesize)) => { | Some(UploadError::Validation(crate::ingest::ValidationError::Filesize)) => {
StatusCode::PAYLOAD_TOO_LARGE StatusCode::PAYLOAD_TOO_LARGE
} }
Some( Some(
@ -261,6 +269,8 @@ impl ResponseError for Error {
)) ))
| UploadError::Repo(crate::repo::RepoError::AlreadyClaimed) | UploadError::Repo(crate::repo::RepoError::AlreadyClaimed)
| UploadError::Validation(_) | UploadError::Validation(_)
| UploadError::InvalidQuery(_)
| UploadError::InvalidJson(_)
| UploadError::UnsupportedProcessExtension | UploadError::UnsupportedProcessExtension
| UploadError::ReadOnly | UploadError::ReadOnly
| UploadError::FailedExternalValidation | UploadError::FailedExternalValidation

View file

@ -100,6 +100,9 @@ impl ErrorCode {
pub(crate) const VIDEO_DISABLED: ErrorCode = ErrorCode { pub(crate) const VIDEO_DISABLED: ErrorCode = ErrorCode {
code: "video-disabled", code: "video-disabled",
}; };
pub(crate) const MEDIA_DISALLOWED: ErrorCode = ErrorCode {
code: "media-disallowed",
};
pub(crate) const HTTP_CLIENT_ERROR: ErrorCode = ErrorCode { pub(crate) const HTTP_CLIENT_ERROR: ErrorCode = ErrorCode {
code: "http-client-error", code: "http-client-error",
}; };
@ -147,6 +150,12 @@ impl ErrorCode {
pub(crate) const INVALID_JOB: ErrorCode = ErrorCode { pub(crate) const INVALID_JOB: ErrorCode = ErrorCode {
code: "invalid-job", code: "invalid-job",
}; };
pub(crate) const INVALID_QUERY: ErrorCode = ErrorCode {
code: "invalid-query",
};
pub(crate) const INVALID_JSON: ErrorCode = ErrorCode {
code: "invalid-json",
};
#[cfg(feature = "random-errors")] #[cfg(feature = "random-errors")]
pub(crate) const RANDOM_ERROR: ErrorCode = ErrorCode { pub(crate) const RANDOM_ERROR: ErrorCode = ErrorCode {
code: "random-error", code: "random-error",

View file

@ -1,3 +1,6 @@
mod hasher;
mod validate;
use std::{cell::RefCell, rc::Rc, sync::Arc, time::Duration}; use std::{cell::RefCell, rc::Rc, sync::Arc, time::Duration};
use crate::{ use crate::{
@ -9,16 +12,17 @@ use crate::{
repo::{Alias, ArcRepo, DeleteToken, Hash}, repo::{Alias, ArcRepo, DeleteToken, Hash},
state::State, state::State,
store::Store, store::Store,
UploadQuery,
}; };
use actix_web::web::Bytes; use actix_web::web::Bytes;
use futures_core::Stream; use futures_core::Stream;
use reqwest::Body; use reqwest::Body;
use tracing::{Instrument, Span}; use tracing::{Instrument, Span};
mod hasher;
use hasher::Hasher; use hasher::Hasher;
pub(crate) use validate::ValidationError;
#[derive(Debug)] #[derive(Debug)]
pub(crate) struct Session { pub(crate) struct Session {
repo: ArcRepo, repo: ArcRepo,
@ -31,6 +35,7 @@ pub(crate) struct Session {
async fn process_ingest<S>( async fn process_ingest<S>(
state: &State<S>, state: &State<S>,
stream: impl Stream<Item = Result<Bytes, Error>>, stream: impl Stream<Item = Result<Bytes, Error>>,
upload_query: &UploadQuery,
) -> Result< ) -> Result<
( (
InternalFormat, InternalFormat,
@ -54,11 +59,18 @@ where
let permit = crate::process_semaphore().acquire().await?; let permit = crate::process_semaphore().acquire().await?;
tracing::trace!("Validating bytes"); tracing::trace!("Validating bytes");
let (input_type, process_read) = crate::validate::validate_bytes_stream(state, bytes) let (input_type, process_read) =
.with_poll_timer("validate-bytes-stream") validate::validate_bytes_stream(state, bytes, &upload_query.limits)
.await?; .with_poll_timer("validate-bytes-stream")
.await?;
let process_read = if let Some(operations) = state.config.media.preprocess_steps() { let operations = if upload_query.operations.is_empty() {
state.config.media.preprocess_steps()
} else {
Some(upload_query.operations.as_ref())
};
let process_read = if let Some(operations) = operations {
if let Some(format) = input_type.processable_format() { if let Some(format) = input_type.processable_format() {
let (_, magick_args) = let (_, magick_args) =
crate::processor::build_chain(operations, format.file_extension())?; crate::processor::build_chain(operations, format.file_extension())?;
@ -159,6 +171,7 @@ pub(crate) async fn ingest<S>(
state: &State<S>, state: &State<S>,
stream: impl Stream<Item = Result<Bytes, Error>>, stream: impl Stream<Item = Result<Bytes, Error>>,
declared_alias: Option<Alias>, declared_alias: Option<Alias>,
upload_query: &UploadQuery,
) -> Result<Session, Error> ) -> Result<Session, Error>
where where
S: Store, S: Store,
@ -166,7 +179,7 @@ where
let (input_type, identifier, details, hash_state) = if state.config.server.danger_dummy_mode { let (input_type, identifier, details, hash_state) = if state.config.server.danger_dummy_mode {
dummy_ingest(state, stream).await? dummy_ingest(state, stream).await?
} else { } else {
process_ingest(state, stream) process_ingest(state, stream, upload_query)
.with_poll_timer("ingest-future") .with_poll_timer("ingest-future")
.await? .await?
}; };

View file

@ -14,6 +14,7 @@ use crate::{
future::WithPollTimer, future::WithPollTimer,
process::{Process, ProcessRead}, process::{Process, ProcessRead},
state::State, state::State,
UploadLimits,
}; };
#[derive(Debug, thiserror::Error)] #[derive(Debug, thiserror::Error)]
@ -38,6 +39,9 @@ pub(crate) enum ValidationError {
#[error("Video is disabled")] #[error("Video is disabled")]
VideoDisabled, VideoDisabled,
#[error("Media type wasn't allowed for this upload")]
MediaDisallowed,
} }
impl ValidationError { impl ValidationError {
@ -50,6 +54,7 @@ impl ValidationError {
Self::Empty => ErrorCode::VALIDATE_FILE_EMPTY, Self::Empty => ErrorCode::VALIDATE_FILE_EMPTY,
Self::Filesize => ErrorCode::VALIDATE_FILE_SIZE, Self::Filesize => ErrorCode::VALIDATE_FILE_SIZE,
Self::VideoDisabled => ErrorCode::VIDEO_DISABLED, Self::VideoDisabled => ErrorCode::VIDEO_DISABLED,
Self::MediaDisallowed => ErrorCode::MEDIA_DISALLOWED,
} }
} }
} }
@ -60,6 +65,7 @@ const MEGABYTES: usize = 1024 * 1024;
pub(crate) async fn validate_bytes_stream<S>( pub(crate) async fn validate_bytes_stream<S>(
state: &State<S>, state: &State<S>,
bytes: BytesStream, bytes: BytesStream,
upload_limits: &UploadLimits,
) -> Result<(InternalFormat, ProcessRead), Error> { ) -> Result<(InternalFormat, ProcessRead), Error> {
if bytes.is_empty() { if bytes.is_empty() {
return Err(ValidationError::Empty.into()); return Err(ValidationError::Empty.into());
@ -74,14 +80,16 @@ pub(crate) async fn validate_bytes_stream<S>(
.with_poll_timer("discover-bytes-stream") .with_poll_timer("discover-bytes-stream")
.await?; .await?;
validate_upload(bytes.len(), width, height, frames, upload_limits)?;
match &input { match &input {
InputFile::Image(input) => { InputFile::Image(input) if *upload_limits.allow_image => {
let (format, process) = let (format, process) =
process_image_command(state, *input, bytes.len(), width, height).await?; process_image_command(state, *input, bytes.len(), width, height).await?;
Ok((format, process.drive_with_stream(bytes.into_io_stream()))) Ok((format, process.drive_with_stream(bytes.into_io_stream())))
} }
InputFile::Animation(input) => { InputFile::Animation(input) if *upload_limits.allow_animation => {
let (format, process) = process_animation_command( let (format, process) = process_animation_command(
state, state,
*input, *input,
@ -94,20 +102,67 @@ pub(crate) async fn validate_bytes_stream<S>(
Ok((format, process.drive_with_stream(bytes.into_io_stream()))) Ok((format, process.drive_with_stream(bytes.into_io_stream())))
} }
InputFile::Video(input) => { InputFile::Video(input) if *upload_limits.allow_video => {
let (format, process_read) = let (format, process_read) =
process_video(state, bytes, *input, width, height, frames.unwrap_or(1)).await?; process_video(state, bytes, *input, width, height, frames.unwrap_or(1)).await?;
Ok((format, process_read)) Ok((format, process_read))
} }
_ => Err(ValidationError::MediaDisallowed.into()),
} }
} }
fn validate_upload(
size: usize,
width: u16,
height: u16,
frames: Option<u32>,
upload_limits: &UploadLimits,
) -> Result<(), ValidationError> {
if upload_limits
.max_width
.is_some_and(|max_width| width > *max_width)
{
return Err(ValidationError::Width);
}
if upload_limits
.max_height
.is_some_and(|max_height| height > *max_height)
{
return Err(ValidationError::Height);
}
if upload_limits
.max_frame_count
.zip(frames)
.is_some_and(|(max_frame_count, frames)| frames > *max_frame_count)
{
return Err(ValidationError::Frames);
}
if upload_limits
.max_area
.is_some_and(|max_area| u32::from(width) * u32::from(height) > *max_area)
{
return Err(ValidationError::Area);
}
if upload_limits
.max_file_size
.is_some_and(|max_file_size| size > *max_file_size * MEGABYTES)
{
return Err(ValidationError::Filesize);
}
Ok(())
}
#[tracing::instrument(skip(state))] #[tracing::instrument(skip(state))]
async fn process_image_command<S>( async fn process_image_command<S>(
state: &State<S>, state: &State<S>,
input: ImageInput, input: ImageInput,
length: usize, size: usize,
width: u16, width: u16,
height: u16, height: u16,
) -> Result<(InternalFormat, Process), Error> { ) -> Result<(InternalFormat, Process), Error> {
@ -122,7 +177,7 @@ async fn process_image_command<S>(
if u32::from(width) * u32::from(height) > validations.max_area { if u32::from(width) * u32::from(height) > validations.max_area {
return Err(ValidationError::Area.into()); return Err(ValidationError::Area.into());
} }
if length > validations.max_file_size * MEGABYTES { if size > validations.max_file_size * MEGABYTES {
return Err(ValidationError::Filesize.into()); return Err(ValidationError::Filesize.into());
} }
@ -172,14 +227,14 @@ fn validate_animation(
async fn process_animation_command<S>( async fn process_animation_command<S>(
state: &State<S>, state: &State<S>,
input: AnimationFormat, input: AnimationFormat,
length: usize, size: usize,
width: u16, width: u16,
height: u16, height: u16,
frames: u32, frames: u32,
) -> Result<(InternalFormat, Process), Error> { ) -> Result<(InternalFormat, Process), Error> {
let validations = &state.config.media.animation; let validations = &state.config.media.animation;
validate_animation(length, width, height, frames, validations)?; validate_animation(size, width, height, frames, validations)?;
let AnimationOutput { let AnimationOutput {
format, format,

View file

@ -35,7 +35,6 @@ mod stream;
mod sync; mod sync;
mod tls; mod tls;
mod tmp_file; mod tmp_file;
mod validate;
use actix_form_data::{Field, Form, FormData, Multipart, Value}; use actix_form_data::{Field, Form, FormData, Multipart, Value};
use actix_web::{ use actix_web::{
@ -59,6 +58,7 @@ use std::{
marker::PhantomData, marker::PhantomData,
path::Path, path::Path,
path::PathBuf, path::PathBuf,
rc::Rc,
sync::{Arc, OnceLock}, sync::{Arc, OnceLock},
time::{Duration, SystemTime}, time::{Duration, SystemTime},
}; };
@ -147,22 +147,64 @@ async fn ensure_details_identifier<S: Store + 'static>(
} }
} }
#[derive(Clone, Debug, serde::Deserialize, serde::Serialize)]
#[serde(default)]
struct UploadLimits {
max_width: Option<Serde<u16>>,
max_height: Option<Serde<u16>>,
max_area: Option<Serde<u32>>,
max_frame_count: Option<Serde<u32>>,
max_file_size: Option<Serde<usize>>,
allow_image: Serde<bool>,
allow_animation: Serde<bool>,
allow_video: Serde<bool>,
}
impl Default for UploadLimits {
fn default() -> Self {
Self {
max_width: None,
max_height: None,
max_area: None,
max_frame_count: None,
max_file_size: None,
allow_image: Serde::new(true),
allow_animation: Serde::new(true),
allow_video: Serde::new(true),
}
}
}
#[derive(Clone, Default, Debug, serde::Deserialize, serde::Serialize)]
struct UploadQuery {
#[serde(flatten)]
limits: UploadLimits,
#[serde(with = "tuple_vec_map", flatten)]
operations: Vec<(String, String)>,
}
struct Upload<S>(Value<Session>, PhantomData<S>); struct Upload<S>(Value<Session>, PhantomData<S>);
impl<S: Store + 'static> FormData for Upload<S> { impl<S: Store + 'static> FormData for Upload<S> {
type Item = Session; type Item = Session;
type Error = Error; type Error = Error;
fn form(req: &HttpRequest) -> Form<Self::Item, Self::Error> { fn form(req: &HttpRequest) -> Result<Form<Self::Item, Self::Error>, Self::Error> {
let state = req let state = req
.app_data::<web::Data<State<S>>>() .app_data::<web::Data<State<S>>>()
.expect("No state in request") .expect("No state in request")
.clone(); .clone();
let web::Query(upload_query) = web::Query::<UploadQuery>::from_query(req.query_string())
.map_err(UploadError::InvalidQuery)?;
let upload_query = Rc::new(upload_query);
// Create a new Multipart Form validator // Create a new Multipart Form validator
// //
// This form is expecting a single array field, 'images' with at most 10 files in it // This form is expecting a single array field, 'images' with at most 10 files in it
Form::new() Ok(Form::new()
.max_files(state.config.server.max_file_count) .max_files(state.config.server.max_file_count)
.max_file_size(state.config.media.max_file_size * MEGABYTES) .max_file_size(state.config.media.max_file_size * MEGABYTES)
.transform_error(transform_error) .transform_error(transform_error)
@ -170,6 +212,7 @@ impl<S: Store + 'static> FormData for Upload<S> {
"images", "images",
Field::array(Field::file(move |filename, _, stream| { Field::array(Field::file(move |filename, _, stream| {
let state = state.clone(); let state = state.clone();
let upload_query = upload_query.clone();
metrics::counter!(crate::init_metrics::FILES, "upload" => "inline") metrics::counter!(crate::init_metrics::FILES, "upload" => "inline")
.increment(1); .increment(1);
@ -184,13 +227,13 @@ impl<S: Store + 'static> FormData for Upload<S> {
let stream = crate::stream::from_err(stream); let stream = crate::stream::from_err(stream);
ingest::ingest(&state, stream, None).await ingest::ingest(&state, stream, None, &upload_query).await
} }
.with_poll_timer("file-upload") .with_poll_timer("file-upload")
.instrument(span), .instrument(span),
) )
})), })),
) ))
} }
fn extract(value: Value<Self::Item>) -> Result<Self, Self::Error> { fn extract(value: Value<Self::Item>) -> Result<Self, Self::Error> {
@ -204,16 +247,21 @@ impl<S: Store + 'static> FormData for Import<S> {
type Item = Session; type Item = Session;
type Error = Error; type Error = Error;
fn form(req: &actix_web::HttpRequest) -> Form<Self::Item, Self::Error> { fn form(req: &actix_web::HttpRequest) -> Result<Form<Self::Item, Self::Error>, Self::Error> {
let state = req let state = req
.app_data::<web::Data<State<S>>>() .app_data::<web::Data<State<S>>>()
.expect("No state in request") .expect("No state in request")
.clone(); .clone();
let web::Query(upload_query) = web::Query::<UploadQuery>::from_query(req.query_string())
.map_err(UploadError::InvalidQuery)?;
let upload_query = Rc::new(upload_query);
// Create a new Multipart Form validator for internal imports // Create a new Multipart Form validator for internal imports
// //
// This form is expecting a single array field, 'images' with at most 10 files in it // This form is expecting a single array field, 'images' with at most 10 files in it
Form::new() Ok(Form::new()
.max_files(state.config.server.max_file_count) .max_files(state.config.server.max_file_count)
.max_file_size(state.config.media.max_file_size * MEGABYTES) .max_file_size(state.config.media.max_file_size * MEGABYTES)
.transform_error(transform_error) .transform_error(transform_error)
@ -221,6 +269,7 @@ impl<S: Store + 'static> FormData for Import<S> {
"images", "images",
Field::array(Field::file(move |filename, _, stream| { Field::array(Field::file(move |filename, _, stream| {
let state = state.clone(); let state = state.clone();
let upload_query = upload_query.clone();
metrics::counter!(crate::init_metrics::FILES, "import" => "inline") metrics::counter!(crate::init_metrics::FILES, "import" => "inline")
.increment(1); .increment(1);
@ -235,14 +284,19 @@ impl<S: Store + 'static> FormData for Import<S> {
let stream = crate::stream::from_err(stream); let stream = crate::stream::from_err(stream);
ingest::ingest(&state, stream, Some(Alias::from_existing(&filename))) ingest::ingest(
.await &state,
stream,
Some(Alias::from_existing(&filename)),
&upload_query,
)
.await
} }
.with_poll_timer("file-import") .with_poll_timer("file-import")
.instrument(span), .instrument(span),
) )
})), })),
) ))
} }
fn extract(value: Value<Self::Item>) -> Result<Self, Self::Error> fn extract(value: Value<Self::Item>) -> Result<Self, Self::Error>
@ -320,16 +374,16 @@ impl<S: Store + 'static> FormData for BackgroundedUpload<S> {
type Item = Backgrounded; type Item = Backgrounded;
type Error = Error; type Error = Error;
fn form(req: &actix_web::HttpRequest) -> Form<Self::Item, Self::Error> { fn form(req: &actix_web::HttpRequest) -> Result<Form<Self::Item, Self::Error>, Self::Error> {
// Create a new Multipart Form validator for backgrounded uploads
//
// This form is expecting a single array field, 'images' with at most 10 files in it
let state = req let state = req
.app_data::<web::Data<State<S>>>() .app_data::<web::Data<State<S>>>()
.expect("No state in request") .expect("No state in request")
.clone(); .clone();
Form::new() // Create a new Multipart Form validator for backgrounded uploads
//
// This form is expecting a single array field, 'images' with at most 10 files in it
Ok(Form::new()
.max_files(state.config.server.max_file_count) .max_files(state.config.server.max_file_count)
.max_file_size(state.config.media.max_file_size * MEGABYTES) .max_file_size(state.config.media.max_file_size * MEGABYTES)
.transform_error(transform_error) .transform_error(transform_error)
@ -357,7 +411,7 @@ impl<S: Store + 'static> FormData for BackgroundedUpload<S> {
.instrument(span), .instrument(span),
) )
})), })),
) ))
} }
fn extract(value: Value<Self::Item>) -> Result<Self, Self::Error> fn extract(value: Value<Self::Item>) -> Result<Self, Self::Error>
@ -372,7 +426,10 @@ impl<S: Store + 'static> FormData for BackgroundedUpload<S> {
async fn upload_backgrounded<S: Store>( async fn upload_backgrounded<S: Store>(
Multipart(BackgroundedUpload(value, _)): Multipart<BackgroundedUpload<S>>, Multipart(BackgroundedUpload(value, _)): Multipart<BackgroundedUpload<S>>,
state: web::Data<State<S>>, state: web::Data<State<S>>,
upload_query: web::Query<UploadQuery>,
) -> Result<HttpResponse, Error> { ) -> Result<HttpResponse, Error> {
let upload_query = upload_query.into_inner();
let images = value let images = value
.map() .map()
.and_then(|mut m| m.remove("images")) .and_then(|mut m| m.remove("images"))
@ -389,7 +446,14 @@ async fn upload_backgrounded<S: Store>(
let upload_id = image.result.upload_id().expect("Upload ID exists"); let upload_id = image.result.upload_id().expect("Upload ID exists");
let identifier = image.result.identifier().expect("Identifier exists"); let identifier = image.result.identifier().expect("Identifier exists");
queue::queue_ingest(&state.repo, identifier, upload_id, None).await?; queue::queue_ingest(
&state.repo,
identifier,
upload_id,
None,
upload_query.clone(),
)
.await?;
files.push(serde_json::json!({ files.push(serde_json::json!({
"upload_id": upload_id.to_string(), "upload_id": upload_id.to_string(),
@ -462,11 +526,21 @@ struct UrlQuery {
backgrounded: bool, backgrounded: bool,
} }
/// Query parameters accepted by the `download` endpoint.
///
/// Combines the download-specific options (`UrlQuery`: the source `url` and
/// the `backgrounded` flag) with the shared per-upload options
/// (`UploadQuery`). Both are `#[serde(flatten)]`ed so all fields appear at
/// the top level of the query string rather than nested under a key.
///
/// NOTE(review): with two flattened structs, any field name present in both
/// would be ambiguous — assumes `UrlQuery` and `UploadQuery` have disjoint
/// field names; confirm at their definitions.
#[derive(Debug, serde::Deserialize)]
struct DownloadQuery {
    // download-specific options (url, backgrounded)
    #[serde(flatten)]
    url_query: UrlQuery,
    // per-upload limits / preprocess steps shared with the upload endpoints
    #[serde(flatten)]
    upload_query: UploadQuery,
}
async fn ingest_inline<S: Store + 'static>( async fn ingest_inline<S: Store + 'static>(
stream: impl Stream<Item = Result<web::Bytes, Error>>, stream: impl Stream<Item = Result<web::Bytes, Error>>,
state: &State<S>, state: &State<S>,
upload_query: &UploadQuery,
) -> Result<(Alias, DeleteToken, Details), Error> { ) -> Result<(Alias, DeleteToken, Details), Error> {
let session = ingest::ingest(state, stream, None).await?; let session = ingest::ingest(state, stream, None, upload_query).await?;
let alias = session.alias().expect("alias should exist").to_owned(); let alias = session.alias().expect("alias should exist").to_owned();
@ -480,15 +554,20 @@ async fn ingest_inline<S: Store + 'static>(
/// download an image from a URL /// download an image from a URL
#[tracing::instrument(name = "Downloading file", skip(state))] #[tracing::instrument(name = "Downloading file", skip(state))]
async fn download<S: Store + 'static>( async fn download<S: Store + 'static>(
query: web::Query<UrlQuery>, download_query: web::Query<DownloadQuery>,
state: web::Data<State<S>>, state: web::Data<State<S>>,
) -> Result<HttpResponse, Error> { ) -> Result<HttpResponse, Error> {
let stream = download_stream(&query.url, &state).await?; let DownloadQuery {
url_query,
upload_query,
} = download_query.into_inner();
if query.backgrounded { let stream = download_stream(&url_query.url, &state).await?;
do_download_backgrounded(stream, state).await
if url_query.backgrounded {
do_download_backgrounded(stream, state, upload_query).await
} else { } else {
do_download_inline(stream, &state).await do_download_inline(stream, &state, &upload_query).await
} }
} }
@ -518,10 +597,11 @@ async fn download_stream<S>(
async fn do_download_inline<S: Store + 'static>( async fn do_download_inline<S: Store + 'static>(
stream: impl Stream<Item = Result<web::Bytes, Error>>, stream: impl Stream<Item = Result<web::Bytes, Error>>,
state: &State<S>, state: &State<S>,
upload_query: &UploadQuery,
) -> Result<HttpResponse, Error> { ) -> Result<HttpResponse, Error> {
metrics::counter!(crate::init_metrics::FILES, "download" => "inline").increment(1); metrics::counter!(crate::init_metrics::FILES, "download" => "inline").increment(1);
let (alias, delete_token, details) = ingest_inline(stream, state).await?; let (alias, delete_token, details) = ingest_inline(stream, state, upload_query).await?;
Ok(HttpResponse::Created().json(&serde_json::json!({ Ok(HttpResponse::Created().json(&serde_json::json!({
"msg": "ok", "msg": "ok",
@ -537,6 +617,7 @@ async fn do_download_inline<S: Store + 'static>(
async fn do_download_backgrounded<S: Store + 'static>( async fn do_download_backgrounded<S: Store + 'static>(
stream: impl Stream<Item = Result<web::Bytes, Error>>, stream: impl Stream<Item = Result<web::Bytes, Error>>,
state: web::Data<State<S>>, state: web::Data<State<S>>,
upload_query: UploadQuery,
) -> Result<HttpResponse, Error> { ) -> Result<HttpResponse, Error> {
metrics::counter!(crate::init_metrics::FILES, "download" => "background").increment(1); metrics::counter!(crate::init_metrics::FILES, "download" => "background").increment(1);
@ -545,7 +626,7 @@ async fn do_download_backgrounded<S: Store + 'static>(
let upload_id = backgrounded.upload_id().expect("Upload ID exists"); let upload_id = backgrounded.upload_id().expect("Upload ID exists");
let identifier = backgrounded.identifier().expect("Identifier exists"); let identifier = backgrounded.identifier().expect("Identifier exists");
queue::queue_ingest(&state.repo, identifier, upload_id, None).await?; queue::queue_ingest(&state.repo, identifier, upload_id, None, upload_query).await?;
backgrounded.disarm(); backgrounded.disarm();
@ -1212,7 +1293,7 @@ async fn proxy_alias_from_query<S: Store + 'static>(
} else if !state.config.server.read_only { } else if !state.config.server.read_only {
let stream = download_stream(proxy.as_str(), state).await?; let stream = download_stream(proxy.as_str(), state).await?;
let (alias, _, _) = ingest_inline(stream, state).await?; let (alias, _, _) = ingest_inline(stream, state, &Default::default()).await?;
state.repo.relate_url(proxy, alias.clone()).await?; state.repo.relate_url(proxy, alias.clone()).await?;
@ -1497,6 +1578,16 @@ fn build_client() -> Result<ClientWithMiddleware, Error> {
.build()) .build())
} }
fn query_config() -> web::QueryConfig {
web::QueryConfig::default()
.error_handler(|err, _| Error::from(UploadError::InvalidQuery(err)).into())
}
fn json_config() -> web::JsonConfig {
web::JsonConfig::default()
.error_handler(|err, _| Error::from(UploadError::InvalidJson(err)).into())
}
fn configure_endpoints<S: Store + 'static, F: Fn(&mut web::ServiceConfig)>( fn configure_endpoints<S: Store + 'static, F: Fn(&mut web::ServiceConfig)>(
config: &mut web::ServiceConfig, config: &mut web::ServiceConfig,
state: State<S>, state: State<S>,
@ -1504,6 +1595,8 @@ fn configure_endpoints<S: Store + 'static, F: Fn(&mut web::ServiceConfig)>(
extra_config: F, extra_config: F,
) { ) {
config config
.app_data(query_config())
.app_data(json_config())
.app_data(web::Data::new(state.clone())) .app_data(web::Data::new(state.clone()))
.app_data(web::Data::new(process_map.clone())) .app_data(web::Data::new(process_map.clone()))
.route("/healthz", web::get().to(healthz::<S>)) .route("/healthz", web::get().to(healthz::<S>))

View file

@ -7,6 +7,7 @@ use crate::{
serde_str::Serde, serde_str::Serde,
state::State, state::State,
store::Store, store::Store,
UploadQuery,
}; };
use std::{ use std::{
@ -56,6 +57,8 @@ enum Process {
identifier: String, identifier: String,
upload_id: Serde<UploadId>, upload_id: Serde<UploadId>,
declared_alias: Option<Serde<Alias>>, declared_alias: Option<Serde<Alias>>,
#[serde(default)]
upload_query: UploadQuery,
}, },
Generate { Generate {
target_format: InputProcessableFormat, target_format: InputProcessableFormat,
@ -158,11 +161,13 @@ pub(crate) async fn queue_ingest(
identifier: &Arc<str>, identifier: &Arc<str>,
upload_id: UploadId, upload_id: UploadId,
declared_alias: Option<Alias>, declared_alias: Option<Alias>,
upload_query: UploadQuery,
) -> Result<(), Error> { ) -> Result<(), Error> {
let job = serde_json::to_value(Process::Ingest { let job = serde_json::to_value(Process::Ingest {
identifier: identifier.to_string(), identifier: identifier.to_string(),
declared_alias: declared_alias.map(Serde::new), declared_alias: declared_alias.map(Serde::new),
upload_id: Serde::new(upload_id), upload_id: Serde::new(upload_id),
upload_query,
}) })
.map_err(UploadError::PushJob)?; .map_err(UploadError::PushJob)?;
repo.push(PROCESS_QUEUE, job, None).await?; repo.push(PROCESS_QUEUE, job, None).await?;

View file

@ -12,6 +12,7 @@ use crate::{
serde_str::Serde, serde_str::Serde,
state::State, state::State,
store::Store, store::Store,
UploadQuery,
}; };
use std::{path::PathBuf, sync::Arc}; use std::{path::PathBuf, sync::Arc};
@ -37,12 +38,14 @@ where
identifier, identifier,
upload_id, upload_id,
declared_alias, declared_alias,
upload_query,
} => { } => {
process_ingest( process_ingest(
state, state,
Arc::from(identifier), Arc::from(identifier),
Serde::into_inner(upload_id), Serde::into_inner(upload_id),
declared_alias.map(Serde::into_inner), declared_alias.map(Serde::into_inner),
upload_query,
) )
.with_poll_timer("process-ingest") .with_poll_timer("process-ingest")
.await? .await?
@ -110,6 +113,7 @@ async fn process_ingest<S>(
unprocessed_identifier: Arc<str>, unprocessed_identifier: Arc<str>,
upload_id: UploadId, upload_id: UploadId,
declared_alias: Option<Alias>, declared_alias: Option<Alias>,
upload_query: UploadQuery,
) -> JobResult ) -> JobResult
where where
S: Store + 'static, S: Store + 'static,
@ -129,7 +133,8 @@ where
let stream = let stream =
crate::stream::from_err(state2.store.to_stream(&ident, None, None).await?); crate::stream::from_err(state2.store.to_stream(&ident, None, None).await?);
let session = crate::ingest::ingest(&state2, stream, declared_alias).await?; let session =
crate::ingest::ingest(&state2, stream, declared_alias, &upload_query).await?;
Ok(session) as Result<Session, Error> Ok(session) as Result<Session, Error>
} }

View file

@ -3,7 +3,7 @@ use std::{
str::FromStr, str::FromStr,
}; };
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] #[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub(crate) struct Serde<T> { pub(crate) struct Serde<T> {
inner: T, inner: T,
} }
@ -44,6 +44,17 @@ impl<T> DerefMut for Serde<T> {
} }
} }
impl<T> Default for Serde<T>
where
T: Default,
{
fn default() -> Self {
Serde {
inner: T::default(),
}
}
}
impl<T> FromStr for Serde<T> impl<T> FromStr for Serde<T>
where where
T: FromStr, T: FromStr,