Finish redoing config probably

This commit is contained in:
Aode (Lion) 2022-03-27 23:27:07 -05:00
parent ca28f68ef5
commit 3792a8923a
16 changed files with 856 additions and 253 deletions

13
Cargo.lock generated
View file

@ -2651,6 +2651,16 @@ dependencies = [
"tracing-subscriber",
]
[[package]]
name = "tracing-serde"
version = "0.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bc6b213177105856957181934e4920de57730fc69bf42c37ee5bb664d406d9e1"
dependencies = [
"serde",
"tracing-core",
]
[[package]]
name = "tracing-subscriber"
version = "0.3.9"
@ -2661,12 +2671,15 @@ dependencies = [
"lazy_static",
"matchers",
"regex",
"serde",
"serde_json",
"sharded-slab",
"smallvec",
"thread_local",
"tracing",
"tracing-core",
"tracing-log",
"tracing-serde",
]
[[package]]

View file

@ -63,8 +63,11 @@ tracing-futures = "0.2.4"
tracing-log = "0.1.2"
tracing-opentelemetry = "0.17"
tracing-subscriber = { version = "0.3.0", features = [
"ansi",
"env-filter",
"fmt",
"json",
"registry",
"tracing-log",
] }
url = { version = "2.2", features = ["serde"] }

39
defaults.toml Normal file
View file

@ -0,0 +1,39 @@
# Baked-in defaults for pict-rs (mirrors the `Defaults` struct); values here
# are the lowest-priority layer and can be overridden by a config file,
# PICTRS__* environment variables, or command-line flags.
[server]
# Address and port the web server binds to.
address = '0.0.0.0:8080'
[tracing.logging]
# Format of logs printed to stdout.
format = 'normal'
# Log levels to print, RUST_LOG-style.
targets = 'info'
[tracing.console]
# Capacity of the console-subscriber event buffer.
buffer_capacity = 102400
[tracing.opentelemetry]
# Service name reported to OpenTelemetry.
service_name = 'pict-rs'
# Log levels exported to OpenTelemetry, RUST_LOG-style.
targets = 'info'
[old_db]
# Path to the old pict-rs sled database.
path = '/mnt'
[media]
# Upload limits, in pixels.
max_width = 10000
max_height = 10000
max_area = 40000000
# Maximum upload size, in megabytes.
max_file_size = 40
# Allow GIF and silent MP4 uploads.
enable_silent_video = true
# Filters enabled on the `process` endpoint.
filters = [
'crop',
'blur',
'resize',
'identity',
'thumbnail',
]
# Whether to skip validating media on the "import" endpoint.
skip_validate_imports = false
[repo]
# Data repository backend.
type = 'sled'
path = '/mnt/sled-repo'
# Cache capacity, in bytes, allowed to sled for in-memory operations (64 MiB).
cache_capacity = 67108864
[store]
# Media storage backend.
type = 'filesystem'
path = '/mnt/files'

39
dev.toml Normal file
View file

@ -0,0 +1,39 @@
# Development configuration for pict-rs: identical shape to defaults.toml but
# keeps all state under a local `data/` directory instead of `/mnt`.
[server]
# Address and port the web server binds to.
address = '0.0.0.0:8080'
[tracing.logging]
# Format of logs printed to stdout.
format = 'normal'
# Log levels to print, RUST_LOG-style.
targets = 'info'
[tracing.console]
# Capacity of the console-subscriber event buffer.
buffer_capacity = 102400
[tracing.opentelemetry]
# Service name reported to OpenTelemetry.
service_name = 'pict-rs'
# Log levels exported to OpenTelemetry, RUST_LOG-style.
targets = 'info'
[old_db]
# Path to the old pict-rs sled database.
path = 'data/'
[media]
# Upload limits, in pixels.
max_width = 10000
max_height = 10000
max_area = 40000000
# Maximum upload size, in megabytes.
max_file_size = 40
# Allow GIF and silent MP4 uploads.
enable_silent_video = true
# Filters enabled on the `process` endpoint.
filters = [
'identity',
'resize',
'crop',
'thumbnail',
'blur',
]
# Whether to skip validating media on the "import" endpoint.
skip_validate_imports = false
[repo]
# Data repository backend.
type = 'sled'
path = 'data/sled-repo'
# Cache capacity, in bytes, allowed to sled for in-memory operations (64 MiB).
cache_capacity = 67108864
[store]
# Media storage backend.
type = 'filesystem'
path = 'data/files'

View file

@ -1,12 +1,45 @@
use clap::Parser;
mod commandline;
mod defaults;
mod file;
mod primitives;
use crate::magick::ValidInputType;
use commandline::{Args, Output};
use config::Config;
use defaults::Defaults;
pub(crate) use file::ConfigFile as Configuration;
pub(crate) use commandline::Operation;
pub(crate) use file::{ConfigFile as Configuration, OpenTelemetry, Repo, Sled, Tracing};
pub(crate) use primitives::{Filesystem, ImageFormat, LogFormat, ObjectStorage, Store};
pub(crate) fn configure() -> anyhow::Result<Configuration> {
unimplemented!()
pub(crate) fn configure() -> anyhow::Result<(Configuration, Operation)> {
let Output {
config_format,
operation,
save_to,
config_file,
} = Args::parse().into_output();
let config = Config::builder().add_source(config::Config::try_from(&Defaults::default())?);
let config = if let Some(config_file) = config_file {
config.add_source(config::File::from(config_file))
} else {
config
};
let built = config
.add_source(config::Environment::with_prefix("PICTRS").separator("__"))
.add_source(config::Config::try_from(&config_format)?)
.build()?;
let config: Configuration = built.try_deserialize()?;
if let Some(save_to) = save_to {
let output = toml::to_string_pretty(&config)?;
std::fs::write(save_to, output)?;
}
Ok((config, operation))
}

View file

@ -1,50 +1,359 @@
use crate::config::primitives::{ImageFormat, LogFormat, Targets};
use crate::{
config::primitives::{ImageFormat, LogFormat, Targets},
serde_str::Serde,
};
use clap::{Parser, Subcommand};
use std::{net::SocketAddr, path::PathBuf};
use url::Url;
impl Args {
// Flattens the parsed command line into an `Output`: a serializable config
// overlay (`ConfigFormat`) plus the `Operation` to perform. Tracing and
// old-db options apply to every subcommand; server/media/store/repo come
// from the chosen subcommand.
pub(super) fn into_output(self) -> Output {
// Destructure up front so each field is consumed exactly once.
let Args {
config_file,
old_db_path,
log_format,
log_targets,
console_address,
console_buffer_capacity,
opentelemetry_url,
opentelemetry_service_name,
opentelemetry_targets,
save_to,
command,
} = self;
// Options shared by every subcommand.
let old_db = OldDb { path: old_db_path };
let tracing = Tracing {
logging: Logging {
format: log_format,
targets: log_targets.map(Serde::new),
},
console: Console {
address: console_address,
buffer_capacity: console_buffer_capacity,
},
opentelemetry: OpenTelemetry {
url: opentelemetry_url,
service_name: opentelemetry_service_name,
targets: opentelemetry_targets.map(Serde::new),
},
};
match command {
// `run`: all server/media flags become config overrides.
Command::Run(Run {
address,
api_key,
media_skip_validate_imports,
media_max_width,
media_max_height,
media_max_area,
media_max_file_size,
media_enable_silent_video,
media_filters,
media_format,
store,
}) => {
let server = Server { address, api_key };
let media = Media {
skip_validate_imports: media_skip_validate_imports,
max_width: media_max_width,
max_height: media_max_height,
max_area: media_max_area,
max_file_size: media_max_file_size,
enable_silent_video: media_enable_silent_video,
filters: media_filters,
format: media_format,
};
let operation = Operation::Run;
// The optional store subcommand also carries the repo choice.
match store {
Some(RunStore::Filesystem(RunFilesystem { system, repo })) => {
let store = Some(Store::Filesystem(system));
Output {
config_format: ConfigFormat {
server,
old_db,
tracing,
media,
store,
repo,
},
operation,
config_file,
save_to,
}
}
Some(RunStore::ObjectStorage(RunObjectStorage { storage, repo })) => {
let store = Some(Store::ObjectStorage(storage));
Output {
config_format: ConfigFormat {
server,
old_db,
tracing,
media,
store,
repo,
},
operation,
config_file,
save_to,
}
}
// No store subcommand: leave store/repo unset so lower-priority
// config sources (file, env, defaults) decide.
None => Output {
config_format: ConfigFormat {
server,
old_db,
tracing,
media,
store: None,
repo: None,
},
operation,
config_file,
save_to,
},
}
}
// `migrate-store`: server/media use struct defaults; the nested
// subcommands name the source store, destination store, and repo.
Command::MigrateStore(migrate_store) => {
let server = Server::default();
let media = Media::default();
match migrate_store {
MigrateStore::Filesystem(MigrateFilesystem { from, to }) => match to {
MigrateStoreInner::Filesystem(MigrateFilesystemInner { to, repo }) => {
Output {
config_format: ConfigFormat {
server,
old_db,
tracing,
media,
store: None,
repo,
},
operation: Operation::MigrateStore {
from: from.into(),
to: to.into(),
},
config_file,
save_to,
}
}
MigrateStoreInner::ObjectStorage(MigrateObjectStorageInner {
to,
repo,
}) => Output {
config_format: ConfigFormat {
server,
old_db,
tracing,
media,
store: None,
repo,
},
operation: Operation::MigrateStore {
from: from.into(),
to: to.into(),
},
config_file,
save_to,
},
},
MigrateStore::ObjectStorage(MigrateObjectStorage { from, to }) => match to {
MigrateStoreInner::Filesystem(MigrateFilesystemInner { to, repo }) => {
Output {
config_format: ConfigFormat {
server,
old_db,
tracing,
media,
store: None,
repo,
},
operation: Operation::MigrateStore {
from: from.into(),
to: to.into(),
},
config_file,
save_to,
}
}
MigrateStoreInner::ObjectStorage(MigrateObjectStorageInner {
to,
repo,
}) => Output {
config_format: ConfigFormat {
server,
old_db,
tracing,
media,
store: None,
repo,
},
operation: Operation::MigrateStore {
from: from.into(),
to: to.into(),
},
config_file,
save_to,
},
},
}
}
}
}
}
// Result of flattening the CLI arguments in `Args::into_output`.
pub(super) struct Output {
// CLI-provided config values; layered over file/env/default config.
pub(super) config_format: ConfigFormat,
// The top-level action the process should perform.
pub(super) operation: Operation,
// If set, the fully-merged configuration is written to this path.
pub(super) save_to: Option<PathBuf>,
// Configuration file supplied via `--config-file`, if any.
pub(super) config_file: Option<PathBuf>,
}
/// Top-level action selected on the command line.
///
/// `MigrateStore` carries two full `Store` configurations, hence the
/// large-enum-variant allowance.
#[allow(clippy::large_enum_variant)]
// Debug added alongside Clone: pub(crate) types should be debuggable, and
// both payload `Store`s already derive Debug.
#[derive(Clone, Debug)]
pub(crate) enum Operation {
    /// Run the pict-rs web server.
    Run,
    /// Migrate stored media from one store to another.
    MigrateStore {
        from: crate::config::primitives::Store,
        to: crate::config::primitives::Store,
    },
}
// Serializable snapshot of every CLI-provided setting; fed into the config
// stack via `config::Config::try_from` in `configure()`.
#[derive(Debug, Default, serde::Serialize)]
#[serde(rename_all = "snake_case")]
pub(super) struct ConfigFormat {
server: Server,
old_db: OldDb,
tracing: Tracing,
media: Media,
// Unset sections are omitted entirely so they don't override values from
// lower-priority sources.
#[serde(skip_serializing_if = "Option::is_none")]
repo: Option<Repo>,
#[serde(skip_serializing_if = "Option::is_none")]
store: Option<Store>,
}
// CLI overrides for the `[server]` config section; `None` fields are omitted
// when serialized so they fall through to other config sources.
#[derive(Debug, Default, serde::Serialize)]
#[serde(rename_all = "snake_case")]
struct Server {
// Address and port to bind the pict-rs web server.
#[serde(skip_serializing_if = "Option::is_none")]
address: Option<SocketAddr>,
// API key required to access restricted routes.
#[serde(skip_serializing_if = "Option::is_none")]
api_key: Option<String>,
}
// CLI overrides for the `[tracing]` config section, grouping the three
// telemetry sinks: stdout logging, tokio-console, and OpenTelemetry.
#[derive(Debug, Default, serde::Serialize)]
#[serde(rename_all = "snake_case")]
struct Tracing {
logging: Logging,
console: Console,
opentelemetry: OpenTelemetry,
}
// CLI overrides for `[tracing.logging]` (stdout log output).
#[derive(Debug, Default, serde::Serialize)]
#[serde(rename_all = "snake_case")]
struct Logging {
// Format of logs printed to stdout.
#[serde(skip_serializing_if = "Option::is_none")]
format: Option<LogFormat>,
// Log levels to print, RUST_LOG-style; wrapped in Serde for (de)serialization.
#[serde(skip_serializing_if = "Option::is_none")]
targets: Option<Serde<Targets>>,
}
// CLI overrides for `[tracing.console]` (tokio-console metrics).
#[derive(Debug, Default, serde::Serialize)]
#[serde(rename_all = "snake_case")]
struct Console {
// Address and port to expose tokio-console metrics on.
#[serde(skip_serializing_if = "Option::is_none")]
address: Option<SocketAddr>,
// Capacity of the console-subscriber event buffer.
#[serde(skip_serializing_if = "Option::is_none")]
buffer_capacity: Option<usize>,
}
// CLI overrides for `[tracing.opentelemetry]` (OTLP export).
#[derive(Debug, Default, serde::Serialize)]
#[serde(rename_all = "snake_case")]
struct OpenTelemetry {
// URL to send OpenTelemetry metrics to.
#[serde(skip_serializing_if = "Option::is_none")]
url: Option<Url>,
// Service name reported to OpenTelemetry.
#[serde(skip_serializing_if = "Option::is_none")]
service_name: Option<String>,
// Log levels to export, RUST_LOG-style.
#[serde(skip_serializing_if = "Option::is_none")]
targets: Option<Serde<Targets>>,
}
// CLI override for the `[old_db]` config section.
#[derive(Debug, Default, serde::Serialize)]
#[serde(rename_all = "snake_case")]
struct OldDb {
// Path to the old pict-rs sled database.
#[serde(skip_serializing_if = "Option::is_none")]
path: Option<PathBuf>,
}
// CLI overrides for the `[media]` config section; every limit is optional so
// unset flags fall through to other config sources.
#[derive(Debug, Default, serde::Serialize)]
#[serde(rename_all = "snake_case")]
struct Media {
// Maximum width, in pixels, for uploaded media.
#[serde(skip_serializing_if = "Option::is_none")]
max_width: Option<usize>,
// Maximum height, in pixels, for uploaded media.
#[serde(skip_serializing_if = "Option::is_none")]
max_height: Option<usize>,
// Maximum area, in pixels, for uploaded media.
#[serde(skip_serializing_if = "Option::is_none")]
max_area: Option<usize>,
// Maximum size, in megabytes, for uploaded media.
#[serde(skip_serializing_if = "Option::is_none")]
max_file_size: Option<usize>,
// Whether to allow GIF and silent MP4 uploads.
#[serde(skip_serializing_if = "Option::is_none")]
enable_silent_video: Option<bool>,
// Filters enabled on the `process` endpoint.
#[serde(skip_serializing_if = "Option::is_none")]
filters: Option<Vec<String>>,
// If set, transcode uploaded media to this format.
#[serde(skip_serializing_if = "Option::is_none")]
format: Option<ImageFormat>,
// Whether to skip validating media on the "import" endpoint.
#[serde(skip_serializing_if = "Option::is_none")]
skip_validate_imports: Option<bool>,
}
/// Run the pict-rs application
#[derive(Debug, Parser)]
#[clap(author, version, about, long_about = None)]
pub(crate) struct Args {
pub(super) struct Args {
/// Path to the pict-rs configuration file
#[clap(short, long)]
pub(crate) config_file: Option<PathBuf>,
config_file: Option<PathBuf>,
/// Path to the old pict-rs sled database
#[clap(long)]
old_db_path: Option<PathBuf>,
/// Format of logs printed to stdout
#[clap(long)]
pub(crate) log_format: Option<LogFormat>,
log_format: Option<LogFormat>,
/// Log levels to print to stdout, respects RUST_LOG formatting
#[clap(long)]
pub(crate) log_targets: Option<Targets>,
log_targets: Option<Targets>,
/// Address and port to expose tokio-console metrics
#[clap(long)]
pub(crate) console_address: Option<SocketAddr>,
console_address: Option<SocketAddr>,
/// Capacity of the console-subscriber Event Buffer
#[clap(long)]
pub(crate) console_buffer_capacity: Option<usize>,
console_buffer_capacity: Option<usize>,
/// URL to send OpenTelemetry metrics
#[clap(long)]
pub(crate) opentelemetry_url: Option<Url>,
opentelemetry_url: Option<Url>,
/// Service Name to use for OpenTelemetry
#[clap(long)]
pub(crate) opentelemetry_service_name: Option<String>,
opentelemetry_service_name: Option<String>,
/// Log levels to use for OpenTelemetry, respects RUST_LOG formatting
#[clap(long)]
pub(crate) opentelemetry_targets: Option<Targets>,
opentelemetry_targets: Option<Targets>,
/// File to save the current configuration for reproducible runs
#[clap(long)]
pub(crate) save_to: Option<PathBuf>,
save_to: Option<PathBuf>,
#[clap(subcommand)]
pub(crate) command: Command,
command: Command,
}
#[derive(Debug, Subcommand)]
pub(crate) enum Command {
enum Command {
/// Runs the pict-rs web server
Run(Run),
@ -54,47 +363,49 @@ pub(crate) enum Command {
}
#[derive(Debug, Parser)]
pub(crate) struct Run {
struct Run {
/// The address and port to bind the pict-rs web server
#[clap(short, long)]
pub(crate) address: SocketAddr,
address: Option<SocketAddr>,
/// The API KEY required to access restricted routes
#[clap(long)]
pub(crate) api_key: Option<String>,
api_key: Option<String>,
/// Whether to validate media on the "import" endpoint
#[clap(long)]
pub(crate) media_skip_validate_imports: Option<bool>,
media_skip_validate_imports: Option<bool>,
/// The maximum width, in pixels, for uploaded media
#[clap(long)]
pub(crate) media_max_width: Option<usize>,
media_max_width: Option<usize>,
/// The maximum height, in pixels, for uploaded media
#[clap(long)]
pub(crate) media_max_height: Option<usize>,
media_max_height: Option<usize>,
/// The maximum area, in pixels, for uploaded media
#[clap(long)]
pub(crate) media_max_area: Option<usize>,
media_max_area: Option<usize>,
/// The maximum size, in megabytes, for uploaded media
#[clap(long)]
pub(crate) media_max_file_size: Option<usize>,
media_max_file_size: Option<usize>,
/// Whether to enable GIF and silent MP4 uploads. Full videos are unsupported
#[clap(long)]
pub(crate) media_enable_silent_video: Option<bool>,
media_enable_silent_video: Option<bool>,
/// Which media filters should be enabled on the `process` endpoint
#[clap(long)]
pub(crate) media_filters: Option<Vec<String>>,
media_filters: Option<Vec<String>>,
/// Enforce uploaded media is transcoded to the provided format
#[clap(long)]
pub(crate) media_format: Option<ImageFormat>,
media_format: Option<ImageFormat>,
#[clap(subcommand)]
pub(crate) store: Option<RunStore>,
store: Option<RunStore>,
}
/// Configure the provided storage
#[derive(Debug, Subcommand)]
pub(crate) enum Store {
#[derive(Clone, Debug, Subcommand, serde::Serialize)]
#[serde(rename_all = "snake_case")]
#[serde(tag = "type")]
enum Store {
/// configure filesystem storage
Filesystem(Filesystem),
@ -104,7 +415,7 @@ pub(crate) enum Store {
/// Run pict-rs with the provided storage
#[derive(Debug, Subcommand)]
pub(crate) enum RunStore {
enum RunStore {
/// Run pict-rs with filesystem storage
Filesystem(RunFilesystem),
@ -114,7 +425,7 @@ pub(crate) enum RunStore {
/// Configure the pict-rs storage migration
#[derive(Debug, Subcommand)]
pub(crate) enum MigrateStore {
enum MigrateStore {
/// Migrate from the provided filesystem storage
Filesystem(MigrateFilesystem),
@ -122,95 +433,134 @@ pub(crate) enum MigrateStore {
ObjectStorage(MigrateObjectStorage),
}
/// Configure the destination storage for pict-rs storage migration
// Second level of the migrate-store subcommand tree: chosen after the source
// store has been specified.
#[derive(Debug, Subcommand)]
enum MigrateStoreInner {
/// Migrate to the provided filesystem storage
Filesystem(MigrateFilesystemInner),
/// Migrate to the provided object storage
ObjectStorage(MigrateObjectStorageInner),
}
/// Migrate pict-rs' storage from the provided filesystem storage
#[derive(Debug, Parser)]
pub(crate) struct MigrateFilesystem {
struct MigrateFilesystem {
#[clap(flatten)]
pub(crate) from: Filesystem,
from: crate::config::primitives::Filesystem,
#[clap(subcommand)]
pub(crate) to: RunStore,
to: MigrateStoreInner,
}
/// Migrate pict-rs' storage to the provided filesystem storage
#[derive(Debug, Parser)]
struct MigrateFilesystemInner {
// Destination filesystem settings, flattened into this subcommand's flags.
#[clap(flatten)]
to: crate::config::primitives::Filesystem,
// Optional repo selection for the migration run.
#[clap(subcommand)]
repo: Option<Repo>,
}
/// Migrate pict-rs' storage from the provided object storage
#[derive(Debug, Parser)]
pub(crate) struct MigrateObjectStorage {
struct MigrateObjectStorage {
#[clap(flatten)]
pub(crate) from: ObjectStorage,
from: crate::config::primitives::ObjectStorage,
#[clap(subcommand)]
pub(crate) to: RunStore,
to: MigrateStoreInner,
}
/// Migrate pict-rs' storage to the provided object storage
#[derive(Debug, Parser)]
struct MigrateObjectStorageInner {
// Destination object-storage settings, flattened into this subcommand's flags.
#[clap(flatten)]
to: crate::config::primitives::ObjectStorage,
// Optional repo selection for the migration run.
#[clap(subcommand)]
repo: Option<Repo>,
}
/// Run pict-rs with the provided filesystem storage
#[derive(Debug, Parser)]
pub(crate) struct RunFilesystem {
struct RunFilesystem {
#[clap(flatten)]
pub(crate) system: Filesystem,
system: Filesystem,
#[clap(subcommand)]
pub(crate) repo: Repo,
repo: Option<Repo>,
}
/// Run pict-rs with the provided object storage
#[derive(Debug, Parser)]
pub(crate) struct RunObjectStorage {
struct RunObjectStorage {
#[clap(flatten)]
pub(crate) storage: ObjectStorage,
storage: ObjectStorage,
#[clap(subcommand)]
pub(crate) repo: Repo,
repo: Option<Repo>,
}
/// Configuration for data repositories
#[derive(Debug, Subcommand)]
pub(crate) enum Repo {
#[derive(Debug, Subcommand, serde::Serialize)]
#[serde(rename_all = "snake_case")]
#[serde(tag = "type")]
enum Repo {
/// Run pict-rs with the provided sled-backed data repository
Sled(Sled),
}
/// Configuration for filesystem media storage
#[derive(Debug, Parser)]
pub(crate) struct Filesystem {
#[derive(Clone, Debug, Parser, serde::Serialize)]
#[serde(rename_all = "snake_case")]
struct Filesystem {
/// The path to store uploaded media
#[clap(short, long)]
pub(crate) path: Option<PathBuf>,
path: Option<PathBuf>,
}
/// Configuration for Object Storage
#[derive(Debug, Parser)]
pub(crate) struct ObjectStorage {
#[derive(Clone, Debug, Parser, serde::Serialize)]
#[serde(rename_all = "snake_case")]
struct ObjectStorage {
/// The bucket in which to store media
#[clap(short, long)]
pub(crate) bucket_name: Option<String>,
bucket_name: Option<String>,
/// The region the bucket is located in
#[clap(short, long)]
pub(crate) region: Option<s3::Region>,
region: Option<Serde<s3::Region>>,
/// The Access Key for the user accessing the bucket
#[clap(short, long)]
pub(crate) access_key: Option<String>,
access_key: Option<String>,
/// The secret key for the user accessing the bucket
#[clap(short, long)]
pub(crate) secret_key: Option<String>,
secret_key: Option<String>,
/// The security token for accessing the bucket
#[clap(long)]
pub(crate) security_token: Option<String>,
security_token: Option<String>,
/// The session token for accessing the bucket
#[clap(long)]
pub(crate) session_token: Option<String>,
session_token: Option<String>,
}
/// Configuration for the sled-backed data repository
#[derive(Debug, Parser)]
pub(crate) struct Sled {
#[derive(Debug, Parser, serde::Serialize)]
#[serde(rename_all = "snake_case")]
struct Sled {
/// The path to store the sled database
pub(crate) path: Option<PathBuf>,
#[clap(short, long)]
#[serde(skip_serializing_if = "Option::is_none")]
path: Option<PathBuf>,
/// The cache capacity, in bytes, allowed to sled for in-memory operations
pub(crate) cache_capacity: Option<u64>,
#[clap(short, long)]
#[serde(skip_serializing_if = "Option::is_none")]
cache_capacity: Option<u64>,
}

View file

@ -4,7 +4,8 @@ use crate::{
};
use std::{net::SocketAddr, path::PathBuf};
#[derive(Clone, Debug, serde::Deserialize, serde::Serialize)]
#[derive(Clone, Debug, Default, serde::Serialize)]
#[serde(rename_all = "snake_case")]
pub(crate) struct Defaults {
server: ServerDefaults,
tracing: TracingDefaults,
@ -14,35 +15,50 @@ pub(crate) struct Defaults {
store: StoreDefaults,
}
#[derive(Clone, Debug, serde::Deserialize, serde::Serialize)]
#[derive(Clone, Debug, serde::Serialize)]
#[serde(rename_all = "snake_case")]
struct ServerDefaults {
address: SocketAddr,
}
#[derive(Clone, Debug, serde::Deserialize, serde::Serialize)]
#[derive(Clone, Debug, Default, serde::Serialize)]
#[serde(rename_all = "snake_case")]
struct TracingDefaults {
logging: LoggingDefaults,
console: ConsoleDefaults,
opentelemetry: OpenTelemetryDefaults,
}
#[derive(Clone, Debug, serde::Deserialize, serde::Serialize)]
#[derive(Clone, Debug, serde::Serialize)]
#[serde(rename_all = "snake_case")]
struct LoggingDefaults {
format: LogFormat,
targets: Serde<Targets>,
}
#[derive(Clone, Debug, serde::Deserialize, serde::Serialize)]
#[derive(Clone, Debug, serde::Serialize)]
#[serde(rename_all = "snake_case")]
struct ConsoleDefaults {
buffer_capacity: usize,
}
#[derive(Clone, Debug, serde::Deserialize, serde::Serialize)]
#[derive(Clone, Debug, serde::Serialize)]
#[serde(rename_all = "snake_case")]
struct OpenTelemetryDefaults {
service_name: String,
targets: Serde<Targets>,
}
// Default values for the `[old_db]` config section.
#[derive(Clone, Debug, serde::Serialize)]
#[serde(rename_all = "snake_case")]
struct OldDbDefaults {
// Path to the old pict-rs sled database.
path: PathBuf,
}
#[derive(Clone, Debug, serde::Deserialize, serde::Serialize)]
#[derive(Clone, Debug, serde::Serialize)]
#[serde(rename_all = "snake_case")]
struct MediaDefaults {
max_width: usize,
max_height: usize,
@ -53,42 +69,33 @@ struct MediaDefaults {
skip_validate_imports: bool,
}
#[derive(Clone, Debug, serde::Deserialize, serde::Serialize)]
#[derive(Clone, Debug, serde::Serialize)]
#[serde(rename_all = "snake_case")]
#[serde(tag = "type")]
enum RepoDefaults {
Sled(SledDefaults),
}
#[derive(Clone, Debug, serde::Deserialize, serde::Serialize)]
#[derive(Clone, Debug, serde::Serialize)]
#[serde(rename_all = "snake_case")]
struct SledDefaults {
path: PathBuf,
cache_capacity: u64,
}
#[derive(Clone, Debug, serde::Deserialize, serde::Serialize)]
#[derive(Clone, Debug, serde::Serialize)]
#[serde(rename_all = "snake_case")]
#[serde(tag = "type")]
enum StoreDefaults {
Filesystem(FilesystemDefaults),
}
#[derive(Clone, Debug, serde::Deserialize, serde::Serialize)]
#[derive(Clone, Debug, serde::Serialize)]
#[serde(rename_all = "snake_case")]
struct FilesystemDefaults {
path: PathBuf,
}
impl Default for Defaults {
fn default() -> Self {
Defaults {
server: ServerDefaults::default(),
tracing: TracingDefaults::default(),
old_db: OldDbDefaults::default(),
media: MediaDefaults::default(),
repo: RepoDefaults::default(),
store: StoreDefaults::default(),
}
}
}
impl Default for ServerDefaults {
fn default() -> Self {
ServerDefaults {
@ -97,15 +104,6 @@ impl Default for ServerDefaults {
}
}
impl Default for TracingDefaults {
fn default() -> TracingDefaults {
TracingDefaults {
logging: LoggingDefaults::default(),
console: ConsoleDefaults::default(),
}
}
}
impl Default for LoggingDefaults {
fn default() -> Self {
LoggingDefaults {
@ -123,6 +121,15 @@ impl Default for ConsoleDefaults {
}
}
impl Default for OpenTelemetryDefaults {
fn default() -> Self {
OpenTelemetryDefaults {
// Matches the service_name shipped in defaults.toml.
service_name: String::from("pict-rs"),
// "info" is a statically-known valid targets string, hence expect().
targets: "info".parse().expect("Valid targets string"),
}
}
}
impl Default for OldDbDefaults {
fn default() -> Self {
OldDbDefaults {

View file

@ -1,18 +1,18 @@
use crate::{
config::primitives::{ImageFormat, LogFormat, Targets},
config::primitives::{ImageFormat, LogFormat, Store, Targets},
serde_str::Serde,
};
use std::{net::SocketAddr, path::PathBuf};
use std::{collections::HashSet, net::SocketAddr, path::PathBuf};
use url::Url;
#[derive(Clone, Debug, serde::Deserialize, serde::Serialize)]
#[serde(rename_all = "snake_case")]
pub(crate) struct ConfigFile {
pub(crate) server: Server,
pub(crate) tracing: Tracing,
#[serde(skip_serializing_if = "Option::is_none")]
pub(crate) old_db: Option<OldDb>,
pub(crate) old_db: OldDb,
pub(crate) media: Media,
@ -22,20 +22,14 @@ pub(crate) struct ConfigFile {
}
#[derive(Clone, Debug, serde::Deserialize, serde::Serialize)]
#[serde(rename_all = "snake_case")]
#[serde(tag = "type")]
pub(crate) enum Repo {
Sled(Sled),
}
#[derive(Clone, Debug, serde::Deserialize, serde::Serialize)]
#[serde(tag = "type")]
pub(crate) enum Store {
Filesystem(Filesystem),
ObjectStorage(ObjectStorage),
}
#[derive(Clone, Debug, serde::Deserialize, serde::Serialize)]
#[serde(rename_all = "snake_case")]
pub(crate) struct Server {
pub(crate) address: SocketAddr,
@ -44,17 +38,17 @@ pub(crate) struct Server {
}
#[derive(Clone, Debug, serde::Deserialize, serde::Serialize)]
#[serde(rename_all = "snake_case")]
pub(crate) struct Tracing {
logging: Logging,
pub(crate) logging: Logging,
#[serde(skip_serializing_if = "Option::is_none")]
console: Option<Console>,
pub(crate) console: Console,
#[serde(skip_serializing_if = "Option::is_none")]
opentelemetry: Option<OpenTelemetry>,
pub(crate) opentelemetry: OpenTelemetry,
}
#[derive(Clone, Debug, serde::Deserialize, serde::Serialize)]
#[serde(rename_all = "snake_case")]
pub(crate) struct Logging {
pub(crate) format: LogFormat,
@ -62,8 +56,10 @@ pub(crate) struct Logging {
}
#[derive(Clone, Debug, serde::Deserialize, serde::Serialize)]
#[serde(rename_all = "snake_case")]
pub(crate) struct OpenTelemetry {
pub(crate) url: Url,
#[serde(skip_serializing_if = "Option::is_none")]
pub(crate) url: Option<Url>,
pub(crate) service_name: String,
@ -71,17 +67,22 @@ pub(crate) struct OpenTelemetry {
}
#[derive(Clone, Debug, serde::Deserialize, serde::Serialize)]
#[serde(rename_all = "snake_case")]
pub(crate) struct Console {
pub(crate) address: SocketAddr,
#[serde(skip_serializing_if = "Option::is_none")]
pub(crate) address: Option<SocketAddr>,
pub(crate) buffer_capacity: usize,
}
#[derive(Clone, Debug, serde::Deserialize, serde::Serialize)]
#[serde(rename_all = "snake_case")]
pub(crate) struct OldDb {
pub(crate) path: PathBuf,
}
#[derive(Clone, Debug, serde::Deserialize, serde::Serialize)]
#[serde(rename_all = "snake_case")]
pub(crate) struct Media {
pub(crate) max_width: usize,
@ -93,7 +94,7 @@ pub(crate) struct Media {
pub(crate) enable_silent_video: bool,
pub(crate) filters: Vec<String>,
pub(crate) filters: HashSet<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub(crate) format: Option<ImageFormat>,
@ -102,28 +103,7 @@ pub(crate) struct Media {
}
#[derive(Clone, Debug, serde::Deserialize, serde::Serialize)]
pub(crate) struct Filesystem {
pub(crate) path: PathBuf,
}
#[derive(Clone, Debug, serde::Deserialize, serde::Serialize)]
pub(crate) struct ObjectStorage {
pub(crate) bucket_name: String,
pub(crate) region: Serde<s3::Region>,
pub(crate) access_key: String,
pub(crate) secret_key: String,
#[serde(skip_serializing_if = "Option::is_none")]
pub(crate) security_token: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub(crate) session_token: Option<String>,
}
#[derive(Clone, Debug, serde::Deserialize, serde::Serialize)]
#[serde(rename_all = "snake_case")]
pub(crate) struct Sled {
pub(crate) path: PathBuf,

View file

@ -1,8 +1,12 @@
use crate::magick::ValidInputType;
use crate::serde_str::Serde;
use clap::ArgEnum;
use std::{fmt::Display, str::FromStr};
use std::{fmt::Display, path::PathBuf, str::FromStr};
use tracing::Level;
#[derive(
Clone,
Copy,
Debug,
PartialEq,
Eq,
@ -13,6 +17,7 @@ use std::{fmt::Display, str::FromStr};
serde::Serialize,
ArgEnum,
)]
#[serde(rename_all = "snake_case")]
pub(crate) enum LogFormat {
Compact,
Json,
@ -22,6 +27,7 @@ pub(crate) enum LogFormat {
#[derive(
Clone,
Copy,
Debug,
PartialEq,
Eq,
@ -32,6 +38,7 @@ pub(crate) enum LogFormat {
serde::Serialize,
ArgEnum,
)]
#[serde(rename_all = "snake_case")]
pub(crate) enum ImageFormat {
Jpeg,
Webp,
@ -43,6 +50,81 @@ pub(crate) struct Targets {
pub(crate) targets: tracing_subscriber::filter::Targets,
}
/// Configuration for filesystem media storage
// Derives both Parser and Deserialize/Serialize: used as CLI flags and as a
// config-file section.
#[derive(Clone, Debug, serde::Deserialize, serde::Serialize, clap::Parser)]
#[serde(rename_all = "snake_case")]
pub(crate) struct Filesystem {
/// Path to store media
#[clap(short, long)]
pub(crate) path: PathBuf,
}
/// Configuration for object media storage
// Derives both Parser and Deserialize/Serialize: used as CLI flags and as a
// config-file section. The two token fields are optional and omitted from
// serialized output when unset.
#[derive(Clone, Debug, serde::Deserialize, serde::Serialize, clap::Parser)]
#[serde(rename_all = "snake_case")]
pub(crate) struct ObjectStorage {
/// The bucket in which to store media
#[clap(short, long)]
pub(crate) bucket_name: String,
/// The region the bucket is located in
#[clap(short, long)]
pub(crate) region: Serde<s3::Region>,
/// The Access Key for the user accessing the bucket
#[clap(short, long)]
pub(crate) access_key: String,
/// The secret key for the user accessing the bucket
#[clap(short, long)]
pub(crate) secret_key: String,
/// The security token for accessing the bucket
#[clap(long)]
#[serde(skip_serializing_if = "Option::is_none")]
pub(crate) security_token: Option<String>,
/// The session token for accessing the bucket
#[clap(long)]
#[serde(skip_serializing_if = "Option::is_none")]
pub(crate) session_token: Option<String>,
}
// Media storage backend selection.
#[derive(Clone, Debug, serde::Deserialize, serde::Serialize)]
#[serde(rename_all = "snake_case")]
// Internally tagged: the serialized form carries a `type` field
// ("filesystem" / "object_storage") selecting the variant.
#[serde(tag = "type")]
pub(crate) enum Store {
Filesystem(Filesystem),
ObjectStorage(ObjectStorage),
}
impl ImageFormat {
// Input-type hint for media validation; every image format maps to a valid
// input type, so this always returns Some.
pub(crate) fn as_hint(self) -> Option<ValidInputType> {
Some(ValidInputType::from_format(self))
}
// Name of this format as passed to the `magick` command line.
pub(crate) fn as_magick_format(self) -> &'static str {
match self {
Self::Jpeg => "JPEG",
Self::Png => "PNG",
Self::Webp => "WEBP",
}
}
}
// Enables `.into()` conversions when building `Operation::MigrateStore`.
impl From<Filesystem> for Store {
fn from(f: Filesystem) -> Self {
Self::Filesystem(f)
}
}
// Enables `.into()` conversions when building `Operation::MigrateStore`.
impl From<ObjectStorage> for Store {
fn from(o: ObjectStorage) -> Self {
Self::ObjectStorage(o)
}
}
impl FromStr for Targets {
type Err = <tracing_subscriber::filter::Targets as FromStr>::Err;
@ -62,7 +144,37 @@ impl Display for Targets {
.collect::<Vec<_>>()
.join(",");
write!(f, "{}", targets)
let max_level = [
Level::TRACE,
Level::DEBUG,
Level::INFO,
Level::WARN,
Level::ERROR,
]
.iter()
.fold(None, |found, level| {
if found.is_none()
&& self
.targets
.would_enable("not_a_real_target_so_nothing_can_conflict", level)
{
Some(level.to_string().to_lowercase())
} else {
found
}
});
if let Some(level) = max_level {
if !targets.is_empty() {
write!(f, "{},{}", level, targets)
} else {
write!(f, "{}", level)
}
} else if !targets.is_empty() {
write!(f, "{}", targets)
} else {
Ok(())
}
}
}
@ -109,3 +221,31 @@ impl Display for LogFormat {
.fmt(f)
}
}
#[cfg(test)]
mod tests {
    use super::{Serde, Targets};

    /// A bare level filter round-trips through `FromStr` and `Display`.
    // Leftover debug `println!` removed; asserts are the whole test.
    #[test]
    fn builds_info_targets() {
        let t: Serde<Targets> = "info".parse().unwrap();
        assert_eq!(t.to_string(), "info");
    }

    /// A single per-target filter round-trips unchanged.
    #[test]
    fn builds_specific_targets() {
        let t: Serde<Targets> = "pict_rs=info".parse().unwrap();
        assert_eq!(t.to_string(), "pict_rs=info");
    }

    /// A default level plus a per-target override keeps both parts, with the
    /// default level rendered first.
    #[test]
    fn builds_warn_and_specific_targets() {
        let t: Serde<Targets> = "warn,pict_rs=info".parse().unwrap();
        assert_eq!(t.to_string(), "warn,pict_rs=info");
    }
}

View file

@ -1,3 +1,4 @@
use crate::config::{LogFormat, OpenTelemetry, Tracing};
use console_subscriber::ConsoleLayer;
use opentelemetry::{
sdk::{propagation::TraceContextPropagator, Resource},
@ -8,74 +9,73 @@ use tracing::subscriber::set_global_default;
use tracing_error::ErrorLayer;
use tracing_log::LogTracer;
use tracing_subscriber::{
filter::Targets, fmt::format::FmtSpan, layer::SubscriberExt, registry::LookupSpan, Layer,
Registry,
fmt::format::FmtSpan, layer::SubscriberExt, registry::LookupSpan, Layer, Registry,
};
use url::Url;
pub(super) fn init_tracing(
servic_name: &'static str,
opentelemetry_url: Option<&Url>,
buffer_capacity: Option<usize>,
) -> anyhow::Result<()> {
pub(super) fn init_tracing(tracing: &Tracing) -> anyhow::Result<()> {
LogTracer::init()?;
opentelemetry::global::set_text_map_propagator(TraceContextPropagator::new());
let targets = std::env::var("RUST_LOG")
.unwrap_or_else(|_| "info".into())
.parse::<Targets>()?;
let format_layer =
tracing_subscriber::fmt::layer().with_span_events(FmtSpan::NEW | FmtSpan::CLOSE);
let format_layer = tracing_subscriber::fmt::layer()
.with_span_events(FmtSpan::NEW | FmtSpan::CLOSE)
.with_filter(targets.clone());
match tracing.logging.format {
LogFormat::Compact => with_format(format_layer.compact(), tracing),
LogFormat::Json => with_format(format_layer.json(), tracing),
LogFormat::Normal => with_format(format_layer, tracing),
LogFormat::Pretty => with_format(format_layer.pretty(), tracing),
}
}
fn with_format<F>(format_layer: F, tracing: &Tracing) -> anyhow::Result<()>
where
F: Layer<Registry> + Send + Sync,
{
let format_layer = format_layer.with_filter(tracing.logging.targets.targets.clone());
let subscriber = Registry::default()
.with(format_layer)
.with(ErrorLayer::default());
if let Some(buffer_capacity) = buffer_capacity {
if let Some(address) = tracing.console.address {
let console_layer = ConsoleLayer::builder()
.with_default_env()
.event_buffer_capacity(buffer_capacity)
.server_addr(([0, 0, 0, 0], 6669))
.event_buffer_capacity(tracing.console.buffer_capacity)
.server_addr(address)
.spawn();
let subscriber = subscriber.with(console_layer);
with_otel(subscriber, targets, servic_name, opentelemetry_url)
with_subscriber(subscriber, &tracing.opentelemetry)
} else {
with_otel(subscriber, targets, servic_name, opentelemetry_url)
with_subscriber(subscriber, &tracing.opentelemetry)
}
}
fn with_otel<S>(
subscriber: S,
targets: Targets,
servic_name: &'static str,
opentelemetry_url: Option<&Url>,
) -> anyhow::Result<()>
fn with_subscriber<S>(subscriber: S, otel: &OpenTelemetry) -> anyhow::Result<()>
where
S: SubscriberExt + Send + Sync,
for<'a> S: LookupSpan<'a>,
{
if let Some(url) = opentelemetry_url {
let tracer =
opentelemetry_otlp::new_pipeline()
.tracing()
.with_trace_config(opentelemetry::sdk::trace::config().with_resource(
Resource::new(vec![KeyValue::new("service.name", servic_name)]),
))
.with_exporter(
opentelemetry_otlp::new_exporter()
.tonic()
.with_endpoint(url.as_str()),
)
.install_batch(opentelemetry::runtime::Tokio)?;
if let Some(url) = otel.url.as_ref() {
let tracer = opentelemetry_otlp::new_pipeline()
.tracing()
.with_trace_config(
opentelemetry::sdk::trace::config().with_resource(Resource::new(vec![
KeyValue::new("service.name", otel.service_name.clone()),
])),
)
.with_exporter(
opentelemetry_otlp::new_exporter()
.tonic()
.with_endpoint(url.as_str()),
)
.install_batch(opentelemetry::runtime::Tokio)?;
let otel_layer = tracing_opentelemetry::layer()
.with_tracer(tracer)
.with_filter(targets);
.with_filter(otel.targets.as_ref().targets.clone());
let subscriber = subscriber.with(otel_layer);

View file

@ -1,5 +1,5 @@
use crate::{
config::Format,
config::ImageFormat,
error::{Error, UploadError},
process::Process,
repo::Alias,
@ -63,11 +63,11 @@ impl ValidInputType {
matches!(self, Self::Mp4)
}
pub(crate) fn from_format(format: Format) -> Self {
pub(crate) fn from_format(format: ImageFormat) -> Self {
match format {
Format::Jpeg => ValidInputType::Jpeg,
Format::Png => ValidInputType::Png,
Format::Webp => ValidInputType::Webp,
ImageFormat::Jpeg => ValidInputType::Jpeg,
ImageFormat::Png => ValidInputType::Png,
ImageFormat::Webp => ValidInputType::Webp,
}
}
}
@ -87,7 +87,7 @@ pub(crate) fn clear_metadata_bytes_read(input: Bytes) -> std::io::Result<impl As
pub(crate) fn convert_bytes_read(
input: Bytes,
format: Format,
format: ImageFormat,
) -> std::io::Result<impl AsyncRead + Unpin> {
let process = Process::run(
"magick",
@ -259,7 +259,7 @@ pub(crate) fn process_image_store_read<S: Store>(
store: S,
identifier: S::Identifier,
args: Vec<String>,
format: Format,
format: ImageFormat,
) -> std::io::Result<impl AsyncRead + Unpin> {
let command = "magick";
let convert_args = ["convert", "-"];
@ -278,9 +278,9 @@ pub(crate) fn process_image_store_read<S: Store>(
impl Details {
#[instrument(name = "Validating input type")]
fn validate_input(&self) -> Result<ValidInputType, Error> {
if self.width > crate::CONFIG.max_width()
|| self.height > crate::CONFIG.max_height()
|| self.width * self.height > crate::CONFIG.max_area()
if self.width > crate::CONFIG.media.max_width
|| self.height > crate::CONFIG.media.max_height
|| self.width * self.height > crate::CONFIG.media.max_area
{
return Err(UploadError::Dimensions.into());
}

View file

@ -49,7 +49,7 @@ mod validate;
use self::{
concurrent_processor::CancelSafeProcessor,
config::{CommandConfig, Config, Format, RequiredFilesystemStorage, RequiredObjectStorage},
config::{Configuration, ImageFormat, Operation},
details::Details,
either::Either,
error::{Error, UploadError},
@ -58,6 +58,7 @@ use self::{
middleware::{Deadline, Internal},
migrate::LatestDb,
repo::{Alias, DeleteToken, Repo},
serde_str::Serde,
store::{file_store::FileStore, object_store::ObjectStore, Store},
upload_manager::{UploadManager, UploadManagerSession},
};
@ -67,7 +68,10 @@ const MINUTES: u32 = 60;
const HOURS: u32 = 60 * MINUTES;
const DAYS: u32 = 24 * HOURS;
static CONFIG: Lazy<Config> = Lazy::new(|| Config::build().unwrap());
static DO_CONFIG: Lazy<(Configuration, Operation)> =
Lazy::new(|| config::configure().expect("Failed to configure"));
static CONFIG: Lazy<Configuration> = Lazy::new(|| DO_CONFIG.0.clone());
static OPERATION: Lazy<Operation> = Lazy::new(|| DO_CONFIG.1.clone());
static PROCESS_SEMAPHORE: Lazy<Semaphore> =
Lazy::new(|| Semaphore::new(num_cpus::get().saturating_sub(1).max(1)));
@ -202,7 +206,7 @@ async fn download<S: Store>(
let stream = Limit::new(
map_error::map_crate_error(res),
(CONFIG.max_file_size() * MEGABYTES) as u64,
(CONFIG.media.max_file_size * MEGABYTES) as u64,
);
futures_util::pin_mut!(stream);
@ -260,7 +264,7 @@ fn prepare_process(
query: web::Query<ProcessQuery>,
ext: &str,
filters: &Option<HashSet<String>>,
) -> Result<(Format, Alias, PathBuf, Vec<String>), Error> {
) -> Result<(ImageFormat, Alias, PathBuf, Vec<String>), Error> {
let (alias, operations) =
query
.into_inner()
@ -290,7 +294,7 @@ fn prepare_process(
};
let format = ext
.parse::<Format>()
.parse::<ImageFormat>()
.map_err(|_| UploadError::UnsupportedFormat)?;
let (thumbnail_path, thumbnail_args) = self::processor::build_chain(&operations)?;
@ -639,7 +643,7 @@ async fn launch<S: Store + Clone + 'static>(
let store2 = store.clone();
let form = Form::new()
.max_files(10)
.max_file_size(CONFIG.max_file_size() * MEGABYTES)
.max_file_size(CONFIG.media.max_file_size * MEGABYTES)
.transform_error(transform_error)
.field(
"images",
@ -667,12 +671,12 @@ async fn launch<S: Store + Clone + 'static>(
// Create a new Multipart Form validator for internal imports
//
// This form is expecting a single array field, 'images' with at most 10 files in it
let validate_imports = CONFIG.validate_imports();
let validate_imports = !CONFIG.media.skip_validate_imports;
let manager2 = manager.clone();
let store2 = store.clone();
let import_form = Form::new()
.max_files(10)
.max_file_size(CONFIG.max_file_size() * MEGABYTES)
.max_file_size(CONFIG.media.max_file_size * MEGABYTES)
.transform_error(transform_error)
.field(
"images",
@ -708,7 +712,7 @@ async fn launch<S: Store + Clone + 'static>(
.app_data(web::Data::new(store.clone()))
.app_data(web::Data::new(manager.clone()))
.app_data(web::Data::new(build_client()))
.app_data(web::Data::new(CONFIG.allowed_filters()))
.app_data(web::Data::new(CONFIG.media.filters.clone()))
.service(
web::scope("/image")
.service(
@ -739,7 +743,9 @@ async fn launch<S: Store + Clone + 'static>(
)
.service(
web::scope("/internal")
.wrap(Internal(CONFIG.api_key().map(|s| s.to_owned())))
.wrap(Internal(
CONFIG.server.api_key.as_ref().map(|s| s.to_owned()),
))
.service(
web::resource("/import")
.wrap(import_form.clone())
@ -749,7 +755,7 @@ async fn launch<S: Store + Clone + 'static>(
.service(web::resource("/aliases").route(web::get().to(aliases::<S>))),
)
})
.bind(CONFIG.bind_address())?
.bind(CONFIG.server.address)?
.run()
.await?;
@ -762,17 +768,17 @@ async fn migrate_inner<S1>(
manager: &UploadManager,
repo: &Repo,
from: S1,
to: &config::Storage,
to: &config::Store,
) -> anyhow::Result<()>
where
S1: Store,
{
match to {
config::Storage::Filesystem(RequiredFilesystemStorage { path }) => {
config::Store::Filesystem(config::Filesystem { path }) => {
let to = FileStore::build(path.clone(), repo.clone()).await?;
manager.migrate_store::<S1, FileStore>(from, to).await?;
}
config::Storage::ObjectStorage(RequiredObjectStorage {
config::Store::ObjectStorage(config::ObjectStorage {
bucket_name,
region,
access_key,
@ -782,9 +788,9 @@ where
}) => {
let to = ObjectStore::build(
bucket_name,
region.clone(),
access_key.clone(),
secret_key.clone(),
region.as_ref().clone(),
Some(access_key.clone()),
Some(secret_key.clone()),
security_token.clone(),
session_token.clone(),
repo.clone(),
@ -801,38 +807,24 @@ where
#[actix_rt::main]
async fn main() -> anyhow::Result<()> {
init_tracing(
"pict-rs",
CONFIG.opentelemetry_url(),
CONFIG.console_buffer_capacity(),
)?;
init_tracing(&CONFIG.tracing)?;
let repo = Repo::open(CONFIG.repo())?;
let repo = Repo::open(CONFIG.repo.clone())?;
let db = LatestDb::exists(CONFIG.data_dir()).migrate()?;
let db = LatestDb::exists(CONFIG.old_db.path.clone()).migrate()?;
repo.from_db(db).await?;
let manager = UploadManager::new(repo.clone(), CONFIG.format()).await?;
match CONFIG.command()? {
CommandConfig::Run => (),
CommandConfig::Dump { path } => {
let configuration = toml::to_string_pretty(&*CONFIG)?;
tokio::fs::write(path, configuration).await?;
return Ok(());
}
CommandConfig::MigrateRepo { to: _ } => {
unimplemented!("Repo migrations are currently unsupported")
}
CommandConfig::MigrateStore { to } => {
let from = CONFIG.store()?;
let manager = UploadManager::new(repo.clone(), CONFIG.media.format).await?;
match (*OPERATION).clone() {
Operation::Run => (),
Operation::MigrateStore { from, to } => {
match from {
config::Storage::Filesystem(RequiredFilesystemStorage { path }) => {
config::Store::Filesystem(config::Filesystem { path }) => {
let from = FileStore::build(path.clone(), repo.clone()).await?;
migrate_inner(&manager, &repo, from, &to).await?;
}
config::Storage::ObjectStorage(RequiredObjectStorage {
config::Store::ObjectStorage(config::ObjectStorage {
bucket_name,
region,
access_key,
@ -842,9 +834,9 @@ async fn main() -> anyhow::Result<()> {
}) => {
let from = ObjectStore::build(
&bucket_name,
region,
access_key,
secret_key,
Serde::into_inner(region),
Some(access_key),
Some(secret_key),
security_token,
session_token,
repo.clone(),
@ -860,12 +852,12 @@ async fn main() -> anyhow::Result<()> {
}
}
match CONFIG.store()? {
config::Storage::Filesystem(RequiredFilesystemStorage { path }) => {
match CONFIG.store.clone() {
config::Store::Filesystem(config::Filesystem { path }) => {
let store = FileStore::build(path, repo).await?;
launch(manager, store).await
}
config::Storage::ObjectStorage(RequiredObjectStorage {
config::Store::ObjectStorage(config::ObjectStorage {
bucket_name,
region,
access_key,
@ -875,9 +867,9 @@ async fn main() -> anyhow::Result<()> {
}) => {
let store = ObjectStore::build(
&bucket_name,
region,
access_key,
secret_key,
Serde::into_inner(region),
Some(access_key),
Some(secret_key),
security_token,
session_token,
repo,

View file

@ -1,9 +1,4 @@
use crate::{
config::{Repository, RequiredSledRepo},
details::Details,
error::Error,
store::Identifier,
};
use crate::{config, details::Details, error::Error, store::Identifier};
use futures_util::Stream;
use tracing::debug;
use uuid::Uuid;
@ -125,9 +120,9 @@ pub(crate) trait AliasRepo {
}
impl Repo {
pub(crate) fn open(config: Repository) -> anyhow::Result<Self> {
pub(crate) fn open(config: config::Repo) -> anyhow::Result<Self> {
match config {
Repository::Sled(RequiredSledRepo {
config::Repo::Sled(config::Sled {
mut path,
cache_capacity,
}) => {

View file

@ -18,6 +18,18 @@ impl<T> Serde<T> {
}
}
impl<T> AsRef<T> for Serde<T> {
fn as_ref(&self) -> &T {
&self.inner
}
}
impl<T> AsMut<T> for Serde<T> {
fn as_mut(&mut self) -> &mut T {
&mut self.inner
}
}
impl<T> Deref for Serde<T> {
type Target = T;

View file

@ -1,5 +1,5 @@
use crate::{
config::Format,
config::ImageFormat,
details::Details,
error::{Error, UploadError},
ffmpeg::{InputFormat, ThumbnailFormat},
@ -28,14 +28,14 @@ pub(crate) struct UploadManager {
}
pub(crate) struct UploadManagerInner {
format: Option<Format>,
format: Option<ImageFormat>,
hasher: sha2::Sha256,
repo: Repo,
}
impl UploadManager {
/// Create a new UploadManager
pub(crate) async fn new(repo: Repo, format: Option<Format>) -> Result<Self, Error> {
pub(crate) async fn new(repo: Repo, format: Option<ImageFormat>) -> Result<Self, Error> {
let manager = UploadManager {
inner: Arc::new(UploadManagerInner {
format,

View file

@ -1,5 +1,5 @@
use crate::{
config::Format, either::Either, error::Error, ffmpeg::InputFormat, magick::ValidInputType,
config::ImageFormat, either::Either, error::Error, ffmpeg::InputFormat, magick::ValidInputType,
};
use actix_web::web::Bytes;
use tokio::io::AsyncRead;
@ -35,7 +35,7 @@ impl AsyncRead for UnvalidatedBytes {
#[instrument(name = "Validate image", skip(bytes))]
pub(crate) async fn validate_image_bytes(
bytes: Bytes,
prescribed_format: Option<Format>,
prescribed_format: Option<ImageFormat>,
validate: bool,
) -> Result<(ValidInputType, impl AsyncRead + Unpin), Error> {
let input_type = crate::magick::input_type_bytes(bytes.clone()).await?;
@ -57,19 +57,19 @@ pub(crate) async fn validate_image_bytes(
crate::ffmpeg::to_mp4_bytes(bytes, InputFormat::Mp4).await?,
)),
)),
(Some(Format::Jpeg) | None, ValidInputType::Jpeg) => Ok((
(Some(ImageFormat::Jpeg) | None, ValidInputType::Jpeg) => Ok((
ValidInputType::Jpeg,
Either::right(Either::right(Either::left(
crate::exiftool::clear_metadata_bytes_read(bytes)?,
))),
)),
(Some(Format::Png) | None, ValidInputType::Png) => Ok((
(Some(ImageFormat::Png) | None, ValidInputType::Png) => Ok((
ValidInputType::Png,
Either::right(Either::right(Either::left(
crate::exiftool::clear_metadata_bytes_read(bytes)?,
))),
)),
(Some(Format::Webp) | None, ValidInputType::Webp) => Ok((
(Some(ImageFormat::Webp) | None, ValidInputType::Webp) => Ok((
ValidInputType::Webp,
Either::right(Either::right(Either::right(Either::left(
crate::magick::clear_metadata_bytes_read(bytes)?,