Mirror of https://git.asonix.dog/asonix/pict-rs.git, synced 2024-11-25 02:51:17 +00:00

Merge branch 'asonix/repo' into main

commit 8633eab54f

51 changed files with 5495 additions and 2921 deletions
@@ -24,8 +24,6 @@ steps:
   - rustup component add clippy
   - cargo clippy --no-default-features -- -D warnings
   - cargo clippy --no-default-features --features io-uring -- -D warnings
-  - cargo clippy --no-default-features --features object-storage -- -D warnings
-  - cargo clippy --no-default-features --features object-storage,io-uring -- -D warnings
 
 trigger:
   event:
Cargo.lock (generated): 218 lines changed

@@ -219,6 +219,15 @@ dependencies = [
  "url",
 ]
 
+[[package]]
+name = "addr2line"
+version = "0.17.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b9ecd88a8c8378ca913a680cd98f0f13ac67383d35993f86c90a70e3f137816b"
+dependencies = [
+ "gimli",
+]
+
 [[package]]
 name = "adler"
 version = "1.0.2"

@@ -366,6 +375,21 @@ dependencies = [
  "anyhow",
 ]
 
+[[package]]
+name = "backtrace"
+version = "0.3.64"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5e121dee8023ce33ab248d9ce1493df03c3b38a659b240096fcbd7048ff9c31f"
+dependencies = [
+ "addr2line",
+ "cc",
+ "cfg-if",
+ "libc",
+ "miniz_oxide",
+ "object",
+ "rustc-demangle",
+]
+
 [[package]]
 name = "base64"
 version = "0.13.0"

@@ -464,17 +488,59 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
 
 [[package]]
 name = "clap"
-version = "2.34.0"
+version = "3.1.8"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a0610544180c38b88101fecf2dd634b174a62eef6946f84dfc6a7127512b381c"
+checksum = "71c47df61d9e16dc010b55dba1952a57d8c215dbb533fd13cdd13369aac73b1c"
 dependencies = [
- "ansi_term",
  "atty",
  "bitflags",
+ "clap_derive",
+ "indexmap",
+ "lazy_static",
+ "os_str_bytes",
  "strsim",
+ "termcolor",
  "textwrap",
- "unicode-width",
- "vec_map",
 ]
 
+[[package]]
+name = "clap_derive"
+version = "3.1.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a3aab4734e083b809aaf5794e14e756d1c798d2c69c7f7de7a09a2f5214993c1"
+dependencies = [
+ "heck 0.4.0",
+ "proc-macro-error",
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "color-eyre"
+version = "0.6.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8ebf286c900a6d5867aeff75cfee3192857bb7f24b547d4f0df2ed6baa812c90"
+dependencies = [
+ "backtrace",
+ "color-spantrace",
+ "eyre",
+ "indenter",
+ "once_cell",
+ "owo-colors",
+ "tracing-error",
+]
+
+[[package]]
+name = "color-spantrace"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1ba75b3d9449ecdccb27ecbc479fdc0b87fa2dd43d2f8298f9bf0e59aacc8dce"
+dependencies = [
+ "once_cell",
+ "owo-colors",
+ "tracing-core",
+ "tracing-error",
+]
+
 [[package]]

@@ -704,6 +770,16 @@ dependencies = [
  "cfg-if",
 ]
 
+[[package]]
+name = "eyre"
+version = "0.6.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9289ed2c0440a6536e65119725cf91fc2c6b5e513bfd2e36e1134d7cca6ca12f"
+dependencies = [
+ "indenter",
+ "once_cell",
+]
+
 [[package]]
 name = "fake-simd"
 version = "0.1.2"

@@ -896,6 +972,12 @@ dependencies = [
  "wasi 0.10.2+wasi-snapshot-preview1",
 ]
 
+[[package]]
+name = "gimli"
+version = "0.26.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "78cc372d058dcf6d5ecd98510e7fbc9e5aec4d21de70f65fea8fecebcd881bd4"
+
 [[package]]
 name = "h2"
 version = "0.3.13"

@@ -952,6 +1034,12 @@ dependencies = [
  "unicode-segmentation",
 ]
 
+[[package]]
+name = "heck"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2540771e65fc8cb83cd6e8a237f70c319bd5c29f78ed1084ba5d50eeac86f7f9"
+
 [[package]]
 name = "hermit-abi"
 version = "0.1.19"

@@ -1077,6 +1165,12 @@ dependencies = [
  "unicode-normalization",
 ]
 
+[[package]]
+name = "indenter"
+version = "0.3.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ce23b50ad8242c51a442f3ff322d56b02f08852c77e4c0b4d3fd684abc89c683"
+
 [[package]]
 name = "indexmap"
 version = "1.8.1"

@@ -1359,6 +1453,15 @@ dependencies = [
  "libc",
 ]
 
+[[package]]
+name = "object"
+version = "0.27.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "67ac1d3f9a1d3616fd9a60c8d74296f22406a238b6a72f5cc1e6f314df4ffbf9"
+dependencies = [
+ "memchr",
+]
+
 [[package]]
 name = "once_cell"
 version = "1.10.0"

@@ -1426,6 +1529,21 @@ dependencies = [
  "hashbrown 0.12.0",
 ]
 
+[[package]]
+name = "os_str_bytes"
+version = "6.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8e22443d1643a904602595ba1cd8f7d896afe56d26712531c5ff73a15b2fbf64"
+dependencies = [
+ "memchr",
+]
+
+[[package]]
+name = "owo-colors"
+version = "3.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5e72e30578e0d0993c8ae20823dd9cff2bc5517d2f586a8aef462a581e8a03eb"
+
 [[package]]
 name = "parking_lot"
 version = "0.11.2"

@@ -1547,7 +1665,7 @@ dependencies = [
 
 [[package]]
 name = "pict-rs"
-version = "0.3.0"
+version = "0.4.0-alpha.1"
 dependencies = [
  "actix-form-data",
  "actix-rt",

@@ -1557,6 +1675,8 @@ dependencies = [
  "async-trait",
  "awc",
  "base64",
+ "clap",
+ "color-eyre",
  "config",
  "console-subscriber",
  "dashmap",

@@ -1574,12 +1694,12 @@ dependencies = [
  "sha2 0.10.2",
  "sled",
  "storage-path-generator",
- "structopt",
  "thiserror",
  "time",
  "tokio",
  "tokio-uring",
  "tokio-util 0.7.1",
+ "toml",
  "tracing",
  "tracing-actix-web",
  "tracing-awc",

@@ -1680,7 +1800,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "62941722fb675d463659e49c4f3fe1fe792ff24fe5bbaa9c08cd3b98a1c354f5"
 dependencies = [
  "bytes",
- "heck",
+ "heck 0.3.3",
  "itertools",
  "lazy_static",
  "log",

@@ -1923,6 +2043,12 @@ dependencies = [
  "url",
 ]
 
+[[package]]
+name = "rustc-demangle"
+version = "0.1.21"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7ef03e0a2b150c7a90d01faf6254c9c48a41e95fb2a8c2ac1c6f0d2b9aefc342"
+
 [[package]]
 name = "rustc_version"
 version = "0.4.0"

@@ -2163,33 +2289,9 @@ dependencies = [
 
 [[package]]
 name = "strsim"
-version = "0.8.0"
+version = "0.10.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8ea5119cdb4c55b55d432abb513a0429384878c15dde60cc77b1c99de1a95a6a"
-
-[[package]]
-name = "structopt"
-version = "0.3.26"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0c6b5c64445ba8094a6ab0c3cd2ad323e07171012d9c98b0b15651daf1787a10"
-dependencies = [
- "clap",
- "lazy_static",
- "structopt-derive",
-]
-
-[[package]]
-name = "structopt-derive"
-version = "0.4.18"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "dcb5ae327f9cc13b68763b5749770cb9e048a99bd9dfdfa58d0cf05d5f64afe0"
-dependencies = [
- "heck",
- "proc-macro-error",
- "proc-macro2",
- "quote",
- "syn",
-]
+checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623"
 
 [[package]]
 name = "subtle"

@@ -2223,14 +2325,20 @@ dependencies = [
 ]
 
 [[package]]
-name = "textwrap"
-version = "0.11.0"
+name = "termcolor"
+version = "1.1.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d326610f408c7a4eb6f51c37c330e496b08506c9457c9d34287ecc38809fb060"
+checksum = "bab24d30b911b2376f3a13cc2cd443142f0c81dda04c118693e35b3835757755"
 dependencies = [
- "unicode-width",
+ "winapi-util",
 ]
 
+[[package]]
+name = "textwrap"
+version = "0.15.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b1141d4d61095b28419e22cb0bbf02755f5e54e0526f97f1e3d1d160e60885fb"
+
 [[package]]
 name = "thiserror"
 version = "1.0.30"

@@ -2595,6 +2703,16 @@ dependencies = [
  "tracing-subscriber",
 ]
 
+[[package]]
+name = "tracing-serde"
+version = "0.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bc6b213177105856957181934e4920de57730fc69bf42c37ee5bb664d406d9e1"
+dependencies = [
+ "serde",
+ "tracing-core",
+]
+
 [[package]]
 name = "tracing-subscriber"
 version = "0.3.10"

@@ -2605,12 +2723,15 @@ dependencies = [
  "lazy_static",
  "matchers",
  "regex",
+ "serde",
+ "serde_json",
  "sharded-slab",
  "smallvec",
  "thread_local",
  "tracing",
  "tracing-core",
  "tracing-log",
+ "tracing-serde",
 ]
 
 [[package]]

@@ -2668,12 +2789,6 @@ version = "1.9.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "7e8820f5d777f6224dc4be3632222971ac30164d4a258d595640799554ebfd99"
 
-[[package]]
-name = "unicode-width"
-version = "0.1.9"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3ed742d4ea2bd1176e236172c8429aaf54486e7ac098db29ffe6529e0ce50973"
-
 [[package]]
 name = "unicode-xid"
 version = "0.2.2"

@@ -2715,12 +2830,6 @@ version = "0.1.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d"
 
-[[package]]
-name = "vec_map"
-version = "0.8.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f1bddf1187be692e79c5ffeab891132dfb0f236ed36a43c7ed39f1165ee20191"
-
 [[package]]
 name = "version_check"
 version = "0.9.4"

@@ -2871,6 +2980,15 @@ version = "0.4.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
 
+[[package]]
+name = "winapi-util"
+version = "0.1.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178"
+dependencies = [
+ "winapi",
+]
+
 [[package]]
 name = "winapi-x86_64-pc-windows-gnu"
 version = "0.4.0"
Cargo.toml: 20 lines changed

@@ -1,7 +1,7 @@
 [package]
 name = "pict-rs"
 description = "A simple image hosting service"
-version = "0.3.0"
+version = "0.4.0-alpha.1"
 authors = ["asonix <asonix@asonix.dog>"]
 license = "AGPL-3.0"
 readme = "README.md"

@@ -10,8 +10,7 @@ edition = "2021"
 
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
 [features]
-default = ["object-storage"]
-object-storage = ["reqwest", "rust-s3"]
+default = []
 io-uring = [
   "actix-rt/io-uring",
   "actix-server/io-uring",

@@ -28,6 +27,8 @@ anyhow = "1.0"
 async-trait = "0.1.51"
 awc = { version = "3.0.0", default-features = false, features = ["rustls"] }
 base64 = "0.13.0"
+clap = { version = "3.1.6", features = ["derive"] }
+color-eyre = "0.6"
 config = "0.13.0"
 console-subscriber = "0.1"
 dashmap = "5.1.0"

@@ -41,30 +42,33 @@ pin-project-lite = "0.2.7"
 reqwest = { version = "0.11.5", default-features = false, features = [
   "rustls-tls",
   "stream",
-], optional = true }
+] }
 rust-s3 = { version = "0.29.0", default-features = false, features = [
   "fail-on-err",
   "with-reqwest",
-], optional = true, git = "https://github.com/asonix/rust-s3", branch = "asonix/generic-client" }
+], git = "https://github.com/asonix/rust-s3", branch = "asonix/generic-client" }
 serde = { version = "1.0", features = ["derive"] }
 serde_json = "1.0"
 sha2 = "0.10.0"
 sled = { version = "0.34.7" }
 storage-path-generator = "0.1.0"
-structopt = "0.3.14"
 thiserror = "1.0"
 time = { version = "0.3.0", features = ["serde"] }
 tokio = { version = "1", features = ["full", "tracing"] }
 tokio-uring = { version = "0.3", optional = true, features = ["bytes"] }
 tokio-util = { version = "0.7", default-features = false, features = ["codec"] }
+toml = "0.5.8"
 tracing = "0.1.15"
 tracing-error = "0.2.0"
 tracing-futures = "0.2.4"
 tracing-log = "0.1.2"
 tracing-opentelemetry = "0.17"
 tracing-subscriber = { version = "0.3.0", features = [
+  "ansi",
   "env-filter",
   "fmt",
+  "json",
+  "registry",
   "tracing-log",
 ] }
 url = { version = "2.2", features = ["serde"] }

@@ -73,9 +77,9 @@ uuid = { version = "0.8.2", features = ["v4", "serde"] }
 [dependencies.tracing-actix-web]
 version = "0.5.0"
 default-features = false
-features = ["emit_event_on_error", "opentelemetry_0_17"]
+features = ["opentelemetry_0_17"]
 
 [dependencies.tracing-awc]
 version = "0.1.0"
 default-features = false
-features = ["emit_event_on_error", "opentelemetry_0_17"]
+features = ["opentelemetry_0_17"]
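With this change `object-storage` is no longer a Cargo feature: `reqwest` and `rust-s3` lose their `optional = true` markers, so the S3-backed store is always compiled in, while `io-uring` stays opt-in. As a minimal hypothetical sketch of what such a feature gate means at the source level (function names invented for illustration, not code from this commit):

```rust
// Hypothetical sketch of a Cargo feature gate; not code from this commit.
// With `default = []`, this path is compiled only when building with
// `--features io-uring`, which also pulls in the optional tokio-uring dep.
#[cfg(feature = "io-uring")]
pub fn file_backend() -> &'static str {
    "tokio-uring"
}

// Fallback compiled when the feature is absent.
#[cfg(not(feature = "io-uring"))]
pub fn file_backend() -> &'static str {
    "tokio"
}
```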
README.md: 171 lines changed

@@ -9,109 +9,142 @@ _a simple image hosting service_
 ## Usage
 ### Running
 ```
-pict-rs 0.3.0
+pict-rs 0.4.0-alpha.1
+asonix <asonix@asonix.dog>
+A simple image hosting service
 
 USAGE:
-    pict-rs [FLAGS] [OPTIONS] [SUBCOMMAND]
-
-FLAGS:
-    -h, --help                     Prints help information
-    -s, --skip-validate-imports    Whether to skip validating images uploaded via the internal import API
-    -V, --version                  Prints version information
+    pict-rs [OPTIONS] <SUBCOMMAND>
 
 OPTIONS:
-    -a, --addr <addr>                  The address and port the server binds to.
-        --api-key <api-key>
-            An optional string to be checked on requests to privileged endpoints
-    -c, --config-file <config-file>    Path to the pict-rs configuration file
-        --console-buffer-capacity <console-buffer-capacity>
-            Specify the number of events the console subscriber is allowed to buffer
-    -f, --filters <filters>...
-            An optional list of filters to permit, supports 'identity', 'thumbnail', 'resize', 'crop', and 'blur'
-    -i, --image-format <image-format>
-            An optional image format to convert all uploaded files into, supports 'jpg', 'png', and 'webp'
-    -m, --max-file-size <max-file-size>
-            Specify the maximum allowed uploaded file size (in Megabytes)
-        --max-image-area <max-image-area>        Specify the maximum area in pixels allowed in an image
-        --max-image-height <max-image-height>    Specify the maximum width in pixels allowed on an image
-        --max-image-width <max-image-width>      Specify the maximum width in pixels allowed on an image
-        --migrate-file <migrate-file>            Path to a file defining a store migration
-    -o, --opentelemetry-url <opentelemetry-url>
-            Enable OpenTelemetry Tracing exports to the given OpenTelemetry collector
-    -p, --path <path>                            The path to the data directory, e.g. data/
-        --sled-cache-capacity <sled-cache-capacity>
-            Specify the number of bytes sled is allowed to use for it's cache
+    -c, --config-file <CONFIG_FILE>
+            Path to the pict-rs configuration file
+        --console-address <CONSOLE_ADDRESS>
+            Address and port to expose tokio-console metrics
+        --console-buffer-capacity <CONSOLE_BUFFER_CAPACITY>
+            Capacity of the console-subscriber Event Buffer
+    -h, --help
+            Print help information
+        --log-format <LOG_FORMAT>
+            Format of logs printed to stdout
+        --log-targets <LOG_TARGETS>
+            Log levels to print to stdout, respects RUST_LOG formatting
+        --old-db-path <OLD_DB_PATH>
+            Path to the old pict-rs sled database
+        --opentelemetry-service-name <OPENTELEMETRY_SERVICE_NAME>
+            Service Name to use for OpenTelemetry
+        --opentelemetry-targets <OPENTELEMETRY_TARGETS>
+            Log levels to use for OpenTelemetry, respects RUST_LOG formatting
+        --opentelemetry-url <OPENTELEMETRY_URL>
+            URL to send OpenTelemetry metrics
+        --save-to <SAVE_TO>
+            File to save the current configuration for reproducible runs
+    -V, --version
+            Print version information
+
+SUBCOMMANDS:
+    filesystem        Migrate from the provided filesystem storage
+    help              Print this message or the help of the given subcommand(s)
+    object-storage    Migrate from the provided object storage
+    run               Runs the pict-rs web server
+```
+
+```
+pict-rs-run
+Runs the pict-rs web server
+
+USAGE:
+    pict-rs run [OPTIONS] [SUBCOMMAND]
+
+OPTIONS:
+    -a, --address <ADDRESS>
+            The address and port to bind the pict-rs web server
+        --api-key <API_KEY>
+            The API KEY required to access restricted routes
+    -h, --help
+            Print help information
+        --media-enable-silent-video <MEDIA_ENABLE_SILENT_VIDEO>
+            Whether to enable GIF and silent MP4 uploads. Full videos are unsupported
+        --media-filters <MEDIA_FILTERS>
+            Which media filters should be enabled on the `process` endpoint
+        --media-format <MEDIA_FORMAT>
+            Enforce uploaded media is transcoded to the provided format
+        --media-max-area <MEDIA_MAX_AREA>
+            The maximum area, in pixels, for uploaded media
+        --media-max-file-size <MEDIA_MAX_FILE_SIZE>
+            The maximum size, in megabytes, for uploaded media
+        --media-max-height <MEDIA_MAX_HEIGHT>
+            The maximum height, in pixels, for uploaded media
+        --media-max-width <MEDIA_MAX_WIDTH>
+            The maximum width, in pixels, for uploaded media
+        --media-skip-validate-imports <MEDIA_SKIP_VALIDATE_IMPORTS>
+            Whether to validate media on the "import" endpoint
+        --worker-id <WORKER_ID>
 
 SUBCOMMANDS:
-    file-store
-    help          Prints this message or the help of the given subcommand(s)
-    s3-store
+    filesystem        Run pict-rs with filesystem storage
+    help              Print this message or the help of the given subcommand(s)
+    object-storage    Run pict-rs with object storage
 ```
 
+Try running `help` commands for more runtime configuration options
 ```
-pict-rs-file-store 0.3.0
-
-USAGE:
-    pict-rs file-store [OPTIONS]
-
-FLAGS:
-    -h, --help       Prints help information
-    -V, --version    Prints version information
-
-OPTIONS:
-        --path <path>    Path in which pict-rs will create it's 'files' directory
-```
-
-```
-pict-rs-s3-store 0.3.0
-
-USAGE:
-    pict-rs s3-store [OPTIONS] --bucket-name <bucket-name> --region <region>
-
-FLAGS:
-    -h, --help       Prints help information
-    -V, --version    Prints version information
-
-OPTIONS:
-        --access-key <access-key>
-        --bucket-name <bucket-name>          Name of the bucket in which pict-rs will store images
-        --region <region>                    Region in which the bucket exists, can be an http endpoint
-        --secret-key <secret-key>
-        --security-token <security-token>
-        --session-token <session-token>
+$ pict-rs run filesystem -h
+$ pict-rs run object-storage -h
+$ pict-rs run filesystem sled -h
+$ pict-rs run object-storage sled -h
 ```
 
-See [`pict-rs.toml`](https://git.asonix.dog/asonix/pict-rs/src/branch/main/pict-rs.toml) and
-[`migrate.toml`](https://git.asonix.dog/asonix/pict-rs/src/branch/main/migrate.toml) for more
+See [`pict-rs.toml`](https://git.asonix.dog/asonix/pict-rs/src/branch/main/pict-rs.toml) for more
 configuration
 
 #### Example:
 Running on all interfaces, port 8080, storing data in /opt/data
 ```
-$ ./pict-rs -a 0.0.0.0:8080 -p /opt/data
+$ ./pict-rs -a 0.0.0.0:8080 -p /opt/data run
 ```
 Running locally, port 9000, storing data in data/, and converting all uploads to PNG
 ```
-$ ./pict-rs -a 127.0.0.1:9000 -p data/ -f png
+$ ./pict-rs -a 127.0.0.1:9000 -p data/ -f png run
 ```
 Running locally, port 8080, storing data in data/, and only allowing the `thumbnail` and `identity` filters
 ```
-$ ./pict-rs -a 127.0.0.1:8080 -p data/ -w thumbnail identity
+$ ./pict-rs -a 127.0.0.1:8080 -p data/ -w thumbnail identity run
 ```
 Running from a configuration file
 ```
-$ ./pict-rs -c ./pict-rs.toml
+$ ./pict-rs -c ./pict-rs.toml run
 ```
-Migrating between storage backends
+Migrating to object storage from filesystem storage (both storages must be configured in pict-rs.toml)
 ```
-$ ./pict-rs -p ./data --migrate-file ./migrate.toml
+$ ./pict-rs -c ./pict-rs.toml --store filesystem migrate-store object-storage
+```
+Dumping commandline flags to a toml file
+```
+$ ./pict-rs -p data/ --store object-storage --object-storage-bucket-name pict-rs --object-storage-region us-east-1 dump pict-rs.toml
 ```
 
 #### Docker
client-examples/bash/upload.sh (new executable file): 41 lines

@@ -0,0 +1,41 @@
#!/usr/bin/env bash

set -xe

upload_ids=$(
  curl \
    -F "images[]=@../cat.jpg" \
    -F "images[]=@../earth.gif" \
    -F "images[]=@../scene.webp" \
    -F "images[]=@../test.png" \
    -F "images[]=@../earth.gif" \
    -F "images[]=@../test.png" \
    -F "images[]=@../cat.jpg" \
    -F "images[]=@../scene.webp" \
    'http://localhost:8080/image/backgrounded' | \
    jq '.uploads[].upload_id' | \
    sed 's/"//g'
)

for upload in $(echo $upload_ids)
do
  echo "Processing for $upload"

  json=$(curl "http://localhost:8080/image/backgrounded/claim?upload_id=$upload")
  delete_token=$(echo $json | jq '.files[0].delete_token' | sed 's/"//g')
  filename=$(echo $json | jq '.files[0].file' | sed 's/"//g')

  details=$(curl "http://localhost:8080/image/details/original/$filename")
  mime_type=$(echo $details | jq '.content_type' | sed 's/"//g')

  echo "Original mime: $mime_type"

  curl "http://localhost:8080/image/process_backgrounded.webp?src=$filename&resize=200"
  sleep 1
  details=$(curl "http://localhost:8080/image/details/process.webp?src=$filename&resize=200")
  mime_type=$(echo $details | jq '.content_type' | sed 's/"//g')

  echo "Processed mime: $mime_type"

  curl "http://localhost:8080/image/delete/$delete_token/$filename"
done
defaults.toml (new file): 40 lines

@@ -0,0 +1,40 @@
[server]
address = '0.0.0.0:8080'
worker_id = 'pict-rs-1'

[tracing.logging]
format = 'normal'
targets = 'warn,tracing_actix_web=info,actix_server=info,actix_web=info'

[tracing.console]
buffer_capacity = 102400

[tracing.opentelemetry]
service_name = 'pict-rs'
targets = 'info'

[old_db]
path = '/mnt'

[media]
max_width = 10000
max_height = 10000
max_area = 40000000
max_file_size = 40
enable_silent_video = true
filters = [
    'blur',
    'crop',
    'identity',
    'resize',
    'thumbnail',
]
skip_validate_imports = false

[repo]
type = 'sled'
path = '/mnt/sled-repo'
cache_capacity = 67108864

[store]
type = 'filesystem'
path = '/mnt/files'
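These defaults feed the layered configuration introduced in this commit: the `config` crate loads them first, then lets a user file and environment variables override them (see the rewritten `src/config.rs` further down). A minimal, self-contained sketch of loading such a file, with a hypothetical struct mirroring only the `[media]` table (the real types live elsewhere in this commit):

```rust
use serde::Deserialize;

// Hypothetical mirror of the [media] table above, for illustration only.
#[derive(Debug, Deserialize)]
struct Media {
    max_width: usize,
    max_height: usize,
    max_area: usize,
    max_file_size: usize,
    enable_silent_video: bool,
    filters: Vec<String>,
    skip_validate_imports: bool,
}

#[derive(Debug, Deserialize)]
struct Conf {
    media: Media,
}

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Same builder pattern configure() uses below: defaults first, then
    // any later sources would override them.
    let conf: Conf = config::Config::builder()
        .add_source(config::File::with_name("defaults.toml"))
        .build()?
        .try_deserialize()?;

    println!("{} filters enabled", conf.media.filters.len());
    Ok(())
}
```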
dev.toml (new file): 40 lines

@@ -0,0 +1,40 @@
[server]
address = '0.0.0.0:8080'
worker_id = 'pict-rs-1'

[tracing.logging]
format = 'normal'
targets = 'warn,tracing_actix_web=info,actix_server=info,actix_web=info'

[tracing.console]
buffer_capacity = 102400

[tracing.opentelemetry]
service_name = 'pict-rs'
targets = 'info'

[old_db]
path = 'data/'

[media]
max_width = 10000
max_height = 10000
max_area = 40000000
max_file_size = 40
enable_silent_video = true
filters = [
    'blur',
    'crop',
    'identity',
    'resize',
    'thumbnail',
]
skip_validate_imports = false

[repo]
type = 'sled'
path = 'data/sled-repo'
cache_capacity = 67108864

[store]
type = 'filesystem'
path = 'data/files'
@@ -1,25 +1,17 @@
-FROM archlinux:latest
+FROM alpine:edge
 
 ARG UID=1000
 ARG GID=1000
 
 RUN \
-  pacman -Syu --noconfirm \
-    perl-image-exiftool \
-    imagemagick \
-    ffmpeg && \
-  groupadd -g 1000 app && \
-  useradd -m \
-    -d /opt/app \
-    -u $UID \
-    -g $GID \
-    app
+  apk add exiftool imagemagick ffmpeg && \
+  addgroup -g $GID app && \
+  adduser -h /opt/app -g "" -G app -u $UID -D app && \
+  chown -R app:app /mnt
 
 COPY root/ /
 COPY ./pict-rs.toml /etc/pict-rs.toml
 
-ENV PATH=$PATH:/usr/bin/vendor_perl
-
 WORKDIR /opt/app
 USER app
@@ -1,8 +1,42 @@
+[server]
+address = '0.0.0.0:8080'
+worker_id = 'pict-rs-1'
+
+[tracing.logging]
+format = 'normal'
+targets = 'warn,tracing_actix_web=info,actix_server=info,actix_web=info'
+
+[tracing.console]
+buffer_capacity = 102400
+
+[tracing.opentelemetry]
+service_name = 'pict-rs'
+targets = 'info'
+
+[old_db]
 path = '/mnt'
-addr = '0.0.0.0:8080'
+
+[media]
+max_width = 10000
+max_height = 10000
+max_area = 40000000
+max_file_size = 40
+enable_silent_video = true
+filters = [
+    'blur',
+    'crop',
+    'identity',
+    'resize',
+    'thumbnail',
+]
+skip_validate_imports = false
+
+[repo]
+type = 'sled'
+path = '/mnt/sled-repo'
+cache_capacity = 67108864
 
 [store]
-type = 's3_store'
+type = 'object_storage'
 bucket_name = 'pict-rs'
 region = 'http://minio:9000'
 access_key = 'Q7Z3AY3JO01N27UNH5IR'
pict-rs.toml: 104 lines changed

@@ -43,13 +43,6 @@ max_image_area = 40_000_000 # in Pixels
 # default: false
 skip_validate_imports = false
 
-## Optional: set sled's cache capacity to a given number of bytes
-# environment variable: PICTRS_SLED_CACHE_CAPACITY
-# default: 67_108_864 (1024 * 1024 * 64) e.g. 64MB
-#
-# Increasing this value can improve performance by keeping more of the database in RAM
-sled_cache_capacity = 67_108_864 # in bytes
-
 ## Optional: enable tokio-console and set the event buffer size
 # environment variable: PICTRS_CONSOLE_BUFFER_CAPACITY
 # default: empty

@@ -95,58 +88,65 @@ api_key = 'API_KEY'
 # Not specifying opentelemetry_url means no traces will be exported
 opentelemetry_url = 'http://localhost:4317/'
 
-## Optional: store definition
-# default store: file_store
-#
-# Not specifying a store means a file_store will be used with the top-level pict-rs' path
-[store]
-type = "file_store"
-
-## Example file store
-# [store]
-#
-# # environment variable: PICTRS_STORE__TYPE
-# type = 'file_store'
-#
-# # Optional: file path
-# # environment variable: PICTRS_STORE__PATH
-# # default: empty
-# #
-# # Not specifying path means pict-rs' top-level `path` config is used
-# path = './data'
-
-## Example s3 store
-# [store]
-#
-# # environment variable: PICTRS_STORE__TYPE
-# type = 's3_store'
-#
+## Optional: the data repository to use
+# environment variable: PICTRS_REPO
+# default: 'sled'
+# available options: 'sled'
+repo = 'sled'
+
+## Optional: the file storage to use
+# environment variable: PICTRS_STORE
+# default: 'filesystem'
+# available options: 'filesystem', 'object_storage'
+store = 'filesystem'
+
+## Optional: Sled store configuration definition
+[sled]
+## Optional: set sled's cache capacity to a given number of bytes
+# environment variable: PICTRS_SLED__SLED_CACHE_CAPACITY
+# default: 67_108_864 (1024 * 1024 * 64) e.g. 64MB
+#
+# Increasing this value can improve performance by keeping more of the database in RAM
+sled_cache_capacity = 67_108_864 # in bytes
+
+## Optional: Filesystem storage configuration
+[filesystem_storage]
+## Optional: set the path for pict-rs filesystem file storage
+# environment variable: PICTRS_FILESYSTEM_STORAGE__FILESYSTEM_STORAGE_PATH
+# default '${path}/files'
+filesystem_storage_path = 'data/files'
+
+## Optional: Object Storage configuration
+[object_storage]
 ## Required: bucket name
-# # environment variable: PICTRS_STORE__BUCKET_NAME
-# bucket_name = 'rust_s3'
-#
+# environment variable: PICTRS_OBJECT_STORAGE__OBJECT_STORE_BUCKET_NAME
+object_store_bucket_name = 'pict-rs'
+
 ## Required: bucket region
-# # environment variable: PICTRS_STORE__REGION
-# #
-# # can also be endpoint of local s3 store, e.g. 'http://minio:9000'
-# region = 'eu-central-1'
-#
+# environment variable: PICTRS_OBJECT_STORAGE__OBJECT_STORE_REGION
+#
+# can also be endpoint of local s3 store, e.g. 'http://minio:9000'
+object_store_region = 'eu-central-1'
+
 ## Optional: bucket access key
-# # environment variable: PICTRS_STORE__ACCESS_KEY
-# # default: empty
-# access_key = 'ACCESS_KEY'
-#
+# environment variable: PICTRS_OBJECT_STORAGE__OBJECT_STORE_ACCESS_KEY
+# default: empty
+object_store_access_key = '09ODZ3BGBISV4U92JLIM'
+
 ## Optional: bucket secret key
-# # environment variable: PICTRS_STORE__SECRET_KEY
-# # default: empty
-# secret_key = 'SECRET_KEY'
-#
+# environment variable: PICTRS_OBJECT_STORAGE__OBJECT_STORE_SECRET_KEY
+# default: empty
+object_store_secret_key = 'j35YE9RrxhBP0dpiD5mmdXRXvPkEJR4k6zK12q3o'
+
 ## Optional: bucket security token
-# # environment variable: PICTRS_STORE__SECURITY_TOKEN
-# # default: empty
-# security_token = 'SECURITY_TOKEN'
-#
+# environment variable: PICTRS_OBJECT_STORAGE__OBJECT_STORE_SECURITY_TOKEN
+# default: empty
+object_store_security_token = 'SECURITY_TOKEN'
+
 ## Optional: bucket session token
-# # environment variable: PICTRS_STORE__SESSION_TOKEN
-# # default: empty
-# session_token = 'SESSION_TOKEN'
+# environment variable: PICTRS_OBJECT_STORAGE__OBJECT_STORE_SESSION_TOKEN
+# default: empty
+object_store_session_token = 'SESSION_TOKEN'
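The environment variable names in the comments above follow the `config` crate's prefix/separator convention, which the new `configure()` function (in src/config.rs below) wires up as `Environment::with_prefix("PICTRS").separator("__")`: one `_` joins the prefix, and `__` descends into a table. A small hypothetical demonstration of that mapping:

```rust
use serde::Deserialize;

// Hypothetical structs mirroring one key from [object_storage] above.
#[derive(Debug, Deserialize)]
struct ObjectStorage {
    object_store_bucket_name: String,
}

#[derive(Debug, Deserialize)]
struct Conf {
    object_storage: ObjectStorage,
}

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // PICTRS_OBJECT_STORAGE__OBJECT_STORE_BUCKET_NAME maps to
    // [object_storage] object_store_bucket_name, as documented above.
    std::env::set_var("PICTRS_OBJECT_STORAGE__OBJECT_STORE_BUCKET_NAME", "pict-rs");

    let conf: Conf = config::Config::builder()
        .add_source(config::Environment::with_prefix("PICTRS").separator("__"))
        .build()?
        .try_deserialize()?;

    assert_eq!(conf.object_storage.object_store_bucket_name, "pict-rs");
    Ok(())
}
```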
src/backgrounded.rs (new file): 92 lines

@@ -0,0 +1,92 @@
use crate::{
    error::Error,
    repo::{FullRepo, UploadId, UploadRepo},
    store::Store,
};
use actix_web::web::Bytes;
use futures_util::{Stream, TryStreamExt};
use tokio_util::io::StreamReader;

pub(crate) struct Backgrounded<R, S>
where
    R: FullRepo + 'static,
    S: Store,
{
    repo: R,
    identifier: Option<S::Identifier>,
    upload_id: Option<UploadId>,
}

impl<R, S> Backgrounded<R, S>
where
    R: FullRepo + 'static,
    S: Store,
{
    pub(crate) fn disarm(mut self) {
        let _ = self.identifier.take();
        let _ = self.upload_id.take();
    }

    pub(crate) fn upload_id(&self) -> Option<UploadId> {
        self.upload_id
    }

    pub(crate) fn identifier(&self) -> Option<&S::Identifier> {
        self.identifier.as_ref()
    }

    pub(crate) async fn proxy<P>(repo: R, store: S, stream: P) -> Result<Self, Error>
    where
        P: Stream<Item = Result<Bytes, Error>>,
    {
        let mut this = Self {
            repo,
            identifier: None,
            upload_id: Some(UploadId::generate()),
        };

        this.do_proxy(store, stream).await?;

        Ok(this)
    }

    async fn do_proxy<P>(&mut self, store: S, stream: P) -> Result<(), Error>
    where
        P: Stream<Item = Result<Bytes, Error>>,
    {
        UploadRepo::create(&self.repo, self.upload_id.expect("Upload id exists")).await?;

        let stream = stream.map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, e));
        let mut reader = StreamReader::new(Box::pin(stream));

        let identifier = store.save_async_read(&mut reader).await?;

        self.identifier = Some(identifier);

        Ok(())
    }
}

impl<R, S> Drop for Backgrounded<R, S>
where
    R: FullRepo + 'static,
    S: Store,
{
    fn drop(&mut self) {
        if let Some(identifier) = self.identifier.take() {
            let repo = self.repo.clone();

            actix_rt::spawn(async move {
                let _ = crate::queue::cleanup_identifier(&repo, identifier).await;
            });
        }

        if let Some(upload_id) = self.upload_id {
            let repo = self.repo.clone();

            actix_rt::spawn(async move {
                let _ = repo.claim(upload_id).await;
            });
        }
    }
}
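`Backgrounded` acts as a drop guard: `proxy` stores the incoming bytes and records an upload id, and if the value is dropped without calling `disarm`, the `Drop` impl spawns tasks that clean up the stored identifier and claim the upload id, so a failed request leaves no orphaned data. A hypothetical handler-side sketch of the intended call pattern, using only the API shown above (the handler shape itself is invented, not code from this commit):

```rust
// Hypothetical usage sketch for Backgrounded, based only on the API above.
use crate::{backgrounded::Backgrounded, error::Error, repo::FullRepo, store::Store};
use actix_web::web::Bytes;
use futures_util::Stream;

async fn handle_backgrounded_upload<R, S, P>(repo: R, store: S, body: P) -> Result<(), Error>
where
    R: FullRepo + 'static,
    S: Store,
    P: Stream<Item = Result<Bytes, Error>>,
{
    // Save the raw upload and register an upload id. If anything after this
    // point fails, dropping `backgrounded` spawns cleanup for both.
    let backgrounded = Backgrounded::proxy(repo, store, body).await?;

    let _upload_id = backgrounded.upload_id().expect("generated in proxy()");
    // ... enqueue background validation/processing for the upload id ...

    // Success path: defuse the guard so the stored bytes are kept.
    backgrounded.disarm();

    Ok(())
}
```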
@@ -1,6 +1,6 @@
 use crate::{
+    details::Details,
     error::{Error, UploadError},
-    upload_manager::Details,
 };
 use actix_web::web;
 use dashmap::{mapref::entry::Entry, DashMap};

@@ -16,13 +16,15 @@ use tracing::Span;
 
 type OutcomeSender = Sender<(Details, web::Bytes)>;
 
-type ProcessMap = DashMap<PathBuf, Vec<OutcomeSender>>;
+type ProcessMapKey = (Vec<u8>, PathBuf);
+
+type ProcessMap = DashMap<ProcessMapKey, Vec<OutcomeSender>>;
 
 static PROCESS_MAP: Lazy<ProcessMap> = Lazy::new(DashMap::new);
 
 struct CancelToken {
     span: Span,
-    path: PathBuf,
+    key: ProcessMapKey,
     receiver: Option<Receiver<(Details, web::Bytes)>>,
 }

@@ -39,14 +41,17 @@ impl<F> CancelSafeProcessor<F>
 where
     F: Future<Output = Result<(Details, web::Bytes), Error>>,
 {
-    pub(super) fn new(path: PathBuf, fut: F) -> Self {
-        let entry = PROCESS_MAP.entry(path.clone());
+    pub(super) fn new(hash: &[u8], path: PathBuf, fut: F) -> Self {
+        let key = (hash.to_vec(), path.clone());
+
+        let entry = PROCESS_MAP.entry(key.clone());
 
         let (receiver, span) = match entry {
             Entry::Vacant(vacant) => {
                 vacant.insert(Vec::new());
                 let span = tracing::info_span!(
                     "Processing image",
+                    hash = &tracing::field::debug(&hash),
                     path = &tracing::field::debug(&path),
                     completed = &tracing::field::Empty,
                 );

@@ -57,6 +62,7 @@ where
                 occupied.get_mut().push(tx);
                 let span = tracing::info_span!(
                     "Waiting for processed image",
+                    hash = &tracing::field::debug(&hash),
                     path = &tracing::field::debug(&path),
                 );
                 (Some(rx), span)

@@ -66,7 +72,7 @@ where
         CancelSafeProcessor {
             cancel_token: CancelToken {
                 span,
-                path,
+                key,
                 receiver,
             },
             fut,

@@ -85,7 +91,7 @@ where
 
         let span = &this.cancel_token.span;
         let receiver = &mut this.cancel_token.receiver;
-        let path = &this.cancel_token.path;
+        let key = &this.cancel_token.key;
         let fut = this.fut;
 
         span.in_scope(|| {

@@ -95,7 +101,7 @@ where
             .map(|res| res.map_err(|_| UploadError::Canceled.into()))
         } else {
             fut.poll(cx).map(|res| {
-                let opt = PROCESS_MAP.remove(path);
+                let opt = PROCESS_MAP.remove(key);
                 res.map(|tup| {
                     if let Some((_, vec)) = opt {
                         for sender in vec {

@@ -113,7 +119,7 @@ where
 impl Drop for CancelToken {
     fn drop(&mut self) {
         if self.receiver.is_none() {
-            let completed = PROCESS_MAP.remove(&self.path).is_none();
+            let completed = PROCESS_MAP.remove(&self.key).is_none();
             self.span.record("completed", &completed);
         }
     }
 }
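The change above widens the in-flight deduplication key from the output path alone to `(hash, path)`: two uploads with different content hashes that happen to target the same processed path no longer share one computation, while duplicate requests for the same source and path still attach to the existing job. A toy, self-contained illustration of the keying idea (hypothetical values; the real map holds result senders, not strings):

```rust
use std::collections::HashMap;
use std::path::PathBuf;

// Toy model of the keying change: before, the map was keyed by PathBuf only;
// now it is keyed by (source hash, output path).
type ProcessMapKey = (Vec<u8>, PathBuf);

fn main() {
    let mut in_flight: HashMap<ProcessMapKey, &'static str> = HashMap::new();

    let path = PathBuf::from("thumbnail.webp");

    // Two different source images (different hashes) producing the same
    // relative output path now occupy distinct entries.
    in_flight.insert((b"hash-of-cat.jpg".to_vec(), path.clone()), "job 1");
    in_flight.insert((b"hash-of-dog.jpg".to_vec(), path.clone()), "job 2");

    // Keyed by PathBuf alone, the second insert would have collided.
    assert_eq!(in_flight.len(), 2);
}
```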
388
src/config.rs
388
src/config.rs
|
@ -1,369 +1,45 @@
|
||||||
use std::{collections::HashSet, net::SocketAddr, path::PathBuf};
|
use clap::Parser;
|
||||||
use structopt::StructOpt;
|
|
||||||
use url::Url;
|
|
||||||
|
|
||||||
use crate::magick::ValidInputType;
|
mod commandline;
|
||||||
|
mod defaults;
|
||||||
|
mod file;
|
||||||
|
mod primitives;
|
||||||
|
|
||||||
#[derive(Clone, Debug, StructOpt)]
|
use commandline::{Args, Output};
|
||||||
pub(crate) struct Args {
|
use config::Config;
|
||||||
#[structopt(short, long, help = "Path to the pict-rs configuration file")]
|
use defaults::Defaults;
|
||||||
config_file: Option<PathBuf>,
|
|
||||||
|
|
||||||
#[structopt(long, help = "Path to a file defining a store migration")]
|
pub(crate) use commandline::Operation;
|
||||||
migrate_file: Option<PathBuf>,
|
pub(crate) use file::{ConfigFile as Configuration, OpenTelemetry, Repo, Sled, Tracing};
|
||||||
|
pub(crate) use primitives::{Filesystem, ImageFormat, LogFormat, ObjectStorage, Store};
|
||||||
|
|
||||||
#[structopt(flatten)]
|
pub(crate) fn configure() -> color_eyre::Result<(Configuration, Operation)> {
|
||||||
overrides: Overrides,
|
let Output {
|
||||||
}
|
config_format,
|
||||||
|
operation,
|
||||||
|
save_to,
|
||||||
|
config_file,
|
||||||
|
} = Args::parse().into_output();
|
||||||
|
|
||||||
fn is_false(b: &bool) -> bool {
|
let config = Config::builder().add_source(config::Config::try_from(&Defaults::default())?);
|
||||||
!b
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Clone, Debug, serde::Serialize, structopt::StructOpt)]
|
let config = if let Some(config_file) = config_file {
|
||||||
#[serde(rename_all = "snake_case")]
|
config.add_source(config::File::from(config_file))
|
||||||
pub(crate) struct Overrides {
|
} else {
|
||||||
#[structopt(
|
config
|
||||||
short,
|
|
||||||
long,
|
|
||||||
help = "Whether to skip validating images uploaded via the internal import API"
|
|
||||||
)]
|
|
||||||
#[serde(skip_serializing_if = "is_false")]
|
|
||||||
skip_validate_imports: bool,
|
|
||||||
|
|
||||||
#[structopt(short, long, help = "The address and port the server binds to.")]
|
|
||||||
#[serde(skip_serializing_if = "Option::is_none")]
|
|
||||||
addr: Option<SocketAddr>,
|
|
||||||
|
|
||||||
#[structopt(short, long, help = "The path to the data directory, e.g. data/")]
|
|
||||||
#[serde(skip_serializing_if = "Option::is_none")]
|
|
||||||
path: Option<PathBuf>,
|
|
||||||
|
|
||||||
#[structopt(
|
|
||||||
short,
|
|
||||||
long,
|
|
||||||
help = "An optional image format to convert all uploaded files into, supports 'jpg', 'png', and 'webp'"
|
|
||||||
)]
|
|
||||||
#[serde(skip_serializing_if = "Option::is_none")]
|
|
||||||
image_format: Option<Format>,
|
|
||||||
|
|
||||||
#[structopt(
|
|
||||||
short,
|
|
||||||
long,
|
|
||||||
help = "An optional list of filters to permit, supports 'identity', 'thumbnail', 'resize', 'crop', and 'blur'"
|
|
||||||
)]
|
|
||||||
#[serde(skip_serializing_if = "Option::is_none")]
|
|
||||||
filters: Option<Vec<String>>,
|
|
||||||
|
|
||||||
#[structopt(
|
|
||||||
short,
|
|
||||||
long,
|
|
||||||
help = "Specify the maximum allowed uploaded file size (in Megabytes)"
|
|
||||||
)]
|
|
||||||
#[serde(skip_serializing_if = "Option::is_none")]
|
|
||||||
max_file_size: Option<usize>,
|
|
||||||
|
|
||||||
#[structopt(long, help = "Specify the maximum width in pixels allowed on an image")]
|
|
||||||
#[serde(skip_serializing_if = "Option::is_none")]
|
|
||||||
max_image_width: Option<usize>,
|
|
||||||
|
|
||||||
#[structopt(long, help = "Specify the maximum width in pixels allowed on an image")]
|
|
||||||
#[serde(skip_serializing_if = "Option::is_none")]
|
|
||||||
max_image_height: Option<usize>,
|
|
||||||
|
|
||||||
#[structopt(long, help = "Specify the maximum area in pixels allowed in an image")]
|
|
||||||
#[serde(skip_serializing_if = "Option::is_none")]
|
|
||||||
max_image_area: Option<usize>,
|
|
||||||
|
|
||||||
#[structopt(
|
|
||||||
long,
|
|
||||||
help = "Specify the number of bytes sled is allowed to use for it's cache"
|
|
||||||
)]
|
|
||||||
#[serde(skip_serializing_if = "Option::is_none")]
|
|
||||||
sled_cache_capacity: Option<u64>,
|
|
||||||
|
|
||||||
#[structopt(
|
|
||||||
long,
|
|
||||||
help = "Specify the number of events the console subscriber is allowed to buffer"
|
|
||||||
)]
|
|
||||||
#[serde(skip_serializing_if = "Option::is_none")]
|
|
||||||
console_buffer_capacity: Option<usize>,
|
|
||||||
|
|
||||||
#[structopt(
|
|
||||||
long,
|
|
||||||
help = "An optional string to be checked on requests to privileged endpoints"
|
|
||||||
)]
|
|
||||||
#[serde(skip_serializing_if = "Option::is_none")]
|
|
||||||
api_key: Option<String>,
|
|
||||||
|
|
||||||
#[structopt(
|
|
||||||
short,
|
|
||||||
long,
|
|
||||||
help = "Enable OpenTelemetry Tracing exports to the given OpenTelemetry collector"
|
|
||||||
)]
|
|
||||||
#[serde(skip_serializing_if = "Option::is_none")]
|
|
||||||
opentelemetry_url: Option<Url>,
|
|
||||||
|
|
||||||
#[structopt(subcommand)]
|
|
||||||
#[serde(skip_serializing_if = "Option::is_none")]
|
|
||||||
store: Option<Store>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Overrides {
|
|
||||||
fn is_default(&self) -> bool {
|
|
||||||
!self.skip_validate_imports
|
|
||||||
&& self.addr.is_none()
|
|
||||||
&& self.path.is_none()
|
|
||||||
&& self.image_format.is_none()
|
|
||||||
&& self.filters.is_none()
|
|
||||||
&& self.max_file_size.is_none()
|
|
||||||
&& self.max_image_width.is_none()
|
|
||||||
&& self.max_image_height.is_none()
|
|
||||||
&& self.max_image_area.is_none()
|
|
||||||
&& self.sled_cache_capacity.is_none()
|
|
||||||
&& self.console_buffer_capacity.is_none()
|
|
||||||
&& self.api_key.is_none()
|
|
||||||
&& self.opentelemetry_url.is_none()
|
|
||||||
&& self.store.is_none()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Clone, Debug, serde::Deserialize, serde::Serialize)]
|
|
||||||
#[serde(rename_all = "snake_case")]
|
|
||||||
pub(crate) struct Migrate {
|
|
||||||
from: Store,
|
|
||||||
to: Store,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Migrate {
|
|
||||||
pub(crate) fn from(&self) -> &Store {
|
|
||||||
&self.from
|
|
||||||
}
|
|
||||||
|
|
||||||
pub(crate) fn to(&self) -> &Store {
|
|
||||||
&self.to
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Clone, Debug, serde::Deserialize, serde::Serialize, structopt::StructOpt)]
|
|
||||||
#[serde(rename_all = "snake_case")]
|
|
||||||
#[serde(tag = "type")]
|
|
||||||
pub(crate) enum Store {
|
|
||||||
FileStore {
|
|
||||||
// defaults to {config.path}
|
|
||||||
#[structopt(
|
|
||||||
long,
|
|
||||||
help = "Path in which pict-rs will create it's 'files' directory"
|
|
||||||
)]
|
|
||||||
#[serde(skip_serializing_if = "Option::is_none")]
|
|
||||||
path: Option<PathBuf>,
|
|
||||||
},
|
|
||||||
#[cfg(feature = "object-storage")]
|
|
||||||
S3Store {
|
|
||||||
#[structopt(long, help = "Name of the bucket in which pict-rs will store images")]
|
|
||||||
bucket_name: String,
|
|
||||||
|
|
||||||
#[structopt(
|
|
||||||
long,
|
|
||||||
help = "Region in which the bucket exists, can be an http endpoint"
|
|
||||||
)]
|
|
||||||
region: crate::serde_str::Serde<s3::Region>,
|
|
||||||
|
|
||||||
#[serde(skip_serializing_if = "Option::is_none")]
|
|
||||||
#[structopt(long)]
|
|
||||||
access_key: Option<String>,
|
|
||||||
|
|
||||||
#[structopt(long)]
|
|
||||||
#[serde(skip_serializing_if = "Option::is_none")]
|
|
||||||
secret_key: Option<String>,
|
|
||||||
|
|
||||||
#[structopt(long)]
|
|
||||||
#[serde(skip_serializing_if = "Option::is_none")]
|
|
||||||
security_token: Option<String>,
|
|
||||||
|
|
||||||
#[structopt(long)]
|
|
||||||
#[serde(skip_serializing_if = "Option::is_none")]
|
|
||||||
session_token: Option<String>,
|
|
||||||
},
|
|
||||||
}
|
|
||||||

#[derive(Clone, Debug, serde::Deserialize, serde::Serialize)]
#[serde(rename_all = "snake_case")]
pub(crate) struct Config {
    skip_validate_imports: bool,
    addr: SocketAddr,
    path: PathBuf,
    image_format: Option<Format>,
    filters: Option<Vec<String>>,
    max_file_size: usize,
    max_image_width: usize,
    max_image_height: usize,
    max_image_area: usize,
    sled_cache_capacity: u64,
    console_buffer_capacity: Option<usize>,
    api_key: Option<String>,
    opentelemetry_url: Option<Url>,
    store: Store,
}

#[derive(serde::Serialize)]
#[serde(rename_all = "snake_case")]
pub(crate) struct Defaults {
    skip_validate_imports: bool,
    addr: SocketAddr,
    max_file_size: usize,
    max_image_width: usize,
    max_image_height: usize,
    max_image_area: usize,
    sled_cache_capacity: u64,
    store: Store,
}

impl Defaults {
    fn new() -> Self {
        Defaults {
            skip_validate_imports: false,
            addr: ([0, 0, 0, 0], 8080).into(),
            max_file_size: 40,
            max_image_width: 10_000,
            max_image_height: 10_000,
            max_image_area: 40_000_000,
            sled_cache_capacity: 1024 * 1024 * 64, // 16 times smaller than sled's default of 1GB
            store: Store::FileStore { path: None },
        }
    }
}

impl Config {
    pub(crate) fn build() -> anyhow::Result<Self> {
        let args = Args::from_args();

        if let Some(path) = args.migrate_file {
            let migrate_config = config::Config::builder()
                .add_source(config::File::from(path))
                .build()?;
            let migrate: Migrate = migrate_config.try_deserialize()?;

            crate::MIGRATE.set(migrate).unwrap();
        }

        let mut base_config =
            config::Config::builder().add_source(config::Config::try_from(&Defaults::new())?);

        if let Some(path) = args.config_file {
            base_config = base_config.add_source(config::File::from(path));
        };

-        if !args.overrides.is_default() {
-            let merging = config::Config::try_from(&args.overrides)?;
-
-            base_config = base_config.add_source(merging);
-        }
-
-        let config: Self = base_config
-            .add_source(config::Environment::with_prefix("PICTRS").separator("__"))
-            .build()?
-            .try_deserialize()?;
-
-        Ok(config)
+        let built = config
+            .add_source(config::Environment::with_prefix("PICTRS").separator("__"))
+            .add_source(config::Config::try_from(&config_format)?)
+            .build()?;
+
+        let config: Configuration = built.try_deserialize()?;
+
+        if let Some(save_to) = save_to {
+            let output = toml::to_string_pretty(&config)?;
+            std::fs::write(save_to, output)?;
+        }
+
+        Ok((config, operation))
    }
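Note the precedence this builder establishes: serialized defaults first, then the optional config file, then CLI overrides, with `PICTRS__`-prefixed environment variables merged last, so later sources win. A condensed sketch of the same layering under those assumptions (the `pict-rs` file name is a hypothetical stand-in):

fn layered() -> anyhow::Result<config::Config> {
    Ok(config::Config::builder()
        .add_source(config::Config::try_from(&Defaults::new())?) // lowest precedence
        .add_source(config::File::with_name("pict-rs").required(false)) // optional file layer
        .add_source(config::Environment::with_prefix("PICTRS").separator("__")) // wins last
        .build()?)
}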

    pub(crate) fn store(&self) -> &Store {
        &self.store
    }

    pub(crate) fn bind_address(&self) -> SocketAddr {
        self.addr
    }

    pub(crate) fn data_dir(&self) -> PathBuf {
        self.path.clone()
    }

    pub(crate) fn sled_cache_capacity(&self) -> u64 {
        self.sled_cache_capacity
    }

    pub(crate) fn console_buffer_capacity(&self) -> Option<usize> {
        self.console_buffer_capacity
    }

    pub(crate) fn format(&self) -> Option<Format> {
        self.image_format
    }

    pub(crate) fn allowed_filters(&self) -> Option<HashSet<String>> {
        self.filters.as_ref().map(|wl| wl.iter().cloned().collect())
    }

    pub(crate) fn validate_imports(&self) -> bool {
        !self.skip_validate_imports
    }

    pub(crate) fn max_file_size(&self) -> usize {
        self.max_file_size
    }

    pub(crate) fn max_width(&self) -> usize {
        self.max_image_width
    }

    pub(crate) fn max_height(&self) -> usize {
        self.max_image_height
    }

    pub(crate) fn max_area(&self) -> usize {
        self.max_image_area
    }

    pub(crate) fn api_key(&self) -> Option<&str> {
        self.api_key.as_deref()
    }

    pub(crate) fn opentelemetry_url(&self) -> Option<&Url> {
        self.opentelemetry_url.as_ref()
    }
}

#[derive(Debug, thiserror::Error)]
#[error("Invalid format supplied, {0}")]
pub(crate) struct FormatError(String);

#[derive(Copy, Clone, Debug, serde::Deserialize, serde::Serialize)]
#[serde(rename_all = "snake_case")]
pub(crate) enum Format {
    Jpeg,
    Png,
    Webp,
}

impl Format {
    pub(crate) fn as_magick_format(&self) -> &'static str {
        match self {
            Format::Jpeg => "JPEG",
            Format::Png => "PNG",
            Format::Webp => "WEBP",
        }
    }

    pub(crate) fn as_hint(&self) -> Option<ValidInputType> {
        match self {
            Format::Jpeg => Some(ValidInputType::Jpeg),
            Format::Png => Some(ValidInputType::Png),
            Format::Webp => Some(ValidInputType::Webp),
        }
    }
}

impl std::str::FromStr for Format {
    type Err = FormatError;

    fn from_str(s: &str) -> Result<Self, Self::Err> {
        match s {
            "png" => Ok(Format::Png),
            "jpg" => Ok(Format::Jpeg),
            "webp" => Ok(Format::Webp),
            other => Err(FormatError(other.to_string())),
        }
    }
}
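One quirk worth noting before the rewrite below: this parser accepts only the short spelling, so "jpg" parses while "jpeg" does not; the new ImageFormat parser in src/config/primitives.rs accepts both. A quick check of that behavior:

#[test]
fn format_spellings() {
    assert!("jpg".parse::<Format>().is_ok());
    assert!("jpeg".parse::<Format>().is_err()); // accepted only by the new ImageFormat
    assert!("png".parse::<Format>().is_ok());
}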
src/config/commandline.rs (new file, 576 lines)
@@ -0,0 +1,576 @@
use crate::{
    config::primitives::{ImageFormat, LogFormat, Targets},
    serde_str::Serde,
};
use clap::{Parser, Subcommand};
use std::{net::SocketAddr, path::PathBuf};
use url::Url;

impl Args {
    pub(super) fn into_output(self) -> Output {
        let Args {
            config_file,
            old_db_path,
            log_format,
            log_targets,
            console_address,
            console_buffer_capacity,
            opentelemetry_url,
            opentelemetry_service_name,
            opentelemetry_targets,
            save_to,
            command,
        } = self;

        let old_db = OldDb { path: old_db_path };

        let tracing = Tracing {
            logging: Logging {
                format: log_format,
                targets: log_targets.map(Serde::new),
            },
            console: Console {
                address: console_address,
                buffer_capacity: console_buffer_capacity,
            },
            opentelemetry: OpenTelemetry {
                url: opentelemetry_url,
                service_name: opentelemetry_service_name,
                targets: opentelemetry_targets.map(Serde::new),
            },
        };

        match command {
            Command::Run(Run {
                address,
                api_key,
                worker_id,
                media_skip_validate_imports,
                media_max_width,
                media_max_height,
                media_max_area,
                media_max_file_size,
                media_enable_silent_video,
                media_filters,
                media_format,
                store,
            }) => {
                let server = Server {
                    address,
                    api_key,
                    worker_id,
                };
                let media = Media {
                    skip_validate_imports: media_skip_validate_imports,
                    max_width: media_max_width,
                    max_height: media_max_height,
                    max_area: media_max_area,
                    max_file_size: media_max_file_size,
                    enable_silent_video: media_enable_silent_video,
                    filters: media_filters,
                    format: media_format,
                };
                let operation = Operation::Run;

                match store {
                    Some(RunStore::Filesystem(RunFilesystem { system, repo })) => {
                        let store = Some(Store::Filesystem(system));
                        Output {
                            config_format: ConfigFormat {
                                server,
                                old_db,
                                tracing,
                                media,
                                store,
                                repo,
                            },
                            operation,
                            config_file,
                            save_to,
                        }
                    }
                    Some(RunStore::ObjectStorage(RunObjectStorage { storage, repo })) => {
                        let store = Some(Store::ObjectStorage(storage));
                        Output {
                            config_format: ConfigFormat {
                                server,
                                old_db,
                                tracing,
                                media,
                                store,
                                repo,
                            },
                            operation,
                            config_file,
                            save_to,
                        }
                    }
                    None => Output {
                        config_format: ConfigFormat {
                            server,
                            old_db,
                            tracing,
                            media,
                            store: None,
                            repo: None,
                        },
                        operation,
                        config_file,
                        save_to,
                    },
                }
            }
            Command::MigrateStore(migrate_store) => {
                let server = Server::default();
                let media = Media::default();

                match migrate_store {
                    MigrateStore::Filesystem(MigrateFilesystem { from, to }) => match to {
                        MigrateStoreInner::Filesystem(MigrateFilesystemInner { to, repo }) => {
                            Output {
                                config_format: ConfigFormat {
                                    server,
                                    old_db,
                                    tracing,
                                    media,
                                    store: None,
                                    repo,
                                },
                                operation: Operation::MigrateStore {
                                    from: from.into(),
                                    to: to.into(),
                                },
                                config_file,
                                save_to,
                            }
                        }
                        MigrateStoreInner::ObjectStorage(MigrateObjectStorageInner {
                            to,
                            repo,
                        }) => Output {
                            config_format: ConfigFormat {
                                server,
                                old_db,
                                tracing,
                                media,
                                store: None,
                                repo,
                            },
                            operation: Operation::MigrateStore {
                                from: from.into(),
                                to: to.into(),
                            },
                            config_file,
                            save_to,
                        },
                    },
                    MigrateStore::ObjectStorage(MigrateObjectStorage { from, to }) => match to {
                        MigrateStoreInner::Filesystem(MigrateFilesystemInner { to, repo }) => {
                            Output {
                                config_format: ConfigFormat {
                                    server,
                                    old_db,
                                    tracing,
                                    media,
                                    store: None,
                                    repo,
                                },
                                operation: Operation::MigrateStore {
                                    from: from.into(),
                                    to: to.into(),
                                },
                                config_file,
                                save_to,
                            }
                        }
                        MigrateStoreInner::ObjectStorage(MigrateObjectStorageInner {
                            to,
                            repo,
                        }) => Output {
                            config_format: ConfigFormat {
                                server,
                                old_db,
                                tracing,
                                media,
                                store: None,
                                repo,
                            },
                            operation: Operation::MigrateStore {
                                from: from.into(),
                                to: to.into(),
                            },
                            config_file,
                            save_to,
                        },
                    },
                }
            }
        }
    }
}
pub(super) struct Output {
    pub(super) config_format: ConfigFormat,
    pub(super) operation: Operation,
    pub(super) save_to: Option<PathBuf>,
    pub(super) config_file: Option<PathBuf>,
}

#[allow(clippy::large_enum_variant)]
#[derive(Clone)]
pub(crate) enum Operation {
    Run,
    MigrateStore {
        from: crate::config::primitives::Store,
        to: crate::config::primitives::Store,
    },
}

#[derive(Debug, Default, serde::Serialize)]
#[serde(rename_all = "snake_case")]
pub(super) struct ConfigFormat {
    server: Server,
    old_db: OldDb,
    tracing: Tracing,
    media: Media,
    #[serde(skip_serializing_if = "Option::is_none")]
    repo: Option<Repo>,
    #[serde(skip_serializing_if = "Option::is_none")]
    store: Option<Store>,
}
#[derive(Debug, Default, serde::Serialize)]
#[serde(rename_all = "snake_case")]
struct Server {
    #[serde(skip_serializing_if = "Option::is_none")]
    address: Option<SocketAddr>,
    #[serde(skip_serializing_if = "Option::is_none")]
    worker_id: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    api_key: Option<String>,
}

#[derive(Debug, Default, serde::Serialize)]
#[serde(rename_all = "snake_case")]
struct Tracing {
    logging: Logging,
    console: Console,
    opentelemetry: OpenTelemetry,
}

#[derive(Debug, Default, serde::Serialize)]
#[serde(rename_all = "snake_case")]
struct Logging {
    #[serde(skip_serializing_if = "Option::is_none")]
    format: Option<LogFormat>,
    #[serde(skip_serializing_if = "Option::is_none")]
    targets: Option<Serde<Targets>>,
}

#[derive(Debug, Default, serde::Serialize)]
#[serde(rename_all = "snake_case")]
struct Console {
    #[serde(skip_serializing_if = "Option::is_none")]
    address: Option<SocketAddr>,
    #[serde(skip_serializing_if = "Option::is_none")]
    buffer_capacity: Option<usize>,
}

#[derive(Debug, Default, serde::Serialize)]
#[serde(rename_all = "snake_case")]
struct OpenTelemetry {
    #[serde(skip_serializing_if = "Option::is_none")]
    url: Option<Url>,
    #[serde(skip_serializing_if = "Option::is_none")]
    service_name: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    targets: Option<Serde<Targets>>,
}

#[derive(Debug, Default, serde::Serialize)]
#[serde(rename_all = "snake_case")]
struct OldDb {
    #[serde(skip_serializing_if = "Option::is_none")]
    path: Option<PathBuf>,
}

#[derive(Debug, Default, serde::Serialize)]
#[serde(rename_all = "snake_case")]
struct Media {
    #[serde(skip_serializing_if = "Option::is_none")]
    max_width: Option<usize>,
    #[serde(skip_serializing_if = "Option::is_none")]
    max_height: Option<usize>,
    #[serde(skip_serializing_if = "Option::is_none")]
    max_area: Option<usize>,
    #[serde(skip_serializing_if = "Option::is_none")]
    max_file_size: Option<usize>,
    #[serde(skip_serializing_if = "Option::is_none")]
    enable_silent_video: Option<bool>,
    #[serde(skip_serializing_if = "Option::is_none")]
    filters: Option<Vec<String>>,
    #[serde(skip_serializing_if = "Option::is_none")]
    format: Option<ImageFormat>,
    #[serde(skip_serializing_if = "Option::is_none")]
    skip_validate_imports: Option<bool>,
}
/// Run the pict-rs application
#[derive(Debug, Parser)]
#[clap(author, version, about, long_about = None)]
pub(super) struct Args {
    /// Path to the pict-rs configuration file
    #[clap(short, long)]
    config_file: Option<PathBuf>,

    /// Path to the old pict-rs sled database
    #[clap(long)]
    old_db_path: Option<PathBuf>,

    /// Format of logs printed to stdout
    #[clap(long)]
    log_format: Option<LogFormat>,
    /// Log levels to print to stdout, respects RUST_LOG formatting
    #[clap(long)]
    log_targets: Option<Targets>,

    /// Address and port to expose tokio-console metrics
    #[clap(long)]
    console_address: Option<SocketAddr>,
    /// Capacity of the console-subscriber Event Buffer
    #[clap(long)]
    console_buffer_capacity: Option<usize>,

    /// URL to send OpenTelemetry metrics
    #[clap(long)]
    opentelemetry_url: Option<Url>,
    /// Service Name to use for OpenTelemetry
    #[clap(long)]
    opentelemetry_service_name: Option<String>,
    /// Log levels to use for OpenTelemetry, respects RUST_LOG formatting
    #[clap(long)]
    opentelemetry_targets: Option<Targets>,

    /// File to save the current configuration for reproducible runs
    #[clap(long)]
    save_to: Option<PathBuf>,

    #[clap(subcommand)]
    command: Command,
}
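Taken together, these flags compose invocations like `pict-rs --save-to pict-rs.toml run --address 127.0.0.1:8080 filesystem --path /mnt`. A sketch of exercising the derive in a test (flag names follow the attributes above; `parse_from` comes from clap's `Parser` trait):

#[test]
fn parses_run_command() {
    use clap::Parser;

    let args = Args::parse_from([
        "pict-rs",
        "--save-to",
        "pict-rs.toml",
        "run",
        "--address",
        "127.0.0.1:8080",
    ]);
    let output = args.into_output();
    assert!(matches!(output.operation, Operation::Run));
}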
#[derive(Debug, Subcommand)]
enum Command {
    /// Runs the pict-rs web server
    Run(Run),

    /// Migrates from one provided media store to another
    #[clap(flatten)]
    MigrateStore(MigrateStore),
}

#[derive(Debug, Parser)]
struct Run {
    /// The address and port to bind the pict-rs web server
    #[clap(short, long)]
    address: Option<SocketAddr>,

    /// The API KEY required to access restricted routes
    #[clap(long)]
    api_key: Option<String>,

    #[clap(long)]
    worker_id: Option<String>,

    /// Whether to validate media on the "import" endpoint
    #[clap(long)]
    media_skip_validate_imports: Option<bool>,
    /// The maximum width, in pixels, for uploaded media
    #[clap(long)]
    media_max_width: Option<usize>,
    /// The maximum height, in pixels, for uploaded media
    #[clap(long)]
    media_max_height: Option<usize>,
    /// The maximum area, in pixels, for uploaded media
    #[clap(long)]
    media_max_area: Option<usize>,
    /// The maximum size, in megabytes, for uploaded media
    #[clap(long)]
    media_max_file_size: Option<usize>,
    /// Whether to enable GIF and silent MP4 uploads. Full videos are unsupported
    #[clap(long)]
    media_enable_silent_video: Option<bool>,
    /// Which media filters should be enabled on the `process` endpoint
    #[clap(long)]
    media_filters: Option<Vec<String>>,
    /// Enforce that uploaded media is transcoded to the provided format
    #[clap(long)]
    media_format: Option<ImageFormat>,

    #[clap(subcommand)]
    store: Option<RunStore>,
}

/// Configure the provided storage
#[derive(Clone, Debug, Subcommand, serde::Serialize)]
#[serde(rename_all = "snake_case")]
#[serde(tag = "type")]
enum Store {
    /// Configure filesystem storage
    Filesystem(Filesystem),

    /// Configure object storage
    ObjectStorage(ObjectStorage),
}

/// Run pict-rs with the provided storage
#[derive(Debug, Subcommand)]
enum RunStore {
    /// Run pict-rs with filesystem storage
    Filesystem(RunFilesystem),

    /// Run pict-rs with object storage
    ObjectStorage(RunObjectStorage),
}

/// Configure the pict-rs storage migration
#[derive(Debug, Subcommand)]
enum MigrateStore {
    /// Migrate from the provided filesystem storage
    Filesystem(MigrateFilesystem),

    /// Migrate from the provided object storage
    ObjectStorage(MigrateObjectStorage),
}

/// Configure the destination storage for pict-rs storage migration
#[derive(Debug, Subcommand)]
enum MigrateStoreInner {
    /// Migrate to the provided filesystem storage
    Filesystem(MigrateFilesystemInner),

    /// Migrate to the provided object storage
    ObjectStorage(MigrateObjectStorageInner),
}

/// Migrate pict-rs' storage from the provided filesystem storage
#[derive(Debug, Parser)]
struct MigrateFilesystem {
    #[clap(flatten)]
    from: crate::config::primitives::Filesystem,

    #[clap(subcommand)]
    to: MigrateStoreInner,
}

/// Migrate pict-rs' storage to the provided filesystem storage
#[derive(Debug, Parser)]
struct MigrateFilesystemInner {
    #[clap(flatten)]
    to: crate::config::primitives::Filesystem,

    #[clap(subcommand)]
    repo: Option<Repo>,
}

/// Migrate pict-rs' storage from the provided object storage
#[derive(Debug, Parser)]
struct MigrateObjectStorage {
    #[clap(flatten)]
    from: crate::config::primitives::ObjectStorage,

    #[clap(subcommand)]
    to: MigrateStoreInner,
}

/// Migrate pict-rs' storage to the provided object storage
#[derive(Debug, Parser)]
struct MigrateObjectStorageInner {
    #[clap(flatten)]
    to: crate::config::primitives::ObjectStorage,

    #[clap(subcommand)]
    repo: Option<Repo>,
}

/// Run pict-rs with the provided filesystem storage
#[derive(Debug, Parser)]
struct RunFilesystem {
    #[clap(flatten)]
    system: Filesystem,

    #[clap(subcommand)]
    repo: Option<Repo>,
}

/// Run pict-rs with the provided object storage
#[derive(Debug, Parser)]
struct RunObjectStorage {
    #[clap(flatten)]
    storage: ObjectStorage,

    #[clap(subcommand)]
    repo: Option<Repo>,
}

/// Configuration for data repositories
#[derive(Debug, Subcommand, serde::Serialize)]
#[serde(rename_all = "snake_case")]
#[serde(tag = "type")]
enum Repo {
    /// Run pict-rs with the provided sled-backed data repository
    Sled(Sled),
}

/// Configuration for filesystem media storage
#[derive(Clone, Debug, Parser, serde::Serialize)]
#[serde(rename_all = "snake_case")]
struct Filesystem {
    /// The path to store uploaded media
    #[clap(short, long)]
    path: Option<PathBuf>,
}

/// Configuration for Object Storage
#[derive(Clone, Debug, Parser, serde::Serialize)]
#[serde(rename_all = "snake_case")]
struct ObjectStorage {
    /// The bucket in which to store media
    #[clap(short, long)]
    bucket_name: Option<String>,

    /// The region the bucket is located in
    #[clap(short, long)]
    region: Option<Serde<s3::Region>>,

    /// The Access Key for the user accessing the bucket
    #[clap(short, long)]
    access_key: Option<String>,

    /// The secret key for the user accessing the bucket
    #[clap(short, long)]
    secret_key: Option<String>,

    /// The security token for accessing the bucket
    #[clap(long)]
    security_token: Option<String>,

    /// The session token for accessing the bucket
    #[clap(long)]
    session_token: Option<String>,
}

/// Configuration for the sled-backed data repository
#[derive(Debug, Parser, serde::Serialize)]
#[serde(rename_all = "snake_case")]
struct Sled {
    /// The path to store the sled database
    #[clap(short, long)]
    #[serde(skip_serializing_if = "Option::is_none")]
    path: Option<PathBuf>,

    /// The cache capacity, in bytes, allowed to sled for in-memory operations
    #[clap(short, long)]
    #[serde(skip_serializing_if = "Option::is_none")]
    cache_capacity: Option<u64>,
}
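The `--save-to` flag from `Args` pairs with these serialize-only structs: the merged `ConfigFormat` can be written back out as TOML for reproducible runs. A sketch of that write (the `toml` crate is assumed, as in the new config code):

fn save(config_format: &ConfigFormat, path: &std::path::Path) -> color_eyre::Result<()> {
    // Options tagged skip_serializing_if = "Option::is_none" simply vanish
    // from the output, so the saved file stays minimal.
    let output = toml::to_string_pretty(config_format)?;
    std::fs::write(path, output)?;
    Ok(())
}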
src/config/defaults.rs (new file, 192 lines)
@@ -0,0 +1,192 @@
use crate::{
    config::primitives::{LogFormat, Targets},
    serde_str::Serde,
};
use std::{net::SocketAddr, path::PathBuf};

#[derive(Clone, Debug, Default, serde::Serialize)]
#[serde(rename_all = "snake_case")]
pub(crate) struct Defaults {
    server: ServerDefaults,
    tracing: TracingDefaults,
    old_db: OldDbDefaults,
    media: MediaDefaults,
    repo: RepoDefaults,
    store: StoreDefaults,
}

#[derive(Clone, Debug, serde::Serialize)]
#[serde(rename_all = "snake_case")]
struct ServerDefaults {
    address: SocketAddr,
    worker_id: String,
}

#[derive(Clone, Debug, Default, serde::Serialize)]
#[serde(rename_all = "snake_case")]
struct TracingDefaults {
    logging: LoggingDefaults,

    console: ConsoleDefaults,

    opentelemetry: OpenTelemetryDefaults,
}

#[derive(Clone, Debug, serde::Serialize)]
#[serde(rename_all = "snake_case")]
struct LoggingDefaults {
    format: LogFormat,
    targets: Serde<Targets>,
}

#[derive(Clone, Debug, serde::Serialize)]
#[serde(rename_all = "snake_case")]
struct ConsoleDefaults {
    buffer_capacity: usize,
}

#[derive(Clone, Debug, serde::Serialize)]
#[serde(rename_all = "snake_case")]
struct OpenTelemetryDefaults {
    service_name: String,
    targets: Serde<Targets>,
}

#[derive(Clone, Debug, serde::Serialize)]
#[serde(rename_all = "snake_case")]
struct OldDbDefaults {
    path: PathBuf,
}

#[derive(Clone, Debug, serde::Serialize)]
#[serde(rename_all = "snake_case")]
struct MediaDefaults {
    max_width: usize,
    max_height: usize,
    max_area: usize,
    max_file_size: usize,
    enable_silent_video: bool,
    filters: Vec<String>,
    skip_validate_imports: bool,
}

#[derive(Clone, Debug, serde::Serialize)]
#[serde(rename_all = "snake_case")]
#[serde(tag = "type")]
enum RepoDefaults {
    Sled(SledDefaults),
}

#[derive(Clone, Debug, serde::Serialize)]
#[serde(rename_all = "snake_case")]
struct SledDefaults {
    path: PathBuf,
    cache_capacity: u64,
}

#[derive(Clone, Debug, serde::Serialize)]
#[serde(rename_all = "snake_case")]
#[serde(tag = "type")]
enum StoreDefaults {
    Filesystem(FilesystemDefaults),
}

#[derive(Clone, Debug, serde::Serialize)]
#[serde(rename_all = "snake_case")]
struct FilesystemDefaults {
    path: PathBuf,
}

impl Default for ServerDefaults {
    fn default() -> Self {
        ServerDefaults {
            address: "0.0.0.0:8080".parse().expect("Valid address string"),
            worker_id: String::from("pict-rs-1"),
        }
    }
}

impl Default for LoggingDefaults {
    fn default() -> Self {
        LoggingDefaults {
            format: LogFormat::Normal,
            targets: "warn,tracing_actix_web=info,actix_web=info,actix_server=info"
                .parse()
                .expect("Valid targets string"),
        }
    }
}

impl Default for ConsoleDefaults {
    fn default() -> Self {
        ConsoleDefaults {
            buffer_capacity: 1024 * 100,
        }
    }
}

impl Default for OpenTelemetryDefaults {
    fn default() -> Self {
        OpenTelemetryDefaults {
            service_name: String::from("pict-rs"),
            targets: "info".parse().expect("Valid targets string"),
        }
    }
}

impl Default for OldDbDefaults {
    fn default() -> Self {
        OldDbDefaults {
            path: PathBuf::from(String::from("/mnt")),
        }
    }
}

impl Default for MediaDefaults {
    fn default() -> Self {
        MediaDefaults {
            max_width: 10_000,
            max_height: 10_000,
            max_area: 40_000_000,
            max_file_size: 40,
            enable_silent_video: true,
            filters: vec![
                "identity".into(),
                "thumbnail".into(),
                "resize".into(),
                "crop".into(),
                "blur".into(),
            ],
            skip_validate_imports: false,
        }
    }
}

impl Default for RepoDefaults {
    fn default() -> Self {
        Self::Sled(SledDefaults::default())
    }
}

impl Default for SledDefaults {
    fn default() -> Self {
        SledDefaults {
            path: PathBuf::from(String::from("/mnt/sled-repo")),
            cache_capacity: 1024 * 1024 * 64,
        }
    }
}

impl Default for StoreDefaults {
    fn default() -> Self {
        Self::Filesystem(FilesystemDefaults::default())
    }
}

impl Default for FilesystemDefaults {
    fn default() -> Self {
        Self {
            path: PathBuf::from(String::from("/mnt/files")),
        }
    }
}
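Since every sub-struct here implements Default, the whole default tree can be materialized and serialized in one step; pict-rs feeds exactly this into the config builder as the lowest-precedence source. A sketch:

fn default_layer() -> color_eyre::Result<config::Config> {
    // Defaults::default() expands into the nested defaults above
    Ok(config::Config::builder()
        .add_source(config::Config::try_from(&Defaults::default())?)
        .build()?)
}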
src/config/file.rs (new file, 113 lines)
@@ -0,0 +1,113 @@
use crate::{
    config::primitives::{ImageFormat, LogFormat, Store, Targets},
    serde_str::Serde,
};
use std::{collections::BTreeSet, net::SocketAddr, path::PathBuf};
use url::Url;

#[derive(Clone, Debug, serde::Deserialize, serde::Serialize)]
#[serde(rename_all = "snake_case")]
pub(crate) struct ConfigFile {
    pub(crate) server: Server,

    pub(crate) tracing: Tracing,

    pub(crate) old_db: OldDb,

    pub(crate) media: Media,

    pub(crate) repo: Repo,

    pub(crate) store: Store,
}

#[derive(Clone, Debug, serde::Deserialize, serde::Serialize)]
#[serde(rename_all = "snake_case")]
#[serde(tag = "type")]
pub(crate) enum Repo {
    Sled(Sled),
}

#[derive(Clone, Debug, serde::Deserialize, serde::Serialize)]
#[serde(rename_all = "snake_case")]
pub(crate) struct Server {
    pub(crate) address: SocketAddr,

    pub(crate) worker_id: String,

    #[serde(skip_serializing_if = "Option::is_none")]
    pub(crate) api_key: Option<String>,
}

#[derive(Clone, Debug, serde::Deserialize, serde::Serialize)]
#[serde(rename_all = "snake_case")]
pub(crate) struct Tracing {
    pub(crate) logging: Logging,

    pub(crate) console: Console,

    pub(crate) opentelemetry: OpenTelemetry,
}

#[derive(Clone, Debug, serde::Deserialize, serde::Serialize)]
#[serde(rename_all = "snake_case")]
pub(crate) struct Logging {
    pub(crate) format: LogFormat,

    pub(crate) targets: Serde<Targets>,
}

#[derive(Clone, Debug, serde::Deserialize, serde::Serialize)]
#[serde(rename_all = "snake_case")]
pub(crate) struct OpenTelemetry {
    #[serde(skip_serializing_if = "Option::is_none")]
    pub(crate) url: Option<Url>,

    pub(crate) service_name: String,

    pub(crate) targets: Serde<Targets>,
}

#[derive(Clone, Debug, serde::Deserialize, serde::Serialize)]
#[serde(rename_all = "snake_case")]
pub(crate) struct Console {
    #[serde(skip_serializing_if = "Option::is_none")]
    pub(crate) address: Option<SocketAddr>,

    pub(crate) buffer_capacity: usize,
}

#[derive(Clone, Debug, serde::Deserialize, serde::Serialize)]
#[serde(rename_all = "snake_case")]
pub(crate) struct OldDb {
    pub(crate) path: PathBuf,
}

#[derive(Clone, Debug, serde::Deserialize, serde::Serialize)]
#[serde(rename_all = "snake_case")]
pub(crate) struct Media {
    pub(crate) max_width: usize,

    pub(crate) max_height: usize,

    pub(crate) max_area: usize,

    pub(crate) max_file_size: usize,

    pub(crate) enable_silent_video: bool,

    pub(crate) filters: BTreeSet<String>,

    #[serde(skip_serializing_if = "Option::is_none")]
    pub(crate) format: Option<ImageFormat>,

    pub(crate) skip_validate_imports: bool,
}

#[derive(Clone, Debug, serde::Deserialize, serde::Serialize)]
#[serde(rename_all = "snake_case")]
pub(crate) struct Sled {
    pub(crate) path: PathBuf,

    pub(crate) cache_capacity: u64,
}
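Unlike the serialize-only CLI structs, most fields here are not Option: ConfigFile deserializes the already-merged layers, so an on-disk file may stay partial as long as defaults are merged underneath first. A sketch of that load (the module path to Defaults is an assumption for illustration):

fn load(path: &std::path::Path) -> color_eyre::Result<ConfigFile> {
    let merged = config::Config::builder()
        // hypothetical path; merge defaults below the file so no field is missing
        .add_source(config::Config::try_from(&crate::config::defaults::Defaults::default())?)
        .add_source(config::File::from(path))
        .build()?;
    Ok(merged.try_deserialize()?)
}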
src/config/primitives.rs (new file, 251 lines)
@@ -0,0 +1,251 @@
use crate::magick::ValidInputType;
use crate::serde_str::Serde;
use clap::ArgEnum;
use std::{fmt::Display, path::PathBuf, str::FromStr};
use tracing::Level;

#[derive(
    Clone,
    Copy,
    Debug,
    PartialEq,
    Eq,
    PartialOrd,
    Ord,
    Hash,
    serde::Deserialize,
    serde::Serialize,
    ArgEnum,
)]
#[serde(rename_all = "snake_case")]
pub(crate) enum LogFormat {
    Compact,
    Json,
    Normal,
    Pretty,
}

#[derive(
    Clone,
    Copy,
    Debug,
    PartialEq,
    Eq,
    PartialOrd,
    Ord,
    Hash,
    serde::Deserialize,
    serde::Serialize,
    ArgEnum,
)]
#[serde(rename_all = "snake_case")]
pub(crate) enum ImageFormat {
    Jpeg,
    Webp,
    Png,
}

#[derive(Clone, Debug)]
pub(crate) struct Targets {
    pub(crate) targets: tracing_subscriber::filter::Targets,
}

/// Configuration for filesystem media storage
#[derive(Clone, Debug, serde::Deserialize, serde::Serialize, clap::Parser)]
#[serde(rename_all = "snake_case")]
pub(crate) struct Filesystem {
    /// Path to store media
    #[clap(short, long)]
    pub(crate) path: PathBuf,
}

/// Configuration for object media storage
#[derive(Clone, Debug, serde::Deserialize, serde::Serialize, clap::Parser)]
#[serde(rename_all = "snake_case")]
pub(crate) struct ObjectStorage {
    /// The bucket in which to store media
    #[clap(short, long)]
    pub(crate) bucket_name: String,

    /// The region the bucket is located in
    #[clap(short, long)]
    pub(crate) region: Serde<s3::Region>,

    /// The Access Key for the user accessing the bucket
    #[clap(short, long)]
    pub(crate) access_key: String,

    /// The secret key for the user accessing the bucket
    #[clap(short, long)]
    pub(crate) secret_key: String,

    /// The security token for accessing the bucket
    #[clap(long)]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub(crate) security_token: Option<String>,

    /// The session token for accessing the bucket
    #[clap(long)]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub(crate) session_token: Option<String>,
}

#[derive(Clone, Debug, serde::Deserialize, serde::Serialize)]
#[serde(rename_all = "snake_case")]
#[serde(tag = "type")]
pub(crate) enum Store {
    Filesystem(Filesystem),

    ObjectStorage(ObjectStorage),
}
impl ImageFormat {
    pub(crate) fn as_hint(self) -> Option<ValidInputType> {
        Some(ValidInputType::from_format(self))
    }

    pub(crate) fn as_magick_format(self) -> &'static str {
        match self {
            Self::Jpeg => "JPEG",
            Self::Png => "PNG",
            Self::Webp => "WEBP",
        }
    }
}

impl From<Filesystem> for Store {
    fn from(f: Filesystem) -> Self {
        Self::Filesystem(f)
    }
}

impl From<ObjectStorage> for Store {
    fn from(o: ObjectStorage) -> Self {
        Self::ObjectStorage(o)
    }
}

impl FromStr for Targets {
    type Err = <tracing_subscriber::filter::Targets as FromStr>::Err;

    fn from_str(s: &str) -> Result<Self, Self::Err> {
        Ok(Targets {
            targets: s.parse()?,
        })
    }
}

impl Display for Targets {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let targets = self
            .targets
            .iter()
            .map(|(path, level)| format!("{}={}", path, level))
            .collect::<Vec<_>>()
            .join(",");

        let max_level = [
            Level::TRACE,
            Level::DEBUG,
            Level::INFO,
            Level::WARN,
            Level::ERROR,
        ]
        .iter()
        .fold(None, |found, level| {
            if found.is_none()
                && self
                    .targets
                    .would_enable("not_a_real_target_so_nothing_can_conflict", level)
            {
                Some(level.to_string().to_lowercase())
            } else {
                found
            }
        });

        if let Some(level) = max_level {
            if !targets.is_empty() {
                write!(f, "{},{}", level, targets)
            } else {
                write!(f, "{}", level)
            }
        } else if !targets.is_empty() {
            write!(f, "{}", targets)
        } else {
            Ok(())
        }
    }
}
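The fold in the Display impl above recovers the filter's default level, which tracing_subscriber's Targets does not expose directly: probing would_enable with a target name no real module uses answers "what level applies when nothing more specific matches". The same probe, condensed:

use tracing::Level;
use tracing_subscriber::filter::Targets;

fn default_level(targets: &Targets) -> Option<Level> {
    // Most-verbose first, so the first enabled level is the default level
    [Level::TRACE, Level::DEBUG, Level::INFO, Level::WARN, Level::ERROR]
        .into_iter()
        .find(|level| targets.would_enable("not_a_real_target_so_nothing_can_conflict", level))
}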
impl FromStr for ImageFormat {
    type Err = String;

    fn from_str(s: &str) -> Result<Self, Self::Err> {
        match s.to_lowercase().as_str() {
            "jpeg" | "jpg" => Ok(Self::Jpeg),
            "png" => Ok(Self::Png),
            "webp" => Ok(Self::Webp),
            other => Err(format!("Invalid variant: {}", other)),
        }
    }
}

impl FromStr for LogFormat {
    type Err = String;

    fn from_str(s: &str) -> Result<Self, Self::Err> {
        for variant in Self::value_variants() {
            if variant.to_possible_value().unwrap().matches(s, false) {
                return Ok(*variant);
            }
        }
        Err(format!("Invalid variant: {}", s))
    }
}

impl Display for ImageFormat {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        self.to_possible_value()
            .expect("no values are skipped")
            .get_name()
            .fmt(f)
    }
}

impl Display for LogFormat {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        self.to_possible_value()
            .expect("no values are skipped")
            .get_name()
            .fmt(f)
    }
}

#[cfg(test)]
mod tests {
    use super::{Serde, Targets};

    #[test]
    fn builds_info_targets() {
        let t: Serde<Targets> = "info".parse().unwrap();

        println!("{:?}", t);

        assert_eq!(t.to_string(), "info");
    }

    #[test]
    fn builds_specific_targets() {
        let t: Serde<Targets> = "pict_rs=info".parse().unwrap();

        assert_eq!(t.to_string(), "pict_rs=info");
    }

    #[test]
    fn builds_warn_and_specific_targets() {
        let t: Serde<Targets> = "warn,pict_rs=info".parse().unwrap();

        assert_eq!(t.to_string(), "warn,pict_rs=info");
    }
}
src/details.rs (new file, 63 lines)
@@ -0,0 +1,63 @@
use crate::{error::Error, magick::ValidInputType, serde_str::Serde, store::Store};
use actix_web::web;

#[derive(Clone, Debug, serde::Deserialize, serde::Serialize)]
pub(crate) struct Details {
    width: usize,
    height: usize,
    content_type: Serde<mime::Mime>,
    created_at: time::OffsetDateTime,
}

impl Details {
    pub(crate) fn is_motion(&self) -> bool {
        self.content_type.type_() == "video"
            || self.content_type.type_() == "image" && self.content_type.subtype() == "gif"
    }

    #[tracing::instrument("Details from bytes", skip(input))]
    pub(crate) async fn from_bytes(
        input: web::Bytes,
        hint: Option<ValidInputType>,
    ) -> Result<Self, Error> {
        let details = crate::magick::details_bytes(input, hint).await?;

        Ok(Details::now(
            details.width,
            details.height,
            details.mime_type,
        ))
    }

    #[tracing::instrument("Details from store")]
    pub(crate) async fn from_store<S: Store + 'static>(
        store: S,
        identifier: S::Identifier,
        expected_format: Option<ValidInputType>,
    ) -> Result<Self, Error> {
        let details = crate::magick::details_store(store, identifier, expected_format).await?;

        Ok(Details::now(
            details.width,
            details.height,
            details.mime_type,
        ))
    }

    pub(crate) fn now(width: usize, height: usize, content_type: mime::Mime) -> Self {
        Details {
            width,
            height,
            content_type: Serde::new(content_type),
            created_at: time::OffsetDateTime::now_utc(),
        }
    }

    pub(crate) fn content_type(&self) -> mime::Mime {
        (*self.content_type).clone()
    }

    pub(crate) fn system_time(&self) -> std::time::SystemTime {
        self.created_at.into()
    }
}
src/error.rs (100 changed lines)
@@ -1,27 +1,31 @@
 use actix_web::{http::StatusCode, HttpResponse, ResponseError};
-use tracing_error::SpanTrace;
+use color_eyre::Report;

 pub(crate) struct Error {
-    context: SpanTrace,
-    kind: UploadError,
+    inner: color_eyre::Report,
+}
+
+impl Error {
+    fn kind(&self) -> Option<&UploadError> {
+        self.inner.downcast_ref()
+    }
 }

 impl std::fmt::Debug for Error {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        writeln!(f, "{}", self.kind)
+        std::fmt::Debug::fmt(&self.inner, f)
     }
 }

 impl std::fmt::Display for Error {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        writeln!(f, "{}", self.kind)?;
-        std::fmt::Display::fmt(&self.context, f)
+        std::fmt::Display::fmt(&self.inner, f)
     }
 }

 impl std::error::Error for Error {
     fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
-        self.kind.source()
+        self.inner.source()
     }
 }
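The new Error stores an eyre Report and relies on it preserving the concrete error type for later downcasts; the status-code mapping further down depends on that. A standalone sketch of the pattern (Boom is a hypothetical stand-in for UploadError):

use color_eyre::Report;

#[derive(Debug, thiserror::Error)]
#[error("boom")]
struct Boom; // stand-in for UploadError

fn main() {
    let report = Report::from(Boom);
    // downcast_ref recovers the concrete error, as Error::kind does above
    assert!(report.downcast_ref::<Boom>().is_some());
}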
@@ -31,46 +35,38 @@ where
 {
     fn from(error: T) -> Self {
         Error {
-            kind: UploadError::from(error),
-            context: SpanTrace::capture(),
+            inner: Report::from(UploadError::from(error)),
         }
     }
 }

+impl From<sled::transaction::TransactionError<Error>> for Error {
+    fn from(e: sled::transaction::TransactionError<Error>) -> Self {
+        match e {
+            sled::transaction::TransactionError::Abort(t) => t,
+            sled::transaction::TransactionError::Storage(e) => e.into(),
+        }
+    }
+}
+
 #[derive(Debug, thiserror::Error)]
 pub(crate) enum UploadError {
-    #[error("Couldn't upload file, {0}")]
+    #[error("Couldn't upload file")]
     Upload(#[from] actix_form_data::Error),

-    #[error("Error in DB, {0}")]
-    Db(#[from] sled::Error),
+    #[error("Error in DB")]
+    Sled(#[from] crate::repo::sled::SledError),
+
+    #[error("Error in old sled DB")]
+    OldSled(#[from] ::sled::Error),

-    #[error("Error parsing string, {0}")]
+    #[error("Error parsing string")]
     ParseString(#[from] std::string::FromUtf8Error),

-    #[error("Error interacting with filesystem, {0}")]
+    #[error("Error interacting with filesystem")]
     Io(#[from] std::io::Error),

-    #[error(transparent)]
+    #[error("Error generating path")]
     PathGenerator(#[from] storage_path_generator::PathError),

-    #[error(transparent)]
+    #[error("Error stripping prefix")]
     StripPrefix(#[from] std::path::StripPrefixError),

-    #[error(transparent)]
+    #[error("Error storing file")]
     FileStore(#[from] crate::store::file_store::FileError),

-    #[cfg(feature = "object-storage")]
-    #[error(transparent)]
+    #[error("Error storing object")]
     ObjectStore(#[from] crate::store::object_store::ObjectError),

     #[error("Provided process path is invalid")]
@@ -88,44 +84,44 @@ pub(crate) enum UploadError {
     #[error("Requested a file that doesn't exist")]
     MissingAlias,

-    #[error("Alias directed to missing file")]
-    MissingFile,
-
     #[error("Provided token did not match expected token")]
     InvalidToken,

     #[error("Unsupported image format")]
     UnsupportedFormat,

+    #[error("Gif uploads are not enabled")]
+    SilentVideoDisabled,
+
     #[error("Invalid media dimensions")]
     Dimensions,

     #[error("Unable to download image, bad response {0}")]
     Download(actix_web::http::StatusCode),

-    #[error("Unable to download image, {0}")]
+    #[error("Unable to download image")]
     Payload(#[from] awc::error::PayloadError),

     #[error("Unable to send request, {0}")]
     SendRequest(String),

-    #[error("No filename provided in request")]
-    MissingFilename,
-
     #[error("Error converting Path to String")]
     Path,

     #[error("Tried to save an image with an already-taken name")]
     DuplicateAlias,

-    #[error("{0}")]
+    #[error("Error in json")]
     Json(#[from] serde_json::Error),

     #[error("Range header not satisfiable")]
     Range,

-    #[error(transparent)]
-    Limit(#[from] super::LimitError),
+    #[error("Hit limit")]
+    Limit(#[from] crate::stream::LimitError),
+
+    #[error("Response timeout")]
+    Timeout(#[from] crate::stream::TimeoutError),
 }

 impl From<awc::error::SendRequestError> for UploadError {
@@ -148,24 +144,40 @@ impl From<tokio::sync::AcquireError> for UploadError {

 impl ResponseError for Error {
     fn status_code(&self) -> StatusCode {
-        match self.kind {
-            UploadError::DuplicateAlias
-            | UploadError::Limit(_)
-            | UploadError::NoFiles
-            | UploadError::Upload(_) => StatusCode::BAD_REQUEST,
-            UploadError::MissingAlias | UploadError::MissingFilename => StatusCode::NOT_FOUND,
-            UploadError::InvalidToken => StatusCode::FORBIDDEN,
-            UploadError::Range => StatusCode::RANGE_NOT_SATISFIABLE,
+        match self.kind() {
+            Some(
+                UploadError::DuplicateAlias
+                | UploadError::Limit(_)
+                | UploadError::NoFiles
+                | UploadError::Upload(_)
+                | UploadError::UnsupportedFormat
+                | UploadError::SilentVideoDisabled,
+            ) => StatusCode::BAD_REQUEST,
+            Some(
+                UploadError::Sled(crate::repo::sled::SledError::Missing)
+                | UploadError::MissingAlias,
+            ) => StatusCode::NOT_FOUND,
+            Some(UploadError::InvalidToken) => StatusCode::FORBIDDEN,
+            Some(UploadError::Range) => StatusCode::RANGE_NOT_SATISFIABLE,
             _ => StatusCode::INTERNAL_SERVER_ERROR,
         }
     }

     fn error_response(&self) -> HttpResponse {
+        if let Some(kind) = self.kind() {
             HttpResponse::build(self.status_code())
                 .content_type("application/json")
                 .body(
-                    serde_json::to_string(&serde_json::json!({ "msg": self.kind.to_string() }))
+                    serde_json::to_string(&serde_json::json!({ "msg": kind.to_string() }))
                         .unwrap_or_else(|_| r#"{"msg":"Request failed"}"#.to_string()),
                 )
+        } else {
+            HttpResponse::build(self.status_code())
+                .content_type("application/json")
+                .body(
+                    serde_json::to_string(&serde_json::json!({ "msg": "Unknown error" }))
+                        .unwrap_or_else(|_| r#"{"msg":"Request failed"}"#.to_string()),
+                )
+        }
     }
 }
@@ -101,10 +101,7 @@ pub(crate) async fn thumbnail<S: Store>(
     from: S::Identifier,
     input_format: InputFormat,
     format: ThumbnailFormat,
-) -> Result<impl AsyncRead + Unpin, Error>
-where
-    Error: From<S::Error>,
-{
+) -> Result<impl AsyncRead + Unpin, Error> {
     let input_file = crate::tmp_file::tmp_file(Some(input_format.to_ext()));
     let input_file_str = input_file.to_str().ok_or(UploadError::Path)?;
     crate::store::file_store::safe_create_parent(&input_file).await?;
src/file.rs (35 changed lines)
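In the hunk below, the hand-rolled BytesFreezer stream disappears: it existed only to map BytesMut frames to Bytes, and TryStreamExt::map_ok does that without a manual Stream impl. A standalone sketch of the combinator:

use bytes::{Bytes, BytesMut};
use futures_util::{stream, TryStreamExt};

#[tokio::main]
async fn main() {
    let frames = stream::iter(vec![Ok::<_, std::io::Error>(BytesMut::from(&b"hi"[..]))]);
    // map_ok touches only the Ok variants; errors pass through untouched
    let frozen: Vec<Bytes> = frames.map_ok(BytesMut::freeze).try_collect().await.unwrap();
    assert_eq!(frozen[0], Bytes::from_static(b"hi"));
}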
@ -8,7 +8,7 @@ pub(crate) use tokio_file::File;
|
||||||
mod tokio_file {
|
mod tokio_file {
|
||||||
use crate::{store::file_store::FileError, Either};
|
use crate::{store::file_store::FileError, Either};
|
||||||
use actix_web::web::{Bytes, BytesMut};
|
use actix_web::web::{Bytes, BytesMut};
|
||||||
use futures_util::stream::{Stream, StreamExt};
|
use futures_util::{Stream, StreamExt, TryStreamExt};
|
||||||
use std::{io::SeekFrom, path::Path};
|
use std::{io::SeekFrom, path::Path};
|
||||||
use tokio::io::{AsyncRead, AsyncReadExt, AsyncSeekExt, AsyncWrite, AsyncWriteExt};
|
use tokio::io::{AsyncRead, AsyncReadExt, AsyncSeekExt, AsyncWrite, AsyncWriteExt};
|
||||||
use tokio_util::codec::{BytesCodec, FramedRead};
|
use tokio_util::codec::{BytesCodec, FramedRead};
|
||||||
|
@ -91,38 +91,7 @@ mod tokio_file {
|
||||||
(None, None) => Either::right(self.inner),
|
(None, None) => Either::right(self.inner),
|
||||||
};
|
};
|
||||||
|
|
||||||
Ok(BytesFreezer::new(FramedRead::new(obj, BytesCodec::new())))
|
Ok(FramedRead::new(obj, BytesCodec::new()).map_ok(BytesMut::freeze))
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pin_project_lite::pin_project! {
|
|
||||||
struct BytesFreezer<S> {
|
|
||||||
#[pin]
|
|
||||||
inner: S,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<S> BytesFreezer<S> {
|
|
||||||
fn new(inner: S) -> Self {
|
|
||||||
BytesFreezer { inner }
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<S, E> Stream for BytesFreezer<S>
|
|
||||||
where
|
|
||||||
S: Stream<Item = Result<BytesMut, E>> + Unpin,
|
|
||||||
{
|
|
||||||
type Item = Result<Bytes, E>;
|
|
||||||
|
|
||||||
fn poll_next(
|
|
||||||
mut self: std::pin::Pin<&mut Self>,
|
|
||||||
cx: &mut std::task::Context<'_>,
|
|
||||||
) -> std::task::Poll<Option<Self::Item>> {
|
|
||||||
let this = self.as_mut().project();
|
|
||||||
|
|
||||||
this.inner
|
|
||||||
.poll_next(cx)
|
|
||||||
.map(|opt| opt.map(|res| res.map(BytesMut::freeze)))
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
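The removed BytesFreezer type existed only to map each decoded BytesMut frame to an immutable Bytes; TryStreamExt::map_ok expresses the same transformation in a single combinator. A minimal standalone sketch of the equivalence (crate versions are assumptions, not pinned by this diff):

    // deps (assumed): tokio = { version = "1", features = ["full"] },
    // tokio-util = { version = "0.7", features = ["codec"] }, futures = "0.3", bytes = "1"
    use bytes::{Bytes, BytesMut};
    use futures::TryStreamExt;
    use tokio_util::codec::{BytesCodec, FramedRead};

    #[tokio::main]
    async fn main() -> std::io::Result<()> {
        // &[u8] implements AsyncRead, standing in for the file handle
        let reader: &[u8] = b"hello world";

        // FramedRead yields io::Result<BytesMut>; map_ok freezes each chunk
        let stream = FramedRead::new(reader, BytesCodec::new()).map_ok(BytesMut::freeze);

        let chunks: Vec<Bytes> = stream.try_collect().await?;
        assert_eq!(chunks.concat(), b"hello world".to_vec());
        Ok(())
    }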
93 src/generate.rs Normal file
@@ -0,0 +1,93 @@
+use crate::{
+    concurrent_processor::CancelSafeProcessor,
+    config::ImageFormat,
+    details::Details,
+    error::Error,
+    ffmpeg::{InputFormat, ThumbnailFormat},
+    repo::{Alias, FullRepo},
+    store::Store,
+};
+use actix_web::web::Bytes;
+use std::path::PathBuf;
+use tokio::io::AsyncReadExt;
+
+pub(crate) async fn generate<R: FullRepo, S: Store + 'static>(
+    repo: &R,
+    store: &S,
+    format: ImageFormat,
+    alias: Alias,
+    thumbnail_path: PathBuf,
+    thumbnail_args: Vec<String>,
+    hash: R::Bytes,
+) -> Result<(Details, Bytes), Error> {
+    let process_fut = process(
+        repo,
+        store,
+        format,
+        alias,
+        thumbnail_path.clone(),
+        thumbnail_args,
+        hash.clone(),
+    );
+
+    let (details, bytes) =
+        CancelSafeProcessor::new(hash.as_ref(), thumbnail_path, process_fut).await?;
+
+    Ok((details, bytes))
+}
+
+async fn process<R: FullRepo, S: Store + 'static>(
+    repo: &R,
+    store: &S,
+    format: ImageFormat,
+    alias: Alias,
+    thumbnail_path: PathBuf,
+    thumbnail_args: Vec<String>,
+    hash: R::Bytes,
+) -> Result<(Details, Bytes), Error> {
+    let permit = crate::PROCESS_SEMAPHORE.acquire().await?;
+
+    let identifier = if let Some(identifier) = repo
+        .still_identifier_from_alias::<S::Identifier>(&alias)
+        .await?
+    {
+        identifier
+    } else {
+        let identifier = repo.identifier(hash.clone()).await?;
+        let mut reader = crate::ffmpeg::thumbnail(
+            store.clone(),
+            identifier,
+            InputFormat::Mp4,
+            ThumbnailFormat::Jpeg,
+        )
+        .await?;
+        let motion_identifier = store.save_async_read(&mut reader).await?;
+
+        repo.relate_motion_identifier(hash.clone(), &motion_identifier)
+            .await?;
+
+        motion_identifier
+    };
+
+    let mut processed_reader =
+        crate::magick::process_image_store_read(store.clone(), identifier, thumbnail_args, format)?;
+
+    let mut vec = Vec::new();
+    processed_reader.read_to_end(&mut vec).await?;
+    let bytes = Bytes::from(vec);
+
+    drop(permit);
+
+    let details = Details::from_bytes(bytes.clone(), format.as_hint()).await?;
+
+    let identifier = store.save_bytes(bytes.clone()).await?;
+    repo.relate_details(&identifier, &details).await?;
+    repo.relate_variant_identifier(
+        hash,
+        thumbnail_path.to_string_lossy().to_string(),
+        &identifier,
+    )
+    .await?;
+
+    Ok((details, bytes)) as Result<(Details, Bytes), Error>
+}
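CancelSafeProcessor itself is not shown in this diff; from the call site above, it runs process_fut keyed by (hash, thumbnail_path) so concurrent requesters for the same thumbnail share one computation. A generic sketch of that dedup-by-key idea using the Shared combinator from the futures crate — the key, map, and future body here are hypothetical stand-ins, not pict-rs types:

    // deps (assumed): futures = "0.3"
    use futures::executor::block_on;
    use futures::future::{FutureExt, Shared};
    use std::collections::HashMap;
    use std::future::Future;
    use std::pin::Pin;

    type BoxFut = Pin<Box<dyn Future<Output = String> + Send>>;

    fn main() {
        // callers requesting the same key share one future, so the work runs once
        let mut in_flight: HashMap<&'static str, Shared<BoxFut>> = HashMap::new();

        let fut: BoxFut = Box::pin(async { "thumbnail-bytes".to_string() });
        let first = in_flight
            .entry("hash/thumbnail.jpg")
            .or_insert_with(|| fut.shared())
            .clone();

        let second = in_flight["hash/thumbnail.jpg"].clone();

        assert_eq!(block_on(first), block_on(second));
    }

The real processor additionally has to handle cancellation (evicting the entry when the winning future is dropped), which this sketch omits.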
214 src/ingest.rs Normal file
@@ -0,0 +1,214 @@
+use crate::{
+    error::{Error, UploadError},
+    magick::ValidInputType,
+    repo::{Alias, AliasRepo, DeleteToken, FullRepo, HashRepo},
+    store::Store,
+    CONFIG,
+};
+use actix_web::web::{Bytes, BytesMut};
+use futures_util::{Stream, StreamExt};
+use sha2::{Digest, Sha256};
+use tracing::debug;
+
+mod hasher;
+use hasher::Hasher;
+
+pub(crate) struct Session<R, S>
+where
+    R: FullRepo + 'static,
+    S: Store,
+{
+    repo: R,
+    hash: Option<Vec<u8>>,
+    alias: Option<Alias>,
+    identifier: Option<S::Identifier>,
+}
+
+pub(crate) async fn ingest<R, S>(
+    repo: &R,
+    store: &S,
+    stream: impl Stream<Item = Result<Bytes, Error>>,
+    declared_alias: Option<Alias>,
+    should_validate: bool,
+) -> Result<Session<R, S>, Error>
+where
+    R: FullRepo + 'static,
+    S: Store,
+{
+    let permit = crate::PROCESS_SEMAPHORE.acquire().await;
+
+    let mut bytes_mut = BytesMut::new();
+
+    futures_util::pin_mut!(stream);
+
+    debug!("Reading stream to memory");
+    while let Some(res) = stream.next().await {
+        let bytes = res?;
+        bytes_mut.extend_from_slice(&bytes);
+    }
+
+    debug!("Validating bytes");
+    let (input_type, validated_reader) = crate::validate::validate_image_bytes(
+        bytes_mut.freeze(),
+        CONFIG.media.format,
+        CONFIG.media.enable_silent_video,
+        should_validate,
+    )
+    .await?;
+
+    let mut hasher_reader = Hasher::new(validated_reader, Sha256::new());
+
+    let identifier = store.save_async_read(&mut hasher_reader).await?;
+
+    drop(permit);
+
+    let mut session = Session {
+        repo: repo.clone(),
+        hash: None,
+        alias: None,
+        identifier: Some(identifier.clone()),
+    };
+
+    let hash = hasher_reader.finalize_reset().await?;
+
+    session.hash = Some(hash.clone());
+
+    debug!("Saving upload");
+
+    save_upload(repo, store, &hash, &identifier).await?;
+
+    debug!("Adding alias");
+
+    if let Some(alias) = declared_alias {
+        session.add_existing_alias(&hash, alias).await?
+    } else {
+        session.create_alias(&hash, input_type).await?;
+    }
+
+    Ok(session)
+}
+
+async fn save_upload<R, S>(
+    repo: &R,
+    store: &S,
+    hash: &[u8],
+    identifier: &S::Identifier,
+) -> Result<(), Error>
+where
+    S: Store,
+    R: FullRepo,
+{
+    if HashRepo::create(repo, hash.to_vec().into()).await?.is_err() {
+        store.remove(identifier).await?;
+        return Ok(());
+    }
+
+    repo.relate_identifier(hash.to_vec().into(), identifier)
+        .await?;
+
+    Ok(())
+}
+
+impl<R, S> Session<R, S>
+where
+    R: FullRepo + 'static,
+    S: Store,
+{
+    pub(crate) fn disarm(&mut self) {
+        let _ = self.alias.take();
+        let _ = self.identifier.take();
+    }
+
+    pub(crate) fn alias(&self) -> Option<&Alias> {
+        self.alias.as_ref()
+    }
+
+    pub(crate) async fn delete_token(&self) -> Result<DeleteToken, Error> {
+        let alias = self.alias.clone().ok_or(UploadError::MissingAlias)?;
+
+        debug!("Generating delete token");
+        let delete_token = DeleteToken::generate();
+
+        debug!("Saving delete token");
+        let res = self.repo.relate_delete_token(&alias, &delete_token).await?;
+
+        if res.is_err() {
+            let delete_token = self.repo.delete_token(&alias).await?;
+            debug!("Returning existing delete token, {:?}", delete_token);
+            return Ok(delete_token);
+        }
+
+        debug!("Returning new delete token, {:?}", delete_token);
+        Ok(delete_token)
+    }
+
+    async fn add_existing_alias(&mut self, hash: &[u8], alias: Alias) -> Result<(), Error> {
+        AliasRepo::create(&self.repo, &alias)
+            .await?
+            .map_err(|_| UploadError::DuplicateAlias)?;
+
+        self.alias = Some(alias.clone());
+
+        self.repo.relate_hash(&alias, hash.to_vec().into()).await?;
+        self.repo.relate_alias(hash.to_vec().into(), &alias).await?;
+
+        Ok(())
+    }
+
+    async fn create_alias(&mut self, hash: &[u8], input_type: ValidInputType) -> Result<(), Error> {
+        debug!("Alias gen loop");
+
+        loop {
+            let alias = Alias::generate(input_type.as_ext().to_string());
+
+            if AliasRepo::create(&self.repo, &alias).await?.is_ok() {
+                self.alias = Some(alias.clone());
+
+                self.repo.relate_hash(&alias, hash.to_vec().into()).await?;
+                self.repo.relate_alias(hash.to_vec().into(), &alias).await?;
+
+                return Ok(());
+            }
+
+            debug!("Alias exists, regenerating");
+        }
+    }
+}
+
+impl<R, S> Drop for Session<R, S>
+where
+    R: FullRepo + 'static,
+    S: Store,
+{
+    fn drop(&mut self) {
+        if let Some(hash) = self.hash.take() {
+            let repo = self.repo.clone();
+            actix_rt::spawn(async move {
+                let _ = crate::queue::cleanup_hash(&repo, hash.into()).await;
+            });
+        }
+
+        if let Some(alias) = self.alias.take() {
+            let repo = self.repo.clone();
+
+            actix_rt::spawn(async move {
+                if let Ok(token) = repo.delete_token(&alias).await {
+                    let _ = crate::queue::cleanup_alias(&repo, alias, token).await;
+                } else {
+                    let token = DeleteToken::generate();
+                    if let Ok(Ok(())) = repo.relate_delete_token(&alias, &token).await {
+                        let _ = crate::queue::cleanup_alias(&repo, alias, token).await;
+                    }
+                }
+            });
+        }
+
+        if let Some(identifier) = self.identifier.take() {
+            let repo = self.repo.clone();
+
+            actix_rt::spawn(async move {
+                let _ = crate::queue::cleanup_identifier(&repo, identifier).await;
+            });
+        }
+    }
+}
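Session's Drop impl is what makes ingest cancel-safe: unless disarm() runs after the upload is fully recorded, dropping the session queues cleanup jobs for whatever was already written. A minimal sketch of the same arm/disarm pattern in plain Rust (CleanupGuard and its resource are hypothetical stand-ins):

    struct CleanupGuard {
        resource: Option<String>,
    }

    impl CleanupGuard {
        fn new(resource: String) -> Self {
            CleanupGuard { resource: Some(resource) }
        }

        // on success, take the resource so Drop has nothing left to clean up
        fn disarm(&mut self) {
            let _ = self.resource.take();
        }
    }

    impl Drop for CleanupGuard {
        fn drop(&mut self) {
            if let Some(resource) = self.resource.take() {
                // pict-rs spawns async cleanup jobs here; the sketch just logs
                println!("cleaning up abandoned resource: {}", resource);
            }
        }
    }

    fn main() {
        let mut ok = CleanupGuard::new("upload-1".into());
        ok.disarm(); // fully ingested: Drop stays silent

        let _failed = CleanupGuard::new("upload-2".into());
        // dropped without disarm: cleanup runs
    }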
@@ -16,10 +16,6 @@ pin_project_lite::pin_project! {
     }
 }
 
-pub(super) struct Hash {
-    inner: Vec<u8>,
-}
-
 impl<I, D> Hasher<I, D>
 where
     D: Digest + FixedOutputReset + Send + 'static,
@@ -31,27 +27,13 @@ where
         }
     }
 
-    pub(super) async fn finalize_reset(self) -> Result<Hash, Error> {
+    pub(super) async fn finalize_reset(self) -> Result<Vec<u8>, Error> {
         let mut hasher = self.hasher;
-        let hash = web::block(move || Hash::new(hasher.finalize_reset().to_vec())).await?;
+        let hash = web::block(move || hasher.finalize_reset().to_vec()).await?;
         Ok(hash)
     }
 }
 
-impl Hash {
-    fn new(inner: Vec<u8>) -> Self {
-        Hash { inner }
-    }
-
-    pub(super) fn as_slice(&self) -> &[u8] {
-        &self.inner
-    }
-
-    pub(super) fn into_inner(self) -> Vec<u8> {
-        self.inner
-    }
-}
-
 impl<I, D> AsyncRead for Hasher<I, D>
 where
     I: AsyncRead,
@@ -77,12 +59,6 @@ where
     }
 }
 
-impl std::fmt::Debug for Hash {
-    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
-        write!(f, "{}", base64::encode(&self.inner))
-    }
-}
-
 #[cfg(test)]
 mod test {
     use super::Hasher;
@@ -127,6 +103,6 @@ mod test {
         hasher.update(vec);
         let correct_hash = hasher.finalize_reset().to_vec();
 
-        assert_eq!(hash.inner, correct_hash);
+        assert_eq!(hash, correct_hash);
     }
 }
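With the Hash newtype gone, finalize_reset hands back the raw digest bytes directly. A short sketch of the underlying sha2 flow, assuming sha2 0.10: finalize_reset returns the digest and leaves the hasher reusable, which is why the test above can recompute the reference digest on the same instance.

    // deps (assumed): sha2 = "0.10"
    use sha2::{Digest, Sha256};

    fn main() {
        let mut hasher = Sha256::new();

        // feed bytes incrementally, as the AsyncRead wrapper does per chunk
        hasher.update(b"hello ");
        hasher.update(b"world");

        // returns the digest and resets the internal state for reuse
        let first = hasher.finalize_reset().to_vec();

        hasher.update(b"hello world");
        let second = hasher.finalize_reset().to_vec();

        assert_eq!(first, second);
    }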
@@ -1,3 +1,4 @@
+use crate::config::{LogFormat, OpenTelemetry, Tracing};
 use console_subscriber::ConsoleLayer;
 use opentelemetry::{
     sdk::{propagation::TraceContextPropagator, Resource},
@@ -8,64 +9,65 @@ use tracing::subscriber::set_global_default;
 use tracing_error::ErrorLayer;
 use tracing_log::LogTracer;
 use tracing_subscriber::{
-    filter::Targets, fmt::format::FmtSpan, layer::SubscriberExt, registry::LookupSpan, Layer,
-    Registry,
+    fmt::format::FmtSpan, layer::SubscriberExt, registry::LookupSpan, Layer, Registry,
 };
-use url::Url;
 
-pub(super) fn init_tracing(
-    servic_name: &'static str,
-    opentelemetry_url: Option<&Url>,
-    buffer_capacity: Option<usize>,
-) -> anyhow::Result<()> {
+pub(super) fn init_tracing(tracing: &Tracing) -> color_eyre::Result<()> {
+    color_eyre::install()?;
+
     LogTracer::init()?;
 
     opentelemetry::global::set_text_map_propagator(TraceContextPropagator::new());
 
-    let targets = std::env::var("RUST_LOG")
-        .unwrap_or_else(|_| "info".into())
-        .parse::<Targets>()?;
-
-    let format_layer = tracing_subscriber::fmt::layer()
-        .with_span_events(FmtSpan::NEW | FmtSpan::CLOSE)
-        .with_filter(targets.clone());
+    let format_layer =
+        tracing_subscriber::fmt::layer().with_span_events(FmtSpan::NEW | FmtSpan::CLOSE);
+
+    match tracing.logging.format {
+        LogFormat::Compact => with_format(format_layer.compact(), tracing),
+        LogFormat::Json => with_format(format_layer.json(), tracing),
+        LogFormat::Normal => with_format(format_layer, tracing),
+        LogFormat::Pretty => with_format(format_layer.pretty(), tracing),
+    }
+}
+
+fn with_format<F>(format_layer: F, tracing: &Tracing) -> color_eyre::Result<()>
+where
+    F: Layer<Registry> + Send + Sync,
+{
+    let format_layer = format_layer.with_filter(tracing.logging.targets.targets.clone());
 
     let subscriber = Registry::default()
         .with(format_layer)
         .with(ErrorLayer::default());
 
-    if let Some(buffer_capacity) = buffer_capacity {
+    if let Some(address) = tracing.console.address {
         let console_layer = ConsoleLayer::builder()
             .with_default_env()
-            .event_buffer_capacity(buffer_capacity)
-            .server_addr(([0, 0, 0, 0], 6669))
+            .event_buffer_capacity(tracing.console.buffer_capacity)
+            .server_addr(address)
             .spawn();
 
         let subscriber = subscriber.with(console_layer);
 
-        with_otel(subscriber, targets, servic_name, opentelemetry_url)
+        with_subscriber(subscriber, &tracing.opentelemetry)
     } else {
-        with_otel(subscriber, targets, servic_name, opentelemetry_url)
+        with_subscriber(subscriber, &tracing.opentelemetry)
    }
 }
 
-fn with_otel<S>(
-    subscriber: S,
-    targets: Targets,
-    servic_name: &'static str,
-    opentelemetry_url: Option<&Url>,
-) -> anyhow::Result<()>
+fn with_subscriber<S>(subscriber: S, otel: &OpenTelemetry) -> color_eyre::Result<()>
 where
     S: SubscriberExt + Send + Sync,
     for<'a> S: LookupSpan<'a>,
 {
-    if let Some(url) = opentelemetry_url {
-        let tracer =
-            opentelemetry_otlp::new_pipeline()
+    if let Some(url) = otel.url.as_ref() {
+        let tracer = opentelemetry_otlp::new_pipeline()
             .tracing()
-            .with_trace_config(opentelemetry::sdk::trace::config().with_resource(
-                Resource::new(vec![KeyValue::new("service.name", servic_name)]),
-            ))
+            .with_trace_config(
+                opentelemetry::sdk::trace::config().with_resource(Resource::new(vec![
+                    KeyValue::new("service.name", otel.service_name.clone()),
+                ])),
+            )
             .with_exporter(
                 opentelemetry_otlp::new_exporter()
                     .tonic()
@@ -75,7 +77,7 @@ where
 
     let otel_layer = tracing_opentelemetry::layer()
         .with_tracer(tracer)
-        .with_filter(targets);
+        .with_filter(otel.targets.as_ref().targets.clone());
 
     let subscriber = subscriber.with(otel_layer);
 
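with_format exists because each arm of the LogFormat match produces a different concrete layer type (compact, json, pretty, default), so a single non-generic binding could not hold them all; a function generic over Layer<Registry> accepts any of them. A reduced sketch of the pattern, assuming tracing 0.1 and tracing-subscriber 0.3 (the extra 'static bound is an assumption to satisfy set_global_default):

    use tracing_subscriber::{layer::SubscriberExt, Layer, Registry};

    fn install<F>(format_layer: F)
    where
        F: Layer<Registry> + Send + Sync + 'static,
    {
        let subscriber = Registry::default().with(format_layer);
        tracing::subscriber::set_global_default(subscriber).expect("failed to set subscriber");
    }

    fn main() {
        // each fmt variant is a distinct type; the generic fn accepts them all
        install(tracing_subscriber::fmt::layer().compact());

        tracing::info!("tracing initialized");
    }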
@@ -1,7 +1,8 @@
 use crate::{
-    config::Format,
+    config::ImageFormat,
     error::{Error, UploadError},
     process::Process,
+    repo::Alias,
     store::Store,
 };
 use actix_web::web::Bytes;
@@ -11,8 +12,9 @@ use tokio::{
 };
 use tracing::instrument;
 
-pub(crate) fn details_hint(filename: &str) -> Option<ValidInputType> {
-    if filename.ends_with(".mp4") {
+pub(crate) fn details_hint(alias: &Alias) -> Option<ValidInputType> {
+    let ext = alias.extension()?;
+    if ext.ends_with(".mp4") {
         Some(ValidInputType::Mp4)
     } else {
         None
@@ -61,11 +63,11 @@ impl ValidInputType {
         matches!(self, Self::Mp4)
     }
 
-    pub(crate) fn from_format(format: Format) -> Self {
+    pub(crate) fn from_format(format: ImageFormat) -> Self {
         match format {
-            Format::Jpeg => ValidInputType::Jpeg,
-            Format::Png => ValidInputType::Png,
-            Format::Webp => ValidInputType::Webp,
+            ImageFormat::Jpeg => ValidInputType::Jpeg,
+            ImageFormat::Png => ValidInputType::Png,
+            ImageFormat::Webp => ValidInputType::Webp,
         }
     }
 }
@@ -85,7 +87,7 @@ pub(crate) fn clear_metadata_bytes_read(input: Bytes) -> std::io::Result<impl As
 
 pub(crate) fn convert_bytes_read(
     input: Bytes,
-    format: Format,
+    format: ImageFormat,
 ) -> std::io::Result<impl AsyncRead + Unpin> {
     let process = Process::run(
         "magick",
@@ -137,14 +139,12 @@ pub(crate) async fn details_bytes(
     parse_details(s)
 }
 
-pub(crate) async fn details_store<S: Store>(
+#[tracing::instrument(skip(store))]
+pub(crate) async fn details_store<S: Store + 'static>(
     store: S,
     identifier: S::Identifier,
     hint: Option<ValidInputType>,
-) -> Result<Details, Error>
-where
-    Error: From<S::Error>,
-{
+) -> Result<Details, Error> {
     if hint.as_ref().map(|h| h.is_mp4()).unwrap_or(false) {
         let input_file = crate::tmp_file::tmp_file(Some(".mp4"));
         let input_file_str = input_file.to_str().ok_or(UploadError::Path)?;
@@ -180,6 +180,7 @@ where
     parse_details(s)
 }
 
+#[tracing::instrument]
 pub(crate) async fn details_file(path_str: &str) -> Result<Details, Error> {
     let process = Process::run(
         "magick",
@@ -254,11 +255,11 @@ pub(crate) async fn input_type_bytes(input: Bytes) -> Result<ValidInputType, Err
 }
 
 #[instrument(name = "Spawning process command")]
-pub(crate) fn process_image_store_read<S: Store>(
+pub(crate) fn process_image_store_read<S: Store + 'static>(
     store: S,
     identifier: S::Identifier,
     args: Vec<String>,
-    format: Format,
+    format: ImageFormat,
 ) -> std::io::Result<impl AsyncRead + Unpin> {
     let command = "magick";
     let convert_args = ["convert", "-"];
@@ -277,9 +278,9 @@ pub(crate) fn process_image_store_read<S: Store>(
 impl Details {
     #[instrument(name = "Validating input type")]
     fn validate_input(&self) -> Result<ValidInputType, Error> {
-        if self.width > crate::CONFIG.max_width()
-            || self.height > crate::CONFIG.max_height()
-            || self.width * self.height > crate::CONFIG.max_area()
+        if self.width > crate::CONFIG.media.max_width
+            || self.height > crate::CONFIG.media.max_height
+            || self.width * self.height > crate::CONFIG.media.max_area
         {
             return Err(UploadError::Dimensions.into());
         }
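details_hint now consults the parsed Alias rather than a raw filename, and the ? on alias.extension() short-circuits to None when no extension was stored. A standalone sketch of the same control flow, with hypothetical stand-ins for Alias and ValidInputType:

    struct Alias {
        extension: Option<String>,
    }

    #[derive(Debug, PartialEq)]
    enum ValidInputType {
        Mp4,
    }

    fn details_hint(alias: &Alias) -> Option<ValidInputType> {
        // `?` on an Option returns None from the function early
        let ext = alias.extension.as_deref()?;

        if ext.ends_with(".mp4") {
            Some(ValidInputType::Mp4)
        } else {
            None
        }
    }

    fn main() {
        let video = Alias { extension: Some(".mp4".into()) };
        let image = Alias { extension: Some(".png".into()) };
        let bare = Alias { extension: None };

        assert_eq!(details_hint(&video), Some(ValidInputType::Mp4));
        assert_eq!(details_hint(&image), None);
        assert_eq!(details_hint(&bare), None);
    }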
1019 src/main.rs
File diff suppressed because it is too large
@@ -1,43 +0,0 @@
-use crate::error::Error;
-use futures_util::stream::Stream;
-use std::{
-    marker::PhantomData,
-    pin::Pin,
-    task::{Context, Poll},
-};
-
-pin_project_lite::pin_project! {
-    pub(super) struct MapError<E, S> {
-        #[pin]
-        inner: S,
-
-        _error: PhantomData<E>,
-    }
-}
-
-pub(super) fn map_crate_error<S>(inner: S) -> MapError<Error, S> {
-    map_error(inner)
-}
-
-pub(super) fn map_error<S, E>(inner: S) -> MapError<E, S> {
-    MapError {
-        inner,
-        _error: PhantomData,
-    }
-}
-
-impl<T, StreamErr, E, S> Stream for MapError<E, S>
-where
-    S: Stream<Item = Result<T, StreamErr>>,
-    E: From<StreamErr>,
-{
-    type Item = Result<T, E>;
-
-    fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
-        let this = self.as_mut().project();
-
-        this.inner
-            .poll_next(cx)
-            .map(|opt| opt.map(|res| res.map_err(Into::into)))
-    }
-}
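The deleted MapError adapter reimplemented what futures_util already provides: TryStreamExt::map_err converts a stream's error type through any function, including a From conversion. A minimal sketch, assuming futures 0.3:

    use futures::{executor::block_on, stream, StreamExt, TryStreamExt};

    #[derive(Debug)]
    struct MyError;

    impl From<std::io::Error> for MyError {
        fn from(_: std::io::Error) -> Self {
            MyError
        }
    }

    fn main() {
        let s = stream::iter(vec![
            Ok(1u8),
            Err(std::io::Error::new(std::io::ErrorKind::Other, "boom")),
        ]);

        // map_err replaces the whole hand-rolled Stream impl
        let mapped = s.map_err(MyError::from);

        let collected: Vec<Result<u8, MyError>> = block_on(mapped.collect());
        println!("{:?}", collected);
    }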
129 src/migrate.rs
@@ -1,129 +0,0 @@
-use crate::UploadError;
-use std::path::PathBuf;
-
-mod s034;
-
-type SledIter = Box<dyn Iterator<Item = Result<(Vec<u8>, Vec<u8>), UploadError>>>;
-
-trait SledDb {
-    type SledTree: SledTree;
-
-    fn open_tree(&self, name: &str) -> Result<Self::SledTree, UploadError>;
-
-    fn self_tree(&self) -> &Self::SledTree;
-}
-
-impl<T> SledDb for &T
-where
-    T: SledDb,
-{
-    type SledTree = T::SledTree;
-
-    fn open_tree(&self, name: &str) -> Result<Self::SledTree, UploadError> {
-        (*self).open_tree(name)
-    }
-
-    fn self_tree(&self) -> &Self::SledTree {
-        (*self).self_tree()
-    }
-}
-
-trait SledTree {
-    fn get<K>(&self, key: K) -> Result<Option<Vec<u8>>, UploadError>
-    where
-        K: AsRef<[u8]>;
-
-    fn insert<K, V>(&self, key: K, value: V) -> Result<(), UploadError>
-    where
-        K: AsRef<[u8]>,
-        V: AsRef<[u8]>;
-
-    fn iter(&self) -> SledIter;
-
-    fn range<K, R>(&self, range: R) -> SledIter
-    where
-        K: AsRef<[u8]>,
-        R: std::ops::RangeBounds<K>;
-
-    fn flush(&self) -> Result<(), UploadError>;
-}
-
-pub(crate) struct LatestDb {
-    root_dir: PathBuf,
-    version: DbVersion,
-    cache_capacity: u64,
-}
-
-impl LatestDb {
-    pub(crate) fn exists(root_dir: PathBuf, cache_capacity: u64) -> Self {
-        let version = DbVersion::exists(root_dir.clone(), cache_capacity);
-
-        LatestDb {
-            root_dir,
-            version,
-            cache_capacity,
-        }
-    }
-
-    pub(crate) fn migrate(self) -> Result<sled::Db, UploadError> {
-        let LatestDb {
-            root_dir,
-            version,
-            cache_capacity,
-        } = self;
-
-        loop {
-            let root_dir2 = root_dir.clone();
-            let res = std::panic::catch_unwind(move || version.migrate(root_dir2, cache_capacity));
-
-            if let Ok(res) = res {
-                return res;
-            }
-        }
-    }
-}
-
-#[derive(Clone, Copy)]
-enum DbVersion {
-    Sled034,
-    Fresh,
-}
-
-impl DbVersion {
-    fn exists(root: PathBuf, cache_capacity: u64) -> Self {
-        if s034::exists(root.clone()) && !s034::migrating(root, cache_capacity) {
-            return DbVersion::Sled034;
-        }
-
-        DbVersion::Fresh
-    }
-
-    fn migrate(self, root: PathBuf, cache_capacity: u64) -> Result<sled::Db, UploadError> {
-        match self {
-            DbVersion::Sled034 | DbVersion::Fresh => s034::open(root, cache_capacity),
-        }
-    }
-}
-
-pub(crate) fn alias_key_bounds(hash: &[u8]) -> (Vec<u8>, Vec<u8>) {
-    let mut start = hash.to_vec();
-    start.extend(&[0]);
-
-    let mut end = hash.to_vec();
-    end.extend(&[1]);
-
-    (start, end)
-}
-
-pub(crate) fn alias_id_key(alias: &str) -> String {
-    format!("{}/id", alias)
-}
-
-pub(crate) fn alias_key(hash: &[u8], id: &str) -> Vec<u8> {
-    let mut key = hash.to_vec();
-    // add a separator to the key between the hash and the ID
-    key.extend(&[0]);
-    key.extend(id.as_bytes());
-
-    key
-}
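The deleted alias_key_bounds is still a useful pattern for ordered key-value stores like sled: appending 0 and 1 sentinel bytes brackets every composite key of the form hash ++ 0 ++ id, so a range scan sees exactly one hash's aliases. A standalone sketch:

    fn alias_key_bounds(hash: &[u8]) -> (Vec<u8>, Vec<u8>) {
        let mut start = hash.to_vec();
        start.push(0);

        let mut end = hash.to_vec();
        end.push(1);

        (start, end)
    }

    fn main() {
        let (start, end) = alias_key_bounds(b"abc");
        let key = [b"abc".as_slice(), &[0], b"alias-1"].concat();

        // the composite key sorts inside the half-open range [start, end)
        assert!(start.as_slice() <= key.as_slice());
        assert!(key.as_slice() < end.as_slice());
    }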
@@ -144,7 +144,7 @@ impl Process {
         })
     }
 
-    pub(crate) fn store_read<S: Store>(
+    pub(crate) fn store_read<S: Store + 'static>(
         mut self,
         store: S,
         identifier: S::Identifier,
@@ -22,9 +22,9 @@ pub(crate) struct Blur(f64);
 #[instrument]
 pub(crate) fn build_chain(
     args: &[(String, String)],
-    filename: String,
+    ext: &str,
 ) -> Result<(PathBuf, Vec<String>), Error> {
-    fn parse<P: Processor>(key: &str, value: &str) -> Result<Option<P>, UploadError> {
+    fn parse<P: Processor>(key: &str, value: &str) -> Result<Option<P>, Error> {
         if key == P::NAME {
             return Ok(Some(P::parse(key, value).ok_or(UploadError::ParsePath)?));
         }
@@ -40,7 +40,7 @@ pub(crate) fn build_chain(
         }};
     }
 
-    let (path, args) =
+    let (mut path, args) =
         args.iter()
             .fold(Ok((PathBuf::default(), vec![])), |inner, (name, value)| {
                 if let Ok(inner) = inner {
@@ -56,7 +56,9 @@ pub(crate) fn build_chain(
             }
         })?;
 
-    Ok((path.join(filename), args))
+    path.push(ext);
+
+    Ok((path, args))
 }
 
 impl Processor for Identity {
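build_chain now appends the extension as a final path segment via PathBuf::push instead of joining the whole filename. A short sketch of the push/join distinction (paths are illustrative only):

    use std::path::PathBuf;

    fn main() {
        // push mutates in place; join returns a new PathBuf
        let mut path = PathBuf::from("thumbnail/blur-1.5");
        path.push("jpg");

        let joined = PathBuf::from("thumbnail/blur-1.5").join("jpg");

        assert_eq!(path, joined);
        assert_eq!(path, PathBuf::from("thumbnail/blur-1.5/jpg"));
    }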
176 src/queue.rs Normal file
@@ -0,0 +1,176 @@
+use crate::{
+    config::ImageFormat,
+    error::Error,
+    repo::{
+        Alias, AliasRepo, DeleteToken, FullRepo, HashRepo, IdentifierRepo, QueueRepo, UploadId,
+    },
+    serde_str::Serde,
+    store::{Identifier, Store},
+};
+use std::{future::Future, path::PathBuf, pin::Pin};
+use tracing::Instrument;
+
+mod cleanup;
+mod process;
+
+const CLEANUP_QUEUE: &str = "cleanup";
+const PROCESS_QUEUE: &str = "process";
+
+#[derive(Debug, serde::Deserialize, serde::Serialize)]
+enum Cleanup {
+    Hash {
+        hash: Vec<u8>,
+    },
+    Identifier {
+        identifier: Vec<u8>,
+    },
+    Alias {
+        alias: Serde<Alias>,
+        token: Serde<DeleteToken>,
+    },
+}
+
+#[derive(Debug, serde::Deserialize, serde::Serialize)]
+enum Process {
+    Ingest {
+        identifier: Vec<u8>,
+        upload_id: Serde<UploadId>,
+        declared_alias: Option<Serde<Alias>>,
+        should_validate: bool,
+    },
+    Generate {
+        target_format: ImageFormat,
+        source: Serde<Alias>,
+        process_path: PathBuf,
+        process_args: Vec<String>,
+    },
+}
+
+pub(crate) async fn cleanup_alias<R: QueueRepo>(
+    repo: &R,
+    alias: Alias,
+    token: DeleteToken,
+) -> Result<(), Error> {
+    let job = serde_json::to_vec(&Cleanup::Alias {
+        alias: Serde::new(alias),
+        token: Serde::new(token),
+    })?;
+    repo.push(CLEANUP_QUEUE, job.into()).await?;
+    Ok(())
+}
+
+pub(crate) async fn cleanup_hash<R: QueueRepo>(repo: &R, hash: R::Bytes) -> Result<(), Error> {
+    let job = serde_json::to_vec(&Cleanup::Hash {
+        hash: hash.as_ref().to_vec(),
+    })?;
+    repo.push(CLEANUP_QUEUE, job.into()).await?;
+    Ok(())
+}
+
+pub(crate) async fn cleanup_identifier<R: QueueRepo, I: Identifier>(
+    repo: &R,
+    identifier: I,
+) -> Result<(), Error> {
+    let job = serde_json::to_vec(&Cleanup::Identifier {
+        identifier: identifier.to_bytes()?,
+    })?;
+    repo.push(CLEANUP_QUEUE, job.into()).await?;
+    Ok(())
+}
+
+pub(crate) async fn queue_ingest<R: QueueRepo>(
+    repo: &R,
+    identifier: Vec<u8>,
+    upload_id: UploadId,
+    declared_alias: Option<Alias>,
+    should_validate: bool,
+) -> Result<(), Error> {
+    let job = serde_json::to_vec(&Process::Ingest {
+        identifier,
+        declared_alias: declared_alias.map(Serde::new),
+        upload_id: Serde::new(upload_id),
+        should_validate,
+    })?;
+    repo.push(PROCESS_QUEUE, job.into()).await?;
+    Ok(())
+}
+
+pub(crate) async fn queue_generate<R: QueueRepo>(
+    repo: &R,
+    target_format: ImageFormat,
+    source: Alias,
+    process_path: PathBuf,
+    process_args: Vec<String>,
+) -> Result<(), Error> {
+    let job = serde_json::to_vec(&Process::Generate {
+        target_format,
+        source: Serde::new(source),
+        process_path,
+        process_args,
+    })?;
+    repo.push(PROCESS_QUEUE, job.into()).await?;
+    Ok(())
+}
+
+pub(crate) async fn process_cleanup<R: FullRepo, S: Store>(repo: R, store: S, worker_id: String) {
+    process_jobs(&repo, &store, worker_id, CLEANUP_QUEUE, cleanup::perform).await
+}
+
+pub(crate) async fn process_images<R: FullRepo + 'static, S: Store + 'static>(
+    repo: R,
+    store: S,
+    worker_id: String,
+) {
+    process_jobs(&repo, &store, worker_id, PROCESS_QUEUE, process::perform).await
+}
+
+type LocalBoxFuture<'a, T> = Pin<Box<dyn Future<Output = T> + 'a>>;
+
+async fn process_jobs<R, S, F>(
+    repo: &R,
+    store: &S,
+    worker_id: String,
+    queue: &'static str,
+    callback: F,
+) where
+    R: QueueRepo + HashRepo + IdentifierRepo + AliasRepo,
+    R::Bytes: Clone,
+    S: Store,
+    for<'a> F: Fn(&'a R, &'a S, &'a [u8]) -> LocalBoxFuture<'a, Result<(), Error>> + Copy,
+{
+    loop {
+        let res = job_loop(repo, store, worker_id.clone(), queue, callback).await;
+
+        if let Err(e) = res {
+            tracing::warn!("Error processing jobs: {}", e);
+            tracing::warn!("{:?}", e);
+            continue;
+        }
+
+        break;
+    }
+}
+
+async fn job_loop<R, S, F>(
+    repo: &R,
+    store: &S,
+    worker_id: String,
+    queue: &'static str,
+    callback: F,
+) -> Result<(), Error>
+where
+    R: QueueRepo + HashRepo + IdentifierRepo + AliasRepo,
+    R::Bytes: Clone,
+    S: Store,
+    for<'a> F: Fn(&'a R, &'a S, &'a [u8]) -> LocalBoxFuture<'a, Result<(), Error>> + Copy,
+{
+    loop {
+        let bytes = repo.pop(queue, worker_id.as_bytes().to_vec()).await?;
+
+        let span = tracing::info_span!("Running Job", worker_id = ?worker_id);
+
+        span.in_scope(|| (callback)(repo, store, bytes.as_ref()))
+            .instrument(span)
+            .await?;
+    }
+}
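Jobs cross the queue as serde_json-encoded enum variants, so a worker can match exhaustively on the payload it pops. A round-trip sketch with a trimmed-down Cleanup enum, assuming serde (with derive) and serde_json:

    // deps (assumed): serde = { version = "1", features = ["derive"] }, serde_json = "1"
    use serde::{Deserialize, Serialize};

    #[derive(Debug, Deserialize, Serialize)]
    enum Cleanup {
        Hash { hash: Vec<u8> },
        Identifier { identifier: Vec<u8> },
    }

    fn main() -> Result<(), serde_json::Error> {
        // a job is serialized before being pushed onto the queue...
        let job = serde_json::to_vec(&Cleanup::Hash { hash: vec![1, 2, 3] })?;

        // ...and deserialized by whichever worker pops it
        let parsed: Cleanup = serde_json::from_slice(&job)?;
        println!("{:?}", parsed);
        Ok(())
    }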
128 src/queue/cleanup.rs Normal file
@@ -0,0 +1,128 @@
+use crate::{
+    error::{Error, UploadError},
+    queue::{Cleanup, LocalBoxFuture},
+    repo::{Alias, AliasRepo, DeleteToken, FullRepo, HashRepo, IdentifierRepo},
+    serde_str::Serde,
+    store::{Identifier, Store},
+};
+use tracing::error;
+
+pub(super) fn perform<'a, R, S>(
+    repo: &'a R,
+    store: &'a S,
+    job: &'a [u8],
+) -> LocalBoxFuture<'a, Result<(), Error>>
+where
+    R: FullRepo,
+    S: Store,
+{
+    Box::pin(async move {
+        match serde_json::from_slice(job) {
+            Ok(job) => match job {
+                Cleanup::Hash { hash: in_hash } => hash::<R, S>(repo, in_hash).await?,
+                Cleanup::Identifier {
+                    identifier: in_identifier,
+                } => identifier(repo, &store, in_identifier).await?,
+                Cleanup::Alias {
+                    alias: stored_alias,
+                    token,
+                } => {
+                    alias(
+                        repo,
+                        Serde::into_inner(stored_alias),
+                        Serde::into_inner(token),
+                    )
+                    .await?
+                }
+            },
+            Err(e) => {
+                tracing::warn!("Invalid job: {}", e);
+            }
+        }
+
+        Ok(())
+    })
+}
+
+#[tracing::instrument(skip(repo, store))]
+async fn identifier<R, S>(repo: &R, store: &S, identifier: Vec<u8>) -> Result<(), Error>
+where
+    R: FullRepo,
+    S: Store,
+{
+    let identifier = S::Identifier::from_bytes(identifier)?;
+
+    let mut errors = Vec::new();
+
+    if let Err(e) = store.remove(&identifier).await {
+        errors.push(e);
+    }
+
+    if let Err(e) = IdentifierRepo::cleanup(repo, &identifier).await {
+        errors.push(e);
+    }
+
+    if !errors.is_empty() {
+        let span = tracing::error_span!("Error deleting files");
+        span.in_scope(|| {
+            for error in errors {
+                error!("{}", error);
+            }
+        });
+    }
+
+    Ok(())
+}
+
+#[tracing::instrument(skip(repo))]
+async fn hash<R, S>(repo: &R, hash: Vec<u8>) -> Result<(), Error>
+where
+    R: FullRepo,
+    S: Store,
+{
+    let hash: R::Bytes = hash.into();
+
+    let aliases = repo.aliases(hash.clone()).await?;
+
+    if !aliases.is_empty() {
+        return Ok(());
+    }
+
+    let mut idents = repo
+        .variants::<S::Identifier>(hash.clone())
+        .await?
+        .into_iter()
+        .map(|(_, v)| v)
+        .collect::<Vec<_>>();
+    idents.push(repo.identifier(hash.clone()).await?);
+    idents.extend(repo.motion_identifier(hash.clone()).await?);
+
+    for identifier in idents {
+        let _ = crate::queue::cleanup_identifier(repo, identifier).await;
+    }
+
+    HashRepo::cleanup(repo, hash).await?;
+
+    Ok(())
+}
+
+async fn alias<R>(repo: &R, alias: Alias, token: DeleteToken) -> Result<(), Error>
+where
+    R: FullRepo,
+{
+    let saved_delete_token = repo.delete_token(&alias).await?;
+    if saved_delete_token != token {
+        return Err(UploadError::InvalidToken.into());
+    }
+
+    let hash = repo.hash(&alias).await?;
+
+    AliasRepo::cleanup(repo, &alias).await?;
+    repo.remove_alias(hash.clone(), &alias).await?;
+
+    if repo.aliases(hash.clone()).await?.is_empty() {
+        crate::queue::cleanup_hash(repo, hash).await?;
+    }
+
+    Ok(())
+}
150 src/queue/process.rs Normal file
@@ -0,0 +1,150 @@
+use crate::{
+    config::ImageFormat,
+    error::Error,
+    ingest::Session,
+    queue::{LocalBoxFuture, Process},
+    repo::{Alias, DeleteToken, FullRepo, UploadId, UploadResult},
+    serde_str::Serde,
+    store::{Identifier, Store},
+};
+use futures_util::TryStreamExt;
+use std::path::PathBuf;
+
+pub(super) fn perform<'a, R, S>(
+    repo: &'a R,
+    store: &'a S,
+    job: &'a [u8],
+) -> LocalBoxFuture<'a, Result<(), Error>>
+where
+    R: FullRepo + 'static,
+    S: Store + 'static,
+{
+    Box::pin(async move {
+        match serde_json::from_slice(job) {
+            Ok(job) => match job {
+                Process::Ingest {
+                    identifier,
+                    upload_id,
+                    declared_alias,
+                    should_validate,
+                } => {
+                    process_ingest(
+                        repo,
+                        store,
+                        identifier,
+                        Serde::into_inner(upload_id),
+                        declared_alias.map(Serde::into_inner),
+                        should_validate,
+                    )
+                    .await?
+                }
+                Process::Generate {
+                    target_format,
+                    source,
+                    process_path,
+                    process_args,
+                } => {
+                    generate(
+                        repo,
+                        store,
+                        target_format,
+                        Serde::into_inner(source),
+                        process_path,
+                        process_args,
+                    )
+                    .await?
+                }
+            },
+            Err(e) => {
+                tracing::warn!("Invalid job: {}", e);
+            }
+        }
+
+        Ok(())
+    })
+}
+
+#[tracing::instrument(skip(repo, store))]
+async fn process_ingest<R, S>(
+    repo: &R,
+    store: &S,
+    unprocessed_identifier: Vec<u8>,
+    upload_id: UploadId,
+    declared_alias: Option<Alias>,
+    should_validate: bool,
+) -> Result<(), Error>
+where
+    R: FullRepo + 'static,
+    S: Store,
+{
+    let fut = async {
+        let unprocessed_identifier = S::Identifier::from_bytes(unprocessed_identifier)?;
+
+        let stream = store
+            .to_stream(&unprocessed_identifier, None, None)
+            .await?
+            .map_err(Error::from);
+
+        let session =
+            crate::ingest::ingest(repo, store, stream, declared_alias, should_validate).await?;
+
+        let token = session.delete_token().await?;
+
+        store.remove(&unprocessed_identifier).await?;
+
+        Ok((session, token)) as Result<(Session<R, S>, DeleteToken), Error>
+    };
+
+    let result = match fut.await {
+        Ok((mut session, token)) => {
+            let alias = session.alias().take().expect("Alias should exist").clone();
+            let result = UploadResult::Success { alias, token };
+            session.disarm();
+            result
+        }
+        Err(e) => {
+            tracing::warn!("Failed to ingest {}, {:?}", e, e);
+
+            UploadResult::Failure {
+                message: e.to_string(),
+            }
+        }
+    };
+
+    repo.complete(upload_id, result).await?;
+
+    Ok(())
+}
+
+async fn generate<R: FullRepo, S: Store + 'static>(
+    repo: &R,
+    store: &S,
+    target_format: ImageFormat,
+    source: Alias,
+    process_path: PathBuf,
+    process_args: Vec<String>,
+) -> Result<(), Error> {
+    let hash = repo.hash(&source).await?;
+
+    let path_string = process_path.to_string_lossy().to_string();
+    let identifier_opt = repo
+        .variant_identifier::<S::Identifier>(hash.clone(), path_string)
+        .await?;
+
+    if identifier_opt.is_some() {
+        return Ok(());
+    }
+
+    crate::generate::generate(
+        repo,
+        store,
+        target_format,
+        source,
+        process_path,
+        process_args,
+        hash,
+    )
+    .await?;
+
+    Ok(())
+}
11 src/range.rs
@@ -17,7 +17,7 @@ pub(crate) fn chop_bytes(
     if let Some((start, end)) = byte_range.to_satisfiable_range(length) {
         // END IS INCLUSIVE
         let end = end as usize + 1;
-        return Ok(once(ready(Ok(bytes.slice(start as usize..end as usize)))));
+        return Ok(once(ready(Ok(bytes.slice(start as usize..end)))));
     }
 
     Err(UploadError::Range.into())
@@ -28,16 +28,13 @@ pub(crate) async fn chop_store<S: Store>(
     store: &S,
     identifier: &S::Identifier,
     length: u64,
-) -> Result<impl Stream<Item = std::io::Result<Bytes>>, Error>
-where
-    Error: From<S::Error>,
-{
+) -> Result<impl Stream<Item = std::io::Result<Bytes>>, Error> {
     if let Some((start, end)) = byte_range.to_satisfiable_range(length) {
         // END IS INCLUSIVE
         let end = end + 1;
-        return Ok(store
+        return store
             .to_stream(identifier, Some(start), Some(end.saturating_sub(start)))
-            .await?);
+            .await;
     }
 
     Err(UploadError::Range.into())
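Both chop functions add 1 to the end offset because HTTP Range ends are inclusive while Rust ranges are end-exclusive. A tiny standalone sketch of the conversion:

    // a satisfiable inclusive (start, end) pair becomes start..(end + 1)
    fn chop(bytes: &[u8], start: usize, end_inclusive: usize) -> &[u8] {
        &bytes[start..end_inclusive + 1]
    }

    fn main() {
        let data = b"0123456789";

        // bytes=2-5 in a Range header means four bytes: indices 2, 3, 4, 5
        assert_eq!(chop(data, 2, 5), b"2345");
    }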
723
src/repo.rs
Normal file
723
src/repo.rs
Normal file
|
@ -0,0 +1,723 @@
|
||||||
|
use crate::{config, details::Details, error::Error, store::Identifier};
|
||||||
|
use futures_util::Stream;
|
||||||
|
use std::fmt::Debug;
|
||||||
|
use std::path::PathBuf;
|
||||||
|
use tracing::debug;
|
||||||
|
use uuid::Uuid;
|
||||||
|
|
||||||
|
mod old;
|
||||||
|
pub(crate) mod sled;
|
||||||
|
|
||||||
|
#[derive(Clone, Debug)]
|
||||||
|
pub(crate) enum Repo {
|
||||||
|
Sled(self::sled::SledRepo),
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
|
||||||
|
enum MaybeUuid {
|
||||||
|
Uuid(Uuid),
|
||||||
|
Name(String),
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
|
||||||
|
pub(crate) struct Alias {
|
||||||
|
id: MaybeUuid,
|
||||||
|
extension: Option<String>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
|
||||||
|
pub(crate) struct DeleteToken {
|
||||||
|
id: MaybeUuid,
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) struct AlreadyExists;
|
||||||
|
|
||||||
|
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
|
||||||
|
pub(crate) struct UploadId {
|
||||||
|
id: Uuid,
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) enum UploadResult {
|
||||||
|
Success { alias: Alias, token: DeleteToken },
|
||||||
|
Failure { message: String },
|
||||||
|
}
|
||||||
|
|
||||||
|
#[async_trait::async_trait(?Send)]
|
||||||
|
pub(crate) trait FullRepo:
|
||||||
|
UploadRepo
|
||||||
|
+ SettingsRepo
|
||||||
|
+ IdentifierRepo
|
||||||
|
+ AliasRepo
|
||||||
|
+ QueueRepo
|
||||||
|
+ HashRepo
|
||||||
|
+ Send
|
||||||
|
+ Sync
|
||||||
|
+ Clone
|
||||||
|
+ Debug
|
||||||
|
{
|
||||||
|
async fn identifier_from_alias<I: Identifier + 'static>(
|
||||||
|
&self,
|
||||||
|
alias: &Alias,
|
||||||
|
) -> Result<I, Error> {
|
||||||
|
let hash = self.hash(alias).await?;
|
||||||
|
self.identifier(hash).await
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn aliases_from_alias(&self, alias: &Alias) -> Result<Vec<Alias>, Error> {
|
||||||
|
let hash = self.hash(alias).await?;
|
||||||
|
self.aliases(hash).await
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn still_identifier_from_alias<I: Identifier + 'static>(
|
||||||
|
&self,
|
||||||
|
alias: &Alias,
|
||||||
|
) -> Result<Option<I>, Error> {
|
||||||
|
let hash = self.hash(alias).await?;
|
||||||
|
let identifier = self.identifier::<I>(hash.clone()).await?;
|
||||||
|
|
||||||
|
match self.details(&identifier).await? {
|
||||||
|
Some(details) if details.is_motion() => self.motion_identifier::<I>(hash).await,
|
||||||
|
Some(_) => Ok(Some(identifier)),
|
||||||
|
None => Ok(None),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) trait BaseRepo {
|
||||||
|
type Bytes: AsRef<[u8]> + From<Vec<u8>> + Clone;
|
||||||
|
}
|
||||||
|
|
||||||
|
#[async_trait::async_trait(?Send)]
|
||||||
|
pub(crate) trait UploadRepo: BaseRepo {
|
||||||
|
async fn create(&self, upload_id: UploadId) -> Result<(), Error>;
|
||||||
|
|
||||||
|
async fn wait(&self, upload_id: UploadId) -> Result<UploadResult, Error>;
|
||||||
|
|
||||||
|
async fn claim(&self, upload_id: UploadId) -> Result<(), Error>;
|
||||||
|
|
||||||
|
async fn complete(&self, upload_id: UploadId, result: UploadResult) -> Result<(), Error>;
|
||||||
|
}
|
||||||
|
|
||||||
|
#[async_trait::async_trait(?Send)]
|
||||||
|
pub(crate) trait QueueRepo: BaseRepo {
|
||||||
|
async fn requeue_in_progress(&self, worker_prefix: Vec<u8>) -> Result<(), Error>;
|
||||||
|
|
||||||
|
async fn push(&self, queue: &'static str, job: Self::Bytes) -> Result<(), Error>;
|
||||||
|
|
||||||
|
async fn pop(&self, queue: &'static str, worker_id: Vec<u8>) -> Result<Self::Bytes, Error>;
|
||||||
|
}
|
||||||
|
|
||||||
|
#[async_trait::async_trait(?Send)]
|
||||||
|
pub(crate) trait SettingsRepo: BaseRepo {
|
||||||
|
async fn set(&self, key: &'static str, value: Self::Bytes) -> Result<(), Error>;
|
||||||
|
async fn get(&self, key: &'static str) -> Result<Option<Self::Bytes>, Error>;
|
||||||
|
async fn remove(&self, key: &'static str) -> Result<(), Error>;
|
||||||
|
}
|
||||||
|
|
||||||
|
#[async_trait::async_trait(?Send)]
|
||||||
|
pub(crate) trait IdentifierRepo: BaseRepo {
|
||||||
|
async fn relate_details<I: Identifier>(
|
||||||
|
&self,
|
||||||
|
identifier: &I,
|
||||||
|
details: &Details,
|
||||||
|
) -> Result<(), Error>;
|
||||||
|
async fn details<I: Identifier>(&self, identifier: &I) -> Result<Option<Details>, Error>;
|
||||||
|
|
||||||
|
async fn cleanup<I: Identifier>(&self, identifier: &I) -> Result<(), Error>;
|
||||||
|
}
|
||||||
|
|
||||||
|
#[async_trait::async_trait(?Send)]
|
||||||
|
pub(crate) trait HashRepo: BaseRepo {
|
||||||
|
type Stream: Stream<Item = Result<Self::Bytes, Error>>;
|
||||||
|
|
||||||
|
async fn hashes(&self) -> Self::Stream;
|
||||||
|
|
||||||
|
async fn create(&self, hash: Self::Bytes) -> Result<Result<(), AlreadyExists>, Error>;
|
||||||
|
|
||||||
|
async fn relate_alias(&self, hash: Self::Bytes, alias: &Alias) -> Result<(), Error>;
|
||||||
|
async fn remove_alias(&self, hash: Self::Bytes, alias: &Alias) -> Result<(), Error>;
|
||||||
|
async fn aliases(&self, hash: Self::Bytes) -> Result<Vec<Alias>, Error>;
|
||||||
|
|
||||||
|
async fn relate_identifier<I: Identifier>(
|
||||||
|
&self,
|
||||||
|
hash: Self::Bytes,
|
||||||
|
identifier: &I,
|
||||||
|
) -> Result<(), Error>;
|
||||||
|
async fn identifier<I: Identifier + 'static>(&self, hash: Self::Bytes) -> Result<I, Error>;
|
||||||
|
|
||||||
|
async fn relate_variant_identifier<I: Identifier>(
|
||||||
|
&self,
|
||||||
|
hash: Self::Bytes,
|
||||||
|
variant: String,
|
||||||
|
identifier: &I,
|
||||||
|
) -> Result<(), Error>;
|
||||||
|
async fn variant_identifier<I: Identifier + 'static>(
|
||||||
|
&self,
|
||||||
|
hash: Self::Bytes,
|
||||||
|
variant: String,
|
||||||
|
) -> Result<Option<I>, Error>;
|
||||||
|
async fn variants<I: Identifier + 'static>(
|
||||||
|
&self,
|
||||||
|
hash: Self::Bytes,
|
||||||
|
) -> Result<Vec<(String, I)>, Error>;
|
||||||
|
|
||||||
|
async fn relate_motion_identifier<I: Identifier>(
|
||||||
|
&self,
|
||||||
|
hash: Self::Bytes,
|
||||||
|
identifier: &I,
|
||||||
|
) -> Result<(), Error>;
|
||||||
|
async fn motion_identifier<I: Identifier + 'static>(
|
||||||
|
&self,
|
||||||
|
hash: Self::Bytes,
|
||||||
|
) -> Result<Option<I>, Error>;
|
||||||
|
|
||||||
|
async fn cleanup(&self, hash: Self::Bytes) -> Result<(), Error>;
|
||||||
|
}
|
||||||
|
|
||||||
|
#[async_trait::async_trait(?Send)]
|
||||||
|
pub(crate) trait AliasRepo: BaseRepo {
|
||||||
|
async fn create(&self, alias: &Alias) -> Result<Result<(), AlreadyExists>, Error>;
|
||||||
|
|
||||||
|
async fn relate_delete_token(
|
||||||
|
&self,
|
||||||
|
alias: &Alias,
|
||||||
|
delete_token: &DeleteToken,
|
||||||
|
) -> Result<Result<(), AlreadyExists>, Error>;
|
||||||
|
async fn delete_token(&self, alias: &Alias) -> Result<DeleteToken, Error>;
|
||||||
|
|
||||||
|
async fn relate_hash(&self, alias: &Alias, hash: Self::Bytes) -> Result<(), Error>;
|
||||||
|
async fn hash(&self, alias: &Alias) -> Result<Self::Bytes, Error>;
|
||||||
|
|
||||||
|
async fn cleanup(&self, alias: &Alias) -> Result<(), Error>;
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Repo {
|
||||||
|
pub(crate) fn open(config: config::Repo) -> color_eyre::Result<Self> {
|
||||||
|
match config {
|
||||||
|
            config::Repo::Sled(config::Sled {
                mut path,
                cache_capacity,
            }) => {
                path.push("v0.4.0-alpha.1");

                let db = ::sled::Config::new()
                    .cache_capacity(cache_capacity)
                    .path(path)
                    .open()?;

                Ok(Self::Sled(self::sled::SledRepo::new(db)?))
            }
        }
    }

    #[tracing::instrument(skip_all)]
    pub(crate) async fn from_db(&self, path: PathBuf) -> color_eyre::Result<()> {
        if self.has_migrated().await? {
            return Ok(());
        }

        let old = self::old::Old::open(path)?;

        for hash in old.hashes() {
            match self {
                Self::Sled(repo) => {
                    if let Err(e) = migrate_hash(repo, &old, hash).await {
                        tracing::error!("Failed to migrate hash: {}", e);
                    }
                }
            }
        }

        self.mark_migrated().await?;

        Ok(())
    }

    async fn has_migrated(&self) -> color_eyre::Result<bool> {
        match self {
            Self::Sled(repo) => Ok(repo.get(REPO_MIGRATION_O1).await?.is_some()),
        }
    }

    async fn mark_migrated(&self) -> color_eyre::Result<()> {
        match self {
            Self::Sled(repo) => {
                repo.set(REPO_MIGRATION_O1, b"1".to_vec().into()).await?;
            }
        }

        Ok(())
    }
}

const REPO_MIGRATION_O1: &str = "repo-migration-01";
const STORE_MIGRATION_PROGRESS: &str = "store-migration-progress";
const GENERATOR_KEY: &str = "last-path";

async fn migrate_hash<T>(repo: &T, old: &old::Old, hash: ::sled::IVec) -> color_eyre::Result<()>
where
    T: IdentifierRepo + HashRepo + AliasRepo + SettingsRepo,
{
    if HashRepo::create(repo, hash.to_vec().into()).await?.is_err() {
        debug!("Duplicate hash detected");
        return Ok(());
    }

    let main_ident = old.main_identifier(&hash)?.to_vec();

    repo.relate_identifier(hash.to_vec().into(), &main_ident)
        .await?;

    for alias in old.aliases(&hash) {
        if let Ok(Ok(())) = AliasRepo::create(repo, &alias).await {
            let _ = repo.relate_alias(hash.to_vec().into(), &alias).await;
            let _ = repo.relate_hash(&alias, hash.to_vec().into()).await;

            if let Ok(Some(delete_token)) = old.delete_token(&alias) {
                let _ = repo.relate_delete_token(&alias, &delete_token).await;
            }
        }
    }

    if let Ok(Some(identifier)) = old.motion_identifier(&hash) {
        let _ = repo
            .relate_motion_identifier(hash.to_vec().into(), &identifier.to_vec())
            .await;
    }

    for (variant_path, identifier) in old.variants(&hash)? {
        let variant = variant_path.to_string_lossy().to_string();

        let _ = repo
            .relate_variant_identifier(hash.to_vec().into(), variant, &identifier.to_vec())
            .await;
    }

    for (identifier, details) in old.details(&hash)? {
        let _ = repo.relate_details(&identifier.to_vec(), &details).await;
    }

    if let Ok(Some(value)) = old.setting(STORE_MIGRATION_PROGRESS.as_bytes()) {
        repo.set(STORE_MIGRATION_PROGRESS, value.to_vec().into())
            .await?;
    }

    if let Ok(Some(value)) = old.setting(GENERATOR_KEY.as_bytes()) {
        repo.set(GENERATOR_KEY, value.to_vec().into()).await?;
    }

    Ok(())
}
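
The one-shot migration above is idempotent: `from_db` checks the `repo-migration-01` settings key before touching anything, so calling it on every startup is safe. A minimal sketch of how it might be wired in, assuming a `Repo::open(config)` constructor and an `old_db_path` taken from configuration (both names are placeholders, not confirmed by this diff):

    // Hypothetical startup wiring; `config.repo()` and `config.old_db_path()`
    // are assumed accessors, not part of this diff.
    let repo = Repo::open(config.repo())?;
    repo.from_db(config.old_db_path()).await?;

Note that `migrate_hash` only logs and continues on per-hash failures, and `mark_migrated` still runs after the loop, so hashes that failed to migrate are not retried on the next start.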
|
||||||
|
|
||||||
|
impl MaybeUuid {
|
||||||
|
fn from_str(s: &str) -> Self {
|
||||||
|
if let Ok(uuid) = Uuid::parse_str(s) {
|
||||||
|
MaybeUuid::Uuid(uuid)
|
||||||
|
} else {
|
||||||
|
MaybeUuid::Name(s.into())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn as_bytes(&self) -> &[u8] {
|
||||||
|
match self {
|
||||||
|
Self::Uuid(uuid) => &uuid.as_bytes()[..],
|
||||||
|
Self::Name(name) => name.as_bytes(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn split_at_dot(s: &str) -> Option<(&str, &str)> {
|
||||||
|
let index = s.find('.')?;
|
||||||
|
|
||||||
|
Some(s.split_at(index))
|
||||||
|
}
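
Note that `split_at` keeps the dot in the second half, which is why the tests below expect extensions like `".mp4"` rather than `"mp4"`:

    assert_eq!(split_at_dot("blah.mp4"), Some(("blah", ".mp4")));
    assert_eq!(split_at_dot("no-extension"), None);
    // The first dot wins, so multi-dot names keep the remainder intact:
    assert_eq!(split_at_dot("a.tar.gz"), Some(("a", ".tar.gz")));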

impl Alias {
    pub(crate) fn generate(extension: String) -> Self {
        Alias {
            id: MaybeUuid::Uuid(Uuid::new_v4()),
            extension: Some(extension),
        }
    }

    pub(crate) fn from_existing(alias: &str) -> Self {
        if let Some((start, end)) = split_at_dot(alias) {
            Alias {
                id: MaybeUuid::from_str(start),
                extension: Some(end.into()),
            }
        } else {
            Alias {
                id: MaybeUuid::from_str(alias),
                extension: None,
            }
        }
    }

    pub(crate) fn extension(&self) -> Option<&str> {
        self.extension.as_deref()
    }

    fn to_bytes(&self) -> Vec<u8> {
        let mut v = self.id.as_bytes().to_vec();

        if let Some(ext) = self.extension() {
            v.extend_from_slice(ext.as_bytes());
        }

        v
    }

    fn from_slice(bytes: &[u8]) -> Option<Self> {
        if let Ok(s) = std::str::from_utf8(bytes) {
            Some(Self::from_existing(s))
        } else if bytes.len() >= 16 {
            let id = Uuid::from_slice(&bytes[0..16]).expect("Already checked length");

            let extension = if bytes.len() > 16 {
                Some(String::from_utf8_lossy(&bytes[16..]).to_string())
            } else {
                None
            };

            Some(Self {
                id: MaybeUuid::Uuid(id),
                extension,
            })
        } else {
            None
        }
    }
}
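
`to_bytes`/`from_slice` round-trip both representations: a name alias serializes to plain UTF-8, a UUID alias to its 16 raw bytes plus the extension. A quick sketch of the two paths:

    let named = Alias::from_existing("blah.mp4");
    assert_eq!(Alias::from_slice(&named.to_bytes()), Some(named));

    let generated = Alias::generate(String::from(".mp4"));
    // Raw UUID bytes are rarely valid UTF-8, so this usually takes the
    // 16-byte fallback path in from_slice; when the bytes do happen to be
    // valid UTF-8, the string path decodes the same data instead.
    assert!(Alias::from_slice(&generated.to_bytes()).is_some());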
|
||||||
|
|
||||||
|
impl DeleteToken {
|
||||||
|
pub(crate) fn from_existing(existing: &str) -> Self {
|
||||||
|
if let Ok(uuid) = Uuid::parse_str(existing) {
|
||||||
|
DeleteToken {
|
||||||
|
id: MaybeUuid::Uuid(uuid),
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
DeleteToken {
|
||||||
|
id: MaybeUuid::Name(existing.into()),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn generate() -> Self {
|
||||||
|
Self {
|
||||||
|
id: MaybeUuid::Uuid(Uuid::new_v4()),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn to_bytes(&self) -> Vec<u8> {
|
||||||
|
self.id.as_bytes().to_vec()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn from_slice(bytes: &[u8]) -> Option<Self> {
|
||||||
|
if let Ok(s) = std::str::from_utf8(bytes) {
|
||||||
|
Some(DeleteToken::from_existing(s))
|
||||||
|
} else if bytes.len() == 16 {
|
||||||
|
Some(DeleteToken {
|
||||||
|
id: MaybeUuid::Uuid(Uuid::from_slice(bytes).ok()?),
|
||||||
|
})
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}

impl UploadId {
    pub(crate) fn generate() -> Self {
        Self { id: Uuid::new_v4() }
    }

    pub(crate) fn as_bytes(&self) -> &[u8] {
        &self.id.as_bytes()[..]
    }
}

impl std::str::FromStr for UploadId {
    type Err = <Uuid as std::str::FromStr>::Err;

    fn from_str(s: &str) -> Result<Self, Self::Err> {
        Ok(UploadId { id: s.parse()? })
    }
}

impl std::fmt::Display for UploadId {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        std::fmt::Display::fmt(&self.id, f)
    }
}

impl std::fmt::Display for MaybeUuid {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            Self::Uuid(id) => write!(f, "{}", id),
            Self::Name(name) => write!(f, "{}", name),
        }
    }
}

impl std::str::FromStr for DeleteToken {
    type Err = std::convert::Infallible;

    fn from_str(s: &str) -> Result<Self, Self::Err> {
        Ok(DeleteToken::from_existing(s))
    }
}

impl std::fmt::Display for DeleteToken {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}", self.id)
    }
}

impl std::str::FromStr for Alias {
    type Err = std::convert::Infallible;

    fn from_str(s: &str) -> Result<Self, Self::Err> {
        Ok(Alias::from_existing(s))
    }
}

impl std::fmt::Display for Alias {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        if let Some(ext) = self.extension() {
            write!(f, "{}{}", self.id, ext)
        } else {
            write!(f, "{}", self.id)
        }
    }
}

impl Identifier for Vec<u8> {
    fn from_bytes(bytes: Vec<u8>) -> Result<Self, Error>
    where
        Self: Sized,
    {
        Ok(bytes)
    }

    fn to_bytes(&self) -> Result<Vec<u8>, Error> {
        Ok(self.clone())
    }
}
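
Implementing `Identifier` directly for `Vec<u8>` is what lets `migrate_hash` above hand raw bytes (like `main_ident`) to the repo traits without naming a concrete store's identifier type; `from_bytes`/`to_bytes` are just identity and clone:

    let raw: Vec<u8> = vec![1, 2, 3];
    assert_eq!(
        <Vec<u8> as Identifier>::from_bytes(raw.to_bytes().unwrap()).unwrap(),
        raw
    );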

#[cfg(test)]
mod tests {
    use super::{Alias, DeleteToken, MaybeUuid, Uuid};

    #[test]
    fn string_delete_token() {
        let delete_token = DeleteToken::from_existing("blah");

        assert_eq!(
            delete_token,
            DeleteToken {
                id: MaybeUuid::Name(String::from("blah"))
            }
        )
    }

    #[test]
    fn uuid_string_delete_token() {
        let uuid = Uuid::new_v4();

        let delete_token = DeleteToken::from_existing(&uuid.to_string());

        assert_eq!(
            delete_token,
            DeleteToken {
                id: MaybeUuid::Uuid(uuid),
            }
        )
    }

    #[test]
    fn bytes_delete_token() {
        let delete_token = DeleteToken::from_slice(b"blah").unwrap();

        assert_eq!(
            delete_token,
            DeleteToken {
                id: MaybeUuid::Name(String::from("blah"))
            }
        )
    }

    #[test]
    fn uuid_bytes_delete_token() {
        let uuid = Uuid::new_v4();

        let delete_token = DeleteToken::from_slice(&uuid.as_bytes()[..]).unwrap();

        assert_eq!(
            delete_token,
            DeleteToken {
                id: MaybeUuid::Uuid(uuid),
            }
        )
    }

    #[test]
    fn uuid_bytes_string_delete_token() {
        let uuid = Uuid::new_v4();

        let delete_token = DeleteToken::from_slice(uuid.to_string().as_bytes()).unwrap();

        assert_eq!(
            delete_token,
            DeleteToken {
                id: MaybeUuid::Uuid(uuid),
            }
        )
    }

    #[test]
    fn string_alias() {
        let alias = Alias::from_existing("blah");

        assert_eq!(
            alias,
            Alias {
                id: MaybeUuid::Name(String::from("blah")),
                extension: None
            }
        );
    }

    #[test]
    fn string_alias_ext() {
        let alias = Alias::from_existing("blah.mp4");

        assert_eq!(
            alias,
            Alias {
                id: MaybeUuid::Name(String::from("blah")),
                extension: Some(String::from(".mp4")),
            }
        );
    }

    #[test]
    fn uuid_string_alias() {
        let uuid = Uuid::new_v4();

        let alias = Alias::from_existing(&uuid.to_string());

        assert_eq!(
            alias,
            Alias {
                id: MaybeUuid::Uuid(uuid),
                extension: None,
            }
        )
    }

    #[test]
    fn uuid_string_alias_ext() {
        let uuid = Uuid::new_v4();

        let alias_str = format!("{}.mp4", uuid);
        let alias = Alias::from_existing(&alias_str);

        assert_eq!(
            alias,
            Alias {
                id: MaybeUuid::Uuid(uuid),
                extension: Some(String::from(".mp4")),
            }
        )
    }

    #[test]
    fn bytes_alias() {
        let alias = Alias::from_slice(b"blah").unwrap();

        assert_eq!(
            alias,
            Alias {
                id: MaybeUuid::Name(String::from("blah")),
                extension: None
            }
        );
    }

    #[test]
    fn bytes_alias_ext() {
        let alias = Alias::from_slice(b"blah.mp4").unwrap();

        assert_eq!(
            alias,
            Alias {
                id: MaybeUuid::Name(String::from("blah")),
                extension: Some(String::from(".mp4")),
            }
        );
    }

    #[test]
    fn uuid_bytes_alias() {
        let uuid = Uuid::new_v4();

        let alias = Alias::from_slice(&uuid.as_bytes()[..]).unwrap();

        assert_eq!(
            alias,
            Alias {
                id: MaybeUuid::Uuid(uuid),
                extension: None,
            }
        )
    }

    #[test]
    fn uuid_bytes_string_alias() {
        let uuid = Uuid::new_v4();

        let alias = Alias::from_slice(uuid.to_string().as_bytes()).unwrap();

        assert_eq!(
            alias,
            Alias {
                id: MaybeUuid::Uuid(uuid),
                extension: None,
            }
        )
    }

    #[test]
    fn uuid_bytes_alias_ext() {
        let uuid = Uuid::new_v4();

        let mut alias_bytes = uuid.as_bytes().to_vec();
        alias_bytes.extend_from_slice(b".mp4");

        let alias = Alias::from_slice(&alias_bytes).unwrap();

        assert_eq!(
            alias,
            Alias {
                id: MaybeUuid::Uuid(uuid),
                extension: Some(String::from(".mp4")),
            }
        )
    }

    #[test]
    fn uuid_bytes_string_alias_ext() {
        let uuid = Uuid::new_v4();

        let alias_str = format!("{}.mp4", uuid);
        let alias = Alias::from_slice(alias_str.as_bytes()).unwrap();

        assert_eq!(
            alias,
            Alias {
                id: MaybeUuid::Uuid(uuid),
                extension: Some(String::from(".mp4")),
            }
        )
    }
}

184 src/repo/old.rs Normal file

@@ -0,0 +1,184 @@
// TREE STRUCTURE
// - Alias Tree
//   - alias -> hash
//   - alias / id -> u64(id)
//   - alias / delete -> delete token
// - Main Tree
//   - hash -> filename
//   - hash 0 u64(id) -> alias
// - Filename Tree
//   - filename -> hash
// - Details Tree
//   - filename / S::Identifier -> details
// - Identifier Tree
//   - filename -> S::Identifier
//   - filename / variant path -> S::Identifier
//   - filename / motion -> S::Identifier
// - Settings Tree
//   - store-migration-progress -> Path Tree Key
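//
// As a worked example of the layout above (hypothetical values, not from
// this diff): for an upload with hash `hash`, alias "blah.mp4", and delete
// token "tok":
//   alias tree:    "blah.mp4"         -> hash
//                  "blah.mp4/delete"  -> "tok"
//   main tree:     hash               -> "file-on-disk.mp4"
//                  hash 0 u64(id)     -> "blah.mp4"
//   filename tree: "file-on-disk.mp4" -> hash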

use super::{Alias, DeleteToken, Details};
use std::path::PathBuf;

mod migrate;

#[derive(Debug)]
struct OldDbError(&'static str);

impl std::fmt::Display for OldDbError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}", self.0)
    }
}

impl std::error::Error for OldDbError {}

pub(super) struct Old {
    alias_tree: ::sled::Tree,
    filename_tree: ::sled::Tree,
    main_tree: ::sled::Tree,
    details_tree: ::sled::Tree,
    settings_tree: ::sled::Tree,
    identifier_tree: ::sled::Tree,
    _db: ::sled::Db,
}

impl Old {
    pub(super) fn open(path: PathBuf) -> color_eyre::Result<Self> {
        let db = migrate::LatestDb::exists(path).migrate()?;

        Ok(Self {
            alias_tree: db.open_tree("alias")?,
            filename_tree: db.open_tree("filename")?,
            main_tree: db.open_tree("main")?,
            details_tree: db.open_tree("details")?,
            settings_tree: db.open_tree("settings")?,
            identifier_tree: db.open_tree("path")?,
            _db: db,
        })
    }

    pub(super) fn setting(&self, key: &[u8]) -> color_eyre::Result<Option<sled::IVec>> {
        Ok(self.settings_tree.get(key)?)
    }

    pub(super) fn hashes(&self) -> impl std::iter::Iterator<Item = sled::IVec> {
        self.filename_tree
            .iter()
            .values()
            .filter_map(|res| res.ok())
    }

    pub(super) fn details(
        &self,
        hash: &sled::IVec,
    ) -> color_eyre::Result<Vec<(sled::IVec, Details)>> {
        let filename = self
            .main_tree
            .get(hash)?
            .ok_or(OldDbError("Missing filename"))?;

        let filename = String::from_utf8_lossy(&filename);

        Ok(self
            .identifier_tree
            .scan_prefix(filename.as_bytes())
            .values()
            .filter_map(Result::ok)
            .filter_map(|identifier| {
                let mut key = filename.as_bytes().to_vec();
                key.push(b'/');
                key.extend_from_slice(&identifier);

                let details = self.details_tree.get(key).ok()??;
                let details = serde_json::from_slice(&details).ok()?;

                Some((identifier, details))
            })
            .collect())
    }

    pub(super) fn main_identifier(&self, hash: &sled::IVec) -> color_eyre::Result<sled::IVec> {
        let filename = self
            .main_tree
            .get(hash)?
            .ok_or(OldDbError("Missing filename"))?;

        Ok(self
            .identifier_tree
            .get(filename)?
            .ok_or(OldDbError("Missing identifier"))?)
    }

    pub(super) fn variants(
        &self,
        hash: &sled::IVec,
    ) -> color_eyre::Result<Vec<(PathBuf, sled::IVec)>> {
        let filename = self
            .main_tree
            .get(hash)?
            .ok_or(OldDbError("Missing filename"))?;

        let filename_string = String::from_utf8_lossy(&filename);

        let variant_prefix = format!("{}/", filename_string);

        Ok(self
            .identifier_tree
            .scan_prefix(&variant_prefix)
            .filter_map(|res| res.ok())
            .filter_map(|(key, value)| {
                let variant_path_bytes = &key[variant_prefix.as_bytes().len()..];
                if variant_path_bytes == b"motion" {
                    return None;
                }

                let path = String::from_utf8(variant_path_bytes.to_vec()).ok()?;
                let mut path = PathBuf::from(path);
                let extension = path.extension()?.to_str()?.to_string();
                path.pop();
                path.push(extension);

                Some((path, value))
            })
            .collect())
    }

    pub(super) fn motion_identifier(
        &self,
        hash: &sled::IVec,
    ) -> color_eyre::Result<Option<sled::IVec>> {
        let filename = self
            .main_tree
            .get(hash)?
            .ok_or(OldDbError("Missing filename"))?;

        let filename_string = String::from_utf8_lossy(&filename);

        let motion_key = format!("{}/motion", filename_string);

        Ok(self.filename_tree.get(motion_key)?)
    }

    pub(super) fn aliases(&self, hash: &sled::IVec) -> Vec<Alias> {
        let mut key = hash.to_vec();
        key.push(0);

        self.main_tree
            .scan_prefix(key)
            .values()
            .filter_map(|res| res.ok())
            .filter_map(|alias| Alias::from_slice(&alias))
            .collect()
    }

    pub(super) fn delete_token(&self, alias: &Alias) -> color_eyre::Result<Option<DeleteToken>> {
        let key = format!("{}/delete", alias);

        if let Some(ivec) = self.alias_tree.get(key)? {
            return Ok(DeleteToken::from_slice(&ivec));
        }

        Ok(None)
    }
}

97 src/repo/old/migrate.rs Normal file

@@ -0,0 +1,97 @@
use crate::Error;
use std::path::PathBuf;

mod s034;

type SledIter = Box<dyn Iterator<Item = Result<(Vec<u8>, Vec<u8>), Error>>>;

trait SledDb {
    type SledTree: SledTree;

    fn open_tree(&self, name: &str) -> Result<Self::SledTree, Error>;

    fn self_tree(&self) -> &Self::SledTree;
}

impl<T> SledDb for &T
where
    T: SledDb,
{
    type SledTree = T::SledTree;

    fn open_tree(&self, name: &str) -> Result<Self::SledTree, Error> {
        (*self).open_tree(name)
    }

    fn self_tree(&self) -> &Self::SledTree {
        (*self).self_tree()
    }
}

trait SledTree {
    fn get<K>(&self, key: K) -> Result<Option<Vec<u8>>, Error>
    where
        K: AsRef<[u8]>;

    fn insert<K, V>(&self, key: K, value: V) -> Result<(), Error>
    where
        K: AsRef<[u8]>,
        V: AsRef<[u8]>;

    fn iter(&self) -> SledIter;

    fn range<K, R>(&self, range: R) -> SledIter
    where
        K: AsRef<[u8]>,
        R: std::ops::RangeBounds<K>;

    fn flush(&self) -> Result<(), Error>;
}

pub(crate) struct LatestDb {
    root_dir: PathBuf,
    version: DbVersion,
}

impl LatestDb {
    pub(crate) fn exists(root_dir: PathBuf) -> Self {
        let version = DbVersion::exists(root_dir.clone());

        LatestDb { root_dir, version }
    }

    pub(crate) fn migrate(self) -> Result<sled::Db, Error> {
        let LatestDb { root_dir, version } = self;

        // catch_unwind turns a panicking migration attempt into a retry;
        // the loop only exits when an attempt completes without panicking.
        loop {
            let root_dir2 = root_dir.clone();
            let res = std::panic::catch_unwind(move || version.migrate(root_dir2));

            if let Ok(res) = res {
                return res;
            }
        }
    }
}

#[derive(Clone, Copy)]
enum DbVersion {
    Sled034,
    Fresh,
}

impl DbVersion {
    fn exists(root: PathBuf) -> Self {
        if s034::exists(root.clone()) && !s034::migrating(root) {
            return DbVersion::Sled034;
        }

        DbVersion::Fresh
    }

    fn migrate(self, root: PathBuf) -> Result<sled::Db, Error> {
        match self {
            DbVersion::Sled034 | DbVersion::Fresh => s034::open(root),
        }
    }
}

@@ -1,6 +1,6 @@
 use crate::{
-    migrate::{SledDb, SledIter, SledTree},
-    UploadError,
+    error::Error,
+    repo::old::migrate::{SledDb, SledIter, SledTree},
 };
 use sled as sled034;
 use std::path::PathBuf;
@@ -14,8 +14,8 @@ pub(crate) fn exists(mut base: PathBuf) -> bool {
     std::fs::metadata(base).is_ok()
 }
 
-pub(crate) fn migrating(base: PathBuf, cache_capacity: u64) -> bool {
-    if let Ok(db) = open(base, cache_capacity) {
+pub(crate) fn migrating(base: PathBuf) -> bool {
+    if let Ok(db) = open(base) {
         if let Ok(tree) = db.open_tree("migrate") {
             if let Ok(Some(_)) = tree.get("done") {
                 return false;
@@ -26,12 +26,12 @@ pub(crate) fn migrating(base: PathBuf, cache_capacity: u64) -> bool {
     true
 }
 
-pub(crate) fn open(mut base: PathBuf, cache_capacity: u64) -> Result<sled034::Db, UploadError> {
+pub(crate) fn open(mut base: PathBuf) -> Result<sled034::Db, Error> {
     base.push("sled");
     base.push(SLED_034);
 
     let db = sled034::Config::default()
-        .cache_capacity(cache_capacity)
+        .cache_capacity(1024 * 1024 * 64)
         .path(base)
         .open()?;
 
@@ -41,7 +41,7 @@ pub(crate) fn open(mut base: PathBuf, cache_capacity: u64) -> Result<sled034::Db
 impl SledDb for sled034::Db {
     type SledTree = sled034::Tree;
 
-    fn open_tree(&self, name: &str) -> Result<Self::SledTree, UploadError> {
+    fn open_tree(&self, name: &str) -> Result<Self::SledTree, Error> {
         Ok(sled034::Db::open_tree(self, name)?)
     }
 
@@ -51,14 +51,14 @@ impl SledDb for sled034::Db {
 }
 
 impl SledTree for sled034::Tree {
-    fn get<K>(&self, key: K) -> Result<Option<Vec<u8>>, UploadError>
+    fn get<K>(&self, key: K) -> Result<Option<Vec<u8>>, Error>
     where
         K: AsRef<[u8]>,
     {
         Ok(sled034::Tree::get(self, key)?.map(|v| Vec::from(v.as_ref())))
     }
 
-    fn insert<K, V>(&self, key: K, value: V) -> Result<(), UploadError>
+    fn insert<K, V>(&self, key: K, value: V) -> Result<(), Error>
     where
         K: AsRef<[u8]>,
         V: AsRef<[u8]>,
@@ -69,7 +69,7 @@ impl SledTree for sled034::Tree {
     fn iter(&self) -> SledIter {
         Box::new(sled034::Tree::iter(self).map(|res| {
             res.map(|(k, v)| (k.as_ref().to_vec(), v.as_ref().to_vec()))
-                .map_err(UploadError::from)
+                .map_err(Error::from)
         }))
     }
 
@@ -80,13 +80,11 @@ impl SledTree for sled034::Tree {
     {
         Box::new(sled034::Tree::range(self, range).map(|res| {
             res.map(|(k, v)| (k.as_ref().to_vec(), v.as_ref().to_vec()))
-                .map_err(UploadError::from)
+                .map_err(Error::from)
         }))
     }
 
-    fn flush(&self) -> Result<(), UploadError> {
-        sled034::Tree::flush(self)
-            .map(|_| ())
-            .map_err(UploadError::from)
+    fn flush(&self) -> Result<(), Error> {
+        sled034::Tree::flush(self).map(|_| ()).map_err(Error::from)
     }
 }

701 src/repo/sled.rs Normal file

@@ -0,0 +1,701 @@
use crate::{
    error::{Error, UploadError},
    repo::{
        Alias, AliasRepo, AlreadyExists, BaseRepo, DeleteToken, Details, FullRepo, HashRepo,
        Identifier, IdentifierRepo, QueueRepo, SettingsRepo, UploadId, UploadRepo, UploadResult,
    },
    serde_str::Serde,
    stream::from_iterator,
};
use futures_util::Stream;
use sled::{Db, IVec, Tree};
use std::{
    collections::HashMap,
    pin::Pin,
    sync::{Arc, RwLock},
};
use tokio::sync::Notify;

macro_rules! b {
    ($self:ident.$ident:ident, $expr:expr) => {{
        let $ident = $self.$ident.clone();

        actix_rt::task::spawn_blocking(move || $expr)
            .await
            .map_err(SledError::from)??
    }};
}
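
Every sled call in this file goes through `b!`, which clones the named tree and runs the expression on a blocking thread so sled's synchronous I/O never stalls the async executor. For example, `b!(self.settings, settings.insert(key, value))` expands to roughly:

    // Rough expansion of b!(self.settings, settings.insert(key, value)):
    let settings = self.settings.clone();
    actix_rt::task::spawn_blocking(move || settings.insert(key, value))
        .await
        .map_err(SledError::from)?? // outer ?: join/panic error, inner ?: sled error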

#[derive(Debug, thiserror::Error)]
pub(crate) enum SledError {
    #[error("Error in database")]
    Sled(#[from] sled::Error),

    #[error("Invalid details json")]
    Details(#[from] serde_json::Error),

    #[error("Required field was not present")]
    Missing,

    #[error("Operation panicked")]
    Panic,
}

#[derive(Clone)]
pub(crate) struct SledRepo {
    settings: Tree,
    identifier_details: Tree,
    hashes: Tree,
    hash_aliases: Tree,
    hash_identifiers: Tree,
    hash_variant_identifiers: Tree,
    hash_motion_identifiers: Tree,
    aliases: Tree,
    alias_hashes: Tree,
    alias_delete_tokens: Tree,
    queue: Tree,
    in_progress_queue: Tree,
    queue_notifier: Arc<RwLock<HashMap<&'static str, Arc<Notify>>>>,
    uploads: Tree,
    db: Db,
}

impl SledRepo {
    pub(crate) fn new(db: Db) -> Result<Self, SledError> {
        Ok(SledRepo {
            settings: db.open_tree("pict-rs-settings-tree")?,
            identifier_details: db.open_tree("pict-rs-identifier-details-tree")?,
            hashes: db.open_tree("pict-rs-hashes-tree")?,
            hash_aliases: db.open_tree("pict-rs-hash-aliases-tree")?,
            hash_identifiers: db.open_tree("pict-rs-hash-identifiers-tree")?,
            hash_variant_identifiers: db.open_tree("pict-rs-hash-variant-identifiers-tree")?,
            hash_motion_identifiers: db.open_tree("pict-rs-hash-motion-identifiers-tree")?,
            aliases: db.open_tree("pict-rs-aliases-tree")?,
            alias_hashes: db.open_tree("pict-rs-alias-hashes-tree")?,
            alias_delete_tokens: db.open_tree("pict-rs-alias-delete-tokens-tree")?,
            queue: db.open_tree("pict-rs-queue-tree")?,
            in_progress_queue: db.open_tree("pict-rs-in-progress-queue-tree")?,
            queue_notifier: Arc::new(RwLock::new(HashMap::new())),
            uploads: db.open_tree("pict-rs-uploads-tree")?,
            db,
        })
    }
}

impl BaseRepo for SledRepo {
    type Bytes = IVec;
}

impl FullRepo for SledRepo {}

#[derive(serde::Deserialize, serde::Serialize)]
enum InnerUploadResult {
    Success {
        alias: Serde<Alias>,
        token: Serde<DeleteToken>,
    },
    Failure {
        message: String,
    },
}

impl From<UploadResult> for InnerUploadResult {
    fn from(u: UploadResult) -> Self {
        match u {
            UploadResult::Success { alias, token } => InnerUploadResult::Success {
                alias: Serde::new(alias),
                token: Serde::new(token),
            },
            UploadResult::Failure { message } => InnerUploadResult::Failure { message },
        }
    }
}

impl From<InnerUploadResult> for UploadResult {
    fn from(i: InnerUploadResult) -> Self {
        match i {
            InnerUploadResult::Success { alias, token } => UploadResult::Success {
                alias: Serde::into_inner(alias),
                token: Serde::into_inner(token),
            },
            InnerUploadResult::Failure { message } => UploadResult::Failure { message },
        }
    }
}

#[async_trait::async_trait(?Send)]
impl UploadRepo for SledRepo {
    async fn create(&self, upload_id: UploadId) -> Result<(), Error> {
        b!(self.uploads, uploads.insert(upload_id.as_bytes(), b"1"));
        Ok(())
    }

    async fn wait(&self, upload_id: UploadId) -> Result<UploadResult, Error> {
        let mut subscriber = self.uploads.watch_prefix(upload_id.as_bytes());

        let bytes = upload_id.as_bytes().to_vec();
        let opt = b!(self.uploads, uploads.get(bytes));

        if let Some(bytes) = opt {
            if bytes != b"1" {
                let result: InnerUploadResult = serde_json::from_slice(&bytes)?;
                return Ok(result.into());
            }
        } else {
            return Err(UploadError::NoFiles.into());
        }

        while let Some(event) = (&mut subscriber).await {
            match event {
                sled::Event::Remove { .. } => {
                    return Err(UploadError::NoFiles.into());
                }
                sled::Event::Insert { value, .. } => {
                    if value != b"1" {
                        let result: InnerUploadResult = serde_json::from_slice(&value)?;
                        return Ok(result.into());
                    }
                }
            }
        }

        Err(UploadError::Canceled.into())
    }

    async fn claim(&self, upload_id: UploadId) -> Result<(), Error> {
        b!(self.uploads, uploads.remove(upload_id.as_bytes()));
        Ok(())
    }

    async fn complete(&self, upload_id: UploadId, result: UploadResult) -> Result<(), Error> {
        let result: InnerUploadResult = result.into();
        let result = serde_json::to_vec(&result)?;

        b!(self.uploads, uploads.insert(upload_id.as_bytes(), result));

        Ok(())
    }
}
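
The uploads tree doubles as a one-slot channel: `create` writes the sentinel `b"1"`, `complete` overwrites it with a serialized `InnerUploadResult`, and `wait` first polls, then subscribes via `watch_prefix` so it wakes on the overwrite. A hedged sketch of the intended call order (the producer/consumer split here is illustrative, not spelled out in this diff):

    // Producer task: process the upload, then publish the result.
    repo.create(upload_id).await?;
    // ... ingest the file ...
    repo.complete(upload_id, UploadResult::Success { alias, token }).await?;

    // Consumer task: block until the result lands, then free the slot.
    let result = repo.wait(upload_id).await?;
    repo.claim(upload_id).await?;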

#[async_trait::async_trait(?Send)]
impl QueueRepo for SledRepo {
    #[tracing::instrument(skip_all, fields(worker_id = %String::from_utf8_lossy(&worker_prefix)))]
    async fn requeue_in_progress(&self, worker_prefix: Vec<u8>) -> Result<(), Error> {
        let vec: Vec<(String, IVec)> = b!(self.in_progress_queue, {
            let vec = in_progress_queue
                .scan_prefix(worker_prefix)
                .values()
                .filter_map(Result::ok)
                .filter_map(|ivec| {
                    // In-progress values are stored as "queue-name \0 job";
                    // find the separator and split the two halves apart.
                    let index = ivec.as_ref().iter().enumerate().find_map(|(index, byte)| {
                        if *byte == 0 {
                            Some(index)
                        } else {
                            None
                        }
                    })?;

                    let (queue, job) = ivec.split_at(index);
                    if queue.is_empty() || job.len() <= 1 {
                        return None;
                    }
                    let job = &job[1..];

                    Some((String::from_utf8_lossy(queue).to_string(), IVec::from(job)))
                })
                .collect::<Vec<(String, IVec)>>();

            Ok(vec) as Result<_, Error>
        });

        let db = self.db.clone();
        b!(self.queue, {
            for (queue_name, job) in vec {
                let id = db.generate_id()?;
                let mut key = queue_name.as_bytes().to_vec();
                key.extend(id.to_be_bytes());

                queue.insert(key, job)?;
            }

            Ok(()) as Result<(), Error>
        });

        Ok(())
    }

    #[tracing::instrument(skip(self, job), fields(worker_id = %String::from_utf8_lossy(&job)))]
    async fn push(&self, queue_name: &'static str, job: Self::Bytes) -> Result<(), Error> {
        let id = self.db.generate_id()?;
        let mut key = queue_name.as_bytes().to_vec();
        key.extend(id.to_be_bytes());

        b!(self.queue, queue.insert(key, job));

        if let Some(notifier) = self.queue_notifier.read().unwrap().get(&queue_name) {
            notifier.notify_one();
            return Ok(());
        }

        self.queue_notifier
            .write()
            .unwrap()
            .entry(queue_name)
            .or_insert_with(|| Arc::new(Notify::new()))
            .notify_one();

        Ok(())
    }

    #[tracing::instrument(skip(self, worker_id), fields(worker_id = %String::from_utf8_lossy(&worker_id)))]
    async fn pop(
        &self,
        queue_name: &'static str,
        worker_id: Vec<u8>,
    ) -> Result<Self::Bytes, Error> {
        loop {
            let in_progress_queue = self.in_progress_queue.clone();

            let worker_id = worker_id.clone();
            let job = b!(self.queue, {
                in_progress_queue.remove(&worker_id)?;

                while let Some((key, job)) = queue
                    .scan_prefix(queue_name.as_bytes())
                    .find_map(Result::ok)
                {
                    let mut in_progress_value = queue_name.as_bytes().to_vec();
                    in_progress_value.push(0);
                    in_progress_value.extend_from_slice(&job);

                    in_progress_queue.insert(&worker_id, in_progress_value)?;

                    if queue.remove(key)?.is_some() {
                        return Ok(Some(job));
                    }

                    in_progress_queue.remove(&worker_id)?;
                }

                Ok(None) as Result<_, SledError>
            });

            if let Some(job) = job {
                return Ok(job);
            }

            let opt = self
                .queue_notifier
                .read()
                .unwrap()
                .get(&queue_name)
                .map(Arc::clone);

            let notify = if let Some(notify) = opt {
                notify
            } else {
                let mut guard = self.queue_notifier.write().unwrap();
                let entry = guard
                    .entry(queue_name)
                    .or_insert_with(|| Arc::new(Notify::new()));
                Arc::clone(entry)
            };

            notify.notified().await
        }
    }
}
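
Jobs survive crashes because `pop` parks the job in `in_progress_queue` under the worker's key before removing it from `queue`; on restart, `requeue_in_progress` pushes anything still parked there back onto the main queue. A hedged sketch of a worker loop built on this API (the "process" queue name and the processing step are illustrative assumptions):

    // Hypothetical worker, not part of this diff.
    repo.requeue_in_progress(worker_id.clone()).await?;
    loop {
        let job = repo.pop("process", worker_id.clone()).await?;
        // ... run the job; the next pop() clears this worker's
        // in-progress entry once it comes back for more work ...
    }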

#[async_trait::async_trait(?Send)]
impl SettingsRepo for SledRepo {
    #[tracing::instrument(skip(value))]
    async fn set(&self, key: &'static str, value: Self::Bytes) -> Result<(), Error> {
        b!(self.settings, settings.insert(key, value));

        Ok(())
    }

    #[tracing::instrument]
    async fn get(&self, key: &'static str) -> Result<Option<Self::Bytes>, Error> {
        let opt = b!(self.settings, settings.get(key));

        Ok(opt)
    }

    #[tracing::instrument]
    async fn remove(&self, key: &'static str) -> Result<(), Error> {
        b!(self.settings, settings.remove(key));

        Ok(())
    }
}

fn variant_key(hash: &[u8], variant: &str) -> Vec<u8> {
    let mut bytes = hash.to_vec();
    bytes.push(b'/');
    bytes.extend_from_slice(variant.as_bytes());
    bytes
}

fn variant_from_key(hash: &[u8], key: &[u8]) -> Option<String> {
    let prefix_len = hash.len() + 1;
    let variant_bytes = key.get(prefix_len..)?.to_vec();
    String::from_utf8(variant_bytes).ok()
}
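
`variant_key` and `variant_from_key` are inverses around the `hash + b'/' + variant` encoding, which is what lets the `scan_prefix(&hash)` in `variants` below recover each variant name:

    let hash = b"abc";
    let key = variant_key(hash, "thumb.webp");
    assert_eq!(key, b"abc/thumb.webp".to_vec());
    assert_eq!(variant_from_key(hash, &key), Some(String::from("thumb.webp")));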
|
||||||
|
|
||||||
|
#[async_trait::async_trait(?Send)]
|
||||||
|
impl IdentifierRepo for SledRepo {
|
||||||
|
#[tracing::instrument]
|
||||||
|
async fn relate_details<I: Identifier>(
|
||||||
|
&self,
|
||||||
|
identifier: &I,
|
||||||
|
details: &Details,
|
||||||
|
) -> Result<(), Error> {
|
||||||
|
let key = identifier.to_bytes()?;
|
||||||
|
let details = serde_json::to_vec(&details)?;
|
||||||
|
|
||||||
|
b!(
|
||||||
|
self.identifier_details,
|
||||||
|
identifier_details.insert(key, details)
|
||||||
|
);
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tracing::instrument]
|
||||||
|
async fn details<I: Identifier>(&self, identifier: &I) -> Result<Option<Details>, Error> {
|
||||||
|
let key = identifier.to_bytes()?;
|
||||||
|
|
||||||
|
let opt = b!(self.identifier_details, identifier_details.get(key));
|
||||||
|
|
||||||
|
if let Some(ivec) = opt {
|
||||||
|
Ok(Some(serde_json::from_slice(&ivec)?))
|
||||||
|
} else {
|
||||||
|
Ok(None)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tracing::instrument]
|
||||||
|
async fn cleanup<I: Identifier>(&self, identifier: &I) -> Result<(), Error> {
|
||||||
|
let key = identifier.to_bytes()?;
|
||||||
|
|
||||||
|
b!(self.identifier_details, identifier_details.remove(key));
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
type StreamItem = Result<IVec, Error>;
|
||||||
|
type LocalBoxStream<'a, T> = Pin<Box<dyn Stream<Item = T> + 'a>>;
|
||||||
|
|
||||||
|
fn hash_alias_key(hash: &IVec, alias: &Alias) -> Vec<u8> {
|
||||||
|
let mut v = hash.to_vec();
|
||||||
|
v.append(&mut alias.to_bytes());
|
||||||
|
v
|
||||||
|
}
|
||||||
|
|
||||||
|
#[async_trait::async_trait(?Send)]
|
||||||
|
impl HashRepo for SledRepo {
|
||||||
|
type Stream = LocalBoxStream<'static, StreamItem>;
|
||||||
|
|
||||||
|
async fn hashes(&self) -> Self::Stream {
|
||||||
|
let iter = self
|
||||||
|
.hashes
|
||||||
|
.iter()
|
||||||
|
.keys()
|
||||||
|
.map(|res| res.map_err(Error::from));
|
||||||
|
|
||||||
|
Box::pin(from_iterator(iter, 8))
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tracing::instrument]
|
||||||
|
async fn create(&self, hash: Self::Bytes) -> Result<Result<(), AlreadyExists>, Error> {
|
||||||
|
let res = b!(self.hashes, {
|
||||||
|
let hash2 = hash.clone();
|
||||||
|
hashes.compare_and_swap(hash, None as Option<Self::Bytes>, Some(hash2))
|
||||||
|
});
|
||||||
|
|
||||||
|
Ok(res.map_err(|_| AlreadyExists))
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tracing::instrument]
|
||||||
|
async fn relate_alias(&self, hash: Self::Bytes, alias: &Alias) -> Result<(), Error> {
|
||||||
|
let key = hash_alias_key(&hash, alias);
|
||||||
|
let value = alias.to_bytes();
|
||||||
|
|
||||||
|
b!(self.hash_aliases, hash_aliases.insert(key, value));
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tracing::instrument]
|
||||||
|
async fn remove_alias(&self, hash: Self::Bytes, alias: &Alias) -> Result<(), Error> {
|
||||||
|
let key = hash_alias_key(&hash, alias);
|
||||||
|
|
||||||
|
b!(self.hash_aliases, hash_aliases.remove(key));
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tracing::instrument]
|
||||||
|
async fn aliases(&self, hash: Self::Bytes) -> Result<Vec<Alias>, Error> {
|
||||||
|
let v = b!(self.hash_aliases, {
|
||||||
|
Ok(hash_aliases
|
||||||
|
.scan_prefix(hash)
|
||||||
|
.values()
|
||||||
|
.filter_map(Result::ok)
|
||||||
|
.filter_map(|ivec| Alias::from_slice(&ivec))
|
||||||
|
.collect::<Vec<_>>()) as Result<_, sled::Error>
|
||||||
|
});
|
||||||
|
|
||||||
|
Ok(v)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tracing::instrument]
|
||||||
|
async fn relate_identifier<I: Identifier>(
|
||||||
|
&self,
|
||||||
|
hash: Self::Bytes,
|
||||||
|
identifier: &I,
|
||||||
|
) -> Result<(), Error> {
|
||||||
|
let bytes = identifier.to_bytes()?;
|
||||||
|
|
||||||
|
b!(self.hash_identifiers, hash_identifiers.insert(hash, bytes));
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tracing::instrument]
|
||||||
|
async fn identifier<I: Identifier + 'static>(&self, hash: Self::Bytes) -> Result<I, Error> {
|
||||||
|
let opt = b!(self.hash_identifiers, hash_identifiers.get(hash));
|
||||||
|
|
||||||
|
opt.ok_or(SledError::Missing)
|
||||||
|
.map_err(Error::from)
|
||||||
|
.and_then(|ivec| I::from_bytes(ivec.to_vec()))
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tracing::instrument]
|
||||||
|
async fn relate_variant_identifier<I: Identifier>(
|
||||||
|
&self,
|
||||||
|
hash: Self::Bytes,
|
||||||
|
variant: String,
|
||||||
|
identifier: &I,
|
||||||
|
) -> Result<(), Error> {
|
||||||
|
let key = variant_key(&hash, &variant);
|
||||||
|
let value = identifier.to_bytes()?;
|
||||||
|
|
||||||
|
b!(
|
||||||
|
self.hash_variant_identifiers,
|
||||||
|
hash_variant_identifiers.insert(key, value)
|
||||||
|
);
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tracing::instrument]
|
||||||
|
async fn variant_identifier<I: Identifier + 'static>(
|
||||||
|
&self,
|
||||||
|
hash: Self::Bytes,
|
||||||
|
variant: String,
|
||||||
|
) -> Result<Option<I>, Error> {
|
||||||
|
let key = variant_key(&hash, &variant);
|
||||||
|
|
||||||
|
let opt = b!(
|
||||||
|
self.hash_variant_identifiers,
|
||||||
|
hash_variant_identifiers.get(key)
|
||||||
|
);
|
||||||
|
|
||||||
|
if let Some(ivec) = opt {
|
||||||
|
Ok(Some(I::from_bytes(ivec.to_vec())?))
|
||||||
|
} else {
|
||||||
|
Ok(None)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tracing::instrument]
|
||||||
|
async fn variants<I: Identifier + 'static>(
|
||||||
|
&self,
|
||||||
|
hash: Self::Bytes,
|
||||||
|
) -> Result<Vec<(String, I)>, Error> {
|
||||||
|
let vec = b!(
|
||||||
|
self.hash_variant_identifiers,
|
||||||
|
Ok(hash_variant_identifiers
|
||||||
|
.scan_prefix(&hash)
|
||||||
|
.filter_map(|res| res.ok())
|
||||||
|
.filter_map(|(key, ivec)| {
|
||||||
|
let identifier = I::from_bytes(ivec.to_vec()).ok()?;
|
||||||
|
let variant = variant_from_key(&hash, &key)?;
|
||||||
|
|
||||||
|
Some((variant, identifier))
|
||||||
|
})
|
||||||
|
.collect::<Vec<_>>()) as Result<Vec<_>, sled::Error>
|
||||||
|
);
|
||||||
|
|
||||||
|
Ok(vec)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tracing::instrument]
|
||||||
|
async fn relate_motion_identifier<I: Identifier>(
|
||||||
|
&self,
|
||||||
|
hash: Self::Bytes,
|
||||||
|
identifier: &I,
|
||||||
|
) -> Result<(), Error> {
|
||||||
|
let bytes = identifier.to_bytes()?;
|
||||||
|
|
||||||
|
b!(
|
||||||
|
self.hash_motion_identifiers,
|
||||||
|
hash_motion_identifiers.insert(hash, bytes)
|
||||||
|
);
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tracing::instrument]
|
||||||
|
async fn motion_identifier<I: Identifier + 'static>(
|
||||||
|
&self,
|
||||||
|
hash: Self::Bytes,
|
||||||
|
) -> Result<Option<I>, Error> {
|
||||||
|
let opt = b!(
|
||||||
|
self.hash_motion_identifiers,
|
||||||
|
hash_motion_identifiers.get(hash)
|
||||||
|
);
|
||||||
|
|
||||||
|
if let Some(ivec) = opt {
|
||||||
|
Ok(Some(I::from_bytes(ivec.to_vec())?))
|
||||||
|
} else {
|
||||||
|
Ok(None)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tracing::instrument]
|
||||||
|
async fn cleanup(&self, hash: Self::Bytes) -> Result<(), Error> {
|
||||||
|
let hash2 = hash.clone();
|
||||||
|
b!(self.hashes, hashes.remove(hash2));
|
||||||
|
|
||||||
|
let hash2 = hash.clone();
|
||||||
|
b!(self.hash_identifiers, hash_identifiers.remove(hash2));
|
||||||
|
|
||||||
|
let hash2 = hash.clone();
|
||||||
|
b!(
|
||||||
|
self.hash_motion_identifiers,
|
||||||
|
hash_motion_identifiers.remove(hash2)
|
||||||
|
);
|
||||||
|
|
||||||
|
let aliases = self.aliases(hash.clone()).await?;
|
||||||
|
let hash2 = hash.clone();
|
||||||
|
b!(self.hash_aliases, {
|
||||||
|
for alias in aliases {
|
||||||
|
let key = hash_alias_key(&hash2, &alias);
|
||||||
|
|
||||||
|
let _ = hash_aliases.remove(key);
|
||||||
|
}
|
||||||
|
Ok(()) as Result<(), sled::Error>
|
||||||
|
});
|
||||||
|
|
||||||
|
let variant_keys = b!(self.hash_variant_identifiers, {
|
||||||
|
let v = hash_variant_identifiers
|
||||||
|
.scan_prefix(hash)
|
||||||
|
.keys()
|
||||||
|
.filter_map(Result::ok)
|
||||||
|
.collect::<Vec<_>>();
|
||||||
|
|
||||||
|
Ok(v) as Result<Vec<_>, sled::Error>
|
||||||
|
});
|
||||||
|
b!(self.hash_variant_identifiers, {
|
||||||
|
for key in variant_keys {
|
||||||
|
let _ = hash_variant_identifiers.remove(key);
|
||||||
|
}
|
||||||
|
Ok(()) as Result<(), sled::Error>
|
||||||
|
});
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[async_trait::async_trait(?Send)]
|
||||||
|
impl AliasRepo for SledRepo {
|
||||||
|
#[tracing::instrument]
|
||||||
|
async fn create(&self, alias: &Alias) -> Result<Result<(), AlreadyExists>, Error> {
|
||||||
|
let bytes = alias.to_bytes();
|
||||||
|
let bytes2 = bytes.clone();
|
||||||
|
|
||||||
|
let res = b!(
|
||||||
|
self.aliases,
|
||||||
|
aliases.compare_and_swap(bytes, None as Option<Self::Bytes>, Some(bytes2))
|
||||||
|
);
|
||||||
|
|
||||||
|
Ok(res.map_err(|_| AlreadyExists))
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tracing::instrument]
|
||||||
|
async fn relate_delete_token(
|
||||||
|
&self,
|
||||||
|
alias: &Alias,
|
||||||
|
delete_token: &DeleteToken,
|
||||||
|
) -> Result<Result<(), AlreadyExists>, Error> {
|
||||||
|
let key = alias.to_bytes();
|
||||||
|
let token = delete_token.to_bytes();
|
||||||
|
|
||||||
|
let res = b!(
|
||||||
|
self.alias_delete_tokens,
|
||||||
|
alias_delete_tokens.compare_and_swap(key, None as Option<Self::Bytes>, Some(token))
|
||||||
|
);
|
||||||
|
|
||||||
|
Ok(res.map_err(|_| AlreadyExists))
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tracing::instrument]
|
||||||
|
async fn delete_token(&self, alias: &Alias) -> Result<DeleteToken, Error> {
|
||||||
|
let key = alias.to_bytes();
|
||||||
|
|
||||||
|
let opt = b!(self.alias_delete_tokens, alias_delete_tokens.get(key));
|
||||||
|
|
||||||
|
opt.and_then(|ivec| DeleteToken::from_slice(&ivec))
|
||||||
|
.ok_or(SledError::Missing)
|
||||||
|
.map_err(Error::from)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tracing::instrument]
|
||||||
|
async fn relate_hash(&self, alias: &Alias, hash: Self::Bytes) -> Result<(), Error> {
|
||||||
|
let key = alias.to_bytes();
|
||||||
|
|
||||||
|
b!(self.alias_hashes, alias_hashes.insert(key, hash));
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tracing::instrument]
|
||||||
|
async fn hash(&self, alias: &Alias) -> Result<Self::Bytes, Error> {
|
||||||
|
let key = alias.to_bytes();
|
||||||
|
|
||||||
|
let opt = b!(self.alias_hashes, alias_hashes.get(key));
|
||||||
|
|
||||||
|
opt.ok_or(SledError::Missing).map_err(Error::from)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tracing::instrument]
|
||||||
|
async fn cleanup(&self, alias: &Alias) -> Result<(), Error> {
|
||||||
|
let key = alias.to_bytes();
|
||||||
|
|
||||||
|
let key2 = key.clone();
|
||||||
|
b!(self.aliases, aliases.remove(key2));
|
||||||
|
|
||||||
|
let key2 = key.clone();
|
||||||
|
b!(self.alias_delete_tokens, alias_delete_tokens.remove(key2));
|
||||||
|
|
||||||
|
b!(self.alias_hashes, alias_hashes.remove(key));
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl std::fmt::Debug for SledRepo {
|
||||||
|
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||||
|
f.debug_struct("SledRepo").finish()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<actix_rt::task::JoinError> for SledError {
|
||||||
|
fn from(_: actix_rt::task::JoinError) -> Self {
|
||||||
|
SledError::Panic
|
||||||
|
}
|
||||||
|
}

@@ -12,6 +12,22 @@ impl<T> Serde<T> {
     pub(crate) fn new(inner: T) -> Self {
         Serde { inner }
     }
+
+    pub(crate) fn into_inner(this: Self) -> T {
+        this.inner
+    }
+}
+
+impl<T> AsRef<T> for Serde<T> {
+    fn as_ref(&self) -> &T {
+        &self.inner
+    }
+}
+
+impl<T> AsMut<T> for Serde<T> {
+    fn as_mut(&mut self) -> &mut T {
+        &mut self.inner
+    }
 }
 
 impl<T> Deref for Serde<T> {

84 src/store.rs

@@ -1,49 +1,37 @@
-use std::fmt::Debug;
+use crate::error::Error;
+
 use actix_web::web::Bytes;
 use futures_util::stream::Stream;
+use std::fmt::Debug;
 use tokio::io::{AsyncRead, AsyncWrite};
 
 pub(crate) mod file_store;
-#[cfg(feature = "object-storage")]
 pub(crate) mod object_store;
 
 pub(crate) trait Identifier: Send + Sync + Clone + Debug {
-    type Error: std::error::Error;
-
-    fn to_bytes(&self) -> Result<Vec<u8>, Self::Error>;
+    fn to_bytes(&self) -> Result<Vec<u8>, Error>;
 
-    fn from_bytes(bytes: Vec<u8>) -> Result<Self, Self::Error>
+    fn from_bytes(bytes: Vec<u8>) -> Result<Self, Error>
     where
         Self: Sized;
 }
 
 #[async_trait::async_trait(?Send)]
-pub(crate) trait Store: Send + Sync + Clone + Debug + 'static {
-    type Error: std::error::Error;
-    type Identifier: Identifier<Error = Self::Error>;
-    type Stream: Stream<Item = std::io::Result<Bytes>>;
+pub(crate) trait Store: Send + Sync + Clone + Debug {
+    type Identifier: Identifier + 'static;
+    type Stream: Stream<Item = std::io::Result<Bytes>> + 'static;
 
-    async fn save_async_read<Reader>(
-        &self,
-        reader: &mut Reader,
-        filename: &str,
-    ) -> Result<Self::Identifier, Self::Error>
+    async fn save_async_read<Reader>(&self, reader: &mut Reader) -> Result<Self::Identifier, Error>
     where
         Reader: AsyncRead + Unpin;
 
-    async fn save_bytes(
-        &self,
-        bytes: Bytes,
-        filename: &str,
-    ) -> Result<Self::Identifier, Self::Error>;
+    async fn save_bytes(&self, bytes: Bytes) -> Result<Self::Identifier, Error>;
 
     async fn to_stream(
         &self,
         identifier: &Self::Identifier,
         from_start: Option<u64>,
         len: Option<u64>,
-    ) -> Result<Self::Stream, Self::Error>;
+    ) -> Result<Self::Stream, Error>;
 
     async fn read_into<Writer>(
         &self,
@@ -53,7 +41,55 @@ pub(crate) trait Store: Send + Sync + Clone + Debug + 'static {
     where
         Writer: AsyncWrite + Send + Unpin;
 
-    async fn len(&self, identifier: &Self::Identifier) -> Result<u64, Self::Error>;
+    async fn len(&self, identifier: &Self::Identifier) -> Result<u64, Error>;
 
-    async fn remove(&self, identifier: &Self::Identifier) -> Result<(), Self::Error>;
+    async fn remove(&self, identifier: &Self::Identifier) -> Result<(), Error>;
+}
+
+#[async_trait::async_trait(?Send)]
+impl<'a, T> Store for &'a T
+where
+    T: Store,
+{
+    type Identifier = T::Identifier;
+    type Stream = T::Stream;
+
+    async fn save_async_read<Reader>(&self, reader: &mut Reader) -> Result<Self::Identifier, Error>
+    where
+        Reader: AsyncRead + Unpin,
+    {
+        T::save_async_read(self, reader).await
+    }
+
+    async fn save_bytes(&self, bytes: Bytes) -> Result<Self::Identifier, Error> {
+        T::save_bytes(self, bytes).await
+    }
+
+    async fn to_stream(
+        &self,
+        identifier: &Self::Identifier,
+        from_start: Option<u64>,
+        len: Option<u64>,
+    ) -> Result<Self::Stream, Error> {
+        T::to_stream(self, identifier, from_start, len).await
+    }
+
+    async fn read_into<Writer>(
+        &self,
+        identifier: &Self::Identifier,
+        writer: &mut Writer,
+    ) -> Result<(), std::io::Error>
+    where
+        Writer: AsyncWrite + Send + Unpin,
+    {
+        T::read_into(self, identifier, writer).await
+    }
+
+    async fn len(&self, identifier: &Self::Identifier) -> Result<u64, Error> {
+        T::len(self, identifier).await
+    }
+
+    async fn remove(&self, identifier: &Self::Identifier) -> Result<(), Error> {
+        T::remove(self, identifier).await
+    }
 }
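
The new blanket `impl<'a, T> Store for &'a T` means anything written against `S: Store` also accepts a borrowed store, so callers no longer need to clone. A hedged illustration (the generic function is invented for the example, not part of this diff):

    // Hypothetical caller:
    async fn total_len<S: Store>(store: S, ids: &[S::Identifier]) -> Result<u64, Error> {
        let mut sum = 0;
        for id in ids {
            sum += store.len(id).await?;
        }
        Ok(sum)
    }
    // Works with both FileStore and &FileStore thanks to the blanket impl.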
@@ -1,4 +1,9 @@
-use crate::{file::File, store::Store};
+use crate::{
+    error::Error,
+    file::File,
+    repo::{Repo, SettingsRepo},
+    store::Store,
+};
 use actix_web::web::Bytes;
 use futures_util::stream::Stream;
 use std::{
@@ -10,24 +15,19 @@ use tokio::io::{AsyncRead, AsyncWrite};
 use tracing::{debug, error, instrument};
 
 mod file_id;
-mod restructure;
 pub(crate) use file_id::FileId;
 
 // - Settings Tree
 //   - last-path -> last generated path
-//   - fs-restructure-01-complete -> bool
 
-const GENERATOR_KEY: &[u8] = b"last-path";
+const GENERATOR_KEY: &str = "last-path";
 
 #[derive(Debug, thiserror::Error)]
 pub(crate) enum FileError {
-    #[error(transparent)]
-    Sled(#[from] sled::Error),
-
-    #[error(transparent)]
+    #[error("Failed to read or write file")]
     Io(#[from] std::io::Error),
 
-    #[error(transparent)]
+    #[error("Failed to generate path")]
     PathGenerator(#[from] storage_path_generator::PathError),
 
     #[error("Error formatting file store identifier")]
@@ -44,48 +44,39 @@ pub(crate) enum FileError {
 pub(crate) struct FileStore {
     path_gen: Generator,
     root_dir: PathBuf,
-    settings_tree: sled::Tree,
+    repo: Repo,
 }
 
 #[async_trait::async_trait(?Send)]
 impl Store for FileStore {
-    type Error = FileError;
     type Identifier = FileId;
     type Stream = Pin<Box<dyn Stream<Item = std::io::Result<Bytes>>>>;
 
     #[tracing::instrument(skip(reader))]
-    async fn save_async_read<Reader>(
-        &self,
-        reader: &mut Reader,
-        filename: &str,
-    ) -> Result<Self::Identifier, Self::Error>
+    async fn save_async_read<Reader>(&self, reader: &mut Reader) -> Result<Self::Identifier, Error>
     where
         Reader: AsyncRead + Unpin,
     {
-        let path = self.next_file(filename)?;
+        let path = self.next_file().await?;
 
         if let Err(e) = self.safe_save_reader(&path, reader).await {
             self.safe_remove_file(&path).await?;
-            return Err(e);
+            return Err(e.into());
         }
 
-        self.file_id_from_path(path)
+        Ok(self.file_id_from_path(path)?)
     }
 
     #[tracing::instrument(skip(bytes))]
-    async fn save_bytes(
-        &self,
-        bytes: Bytes,
-        filename: &str,
-    ) -> Result<Self::Identifier, Self::Error> {
-        let path = self.next_file(filename)?;
+    async fn save_bytes(&self, bytes: Bytes) -> Result<Self::Identifier, Error> {
+        let path = self.next_file().await?;
 
         if let Err(e) = self.safe_save_bytes(&path, bytes).await {
             self.safe_remove_file(&path).await?;
-            return Err(e);
+            return Err(e.into());
         }
 
-        self.file_id_from_path(path)
+        Ok(self.file_id_from_path(path)?)
     }
 
     #[tracing::instrument]
@@ -94,7 +85,7 @@ impl Store for FileStore {
         identifier: &Self::Identifier,
         from_start: Option<u64>,
         len: Option<u64>,
-    ) -> Result<Self::Stream, Self::Error> {
+    ) -> Result<Self::Stream, Error> {
         let path = self.path_from_file_id(identifier);
 
         let stream = File::open(path)
@@ -122,7 +113,7 @@ impl Store for FileStore {
     }
 
     #[tracing::instrument]
-    async fn len(&self, identifier: &Self::Identifier) -> Result<u64, Self::Error> {
+    async fn len(&self, identifier: &Self::Identifier) -> Result<u64, Error> {
         let path = self.path_from_file_id(identifier);
 
         let len = tokio::fs::metadata(path).await?.len();
@@ -131,7 +122,7 @@ impl Store for FileStore {
     }
 
     #[tracing::instrument]
-    async fn remove(&self, identifier: &Self::Identifier) -> Result<(), Self::Error> {
+    async fn remove(&self, identifier: &Self::Identifier) -> Result<(), Error> {
         let path = self.path_from_file_id(identifier);
 
         self.safe_remove_file(path).await?;
@@ -141,25 +132,28 @@ impl FileStore {
     }
 
 impl FileStore {
-    pub fn build(root_dir: PathBuf, db: &sled::Db) -> Result<Self, FileError> {
-        let settings_tree = db.open_tree("settings")?;
-
-        let path_gen = init_generator(&settings_tree)?;
+    pub(crate) async fn build(root_dir: PathBuf, repo: Repo) -> Result<Self, Error> {
+        let path_gen = init_generator(&repo).await?;
 
         Ok(FileStore {
            root_dir,
            path_gen,
-            settings_tree,
+            repo,
        })
     }
 
-    fn next_directory(&self) -> Result<PathBuf, FileError> {
+    async fn next_directory(&self) -> Result<PathBuf, Error> {
         let path = self.path_gen.next();
 
-        self.settings_tree
-            .insert(GENERATOR_KEY, path.to_be_bytes())?;
+        match self.repo {
+            Repo::Sled(ref sled_repo) => {
+                sled_repo
+                    .set(GENERATOR_KEY, path.to_be_bytes().into())
+                    .await?;
+            }
+        }
 
-        let mut target_path = self.root_dir.join("files");
+        let mut target_path = self.root_dir.clone();
         for dir in path.to_strings() {
             target_path.push(dir)
         }
@@ -167,8 +161,9 @@ impl FileStore {
         Ok(target_path)
     }
 
-    fn next_file(&self, filename: &str) -> Result<PathBuf, FileError> {
-        let target_path = self.next_directory()?;
+    async fn next_file(&self) -> Result<PathBuf, Error> {
+        let target_path = self.next_directory().await?;
+        let filename = uuid::Uuid::new_v4().to_string();
 
         Ok(target_path.join(filename))
     }
@@ -289,8 +284,10 @@ pub(crate) async fn safe_create_parent<P: AsRef<Path>>(path: P) -> Result<(), Fi
     Ok(())
 }
 
-fn init_generator(settings: &sled::Tree) -> Result<Generator, FileError> {
-    if let Some(ivec) = settings.get(GENERATOR_KEY)? {
+async fn init_generator(repo: &Repo) -> Result<Generator, Error> {
+    match repo {
+        Repo::Sled(sled_repo) => {
+            if let Some(ivec) = sled_repo.get(GENERATOR_KEY).await? {
                 Ok(Generator::from_existing(
                     storage_path_generator::Path::from_be_bytes(ivec.to_vec())?,
                 ))
@@ -298,6 +295,8 @@ fn init_generator(settings: &sled::Tree) -> Result<Generator, FileError> {
                 Ok(Generator::new())
             }
         }
+        }
+    }
 
 impl std::fmt::Debug for FileStore {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
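Worth spelling out from the hunks above: the path generator's state now round-trips through the repo under the "last-path" settings key, so a restarted process continues the directory sequence instead of reusing paths. A minimal sketch of that save/restore cycle, using a plain HashMap as a stand-in for the repo's settings storage (an assumption for illustration; the storage_path_generator calls are the ones visible in this diff):

    use std::collections::HashMap;
    use storage_path_generator::{Generator, Path, PathError};

    fn main() -> Result<(), PathError> {
        // Stand-in for SettingsRepo::set / SettingsRepo::get.
        let mut settings: HashMap<&str, Vec<u8>> = HashMap::new();

        let path_gen = Generator::new();

        // Every new file first advances the generator...
        let path = path_gen.next();
        // ...then persists the new state under "last-path".
        settings.insert("last-path", path.to_be_bytes());

        // A restarted process rebuilds the generator from the stored bytes,
        // so it continues the sequence rather than starting over.
        let restored = Generator::from_existing(Path::from_be_bytes(
            settings["last-path"].clone(),
        )?);
        let _next = restored.next();

        Ok(())
    }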
@@ -1,6 +1,9 @@
-use crate::store::{
+use crate::{
+    error::Error,
+    store::{
         file_store::{FileError, FileStore},
         Identifier,
+    },
 };
 use std::path::PathBuf;
 
@@ -8,9 +11,7 @@ use std::path::PathBuf;
 pub(crate) struct FileId(PathBuf);
 
 impl Identifier for FileId {
-    type Error = FileError;
-
-    fn to_bytes(&self) -> Result<Vec<u8>, Self::Error> {
+    fn to_bytes(&self) -> Result<Vec<u8>, Error> {
         let vec = self
             .0
             .to_str()
@@ -21,7 +22,7 @@ impl Identifier for FileId {
         Ok(vec)
     }
 
-    fn from_bytes(bytes: Vec<u8>) -> Result<Self, Self::Error>
+    fn from_bytes(bytes: Vec<u8>) -> Result<Self, Error>
     where
         Self: Sized,
     {
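Both identifier types lose their per-store associated `Error` in this commit and return the crate-wide `Error` instead (the same edit appears in object_id.rs further down). A reduced sketch of the before/after trait shape, with a placeholder `Error` struct standing in for `crate::error::Error`:

    // Before: each Identifier carried its own error type, forcing bounds
    // like `Error: From<I::Error>` at every generic call site.
    trait IdentifierOld {
        type Error;
        fn to_bytes(&self) -> Result<Vec<u8>, Self::Error>;
    }

    // After: one shared error type, so stores and identifiers can be mixed
    // behind a single bound with no extra From conversions in signatures.
    #[derive(Debug)]
    struct Error; // stand-in for crate::error::Error

    trait Identifier {
        fn to_bytes(&self) -> Result<Vec<u8>, Error>;
    }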
@@ -1,118 +0,0 @@
-use crate::{
-    error::{Error, UploadError},
-    store::file_store::FileStore,
-    upload_manager::UploadManager,
-};
-use std::path::{Path, PathBuf};
-
-const RESTRUCTURE_COMPLETE: &[u8] = b"fs-restructure-01-complete";
-const DETAILS: &[u8] = b"details";
-
-impl UploadManager {
-    #[tracing::instrument(skip(self))]
-    pub(crate) async fn restructure(&self, store: &FileStore) -> Result<(), Error> {
-        if self.restructure_complete(store)? {
-            return Ok(());
-        }
-
-        for res in self.inner().filename_tree.iter() {
-            let (filename, hash) = res?;
-            let filename = String::from_utf8(filename.to_vec())?;
-            tracing::info!("Migrating {}", filename);
-
-            let file_path = store.root_dir.join("files").join(&filename);
-
-            if tokio::fs::metadata(&file_path).await.is_ok() {
-                let target_path = store.next_directory()?.join(&filename);
-
-                let target_path_bytes = self
-                    .generalize_path(store, &target_path)?
-                    .to_str()
-                    .ok_or(UploadError::Path)?
-                    .as_bytes()
-                    .to_vec();
-
-                self.inner()
-                    .identifier_tree
-                    .insert(filename.as_bytes(), target_path_bytes)?;
-
-                store.safe_move_file(file_path, target_path).await?;
-            }
-
-            let (start, end) = variant_key_bounds(&hash);
-
-            for res in self.inner().main_tree.range(start..end) {
-                let (hash_variant_key, variant_path_or_details) = res?;
-
-                if !hash_variant_key.ends_with(DETAILS) {
-                    let variant_path =
-                        PathBuf::from(String::from_utf8(variant_path_or_details.to_vec())?);
-                    if tokio::fs::metadata(&variant_path).await.is_ok() {
-                        let target_path = store.next_directory()?.join(&filename);
-
-                        let relative_target_path_bytes = self
-                            .generalize_path(store, &target_path)?
-                            .to_str()
-                            .ok_or(UploadError::Path)?
-                            .as_bytes()
-                            .to_vec();
-
-                        let variant_key =
-                            self.migrate_variant_key(store, &variant_path, &filename)?;
-
-                        self.inner()
-                            .identifier_tree
-                            .insert(variant_key, relative_target_path_bytes)?;
-
-                        store
-                            .safe_move_file(variant_path.clone(), target_path)
-                            .await?;
-                        store.try_remove_parents(&variant_path).await;
-                    }
-                }
-
-                self.inner().main_tree.remove(hash_variant_key)?;
-            }
-        }
-
-        self.mark_restructure_complete(store)?;
-        Ok(())
-    }
-
-    fn restructure_complete(&self, store: &FileStore) -> Result<bool, Error> {
-        Ok(store.settings_tree.get(RESTRUCTURE_COMPLETE)?.is_some())
-    }
-
-    fn mark_restructure_complete(&self, store: &FileStore) -> Result<(), Error> {
-        store.settings_tree.insert(RESTRUCTURE_COMPLETE, b"true")?;
-
-        Ok(())
-    }
-
-    fn generalize_path<'a>(&self, store: &FileStore, path: &'a Path) -> Result<&'a Path, Error> {
-        Ok(path.strip_prefix(&store.root_dir)?)
-    }
-
-    fn migrate_variant_key(
-        &self,
-        store: &FileStore,
-        variant_process_path: &Path,
-        filename: &str,
-    ) -> Result<Vec<u8>, Error> {
-        let path = self
-            .generalize_path(store, variant_process_path)?
-            .strip_prefix("files")?;
-
-        self.variant_key(path, filename)
-    }
-}
-
-pub(crate) fn variant_key_bounds(hash: &[u8]) -> (Vec<u8>, Vec<u8>) {
-    let mut start = hash.to_vec();
-    start.extend(&[2]);
-
-    let mut end = hash.to_vec();
-    end.extend(&[3]);
-
-    (start, end)
-}
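One detail of the deleted restructure module worth preserving: `variant_key_bounds` relied on variant entries being keyed as `hash ++ [2] ++ ...`, so appending `[2]` and `[3]` to a hash yields a half-open range that selects exactly that hash's variants under lexicographic key order. A self-contained check of that bound trick (plain byte vectors, no sled involved):

    fn variant_key_bounds(hash: &[u8]) -> (Vec<u8>, Vec<u8>) {
        let mut start = hash.to_vec();
        start.push(2);
        let mut end = hash.to_vec();
        end.push(3);
        (start, end)
    }

    fn main() {
        let (start, end) = variant_key_bounds(b"abc");

        // A variant key for this hash sorts inside the bounds...
        let variant_key = [b"abc".as_slice(), &[2], b"720p"].concat();
        assert!(start <= variant_key && variant_key < end);

        // ...while a key for a different hash falls outside them.
        let other = [b"abd".as_slice(), &[2], b"720p"].concat();
        assert!(!(start <= other && other < end));
    }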
@@ -1,16 +1,17 @@
-use crate::store::Store;
+use crate::{
+    error::Error,
+    repo::{Repo, SettingsRepo},
+    store::Store,
+};
 use actix_web::web::Bytes;
-use futures_util::stream::Stream;
+use futures_util::{Stream, TryStreamExt};
 use s3::{
     client::Client, command::Command, creds::Credentials, request_trait::Request, Bucket, Region,
 };
-use std::{
-    pin::Pin,
-    string::FromUtf8Error,
-    task::{Context, Poll},
-};
+use std::{pin::Pin, string::FromUtf8Error};
 use storage_path_generator::{Generator, Path};
 use tokio::io::{AsyncRead, AsyncWrite};
+use tracing::Instrument;
 
 mod object_id;
 pub(crate) use object_id::ObjectId;
@@ -18,74 +19,59 @@ pub(crate) use object_id::ObjectId;
 // - Settings Tree
 //   - last-path -> last generated path
 
-const GENERATOR_KEY: &[u8] = b"last-path";
+const GENERATOR_KEY: &str = "last-path";
 
 #[derive(Debug, thiserror::Error)]
 pub(crate) enum ObjectError {
-    #[error(transparent)]
+    #[error("Failed to generate path")]
     PathGenerator(#[from] storage_path_generator::PathError),
 
-    #[error(transparent)]
-    Sled(#[from] sled::Error),
-
-    #[error(transparent)]
+    #[error("Failed to parse string")]
     Utf8(#[from] FromUtf8Error),
 
     #[error("Invalid length")]
     Length,
 
-    #[error("Storage error: {0}")]
+    #[error("Storage error")]
     Anyhow(#[from] anyhow::Error),
 }
 
 #[derive(Clone)]
 pub(crate) struct ObjectStore {
     path_gen: Generator,
-    settings_tree: sled::Tree,
+    repo: Repo,
     bucket: Bucket,
     client: reqwest::Client,
 }
 
-pin_project_lite::pin_project! {
-    struct IoError<S> {
-        #[pin]
-        inner: S,
-    }
-}
-
 #[async_trait::async_trait(?Send)]
 impl Store for ObjectStore {
-    type Error = ObjectError;
     type Identifier = ObjectId;
     type Stream = Pin<Box<dyn Stream<Item = std::io::Result<Bytes>>>>;
 
     #[tracing::instrument(skip(reader))]
-    async fn save_async_read<Reader>(
-        &self,
-        reader: &mut Reader,
-        filename: &str,
-    ) -> Result<Self::Identifier, Self::Error>
+    async fn save_async_read<Reader>(&self, reader: &mut Reader) -> Result<Self::Identifier, Error>
     where
         Reader: AsyncRead + Unpin,
     {
-        let path = self.next_file(filename)?;
+        let path = self.next_file().await?;
 
         self.bucket
             .put_object_stream(&self.client, reader, &path)
-            .await?;
+            .await
+            .map_err(ObjectError::from)?;
 
         Ok(ObjectId::from_string(path))
     }
 
     #[tracing::instrument(skip(bytes))]
-    async fn save_bytes(
-        &self,
-        bytes: Bytes,
-        filename: &str,
-    ) -> Result<Self::Identifier, Self::Error> {
-        let path = self.next_file(filename)?;
+    async fn save_bytes(&self, bytes: Bytes) -> Result<Self::Identifier, Error> {
+        let path = self.next_file().await?;
 
-        self.bucket.put_object(&self.client, &path, &bytes).await?;
+        self.bucket
+            .put_object(&self.client, &path, &bytes)
+            .await
+            .map_err(ObjectError::from)?;
 
         Ok(ObjectId::from_string(path))
     }
@@ -96,22 +82,39 @@ impl Store for ObjectStore {
         identifier: &Self::Identifier,
         from_start: Option<u64>,
         len: Option<u64>,
-    ) -> Result<Self::Stream, Self::Error> {
+    ) -> Result<Self::Stream, Error> {
         let path = identifier.as_str();
 
         let start = from_start.unwrap_or(0);
-        let end = len.map(|len| start + len);
+        let end = len.map(|len| start + len - 1);
 
-        let request = Client::request(
-            &self.client,
-            &self.bucket,
-            path,
-            Command::GetObjectRange { start, end },
-        );
+        let request_span = tracing::info_span!(parent: None, "Get Object");
+
+        // NOTE: isolating reqwest in its own span is to prevent the request's span from getting
+        // smuggled into a long-lived task. Unfortunately, I am unable to create a minimal
+        // reproduction of this problem so I can't open a bug about it.
+        let request = request_span.in_scope(|| {
+            Client::request(
+                &self.client,
+                &self.bucket,
+                path,
+                Command::GetObjectRange { start, end },
+            )
+        });
 
-        let response = request.response().await?;
+        let response = request_span
+            .in_scope(|| request.response())
+            .instrument(request_span.clone())
+            .await
+            .map_err(ObjectError::from)?;
 
-        Ok(Box::pin(io_error(response.bytes_stream())))
+        let stream = request_span.in_scope(|| {
+            response
+                .bytes_stream()
+                .map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, e))
+        });
+
+        Ok(Box::pin(stream))
     }
 
     #[tracing::instrument(skip(writer))]
@@ -128,49 +131,55 @@ impl Store for ObjectStore {
         self.bucket
             .get_object_stream(&self.client, path, writer)
             .await
-            .map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, Self::Error::from(e)))?;
+            .map_err(ObjectError::from)
+            .map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, Error::from(e)))?;
 
         Ok(())
     }
 
     #[tracing::instrument]
-    async fn len(&self, identifier: &Self::Identifier) -> Result<u64, Self::Error> {
+    async fn len(&self, identifier: &Self::Identifier) -> Result<u64, Error> {
         let path = identifier.as_str();
 
-        let (head, _) = self.bucket.head_object(&self.client, path).await?;
+        let (head, _) = self
+            .bucket
+            .head_object(&self.client, path)
+            .await
+            .map_err(ObjectError::from)?;
         let length = head.content_length.ok_or(ObjectError::Length)?;
 
         Ok(length as u64)
     }
 
     #[tracing::instrument]
-    async fn remove(&self, identifier: &Self::Identifier) -> Result<(), Self::Error> {
+    async fn remove(&self, identifier: &Self::Identifier) -> Result<(), Error> {
         let path = identifier.as_str();
 
-        self.bucket.delete_object(&self.client, path).await?;
+        self.bucket
+            .delete_object(&self.client, path)
+            .await
+            .map_err(ObjectError::from)?;
         Ok(())
     }
 }
 
 impl ObjectStore {
     #[allow(clippy::too_many_arguments)]
-    pub(crate) fn build(
+    pub(crate) async fn build(
         bucket_name: &str,
         region: Region,
         access_key: Option<String>,
         secret_key: Option<String>,
         security_token: Option<String>,
         session_token: Option<String>,
-        db: &sled::Db,
+        repo: Repo,
         client: reqwest::Client,
-    ) -> Result<ObjectStore, ObjectError> {
-        let settings_tree = db.open_tree("settings")?;
-
-        let path_gen = init_generator(&settings_tree)?;
+    ) -> Result<ObjectStore, Error> {
+        let path_gen = init_generator(&repo).await?;
 
         Ok(ObjectStore {
             path_gen,
-            settings_tree,
+            repo,
             bucket: Bucket::new_with_path_style(
                 bucket_name,
                 match region {
@@ -186,29 +195,38 @@ impl ObjectStore {
                     security_token,
                     session_token,
                 },
-            )?,
+            )
+            .map_err(ObjectError::from)?,
             client,
         })
     }
 
-    fn next_directory(&self) -> Result<Path, ObjectError> {
+    async fn next_directory(&self) -> Result<Path, Error> {
         let path = self.path_gen.next();
 
-        self.settings_tree
-            .insert(GENERATOR_KEY, path.to_be_bytes())?;
+        match self.repo {
+            Repo::Sled(ref sled_repo) => {
+                sled_repo
+                    .set(GENERATOR_KEY, path.to_be_bytes().into())
+                    .await?;
+            }
+        }
 
         Ok(path)
     }
 
-    fn next_file(&self, filename: &str) -> Result<String, ObjectError> {
-        let path = self.next_directory()?.to_strings().join("/");
+    async fn next_file(&self) -> Result<String, Error> {
+        let path = self.next_directory().await?.to_strings().join("/");
+        let filename = uuid::Uuid::new_v4().to_string();
 
         Ok(format!("{}/{}", path, filename))
     }
 }
 
-fn init_generator(settings: &sled::Tree) -> Result<Generator, ObjectError> {
-    if let Some(ivec) = settings.get(GENERATOR_KEY)? {
+async fn init_generator(repo: &Repo) -> Result<Generator, Error> {
+    match repo {
+        Repo::Sled(sled_repo) => {
+            if let Some(ivec) = sled_repo.get(GENERATOR_KEY).await? {
                 Ok(Generator::from_existing(
                     storage_path_generator::Path::from_be_bytes(ivec.to_vec())?,
                 ))
@@ -216,35 +234,13 @@ fn init_generator(settings: &sled::Tree) -> Result<Generator, ObjectError> {
                 Ok(Generator::new())
             }
         }
-
-fn io_error<S, T, E>(stream: S) -> impl Stream<Item = std::io::Result<T>>
-where
-    S: Stream<Item = Result<T, E>>,
-    E: Into<Box<dyn std::error::Error + Send + Sync>>,
-{
-    IoError { inner: stream }
-}
-
-impl<S, T, E> Stream for IoError<S>
-where
-    S: Stream<Item = Result<T, E>>,
-    E: Into<Box<dyn std::error::Error + Send + Sync>>,
-{
-    type Item = std::io::Result<T>;
-
-    fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
-        let this = self.as_mut().project();
-
-        this.inner.poll_next(cx).map(|opt| {
-            opt.map(|res| res.map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, e)))
-        })
     }
 }
 
 impl std::fmt::Debug for ObjectStore {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         f.debug_struct("ObjectStore")
-            .field("path_gen", &self.path_gen)
+            .field("path_gen", &"generator")
            .field("bucket", &self.bucket.name)
            .field("region", &self.bucket.region)
            .finish()
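Two behavioral fixes hide in the hunks above. First, `to_stream` now computes `end` as `start + len - 1`, because `GetObjectRange` maps to an HTTP `Range` header, whose byte ranges are inclusive on both ends; the old `start + len` requested one extra byte. Second, request construction and polling are pinned to a dedicated span, per the inline NOTE. The range arithmetic, checked in isolation:

    fn main() {
        let start: u64 = 0;
        let len: u64 = 4;

        // Inclusive Range semantics: "bytes=0-3" covers exactly 4 bytes.
        let end = start + len - 1;
        assert_eq!(end - start + 1, len);

        // The old bound, "bytes=0-4", would have covered 5 bytes.
        let old_end = start + len;
        assert_eq!(old_end - start + 1, len + 1);
    }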
@@ -1,17 +1,20 @@
-use crate::store::{object_store::ObjectError, Identifier};
+use crate::{
+    error::Error,
+    store::{object_store::ObjectError, Identifier},
+};
 
 #[derive(Debug, Clone)]
 pub(crate) struct ObjectId(String);
 
 impl Identifier for ObjectId {
-    type Error = ObjectError;
-
-    fn to_bytes(&self) -> Result<Vec<u8>, Self::Error> {
+    fn to_bytes(&self) -> Result<Vec<u8>, Error> {
         Ok(self.0.as_bytes().to_vec())
     }
 
-    fn from_bytes(bytes: Vec<u8>) -> Result<Self, Self::Error> {
-        Ok(ObjectId(String::from_utf8(bytes)?))
+    fn from_bytes(bytes: Vec<u8>) -> Result<Self, Error> {
+        Ok(ObjectId(
+            String::from_utf8(bytes).map_err(ObjectError::from)?,
+        ))
     }
 }
233 src/stream.rs Normal file
@@ -0,0 +1,233 @@
+use actix_rt::{task::JoinHandle, time::Sleep};
+use actix_web::web::Bytes;
+use futures_util::Stream;
+use std::{
+    future::Future,
+    pin::Pin,
+    sync::{
+        atomic::{AtomicBool, Ordering},
+        Arc,
+    },
+    task::{Context, Poll, Wake, Waker},
+    time::Duration,
+};
+
+pub(crate) trait StreamLimit {
+    fn limit(self, limit: u64) -> Limit<Self>
+    where
+        Self: Sized,
+    {
+        Limit {
+            inner: self,
+            count: 0,
+            limit,
+        }
+    }
+}
+
+pub(crate) trait StreamTimeout {
+    fn timeout(self, duration: Duration) -> Timeout<Self>
+    where
+        Self: Sized,
+    {
+        Timeout {
+            sleep: actix_rt::time::sleep(duration),
+            inner: self,
+            expired: false,
+            woken: Arc::new(AtomicBool::new(true)),
+        }
+    }
+}
+
+pub(crate) fn from_iterator<I: IntoIterator + Unpin + Send + 'static>(
+    iterator: I,
+    buffer: usize,
+) -> IterStream<I, I::Item> {
+    IterStream {
+        state: IterStreamState::New { iterator, buffer },
+    }
+}
+
+impl<S, E> StreamLimit for S where S: Stream<Item = Result<Bytes, E>> {}
+impl<S> StreamTimeout for S where S: Stream {}
+
+pin_project_lite::pin_project! {
+    pub(crate) struct Limit<S> {
+        #[pin]
+        inner: S,
+
+        count: u64,
+        limit: u64,
+    }
+}
+
+pin_project_lite::pin_project! {
+    pub(crate) struct Timeout<S> {
+        #[pin]
+        sleep: Sleep,
+
+        #[pin]
+        inner: S,
+
+        expired: bool,
+        woken: Arc<AtomicBool>,
+    }
+}
+
+enum IterStreamState<I, T> {
+    New {
+        iterator: I,
+        buffer: usize,
+    },
+    Running {
+        handle: JoinHandle<()>,
+        receiver: tokio::sync::mpsc::Receiver<T>,
+    },
+    Pending,
+}
+
+pub(crate) struct IterStream<I, T> {
+    state: IterStreamState<I, T>,
+}
+
+struct TimeoutWaker {
+    woken: Arc<AtomicBool>,
+    inner: Waker,
+}
+
+#[derive(Debug, thiserror::Error)]
+#[error("Response body larger than size limit")]
+pub(crate) struct LimitError;
+
+#[derive(Debug, thiserror::Error)]
+#[error("Timeout in body")]
+pub(crate) struct TimeoutError;
+
+impl<S, E> Stream for Limit<S>
+where
+    S: Stream<Item = Result<Bytes, E>>,
+    E: From<LimitError>,
+{
+    type Item = Result<Bytes, E>;
+
+    fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
+        let this = self.as_mut().project();
+
+        let limit = this.limit;
+        let count = this.count;
+        let inner = this.inner;
+
+        inner.poll_next(cx).map(|opt| {
+            opt.map(|res| match res {
+                Ok(bytes) => {
+                    *count += bytes.len() as u64;
+                    if *count > *limit {
+                        return Err(LimitError.into());
+                    }
+                    Ok(bytes)
+                }
+                Err(e) => Err(e),
+            })
+        })
+    }
+}
+
+impl Wake for TimeoutWaker {
+    fn wake(self: Arc<Self>) {
+        self.wake_by_ref()
+    }
+
+    fn wake_by_ref(self: &Arc<Self>) {
+        self.woken.store(true, Ordering::Release);
+        self.inner.wake_by_ref();
+    }
+}
+
+impl<S, T> Stream for Timeout<S>
+where
+    S: Stream<Item = T>,
+{
+    type Item = Result<T, TimeoutError>;
+
+    fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
+        let this = self.as_mut().project();
+
+        if *this.expired {
+            return Poll::Ready(None);
+        }
+
+        if this.woken.swap(false, Ordering::Acquire) {
+            let timeout_waker = Arc::new(TimeoutWaker {
+                woken: Arc::clone(this.woken),
+                inner: cx.waker().clone(),
+            })
+            .into();
+
+            let mut timeout_cx = Context::from_waker(&timeout_waker);
+
+            if this.sleep.poll(&mut timeout_cx).is_ready() {
+                *this.expired = true;
+                return Poll::Ready(Some(Err(TimeoutError)));
+            }
+        }
+
+        this.inner.poll_next(cx).map(|opt| opt.map(Ok))
+    }
+}
+
+impl<I, T> Stream for IterStream<I, T>
+where
+    I: IntoIterator<Item = T> + Send + Unpin + 'static,
+    T: Send + 'static,
+{
+    type Item = T;
+
+    fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
+        let this = self.as_mut().get_mut();
+
+        match std::mem::replace(&mut this.state, IterStreamState::Pending) {
+            IterStreamState::New { iterator, buffer } => {
+                let (sender, receiver) = tokio::sync::mpsc::channel(buffer);
+
+                let mut handle = actix_rt::task::spawn_blocking(move || {
+                    let iterator = iterator.into_iter();
+
+                    for item in iterator {
+                        if sender.blocking_send(item).is_err() {
+                            break;
+                        }
+                    }
+                });
+
+                if Pin::new(&mut handle).poll(cx).is_ready() {
+                    return Poll::Ready(None);
+                }
+
+                this.state = IterStreamState::Running { handle, receiver };
+
+                self.poll_next(cx)
+            }
+            IterStreamState::Running {
+                mut handle,
+                mut receiver,
+            } => match Pin::new(&mut receiver).poll_recv(cx) {
+                Poll::Ready(Some(item)) => {
+                    this.state = IterStreamState::Running { handle, receiver };
+
+                    Poll::Ready(Some(item))
+                }
+                Poll::Ready(None) => Poll::Ready(None),
+                Poll::Pending => {
+                    if Pin::new(&mut handle).poll(cx).is_ready() {
+                        return Poll::Ready(None);
+                    }
+
+                    this.state = IterStreamState::Running { handle, receiver };
+
+                    Poll::Pending
+                }
+            },
+            IterStreamState::Pending => panic!("Polled after completion"),
+        }
+    }
+}
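The `Limit` wrapper's rule reduces to a running byte count compared against the cap after each chunk. A stream-free restatement of that accounting (the function and names here are illustrative, not part of the module):

    fn check_chunks(chunks: &[&[u8]], limit: u64) -> Result<u64, &'static str> {
        let mut count = 0u64;
        for chunk in chunks {
            // Same rule as Limit::poll_next: add the chunk, then compare.
            count += chunk.len() as u64;
            if count > limit {
                return Err("Response body larger than size limit");
            }
        }
        Ok(count)
    }

    fn main() {
        let chunks: &[&[u8]] = &[b"abcd", b"ef"];
        assert_eq!(check_chunks(chunks, 10), Ok(6));
        assert!(check_chunks(chunks, 5).is_err());
    }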
@ -1,765 +0,0 @@
|
||||||
use crate::{
|
|
||||||
config::Format,
|
|
||||||
error::{Error, UploadError},
|
|
||||||
ffmpeg::{InputFormat, ThumbnailFormat},
|
|
||||||
magick::{details_hint, ValidInputType},
|
|
||||||
migrate::{alias_id_key, alias_key, alias_key_bounds},
|
|
||||||
serde_str::Serde,
|
|
||||||
store::{Identifier, Store},
|
|
||||||
};
|
|
||||||
use actix_web::web;
|
|
||||||
use sha2::Digest;
|
|
||||||
use std::{string::FromUtf8Error, sync::Arc};
|
|
||||||
use tracing::{debug, error, info, instrument, warn, Span};
|
|
||||||
use tracing_futures::Instrument;
|
|
||||||
|
|
||||||
mod hasher;
|
|
||||||
mod session;
|
|
||||||
|
|
||||||
pub(super) use session::UploadManagerSession;
|
|
||||||
|
|
||||||
// TREE STRUCTURE
|
|
||||||
// - Alias Tree
|
|
||||||
// - alias -> hash
|
|
||||||
// - alias / id -> u64(id)
|
|
||||||
// - alias / delete -> delete token
|
|
||||||
// - Main Tree
|
|
||||||
// - hash -> filename
|
|
||||||
// - hash 0 u64(id) -> alias
|
|
||||||
// - DEPRECATED:
|
|
||||||
// - hash 2 variant path -> variant path
|
|
||||||
// - hash 2 vairant path details -> details
|
|
||||||
// - Filename Tree
|
|
||||||
// - filename -> hash
|
|
||||||
// - Details Tree
|
|
||||||
// - filename / S::Identifier -> details
|
|
||||||
// - Identifier Tree
|
|
||||||
// - filename -> S::Identifier
|
|
||||||
// - filename / variant path -> S::Identifier
|
|
||||||
// - filename / motion -> S::Identifier
|
|
||||||
// - Settings Tree
|
|
||||||
// - store-migration-progress -> Path Tree Key
|
|
||||||
|
|
||||||
const STORE_MIGRATION_PROGRESS: &[u8] = b"store-migration-progress";
|
|
||||||
|
|
||||||
#[derive(Clone)]
|
|
||||||
pub(crate) struct UploadManager {
|
|
||||||
inner: Arc<UploadManagerInner>,
|
|
||||||
}
|
|
||||||
|
|
||||||
pub(crate) struct UploadManagerInner {
|
|
||||||
format: Option<Format>,
|
|
||||||
hasher: sha2::Sha256,
|
|
||||||
pub(crate) alias_tree: sled::Tree,
|
|
||||||
pub(crate) filename_tree: sled::Tree,
|
|
||||||
pub(crate) main_tree: sled::Tree,
|
|
||||||
details_tree: sled::Tree,
|
|
||||||
settings_tree: sled::Tree,
|
|
||||||
pub(crate) identifier_tree: sled::Tree,
|
|
||||||
db: sled::Db,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Clone, Debug, serde::Deserialize, serde::Serialize)]
|
|
||||||
pub(crate) struct Details {
|
|
||||||
width: usize,
|
|
||||||
height: usize,
|
|
||||||
content_type: Serde<mime::Mime>,
|
|
||||||
created_at: time::OffsetDateTime,
|
|
||||||
}
|
|
||||||
|
|
||||||
struct FilenameIVec {
|
|
||||||
inner: sled::IVec,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl UploadManager {
|
|
||||||
/// Create a new UploadManager
|
|
||||||
pub(crate) async fn new(db: sled::Db, format: Option<Format>) -> Result<Self, Error> {
|
|
||||||
let manager = UploadManager {
|
|
||||||
inner: Arc::new(UploadManagerInner {
|
|
||||||
format,
|
|
||||||
hasher: sha2::Sha256::new(),
|
|
||||||
alias_tree: db.open_tree("alias")?,
|
|
||||||
filename_tree: db.open_tree("filename")?,
|
|
||||||
main_tree: db.open_tree("main")?,
|
|
||||||
details_tree: db.open_tree("details")?,
|
|
||||||
settings_tree: db.open_tree("settings")?,
|
|
||||||
identifier_tree: db.open_tree("path")?,
|
|
||||||
db,
|
|
||||||
}),
|
|
||||||
};
|
|
||||||
|
|
||||||
Ok(manager)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub(crate) async fn migrate_store<S1, S2>(&self, from: S1, to: S2) -> Result<(), Error>
|
|
||||||
where
|
|
||||||
S1: Store,
|
|
||||||
S2: Store,
|
|
||||||
Error: From<S1::Error> + From<S2::Error>,
|
|
||||||
{
|
|
||||||
let iter =
|
|
||||||
if let Some(starting_line) = self.inner.settings_tree.get(STORE_MIGRATION_PROGRESS)? {
|
|
||||||
self.inner.identifier_tree.range(starting_line..)
|
|
||||||
} else {
|
|
||||||
self.inner.identifier_tree.iter()
|
|
||||||
};
|
|
||||||
|
|
||||||
for res in iter {
|
|
||||||
let (key, identifier) = res?;
|
|
||||||
|
|
||||||
let identifier = S1::Identifier::from_bytes(identifier.to_vec())?;
|
|
||||||
|
|
||||||
let filename =
|
|
||||||
if let Some((filename, _)) = String::from_utf8_lossy(&key).split_once('/') {
|
|
||||||
filename.to_string()
|
|
||||||
} else {
|
|
||||||
String::from_utf8_lossy(&key).to_string()
|
|
||||||
};
|
|
||||||
|
|
||||||
let stream = from.to_stream(&identifier, None, None).await?;
|
|
||||||
futures_util::pin_mut!(stream);
|
|
||||||
let mut reader = tokio_util::io::StreamReader::new(stream);
|
|
||||||
|
|
||||||
let new_identifier = to.save_async_read(&mut reader, &filename).await?;
|
|
||||||
|
|
||||||
let details_key = self.details_key(&identifier, &filename)?;
|
|
||||||
|
|
||||||
if let Some(details) = self.inner.details_tree.get(details_key.clone())? {
|
|
||||||
let new_details_key = self.details_key(&new_identifier, &filename)?;
|
|
||||||
|
|
||||||
self.inner.details_tree.insert(new_details_key, details)?;
|
|
||||||
}
|
|
||||||
|
|
||||||
self.inner
|
|
||||||
.identifier_tree
|
|
||||||
.insert(key.clone(), new_identifier.to_bytes()?)?;
|
|
||||||
self.inner.details_tree.remove(details_key)?;
|
|
||||||
self.inner
|
|
||||||
.settings_tree
|
|
||||||
.insert(STORE_MIGRATION_PROGRESS, key)?;
|
|
||||||
|
|
||||||
let (ident, detail, settings) = futures_util::future::join3(
|
|
||||||
self.inner.identifier_tree.flush_async(),
|
|
||||||
self.inner.details_tree.flush_async(),
|
|
||||||
self.inner.settings_tree.flush_async(),
|
|
||||||
)
|
|
||||||
.await;
|
|
||||||
|
|
||||||
ident?;
|
|
||||||
detail?;
|
|
||||||
settings?;
|
|
||||||
}
|
|
||||||
|
|
||||||
// clean up the migration key to avoid interfering with future migrations
|
|
||||||
self.inner.settings_tree.remove(STORE_MIGRATION_PROGRESS)?;
|
|
||||||
self.inner.settings_tree.flush_async().await?;
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
pub(crate) fn inner(&self) -> &UploadManagerInner {
|
|
||||||
&self.inner
|
|
||||||
}
|
|
||||||
|
|
||||||
pub(crate) async fn still_identifier_from_filename<S: Store + Clone>(
|
|
||||||
&self,
|
|
||||||
store: S,
|
|
||||||
filename: String,
|
|
||||||
) -> Result<S::Identifier, Error>
|
|
||||||
where
|
|
||||||
Error: From<S::Error>,
|
|
||||||
{
|
|
||||||
let identifier = self.identifier_from_filename::<S>(filename.clone()).await?;
|
|
||||||
let details =
|
|
||||||
if let Some(details) = self.variant_details(&identifier, filename.clone()).await? {
|
|
||||||
details
|
|
||||||
} else {
|
|
||||||
let hint = details_hint(&filename);
|
|
||||||
Details::from_store(store.clone(), identifier.clone(), hint).await?
|
|
||||||
};
|
|
||||||
|
|
||||||
if !details.is_motion() {
|
|
||||||
return Ok(identifier);
|
|
||||||
}
|
|
||||||
|
|
||||||
if let Some(motion_identifier) = self.motion_identifier::<S>(&filename).await? {
|
|
||||||
return Ok(motion_identifier);
|
|
||||||
}
|
|
||||||
|
|
||||||
let permit = crate::PROCESS_SEMAPHORE.acquire().await;
|
|
||||||
let mut reader = crate::ffmpeg::thumbnail(
|
|
||||||
store.clone(),
|
|
||||||
identifier,
|
|
||||||
InputFormat::Mp4,
|
|
||||||
ThumbnailFormat::Jpeg,
|
|
||||||
)
|
|
||||||
.await?;
|
|
||||||
let motion_identifier = store.save_async_read(&mut reader, &filename).await?;
|
|
||||||
drop(permit);
|
|
||||||
|
|
||||||
self.store_motion_path(&filename, &motion_identifier)
|
|
||||||
.await?;
|
|
||||||
Ok(motion_identifier)
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn motion_identifier<S: Store>(
|
|
||||||
&self,
|
|
||||||
filename: &str,
|
|
||||||
) -> Result<Option<S::Identifier>, Error>
|
|
||||||
where
|
|
||||||
Error: From<S::Error>,
|
|
||||||
{
|
|
||||||
let identifier_tree = self.inner.identifier_tree.clone();
|
|
||||||
let motion_key = format!("{}/motion", filename);
|
|
||||||
|
|
||||||
let opt = web::block(move || identifier_tree.get(motion_key.as_bytes())).await??;
|
|
||||||
|
|
||||||
if let Some(ivec) = opt {
|
|
||||||
return Ok(Some(S::Identifier::from_bytes(ivec.to_vec())?));
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(None)
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn store_motion_path<I: Identifier>(
|
|
||||||
&self,
|
|
||||||
filename: &str,
|
|
||||||
identifier: &I,
|
|
||||||
) -> Result<(), Error>
|
|
||||||
where
|
|
||||||
Error: From<I::Error>,
|
|
||||||
{
|
|
||||||
let identifier_bytes = identifier.to_bytes()?;
|
|
||||||
let motion_key = format!("{}/motion", filename);
|
|
||||||
let identifier_tree = self.inner.identifier_tree.clone();
|
|
||||||
|
|
||||||
web::block(move || identifier_tree.insert(motion_key.as_bytes(), identifier_bytes))
|
|
||||||
.await??;
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
#[instrument(skip(self))]
|
|
||||||
pub(crate) async fn identifier_from_filename<S: Store>(
|
|
||||||
&self,
|
|
||||||
filename: String,
|
|
||||||
) -> Result<S::Identifier, Error>
|
|
||||||
where
|
|
||||||
Error: From<S::Error>,
|
|
||||||
{
|
|
||||||
let identifier_tree = self.inner.identifier_tree.clone();
|
|
||||||
let path_ivec = web::block(move || identifier_tree.get(filename.as_bytes()))
|
|
||||||
.await??
|
|
||||||
.ok_or(UploadError::MissingFile)?;
|
|
||||||
|
|
||||||
let identifier = S::Identifier::from_bytes(path_ivec.to_vec())?;
|
|
||||||
|
|
||||||
Ok(identifier)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[instrument(skip(self))]
|
|
||||||
async fn store_identifier<I: Identifier>(
|
|
||||||
&self,
|
|
||||||
filename: String,
|
|
||||||
identifier: &I,
|
|
||||||
) -> Result<(), Error>
|
|
||||||
where
|
|
||||||
Error: From<I::Error>,
|
|
||||||
{
|
|
||||||
let identifier_bytes = identifier.to_bytes()?;
|
|
||||||
let identifier_tree = self.inner.identifier_tree.clone();
|
|
||||||
web::block(move || identifier_tree.insert(filename.as_bytes(), identifier_bytes)).await??;
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
#[instrument(skip(self))]
|
|
||||||
pub(crate) async fn variant_identifier<S: Store>(
|
|
||||||
&self,
|
|
||||||
process_path: &std::path::Path,
|
|
||||||
filename: &str,
|
|
||||||
) -> Result<Option<S::Identifier>, Error>
|
|
||||||
where
|
|
||||||
Error: From<S::Error>,
|
|
||||||
{
|
|
||||||
let key = self.variant_key(process_path, filename)?;
|
|
||||||
let identifier_tree = self.inner.identifier_tree.clone();
|
|
||||||
let path_opt = web::block(move || identifier_tree.get(key)).await??;
|
|
||||||
|
|
||||||
if let Some(ivec) = path_opt {
|
|
||||||
let identifier = S::Identifier::from_bytes(ivec.to_vec())?;
|
|
||||||
Ok(Some(identifier))
|
|
||||||
} else {
|
|
||||||
Ok(None)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Store the path to a generated image variant so we can easily clean it up later
|
|
||||||
#[instrument(skip(self))]
|
|
||||||
pub(crate) async fn store_variant<I: Identifier>(
|
|
||||||
&self,
|
|
||||||
variant_process_path: Option<&std::path::Path>,
|
|
||||||
identifier: &I,
|
|
||||||
filename: &str,
|
|
||||||
) -> Result<(), Error>
|
|
||||||
where
|
|
||||||
Error: From<I::Error>,
|
|
||||||
{
|
|
||||||
let key = if let Some(path) = variant_process_path {
|
|
||||||
self.variant_key(path, filename)?
|
|
||||||
} else {
|
|
||||||
let mut vec = filename.as_bytes().to_vec();
|
|
||||||
vec.extend(b"/");
|
|
||||||
vec.extend(&identifier.to_bytes()?);
|
|
||||||
vec
|
|
||||||
};
|
|
||||||
let identifier_tree = self.inner.identifier_tree.clone();
|
|
||||||
let identifier_bytes = identifier.to_bytes()?;
|
|
||||||
|
|
||||||
debug!("Storing variant");
|
|
||||||
web::block(move || identifier_tree.insert(key, identifier_bytes)).await??;
|
|
||||||
debug!("Stored variant");
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Get the image details for a given variant
|
|
||||||
#[instrument(skip(self))]
|
|
||||||
pub(crate) async fn variant_details<I: Identifier>(
|
|
||||||
&self,
|
|
||||||
identifier: &I,
|
|
||||||
filename: String,
|
|
||||||
) -> Result<Option<Details>, Error>
|
|
||||||
where
|
|
||||||
Error: From<I::Error>,
|
|
||||||
{
|
|
||||||
let key = self.details_key(identifier, &filename)?;
|
|
||||||
let details_tree = self.inner.details_tree.clone();
|
|
||||||
|
|
||||||
debug!("Getting details");
|
|
||||||
let opt = match web::block(move || details_tree.get(key)).await?? {
|
|
||||||
Some(ivec) => match serde_json::from_slice(&ivec) {
|
|
||||||
Ok(details) => Some(details),
|
|
||||||
Err(_) => None,
|
|
||||||
},
|
|
||||||
None => None,
|
|
||||||
};
|
|
||||||
debug!("Got details");
|
|
||||||
|
|
||||||
Ok(opt)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[instrument(skip(self))]
|
|
||||||
pub(crate) async fn store_variant_details<I: Identifier>(
|
|
||||||
&self,
|
|
||||||
identifier: &I,
|
|
||||||
filename: String,
|
|
||||||
details: &Details,
|
|
||||||
) -> Result<(), Error>
|
|
||||||
where
|
|
||||||
Error: From<I::Error>,
|
|
||||||
{
|
|
||||||
let key = self.details_key(identifier, &filename)?;
|
|
||||||
let details_tree = self.inner.details_tree.clone();
|
|
||||||
let details_value = serde_json::to_vec(details)?;
|
|
||||||
|
|
||||||
debug!("Storing details");
|
|
||||||
web::block(move || details_tree.insert(key, details_value)).await??;
|
|
||||||
debug!("Stored details");
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Get a list of aliases for a given file
|
|
||||||
pub(crate) async fn aliases_by_filename(&self, filename: String) -> Result<Vec<String>, Error> {
|
|
||||||
let fname_tree = self.inner.filename_tree.clone();
|
|
||||||
let hash = web::block(move || fname_tree.get(filename.as_bytes()))
|
|
||||||
.await??
|
|
||||||
.ok_or(UploadError::MissingAlias)?;
|
|
||||||
|
|
||||||
self.aliases_by_hash(&hash).await
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Get a list of aliases for a given alias
|
|
||||||
pub(crate) async fn aliases_by_alias(&self, alias: String) -> Result<Vec<String>, Error> {
|
|
||||||
let alias_tree = self.inner.alias_tree.clone();
|
|
||||||
let hash = web::block(move || alias_tree.get(alias.as_bytes()))
|
|
||||||
.await??
|
|
||||||
.ok_or(UploadError::MissingFilename)?;
|
|
||||||
|
|
||||||
self.aliases_by_hash(&hash).await
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn aliases_by_hash(&self, hash: &sled::IVec) -> Result<Vec<String>, Error> {
|
|
||||||
let (start, end) = alias_key_bounds(hash);
|
|
||||||
let main_tree = self.inner.main_tree.clone();
|
|
||||||
let aliases = web::block(move || {
|
|
||||||
main_tree
|
|
||||||
.range(start..end)
|
|
||||||
.values()
|
|
||||||
.collect::<Result<Vec<_>, _>>()
|
|
||||||
})
|
|
||||||
.await??;
|
|
||||||
|
|
||||||
debug!("Got {} aliases for hash", aliases.len());
|
|
||||||
let aliases = aliases
|
|
||||||
.into_iter()
|
|
||||||
.filter_map(|s| String::from_utf8(s.to_vec()).ok())
|
|
||||||
.collect::<Vec<_>>();
|
|
||||||
|
|
||||||
for alias in aliases.iter() {
|
|
||||||
debug!("{}", alias);
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(aliases)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Delete an alias without a delete token
|
|
||||||
pub(crate) async fn delete_without_token<S: Store + 'static>(
|
|
||||||
&self,
|
|
||||||
store: S,
|
|
||||||
alias: String,
|
|
||||||
) -> Result<(), Error>
|
|
||||||
where
|
|
||||||
Error: From<S::Error>,
|
|
||||||
{
|
|
||||||
let token_key = delete_key(&alias);
|
|
||||||
let alias_tree = self.inner.alias_tree.clone();
|
|
||||||
let token = web::block(move || alias_tree.get(token_key.as_bytes()))
|
|
||||||
.await??
|
|
||||||
.ok_or(UploadError::MissingAlias)?;
|
|
||||||
|
|
||||||
self.delete(store, alias, String::from_utf8(token.to_vec())?)
|
|
||||||
.await
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Delete the alias, and the file & variants if no more aliases exist
|
|
||||||
#[instrument(skip(self, alias, token))]
|
|
||||||
pub(crate) async fn delete<S: Store + 'static>(
|
|
||||||
&self,
|
|
||||||
store: S,
|
|
||||||
alias: String,
|
|
||||||
token: String,
|
|
||||||
) -> Result<(), Error>
|
|
||||||
where
|
|
||||||
Error: From<S::Error>,
|
|
||||||
{
|
|
||||||
use sled::Transactional;
|
|
||||||
let main_tree = self.inner.main_tree.clone();
|
|
||||||
let alias_tree = self.inner.alias_tree.clone();
|
|
||||||
|
|
||||||
let span = Span::current();
|
|
||||||
let alias2 = alias.clone();
|
|
||||||
let hash = web::block(move || {
|
|
||||||
[&main_tree, &alias_tree].transaction(|v| {
|
|
||||||
let entered = span.enter();
|
|
||||||
let main_tree = &v[0];
|
|
||||||
let alias_tree = &v[1];
|
|
||||||
|
|
||||||
// -- GET TOKEN --
|
|
||||||
debug!("Deleting alias -> delete-token mapping");
|
|
||||||
let existing_token = alias_tree
|
|
||||||
.remove(delete_key(&alias2).as_bytes())?
|
|
||||||
.ok_or_else(|| trans_upload_error(UploadError::MissingAlias))?;
|
|
||||||
|
|
||||||
// Bail if invalid token
|
|
||||||
if existing_token != token {
|
|
||||||
warn!("Invalid delete token");
|
|
||||||
return Err(trans_upload_error(UploadError::InvalidToken));
|
|
||||||
}
|
|
||||||
|
|
||||||
// -- GET ID FOR HASH TREE CLEANUP --
|
|
||||||
debug!("Deleting alias -> id mapping");
|
|
||||||
let id = alias_tree
|
|
||||||
.remove(alias_id_key(&alias2).as_bytes())?
|
|
||||||
.ok_or_else(|| trans_upload_error(UploadError::MissingAlias))?;
|
|
||||||
let id = String::from_utf8(id.to_vec()).map_err(trans_utf8_error)?;
|
|
||||||
|
|
||||||
// -- GET HASH FOR HASH TREE CLEANUP --
|
|
||||||
debug!("Deleting alias -> hash mapping");
|
|
||||||
let hash = alias_tree
|
|
||||||
.remove(alias2.as_bytes())?
|
|
||||||
.ok_or_else(|| trans_upload_error(UploadError::MissingAlias))?;
|
|
||||||
|
|
||||||
// -- REMOVE HASH TREE ELEMENT --
|
|
||||||
debug!("Deleting hash -> alias mapping");
|
|
||||||
main_tree.remove(alias_key(&hash, &id))?;
|
|
||||||
drop(entered);
|
|
||||||
Ok(hash)
|
|
||||||
})
|
|
||||||
})
|
|
||||||
.await??;
|
|
||||||
|
|
||||||
self.check_delete_files(store, hash).await
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn check_delete_files<S: Store + 'static>(
|
|
||||||
&self,
|
|
||||||
store: S,
|
|
||||||
hash: sled::IVec,
|
|
||||||
) -> Result<(), Error>
|
|
||||||
where
|
|
||||||
Error: From<S::Error>,
|
|
||||||
{
|
|
||||||
// -- CHECK IF ANY OTHER ALIASES EXIST --
|
|
||||||
let main_tree = self.inner.main_tree.clone();
|
|
||||||
let (start, end) = alias_key_bounds(&hash);
|
|
||||||
debug!("Checking for additional aliases referencing hash");
|
|
||||||
let any_aliases = web::block(move || {
|
|
||||||
Ok(main_tree.range(start..end).next().is_some()) as Result<bool, Error>
|
|
||||||
})
|
|
||||||
.await??;
|
|
||||||
|
|
||||||
// Bail if there are existing aliases
|
|
||||||
if any_aliases {
|
|
||||||
debug!("Other aliases reference file, not removing from disk");
|
|
||||||
return Ok(());
|
|
||||||
}
|
|
||||||
|
|
||||||
// -- DELETE HASH ENTRY --
|
|
||||||
let main_tree = self.inner.main_tree.clone();
|
|
||||||
let hash2 = hash.clone();
|
|
||||||
debug!("Deleting hash -> filename mapping");
|
|
||||||
let filename = web::block(move || main_tree.remove(&hash2))
|
|
||||||
.await??
|
|
||||||
.ok_or(UploadError::MissingFile)?;
|
|
||||||
|
|
||||||
// -- DELETE FILES --
|
|
||||||
let this = self.clone();
|
|
||||||
let cleanup_span = tracing::info_span!(
|
|
||||||
parent: None,
|
|
||||||
"Cleanup",
|
|
||||||
filename = &tracing::field::display(String::from_utf8_lossy(&filename)),
|
|
||||||
);
|
|
||||||
cleanup_span.follows_from(Span::current());
|
|
||||||
debug!("Spawning cleanup task");
|
|
||||||
actix_rt::spawn(
|
|
||||||
async move {
|
|
||||||
if let Err(e) = this
|
|
||||||
.cleanup_files(store, FilenameIVec::new(filename.clone()))
|
|
||||||
.await
|
|
||||||
{
|
|
||||||
error!("Error removing files from fs, {}", e);
|
|
||||||
}
|
|
||||||
info!(
|
|
||||||
"Files deleted for {:?}",
|
|
||||||
String::from_utf8(filename.to_vec())
|
|
||||||
);
|
|
||||||
}
|
|
||||||
.instrument(cleanup_span),
|
|
||||||
);
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Fetch the real on-disk filename given an alias
|
|
||||||
#[instrument(skip(self))]
|
|
||||||
pub(crate) async fn from_alias(&self, alias: String) -> Result<String, Error> {
|
|
||||||
let tree = self.inner.alias_tree.clone();
|
|
||||||
debug!("Getting hash from alias");
|
|
||||||
let hash = web::block(move || tree.get(alias.as_bytes()))
|
|
||||||
.await??
|
|
||||||
.ok_or(UploadError::MissingAlias)?;
|
|
||||||
|
|
||||||
let main_tree = self.inner.main_tree.clone();
|
|
||||||
debug!("Getting filename from hash");
|
|
||||||
let filename = web::block(move || main_tree.get(hash))
|
|
||||||
.await??
|
|
||||||
.ok_or(UploadError::MissingFile)?;
|
|
||||||
|
|
||||||
let filename = String::from_utf8(filename.to_vec())?;
|
|
||||||
|
|
||||||
Ok(filename)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub(crate) fn session<S: Store + Clone + 'static>(&self, store: S) -> UploadManagerSession<S>
|
|
||||||
where
|
|
||||||
Error: From<S::Error>,
|
|
||||||
{
|
|
||||||
UploadManagerSession::new(self.clone(), store)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Find image variants and remove them from the DB and the disk
|
|
||||||
#[instrument(skip(self))]
|
|
||||||
async fn cleanup_files<S: Store>(&self, store: S, filename: FilenameIVec) -> Result<(), Error>
|
|
||||||
where
|
|
||||||
Error: From<S::Error>,
|
|
||||||
{
|
|
||||||
let filename = filename.inner;
|
|
||||||
|
|
||||||
let filename2 = filename.clone();
|
|
||||||
let identifier_tree = self.inner.identifier_tree.clone();
|
|
||||||
let identifier = web::block(move || identifier_tree.remove(filename2)).await??;
|
|
||||||
|
|
||||||
let mut errors = Vec::new();
|
|
||||||
if let Some(identifier) = identifier {
|
|
||||||
let identifier = S::Identifier::from_bytes(identifier.to_vec())?;
|
|
||||||
debug!("Deleting {:?}", identifier);
|
|
||||||
if let Err(e) = store.remove(&identifier).await {
|
|
||||||
errors.push(e);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
let filename2 = filename.clone();
|
|
||||||
let fname_tree = self.inner.filename_tree.clone();
|
|
||||||
debug!("Deleting filename -> hash mapping");
|
|
||||||
web::block(move || fname_tree.remove(filename2)).await??;
|
|
||||||
|
|
||||||
let path_prefix = filename.clone();
|
|
||||||
let identifier_tree = self.inner.identifier_tree.clone();
|
|
||||||
debug!("Fetching file variants");
|
|
||||||
let identifiers = web::block(move || {
|
|
||||||
identifier_tree
|
|
||||||
.scan_prefix(path_prefix)
|
|
||||||
.values()
|
|
||||||
.collect::<Result<Vec<sled::IVec>, sled::Error>>()
|
|
||||||
})
|
|
||||||
.await??;
|
|
||||||
|
|
||||||
debug!("{} files prepared for deletion", identifiers.len());
|
|
||||||
|
|
||||||
for id in identifiers {
|
|
||||||
let identifier = S::Identifier::from_bytes(id.to_vec())?;
|
|
||||||
|
|
||||||
debug!("Deleting {:?}", identifier);
|
|
||||||
if let Err(e) = store.remove(&identifier).await {
|
|
||||||
errors.push(e);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
let path_prefix = filename.clone();
|
|
||||||
let identifier_tree = self.inner.identifier_tree.clone();
|
|
||||||
debug!("Deleting path info");
|
|
||||||
web::block(move || {
|
|
||||||
for res in identifier_tree.scan_prefix(path_prefix).keys() {
|
|
||||||
let key = res?;
|
|
||||||
identifier_tree.remove(key)?;
|
|
||||||
}
|
|
||||||
Ok(()) as Result<(), Error>
|
|
||||||
})
|
|
||||||
.await??;
|
|
||||||
|
|
||||||
for error in errors {
|
|
||||||
error!("Error deleting files, {}", error);
|
|
||||||
}
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
pub(crate) fn variant_key(
|
|
||||||
&self,
|
|
||||||
variant_process_path: &std::path::Path,
|
|
||||||
filename: &str,
|
|
||||||
) -> Result<Vec<u8>, Error> {
|
|
||||||
let path_string = variant_process_path
|
|
||||||
.to_str()
|
|
||||||
.ok_or(UploadError::Path)?
|
|
||||||
.to_string();
|
|
||||||
|
|
||||||
let vec = format!("{}/{}", filename, path_string).as_bytes().to_vec();
|
|
||||||
Ok(vec)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn details_key<I: Identifier>(&self, identifier: &I, filename: &str) -> Result<Vec<u8>, Error>
|
|
||||||
where
|
|
||||||
Error: From<I::Error>,
|
|
||||||
{
|
|
||||||
let mut vec = filename.as_bytes().to_vec();
|
|
||||||
vec.extend(b"/");
|
|
||||||
vec.extend(&identifier.to_bytes()?);
|
|
||||||
|
|
||||||
Ok(vec)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Details {
    fn is_motion(&self) -> bool {
        self.content_type.type_() == "video"
            || self.content_type.type_() == "image" && self.content_type.subtype() == "gif"
    }

    #[tracing::instrument("Details from bytes", skip(input))]
    pub(crate) async fn from_bytes(
        input: web::Bytes,
        hint: Option<ValidInputType>,
    ) -> Result<Self, Error> {
        let details = crate::magick::details_bytes(input, hint).await?;

        Ok(Details::now(
            details.width,
            details.height,
            details.mime_type,
        ))
    }

    #[tracing::instrument("Details from store")]
    pub(crate) async fn from_store<S: Store>(
        store: S,
        identifier: S::Identifier,
        expected_format: Option<ValidInputType>,
    ) -> Result<Self, Error>
    where
        Error: From<S::Error>,
    {
        let details = crate::magick::details_store(store, identifier, expected_format).await?;

        Ok(Details::now(
            details.width,
            details.height,
            details.mime_type,
        ))
    }

    fn now(width: usize, height: usize, content_type: mime::Mime) -> Self {
        Details {
            width,
            height,
            content_type: Serde::new(content_type),
            created_at: time::OffsetDateTime::now_utc(),
        }
    }

    pub(crate) fn content_type(&self) -> mime::Mime {
        (*self.content_type).clone()
    }

    pub(crate) fn system_time(&self) -> std::time::SystemTime {
        self.created_at.into()
    }
}

impl FilenameIVec {
    fn new(inner: sled::IVec) -> Self {
        FilenameIVec { inner }
    }
}

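// Adapters from our error types into sled's ConflictableTransactionError, so
// failures inside a sled transaction abort the transaction cleanly.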
fn trans_upload_error(
    upload_error: UploadError,
) -> sled::transaction::ConflictableTransactionError<Error> {
    trans_err(upload_error)
}

fn trans_utf8_error(e: FromUtf8Error) -> sled::transaction::ConflictableTransactionError<Error> {
    trans_err(e)
}

fn trans_err<E>(e: E) -> sled::transaction::ConflictableTransactionError<Error>
where
    Error: From<E>,
{
    sled::transaction::ConflictableTransactionError::Abort(e.into())
}

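// Alias-tree key under which an alias's delete token is stored.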
fn delete_key(alias: &str) -> String {
    format!("{}/delete", alias)
}

impl std::fmt::Debug for UploadManager {
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        f.debug_struct("UploadManager").finish()
    }
}

impl std::fmt::Debug for FilenameIVec {
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        write!(f, "{:?}", String::from_utf8(self.inner.to_vec()))
    }
}

@@ -1,414 +0,0 @@
use crate::{
    error::{Error, UploadError},
    magick::ValidInputType,
    migrate::{alias_id_key, alias_key},
    store::Store,
    upload_manager::{
        delete_key,
        hasher::{Hash, Hasher},
        UploadManager,
    },
};
use actix_web::web;
use futures_util::stream::{Stream, StreamExt};
use tracing::{debug, instrument, warn, Span};
use tracing_futures::Instrument;
use uuid::Uuid;

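// A single in-flight upload. The session remembers the alias it handed out and
// whether the upload completed; the Drop impl below uses both to unwind
// partial uploads.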
pub(crate) struct UploadManagerSession<S: Store + Clone + 'static>
where
    Error: From<S::Error>,
{
    store: S,
    manager: UploadManager,
    alias: Option<String>,
    finished: bool,
}

impl<S: Store + Clone + 'static> UploadManagerSession<S>
where
    Error: From<S::Error>,
{
    pub(super) fn new(manager: UploadManager, store: S) -> Self {
        UploadManagerSession {
            store,
            manager,
            alias: None,
            finished: false,
        }
    }

    pub(crate) fn succeed(mut self) {
        self.finished = true;
    }

    pub(crate) fn alias(&self) -> Option<&str> {
        self.alias.as_deref()
    }
}

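// Whether an upload's hash was already present in the main tree.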
enum Dup {
    Exists,
    New,
}

impl Dup {
    fn exists(&self) -> bool {
        matches!(self, Dup::Exists)
    }
}

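// Dropping an unfinished session means the upload failed part-way, so spawn a
// detached task that removes any records written so far.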
impl<S: Store + Clone + 'static> Drop for UploadManagerSession<S>
where
    Error: From<S::Error>,
{
    fn drop(&mut self) {
        if self.finished {
            return;
        }

        if let Some(alias) = self.alias.take() {
            let store = self.store.clone();
            let manager = self.manager.clone();
            let cleanup_span = tracing::info_span!(
                parent: None,
                "Upload cleanup",
                alias = &tracing::field::display(&alias),
            );
            cleanup_span.follows_from(Span::current());
            actix_rt::spawn(
                async move {
                    // undo alias -> hash mapping
                    debug!("Remove alias -> hash mapping");
                    if let Ok(Some(hash)) = manager.inner.alias_tree.remove(&alias) {
                        // undo alias -> id mapping
                        debug!("Remove alias -> id mapping");
                        let key = alias_id_key(&alias);
                        if let Ok(Some(id)) = manager.inner.alias_tree.remove(&key) {
                            // undo hash/id -> alias mapping
                            debug!("Remove hash/id -> alias mapping");
                            let id = String::from_utf8_lossy(&id);
                            let key = alias_key(&hash, &id);
                            let _ = manager.inner.main_tree.remove(&key);
                        }

                        let _ = manager.check_delete_files(store, hash).await;
                    }
                }
                .instrument(cleanup_span),
            );
        }
    }
}

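// The session's public surface: minting delete tokens and the import/upload
// entry points, plus the bookkeeping helpers they share.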
impl<S: Store> UploadManagerSession<S>
where
    Error: From<S::Error>,
{
    /// Generate a delete token for an alias
    #[instrument(skip(self))]
    pub(crate) async fn delete_token(&self) -> Result<String, Error> {
        let alias = self.alias.clone().ok_or(UploadError::MissingAlias)?;

        debug!("Generating delete token");
        let s: String = Uuid::new_v4().to_string();
        let delete_token = s.clone();

        debug!("Saving delete token");
        let alias_tree = self.manager.inner.alias_tree.clone();
        let key = delete_key(&alias);
        let res = web::block(move || {
            alias_tree.compare_and_swap(
                key.as_bytes(),
                None as Option<sled::IVec>,
                Some(s.as_bytes()),
            )
        })
        .await??;

        if let Err(sled::CompareAndSwapError {
            current: Some(ivec),
            ..
        }) = res
        {
            let s = String::from_utf8(ivec.to_vec())?;

            debug!("Returning existing delete token, {}", s);
            return Ok(s);
        }

        debug!("Returning new delete token, {}", delete_token);
        Ok(delete_token)
    }

    /// Upload the file while preserving the filename, optionally validating the uploaded image
    #[instrument(skip(self, stream))]
    pub(crate) async fn import(
        mut self,
        alias: String,
        validate: bool,
        mut stream: impl Stream<Item = Result<web::Bytes, Error>> + Unpin,
    ) -> Result<Self, Error> {
        let mut bytes_mut = actix_web::web::BytesMut::new();

        debug!("Reading stream to memory");
        while let Some(res) = stream.next().await {
            let bytes = res?;
            bytes_mut.extend_from_slice(&bytes);
        }

        debug!("Validating bytes");
        let (content_type, validated_reader) = crate::validate::validate_image_bytes(
            bytes_mut.freeze(),
            self.manager.inner.format,
            validate,
        )
        .await?;

        let mut hasher_reader = Hasher::new(validated_reader, self.manager.inner.hasher.clone());

        let filename = self.next_file(content_type).await?;

        let identifier = self
            .store
            .save_async_read(&mut hasher_reader, &filename)
            .await?;
        let hash = hasher_reader.finalize_reset().await?;

        debug!("Storing alias");
        self.alias = Some(alias.clone());
        self.add_existing_alias(&hash, &alias).await?;

        debug!("Saving file");
        self.save_upload(&identifier, hash, filename).await?;

        // Return alias to file
        Ok(self)
    }

    /// Upload the file, discarding bytes if it's already present, or saving if it's new
    #[instrument(skip(self, stream))]
    pub(crate) async fn upload(
        mut self,
        mut stream: impl Stream<Item = Result<web::Bytes, Error>> + Unpin,
    ) -> Result<Self, Error> {
        let mut bytes_mut = actix_web::web::BytesMut::new();

        debug!("Reading stream to memory");
        while let Some(res) = stream.next().await {
            let bytes = res?;
            bytes_mut.extend_from_slice(&bytes);
        }

        debug!("Validating bytes");
        let (input_type, validated_reader) = crate::validate::validate_image_bytes(
            bytes_mut.freeze(),
            self.manager.inner.format,
            true,
        )
        .await?;

        let mut hasher_reader = Hasher::new(validated_reader, self.manager.inner.hasher.clone());

        let filename = self.next_file(input_type).await?;

        let identifier = self
            .store
            .save_async_read(&mut hasher_reader, &filename)
            .await?;
        let hash = hasher_reader.finalize_reset().await?;

        debug!("Adding alias");
        self.add_alias(&hash, input_type).await?;

        debug!("Saving file");
        self.save_upload(&identifier, hash, filename).await?;

        // Return alias to file
        Ok(self)
    }

    // check duplicates & store image if new
    async fn save_upload(
        &self,
        identifier: &S::Identifier,
        hash: Hash,
        filename: String,
    ) -> Result<(), Error> {
        let dup = self.check_duplicate(hash, filename.clone()).await?;

        // bail early with alias to existing file if this is a duplicate
        if dup.exists() {
            debug!("Duplicate exists, removing file");

            self.store.remove(identifier).await?;
            return Ok(());
        }

        self.manager.store_identifier(filename, identifier).await?;

        Ok(())
    }

    // check for an already-uploaded image with this hash, returning whether one already exists
    #[instrument(skip(self, hash))]
    async fn check_duplicate(&self, hash: Hash, filename: String) -> Result<Dup, Error> {
        let main_tree = self.manager.inner.main_tree.clone();

        let filename2 = filename.clone();
        let hash2 = hash.as_slice().to_vec();
        debug!("Inserting filename for hash");
        let res = web::block(move || {
            main_tree.compare_and_swap(
                hash2,
                None as Option<sled::IVec>,
                Some(filename2.as_bytes()),
            )
        })
        .await??;

        if let Err(sled::CompareAndSwapError {
            current: Some(ivec),
            ..
        }) = res
        {
            let name = String::from_utf8(ivec.to_vec())?;
            debug!("Filename exists for hash, {}", name);
            return Ok(Dup::Exists);
        }

        let fname_tree = self.manager.inner.filename_tree.clone();
        debug!("Saving filename -> hash relation");
        web::block(move || fname_tree.insert(filename, hash.into_inner())).await??;

        Ok(Dup::New)
    }

    // generate a short filename that isn't already in-use
    #[instrument(skip(self, input_type))]
    async fn next_file(&self, input_type: ValidInputType) -> Result<String, Error> {
        loop {
            debug!("Filename generation loop");
            let filename = file_name(Uuid::new_v4(), input_type);

            let identifier_tree = self.manager.inner.identifier_tree.clone();
            let filename2 = filename.clone();
            let filename_exists = web::block(move || identifier_tree.get(filename2.as_bytes()))
                .await??
                .is_some();

            if !filename_exists {
                return Ok(filename);
            }

            debug!("Filename exists, trying again");
        }
    }

    #[instrument(skip(self, hash, alias))]
    async fn add_existing_alias(&self, hash: &Hash, alias: &str) -> Result<(), Error> {
        self.save_alias_hash_mapping(hash, alias).await??;

        self.store_hash_id_alias_mapping(hash, alias).await?;

        Ok(())
    }

    // Add an alias to an existing file
    //
    // This will help if multiple 'users' upload the same file, and one of them wants to delete it
    #[instrument(skip(self, hash, input_type))]
    async fn add_alias(&mut self, hash: &Hash, input_type: ValidInputType) -> Result<(), Error> {
        let alias = self.next_alias(hash, input_type).await?;

        self.store_hash_id_alias_mapping(hash, &alias).await?;

        Ok(())
    }

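    // The loop below is a compare-and-swap retry: generate a fresh id, try to
    // claim the hash/id -> alias slot, and start over with a new id if another
    // writer got there first.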
    // Add a pre-defined alias to an existing file
    //
    // DANGER: this can cause BAD BAD BAD conflicts if the same alias is used for multiple files
    #[instrument(skip(self, hash))]
    async fn store_hash_id_alias_mapping(&self, hash: &Hash, alias: &str) -> Result<(), Error> {
        let alias = alias.to_string();
        loop {
            debug!("hash -> alias save loop");
            let db = self.manager.inner.db.clone();
            let id = web::block(move || db.generate_id()).await??.to_string();

            let alias_tree = self.manager.inner.alias_tree.clone();
            let key = alias_id_key(&alias);
            let id2 = id.clone();
            debug!("Saving alias -> id mapping");
            web::block(move || alias_tree.insert(key.as_bytes(), id2.as_bytes())).await??;

            let key = alias_key(hash.as_slice(), &id);
            let main_tree = self.manager.inner.main_tree.clone();
            let alias2 = alias.clone();
            debug!("Saving hash/id -> alias mapping");
            let res = web::block(move || {
                main_tree.compare_and_swap(key, None as Option<sled::IVec>, Some(alias2.as_bytes()))
            })
            .await??;

            if res.is_ok() {
                break;
            }

            debug!("Id exists, trying again");
        }

        Ok(())
    }

    // Generate an alias to the file
    #[instrument(skip(self, hash, input_type))]
    async fn next_alias(
        &mut self,
        hash: &Hash,
        input_type: ValidInputType,
    ) -> Result<String, Error> {
        loop {
            debug!("Alias gen loop");
            let alias = file_name(Uuid::new_v4(), input_type);
            self.alias = Some(alias.clone());

            let res = self.save_alias_hash_mapping(hash, &alias).await?;

            if res.is_ok() {
                return Ok(alias);
            }
            debug!("Alias exists, regenning");
        }
    }

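    // The nested Result below separates infrastructure failures (outer) from
    // the expected "alias already taken" case (inner), which next_alias treats
    // as a signal to retry.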
    // Save an alias to the database
    #[instrument(skip(self, hash))]
    async fn save_alias_hash_mapping(
        &self,
        hash: &Hash,
        alias: &str,
    ) -> Result<Result<(), Error>, Error> {
        let tree = self.manager.inner.alias_tree.clone();
        let vec = hash.as_slice().to_vec();
        let alias = alias.to_string();

        debug!("Saving alias -> hash mapping");
        let res = web::block(move || {
            tree.compare_and_swap(alias.as_bytes(), None as Option<sled::IVec>, Some(vec))
        })
        .await??;

        if res.is_err() {
            warn!("Duplicate alias");
            return Ok(Err(UploadError::DuplicateAlias.into()));
        }

        Ok(Ok(()))
    }
}

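// Filenames and aliases are a fresh UUID followed by the extension associated
// with the input type.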
fn file_name(name: Uuid, input_type: ValidInputType) -> String {
    format!("{}{}", name, input_type.as_ext())
}

@@ -1,5 +1,9 @@
 use crate::{
-    config::Format, either::Either, error::Error, ffmpeg::InputFormat, magick::ValidInputType,
+    config::ImageFormat,
+    either::Either,
+    error::{Error, UploadError},
+    ffmpeg::InputFormat,
+    magick::ValidInputType,
 };
 use actix_web::web::Bytes;
 use tokio::io::AsyncRead;
@@ -35,7 +39,8 @@ impl AsyncRead for UnvalidatedBytes
 #[instrument(name = "Validate image", skip(bytes))]
 pub(crate) async fn validate_image_bytes(
     bytes: Bytes,
-    prescribed_format: Option<Format>,
+    prescribed_format: Option<ImageFormat>,
+    enable_silent_video: bool,
     validate: bool,
 ) -> Result<(ValidInputType, impl AsyncRead + Unpin), Error> {
     let input_type = crate::magick::input_type_bytes(bytes.clone()).await?;
@@ -45,31 +50,41 @@ pub(crate) async fn validate_image_bytes
     }

     match (prescribed_format, input_type) {
-        (_, ValidInputType::Gif) => Ok((
-            ValidInputType::Mp4,
-            Either::right(Either::left(
-                crate::ffmpeg::to_mp4_bytes(bytes, InputFormat::Gif).await?,
-            )),
-        )),
-        (_, ValidInputType::Mp4) => Ok((
-            ValidInputType::Mp4,
-            Either::right(Either::left(
-                crate::ffmpeg::to_mp4_bytes(bytes, InputFormat::Mp4).await?,
-            )),
-        )),
+        (_, ValidInputType::Gif) => {
+            if !enable_silent_video {
+                return Err(UploadError::SilentVideoDisabled.into());
+            }
+            Ok((
+                ValidInputType::Mp4,
+                Either::right(Either::left(
+                    crate::ffmpeg::to_mp4_bytes(bytes, InputFormat::Gif).await?,
+                )),
+            ))
+        }
+        (_, ValidInputType::Mp4) => {
+            if !enable_silent_video {
+                return Err(UploadError::SilentVideoDisabled.into());
+            }
+            Ok((
+                ValidInputType::Mp4,
+                Either::right(Either::left(
+                    crate::ffmpeg::to_mp4_bytes(bytes, InputFormat::Mp4).await?,
+                )),
+            ))
+        }
-        (Some(Format::Jpeg) | None, ValidInputType::Jpeg) => Ok((
+        (Some(ImageFormat::Jpeg) | None, ValidInputType::Jpeg) => Ok((
             ValidInputType::Jpeg,
             Either::right(Either::right(Either::left(
                 crate::exiftool::clear_metadata_bytes_read(bytes)?,
             ))),
         )),
-        (Some(Format::Png) | None, ValidInputType::Png) => Ok((
+        (Some(ImageFormat::Png) | None, ValidInputType::Png) => Ok((
             ValidInputType::Png,
             Either::right(Either::right(Either::left(
                 crate::exiftool::clear_metadata_bytes_read(bytes)?,
             ))),
         )),
-        (Some(Format::Webp) | None, ValidInputType::Webp) => Ok((
+        (Some(ImageFormat::Webp) | None, ValidInputType::Webp) => Ok((
             ValidInputType::Webp,
             Either::right(Either::right(Either::right(Either::left(
                 crate::magick::clear_metadata_bytes_read(bytes)?,