Merge pull request 'Support for storing media on S3' (#1149) from lx/Plume:s3 into main

Reviewed-on: https://git.joinplu.me/Plume/Plume/pulls/1149
Reviewed-by: trinity-1686a <trinity-1686a@noreply.joinplu.me>
trinity-1686a, 2023-06-21 18:18:37 +00:00, commit 304fb740d8
13 changed files with 852 additions and 90 deletions

.envrc (Normal file, 1 line changed)

@@ -0,0 +1 @@
use flake

.gitignore (vendored, 1 line changed)

@@ -20,3 +20,4 @@ search_index
__pycache__
.vscode/
*-journal
.direnv/

Cargo.lock (generated, 259 lines changed)

@@ -227,7 +227,7 @@ dependencies = [
"derive_builder",
"diligent-date-parser",
"never",
-"quick-xml",
"quick-xml 0.27.1",
]
[[package]]
@@ -241,6 +241,20 @@ dependencies = [
"winapi 0.3.9",
]
[[package]]
name = "attohttpc"
version = "0.22.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1fcf00bc6d5abb29b5f97e3c61a90b6d3caa12f3faf897d4a3e3607c050a35a7"
dependencies = [
"http 0.2.8",
"log 0.4.17",
"native-tls",
"serde 1.0.152",
"serde_json",
"url 2.3.1",
]
[[package]]
name = "atty"
version = "0.2.14"
@@ -267,6 +281,32 @@ version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa"
[[package]]
name = "aws-creds"
version = "0.34.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3776743bb68d4ad02ba30ba8f64373f1be4e082fe47651767171ce75bb2f6cf5"
dependencies = [
"attohttpc",
"dirs",
"log 0.4.17",
"quick-xml 0.26.0",
"rust-ini 0.18.0",
"serde 1.0.152",
"thiserror",
"time 0.3.17",
"url 2.3.1",
]
[[package]]
name = "aws-region"
version = "0.25.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "056557a61427d0e5ba29dd931031c8ffed4ee7a550e7cd55692a9d8deb0a9dba"
dependencies = [
"thiserror",
]
[[package]]
name = "backtrace"
version = "0.1.8"
@@ -389,6 +429,25 @@ dependencies = [
"generic-array",
]
[[package]]
name = "block-buffer"
version = "0.10.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71"
dependencies = [
"generic-array",
]
[[package]]
name = "block_on_proc"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b872f3528eeeb4370ee73b51194dc1cd93680c2d0eb6c7a223889038d2c1a167"
dependencies = [
"quote 1.0.23",
"syn 1.0.107",
]
[[package]]
name = "blowfish"
version = "0.9.1"
@@ -578,7 +637,7 @@ checksum = "19b076e143e1d9538dde65da30f8481c2a6c44040edb8e02b9bf1351edb92ce3"
dependencies = [
"lazy_static",
"nom 5.1.2",
-"rust-ini",
"rust-ini 0.13.0",
"serde 1.0.152",
"serde-hjson",
"serde_json",
@@ -636,10 +695,10 @@ dependencies = [
"aes-gcm",
"base64 0.13.1",
"hkdf",
-"hmac",
"hmac 0.10.1",
"percent-encoding 2.2.0",
"rand 0.8.5",
-"sha2",
"sha2 0.9.9",
"time 0.1.45",
]
@@ -1141,6 +1200,17 @@ dependencies = [
"generic-array",
]
[[package]]
name = "digest"
version = "0.10.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8168378f4e5023e7218c89c891c0fd8ecdb5e5e4f18cb78f38cf245dd021e76f"
dependencies = [
"block-buffer 0.10.4",
"crypto-common",
"subtle",
]
[[package]]
name = "diligent-date-parser"
version = "0.1.4"
@@ -1150,6 +1220,32 @@ dependencies = [
"chrono",
]
[[package]]
name = "dirs"
version = "4.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ca3aa72a6f96ea37bbc5aa912f6788242832f75369bdfdadcb0e38423f100059"
dependencies = [
"dirs-sys",
]
[[package]]
name = "dirs-sys"
version = "0.3.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1b1d1d91c932ef41c0f2663aa8b0ca0342d444d842c06914aa0a7e352d0bada6"
dependencies = [
"libc",
"redox_users",
"winapi 0.3.9",
]
[[package]]
name = "dlv-list"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0688c2a7f92e427f44895cd63841bff7b29f8d7a1648b9e7e07a4a365b2e1257"
[[package]]
name = "dotenv"
version = "0.15.0"
@@ -1793,8 +1889,8 @@ version = "0.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "51ab2f639c231793c5f6114bdb9bbe50a7dbbfcd7c7c6bd8475dec2d991e964f"
dependencies = [
-"digest",
"digest 0.9.0",
-"hmac",
"hmac 0.10.1",
]
[[package]]
@@ -1804,7 +1900,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c1441c6b1e930e2817404b5046f1f989899143a12bf92de603b69f4e0aee1e15"
dependencies = [
"crypto-mac",
-"digest",
"digest 0.9.0",
]
[[package]]
name = "hmac"
version = "0.12.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6c49c37c09c17a53d937dfbb742eb3a961d65a994e6bcdcf37e7399d0cc8ab5e"
dependencies = [
"digest 0.10.6",
]
[[package]]
@@ -2549,6 +2654,17 @@ version = "0.1.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a3e378b66a060d48947b590737b30a1be76706c8dd7b8ba0f2fe3989c68a853f"
[[package]]
name = "maybe-async"
version = "0.2.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0f1b8c13cb1f814b634a96b2c725449fe7ed464a7b8781de8688be5ffbd3f305"
dependencies = [
"proc-macro2 1.0.49",
"quote 1.0.23",
"syn 1.0.107",
]
[[package]]
name = "maybe-uninit"
version = "2.0.0"
@@ -2641,6 +2757,15 @@ dependencies = [
"unicase 2.6.0",
]
[[package]]
name = "minidom"
version = "0.15.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2e9ce45d459e358790a285e7609ff5ae4cfab88b75f237e8838e62029dda397b"
dependencies = [
"rxml",
]
[[package]]
name = "minimal-lexical"
version = "0.2.1"
@@ -3063,6 +3188,16 @@ dependencies = [
"vcpkg",
]
[[package]]
name = "ordered-multimap"
version = "0.4.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ccd746e37177e1711c20dd619a1620f34f5c8b569c53590a72dedd5344d8924a"
dependencies = [
"dlv-list",
"hashbrown 0.12.3",
]
[[package]]
name = "overload"
version = "0.1.1"
@@ -3409,6 +3544,7 @@ dependencies = [
"riker",
"rocket",
"rocket_i18n",
"rust-s3",
"scheduled-thread-pool", "scheduled-thread-pool",
"serde 1.0.152", "serde 1.0.152",
"serde_derive", "serde_derive",
@ -3552,6 +3688,16 @@ version = "1.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0" checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0"
[[package]]
name = "quick-xml"
version = "0.26.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7f50b1c63b38611e7d4d7f68b82d3ad0cc71a2ad2e7f61fc10f1328d917c93cd"
dependencies = [
"memchr",
"serde 1.0.152",
]
[[package]]
name = "quick-xml"
version = "0.27.1"
@@ -3836,6 +3982,17 @@ dependencies = [
"bitflags 1.3.2",
]
[[package]]
name = "redox_users"
version = "0.4.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b033d837a7cf162d7993aded9304e30a83213c648b6e389db233191f891e5c2b"
dependencies = [
"getrandom 0.2.8",
"redox_syscall 0.2.16",
"thiserror",
]
[[package]]
name = "regex"
version = "1.7.0"
@@ -3967,6 +4124,7 @@ dependencies = [
"tokio 1.24.1",
"tokio-native-tls",
"tokio-socks",
"tokio-util 0.7.4",
"tower-service", "tower-service",
"url 2.3.1", "url 2.3.1",
"wasm-bindgen", "wasm-bindgen",
@ -4159,6 +4317,50 @@ version = "0.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3e52c148ef37f8c375d49d5a73aa70713125b7f19095948a923f80afdeb22ec2" checksum = "3e52c148ef37f8c375d49d5a73aa70713125b7f19095948a923f80afdeb22ec2"
[[package]]
name = "rust-ini"
version = "0.18.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f6d5f2436026b4f6e79dc829837d467cc7e9a55ee40e750d716713540715a2df"
dependencies = [
"cfg-if 1.0.0",
"ordered-multimap",
]
[[package]]
name = "rust-s3"
version = "0.33.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1b2ac5ff6acfbe74226fa701b5ef793aaa054055c13ebb7060ad36942956e027"
dependencies = [
"async-trait",
"aws-creds",
"aws-region",
"base64 0.13.1",
"block_on_proc",
"bytes 1.3.0",
"cfg-if 1.0.0",
"futures 0.3.25",
"hex",
"hmac 0.12.1",
"http 0.2.8",
"log 0.4.17",
"maybe-async",
"md5",
"minidom",
"percent-encoding 2.2.0",
"quick-xml 0.26.0",
"reqwest 0.11.13",
"serde 1.0.152",
"serde_derive",
"sha2 0.10.6",
"thiserror",
"time 0.3.17",
"tokio 1.24.1",
"tokio-stream",
"url 2.3.1",
]
[[package]]
name = "rust-stemmers"
version = "1.2.0"
@@ -4184,6 +4386,25 @@ dependencies = [
"semver",
]
[[package]]
name = "rxml"
version = "0.8.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1a071866b8c681dc2cfffa77184adc32b57b0caad4e620b6292609703bceb804"
dependencies = [
"bytes 1.3.0",
"pin-project-lite 0.2.9",
"rxml_validation",
"smartstring",
"tokio 1.24.1",
]
[[package]]
name = "rxml_validation"
version = "0.8.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "53bc79743f9a66c2fb1f951cd83735f275d46bfe466259fbc5897bb60a0d00ee"
[[package]]
name = "ryu"
version = "1.0.12"
@@ -4363,13 +4584,24 @@ version = "0.9.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4d58a1e1bf39749807d89cf2d98ac2dfa0ff1cb3faa38fbb64dd88ac8013d800"
dependencies = [
-"block-buffer",
"block-buffer 0.9.0",
"cfg-if 1.0.0",
"cpufeatures",
-"digest",
"digest 0.9.0",
"opaque-debug",
]
[[package]]
name = "sha2"
version = "0.10.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "82e6b795fe2e3b1e845bafcb27aa35405c4d47cdfc92af5fc8d3002f76cebdc0"
dependencies = [
"cfg-if 1.0.0",
"cpufeatures",
"digest 0.10.6",
]
[[package]]
name = "sharded-slab"
version = "0.1.4"
@@ -4459,6 +4691,15 @@ version = "1.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a507befe795404456341dfab10cef66ead4c041f62b8b11bbb92bffe5d0953e0"
[[package]]
name = "smartstring"
version = "0.2.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e714dff2b33f2321fdcd475b71cec79781a692d846f37f415fb395a1d2bcd48e"
dependencies = [
"static_assertions",
]
[[package]]
name = "snap"
version = "1.1.0"


@@ -68,12 +68,13 @@ ructe = "0.15.0"
rsass = "0.26"
[features]
-default = ["postgres"]
default = ["postgres", "s3"]
postgres = ["plume-models/postgres", "diesel/postgres"]
sqlite = ["plume-models/sqlite", "diesel/sqlite"]
debug-mailer = []
test = []
search-lindera = ["plume-models/search-lindera"]
s3 = ["plume-models/s3"]
[workspace]
members = ["plume-api", "plume-cli", "plume-models", "plume-common", "plume-front", "plume-macro"]
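Since "s3" now joins the default feature set above, building Plume without S3 support requires opting out of the defaults; a command along the lines of "cargo build --no-default-features --features postgres" (or "sqlite") should do it, reusing the feature names declared in this file. The exact invocation is an editor's sketch, not part of this change.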

flake.lock (Normal file, 116 lines changed)

@@ -0,0 +1,116 @@
{
"nodes": {
"flake-utils": {
"inputs": {
"systems": "systems"
},
"locked": {
"lastModified": 1681202837,
"narHash": "sha256-H+Rh19JDwRtpVPAWp64F+rlEtxUWBAQW28eAi3SRSzg=",
"owner": "numtide",
"repo": "flake-utils",
"rev": "cfacdce06f30d2b68473a46042957675eebb3401",
"type": "github"
},
"original": {
"owner": "numtide",
"repo": "flake-utils",
"type": "github"
}
},
"flake-utils_2": {
"inputs": {
"systems": "systems_2"
},
"locked": {
"lastModified": 1681202837,
"narHash": "sha256-H+Rh19JDwRtpVPAWp64F+rlEtxUWBAQW28eAi3SRSzg=",
"owner": "numtide",
"repo": "flake-utils",
"rev": "cfacdce06f30d2b68473a46042957675eebb3401",
"type": "github"
},
"original": {
"owner": "numtide",
"repo": "flake-utils",
"type": "github"
}
},
"nixpkgs": {
"locked": {
"lastModified": 1683408522,
"narHash": "sha256-9kcPh6Uxo17a3kK3XCHhcWiV1Yu1kYj22RHiymUhMkU=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "897876e4c484f1e8f92009fd11b7d988a121a4e7",
"type": "github"
},
"original": {
"owner": "NixOS",
"ref": "nixos-unstable",
"repo": "nixpkgs",
"type": "github"
}
},
"root": {
"inputs": {
"flake-utils": "flake-utils",
"nixpkgs": "nixpkgs",
"rust-overlay": "rust-overlay"
}
},
"rust-overlay": {
"inputs": {
"flake-utils": "flake-utils_2",
"nixpkgs": [
"nixpkgs"
]
},
"locked": {
"lastModified": 1683857898,
"narHash": "sha256-pyVY4UxM6zUX97g6bk6UyCbZGCWZb2Zykrne8YxacRA=",
"owner": "oxalica",
"repo": "rust-overlay",
"rev": "4e7fba3f37f5e184ada0ef3cf1e4d8ef450f240b",
"type": "github"
},
"original": {
"owner": "oxalica",
"repo": "rust-overlay",
"type": "github"
}
},
"systems": {
"locked": {
"lastModified": 1681028828,
"narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
"owner": "nix-systems",
"repo": "default",
"rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
"type": "github"
},
"original": {
"owner": "nix-systems",
"repo": "default",
"type": "github"
}
},
"systems_2": {
"locked": {
"lastModified": 1681028828,
"narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
"owner": "nix-systems",
"repo": "default",
"rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
"type": "github"
},
"original": {
"owner": "nix-systems",
"repo": "default",
"type": "github"
}
}
},
"root": "root",
"version": 7
}

flake.nix (Normal file, 60 lines changed)

@@ -0,0 +1,60 @@
{
description = "Developpment shell for Plume including nightly Rust compiler";
inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable";
inputs.rust-overlay = {
url = "github:oxalica/rust-overlay";
inputs.nixpkgs.follows = "nixpkgs";
};
inputs.flake-utils.url = "github:numtide/flake-utils";
outputs = { self, nixpkgs, flake-utils, rust-overlay, ... }:
flake-utils.lib.eachDefaultSystem (system:
let
overlays = [ (import rust-overlay) ];
pkgs = import nixpkgs { inherit system overlays; };
inputs = with pkgs; [
(rust-bin.nightly.latest.default.override {
targets = [ "wasm32-unknown-unknown" ];
})
wasm-pack
openssl
pkg-config
gettext
postgresql
sqlite
];
in {
packages.default = pkgs.rustPlatform.buildRustPackage {
pname = "plume";
version = "0.7.3-dev";
src = ./.;
cargoLock = {
lockFile = ./Cargo.lock;
outputHashes = {
"pulldown-cmark-0.8.0" = "sha256-lpfoRDuY3zJ3QmUqJ5k9OL0MEdGDpwmpJ+u5BCj2kIA=";
"rocket_csrf-0.1.2" = "sha256-WywZfMiwZqTPfSDcAE7ivTSYSaFX+N9fjnRsLSLb9wE=";
};
};
buildNoDefaultFeatures = true;
buildFeatures = ["postgresql" "s3"];
nativeBuildInputs = inputs;
buildPhase = ''
wasm-pack build --target web --release plume-front
cargo build --no-default-features --features postgresql,s3 --path .
cargo build --no-default-features --features postgresql,s3 --path plume-cli
'';
installPhase = ''
cargo install --no-default-features --features postgresql,s3 --path . --target-dir $out
cargo install --no-default-features --features postgresql,s3 --path plume-cli --target-dir $out
'';
};
devShells.default = pkgs.mkShell {
packages = inputs;
};
});
}
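As a rough usage note (editor's addition, not part of the change): with this flake checked in, "nix develop" should enter the dev shell defined by devShells.default, "nix build" should attempt the packages.default build sketched above, and the new .envrc ("use flake") lets direnv load the same environment automatically.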


@@ -24,3 +24,4 @@ path = "../plume-models"
postgres = ["plume-models/postgres", "diesel/postgres"]
sqlite = ["plume-models/sqlite", "diesel/sqlite"]
search-lindera = ["plume-models/search-lindera"]
s3 = ["plume-models/s3"]


@@ -18,6 +18,7 @@ rocket_i18n = "0.4.1"
reqwest = "0.11.11"
scheduled-thread-pool = "0.2.6"
serde = "1.0.137"
rust-s3 = { version = "0.33.0", optional = true, features = ["blocking"] }
serde_derive = "1.0" serde_derive = "1.0"
serde_json = "1.0.81" serde_json = "1.0.81"
tantivy = "0.13.3" tantivy = "0.13.3"
@ -61,3 +62,4 @@ diesel_migrations = "1.3.0"
postgres = ["diesel/postgres", "plume-macro/postgres" ] postgres = ["diesel/postgres", "plume-macro/postgres" ]
sqlite = ["diesel/sqlite", "plume-macro/sqlite" ] sqlite = ["diesel/sqlite", "plume-macro/sqlite" ]
search-lindera = ["lindera-tantivy"] search-lindera = ["lindera-tantivy"]
s3 = ["rust-s3"]


@@ -6,6 +6,9 @@ use rocket::Config as RocketConfig;
use std::collections::HashSet;
use std::env::{self, var};
#[cfg(feature = "s3")]
use s3::{Bucket, Region, creds::Credentials};
#[cfg(not(test))]
const DB_NAME: &str = "plume";
#[cfg(test)]
@@ -27,13 +30,23 @@ pub struct Config {
pub mail: Option<MailConfig>,
pub ldap: Option<LdapConfig>,
pub proxy: Option<ProxyConfig>,
pub s3: Option<S3Config>,
}
impl Config {
pub fn proxy(&self) -> Option<&reqwest::Proxy> {
self.proxy.as_ref().map(|p| &p.proxy)
}
}
fn string_to_bool(val: &str, name: &str) -> bool {
match val {
"1" | "true" | "TRUE" => true,
"0" | "false" | "FALSE" => false,
_ => panic!("Invalid configuration: {} is not boolean", name),
}
}
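// Editor's note, not part of the diff: string_to_bool("1", "S3_PATH_STYLE") and
// string_to_bool("true", "S3_PATH_STYLE") evaluate to true, "0"/"false"/"FALSE" to false,
// and any other value hits the panic above, naming the offending variable.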
#[derive(Debug, Clone)]
pub enum InvalidRocketConfig {
Env,
@@ -288,11 +301,7 @@ fn get_ldap_config() -> Option<LdapConfig> {
match (addr, base_dn) {
(Some(addr), Some(base_dn)) => {
let tls = var("LDAP_TLS").unwrap_or_else(|_| "false".to_owned());
-let tls = match tls.as_ref() {
-"1" | "true" | "TRUE" => true,
-"0" | "false" | "FALSE" => false,
-_ => panic!("Invalid LDAP configuration : tls"),
-};
let tls = string_to_bool(&tls, "LDAP_TLS");
let user_name_attr = var("LDAP_USER_NAME_ATTR").unwrap_or_else(|_| "cn".to_owned());
let mail_attr = var("LDAP_USER_MAIL_ATTR").unwrap_or_else(|_| "mail".to_owned());
Some(LdapConfig {
@@ -349,6 +358,104 @@ fn get_proxy_config() -> Option<ProxyConfig> {
})
}
pub struct S3Config {
pub bucket: String,
pub access_key_id: String,
pub access_key_secret: String,
// S3 region. If not set, defaults to us-east-1
pub region: String,
// Hostname of the S3 endpoint. If not set, defaults to $region.amazonaws.com
pub hostname: String,
// May be useful when using self-hosted S3. Won't work with recent AWS buckets
pub path_style: bool,
// http or https
pub protocol: String,
// Download directly from S3 to the user, without going through Plume. Requires public read access on the bucket
pub direct_download: bool,
// Use this hostname for downloads; can be used with a caching proxy in front of S3 (expected to
// be reachable through HTTPS)
pub alias: Option<String>,
}
impl S3Config {
#[cfg(feature = "s3")]
pub fn get_bucket(&self) -> Bucket {
let region = Region::Custom {
region: self.region.clone(),
endpoint: format!("{}://{}", self.protocol, self.hostname),
};
let credentials = Credentials {
access_key: Some(self.access_key_id.clone()),
secret_key: Some(self.access_key_secret.clone()),
security_token: None,
session_token: None,
expiration: None,
};
let bucket = Bucket::new(&self.bucket, region, credentials).unwrap();
if self.path_style {
bucket.with_path_style()
} else {
bucket
}
}
}
fn get_s3_config() -> Option<S3Config> {
let bucket = var("S3_BUCKET").ok();
let access_key_id = var("AWS_ACCESS_KEY_ID").ok();
let access_key_secret = var("AWS_SECRET_ACCESS_KEY").ok();
if bucket.is_none() && access_key_id.is_none() && access_key_secret.is_none() {
return None;
}
#[cfg(not(feature = "s3"))]
panic!("S3 support is not enabled in this build");
#[cfg(feature = "s3")]
{
if bucket.is_none() || access_key_id.is_none() || access_key_secret.is_none() {
panic!("Invalid S3 configuration: some required values are set, but not others");
}
let bucket = bucket.unwrap();
let access_key_id = access_key_id.unwrap();
let access_key_secret = access_key_secret.unwrap();
let region = var("S3_REGION").unwrap_or_else(|_| "us-east-1".to_owned());
let hostname = var("S3_HOSTNAME").unwrap_or_else(|_| format!("{}.amazonaws.com", region));
let protocol = var("S3_PROTOCOL").unwrap_or_else(|_| "https".to_owned());
if protocol != "http" && protocol != "https" {
panic!("Invalid S3 configuration: invalid protocol {}", protocol);
}
let path_style = var("S3_PATH_STYLE").unwrap_or_else(|_| "false".to_owned());
let path_style = string_to_bool(&path_style, "S3_PATH_STYLE");
let direct_download = var("S3_DIRECT_DOWNLOAD").unwrap_or_else(|_| "false".to_owned());
let direct_download = string_to_bool(&direct_download, "S3_DIRECT_DOWNLOAD");
let alias = var("S3_ALIAS_HOST").ok();
if direct_download && protocol == "http" && alias.is_none() {
panic!("S3 direct download is disabled because bucket is accessed through plain HTTP. Use HTTPS or set an alias hostname (S3_ALIAS_HOST).");
}
Some(S3Config {
bucket,
access_key_id,
access_key_secret,
region,
hostname,
protocol,
path_style,
direct_download,
alias,
})
}
}
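// Editor's sketch, not part of the diff: one plausible set of environment values that
// get_s3_config() above would accept. Every concrete value below is a hypothetical example,
// only the variable names come from the code.
//
//   S3_BUCKET=plume-media
//   AWS_ACCESS_KEY_ID=AKIAEXAMPLE
//   AWS_SECRET_ACCESS_KEY=example-secret
//   S3_REGION=eu-west-3             (optional, defaults to us-east-1)
//   S3_HOSTNAME=minio.example.com   (optional, defaults to <region>.amazonaws.com)
//   S3_PROTOCOL=https               (only "http" or "https" are accepted)
//   S3_PATH_STYLE=true              (typically needed for self-hosted S3)
//   S3_DIRECT_DOWNLOAD=false
//   S3_ALIAS_HOST=media.example.com (optional hostname of a caching proxy)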
lazy_static! {
pub static ref CONFIG: Config = Config {
base_url: var("BASE_URL").unwrap_or_else(|_| format!(
@@ -380,5 +487,6 @@ lazy_static! {
mail: get_mail_config(),
ldap: get_ldap_config(),
proxy: get_proxy_config(),
s3: get_s3_config(),
};
}


@@ -69,6 +69,8 @@ pub enum Error {
Webfinger,
Expired,
UserAlreadyExists,
#[cfg(feature = "s3")]
S3(s3::error::S3Error),
}
impl From<bcrypt::BcryptError> for Error {
@@ -170,6 +172,13 @@ impl From<request::Error> for Error {
}
}
#[cfg(feature = "s3")]
impl From<s3::error::S3Error> for Error {
fn from(err: s3::error::S3Error) -> Error {
Error::S3(err)
}
}
pub type Result<T> = std::result::Result<T, Error>;
/// Adds a function to a model, that returns the first


@@ -16,6 +16,9 @@ use std::{
path::{Path, PathBuf},
};
#[cfg(feature = "s3")]
use crate::config::S3Config;
const REMOTE_MEDIA_DIRECTORY: &str = "remote";
#[derive(Clone, Identifiable, Queryable, AsChangeset)]
@@ -105,7 +108,7 @@ impl Media {
.file_path
.rsplit_once('.')
.map(|x| x.1)
-.expect("Media::category: extension error")
.unwrap_or("")
.to_lowercase()
{
"png" | "jpg" | "jpeg" | "gif" | "svg" => MediaCategory::Image,
@@ -151,26 +154,99 @@ impl Media {
})
}
/// Returns the full file path for media stored in the local media directory.
pub fn local_path(&self) -> Option<PathBuf> {
if self.file_path.is_empty() {
return None;
}
if CONFIG.s3.is_some() {
#[cfg(feature="s3")]
unreachable!("Called Media::local_path() but media are stored on S3");
#[cfg(not(feature="s3"))]
unreachable!();
}
let relative_path = self
.file_path
.trim_start_matches(&CONFIG.media_directory)
.trim_start_matches(path::MAIN_SEPARATOR)
.trim_start_matches("static/media/");
Some(Path::new(&CONFIG.media_directory).join(relative_path))
}
/// Returns the relative URL to access this file, which is also the key at which
/// it is stored in the S3 bucket if we are using S3 storage.
/// Does not start with a '/', it is of the form "static/media/<...>"
pub fn relative_url(&self) -> Option<String> {
if self.file_path.is_empty() {
return None;
}
let relative_path = self
.file_path
.trim_start_matches(&CONFIG.media_directory)
.replace(path::MAIN_SEPARATOR, "/");
let relative_path = relative_path
.trim_start_matches('/')
.trim_start_matches("static/media/");
Some(format!("static/media/{}", relative_path))
}
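// Editor's illustration, not part of the diff: assuming CONFIG.media_directory is
// "static/media", a Media row with file_path "static/media/blog/cover.png" gives
//   relative_url() -> Some("static/media/blog/cover.png")   (also the S3 object key)
//   local_path()   -> Some("static/media/blog/cover.png")   (local storage only)
// The concrete values are hypothetical; only the trimming logic above comes from the PR.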
/// Returns a public URL through which this media file can be accessed
pub fn url(&self) -> Result<String> {
if self.is_remote {
Ok(self.remote_url.clone().unwrap_or_default())
} else {
-let file_path = self.file_path.replace(path::MAIN_SEPARATOR, "/").replacen(
-&CONFIG.media_directory,
-"static/media",
-1,
-); // "static/media" from plume::routs::plume_media_files()
let relative_url = self.relative_url().unwrap_or_default();
#[cfg(feature="s3")]
if CONFIG.s3.as_ref().map(|x| x.direct_download).unwrap_or(false) {
let s3_url = match CONFIG.s3.as_ref().unwrap() {
S3Config { alias: Some(alias), .. } => {
format!("https://{}/{}", alias, relative_url)
}
S3Config { path_style: true, hostname, bucket, .. } => {
format!("https://{}/{}/{}",
hostname,
bucket,
relative_url
)
}
S3Config { path_style: false, hostname, bucket, .. } => {
format!("https://{}.{}/{}",
bucket,
hostname,
relative_url
)
}
};
return Ok(s3_url);
}
Ok(ap_url(&format!(
"{}/{}",
Instance::get_local()?.public_domain,
-&file_path
relative_url
)))
}
}
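// Editor's illustration, not part of the diff: with hypothetical settings, the three
// direct-download URL shapes built above come out as
//   alias = Some("media.example.org")                  -> https://media.example.org/static/media/x.png
//   path_style = true,  hostname = "minio.example.com", bucket = "plume"
//                                                      -> https://minio.example.com/plume/static/media/x.png
//   path_style = false, hostname = "s3.amazonaws.com",  bucket = "plume"
//                                                      -> https://plume.s3.amazonaws.com/static/media/x.png
// Without direct download enabled, the URL stays on the instance's own domain as before.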
pub fn delete(&self, conn: &Connection) -> Result<()> {
if !self.is_remote {
-fs::remove_file(self.file_path.as_str())?;
if CONFIG.s3.is_some() {
#[cfg(not(feature="s3"))]
unreachable!();
#[cfg(feature = "s3")]
CONFIG.s3.as_ref().unwrap().get_bucket()
.delete_object_blocking(&self.relative_url().ok_or(Error::NotFound)?)?;
} else {
fs::remove_file(self.local_path().ok_or(Error::NotFound)?)?;
}
}
diesel::delete(self)
.execute(conn)
@@ -211,6 +287,42 @@ impl Media {
.url()
.and_then(|url| url.to_as_uri())
.ok_or(Error::MissingApProperty)?;
let file_path = if CONFIG.s3.is_some() {
#[cfg(not(feature="s3"))]
unreachable!();
#[cfg(feature = "s3")]
{
use rocket::http::ContentType;
let dest = determine_mirror_s3_path(&remote_url);
let media = request::get(
remote_url.as_str(),
User::get_sender(),
CONFIG.proxy().cloned(),
)?;
let content_type = media
.headers()
.get(reqwest::header::CONTENT_TYPE)
.and_then(|x| x.to_str().ok())
.and_then(ContentType::parse_flexible)
.unwrap_or(ContentType::Binary);
let bytes = media.bytes()?;
let bucket = CONFIG.s3.as_ref().unwrap().get_bucket();
bucket.put_object_with_content_type_blocking(
&dest,
&bytes,
&content_type.to_string()
)?;
dest
}
} else {
let path = determine_mirror_file_path(&remote_url);
let parent = path.parent().ok_or(Error::InvalidValue)?;
if !parent.is_dir() {
@@ -225,8 +337,10 @@ impl Media {
CONFIG.proxy().cloned(),
)?
.copy_to(&mut dest)?;
path.to_str().ok_or(Error::InvalidValue)?.to_string()
};
-Media::find_by_file_path(conn, path.to_str().ok_or(Error::InvalidValue)?)
Media::find_by_file_path(conn, &file_path)
.and_then(|mut media| {
let mut updated = false;
@@ -267,7 +381,7 @@ impl Media {
Media::insert(
conn,
NewMedia {
-file_path: path.to_str().ok_or(Error::InvalidValue)?.to_string(),
file_path,
alt_text: image
.content()
.and_then(|content| content.to_as_string())
@@ -307,12 +421,10 @@ impl Media {
}
fn determine_mirror_file_path(url: &str) -> PathBuf {
-let mut file_path = Path::new(&super::CONFIG.media_directory).join(REMOTE_MEDIA_DIRECTORY);
let mut file_path = Path::new(&CONFIG.media_directory).join(REMOTE_MEDIA_DIRECTORY);
-Url::parse(url)
-.map(|url| {
-if !url.has_host() {
-return;
-}
match Url::parse(url) {
Ok(url) if url.has_host() => {
file_path.push(url.host_str().unwrap());
for segment in url.path_segments().expect("FIXME") {
file_path.push(segment);
@@ -320,19 +432,54 @@ fn determine_mirror_file_path(url: &str) -> PathBuf {
// TODO: handle query
// HINT: Use characters which must be percent-encoded in path as separator between path and query
// HINT: handle extension
-})
-.unwrap_or_else(|err| {
}
other => {
if let Err(err) = other {
warn!("Failed to parse url: {} {}", &url, err);
} else {
warn!("Error without a host: {}", &url);
}
let ext = url
.rsplit('.')
.next()
.map(ToOwned::to_owned)
.unwrap_or_else(|| String::from("png"));
file_path.push(format!("{}.{}", GUID::rand(), ext));
-});
}
}
file_path
}
#[cfg(feature="s3")]
fn determine_mirror_s3_path(url: &str) -> String {
match Url::parse(url) {
Ok(url) if url.has_host() => {
format!("static/media/{}/{}/{}",
REMOTE_MEDIA_DIRECTORY,
url.host_str().unwrap(),
url.path().trim_start_matches('/'),
)
}
other => {
if let Err(err) = other {
warn!("Failed to parse url: {} {}", &url, err);
} else {
warn!("Error without a host: {}", &url);
}
let ext = url
.rsplit('.')
.next()
.map(ToOwned::to_owned)
.unwrap_or_else(|| String::from("png"));
format!("static/media/{}/{}.{}",
REMOTE_MEDIA_DIRECTORY,
GUID::rand(),
ext,
)
}
}
}
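// Editor's illustration, not part of the diff: for a hypothetical remote URL
// "https://other.example/media/pic.jpg" the S3 key becomes
// "static/media/remote/other.example/media/pic.jpg"; if the URL cannot be parsed or has
// no host, a random GUID plus the guessed extension is used instead, e.g.
// "static/media/remote/<GUID>.jpg".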
#[cfg(test)]
pub(crate) mod tests {
use super::*;


@@ -2,7 +2,7 @@ use crate::routes::{errors::ErrorPage, Page};
use crate::template_utils::{IntoContext, Ructe};
use guid_create::GUID;
use multipart::server::{
-save::{SaveResult, SavedData},
save::{SaveResult, SavedField, SavedData},
Multipart,
};
use plume_models::{db_conn::DbConn, medias::*, users::User, Error, PlumeRocket, CONFIG};
@@ -55,41 +55,16 @@ pub fn upload(
if let SaveResult::Full(entries) = Multipart::with_body(data.open(), boundary).save().temp() {
let fields = entries.fields;
-let filename = fields
let file = fields
.get("file")
.and_then(|v| v.iter().next())
-.ok_or(status::BadRequest(Some("No file uploaded")))?
-.headers
-.filename
-.clone();
-// Remove extension if it contains something else than just letters and numbers
-let ext = filename
-.and_then(|f| {
-f.rsplit('.')
-.next()
-.and_then(|ext| {
-if ext.chars().any(|c| !c.is_alphanumeric()) {
-None
-} else {
-Some(ext.to_lowercase())
-}
-})
-.map(|ext| format!(".{}", ext))
-})
-.unwrap_or_default();
-let dest = format!("{}/{}{}", CONFIG.media_directory, GUID::rand(), ext);
-match fields["file"][0].data {
-SavedData::Bytes(ref bytes) => fs::write(&dest, bytes)
-.map_err(|_| status::BadRequest(Some("Couldn't save upload")))?,
-SavedData::File(ref path, _) => {
-fs::copy(path, &dest)
-.map_err(|_| status::BadRequest(Some("Couldn't copy upload")))?;
-}
-_ => {
-return Ok(Redirect::to(uri!(new)));
-}
-}
.ok_or(status::BadRequest(Some("No file uploaded")))?;
let file_path = match save_uploaded_file(file) {
Ok(Some(file_path)) => file_path,
Ok(None) => return Ok(Redirect::to(uri!(new))),
Err(_) => return Err(status::BadRequest(Some("Couldn't save uploaded media: {}"))),
};
let has_cw = !read(&fields["cw"][0].data)
.map(|cw| cw.is_empty())
@@ -97,7 +72,7 @@ pub fn upload(
let media = Media::insert(
&conn,
NewMedia {
-file_path: dest,
file_path,
alt_text: read(&fields["alt"][0].data)?,
is_remote: false,
remote_url: None,
@@ -117,6 +92,74 @@ pub fn upload(
}
}
fn save_uploaded_file(file: &SavedField) -> Result<Option<String>, plume_models::Error> {
// Remove the extension if it contains anything other than letters and numbers
let ext = file
.headers
.filename
.as_ref()
.and_then(|f| {
f.rsplit('.')
.next()
.and_then(|ext| {
if ext.chars().any(|c| !c.is_alphanumeric()) {
None
} else {
Some(ext.to_lowercase())
}
})
})
.unwrap_or_default();
if CONFIG.s3.is_some() {
#[cfg(not(feature="s3"))]
unreachable!();
#[cfg(feature="s3")]
{
use std::borrow::Cow;
let dest = format!("static/media/{}.{}", GUID::rand(), ext);
let bytes = match file.data {
SavedData::Bytes(ref bytes) => Cow::from(bytes),
SavedData::File(ref path, _) => Cow::from(fs::read(path)?),
_ => {
return Ok(None);
}
};
let bucket = CONFIG.s3.as_ref().unwrap().get_bucket();
let content_type = match &file.headers.content_type {
Some(ct) => ct.to_string(),
None => ContentType::from_extension(&ext)
.unwrap_or(ContentType::Binary)
.to_string(),
};
bucket.put_object_with_content_type_blocking(&dest, &bytes, &content_type)?;
Ok(Some(dest))
}
} else {
let dest = format!("{}/{}.{}", CONFIG.media_directory, GUID::rand(), ext);
match file.data {
SavedData::Bytes(ref bytes) => {
fs::write(&dest, bytes)?;
}
SavedData::File(ref path, _) => {
fs::copy(path, &dest)?;
}
_ => {
return Ok(None);
}
}
Ok(Some(dest))
}
}
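// Editor's note, not part of the diff: for an uploaded "photo.JPG", `ext` becomes "jpg",
// so the object key ends up as "static/media/<GUID>.jpg" on S3 and as
// "<CONFIG.media_directory>/<GUID>.jpg" on local storage; GUID::rand() supplies the name.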
fn read(data: &SavedData) -> Result<String, status::BadRequest<&'static str>> {
if let SavedData::Text(s) = data {
Ok(s.clone())


@@ -21,6 +21,9 @@ use std::{
path::{Path, PathBuf},
};
#[cfg(feature = "s3")]
use rocket::http::ContentType;
/// Special return type used for routes that "cannot fail", and instead
/// `Redirect`, or `Flash<Redirect>`, when we cannot deliver a `Ructe` Response
#[allow(clippy::large_enum_variant)]
@@ -204,10 +207,17 @@ pub mod timelines;
pub mod user;
pub mod well_known;
#[derive(Responder)]
enum FileKind {
Local(NamedFile),
#[cfg(feature = "s3")]
S3(Vec<u8>, ContentType),
}
#[derive(Responder)]
#[response()]
pub struct CachedFile {
-inner: NamedFile,
inner: FileKind,
cache_control: CacheControl,
}
@@ -253,19 +263,41 @@ pub fn plume_static_files(file: PathBuf, build_id: &RawStr) -> Option<CachedFile
}
#[get("/static/media/<file..>")]
pub fn plume_media_files(file: PathBuf) -> Option<CachedFile> {
if CONFIG.s3.is_some() {
#[cfg(not(feature="s3"))]
unreachable!();
#[cfg(feature="s3")]
{
let data = CONFIG.s3.as_ref().unwrap().get_bucket()
.get_object_blocking(format!("static/media/{}", file.to_string_lossy())).ok()?;
let ct = data.headers().get("content-type")
.and_then(|x| ContentType::parse_flexible(&x))
.or_else(|| file.extension()
.and_then(|ext| ContentType::from_extension(&ext.to_string_lossy())))
.unwrap_or(ContentType::Binary);
Some(CachedFile {
inner: FileKind::S3(data.to_vec(), ct),
cache_control: CacheControl(vec![CacheDirective::MaxAge(60 * 60 * 24 * 30)]),
})
}
} else {
NamedFile::open(Path::new(&CONFIG.media_directory).join(file))
.ok()
.map(|f| CachedFile {
-inner: f,
inner: FileKind::Local(f),
cache_control: CacheControl(vec![CacheDirective::MaxAge(60 * 60 * 24 * 30)]),
})
}
}
#[get("/static/<file..>", rank = 3)] #[get("/static/<file..>", rank = 3)]
pub fn static_files(file: PathBuf) -> Option<CachedFile> { pub fn static_files(file: PathBuf) -> Option<CachedFile> {
NamedFile::open(Path::new("static/").join(file)) NamedFile::open(Path::new("static/").join(file))
.ok() .ok()
.map(|f| CachedFile { .map(|f| CachedFile {
inner: f, inner: FileKind::Local(f),
cache_control: CacheControl(vec![CacheDirective::MaxAge(60 * 60 * 24 * 30)]), cache_control: CacheControl(vec![CacheDirective::MaxAge(60 * 60 * 24 * 30)]),
}) })
} }