Fix Range, consolidate errors, test object storage

commit 37e6b21b55 (parent 15b52ba6ec)
24 changed files with 458 additions and 436 deletions
Cargo.lock · 1 change (generated)

@@ -1643,6 +1643,7 @@ dependencies = [
  "tokio",
  "tokio-uring",
  "tokio-util 0.7.0",
+ "toml",
  "tracing",
  "tracing-actix-web",
  "tracing-awc",

Cargo.toml

@@ -56,6 +56,7 @@ time = { version = "0.3.0", features = ["serde"] }
 tokio = { version = "1", features = ["full", "tracing"] }
 tokio-uring = { version = "0.3", optional = true, features = ["bytes"] }
 tokio-util = { version = "0.7", default-features = false, features = ["codec"] }
+toml = "0.5.8"
 tracing = "0.1.15"
 tracing-error = "0.2.0"
 tracing-futures = "0.2.4"
README.md · 166 changes

@@ -9,83 +9,145 @@ _a simple image hosting service_
 ## Usage
 ### Running
 ```
-pict-rs 0.3.0-rc.7
-
-USAGE:
-    pict-rs [FLAGS] [OPTIONS] [SUBCOMMAND]
-
-FLAGS:
-    -h, --help                     Prints help information
-    -s, --skip-validate-imports    Whether to skip validating images uploaded via the internal import API
-    -V, --version                  Prints version information
-
-OPTIONS:
-    -a, --addr <addr>    The address and port the server binds to.
-        --api-key <api-key>
-            An optional string to be checked on requests to privileged endpoints
-    -c, --config-file <config-file>    Path to the pict-rs configuration file
-        --console-buffer-capacity <console-buffer-capacity>
-            Specify the number of events the console subscriber is allowed to buffer
-    -f, --filters <filters>...
-            An optional list of filters to permit, supports 'identity', 'thumbnail', 'resize', 'crop', and 'blur'
-    -i, --image-format <image-format>
-            An optional image format to convert all uploaded files into, supports 'jpg', 'png', and 'webp'
-    -m, --max-file-size <max-file-size>
-            Specify the maximum allowed uploaded file size (in Megabytes)
-        --max-image-area <max-image-area>        Specify the maximum area in pixels allowed in an image
-        --max-image-height <max-image-height>    Specify the maximum width in pixels allowed on an image
-        --max-image-width <max-image-width>      Specify the maximum width in pixels allowed on an image
-        --migrate-file <migrate-file>            Path to a file defining a store migration
-    -o, --opentelemetry-url <opentelemetry-url>
-            Enable OpenTelemetry Tracing exports to the given OpenTelemetry collector
-    -p, --path <path>    The path to the data directory, e.g. data/
-        --sled-cache-capacity <sled-cache-capacity>
-            Specify the number of bytes sled is allowed to use for it's cache
-
-SUBCOMMANDS:
-    file-store
-    help          Prints this message or the help of the given subcommand(s)
-    s3-store
+pict-rs
+
+USAGE:
+    pict-rs [OPTIONS] <SUBCOMMAND>
+
+OPTIONS:
+    -a, --addr <ADDR>
+            The address and port the server binds to.
+
+        --api-key <API_KEY>
+            An optional string to be checked on requests to privileged endpoints
+
+    -c, --config-file <CONFIG_FILE>
+            Path to the pict-rs configuration file
+
+        --console-buffer-capacity <CONSOLE_BUFFER_CAPACITY>
+            Specify the number of events the console subscriber is allowed to buffer
+
+    -f, --filters <FILTERS>
+            An optional list of filters to permit, supports 'identity', 'thumbnail', 'resize',
+            'crop', and 'blur'
+
+        --filesystem-storage-path <FILESYSTEM_STORAGE_PATH>
+            Path in which pict-rs will create it's 'files' directory
+
+    -h, --help
+            Print help information
+
+    -i, --image-format <IMAGE_FORMAT>
+            An optional image format to convert all uploaded files into, supports 'jpg', 'png', and
+            'webp'
+
+    -m, --max-file-size <MAX_FILE_SIZE>
+            Specify the maximum allowed uploaded file size (in Megabytes)
+
+        --max-image-area <MAX_IMAGE_AREA>
+            Specify the maximum area in pixels allowed in an image
+
+        --max-image-height <MAX_IMAGE_HEIGHT>
+            Specify the maximum width in pixels allowed on an image
+
+        --max-image-width <MAX_IMAGE_WIDTH>
+            Specify the maximum width in pixels allowed on an image
+
+    -o, --opentelemetry-url <OPENTELEMETRY_URL>
+            Enable OpenTelemetry Tracing exports to the given OpenTelemetry collector
+
+        --object-store-access-key <OBJECT_STORE_ACCESS_KEY>
+
+        --object-store-bucket-name <OBJECT_STORE_BUCKET_NAME>
+            Name of the bucket in which pict-rs will store images
+
+        --object-store-region <OBJECT_STORE_REGION>
+            Region in which the bucket exists, can be an http endpoint
+
+        --object-store-secret-key <OBJECT_STORE_SECRET_KEY>
+
+        --object-store-security-token <OBJECT_STORE_SECURITY_TOKEN>
+
+        --object-store-session-token <OBJECT_STORE_SESSION_TOKEN>
+
+    -p, --path <PATH>
+            The path to the data directory, e.g. data/
+
+    -R, --repo <REPO>
+            Set the database implementation. Available options are 'sled'. Default is 'sled'
+
+    -s, --skip-validate-imports
+            Whether to skip validating images uploaded via the internal import API
+
+    -S, --store <STORE>
+            Set the image store. Available options are 'object-storage' or 'filesystem'. Default is
+            'filesystem'
+
+        --sled-cache-capacity <SLED_CACHE_CAPACITY>
+            The number of bytes sled is allowed to use for it's in-memory cache
+
+        --sled-path <SLED_PATH>
+            Path in which pict-rs will create it's 'repo' directory
+
+SUBCOMMANDS:
+    dump
+    help             Print this message or the help of the given subcommand(s)
+    migrate-repo
+    migrate-store
+    run
 ```

 ```
-pict-rs-file-store 0.3.0-rc.1
-
-USAGE:
-    pict-rs file-store [OPTIONS]
-
-FLAGS:
-    -h, --help       Prints help information
-    -V, --version    Prints version information
-
-OPTIONS:
-        --path <path>    Path in which pict-rs will create it's 'files' directory
+pict-rs-dump
+
+USAGE:
+    pict-rs dump <PATH>
+
+ARGS:
+    <PATH>
+
+OPTIONS:
+    -h, --help    Print help information
 ```

 ```
-pict-rs-s3-store 0.3.0-rc.1
-
-USAGE:
-    pict-rs s3-store [OPTIONS] --bucket-name <bucket-name> --region <region>
-
-FLAGS:
-    -h, --help       Prints help information
-    -V, --version    Prints version information
-
-OPTIONS:
-        --access-key <access-key>
-        --bucket-name <bucket-name>      Name of the bucket in which pict-rs will store images
-        --region <region>                Region in which the bucket exists, can be an http endpoint
-        --secret-key <secret-key>
-        --security-token <security-token>
-        --session-token <session-token>
+pict-rs-migrate-repo
+
+USAGE:
+    pict-rs migrate-repo <TO>
+
+ARGS:
+    <TO>
+
+OPTIONS:
+    -h, --help    Print help information
+```
+
+```
+pict-rs-migrate-store
+
+USAGE:
+    pict-rs migrate-store <TO>
+
+ARGS:
+    <TO>
+
+OPTIONS:
+    -h, --help    Print help information
+```
+
+```
+pict-rs-run
+
+USAGE:
+    pict-rs run
+
+OPTIONS:
+    -h, --help    Print help information
 ```

 See [`pict-rs.toml`](https://git.asonix.dog/asonix/pict-rs/src/branch/main/pict-rs.toml) and

@@ -95,23 +157,27 @@ configuration
 #### Example:
 Running on all interfaces, port 8080, storing data in /opt/data
 ```
-$ ./pict-rs -a 0.0.0.0:8080 -p /opt/data
+$ ./pict-rs -a 0.0.0.0:8080 -p /opt/data run
 ```
 Running locally, port 9000, storing data in data/, and converting all uploads to PNG
 ```
-$ ./pict-rs -a 127.0.0.1:9000 -p data/ -f png
+$ ./pict-rs -a 127.0.0.1:9000 -p data/ -f png run
 ```
 Running locally, port 8080, storing data in data/, and only allowing the `thumbnail` and `identity` filters
 ```
-$ ./pict-rs -a 127.0.0.1:8080 -p data/ -w thumbnail identity
+$ ./pict-rs -a 127.0.0.1:8080 -p data/ -w thumbnail identity run
 ```
 Running from a configuration file
 ```
-$ ./pict-rs -c ./pict-rs.toml
+$ ./pict-rs -c ./pict-rs.toml run
 ```
-Migrating between storage backends
+Migrating to object storage from filesystem storage (both storages must be configured in pict-rs.toml)
 ```
-$ ./pict-rs -p ./data --migrate-file ./migrate.toml
+$ ./pict-rs -c ./pict-rs.toml --store filesystem migrate-store object-storage
+```
+Dumping commandline flags to a toml file
+```
+$ ./pict-rs -p data/ --store object-storage --object-storage-bucket-name pict-rs --object-storage-region us-east-1 dump pict-rs.toml
 ```

 #### Docker
Dockerfile

@@ -1,25 +1,17 @@
-FROM archlinux:latest
+FROM alpine:edge

 ARG UID=1000
 ARG GID=1000

 RUN \
-    pacman -Syu --noconfirm \
-        perl-image-exiftool \
-        imagemagick \
-        ffmpeg && \
-    groupadd -g 1000 app && \
-    useradd -m \
-        -d /opt/app \
-        -u $UID \
-        -g $GID \
-        app
+    apk add exiftool imagemagick ffmpeg && \
+    addgroup -g $GID app && \
+    adduser -h /opt/app -g "" -G app -u $UID -D app && \
+    chown -R app:app /mnt

 COPY root/ /

 COPY ./pict-rs.toml /etc/pict-rs.toml

-ENV PATH=$PATH:/usr/bin/vendor_perl
-
 WORKDIR /opt/app
 USER app
pict-rs.toml (Docker example)

@@ -1,9 +1,17 @@
-path = '/mnt'
+path = 'data/'
 addr = '0.0.0.0:8080'

-[store]
-type = 's3_store'
-bucket_name = 'pict-rs'
-region = 'http://minio:9000'
-access_key = '09ODZ3BGBISV4U92JLIM'
-secret_key = 'j35YE9RrxhBP0dpiD5mmdXRXvPkEJR4k6zK12q3o'
+repo = 'sled'
+store = 'object_storage'
+
+[sled]
+sled_cache_capacity = 67108864
+
+[filesystem_storage]
+filesystem_storage_path = '/mnt/files'
+
+[object_storage]
+object_store_bucket_name = 'pict-rs'
+object_store_region = 'http://minio:9000'
+object_store_access_key = 'XZEZ5B8Y3UCINU1KCVF6'
+object_store_secret_key = 'cWbE5LcCK9YH8j1NvhOZocl+vH+b6T5Zvy3z+BZu'
policy.xml (ImageMagick; the same hunk is applied to both policy.xml copies in this commit)

@@ -8,12 +8,14 @@
 <policy domain="resource" name="disk" value="1GiB" />
 <policy domain="resource" name="file" value="768" />
 <policy domain="resource" name="thread" value="2" />
+<policy domain="path" rights="none" pattern="@*" />
 <policy domain="coder" rights="none" pattern="*" />
 <policy domain="coder" rights="read | write" pattern="{GIF,JPEG,PNG,WEBP,MP4,TMP,PAM}" />
+<policy domain="delegate" rights="none" pattern="*" />
+<policy domain="delegate" rights="execute" pattern="ffmpeg" />
 <policy domain="filter" rights="none" pattern="*" />
-<policy domain="path" rights="none" pattern="@*" />
 <policy domain="module" rights="none" pattern="*" />
-<policy domain="module" rights="read | write" pattern="{GIF,JPEG,PNG,WEBP,MP4,TMP,PAM}" />
+<policy domain="module" rights="read | write" pattern="{GIF,JPEG,PNG,WEBP,TMP,PAM,VIDEO}" />
 <!-- indirect reads not permitted -->
 <policy domain="system" name="precision" value="6" />
 </policymap>
pict-rs.toml · 116 changes

@@ -43,13 +43,6 @@ max_image_area = 40_000_000 # in Pixels
 # default: false
 skip_validate_imports = false

-## Optional: set sled's cache capacity to a given number of bytes
-# environment variable: PICTRS_SLED_CACHE_CAPACITY
-# default: 67_108_864 (1024 * 1024 * 64) e.g. 64MB
-#
-# Increasing this value can improve performance by keeping more of the database in RAM
-sled_cache_capacity = 67_108_864 # in bytes
-
 ## Optional: enable tokio-console and set the event buffer size
 # environment variable: PICTRS_CONSOLE_BUFFER_CAPACITY
 # default: empty

@@ -95,58 +88,65 @@ api_key = 'API_KEY'
 # Not specifying opentelemetry_url means no traces will be exported
 opentelemetry_url = 'http://localhost:4317/'

-## Optional: store definition
-# default store: file_store
-#
-# Not specifying a store means a file_store will be used with the top-level pict-rs' path
-[store]
-type = "file_store"
-
-## Example file store
-# [store]
-#
-# # environment variable: PICTRS_STORE__TYPE
-# type = 'file_store'
-#
-# # Optional: file path
-# # environment variable: PICTRS_STORE__PATH
-# # default: empty
-# #
-# # Not specifying path means pict-rs' top-level `path` config is used
-# path = './data'
-
-## Example s3 store
-# [store]
-#
-# # environment variable: PICTRS_STORE__TYPE
-# type = 's3_store'
-#
-# # Required: bucket name
-# # environment variable: PICTRS_STORE__BUCKET_NAME
-# bucket_name = 'rust_s3'
-#
-# # Required: bucket region
-# # environment variable: PICTRS_STORE__REGION
-# #
-# # can also be endpoint of local s3 store, e.g. 'http://minio:9000'
-# region = 'eu-central-1'
-#
-# # Optional: bucket access key
-# # environment variable: PICTRS_STORE__ACCESS_KEY
-# # default: empty
-# access_key = 'ACCESS_KEY'
-#
-# # Optional: bucket secret key
-# # environment variable: PICTRS_STORE__SECRET_KEY
-# # default: empty
-# secret_key = 'SECRET_KEY'
-#
-# # Optional: bucket security token
-# # environment variable: PICTRS_STORE__SECURITY_TOKEN
-# # default: empty
-# security_token = 'SECURITY_TOKEN'
-#
-# # Optional: bucket session token
-# # environment variable: PICTRS_STORE__SESSION_TOKEN
-# # default: empty
-# session_token = 'SESSION_TOKEN'
+## Optional: the data repository to use
+# environment variable: PICTRS_REPO
+# default: 'sled'
+# available options: 'sled'
+repo = 'sled'
+
+## Optional: the file storage to use
+# environment variable: PICTRS_STORE
+# default: 'filesystem'
+# available options: 'filesystem', 'object_storage'
+store = 'filesystem'
+
+## Optional: Sled store configration definition
+[sled]
+## Optional: set sled's cache capacity to a given number of bytes
+# environment variable: PICTRS_SLED__SLED_CACHE_CAPACITY
+# default: 67_108_864 (1024 * 1024 * 64) e.g. 64MB
+#
+# Increasing this value can improve performance by keeping more of the database in RAM
+sled_cache_capacity = 67_108_864 # in bytes
+
+## Optional: Filesystem storage configuration
+[filesystem_storage]
+## Optional: set the path for pict-rs filesystem file storage
+# environment variable: PICTRS_FILESYSTEM_STORAGE__FILESYSTEM_STORAGE_PATH
+# default '${path}/files'
+filesystem_storage_path = 'data/files'
+
+## Optional: Object Storage configuration
+[object_storage]
+## Required: bucket name
+# environment variable: PICTRS_OBJECT_STORAGE__OBJECT_STORE_BUCKET_NAME
+object_store_bucket_name = 'pict-rs'
+
+## Required: bucket region
+# environment variable: PICTRS_OBJECT_STORAGE__OBJECT_STORE_REGION
+#
+# can also be endpoint of local s3 store, e.g. 'http://minio:9000'
+object_store_region = 'eu-central-1'
+
+## Optional: bucket access key
+# environment variable: PICTRS_OBJECT_STORAGE__OBJECT_STORE_ACCESS_KEY
+# default: empty
+object_store_access_key = '09ODZ3BGBISV4U92JLIM'
+
+## Optional: bucket secret key
+# environment variable: PICTRS_OBJECT_STORAGE__OBJECT_STORE_SECRET_KEY
+# default: empty
+object_store_secret_key = 'j35YE9RrxhBP0dpiD5mmdXRXvPkEJR4k6zK12q3o'
+
+## Optional: bucket security token
+# environment variable: PICTRS_OBJECT_STORAGE__OBJECT_STORE_SECURITY_TOKEN
+# default: empty
+object_store_security_token = 'SECURITY_TOKEN'
+
+## Optional: bucket session token
+# environment variable: PICTRS_OBJECT_STORAGE__OBJECT_STORE_SESSION_TOKEN
+# default: empty
+object_store_session_token = 'SESSION_TOKEN'
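The double-underscore names above are how the `config` crate maps environment variables onto nested TOML tables: the `PICTRS` prefix selects the app, and each `__` descends one table level. A minimal sketch of that mapping, using a stand-in settings struct rather than pict-rs' real config types:

```rust
use serde::Deserialize;

#[derive(Debug, Deserialize)]
struct Sled {
    sled_cache_capacity: u64,
}

#[derive(Debug, Deserialize)]
struct Settings {
    sled: Sled,
}

fn main() -> Result<(), config::ConfigError> {
    // PICTRS_SLED__SLED_CACHE_CAPACITY maps to [sled] sled_cache_capacity
    std::env::set_var("PICTRS_SLED__SLED_CACHE_CAPACITY", "67108864");

    let settings: Settings = config::Config::builder()
        .add_source(config::Environment::with_prefix("PICTRS").separator("__"))
        .build()?
        .try_deserialize()?;

    // Settings { sled: Sled { sled_cache_capacity: 67108864 } }
    println!("{:?}", settings);
    Ok(())
}
```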
src/config.rs

@@ -170,12 +170,16 @@ impl Overrides {
 #[serde(tag = "type")]
 pub(crate) enum Command {
     Run,
+    Dump { path: PathBuf },
     MigrateStore { to: Store },
     MigrateRepo { to: Repo },
 }

 pub(crate) enum CommandConfig {
     Run,
+    Dump {
+        path: PathBuf,
+    },
     MigrateStore {
         to: Storage,
     },

@@ -287,7 +291,6 @@ pub(crate) enum Repository {
 #[derive(Clone, Debug, serde::Deserialize, serde::Serialize)]
 #[serde(rename_all = "snake_case")]
 pub(crate) struct Config {
-    command: Command,
     skip_validate_imports: bool,
     addr: SocketAddr,
     path: PathBuf,

@@ -301,8 +304,9 @@ pub(crate) struct Config {
     api_key: Option<String>,
     opentelemetry_url: Option<Url>,
     repo: Repo,
-    sled: Option<Sled>,
     store: Store,
+    command: Command,
+    sled: Option<Sled>,
     filesystem_storage: Option<FilesystemStorage>,
     object_storage: Option<ObjectStorage>,
 }

@@ -329,9 +333,9 @@ struct SledDefaults {
 }

 impl Defaults {
-    fn new() -> Self {
+    fn new(command: Command) -> Self {
         Defaults {
-            command: Command::Run,
+            command,
             skip_validate_imports: false,
             addr: ([0, 0, 0, 0], 8080).into(),
             max_file_size: 40,

@@ -351,8 +355,8 @@ impl Config {
     pub(crate) fn build() -> anyhow::Result<Self> {
         let args = Args::parse();

-        let mut base_config =
-            config::Config::builder().add_source(config::Config::try_from(&Defaults::new())?);
+        let mut base_config = config::Config::builder()
+            .add_source(config::Config::try_from(&Defaults::new(args.command))?);

         if let Some(path) = args.config_file {
             base_config = base_config.add_source(config::File::from(path));

@@ -375,6 +379,7 @@ impl Config {
     pub(crate) fn command(&self) -> anyhow::Result<CommandConfig> {
         Ok(match &self.command {
             Command::Run => CommandConfig::Run,
+            Command::Dump { path } => CommandConfig::Dump { path: path.clone() },
             Command::MigrateStore { to } => CommandConfig::MigrateStore {
                 to: match to {
                     Store::ObjectStorage => Storage::ObjectStorage(
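The `build()` change threads the parsed CLI command into the defaults before layering the optional config file on top. A self-contained sketch of that layering order with stand-in types (the real `Defaults` carries many more fields; later sources override earlier ones):

```rust
use serde::{Deserialize, Serialize};

#[derive(Debug, Serialize, Deserialize)]
struct Defaults {
    addr: String,
}

fn load(config_file: Option<std::path::PathBuf>) -> anyhow::Result<config::Config> {
    // 1. hard-coded defaults (seeded from the CLI in the real code)
    let defaults = Defaults { addr: "0.0.0.0:8080".into() };

    let mut builder = config::Config::builder()
        .add_source(config::Config::try_from(&defaults)?);

    // 2. an optional TOML file overrides the defaults
    if let Some(path) = config_file {
        builder = builder.add_source(config::File::from(path));
    }

    // 3. environment variables override both
    Ok(builder
        .add_source(config::Environment::with_prefix("PICTRS").separator("__"))
        .build()?)
}
```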
src/details.rs

@@ -34,10 +34,7 @@ impl Details {
         store: S,
         identifier: S::Identifier,
         expected_format: Option<ValidInputType>,
-    ) -> Result<Self, Error>
-    where
-        Error: From<S::Error>,
-    {
+    ) -> Result<Self, Error> {
         let details = crate::magick::details_store(store, identifier, expected_format).await?;

         Ok(Details::now(
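This `where Error: From<S::Error>` removal repeats across the commit. A small sketch of why the bound can disappear: once conversion into the crate error is guaranteed by the store trait itself, generic functions no longer have to restate it at every signature (names here are illustrative, not pict-rs' actual trait):

```rust
#[derive(Debug)]
struct Error;

trait Store {
    // The conversion requirement lives on the trait...
    type Err: Into<Error>;
    fn read(&self) -> Result<Vec<u8>, Self::Err>;
}

// ...so callers need no `where Error: From<S::Err>` clause.
fn details<S: Store>(store: &S) -> Result<(), Error> {
    let _bytes = store.read().map_err(Into::into)?;
    Ok(())
}

struct FileStore;

#[derive(Debug)]
struct FileError;

impl From<FileError> for Error {
    fn from(_: FileError) -> Self {
        Error
    }
}

impl Store for FileStore {
    type Err = FileError;
    fn read(&self) -> Result<Vec<u8>, FileError> {
        Ok(Vec::new())
    }
}

fn main() {
    details(&FileStore).unwrap();
}
```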
src/error.rs · 16 changes

@@ -15,6 +15,20 @@ impl std::fmt::Debug for Error {
 impl std::fmt::Display for Error {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         writeln!(f, "{}", self.kind)?;
+        writeln!(f)?;
+        let mut count = 0;
+        let mut source = std::error::Error::source(self);
+        if source.is_some() {
+            writeln!(f, "Chain:")?;
+        }
+        while let Some(err) = source {
+            write!(f, "{}. ", count)?;
+            writeln!(f, "{}", err)?;
+
+            count += 1;
+            source = std::error::Error::source(err);
+        }
+
         std::fmt::Display::fmt(&self.context, f)
     }
 }

@@ -43,7 +57,7 @@ pub(crate) enum UploadError {
     Upload(#[from] actix_form_data::Error),

     #[error("Error in DB")]
-    Sled(#[from] crate::repo::sled::Error),
+    Sled(#[from] crate::repo::sled::SledError),

     #[error("Error in old sled DB")]
     OldSled(#[from] ::sled::Error),
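A self-contained sketch of the chain printing added above, with a simplified `Error` so it runs on its own (the real type goes on to display its stored context afterwards):

```rust
use std::fmt;

#[derive(Debug)]
struct Error {
    kind: &'static str,
    source: Option<Box<dyn std::error::Error + 'static>>,
}

impl fmt::Display for Error {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        writeln!(f, "{}", self.kind)?;
        writeln!(f)?;

        let mut count = 0;
        let mut source = std::error::Error::source(self);
        if source.is_some() {
            writeln!(f, "Chain:")?;
        }
        // Walk and number every cause in the source chain.
        while let Some(err) = source {
            write!(f, "{}. ", count)?;
            writeln!(f, "{}", err)?;

            count += 1;
            source = std::error::Error::source(err);
        }

        Ok(())
    }
}

impl std::error::Error for Error {
    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
        self.source.as_deref()
    }
}

fn main() {
    let err = Error {
        kind: "Error in DB",
        source: Some("underlying sled failure".into()),
    };
    println!("{}", err);
}
```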
src/ffmpeg.rs

@@ -44,7 +44,7 @@ impl ThumbnailFormat {
     fn as_format(&self) -> &'static str {
         match self {
-            ThumbnailFormat::Jpeg => "singlejpeg",
+            ThumbnailFormat::Jpeg => "image2",
             // ThumbnailFormat::Webp => "webp",
         }
     }

@@ -101,10 +101,7 @@ pub(crate) async fn thumbnail<S: Store>(
     from: S::Identifier,
     input_format: InputFormat,
     format: ThumbnailFormat,
-) -> Result<impl AsyncRead + Unpin, Error>
-where
-    Error: From<S::Error>,
-{
+) -> Result<impl AsyncRead + Unpin, Error> {
     let input_file = crate::tmp_file::tmp_file(Some(input_format.to_ext()));
     let input_file_str = input_file.to_str().ok_or(UploadError::Path)?;
     crate::store::file_store::safe_create_parent(&input_file).await?;
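`singlejpeg` is not a muxer name that current ffmpeg builds recognize, while `image2` is ffmpeg's image-file muxer, so the rename fixes single-frame thumbnail output. A sketch of the kind of invocation this implies (the command layout is illustrative; pict-rs drives ffmpeg through its own `Process` wrapper):

```rust
use std::process::Command;

// Extract one frame from a video and let the image2 muxer write it as a file.
fn write_thumbnail(input: &str, output: &str) -> std::io::Result<std::process::ExitStatus> {
    Command::new("ffmpeg")
        .args([
            "-i", input,
            "-frames:v", "1", // a single frame
            "-f", "image2",   // the muxer name returned by as_format()
            output,
        ])
        .status()
}

fn main() -> std::io::Result<()> {
    let status = write_thumbnail("input.mp4", "thumb.jpg")?;
    println!("ffmpeg exited with {:?}", status);
    Ok(())
}
```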
src/magick.rs

@@ -139,14 +139,12 @@ pub(crate) async fn details_bytes(
     parse_details(s)
 }

+#[tracing::instrument(skip(store))]
 pub(crate) async fn details_store<S: Store>(
     store: S,
     identifier: S::Identifier,
     hint: Option<ValidInputType>,
-) -> Result<Details, Error>
-where
-    Error: From<S::Error>,
-{
+) -> Result<Details, Error> {
     if hint.as_ref().map(|h| h.is_mp4()).unwrap_or(false) {
         let input_file = crate::tmp_file::tmp_file(Some(".mp4"));
         let input_file_str = input_file.to_str().ok_or(UploadError::Path)?;

@@ -182,6 +180,7 @@ pub(crate) async fn details_store<S: Store>(
     parse_details(s)
 }

+#[tracing::instrument]
 pub(crate) async fn details_file(path_str: &str) -> Result<Details, Error> {
     let process = Process::run(
         "magick",
src/main.rs · 69 changes

@@ -77,10 +77,7 @@ async fn upload<S: Store>(
     value: Value<UploadManagerSession<S>>,
     manager: web::Data<UploadManager>,
     store: web::Data<S>,
-) -> Result<HttpResponse, Error>
-where
-    Error: From<S::Error>,
-{
+) -> Result<HttpResponse, Error> {
     let images = value
         .map()
         .and_then(|mut m| m.remove("images"))

@@ -196,10 +193,7 @@ async fn download<S: Store>(
     manager: web::Data<UploadManager>,
     store: web::Data<S>,
     query: web::Query<UrlQuery>,
-) -> Result<HttpResponse, Error>
-where
-    Error: From<S::Error>,
-{
+) -> Result<HttpResponse, Error> {
     let res = client.get(&query.url).send().await?;

     if !res.status().is_success() {

@@ -249,14 +243,11 @@ async fn delete<S: Store>(
     manager: web::Data<UploadManager>,
     store: web::Data<S>,
     path_entries: web::Path<(String, String)>,
-) -> Result<HttpResponse, Error>
-where
-    Error: From<S::Error>,
-{
-    let (alias, token) = path_entries.into_inner();
+) -> Result<HttpResponse, Error> {
+    let (token, alias) = path_entries.into_inner();

-    let alias = Alias::from_existing(&alias);
     let token = DeleteToken::from_existing(&token);
+    let alias = Alias::from_existing(&alias);

     manager.delete((**store).clone(), alias, token).await?;

@@ -314,10 +305,7 @@ async fn process_details<S: Store>(
     manager: web::Data<UploadManager>,
     store: web::Data<S>,
     filters: web::Data<Option<HashSet<String>>>,
-) -> Result<HttpResponse, Error>
-where
-    Error: From<S::Error>,
-{
+) -> Result<HttpResponse, Error> {
     let (_, alias, thumbnail_path, _) = prepare_process(query, ext.as_str(), &filters)?;

     let identifier = manager

@@ -341,10 +329,7 @@ async fn process<S: Store + 'static>(
     manager: web::Data<UploadManager>,
     store: web::Data<S>,
     filters: web::Data<Option<HashSet<String>>>,
-) -> Result<HttpResponse, Error>
-where
-    Error: From<S::Error>,
-{
+) -> Result<HttpResponse, Error> {
     let (format, alias, thumbnail_path, thumbnail_args) =
         prepare_process(query, ext.as_str(), &filters)?;

@@ -468,10 +453,7 @@ async fn details<S: Store>(
     alias: web::Path<String>,
     manager: web::Data<UploadManager>,
     store: web::Data<S>,
-) -> Result<HttpResponse, Error>
-where
-    Error: From<S::Error>,
-{
+) -> Result<HttpResponse, Error> {
     let alias = alias.into_inner();
     let alias = Alias::from_existing(&alias);

@@ -498,10 +480,7 @@ async fn serve<S: Store>(
     alias: web::Path<String>,
     manager: web::Data<UploadManager>,
     store: web::Data<S>,
-) -> Result<HttpResponse, Error>
-where
-    Error: From<S::Error>,
-{
+) -> Result<HttpResponse, Error> {
     let alias = alias.into_inner();
     let alias = Alias::from_existing(&alias);
     let identifier = manager.identifier_from_alias::<S>(&alias).await?;

@@ -525,10 +504,7 @@ async fn ranged_file_resp<S: Store>(
     identifier: S::Identifier,
     range: Option<web::Header<Range>>,
     details: Details,
-) -> Result<HttpResponse, Error>
-where
-    Error: From<S::Error>,
-{
+) -> Result<HttpResponse, Error> {
     let (builder, stream) = if let Some(web::Header(range_header)) = range {
         //Range header exists - return as ranged
         if let Some(range) = range::single_bytes_range(&range_header) {

@@ -602,10 +578,7 @@ async fn purge<S: Store>(
     query: web::Query<AliasQuery>,
     upload_manager: web::Data<UploadManager>,
     store: web::Data<S>,
-) -> Result<HttpResponse, Error>
-where
-    Error: From<S::Error>,
-{
+) -> Result<HttpResponse, Error> {
     let alias = Alias::from_existing(&query.alias);
     let aliases = upload_manager.aliases_by_alias(&alias).await?;

@@ -626,10 +599,7 @@ async fn aliases<S: Store>(
     query: web::Query<AliasQuery>,
     upload_manager: web::Data<UploadManager>,
     store: web::Data<S>,
-) -> Result<HttpResponse, Error>
-where
-    Error: From<S::Error>,
-{
+) -> Result<HttpResponse, Error> {
     let alias = Alias::from_existing(&query.alias);
     let aliases = upload_manager.aliases_by_alias(&alias).await?;

@@ -658,11 +628,10 @@ fn build_reqwest_client() -> reqwest::Result<reqwest::Client> {
         .build()
 }

-async fn launch<S: Store + Clone + 'static>(manager: UploadManager, store: S) -> anyhow::Result<()>
-where
-    S::Error: Unpin,
-    Error: From<S::Error>,
-{
+async fn launch<S: Store + Clone + 'static>(
+    manager: UploadManager,
+    store: S,
+) -> anyhow::Result<()> {
     // Create a new Multipart Form validator
     //
     // This form is expecting a single array field, 'images' with at most 10 files in it

@@ -797,7 +766,6 @@ async fn migrate_inner<S1>(
 ) -> anyhow::Result<()>
 where
     S1: Store,
-    Error: From<S1::Error>,
 {
     match to {
         config::Storage::Filesystem(RequiredFilesystemStorage { path }) => {

@@ -848,6 +816,11 @@ async fn main() -> anyhow::Result<()> {

     match CONFIG.command()? {
         CommandConfig::Run => (),
+        CommandConfig::Dump { path } => {
+            let configuration = toml::to_string_pretty(&*CONFIG)?;
+            tokio::fs::write(path, configuration).await?;
+            return Ok(());
+        }
         CommandConfig::MigrateRepo { to: _ } => {
             unimplemented!("Repo migrations are currently unsupported")
         }
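The new `dump` arm serializes the fully merged configuration back to TOML, which is why `toml` joins the dependencies above. A minimal standalone sketch of the same flow, with a stand-in `Config` struct in place of pict-rs' static `CONFIG`:

```rust
use serde::Serialize;

#[derive(Serialize)]
struct Config {
    addr: String,
    store: String,
}

#[tokio::main]
async fn main() -> anyhow::Result<()> {
    let config = Config {
        addr: "0.0.0.0:8080".into(),
        store: "filesystem".into(),
    };

    // Pretty-print the config as TOML and write it where the user asked.
    let configuration = toml::to_string_pretty(&config)?;
    tokio::fs::write("pict-rs.toml", configuration).await?;

    Ok(())
}
```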
src/range.rs · 15 changes

@@ -15,7 +15,9 @@ pub(crate) fn chop_bytes(
     length: u64,
 ) -> Result<impl Stream<Item = Result<Bytes, Error>>, Error> {
     if let Some((start, end)) = byte_range.to_satisfiable_range(length) {
-        return Ok(once(ready(Ok(bytes.slice(start as usize..end as usize)))));
+        // END IS INCLUSIVE
+        let end = end as usize + 1;
+        return Ok(once(ready(Ok(bytes.slice(start as usize..end)))));
     }

     Err(UploadError::Range.into())

@@ -26,14 +28,13 @@ pub(crate) async fn chop_store<S: Store>(
     store: &S,
     identifier: &S::Identifier,
     length: u64,
-) -> Result<impl Stream<Item = std::io::Result<Bytes>>, Error>
-where
-    Error: From<S::Error>,
-{
+) -> Result<impl Stream<Item = std::io::Result<Bytes>>, Error> {
     if let Some((start, end)) = byte_range.to_satisfiable_range(length) {
-        return Ok(store
+        // END IS INCLUSIVE
+        let end = end + 1;
+        return store
             .to_stream(identifier, Some(start), Some(end.saturating_sub(start)))
-            .await?);
+            .await;
     }

     Err(UploadError::Range.into())
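The `+ 1` is the actual Range fix named in the commit title: HTTP byte ranges (RFC 7233) are inclusive on both ends, while Rust range indexing excludes the end, so `bytes=0-0` means "the first byte" and a satisfiable range `(start, end)` covers `end - start + 1` bytes. A small check of the arithmetic using the same `Bytes::slice` call as `chop_bytes`:

```rust
use bytes::Bytes;

// `start` and `end_inclusive` mirror what to_satisfiable_range is assumed
// to return above: inclusive offsets into the body.
fn chop(bytes: &Bytes, start: u64, end_inclusive: u64) -> Bytes {
    // END IS INCLUSIVE: slice one past the last requested byte.
    let end = end_inclusive as usize + 1;
    bytes.slice(start as usize..end)
}

fn main() {
    let body = Bytes::from_static(b"hello world");
    // bytes=0-4 selects five bytes, not four.
    assert_eq!(chop(&body, 0, 4), Bytes::from_static(b"hello"));
}
```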
src/repo.rs · 72 changes

@@ -1,5 +1,9 @@
-use crate::config::RequiredSledRepo;
-use crate::{config::Repository, details::Details, store::Identifier};
+use crate::{
+    config::{Repository, RequiredSledRepo},
+    details::Details,
+    error::Error,
+    store::Identifier,
+};
 use futures_util::Stream;
 use tracing::debug;
 use uuid::Uuid;

@@ -34,98 +38,90 @@ pub(crate) struct AlreadyExists;
 #[async_trait::async_trait(?Send)]
 pub(crate) trait SettingsRepo {
     type Bytes: AsRef<[u8]> + From<Vec<u8>>;
-    type Error: std::error::Error + Send + Sync + 'static;

-    async fn set(&self, key: &'static [u8], value: Self::Bytes) -> Result<(), Self::Error>;
-    async fn get(&self, key: &'static [u8]) -> Result<Option<Self::Bytes>, Self::Error>;
-    async fn remove(&self, key: &'static [u8]) -> Result<(), Self::Error>;
+    async fn set(&self, key: &'static [u8], value: Self::Bytes) -> Result<(), Error>;
+    async fn get(&self, key: &'static [u8]) -> Result<Option<Self::Bytes>, Error>;
+    async fn remove(&self, key: &'static [u8]) -> Result<(), Error>;
 }

 #[async_trait::async_trait(?Send)]
 pub(crate) trait IdentifierRepo {
-    type Error: std::error::Error + Send + Sync + 'static;
-
     async fn relate_details<I: Identifier>(
         &self,
         identifier: &I,
         details: &Details,
-    ) -> Result<(), Self::Error>;
-    async fn details<I: Identifier>(&self, identifier: &I) -> Result<Option<Details>, Self::Error>;
+    ) -> Result<(), Error>;
+    async fn details<I: Identifier>(&self, identifier: &I) -> Result<Option<Details>, Error>;

-    async fn cleanup<I: Identifier>(&self, identifier: &I) -> Result<(), Self::Error>;
+    async fn cleanup<I: Identifier>(&self, identifier: &I) -> Result<(), Error>;
 }

 #[async_trait::async_trait(?Send)]
 pub(crate) trait HashRepo {
     type Bytes: AsRef<[u8]> + From<Vec<u8>>;
-    type Error: std::error::Error + Send + Sync + 'static;
-    type Stream: Stream<Item = Result<Self::Bytes, Self::Error>>;
+    type Stream: Stream<Item = Result<Self::Bytes, Error>>;

     async fn hashes(&self) -> Self::Stream;

-    async fn create(&self, hash: Self::Bytes) -> Result<Result<(), AlreadyExists>, Self::Error>;
+    async fn create(&self, hash: Self::Bytes) -> Result<Result<(), AlreadyExists>, Error>;

-    async fn relate_alias(&self, hash: Self::Bytes, alias: &Alias) -> Result<(), Self::Error>;
-    async fn remove_alias(&self, hash: Self::Bytes, alias: &Alias) -> Result<(), Self::Error>;
-    async fn aliases(&self, hash: Self::Bytes) -> Result<Vec<Alias>, Self::Error>;
+    async fn relate_alias(&self, hash: Self::Bytes, alias: &Alias) -> Result<(), Error>;
+    async fn remove_alias(&self, hash: Self::Bytes, alias: &Alias) -> Result<(), Error>;
+    async fn aliases(&self, hash: Self::Bytes) -> Result<Vec<Alias>, Error>;

     async fn relate_identifier<I: Identifier>(
         &self,
         hash: Self::Bytes,
         identifier: &I,
-    ) -> Result<(), Self::Error>;
-    async fn identifier<I: Identifier + 'static>(
-        &self,
-        hash: Self::Bytes,
-    ) -> Result<I, Self::Error>;
+    ) -> Result<(), Error>;
+    async fn identifier<I: Identifier + 'static>(&self, hash: Self::Bytes) -> Result<I, Error>;

     async fn relate_variant_identifier<I: Identifier>(
         &self,
         hash: Self::Bytes,
         variant: String,
         identifier: &I,
-    ) -> Result<(), Self::Error>;
+    ) -> Result<(), Error>;
     async fn variant_identifier<I: Identifier + 'static>(
         &self,
         hash: Self::Bytes,
         variant: String,
-    ) -> Result<Option<I>, Self::Error>;
+    ) -> Result<Option<I>, Error>;
     async fn variants<I: Identifier + 'static>(
         &self,
         hash: Self::Bytes,
-    ) -> Result<Vec<(String, I)>, Self::Error>;
+    ) -> Result<Vec<(String, I)>, Error>;

     async fn relate_motion_identifier<I: Identifier>(
         &self,
         hash: Self::Bytes,
         identifier: &I,
-    ) -> Result<(), Self::Error>;
+    ) -> Result<(), Error>;
     async fn motion_identifier<I: Identifier + 'static>(
         &self,
         hash: Self::Bytes,
-    ) -> Result<Option<I>, Self::Error>;
+    ) -> Result<Option<I>, Error>;

-    async fn cleanup(&self, hash: Self::Bytes) -> Result<(), Self::Error>;
+    async fn cleanup(&self, hash: Self::Bytes) -> Result<(), Error>;
 }

 #[async_trait::async_trait(?Send)]
 pub(crate) trait AliasRepo {
     type Bytes: AsRef<[u8]> + From<Vec<u8>>;
-    type Error: std::error::Error + Send + Sync + 'static;

-    async fn create(&self, alias: &Alias) -> Result<Result<(), AlreadyExists>, Self::Error>;
+    async fn create(&self, alias: &Alias) -> Result<Result<(), AlreadyExists>, Error>;

     async fn relate_delete_token(
         &self,
         alias: &Alias,
         delete_token: &DeleteToken,
-    ) -> Result<Result<(), AlreadyExists>, Self::Error>;
-    async fn delete_token(&self, alias: &Alias) -> Result<DeleteToken, Self::Error>;
+    ) -> Result<Result<(), AlreadyExists>, Error>;
+    async fn delete_token(&self, alias: &Alias) -> Result<DeleteToken, Error>;

-    async fn relate_hash(&self, alias: &Alias, hash: Self::Bytes) -> Result<(), Self::Error>;
-    async fn hash(&self, alias: &Alias) -> Result<Self::Bytes, Self::Error>;
+    async fn relate_hash(&self, alias: &Alias, hash: Self::Bytes) -> Result<(), Error>;
+    async fn hash(&self, alias: &Alias) -> Result<Self::Bytes, Error>;

-    async fn cleanup(&self, alias: &Alias) -> Result<(), Self::Error>;
+    async fn cleanup(&self, alias: &Alias) -> Result<(), Error>;
 }

 impl Repo {

@@ -389,16 +385,14 @@ impl std::fmt::Display for Alias {
 }

 impl Identifier for Vec<u8> {
-    type Error = std::convert::Infallible;
-
-    fn from_bytes(bytes: Vec<u8>) -> Result<Self, Self::Error>
+    fn from_bytes(bytes: Vec<u8>) -> Result<Self, Error>
     where
         Self: Sized,
     {
         Ok(bytes)
     }

-    fn to_bytes(&self) -> Result<Vec<u8>, Self::Error> {
+    fn to_bytes(&self) -> Result<Vec<u8>, Error> {
         Ok(self.clone())
     }
 }
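This is the error consolidation named in the commit title: drop the per-trait `type Error` and let every method return the crate-wide `Error`, which gains `#[from]` conversions for each backend's failure type. A compact sketch of the shape (variant and trait names are trimmed-down stand-ins):

```rust
#[derive(Debug, thiserror::Error)]
enum Error {
    #[error("Error in database")]
    Sled(#[from] sled::Error),

    #[error("Invalid details json")]
    Details(#[from] serde_json::Error),
}

#[async_trait::async_trait(?Send)]
trait SettingsRepo {
    type Bytes: AsRef<[u8]> + From<Vec<u8>>;

    // No associated `type Error`: one failure type for every implementation.
    async fn get(&self, key: &'static [u8]) -> Result<Option<Self::Bytes>, Error>;
}
```

The trade-off is that implementations can no longer surface backend-specific error types to callers, but call sites stop needing `Self::Error` bounds and `?` works uniformly.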
src/repo/sled.rs · 144 changes

@@ -2,24 +2,24 @@ use super::{
     Alias, AliasRepo, AlreadyExists, DeleteToken, Details, HashRepo, Identifier, IdentifierRepo,
     SettingsRepo,
 };
+use crate::error::Error;
 use sled::{Db, IVec, Tree};

 macro_rules! b {
     ($self:ident.$ident:ident, $expr:expr) => {{
         let $ident = $self.$ident.clone();

-        actix_rt::task::spawn_blocking(move || $expr).await??
+        actix_rt::task::spawn_blocking(move || $expr)
+            .await
+            .map_err(SledError::from)??
     }};
 }

 #[derive(Debug, thiserror::Error)]
-pub(crate) enum Error {
+pub(crate) enum SledError {
     #[error("Error in database")]
     Sled(#[from] sled::Error),

-    #[error("Invalid identifier")]
-    Identifier(#[source] Box<dyn std::error::Error + Sync + Send>),
-
     #[error("Invalid details json")]
     Details(#[from] serde_json::Error),
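A sketch of roughly what the revised `b!` macro expands to for one call: clone the sled `Tree`, run the blocking operation on the runtime's blocking pool, and fold the `JoinError` into the same error type (the `Join` variant here is a stand-in for however this commit absorbs `JoinError` into `SledError`):

```rust
use sled::{IVec, Tree};

#[derive(Debug, thiserror::Error)]
enum SledError {
    #[error("Error in database")]
    Sled(#[from] sled::Error),

    #[error("Panic in blocking operation")]
    Join(#[from] actix_rt::task::JoinError),
}

async fn get(settings: &Tree, key: &'static [u8]) -> Result<Option<IVec>, SledError> {
    // What `b!(self.settings, settings.get(key))` boils down to:
    let settings = settings.clone();
    let opt = actix_rt::task::spawn_blocking(move || settings.get(key))
        .await
        .map_err(SledError::from)??; // first ? for the join, second for sled
    Ok(opt)
}
```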
@@ -46,7 +46,7 @@ pub(crate) struct SledRepo {
 }

 impl SledRepo {
-    pub(crate) fn new(db: Db) -> Result<Self, Error> {
+    pub(crate) fn new(db: Db) -> Result<Self, SledError> {
         Ok(SledRepo {
             settings: db.open_tree("pict-rs-settings-tree")?,
             identifier_details: db.open_tree("pict-rs-identifier-details-tree")?,

@@ -66,36 +66,29 @@ impl SledRepo {
 #[async_trait::async_trait(?Send)]
 impl SettingsRepo for SledRepo {
     type Bytes = IVec;
-    type Error = Error;

-    async fn set(&self, key: &'static [u8], value: Self::Bytes) -> Result<(), Self::Error> {
+    #[tracing::instrument(skip(value))]
+    async fn set(&self, key: &'static [u8], value: Self::Bytes) -> Result<(), Error> {
         b!(self.settings, settings.insert(key, value));

         Ok(())
     }

-    async fn get(&self, key: &'static [u8]) -> Result<Option<Self::Bytes>, Self::Error> {
+    #[tracing::instrument]
+    async fn get(&self, key: &'static [u8]) -> Result<Option<Self::Bytes>, Error> {
         let opt = b!(self.settings, settings.get(key));

         Ok(opt)
     }

-    async fn remove(&self, key: &'static [u8]) -> Result<(), Self::Error> {
+    #[tracing::instrument]
+    async fn remove(&self, key: &'static [u8]) -> Result<(), Error> {
         b!(self.settings, settings.remove(key));

         Ok(())
     }
 }

-fn identifier_bytes<I>(identifier: &I) -> Result<Vec<u8>, Error>
-where
-    I: Identifier,
-{
-    identifier
-        .to_bytes()
-        .map_err(|e| Error::Identifier(Box::new(e)))
-}
-
 fn variant_key(hash: &[u8], variant: &str) -> Vec<u8> {
     let mut bytes = hash.to_vec();
     bytes.push(b'/');

@@ -111,14 +104,13 @@ fn variant_from_key(hash: &[u8], key: &[u8]) -> Option<String> {

 #[async_trait::async_trait(?Send)]
 impl IdentifierRepo for SledRepo {
-    type Error = Error;
-
+    #[tracing::instrument]
     async fn relate_details<I: Identifier>(
         &self,
         identifier: &I,
         details: &Details,
-    ) -> Result<(), Self::Error> {
-        let key = identifier_bytes(identifier)?;
+    ) -> Result<(), Error> {
+        let key = identifier.to_bytes()?;
         let details = serde_json::to_vec(&details)?;

         b!(

@@ -129,8 +121,9 @@ impl IdentifierRepo for SledRepo {
         Ok(())
     }

-    async fn details<I: Identifier>(&self, identifier: &I) -> Result<Option<Details>, Self::Error> {
-        let key = identifier_bytes(identifier)?;
+    #[tracing::instrument]
+    async fn details<I: Identifier>(&self, identifier: &I) -> Result<Option<Details>, Error> {
+        let key = identifier.to_bytes()?;

         let opt = b!(self.identifier_details, identifier_details.get(key));

@@ -141,8 +134,9 @@ impl IdentifierRepo for SledRepo {
         }
     }

-    async fn cleanup<I: Identifier>(&self, identifier: &I) -> Result<(), Self::Error> {
-        let key = identifier_bytes(identifier)?;
+    #[tracing::instrument]
+    async fn cleanup<I: Identifier>(&self, identifier: &I) -> Result<(), Error> {
+        let key = identifier.to_bytes()?;

         b!(self.identifier_details, identifier_details.remove(key));

@@ -192,7 +186,13 @@ impl futures_util::Stream for HashStream {
             (iter, opt)
         })
         .await
-        .map(|(iter, opt)| (iter, opt.map(|res| res.map_err(Error::from))))
+        .map(|(iter, opt)| {
+            (
+                iter,
+                opt.map(|res| res.map_err(SledError::from).map_err(Error::from)),
+            )
+        })
+        .map_err(SledError::from)
         .map_err(Error::from)
     });

@@ -213,7 +213,6 @@ fn hash_alias_key(hash: &IVec, alias: &Alias) -> Vec<u8> {
 #[async_trait::async_trait(?Send)]
 impl HashRepo for SledRepo {
     type Bytes = IVec;
-    type Error = Error;
     type Stream = HashStream;

     async fn hashes(&self) -> Self::Stream {

@@ -225,7 +224,8 @@ impl HashRepo for SledRepo {
         }
     }

-    async fn create(&self, hash: Self::Bytes) -> Result<Result<(), AlreadyExists>, Self::Error> {
+    #[tracing::instrument]
+    async fn create(&self, hash: Self::Bytes) -> Result<Result<(), AlreadyExists>, Error> {
         let res = b!(self.hashes, {
             let hash2 = hash.clone();
             hashes.compare_and_swap(hash, None as Option<Self::Bytes>, Some(hash2))

@@ -234,7 +234,8 @@ impl HashRepo for SledRepo {
         Ok(res.map_err(|_| AlreadyExists))
     }

-    async fn relate_alias(&self, hash: Self::Bytes, alias: &Alias) -> Result<(), Self::Error> {
+    #[tracing::instrument]
+    async fn relate_alias(&self, hash: Self::Bytes, alias: &Alias) -> Result<(), Error> {
         let key = hash_alias_key(&hash, alias);
         let value = alias.to_bytes();

@@ -243,7 +244,8 @@ impl HashRepo for SledRepo {
         Ok(())
     }

-    async fn remove_alias(&self, hash: Self::Bytes, alias: &Alias) -> Result<(), Self::Error> {
+    #[tracing::instrument]
+    async fn remove_alias(&self, hash: Self::Bytes, alias: &Alias) -> Result<(), Error> {
         let key = hash_alias_key(&hash, alias);

         b!(self.hash_aliases, hash_aliases.remove(key));

@@ -251,7 +253,8 @@ impl HashRepo for SledRepo {
         Ok(())
     }

-    async fn aliases(&self, hash: Self::Bytes) -> Result<Vec<Alias>, Self::Error> {
+    #[tracing::instrument]
+    async fn aliases(&self, hash: Self::Bytes) -> Result<Vec<Alias>, Error> {
         let v = b!(self.hash_aliases, {
             Ok(hash_aliases
                 .scan_prefix(hash)

@@ -264,37 +267,37 @@ impl HashRepo for SledRepo {
         Ok(v)
     }

+    #[tracing::instrument]
     async fn relate_identifier<I: Identifier>(
         &self,
         hash: Self::Bytes,
         identifier: &I,
-    ) -> Result<(), Self::Error> {
-        let bytes = identifier_bytes(identifier)?;
+    ) -> Result<(), Error> {
+        let bytes = identifier.to_bytes()?;

         b!(self.hash_identifiers, hash_identifiers.insert(hash, bytes));

         Ok(())
     }

-    async fn identifier<I: Identifier + 'static>(
-        &self,
-        hash: Self::Bytes,
-    ) -> Result<I, Self::Error> {
+    #[tracing::instrument]
+    async fn identifier<I: Identifier + 'static>(&self, hash: Self::Bytes) -> Result<I, Error> {
         let opt = b!(self.hash_identifiers, hash_identifiers.get(hash));

-        opt.ok_or(Error::Missing).and_then(|ivec| {
-            I::from_bytes(ivec.to_vec()).map_err(|e| Error::Identifier(Box::new(e)))
-        })
+        opt.ok_or(SledError::Missing)
+            .map_err(Error::from)
+            .and_then(|ivec| I::from_bytes(ivec.to_vec()))
     }

+    #[tracing::instrument]
     async fn relate_variant_identifier<I: Identifier>(
         &self,
         hash: Self::Bytes,
         variant: String,
         identifier: &I,
-    ) -> Result<(), Self::Error> {
+    ) -> Result<(), Error> {
         let key = variant_key(&hash, &variant);
-        let value = identifier_bytes(identifier)?;
+        let value = identifier.to_bytes()?;

         b!(
             self.hash_variant_identifiers,

@@ -304,11 +307,12 @@ impl HashRepo for SledRepo {
         Ok(())
     }

+    #[tracing::instrument]
     async fn variant_identifier<I: Identifier + 'static>(
         &self,
         hash: Self::Bytes,
         variant: String,
-    ) -> Result<Option<I>, Self::Error> {
+    ) -> Result<Option<I>, Error> {
         let key = variant_key(&hash, &variant);

         let opt = b!(

@@ -317,18 +321,17 @@ impl HashRepo for SledRepo {
         );

         if let Some(ivec) = opt {
-            Ok(Some(
-                I::from_bytes(ivec.to_vec()).map_err(|e| Error::Identifier(Box::new(e)))?,
-            ))
+            Ok(Some(I::from_bytes(ivec.to_vec())?))
         } else {
             Ok(None)
         }
     }

+    #[tracing::instrument]
     async fn variants<I: Identifier + 'static>(
         &self,
         hash: Self::Bytes,
-    ) -> Result<Vec<(String, I)>, Self::Error> {
+    ) -> Result<Vec<(String, I)>, Error> {
         let vec = b!(
             self.hash_variant_identifiers,
             Ok(hash_variant_identifiers

@@ -346,12 +349,13 @@ impl HashRepo for SledRepo {
|
||||||
Ok(vec)
|
Ok(vec)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[tracing::instrument]
|
||||||
async fn relate_motion_identifier<I: Identifier>(
|
async fn relate_motion_identifier<I: Identifier>(
|
||||||
&self,
|
&self,
|
||||||
hash: Self::Bytes,
|
hash: Self::Bytes,
|
||||||
identifier: &I,
|
identifier: &I,
|
||||||
) -> Result<(), Self::Error> {
|
) -> Result<(), Error> {
|
||||||
let bytes = identifier_bytes(identifier)?;
|
let bytes = identifier.to_bytes()?;
|
||||||
|
|
||||||
b!(
|
b!(
|
||||||
self.hash_motion_identifiers,
|
self.hash_motion_identifiers,
|
||||||
|
@ -361,25 +365,25 @@ impl HashRepo for SledRepo {
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[tracing::instrument]
|
||||||
async fn motion_identifier<I: Identifier + 'static>(
|
async fn motion_identifier<I: Identifier + 'static>(
|
||||||
&self,
|
&self,
|
||||||
hash: Self::Bytes,
|
hash: Self::Bytes,
|
||||||
) -> Result<Option<I>, Self::Error> {
|
) -> Result<Option<I>, Error> {
|
||||||
let opt = b!(
|
let opt = b!(
|
||||||
self.hash_motion_identifiers,
|
self.hash_motion_identifiers,
|
||||||
hash_motion_identifiers.get(hash)
|
hash_motion_identifiers.get(hash)
|
||||||
);
|
);
|
||||||
|
|
||||||
if let Some(ivec) = opt {
|
if let Some(ivec) = opt {
|
||||||
Ok(Some(
|
Ok(Some(I::from_bytes(ivec.to_vec())?))
|
||||||
I::from_bytes(ivec.to_vec()).map_err(|e| Error::Identifier(Box::new(e)))?,
|
|
||||||
))
|
|
||||||
} else {
|
} else {
|
||||||
Ok(None)
|
Ok(None)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn cleanup(&self, hash: Self::Bytes) -> Result<(), Self::Error> {
|
#[tracing::instrument]
|
||||||
|
async fn cleanup(&self, hash: Self::Bytes) -> Result<(), Error> {
|
||||||
let hash2 = hash.clone();
|
let hash2 = hash.clone();
|
||||||
b!(self.hashes, hashes.remove(hash2));
|
b!(self.hashes, hashes.remove(hash2));
|
||||||
|
|
||||||
|
@ -426,9 +430,9 @@ impl HashRepo for SledRepo {
|
||||||
#[async_trait::async_trait(?Send)]
|
#[async_trait::async_trait(?Send)]
|
||||||
impl AliasRepo for SledRepo {
|
impl AliasRepo for SledRepo {
|
||||||
type Bytes = sled::IVec;
|
type Bytes = sled::IVec;
|
||||||
type Error = Error;
|
|
||||||
|
|
||||||
async fn create(&self, alias: &Alias) -> Result<Result<(), AlreadyExists>, Self::Error> {
|
#[tracing::instrument]
|
||||||
|
async fn create(&self, alias: &Alias) -> Result<Result<(), AlreadyExists>, Error> {
|
||||||
let bytes = alias.to_bytes();
|
let bytes = alias.to_bytes();
|
||||||
let bytes2 = bytes.clone();
|
let bytes2 = bytes.clone();
|
||||||
|
|
||||||
|
@ -440,11 +444,12 @@ impl AliasRepo for SledRepo {
|
||||||
Ok(res.map_err(|_| AlreadyExists))
|
Ok(res.map_err(|_| AlreadyExists))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[tracing::instrument]
|
||||||
async fn relate_delete_token(
|
async fn relate_delete_token(
|
||||||
&self,
|
&self,
|
||||||
alias: &Alias,
|
alias: &Alias,
|
||||||
delete_token: &DeleteToken,
|
delete_token: &DeleteToken,
|
||||||
) -> Result<Result<(), AlreadyExists>, Self::Error> {
|
) -> Result<Result<(), AlreadyExists>, Error> {
|
||||||
let key = alias.to_bytes();
|
let key = alias.to_bytes();
|
||||||
let token = delete_token.to_bytes();
|
let token = delete_token.to_bytes();
|
||||||
|
|
||||||
|
@ -456,16 +461,19 @@ impl AliasRepo for SledRepo {
|
||||||
Ok(res.map_err(|_| AlreadyExists))
|
Ok(res.map_err(|_| AlreadyExists))
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn delete_token(&self, alias: &Alias) -> Result<DeleteToken, Self::Error> {
|
#[tracing::instrument]
|
||||||
|
async fn delete_token(&self, alias: &Alias) -> Result<DeleteToken, Error> {
|
||||||
let key = alias.to_bytes();
|
let key = alias.to_bytes();
|
||||||
|
|
||||||
let opt = b!(self.alias_delete_tokens, alias_delete_tokens.get(key));
|
let opt = b!(self.alias_delete_tokens, alias_delete_tokens.get(key));
|
||||||
|
|
||||||
opt.and_then(|ivec| DeleteToken::from_slice(&ivec))
|
opt.and_then(|ivec| DeleteToken::from_slice(&ivec))
|
||||||
.ok_or(Error::Missing)
|
.ok_or(SledError::Missing)
|
||||||
|
.map_err(Error::from)
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn relate_hash(&self, alias: &Alias, hash: Self::Bytes) -> Result<(), Self::Error> {
|
#[tracing::instrument]
|
||||||
|
async fn relate_hash(&self, alias: &Alias, hash: Self::Bytes) -> Result<(), Error> {
|
||||||
let key = alias.to_bytes();
|
let key = alias.to_bytes();
|
||||||
|
|
||||||
b!(self.alias_hashes, alias_hashes.insert(key, hash));
|
b!(self.alias_hashes, alias_hashes.insert(key, hash));
|
||||||
|
@ -473,15 +481,17 @@ impl AliasRepo for SledRepo {
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn hash(&self, alias: &Alias) -> Result<Self::Bytes, Self::Error> {
|
#[tracing::instrument]
|
||||||
|
async fn hash(&self, alias: &Alias) -> Result<Self::Bytes, Error> {
|
||||||
let key = alias.to_bytes();
|
let key = alias.to_bytes();
|
||||||
|
|
||||||
let opt = b!(self.alias_hashes, alias_hashes.get(key));
|
let opt = b!(self.alias_hashes, alias_hashes.get(key));
|
||||||
|
|
||||||
opt.ok_or(Error::Missing)
|
opt.ok_or(SledError::Missing).map_err(Error::from)
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn cleanup(&self, alias: &Alias) -> Result<(), Self::Error> {
|
#[tracing::instrument]
|
||||||
|
async fn cleanup(&self, alias: &Alias) -> Result<(), Error> {
|
||||||
let key = alias.to_bytes();
|
let key = alias.to_bytes();
|
||||||
|
|
||||||
let key2 = key.clone();
|
let key2 = key.clone();
|
||||||
|
@ -502,8 +512,8 @@ impl std::fmt::Debug for SledRepo {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl From<actix_rt::task::JoinError> for Error {
|
impl From<actix_rt::task::JoinError> for SledError {
|
||||||
fn from(_: actix_rt::task::JoinError) -> Self {
|
fn from(_: actix_rt::task::JoinError) -> Self {
|
||||||
Error::Panic
|
SledError::Panic
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
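The hunks above drop the sled repo's associated `type Error` and layer failures as a store-specific `SledError` wrapped by the crate-wide `Error`. The actual definitions live in src/error.rs and src/repo/sled.rs, which this excerpt does not show in full; a minimal sketch of the layering they imply (any variant beyond `Missing` and `Panic` is an assumption):

// Sketch only: the real enums live in src/error.rs and src/repo/sled.rs.
#[derive(Debug, thiserror::Error)]
pub(crate) enum SledError {
    #[error("Error in sled")]
    Sled(#[from] sled::Error), // assumed variant; implied by map_err(SledError::from)

    #[error("Required field is missing")]
    Missing, // used above via ok_or(SledError::Missing)

    #[error("Operation panicked")]
    Panic, // produced by the From<actix_rt::task::JoinError> impl above
}

#[derive(Debug, thiserror::Error)]
pub(crate) enum Error {
    #[error("Error in sled repo")]
    Sled(#[from] SledError),
    // ...variants for the file and object stores would sit alongside this one
}

With both `#[from]` conversions in place, the chained `res.map_err(SledError::from).map_err(Error::from)` seen in the `HashStream` hunk type-checks without any per-trait error plumbing.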
26
src/store.rs
@@ -1,43 +1,37 @@
-use std::fmt::Debug;
+use crate::error::Error;

 use actix_web::web::Bytes;
 use futures_util::stream::Stream;
+use std::fmt::Debug;
 use tokio::io::{AsyncRead, AsyncWrite};

 pub(crate) mod file_store;
 pub(crate) mod object_store;

 pub(crate) trait Identifier: Send + Sync + Clone + Debug {
-    type Error: std::error::Error + Send + Sync + 'static;
+    fn to_bytes(&self) -> Result<Vec<u8>, Error>;

-    fn to_bytes(&self) -> Result<Vec<u8>, Self::Error>;
-
-    fn from_bytes(bytes: Vec<u8>) -> Result<Self, Self::Error>
+    fn from_bytes(bytes: Vec<u8>) -> Result<Self, Error>
     where
         Self: Sized;
 }

 #[async_trait::async_trait(?Send)]
 pub(crate) trait Store: Send + Sync + Clone + Debug + 'static {
-    type Error: std::error::Error + Send + Sync + 'static;
-    type Identifier: Identifier<Error = Self::Error>;
+    type Identifier: Identifier;
     type Stream: Stream<Item = std::io::Result<Bytes>>;

-    async fn save_async_read<Reader>(
-        &self,
-        reader: &mut Reader,
-    ) -> Result<Self::Identifier, Self::Error>
+    async fn save_async_read<Reader>(&self, reader: &mut Reader) -> Result<Self::Identifier, Error>
     where
         Reader: AsyncRead + Unpin;

-    async fn save_bytes(&self, bytes: Bytes) -> Result<Self::Identifier, Self::Error>;
+    async fn save_bytes(&self, bytes: Bytes) -> Result<Self::Identifier, Error>;

     async fn to_stream(
         &self,
         identifier: &Self::Identifier,
         from_start: Option<u64>,
         len: Option<u64>,
-    ) -> Result<Self::Stream, Self::Error>;
+    ) -> Result<Self::Stream, Error>;

     async fn read_into<Writer>(
         &self,
@@ -47,7 +41,7 @@ pub(crate) trait Store: Send + Sync + Clone + Debug + 'static {
     where
         Writer: AsyncWrite + Send + Unpin;

-    async fn len(&self, identifier: &Self::Identifier) -> Result<u64, Self::Error>;
+    async fn len(&self, identifier: &Self::Identifier) -> Result<u64, Error>;

-    async fn remove(&self, identifier: &Self::Identifier) -> Result<(), Self::Error>;
+    async fn remove(&self, identifier: &Self::Identifier) -> Result<(), Error>;
 }

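The payoff of dropping `Identifier::Error` and `Store::Error` is that generic callers no longer need conversion bounds to use `?`. A minimal sketch under that assumption (the helper name is invented for illustration):

use actix_web::web::Bytes;

use crate::{error::Error, store::Store};

// Hypothetical helper: compiles for any Store with no extra bounds,
// because save_bytes and len already return the crate-wide Error.
async fn save_and_measure<S: Store>(store: &S, bytes: Bytes) -> Result<u64, Error> {
    let identifier = store.save_bytes(bytes).await?;
    store.len(&identifier).await
}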
@@ -1,4 +1,5 @@
 use crate::{
+    error::Error,
     file::File,
     repo::{Repo, SettingsRepo},
     store::Store,
@@ -23,9 +24,6 @@ const GENERATOR_KEY: &[u8] = b"last-path";

 #[derive(Debug, thiserror::Error)]
 pub(crate) enum FileError {
-    #[error("Failed to interact with sled db")]
-    Sled(#[from] crate::repo::sled::Error),
-
     #[error("Failed to read or write file")]
     Io(#[from] std::io::Error),

@@ -51,15 +49,11 @@ pub(crate) struct FileStore {

 #[async_trait::async_trait(?Send)]
 impl Store for FileStore {
-    type Error = FileError;
     type Identifier = FileId;
     type Stream = Pin<Box<dyn Stream<Item = std::io::Result<Bytes>>>>;

     #[tracing::instrument(skip(reader))]
-    async fn save_async_read<Reader>(
-        &self,
-        reader: &mut Reader,
-    ) -> Result<Self::Identifier, Self::Error>
+    async fn save_async_read<Reader>(&self, reader: &mut Reader) -> Result<Self::Identifier, Error>
     where
         Reader: AsyncRead + Unpin,
     {
@@ -67,22 +61,22 @@ impl Store for FileStore {

         if let Err(e) = self.safe_save_reader(&path, reader).await {
             self.safe_remove_file(&path).await?;
-            return Err(e);
+            return Err(e.into());
         }

-        self.file_id_from_path(path)
+        Ok(self.file_id_from_path(path)?)
     }

     #[tracing::instrument(skip(bytes))]
-    async fn save_bytes(&self, bytes: Bytes) -> Result<Self::Identifier, Self::Error> {
+    async fn save_bytes(&self, bytes: Bytes) -> Result<Self::Identifier, Error> {
         let path = self.next_file().await?;

         if let Err(e) = self.safe_save_bytes(&path, bytes).await {
             self.safe_remove_file(&path).await?;
-            return Err(e);
+            return Err(e.into());
         }

-        self.file_id_from_path(path)
+        Ok(self.file_id_from_path(path)?)
     }

     #[tracing::instrument]
@@ -91,7 +85,7 @@ impl Store for FileStore {
         identifier: &Self::Identifier,
         from_start: Option<u64>,
         len: Option<u64>,
-    ) -> Result<Self::Stream, Self::Error> {
+    ) -> Result<Self::Stream, Error> {
         let path = self.path_from_file_id(identifier);

         let stream = File::open(path)
@@ -119,7 +113,7 @@ impl Store for FileStore {
     }

     #[tracing::instrument]
-    async fn len(&self, identifier: &Self::Identifier) -> Result<u64, Self::Error> {
+    async fn len(&self, identifier: &Self::Identifier) -> Result<u64, Error> {
         let path = self.path_from_file_id(identifier);

         let len = tokio::fs::metadata(path).await?.len();
@@ -128,7 +122,7 @@ impl Store for FileStore {
     }

     #[tracing::instrument]
-    async fn remove(&self, identifier: &Self::Identifier) -> Result<(), Self::Error> {
+    async fn remove(&self, identifier: &Self::Identifier) -> Result<(), Error> {
         let path = self.path_from_file_id(identifier);

         self.safe_remove_file(path).await?;
@@ -138,7 +132,7 @@ impl Store for FileStore {
 }

 impl FileStore {
-    pub(crate) async fn build(root_dir: PathBuf, repo: Repo) -> Result<Self, FileError> {
+    pub(crate) async fn build(root_dir: PathBuf, repo: Repo) -> Result<Self, Error> {
         let path_gen = init_generator(&repo).await?;

         Ok(FileStore {
@@ -148,7 +142,7 @@ impl FileStore {
         })
     }

-    async fn next_directory(&self) -> Result<PathBuf, FileError> {
+    async fn next_directory(&self) -> Result<PathBuf, Error> {
         let path = self.path_gen.next();

         match self.repo {
@@ -167,7 +161,7 @@ impl FileStore {
         Ok(target_path)
     }

-    async fn next_file(&self) -> Result<PathBuf, FileError> {
+    async fn next_file(&self) -> Result<PathBuf, Error> {
         let target_path = self.next_directory().await?;
         let filename = uuid::Uuid::new_v4().to_string();

@@ -290,7 +284,7 @@ pub(crate) async fn safe_create_parent<P: AsRef<Path>>(path: P) -> Result<(), Fi
     Ok(())
 }

-async fn init_generator(repo: &Repo) -> Result<Generator, FileError> {
+async fn init_generator(repo: &Repo) -> Result<Generator, Error> {
     match repo {
         Repo::Sled(sled_repo) => {
             if let Some(ivec) = sled_repo.get(GENERATOR_KEY).await? {

@@ -1,6 +1,9 @@
-use crate::store::{
+use crate::{
+    error::Error,
+    store::{
     file_store::{FileError, FileStore},
     Identifier,
+    },
 };
 use std::path::PathBuf;

@@ -8,9 +11,7 @@ use std::path::PathBuf;
 pub(crate) struct FileId(PathBuf);

 impl Identifier for FileId {
-    type Error = FileError;
-
-    fn to_bytes(&self) -> Result<Vec<u8>, Self::Error> {
+    fn to_bytes(&self) -> Result<Vec<u8>, Error> {
         let vec = self
             .0
             .to_str()
@@ -21,7 +22,7 @@ impl Identifier for FileId {
         Ok(vec)
     }

-    fn from_bytes(bytes: Vec<u8>) -> Result<Self, Self::Error>
+    fn from_bytes(bytes: Vec<u8>) -> Result<Self, Error>
     where
         Self: Sized,
     {

@@ -1,4 +1,5 @@
 use crate::{
+    error::Error,
     repo::{Repo, SettingsRepo},
     store::Store,
 };
@@ -28,9 +29,6 @@ pub(crate) enum ObjectError {
     #[error("Failed to generate path")]
     PathGenerator(#[from] storage_path_generator::PathError),

-    #[error("Failed to interact with sled repo")]
-    Sled(#[from] crate::repo::sled::Error),
-
     #[error("Failed to parse string")]
     Utf8(#[from] FromUtf8Error),

@@ -58,15 +56,11 @@ pin_project_lite::pin_project! {

 #[async_trait::async_trait(?Send)]
 impl Store for ObjectStore {
-    type Error = ObjectError;
     type Identifier = ObjectId;
     type Stream = Pin<Box<dyn Stream<Item = std::io::Result<Bytes>>>>;

     #[tracing::instrument(skip(reader))]
-    async fn save_async_read<Reader>(
-        &self,
-        reader: &mut Reader,
-    ) -> Result<Self::Identifier, Self::Error>
+    async fn save_async_read<Reader>(&self, reader: &mut Reader) -> Result<Self::Identifier, Error>
     where
         Reader: AsyncRead + Unpin,
     {
@@ -74,16 +68,20 @@ impl Store for ObjectStore {

         self.bucket
             .put_object_stream(&self.client, reader, &path)
-            .await?;
+            .await
+            .map_err(ObjectError::from)?;

         Ok(ObjectId::from_string(path))
     }

     #[tracing::instrument(skip(bytes))]
-    async fn save_bytes(&self, bytes: Bytes) -> Result<Self::Identifier, Self::Error> {
+    async fn save_bytes(&self, bytes: Bytes) -> Result<Self::Identifier, Error> {
         let path = self.next_file().await?;

-        self.bucket.put_object(&self.client, &path, &bytes).await?;
+        self.bucket
+            .put_object(&self.client, &path, &bytes)
+            .await
+            .map_err(ObjectError::from)?;

         Ok(ObjectId::from_string(path))
     }
@@ -94,7 +92,7 @@ impl Store for ObjectStore {
         identifier: &Self::Identifier,
         from_start: Option<u64>,
         len: Option<u64>,
-    ) -> Result<Self::Stream, Self::Error> {
+    ) -> Result<Self::Stream, Error> {
         let path = identifier.as_str();

         let start = from_start.unwrap_or(0);
@@ -107,7 +105,7 @@ impl Store for ObjectStore {
             Command::GetObjectRange { start, end },
         );

-        let response = request.response().await?;
+        let response = request.response().await.map_err(ObjectError::from)?;

         Ok(Box::pin(io_error(response.bytes_stream())))
     }
@@ -126,26 +124,34 @@ impl Store for ObjectStore {
         self.bucket
             .get_object_stream(&self.client, path, writer)
             .await
-            .map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, Self::Error::from(e)))?;
+            .map_err(ObjectError::from)
+            .map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, Error::from(e)))?;

         Ok(())
     }

     #[tracing::instrument]
-    async fn len(&self, identifier: &Self::Identifier) -> Result<u64, Self::Error> {
+    async fn len(&self, identifier: &Self::Identifier) -> Result<u64, Error> {
         let path = identifier.as_str();

-        let (head, _) = self.bucket.head_object(&self.client, path).await?;
+        let (head, _) = self
+            .bucket
+            .head_object(&self.client, path)
+            .await
+            .map_err(ObjectError::from)?;
         let length = head.content_length.ok_or(ObjectError::Length)?;

         Ok(length as u64)
     }

     #[tracing::instrument]
-    async fn remove(&self, identifier: &Self::Identifier) -> Result<(), Self::Error> {
+    async fn remove(&self, identifier: &Self::Identifier) -> Result<(), Error> {
         let path = identifier.as_str();

-        self.bucket.delete_object(&self.client, path).await?;
+        self.bucket
+            .delete_object(&self.client, path)
+            .await
+            .map_err(ObjectError::from)?;
         Ok(())
     }
 }
@@ -161,7 +167,7 @@ impl ObjectStore {
         session_token: Option<String>,
         repo: Repo,
         client: reqwest::Client,
-    ) -> Result<ObjectStore, ObjectError> {
+    ) -> Result<ObjectStore, Error> {
         let path_gen = init_generator(&repo).await?;

         Ok(ObjectStore {
@@ -182,12 +188,13 @@ impl ObjectStore {
                 security_token,
                 session_token,
             },
-        )?,
+        )
+        .map_err(ObjectError::from)?,
         client,
         })
     }

-    async fn next_directory(&self) -> Result<Path, ObjectError> {
+    async fn next_directory(&self) -> Result<Path, Error> {
         let path = self.path_gen.next();

         match self.repo {
@@ -201,7 +208,7 @@ impl ObjectStore {
         Ok(path)
     }

-    async fn next_file(&self) -> Result<String, ObjectError> {
+    async fn next_file(&self) -> Result<String, Error> {
         let path = self.next_directory().await?.to_strings().join("/");
         let filename = uuid::Uuid::new_v4().to_string();

@@ -209,7 +216,7 @@ impl ObjectStore {
     }
 }

-async fn init_generator(repo: &Repo) -> Result<Generator, ObjectError> {
+async fn init_generator(repo: &Repo) -> Result<Generator, Error> {
     match repo {
         Repo::Sled(sled_repo) => {
             if let Some(ivec) = sled_repo.get(GENERATOR_KEY).await? {
@@ -250,7 +257,7 @@ where
 impl std::fmt::Debug for ObjectStore {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         f.debug_struct("ObjectStore")
-            .field("path_gen", &self.path_gen)
+            .field("path_gen", &"generator")
             .field("bucket", &self.bucket.name)
             .field("region", &self.bucket.region)
             .finish()

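The repeated `.map_err(ObjectError::from)` above is doing the same job everywhere: `?` applies only a single `From` conversion, and the crate-wide `Error` has no direct impl for the s3 client's error type, so each S3 result is lifted into `ObjectError` first and then into `Error`. A sketch of the shape, with the client's error type left generic:

// Illustration only: E stands in for the s3 client's error type, for which
// ObjectError has a #[from] conversion in the real code.
fn lift<E>(res: Result<(), E>) -> Result<(), Error>
where
    ObjectError: From<E>,
{
    // A bare `res?` would require Error: From<E>, which is not defined;
    // going through ObjectError uses the conversions that are.
    res.map_err(ObjectError::from)?;
    Ok(())
}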
@@ -1,17 +1,20 @@
-use crate::store::{object_store::ObjectError, Identifier};
+use crate::{
+    error::Error,
+    store::{object_store::ObjectError, Identifier},
+};

 #[derive(Debug, Clone)]
 pub(crate) struct ObjectId(String);

 impl Identifier for ObjectId {
-    type Error = ObjectError;
-
-    fn to_bytes(&self) -> Result<Vec<u8>, Self::Error> {
+    fn to_bytes(&self) -> Result<Vec<u8>, Error> {
         Ok(self.0.as_bytes().to_vec())
     }

-    fn from_bytes(bytes: Vec<u8>) -> Result<Self, Self::Error> {
-        Ok(ObjectId(String::from_utf8(bytes)?))
+    fn from_bytes(bytes: Vec<u8>) -> Result<Self, Error> {
+        Ok(ObjectId(
+            String::from_utf8(bytes).map_err(ObjectError::from)?,
+        ))
     }
 }

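Both `Identifier` impls now serialize through the same `Error` type, which makes round-tripping uniform across stores and easy to test. A hypothetical round-trip check in the spirit of this commit's added tests (module path and test name are assumptions):

#[cfg(test)]
mod tests {
    // Assumed paths: ObjectId and Identifier as exposed within the crate.
    use crate::store::{object_store::ObjectId, Identifier};

    // ObjectId wraps a String, so to_bytes/from_bytes should preserve the path.
    #[test]
    fn object_id_round_trips() {
        let id = ObjectId::from_string("some/object/path".to_string());
        let bytes = id.to_bytes().expect("valid UTF-8 serializes");
        let restored = ObjectId::from_bytes(bytes).expect("bytes came from a String");
        assert_eq!(restored.as_str(), "some/object/path");
    }
}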
@@ -51,7 +51,6 @@ impl UploadManager {
     where
         S1: Store,
         S2: Store,
-        Error: From<S1::Error> + From<S2::Error>,
     {
         match self.inner.repo {
             Repo::Sled(ref sled_repo) => do_migrate_store(sled_repo, from, to).await,
@@ -62,10 +61,7 @@ impl UploadManager {
         &self,
         store: S,
         alias: &Alias,
-    ) -> Result<S::Identifier, Error>
-    where
-        Error: From<S::Error>,
-    {
+    ) -> Result<S::Identifier, Error> {
         let identifier = self.identifier_from_alias::<S>(alias).await?;
         let details = if let Some(details) = self.details(&identifier).await? {
             details
@@ -205,10 +201,7 @@ impl UploadManager {
     pub(crate) async fn details<I: Identifier>(
         &self,
         identifier: &I,
-    ) -> Result<Option<Details>, Error>
-    where
-        Error: From<I::Error>,
-    {
+    ) -> Result<Option<Details>, Error> {
         match self.inner.repo {
             Repo::Sled(ref sled_repo) => Ok(sled_repo.details(identifier).await?),
         }
@@ -240,10 +233,7 @@ impl UploadManager {
         &self,
         store: S,
         alias: Alias,
-    ) -> Result<(), Error>
-    where
-        Error: From<S::Error>,
-    {
+    ) -> Result<(), Error> {
         let token = match self.inner.repo {
             Repo::Sled(ref sled_repo) => sled_repo.delete_token(&alias).await?,
         };
@@ -258,10 +248,7 @@ impl UploadManager {
         store: S,
         alias: Alias,
         token: DeleteToken,
-    ) -> Result<(), Error>
-    where
-        Error: From<S::Error>,
-    {
+    ) -> Result<(), Error> {
         let hash = match self.inner.repo {
             Repo::Sled(ref sled_repo) => {
                 let saved_delete_token = sled_repo.delete_token(&alias).await?;
@@ -282,10 +269,7 @@ impl UploadManager {
         &self,
         store: S,
         hash: Vec<u8>,
-    ) -> Result<(), Error>
-    where
-        Error: From<S::Error>,
-    {
+    ) -> Result<(), Error> {
         match self.inner.repo {
             Repo::Sled(ref sled_repo) => {
                 let hash: <SledRepo as HashRepo>::Bytes = hash.into();
@@ -309,7 +293,7 @@ impl UploadManager {

                 HashRepo::cleanup(sled_repo, hash).await?;

-                let cleanup_span = tracing::info_span!("Cleaning files");
+                let cleanup_span = tracing::info_span!(parent: None, "Cleaning files");
                 cleanup_span.follows_from(Span::current());

                 actix_rt::spawn(
@@ -323,12 +307,10 @@ impl UploadManager {
                     {
                         debug!("Deleting {:?}", identifier);
                         if let Err(e) = store.remove(identifier).await {
-                            let e: Error = e.into();
                             errors.push(e);
                         }

                         if let Err(e) = IdentifierRepo::cleanup(&repo, identifier).await {
-                            let e: Error = e.into();
                             errors.push(e);
                         }
                     }
@@ -350,10 +332,7 @@ impl UploadManager {
         Ok(())
     }

-    pub(crate) fn session<S: Store + Clone + 'static>(&self, store: S) -> UploadManagerSession<S>
-    where
-        Error: From<S::Error>,
-    {
+    pub(crate) fn session<S: Store + Clone + 'static>(&self, store: S) -> UploadManagerSession<S> {
         UploadManagerSession::new(self.clone(), store)
     }
 }
@@ -366,7 +345,6 @@ async fn migrate_file<S1, S2>(
 where
     S1: Store,
     S2: Store,
-    Error: From<S1::Error> + From<S2::Error>,
 {
     let stream = from.to_stream(identifier, None, None).await?;
     futures_util::pin_mut!(stream);
@@ -382,7 +360,6 @@ where
     R: IdentifierRepo,
     I1: Identifier,
     I2: Identifier,
-    Error: From<<R as IdentifierRepo>::Error>,
 {
     if let Some(details) = repo.details(&from).await? {
         repo.relate_details(to, &details).await?;
@@ -396,11 +373,7 @@ async fn do_migrate_store<R, S1, S2>(repo: &R, from: S1, to: S2) -> Result<(), E
 where
     S1: Store,
     S2: Store,
-    Error: From<S1::Error> + From<S2::Error>,
     R: IdentifierRepo + HashRepo + SettingsRepo,
-    Error: From<<R as IdentifierRepo>::Error>,
-    Error: From<<R as HashRepo>::Error>,
-    Error: From<<R as SettingsRepo>::Error>,
 {
     let stream = repo.hashes().await;
     let mut stream = Box::pin(stream);

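Every removed `where Error: From<S::Error>` clause in this file existed only so `?` could convert a store-specific error into `Error`. With `Store` methods returning `Error` directly, a bare `S: Store` bound is enough. A small sketch of the simplified shape (the free function is hypothetical):

use crate::{error::Error, store::Store};

// Hypothetical function in the new style: no conversion bounds,
// store failures propagate through `?` as the crate-wide Error.
async fn remove_all<S: Store>(store: &S, identifiers: &[S::Identifier]) -> Result<(), Error> {
    for identifier in identifiers {
        store.remove(identifier).await?;
    }
    Ok(())
}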
@@ -13,20 +13,14 @@ use futures_util::stream::{Stream, StreamExt};
 use tracing::{debug, instrument, Span};
 use tracing_futures::Instrument;

-pub(crate) struct UploadManagerSession<S: Store + Clone + 'static>
-where
-    Error: From<S::Error>,
-{
+pub(crate) struct UploadManagerSession<S: Store + Clone + 'static> {
     store: S,
     manager: UploadManager,
     alias: Option<Alias>,
     finished: bool,
 }

-impl<S: Store + Clone + 'static> UploadManagerSession<S>
-where
-    Error: From<S::Error>,
-{
+impl<S: Store + Clone + 'static> UploadManagerSession<S> {
     pub(super) fn new(manager: UploadManager, store: S) -> Self {
         UploadManagerSession {
             store,
@@ -45,10 +39,7 @@ where
     }
 }

-impl<S: Store + Clone + 'static> Drop for UploadManagerSession<S>
-where
-    Error: From<S::Error>,
-{
+impl<S: Store + Clone + 'static> Drop for UploadManagerSession<S> {
     fn drop(&mut self) {
         if self.finished {
             return;
@@ -91,10 +82,7 @@ where
     }
 }

-impl<S: Store> UploadManagerSession<S>
-where
-    Error: From<S::Error>,
-{
+impl<S: Store> UploadManagerSession<S> {
     /// Generate a delete token for an alias
     #[instrument(skip(self))]
     pub(crate) async fn delete_token(&self) -> Result<DeleteToken, Error> {
