2022-03-29 17:51:16 +00:00
|
|
|
use crate::{
|
2022-04-02 22:41:00 +00:00
|
|
|
error::{Error, UploadError},
|
2022-03-29 17:51:16 +00:00
|
|
|
repo::{
|
2022-04-06 01:29:30 +00:00
|
|
|
Alias, AliasRepo, AlreadyExists, BaseRepo, CachedRepo, DeleteToken, Details, FullRepo,
|
|
|
|
HashRepo, Identifier, IdentifierRepo, QueueRepo, SettingsRepo, UploadId, UploadRepo,
|
|
|
|
UploadResult,
|
2022-03-29 17:51:16 +00:00
|
|
|
},
|
2022-04-02 22:41:00 +00:00
|
|
|
serde_str::Serde,
|
2022-03-29 20:59:17 +00:00
|
|
|
stream::from_iterator,
|
2022-03-24 22:09:15 +00:00
|
|
|
};
|
2022-03-29 20:59:17 +00:00
|
|
|
use futures_util::Stream;
|
2022-04-06 01:29:30 +00:00
|
|
|
use sled::{CompareAndSwapError, Db, IVec, Tree};
|
2022-04-01 16:51:46 +00:00
|
|
|
use std::{
|
2022-04-06 17:13:46 +00:00
|
|
|
collections::HashMap,
|
2022-04-01 16:51:46 +00:00
|
|
|
pin::Pin,
|
2023-01-29 17:36:09 +00:00
|
|
|
sync::{
|
|
|
|
atomic::{AtomicU64, Ordering},
|
|
|
|
Arc, RwLock,
|
|
|
|
},
|
2022-04-01 16:51:46 +00:00
|
|
|
};
|
2022-03-29 17:51:16 +00:00
|
|
|
use tokio::sync::Notify;
|
|
|
|
|
2022-04-06 17:13:46 +00:00
|
|
|
mod bucket;
|
2022-04-06 01:29:30 +00:00
|
|
|
mod datetime;
|
|
|
|
|
2022-04-06 17:13:46 +00:00
|
|
|
use bucket::Bucket;
|
2022-04-06 01:29:30 +00:00
|
|
|
use datetime::DateTime;
|
|
|
|
|
2022-03-24 22:09:15 +00:00
|
|
|
/// Runs a sled operation on a blocking thread.
///
/// `b!(self.tree, tree.op(...))` clones the named `Tree` field off `$self`,
/// moves the clone into `actix_rt::task::spawn_blocking`, and evaluates
/// `$expr` there inside the current tracing span (sled calls are synchronous
/// and must not block the async executor).
///
/// The trailing `??` unwraps both layers: the join result (a panicked task is
/// converted via `SledError::from` — presumably to `SledError::Panic`; the
/// `From<JoinError>` impl is defined elsewhere, TODO confirm) and the inner
/// `Result` produced by `$expr`. The macro therefore expands to the `Ok`
/// value of `$expr` and must be used in a function whose error type can be
/// built `From<SledError>`.
macro_rules! b {
    ($self:ident.$ident:ident, $expr:expr) => {{
        // Clone the Tree handle so the blocking closure can own it.
        let $ident = $self.$ident.clone();

        // Propagate the caller's span into the blocking thread.
        let span = tracing::Span::current();

        actix_rt::task::spawn_blocking(move || span.in_scope(|| $expr))
            .await
            .map_err(SledError::from)??
    }};
}
|
|
|
|
|
|
|
|
/// Errors produced by the sled-backed repository.
#[derive(Debug, thiserror::Error)]
pub(crate) enum SledError {
    /// Any error surfaced by the sled database itself.
    #[error("Error in database")]
    Sled(#[from] sled::Error),

    /// Stored `Details` JSON failed to (de)serialize.
    #[error("Invalid details json")]
    Details(#[from] serde_json::Error),

    /// A value that the schema requires was absent from the tree.
    #[error("Required field was not present")]
    Missing,

    /// A blocking task spawned by the `b!` macro panicked.
    #[error("Operation panicked")]
    Panic,
}
|
|
|
|
|
2022-03-26 21:49:23 +00:00
|
|
|
/// Sled-backed implementation of the repository traits.
///
/// Each field is a separate sled `Tree` (a keyspace within the same `Db`);
/// `Tree` handles are cheap to clone, which is what makes the whole struct
/// `Clone` and lets the `b!` macro move per-tree clones into blocking tasks.
#[derive(Clone)]
pub(crate) struct SledRepo {
    // Monotonic counter used to write a fresh value on every health check.
    healthz_count: Arc<AtomicU64>,
    // Scratch tree written/read by `health_check` to verify the db works.
    healthz: Tree,
    // Arbitrary key/value settings (see SettingsRepo impl).
    settings: Tree,
    // identifier bytes -> serialized `Details` JSON.
    identifier_details: Tree,
    // hash -> hash; existence marks the hash as created.
    hashes: Tree,
    // (hash ++ alias bytes) -> alias bytes; all aliases for a hash.
    hash_aliases: Tree,
    // hash -> identifier bytes for the original upload.
    hash_identifiers: Tree,
    // "hash/variant" -> identifier bytes (see `variant_key`).
    hash_variant_identifiers: Tree,
    // hash -> identifier bytes for the motion (video-preview) version.
    hash_motion_identifiers: Tree,
    // alias bytes -> alias bytes; existence marks the alias as created.
    aliases: Tree,
    // alias bytes -> hash.
    alias_hashes: Tree,
    // alias bytes -> delete token.
    alias_delete_tokens: Tree,
    // (queue name ++ id) -> job payload.
    queue: Tree,
    // worker id -> (queue name ++ 0x00 ++ job payload), see QueueRepo.
    in_progress_queue: Tree,
    // Per-queue Notify used to wake `pop` when `push` adds work.
    queue_notifier: Arc<RwLock<HashMap<&'static str, Arc<Notify>>>>,
    // upload id -> b"1" while pending, then serialized InnerUploadResult.
    uploads: Tree,
    // alias bytes -> serialized DateTime of last access (cached media).
    cache: Tree,
    // serialized DateTime -> CBOR `Bucket` of alias bytes (reverse index).
    cache_inverse: Tree,
    // Kept for `generate_id` and so the Db outlives all Trees.
    db: Db,
}
|
|
|
|
|
|
|
|
impl SledRepo {
    /// Builds a `SledRepo` by opening every named tree on the provided `Db`.
    ///
    /// # Errors
    /// Returns `SledError::Sled` if any tree fails to open.
    pub(crate) fn new(db: Db) -> Result<Self, SledError> {
        Ok(SledRepo {
            healthz_count: Arc::new(AtomicU64::new(0)),
            healthz: db.open_tree("pict-rs-healthz-tree")?,
            settings: db.open_tree("pict-rs-settings-tree")?,
            identifier_details: db.open_tree("pict-rs-identifier-details-tree")?,
            hashes: db.open_tree("pict-rs-hashes-tree")?,
            hash_aliases: db.open_tree("pict-rs-hash-aliases-tree")?,
            hash_identifiers: db.open_tree("pict-rs-hash-identifiers-tree")?,
            hash_variant_identifiers: db.open_tree("pict-rs-hash-variant-identifiers-tree")?,
            hash_motion_identifiers: db.open_tree("pict-rs-hash-motion-identifiers-tree")?,
            aliases: db.open_tree("pict-rs-aliases-tree")?,
            alias_hashes: db.open_tree("pict-rs-alias-hashes-tree")?,
            alias_delete_tokens: db.open_tree("pict-rs-alias-delete-tokens-tree")?,
            queue: db.open_tree("pict-rs-queue-tree")?,
            in_progress_queue: db.open_tree("pict-rs-in-progress-queue-tree")?,
            queue_notifier: Arc::new(RwLock::new(HashMap::new())),
            uploads: db.open_tree("pict-rs-uploads-tree")?,
            cache: db.open_tree("pict-rs-cache-tree")?,
            cache_inverse: db.open_tree("pict-rs-cache-inverse-tree")?,
            db,
        })
    }
}
|
|
|
|
|
2022-03-29 17:51:16 +00:00
|
|
|
impl BaseRepo for SledRepo {
    // sled's IVec is the natural byte container for this backend: cheaply
    // cloneable and returned directly by Tree operations.
    type Bytes = IVec;
}
|
|
|
|
|
2023-01-29 17:36:09 +00:00
|
|
|
#[async_trait::async_trait(?Send)]
impl FullRepo for SledRepo {
    /// Verifies the database can service a full write/flush/read cycle.
    ///
    /// Writes a monotonically increasing counter so every check performs a
    /// real write (not a no-op overwrite of the same value), flushes to make
    /// sure the backing store accepts the write, then reads the key back.
    /// Errors from any step propagate to the caller.
    async fn health_check(&self) -> Result<(), Error> {
        let next = self.healthz_count.fetch_add(1, Ordering::Relaxed);
        b!(self.healthz, {
            healthz.insert("healthz", &next.to_be_bytes()[..])
        });
        self.healthz.flush_async().await?;
        // Read value is discarded; only the success of the read matters.
        b!(self.healthz, healthz.get("healthz"));
        Ok(())
    }
}
|
2022-04-02 21:44:03 +00:00
|
|
|
|
2022-04-02 22:41:00 +00:00
|
|
|
/// Serializable mirror of `UploadResult`, stored as JSON in the uploads tree.
///
/// Exists because `Alias` and `DeleteToken` are persisted through the `Serde`
/// wrapper; conversion to/from the public `UploadResult` happens via the
/// `From` impls below.
#[derive(serde::Deserialize, serde::Serialize)]
enum InnerUploadResult {
    /// Upload finished; records the created alias and its delete token.
    Success {
        alias: Serde<Alias>,
        token: Serde<DeleteToken>,
    },
    /// Upload failed; records a human-readable reason.
    Failure {
        message: String,
    },
}
|
|
|
|
|
|
|
|
/// Wraps the public result type for persistence (see `InnerUploadResult`).
impl From<UploadResult> for InnerUploadResult {
    fn from(u: UploadResult) -> Self {
        match u {
            UploadResult::Success { alias, token } => InnerUploadResult::Success {
                alias: Serde::new(alias),
                token: Serde::new(token),
            },
            UploadResult::Failure { message } => InnerUploadResult::Failure { message },
        }
    }
}
|
|
|
|
|
|
|
|
/// Unwraps a persisted result back into the public type (inverse of the
/// `From<UploadResult>` conversion).
impl From<InnerUploadResult> for UploadResult {
    fn from(i: InnerUploadResult) -> Self {
        match i {
            InnerUploadResult::Success { alias, token } => UploadResult::Success {
                alias: Serde::into_inner(alias),
                token: Serde::into_inner(token),
            },
            InnerUploadResult::Failure { message } => UploadResult::Failure { message },
        }
    }
}
|
|
|
|
|
2022-04-06 17:13:46 +00:00
|
|
|
/// Adds `alias_bytes` to the bucket stored under `now_bytes` in the
/// `cache_inverse` tree, using a compare-and-swap loop so concurrent writers
/// to the same timestamp key don't lose each other's inserts.
///
/// The bucket is re-read and re-built on every CAS failure; serialization
/// errors from `serde_cbor` abort the whole operation.
fn insert_cache_inverse(
    cache_inverse: &Tree,
    now_bytes: &[u8],
    alias_bytes: &[u8],
) -> Result<(), Error> {
    // Snapshot of the current value; refreshed from the CAS result on conflict.
    let mut old = cache_inverse.get(now_bytes)?;

    loop {
        // unsure of whether to bail on deserialize error or fail with empty bucket
        let mut bucket = old
            .as_ref()
            .and_then(|old| serde_cbor::from_slice::<Bucket>(old).ok())
            .unwrap_or_else(Bucket::empty);

        bucket.insert(alias_bytes.to_vec());

        tracing::trace!("Inserting new {:?}", bucket);
        let bucket_bytes = serde_cbor::to_vec(&bucket)?;
        let new = Some(bucket_bytes);

        // CAS against the value we read; only succeeds if nobody raced us.
        let res = cache_inverse.compare_and_swap(now_bytes, old, new)?;

        if let Err(CompareAndSwapError { current, .. }) = res {
            // Lost the race: retry against the winner's value.
            old = current;
        } else {
            break;
        }
    }

    Ok(())
}
|
|
|
|
|
|
|
|
#[async_trait::async_trait(?Send)]
|
|
|
|
impl CachedRepo for SledRepo {
|
2022-09-28 04:19:52 +00:00
|
|
|
#[tracing::instrument(skip(self))]
|
2022-04-06 17:13:46 +00:00
|
|
|
async fn mark_cached(&self, alias: &Alias) -> Result<(), Error> {
|
2022-04-06 01:29:30 +00:00
|
|
|
let now = DateTime::now();
|
2022-04-06 17:13:46 +00:00
|
|
|
let now_bytes = serde_json::to_vec(&now)?;
|
|
|
|
|
|
|
|
let alias_bytes = alias.to_bytes();
|
2022-04-06 01:29:30 +00:00
|
|
|
|
|
|
|
let cache_inverse = self.cache_inverse.clone();
|
|
|
|
b!(self.cache, {
|
2022-04-06 17:13:46 +00:00
|
|
|
cache.insert(&alias_bytes, now_bytes.clone())?;
|
2022-04-06 01:29:30 +00:00
|
|
|
|
2022-04-06 17:13:46 +00:00
|
|
|
insert_cache_inverse(&cache_inverse, &now_bytes, &alias_bytes)
|
2022-04-06 01:29:30 +00:00
|
|
|
});
|
|
|
|
Ok(())
|
|
|
|
}
|
|
|
|
|
2022-09-28 04:19:52 +00:00
|
|
|
#[tracing::instrument(skip(self))]
|
2022-04-06 17:13:46 +00:00
|
|
|
async fn update(&self, alias: &Alias) -> Result<Vec<Alias>, Error> {
|
2022-04-06 01:29:30 +00:00
|
|
|
let now = DateTime::now();
|
2022-04-06 02:47:35 +00:00
|
|
|
let now_bytes = serde_json::to_vec(&now)?;
|
2022-04-06 01:29:30 +00:00
|
|
|
|
|
|
|
let to_clean = now.min_cache_date();
|
|
|
|
let to_clean_bytes = serde_json::to_vec(&to_clean)?;
|
|
|
|
|
2022-04-06 17:13:46 +00:00
|
|
|
let alias_bytes = alias.to_bytes();
|
|
|
|
|
2022-04-06 01:29:30 +00:00
|
|
|
let cache_inverse = self.cache_inverse.clone();
|
2022-04-06 17:13:46 +00:00
|
|
|
let aliases = b!(self.cache, {
|
2022-04-06 02:47:35 +00:00
|
|
|
let previous_datetime_opt = cache
|
2022-04-06 17:13:46 +00:00
|
|
|
.fetch_and_update(&alias_bytes, |previous_datetime_opt| {
|
2022-04-06 02:47:35 +00:00
|
|
|
previous_datetime_opt.map(|_| now_bytes.clone())
|
|
|
|
})?;
|
|
|
|
|
|
|
|
if let Some(previous_datetime_bytes) = previous_datetime_opt {
|
|
|
|
// Insert cached media into new date bucket
|
2022-04-06 17:13:46 +00:00
|
|
|
insert_cache_inverse(&cache_inverse, &now_bytes, &alias_bytes)?;
|
2022-04-06 01:29:30 +00:00
|
|
|
|
2022-04-06 02:47:35 +00:00
|
|
|
// Remove cached media from old date bucket
|
2022-04-06 17:13:46 +00:00
|
|
|
let mut old = cache_inverse.get(&previous_datetime_bytes)?;
|
2022-04-06 01:29:30 +00:00
|
|
|
loop {
|
2022-04-06 17:13:46 +00:00
|
|
|
let new = old
|
|
|
|
.as_ref()
|
|
|
|
.and_then(|bucket_bytes| {
|
|
|
|
let mut bucket = serde_cbor::from_slice::<Bucket>(bucket_bytes).ok()?;
|
|
|
|
|
|
|
|
bucket.remove(&alias_bytes);
|
|
|
|
|
|
|
|
if bucket.is_empty() {
|
2022-10-02 02:17:18 +00:00
|
|
|
tracing::trace!("Removed old {:?}", bucket);
|
2022-04-06 02:47:35 +00:00
|
|
|
None
|
|
|
|
} else {
|
2022-10-02 02:17:18 +00:00
|
|
|
tracing::trace!("Inserting old {:?}", bucket);
|
2022-04-06 17:13:46 +00:00
|
|
|
Some(serde_cbor::to_vec(&bucket))
|
2022-04-06 02:47:35 +00:00
|
|
|
}
|
2022-04-06 17:13:46 +00:00
|
|
|
})
|
|
|
|
.transpose()?;
|
2022-04-06 01:29:30 +00:00
|
|
|
|
|
|
|
if let Err(CompareAndSwapError { current, .. }) =
|
2022-04-06 17:13:46 +00:00
|
|
|
cache_inverse.compare_and_swap(&previous_datetime_bytes, old, new)?
|
2022-04-06 01:29:30 +00:00
|
|
|
{
|
|
|
|
old = current;
|
|
|
|
} else {
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2022-04-06 17:13:46 +00:00
|
|
|
let mut aliases: Vec<Alias> = Vec::new();
|
2022-04-06 01:29:30 +00:00
|
|
|
|
|
|
|
for (date_bytes, bucket_bytes) in
|
|
|
|
cache_inverse.range(..to_clean_bytes).filter_map(Result::ok)
|
|
|
|
{
|
2022-04-06 02:47:35 +00:00
|
|
|
if let Ok(datetime) = serde_json::from_slice::<DateTime>(&date_bytes) {
|
2022-10-02 02:17:18 +00:00
|
|
|
tracing::trace!("Checking {}", datetime);
|
2022-04-06 02:47:35 +00:00
|
|
|
} else {
|
|
|
|
tracing::warn!("Invalid date bytes");
|
|
|
|
}
|
2022-04-06 01:29:30 +00:00
|
|
|
if let Ok(bucket) = serde_cbor::from_slice::<Bucket>(&bucket_bytes) {
|
2022-10-02 02:17:18 +00:00
|
|
|
tracing::trace!("Read for deletion: {:?}", bucket);
|
2022-04-06 17:13:46 +00:00
|
|
|
for alias_bytes in bucket {
|
2022-04-06 01:29:30 +00:00
|
|
|
// Best effort cleanup
|
2022-04-06 17:13:46 +00:00
|
|
|
let _ = cache.remove(&alias_bytes);
|
|
|
|
if let Some(alias) = Alias::from_slice(&alias_bytes) {
|
|
|
|
aliases.push(alias);
|
|
|
|
}
|
2022-04-06 01:29:30 +00:00
|
|
|
}
|
2022-04-06 02:47:35 +00:00
|
|
|
} else {
|
|
|
|
tracing::warn!("Invalid bucket");
|
2022-04-06 01:29:30 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
cache_inverse.remove(date_bytes)?;
|
|
|
|
}
|
|
|
|
|
2022-04-06 02:47:35 +00:00
|
|
|
#[cfg(debug)]
|
|
|
|
for date_bytes in cache_inverse.range(to_clean_bytes..).filter_map(Result::ok) {
|
|
|
|
if let Ok(datetime) = serde_json::from_slice::<DateTime>(&date_bytes) {
|
2022-10-02 02:17:18 +00:00
|
|
|
tracing::trace!("Not cleaning for {}", datetime);
|
2022-04-06 02:47:35 +00:00
|
|
|
} else {
|
|
|
|
tracing::warn!("Invalid date bytes");
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2022-04-06 17:13:46 +00:00
|
|
|
Ok(aliases) as Result<_, Error>
|
2022-04-06 01:29:30 +00:00
|
|
|
});
|
|
|
|
|
2022-04-06 17:13:46 +00:00
|
|
|
Ok(aliases)
|
2022-04-06 01:29:30 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2022-04-02 21:44:03 +00:00
|
|
|
#[async_trait::async_trait(?Send)]
impl UploadRepo for SledRepo {
    /// Registers a pending upload: the sentinel value `b"1"` means
    /// "in progress, no result yet" (see `wait` and `complete`).
    #[tracing::instrument(level = "trace", skip(self))]
    async fn create(&self, upload_id: UploadId) -> Result<(), Error> {
        b!(self.uploads, uploads.insert(upload_id.as_bytes(), b"1"));
        Ok(())
    }

    /// Blocks until a result is stored for `upload_id`.
    ///
    /// The subscription is opened *before* the initial read so an update that
    /// lands between the read and the first await is still observed.
    /// Returns `AlreadyClaimed` if the key is missing or gets removed, and
    /// `Canceled` if the subscriber stream ends without a result.
    #[tracing::instrument(skip(self))]
    async fn wait(&self, upload_id: UploadId) -> Result<UploadResult, Error> {
        let mut subscriber = self.uploads.watch_prefix(upload_id.as_bytes());

        let bytes = upload_id.as_bytes().to_vec();
        let opt = b!(self.uploads, uploads.get(bytes));

        if let Some(bytes) = opt {
            // Anything other than the sentinel is a serialized result.
            if bytes != b"1" {
                let result: InnerUploadResult = serde_json::from_slice(&bytes)?;
                return Ok(result.into());
            }
        } else {
            // Key absent: another waiter already claimed (removed) it.
            return Err(UploadError::AlreadyClaimed.into());
        }

        while let Some(event) = (&mut subscriber).await {
            match event {
                sled::Event::Remove { .. } => {
                    return Err(UploadError::AlreadyClaimed.into());
                }
                sled::Event::Insert { value, .. } => {
                    if value != b"1" {
                        let result: InnerUploadResult = serde_json::from_slice(&value)?;
                        return Ok(result.into());
                    }
                }
            }
        }

        Err(UploadError::Canceled.into())
    }

    /// Claims (removes) the upload entry; subsequent `wait` calls see
    /// `AlreadyClaimed`.
    #[tracing::instrument(level = "trace", skip(self))]
    async fn claim(&self, upload_id: UploadId) -> Result<(), Error> {
        b!(self.uploads, uploads.remove(upload_id.as_bytes()));
        Ok(())
    }

    /// Stores the finished result as JSON, replacing the `b"1"` sentinel and
    /// waking any `wait`ers via the watch subscription.
    #[tracing::instrument(level = "trace", skip(self, result))]
    async fn complete(&self, upload_id: UploadId, result: UploadResult) -> Result<(), Error> {
        let result: InnerUploadResult = result.into();
        let result = serde_json::to_vec(&result)?;

        b!(self.uploads, uploads.insert(upload_id.as_bytes(), result));

        Ok(())
    }
}
|
|
|
|
|
2022-03-29 17:51:16 +00:00
|
|
|
#[async_trait::async_trait(?Send)]
impl QueueRepo for SledRepo {
    /// Moves any jobs a crashed/restarted worker left in the in-progress tree
    /// back onto their original queues.
    ///
    /// In-progress values are encoded as `queue_name ++ 0x00 ++ job` (see
    /// `pop`), so each value is split at the first NUL byte to recover the
    /// queue name and payload; malformed values are silently skipped.
    #[tracing::instrument(skip_all, fields(worker_id = %String::from_utf8_lossy(&worker_prefix)))]
    async fn requeue_in_progress(&self, worker_prefix: Vec<u8>) -> Result<(), Error> {
        let vec: Vec<(String, IVec)> = b!(self.in_progress_queue, {
            let vec = in_progress_queue
                .scan_prefix(worker_prefix)
                .values()
                .filter_map(Result::ok)
                .filter_map(|ivec| {
                    // Find the NUL separator between queue name and job.
                    let index = ivec.as_ref().iter().enumerate().find_map(|(index, byte)| {
                        if *byte == 0 {
                            Some(index)
                        } else {
                            None
                        }
                    })?;

                    let (queue, job) = ivec.split_at(index);
                    // Require a non-empty queue name and a non-empty job
                    // (job still starts with the NUL byte here).
                    if queue.is_empty() || job.len() <= 1 {
                        return None;
                    }
                    // Drop the leading NUL separator.
                    let job = &job[1..];

                    Some((String::from_utf8_lossy(queue).to_string(), IVec::from(job)))
                })
                .collect::<Vec<(String, IVec)>>();

            Ok(vec) as Result<_, Error>
        });

        let db = self.db.clone();
        b!(self.queue, {
            for (queue_name, job) in vec {
                // Fresh monotonic id keeps queue keys unique and ordered.
                let id = db.generate_id()?;
                let mut key = queue_name.as_bytes().to_vec();
                key.extend(id.to_be_bytes());

                queue.insert(key, job)?;
            }

            Ok(()) as Result<(), Error>
        });

        Ok(())
    }

    /// Appends `job` to `queue_name` (key = queue name ++ big-endian id) and
    /// wakes one waiting `pop`per via the queue's `Notify`.
    #[tracing::instrument(skip(self, job), fields(job = %String::from_utf8_lossy(&job)))]
    async fn push(&self, queue_name: &'static str, job: Self::Bytes) -> Result<(), Error> {
        let id = self.db.generate_id()?;
        let mut key = queue_name.as_bytes().to_vec();
        key.extend(id.to_be_bytes());

        b!(self.queue, queue.insert(key, job));

        // Fast path: notifier already exists, take only the read lock.
        if let Some(notifier) = self.queue_notifier.read().unwrap().get(&queue_name) {
            notifier.notify_one();
            return Ok(());
        }

        // Slow path: create the notifier under the write lock, then notify.
        self.queue_notifier
            .write()
            .unwrap()
            .entry(queue_name)
            .or_insert_with(|| Arc::new(Notify::new()))
            .notify_one();

        Ok(())
    }

    /// Pops the next job from `queue_name`, blocking until one is available.
    ///
    /// Before removing a job from the queue tree, it is copied into the
    /// in-progress tree under `worker_id` (encoded as
    /// `queue_name ++ 0x00 ++ job`) so that `requeue_in_progress` can recover
    /// it if this worker dies mid-job. If the queue removal loses a race
    /// (another worker already took the job), the in-progress entry is rolled
    /// back and the scan continues.
    #[tracing::instrument(skip(self, worker_id), fields(worker_id = %String::from_utf8_lossy(&worker_id)))]
    async fn pop(
        &self,
        queue_name: &'static str,
        worker_id: Vec<u8>,
    ) -> Result<Self::Bytes, Error> {
        loop {
            let in_progress_queue = self.in_progress_queue.clone();

            let worker_id = worker_id.clone();
            let job = b!(self.queue, {
                // Clear any stale claim from a previous iteration.
                in_progress_queue.remove(&worker_id)?;

                while let Some((key, job)) = queue
                    .scan_prefix(queue_name.as_bytes())
                    .find_map(Result::ok)
                {
                    // Record the claim before removing from the queue.
                    let mut in_progress_value = queue_name.as_bytes().to_vec();
                    in_progress_value.push(0);
                    in_progress_value.extend_from_slice(&job);

                    in_progress_queue.insert(&worker_id, in_progress_value)?;

                    if queue.remove(key)?.is_some() {
                        return Ok(Some(job));
                    }

                    // Lost the race for this job; undo the claim and retry.
                    in_progress_queue.remove(&worker_id)?;
                }

                Ok(None) as Result<_, SledError>
            });

            if let Some(job) = job {
                return Ok(job);
            }

            // No job found: fetch (or lazily create) this queue's notifier.
            let opt = self
                .queue_notifier
                .read()
                .unwrap()
                .get(&queue_name)
                .map(Arc::clone);

            let notify = if let Some(notify) = opt {
                notify
            } else {
                let mut guard = self.queue_notifier.write().unwrap();
                let entry = guard
                    .entry(queue_name)
                    .or_insert_with(|| Arc::new(Notify::new()));
                Arc::clone(entry)
            };

            // NOTE(review): a push between the scan above and this await is
            // covered by Notify's single stored permit — presumably that is
            // the intended wakeup guarantee here; confirm against tokio docs.
            notify.notified().await
        }
    }
}
|
|
|
|
|
|
|
|
#[async_trait::async_trait(?Send)]
|
|
|
|
impl SettingsRepo for SledRepo {
|
2022-10-02 02:17:18 +00:00
|
|
|
#[tracing::instrument(level = "trace", skip(value))]
|
2022-04-01 16:51:46 +00:00
|
|
|
async fn set(&self, key: &'static str, value: Self::Bytes) -> Result<(), Error> {
|
2022-03-24 22:09:15 +00:00
|
|
|
b!(self.settings, settings.insert(key, value));
|
|
|
|
|
|
|
|
Ok(())
|
|
|
|
}
|
|
|
|
|
2022-10-02 02:17:18 +00:00
|
|
|
#[tracing::instrument(level = "trace", skip(self))]
|
2022-04-01 16:51:46 +00:00
|
|
|
async fn get(&self, key: &'static str) -> Result<Option<Self::Bytes>, Error> {
|
2022-03-24 22:09:15 +00:00
|
|
|
let opt = b!(self.settings, settings.get(key));
|
|
|
|
|
|
|
|
Ok(opt)
|
|
|
|
}
|
|
|
|
|
2022-10-02 02:17:18 +00:00
|
|
|
#[tracing::instrument(level = "trace", skip(self))]
|
2022-04-01 16:51:46 +00:00
|
|
|
async fn remove(&self, key: &'static str) -> Result<(), Error> {
|
2022-03-24 22:09:15 +00:00
|
|
|
b!(self.settings, settings.remove(key));
|
|
|
|
|
|
|
|
Ok(())
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2022-03-26 21:49:23 +00:00
|
|
|
/// Builds the `hash_variant_identifiers` key: the raw hash bytes, a `/`
/// separator, then the UTF-8 bytes of the variant name.
fn variant_key(hash: &[u8], variant: &str) -> Vec<u8> {
    let mut key = Vec::with_capacity(hash.len() + 1 + variant.len());
    key.extend_from_slice(hash);
    key.push(b'/');
    key.extend_from_slice(variant.as_bytes());
    key
}
|
|
|
|
|
2022-03-26 21:49:23 +00:00
|
|
|
/// Recovers the variant name from a `variant_key`-formatted key by skipping
/// the hash bytes and the `/` separator.
///
/// Returns `None` when the key is too short or the suffix is not valid UTF-8.
fn variant_from_key(hash: &[u8], key: &[u8]) -> Option<String> {
    let suffix = key.get(hash.len() + 1..)?;
    std::str::from_utf8(suffix).ok().map(String::from)
}
|
2022-03-24 22:09:15 +00:00
|
|
|
|
2022-03-26 21:49:23 +00:00
|
|
|
#[async_trait::async_trait(?Send)]
impl IdentifierRepo for SledRepo {
    /// Associates serialized `Details` JSON with an identifier's byte form.
    #[tracing::instrument(level = "trace", skip(self, identifier), fields(identifier = identifier.string_repr()))]
    async fn relate_details<I: Identifier>(
        &self,
        identifier: &I,
        details: &Details,
    ) -> Result<(), Error> {
        let key = identifier.to_bytes()?;
        let details = serde_json::to_vec(&details)?;

        b!(
            self.identifier_details,
            identifier_details.insert(key, details)
        );

        Ok(())
    }

    /// Fetches the stored `Details` for an identifier, `None` if unrelated.
    ///
    /// # Errors
    /// Fails if the identifier can't be serialized or the stored JSON is
    /// invalid.
    #[tracing::instrument(level = "trace", skip(self, identifier), fields(identifier = identifier.string_repr()))]
    async fn details<I: Identifier>(&self, identifier: &I) -> Result<Option<Details>, Error> {
        let key = identifier.to_bytes()?;

        let opt = b!(self.identifier_details, identifier_details.get(key));

        opt.map(|ivec| serde_json::from_slice(&ivec))
            .transpose()
            .map_err(Error::from)
    }

    /// Removes the details entry for an identifier; missing keys are fine.
    #[tracing::instrument(level = "trace", skip(self, identifier), fields(identifier = identifier.string_repr()))]
    async fn cleanup<I: Identifier>(&self, identifier: &I) -> Result<(), Error> {
        let key = identifier.to_bytes()?;

        b!(self.identifier_details, identifier_details.remove(key));

        Ok(())
    }
}
|
|
|
|
|
|
|
|
// Item type produced by the hashes stream (raw hash keys or an error).
type StreamItem = Result<IVec, Error>;
// Pinned, boxed, non-Send stream — matches the ?Send async_trait impls here.
type LocalBoxStream<'a, T> = Pin<Box<dyn Stream<Item = T> + 'a>>;
|
2022-03-24 22:09:15 +00:00
|
|
|
|
|
|
|
/// Builds the `hash_aliases` key: the hash bytes immediately followed by the
/// alias bytes (no separator), so `scan_prefix(hash)` finds all aliases.
fn hash_alias_key(hash: &IVec, alias: &Alias) -> Vec<u8> {
    let mut key = hash.to_vec();
    key.extend(alias.to_bytes());
    key
}
|
|
|
|
|
2022-03-26 21:49:23 +00:00
|
|
|
#[async_trait::async_trait(?Send)]
impl HashRepo for SledRepo {
    type Stream = LocalBoxStream<'static, StreamItem>;

    /// Streams every known hash, bridging sled's blocking iterator into an
    /// async stream with a buffer of 8 items.
    async fn hashes(&self) -> Self::Stream {
        let iter = self
            .hashes
            .iter()
            .keys()
            .map(|res| res.map_err(Error::from));

        Box::pin(from_iterator(iter, 8))
    }

    /// Registers a hash. Uses compare-and-swap against `None` so a concurrent
    /// creator is detected; the inner `Err(AlreadyExists)` means the hash was
    /// already present.
    #[tracing::instrument(level = "trace", skip(self, hash), fields(hash = hex::encode(&hash)))]
    async fn create(&self, hash: Self::Bytes) -> Result<Result<(), AlreadyExists>, Error> {
        let res = b!(self.hashes, {
            let hash2 = hash.clone();
            hashes.compare_and_swap(hash, None as Option<Self::Bytes>, Some(hash2))
        });

        Ok(res.map_err(|_| AlreadyExists))
    }

    /// Links an alias to a hash (key = hash ++ alias bytes).
    #[tracing::instrument(level = "trace", skip(self, hash), fields(hash = hex::encode(&hash)))]
    async fn relate_alias(&self, hash: Self::Bytes, alias: &Alias) -> Result<(), Error> {
        let key = hash_alias_key(&hash, alias);
        let value = alias.to_bytes();

        b!(self.hash_aliases, hash_aliases.insert(key, value));

        Ok(())
    }

    /// Unlinks an alias from a hash; removing a missing link is not an error.
    #[tracing::instrument(level = "trace", skip(self, hash), fields(hash = hex::encode(&hash)))]
    async fn remove_alias(&self, hash: Self::Bytes, alias: &Alias) -> Result<(), Error> {
        let key = hash_alias_key(&hash, alias);

        b!(self.hash_aliases, hash_aliases.remove(key));

        Ok(())
    }

    /// Returns every alias linked to `hash` via a prefix scan; entries that
    /// fail to read or parse are skipped.
    #[tracing::instrument(skip_all)]
    async fn aliases(&self, hash: Self::Bytes) -> Result<Vec<Alias>, Error> {
        let v = b!(self.hash_aliases, {
            Ok(hash_aliases
                .scan_prefix(hash)
                .values()
                .filter_map(Result::ok)
                .filter_map(|ivec| Alias::from_slice(&ivec))
                .collect::<Vec<_>>()) as Result<_, sled::Error>
        });

        Ok(v)
    }

    /// Records the identifier of the original file for this hash.
    #[tracing::instrument(level = "trace", skip(self, hash, identifier), fields(hash = hex::encode(&hash), identifier = identifier.string_repr()))]
    async fn relate_identifier<I: Identifier>(
        &self,
        hash: Self::Bytes,
        identifier: &I,
    ) -> Result<(), Error> {
        let bytes = identifier.to_bytes()?;

        b!(self.hash_identifiers, hash_identifiers.insert(hash, bytes));

        Ok(())
    }

    /// Fetches the original-file identifier for `hash`.
    ///
    /// # Errors
    /// `SledError::Missing` if no identifier was ever related.
    #[tracing::instrument(level = "trace", skip(self, hash), fields(hash = hex::encode(&hash)))]
    async fn identifier<I: Identifier + 'static>(&self, hash: Self::Bytes) -> Result<I, Error> {
        let opt = b!(self.hash_identifiers, hash_identifiers.get(hash));

        opt.ok_or(SledError::Missing)
            .map_err(Error::from)
            .and_then(|ivec| I::from_bytes(ivec.to_vec()))
    }

    /// Records the identifier for a named variant of this hash
    /// (key = `hash/variant`, see `variant_key`).
    #[tracing::instrument(level = "trace", skip(self, hash, identifier), fields(hash = hex::encode(&hash), identifier = identifier.string_repr()))]
    async fn relate_variant_identifier<I: Identifier>(
        &self,
        hash: Self::Bytes,
        variant: String,
        identifier: &I,
    ) -> Result<(), Error> {
        let key = variant_key(&hash, &variant);
        let value = identifier.to_bytes()?;

        b!(
            self.hash_variant_identifiers,
            hash_variant_identifiers.insert(key, value)
        );

        Ok(())
    }

    /// Fetches the identifier for a named variant, `None` if not recorded.
    #[tracing::instrument(level = "trace", skip(self, hash), fields(hash = hex::encode(&hash)))]
    async fn variant_identifier<I: Identifier + 'static>(
        &self,
        hash: Self::Bytes,
        variant: String,
    ) -> Result<Option<I>, Error> {
        let key = variant_key(&hash, &variant);

        let opt = b!(
            self.hash_variant_identifiers,
            hash_variant_identifiers.get(key)
        );

        opt.map(|ivec| I::from_bytes(ivec.to_vec()))
            .transpose()
            .map_err(Error::from)
    }

    /// Lists all (variant name, identifier) pairs for `hash`. Undecodable
    /// entries are logged and skipped rather than failing the whole listing.
    #[tracing::instrument(skip(self, hash), fields(hash = hex::encode(&hash)))]
    async fn variants<I: Identifier + 'static>(
        &self,
        hash: Self::Bytes,
    ) -> Result<Vec<(String, I)>, Error> {
        let vec = b!(
            self.hash_variant_identifiers,
            Ok(hash_variant_identifiers
                .scan_prefix(&hash)
                .filter_map(|res| res.ok())
                .filter_map(|(key, ivec)| {
                    let identifier = I::from_bytes(ivec.to_vec()).ok();
                    if identifier.is_none() {
                        tracing::warn!(
                            "Skipping an identifier: {}",
                            String::from_utf8_lossy(&ivec)
                        );
                    }

                    let variant = variant_from_key(&hash, &key);
                    if variant.is_none() {
                        tracing::warn!("Skipping a variant: {}", String::from_utf8_lossy(&key));
                    }

                    // Drop the pair if either half failed to decode.
                    Some((variant?, identifier?))
                })
                .collect::<Vec<_>>()) as Result<Vec<_>, sled::Error>
        );

        Ok(vec)
    }

    /// Removes a single variant mapping for `hash`.
    #[tracing::instrument(level = "trace", skip(self, hash), fields(hash = hex::encode(&hash)))]
    async fn remove_variant(&self, hash: Self::Bytes, variant: String) -> Result<(), Error> {
        let key = variant_key(&hash, &variant);

        b!(
            self.hash_variant_identifiers,
            hash_variant_identifiers.remove(key)
        );

        Ok(())
    }

    /// Records the identifier of the motion (video preview) version.
    #[tracing::instrument(level = "trace", skip(self, hash, identifier), fields(hash = hex::encode(&hash), identifier = identifier.string_repr()))]
    async fn relate_motion_identifier<I: Identifier>(
        &self,
        hash: Self::Bytes,
        identifier: &I,
    ) -> Result<(), Error> {
        let bytes = identifier.to_bytes()?;

        b!(
            self.hash_motion_identifiers,
            hash_motion_identifiers.insert(hash, bytes)
        );

        Ok(())
    }

    /// Fetches the motion identifier for `hash`, `None` if not recorded.
    #[tracing::instrument(level = "trace", skip(self, hash), fields(hash = hex::encode(&hash)))]
    async fn motion_identifier<I: Identifier + 'static>(
        &self,
        hash: Self::Bytes,
    ) -> Result<Option<I>, Error> {
        let opt = b!(
            self.hash_motion_identifiers,
            hash_motion_identifiers.get(hash)
        );

        opt.map(|ivec| I::from_bytes(ivec.to_vec()))
            .transpose()
            .map_err(Error::from)
    }

    /// Removes everything stored for `hash` across all related trees:
    /// the hash marker, original and motion identifiers, alias links, and
    /// variant mappings. Each tree is cleaned in its own blocking call;
    /// alias/variant removals are best-effort (errors ignored).
    #[tracing::instrument(skip(self, hash), fields(hash = hex::encode(&hash)))]
    async fn cleanup(&self, hash: Self::Bytes) -> Result<(), Error> {
        let hash2 = hash.clone();
        b!(self.hashes, hashes.remove(hash2));

        let hash2 = hash.clone();
        b!(self.hash_identifiers, hash_identifiers.remove(hash2));

        let hash2 = hash.clone();
        b!(
            self.hash_motion_identifiers,
            hash_motion_identifiers.remove(hash2)
        );

        // Collect the aliases first, then remove each hash->alias link.
        let aliases = self.aliases(hash.clone()).await?;
        let hash2 = hash.clone();
        b!(self.hash_aliases, {
            for alias in aliases {
                let key = hash_alias_key(&hash2, &alias);

                let _ = hash_aliases.remove(key);
            }
            Ok(()) as Result<(), sled::Error>
        });

        // Collect variant keys in one pass, then delete them in a second pass
        // (avoids mutating the tree while scanning it).
        let variant_keys = b!(self.hash_variant_identifiers, {
            let v = hash_variant_identifiers
                .scan_prefix(hash)
                .keys()
                .filter_map(Result::ok)
                .collect::<Vec<_>>();

            Ok(v) as Result<Vec<_>, sled::Error>
        });
        b!(self.hash_variant_identifiers, {
            for key in variant_keys {
                let _ = hash_variant_identifiers.remove(key);
            }
            Ok(()) as Result<(), sled::Error>
        });

        Ok(())
    }
}
|
|
|
|
|
2022-03-26 21:49:23 +00:00
|
|
|
#[async_trait::async_trait(?Send)]
|
2022-03-24 22:09:15 +00:00
|
|
|
impl AliasRepo for SledRepo {
|
2022-10-02 02:17:18 +00:00
|
|
|
#[tracing::instrument(level = "trace", skip(self))]
|
2022-03-27 01:45:12 +00:00
|
|
|
async fn create(&self, alias: &Alias) -> Result<Result<(), AlreadyExists>, Error> {
|
2022-03-24 22:09:15 +00:00
|
|
|
let bytes = alias.to_bytes();
|
|
|
|
let bytes2 = bytes.clone();
|
|
|
|
|
|
|
|
let res = b!(
|
|
|
|
self.aliases,
|
|
|
|
aliases.compare_and_swap(bytes, None as Option<Self::Bytes>, Some(bytes2))
|
|
|
|
);
|
|
|
|
|
|
|
|
Ok(res.map_err(|_| AlreadyExists))
|
|
|
|
}
|
|
|
|
|
2022-10-02 02:17:18 +00:00
|
|
|
#[tracing::instrument(level = "trace", skip(self))]
|
2022-03-24 22:09:15 +00:00
|
|
|
async fn relate_delete_token(
|
|
|
|
&self,
|
2022-03-26 21:49:23 +00:00
|
|
|
alias: &Alias,
|
|
|
|
delete_token: &DeleteToken,
|
2022-03-27 01:45:12 +00:00
|
|
|
) -> Result<Result<(), AlreadyExists>, Error> {
|
2022-03-24 22:09:15 +00:00
|
|
|
let key = alias.to_bytes();
|
|
|
|
let token = delete_token.to_bytes();
|
|
|
|
|
|
|
|
let res = b!(
|
|
|
|
self.alias_delete_tokens,
|
|
|
|
alias_delete_tokens.compare_and_swap(key, None as Option<Self::Bytes>, Some(token))
|
|
|
|
);
|
|
|
|
|
|
|
|
Ok(res.map_err(|_| AlreadyExists))
|
|
|
|
}
|
|
|
|
|
2022-10-02 02:17:18 +00:00
|
|
|
#[tracing::instrument(level = "trace", skip(self))]
|
2022-03-27 01:45:12 +00:00
|
|
|
async fn delete_token(&self, alias: &Alias) -> Result<DeleteToken, Error> {
|
2022-03-24 22:09:15 +00:00
|
|
|
let key = alias.to_bytes();
|
|
|
|
|
|
|
|
let opt = b!(self.alias_delete_tokens, alias_delete_tokens.get(key));
|
|
|
|
|
|
|
|
opt.and_then(|ivec| DeleteToken::from_slice(&ivec))
|
2022-03-27 01:45:12 +00:00
|
|
|
.ok_or(SledError::Missing)
|
|
|
|
.map_err(Error::from)
|
2022-03-24 22:09:15 +00:00
|
|
|
}
|
|
|
|
|
2022-10-02 02:17:18 +00:00
|
|
|
#[tracing::instrument(level = "trace", skip(self, hash), fields(hash = hex::encode(&hash)))]
|
2022-03-27 01:45:12 +00:00
|
|
|
async fn relate_hash(&self, alias: &Alias, hash: Self::Bytes) -> Result<(), Error> {
|
2022-03-24 22:09:15 +00:00
|
|
|
let key = alias.to_bytes();
|
|
|
|
|
|
|
|
b!(self.alias_hashes, alias_hashes.insert(key, hash));
|
|
|
|
|
|
|
|
Ok(())
|
|
|
|
}
|
|
|
|
|
2022-10-02 02:17:18 +00:00
|
|
|
#[tracing::instrument(level = "trace", skip(self))]
|
2022-03-27 01:45:12 +00:00
|
|
|
async fn hash(&self, alias: &Alias) -> Result<Self::Bytes, Error> {
|
2022-03-24 22:09:15 +00:00
|
|
|
let key = alias.to_bytes();
|
|
|
|
|
|
|
|
let opt = b!(self.alias_hashes, alias_hashes.get(key));
|
|
|
|
|
2022-03-27 01:45:12 +00:00
|
|
|
opt.ok_or(SledError::Missing).map_err(Error::from)
|
2022-03-24 22:09:15 +00:00
|
|
|
}
|
|
|
|
|
2022-09-28 04:19:52 +00:00
|
|
|
#[tracing::instrument(skip(self))]
|
2022-03-27 01:45:12 +00:00
|
|
|
async fn cleanup(&self, alias: &Alias) -> Result<(), Error> {
|
2022-03-24 22:09:15 +00:00
|
|
|
let key = alias.to_bytes();
|
|
|
|
|
|
|
|
let key2 = key.clone();
|
|
|
|
b!(self.aliases, aliases.remove(key2));
|
|
|
|
|
|
|
|
let key2 = key.clone();
|
|
|
|
b!(self.alias_delete_tokens, alias_delete_tokens.remove(key2));
|
|
|
|
|
|
|
|
b!(self.alias_hashes, alias_hashes.remove(key));
|
|
|
|
|
|
|
|
Ok(())
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
impl std::fmt::Debug for SledRepo {
|
|
|
|
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
|
|
|
f.debug_struct("SledRepo").finish()
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2022-03-27 01:45:12 +00:00
|
|
|
impl From<actix_rt::task::JoinError> for SledError {
    /// A blocking task that panicked (or was cancelled) surfaces as `Panic`;
    /// the join error itself carries no detail worth preserving here.
    fn from(_: actix_rt::task::JoinError) -> Self {
        Self::Panic
    }
}
|