Replace log with tracing

Aode (lion) 2021-09-16 17:50:32 -05:00
parent 5828b3d68a
commit ba60a5cbe0
18 changed files with 226 additions and 107 deletions
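At a high level, the commit swaps the `log` crate for `tracing` across the workspace, instruments workers and jobs with spans, and gates completion/error events behind new `completion-logging` and `error-logging` features. The sketch below is not taken from the diff; it is a minimal, hedged illustration of the log-to-tracing migration pattern, assuming `tracing` 0.1 and a `tracing-subscriber` build with env-filter support enabled.

```rust
// Illustrative sketch only: names below are not from the diff.
// Assumes tracing = "0.1" and tracing-subscriber with the "env-filter" and "fmt" features.
use tracing::{info, info_span};
use tracing_subscriber::EnvFilter;

fn main() {
    // Replaces `env_logger::init()`: honor RUST_LOG if set, otherwise default to "info".
    let filter = EnvFilter::try_from_default_env().unwrap_or_else(|_| EnvFilter::new("info"));
    tracing_subscriber::fmt().with_env_filter(filter).init();

    // Unlike plain `log` lines, spans carry structured fields that later events inherit.
    let span = info_span!("Worker", worker.id = 1_u64);
    let _guard = span.enter();
    info!("worker started"); // emitted inside the "Worker" span
}
```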

View file

@@ -1,7 +1,7 @@
 [package]
 name = "background-jobs"
 description = "Background Jobs implemented with actix and futures"
-version = "0.9.0"
+version = "0.10.0"
 license-file = "LICENSE"
 authors = ["asonix <asonix@asonix.dog>"]
 repository = "https://git.asonix.dog/Aardwolf/background-jobs"
@@ -19,12 +19,14 @@ members = [
 [features]
 default = ["background-jobs-actix"]
+completion-logging = ["background-jobs-core/completion-logging", "error-logging"]
+error-logging = ["background-jobs-core/error-logging"]
 
 [dependencies.background-jobs-core]
-version = "0.9.0"
+version = "0.10.0"
 path = "jobs-core"
 
 [dependencies.background-jobs-actix]
-version = "0.9.0"
+version = "0.10.0"
 path = "jobs-actix"
 optional = true

View file

@@ -10,9 +10,10 @@ edition = "2018"
 actix-rt = "2.0.0"
 anyhow = "1.0"
 async-trait = "0.1.24"
-background-jobs = { version = "0.9.0", path = "../.." }
-background-jobs-sled-storage = { version = "0.9.0", path = "../../jobs-sled" }
+background-jobs = { version = "0.10.0", path = "../..", features = ["error-logging"] }
+background-jobs-sled-storage = { version = "0.10.0", path = "../../jobs-sled" }
 chrono = "0.4"
-env_logger = "0.8"
+tracing = "0.1"
+tracing-subscriber = { version = "0.2", features = ["fmt"] }
 serde = { version = "1.0", features = ["derive"] }
 sled = "0.34"

View file

@@ -1,8 +1,10 @@
 use anyhow::Error;
-use background_jobs::{create_server, Job, MaxRetries, WorkerConfig};
+use background_jobs::{create_server, ActixJob as Job, MaxRetries, WorkerConfig};
 use background_jobs_sled_storage::Storage;
 use chrono::{Duration, Utc};
 use std::future::{ready, Ready};
+use tracing::info;
+use tracing_subscriber::EnvFilter;
 
 const DEFAULT_QUEUE: &str = "default";
@@ -22,10 +24,12 @@ pub struct PanickingJob;
 #[actix_rt::main]
 async fn main() -> Result<(), Error> {
-    if std::env::var_os("RUST_LOG").is_none() {
-        std::env::set_var("RUST_LOG", "info");
-    }
-    env_logger::init();
+    let env_filter = EnvFilter::try_from_default_env().unwrap_or_else(|_| EnvFilter::new("info"));
+
+    tracing_subscriber::fmt::fmt()
+        .with_env_filter(env_filter)
+        .init();
 
     // Set up our Storage
     let db = sled::Config::new().temporary(true).open()?;
     let storage = Storage::new(db)?;
@@ -97,7 +101,7 @@ impl Job for MyJob {
     const MAX_RETRIES: MaxRetries = MaxRetries::Count(1);
 
     fn run(self, state: MyState) -> Self::Future {
-        println!("{}: args, {:?}", state.app_name, self);
+        info!("{}: args, {:?}", state.app_name, self);
 
         ready(Ok(()))
     }

View file

@@ -1,7 +1,7 @@
 [package]
 name = "background-jobs-actix"
 description = "in-process jobs processor based on Actix"
-version = "0.9.1"
+version = "0.10.0"
 license-file = "../LICENSE"
 authors = ["asonix <asonix@asonix.dog>"]
 repository = "https://git.asonix.dog/Aardwolf/background-jobs"
@@ -10,13 +10,14 @@ readme = "../README.md"
 edition = "2018"
 
 [dependencies]
-actix-rt = "2.0.0"
+actix-rt = "2.2.0"
 anyhow = "1.0"
 async-mutex = "1.0.1"
 async-trait = "0.1.24"
-background-jobs-core = { version = "0.9.0", path = "../jobs-core", features = ["with-actix"] }
+background-jobs-core = { version = "0.10.0", path = "../jobs-core", features = ["with-actix"] }
 chrono = "0.4"
-log = "0.4"
+tracing = "0.1"
+tracing-futures = "0.2"
 num_cpus = "1.10.0"
 serde = { version = "1.0", features = ["derive"] }
 serde_json = "1.0"

View file

@@ -1,7 +1,7 @@
 use crate::{Job, QueueHandle};
 use actix_rt::time::{interval_at, Instant};
-use log::error;
 use std::time::Duration;
+use tracing::error;
 
 /// A type used to schedule recurring jobs.
 ///

View file

@@ -120,8 +120,8 @@ use actix_rt::{Arbiter, ArbiterHandle};
 use anyhow::Error;
 use background_jobs_core::{new_job, new_scheduled_job, Job, ProcessorMap, Stats, Storage};
 use chrono::{DateTime, Utc};
-use log::error;
 use std::{collections::BTreeMap, sync::Arc, time::Duration};
+use tracing::error;
 
 mod every;
 mod server;

View file

@@ -9,12 +9,12 @@ use actix_rt::{
 use anyhow::Error;
 use async_mutex::Mutex;
 use background_jobs_core::{NewJobInfo, ReturnJobInfo, Stats, Storage};
-use log::{error, trace};
 use std::{
     collections::{HashMap, VecDeque},
     sync::Arc,
     time::Duration,
 };
+use tracing::{error, trace};
 
 type WorkerQueue = VecDeque<Box<dyn Worker + Send + Sync>>;

View file

@@ -1,8 +1,9 @@
 use crate::Server;
 use actix_rt::spawn;
 use background_jobs_core::{CachedProcessorMap, JobInfo};
-use log::{debug, error, warn};
 use tokio::sync::mpsc::{channel, Sender};
+use tracing::{debug, error, info, warn, Span};
+use tracing_futures::Instrument;
 use uuid::Uuid;
 
 #[async_trait::async_trait]
@@ -51,27 +52,49 @@ pub(crate) fn local_worker<State>(
 {
     let id = Uuid::new_v4();
 
+    let span = tracing::info_span!(
+        parent: None,
+        "Worker",
+        worker.id = tracing::field::display(&id),
+        worker.queue = tracing::field::display(&queue),
+        exception.message = tracing::field::Empty,
+        exception.details = tracing::field::Empty,
+    );
+
+    spawn(
+        async move {
             let (tx, mut rx) = channel(16);
 
             let handle = LocalWorkerHandle { tx, id, queue };
-    spawn(async move {
+            let span = Span::current();
             debug!("Beginning worker loop for {}", id);
             if let Err(e) = server.request_job(Box::new(handle.clone())).await {
-                error!("Couldn't request first job, bailing, {}", e);
+                let display = format!("{}", e);
+                let debug = format!("{:?}", e);
+                span.record("exception.message", &tracing::field::display(&display));
+                span.record("exception.details", &tracing::field::display(&debug));
+                error!("Failed to notify server of new worker, {}", e);
                 return;
            }
            while let Some(job) = rx.recv().await {
                let return_job = processors.process(job).await;
 
                if let Err(e) = server.return_job(return_job).await {
-                    error!("Error returning job, {}", e);
+                    warn!("Failed to return completed job, {}", e);
                }
 
                if let Err(e) = server.request_job(Box::new(handle.clone())).await {
-                    error!("Error requesting job, {}", e);
+                    let display = format!("{}", e);
+                    let debug = format!("{:?}", e);
+                    span.record("exception.message", &tracing::field::display(&display));
+                    span.record("exception.details", &tracing::field::display(&debug));
+                    error!("Failed to notify server of ready worker, {}", e);
                    break;
                }
            }
-            warn!("Worker {} closing", id);
-    });
+            info!("Worker closing");
+        }
+        .instrument(span),
+    );
 }
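For context on the worker change above: `tracing_futures::Instrument` attaches a span to a spawned future so that `Span::current()` and every event inside the async block resolve to that span. A minimal sketch of the pattern, with illustrative names rather than code from this repository:

```rust
// Sketch of the Instrument pattern used in the worker above; the runtime call
// and field names are illustrative assumptions, not code from the commit.
use tracing::{info, info_span, Span};
use tracing_futures::Instrument;

async fn worker_loop(id: u64) {
    // Inside the instrumented future, Span::current() is the "Worker" span,
    // so fields declared Empty there can be filled in later.
    let span = Span::current();
    span.record("worker.state", &tracing::field::display("running"));
    info!("worker {} looping", id);
}

fn spawn_worker(id: u64) {
    let span = info_span!(
        "Worker",
        worker.id = id,
        worker.state = tracing::field::Empty,
    );
    // instrument() ties the span to the future for its whole lifetime.
    actix_rt::spawn(worker_loop(id).instrument(span));
}
```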

View file

@@ -1,7 +1,7 @@
 [package]
 name = "background-jobs-core"
 description = "Core types for implementing an asynchronous jobs processor"
-version = "0.9.3"
+version = "0.10.0"
 license-file = "../LICENSE"
 authors = ["asonix <asonix@asonix.dog>"]
 repository = "https://git.asonix.dog/Aardwolf/background-jobs"
@@ -12,14 +12,17 @@ edition = "2018"
 [features]
 default = []
 with-actix = ["actix-rt", "tokio"]
+completion-logging = []
+error-logging = []
 
 [dependencies]
-actix-rt = { version = "2.0.0", optional = true }
+actix-rt = { version = "2.2.0", optional = true }
 anyhow = "1.0"
 async-mutex = "1.0.1"
 async-trait = "0.1.24"
 chrono = { version = "0.4", features = ["serde"] }
-log = "0.4"
+tracing = "0.1"
+tracing-futures = "0.2.5"
 serde = { version = "1.0", features = ["derive"] }
 serde_json = "1.0"
 thiserror = "1.0"

View file

@@ -1,9 +1,10 @@
 use crate::{Backoff, Job, MaxRetries};
 use anyhow::Error;
-use log::error;
 use serde::{de::DeserializeOwned, ser::Serialize};
 use std::{future::Future, pin::Pin};
 use tokio::sync::oneshot;
+use tracing::{error, Span};
+use tracing_futures::Instrument;
 
 /// The ActixJob trait defines parameters pertaining to an instance of background job
 ///
@@ -92,7 +93,7 @@ pub trait ActixJob: Serialize + DeserializeOwned + 'static {
 impl<T> Job for T
 where
-    T: ActixJob,
+    T: ActixJob + std::panic::UnwindSafe,
 {
     type State = T::State;
     type Future = Pin<Box<dyn Future<Output = Result<(), Error>> + Send>>;
@@ -106,13 +107,25 @@ where
     fn run(self, state: Self::State) -> Self::Future {
         let (tx, rx) = oneshot::channel();
 
-        actix_rt::spawn(async move {
-            if tx.send(ActixJob::run(self, state).await).is_err() {
+        let span = Span::current();
+        let handle = actix_rt::spawn(async move {
+            let entered = span.enter();
+            let fut = ActixJob::run(self, state);
+            drop(entered);
+            let result = fut.instrument(span.clone()).await;
+            if tx.send(result).is_err() {
+                let entered = span.enter();
                 error!("Job dropped");
+                drop(entered);
             }
         });
 
-        Box::pin(async move { rx.await? })
+        Box::pin(async move {
+            handle.await.unwrap();
+            rx.await?
+        })
     }
 
     fn queue(&self) -> &str {

View file

@@ -0,0 +1,41 @@
+use std::{
+    future::Future,
+    pin::Pin,
+    sync::Mutex,
+    task::{Context, Poll},
+};
+
+pub(crate) struct CatchUnwindFuture<F> {
+    future: Mutex<F>,
+}
+
+pub(crate) fn catch_unwind<F>(future: F) -> CatchUnwindFuture<F>
+where
+    F: Future + Unpin,
+{
+    CatchUnwindFuture {
+        future: Mutex::new(future),
+    }
+}
+
+impl<F> Future for CatchUnwindFuture<F>
+where
+    F: Future + Unpin,
+{
+    type Output = std::thread::Result<F::Output>;
+
+    fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
+        let future = &self.future;
+        let waker = cx.waker().clone();
+        let res = std::panic::catch_unwind(|| {
+            let mut context = Context::from_waker(&waker);
+            let mut guard = future.lock().unwrap();
+            Pin::new(&mut *guard).poll(&mut context)
+        });
+
+        match res {
+            Ok(poll) => poll.map(Ok),
+            Err(e) => Poll::Ready(Err(e)),
+        }
+    }
+}
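The new `CatchUnwindFuture` wraps an `Unpin` future and converts a panic during `poll` into an `Err`, so a panicking job is reported as a failure instead of tearing down the worker. A hedged usage sketch, assuming the crate-private `catch_unwind` helper above is in scope and using illustrative job names:

```rust
// Hedged usage sketch (not part of the diff): how the crate-private
// catch_unwind helper above turns a panicking job future into an Err.
use std::panic::AssertUnwindSafe;

async fn flaky_job() {
    panic!("job blew up");
}

async fn run_once() {
    // Box::pin(...) yields an Unpin future, which CatchUnwindFuture requires;
    // AssertUnwindSafe mirrors the UnwindSafe bound the job types now carry.
    let fut = Box::pin(AssertUnwindSafe(flaky_job()));

    match catch_unwind(fut).await {
        Ok(()) => println!("job completed"),
        Err(_panic) => println!("job panicked; reported as a failed job"),
    }
}
```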

View file

@@ -17,7 +17,7 @@ use std::{future::Future, pin::Pin};
 /// ```rust
 /// use anyhow::Error;
 /// use background_jobs_core::{Job, new_job};
-/// use log::info;
+/// use tracing::info;
 /// use std::future::{ready, Ready};
 ///
 /// #[derive(serde::Deserialize, serde::Serialize)]

View file

@@ -1,7 +1,7 @@
 use crate::{Backoff, JobResult, JobStatus, MaxRetries, ShouldStop};
 use chrono::{offset::Utc, DateTime, Duration};
-use log::trace;
 use serde_json::Value;
+use tracing::trace;
 use uuid::Uuid;
 
 #[derive(Clone, Debug, PartialEq, serde::Deserialize, serde::Serialize)]

View file

@@ -10,6 +10,7 @@ use anyhow::Error;
 #[cfg(feature = "with-actix")]
 mod actix_job;
+mod catch_unwind;
 mod job;
 mod job_info;
 mod processor_map;

View file

@@ -1,8 +1,10 @@
-use crate::{Job, JobError, JobInfo, ReturnJobInfo};
+use crate::{catch_unwind::catch_unwind, Job, JobError, JobInfo, ReturnJobInfo};
 use chrono::Utc;
-use log::{error, info};
 use serde_json::Value;
 use std::{collections::HashMap, future::Future, pin::Pin, sync::Arc};
+use tracing::{error, Span};
+use tracing_futures::Instrument;
+use uuid::Uuid;
 
 /// A generic function that processes a job
 ///
@@ -76,17 +78,32 @@ where
     /// This should not be called from outside implementations of a backgoround-jobs runtime. It is
     /// intended for internal use.
     pub async fn process(&self, job: JobInfo) -> ReturnJobInfo {
-        let opt = self
-            .inner
-            .get(job.name())
-            .map(|name| process(Arc::clone(name), (self.state_fn)(), job.clone()));
+        let span = job_span(&job);
+        let opt = self.inner.get(job.name()).map(|name| {
+            let entered = span.enter();
+            let fut = process(Arc::clone(name), (self.state_fn)(), job.clone());
+            drop(entered);
+            fut
+        });
 
-        if let Some(fut) = opt {
-            fut.await
+        let res = if let Some(fut) = opt {
+            fut.instrument(span.clone()).await
         } else {
-            error!("Job {} not registered", job.name());
+            span.record(
+                "exception.message",
+                &tracing::field::display("Not registered"),
+            );
+            span.record(
+                "exception.details",
+                &tracing::field::display("Not registered"),
+            );
+            let entered = span.enter();
+            error!("Not registered");
+            drop(entered);
             ReturnJobInfo::unregistered(job.id())
-        }
+        };
+
+        res
     }
 }
@@ -99,51 +116,44 @@ where
     /// This should not be called from outside implementations of a backgoround-jobs runtime. It is
     /// intended for internal use.
     pub async fn process(&self, job: JobInfo) -> ReturnJobInfo {
-        if let Some(name) = self.inner.get(job.name()) {
-            process(Arc::clone(name), self.state.clone(), job).await
+        let span = job_span(&job);
+
+        let res = if let Some(name) = self.inner.get(job.name()) {
+            let entered = span.enter();
+            let fut = process(Arc::clone(name), self.state.clone(), job);
+            drop(entered);
+            fut.instrument(span.clone()).await
         } else {
-            error!("Job {} not registered", job.name());
+            let entered = span.enter();
+            span.record(
+                "exception.message",
+                &tracing::field::display("Not registered"),
+            );
+            span.record(
+                "exception.details",
+                &tracing::field::display("Not registered"),
+            );
+            error!("Not registered");
+            drop(entered);
             ReturnJobInfo::unregistered(job.id())
-        }
+        };
+
+        res
     }
 }
 
-struct CatchUnwindFuture<F> {
-    future: std::sync::Mutex<F>,
-}
-
-fn catch_unwind<F>(future: F) -> CatchUnwindFuture<F>
-where
-    F: Future + Unpin,
-{
-    CatchUnwindFuture {
-        future: std::sync::Mutex::new(future),
-    }
-}
-
-impl<F> std::future::Future for CatchUnwindFuture<F>
-where
-    F: Future + Unpin,
-{
-    type Output = std::thread::Result<F::Output>;
-
-    fn poll(
-        self: Pin<&mut Self>,
-        cx: &mut std::task::Context<'_>,
-    ) -> std::task::Poll<Self::Output> {
-        let future = &self.future;
-        let waker = cx.waker().clone();
-        let res = std::panic::catch_unwind(|| {
-            let mut context = std::task::Context::from_waker(&waker);
-            let mut guard = future.lock().unwrap();
-            Pin::new(&mut *guard).poll(&mut context)
-        });
-        match res {
-            Ok(poll) => poll.map(Ok),
-            Err(e) => std::task::Poll::Ready(Err(e)),
-        }
-    }
-}
+fn job_span(job: &JobInfo) -> Span {
+    tracing::info_span!(
+        "Job",
+        execution_id = tracing::field::display(&Uuid::new_v4()),
+        job.id = tracing::field::display(&job.id()),
+        job.name = tracing::field::display(&job.name()),
+        job.args = tracing::field::debug(&job.args()),
+        job.execution_time = tracing::field::Empty,
+        exception.message = tracing::field::Empty,
+        exception.details = tracing::field::Empty,
+    )
+}
 
 async fn process<S>(process_fn: ProcessFn<S>, state: S, job: JobInfo) -> ReturnJobInfo
@@ -152,7 +162,6 @@ where
 {
     let args = job.args();
     let id = job.id();
-    let name = job.name().to_owned();
 
     let start = Utc::now();
@@ -176,17 +185,36 @@ where
         0_f64
     };
 
+    let span = Span::current();
+    span.record("job.execution_time", &tracing::field::display(&seconds));
+
     match res {
         Ok(Ok(_)) => {
-            info!("Job {} {} completed {:.6}", id, name, seconds);
+            #[cfg(feature = "completion-logging")]
+            tracing::info!("Job completed");
+
             ReturnJobInfo::pass(id)
         }
         Ok(Err(e)) => {
-            info!("Job {} {} errored {} {:.6}", id, name, e, seconds);
+            let display = format!("{}", e);
+            let debug = format!("{:?}", e);
+            span.record("exception.message", &tracing::field::display(&display));
+            span.record("exception.details", &tracing::field::display(&debug));
+
+            #[cfg(feature = "error-logging")]
+            tracing::warn!("Job errored: {:?}", e);
+
             ReturnJobInfo::fail(id)
         }
         Err(_) => {
-            info!("Job {} {} panicked {:.6}", id, name, seconds);
+            span.record(
+                "exception.message",
+                &tracing::field::display("Job panicked"),
+            );
+            span.record(
+                "exception.details",
+                &tracing::field::display("Job panicked"),
+            );
+
+            #[cfg(feature = "error-logging")]
+            tracing::warn!("Job panicked");
+
             ReturnJobInfo::fail(id)
         }
     }
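The `#[cfg(feature = ...)]` attributes above compile the completion and error events in only when the corresponding Cargo features added to jobs-core in this commit are enabled. A small sketch of the same gating pattern, with an illustrative helper name:

```rust
// Sketch of the feature-gated logging pattern above; `report_outcome` is an
// illustrative helper, not a function from this crate.
fn report_outcome(result: &Result<(), anyhow::Error>) {
    match result {
        Ok(()) => {
            // Compiled in only when the `completion-logging` feature is on.
            #[cfg(feature = "completion-logging")]
            tracing::info!("Job completed");
        }
        Err(e) => {
            let _ = &e; // keep `e` used even when the feature is disabled
            // Compiled in only when the `error-logging` feature is on.
            #[cfg(feature = "error-logging")]
            tracing::warn!("Job errored: {:?}", e);
        }
    }
}
```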

View file

@@ -1,7 +1,7 @@
 use crate::{JobInfo, NewJobInfo, ReturnJobInfo, Stats};
 use chrono::offset::Utc;
-use log::info;
 use std::error::Error;
+use tracing::info;
 use uuid::Uuid;
 
 /// Define a storage backend for jobs
@@ -109,7 +109,9 @@ pub trait Storage: Clone + Send {
             self.save_job(job).await?;
             self.update_stats(Stats::retry_job).await
         } else {
-            info!("Job {} failed permanently", id);
+            #[cfg(feature = "error-logging")]
+            tracing::warn!("Job {} failed permanently", id);
+
             self.delete_job(id).await?;
             self.update_stats(Stats::fail_job).await
         }

View file

@@ -1,7 +1,7 @@
 [package]
 name = "background-jobs-sled-storage"
 description = "Sled storage backend for background-jobs"
-version = "0.9.1"
+version = "0.10.0"
 license-file = "../LICENSE"
 authors = ["asonix <asonix@asonix.dog>"]
 repository = "https://git.asonix.dog/Aardwolf/background-jobs"
@@ -13,7 +13,7 @@ edition = "2018"
 [dependencies]
 actix-rt = "2.0.1"
 async-trait = "0.1.24"
-background-jobs-core = { version = "0.9.0", path = "../jobs-core" }
+background-jobs-core = { version = "0.10.0", path = "../jobs-core" }
 bincode = "1.2"
 chrono = "0.4"
 sled = "0.34"

View file

@@ -125,7 +125,7 @@ impl background_jobs_core::Storage for Storage {
                 }
             })
             .filter_map(|id| this.jobinfo.get(id).ok())
-            .filter_map(|opt| opt)
+            .flatten()
             .filter_map(|ivec| serde_cbor::from_slice(&ivec).ok())
             .find(|job: &JobInfo| job.is_ready(now) && job.is_pending(now));
@@ -216,7 +216,7 @@ impl background_jobs_core::Storage for Storage {
         Ok(spawn_blocking(move || {
             this.stats.fetch_and_update("stats", move |opt| {
                 let stats = if let Some(stats_ivec) = opt {
-                    bincode::deserialize(&stats_ivec).unwrap_or_default()
+                    bincode::deserialize(stats_ivec).unwrap_or_default()
                 } else {
                     Stats::default()
                 };