use chrono::{Duration, Utc};
use serde::{Deserialize, Serialize};
use serde_json::Value;
use uuid::Uuid;

use crate::config::Config;
use crate::database::{
    get_database_client,
    DatabaseClient,
    DatabaseError,
    DatabaseTypeError,
    DbPool,
};
use crate::models::{
    background_jobs::queries::{
        enqueue_job,
        get_job_batch,
        delete_job_from_queue,
    },
    background_jobs::types::JobType,
    users::queries::get_user_by_id,
};

use super::deliverer::{OutgoingActivity, Recipient};
use super::fetcher::fetchers::FetchError;
use super::receiver::{handle_activity, HandlerError};
/// Queue payload for a received activity that is awaiting processing.
///
/// Serialized into the background-job table via serde, so field names are
/// part of the stored job format and must remain stable.
#[derive(Deserialize, Serialize)]
pub struct IncomingActivityJobData {
    // Raw activity JSON exactly as received.
    activity: Value,
    // Whether the activity arrived with a verified signature
    // (passed through to `handle_activity`).
    is_authenticated: bool,
    // Number of failed processing attempts so far; used by
    // `process_queued_incoming_activities` to cap retries.
    failure_count: i32,
}
impl IncomingActivityJobData {
|
2022-12-31 00:06:33 +00:00
|
|
|
pub fn new(activity: &Value, is_authenticated: bool) -> Self {
|
|
|
|
Self {
|
|
|
|
activity: activity.clone(),
|
|
|
|
is_authenticated,
|
|
|
|
failure_count: 0,
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2022-12-31 13:28:25 +00:00
|
|
|
pub async fn into_job(
|
2022-12-31 00:06:33 +00:00
|
|
|
self,
|
2023-01-17 23:14:18 +00:00
|
|
|
db_client: &impl DatabaseClient,
|
2022-12-31 00:06:33 +00:00
|
|
|
delay: i64,
|
|
|
|
) -> Result<(), DatabaseError> {
|
|
|
|
let job_data = serde_json::to_value(self)
|
|
|
|
.expect("activity should be serializable");
|
|
|
|
let scheduled_for = Utc::now() + Duration::seconds(delay);
|
|
|
|
enqueue_job(
|
|
|
|
db_client,
|
|
|
|
&JobType::IncomingActivity,
|
|
|
|
&job_data,
|
|
|
|
&scheduled_for,
|
|
|
|
).await
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2022-12-11 18:41:08 +00:00
|
|
|
/// Pulls a batch of queued incoming activities and processes each one,
/// re-queueing failures (with a delay) up to a retry limit.
///
/// For every job: deserialize the payload, run `handle_activity`, and on
/// failure increment the failure counter and re-enqueue unless retries are
/// exhausted or the error is a fetcher recursion error. The original job
/// row is always deleted afterwards — a retry is a brand-new job, so no
/// job is ever processed twice from the same row.
pub async fn process_queued_incoming_activities(
    config: &Config,
    db_client: &mut impl DatabaseClient,
) -> Result<(), DatabaseError> {
    // Tuning constants: batch size per invocation, max re-queue attempts,
    // and how far in the future a retry is scheduled.
    let batch_size = 10;
    let max_retries = 2;
    let retry_after = 60 * 10; // 10 minutes

    let batch = get_job_batch(
        db_client,
        &JobType::IncomingActivity,
        batch_size,
    ).await?;
    for job in batch {
        // A payload that fails to deserialize indicates corrupt stored
        // data, surfaced as a database type error.
        let mut job_data: IncomingActivityJobData =
            serde_json::from_value(job.job_data)
                .map_err(|_| DatabaseTypeError)?;
        if let Err(error) = handle_activity(
            config,
            db_client,
            &job_data.activity,
            job_data.is_authenticated,
        ).await {
            // Count this attempt before deciding whether to retry.
            job_data.failure_count += 1;
            log::warn!(
                "failed to process activity ({}) (attempt #{}): {}",
                error,
                job_data.failure_count,
                job_data.activity,
            );
            if job_data.failure_count <= max_retries &&
                // Don't retry after fetcher recursion error
                !matches!(error, HandlerError::FetchError(FetchError::RecursionError))
            {
                // Re-queue
                log::info!("activity re-queued");
                job_data.into_job(db_client, retry_after).await?;
            };
        };
        // Delete the consumed job row regardless of outcome; a retry was
        // enqueued above as a separate job if needed.
        delete_job_from_queue(db_client, &job.id).await?;
    };
    Ok(())
}
/// Queue payload for an activity that must be delivered to remote
/// recipients.
///
/// Serialized into the background-job table via serde, so field names are
/// part of the stored job format and must remain stable.
#[derive(Deserialize, Serialize)]
pub struct OutgoingActivityJobData {
    // Activity JSON to deliver.
    pub activity: Value,
    // Local user on whose behalf the activity is sent; resolved to a full
    // user record at delivery time via `get_user_by_id`.
    pub sender_id: Uuid,
    // Delivery targets for this activity.
    pub recipients: Vec<Recipient>,
}
impl OutgoingActivityJobData {
|
|
|
|
pub async fn into_job(
|
|
|
|
self,
|
2023-01-17 23:14:18 +00:00
|
|
|
db_client: &impl DatabaseClient,
|
2022-12-11 18:41:08 +00:00
|
|
|
) -> Result<(), DatabaseError> {
|
|
|
|
let job_data = serde_json::to_value(self)
|
|
|
|
.expect("activity should be serializable");
|
|
|
|
let scheduled_for = Utc::now();
|
|
|
|
enqueue_job(
|
|
|
|
db_client,
|
|
|
|
&JobType::OutgoingActivity,
|
|
|
|
&job_data,
|
|
|
|
&scheduled_for,
|
|
|
|
).await
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
/// Pulls queued outgoing activities and spawns a delivery task for each.
///
/// Delivery itself is fire-and-forget (`spawn_deliver`), so the job row is
/// deleted as soon as the task is spawned — delivery failures do not
/// re-queue the job here.
pub async fn process_queued_outgoing_activities(
    config: &Config,
    db_pool: &DbPool,
) -> Result<(), DatabaseError> {
    let db_client = &**get_database_client(db_pool).await?;
    // Deliveries are spawned one at a time per invocation.
    let batch_size = 1;
    let batch = get_job_batch(
        db_client,
        &JobType::OutgoingActivity,
        batch_size,
    ).await?;
    for job in batch {
        // A payload that fails to deserialize indicates corrupt stored
        // data, surfaced as a database type error.
        let job_data: OutgoingActivityJobData =
            serde_json::from_value(job.job_data)
                .map_err(|_| DatabaseTypeError)?;
        let sender = get_user_by_id(db_client, &job_data.sender_id).await?;
        let outgoing_activity = OutgoingActivity {
            // The spawned task needs its own pool handle, since it
            // outlives this function's borrow.
            db_pool: Some(db_pool.clone()),
            instance: config.instance(),
            sender,
            activity: job_data.activity,
            recipients: job_data.recipients,
        };
        // Fire-and-forget: delivery runs in a background task.
        outgoing_activity.spawn_deliver();
        delete_job_from_queue(db_client, &job.id).await?;
    };
    Ok(())
}