2021-06-23 10:48:03 +00:00
|
|
|
use crate::executor::Runnable;
|
2021-07-18 19:09:30 +00:00
|
|
|
use crate::schema::fang_periodic_tasks;
|
2021-06-06 08:44:46 +00:00
|
|
|
use crate::schema::fang_tasks;
|
2021-06-12 12:59:09 +00:00
|
|
|
use crate::schema::FangTaskState;
|
2021-07-18 19:09:30 +00:00
|
|
|
use chrono::DateTime;
|
|
|
|
use chrono::Duration;
|
|
|
|
use chrono::Utc;
|
2021-06-06 08:44:46 +00:00
|
|
|
use diesel::pg::PgConnection;
|
|
|
|
use diesel::prelude::*;
|
2021-07-25 12:20:16 +00:00
|
|
|
use diesel::r2d2;
|
2021-06-06 08:44:46 +00:00
|
|
|
use diesel::result::Error;
|
|
|
|
use dotenv::dotenv;
|
|
|
|
use std::env;
|
|
|
|
use uuid::Uuid;
|
|
|
|
|
2021-06-23 10:48:03 +00:00
|
|
|
/// A one-off background task row from the `fang_tasks` table.
///
/// NOTE: field order must match the column order in `fang_tasks`
/// (diesel's `Queryable` maps by position, not by name).
#[derive(Queryable, Identifiable, Debug, Eq, PartialEq, Clone)]
#[table_name = "fang_tasks"]
pub struct Task {
    /// Primary key.
    pub id: Uuid,
    /// The serialized `Runnable` job (written via `serde_json::to_value`).
    pub metadata: serde_json::Value,
    /// Populated by `fail_task` when a job errors; `None` otherwise.
    pub error_message: Option<String>,
    /// Lifecycle state (`New` -> `InProgress` -> `Finished`/`Failed`).
    pub state: FangTaskState,
    /// Used by workers to fetch only tasks of a given type.
    pub task_type: String,
    pub created_at: DateTime<Utc>,
    pub updated_at: DateTime<Utc>,
}
|
|
|
|
|
2021-07-18 19:09:30 +00:00
|
|
|
/// A recurring task row from the `fang_periodic_tasks` table.
///
/// NOTE: field order must match the column order in `fang_periodic_tasks`
/// (diesel's `Queryable` maps by position, not by name).
#[derive(Queryable, Identifiable, Debug, Eq, PartialEq, Clone)]
#[table_name = "fang_periodic_tasks"]
pub struct PeriodicTask {
    /// Primary key.
    pub id: Uuid,
    /// The serialized `Runnable` job.
    pub metadata: serde_json::Value,
    /// Interval between executions; the next run is scheduled this many
    /// seconds after the current one (see `schedule_next_task_execution`).
    pub period_in_seconds: i32,
    /// When the next execution is due; `None` for a freshly pushed task,
    /// which `fetch_periodic_tasks` treats as immediately runnable.
    pub scheduled_at: Option<DateTime<Utc>>,
    pub created_at: DateTime<Utc>,
    pub updated_at: DateTime<Utc>,
}
|
|
|
|
|
2021-06-06 08:44:46 +00:00
|
|
|
/// Insertable record for creating a row in `fang_tasks`.
///
/// Only `metadata` and `task_type` are supplied; the remaining columns
/// (id, state, timestamps) are presumably filled by database defaults —
/// confirm against the migration that creates `fang_tasks`.
#[derive(Insertable)]
#[table_name = "fang_tasks"]
pub struct NewTask {
    pub metadata: serde_json::Value,
    pub task_type: String,
}
|
|
|
|
|
2021-07-18 19:09:30 +00:00
|
|
|
/// Insertable record for creating a row in `fang_periodic_tasks`.
///
/// `scheduled_at` is deliberately omitted so a new periodic task starts
/// with a NULL schedule and is picked up on the next scheduler sweep.
#[derive(Insertable)]
#[table_name = "fang_periodic_tasks"]
pub struct NewPeriodicTask {
    pub metadata: serde_json::Value,
    pub period_in_seconds: i32,
}
|
|
|
|
|
2021-07-25 12:20:16 +00:00
|
|
|
/// Handle for enqueueing and managing tasks over a single Postgres
/// connection. Most methods have a static `*_query` twin that operates on
/// an externally supplied connection (e.g. one taken from a pool).
pub struct Queue {
    pub connection: PgConnection,
}
|
|
|
|
|
2021-07-25 12:20:16 +00:00
|
|
|
impl Default for Queue {
|
2021-07-11 10:17:02 +00:00
|
|
|
fn default() -> Self {
|
|
|
|
Self::new()
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2021-07-25 12:20:16 +00:00
|
|
|
impl Queue {
|
2021-07-03 12:18:41 +00:00
|
|
|
pub fn new() -> Self {
|
|
|
|
let connection = Self::pg_connection(None);
|
|
|
|
|
|
|
|
Self { connection }
|
|
|
|
}
|
|
|
|
|
|
|
|
pub fn new_with_url(database_url: String) -> Self {
|
|
|
|
let connection = Self::pg_connection(Some(database_url));
|
2021-06-06 08:44:46 +00:00
|
|
|
|
2021-06-24 09:58:02 +00:00
|
|
|
Self { connection }
|
2021-06-06 08:44:46 +00:00
|
|
|
}
|
|
|
|
|
2021-07-25 12:20:16 +00:00
|
|
|
pub fn new_with_connection(connection: PgConnection) -> Self {
|
|
|
|
Self { connection }
|
|
|
|
}
|
|
|
|
|
2021-06-23 10:48:03 +00:00
|
|
|
pub fn push_task(&self, job: &dyn Runnable) -> Result<Task, Error> {
|
2021-07-25 12:20:16 +00:00
|
|
|
Self::push_task_query(&self.connection, job)
|
|
|
|
}
|
|
|
|
|
|
|
|
pub fn push_task_query(connection: &PgConnection, job: &dyn Runnable) -> Result<Task, Error> {
|
2021-06-23 10:48:03 +00:00
|
|
|
let json_job = serde_json::to_value(job).unwrap();
|
|
|
|
|
2021-07-25 12:20:16 +00:00
|
|
|
match Self::find_task_by_metadata_query(connection, &json_job) {
|
2021-07-18 19:09:30 +00:00
|
|
|
Some(task) => Ok(task),
|
|
|
|
None => {
|
|
|
|
let new_task = NewTask {
|
|
|
|
metadata: json_job.clone(),
|
|
|
|
task_type: job.task_type(),
|
|
|
|
};
|
2021-07-25 12:20:16 +00:00
|
|
|
Self::insert_query(connection, &new_task)
|
2021-07-18 19:09:30 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
pub fn push_periodic_task(
|
|
|
|
&self,
|
|
|
|
job: &dyn Runnable,
|
|
|
|
period: i32,
|
2021-07-25 12:20:16 +00:00
|
|
|
) -> Result<PeriodicTask, Error> {
|
|
|
|
Self::push_periodic_task_query(&self.connection, job, period)
|
|
|
|
}
|
|
|
|
|
|
|
|
pub fn push_periodic_task_query(
|
|
|
|
connection: &PgConnection,
|
|
|
|
job: &dyn Runnable,
|
|
|
|
period: i32,
|
2021-07-18 19:09:30 +00:00
|
|
|
) -> Result<PeriodicTask, Error> {
|
|
|
|
let json_job = serde_json::to_value(job).unwrap();
|
2021-06-23 10:48:03 +00:00
|
|
|
|
2021-07-25 12:20:16 +00:00
|
|
|
match Self::find_periodic_task_by_metadata_query(connection, &json_job) {
|
2021-07-18 19:09:30 +00:00
|
|
|
Some(task) => Ok(task),
|
|
|
|
None => {
|
|
|
|
let new_task = NewPeriodicTask {
|
|
|
|
metadata: json_job,
|
|
|
|
period_in_seconds: period,
|
|
|
|
};
|
|
|
|
|
|
|
|
diesel::insert_into(fang_periodic_tasks::table)
|
|
|
|
.values(new_task)
|
2021-07-25 12:20:16 +00:00
|
|
|
.get_result::<PeriodicTask>(connection)
|
2021-07-18 19:09:30 +00:00
|
|
|
}
|
|
|
|
}
|
2021-06-23 10:48:03 +00:00
|
|
|
}
|
|
|
|
|
2021-06-24 09:58:02 +00:00
|
|
|
pub fn enqueue_task(job: &dyn Runnable) -> Result<Task, Error> {
|
2021-07-03 12:18:41 +00:00
|
|
|
Self::new().push_task(job)
|
2021-06-24 09:58:02 +00:00
|
|
|
}
|
|
|
|
|
2021-06-06 08:44:46 +00:00
|
|
|
pub fn insert(&self, params: &NewTask) -> Result<Task, Error> {
|
2021-07-25 12:20:16 +00:00
|
|
|
Self::insert_query(&self.connection, params)
|
|
|
|
}
|
|
|
|
|
|
|
|
pub fn insert_query(connection: &PgConnection, params: &NewTask) -> Result<Task, Error> {
|
2021-06-06 08:44:46 +00:00
|
|
|
diesel::insert_into(fang_tasks::table)
|
|
|
|
.values(params)
|
2021-07-25 12:20:16 +00:00
|
|
|
.get_result::<Task>(connection)
|
2021-06-06 08:44:46 +00:00
|
|
|
}
|
2021-06-12 12:59:09 +00:00
|
|
|
|
2021-07-03 04:23:05 +00:00
|
|
|
pub fn fetch_task(&self, task_type: &Option<String>) -> Option<Task> {
|
2021-07-25 12:20:16 +00:00
|
|
|
Self::fetch_task_query(&self.connection, task_type)
|
|
|
|
}
|
|
|
|
|
|
|
|
pub fn fetch_task_query(connection: &PgConnection, task_type: &Option<String>) -> Option<Task> {
|
2021-07-03 04:23:05 +00:00
|
|
|
match task_type {
|
2021-07-25 12:20:16 +00:00
|
|
|
None => Self::fetch_any_task_query(connection),
|
|
|
|
Some(task_type_str) => Self::fetch_task_of_type_query(connection, task_type_str),
|
2021-06-12 12:59:09 +00:00
|
|
|
}
|
|
|
|
}
|
2021-06-20 08:10:02 +00:00
|
|
|
|
2021-07-03 04:23:05 +00:00
|
|
|
pub fn fetch_and_touch(&self, task_type: &Option<String>) -> Result<Option<Task>, Error> {
|
2021-07-25 12:20:16 +00:00
|
|
|
Self::fetch_and_touch_query(&self.connection, task_type)
|
|
|
|
}
|
|
|
|
|
|
|
|
pub fn fetch_and_touch_query(
|
|
|
|
connection: &PgConnection,
|
|
|
|
task_type: &Option<String>,
|
|
|
|
) -> Result<Option<Task>, Error> {
|
|
|
|
connection.transaction::<Option<Task>, Error, _>(|| {
|
|
|
|
let found_task = Self::fetch_task_query(connection, task_type);
|
2021-06-20 08:58:03 +00:00
|
|
|
|
2021-06-23 11:30:19 +00:00
|
|
|
if found_task.is_none() {
|
2021-06-20 08:58:03 +00:00
|
|
|
return Ok(None);
|
|
|
|
}
|
|
|
|
|
2021-07-25 12:20:16 +00:00
|
|
|
match Self::start_processing_task_query(connection, &found_task.unwrap()) {
|
2021-06-20 08:58:03 +00:00
|
|
|
Ok(updated_task) => Ok(Some(updated_task)),
|
|
|
|
Err(err) => Err(err),
|
|
|
|
}
|
|
|
|
})
|
|
|
|
}
|
|
|
|
|
2021-06-20 08:10:02 +00:00
|
|
|
pub fn find_task_by_id(&self, id: Uuid) -> Option<Task> {
|
2021-07-25 12:20:16 +00:00
|
|
|
Self::find_task_by_id_query(&self.connection, id)
|
|
|
|
}
|
|
|
|
|
|
|
|
pub fn find_task_by_id_query(connection: &PgConnection, id: Uuid) -> Option<Task> {
|
2021-07-03 12:18:41 +00:00
|
|
|
fang_tasks::table
|
2021-06-20 08:10:02 +00:00
|
|
|
.filter(fang_tasks::id.eq(id))
|
2021-07-25 12:20:16 +00:00
|
|
|
.first::<Task>(connection)
|
2021-07-03 12:18:41 +00:00
|
|
|
.ok()
|
|
|
|
}
|
|
|
|
|
2021-07-18 19:09:30 +00:00
|
|
|
pub fn find_periodic_task_by_id(&self, id: Uuid) -> Option<PeriodicTask> {
|
2021-07-25 12:20:16 +00:00
|
|
|
Self::find_periodic_task_by_id_query(&self.connection, id)
|
|
|
|
}
|
|
|
|
|
|
|
|
pub fn find_periodic_task_by_id_query(
|
|
|
|
connection: &PgConnection,
|
|
|
|
id: Uuid,
|
|
|
|
) -> Option<PeriodicTask> {
|
2021-07-18 19:09:30 +00:00
|
|
|
fang_periodic_tasks::table
|
|
|
|
.filter(fang_periodic_tasks::id.eq(id))
|
2021-07-25 12:20:16 +00:00
|
|
|
.first::<PeriodicTask>(connection)
|
2021-07-18 19:09:30 +00:00
|
|
|
.ok()
|
|
|
|
}
|
|
|
|
|
|
|
|
pub fn fetch_periodic_tasks(&self, error_margin_seconds: i64) -> Option<Vec<PeriodicTask>> {
|
2021-07-25 12:20:16 +00:00
|
|
|
Self::fetch_periodic_tasks_query(&self.connection, error_margin_seconds)
|
|
|
|
}
|
|
|
|
|
|
|
|
pub fn fetch_periodic_tasks_query(
|
|
|
|
connection: &PgConnection,
|
|
|
|
error_margin_seconds: i64,
|
|
|
|
) -> Option<Vec<PeriodicTask>> {
|
2021-07-18 19:09:30 +00:00
|
|
|
let current_time = Self::current_time();
|
|
|
|
|
|
|
|
let low_limit = current_time - Duration::seconds(error_margin_seconds);
|
|
|
|
let high_limit = current_time + Duration::seconds(error_margin_seconds);
|
|
|
|
|
|
|
|
fang_periodic_tasks::table
|
|
|
|
.filter(
|
|
|
|
fang_periodic_tasks::scheduled_at
|
|
|
|
.gt(low_limit)
|
|
|
|
.and(fang_periodic_tasks::scheduled_at.lt(high_limit)),
|
|
|
|
)
|
|
|
|
.or_filter(fang_periodic_tasks::scheduled_at.is_null())
|
2021-07-25 12:20:16 +00:00
|
|
|
.load::<PeriodicTask>(connection)
|
2021-07-18 19:09:30 +00:00
|
|
|
.ok()
|
|
|
|
}
|
|
|
|
|
|
|
|
pub fn schedule_next_task_execution(&self, task: &PeriodicTask) -> Result<PeriodicTask, Error> {
|
|
|
|
let current_time = Self::current_time();
|
|
|
|
let scheduled_at = current_time + Duration::seconds(task.period_in_seconds.into());
|
|
|
|
|
|
|
|
diesel::update(task)
|
|
|
|
.set((
|
|
|
|
fang_periodic_tasks::scheduled_at.eq(scheduled_at),
|
|
|
|
fang_periodic_tasks::updated_at.eq(current_time),
|
|
|
|
))
|
|
|
|
.get_result::<PeriodicTask>(&self.connection)
|
|
|
|
}
|
|
|
|
|
|
|
|
pub fn remove_all_tasks(&self) -> Result<usize, Error> {
|
2021-07-25 12:20:16 +00:00
|
|
|
Self::remove_all_tasks_query(&self.connection)
|
|
|
|
}
|
|
|
|
|
|
|
|
pub fn remove_all_tasks_query(connection: &PgConnection) -> Result<usize, Error> {
|
|
|
|
diesel::delete(fang_tasks::table).execute(connection)
|
|
|
|
}
|
|
|
|
|
|
|
|
pub fn remove_tasks_of_type(&self, task_type: &str) -> Result<usize, Error> {
|
|
|
|
Self::remove_tasks_of_type_query(&self.connection, task_type)
|
|
|
|
}
|
|
|
|
|
|
|
|
pub fn remove_tasks_of_type_query(
|
|
|
|
connection: &PgConnection,
|
|
|
|
task_type: &str,
|
|
|
|
) -> Result<usize, Error> {
|
|
|
|
let query = fang_tasks::table.filter(fang_tasks::task_type.eq(task_type));
|
|
|
|
|
|
|
|
diesel::delete(query).execute(connection)
|
2021-07-18 19:09:30 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
pub fn remove_all_periodic_tasks(&self) -> Result<usize, Error> {
|
2021-07-25 12:20:16 +00:00
|
|
|
Self::remove_all_periodic_tasks_query(&self.connection)
|
|
|
|
}
|
|
|
|
|
|
|
|
pub fn remove_all_periodic_tasks_query(connection: &PgConnection) -> Result<usize, Error> {
|
|
|
|
diesel::delete(fang_periodic_tasks::table).execute(connection)
|
2021-07-18 19:09:30 +00:00
|
|
|
}
|
|
|
|
|
2021-07-03 12:18:41 +00:00
|
|
|
pub fn remove_task(&self, id: Uuid) -> Result<usize, Error> {
|
2021-07-25 12:20:16 +00:00
|
|
|
Self::remove_task_query(&self.connection, id)
|
|
|
|
}
|
|
|
|
|
|
|
|
pub fn remove_task_query(connection: &PgConnection, id: Uuid) -> Result<usize, Error> {
|
2021-07-03 12:18:41 +00:00
|
|
|
let query = fang_tasks::table.filter(fang_tasks::id.eq(id));
|
|
|
|
|
2021-07-25 12:20:16 +00:00
|
|
|
diesel::delete(query).execute(connection)
|
2021-06-20 08:10:02 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
pub fn finish_task(&self, task: &Task) -> Result<Task, Error> {
|
2021-07-25 12:20:16 +00:00
|
|
|
Self::finish_task_query(&self.connection, task)
|
|
|
|
}
|
|
|
|
|
|
|
|
pub fn finish_task_query(connection: &PgConnection, task: &Task) -> Result<Task, Error> {
|
2021-06-20 08:10:02 +00:00
|
|
|
diesel::update(task)
|
|
|
|
.set((
|
|
|
|
fang_tasks::state.eq(FangTaskState::Finished),
|
|
|
|
fang_tasks::updated_at.eq(Self::current_time()),
|
|
|
|
))
|
2021-07-25 12:20:16 +00:00
|
|
|
.get_result::<Task>(connection)
|
2021-06-20 08:10:02 +00:00
|
|
|
}
|
|
|
|
|
2021-06-20 08:58:03 +00:00
|
|
|
pub fn start_processing_task(&self, task: &Task) -> Result<Task, Error> {
|
2021-07-25 12:20:16 +00:00
|
|
|
Self::start_processing_task_query(&self.connection, task)
|
|
|
|
}
|
|
|
|
|
|
|
|
pub fn start_processing_task_query(
|
|
|
|
connection: &PgConnection,
|
|
|
|
task: &Task,
|
|
|
|
) -> Result<Task, Error> {
|
2021-06-20 08:58:03 +00:00
|
|
|
diesel::update(task)
|
|
|
|
.set((
|
|
|
|
fang_tasks::state.eq(FangTaskState::InProgress),
|
|
|
|
fang_tasks::updated_at.eq(Self::current_time()),
|
|
|
|
))
|
2021-07-25 12:20:16 +00:00
|
|
|
.get_result::<Task>(connection)
|
2021-06-20 08:58:03 +00:00
|
|
|
}
|
|
|
|
|
2021-06-20 08:10:02 +00:00
|
|
|
pub fn fail_task(&self, task: &Task, error: String) -> Result<Task, Error> {
|
2021-07-25 12:20:16 +00:00
|
|
|
Self::fail_task_query(&self.connection, task, error)
|
|
|
|
}
|
|
|
|
|
|
|
|
pub fn fail_task_query(
|
|
|
|
connection: &PgConnection,
|
|
|
|
task: &Task,
|
|
|
|
error: String,
|
|
|
|
) -> Result<Task, Error> {
|
2021-06-20 08:10:02 +00:00
|
|
|
diesel::update(task)
|
|
|
|
.set((
|
|
|
|
fang_tasks::state.eq(FangTaskState::Failed),
|
|
|
|
fang_tasks::error_message.eq(error),
|
|
|
|
fang_tasks::updated_at.eq(Self::current_time()),
|
|
|
|
))
|
2021-07-25 12:20:16 +00:00
|
|
|
.get_result::<Task>(connection)
|
|
|
|
}
|
|
|
|
|
|
|
|
pub fn connection_pool(pool_size: u32) -> r2d2::Pool<r2d2::ConnectionManager<PgConnection>> {
|
|
|
|
dotenv().ok();
|
|
|
|
|
|
|
|
let database_url = env::var("DATABASE_URL").expect("DATABASE_URL must be set");
|
|
|
|
|
|
|
|
let manager = r2d2::ConnectionManager::<PgConnection>::new(database_url);
|
|
|
|
|
|
|
|
r2d2::Pool::builder()
|
|
|
|
.max_size(pool_size)
|
|
|
|
.build(manager)
|
|
|
|
.unwrap()
|
2021-06-20 08:10:02 +00:00
|
|
|
}
|
|
|
|
|
2021-06-24 09:58:02 +00:00
|
|
|
fn current_time() -> DateTime<Utc> {
|
2021-06-20 08:10:02 +00:00
|
|
|
Utc::now()
|
|
|
|
}
|
2021-06-24 09:58:02 +00:00
|
|
|
|
|
|
|
fn pg_connection(database_url: Option<String>) -> PgConnection {
|
|
|
|
dotenv().ok();
|
|
|
|
|
|
|
|
let url = match database_url {
|
|
|
|
Some(string_url) => string_url,
|
|
|
|
None => env::var("DATABASE_URL").expect("DATABASE_URL must be set"),
|
|
|
|
};
|
|
|
|
|
|
|
|
PgConnection::establish(&url).unwrap_or_else(|_| panic!("Error connecting to {}", url))
|
|
|
|
}
|
2021-07-03 04:23:05 +00:00
|
|
|
|
2021-07-25 12:20:16 +00:00
|
|
|
fn fetch_any_task_query(connection: &PgConnection) -> Option<Task> {
|
2021-07-03 12:18:41 +00:00
|
|
|
fang_tasks::table
|
2021-07-03 04:23:05 +00:00
|
|
|
.order(fang_tasks::created_at.asc())
|
|
|
|
.limit(1)
|
|
|
|
.filter(fang_tasks::state.eq(FangTaskState::New))
|
|
|
|
.for_update()
|
|
|
|
.skip_locked()
|
2021-07-25 12:20:16 +00:00
|
|
|
.get_result::<Task>(connection)
|
2021-07-03 12:18:41 +00:00
|
|
|
.ok()
|
2021-07-03 04:23:05 +00:00
|
|
|
}
|
|
|
|
|
2021-07-25 12:20:16 +00:00
|
|
|
fn fetch_task_of_type_query(connection: &PgConnection, task_type: &str) -> Option<Task> {
|
2021-07-03 12:18:41 +00:00
|
|
|
fang_tasks::table
|
2021-07-03 04:23:05 +00:00
|
|
|
.order(fang_tasks::created_at.asc())
|
|
|
|
.limit(1)
|
|
|
|
.filter(fang_tasks::state.eq(FangTaskState::New))
|
|
|
|
.filter(fang_tasks::task_type.eq(task_type))
|
|
|
|
.for_update()
|
|
|
|
.skip_locked()
|
2021-07-25 12:20:16 +00:00
|
|
|
.get_result::<Task>(connection)
|
2021-07-03 12:18:41 +00:00
|
|
|
.ok()
|
2021-07-03 04:23:05 +00:00
|
|
|
}
|
2021-07-18 19:09:30 +00:00
|
|
|
|
2021-07-25 12:20:16 +00:00
|
|
|
fn find_periodic_task_by_metadata_query(
|
|
|
|
connection: &PgConnection,
|
|
|
|
metadata: &serde_json::Value,
|
|
|
|
) -> Option<PeriodicTask> {
|
2021-07-18 19:09:30 +00:00
|
|
|
fang_periodic_tasks::table
|
|
|
|
.filter(fang_periodic_tasks::metadata.eq(metadata))
|
2021-07-25 12:20:16 +00:00
|
|
|
.first::<PeriodicTask>(connection)
|
2021-07-18 19:09:30 +00:00
|
|
|
.ok()
|
|
|
|
}
|
|
|
|
|
2021-07-25 12:20:16 +00:00
|
|
|
fn find_task_by_metadata_query(
|
|
|
|
connection: &PgConnection,
|
|
|
|
metadata: &serde_json::Value,
|
|
|
|
) -> Option<Task> {
|
2021-07-18 19:09:30 +00:00
|
|
|
fang_tasks::table
|
|
|
|
.filter(fang_tasks::metadata.eq(metadata))
|
|
|
|
.filter(
|
|
|
|
fang_tasks::state
|
|
|
|
.eq(FangTaskState::New)
|
|
|
|
.or(fang_tasks::state.eq(FangTaskState::InProgress)),
|
|
|
|
)
|
2021-07-25 12:20:16 +00:00
|
|
|
.first::<Task>(connection)
|
2021-07-18 19:09:30 +00:00
|
|
|
.ok()
|
|
|
|
}
|
2021-06-12 12:59:09 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
#[cfg(test)]
mod queue_tests {
    // Integration tests: they require a reachable Postgres database
    // (DATABASE_URL). Most run inside `test_transaction`, which rolls
    // back automatically; the one exception is marked #[ignore] below.
    use super::NewTask;
    use super::PeriodicTask;
    use super::Queue;
    use super::Task;
    use crate::executor::Error as ExecutorError;
    use crate::executor::Runnable;
    use crate::schema::fang_periodic_tasks;
    use crate::schema::fang_tasks;
    use crate::schema::FangTaskState;
    use crate::typetag;
    use crate::{Deserialize, Serialize};
    use chrono::prelude::*;
    use chrono::{DateTime, Duration, Utc};
    use diesel::connection::Connection;
    use diesel::prelude::*;
    use diesel::result::Error;

    #[test]
    fn insert_inserts_task() {
        let queue = Queue::new();

        let new_task = NewTask {
            metadata: serde_json::json!(true),
            task_type: "common".to_string(),
        };

        let result = queue
            .connection
            .test_transaction::<Task, Error, _>(|| queue.insert(&new_task));

        // A freshly inserted task starts in the New state with no error.
        assert_eq!(result.state, FangTaskState::New);
        assert_eq!(result.error_message, None);
    }

    #[test]
    fn fetch_task_fetches_the_oldest_task() {
        let queue = Queue::new();

        queue.connection.test_transaction::<(), Error, _>(|| {
            let timestamp1 = Utc::now() - Duration::hours(40);

            let task1 = insert_job(serde_json::json!(true), timestamp1, &queue.connection);

            let timestamp2 = Utc::now() - Duration::hours(20);

            insert_job(serde_json::json!(false), timestamp2, &queue.connection);

            let found_task = queue.fetch_task(&None).unwrap();

            // Tasks are fetched in created_at order: oldest first.
            assert_eq!(found_task.id, task1.id);

            Ok(())
        });
    }

    #[test]
    fn finish_task_updates_state_field() {
        let queue = Queue::new();

        queue.connection.test_transaction::<(), Error, _>(|| {
            let task = insert_new_job(&queue.connection);

            let updated_task = queue.finish_task(&task).unwrap();

            assert_eq!(FangTaskState::Finished, updated_task.state);

            Ok(())
        });
    }

    #[test]
    fn fail_task_updates_state_field_and_sets_error_message() {
        let queue = Queue::new();

        queue.connection.test_transaction::<(), Error, _>(|| {
            let task = insert_new_job(&queue.connection);
            let error = "Failed".to_string();

            let updated_task = queue.fail_task(&task, error.clone()).unwrap();

            assert_eq!(FangTaskState::Failed, updated_task.state);
            assert_eq!(error, updated_task.error_message.unwrap());

            Ok(())
        });
    }

    #[test]
    fn fetch_and_touch_updates_state() {
        let queue = Queue::new();

        queue.connection.test_transaction::<(), Error, _>(|| {
            let _task = insert_new_job(&queue.connection);

            let updated_task = queue.fetch_and_touch(&None).unwrap().unwrap();

            // fetch_and_touch transitions the fetched task to InProgress.
            assert_eq!(FangTaskState::InProgress, updated_task.state);

            Ok(())
        });
    }

    #[test]
    fn fetch_and_touch_returns_none() {
        let queue = Queue::new();

        queue.connection.test_transaction::<(), Error, _>(|| {
            // Empty queue: fetch_and_touch yields Ok(None), not an error.
            let task = queue.fetch_and_touch(&None).unwrap();

            assert_eq!(None, task);

            Ok(())
        });
    }

    #[test]
    fn push_task_serializes_and_inserts_task() {
        let queue = Queue::new();

        queue.connection.test_transaction::<(), Error, _>(|| {
            let job = Job { number: 10 };
            let task = queue.push_task(&job).unwrap();

            // typetag adds a "type" discriminator next to the job fields.
            let mut m = serde_json::value::Map::new();
            m.insert(
                "number".to_string(),
                serde_json::value::Value::Number(10.into()),
            );
            m.insert(
                "type".to_string(),
                serde_json::value::Value::String("Job".to_string()),
            );

            assert_eq!(task.metadata, serde_json::value::Value::Object(m));

            Ok(())
        });
    }

    #[test]
    fn push_task_does_not_insert_the_same_task() {
        let queue = Queue::new();

        queue.connection.test_transaction::<(), Error, _>(|| {
            let job = Job { number: 10 };
            let task2 = queue.push_task(&job).unwrap();

            // Second push of an identical job returns the existing row.
            let task1 = queue.push_task(&job).unwrap();

            assert_eq!(task1.id, task2.id);

            Ok(())
        });
    }

    #[test]
    fn push_periodic_task() {
        let queue = Queue::new();

        queue.connection.test_transaction::<(), Error, _>(|| {
            let job = Job { number: 10 };
            let task = queue.push_periodic_task(&job, 60).unwrap();

            assert_eq!(task.period_in_seconds, 60);
            assert!(queue.find_periodic_task_by_id(task.id).is_some());

            Ok(())
        });
    }

    #[test]
    fn push_periodic_task_returns_existing_job() {
        let queue = Queue::new();

        queue.connection.test_transaction::<(), Error, _>(|| {
            let job = Job { number: 10 };
            let task1 = queue.push_periodic_task(&job, 60).unwrap();

            // Deduplication applies to periodic tasks as well.
            let task2 = queue.push_periodic_task(&job, 60).unwrap();

            assert_eq!(task1.id, task2.id);

            Ok(())
        });
    }

    #[test]
    fn fetch_periodic_tasks_fetches_periodic_task_without_scheduled_at() {
        let queue = Queue::new();

        queue.connection.test_transaction::<(), Error, _>(|| {
            let job = Job { number: 10 };
            // Pushed task has a NULL scheduled_at, so it is fetchable.
            let task = queue.push_periodic_task(&job, 60).unwrap();

            // This one is scheduled far outside the margin: must be skipped.
            let schedule_in_future = Utc::now() + Duration::hours(100);

            insert_periodic_job(
                serde_json::json!(true),
                schedule_in_future,
                100,
                &queue.connection,
            );

            let tasks = queue.fetch_periodic_tasks(100).unwrap();

            assert_eq!(tasks.len(), 1);
            assert_eq!(tasks[0].id, task.id);

            Ok(())
        });
    }

    #[test]
    fn schedule_next_task_execution() {
        let queue = Queue::new();

        queue.connection.test_transaction::<(), Error, _>(|| {
            let task =
                insert_periodic_job(serde_json::json!(true), Utc::now(), 100, &queue.connection);

            let updated_task = queue.schedule_next_task_execution(&task).unwrap();

            // Next run = previous scheduled_at + period (compared at
            // second precision to tolerate sub-second drift).
            let next_schedule = (task.scheduled_at.unwrap()
                + Duration::seconds(task.period_in_seconds.into()))
            .round_subsecs(0);

            assert_eq!(
                next_schedule,
                updated_task.scheduled_at.unwrap().round_subsecs(0)
            );

            Ok(())
        });
    }

    #[test]
    fn remove_all_periodic_tasks() {
        let queue = Queue::new();

        queue.connection.test_transaction::<(), Error, _>(|| {
            let task =
                insert_periodic_job(serde_json::json!(true), Utc::now(), 100, &queue.connection);

            let result = queue.remove_all_periodic_tasks().unwrap();

            assert_eq!(1, result);

            assert_eq!(None, queue.find_periodic_task_by_id(task.id));

            Ok(())
        });
    }

    #[test]
    fn remove_all_tasks() {
        let queue = Queue::new();

        queue.connection.test_transaction::<(), Error, _>(|| {
            let task = insert_job(serde_json::json!(true), Utc::now(), &queue.connection);
            let result = queue.remove_all_tasks().unwrap();

            assert_eq!(1, result);

            assert_eq!(None, queue.find_task_by_id(task.id));

            Ok(())
        });
    }

    #[test]
    fn fetch_periodic_tasks() {
        let queue = Queue::new();

        queue.connection.test_transaction::<(), Error, _>(|| {
            // Outside the 100-second margin: must not be returned.
            let schedule_in_future = Utc::now() + Duration::hours(100);

            insert_periodic_job(
                serde_json::json!(true),
                schedule_in_future,
                100,
                &queue.connection,
            );

            // Scheduled "now": inside the margin.
            let task =
                insert_periodic_job(serde_json::json!(true), Utc::now(), 100, &queue.connection);

            let tasks = queue.fetch_periodic_tasks(100).unwrap();

            assert_eq!(tasks.len(), 1);
            assert_eq!(tasks[0].id, task.id);

            Ok(())
        });
    }

    #[test]
    fn remove_task() {
        let queue = Queue::new();

        let new_task1 = NewTask {
            metadata: serde_json::json!(true),
            task_type: "common".to_string(),
        };

        let new_task2 = NewTask {
            metadata: serde_json::json!(true),
            task_type: "common".to_string(),
        };

        queue.connection.test_transaction::<(), Error, _>(|| {
            let task1 = queue.insert(&new_task1).unwrap();
            assert!(queue.find_task_by_id(task1.id).is_some());

            let task2 = queue.insert(&new_task2).unwrap();
            assert!(queue.find_task_by_id(task2.id).is_some());

            // Removal is by id: only the targeted task disappears.
            queue.remove_task(task1.id).unwrap();
            assert!(queue.find_task_by_id(task1.id).is_none());
            assert!(queue.find_task_by_id(task2.id).is_some());

            queue.remove_task(task2.id).unwrap();
            assert!(queue.find_task_by_id(task2.id).is_none());

            Ok(())
        });
    }

    #[test]
    fn remove_task_of_type() {
        let queue = Queue::new();

        let new_task1 = NewTask {
            metadata: serde_json::json!(true),
            task_type: "type1".to_string(),
        };

        let new_task2 = NewTask {
            metadata: serde_json::json!(true),
            task_type: "type2".to_string(),
        };

        queue.connection.test_transaction::<(), Error, _>(|| {
            let task1 = queue.insert(&new_task1).unwrap();
            assert!(queue.find_task_by_id(task1.id).is_some());

            let task2 = queue.insert(&new_task2).unwrap();
            assert!(queue.find_task_by_id(task2.id).is_some());

            // Only tasks of the matching type are deleted.
            queue.remove_tasks_of_type("type1").unwrap();
            assert!(queue.find_task_by_id(task1.id).is_none());
            assert!(queue.find_task_by_id(task2.id).is_some());

            Ok(())
        });
    }

    // this test is ignored because it commits data to the db
    #[test]
    #[ignore]
    fn fetch_task_locks_the_record() {
        let queue = Queue::new();
        let timestamp1 = Utc::now() - Duration::hours(40);

        let task1 = insert_job(
            serde_json::json!(Job { number: 12 }),
            timestamp1,
            &queue.connection,
        );

        let task1_id = task1.id;

        let timestamp2 = Utc::now() - Duration::hours(20);

        let task2 = insert_job(
            serde_json::json!(Job { number: 11 }),
            timestamp2,
            &queue.connection,
        );

        // A second connection fetches (and row-locks) the oldest task
        // inside an open transaction, then holds the lock for 5 seconds.
        let thread = std::thread::spawn(move || {
            let queue = Queue::new();

            queue.connection.transaction::<(), Error, _>(|| {
                let found_task = queue.fetch_task(&None).unwrap();

                assert_eq!(found_task.id, task1.id);

                std::thread::sleep(std::time::Duration::from_millis(5000));

                Ok(())
            })
        });

        std::thread::sleep(std::time::Duration::from_millis(1000));

        // While task1 is locked, SKIP LOCKED makes fetch return task2.
        let found_task = queue.fetch_task(&None).unwrap();

        assert_eq!(found_task.id, task2.id);

        let _result = thread.join();

        // returns unlocked record

        let found_task = queue.fetch_task(&None).unwrap();

        assert_eq!(found_task.id, task1_id);
    }

    // Minimal serializable job used as the Runnable under test.
    #[derive(Serialize, Deserialize)]
    struct Job {
        pub number: u16,
    }

    #[typetag::serde]
    impl Runnable for Job {
        fn run(&self, _connection: &PgConnection) -> Result<(), ExecutorError> {
            println!("the number is {}", self.number);

            Ok(())
        }
    }

    // Inserts a task row with an explicit created_at so ordering tests
    // can control which task is "oldest".
    fn insert_job(
        metadata: serde_json::Value,
        timestamp: DateTime<Utc>,
        connection: &PgConnection,
    ) -> Task {
        diesel::insert_into(fang_tasks::table)
            .values(&vec![(
                fang_tasks::metadata.eq(metadata),
                fang_tasks::created_at.eq(timestamp),
            )])
            .get_result::<Task>(connection)
            .unwrap()
    }

    // Inserts a periodic task row with an explicit scheduled_at.
    fn insert_periodic_job(
        metadata: serde_json::Value,
        timestamp: DateTime<Utc>,
        period_in_seconds: i32,
        connection: &PgConnection,
    ) -> PeriodicTask {
        diesel::insert_into(fang_periodic_tasks::table)
            .values(&vec![(
                fang_periodic_tasks::metadata.eq(metadata),
                fang_periodic_tasks::scheduled_at.eq(timestamp),
                fang_periodic_tasks::period_in_seconds.eq(period_in_seconds),
            )])
            .get_result::<PeriodicTask>(connection)
            .unwrap()
    }

    // Inserts a bare task row, relying on column defaults for everything
    // except metadata.
    fn insert_new_job(connection: &PgConnection) -> Task {
        diesel::insert_into(fang_tasks::table)
            .values(&vec![(fang_tasks::metadata.eq(serde_json::json!(true)),)])
            .get_result::<Task>(connection)
            .unwrap()
    }
}
|