2021-06-23 10:48:03 +00:00
|
|
|
use crate::executor::Runnable;
|
2021-06-06 08:44:46 +00:00
|
|
|
use crate::schema::fang_tasks;
|
2021-06-12 12:59:09 +00:00
|
|
|
use crate::schema::FangTaskState;
|
2021-06-06 08:44:46 +00:00
|
|
|
use chrono::{DateTime, Utc};
|
|
|
|
use diesel::pg::PgConnection;
|
|
|
|
use diesel::prelude::*;
|
|
|
|
use diesel::result::Error;
|
|
|
|
use dotenv::dotenv;
|
|
|
|
use std::env;
|
|
|
|
use uuid::Uuid;
|
|
|
|
|
2021-06-23 10:48:03 +00:00
|
|
|
/// A task row as read from the `fang_tasks` table.
#[derive(Queryable, Identifiable, Debug, Eq, PartialEq, Clone)]
#[table_name = "fang_tasks"]
pub struct Task {
    // Primary key.
    pub id: Uuid,
    // JSON-serialized `Runnable` job payload (see `Postgres::push_task`).
    pub metadata: serde_json::Value,
    // Set by `fail_task` when the task fails; `None` otherwise.
    pub error_message: Option<String>,
    // Lifecycle state (New / InProgress / Finished / Failed are used below).
    pub state: FangTaskState,
    // Lets workers fetch only tasks of a given type (see `fetch_task`).
    pub task_type: String,
    pub created_at: DateTime<Utc>,
    pub updated_at: DateTime<Utc>,
}
|
|
|
|
|
|
|
|
/// Insertable subset of `Task`: only the caller-supplied columns.
/// The database fills in id, state, timestamps, and error_message.
#[derive(Insertable)]
#[table_name = "fang_tasks"]
pub struct NewTask {
    pub metadata: serde_json::Value,
    pub task_type: String,
}
|
|
|
|
|
|
|
|
/// Postgres-backed task queue: a thin wrapper around a single connection.
pub struct Postgres {
    pub connection: PgConnection,
}
|
|
|
|
|
|
|
|
impl Postgres {
|
2021-07-03 12:18:41 +00:00
|
|
|
pub fn new() -> Self {
|
|
|
|
let connection = Self::pg_connection(None);
|
|
|
|
|
|
|
|
Self { connection }
|
|
|
|
}
|
|
|
|
|
|
|
|
pub fn new_with_url(database_url: String) -> Self {
|
|
|
|
let connection = Self::pg_connection(Some(database_url));
|
2021-06-06 08:44:46 +00:00
|
|
|
|
2021-06-24 09:58:02 +00:00
|
|
|
Self { connection }
|
2021-06-06 08:44:46 +00:00
|
|
|
}
|
|
|
|
|
2021-06-23 10:48:03 +00:00
|
|
|
pub fn push_task(&self, job: &dyn Runnable) -> Result<Task, Error> {
|
|
|
|
let json_job = serde_json::to_value(job).unwrap();
|
|
|
|
|
2021-07-03 04:23:05 +00:00
|
|
|
let new_task = NewTask {
|
|
|
|
metadata: json_job,
|
|
|
|
task_type: job.task_type(),
|
|
|
|
};
|
2021-06-23 10:48:03 +00:00
|
|
|
|
|
|
|
self.insert(&new_task)
|
|
|
|
}
|
|
|
|
|
2021-06-24 09:58:02 +00:00
|
|
|
pub fn enqueue_task(job: &dyn Runnable) -> Result<Task, Error> {
|
2021-07-03 12:18:41 +00:00
|
|
|
Self::new().push_task(job)
|
2021-06-24 09:58:02 +00:00
|
|
|
}
|
|
|
|
|
2021-06-06 08:44:46 +00:00
|
|
|
pub fn insert(&self, params: &NewTask) -> Result<Task, Error> {
|
|
|
|
diesel::insert_into(fang_tasks::table)
|
|
|
|
.values(params)
|
|
|
|
.get_result::<Task>(&self.connection)
|
|
|
|
}
|
2021-06-12 12:59:09 +00:00
|
|
|
|
2021-07-03 04:23:05 +00:00
|
|
|
pub fn fetch_task(&self, task_type: &Option<String>) -> Option<Task> {
|
|
|
|
match task_type {
|
|
|
|
None => self.fetch_any_task(),
|
|
|
|
Some(task_type_str) => self.fetch_task_of_type(task_type_str),
|
2021-06-12 12:59:09 +00:00
|
|
|
}
|
|
|
|
}
|
2021-06-20 08:10:02 +00:00
|
|
|
|
2021-07-03 04:23:05 +00:00
|
|
|
pub fn fetch_and_touch(&self, task_type: &Option<String>) -> Result<Option<Task>, Error> {
|
2021-06-20 08:58:03 +00:00
|
|
|
self.connection.transaction::<Option<Task>, Error, _>(|| {
|
2021-07-03 04:23:05 +00:00
|
|
|
let found_task = self.fetch_task(task_type);
|
2021-06-20 08:58:03 +00:00
|
|
|
|
2021-06-23 11:30:19 +00:00
|
|
|
if found_task.is_none() {
|
2021-06-20 08:58:03 +00:00
|
|
|
return Ok(None);
|
|
|
|
}
|
|
|
|
|
|
|
|
match self.start_processing_task(&found_task.unwrap()) {
|
|
|
|
Ok(updated_task) => Ok(Some(updated_task)),
|
|
|
|
Err(err) => Err(err),
|
|
|
|
}
|
|
|
|
})
|
|
|
|
}
|
|
|
|
|
2021-06-20 08:10:02 +00:00
|
|
|
pub fn find_task_by_id(&self, id: Uuid) -> Option<Task> {
|
2021-07-03 12:18:41 +00:00
|
|
|
fang_tasks::table
|
2021-06-20 08:10:02 +00:00
|
|
|
.filter(fang_tasks::id.eq(id))
|
|
|
|
.first::<Task>(&self.connection)
|
2021-07-03 12:18:41 +00:00
|
|
|
.ok()
|
|
|
|
}
|
|
|
|
|
|
|
|
pub fn remove_task(&self, id: Uuid) -> Result<usize, Error> {
|
|
|
|
let query = fang_tasks::table.filter(fang_tasks::id.eq(id));
|
|
|
|
|
|
|
|
diesel::delete(query).execute(&self.connection)
|
2021-06-20 08:10:02 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
pub fn finish_task(&self, task: &Task) -> Result<Task, Error> {
|
|
|
|
diesel::update(task)
|
|
|
|
.set((
|
|
|
|
fang_tasks::state.eq(FangTaskState::Finished),
|
|
|
|
fang_tasks::updated_at.eq(Self::current_time()),
|
|
|
|
))
|
|
|
|
.get_result::<Task>(&self.connection)
|
|
|
|
}
|
|
|
|
|
2021-06-20 08:58:03 +00:00
|
|
|
pub fn start_processing_task(&self, task: &Task) -> Result<Task, Error> {
|
|
|
|
diesel::update(task)
|
|
|
|
.set((
|
|
|
|
fang_tasks::state.eq(FangTaskState::InProgress),
|
|
|
|
fang_tasks::updated_at.eq(Self::current_time()),
|
|
|
|
))
|
|
|
|
.get_result::<Task>(&self.connection)
|
|
|
|
}
|
|
|
|
|
2021-06-20 08:10:02 +00:00
|
|
|
pub fn fail_task(&self, task: &Task, error: String) -> Result<Task, Error> {
|
|
|
|
diesel::update(task)
|
|
|
|
.set((
|
|
|
|
fang_tasks::state.eq(FangTaskState::Failed),
|
|
|
|
fang_tasks::error_message.eq(error),
|
|
|
|
fang_tasks::updated_at.eq(Self::current_time()),
|
|
|
|
))
|
|
|
|
.get_result::<Task>(&self.connection)
|
|
|
|
}
|
|
|
|
|
2021-06-24 09:58:02 +00:00
|
|
|
fn current_time() -> DateTime<Utc> {
|
2021-06-20 08:10:02 +00:00
|
|
|
Utc::now()
|
|
|
|
}
|
2021-06-24 09:58:02 +00:00
|
|
|
|
|
|
|
fn pg_connection(database_url: Option<String>) -> PgConnection {
|
|
|
|
dotenv().ok();
|
|
|
|
|
|
|
|
let url = match database_url {
|
|
|
|
Some(string_url) => string_url,
|
|
|
|
None => env::var("DATABASE_URL").expect("DATABASE_URL must be set"),
|
|
|
|
};
|
|
|
|
|
|
|
|
PgConnection::establish(&url).unwrap_or_else(|_| panic!("Error connecting to {}", url))
|
|
|
|
}
|
2021-07-03 04:23:05 +00:00
|
|
|
|
|
|
|
fn fetch_any_task(&self) -> Option<Task> {
|
2021-07-03 12:18:41 +00:00
|
|
|
fang_tasks::table
|
2021-07-03 04:23:05 +00:00
|
|
|
.order(fang_tasks::created_at.asc())
|
|
|
|
.limit(1)
|
|
|
|
.filter(fang_tasks::state.eq(FangTaskState::New))
|
|
|
|
.for_update()
|
|
|
|
.skip_locked()
|
|
|
|
.get_result::<Task>(&self.connection)
|
2021-07-03 12:18:41 +00:00
|
|
|
.ok()
|
2021-07-03 04:23:05 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
fn fetch_task_of_type(&self, task_type: &String) -> Option<Task> {
|
2021-07-03 12:18:41 +00:00
|
|
|
fang_tasks::table
|
2021-07-03 04:23:05 +00:00
|
|
|
.order(fang_tasks::created_at.asc())
|
|
|
|
.limit(1)
|
|
|
|
.filter(fang_tasks::state.eq(FangTaskState::New))
|
|
|
|
.filter(fang_tasks::task_type.eq(task_type))
|
|
|
|
.for_update()
|
|
|
|
.skip_locked()
|
|
|
|
.get_result::<Task>(&self.connection)
|
2021-07-03 12:18:41 +00:00
|
|
|
.ok()
|
2021-07-03 04:23:05 +00:00
|
|
|
}
|
2021-06-12 12:59:09 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
#[cfg(test)]
mod postgres_tests {
    // NOTE(review): all tests here need a live Postgres reachable through
    // DATABASE_URL. Most wrap their work in `test_transaction`, which rolls
    // back automatically; `fetch_task_locks_the_record` is #[ignore]d
    // because it commits real rows.
    use super::NewTask;
    use super::Postgres;
    use super::Task;
    use crate::executor::Error as ExecutorError;
    use crate::executor::Runnable;
    use crate::schema::fang_tasks;
    use crate::schema::FangTaskState;
    use chrono::{DateTime, Duration, Utc};
    use diesel::connection::Connection;
    use diesel::prelude::*;
    use diesel::result::Error;
    use serde::{Deserialize, Serialize};

    #[test]
    fn insert_inserts_task() {
        let postgres = Postgres::new();

        let new_task = NewTask {
            metadata: serde_json::json!(true),
            task_type: "common".to_string(),
        };

        // Runs inside a transaction that is always rolled back.
        let result = postgres
            .connection
            .test_transaction::<Task, Error, _>(|| postgres.insert(&new_task));

        // Fresh tasks start in the New state with no error recorded.
        assert_eq!(result.state, FangTaskState::New);
        assert_eq!(result.error_message, None);
    }

    #[test]
    fn fetch_task_fetches_the_oldest_task() {
        let postgres = Postgres::new();

        postgres.connection.test_transaction::<(), Error, _>(|| {
            // Insert two tasks with distinct created_at timestamps; the
            // 40-hour-old one must be returned first.
            let timestamp1 = Utc::now() - Duration::hours(40);

            let task1 = insert_job(serde_json::json!(true), timestamp1, &postgres.connection);

            let timestamp2 = Utc::now() - Duration::hours(20);

            insert_job(serde_json::json!(false), timestamp2, &postgres.connection);

            let found_task = postgres.fetch_task(&None).unwrap();

            assert_eq!(found_task.id, task1.id);

            Ok(())
        });
    }

    #[test]
    fn finish_task_updates_state_field() {
        let postgres = Postgres::new();

        postgres.connection.test_transaction::<(), Error, _>(|| {
            let task = insert_new_job(&postgres.connection);

            let updated_task = postgres.finish_task(&task).unwrap();

            assert_eq!(FangTaskState::Finished, updated_task.state);

            Ok(())
        });
    }

    #[test]
    fn fail_task_updates_state_field_and_sets_error_message() {
        let postgres = Postgres::new();

        postgres.connection.test_transaction::<(), Error, _>(|| {
            let task = insert_new_job(&postgres.connection);
            let error = "Failed".to_string();

            let updated_task = postgres.fail_task(&task, error.clone()).unwrap();

            // Both the state and the stored error message change.
            assert_eq!(FangTaskState::Failed, updated_task.state);
            assert_eq!(error, updated_task.error_message.unwrap());

            Ok(())
        });
    }

    #[test]
    fn fetch_and_touch_updates_state() {
        let postgres = Postgres::new();

        postgres.connection.test_transaction::<(), Error, _>(|| {
            let _task = insert_new_job(&postgres.connection);

            let updated_task = postgres.fetch_and_touch(&None).unwrap().unwrap();

            // fetch_and_touch flips the fetched task to InProgress.
            assert_eq!(FangTaskState::InProgress, updated_task.state);

            Ok(())
        });
    }

    #[test]
    fn fetch_and_touch_returns_none() {
        let postgres = Postgres::new();

        postgres.connection.test_transaction::<(), Error, _>(|| {
            // No tasks inserted, so there is nothing to fetch.
            let task = postgres.fetch_and_touch(&None).unwrap();

            assert_eq!(None, task);

            Ok(())
        });
    }

    #[test]
    fn push_task_serializes_and_inserts_task() {
        let postgres = Postgres::new();

        postgres.connection.test_transaction::<(), Error, _>(|| {
            let job = Job { number: 10 };
            let task = postgres.push_task(&job).unwrap();

            // Build the JSON we expect typetag+serde to produce: the job's
            // fields plus a "type" tag holding the struct name.
            let mut m = serde_json::value::Map::new();
            m.insert(
                "number".to_string(),
                serde_json::value::Value::Number(10.into()),
            );
            m.insert(
                "type".to_string(),
                serde_json::value::Value::String("Job".to_string()),
            );

            assert_eq!(task.metadata, serde_json::value::Value::Object(m));

            Ok(())
        });
    }

    #[test]
    fn remove_task() {
        let postgres = Postgres::new();

        let new_task1 = NewTask {
            metadata: serde_json::json!(true),
            task_type: "common".to_string(),
        };

        let new_task2 = NewTask {
            metadata: serde_json::json!(true),
            task_type: "common".to_string(),
        };

        postgres.connection.test_transaction::<(), Error, _>(|| {
            let task1 = postgres.insert(&new_task1).unwrap();
            assert!(postgres.find_task_by_id(task1.id).is_some());

            let task2 = postgres.insert(&new_task2).unwrap();
            assert!(postgres.find_task_by_id(task2.id).is_some());

            // Removing one task must not affect the other.
            postgres.remove_task(task1.id).unwrap();
            assert!(postgres.find_task_by_id(task1.id).is_none());
            assert!(postgres.find_task_by_id(task2.id).is_some());

            postgres.remove_task(task2.id).unwrap();
            assert!(postgres.find_task_by_id(task2.id).is_none());

            Ok(())
        });
    }

    // this test is ignored because it commits data to the db
    #[test]
    #[ignore]
    fn fetch_task_locks_the_record() {
        let postgres = Postgres::new();

        let timestamp1 = Utc::now() - Duration::hours(40);

        let task1 = insert_job(serde_json::json!(true), timestamp1, &postgres.connection);

        let task1_id = task1.id;

        let timestamp2 = Utc::now() - Duration::hours(20);

        let task2 = insert_job(serde_json::json!(false), timestamp2, &postgres.connection);

        // A second connection grabs the oldest task and holds the row lock
        // open (sleeps inside its transaction).
        let thread = std::thread::spawn(move || {
            let postgres = Postgres::new();

            postgres.connection.transaction::<(), Error, _>(|| {
                let found_task = postgres.fetch_task(&None).unwrap();

                assert_eq!(found_task.id, task1.id);

                std::thread::sleep(std::time::Duration::from_millis(5000));

                Ok(())
            })
        });

        std::thread::sleep(std::time::Duration::from_millis(1000));

        // While task1 is locked, SKIP LOCKED makes fetch return task2.
        let found_task = postgres.fetch_task(&None).unwrap();

        assert_eq!(found_task.id, task2.id);

        let _result = thread.join();

        // returns unlocked record
        let found_task = postgres.fetch_task(&None).unwrap();

        assert_eq!(found_task.id, task1_id);
    }

    // Minimal Runnable used by push_task tests.
    #[derive(Serialize, Deserialize)]
    struct Job {
        pub number: u16,
    }

    #[typetag::serde]
    impl Runnable for Job {
        fn run(&self) -> Result<(), ExecutorError> {
            println!("the number is {}", self.number);

            Ok(())
        }
    }

    // Inserts a task row with an explicit created_at so ordering tests can
    // control which task counts as "oldest".
    fn insert_job(
        metadata: serde_json::Value,
        timestamp: DateTime<Utc>,
        connection: &PgConnection,
    ) -> Task {
        diesel::insert_into(fang_tasks::table)
            .values(&vec![(
                fang_tasks::metadata.eq(metadata),
                fang_tasks::created_at.eq(timestamp),
            )])
            .get_result::<Task>(connection)
            .unwrap()
    }

    // Inserts a task row relying on column defaults for everything but
    // the metadata.
    fn insert_new_job(connection: &PgConnection) -> Task {
        diesel::insert_into(fang_tasks::table)
            .values(&vec![(fang_tasks::metadata.eq(serde_json::json!(true)),)])
            .get_result::<Task>(connection)
            .unwrap()
    }
}
|