mirror of
https://github.com/LemmyNet/lemmy.git
synced 2024-05-17 01:12:39 +00:00
4ba6221e04
* stuff * stuff including batch_upsert function * stuff * do things * stuff * different timestamps * stuff * Revert changes to comment.rs * Update comment.rs * Update comment.rs * Update post_view.rs * Update utils.rs * Update up.sql * Update up.sql * Update down.sql * Update up.sql * Update main.rs * use anyhow macro * Create down.sql * Create up.sql * Create replaceable_schema.sql * Update replaceable_schema.sql * Update replaceable_schema.sql * Update replaceable_schema.sql * Update utils.rs * Update .woodpecker.yml * Update sql_format_check.sh * Update replaceable_schema.sql * Update replaceable_schema.sql * Update replaceable_schema.sql * Update replaceable_schema.sql * Update replaceable_schema.sql * Update replaceable_schema.sql * Update replaceable_schema.sql * Create dump_schema.sh * Update start_dev_db.sh * Update replaceable_schema.sql * Update replaceable_schema.sql * Update replaceable_schema.sql * Update replaceable_schema.sql * Update replaceable_schema.sql * Update replaceable_schema.sql * Update replaceable_schema.sql * Update replaceable_schema.sql * Update replaceable_schema.sql * Update replaceable_schema.sql * Update replaceable_schema.sql * stuff * Update replaceable_schema.sql * Update .pg_format * fmt * stuff * stuff (#21) * Update replaceable_schema.sql * Update up.sql * Update replaceable_schema.sql * fmt * update cargo.lock * stuff * Update replaceable_schema.sql * Remove truncate trigger because truncate is already restricted by foreign keys * Update replaceable_schema.sql * fix some things * Update replaceable_schema.sql * Update replaceable_schema.sql * Update .woodpecker.yml * stuff * fix TG_OP * Psql env vars * try to fix combine_transition_tables parse error * Revert "try to fix combine_transition_tables parse error" This reverts commit75d00a4626
. * refactor combine_transition_tables * try to fix create_triggers * fix some things * try to fix combined_transition_tables * fix sql errors * update comment count in post trigger * fmt * Revert "fmt" This reverts commit a5bcd0834b
. * Revert "update comment count in post trigger" This reverts commit 0066a4b42b
. * fix everything * Update replaceable_schema.sql * actually fix everything * refactor create_triggers * fix * add semicolons * add is_counted function and fix incorrect bool operator in update_comment_count_from_post * refactor comment trigger * refactor post trigger * fix * Delete crates/db_schema/src/utils/series.rs * subscribers_local * edit migrations * move migrations * remove utils::series module declaration * fix everything * stuff * Move sql to schema_setup dir * utils.sql * delete .pg_format * Update .woodpecker.yml * Update sql_format_check.sh * Update .woodpecker.yml * Merge remote-tracking branch 'upstream/main' into bliss * fmt * Create main.rs * Update lib.rs * Update main.rs * Update .woodpecker.yml * Update main.rs * Update Cargo.toml * Update .woodpecker.yml * Update .woodpecker.yml * Update triggers.sql * YAY * Update mod.rs * Update Cargo.toml * a * Update Cargo.toml * Update Cargo.toml * Delete crates/db_schema/src/main.rs * Update Cargo.toml * Update .woodpecker.yml * Update .woodpecker.yml * Update .woodpecker.yml * Update .woodpecker.yml * Update .woodpecker.yml * Update .woodpecker.yml * Update .woodpecker.yml * Update utils.sql * Update utils.sql * Update .woodpecker.yml * Update .woodpecker.yml * Update .woodpecker.yml * Update .woodpecker.yml * Update .woodpecker.yml * Update down.sql * Update up.sql * Update triggers.sql * Update .woodpecker.yml * Update .woodpecker.yml * Update triggers.sql * Update down.sql * Update .woodpecker.yml * Update Cargo.toml * Update .woodpecker.yml * Update Cargo.toml * Update .woodpecker.yml * Update .woodpecker.yml * Update .woodpecker.yml * Update .woodpecker.yml * Update .woodpecker.yml * Update .woodpecker.yml * Update .woodpecker.yml * Update .woodpecker.yml * Update .woodpecker.yml * Update .woodpecker.yml * Update .woodpecker.yml * Update .woodpecker.yml * Update .woodpecker.yml * Update .woodpecker.yml * Update .woodpecker.yml * Update mod.rs * Update Cargo.toml * Update mod.rs * make 
dump_schema.sh executable * fix dump_schema.sh * defer * diff dumps * fmt * Update utils.sql * Update .woodpecker.yml * use correct version for pg_dump * Update .woodpecker.yml * Update .woodpecker.yml * change migration date * atomic site_aggregates insert * temporarily repeat tests in CI * drop r schema in CI migration check * show ReceivedActivity::create error * move check_diesel_migration CI step * Update .woodpecker.yml * Update scheduled_tasks.rs * Update scheduled_tasks.rs * update cargo.lock * move sql files * move rank functions * filter post_aggregates update * fmt * cargo fmt * replace post_id with id * update cargo.lock * avoid locking rows that need no change in up.sql * only run replaceable_schema if migrations were run * debug ci test failure * make replaceable_schema work in CI * Update .woodpecker.yml * remove println * Use migration revert and git checkout * Update schema_setup.rs * Fix * Update schema_setup.rs * Update schema_setup.rs * Update .woodpecker.yml --------- Co-authored-by: Nutomic <me@nutomic.com> Co-authored-by: Dessalines <dessalines@users.noreply.github.com>
131 lines
3.6 KiB
Rust
131 lines
3.6 KiB
Rust
use crate::{
|
|
diesel::OptionalExtension,
|
|
newtypes::{ActivityId, DbUrl},
|
|
source::activity::{ReceivedActivity, SentActivity, SentActivityForm},
|
|
utils::{get_conn, DbPool},
|
|
};
|
|
use diesel::{
|
|
dsl::insert_into,
|
|
result::{DatabaseErrorKind, Error, Error::DatabaseError},
|
|
ExpressionMethods,
|
|
QueryDsl,
|
|
};
|
|
use diesel_async::RunQueryDsl;
|
|
|
|
impl SentActivity {
|
|
pub async fn create(pool: &mut DbPool<'_>, form: SentActivityForm) -> Result<Self, Error> {
|
|
use crate::schema::sent_activity::dsl::sent_activity;
|
|
let conn = &mut get_conn(pool).await?;
|
|
insert_into(sent_activity)
|
|
.values(form)
|
|
.get_result::<Self>(conn)
|
|
.await
|
|
}
|
|
|
|
pub async fn read_from_apub_id(
|
|
pool: &mut DbPool<'_>,
|
|
object_id: &DbUrl,
|
|
) -> Result<Option<Self>, Error> {
|
|
use crate::schema::sent_activity::dsl::{ap_id, sent_activity};
|
|
let conn = &mut get_conn(pool).await?;
|
|
sent_activity
|
|
.filter(ap_id.eq(object_id))
|
|
.first(conn)
|
|
.await
|
|
.optional()
|
|
}
|
|
pub async fn read(pool: &mut DbPool<'_>, object_id: ActivityId) -> Result<Option<Self>, Error> {
|
|
use crate::schema::sent_activity::dsl::sent_activity;
|
|
let conn = &mut get_conn(pool).await?;
|
|
sent_activity.find(object_id).first(conn).await.optional()
|
|
}
|
|
}
|
|
|
|
impl ReceivedActivity {
|
|
pub async fn create(pool: &mut DbPool<'_>, ap_id_: &DbUrl) -> Result<(), Error> {
|
|
use crate::schema::received_activity::dsl::{ap_id, received_activity};
|
|
let conn = &mut get_conn(pool).await?;
|
|
let rows_affected = insert_into(received_activity)
|
|
.values(ap_id.eq(ap_id_))
|
|
.on_conflict_do_nothing()
|
|
.execute(conn)
|
|
.await
|
|
.optional()?;
|
|
if rows_affected == Some(1) {
|
|
// new activity inserted successfully
|
|
Ok(())
|
|
} else {
|
|
// duplicate activity
|
|
Err(DatabaseError(
|
|
DatabaseErrorKind::UniqueViolation,
|
|
Box::<String>::default(),
|
|
))
|
|
}
|
|
}
|
|
}
|
|
|
|
#[cfg(test)]
#[allow(clippy::unwrap_used)]
#[allow(clippy::indexing_slicing)]
mod tests {

  use super::*;
  use crate::{source::activity::ActorType, utils::build_db_pool_for_tests};
  use pretty_assertions::assert_eq;
  use serde_json::json;
  use serial_test::serial;
  use url::Url;

  #[tokio::test]
  #[serial]
  async fn receive_activity_duplicate() {
    let db_pool = &build_db_pool_for_tests().await;
    let db_pool = &mut db_pool.into();
    let activity_id: DbUrl = Url::parse("http://example.com/activity/531")
      .unwrap()
      .into();

    // The first insert of a given apub id must succeed...
    ReceivedActivity::create(db_pool, &activity_id).await.unwrap();
    // ...and a second insert of the same id must be rejected.
    ReceivedActivity::create(db_pool, &activity_id).await.unwrap_err();
  }

  #[tokio::test]
  #[serial]
  async fn sent_activity_write_read() {
    let db_pool = &build_db_pool_for_tests().await;
    let db_pool = &mut db_pool.into();
    let object_id: DbUrl = Url::parse("http://example.com/activity/412")
      .unwrap()
      .into();
    let payload = json!({
      "key1": "0xF9BA143B95FF6D82",
      "key2": "42",
    });
    let sensitive = false;

    let form = SentActivityForm {
      ap_id: object_id.clone(),
      data: payload.clone(),
      sensitive,
      actor_apub_id: Url::parse("http://example.com/u/exampleuser")
        .unwrap()
        .into(),
      actor_type: ActorType::Person,
      send_all_instances: false,
      send_community_followers_of: None,
      send_inboxes: vec![],
    };

    SentActivity::create(db_pool, form).await.unwrap();

    // Reading back by apub id must return the row we just wrote.
    let stored = SentActivity::read_from_apub_id(db_pool, &object_id)
      .await
      .unwrap()
      .unwrap();
    assert_eq!(stored.ap_id, object_id);
    assert_eq!(stored.data, payload);
    assert_eq!(stored.sensitive, sensitive);
  }
}
|