Merge pull request #1180 from LemmyNet/no_conflict_triggers

No send blocked and no conflict triggers
Dessalines 2020-10-08 14:51:04 -04:00 committed by GitHub
commit 8d0580461b
17 changed files with 565 additions and 322 deletions

View file

@ -29,7 +29,7 @@ services:
      - ./volumes/pictrs_alpha:/mnt
  lemmy-alpha-ui:
-    image: dessalines/lemmy-ui:v0.0.14
+    image: dessalines/lemmy-ui:dev
    environment:
      - LEMMY_INTERNAL_HOST=lemmy-alpha:8541
      - LEMMY_EXTERNAL_HOST=localhost:8541
@ -68,7 +68,7 @@ services:
      - ./volumes/postgres_alpha:/var/lib/postgresql/data
  lemmy-beta-ui:
-    image: dessalines/lemmy-ui:v0.0.14
+    image: dessalines/lemmy-ui:dev
    environment:
      - LEMMY_INTERNAL_HOST=lemmy-beta:8551
      - LEMMY_EXTERNAL_HOST=localhost:8551
@ -107,7 +107,7 @@ services:
      - ./volumes/postgres_beta:/var/lib/postgresql/data
  lemmy-gamma-ui:
-    image: dessalines/lemmy-ui:v0.0.14
+    image: dessalines/lemmy-ui:dev
    environment:
      - LEMMY_INTERNAL_HOST=lemmy-gamma:8561
      - LEMMY_EXTERNAL_HOST=localhost:8561
@ -147,7 +147,7 @@ services:
  # An instance with only an allowlist for beta
  lemmy-delta-ui:
-    image: dessalines/lemmy-ui:v0.0.14
+    image: dessalines/lemmy-ui:dev
    environment:
      - LEMMY_INTERNAL_HOST=lemmy-delta:8571
      - LEMMY_EXTERNAL_HOST=localhost:8571
@ -187,7 +187,7 @@ services:
  # An instance who has a blocklist, with lemmy-alpha blocked
  lemmy-epsilon-ui:
-    image: dessalines/lemmy-ui:v0.0.14
+    image: dessalines/lemmy-ui:dev
    environment:
      - LEMMY_INTERNAL_HOST=lemmy-epsilon:8581
      - LEMMY_EXTERNAL_HOST=localhost:8581

View file

@ -1,49 +0,0 @@
use crate::{activity_queue::send_activity, community::do_announce, insert_activity};
use activitystreams::{
base::{Extends, ExtendsExt},
object::AsObject,
};
use lemmy_db::{community::Community, user::User_};
use lemmy_utils::{settings::Settings, LemmyError};
use lemmy_websocket::LemmyContext;
use serde::{export::fmt::Debug, Serialize};
use url::{ParseError, Url};
use uuid::Uuid;
pub async fn send_activity_to_community<T, Kind>(
creator: &User_,
community: &Community,
to: Vec<Url>,
activity: T,
context: &LemmyContext,
) -> Result<(), LemmyError>
where
T: AsObject<Kind> + Extends<Kind> + Serialize + Debug + Send + Clone + 'static,
Kind: Serialize,
<T as Extends<Kind>>::Error: From<serde_json::Error> + Send + Sync + 'static,
{
// TODO: looks like call this sometimes with activity, and sometimes with any_base
insert_activity(creator.id, activity.clone(), true, context.pool()).await?;
// if this is a local community, we need to do an announce from the community instead
if community.local {
do_announce(activity.into_any_base()?, &community, creator, context).await?;
} else {
send_activity(context.activity_queue(), activity, creator, to)?;
}
Ok(())
}
pub(in crate) fn generate_activity_id<T>(kind: T) -> Result<Url, ParseError>
where
T: ToString,
{
let id = format!(
"{}/activities/{}/{}",
Settings::get().get_protocol_and_hostname(),
kind.to_string().to_lowercase(),
Uuid::new_v4()
);
Url::parse(&id)
}

View file

@ -1,6 +1,12 @@
-use crate::{check_is_apub_id_valid, extensions::signatures::sign_and_send, ActorType};
+use crate::{
+  check_is_apub_id_valid,
+  community::do_announce,
+  extensions::signatures::sign_and_send,
+  insert_activity,
+  ActorType,
+};
use activitystreams::{
-  base::{Extends, ExtendsExt},
+  base::{BaseExt, Extends, ExtendsExt},
  object::AsObject,
};
use anyhow::{anyhow, Context, Error};
@ -13,45 +19,198 @@ use background_jobs::{
  QueueHandle,
  WorkerConfig,
};
+use itertools::Itertools;
+use lemmy_db::{community::Community, user::User_, DbPool};
use lemmy_utils::{location_info, settings::Settings, LemmyError};
-use log::warn;
+use lemmy_websocket::LemmyContext;
+use log::{debug, warn};
use reqwest::Client;
-use serde::{Deserialize, Serialize};
+use serde::{export::fmt::Debug, Deserialize, Serialize};
use std::{collections::BTreeMap, future::Future, pin::Pin};
use url::Url;

-pub fn send_activity<T, Kind>(
+pub async fn send_activity_single_dest<T, Kind>(
activity: T,
creator: &dyn ActorType,
to: Url,
context: &LemmyContext,
) -> Result<(), LemmyError>
where
T: AsObject<Kind> + Extends<Kind> + Debug + BaseExt<Kind>,
Kind: Serialize,
<T as Extends<Kind>>::Error: From<serde_json::Error> + Send + Sync + 'static,
{
if check_is_apub_id_valid(&to).is_ok() {
debug!("Sending activity {:?} to {}", &activity.id_unchecked(), &to);
send_activity_internal(
context.activity_queue(),
activity,
creator,
vec![to],
context.pool(),
true,
)
.await?;
}
Ok(())
}
pub async fn send_to_community_followers<T, Kind>(
activity: T,
community: &Community,
context: &LemmyContext,
sender_shared_inbox: Option<Url>,
) -> Result<(), LemmyError>
where
T: AsObject<Kind> + Extends<Kind> + Debug + BaseExt<Kind>,
Kind: Serialize,
<T as Extends<Kind>>::Error: From<serde_json::Error> + Send + Sync + 'static,
{
// dont send to the local instance, nor to the instance where the activity originally came from,
// because that would result in a database error (same data inserted twice)
let community_shared_inbox = community.get_shared_inbox_url()?;
let to: Vec<Url> = community
.get_follower_inboxes(context.pool())
.await?
.iter()
.filter(|inbox| Some(inbox) != sender_shared_inbox.as_ref().as_ref())
.filter(|inbox| inbox != &&community_shared_inbox)
.filter(|inbox| check_is_apub_id_valid(inbox).is_ok())
.unique()
.map(|inbox| inbox.to_owned())
.collect();
debug!(
"Sending activity {:?} to followers of {}",
&activity.id_unchecked(),
&community.actor_id
);
send_activity_internal(
context.activity_queue(),
activity,
community,
to,
context.pool(),
true,
)
.await?;
Ok(())
}
pub async fn send_to_community<T, Kind>(
creator: &User_,
community: &Community,
activity: T,
context: &LemmyContext,
) -> Result<(), LemmyError>
where
T: AsObject<Kind> + Extends<Kind> + Debug + BaseExt<Kind>,
Kind: Serialize,
<T as Extends<Kind>>::Error: From<serde_json::Error> + Send + Sync + 'static,
{
// if this is a local community, we need to do an announce from the community instead
if community.local {
do_announce(activity.into_any_base()?, &community, creator, context).await?;
} else {
let inbox = community.get_shared_inbox_url()?;
check_is_apub_id_valid(&inbox)?;
debug!(
"Sending activity {:?} to community {}",
&activity.id_unchecked(),
&community.actor_id
);
send_activity_internal(
context.activity_queue(),
activity,
creator,
vec![inbox],
context.pool(),
true,
)
.await?;
}
Ok(())
}
pub async fn send_comment_mentions<T, Kind>(
creator: &User_,
mentions: Vec<Url>,
activity: T,
context: &LemmyContext,
) -> Result<(), LemmyError>
where
T: AsObject<Kind> + Extends<Kind> + Debug + BaseExt<Kind>,
Kind: Serialize,
<T as Extends<Kind>>::Error: From<serde_json::Error> + Send + Sync + 'static,
{
debug!(
"Sending mentions activity {:?} to {:?}",
&activity.id_unchecked(),
&mentions
);
let mentions = mentions
.iter()
.filter(|inbox| check_is_apub_id_valid(inbox).is_ok())
.map(|i| i.to_owned())
.collect();
send_activity_internal(
context.activity_queue(),
activity,
creator,
mentions,
context.pool(),
false, // Don't create a new DB row
)
.await?;
Ok(())
}
/// Asynchronously sends the given `activity` from `actor` to every inbox URL in `to`.
///
/// The caller of this function needs to remove any blocked domains from `to`,
/// using `check_is_apub_id_valid()`.
async fn send_activity_internal<T, Kind>(
  activity_sender: &QueueHandle,
  activity: T,
  actor: &dyn ActorType,
  to: Vec<Url>,
+  pool: &DbPool,
+  insert_into_db: bool,
) -> Result<(), LemmyError>
where
-  T: AsObject<Kind>,
-  T: Extends<Kind>,
+  T: AsObject<Kind> + Extends<Kind> + Debug,
  Kind: Serialize,
  <T as Extends<Kind>>::Error: From<serde_json::Error> + Send + Sync + 'static,
{
-  if !Settings::get().federation.enabled {
+  if !Settings::get().federation.enabled || to.is_empty() {
    return Ok(());
  }
-  let activity = activity.into_any_base()?;
-  let serialised_activity = serde_json::to_string(&activity)?;
  for to_url in &to {
    check_is_apub_id_valid(&to_url)?;
  }
-  // TODO: it would make sense to create a separate task for each destination server
-  let message = SendActivityTask {
-    activity: serialised_activity,
-    to,
-    actor_id: actor.actor_id()?,
-    private_key: actor.private_key().context(location_info!())?,
-  };
-  activity_sender.queue::<SendActivityTask>(message)?;
+  let activity = activity.into_any_base()?;
+  let serialised_activity = serde_json::to_string(&activity)?;
+  // This is necessary because send_comment and send_comment_mentions
+  // might send the same ap_id
+  if insert_into_db {
+    insert_activity(actor.user_id(), activity.clone(), true, pool).await?;
+  }
+  for t in to {
+    let message = SendActivityTask {
+      activity: serialised_activity.to_owned(),
+      to: t,
+      actor_id: actor.actor_id()?,
+      private_key: actor.private_key().context(location_info!())?,
+    };
+    activity_sender.queue::<SendActivityTask>(message)?;
+  }
  Ok(())
}
@ -59,7 +218,7 @@ where
#[derive(Clone, Debug, Deserialize, Serialize)]
struct SendActivityTask {
  activity: String,
-  to: Vec<Url>,
+  to: Url,
  actor_id: Url,
  private_key: String,
}
@ -74,27 +233,25 @@ impl ActixJob for SendActivityTask {
  fn run(self, state: Self::State) -> Self::Future {
    Box::pin(async move {
-      for to_url in &self.to {
-        let mut headers = BTreeMap::<String, String>::new();
-        headers.insert("Content-Type".into(), "application/json".into());
-        let result = sign_and_send(
-          &state.client,
-          headers,
-          to_url,
-          self.activity.clone(),
-          &self.actor_id,
-          self.private_key.to_owned(),
-        )
-        .await;
-        if let Err(e) = result {
-          warn!("{}", e);
-          return Err(anyhow!(
-            "Failed to send activity {} to {}",
-            &self.activity,
-            to_url
-          ));
-        }
-      }
+      let mut headers = BTreeMap::<String, String>::new();
+      headers.insert("Content-Type".into(), "application/json".into());
+      let result = sign_and_send(
+        &state.client,
+        headers,
+        &self.to,
+        self.activity.clone(),
+        &self.actor_id,
+        self.private_key.to_owned(),
+      )
+      .await;
+      if let Err(e) = result {
+        warn!("{}", e);
+        return Err(anyhow!(
+          "Failed to send activity {} to {}",
+          &self.activity,
+          self.to
+        ));
+      }
      Ok(())
    })
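The core of the "no send blocked" half of this PR is visible in the hunks above: `SendActivityTask` now holds a single `to: Url`, and `send_activity_internal` queues one task per destination, while the new `send_*` wrappers drop blocked inboxes with `check_is_apub_id_valid` before anything is queued. As a minimal, hypothetical sketch of that idea outside the diff (plain `String` hosts, a toy `QueuedTask` instead of the real `background_jobs` task, and an `is_apub_id_valid` stand-in; the real code errors on invalid ids in `send_activity_internal` and filters in the callers, which the sketch folds into one step):

```rust
// Hypothetical stand-in for check_is_apub_id_valid(): reject inboxes on blocked hosts.
fn is_apub_id_valid(inbox: &str, blocked: &[&str]) -> bool {
  !blocked.iter().any(|b| inbox.contains(*b))
}

// One queued job per destination, mirroring the new single-`to` SendActivityTask.
#[derive(Debug)]
struct QueuedTask {
  activity: String,
  to: String,
}

fn queue_per_destination(activity: &str, inboxes: &[&str], blocked: &[&str]) -> Vec<QueuedTask> {
  inboxes
    .iter()
    // Drop blocked destinations before queueing anything.
    .filter(|inbox| is_apub_id_valid(inbox, blocked))
    // Each remaining inbox gets its own task, so one slow or dead
    // instance no longer holds up delivery to the others.
    .map(|inbox| QueuedTask {
      activity: activity.to_string(),
      to: inbox.to_string(),
    })
    .collect()
}

fn main() {
  let tasks = queue_per_destination(
    r#"{"type":"Create"}"#,
    &[
      "https://lemmy-beta/inbox",
      "https://lemmy-epsilon/inbox", // assume epsilon is a blocked instance
    ],
    &["lemmy-epsilon"],
  );
  assert_eq!(tasks.len(), 1);
  println!("{:?}", tasks);
}
```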

View file

@ -1,5 +1,5 @@
use crate::{ use crate::{
activities::{generate_activity_id, send_activity_to_community}, activity_queue::{send_comment_mentions, send_to_community},
check_actor_domain, check_actor_domain,
create_apub_response, create_apub_response,
create_apub_tombstone_response, create_apub_tombstone_response,
@ -10,6 +10,7 @@ use crate::{
get_or_fetch_and_insert_post, get_or_fetch_and_insert_post,
get_or_fetch_and_upsert_user, get_or_fetch_and_upsert_user,
}, },
generate_activity_id,
ActorType, ActorType,
ApubLikeableType, ApubLikeableType,
ApubObjectType, ApubObjectType,
@ -223,7 +224,8 @@ impl ApubObjectType for Comment {
// Set the mention tags // Set the mention tags
.set_many_tags(maa.get_tags()?); .set_many_tags(maa.get_tags()?);
send_activity_to_community(&creator, &community, maa.inboxes, create, context).await?; send_to_community(&creator, &community, create.clone(), context).await?;
send_comment_mentions(&creator, maa.inboxes, create, context).await?;
Ok(()) Ok(())
} }
@ -251,7 +253,8 @@ impl ApubObjectType for Comment {
// Set the mention tags // Set the mention tags
.set_many_tags(maa.get_tags()?); .set_many_tags(maa.get_tags()?);
send_activity_to_community(&creator, &community, maa.inboxes, update, context).await?; send_to_community(&creator, &community, update.clone(), context).await?;
send_comment_mentions(&creator, maa.inboxes, update, context).await?;
Ok(()) Ok(())
} }
@ -274,14 +277,7 @@ impl ApubObjectType for Comment {
.set_to(public()) .set_to(public())
.set_many_ccs(vec![community.get_followers_url()?]); .set_many_ccs(vec![community.get_followers_url()?]);
send_activity_to_community( send_to_community(&creator, &community, delete, context).await?;
&creator,
&community,
vec![community.get_shared_inbox_url()?],
delete,
context,
)
.await?;
Ok(()) Ok(())
} }
@ -317,14 +313,7 @@ impl ApubObjectType for Comment {
.set_to(public()) .set_to(public())
.set_many_ccs(vec![community.get_followers_url()?]); .set_many_ccs(vec![community.get_followers_url()?]);
send_activity_to_community( send_to_community(&creator, &community, undo, context).await?;
&creator,
&community,
vec![community.get_shared_inbox_url()?],
undo,
context,
)
.await?;
Ok(()) Ok(())
} }
@ -347,14 +336,7 @@ impl ApubObjectType for Comment {
.set_to(public()) .set_to(public())
.set_many_ccs(vec![community.get_followers_url()?]); .set_many_ccs(vec![community.get_followers_url()?]);
send_activity_to_community( send_to_community(&mod_, &community, remove, context).await?;
&mod_,
&community,
vec![community.get_shared_inbox_url()?],
remove,
context,
)
.await?;
Ok(()) Ok(())
} }
@ -386,14 +368,7 @@ impl ApubObjectType for Comment {
.set_to(public()) .set_to(public())
.set_many_ccs(vec![community.get_followers_url()?]); .set_many_ccs(vec![community.get_followers_url()?]);
send_activity_to_community( send_to_community(&mod_, &community, undo, context).await?;
&mod_,
&community,
vec![community.get_shared_inbox_url()?],
undo,
context,
)
.await?;
Ok(()) Ok(())
} }
} }
@ -419,14 +394,7 @@ impl ApubLikeableType for Comment {
.set_to(public()) .set_to(public())
.set_many_ccs(vec![community.get_followers_url()?]); .set_many_ccs(vec![community.get_followers_url()?]);
send_activity_to_community( send_to_community(&creator, &community, like, context).await?;
&creator,
&community,
vec![community.get_shared_inbox_url()?],
like,
context,
)
.await?;
Ok(()) Ok(())
} }
@ -449,14 +417,7 @@ impl ApubLikeableType for Comment {
.set_to(public()) .set_to(public())
.set_many_ccs(vec![community.get_followers_url()?]); .set_many_ccs(vec![community.get_followers_url()?]);
send_activity_to_community( send_to_community(&creator, &community, dislike, context).await?;
&creator,
&community,
vec![community.get_shared_inbox_url()?],
dislike,
context,
)
.await?;
Ok(()) Ok(())
} }
@ -491,14 +452,7 @@ impl ApubLikeableType for Comment {
.set_to(public()) .set_to(public())
.set_many_ccs(vec![community.get_followers_url()?]); .set_many_ccs(vec![community.get_followers_url()?]);
send_activity_to_community( send_to_community(&creator, &community, undo, context).await?;
&creator,
&community,
vec![community.get_shared_inbox_url()?],
undo,
context,
)
.await?;
Ok(()) Ok(())
} }
} }
@ -556,9 +510,7 @@ async fn collect_non_local_mentions_and_addresses(
} }
} }
let mut inboxes = vec![community.get_shared_inbox_url()?]; let inboxes = mention_inboxes.into_iter().unique().collect();
inboxes.extend(mention_inboxes);
inboxes = inboxes.into_iter().unique().collect();
Ok(MentionsAndAddresses { Ok(MentionsAndAddresses {
addressed_ccs, addressed_ccs,

View file

@ -1,13 +1,13 @@
use crate::{ use crate::{
activities::generate_activity_id, activity_queue::{send_activity_single_dest, send_to_community_followers},
activity_queue::send_activity,
check_actor_domain, check_actor_domain,
check_is_apub_id_valid,
create_apub_response, create_apub_response,
create_apub_tombstone_response, create_apub_tombstone_response,
create_tombstone, create_tombstone,
extensions::group_extensions::GroupExtension, extensions::group_extensions::GroupExtension,
fetcher::{get_or_fetch_and_upsert_actor, get_or_fetch_and_upsert_user}, fetcher::{get_or_fetch_and_upsert_actor, get_or_fetch_and_upsert_user},
insert_activity, generate_activity_id,
ActorType, ActorType,
FromApub, FromApub,
GroupExt, GroupExt,
@ -165,9 +165,7 @@ impl ActorType for Community {
.set_id(generate_activity_id(AcceptType::Accept)?) .set_id(generate_activity_id(AcceptType::Accept)?)
.set_to(to.clone()); .set_to(to.clone());
insert_activity(self.creator_id, accept.clone(), true, context.pool()).await?; send_activity_single_dest(accept, self, to, context).await?;
send_activity(context.activity_queue(), accept, self, vec![to])?;
Ok(()) Ok(())
} }
@ -181,14 +179,7 @@ impl ActorType for Community {
.set_to(public()) .set_to(public())
.set_many_ccs(vec![self.get_followers_url()?]); .set_many_ccs(vec![self.get_followers_url()?]);
insert_activity(self.creator_id, delete.clone(), true, context.pool()).await?; send_to_community_followers(delete, self, context, None).await?;
let inboxes = self.get_follower_inboxes(context.pool()).await?;
// Note: For an accept, since it was automatic, no one pushed a button,
// the community was the actor.
// But for delete, the creator is the actor, and does the signing
send_activity(context.activity_queue(), delete, creator, inboxes)?;
Ok(()) Ok(())
} }
@ -213,14 +204,7 @@ impl ActorType for Community {
.set_to(public()) .set_to(public())
.set_many_ccs(vec![self.get_followers_url()?]); .set_many_ccs(vec![self.get_followers_url()?]);
insert_activity(self.creator_id, undo.clone(), true, context.pool()).await?; send_to_community_followers(undo, self, context, None).await?;
let inboxes = self.get_follower_inboxes(context.pool()).await?;
// Note: For an accept, since it was automatic, no one pushed a button,
// the community was the actor.
// But for delete, the creator is the actor, and does the signing
send_activity(context.activity_queue(), undo, creator, inboxes)?;
Ok(()) Ok(())
} }
@ -234,14 +218,7 @@ impl ActorType for Community {
.set_to(public()) .set_to(public())
.set_many_ccs(vec![self.get_followers_url()?]); .set_many_ccs(vec![self.get_followers_url()?]);
insert_activity(mod_.id, remove.clone(), true, context.pool()).await?; send_to_community_followers(remove, self, context, None).await?;
let inboxes = self.get_follower_inboxes(context.pool()).await?;
// Note: For an accept, since it was automatic, no one pushed a button,
// the community was the actor.
// But for delete, the creator is the actor, and does the signing
send_activity(context.activity_queue(), remove, mod_, inboxes)?;
Ok(()) Ok(())
} }
@ -263,14 +240,7 @@ impl ActorType for Community {
.set_to(public()) .set_to(public())
.set_many_ccs(vec![self.get_followers_url()?]); .set_many_ccs(vec![self.get_followers_url()?]);
insert_activity(mod_.id, undo.clone(), true, context.pool()).await?; send_to_community_followers(undo, self, context, None).await?;
let inboxes = self.get_follower_inboxes(context.pool()).await?;
// Note: For an accept, since it was automatic, no one pushed a button,
// the community was the actor.
// But for remove , the creator is the actor, and does the signing
send_activity(context.activity_queue(), undo, mod_, inboxes)?;
Ok(()) Ok(())
} }
@ -287,6 +257,7 @@ impl ActorType for Community {
    .await??;
    let inboxes = inboxes
      .into_iter()
+      .filter(|i| !i.user_local)
      .map(|u| -> Result<Url, LemmyError> {
        let url = Url::parse(&u.user_actor_id)?;
        let domain = url.domain().context(location_info!())?;
@ -303,6 +274,8 @@ impl ActorType for Community {
        ))?)
      })
      .filter_map(Result::ok)
+      // Don't send to blocked instances
+      .filter(|inbox| check_is_apub_id_valid(inbox).is_ok())
      .unique()
      .collect();
@ -511,19 +484,13 @@ pub async fn do_announce(
    .set_to(public())
    .set_many_ccs(vec![community.get_followers_url()?]);
-  insert_activity(community.creator_id, announce.clone(), true, context.pool()).await?;
-  let mut to: Vec<Url> = community.get_follower_inboxes(context.pool()).await?;
-  // dont send to the local instance, nor to the instance where the activity originally came from,
-  // because that would result in a database error (same data inserted twice)
-  // this seems to be the "easiest" stable alternative for remove_item()
-  let sender_shared_inbox = sender.get_shared_inbox_url()?;
-  to.retain(|x| x != &sender_shared_inbox);
-  let community_shared_inbox = community.get_shared_inbox_url()?;
-  to.retain(|x| x != &community_shared_inbox);
-  send_activity(context.activity_queue(), announce, community, to)?;
+  send_to_community_followers(
+    announce,
+    community,
+    context,
+    Some(sender.get_shared_inbox_url()?),
+  )
+  .await?;
  Ok(())
}

View file

@ -55,9 +55,6 @@ where
  let timeout = Duration::from_secs(60);
-  // speed up tests
-  // before: 305s
-  // after: 240s
  let json = retry(|| {
    client
      .get(url.as_str())

View file

@ -11,7 +11,7 @@ use activitystreams::{
  prelude::*,
};
use actix_web::{web, HttpRequest, HttpResponse};
-use anyhow::Context;
+use anyhow::{anyhow, Context};
use lemmy_db::{
  community::{Community, CommunityFollower, CommunityFollowerForm},
  user::User_,
@ -20,7 +20,7 @@ use lemmy_db::{
use lemmy_structs::blocking;
use lemmy_utils::{location_info, LemmyError};
use lemmy_websocket::LemmyContext;
-use log::debug;
+use log::info;
use serde::{Deserialize, Serialize};
use std::fmt::Debug;
@ -48,7 +48,16 @@ pub async fn community_inbox(
  })
  .await??;
-  debug!(
+  if !community.local {
+    return Err(
+      anyhow!(
+        "Received activity is addressed to remote community {}",
+        &community.actor_id
+      )
+      .into(),
+    );
+  }
+  info!(
    "Community {} received activity {:?}",
    &community.name, &activity
  );
@ -56,6 +65,12 @@ pub async fn community_inbox(
    .actor()?
    .as_single_xsd_any_uri()
    .context(location_info!())?;
+  info!(
+    "Community {} inbox received activity {:?} from {}",
+    community.name,
+    &activity.id_unchecked(),
+    &user_uri
+  );
  check_is_apub_id_valid(user_uri)?;
  let user = get_or_fetch_and_upsert_user(&user_uri, &context).await?;

View file

@ -60,17 +60,17 @@ pub async fn shared_inbox(
) -> Result<HttpResponse, LemmyError> {
  let activity = input.into_inner();
-  let json = serde_json::to_string(&activity)?;
-  debug!("Shared inbox received activity: {}", json);
-  // TODO: if we already received an activity with identical ID, then ignore this (same in other inboxes)
  let sender = &activity
    .actor()?
    .to_owned()
    .single_xsd_any_uri()
    .context(location_info!())?;
  let community = get_community_id_from_activity(&activity)?;
+  debug!(
+    "Shared inbox received activity {:?} from {}",
+    &activity.id_unchecked(),
+    &sender
+  );
  check_is_apub_id_valid(sender)?;
  check_is_apub_id_valid(&community)?;

View file

@ -50,12 +50,17 @@ pub async fn user_inbox(
) -> Result<HttpResponse, LemmyError> {
  let activity = input.into_inner();
  let username = path.into_inner();
-  debug!("User {} received activity: {:?}", &username, &activity);
  let actor_uri = activity
    .actor()?
    .as_single_xsd_any_uri()
    .context(location_info!())?;
+  debug!(
+    "User {} inbox received activity {:?} from {}",
+    username,
+    &activity.id_unchecked(),
+    &actor_uri
+  );
  check_is_apub_id_valid(actor_uri)?;

View file

@ -1,7 +1,6 @@
#[macro_use] #[macro_use]
extern crate lazy_static; extern crate lazy_static;
pub mod activities;
pub mod activity_queue; pub mod activity_queue;
pub mod comment; pub mod comment;
pub mod community; pub mod community;
@ -43,6 +42,7 @@ use log::debug;
use reqwest::Client; use reqwest::Client;
use serde::Serialize; use serde::Serialize;
use url::{ParseError, Url}; use url::{ParseError, Url};
use uuid::Uuid;
type GroupExt = Ext2<ApActor<Group>, GroupExtension, PublicKeyExtension>; type GroupExt = Ext2<ApActor<Group>, GroupExtension, PublicKeyExtension>;
type PersonExt = Ext1<ApActor<Person>, PublicKeyExtension>; type PersonExt = Ext1<ApActor<Person>, PublicKeyExtension>;
@ -102,8 +102,9 @@ fn check_is_apub_id_valid(apub_id: &Url) -> Result<(), LemmyError> {
  let mut allowed_instances = Settings::get().get_allowed_instances();
  let blocked_instances = Settings::get().get_blocked_instances();
-  if !allowed_instances.is_empty() {
+  if allowed_instances.is_empty() && blocked_instances.is_empty() {
+    Ok(())
+  } else if !allowed_instances.is_empty() {
    // need to allow this explicitly because apub activities might contain objects from our local
    // instance. split is needed to remove the port in our federation test setup.
    allowed_instances.push(local_instance);
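The hunk above encodes a three-way decision: accept everything when neither list is configured, allowlist mode when an allowlist is set (the local instance is pushed onto it), and blocklist mode as the fallback. A simplified, hypothetical model of that control flow on bare hostnames; the real `check_is_apub_id_valid` works on full `Url`s and also strips the port used in the federation test setup, which this ignores:

```rust
// Hypothetical, simplified model of the allow/block decision in check_is_apub_id_valid().
fn is_instance_allowed(host: &str, local: &str, allowed: &[&str], blocked: &[&str]) -> bool {
  if allowed.is_empty() && blocked.is_empty() {
    // Open federation: everything is accepted.
    true
  } else if !allowed.is_empty() {
    // Allowlist mode: only listed instances (plus our own) are accepted.
    host == local || allowed.contains(&host)
  } else {
    // Blocklist mode: everything except the listed instances is accepted.
    !blocked.contains(&host)
  }
}

fn main() {
  // Roughly the lemmy-epsilon setup from the federation docker-compose above:
  // lemmy-alpha is blocked, everything else goes through.
  let blocked = ["lemmy-alpha"];
  assert!(!is_instance_allowed("lemmy-alpha", "lemmy-epsilon", &[], &blocked));
  assert!(is_instance_allowed("lemmy-beta", "lemmy-epsilon", &[], &blocked));
}
```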
@ -352,3 +353,16 @@ where
.await??; .await??;
Ok(()) Ok(())
} }
pub(in crate) fn generate_activity_id<T>(kind: T) -> Result<Url, ParseError>
where
T: ToString,
{
let id = format!(
"{}/activities/{}/{}",
Settings::get().get_protocol_and_hostname(),
kind.to_string().to_lowercase(),
Uuid::new_v4()
);
Url::parse(&id)
}
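`generate_activity_id` moves here unchanged from the deleted `activities.rs`; it only builds a unique, lowercase-typed URL under the local instance. A standalone sketch of the same idea, assuming the `url` and `uuid` crates (with the `v4` feature) and passing the base URL in explicitly instead of reading `Settings::get()`:

```rust
use url::{ParseError, Url};
use uuid::Uuid;

// Hypothetical standalone version: the instance base URL is a parameter here.
fn generate_activity_id<T: ToString>(base: &str, kind: T) -> Result<Url, ParseError> {
  let id = format!(
    "{}/activities/{}/{}",
    base,
    kind.to_string().to_lowercase(),
    Uuid::new_v4()
  );
  Url::parse(&id)
}

fn main() -> Result<(), ParseError> {
  // e.g. https://lemmy-alpha:8541/activities/create/<random uuid>
  let id = generate_activity_id("https://lemmy-alpha:8541", "Create")?;
  println!("{}", id);
  Ok(())
}
```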

View file

@ -1,11 +1,12 @@
use crate::{ use crate::{
activities::{generate_activity_id, send_activity_to_community}, activity_queue::send_to_community,
check_actor_domain, check_actor_domain,
create_apub_response, create_apub_response,
create_apub_tombstone_response, create_apub_tombstone_response,
create_tombstone, create_tombstone,
extensions::page_extension::PageExtension, extensions::page_extension::PageExtension,
fetcher::{get_or_fetch_and_upsert_community, get_or_fetch_and_upsert_user}, fetcher::{get_or_fetch_and_upsert_community, get_or_fetch_and_upsert_user},
generate_activity_id,
ActorType, ActorType,
ApubLikeableType, ApubLikeableType,
ApubObjectType, ApubObjectType,
@ -261,14 +262,7 @@ impl ApubObjectType for Post {
.set_to(public()) .set_to(public())
.set_many_ccs(vec![community.get_followers_url()?]); .set_many_ccs(vec![community.get_followers_url()?]);
send_activity_to_community( send_to_community(creator, &community, create, context).await?;
creator,
&community,
vec![community.get_shared_inbox_url()?],
create,
context,
)
.await?;
Ok(()) Ok(())
} }
@ -289,14 +283,7 @@ impl ApubObjectType for Post {
.set_to(public()) .set_to(public())
.set_many_ccs(vec![community.get_followers_url()?]); .set_many_ccs(vec![community.get_followers_url()?]);
send_activity_to_community( send_to_community(creator, &community, update, context).await?;
creator,
&community,
vec![community.get_shared_inbox_url()?],
update,
context,
)
.await?;
Ok(()) Ok(())
} }
@ -316,14 +303,7 @@ impl ApubObjectType for Post {
.set_to(public()) .set_to(public())
.set_many_ccs(vec![community.get_followers_url()?]); .set_many_ccs(vec![community.get_followers_url()?]);
send_activity_to_community( send_to_community(creator, &community, delete, context).await?;
creator,
&community,
vec![community.get_shared_inbox_url()?],
delete,
context,
)
.await?;
Ok(()) Ok(())
} }
@ -355,14 +335,7 @@ impl ApubObjectType for Post {
.set_to(public()) .set_to(public())
.set_many_ccs(vec![community.get_followers_url()?]); .set_many_ccs(vec![community.get_followers_url()?]);
send_activity_to_community( send_to_community(creator, &community, undo, context).await?;
creator,
&community,
vec![community.get_shared_inbox_url()?],
undo,
context,
)
.await?;
Ok(()) Ok(())
} }
@ -382,14 +355,7 @@ impl ApubObjectType for Post {
.set_to(public()) .set_to(public())
.set_many_ccs(vec![community.get_followers_url()?]); .set_many_ccs(vec![community.get_followers_url()?]);
send_activity_to_community( send_to_community(mod_, &community, remove, context).await?;
mod_,
&community,
vec![community.get_shared_inbox_url()?],
remove,
context,
)
.await?;
Ok(()) Ok(())
} }
@ -417,14 +383,7 @@ impl ApubObjectType for Post {
.set_to(public()) .set_to(public())
.set_many_ccs(vec![community.get_followers_url()?]); .set_many_ccs(vec![community.get_followers_url()?]);
send_activity_to_community( send_to_community(mod_, &community, undo, context).await?;
mod_,
&community,
vec![community.get_shared_inbox_url()?],
undo,
context,
)
.await?;
Ok(()) Ok(())
} }
} }
@ -447,14 +406,7 @@ impl ApubLikeableType for Post {
.set_to(public()) .set_to(public())
.set_many_ccs(vec![community.get_followers_url()?]); .set_many_ccs(vec![community.get_followers_url()?]);
send_activity_to_community( send_to_community(&creator, &community, like, context).await?;
&creator,
&community,
vec![community.get_shared_inbox_url()?],
like,
context,
)
.await?;
Ok(()) Ok(())
} }
@ -474,14 +426,7 @@ impl ApubLikeableType for Post {
.set_to(public()) .set_to(public())
.set_many_ccs(vec![community.get_followers_url()?]); .set_many_ccs(vec![community.get_followers_url()?]);
send_activity_to_community( send_to_community(&creator, &community, dislike, context).await?;
&creator,
&community,
vec![community.get_shared_inbox_url()?],
dislike,
context,
)
.await?;
Ok(()) Ok(())
} }
@ -513,14 +458,7 @@ impl ApubLikeableType for Post {
.set_to(public()) .set_to(public())
.set_many_ccs(vec![community.get_followers_url()?]); .set_many_ccs(vec![community.get_followers_url()?]);
send_activity_to_community( send_to_community(&creator, &community, undo, context).await?;
&creator,
&community,
vec![community.get_shared_inbox_url()?],
undo,
context,
)
.await?;
Ok(()) Ok(())
} }
} }

View file

@ -1,11 +1,10 @@
use crate::{ use crate::{
activities::generate_activity_id, activity_queue::send_activity_single_dest,
activity_queue::send_activity,
check_actor_domain, check_actor_domain,
check_is_apub_id_valid, check_is_apub_id_valid,
create_tombstone, create_tombstone,
fetcher::get_or_fetch_and_upsert_user, fetcher::get_or_fetch_and_upsert_user,
insert_activity, generate_activity_id,
ActorType, ActorType,
ApubObjectType, ApubObjectType,
FromApub, FromApub,
@ -130,9 +129,7 @@ impl ApubObjectType for PrivateMessage {
.set_id(generate_activity_id(CreateType::Create)?) .set_id(generate_activity_id(CreateType::Create)?)
.set_to(to.clone()); .set_to(to.clone());
insert_activity(creator.id, create.clone(), true, context.pool()).await?; send_activity_single_dest(create, creator, to, context).await?;
send_activity(context.activity_queue(), create, creator, vec![to])?;
Ok(()) Ok(())
} }
@ -150,9 +147,7 @@ impl ApubObjectType for PrivateMessage {
.set_id(generate_activity_id(UpdateType::Update)?) .set_id(generate_activity_id(UpdateType::Update)?)
.set_to(to.clone()); .set_to(to.clone());
insert_activity(creator.id, update.clone(), true, context.pool()).await?; send_activity_single_dest(update, creator, to, context).await?;
send_activity(context.activity_queue(), update, creator, vec![to])?;
Ok(()) Ok(())
} }
@ -169,9 +164,7 @@ impl ApubObjectType for PrivateMessage {
.set_id(generate_activity_id(DeleteType::Delete)?) .set_id(generate_activity_id(DeleteType::Delete)?)
.set_to(to.clone()); .set_to(to.clone());
insert_activity(creator.id, delete.clone(), true, context.pool()).await?; send_activity_single_dest(delete, creator, to, context).await?;
send_activity(context.activity_queue(), delete, creator, vec![to])?;
Ok(()) Ok(())
} }
@ -199,9 +192,7 @@ impl ApubObjectType for PrivateMessage {
.set_id(generate_activity_id(UndoType::Undo)?) .set_id(generate_activity_id(UndoType::Undo)?)
.set_to(to.clone()); .set_to(to.clone());
insert_activity(creator.id, undo.clone(), true, context.pool()).await?; send_activity_single_dest(undo, creator, to, context).await?;
send_activity(context.activity_queue(), undo, creator, vec![to])?;
Ok(()) Ok(())
} }

View file

@ -1,10 +1,9 @@
use crate::{ use crate::{
activities::generate_activity_id, activity_queue::send_activity_single_dest,
activity_queue::send_activity,
check_actor_domain, check_actor_domain,
create_apub_response, create_apub_response,
fetcher::get_or_fetch_and_upsert_actor, fetcher::get_or_fetch_and_upsert_actor,
insert_activity, generate_activity_id,
ActorType, ActorType,
FromApub, FromApub,
PersonExt, PersonExt,
@ -121,9 +120,7 @@ impl ActorType for User_ {
let follow_actor = get_or_fetch_and_upsert_actor(follow_actor_id, context).await?; let follow_actor = get_or_fetch_and_upsert_actor(follow_actor_id, context).await?;
let to = follow_actor.get_inbox_url()?; let to = follow_actor.get_inbox_url()?;
insert_activity(self.id, follow.clone(), true, context.pool()).await?; send_activity_single_dest(follow, self, to, context).await?;
send_activity(context.activity_queue(), follow, self, vec![to])?;
Ok(()) Ok(())
} }
@ -146,9 +143,7 @@ impl ActorType for User_ {
.set_context(activitystreams::context()) .set_context(activitystreams::context())
.set_id(generate_activity_id(UndoType::Undo)?); .set_id(generate_activity_id(UndoType::Undo)?);
insert_activity(self.id, undo.clone(), true, context.pool()).await?; send_activity_single_dest(undo, self, to, context).await?;
send_activity(context.activity_queue(), undo, self, vec![to])?;
Ok(()) Ok(())
} }

View file

@ -62,7 +62,8 @@ pub fn do_insert_activity<T>(
where
  T: Serialize + Debug,
{
-  debug!("inserting activity for user {}, data {:?}", user_id, &data);
+  debug!("inserting activity for user {}: ", user_id);
+  debug!("{}", serde_json::to_string_pretty(&data)?);
  let activity_form = ActivityForm {
    user_id,
    data: serde_json::to_value(&data)?,

View file

@ -134,7 +134,7 @@ impl Settings {
      .federation
      .allowed_instances
      .split(',')
-      .map(|d| d.to_string())
+      .map(|d| d.trim().to_string())
      .collect();
    // The defaults.hjson config always returns a [""]
@ -148,7 +148,7 @@ impl Settings {
      .federation
      .blocked_instances
      .split(',')
-      .map(|d| d.to_string())
+      .map(|d| d.trim().to_string())
      .collect();
    // The defaults.hjson config always returns a [""]
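The only change in `Settings` above is the added `trim()`, so that an instance list written as `"lemmy.ml, example.com"` (with spaces after the commas) still yields clean hostnames that can match incoming domains. A hypothetical standalone helper showing that parsing step in isolation:

```rust
// Hypothetical helper mirroring what get_allowed_instances()/get_blocked_instances()
// do after this change: split the configured string on commas and trim whitespace.
fn parse_instance_list(raw: &str) -> Vec<String> {
  raw.split(',').map(|d| d.trim().to_string()).collect()
}

fn main() {
  // Without the trim(), " example.com" (leading space) would never equal "example.com".
  assert_eq!(
    parse_instance_list("lemmy.ml, example.com"),
    vec!["lemmy.ml".to_string(), "example.com".to_string()]
  );
}
```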

View file

@ -0,0 +1,128 @@
create or replace function refresh_community()
returns trigger language plpgsql
as $$
begin
IF (TG_OP = 'DELETE') THEN
delete from community_aggregates_fast where id = OLD.id;
ELSIF (TG_OP = 'UPDATE') THEN
delete from community_aggregates_fast where id = OLD.id;
insert into community_aggregates_fast select * from community_aggregates_view where id = NEW.id;
-- Update user view due to owner changes
delete from user_fast where id = NEW.creator_id;
insert into user_fast select * from user_view where id = NEW.creator_id;
-- Update post view due to community changes
delete from post_aggregates_fast where community_id = NEW.id;
insert into post_aggregates_fast select * from post_aggregates_view where community_id = NEW.id;
-- TODO make sure this shows up in the users page ?
ELSIF (TG_OP = 'INSERT') THEN
insert into community_aggregates_fast select * from community_aggregates_view where id = NEW.id;
END IF;
return null;
end $$;
create or replace function refresh_user()
returns trigger language plpgsql
as $$
begin
IF (TG_OP = 'DELETE') THEN
delete from user_fast where id = OLD.id;
ELSIF (TG_OP = 'UPDATE') THEN
delete from user_fast where id = OLD.id;
insert into user_fast select * from user_view where id = NEW.id;
-- Refresh post_fast, cause of user info changes
delete from post_aggregates_fast where creator_id = NEW.id;
insert into post_aggregates_fast select * from post_aggregates_view where creator_id = NEW.id;
delete from comment_aggregates_fast where creator_id = NEW.id;
insert into comment_aggregates_fast select * from comment_aggregates_view where creator_id = NEW.id;
ELSIF (TG_OP = 'INSERT') THEN
insert into user_fast select * from user_view where id = NEW.id;
END IF;
return null;
end $$;
create or replace function refresh_post()
returns trigger language plpgsql
as $$
begin
IF (TG_OP = 'DELETE') THEN
delete from post_aggregates_fast where id = OLD.id;
-- Update community number of posts
update community_aggregates_fast set number_of_posts = number_of_posts - 1 where id = OLD.community_id;
ELSIF (TG_OP = 'UPDATE') THEN
delete from post_aggregates_fast where id = OLD.id;
insert into post_aggregates_fast select * from post_aggregates_view where id = NEW.id;
ELSIF (TG_OP = 'INSERT') THEN
insert into post_aggregates_fast select * from post_aggregates_view where id = NEW.id;
-- Update that users number of posts, post score
delete from user_fast where id = NEW.creator_id;
insert into user_fast select * from user_view where id = NEW.creator_id;
-- Update community number of posts
update community_aggregates_fast set number_of_posts = number_of_posts + 1 where id = NEW.community_id;
-- Update the hot rank on the post table
-- TODO this might not correctly update it, using a 1 week interval
update post_aggregates_fast as paf
set hot_rank = pav.hot_rank
from post_aggregates_view as pav
where paf.id = pav.id and (pav.published > ('now'::timestamp - '1 week'::interval));
END IF;
return null;
end $$;
create or replace function refresh_comment()
returns trigger language plpgsql
as $$
begin
IF (TG_OP = 'DELETE') THEN
delete from comment_aggregates_fast where id = OLD.id;
-- Update community number of comments
update community_aggregates_fast as caf
set number_of_comments = number_of_comments - 1
from post as p
where caf.id = p.community_id and p.id = OLD.post_id;
ELSIF (TG_OP = 'UPDATE') THEN
delete from comment_aggregates_fast where id = OLD.id;
insert into comment_aggregates_fast select * from comment_aggregates_view where id = NEW.id;
ELSIF (TG_OP = 'INSERT') THEN
insert into comment_aggregates_fast select * from comment_aggregates_view where id = NEW.id;
-- Update user view due to comment count
update user_fast
set number_of_comments = number_of_comments + 1
where id = NEW.creator_id;
-- Update post view due to comment count, new comment activity time, but only on new posts
-- TODO this could be done more efficiently
delete from post_aggregates_fast where id = NEW.post_id;
insert into post_aggregates_fast select * from post_aggregates_view where id = NEW.post_id;
-- Force the hot rank as zero on week-older posts
update post_aggregates_fast as paf
set hot_rank = 0
where paf.id = NEW.post_id and (paf.published < ('now'::timestamp - '1 week'::interval));
-- Update community number of comments
update community_aggregates_fast as caf
set number_of_comments = number_of_comments + 1
from post as p
where caf.id = p.community_id and p.id = NEW.post_id;
END IF;
return null;
end $$;

View file

@ -0,0 +1,132 @@
-- This adds on conflict do nothing triggers to all the insert_intos
-- Github issue: https://github.com/LemmyNet/lemmy/issues/1179
create or replace function refresh_community()
returns trigger language plpgsql
as $$
begin
IF (TG_OP = 'DELETE') THEN
delete from community_aggregates_fast where id = OLD.id;
ELSIF (TG_OP = 'UPDATE') THEN
delete from community_aggregates_fast where id = OLD.id;
insert into community_aggregates_fast select * from community_aggregates_view where id = NEW.id on conflict (id) do nothing;
-- Update user view due to owner changes
delete from user_fast where id = NEW.creator_id;
insert into user_fast select * from user_view where id = NEW.creator_id on conflict (id) do nothing;
-- Update post view due to community changes
delete from post_aggregates_fast where community_id = NEW.id;
insert into post_aggregates_fast select * from post_aggregates_view where community_id = NEW.id on conflict (id) do nothing;
-- TODO make sure this shows up in the users page ?
ELSIF (TG_OP = 'INSERT') THEN
insert into community_aggregates_fast select * from community_aggregates_view where id = NEW.id;
END IF;
return null;
end $$;
create or replace function refresh_user()
returns trigger language plpgsql
as $$
begin
IF (TG_OP = 'DELETE') THEN
delete from user_fast where id = OLD.id;
ELSIF (TG_OP = 'UPDATE') THEN
delete from user_fast where id = OLD.id;
insert into user_fast select * from user_view where id = NEW.id on conflict(id) do nothing;
-- Refresh post_fast, cause of user info changes
delete from post_aggregates_fast where creator_id = NEW.id;
insert into post_aggregates_fast select * from post_aggregates_view where creator_id = NEW.id on conflict (id) do nothing;
delete from comment_aggregates_fast where creator_id = NEW.id;
insert into comment_aggregates_fast select * from comment_aggregates_view where creator_id = NEW.id on conflict (id) do nothing;
ELSIF (TG_OP = 'INSERT') THEN
insert into user_fast select * from user_view where id = NEW.id;
END IF;
return null;
end $$;
create or replace function refresh_post()
returns trigger language plpgsql
as $$
begin
IF (TG_OP = 'DELETE') THEN
delete from post_aggregates_fast where id = OLD.id;
-- Update community number of posts
update community_aggregates_fast set number_of_posts = number_of_posts - 1 where id = OLD.community_id;
ELSIF (TG_OP = 'UPDATE') THEN
delete from post_aggregates_fast where id = OLD.id;
insert into post_aggregates_fast select * from post_aggregates_view where id = NEW.id on conflict (id) do nothing;
ELSIF (TG_OP = 'INSERT') THEN
insert into post_aggregates_fast select * from post_aggregates_view where id = NEW.id;
-- Update that users number of posts, post score
delete from user_fast where id = NEW.creator_id;
insert into user_fast select * from user_view where id = NEW.creator_id on conflict (id) do nothing;
-- Update community number of posts
update community_aggregates_fast set number_of_posts = number_of_posts + 1 where id = NEW.community_id;
-- Update the hot rank on the post table
-- TODO this might not correctly update it, using a 1 week interval
update post_aggregates_fast as paf
set hot_rank = pav.hot_rank
from post_aggregates_view as pav
where paf.id = pav.id and (pav.published > ('now'::timestamp - '1 week'::interval));
END IF;
return null;
end $$;
create or replace function refresh_comment()
returns trigger language plpgsql
as $$
begin
IF (TG_OP = 'DELETE') THEN
delete from comment_aggregates_fast where id = OLD.id;
-- Update community number of comments
update community_aggregates_fast as caf
set number_of_comments = number_of_comments - 1
from post as p
where caf.id = p.community_id and p.id = OLD.post_id;
ELSIF (TG_OP = 'UPDATE') THEN
delete from comment_aggregates_fast where id = OLD.id;
insert into comment_aggregates_fast select * from comment_aggregates_view where id = NEW.id on conflict (id) do nothing;
ELSIF (TG_OP = 'INSERT') THEN
insert into comment_aggregates_fast select * from comment_aggregates_view where id = NEW.id;
-- Update user view due to comment count
update user_fast
set number_of_comments = number_of_comments + 1
where id = NEW.creator_id;
-- Update post view due to comment count, new comment activity time, but only on new posts
-- TODO this could be done more efficiently
delete from post_aggregates_fast where id = NEW.post_id;
insert into post_aggregates_fast select * from post_aggregates_view where id = NEW.post_id on conflict (id) do nothing;
-- Force the hot rank as zero on week-older posts
update post_aggregates_fast as paf
set hot_rank = 0
where paf.id = NEW.post_id and (paf.published < ('now'::timestamp - '1 week'::interval));
-- Update community number of comments
update community_aggregates_fast as caf
set number_of_comments = number_of_comments + 1
from post as p
where caf.id = p.community_id and p.id = NEW.post_id;
END IF;
return null;
end $$;