Merge remote-tracking branch 'upstream/main' into migration-runner

Dull Bananas 2024-05-09 02:39:01 +00:00
commit 1aab92cbfa
53 changed files with 2922 additions and 2532 deletions


@@ -278,8 +278,7 @@ steps:
     commands:
       - cargo install cargo-workspaces
       - cp -r migrations crates/db_schema/
-      - cargo login "$CARGO_API_TOKEN"
-      - cargo workspaces publish --from-git --allow-dirty --no-verify --allow-branch "${CI_COMMIT_TAG}" --yes custom "${CI_COMMIT_TAG}"
+      - cargo workspaces publish --token "$CARGO_API_TOKEN" --from-git --allow-dirty --no-verify --allow-branch "${CI_COMMIT_TAG}" --yes custom "${CI_COMMIT_TAG}"
     secrets: [cargo_api_token]
     when:
       - event: tag

Cargo.lock (generated, 881 changed lines): file diff suppressed because it is too large.


@@ -1,5 +1,5 @@
 [workspace.package]
-version = "0.19.4-beta.5"
+version = "0.19.4-beta.6"
 edition = "2021"
 description = "A link aggregator for the fediverse"
 license = "AGPL-3.0"
@@ -88,25 +88,25 @@ unused_self = "deny"
 unwrap_used = "deny"

 [workspace.dependencies]
-lemmy_api = { version = "=0.19.4-beta.5", path = "./crates/api" }
-lemmy_api_crud = { version = "=0.19.4-beta.5", path = "./crates/api_crud" }
-lemmy_apub = { version = "=0.19.4-beta.5", path = "./crates/apub" }
-lemmy_utils = { version = "=0.19.4-beta.5", path = "./crates/utils", default-features = false }
-lemmy_db_schema = { version = "=0.19.4-beta.5", path = "./crates/db_schema" }
-lemmy_api_common = { version = "=0.19.4-beta.5", path = "./crates/api_common" }
-lemmy_routes = { version = "=0.19.4-beta.5", path = "./crates/routes" }
-lemmy_db_views = { version = "=0.19.4-beta.5", path = "./crates/db_views" }
-lemmy_db_views_actor = { version = "=0.19.4-beta.5", path = "./crates/db_views_actor" }
-lemmy_db_views_moderator = { version = "=0.19.4-beta.5", path = "./crates/db_views_moderator" }
-lemmy_federate = { version = "=0.19.4-beta.5", path = "./crates/federate" }
-activitypub_federation = { version = "0.5.4", default-features = false, features = [
+lemmy_api = { version = "=0.19.4-beta.6", path = "./crates/api" }
+lemmy_api_crud = { version = "=0.19.4-beta.6", path = "./crates/api_crud" }
+lemmy_apub = { version = "=0.19.4-beta.6", path = "./crates/apub" }
+lemmy_utils = { version = "=0.19.4-beta.6", path = "./crates/utils", default-features = false }
+lemmy_db_schema = { version = "=0.19.4-beta.6", path = "./crates/db_schema" }
+lemmy_api_common = { version = "=0.19.4-beta.6", path = "./crates/api_common" }
+lemmy_routes = { version = "=0.19.4-beta.6", path = "./crates/routes" }
+lemmy_db_views = { version = "=0.19.4-beta.6", path = "./crates/db_views" }
+lemmy_db_views_actor = { version = "=0.19.4-beta.6", path = "./crates/db_views_actor" }
+lemmy_db_views_moderator = { version = "=0.19.4-beta.6", path = "./crates/db_views_moderator" }
+lemmy_federate = { version = "=0.19.4-beta.6", path = "./crates/federate" }
+activitypub_federation = { version = "0.5.6", default-features = false, features = [
   "actix-web",
 ] }
 diesel = "2.1.6"
 diesel_migrations = "2.1.0"
 diesel-async = "0.4.1"
-serde = { version = "1.0.198", features = ["derive"] }
-serde_with = "3.7.0"
+serde = { version = "1.0.199", features = ["derive"] }
+serde_with = "3.8.1"
 actix-web = { version = "4.5.1", default-features = false, features = [
   "macros",
   "rustls",
@@ -129,7 +129,7 @@ doku = { version = "0.21.1", features = ["url-2"] }
 bcrypt = "0.15.1"
 chrono = { version = "0.4.38", features = ["serde"], default-features = false }
 serde_json = { version = "1.0.116", features = ["preserve_order"] }
-base64 = "0.22.0"
+base64 = "0.22.1"
 uuid = { version = "1.8.0", features = ["serde", "v4"] }
 async-trait = "0.1.80"
 captcha = "0.0.9"
@@ -157,10 +157,10 @@ ts-rs = { version = "7.1.1", features = [
   "chrono-impl",
   "no-serde-warnings",
 ] }
-rustls = { version = "0.21.11", features = ["dangerous_configuration"] }
+rustls = { version = "0.23.5", features = ["ring"] }
 futures-util = "0.3.30"
 tokio-postgres = "0.7.10"
-tokio-postgres-rustls = "0.10.0"
+tokio-postgres-rustls = "0.12.0"
 urlencoding = "2.1.3"
 enum-map = "2.7"
 moka = { version = "0.12.7", features = ["future"] }


@@ -6,7 +6,7 @@
   "repository": "https://github.com/LemmyNet/lemmy",
   "author": "Dessalines",
   "license": "AGPL-3.0",
-  "packageManager": "pnpm@9.0.4",
+  "packageManager": "pnpm@9.0.6",
   "scripts": {
     "lint": "tsc --noEmit && eslint --report-unused-disable-directives --ext .js,.ts,.tsx src && prettier --check 'src/**/*.ts'",
     "fix": "prettier --write src && eslint --fix src",

File diff suppressed because it is too large.


@@ -25,7 +25,7 @@ full = [
   "lemmy_db_views_moderator/full",
   "lemmy_utils/full",
   "activitypub_federation",
-  "encoding",
+  "encoding_rs",
   "reqwest-middleware",
   "webpage",
   "ts-rs",
@@ -69,7 +69,7 @@ mime = { version = "0.3.17", optional = true }
 webpage = { version = "1.6", default-features = false, features = [
   "serde",
 ], optional = true }
-encoding = { version = "0.2.33", optional = true }
+encoding_rs = { version = "0.8.34", optional = true }
 jsonwebtoken = { version = "8.3.0", optional = true }
 # necessary for wasmt compilation
 getrandom = { version = "0.2.14", features = ["js"] }


@@ -40,7 +40,7 @@ pub struct Register {
   pub username: String,
   pub password: Sensitive<String>,
   pub password_verify: Sensitive<String>,
-  pub show_nsfw: bool,
+  pub show_nsfw: Option<bool>,
   /// email is mandatory if email verification is enabled on the server
   pub email: Option<Sensitive<String>>,
   /// The UUID of the captcha item.


@@ -6,7 +6,7 @@ use crate::{
   utils::{local_site_opt_to_sensitive, proxy_image_link, proxy_image_link_opt_apub},
 };
 use activitypub_federation::config::Data;
-use encoding::{all::encodings, DecoderTrap};
+use encoding_rs::{Encoding, UTF_8};
 use lemmy_db_schema::{
   newtypes::DbUrl,
   source::{
@@ -160,11 +160,9 @@ fn extract_opengraph_data(html_bytes: &[u8], url: &Url) -> LemmyResult<OpenGraph
   // proper encoding. If the specified encoding cannot be found, fall back to the original UTF-8
   // version.
   if let Some(charset) = page.meta.get("charset") {
-    if charset.to_lowercase() != "utf-8" {
-      if let Some(encoding_ref) = encodings().iter().find(|e| e.name() == charset) {
-        if let Ok(html_with_encoding) = encoding_ref.decode(html_bytes, DecoderTrap::Replace) {
-          page = HTML::from_string(html_with_encoding, None)?;
-        }
+    if charset != UTF_8.name() {
+      if let Some(encoding) = Encoding::for_label(charset.as_bytes()) {
+        page = HTML::from_string(encoding.decode(html_bytes).0.into(), None)?;
       }
     }
   }
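Editor's note: the hunk above swaps the unmaintained `encoding` crate for `encoding_rs`. As a rough illustration of the new decoding path, here is a minimal standalone sketch; the helper name and the lossy UTF-8 fallback are assumptions for illustration, not code from this commit.

```rust
// Minimal sketch of decoding HTML bytes with a charset label via encoding_rs.
use encoding_rs::{Encoding, UTF_8};

fn decode_html(html_bytes: &[u8], charset: &str) -> String {
  // Only re-decode when the page declares something other than UTF-8.
  if charset != UTF_8.name() {
    if let Some(encoding) = Encoding::for_label(charset.as_bytes()) {
      // `decode` returns (Cow<str>, encoding actually used, had_errors).
      return encoding.decode(html_bytes).0.into_owned();
    }
  }
  // Unknown label: fall back to a lossy UTF-8 interpretation.
  String::from_utf8_lossy(html_bytes).into_owned()
}
```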


@@ -142,12 +142,17 @@ pub async fn register(
     .map(|lang_str| lang_str.split('-').next().unwrap_or_default().to_string())
     .collect();

+  // Show nsfw content if param is true, or if content_warning exists
+  let show_nsfw = data
+    .show_nsfw
+    .unwrap_or(site_view.site.content_warning.is_some());
+
   // Create the local user
   let local_user_form = LocalUserInsertForm::builder()
     .person_id(inserted_person.id)
     .email(data.email.as_deref().map(str::to_lowercase))
     .password_encrypted(data.password.to_string())
-    .show_nsfw(Some(data.show_nsfw))
+    .show_nsfw(Some(show_nsfw))
     .accepted_application(accepted_application)
     .default_listing_type(Some(local_site.default_post_listing_type))
     .post_listing_mode(Some(local_site.default_post_listing_mode))
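Editor's note: the registration change above makes `show_nsfw` optional and derives a default from the site's content warning. A small hedged sketch of that fallback, with plain types standing in for Lemmy's structs:

```rust
// Hypothetical free function: an explicit client choice wins; otherwise NSFW
// is shown on instances that define a content warning.
fn resolve_show_nsfw(requested: Option<bool>, content_warning: Option<&str>) -> bool {
  requested.unwrap_or(content_warning.is_some())
}

#[test]
fn defaults_follow_content_warning() {
  assert!(resolve_show_nsfw(None, Some("cw")));
  assert!(!resolve_show_nsfw(None, None));
  assert!(!resolve_show_nsfw(Some(false), Some("cw")));
}
```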


@@ -0,0 +1,22 @@
{
"id": "https://socialhub.activitypub.rocks/ap/actor/797217cf18c0e819dfafc52425590146",
"type": "Group",
"updated": "2024-04-05T12:49:51Z",
"url": "https://socialhub.activitypub.rocks/c/meeting/threadiverse-wg/88",
"name": "Threadiverse Working Group (SocialHub)",
"inbox": "https://socialhub.activitypub.rocks/ap/actor/797217cf18c0e819dfafc52425590146/inbox",
"outbox": "https://socialhub.activitypub.rocks/ap/actor/797217cf18c0e819dfafc52425590146/outbox",
"followers": "https://socialhub.activitypub.rocks/ap/actor/797217cf18c0e819dfafc52425590146/followers",
"preferredUsername": "threadiverse-wg",
"publicKey": {
"id": "https://socialhub.activitypub.rocks/ap/actor/797217cf18c0e819dfafc52425590146#main-key",
"owner": "https://socialhub.activitypub.rocks/ap/actor/797217cf18c0e819dfafc52425590146",
"publicKeyPem": "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEApJi4iAcW6bPiHVCxT9p0\n8DVnrDDO4QtLNy7bpRFdMFifmmmXprsuAi9D2MSwbhH49V54HtIkxBpKd2IR/UD8\nmhMDY4CNI9FHpjqLw0wtkzxcqF9urSqhn0/vWX+9oxyhIgQS5KMiIkYDMJiAc691\niEcZ8LCran23xIGl6Dk54Nr3TqTMLcjDhzQYUJbxMrLq5/knWqOKG3IF5OxK+9ZZ\n1wxDF872eJTxJLkmpag+WYNtHzvB2SGTp8j5IF1/pZ9J1c3cpYfaeolTch/B/GQn\najCB4l27U52rIIObxJqFXSY8wHyd0aAmNmxzPZ7cduRlBDhmI40cAmnCV1YQPvpk\nDwIDAQAB\n-----END PUBLIC KEY-----\n"
},
"icon": {
"type": "Image",
"mediaType": "image/png",
"url": "https://socialhub.activitypub.rocks/uploads/default/original/1X/8faac84234dc73d074dadaa2bcf24dc746b8647f.png"
},
"@context": "https://www.w3.org/ns/activitystreams"
}


@@ -0,0 +1,13 @@
{
"id": "https://socialhub.activitypub.rocks/ap/object/1899f65c062200daec50a4c89ed76dc9",
"type": "Note",
"audience": "https://socialhub.activitypub.rocks/ap/actor/797217cf18c0e819dfafc52425590146",
"published": "2024-04-13T14:36:19Z",
"updated": "2024-04-13T14:36:19Z",
"url": "https://socialhub.activitypub.rocks/t/our-next-meeting/4079/1",
"attributedTo": "https://socialhub.activitypub.rocks/ap/actor/495843076e9e469fbd35ccf467ae9fb1",
"name": "Our next meeting",
"context": "https://socialhub.activitypub.rocks/ap/collection/8850f6e85b57c490da915a5dfbbd5045",
"content": "<h3>Last Meeting</h3>\n<h4>Recording</h4>\n<a href=\"https://us06web.zoom.us/rec/share/4hGBTvgXJPlu8UkjkkxVARypNg5DH0eeaKlIBv71D4G3lokYyrCrg7cqBCJmL109.FsHYTZDlVvZXrgcn?startTime=1712254114000\">https://us06web.zoom.us/rec/share/4hGBTvgXJPlu8UkjkkxVARypNg5DH0eeaKlIBv71D4G3lokYyrCrg7cqBCJmL109.FsHYTZDlVvZXrgcn?startTime=1712254114000</a>\nPasscode: z+1*4pUB\n<h4>Minutes</h4>\nTo refresh your memory, you can read the minutes of last week's meeting <a href=\"https://community.nodebb.org/topic/17949/minutes&hellip;",
"@context": "https://www.w3.org/ns/activitystreams"
}


@@ -0,0 +1,23 @@
{
"id": "https://socialhub.activitypub.rocks/ap/actor/495843076e9e469fbd35ccf467ae9fb1",
"type": "Person",
"updated": "2024-01-15T12:27:03Z",
"url": "https://socialhub.activitypub.rocks/u/angus",
"name": "Angus McLeod",
"inbox": "https://socialhub.activitypub.rocks/ap/actor/495843076e9e469fbd35ccf467ae9fb1/inbox",
"outbox": "https://socialhub.activitypub.rocks/ap/actor/495843076e9e469fbd35ccf467ae9fb1/outbox",
"sharedInbox": "https://socialhub.activitypub.rocks/ap/users/inbox",
"followers": "https://socialhub.activitypub.rocks/ap/actor/495843076e9e469fbd35ccf467ae9fb1/followers",
"preferredUsername": "angus",
"publicKey": {
"id": "https://socialhub.activitypub.rocks/ap/actor/495843076e9e469fbd35ccf467ae9fb1#main-key",
"owner": "https://socialhub.activitypub.rocks/ap/actor/495843076e9e469fbd35ccf467ae9fb1",
"publicKeyPem": "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA3RpuFDuwXZzOeHO5fO2O\nHmP7Flc5JDXJ8OOEJYq5T/dzUKqREOF1ZT0WMww8/E3P6w+gfFsjzThraJb8nHuW\nP6798SUD35CWBclfhw9DapjVn99JyFcAWcH3b9fr6LYshc4y1BoeJagk1kcro2Dc\n+pX0vVXgNjwWnGfyucAgGIUWrNUjcvIvXmyVCBSQfXG3nCALV1JbI4KSgf/5KyBn\nza/QefaetxYiFV8wAisPKLsz3XQAaITsQmbSi+8gmwXt/9U808PK1KphCiClDOWg\noi0HPzJn0rn+mwFCfgNWenvribfeG40AHLG33OkWKvslufjifdWDCOcBYYzyCEV6\n+wIDAQAB\n-----END PUBLIC KEY-----\n"
},
"icon": {
"type": "Image",
"mediaType": "image/png",
"url": "https://socialhub.activitypub.rocks/user_avatar/socialhub.activitypub.rocks/angus/96/2295_2.png"
},
"@context": "https://www.w3.org/ns/activitystreams"
}


@@ -0,0 +1,22 @@
{
"@context": "https://www.w3.org/ns/activitystreams",
"id": "https://community.nodebb.org/category/31",
"url": "https://community.nodebb.org/category/31/threadiverse-working-group",
"inbox": "https://community.nodebb.org/category/31/inbox",
"outbox": "https://community.nodebb.org/category/31/outbox",
"sharedInbox": "https://community.nodebb.org/inbox",
"type": "Group",
"name": "Threadiverse Working Group",
"preferredUsername": "swicg-threadiverse-wg",
"summary": "Discussion and announcements related to the SWICG Threadiverse task force",
"icon": {
"type": "Image",
"mediaType": "image/png",
"url": "https://community.nodebb.org/assets/uploads/system/site-logo.png"
},
"publicKey": {
"id": "https://community.nodebb.org/category/31#key",
"owner": "https://community.nodebb.org/category/31",
"publicKeyPem": "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA0/Or3Ox2/jbhBZzF8W0Y\nWuS/4lgm5O5rxQk2nDRBXU/qNaZnMPkW2FxFPuPetndUVKSD2+vWF3SUlFyZ/vhT\nITzLkbRSILMiZCUg+0mvqi6va1WMBglMe5jLkc7wdfgNsosqBzKMdyMxqDZr++mJ\n8DjuqzWHENcjWcbMfSfAa9nkZHBIQUsHGGIwxEbKNlPqF0JIB66py7xmXbboDxpD\nPVF3EMkgZNnbmDGtlkZCKbztradyNRVl/u6KJpV3fbi+m/8CZ+POc4I5sKCQY1Hr\ndslHlm6tCkJQxIIKQtz0ZJ5yCUYmk48C2gFCndfJtYoEy9iR62xSemky6y04gWVc\naQIDAQAB\n-----END PUBLIC KEY-----\n"
}
}


@@ -0,0 +1,38 @@
{
"@context": "https://www.w3.org/ns/activitystreams",
"id": "https://community.nodebb.org/topic/17908",
"type": "Page",
"to": ["https://www.w3.org/ns/activitystreams#Public"],
"cc": ["https://community.nodebb.org/uid/2/followers"],
"inReplyTo": null,
"published": "2024-03-19T20:25:39.462Z",
"url": "https://community.nodebb.org/topic/17908/threadiverse-working-group",
"attributedTo": "https://community.nodebb.org/uid/2",
"audience": "https://community.nodebb.org/category/31/threadiverse-working-group",
"sensitive": false,
"summary": null,
"name": "Threadiverse Working Group",
"content": "<p dir=\"auto\">NodeBB is at this year's FediForum, and one of the breakout sessions centred around <strong>the Theadiverse</strong>, the subset of ActivityPub-enabled applications built around a topic-centric model of content representation.</p>\n<p dir=\"auto\">Some of the topic touched upon included:</p>\n<ul>\n<li>Aligning on a standard representation for collections of Notes</li>\n<li>FEP-1b12 — Group federation and implementation thereof by Lemmy, et al.</li>\n<li>Offering a comparatively more feature-rich experience vis-a-vis restrictions re: microblogging</li>\n<li>Going forward: collaborating on building compatible threadiverse implementations</li>\n</ul>\n<p dir=\"auto\">The main action item involved <strong>the genesis of an informal working group for the threadiverse</strong>, in order to align our disparate implementations toward a common path.</p>\n<p dir=\"auto\">We intend to meet monthly at first, with the first meeting likely sometime early-to-mid April.</p>\n<p dir=\"auto\">The topic of the first WG call is: <strong>Representation of the higherlevel collection of Notes (posts, etc.) — Article vs. Page, etc?</strong></p>\n<p dir=\"auto\">Interested?</p>\n<ul>\n<li>Publicly reply to this post (NodeBB does not support non-public posts at this time) if you'd like to join the list</li>\n<li>If you prefer to remain private, please email <a href=\"mailto:julian@nodebb.org\" rel=\"nofollow ugc\">julian@nodebb.org</a></li>\n</ul>\n<hr />\n<p dir=\"auto\">As an aside, I'd love to try something new and attempt tokeep as much of this as I can on the social web. Can you do me a favour and boost this to your followers?</p>\n",
"source": {
"content": "NodeBB is at this year's FediForum, and one of the breakout sessions centred around **the Theadiverse**, the subset of ActivityPub-enabled applications built around a topic-centric model of content representation.\n\nSome of the topic touched upon included:\n\n* Aligning on a standard representation for collections of Notes\n* FEP-1b12 — Group federation and implementation thereof by Lemmy, et al.\n* Offering a comparatively more feature-rich experience vis-a-vis restrictions re: microblogging\n* Going forward: collaborating on building compatible threadiverse implementations\n\nThe main action item involved **the genesis of an informal working group for the threadiverse**, in order to align our disparate implementations toward a common path.\n\nWe intend to meet monthly at first, with the first meeting likely sometime early-to-mid April.\n\nThe topic of the first WG call is: **Representation of the higher level collection of Notes (posts, etc.) — Article vs. Page, etc?**\n\nInterested?\n\n* Publicly reply to this post (NodeBB does not support non-public postsat this time) if you'd like to join the list\n* If you prefer to remain private, please email julian@nodebb.org\n\n----\n\nAs an aside, I'd love to try something new and attempt to keep as much of this as I can on the social web. Can you do me a favour and boost this to your followers?",
"mediaType": "text/markdown"
},
"tag": [
{
"type": "Hashtag",
"href": "https://community.nodebb.org/tags/fediforum",
"name": "#fediforum"
},
{
"type": "Hashtag",
"href": "https://community.nodebb.org/tags/activitypub",
"name": "#activitypub"
},
{
"type": "Hashtag",
"href": "https://community.nodebb.org/tags/threadiverse",
"name": "#threadiverse"
}
],
"attachment": []
}


@@ -0,0 +1,29 @@
{
"@context": "https://www.w3.org/ns/activitystreams",
"id": "https://community.nodebb.org/uid/2",
"url": "https://community.nodebb.org/user/julian",
"followers": "https://community.nodebb.org/uid/2/followers",
"following": "https://community.nodebb.org/uid/2/following",
"inbox": "https://community.nodebb.org/uid/2/inbox",
"outbox": "https://community.nodebb.org/uid/2/outbox",
"sharedInbox": "https://community.nodebb.org/inbox",
"type": "Person",
"name": "julian",
"preferredUsername": "julian",
"summary": "Hi! I'm Julian, one of the co-founders of NodeBB, the forum software you are using right now.\r\n\r\nI started this company with two colleagues, Baris and Andrew, in 2013, and have been doing the startup thing since (although I think at some point along the way we stopped being a startup and just became a boring ol' small business).\r\n\r\nIn my free time I rock climb, cycle, and lift weights. I live just outside Toronto, Canada, with my wife and three children.",
"icon": {
"type": "Image",
"mediaType": "image/jpeg",
"url": "https://community.nodebb.org/assets/uploads/profile/uid-2/2-profileavatar-1701457270279.jpeg"
},
"image": {
"type": "Image",
"mediaType": "image/jpeg",
"url": "https://community.nodebb.org/assets/uploads/profile/uid-2/2-profilecover-1649468285913.jpeg"
},
"publicKey": {
"id": "https://community.nodebb.org/uid/2#key",
"owner": "https://community.nodebb.org/uid/2",
"publicKeyPem": "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAzEr0sFdATahQzprS4EOT\nZq+KMc6UTbt2GDP20OrQi/P5AXAbMaQiRCRdGWhYGjnH0jicn5NnozNxRo+HchJT\nV6NOHxpsxqPCoaLeoBkhfhbSCLr2Gzil6mmfqf9TjnI7A7ZTtCc0G+n0ztyL9HwL\nkEAI178l2gckk4XKKYnEd+dyiIevExrq/ROLgwW1o428FZvlF5amKxhpVUEygRU8\nCd1hqWYs+xYDOJURCP5qEx/MmRPpV/yGMTMyF+/gcQc0TUZnhWAM2E4M+aq3aKh6\nJP/vsry+5YZPUaPWfopbT5Ijyt6ZSElp6Avkg56eTz0a5SRcjCVS6IFVPwiLlzOe\nYwIDAQAB\n-----END PUBLIC KEY-----\n"
}
}


@@ -0,0 +1,49 @@
{
"@context": ["https://www.w3.org/ns/activitystreams"],
"id": "https://pfefferle.org/lemmy-part-4/#activity#activity",
"type": "Announce",
"audience": "https://pfefferle.org/@pfefferle.org",
"published": "2024-05-03T12:32:29Z",
"updated": "2024-05-06T08:20:33Z",
"to": [
"https://www.w3.org/ns/activitystreams#Public",
"https://pfefferle.org/wp-json/activitypub/1.0/actors/1/followers"
],
"cc": [],
"object": {
"id": "https://pfefferle.org/lemmy-part-4/#activity",
"type": "Update",
"audience": "https://pfefferle.org/@pfefferle.org",
"published": "2024-05-03T12:32:29Z",
"updated": "2024-05-06T08:20:33Z",
"to": [
"https://www.w3.org/ns/activitystreams#Public",
"https://pfefferle.org/wp-json/activitypub/1.0/actors/1/followers"
],
"cc": [],
"object": {
"id": "https://pfefferle.org/lemmy-part-4/",
"type": "Article",
"attachment": [],
"attributedTo": "https://pfefferle.org/author/pfefferle/",
"audience": "https://pfefferle.org/@pfefferle.org",
"content": "\u003Cp\u003EIdentifies one or more entities that represent the total population of entities for which the object can considered to be relevant. Identifies one or more entities that represent the total population of entities for which the object can considered to be relevant.Identifies one or more entities that represent the total population of entities for which the object can considered to be relevant.Identifies one or more entities that represent the total population of entities for which the object can considered to be relevant.Identifies one or more entities that represent the total population of entities for which the object can considered to be relevant.Identifies one or more entities that represent the total population of entities for which the object can considered to be relevant.Identifies one or more entities that represent the total population of entities for which the object can considered to be relevant. \u003C/p\u003E",
"contentMap": {
"en": "\u003Cp\u003EIdentifies one or more entities that represent the total population of entities for which the object can considered to be relevant. Identifies one or more entities that represent the total population of entities for which the object can considered to be relevant.Identifies one or more entities that represent the total population of entities for which the object can considered to be relevant.Identifies one or more entities that represent the total population of entities for which the object can considered to be relevant.Identifies one or more entities that represent the total population of entities for which the object can considered to be relevant.Identifies one or more entities that represent the total population of entities for which the object can considered to be relevant.Identifies one or more entities that represent the total population of entities for which the object can considered to be relevant. \u003C/p\u003E"
},
"name": "Lemmy (Part 4)",
"published": "2024-05-03T12:32:29Z",
"summary": "Identifies one or more entities that represent the total population of entities for which the object can considered to be relevant. Identifies one or more entities that represent the total population of entities for which the object can considered to be relevant.Identifies one or more entities that represent the total population of entities for which the object can considered to be relevant.Identifies one or more entities that represent the total population of entities for which the object [...]",
"tag": [],
"updated": "2024-05-06T08:20:33Z",
"url": "https://pfefferle.org/lemmy-part-4/",
"to": [
"https://www.w3.org/ns/activitystreams#Public",
"https://pfefferle.org/wp-json/activitypub/1.0/actors/1/followers"
],
"cc": []
},
"actor": "https://pfefferle.org/author/pfefferle/"
},
"actor": "https://pfefferle.org/@pfefferle.org"
}


@@ -0,0 +1,66 @@
{
"@context": [
"https://www.w3.org/ns/activitystreams",
"https://w3id.org/security/v1",
"https://purl.archive.org/socialweb/webfinger",
{
"schema": "http://schema.org#",
"toot": "http://joinmastodon.org/ns#",
"webfinger": "https://webfinger.net/#",
"lemmy": "https://join-lemmy.org/ns#",
"manuallyApprovesFollowers": "as:manuallyApprovesFollowers",
"PropertyValue": "schema:PropertyValue",
"value": "schema:value",
"Hashtag": "as:Hashtag",
"featured": {
"@id": "toot:featured",
"@type": "@id"
},
"featuredTags": {
"@id": "toot:featuredTags",
"@type": "@id"
},
"moderators": {
"@id": "lemmy:moderators",
"@type": "@id"
},
"postingRestrictedToMods": "lemmy:postingRestrictedToMods",
"discoverable": "toot:discoverable",
"indexable": "toot:indexable",
"resource": "webfinger:resource"
}
],
"id": "https://pfefferle.org/@pfefferle.org",
"type": "Group",
"attachment": [],
"attributedTo": "https://pfefferle.org/wp-json/activitypub/1.0/collections/moderators",
"name": "Matthias Pfefferle",
"icon": {
"type": "Image",
"url": "https://pfefferle.org/wp-content/uploads/2023/06/cropped-BeLItBV-_400x400.jpg"
},
"published": "2024-04-03T16:58:22Z",
"summary": "<p>Webworker, blogger und podcaster</p>\n",
"tag": [],
"url": "https://pfefferle.org/@pfefferle.org",
"inbox": "https://pfefferle.org/wp-json/activitypub/1.0/users/0/inbox",
"outbox": "https://pfefferle.org/wp-json/activitypub/1.0/users/0/outbox",
"following": "https://pfefferle.org/wp-json/activitypub/1.0/users/0/following",
"followers": "https://pfefferle.org/wp-json/activitypub/1.0/users/0/followers",
"preferredUsername": "pfefferle.org",
"endpoints": {
"sharedInbox": "https://pfefferle.org/wp-json/activitypub/1.0/inbox"
},
"publicKey": {
"id": "https://pfefferle.org/@pfefferle.org#main-key",
"owner": "https://pfefferle.org/@pfefferle.org",
"publicKeyPem": "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAuq8xeLMFcaCwPFBhgMRE\n/dDh2XKoNXFXnixctmK8BXSuuLMxucm3I/8NyhIvb3LqU+uP1fO8F0ecUbk2sN+x\nKag5vIV6yKXzJ8ILMWQ9AaELpXDmMZqL0zal0LUJRAOkDgPDovDAoq6tx++yDoV0\njdVbf9CoZKit1cz2ZrEuE5dswq3J/z9+c6POkhCkWEX5TPJzkOrmnjkvrXxGHUJ2\nA3+P+VaZhd5cmvqYosSpYNJshxCdev12pIF78OnYLiYiyXlgGHU+7uQR0M4tTcij\n6cUdLkms9m+b6H3ctXntPn410e5YLFPldjAYzQB5wHVdFZsWtyrbqfYdCa+KkKpA\nvwIDAQAB\n-----END PUBLIC KEY-----\n"
},
"manuallyApprovesFollowers": false,
"featured": "https://pfefferle.org/wp-json/activitypub/1.0/users/0/collections/featured",
"moderators": "https://pfefferle.org/wp-json/activitypub/1.0/collections/moderators",
"discoverable": true,
"indexable": true,
"webfinger": "pfefferle.org@pfefferle.org",
"postingRestrictedToMods": true
}


@@ -0,0 +1,24 @@
{
"@context": [
"https://www.w3.org/ns/activitystreams",
{
"Hashtag": "as:Hashtag"
}
],
"id": "https://pfefferle.org?c=148",
"type": "Note",
"attributedTo": "https://pfefferle.org/author/pfefferle/",
"content": "<p>Nice! Hello from WordPress!</p>",
"contentMap": {
"en": "<p>Nice! Hello from WordPress!</p>"
},
"inReplyTo": "https://socialhub.activitypub.rocks/ap/object/ce040f1ead95964f6dbbf1084b81432d",
"published": "2024-04-30T15:21:13Z",
"tag": [],
"url": "https://pfefferle.org?c=148",
"to": [
"https://www.w3.org/ns/activitystreams#Public",
"https://pfefferle.org/wp-json/activitypub/1.0/users/0/followers"
],
"cc": []
}


@@ -0,0 +1,26 @@
{
"@context": [
"https://www.w3.org/ns/activitystreams",
{
"Hashtag": "as:Hashtag"
}
],
"id": "https://pfefferle.org/this-is-a-test-federation/",
"type": "Article",
"attachment": [],
"attributedTo": "https://pfefferle.org/author/pfefferle/",
"content": "<p>with Discource!</p>",
"contentMap": {
"en": "<p>with Discource!</p>"
},
"name": "This is a test-federation",
"published": "2024-04-30T15:16:41Z",
"summary": "with Discource! [...]",
"tag": [],
"url": "https://pfefferle.org/this-is-a-test-federation/",
"to": [
"https://www.w3.org/ns/activitystreams#Public",
"https://pfefferle.org/wp-json/activitypub/1.0/users/1/followers"
],
"cc": []
}


@@ -0,0 +1,74 @@
{
"@context": [
"https://www.w3.org/ns/activitystreams",
"https://w3id.org/security/v1",
"https://purl.archive.org/socialweb/webfinger",
{
"schema": "http://schema.org#",
"toot": "http://joinmastodon.org/ns#",
"webfinger": "https://webfinger.net/#",
"lemmy": "https://join-lemmy.org/ns#",
"manuallyApprovesFollowers": "as:manuallyApprovesFollowers",
"PropertyValue": "schema:PropertyValue",
"value": "schema:value",
"Hashtag": "as:Hashtag",
"featured": {
"@id": "toot:featured",
"@type": "@id"
},
"featuredTags": {
"@id": "toot:featuredTags",
"@type": "@id"
},
"moderators": {
"@id": "lemmy:moderators",
"@type": "@id"
},
"postingRestrictedToMods": "lemmy:postingRestrictedToMods",
"discoverable": "toot:discoverable",
"indexable": "toot:indexable",
"resource": "webfinger:resource"
}
],
"id": "https://pfefferle.org/author/pfefferle/",
"type": "Person",
"attachment": [
{
"type": "PropertyValue",
"name": "Blog",
"value": "<a rel=\"me\" title=\"https://pfefferle.org/\" target=\"_blank\" href=\"https://pfefferle.org/\">pfefferle.org</a>"
},
{
"type": "PropertyValue",
"name": "Profile",
"value": "<a rel=\"me\" title=\"https://pfefferle.org/author/pfefferle/\" target=\"_blank\" href=\"https://pfefferle.org/author/pfefferle/\">pfefferle.org</a>"
}
],
"name": "Matthias Pfefferle",
"icon": {
"type": "Image",
"url": "https://secure.gravatar.com/avatar/a2bdca7870e859658cece96c044b3be5?s=120&#038;d=mm&#038;r=g"
},
"published": "2014-02-10T15:23:08Z",
"summary": "<p>Ich arbeite als Open Web Lead für Automattic.</p>\n",
"tag": [],
"url": "https://pfefferle.org/author/pfefferle/",
"inbox": "https://pfefferle.org/wp-json/activitypub/1.0/users/1/inbox",
"outbox": "https://pfefferle.org/wp-json/activitypub/1.0/users/1/outbox",
"following": "https://pfefferle.org/wp-json/activitypub/1.0/users/1/following",
"followers": "https://pfefferle.org/wp-json/activitypub/1.0/users/1/followers",
"preferredUsername": "matthias",
"endpoints": {
"sharedInbox": "https://pfefferle.org/wp-json/activitypub/1.0/inbox"
},
"publicKey": {
"id": "https://pfefferle.org/author/pfefferle/#main-key",
"owner": "https://pfefferle.org/author/pfefferle/",
"publicKeyPem": "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAvTA5RA40nOsso04RSwyX\nHXTojRPUMlIlArDcSy3M5GUJp9/xbxSUOdBjqd31KKB1GIi3vrLmD1Qi/ZqS95Qy\nw2Zd3xOsCg+o9bsyOG+O6Y8Lu+HEB5JKLUbNHdiSviakJ8wGadH9Wm4WIiN20y+q\n/u6lgxgiWfZ2CFCN6SOc28fUKi9NmKvXK+M12BhFfy1tC5KWXKDm0UbfI1+dmqhR\n3Ffe6vEsCI/YIVVdWxQ9kouOd0XSHOGdslktkepRO7IP9i9TdwyeCa0WWRoeO5Wa\ntVpc1Y0WuNbTM2ksIXTg0G+rO1/6KO/hrHnGu3RCfb/ZIHK5L/aWYb9B3PG3LyKV\n+wIDAQAB\n-----END PUBLIC KEY-----\n"
},
"manuallyApprovesFollowers": false,
"featured": "https://pfefferle.org/wp-json/activitypub/1.0/users/1/collections/featured",
"discoverable": true,
"indexable": true,
"webfinger": "matthias@pfefferle.org"
}


@@ -39,7 +39,10 @@ use lemmy_db_schema::{
   },
   traits::{Bannable, Crud, Followable},
 };
-use lemmy_utils::error::{LemmyError, LemmyResult};
+use lemmy_utils::{
+  error::{LemmyError, LemmyResult},
+  LemmyErrorType,
+};
 use url::Url;

 impl BlockUser {
@@ -129,7 +132,11 @@ impl ActivityHandler for BlockUser {
     verify_is_public(&self.to, &self.cc)?;
     match self.target.dereference(context).await? {
       SiteOrCommunity::Site(site) => {
-        let domain = self.object.inner().domain().expect("url needs domain");
+        let domain = self
+          .object
+          .inner()
+          .domain()
+          .ok_or(LemmyErrorType::UrlWithoutDomain)?;
         if context.settings().hostname == domain {
           return Err(
             anyhow!("Site bans from remote instance can't affect user's home instance").into(),


@@ -94,7 +94,12 @@ impl AnnounceActivity {
       actor: community.id().into(),
       to: vec![public()],
       object: IdOrNestedObject::NestedObject(object),
-      cc: vec![community.followers_url.clone().into()],
+      cc: community
+        .followers_url
+        .clone()
+        .map(Into::into)
+        .into_iter()
+        .collect(),
       kind: AnnounceType::Announce,
       id,
     })
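Editor's note: the `cc` construction above now tolerates a missing followers URL instead of assuming one exists. A tiny sketch of the `Option::into_iter` trick it relies on, with simplified stand-in types:

```rust
// `Option<T>` iterates over zero or one items, so a missing followers URL
// simply yields an empty cc list instead of requiring a separate branch.
fn cc_from_followers(followers_url: Option<String>) -> Vec<String> {
  followers_url.into_iter().collect()
}

fn main() {
  assert_eq!(cc_from_followers(Some("https://example.org/followers".into())).len(), 1);
  assert!(cc_from_followers(None).is_empty());
}
```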


@@ -105,7 +105,7 @@ impl ActivityHandler for UpdateCommunity {
       last_refreshed_at: Some(naive_now()),
       icon: Some(self.object.icon.map(|i| i.url.into())),
       banner: Some(self.object.image.map(|i| i.url.into())),
-      followers_url: Some(self.object.followers.into()),
+      followers_url: self.object.followers.map(Into::into),
       inbox_url: Some(self.object.inbox.into()),
       shared_inbox_url: Some(self.object.endpoints.map(|e| e.shared_inbox.into())),
       moderators_url: self.object.attributed_to.map(Into::into),


@@ -19,7 +19,7 @@ use activitypub_federation::{
   config::Data,
   fetch::object_id::ObjectId,
   kinds::public,
-  protocol::verification::verify_domains_match,
+  protocol::verification::{verify_domains_match, verify_urls_match},
   traits::{ActivityHandler, Actor, Object},
 };
 use lemmy_api_common::{
@@ -133,6 +133,7 @@ impl ActivityHandler for CreateOrUpdateNote {
     verify_domains_match(self.actor.inner(), self.object.id.inner())?;
     check_community_deleted_or_removed(&community)?;
     check_post_deleted_or_removed(&post)?;
+    verify_urls_match(self.actor.inner(), self.object.attributed_to.inner())?;
     ApubComment::verify(&self.object, self.actor.inner(), context).await?;
     Ok(())
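Editor's note: the added `verify_urls_match` call ties the activity's actor to the comment's `attributedTo`. A rough, self-contained approximation of what such a check enforces; this is a stand-in, not the `activitypub_federation` implementation:

```rust
use url::Url;

// Hypothetical stand-in: reject activities whose actor differs from the
// object's attributedTo, so a sender cannot attribute content to someone else.
fn verify_urls_match_sketch(actor: &Url, attributed_to: &Url) -> Result<(), String> {
  if actor == attributed_to {
    Ok(())
  } else {
    Err(format!("url mismatch: {actor} != {attributed_to}"))
  }
}
```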


@@ -66,7 +66,6 @@ impl CreateOrUpdatePage {
     kind: CreateOrUpdateType,
     context: Data<LemmyContext>,
   ) -> LemmyResult<()> {
-    let post = ApubPost(post);
     let community_id = post.community_id;
     let person: ApubPerson = Person::read(&mut context.pool(), person_id)
       .await?
@@ -78,7 +77,7 @@
       .into();
     let create_or_update =
-      CreateOrUpdatePage::new(post, &person, &community, kind, &context).await?;
+      CreateOrUpdatePage::new(post.into(), &person, &community, kind, &context).await?;
     let is_mod_action = create_or_update.object.is_mod_action(&context).await?;
     let activity = AnnouncableActivities::CreateOrUpdatePost(create_or_update);
     send_activity_in_community(


@@ -9,7 +9,7 @@ use crate::{
 };
 use activitypub_federation::{
   config::Data,
-  protocol::verification::verify_domains_match,
+  protocol::verification::{verify_domains_match, verify_urls_match},
   traits::{ActivityHandler, Actor, Object},
 };
 use lemmy_api_common::context::LemmyContext;
@@ -61,6 +61,7 @@ impl ActivityHandler for CreateOrUpdateChatMessage {
     verify_person(&self.actor, context).await?;
     verify_domains_match(self.actor.inner(), self.object.id.inner())?;
     verify_domains_match(self.to[0].inner(), self.object.to[0].inner())?;
+    verify_urls_match(self.actor.inner(), self.object.attributed_to.inner())?;
     ApubPrivateMessage::verify(&self.object, self.actor.inner(), context).await?;
     Ok(())
   }


@@ -1,6 +1,6 @@
 use crate::{
   activity_lists::AnnouncableActivities,
-  objects::{community::ApubCommunity, post::ApubPost},
+  objects::community::ApubCommunity,
   protocol::{
     activities::{
       community::announce::AnnounceActivity,
@@ -18,11 +18,8 @@ use activitypub_federation::{
 };
 use futures::future::join_all;
 use lemmy_api_common::{context::LemmyContext, utils::generate_outbox_url};
-use lemmy_db_schema::{
-  source::{person::Person, post::Post},
-  traits::Crud,
-  utils::FETCH_LIMIT_MAX,
-};
+use lemmy_db_schema::{utils::FETCH_LIMIT_MAX, SortType};
+use lemmy_db_views::{post_view::PostQuery, structs::SiteView};
 use lemmy_utils::{
   error::{LemmyError, LemmyResult},
   LemmyErrorType,
@@ -41,19 +38,30 @@ impl Collection for ApubCommunityOutbox {
   #[tracing::instrument(skip_all)]
   async fn read_local(owner: &Self::Owner, data: &Data<Self::DataType>) -> LemmyResult<Self::Kind> {
-    let post_list: Vec<ApubPost> = Post::list_for_community(&mut data.pool(), owner.id)
-      .await?
-      .into_iter()
-      .map(Into::into)
-      .collect();
+    let site = SiteView::read_local(&mut data.pool())
+      .await?
+      .ok_or(LemmyErrorType::LocalSiteNotSetup)?
+      .site;
+
+    let post_views = PostQuery {
+      community_id: Some(owner.id),
+      sort: Some(SortType::New),
+      limit: Some(FETCH_LIMIT_MAX),
+      ..Default::default()
+    }
+    .list(&site, &mut data.pool())
+    .await?;
+
     let mut ordered_items = vec![];
-    for post in post_list {
-      let person = Person::read(&mut data.pool(), post.creator_id)
-        .await?
-        .ok_or(LemmyErrorType::CouldntFindPerson)?
-        .into();
-      let create =
-        CreateOrUpdatePage::new(post, &person, owner, CreateOrUpdateType::Create, data).await?;
+    for post_view in post_views {
+      let create = CreateOrUpdatePage::new(
+        post_view.post.into(),
+        &post_view.creator.into(),
+        owner,
+        CreateOrUpdateType::Create,
+        data,
+      )
+      .await?;
       let announcable = AnnouncableActivities::CreateOrUpdatePost(create);
       let announce = AnnounceActivity::new(announcable.try_into()?, owner, data)?;
       ordered_items.push(announce);
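Editor's note: the outbox now goes through `PostQuery` with `..Default::default()` instead of a bespoke SQL helper. A hedged sketch of that query-object pattern, with placeholder types rather than Lemmy's real views:

```rust
// Placeholder query struct: set only the fields that matter and let the rest
// default, mirroring the struct-update pattern used above.
#[derive(Default, Debug)]
struct PostQuerySketch {
  community_id: Option<i32>,
  sort: Option<&'static str>,
  limit: Option<i64>,
  page: Option<i64>,
}

fn outbox_query(community_id: i32, fetch_limit_max: i64) -> PostQuerySketch {
  PostQuerySketch {
    community_id: Some(community_id),
    sort: Some("New"),
    limit: Some(fetch_limit_max),
    ..Default::default()
  }
}
```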


@@ -128,7 +128,14 @@ pub(crate) mod tests {
   use crate::protocol::objects::{group::Group, tombstone::Tombstone};
   use actix_web::body::to_bytes;
   use lemmy_db_schema::{
-    source::{community::CommunityInsertForm, instance::Instance},
+    newtypes::InstanceId,
+    source::{
+      community::CommunityInsertForm,
+      instance::Instance,
+      local_site::{LocalSite, LocalSiteInsertForm},
+      local_site_rate_limit::{LocalSiteRateLimit, LocalSiteRateLimitInsertForm},
+      site::{Site, SiteInsertForm},
+    },
     traits::Crud,
     CommunityVisibility,
   };
@@ -142,6 +149,8 @@
   ) -> LemmyResult<(Instance, Community)> {
     let instance =
       Instance::read_or_create(&mut context.pool(), "my_domain.tld".to_string()).await?;
+    create_local_site(context, instance.id).await?;
+
     let community_form = CommunityInsertForm::builder()
       .name("testcom6".to_string())
       .title("nada".to_owned())
@@ -154,6 +163,28 @@
     Ok((instance, community))
   }

+  /// Necessary for the community outbox fetching
+  async fn create_local_site(
+    context: &Data<LemmyContext>,
+    instance_id: InstanceId,
+  ) -> LemmyResult<()> {
+    // Create a local site, since this is necessary for community fetching.
+    let site_form = SiteInsertForm::builder()
+      .name("test site".to_string())
+      .instance_id(instance_id)
+      .build();
+    let site = Site::create(&mut context.pool(), &site_form).await?;
+
+    let local_site_form = LocalSiteInsertForm::builder().site_id(site.id).build();
+    let local_site = LocalSite::create(&mut context.pool(), &local_site_form).await?;
+
+    let local_site_rate_limit_form = LocalSiteRateLimitInsertForm::builder()
+      .local_site_id(local_site.id)
+      .build();
+    LocalSiteRateLimit::create(&mut context.pool(), &local_site_rate_limit_form).await?;
+    Ok(())
+  }
+
   async fn decode_response<T: DeserializeOwned>(res: HttpResponse) -> LemmyResult<T> {
     let body = to_bytes(res.into_body()).await.unwrap();
     let body = std::str::from_utf8(&body)?;
@@ -164,6 +195,7 @@
   #[serial]
   async fn test_get_community() -> LemmyResult<()> {
     let context = LemmyContext::init_test_context().await;
+    let (instance, community) = init(false, CommunityVisibility::Public, &context).await?;

     // fetch invalid community
     let query = CommunityQuery {
@@ -172,8 +204,6 @@
     let res = get_apub_community_http(query.into(), context.reset_request_count()).await;
     assert!(res.is_err());

-    let (instance, community) = init(false, CommunityVisibility::Public, &context).await?;
-
     // fetch valid community
     let query = CommunityQuery {
       community_name: community.name.clone(),


@@ -20,7 +20,8 @@ use lemmy_db_schema::{
 };
 use lemmy_utils::error::{LemmyErrorType, LemmyResult};
 use serde::{Deserialize, Serialize};
-use std::ops::Deref;
+use std::{ops::Deref, time::Duration};
+use tokio::time::timeout;
 use url::Url;

 mod comment;
@@ -30,13 +31,22 @@ mod post;
 pub mod routes;
 pub mod site;

+const INCOMING_ACTIVITY_TIMEOUT: Duration = Duration::from_secs(9);
+
 pub async fn shared_inbox(
   request: HttpRequest,
   body: Bytes,
   data: Data<LemmyContext>,
 ) -> LemmyResult<HttpResponse> {
-  receive_activity::<SharedInboxActivities, UserOrCommunity, LemmyContext>(request, body, &data)
+  let receive_fut =
+    receive_activity::<SharedInboxActivities, UserOrCommunity, LemmyContext>(request, body, &data);
+  // Set a timeout shorter than `REQWEST_TIMEOUT` for processing incoming activities. This is to
+  // avoid taking a long time to process an incoming activity when a required data fetch times out.
+  // In this case our own instance would timeout and be marked as dead by the sender. Better to
+  // consider the activity broken and move on.
+  timeout(INCOMING_ACTIVITY_TIMEOUT, receive_fut)
     .await
+    .map_err(|_| LemmyErrorType::InboxTimeout)?
 }

 /// Convert the data to json and turn it into an HTTP Response with the correct ActivityPub
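Editor's note: the inbox handler above bounds activity processing with `tokio::time::timeout`. A minimal runnable sketch of the same wrapping pattern, with a placeholder future and error type instead of Lemmy's:

```rust
use std::time::Duration;
use tokio::time::timeout;

const INCOMING_ACTIVITY_TIMEOUT: Duration = Duration::from_secs(9);

// Placeholder for the real activity-processing future.
async fn process_activity() -> Result<(), String> {
  Ok(())
}

async fn shared_inbox_sketch() -> Result<(), String> {
  let receive_fut = process_activity();
  // If processing outlives the deadline, map the elapsed error into our own
  // error type; otherwise propagate the inner result unchanged.
  timeout(INCOMING_ACTIVITY_TIMEOUT, receive_fut)
    .await
    .map_err(|_| "inbox timeout".to_string())?
}

#[tokio::main]
async fn main() {
  shared_inbox_sketch().await.unwrap();
}
```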


@@ -78,7 +78,10 @@ impl UrlVerifier for VerifyUrlData {
 /// - URL not being in the blocklist (if it is active)
 #[tracing::instrument(skip(local_site_data))]
 fn check_apub_id_valid(apub_id: &Url, local_site_data: &LocalSiteData) -> LemmyResult<()> {
-  let domain = apub_id.domain().expect("apud id has domain").to_string();
+  let domain = apub_id
+    .domain()
+    .ok_or(LemmyErrorType::UrlWithoutDomain)?
+    .to_string();

   if !local_site_data
     .local_site
@@ -158,7 +161,10 @@
   is_strict: bool,
   context: &LemmyContext,
 ) -> LemmyResult<()> {
-  let domain = apub_id.domain().expect("apud id has domain").to_string();
+  let domain = apub_id
+    .domain()
+    .ok_or(LemmyErrorType::UrlWithoutDomain)?
+    .to_string();
   let local_instance = context
     .settings()
     .get_hostname_without_port()
@@ -185,7 +191,10 @@
     .expect("local hostname is valid");
   allowed_and_local.push(local_instance);

-  let domain = apub_id.domain().expect("apud id has domain").to_string();
+  let domain = apub_id
+    .domain()
+    .ok_or(LemmyErrorType::UrlWithoutDomain)?
+    .to_string();
   if !allowed_and_local.contains(&domain) {
     Err(LemmyErrorType::FederationDisabledByStrictAllowList)?
   }
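Editor's note: the three hunks above replace `expect("... has domain")` with `ok_or(LemmyErrorType::UrlWithoutDomain)?`. A compact sketch of the pattern, using a stand-in error enum (not Lemmy's) and the fact that `url::Url::domain()` returns `Option<&str>`:

```rust
use url::Url;

#[derive(Debug)]
enum ErrorSketch {
  UrlWithoutDomain,
}

// Bubble up a typed error instead of panicking when a URL (for example a
// data: or mailto: URL) has no domain component.
fn apub_domain(apub_id: &Url) -> Result<String, ErrorSketch> {
  Ok(
    apub_id
      .domain()
      .ok_or(ErrorSketch::UrlWithoutDomain)?
      .to_string(),
  )
}
```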


@@ -54,7 +54,10 @@ pub async fn collect_non_local_mentions(
     name: Some(format!(
       "@{}@{}",
       &parent_creator.name,
-      &parent_creator.id().domain().expect("has domain")
+      &parent_creator
+        .id()
+        .domain()
+        .ok_or(LemmyErrorType::UrlWithoutDomain)?
     )),
     kind: MentionType::Mention,
   };


@@ -113,7 +113,7 @@ impl Object for ApubCommunity {
       featured: Some(generate_featured_url(&self.actor_id)?.into()),
       inbox: self.inbox_url.clone().into(),
       outbox: generate_outbox_url(&self.actor_id)?.into(),
-      followers: self.followers_url.clone().into(),
+      followers: self.followers_url.clone().map(Into::into),
       endpoints: self.shared_inbox_url.clone().map(|s| Endpoints {
         shared_inbox: s.into(),
       }),
@@ -164,7 +164,7 @@ impl Object for ApubCommunity {
       last_refreshed_at: Some(naive_now()),
       icon,
       banner,
-      followers_url: Some(group.followers.clone().into()),
+      followers_url: group.followers.clone().map(Into::into),
       inbox_url: Some(group.inbox.into()),
       shared_inbox_url: group.endpoints.map(|e| e.shared_inbox.into()),
       moderators_url: group.attributed_to.clone().map(Into::into),
@@ -187,11 +187,9 @@ impl Object for ApubCommunity {
     let context_ = context.reset_request_count();
     spawn_try_task(async move {
       group.outbox.dereference(&community_, &context_).await.ok();
-      group
-        .followers
-        .dereference(&community_, &context_)
-        .await
-        .ok();
+      if let Some(followers) = group.followers {
+        followers.dereference(&community_, &context_).await.ok();
+      }
       if let Some(featured) = group.featured {
         featured.dereference(&community_, &context_).await.ok();
       }
@@ -275,7 +273,9 @@ pub(crate) mod tests {
     // change these links so they dont fetch over the network
     json.attributed_to = None;
     json.outbox = CollectionId::parse("https://enterprise.lemmy.ml/c/tenforward/not_outbox")?;
-    json.followers = CollectionId::parse("https://enterprise.lemmy.ml/c/tenforward/not_followers")?;
+    json.followers = Some(CollectionId::parse(
+      "https://enterprise.lemmy.ml/c/tenforward/not_followers",
+    )?);
     let url = Url::parse("https://enterprise.lemmy.ml/c/tenforward")?;

     ApubCommunity::verify(&json, &url, &context2).await?;


@@ -45,6 +45,7 @@ use lemmy_utils::{
     markdown::markdown_to_html,
     slurs::{check_slurs, check_slurs_opt},
   },
+  LemmyErrorType,
 };
 use std::ops::Deref;
 use tracing::debug;
@@ -99,7 +100,7 @@ impl Object for ApubSite {
       kind: ApplicationType::Application,
       id: self.id().into(),
       name: self.name.clone(),
-      preferred_username: data.domain().to_string(),
+      preferred_username: Some(data.domain().to_string()),
       content: self.sidebar.as_ref().map(|d| markdown_to_html(d)),
       source: self.sidebar.clone().map(Source::new),
       summary: self.description.clone(),
@@ -137,7 +138,11 @@ impl Object for ApubSite {
   #[tracing::instrument(skip_all)]
   async fn from_json(apub: Self::Kind, context: &Data<Self::DataType>) -> LemmyResult<Self> {
-    let domain = apub.id.inner().domain().expect("group id has domain");
+    let domain = apub
+      .id
+      .inner()
+      .domain()
+      .ok_or(LemmyErrorType::UrlWithoutDomain)?;
     let instance = DbInstance::read_or_create(&mut context.pool(), domain.to_string()).await?;
     let local_site = LocalSite::read(&mut context.pool()).await.ok();
@@ -210,7 +215,9 @@ pub(in crate::objects) async fn fetch_instance_actor_for_object<T: Into<Url> + C
     Err(e) => {
       // Failed to fetch instance actor, its probably not a lemmy instance
       debug!("Failed to dereference site for {}: {}", &instance_id, e);
-      let domain = instance_id.domain().expect("has domain");
+      let domain = instance_id
+        .domain()
+        .ok_or(LemmyErrorType::UrlWithoutDomain)?;
       Ok(
         DbInstance::read_or_create(&mut context.pool(), domain.to_string())
           .await?


@@ -96,4 +96,10 @@ mod tests {
     test_json::<Report>("assets/mbin/activities/flag.json")?;
     Ok(())
   }
+
+  #[test]
+  fn test_parse_wordpress_activities() -> LemmyResult<()> {
+    test_json::<AnnounceActivity>("assets/wordpress/activities/announce.json")?;
+    Ok(())
+  }
 }


@@ -45,7 +45,7 @@ pub struct Group {
   /// username, set at account creation and usually fixed after that
   pub(crate) preferred_username: String,
   pub(crate) inbox: Url,
-  pub(crate) followers: CollectionId<ApubCommunityFollower>,
+  pub(crate) followers: Option<CollectionId<ApubCommunityFollower>>,
   pub(crate) public_key: PublicKey,

   /// title

@@ -22,7 +22,7 @@ pub struct Instance {
   /// site name
   pub(crate) name: String,
   /// instance domain, necessary for mastodon authorized fetch
-  pub(crate) preferred_username: String,
+  pub(crate) preferred_username: Option<String>,
   pub(crate) inbox: Url,
   /// mandatory field in activitypub, lemmy currently serves an empty outbox
   pub(crate) outbox: Url,


@@ -190,4 +190,29 @@ mod tests {
     test_json::<Person>("assets/mobilizon/objects/person.json")?;
     Ok(())
   }
+
+  #[test]
+  fn test_parse_object_discourse() -> LemmyResult<()> {
+    test_json::<Group>("assets/discourse/objects/group.json")?;
+    test_json::<Page>("assets/discourse/objects/page.json")?;
+    test_json::<Person>("assets/discourse/objects/person.json")?;
+    Ok(())
+  }
+
+  #[test]
+  fn test_parse_object_nodebb() -> LemmyResult<()> {
+    test_json::<Group>("assets/nodebb/objects/group.json")?;
+    test_json::<Page>("assets/nodebb/objects/page.json")?;
+    test_json::<Person>("assets/nodebb/objects/person.json")?;
+    Ok(())
+  }
+
+  #[test]
+  fn test_parse_object_wordpress() -> LemmyResult<()> {
+    test_json::<Group>("assets/wordpress/objects/group.json")?;
+    test_json::<Page>("assets/wordpress/objects/page.json")?;
+    test_json::<Person>("assets/wordpress/objects/person.json")?;
+    test_json::<Note>("assets/wordpress/objects/note.json")?;
+    Ok(())
+  }
 }


@@ -42,7 +42,7 @@ pub struct Page {
   pub(crate) kind: PageType,
   pub(crate) id: ObjectId<ApubPost>,
   pub(crate) attributed_to: AttributedTo,
-  #[serde(deserialize_with = "deserialize_one_or_many")]
+  #[serde(deserialize_with = "deserialize_one_or_many", default)]
   pub(crate) to: Vec<Url>,
   // If there is inReplyTo field this is actually a comment and must not be parsed
   #[serde(deserialize_with = "deserialize_not_present", default)]
@@ -233,6 +233,10 @@ impl ActivityHandler for Page {
 #[async_trait::async_trait]
 impl InCommunity for Page {
   async fn community(&self, context: &Data<LemmyContext>) -> LemmyResult<ApubCommunity> {
+    if let Some(audience) = &self.audience {
+      return audience.dereference(context).await;
+    }
+
     let community = match &self.attributed_to {
       AttributedTo::Lemmy(_) => {
         let mut iter = self.to.iter().merge(self.cc.iter());
@@ -243,7 +247,7 @@ impl InCommunity for Page {
             break c;
           }
         } else {
-          Err(LemmyErrorType::NoCommunityFoundInCc)?
+          Err(LemmyErrorType::CouldntFindCommunity)?;
         }
       }
     }
@@ -251,11 +255,12 @@ impl InCommunity for Page {
         p.iter()
          .find(|a| a.kind == PersonOrGroupType::Group)
          .map(|a| ObjectId::<ApubCommunity>::from(a.id.clone().into_inner()))
-         .ok_or(LemmyErrorType::PageDoesNotSpecifyGroup)?
+         .ok_or(LemmyErrorType::CouldntFindCommunity)?
          .dereference(context)
          .await?
       }
     };

     if let Some(audience) = &self.audience {
       verify_community_matches(audience, community.actor_id.clone())?;
     }

View file

@ -1,5 +1,6 @@
 [package]
 name = "lemmy_db_perf"
+publish = false
 version.workspace = true
 edition.workspace = true
 description.workspace = true

View file

@ -84,22 +84,6 @@ impl Post {
       .await
   }
 
-  pub async fn list_for_community(
-    pool: &mut DbPool<'_>,
-    the_community_id: CommunityId,
-  ) -> Result<Vec<Self>, Error> {
-    let conn = &mut get_conn(pool).await?;
-    post::table
-      .filter(post::community_id.eq(the_community_id))
-      .filter(post::deleted.eq(false))
-      .filter(post::removed.eq(false))
-      .then_order_by(post::featured_community.desc())
-      .then_order_by(post::published.desc())
-      .limit(FETCH_LIMIT_MAX)
-      .load::<Self>(conn)
-      .await
-  }
-
   pub async fn list_featured_for_community(
     pool: &mut DbPool<'_>,
     the_community_id: CommunityId,

View file

@ -178,7 +178,7 @@ diesel::table! {
     icon -> Nullable<Text>,
     banner -> Nullable<Text>,
     #[max_length = 255]
-    followers_url -> Varchar,
+    followers_url -> Nullable<Varchar>,
     #[max_length = 255]
     inbox_url -> Varchar,
     #[max_length = 255]
View file

@ -49,8 +49,8 @@ pub struct Community {
   /// A URL for a banner.
   pub banner: Option<DbUrl>,
   #[cfg_attr(feature = "full", ts(skip))]
-  #[serde(skip, default = "placeholder_apub_url")]
-  pub followers_url: DbUrl,
+  #[serde(skip)]
+  pub followers_url: Option<DbUrl>,
   #[cfg_attr(feature = "full", ts(skip))]
   #[serde(skip, default = "placeholder_apub_url")]
   pub inbox_url: DbUrl,

View file

@ -33,13 +33,22 @@ use lemmy_utils::{
 use once_cell::sync::Lazy;
 use regex::Regex;
 use rustls::{
-  client::{ServerCertVerified, ServerCertVerifier},
-  ServerName,
+  client::danger::{
+    DangerousClientConfigBuilder,
+    HandshakeSignatureValid,
+    ServerCertVerified,
+    ServerCertVerifier,
+  },
+  crypto::{self, verify_tls12_signature, verify_tls13_signature},
+  pki_types::{CertificateDer, ServerName, UnixTime},
+  ClientConfig,
+  DigitallySignedStruct,
+  SignatureScheme,
 };
 use std::{
   ops::{Deref, DerefMut},
   sync::Arc,
-  time::{Duration, SystemTime},
+  time::Duration,
 };
 use tracing::error;
 use url::Url;
@ -312,8 +321,9 @@ pub fn diesel_option_overwrite_to_url_create(opt: &Option<String>) -> LemmyResul
 fn establish_connection(config: &str) -> BoxFuture<ConnectionResult<AsyncPgConnection>> {
   let fut = async {
-    let rustls_config = rustls::ClientConfig::builder()
-      .with_safe_defaults()
+    let rustls_config = DangerousClientConfigBuilder {
+      cfg: ClientConfig::builder(),
+    }
     .with_custom_certificate_verifier(Arc::new(NoCertVerifier {}))
     .with_no_client_auth();
@ -338,21 +348,55 @@ fn establish_connection(config: &str) -> BoxFuture<ConnectionResult<AsyncPgConne
   fut.boxed()
 }
 
+#[derive(Debug)]
 struct NoCertVerifier {}
 
 impl ServerCertVerifier for NoCertVerifier {
   fn verify_server_cert(
     &self,
-    _end_entity: &rustls::Certificate,
-    _intermediates: &[rustls::Certificate],
+    _end_entity: &CertificateDer,
+    _intermediates: &[CertificateDer],
     _server_name: &ServerName,
-    _scts: &mut dyn Iterator<Item = &[u8]>,
-    _ocsp_response: &[u8],
-    _now: SystemTime,
+    _ocsp: &[u8],
+    _now: UnixTime,
   ) -> Result<ServerCertVerified, rustls::Error> {
     // Will verify all (even invalid) certs without any checks (sslmode=require)
     Ok(ServerCertVerified::assertion())
   }
+
+  fn verify_tls12_signature(
+    &self,
+    message: &[u8],
+    cert: &CertificateDer,
+    dss: &DigitallySignedStruct,
+  ) -> Result<HandshakeSignatureValid, rustls::Error> {
+    verify_tls12_signature(
+      message,
+      cert,
+      dss,
+      &crypto::ring::default_provider().signature_verification_algorithms,
+    )
+  }
+
+  fn verify_tls13_signature(
+    &self,
+    message: &[u8],
+    cert: &CertificateDer,
+    dss: &DigitallySignedStruct,
+  ) -> Result<HandshakeSignatureValid, rustls::Error> {
+    verify_tls13_signature(
+      message,
+      cert,
+      dss,
+      &crypto::ring::default_provider().signature_verification_algorithms,
+    )
+  }
+
+  fn supported_verify_schemes(&self) -> Vec<SignatureScheme> {
+    crypto::ring::default_provider()
+      .signature_verification_algorithms
+      .supported_schemes()
+  }
 }
 
 pub async fn build_db_pool() -> LemmyResult<ActualDbPool> {
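Context for the rustls changes above: rustls 0.22 dropped `with_safe_defaults()`, and a verification-skipping client config now goes through the dangerous builder, either constructed directly (as in `establish_connection`) or via the equivalent `.dangerous()` helper. A rough sketch, assuming rustls 0.22+ with the ring provider and the `NoCertVerifier` defined above:

    use rustls::ClientConfig;
    use std::sync::Arc;

    // Accepts any server certificate: encrypted but unauthenticated, i.e.
    // sslmode=require semantics. Only for cases where verification is deliberately skipped.
    fn permissive_tls_config() -> ClientConfig {
      ClientConfig::builder()
        .dangerous() // same as DangerousClientConfigBuilder { cfg: ClientConfig::builder() }
        .with_custom_certificate_verifier(Arc::new(NoCertVerifier {}))
        .with_no_client_auth()
    }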

View file

@ -220,8 +220,7 @@ fn queries<'a>() -> Queries<
       query = query.filter(
         comment::content
           .ilike(fuzzy_search(&search_term))
-          .and(comment::removed.eq(false))
-          .and(comment::deleted.eq(false)),
+          .and(not(comment::removed.or(comment::deleted))),
       );
     };
@ -265,10 +264,13 @@ fn queries<'a>() -> Queries<
         .then_order_by(is_saved(person_id_join).desc());
     }
 
+    if let Some(my_id) = my_person_id {
+      let not_creator_filter = comment::creator_id.ne(my_id);
       if options.liked_only {
-        query = query.filter(score(person_id_join).eq(1));
+        query = query.filter(not_creator_filter).filter(score(my_id).eq(1));
       } else if options.disliked_only {
-        query = query.filter(score(person_id_join).eq(-1));
+        query = query.filter(not_creator_filter).filter(score(my_id).eq(-1));
+      }
     }
if !options if !options
@ -683,8 +685,10 @@ mod tests {
     .await?;
 
     assert_eq!(
-      expected_comment_view_no_person,
-      read_comment_views_no_person[0]
+      &expected_comment_view_no_person,
+      read_comment_views_no_person
+        .first()
+        .ok_or(LemmyErrorType::CouldntFindComment)?
     );
 
     let read_comment_views_with_person = CommentQuery {
@ -715,18 +719,45 @@ mod tests {
     // Make sure block set the creator blocked
     assert!(read_comment_from_blocked_person.creator_blocked);
 
+    cleanup(data, pool).await
+  }
+
+  #[tokio::test]
+  #[serial]
+  async fn test_liked_only() -> LemmyResult<()> {
+    let pool = &build_db_pool_for_tests().await;
+    let pool = &mut pool.into();
+    let data = init_data(pool).await;
+
+    // Unblock sara first
+    let timmy_unblocks_sara_form = PersonBlockForm {
+      person_id: data.timmy_local_user_view.person.id,
+      target_id: data.inserted_sara_person.id,
+    };
+    PersonBlock::unblock(pool, &timmy_unblocks_sara_form).await?;
+
+    // Like a new comment
+    let comment_like_form = CommentLikeForm {
+      comment_id: data.inserted_comment_1.id,
+      post_id: data.inserted_post.id,
+      person_id: data.timmy_local_user_view.person.id,
+      score: 1,
+    };
+    CommentLike::like(pool, &comment_like_form).await.unwrap();
+
     let read_liked_comment_views = CommentQuery {
       local_user: (Some(&data.timmy_local_user_view)),
       liked_only: (true),
       ..Default::default()
     }
     .list(pool)
-    .await?;
+    .await?
+    .into_iter()
+    .map(|c| c.comment.content)
+    .collect::<Vec<String>>();
 
-    assert_eq!(
-      expected_comment_view_with_person,
-      read_liked_comment_views[0]
-    );
+    // Shouldn't include your own post, only other peoples
+    assert_eq!(data.inserted_comment_1.content, read_liked_comment_views[0]);
 
     assert_length!(1, read_liked_comment_views);
@ -836,7 +867,7 @@ mod tests {
     // change user lang to finnish, should only show one post in finnish and one undetermined
     let finnish_id = Language::read_id_from_code(pool, Some("fi"))
       .await?
-      .unwrap();
+      .ok_or(LemmyErrorType::LanguageNotAllowed)?;
     LocalUserLanguage::update(
       pool,
       vec![finnish_id],
@ -856,7 +887,10 @@ mod tests {
     assert!(finnish_comment.is_some());
     assert_eq!(
       data.inserted_comment_2.content,
-      finnish_comment.unwrap().comment.content
+      finnish_comment
+        .ok_or(LemmyErrorType::CouldntFindComment)?
+        .comment
+        .content
     );
 
     // now show all comments with undetermined language (which is the default value)

View file

@ -396,11 +396,13 @@ fn queries<'a>() -> Queries<
     if let Some(search_term) = &options.search_term {
       let searcher = fuzzy_search(search_term);
-      query = query.filter(
-        post::name
-          .ilike(searcher.clone())
-          .or(post::body.ilike(searcher)),
-      );
+      query = query
+        .filter(
+          post::name
+            .ilike(searcher.clone())
+            .or(post::body.ilike(searcher)),
+        )
+        .filter(not(post::removed.or(post::deleted)));
     }
 
     // If there is a content warning, show nsfw content by default.
@ -450,11 +452,12 @@ fn queries<'a>() -> Queries<
       }
     }
 
-    if let Some(person_id) = my_person_id {
+    if let Some(my_id) = my_person_id {
+      let not_creator_filter = post_aggregates::creator_id.ne(my_id);
       if options.liked_only {
-        query = query.filter(score(person_id).eq(1));
+        query = query.filter(not_creator_filter).filter(score(my_id).eq(1));
       } else if options.disliked_only {
-        query = query.filter(score(person_id).eq(-1));
+        query = query.filter(not_creator_filter).filter(score(my_id).eq(-1));
       }
     };
@ -1119,6 +1122,36 @@ mod tests {
     .await?;
     assert_eq!(vec![expected_post_with_upvote], read_post_listing);
 
+    let like_removed =
+      PostLike::remove(pool, data.local_user_view.person.id, data.inserted_post.id).await?;
+    assert_eq!(1, like_removed);
+    cleanup(data, pool).await
+  }
+
+  #[tokio::test]
+  #[serial]
+  async fn post_listing_liked_only() -> LemmyResult<()> {
+    let pool = &build_db_pool().await?;
+    let pool = &mut pool.into();
+    let data = init_data(pool).await?;
+
+    // Like both the bot post, and your own
+    // The liked_only should not show your own post
+    let post_like_form = PostLikeForm {
+      post_id: data.inserted_post.id,
+      person_id: data.local_user_view.person.id,
+      score: 1,
+    };
+    PostLike::like(pool, &post_like_form).await?;
+
+    let bot_post_like_form = PostLikeForm {
+      post_id: data.inserted_bot_post.id,
+      person_id: data.local_user_view.person.id,
+      score: 1,
+    };
+    PostLike::like(pool, &bot_post_like_form).await?;
+
+    // Read the liked only
     let read_liked_post_listing = PostQuery {
       community_id: Some(data.inserted_community.id),
       liked_only: true,
@ -1126,7 +1159,9 @@ mod tests {
     }
     .list(&data.site, pool)
     .await?;
-    assert_eq!(read_post_listing, read_liked_post_listing);
+
+    // This should only include the bot post, not the one you created
+    assert_eq!(vec![POST_BY_BOT], names(&read_liked_post_listing));
 
     let read_disliked_post_listing = PostQuery {
       community_id: Some(data.inserted_community.id),
@ -1135,11 +1170,10 @@ mod tests {
     }
     .list(&data.site, pool)
     .await?;
+
+    // Should be no posts
     assert_eq!(read_disliked_post_listing, vec![]);
 
-    let like_removed =
-      PostLike::remove(pool, data.local_user_view.person.id, data.inserted_post.id).await?;
-    assert_eq!(1, like_removed);
     cleanup(data, pool).await
   }
@ -1552,7 +1586,7 @@ mod tests {
     assert!(
       &post_listings_show_hidden
         .first()
-        .expect("first post should exist")
+        .ok_or(LemmyErrorType::CouldntFindPost)?
         .hidden
     );
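The behavioural change these tests pin down: `liked_only` and `disliked_only` now exclude the viewer's own content, so a "posts I upvoted" listing contains only other people's posts. A hypothetical caller-side sketch (the `site`, `pool` and `my_person_id` bindings are assumed from surrounding test context):

    // List only posts the viewer upvoted; after this change the viewer's own
    // posts never appear here, even if self-voted.
    let upvoted_by_me = PostQuery {
      liked_only: true,
      ..Default::default()
    }
    .list(&site, pool)
    .await?;

    assert!(upvoted_by_me.iter().all(|p| p.creator.id != my_person_id));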

View file

@ -78,7 +78,7 @@ openssl = { version = "0.10.64", optional = true }
 html2text = { version = "0.6.0", optional = true }
 deser-hjson = { version = "2.2.4", optional = true }
 smart-default = { version = "0.7.1", optional = true }
-lettre = { version = "0.11.6", features = [
+lettre = { version = "0.11.7", features = [
   "tokio1",
   "tokio1-native-tls",
 ], optional = true }

View file

@ -99,8 +99,6 @@ pub enum LemmyErrorType {
   PersonIsBannedFromSite(String),
   InvalidVoteValue,
   PageDoesNotSpecifyCreator,
-  PageDoesNotSpecifyGroup,
-  NoCommunityFoundInCc,
   NoEmailSetup,
   LocalSiteNotSetup,
   EmailSmtpServerNeedsAPort,
@ -176,6 +174,8 @@ pub enum LemmyErrorType {
   InvalidUnixTime,
   InvalidBotAction,
   CantBlockLocalInstance,
+  UrlWithoutDomain,
+  InboxTimeout,
   Unknown(String),
 }

@ -1 +1 @@
-Subproject commit 866e4056656755f7b31e20094b46391e6931e3e7
+Subproject commit a4681f70a4ddf077951ed2dcc8cf90bb243d4828

View file

@ -114,6 +114,8 @@ services:
"-c", "-c",
"auto_explain.log_analyze=true", "auto_explain.log_analyze=true",
"-c", "-c",
"auto_explain.log_triggers=true",
"-c",
"track_activity_query_size=1048576", "track_activity_query_size=1048576",
] ]
ports: ports:

View file

@ -0,0 +1,3 @@
ALTER TABLE community
ALTER COLUMN followers_url SET NOT NULL;

View file

@ -0,0 +1,3 @@
ALTER TABLE community
ALTER COLUMN followers_url DROP NOT NULL;

View file

@ -4,39 +4,39 @@ set -e
echo "Do not stop in the middle of this upgrade, wait until you see the message: Upgrade complete." echo "Do not stop in the middle of this upgrade, wait until you see the message: Upgrade complete."
echo "Stopping lemmy and all services..." echo "Stopping lemmy and all services..."
sudo docker-compose stop sudo docker compose stop
echo "Make sure postgres is started..." echo "Make sure postgres is started..."
sudo docker-compose up -d postgres sudo docker compose up -d postgres
echo "Waiting..." echo "Waiting..."
sleep 20s sleep 20s
echo "Exporting the Database to 15_16.dump.sql ..." echo "Exporting the Database to 15_16.dump.sql ..."
sudo docker-compose exec -T postgres pg_dumpall -c -U lemmy > 15_16_dump.sql sudo docker compose exec -T postgres pg_dumpall -c -U lemmy | sudo tee 15_16_dump.sql > /dev/null
echo "Done." echo "Done."
echo "Stopping postgres..." echo "Stopping postgres..."
sudo docker-compose stop postgres sudo docker compose stop postgres
echo "Waiting..." echo "Waiting..."
sleep 20s sleep 20s
echo "Removing the old postgres folder" echo "Removing the old postgres folder"
sudo rm -rf volumes/postgres sudo rm -rf volumes/postgres
echo "Updating docker-compose to use postgres version 16." echo "Updating docker compose to use postgres version 16."
sed -i "s/image: postgres:.*/image: postgres:16-alpine/" ./docker-compose.yml sudo sed -i "s/image: .*postgres:.*/image: docker.io/postgres:16-alpine/" ./docker-compose.yml
echo "Starting up new postgres..." echo "Starting up new postgres..."
sudo docker-compose up -d postgres sudo docker compose up -d postgres
echo "Waiting..." echo "Waiting..."
sleep 20s sleep 20s
echo "Importing the database...." echo "Importing the database...."
cat 15_16_dump.sql | sudo docker-compose exec -T postgres psql -U lemmy sudo cat 15_16_dump.sql | sudo docker compose exec -T postgres psql -U lemmy
echo "Done." echo "Done."
echo "Starting up lemmy..." echo "Starting up lemmy..."
sudo docker-compose up -d sudo docker compose up -d
echo "A copy of your old database is at 15_16.dump.sql . You can delete this file if the upgrade went smoothly." echo "A copy of your old database is at 15_16.dump.sql . You can delete this file if the upgrade went smoothly."
echo "Upgrade complete." echo "Upgrade complete."

View file

@ -160,10 +160,10 @@ pub async fn start_lemmy_server(args: CmdArgs) -> LemmyResult<()> {
     rate_limit_cell.clone(),
   );
 
-  if !args.disable_scheduled_tasks {
-    // Schedules various cleanup tasks for the DB
-    let _scheduled_tasks = tokio::task::spawn(scheduled_tasks::setup(context.clone()));
-  }
+  let scheduled_tasks = (!args.disable_scheduled_tasks).then(|| {
+    // Schedules various cleanup tasks for the DB
+    tokio::task::spawn(scheduled_tasks::setup(context.clone()))
+  });
 
   if let Some(prometheus) = SETTINGS.prometheus.clone() {
     serve_prometheus(prometheus, context.clone())?;
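The guard above now uses `bool::then` from the standard library: the spawn only happens when scheduled tasks are enabled, and the result is kept as an `Option<JoinHandle<_>>` that the shutdown logic further down can check with `is_some()`. A standalone illustration of the pattern:

    fn main() {
      let disable = false;

      // `(!disable).then(|| ...)` runs the closure only when the flag allows it,
      // and keeps the result around as Some(value) / None.
      let task: Option<&str> = (!disable).then(|| "spawned");
      assert_eq!(task, Some("spawned"));

      assert!(true.then(|| "x").is_some());
      assert!(false.then(|| "x").is_none());
    }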
@ -218,7 +218,7 @@ pub async fn start_lemmy_server(args: CmdArgs) -> LemmyResult<()> {
   let mut interrupt = tokio::signal::unix::signal(SignalKind::interrupt())?;
   let mut terminate = tokio::signal::unix::signal(SignalKind::terminate())?;
 
-  if server.is_some() || federate.is_some() {
+  if server.is_some() || federate.is_some() || scheduled_tasks.is_some() {
     tokio::select! {
       _ = tokio::signal::ctrl_c() => {
         tracing::warn!("Received ctrl-c, shutting down gracefully...");