Merge remote-tracking branch 'upstream/main' into migration-runner

Commit 1af517f6c4 by Dull Bananas, 2024-05-17 20:28:03 +00:00

49 changed files with 2921 additions and 2536 deletions


@@ -3,6 +3,7 @@
 variables:
   - &rust_image "rust:1.77"
+  - &rust_nightly_image "rustlang/rust:nightly"
   - &install_pnpm "corepack enable pnpm"
   - &slow_check_paths
     - event: pull_request
@@ -24,15 +25,17 @@ variables:
           "diesel.toml",
           ".gitmodules",
         ]
-  # Broken for cron jobs currently, see
-  # https://github.com/woodpecker-ci/woodpecker/issues/1716
-  # clone:
-  #   git:
-  #     image: woodpeckerci/plugin-git
-  #     settings:
-  #       recursive: true
-  #       submodule_update_remote: true
+  - install_binstall: &install_binstall
+      - wget https://github.com/cargo-bins/cargo-binstall/releases/latest/download/cargo-binstall-x86_64-unknown-linux-musl.tgz
+      - tar -xvf cargo-binstall-x86_64-unknown-linux-musl.tgz
+      - cp cargo-binstall /usr/local/cargo/bin
+  - install_diesel_cli: &install_diesel_cli
+      - apt update && apt install -y lsb-release build-essential
+      - sh -c 'echo "deb https://apt.postgresql.org/pub/repos/apt $(lsb_release -cs)-pgdg main" > /etc/apt/sources.list.d/pgdg.list'
+      - wget --quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc | apt-key add -
+      - apt update && apt install -y postgresql-client-16
+      - cargo install diesel_cli --no-default-features --features postgres
+      - export PATH="$CARGO_HOME/bin:$PATH"

 steps:
   prepare_repo:
@@ -66,7 +69,7 @@ steps:
       - event: pull_request

   cargo_fmt:
-    image: rustlang/rust:nightly
+    image: *rust_nightly_image
     environment:
       # store cargo data in repo folder so that it gets cached between steps
       CARGO_HOME: .cargo_home
@@ -77,11 +80,9 @@
       - event: pull_request

   cargo_machete:
-    image: rustlang/rust:nightly
+    image: *rust_nightly_image
     commands:
-      - wget https://github.com/cargo-bins/cargo-binstall/releases/latest/download/cargo-binstall-x86_64-unknown-linux-musl.tgz
-      - tar -xvf cargo-binstall-x86_64-unknown-linux-musl.tgz
-      - cp cargo-binstall /usr/local/cargo/bin
+      - <<: *install_binstall
       - cargo binstall -y cargo-machete
      - cargo machete
     when:
@@ -132,6 +133,17 @@ steps:
       - diff config/defaults.hjson config/defaults_current.hjson
     when: *slow_check_paths

+  check_diesel_schema:
+    image: willsquire/diesel-cli
+    environment:
+      CARGO_HOME: .cargo_home
+      DATABASE_URL: postgres://lemmy:password@database:5432/lemmy
+    commands:
+      - diesel migration run
+      - diesel print-schema --config-file=diesel.toml > tmp.schema
+      - diff tmp.schema crates/db_schema/src/schema.rs
+    when: *slow_check_paths
+
   check_db_perf_tool:
     image: *rust_image
     environment:
@@ -170,6 +182,7 @@ steps:
       CARGO_HOME: .cargo_home
     commands:
       - target/lemmy_server migration run
+      - <<: *install_diesel_cli
       - diesel print-schema --config-file=diesel.toml > tmp.schema
       - diff tmp.schema crates/db_schema/src/schema.rs
     when: *slow_check_paths
@@ -239,7 +252,9 @@ steps:
   publish_to_crates_io:
     image: *rust_image
     commands:
-      - cargo install cargo-workspaces
+      - <<: *install_binstall
+      # Install cargo-workspaces
+      - cargo binstall -y cargo-workspaces
       - cp -r migrations crates/db_schema/
       - cargo workspaces publish --token "$CARGO_API_TOKEN" --from-git --allow-dirty --no-verify --allow-branch "${CI_COMMIT_TAG}" --yes custom "${CI_COMMIT_TAG}"
     secrets: [cargo_api_token]
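The anchors introduced above (`&install_binstall`, `&install_diesel_cli`) let one command list be defined once and spliced into several steps via `- <<: *name`. A minimal sketch of the same pattern, with illustrative step and anchor names rather than the real ones:

variables:
  # Define a reusable command list once, under an anchor.
  - install_tools: &install_tools
      - apt update
      - apt install -y wget

steps:
  build:
    image: rust:1.77
    commands:
      # Spliced in place, just like `- <<: *install_binstall` above.
      - <<: *install_tools
      - cargo build --release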

Cargo.lock (generated): 601 lines changed

File diff suppressed because it is too large


@@ -1,5 +1,5 @@
 [workspace.package]
-version = "0.19.4-beta.6"
+version = "0.19.4-beta.7"
 edition = "2021"
 description = "A link aggregator for the fediverse"
 license = "AGPL-3.0"
@@ -88,17 +88,17 @@ unused_self = "deny"
 unwrap_used = "deny"

 [workspace.dependencies]
-lemmy_api = { version = "=0.19.4-beta.6", path = "./crates/api" }
-lemmy_api_crud = { version = "=0.19.4-beta.6", path = "./crates/api_crud" }
-lemmy_apub = { version = "=0.19.4-beta.6", path = "./crates/apub" }
-lemmy_utils = { version = "=0.19.4-beta.6", path = "./crates/utils", default-features = false }
-lemmy_db_schema = { version = "=0.19.4-beta.6", path = "./crates/db_schema" }
-lemmy_api_common = { version = "=0.19.4-beta.6", path = "./crates/api_common" }
-lemmy_routes = { version = "=0.19.4-beta.6", path = "./crates/routes" }
-lemmy_db_views = { version = "=0.19.4-beta.6", path = "./crates/db_views" }
-lemmy_db_views_actor = { version = "=0.19.4-beta.6", path = "./crates/db_views_actor" }
-lemmy_db_views_moderator = { version = "=0.19.4-beta.6", path = "./crates/db_views_moderator" }
-lemmy_federate = { version = "=0.19.4-beta.6", path = "./crates/federate" }
+lemmy_api = { version = "=0.19.4-beta.7", path = "./crates/api" }
+lemmy_api_crud = { version = "=0.19.4-beta.7", path = "./crates/api_crud" }
+lemmy_apub = { version = "=0.19.4-beta.7", path = "./crates/apub" }
+lemmy_utils = { version = "=0.19.4-beta.7", path = "./crates/utils", default-features = false }
+lemmy_db_schema = { version = "=0.19.4-beta.7", path = "./crates/db_schema" }
+lemmy_api_common = { version = "=0.19.4-beta.7", path = "./crates/api_common" }
+lemmy_routes = { version = "=0.19.4-beta.7", path = "./crates/routes" }
+lemmy_db_views = { version = "=0.19.4-beta.7", path = "./crates/db_views" }
+lemmy_db_views_actor = { version = "=0.19.4-beta.7", path = "./crates/db_views_actor" }
+lemmy_db_views_moderator = { version = "=0.19.4-beta.7", path = "./crates/db_views_moderator" }
+lemmy_federate = { version = "=0.19.4-beta.7", path = "./crates/federate" }
 activitypub_federation = { version = "0.5.6", default-features = false, features = [
   "actix-web",
 ] }

api_tests/.npmrc (new file): 1 line added

@@ -0,0 +1 @@
+package-manager-strict=false

File diff suppressed because it is too large


@@ -661,40 +661,60 @@ test("A and G subscribe to B (center) A posts, it gets announced to G", async ()
 });

 test("Report a post", async () => {
-  // Note, this is a different one from the setup
-  let betaCommunity = (await resolveBetaCommunity(beta)).community;
-  if (!betaCommunity) {
-    throw "Missing beta community";
-  }
+  // Create post from alpha
+  let alphaCommunity = (await resolveBetaCommunity(alpha)).community!;
   await followBeta(alpha);
-  let postRes = await createPost(beta, betaCommunity.community.id);
+  let postRes = await createPost(alpha, alphaCommunity.community.id);
   expect(postRes.post_view.post).toBeDefined();
   let alphaPost = (await resolvePost(alpha, postRes.post_view.post)).post;
   if (!alphaPost) {
     throw "Missing alpha post";
   }
-  let alphaReport = (
-    await reportPost(alpha, alphaPost.post.id, randomString(10))
-  ).post_report_view.post_report;
+
+  // Send report from gamma
+  let gammaPost = (await resolvePost(gamma, alphaPost.post)).post!;
+  let gammaReport = (
+    await reportPost(gamma, gammaPost.post.id, randomString(10))
+  ).post_report_view.post_report;
+  expect(gammaReport).toBeDefined();
+
+  // Report was federated to community instance
   let betaReport = (await waitUntil(
     () =>
       listPostReports(beta).then(p =>
         p.post_reports.find(
           r =>
-            r.post_report.original_post_name === alphaReport.original_post_name,
+            r.post_report.original_post_name === gammaReport.original_post_name,
         ),
       ),
     res => !!res,
   ))!.post_report;
   expect(betaReport).toBeDefined();
   expect(betaReport.resolved).toBe(false);
-  expect(betaReport.original_post_name).toBe(alphaReport.original_post_name);
-  expect(betaReport.original_post_url).toBe(alphaReport.original_post_url);
-  expect(betaReport.original_post_body).toBe(alphaReport.original_post_body);
-  expect(betaReport.reason).toBe(alphaReport.reason);
+  expect(betaReport.original_post_name).toBe(gammaReport.original_post_name);
+  //expect(betaReport.original_post_url).toBe(gammaReport.original_post_url);
+  expect(betaReport.original_post_body).toBe(gammaReport.original_post_body);
+  expect(betaReport.reason).toBe(gammaReport.reason);
   await unfollowRemotes(alpha);
+
+  // Report was federated to poster's instance
+  let alphaReport = (await waitUntil(
+    () =>
+      listPostReports(alpha).then(p =>
+        p.post_reports.find(
+          r =>
+            r.post_report.original_post_name === gammaReport.original_post_name,
+        ),
+      ),
+    res => !!res,
+  ))!.post_report;
+  expect(alphaReport).toBeDefined();
+  expect(alphaReport.resolved).toBe(false);
+  expect(alphaReport.original_post_name).toBe(gammaReport.original_post_name);
+  //expect(alphaReport.original_post_url).toBe(gammaReport.original_post_url);
+  expect(alphaReport.original_post_body).toBe(gammaReport.original_post_body);
+  expect(alphaReport.reason).toBe(gammaReport.reason);
 });

 test("Fetch post via redirect", async () => {


@@ -29,7 +29,7 @@ pub async fn add_admin(
     .await?
     .ok_or(LemmyErrorType::ObjectNotLocal)?;

-  let added_admin = LocalUser::update(
+  LocalUser::update(
     &mut context.pool(),
     added_local_user.local_user.id,
     &LocalUserUpdateForm {
@@ -43,7 +43,7 @@ pub async fn add_admin(
   // Mod tables
   let form = ModAddForm {
     mod_person_id: local_user_view.person.id,
-    other_person_id: added_admin.person_id,
+    other_person_id: added_local_user.person.id,
     removed: Some(!data.added),
   };


@@ -1,11 +1,7 @@
 use crate::{build_totp_2fa, generate_totp_2fa_secret};
 use activitypub_federation::config::Data;
 use actix_web::web::Json;
-use lemmy_api_common::{
-  context::LemmyContext,
-  person::GenerateTotpSecretResponse,
-  sensitive::Sensitive,
-};
+use lemmy_api_common::{context::LemmyContext, person::GenerateTotpSecretResponse};
 use lemmy_db_schema::source::local_user::{LocalUser, LocalUserUpdateForm};
 use lemmy_db_views::structs::{LocalUserView, SiteView};
 use lemmy_utils::error::{LemmyErrorType, LemmyResult};
@@ -41,6 +37,6 @@ pub async fn generate_totp_secret(
     .await?;

   Ok(Json(GenerateTotpSecretResponse {
-    totp_secret_url: Sensitive::new(secret_url),
+    totp_secret_url: secret_url.into(),
   }))
 }


@@ -28,6 +28,7 @@ use lemmy_utils::{
   error::{LemmyErrorType, LemmyResult},
   utils::validation::{is_valid_bio_field, is_valid_display_name, is_valid_matrix_id},
 };
+use std::ops::Deref;

 #[tracing::instrument(skip(context))]
 pub async fn save_user_settings(
@@ -57,7 +58,7 @@ pub async fn save_user_settings(
   if let Some(Some(email)) = &email {
     let previous_email = local_user_view.local_user.email.clone().unwrap_or_default();
     // if email was changed, check that it is not taken and send verification mail
-    if &previous_email != email {
+    if previous_email.deref() != email {
       if LocalUser::is_email_taken(&mut context.pool(), email).await? {
         return Err(LemmyErrorType::EmailAlreadyExists)?;
       }
@@ -141,11 +142,7 @@ pub async fn save_user_settings(
     ..Default::default()
   };

-  // Ignore errors, because 'no fields updated' will return an error.
-  // https://github.com/LemmyNet/lemmy/issues/4076
-  LocalUser::update(&mut context.pool(), local_user_id, &local_user_form)
-    .await
-    .ok();
+  LocalUser::update(&mut context.pool(), local_user_id, &local_user_form).await?;

   // Update the vote display modes
   let vote_display_modes_form = LocalUserVoteDisplayModeUpdateForm {


@@ -9,12 +9,10 @@ use lemmy_db_schema::{
   source::{
     email_verification::EmailVerification,
     local_user::{LocalUser, LocalUserUpdateForm},
-    person::Person,
   },
-  traits::Crud,
   RegistrationMode,
 };
-use lemmy_db_views::structs::SiteView;
+use lemmy_db_views::structs::{LocalUserView, SiteView};
 use lemmy_utils::error::{LemmyErrorType, LemmyResult};

 pub async fn verify_email(
@@ -38,7 +36,7 @@ pub async fn verify_email(
   };
   let local_user_id = verification.local_user_id;

-  let local_user = LocalUser::update(&mut context.pool(), local_user_id, &form).await?;
+  LocalUser::update(&mut context.pool(), local_user_id, &form).await?;

   EmailVerification::delete_old_tokens_for_local_user(&mut context.pool(), local_user_id).await?;
@@ -46,11 +44,15 @@ pub async fn verify_email(
   if site_view.local_site.registration_mode == RegistrationMode::RequireApplication
     && site_view.local_site.application_email_admins
   {
-    let person = Person::read(&mut context.pool(), local_user.person_id)
+    let local_user = LocalUserView::read(&mut context.pool(), local_user_id)
       .await?
       .ok_or(LemmyErrorType::CouldntFindPerson)?;

-    send_new_applicant_email_to_admins(&person.name, &mut context.pool(), context.settings())
+    send_new_applicant_email_to_admins(
+      &local_user.person.name,
+      &mut context.pool(),
+      context.settings(),
+    )
     .await?;
   }


@@ -1,9 +1,10 @@
-use crate::{context::LemmyContext, sensitive::Sensitive};
+use crate::context::LemmyContext;
 use actix_web::{http::header::USER_AGENT, HttpRequest};
 use chrono::Utc;
 use jsonwebtoken::{decode, encode, DecodingKey, EncodingKey, Header, Validation};
 use lemmy_db_schema::{
   newtypes::LocalUserId,
+  sensitive::SensitiveString,
   source::login_token::{LoginToken, LoginTokenCreateForm},
 };
 use lemmy_utils::error::{LemmyErrorExt, LemmyErrorType, LemmyResult};
@@ -40,7 +41,7 @@ impl Claims {
     user_id: LocalUserId,
     req: HttpRequest,
     context: &LemmyContext,
-  ) -> LemmyResult<Sensitive<String>> {
+  ) -> LemmyResult<SensitiveString> {
     let hostname = context.settings().hostname.clone();
     let my_claims = Claims {
       sub: user_id.0.to_string(),
@@ -50,7 +51,7 @@
     let secret = &context.secret().jwt_secret;
     let key = EncodingKey::from_secret(secret.as_ref());
-    let token = encode(&Header::default(), &my_claims, &key)?;
+    let token: SensitiveString = encode(&Header::default(), &my_claims, &key)?.into();
     let ip = req
       .connection_info()
       .realip_remote_addr()
@@ -67,7 +68,7 @@
       user_agent,
     };
     LoginToken::create(&mut context.pool(), form).await?;
-    Ok(Sensitive::new(token))
+    Ok(token)
   }
 }
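For context on the token round trip: verification would use the same jsonwebtoken primitives imported above (`decode`, `DecodingKey`, `Validation`). A minimal sketch, with an illustrative claims struct and validation settings rather than Lemmy's exact ones:

use jsonwebtoken::{decode, DecodingKey, Validation};
use serde::{Deserialize, Serialize};

#[derive(Debug, Serialize, Deserialize)]
struct Claims {
  sub: String,
  iss: String,
  iat: i64,
}

fn verify_token(token: &str, jwt_secret: &str) -> Result<Claims, jsonwebtoken::errors::Error> {
  // These tokens carry no `exp` claim, so don't require or validate one.
  let mut validation = Validation::default();
  validation.required_spec_claims.remove("exp");
  validation.validate_exp = false;
  let key = DecodingKey::from_secret(jwt_secret.as_ref());
  decode::<Claims>(token, &key, &validation).map(|data| data.claims)
}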


@@ -64,7 +64,7 @@ impl LemmyContext {
     let client = ClientBuilder::new(client).build();
     let secret = Secret {
       id: 0,
-      jwt_secret: String::new(),
+      jwt_secret: String::new().into(),
     };
     let rate_limit_cell = RateLimitCell::with_test_config();


@@ -14,7 +14,6 @@ pub mod private_message;
 pub mod request;
 #[cfg(feature = "full")]
 pub mod send_activity;
-pub mod sensitive;
 pub mod site;
 #[cfg(feature = "full")]
 pub mod utils;


@@ -1,6 +1,6 @@
-use crate::sensitive::Sensitive;
 use lemmy_db_schema::{
   newtypes::{CommentReplyId, CommunityId, LanguageId, PersonId, PersonMentionId},
+  sensitive::SensitiveString,
   source::site::Site,
   CommentSortType,
   ListingType,
@@ -25,8 +25,8 @@ use ts_rs::TS;
 #[cfg_attr(feature = "full", ts(export))]
 /// Logging into lemmy.
 pub struct Login {
-  pub username_or_email: Sensitive<String>,
-  pub password: Sensitive<String>,
+  pub username_or_email: SensitiveString,
+  pub password: SensitiveString,
   /// May be required, if totp is enabled for their account.
   pub totp_2fa_token: Option<String>,
 }
@@ -38,11 +38,11 @@ pub struct Login {
 /// Register / Sign up to lemmy.
 pub struct Register {
   pub username: String,
-  pub password: Sensitive<String>,
-  pub password_verify: Sensitive<String>,
+  pub password: SensitiveString,
+  pub password_verify: SensitiveString,
   pub show_nsfw: Option<bool>,
   /// email is mandatory if email verification is enabled on the server
-  pub email: Option<Sensitive<String>>,
+  pub email: Option<SensitiveString>,
   /// The UUID of the captcha item.
   pub captcha_uuid: Option<String>,
   /// Your captcha answer.
@@ -99,7 +99,7 @@ pub struct SaveUserSettings {
   /// Your display name, which can contain strange characters, and does not need to be unique.
   pub display_name: Option<String>,
   /// Your email.
-  pub email: Option<Sensitive<String>>,
+  pub email: Option<SensitiveString>,
   /// Your bio / info, in markdown.
   pub bio: Option<String>,
   /// Your matrix user id. Ex: @my_user:matrix.org
@@ -140,9 +140,9 @@ pub struct SaveUserSettings {
 #[cfg_attr(feature = "full", ts(export))]
 /// Changes your account password.
 pub struct ChangePassword {
-  pub new_password: Sensitive<String>,
-  pub new_password_verify: Sensitive<String>,
-  pub old_password: Sensitive<String>,
+  pub new_password: SensitiveString,
+  pub new_password_verify: SensitiveString,
+  pub old_password: SensitiveString,
 }

 #[skip_serializing_none]
@@ -152,7 +152,7 @@ pub struct ChangePassword {
 /// A response for your login.
 pub struct LoginResponse {
   /// This is None in response to `Register` if email verification is enabled, or the server requires registration applications.
-  pub jwt: Option<Sensitive<String>>,
+  pub jwt: Option<SensitiveString>,
   /// If registration applications are required, this will return true for a signup response.
   pub registration_created: bool,
   /// If email verifications are required, this will return true for a signup response.
@@ -340,7 +340,7 @@ pub struct CommentReplyResponse {
 #[cfg_attr(feature = "full", ts(export))]
 /// Delete your account.
 pub struct DeleteAccount {
-  pub password: Sensitive<String>,
+  pub password: SensitiveString,
   pub delete_content: bool,
 }
@@ -349,7 +349,7 @@ pub struct DeleteAccount {
 #[cfg_attr(feature = "full", ts(export))]
 /// Reset your password via email.
 pub struct PasswordReset {
-  pub email: Sensitive<String>,
+  pub email: SensitiveString,
 }

 #[derive(Debug, Serialize, Deserialize, Clone, Default, PartialEq, Eq, Hash)]
@@ -357,9 +357,9 @@ pub struct PasswordReset {
 #[cfg_attr(feature = "full", ts(export))]
 /// Change your password after receiving a reset request.
 pub struct PasswordChangeAfterReset {
-  pub token: Sensitive<String>,
-  pub password: Sensitive<String>,
-  pub password_verify: Sensitive<String>,
+  pub token: SensitiveString,
+  pub password: SensitiveString,
+  pub password_verify: SensitiveString,
 }

 #[skip_serializing_none]
@@ -405,7 +405,7 @@ pub struct VerifyEmail {
 #[cfg_attr(feature = "full", derive(TS))]
 #[cfg_attr(feature = "full", ts(export))]
 pub struct GenerateTotpSecretResponse {
-  pub totp_secret_url: Sensitive<String>,
+  pub totp_secret_url: SensitiveString,
 }

 #[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq, Hash)]


@@ -1,116 +0,0 @@
-use serde::{Deserialize, Serialize};
-use std::{
-  borrow::Borrow,
-  ops::{Deref, DerefMut},
-};
-#[cfg(feature = "full")]
-use ts_rs::TS;
-
-#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Deserialize, Serialize, Default)]
-#[serde(transparent)]
-pub struct Sensitive<T>(T);
-
-impl<T> Sensitive<T> {
-  pub fn new(item: T) -> Self {
-    Sensitive(item)
-  }
-  pub fn into_inner(self) -> T {
-    self.0
-  }
-}
-
-impl<T> std::fmt::Debug for Sensitive<T> {
-  fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-    f.debug_struct("Sensitive").finish()
-  }
-}
-
-impl<T> AsRef<T> for Sensitive<T> {
-  fn as_ref(&self) -> &T {
-    &self.0
-  }
-}
-
-impl AsRef<str> for Sensitive<String> {
-  fn as_ref(&self) -> &str {
-    &self.0
-  }
-}
-
-impl AsRef<[u8]> for Sensitive<String> {
-  fn as_ref(&self) -> &[u8] {
-    self.0.as_ref()
-  }
-}
-
-impl AsRef<[u8]> for Sensitive<Vec<u8>> {
-  fn as_ref(&self) -> &[u8] {
-    self.0.as_ref()
-  }
-}
-
-impl<T> AsMut<T> for Sensitive<T> {
-  fn as_mut(&mut self) -> &mut T {
-    &mut self.0
-  }
-}
-
-impl AsMut<str> for Sensitive<String> {
-  fn as_mut(&mut self) -> &mut str {
-    &mut self.0
-  }
-}
-
-impl Deref for Sensitive<String> {
-  type Target = str;
-  fn deref(&self) -> &Self::Target {
-    &self.0
-  }
-}
-
-impl DerefMut for Sensitive<String> {
-  fn deref_mut(&mut self) -> &mut Self::Target {
-    &mut self.0
-  }
-}
-
-impl<T> From<T> for Sensitive<T> {
-  fn from(t: T) -> Self {
-    Sensitive(t)
-  }
-}
-
-impl From<&str> for Sensitive<String> {
-  fn from(s: &str) -> Self {
-    Sensitive(s.into())
-  }
-}
-
-impl<T> Borrow<T> for Sensitive<T> {
-  fn borrow(&self) -> &T {
-    &self.0
-  }
-}
-
-impl Borrow<str> for Sensitive<String> {
-  fn borrow(&self) -> &str {
-    &self.0
-  }
-}
-
-#[cfg(feature = "full")]
-impl TS for Sensitive<String> {
-  fn name() -> String {
-    "string".to_string()
-  }
-  fn name_with_type_args(_args: Vec<String>) -> String {
-    "string".to_string()
-  }
-  fn dependencies() -> Vec<ts_rs::Dependency> {
-    Vec::new()
-  }
-  fn transparent() -> bool {
-    true
-  }
-}
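Judging by the call sites in this diff (`secret_url.into()`, `.map(SensitiveString::into_inner)`), the replacement in lemmy_db_schema is a string-only newtype along these lines; a sketch, not the actual definition:

use std::fmt;

#[derive(Clone, Default, PartialEq, Eq)]
pub struct SensitiveString(String);

impl SensitiveString {
  pub fn into_inner(self) -> String {
    self.0
  }
}

// Redact the value in Debug output, as the old Sensitive<T> did.
impl fmt::Debug for SensitiveString {
  fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
    f.debug_struct("SensitiveString").finish()
  }
}

impl From<String> for SensitiveString {
  fn from(s: String) -> Self {
    SensitiveString(s)
  }
}

impl AsRef<str> for SensitiveString {
  fn as_ref(&self) -> &str {
    &self.0
  }
}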


@@ -23,7 +23,6 @@
       "href": "https://lemmy.ml/pictrs/image/xl8W7FZfk9.jpg"
     }
   ],
-  "commentsEnabled": true,
   "sensitive": false,
   "language": {
     "identifier": "ko",


@@ -23,7 +23,6 @@
       "href": "https://lemmy.ml/pictrs/image/xl8W7FZfk9.jpg"
     }
   ],
-  "commentsEnabled": true,
   "sensitive": false,
   "published": "2021-10-29T15:10:51.557399Z",
   "updated": "2021-10-29T15:11:35.976374Z"


@@ -15,7 +15,6 @@
   "cc": [],
   "mediaType": "text/html",
   "attachment": [],
-  "commentsEnabled": true,
   "sensitive": false,
   "published": "2023-02-06T06:42:41.939437Z",
   "language": {
@@ -36,7 +35,6 @@
   "cc": [],
   "mediaType": "text/html",
   "attachment": [],
-  "commentsEnabled": true,
   "sensitive": false,
   "published": "2023-02-06T06:42:37.119567Z",
   "language": {


@@ -22,7 +22,6 @@
   ],
   "name": "another outbox test",
   "mediaType": "text/html",
-  "commentsEnabled": true,
   "sensitive": false,
   "stickied": false,
   "published": "2021-11-18T17:19:45.895163Z"
@@ -51,7 +50,6 @@
   ],
   "name": "outbox test",
   "mediaType": "text/html",
-  "commentsEnabled": true,
   "sensitive": false,
   "stickied": false,
   "published": "2021-11-18T17:19:05.763109Z"


@@ -25,7 +25,6 @@
     "url": "https://enterprise.lemmy.ml/pictrs/image/eOtYb9iEiB.png"
   },
   "sensitive": false,
-  "commentsEnabled": true,
   "language": {
     "identifier": "fr",
     "name": "Français"


@@ -26,6 +26,7 @@ use lemmy_db_schema::{
   source::{
     activity::ActivitySendTargets,
     community::Community,
+    moderator::{ModLockPost, ModLockPostForm},
     person::Person,
     post::{Post, PostUpdateForm},
   },
@@ -60,12 +61,22 @@ impl ActivityHandler for LockPage {
   }

   async fn receive(self, context: &Data<Self::DataType>) -> Result<(), Self::Error> {
+    insert_received_activity(&self.id, context).await?;
+    let locked = Some(true);
     let form = PostUpdateForm {
-      locked: Some(true),
+      locked,
       ..Default::default()
     };
     let post = self.object.dereference(context).await?;
     Post::update(&mut context.pool(), post.id, &form).await?;
+
+    let form = ModLockPostForm {
+      mod_person_id: self.actor.dereference(context).await?.id,
+      post_id: post.id,
+      locked,
+    };
+    ModLockPost::create(&mut context.pool(), &form).await?;
+
     Ok(())
   }
 }
@@ -94,12 +105,21 @@ impl ActivityHandler for UndoLockPage {
   async fn receive(self, context: &Data<Self::DataType>) -> Result<(), Self::Error> {
     insert_received_activity(&self.id, context).await?;
+    let locked = Some(false);
     let form = PostUpdateForm {
-      locked: Some(false),
+      locked,
       ..Default::default()
     };
     let post = self.object.object.dereference(context).await?;
     Post::update(&mut context.pool(), post.id, &form).await?;
+
+    let form = ModLockPostForm {
+      mod_person_id: self.actor.dereference(context).await?.id,
+      post_id: post.id,
+      locked,
+    };
+    ModLockPost::create(&mut context.pool(), &form).await?;
+
     Ok(())
   }
 }


@@ -4,7 +4,6 @@ use crate::{
     community::send_activity_in_community,
     generate_activity_id,
     verify_is_public,
-    verify_mod_action,
     verify_person_in_community,
   },
   activity_lists::AnnouncableActivities,
@@ -78,14 +77,13 @@ impl CreateOrUpdatePage {
     let create_or_update =
       CreateOrUpdatePage::new(post.into(), &person, &community, kind, &context).await?;

-    let is_mod_action = create_or_update.object.is_mod_action(&context).await?;
-
     let activity = AnnouncableActivities::CreateOrUpdatePost(create_or_update);
     send_activity_in_community(
       activity,
       &person,
       &community,
       ActivitySendTargets::empty(),
-      is_mod_action,
+      false,
       &context,
     )
     .await?;
@@ -112,30 +110,8 @@ impl ActivityHandler for CreateOrUpdatePage {
     let community = self.community(context).await?;
     verify_person_in_community(&self.actor, &community, context).await?;
     check_community_deleted_or_removed(&community)?;
-
-    match self.kind {
-      CreateOrUpdateType::Create => {
-        verify_domains_match(self.actor.inner(), self.object.id.inner())?;
-        verify_urls_match(self.actor.inner(), self.object.creator()?.inner())?;
-        // Check that the post isnt locked, as that isnt possible for newly created posts.
-        // However, when fetching a remote post we generate a new create activity with the current
-        // locked value, so this check may fail. So only check if its a local community,
-        // because then we will definitely receive all create and update activities separately.
-        let is_locked = self.object.comments_enabled == Some(false);
-        if community.local && is_locked {
-          Err(LemmyErrorType::NewPostCannotBeLocked)?
-        }
-      }
-      CreateOrUpdateType::Update => {
-        let is_mod_action = self.object.is_mod_action(context).await?;
-        if is_mod_action {
-          verify_mod_action(&self.actor, &community, context).await?;
-        } else {
-          verify_domains_match(self.actor.inner(), self.object.id.inner())?;
-          verify_urls_match(self.actor.inner(), self.object.creator()?.inner())?;
-        }
-      }
-    }
+    verify_domains_match(self.actor.inner(), self.object.id.inner())?;
+    verify_urls_match(self.actor.inner(), self.object.creator()?.inner())?;
     ApubPost::verify(&self.object, self.actor.inner(), context).await?;
     Ok(())
   }


@@ -4,9 +4,10 @@ use crate::objects::{
   person::ApubPerson,
   post::ApubPost,
 };
-use activitypub_federation::{config::Data, fetch::object_id::ObjectId};
+use activitypub_federation::{config::Data, fetch::object_id::ObjectId, traits::Object};
 use actix_web::web::Json;
 use futures::{future::try_join_all, StreamExt};
+use itertools::Itertools;
 use lemmy_api_common::{context::LemmyContext, SuccessResponse};
 use lemmy_db_schema::{
   newtypes::DbUrl,
@@ -30,8 +31,11 @@ use lemmy_utils::{
   spawn_try_task,
 };
 use serde::{Deserialize, Serialize};
+use std::future::Future;
 use tracing::info;

+const PARALLELISM: usize = 10;
+
 /// Backup of user data. This struct should never be changed so that the data can be used as a
 /// long-term backup in case the instance goes down unexpectedly. All fields are optional to allow
 /// importing partial backups.
@@ -40,7 +44,7 @@ use tracing::info;
 ///
 /// Be careful with any changes to this struct, to avoid breaking changes which could prevent
 /// importing older backups.
-#[derive(Debug, Serialize, Deserialize, Clone)]
+#[derive(Debug, Serialize, Deserialize, Clone, Default)]
 pub struct UserSettingsBackup {
   pub display_name: Option<String>,
   pub bio: Option<String>,
@@ -167,28 +171,17 @@
   }

   spawn_try_task(async move {
-    const PARALLELISM: usize = 10;
     let person_id = local_user_view.person.id;

-    // These tasks fetch objects from remote instances which might be down.
-    // TODO: Would be nice if we could send a list of failed items with api response, but then
-    // the request would likely timeout.
-    let mut failed_items = vec![];
-
     info!(
-      "Starting settings backup for {}",
+      "Starting settings import for {}",
       local_user_view.person.name
     );

-    futures::stream::iter(
-      data
-        .followed_communities
-        .clone()
-        .into_iter()
-        // reset_request_count works like clone, and is necessary to avoid running into request limit
-        .map(|f| (f, context.reset_request_count()))
-        .map(|(followed, context)| async move {
-          // need to reset outgoing request count to avoid running into limit
+    let failed_followed_communities = fetch_and_import(
+      data.followed_communities.clone(),
+      &context,
+      |(followed, context)| async move {
         let community = followed.dereference(&context).await?;
         let form = CommunityFollowerForm {
           person_id,
@@ -197,27 +190,14 @@
         };
         CommunityFollower::follow(&mut context.pool(), &form).await?;
         LemmyResult::Ok(())
-      }),
+      },
     )
-    .buffer_unordered(PARALLELISM)
-    .collect::<Vec<_>>()
-    .await
-    .into_iter()
-    .enumerate()
-    .for_each(|(i, r)| {
-      if let Err(e) = r {
-        failed_items.push(data.followed_communities.get(i).map(|u| u.inner().clone()));
-        info!("Failed to import followed community: {e}");
-      }
-    });
+    .await?;

-    futures::stream::iter(
-      data
-        .saved_posts
-        .clone()
-        .into_iter()
-        .map(|s| (s, context.reset_request_count()))
-        .map(|(saved, context)| async move {
+    let failed_saved_posts = fetch_and_import(
+      data.saved_posts.clone(),
+      &context,
+      |(saved, context)| async move {
         let post = saved.dereference(&context).await?;
         let form = PostSavedForm {
           person_id,
@@ -225,27 +205,14 @@
         };
         PostSaved::save(&mut context.pool(), &form).await?;
         LemmyResult::Ok(())
-      }),
+      },
     )
-    .buffer_unordered(PARALLELISM)
-    .collect::<Vec<_>>()
-    .await
-    .into_iter()
-    .enumerate()
-    .for_each(|(i, r)| {
-      if let Err(e) = r {
-        failed_items.push(data.followed_communities.get(i).map(|u| u.inner().clone()));
-        info!("Failed to import saved post community: {e}");
-      }
-    });
+    .await?;

-    futures::stream::iter(
-      data
-        .saved_comments
-        .clone()
-        .into_iter()
-        .map(|s| (s, context.reset_request_count()))
-        .map(|(saved, context)| async move {
+    let failed_saved_comments = fetch_and_import(
+      data.saved_comments.clone(),
+      &context,
+      |(saved, context)| async move {
         let comment = saved.dereference(&context).await?;
         let form = CommentSavedForm {
           person_id,
@@ -253,55 +220,42 @@
         };
         CommentSaved::save(&mut context.pool(), &form).await?;
         LemmyResult::Ok(())
-      }),
+      },
     )
-    .buffer_unordered(PARALLELISM)
-    .collect::<Vec<_>>()
-    .await
-    .into_iter()
-    .enumerate()
-    .for_each(|(i, r)| {
-      if let Err(e) = r {
-        failed_items.push(data.followed_communities.get(i).map(|u| u.inner().clone()));
-        info!("Failed to import saved comment community: {e}");
-      }
-    });
+    .await?;

-    let failed_items: Vec<_> = failed_items.into_iter().flatten().collect();
-    info!(
-      "Finished settings backup for {}, failed items: {:#?}",
-      local_user_view.person.name, failed_items
-    );
-
-    // These tasks don't connect to any remote instances but only insert directly in the database.
-    // That means the only error condition are db connection failures, so no extra error handling is
-    // needed.
-    try_join_all(data.blocked_communities.iter().map(|blocked| async {
-      // dont fetch unknown blocked objects from home server
-      let community = blocked.dereference_local(&context).await?;
+    let failed_community_blocks = fetch_and_import(
+      data.blocked_communities.clone(),
+      &context,
+      |(blocked, context)| async move {
+        let community = blocked.dereference(&context).await?;
         let form = CommunityBlockForm {
           person_id,
           community_id: community.id,
         };
         CommunityBlock::block(&mut context.pool(), &form).await?;
         LemmyResult::Ok(())
-    }))
+      },
+    )
     .await?;

-    try_join_all(data.blocked_users.iter().map(|blocked| async {
-      // dont fetch unknown blocked objects from home server
-      let target = blocked.dereference_local(&context).await?;
+    let failed_user_blocks = fetch_and_import(
+      data.blocked_users.clone(),
+      &context,
+      |(blocked, context)| async move {
+        let context = context.reset_request_count();
+        let target = blocked.dereference(&context).await?;
         let form = PersonBlockForm {
           person_id,
           target_id: target.id,
         };
         PersonBlock::block(&mut context.pool(), &form).await?;
         LemmyResult::Ok(())
-    }))
+      },
+    )
     .await?;

     try_join_all(data.blocked_instances.iter().map(|domain| async {
-      // dont fetch unknown blocked objects from home server
       let instance = Instance::read_or_create(&mut context.pool(), domain.clone()).await?;
       let form = InstanceBlockForm {
         person_id,
@@ -312,17 +266,53 @@
     }))
     .await?;

+    info!("Settings import completed for {}, the following items failed: {failed_followed_communities}, {failed_saved_posts}, {failed_saved_comments}, {failed_community_blocks}, {failed_user_blocks}",
+    local_user_view.person.name);
+
     Ok(())
   });

   Ok(Json(Default::default()))
 }

+async fn fetch_and_import<Kind, Fut>(
+  objects: Vec<ObjectId<Kind>>,
+  context: &Data<LemmyContext>,
+  import_fn: impl FnMut((ObjectId<Kind>, Data<LemmyContext>)) -> Fut,
+) -> LemmyResult<String>
+where
+  Kind: Object + Send + 'static,
+  for<'de2> <Kind as Object>::Kind: Deserialize<'de2>,
+  Fut: Future<Output = LemmyResult<()>>,
+{
+  let mut failed_items = vec![];
+  futures::stream::iter(
+    objects
+      .clone()
+      .into_iter()
+      // need to reset outgoing request count to avoid running into limit
+      .map(|s| (s, context.reset_request_count()))
+      .map(import_fn),
+  )
+  .buffer_unordered(PARALLELISM)
+  .collect::<Vec<_>>()
+  .await
+  .into_iter()
+  .enumerate()
+  .for_each(|(i, r): (usize, LemmyResult<()>)| {
+    if r.is_err() {
+      if let Some(object) = objects.get(i) {
+        failed_items.push(object.inner().clone());
+      }
+    }
+  });
+  Ok(failed_items.into_iter().join(","))
+}
+
 #[cfg(test)]
 #[allow(clippy::indexing_slicing)]
 mod tests {
-  use crate::api::user_settings_backup::{export_settings, import_settings};
+  use crate::api::user_settings_backup::{export_settings, import_settings, UserSettingsBackup};
   use activitypub_federation::config::Data;
   use lemmy_api_common::context::LemmyContext;
   use lemmy_db_schema::{
@@ -420,6 +410,44 @@ mod tests {
     Ok(())
   }

+  #[tokio::test]
+  #[serial]
+  async fn test_settings_partial_import() -> LemmyResult<()> {
+    let context = LemmyContext::init_test_context().await;
+
+    let export_user =
+      create_user("hanna".to_string(), Some("my bio".to_string()), &context).await?;
+
+    let community_form = CommunityInsertForm::builder()
+      .name("testcom".to_string())
+      .title("testcom".to_string())
+      .instance_id(export_user.person.instance_id)
+      .build();
+    let community = Community::create(&mut context.pool(), &community_form).await?;
+    let follower_form = CommunityFollowerForm {
+      community_id: community.id,
+      person_id: export_user.person.id,
+      pending: false,
+    };
+    CommunityFollower::follow(&mut context.pool(), &follower_form).await?;
+
+    let backup = export_settings(export_user.clone(), context.reset_request_count()).await?;
+
+    let import_user = create_user("charles".to_string(), None, &context).await?;
+
+    let backup2 = UserSettingsBackup {
+      followed_communities: backup.followed_communities.clone(),
+      ..Default::default()
+    };
+    import_settings(
+      actix_web::web::Json(backup2),
+      import_user.clone(),
+      context.reset_request_count(),
+    )
+    .await?;
+
+    Ok(())
+  }
+
   #[tokio::test]
   #[serial]
   async fn disallow_large_backup() -> LemmyResult<()> {


@@ -29,7 +29,9 @@ pub(crate) mod mentions;
 pub mod objects;
 pub mod protocol;

-pub const FEDERATION_HTTP_FETCH_LIMIT: u32 = 50;
+/// Maximum number of outgoing HTTP requests to fetch a single object. Needs to be high enough
+/// to fetch a new community with posts, moderators and featured posts.
+pub const FEDERATION_HTTP_FETCH_LIMIT: u32 = 100;

 /// Only include a basic context to save space and bandwidth. The main context is hosted statically
 /// on join-lemmy.org. Include activitystreams explicitly for better compat, but this could


@@ -28,6 +28,7 @@ use lemmy_api_common::{
   },
 };
 use lemmy_db_schema::{
+  sensitive::SensitiveString,
   source::{
     activity::ActorType,
     actor_language::CommunityLanguage,
@@ -213,7 +214,7 @@ impl Actor for ApubCommunity {
   }

   fn private_key_pem(&self) -> Option<String> {
-    self.private_key.clone()
+    self.private_key.clone().map(SensitiveString::into_inner)
   }

   fn inbox(&self) -> Url {


@@ -29,6 +29,7 @@ use lemmy_api_common::{
 };
 use lemmy_db_schema::{
   newtypes::InstanceId,
+  sensitive::SensitiveString,
   source::{
     activity::ActorType,
     actor_language::SiteLanguage,
@@ -187,7 +188,7 @@ impl Actor for ApubSite {
   }

   fn private_key_pem(&self) -> Option<String> {
-    self.private_key.clone()
+    self.private_key.clone().map(SensitiveString::into_inner)
   }

   fn inbox(&self) -> Url {


@@ -30,6 +30,7 @@ use lemmy_api_common::{
   },
 };
 use lemmy_db_schema::{
+  sensitive::SensitiveString,
   source::{
     activity::ActorType,
     local_site::LocalSite,
@@ -200,7 +201,7 @@ impl Actor for ApubPerson {
   }

   fn private_key_pem(&self) -> Option<String> {
-    self.private_key.clone()
+    self.private_key.clone().map(SensitiveString::into_inner)
   }

   fn inbox(&self) -> Url {


@@ -36,7 +36,6 @@ use lemmy_db_schema::{
   source::{
     community::Community,
     local_site::LocalSite,
-    moderator::{ModLockPost, ModLockPostForm},
     person::Person,
     post::{Post, PostInsertForm, PostUpdateForm},
   },
@@ -147,7 +146,6 @@ impl Object for ApubPost {
       source: self.body.clone().map(Source::new),
       attachment,
       image: self.thumbnail_url.clone().map(ImageObject::new),
-      comments_enabled: Some(!self.locked),
       sensitive: Some(self.nsfw),
       language,
       published: Some(self.published),
@@ -165,12 +163,8 @@
     expected_domain: &Url,
     context: &Data<Self::DataType>,
   ) -> LemmyResult<()> {
-    // We can't verify the domain in case of mod action, because the mod may be on a different
-    // instance from the post author.
-    if !page.is_mod_action(context).await? {
-      verify_domains_match(page.id.inner(), expected_domain)?;
-      verify_is_remote_object(&page.id, context)?;
-    };
+    verify_domains_match(page.id.inner(), expected_domain)?;
+    verify_is_remote_object(&page.id, context)?;

     let community = page.community(context).await?;
     check_apub_id_valid_with_strictness(page.id.inner(), community.local, context).await?;
@@ -218,13 +212,9 @@
       name = name.chars().take(MAX_TITLE_LENGTH).collect();
     }

-    // read existing, local post if any (for generating mod log)
-    let old_post = page.id.dereference_local(context).await;
-
     let first_attachment = page.attachment.first();
     let local_site = LocalSite::read(&mut context.pool()).await.ok();

-    let form = if !page.is_mod_action(context).await? {
-      let url = if let Some(attachment) = first_attachment.cloned() {
+    let url = if let Some(attachment) = first_attachment.cloned() {
       Some(attachment.url())
     } else if page.kind == PageType::Video {
@@ -247,14 +237,13 @@
     let language_id =
       LanguageTag::to_language_id_single(page.language, &mut context.pool()).await?;

-      PostInsertForm::builder()
+    let form = PostInsertForm::builder()
       .name(name)
       .url(url.map(Into::into))
       .body(body)
       .alt_text(alt_text)
       .creator_id(creator.id)
       .community_id(community.id)
-      .locked(page.comments_enabled.map(|e| !e))
       .published(page.published.map(Into::into))
       .updated(page.updated.map(Into::into))
       .deleted(Some(false))
@@ -262,18 +251,7 @@
       .ap_id(Some(page.id.clone().into()))
       .local(Some(false))
       .language_id(language_id)
-      .build()
-    } else {
-      // if is mod action, only update locked/stickied fields, nothing else
-      PostInsertForm::builder()
-        .name(name)
-        .creator_id(creator.id)
-        .community_id(community.id)
-        .ap_id(Some(page.id.clone().into()))
-        .locked(page.comments_enabled.map(|e| !e))
-        .updated(page.updated.map(Into::into))
-        .build()
-    };
+      .build();

     let timestamp = page.updated.or(page.published).unwrap_or_else(naive_now);
     let post = Post::insert_apub(&mut context.pool(), timestamp, &form).await?;
@@ -287,16 +265,6 @@
       context.reset_request_count(),
     );

-    // write mod log entry for lock
-    if Page::is_locked_changed(&old_post, &page.comments_enabled) {
-      let form = ModLockPostForm {
-        mod_person_id: creator.id,
-        post_id: post.id,
-        locked: Some(post.locked),
-      };
-      ModLockPost::create(&mut context.pool(), &form).await?;
-    }
-
     Ok(post.into())
   }
 }


@@ -60,7 +60,6 @@ pub struct Page {
   #[serde(default)]
   pub(crate) attachment: Vec<Attachment>,
   pub(crate) image: Option<ImageObject>,
-  pub(crate) comments_enabled: Option<bool>,
   pub(crate) sensitive: Option<bool>,
   pub(crate) published: Option<DateTime<Utc>>,
   pub(crate) updated: Option<DateTime<Utc>>,
@@ -156,28 +155,6 @@ pub enum HashtagType {
 }

 impl Page {
-  /// Only mods can change the post's locked status. So if it is changed from the default value,
-  /// it is a mod action and needs to be verified as such.
-  ///
-  /// Locked needs to be false on a newly created post (verified in [[CreatePost]].
-  pub(crate) async fn is_mod_action(&self, context: &Data<LemmyContext>) -> LemmyResult<bool> {
-    let old_post = self.id.clone().dereference_local(context).await;
-    Ok(Page::is_locked_changed(&old_post, &self.comments_enabled))
-  }
-
-  pub(crate) fn is_locked_changed<E>(
-    old_post: &Result<ApubPost, E>,
-    new_comments_enabled: &Option<bool>,
-  ) -> bool {
-    if let Some(new_comments_enabled) = new_comments_enabled {
-      if let Ok(old_post) = old_post {
-        return new_comments_enabled != &!old_post.locked;
-      }
-    }
-    false
-  }
-
   pub(crate) fn creator(&self) -> LemmyResult<ObjectId<ApubPerson>> {
     match &self.attributed_to {
       AttributedTo::Lemmy(l) => Ok(l.clone()),


@@ -5,6 +5,12 @@
 -- (even if only other columns are updated) because triggers can run after the deletion of referenced rows and
 -- before the automatic deletion of the row that references it. This is not a problem for insert or delete.
 --
+-- After a row update begins, a concurrent update on the same row can't begin until the whole
+-- transaction that contains the first update is finished. To reduce this locking, statements in
+-- triggers should be ordered based on the likelihood of concurrent writers. For example, updating
+-- site_aggregates should be done last because the same row is updated for all local stuff. If
+-- it were not last, then the locking period for concurrent writers would extend to include the
+-- time consumed by statements that come after.
 --
 --
 -- Create triggers for both post and comments
@@ -38,6 +44,7 @@ BEGIN
         (thing_like).thing_id, coalesce(sum(count_diff) FILTER (WHERE (thing_like).score = 1), 0) AS upvotes, coalesce(sum(count_diff) FILTER (WHERE (thing_like).score != 1), 0) AS downvotes FROM select_old_and_new_rows AS old_and_new_rows GROUP BY (thing_like).thing_id) AS diff
     WHERE
         a.thing_id = diff.thing_id
+        AND (diff.upvotes, diff.downvotes) != (0, 0)
     RETURNING
         r.creator_id_from_thing_aggregates (a.*) AS creator_id, diff.upvotes - diff.downvotes AS score)
 UPDATE
@@ -47,7 +54,8 @@ BEGIN
     SELECT
         creator_id, sum(score) AS score FROM thing_diff GROUP BY creator_id) AS diff
 WHERE
-    a.person_id = diff.creator_id;
+    a.person_id = diff.creator_id
+    AND diff.score != 0;
 RETURN NULL;
 END;
 $$);
@@ -62,6 +70,21 @@ CALL r.post_or_comment ('post');
 CALL r.post_or_comment ('comment');

 -- Create triggers that update counts in parent aggregates
+CREATE FUNCTION r.parent_comment_ids (path ltree)
+    RETURNS SETOF int
+    LANGUAGE sql
+    IMMUTABLE parallel safe
+BEGIN
+    ATOMIC
+    SELECT
+        comment_id::int
+    FROM
+        string_to_table (ltree2text (path), '.') AS comment_id
+    -- Skip first and last
+    LIMIT (nlevel (path) - 2) OFFSET 1;
+END;
+
 CALL r.create_triggers ('comment', $$
 BEGIN
     UPDATE
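As a quick illustration of r.parent_comment_ids: the LIMIT/OFFSET above skips the first element (the root 0) and the last (the comment itself), so

SELECT * FROM r.parent_comment_ids ('0.5.6.7'::ltree);
-- returns 5 and 6, the ancestor comment ids of comment 7.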
@@ -76,60 +99,84 @@ BEGIN
         r.is_counted (comment)
     GROUP BY (comment).creator_id) AS diff
 WHERE
-    a.person_id = diff.creator_id;
+    a.person_id = diff.creator_id
+    AND diff.comment_count != 0;
 UPDATE
-    site_aggregates AS a
+    comment_aggregates AS a
 SET
-    comments = a.comments + diff.comments
+    child_count = a.child_count + diff.child_count
 FROM (
     SELECT
-        coalesce(sum(count_diff), 0) AS comments
-    FROM
-        select_old_and_new_rows AS old_and_new_rows
-    WHERE
-        r.is_counted (comment)
-        AND (comment).local) AS diff;
+        parent_id,
+        coalesce(sum(count_diff), 0) AS child_count
+    FROM (
+        -- For each inserted or deleted comment, this outputs 1 row for each parent comment.
+        -- For example, this:
+        --
+        --  count_diff | (comment).path
+        -- ------------+----------------
+        --           1 | 0.5.6.7
+        --           1 | 0.5.6.7.8
+        --
+        -- becomes this:
+        --
+        --  count_diff | parent_id
+        -- ------------+-----------
+        --           1 |         5
+        --           1 |         6
+        --           1 |         5
+        --           1 |         6
+        --           1 |         7
+        SELECT
+            count_diff,
+            parent_id
+        FROM
+            select_old_and_new_rows AS old_and_new_rows,
+            LATERAL r.parent_comment_ids ((comment).path) AS parent_id) AS expanded_old_and_new_rows
+    GROUP BY
+        parent_id) AS diff
+WHERE
+    a.comment_id = diff.parent_id
+    AND diff.child_count != 0;

 WITH post_diff AS (
     UPDATE
         post_aggregates AS a
     SET
         comments = a.comments + diff.comments,
-        newest_comment_time = GREATEST (a.newest_comment_time, (
-            SELECT
-                published
-            FROM select_new_rows AS new_comment
-            WHERE
-                a.post_id = new_comment.post_id ORDER BY published DESC LIMIT 1)),
-        newest_comment_time_necro = GREATEST (a.newest_comment_time_necro, (
-            SELECT
-                published
-            FROM select_new_rows AS new_comment
-            WHERE
-                a.post_id = new_comment.post_id
-                -- Ignore comments from the post's creator
-                AND a.creator_id != new_comment.creator_id
-                -- Ignore comments on old posts
-                AND a.published > (new_comment.published - '2 days'::interval)
-            ORDER BY published DESC LIMIT 1))
+        newest_comment_time = GREATEST (a.newest_comment_time, diff.newest_comment_time),
+        newest_comment_time_necro = GREATEST (a.newest_comment_time_necro, diff.newest_comment_time_necro)
     FROM (
         SELECT
-            (comment).post_id,
-            coalesce(sum(count_diff), 0) AS comments
+            post.id AS post_id,
+            coalesce(sum(count_diff), 0) AS comments,
+            -- Old rows are excluded using `count_diff = 1`
+            max((comment).published) FILTER (WHERE count_diff = 1) AS newest_comment_time,
+            max((comment).published) FILTER (WHERE count_diff = 1
+                -- Ignore comments from the post's creator
+                AND post.creator_id != (comment).creator_id
+                -- Ignore comments on old posts
+                AND post.published > ((comment).published - '2 days'::interval)) AS newest_comment_time_necro,
+            r.is_counted (post.*) AS include_in_community_aggregates
        FROM
            select_old_and_new_rows AS old_and_new_rows
+        LEFT JOIN post ON post.id = (comment).post_id
        WHERE
            r.is_counted (comment)
        GROUP BY
-            (comment).post_id) AS diff
-    LEFT JOIN post ON post.id = diff.post_id
+            post.id) AS diff
    WHERE
        a.post_id = diff.post_id
+        AND (diff.comments,
+            GREATEST (a.newest_comment_time, diff.newest_comment_time),
+            GREATEST (a.newest_comment_time_necro, diff.newest_comment_time_necro)) != (0,
+            a.newest_comment_time,
+            a.newest_comment_time_necro)
    RETURNING
        a.community_id,
        diff.comments,
-        r.is_counted (post.*) AS include_in_community_aggregates)
+        diff.include_in_community_aggregates)
 UPDATE
     community_aggregates AS a
 SET
@@ -145,7 +192,23 @@ FROM (
GROUP BY
community_id) AS diff
WHERE
-a.community_id = diff.community_id;
+a.community_id = diff.community_id
+AND diff.comments != 0;
+UPDATE
+site_aggregates AS a
+SET
+comments = a.comments + diff.comments
+FROM (
+SELECT
+coalesce(sum(count_diff), 0) AS comments
+FROM
+select_old_and_new_rows AS old_and_new_rows
+WHERE
+r.is_counted (comment)
+AND (comment).local) AS diff
+WHERE
+diff.comments != 0;
RETURN NULL;
@@ -167,20 +230,8 @@ BEGIN
r.is_counted (post)
GROUP BY (post).creator_id) AS diff
WHERE
-a.person_id = diff.creator_id;
+a.person_id = diff.creator_id
+AND diff.post_count != 0;
-UPDATE
-site_aggregates AS a
-SET
-posts = a.posts + diff.posts
-FROM (
-SELECT
-coalesce(sum(count_diff), 0) AS posts
-FROM
-select_old_and_new_rows AS old_and_new_rows
-WHERE
-r.is_counted (post)
-AND (post).local) AS diff;
UPDATE
community_aggregates AS a
@@ -197,7 +248,23 @@ FROM (
GROUP BY
(post).community_id) AS diff
WHERE
-a.community_id = diff.community_id;
+a.community_id = diff.community_id
+AND diff.posts != 0;
+UPDATE
+site_aggregates AS a
+SET
+posts = a.posts + diff.posts
+FROM (
+SELECT
+coalesce(sum(count_diff), 0) AS posts
+FROM
+select_old_and_new_rows AS old_and_new_rows
+WHERE
+r.is_counted (post)
+AND (post).local) AS diff
+WHERE
+diff.posts != 0;
RETURN NULL;
@@ -217,7 +284,9 @@ BEGIN
FROM select_old_and_new_rows AS old_and_new_rows
WHERE
r.is_counted (community)
-AND (community).local) AS diff;
+AND (community).local) AS diff
+WHERE
+diff.communities != 0;
RETURN NULL;
@@ -235,7 +304,9 @@ BEGIN
SELECT
coalesce(sum(count_diff), 0) AS users
FROM select_old_and_new_rows AS old_and_new_rows
-WHERE (person).local) AS diff;
+WHERE (person).local) AS diff
+WHERE
+diff.users != 0;
RETURN NULL;
@@ -270,7 +341,8 @@ BEGIN
GROUP BY
old_post.community_id) AS diff
WHERE
-a.community_id = diff.community_id;
+a.community_id = diff.community_id
+AND diff.comments != 0;
RETURN NULL;
END;
$$;
@@ -296,7 +368,8 @@ BEGIN
LEFT JOIN community ON community.id = (community_follower).community_id
LEFT JOIN person ON person.id = (community_follower).person_id GROUP BY (community_follower).community_id) AS diff
WHERE
-a.community_id = diff.community_id;
+a.community_id = diff.community_id
+AND (diff.subscribers, diff.subscribers_local) != (0, 0);
RETURN NULL;
@@ -474,3 +547,24 @@ CREATE TRIGGER delete_follow
FOR EACH ROW
EXECUTE FUNCTION r.delete_follow_before_person ();
+-- Triggers that change values before insert or update
+CREATE FUNCTION r.comment_change_values ()
+RETURNS TRIGGER
+LANGUAGE plpgsql
+AS $$
+DECLARE
+id text = NEW.id::text;
+BEGIN
+-- Make `path` end with `id` if it doesn't already
+IF NOT (NEW.path ~ ('*.' || id)::lquery) THEN
+NEW.path = NEW.path || id;
+END IF;
+RETURN NEW;
+END
+$$;
+CREATE TRIGGER change_values
+BEFORE INSERT OR UPDATE ON comment
+FOR EACH ROW
+EXECUTE FUNCTION r.comment_change_values ();
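A sketch in Rust of the rule this trigger enforces, for illustration only (the helper below is hypothetical; the real check is the lquery match above): `path` must end with the comment's own id, and the id is appended when missing.

fn ensure_path_ends_with_id(path: &str, id: i64) -> String {
    // Mirrors `NEW.path ~ ('*.' || id)::lquery`: is the last label already the id?
    if path.split('.').next_back() == Some(id.to_string().as_str()) {
        path.to_string()
    } else {
        format!("{path}.{id}")
    }
}

fn main() {
    // A reply inserted with its parent's path gets its own id appended...
    assert_eq!(ensure_path_ends_with_id("0.5.6", 7), "0.5.6.7");
    // ...while an already-complete path is left untouched.
    assert_eq!(ensure_path_ends_with_id("0.5.6.7", 7), "0.5.6.7");
}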
@@ -15,12 +15,7 @@ use crate::{
utils::{functions::coalesce, get_conn, naive_now, DbPool, DELETED_REPLACEMENT_TEXT},
};
use chrono::{DateTime, Utc};
-use diesel::{
+use diesel::{dsl::insert_into, result::Error, ExpressionMethods, QueryDsl};
-dsl::{insert_into, sql_query},
-result::Error,
-ExpressionMethods,
-QueryDsl,
-};
use diesel_async::RunQueryDsl;
use diesel_ltree::Ltree;
use url::Url;
@@ -72,13 +67,9 @@ impl Comment {
parent_path: Option<&Ltree>,
) -> Result<Comment, Error> {
let conn = &mut get_conn(pool).await?;
+let comment_form = (comment_form, parent_path.map(|p| comment::path.eq(p)));
-conn
+if let Some(timestamp) = timestamp {
-.build_transaction()
-.run(|conn| {
-Box::pin(async move {
-// Insert, to get the id
-let inserted_comment = if let Some(timestamp) = timestamp {
insert_into(comment::table)
.values(comment_form)
.on_conflict(comment::ap_id)
@@ -86,68 +77,14 @@ impl Comment {
.do_update()
.set(comment_form)
.get_result::<Self>(conn)
-.await?
+.await
} else {
insert_into(comment::table)
.values(comment_form)
.get_result::<Self>(conn)
-.await?
-};
-let comment_id = inserted_comment.id;
-// You need to update the ltree column
-let ltree = Ltree(if let Some(parent_path) = parent_path {
-// The previous parent will already have 0 in it
-// Append this comment id
-format!("{}.{}", parent_path.0, comment_id)
-} else {
-// '0' is always the first path, append to that
-format!("{}.{}", 0, comment_id)
-});
-let updated_comment = diesel::update(comment::table.find(comment_id))
-.set(comment::path.eq(ltree))
-.get_result::<Self>(conn)
-.await?;
-// Update the child count for the parent comment_aggregates
-// You could do this with a trigger, but since you have to do this manually anyway,
-// you can just have it here
-if let Some(parent_path) = parent_path {
-// You have to update counts for all parents, not just the immediate one
-// TODO if the performance of this is terrible, it might be better to do this as part of a
-// scheduled query... although the counts would often be wrong.
-//
-// The child_count query for reference:
-// select c.id, c.path, count(c2.id) as child_count from comment c
-// left join comment c2 on c2.path <@ c.path and c2.path != c.path
-// group by c.id
-let parent_id = parent_path.0.split('.').nth(1);
-if let Some(parent_id) = parent_id {
-let top_parent = format!("0.{}", parent_id);
-let update_child_count_stmt = format!(
-"
-update comment_aggregates ca set child_count = c.child_count
-from (
-select c.id, c.path, count(c2.id) as child_count from comment c
-join comment c2 on c2.path <@ c.path and c2.path != c.path
-and c.path <@ '{top_parent}'
-group by c.id
-) as c
-where ca.comment_id = c.id"
-);
-sql_query(update_child_count_stmt).execute(conn).await?;
-}
-}
-Ok(updated_comment)
-}) as _
-})
.await
}
+}
pub async fn read_from_apub_id(
pool: &mut DbPool<'_>,
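With path and child_count maintenance moved into the database (the comment_change_values trigger plus the parent_comment_ids-based aggregate trigger above), the Rust side reduces to a plain upsert. A hypothetical call site, sketched for illustration only (names such as `parent` and `comment_form` are assumptions):

// Inside some async fn that already has a `pool` and an existing `parent` comment:
let comment = Comment::create(pool, &comment_form, Some(&parent.path)).await?;
// The BEFORE INSERT trigger appended the new id to `path`, and the aggregate
// trigger bumped child_count for every ancestor, so no follow-up UPDATE or
// explicit transaction is needed here anymore.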
@@ -55,12 +55,17 @@ impl LocalUser {
pool: &mut DbPool<'_>,
local_user_id: LocalUserId,
form: &LocalUserUpdateForm,
-) -> Result<LocalUser, Error> {
+) -> Result<usize, Error> {
let conn = &mut get_conn(pool).await?;
-diesel::update(local_user::table.find(local_user_id))
+let res = diesel::update(local_user::table.find(local_user_id))
.set(form)
-.get_result::<Self>(conn)
+.execute(conn)
-.await
+.await;
+// Diesel will throw an error if the query is all Nones (not updating anything), ignore this.
+match res {
+Err(Error::QueryBuilderError(_)) => Ok(0),
+other => other,
+}
}
pub async fn delete(pool: &mut DbPool<'_>, id: LocalUserId) -> Result<usize, Error> {
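The match arm exists because an all-None changeset generates no SET clause at all, so diesel fails at query-build time rather than in the database. A sketch of what the new return type gives callers (the form and ids below are illustrative):

// An update form with every field None builds an empty SET clause, which
// diesel rejects with Error::QueryBuilderError before any SQL is sent.
let noop_form = LocalUserUpdateForm::default();
let rows = LocalUser::update(pool, local_user_id, &noop_form).await?;
assert_eq!(rows, 0); // treated as "nothing to update", not as an error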
@@ -50,7 +50,7 @@ impl PasswordResetRequest {
) -> Result<PasswordResetRequest, Error> {
let form = PasswordResetRequestForm {
local_user_id: from_local_user_id,
-token: token_,
+token: token_.into(),
};
Self::create(pool, &form).await
@@ -134,7 +134,7 @@ mod tests {
let expected_password_reset_request = PasswordResetRequest {
id: inserted_password_reset_request.id,
local_user_id: inserted_local_user.id,
-token: token.to_string(),
+token: token.to_string().into(),
published: inserted_password_reset_request.published,
};
@@ -19,6 +19,7 @@ pub mod aggregates;
#[cfg(feature = "full")]
pub mod impls;
pub mod newtypes;
+pub mod sensitive;
#[cfg(feature = "full")]
#[rustfmt::skip]
#[allow(clippy::wildcard_imports)]
@@ -0,0 +1,57 @@
+use serde::{Deserialize, Serialize};
+use std::{fmt::Debug, ops::Deref};
+#[cfg(feature = "full")]
+use ts_rs::TS;
+#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Deserialize, Serialize, Default)]
+#[cfg_attr(feature = "full", derive(DieselNewType))]
+#[serde(transparent)]
+pub struct SensitiveString(String);
+impl SensitiveString {
+pub fn into_inner(self) -> String {
+self.0
+}
+}
+impl Debug for SensitiveString {
+fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+f.debug_struct("Sensitive").finish()
+}
+}
+impl AsRef<[u8]> for SensitiveString {
+fn as_ref(&self) -> &[u8] {
+self.0.as_ref()
+}
+}
+impl Deref for SensitiveString {
+type Target = str;
+fn deref(&self) -> &Self::Target {
+&self.0
+}
+}
+impl From<String> for SensitiveString {
+fn from(t: String) -> Self {
+SensitiveString(t)
+}
+}
+#[cfg(feature = "full")]
+impl TS for SensitiveString {
+fn name() -> String {
+"string".to_string()
+}
+fn name_with_type_args(_args: Vec<String>) -> String {
+"string".to_string()
+}
+fn dependencies() -> Vec<ts_rs::Dependency> {
+Vec::new()
+}
+fn transparent() -> bool {
+true
+}
+}
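What the newtype buys in practice, as a small illustrative test (the import path is an assumption based on the module added above):

use lemmy_db_schema::sensitive::SensitiveString; // assumed crate path

fn main() {
    let token = SensitiveString::from("super-secret-jwt".to_string());
    // The custom Debug impl redacts the value, so a stray `{:?}` in a log
    // line prints only the struct name instead of the secret itself.
    assert_eq!(format!("{token:?}"), "Sensitive");
    // Deref<Target = str> keeps intentional read access ergonomic.
    assert!(token.starts_with("super-"));
}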
@@ -2,6 +2,7 @@
use crate::schema::{community, community_follower, community_moderator, community_person_ban};
use crate::{
newtypes::{CommunityId, DbUrl, InstanceId, PersonId},
+sensitive::SensitiveString,
source::placeholder_apub_url,
CommunityVisibility,
};
@@ -39,7 +40,7 @@ pub struct Community {
/// Whether the community is local.
pub local: bool,
#[serde(skip)]
-pub private_key: Option<String>,
+pub private_key: Option<SensitiveString>,
#[serde(skip)]
pub public_key: String,
#[serde(skip)]
@@ -2,6 +2,7 @@
use crate::schema::local_user;
use crate::{
newtypes::{LocalUserId, PersonId},
+sensitive::SensitiveString,
ListingType,
PostListingMode,
SortType,
@@ -24,8 +25,8 @@ pub struct LocalUser {
/// The person_id for the local user.
pub person_id: PersonId,
#[serde(skip)]
-pub password_encrypted: String,
+pub password_encrypted: SensitiveString,
-pub email: Option<String>,
+pub email: Option<SensitiveString>,
/// Whether to show NSFW content.
pub show_nsfw: bool,
pub theme: String,
@@ -47,7 +48,7 @@ pub struct LocalUser {
/// Whether their registration application has been accepted.
pub accepted_application: bool,
#[serde(skip)]
-pub totp_2fa_secret: Option<String>,
+pub totp_2fa_secret: Option<SensitiveString>,
/// Open links in a new tab.
pub open_links_in_new_tab: bool,
pub blur_nsfw: bool,
@@ -1,6 +1,6 @@
-use crate::newtypes::LocalUserId;
#[cfg(feature = "full")]
use crate::schema::login_token;
+use crate::{newtypes::LocalUserId, sensitive::SensitiveString};
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use serde_with::skip_serializing_none;
@@ -18,7 +18,7 @@ use ts_rs::TS;
pub struct LoginToken {
/// Jwt token for this login
#[serde(skip)]
-pub token: String,
+pub token: SensitiveString,
pub user_id: LocalUserId,
/// Time of login
pub published: DateTime<Utc>,
@@ -31,7 +31,7 @@ pub struct LoginToken {
#[cfg_attr(feature = "full", derive(Insertable, AsChangeset))]
#[cfg_attr(feature = "full", diesel(table_name = login_token))]
pub struct LoginTokenCreateForm {
-pub token: String,
+pub token: SensitiveString,
pub user_id: LocalUserId,
pub ip: Option<String>,
pub user_agent: Option<String>,
@@ -1,6 +1,6 @@
-use crate::newtypes::LocalUserId;
#[cfg(feature = "full")]
use crate::schema::password_reset_request;
+use crate::{newtypes::LocalUserId, sensitive::SensitiveString};
use chrono::{DateTime, Utc};
#[derive(PartialEq, Eq, Debug)]
@@ -9,7 +9,7 @@ use chrono::{DateTime, Utc};
#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))]
pub struct PasswordResetRequest {
pub id: i32,
-pub token: String,
+pub token: SensitiveString,
pub published: DateTime<Utc>,
pub local_user_id: LocalUserId,
}
@@ -18,5 +18,5 @@ pub struct PasswordResetRequest {
#[cfg_attr(feature = "full", diesel(table_name = password_reset_request))]
pub struct PasswordResetRequestForm {
pub local_user_id: LocalUserId,
-pub token: String,
+pub token: SensitiveString,
}
@@ -2,6 +2,7 @@
use crate::schema::{person, person_follower};
use crate::{
newtypes::{DbUrl, InstanceId, PersonId},
+sensitive::SensitiveString,
source::placeholder_apub_url,
};
use chrono::{DateTime, Utc};
@@ -36,7 +37,7 @@ pub struct Person {
/// Whether the person is local to our site.
pub local: bool,
#[serde(skip)]
-pub private_key: Option<String>,
+pub private_key: Option<SensitiveString>,
#[serde(skip)]
pub public_key: String,
#[serde(skip)]
@@ -1,5 +1,6 @@
#[cfg(feature = "full")]
use crate::schema::secret;
+use crate::sensitive::SensitiveString;
#[derive(Clone)]
#[cfg_attr(feature = "full", derive(Queryable, Selectable, Identifiable))]
@@ -7,5 +8,5 @@ use crate::schema::secret;
#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))]
pub struct Secret {
pub id: i32,
-pub jwt_secret: String,
+pub jwt_secret: SensitiveString,
}
@@ -1,6 +1,9 @@
-use crate::newtypes::{DbUrl, InstanceId, SiteId};
#[cfg(feature = "full")]
use crate::schema::site;
+use crate::{
+newtypes::{DbUrl, InstanceId, SiteId},
+sensitive::SensitiveString,
+};
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use serde_with::skip_serializing_none;
@@ -35,7 +38,7 @@ pub struct Site {
/// The site inbox
pub inbox_url: DbUrl,
#[serde(skip)]
-pub private_key: Option<String>,
+pub private_key: Option<SensitiveString>,
// TODO: mark as `serde(skip)` in next major release as its not needed for api
pub public_key: String,
pub instance_id: InstanceId,
@@ -950,9 +950,8 @@ mod tests {
show_bot_accounts: Some(false),
..Default::default()
};
-let inserted_local_user =
-LocalUser::update(pool, data.local_user_view.local_user.id, &local_user_form).await?;
+LocalUser::update(pool, data.local_user_view.local_user.id, &local_user_form).await?;
-data.local_user_view.local_user = inserted_local_user;
+data.local_user_view.local_user.show_bot_accounts = false;
let read_post_listing = PostQuery {
community_id: Some(data.inserted_community.id),
@@ -986,9 +985,8 @@
show_bot_accounts: Some(true),
..Default::default()
};
-let inserted_local_user =
-LocalUser::update(pool, data.local_user_view.local_user.id, &local_user_form).await?;
+LocalUser::update(pool, data.local_user_view.local_user.id, &local_user_form).await?;
-data.local_user_view.local_user = inserted_local_user;
+data.local_user_view.local_user.show_bot_accounts = true;
let post_listings_with_bots = PostQuery {
community_id: Some(data.inserted_community.id),
@@ -1110,9 +1108,8 @@
show_bot_accounts: Some(false),
..Default::default()
};
-let inserted_local_user =
-LocalUser::update(pool, data.local_user_view.local_user.id, &local_user_form).await?;
+LocalUser::update(pool, data.local_user_view.local_user.id, &local_user_form).await?;
-data.local_user_view.local_user = inserted_local_user;
+data.local_user_view.local_user.show_bot_accounts = false;
let read_post_listing = PostQuery {
community_id: Some(data.inserted_community.id),
@@ -1533,9 +1530,8 @@
show_read_posts: Some(false),
..Default::default()
};
-let inserted_local_user =
-LocalUser::update(pool, data.local_user_view.local_user.id, &local_user_form).await?;
+LocalUser::update(pool, data.local_user_view.local_user.id, &local_user_form).await?;
-data.local_user_view.local_user = inserted_local_user;
+data.local_user_view.local_user.show_read_posts = false;
// Mark a post as read
PostRead::mark_as_read(
@@ -43,21 +43,18 @@ async fn node_info(context: web::Data<LemmyContext>) -> Result<HttpResponse, Err
.map_err(|_| ErrorBadRequest(LemmyError::from(anyhow!("not_found"))))?
.ok_or(ErrorBadRequest(LemmyError::from(anyhow!("not_found"))))?;
-let protocols = if site_view.local_site.federation_enabled {
-Some(vec!["activitypub".to_string()])
-} else {
-None
-};
// Since there are 3 registration options,
// we need to set open_registrations as true if RegistrationMode is not Closed.
let open_registrations = Some(site_view.local_site.registration_mode != RegistrationMode::Closed);
let json = NodeInfo {
-version: Some("2.0".to_string()),
+version: Some("2.1".to_string()),
software: Some(NodeInfoSoftware {
name: Some("lemmy".to_string()),
version: Some(VERSION.to_string()),
+repository: Some("https://github.com/LemmyNet/lemmy".to_string()),
+homepage: Some("https://join-lemmy.org/".to_string()),
}),
-protocols,
+protocols: Some(vec!["activitypub".to_string()]),
usage: Some(NodeInfoUsage {
users: Some(NodeInfoUsers {
total: Some(site_view.counts.users),
@@ -68,6 +65,11 @@ async fn node_info(context: web::Data<LemmyContext>) -> Result<HttpResponse, Err
local_comments: Some(site_view.counts.comments),
}),
open_registrations,
+services: Some(NodeInfoServices {
+inbound: Some(vec![]),
+outbound: Some(vec![]),
+}),
+metadata: Some(vec![]),
};
Ok(HttpResponse::Ok().json(json))
@@ -84,6 +86,7 @@ struct NodeInfoWellKnownLinks {
pub href: Url,
}
+/// Nodeinfo spec: http://nodeinfo.diaspora.software/docson/index.html#/ns/schema/2.1
#[derive(Serialize, Deserialize, Debug, Default)]
#[serde(rename_all = "camelCase", default)]
pub struct NodeInfo {
@@ -92,6 +95,9 @@ pub struct NodeInfo {
pub protocols: Option<Vec<String>>,
pub usage: Option<NodeInfoUsage>,
pub open_registrations: Option<bool>,
+/// These fields are required by the spec for no reason
+pub services: Option<NodeInfoServices>,
+pub metadata: Option<Vec<String>>,
}
#[derive(Serialize, Deserialize, Debug, Default)]
@@ -99,6 +105,8 @@ pub struct NodeInfo {
pub struct NodeInfoSoftware {
pub name: Option<String>,
pub version: Option<String>,
+pub repository: Option<String>,
+pub homepage: Option<String>,
}
#[derive(Serialize, Deserialize, Debug, Default)]
@@ -116,3 +124,10 @@ pub struct NodeInfoUsers {
pub active_halfyear: Option<i64>,
pub active_month: Option<i64>,
}
+#[derive(Serialize, Deserialize, Debug, Default)]
+#[serde(rename_all = "camelCase", default)]
+pub struct NodeInfoServices {
+pub inbound: Option<Vec<String>>,
+pub outbound: Option<Vec<String>>,
+}
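For reference, the response this produces now looks roughly like the sketch below (example values built with serde_json purely for illustration; `services` and `metadata` are the new fields, populated with empty lists only because the 2.1 schema requires them):

let expected = serde_json::json!({
    "version": "2.1",
    "software": {
        "name": "lemmy",
        "version": "0.19.0", // example value; the real one comes from VERSION
        "repository": "https://github.com/LemmyNet/lemmy",
        "homepage": "https://join-lemmy.org/"
    },
    "protocols": ["activitypub"],
    "openRegistrations": true, // example value
    "services": { "inbound": [], "outbound": [] },
    "metadata": []
});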
@@ -19,7 +19,7 @@ const ALLOWED_POST_URL_SCHEMES: [&str; 3] = ["http", "https", "magnet"];
const BODY_MAX_LENGTH: usize = 10000;
const POST_BODY_MAX_LENGTH: usize = 50000;
const BIO_MAX_LENGTH: usize = 300;
-const ALT_TEXT_MAX_LENGTH: usize = 300;
+const ALT_TEXT_MAX_LENGTH: usize = 1500;
const SITE_NAME_MAX_LENGTH: usize = 20;
const SITE_NAME_MIN_LENGTH: usize = 1;
const SITE_DESCRIPTION_MAX_LENGTH: usize = 150;
@@ -1 +1 @@
-Subproject commit a4681f70a4ddf077951ed2dcc8cf90bb243d4828
+Subproject commit f0ab81deea347c433277a90ae752b10f68473719
@@ -0,0 +1,3 @@
+SELECT
+1;
@@ -0,0 +1,4 @@
+-- This migration exists to trigger re-execution of replaceable_schema
+SELECT
+1;
@@ -262,12 +262,22 @@ pub fn config(cfg: &mut web::ServiceConfig, rate_limit: &RateLimitCell) {
// User
.service(
// Account action, I don't like that it's in /user maybe /accounts
-// Handle /user/register separately to add the register() rate limitter
+// Handle /user/register separately to add the register() rate limiter
web::resource("/user/register")
.guard(guard::Post())
.wrap(rate_limit.register())
.route(web::post().to(register)),
)
+// User
+.service(
+// Handle /user/login separately to add the register() rate limiter
+// TODO: pretty annoying way to apply rate limits for register and login, we should
+// group them under a common path so that rate limit is only applied once (eg under /account).
+web::resource("/user/login")
+.guard(guard::Post())
+.wrap(rate_limit.register())
+.route(web::post().to(login)),
+)
.service(
// Handle captcha separately
web::resource("/user/get_captcha")
@@ -306,7 +316,6 @@ pub fn config(cfg: &mut web::ServiceConfig, rate_limit: &RateLimitCell) {
.route("/banned", web::get().to(list_banned_users))
.route("/block", web::post().to(block_person))
// TODO Account actions. I don't like that they're in /user maybe /accounts
-.route("/login", web::post().to(login))
.route("/logout", web::post().to(logout))
.route("/delete_account", web::post().to(delete_account))
.route("/password_reset", web::post().to(reset_password))
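One possible shape for the TODO above, sketched for illustration only (hypothetical, not part of this change): registering both endpoints under a shared actix-web scope would let a single rate-limit wrapper cover them.

// Hypothetical alternative: one scope, one rate limiter for both routes.
cfg.service(
    web::scope("/account")
        .wrap(rate_limit.register())
        .route("/register", web::post().to(register))
        .route("/login", web::post().to(login)),
);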