Mirror of https://github.com/LemmyNet/lemmy.git, synced 2024-11-26 11:21:02 +00:00

Merge remote-tracking branch 'upstream/main' into migration-runner

Commit 1af517f6c4: 49 changed files with 2921 additions and 2536 deletions
@@ -3,6 +3,7 @@
variables:
- &rust_image "rust:1.77"
- &rust_nightly_image "rustlang/rust:nightly"
- &install_pnpm "corepack enable pnpm"
- &slow_check_paths
- event: pull_request

@@ -24,15 +25,17 @@ variables:
"diesel.toml",
".gitmodules",
]

# Broken for cron jobs currently, see
# https://github.com/woodpecker-ci/woodpecker/issues/1716
# clone:
# git:
# image: woodpeckerci/plugin-git
# settings:
# recursive: true
# submodule_update_remote: true
- install_binstall: &install_binstall
- wget https://github.com/cargo-bins/cargo-binstall/releases/latest/download/cargo-binstall-x86_64-unknown-linux-musl.tgz
- tar -xvf cargo-binstall-x86_64-unknown-linux-musl.tgz
- cp cargo-binstall /usr/local/cargo/bin
- install_diesel_cli: &install_diesel_cli
- apt update && apt install -y lsb-release build-essential
- sh -c 'echo "deb https://apt.postgresql.org/pub/repos/apt $(lsb_release -cs)-pgdg main" > /etc/apt/sources.list.d/pgdg.list'
- wget --quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc | apt-key add -
- apt update && apt install -y postgresql-client-16
- cargo install diesel_cli --no-default-features --features postgres
- export PATH="$CARGO_HOME/bin:$PATH"

steps:
prepare_repo:

@@ -66,7 +69,7 @@ steps:
- event: pull_request

cargo_fmt:
image: rustlang/rust:nightly
image: *rust_nightly_image
environment:
# store cargo data in repo folder so that it gets cached between steps
CARGO_HOME: .cargo_home

@@ -77,11 +80,9 @@ steps:
- event: pull_request

cargo_machete:
image: rustlang/rust:nightly
image: *rust_nightly_image
commands:
- wget https://github.com/cargo-bins/cargo-binstall/releases/latest/download/cargo-binstall-x86_64-unknown-linux-musl.tgz
- tar -xvf cargo-binstall-x86_64-unknown-linux-musl.tgz
- cp cargo-binstall /usr/local/cargo/bin
- <<: *install_binstall
- cargo binstall -y cargo-machete
- cargo machete
when:

@@ -132,6 +133,17 @@ steps:
- diff config/defaults.hjson config/defaults_current.hjson
when: *slow_check_paths

check_diesel_schema:
image: willsquire/diesel-cli
environment:
CARGO_HOME: .cargo_home
DATABASE_URL: postgres://lemmy:password@database:5432/lemmy
commands:
- diesel migration run
- diesel print-schema --config-file=diesel.toml > tmp.schema
- diff tmp.schema crates/db_schema/src/schema.rs
when: *slow_check_paths

check_db_perf_tool:
image: *rust_image
environment:

@@ -170,6 +182,7 @@ steps:
CARGO_HOME: .cargo_home
commands:
- target/lemmy_server migration run
- <<: *install_diesel_cli
- diesel print-schema --config-file=diesel.toml > tmp.schema
- diff tmp.schema crates/db_schema/src/schema.rs
when: *slow_check_paths

@@ -239,7 +252,9 @@ steps:
publish_to_crates_io:
image: *rust_image
commands:
- cargo install cargo-workspaces
- <<: *install_binstall
# Install cargo-workspaces
- cargo binstall -y cargo-workspaces
- cp -r migrations crates/db_schema/
- cargo workspaces publish --token "$CARGO_API_TOKEN" --from-git --allow-dirty --no-verify --allow-branch "${CI_COMMIT_TAG}" --yes custom "${CI_COMMIT_TAG}"
secrets: [cargo_api_token]
Cargo.lock (generated), 601 changes: file diff suppressed because it is too large.

Cargo.toml, 24 changes
@@ -1,5 +1,5 @@
[workspace.package]
version = "0.19.4-beta.6"
version = "0.19.4-beta.7"
edition = "2021"
description = "A link aggregator for the fediverse"
license = "AGPL-3.0"

@@ -88,17 +88,17 @@ unused_self = "deny"
unwrap_used = "deny"

[workspace.dependencies]
lemmy_api = { version = "=0.19.4-beta.6", path = "./crates/api" }
lemmy_api_crud = { version = "=0.19.4-beta.6", path = "./crates/api_crud" }
lemmy_apub = { version = "=0.19.4-beta.6", path = "./crates/apub" }
lemmy_utils = { version = "=0.19.4-beta.6", path = "./crates/utils", default-features = false }
lemmy_db_schema = { version = "=0.19.4-beta.6", path = "./crates/db_schema" }
lemmy_api_common = { version = "=0.19.4-beta.6", path = "./crates/api_common" }
lemmy_routes = { version = "=0.19.4-beta.6", path = "./crates/routes" }
lemmy_db_views = { version = "=0.19.4-beta.6", path = "./crates/db_views" }
lemmy_db_views_actor = { version = "=0.19.4-beta.6", path = "./crates/db_views_actor" }
lemmy_db_views_moderator = { version = "=0.19.4-beta.6", path = "./crates/db_views_moderator" }
lemmy_federate = { version = "=0.19.4-beta.6", path = "./crates/federate" }
lemmy_api = { version = "=0.19.4-beta.7", path = "./crates/api" }
lemmy_api_crud = { version = "=0.19.4-beta.7", path = "./crates/api_crud" }
lemmy_apub = { version = "=0.19.4-beta.7", path = "./crates/apub" }
lemmy_utils = { version = "=0.19.4-beta.7", path = "./crates/utils", default-features = false }
lemmy_db_schema = { version = "=0.19.4-beta.7", path = "./crates/db_schema" }
lemmy_api_common = { version = "=0.19.4-beta.7", path = "./crates/api_common" }
lemmy_routes = { version = "=0.19.4-beta.7", path = "./crates/routes" }
lemmy_db_views = { version = "=0.19.4-beta.7", path = "./crates/db_views" }
lemmy_db_views_actor = { version = "=0.19.4-beta.7", path = "./crates/db_views_actor" }
lemmy_db_views_moderator = { version = "=0.19.4-beta.7", path = "./crates/db_views_moderator" }
lemmy_federate = { version = "=0.19.4-beta.7", path = "./crates/federate" }
activitypub_federation = { version = "0.5.6", default-features = false, features = [
"actix-web",
] }
api_tests/.npmrc (new file), 1 change
@@ -0,0 +1 @@
package-manager-strict=false

File diff suppressed because it is too large.
@@ -661,40 +661,60 @@ test("A and G subscribe to B (center) A posts, it gets announced to G", async ()
});

test("Report a post", async () => {
// Note, this is a different one from the setup
let betaCommunity = (await resolveBetaCommunity(beta)).community;
if (!betaCommunity) {
throw "Missing beta community";
}
// Create post from alpha
let alphaCommunity = (await resolveBetaCommunity(alpha)).community!;
await followBeta(alpha);
let postRes = await createPost(beta, betaCommunity.community.id);
let postRes = await createPost(alpha, alphaCommunity.community.id);
expect(postRes.post_view.post).toBeDefined();

let alphaPost = (await resolvePost(alpha, postRes.post_view.post)).post;
if (!alphaPost) {
throw "Missing alpha post";
}
let alphaReport = (
await reportPost(alpha, alphaPost.post.id, randomString(10))
).post_report_view.post_report;

// Send report from gamma
let gammaPost = (await resolvePost(gamma, alphaPost.post)).post!;
let gammaReport = (
await reportPost(gamma, gammaPost.post.id, randomString(10))
).post_report_view.post_report;
expect(gammaReport).toBeDefined();

// Report was federated to community instance
let betaReport = (await waitUntil(
() =>
listPostReports(beta).then(p =>
p.post_reports.find(
r =>
r.post_report.original_post_name === alphaReport.original_post_name,
r.post_report.original_post_name === gammaReport.original_post_name,
),
),
res => !!res,
))!.post_report;
expect(betaReport).toBeDefined();
expect(betaReport.resolved).toBe(false);
expect(betaReport.original_post_name).toBe(alphaReport.original_post_name);
expect(betaReport.original_post_url).toBe(alphaReport.original_post_url);
expect(betaReport.original_post_body).toBe(alphaReport.original_post_body);
expect(betaReport.reason).toBe(alphaReport.reason);
expect(betaReport.original_post_name).toBe(gammaReport.original_post_name);
//expect(betaReport.original_post_url).toBe(gammaReport.original_post_url);
expect(betaReport.original_post_body).toBe(gammaReport.original_post_body);
expect(betaReport.reason).toBe(gammaReport.reason);
await unfollowRemotes(alpha);

// Report was federated to poster's instance
let alphaReport = (await waitUntil(
() =>
listPostReports(alpha).then(p =>
p.post_reports.find(
r =>
r.post_report.original_post_name === gammaReport.original_post_name,
),
),
res => !!res,
))!.post_report;
expect(alphaReport).toBeDefined();
expect(alphaReport.resolved).toBe(false);
expect(alphaReport.original_post_name).toBe(gammaReport.original_post_name);
//expect(alphaReport.original_post_url).toBe(gammaReport.original_post_url);
expect(alphaReport.original_post_body).toBe(gammaReport.original_post_body);
expect(alphaReport.reason).toBe(gammaReport.reason);
});

test("Fetch post via redirect", async () => {
@@ -29,7 +29,7 @@ pub async fn add_admin(
.await?
.ok_or(LemmyErrorType::ObjectNotLocal)?;

let added_admin = LocalUser::update(
LocalUser::update(
&mut context.pool(),
added_local_user.local_user.id,
&LocalUserUpdateForm {

@@ -43,7 +43,7 @@ pub async fn add_admin(
// Mod tables
let form = ModAddForm {
mod_person_id: local_user_view.person.id,
other_person_id: added_admin.person_id,
other_person_id: added_local_user.person.id,
removed: Some(!data.added),
};
@@ -1,11 +1,7 @@
use crate::{build_totp_2fa, generate_totp_2fa_secret};
use activitypub_federation::config::Data;
use actix_web::web::Json;
use lemmy_api_common::{
context::LemmyContext,
person::GenerateTotpSecretResponse,
sensitive::Sensitive,
};
use lemmy_api_common::{context::LemmyContext, person::GenerateTotpSecretResponse};
use lemmy_db_schema::source::local_user::{LocalUser, LocalUserUpdateForm};
use lemmy_db_views::structs::{LocalUserView, SiteView};
use lemmy_utils::error::{LemmyErrorType, LemmyResult};

@@ -41,6 +37,6 @@ pub async fn generate_totp_secret(
.await?;

Ok(Json(GenerateTotpSecretResponse {
totp_secret_url: Sensitive::new(secret_url),
totp_secret_url: secret_url.into(),
}))
}
@@ -28,6 +28,7 @@ use lemmy_utils::{
error::{LemmyErrorType, LemmyResult},
utils::validation::{is_valid_bio_field, is_valid_display_name, is_valid_matrix_id},
};
use std::ops::Deref;

#[tracing::instrument(skip(context))]
pub async fn save_user_settings(

@@ -57,7 +58,7 @@ pub async fn save_user_settings(
if let Some(Some(email)) = &email {
let previous_email = local_user_view.local_user.email.clone().unwrap_or_default();
// if email was changed, check that it is not taken and send verification mail
if &previous_email != email {
if previous_email.deref() != email {
if LocalUser::is_email_taken(&mut context.pool(), email).await? {
return Err(LemmyErrorType::EmailAlreadyExists)?;
}

@@ -141,11 +142,7 @@ pub async fn save_user_settings(
..Default::default()
};

// Ignore errors, because 'no fields updated' will return an error.
// https://github.com/LemmyNet/lemmy/issues/4076
LocalUser::update(&mut context.pool(), local_user_id, &local_user_form)
.await
.ok();
LocalUser::update(&mut context.pool(), local_user_id, &local_user_form).await?;

// Update the vote display modes
let vote_display_modes_form = LocalUserVoteDisplayModeUpdateForm {
@@ -9,12 +9,10 @@ use lemmy_db_schema::{
source::{
email_verification::EmailVerification,
local_user::{LocalUser, LocalUserUpdateForm},
person::Person,
},
traits::Crud,
RegistrationMode,
};
use lemmy_db_views::structs::SiteView;
use lemmy_db_views::structs::{LocalUserView, SiteView};
use lemmy_utils::error::{LemmyErrorType, LemmyResult};

pub async fn verify_email(

@@ -38,7 +36,7 @@ pub async fn verify_email(
};
let local_user_id = verification.local_user_id;

let local_user = LocalUser::update(&mut context.pool(), local_user_id, &form).await?;
LocalUser::update(&mut context.pool(), local_user_id, &form).await?;

EmailVerification::delete_old_tokens_for_local_user(&mut context.pool(), local_user_id).await?;

@@ -46,12 +44,16 @@ pub async fn verify_email(
if site_view.local_site.registration_mode == RegistrationMode::RequireApplication
&& site_view.local_site.application_email_admins
{
let person = Person::read(&mut context.pool(), local_user.person_id)
let local_user = LocalUserView::read(&mut context.pool(), local_user_id)
.await?
.ok_or(LemmyErrorType::CouldntFindPerson)?;

send_new_applicant_email_to_admins(&person.name, &mut context.pool(), context.settings())
.await?;
send_new_applicant_email_to_admins(
&local_user.person.name,
&mut context.pool(),
context.settings(),
)
.await?;
}

Ok(Json(SuccessResponse::default()))
@@ -1,9 +1,10 @@
use crate::{context::LemmyContext, sensitive::Sensitive};
use crate::context::LemmyContext;
use actix_web::{http::header::USER_AGENT, HttpRequest};
use chrono::Utc;
use jsonwebtoken::{decode, encode, DecodingKey, EncodingKey, Header, Validation};
use lemmy_db_schema::{
newtypes::LocalUserId,
sensitive::SensitiveString,
source::login_token::{LoginToken, LoginTokenCreateForm},
};
use lemmy_utils::error::{LemmyErrorExt, LemmyErrorType, LemmyResult};

@@ -40,7 +41,7 @@ impl Claims {
user_id: LocalUserId,
req: HttpRequest,
context: &LemmyContext,
) -> LemmyResult<Sensitive<String>> {
) -> LemmyResult<SensitiveString> {
let hostname = context.settings().hostname.clone();
let my_claims = Claims {
sub: user_id.0.to_string(),

@@ -50,7 +51,7 @@ impl Claims {

let secret = &context.secret().jwt_secret;
let key = EncodingKey::from_secret(secret.as_ref());
let token = encode(&Header::default(), &my_claims, &key)?;
let token: SensitiveString = encode(&Header::default(), &my_claims, &key)?.into();
let ip = req
.connection_info()
.realip_remote_addr()

@@ -67,7 +68,7 @@ impl Claims {
user_agent,
};
LoginToken::create(&mut context.pool(), form).await?;
Ok(Sensitive::new(token))
Ok(token)
}
}
@@ -64,7 +64,7 @@ impl LemmyContext {
let client = ClientBuilder::new(client).build();
let secret = Secret {
id: 0,
jwt_secret: String::new(),
jwt_secret: String::new().into(),
};

let rate_limit_cell = RateLimitCell::with_test_config();
@@ -14,7 +14,6 @@ pub mod private_message;
pub mod request;
#[cfg(feature = "full")]
pub mod send_activity;
pub mod sensitive;
pub mod site;
#[cfg(feature = "full")]
pub mod utils;
@@ -1,6 +1,6 @@
use crate::sensitive::Sensitive;
use lemmy_db_schema::{
newtypes::{CommentReplyId, CommunityId, LanguageId, PersonId, PersonMentionId},
sensitive::SensitiveString,
source::site::Site,
CommentSortType,
ListingType,

@@ -25,8 +25,8 @@ use ts_rs::TS;
#[cfg_attr(feature = "full", ts(export))]
/// Logging into lemmy.
pub struct Login {
pub username_or_email: Sensitive<String>,
pub password: Sensitive<String>,
pub username_or_email: SensitiveString,
pub password: SensitiveString,
/// May be required, if totp is enabled for their account.
pub totp_2fa_token: Option<String>,
}

@@ -38,11 +38,11 @@ pub struct Login {
/// Register / Sign up to lemmy.
pub struct Register {
pub username: String,
pub password: Sensitive<String>,
pub password_verify: Sensitive<String>,
pub password: SensitiveString,
pub password_verify: SensitiveString,
pub show_nsfw: Option<bool>,
/// email is mandatory if email verification is enabled on the server
pub email: Option<Sensitive<String>>,
pub email: Option<SensitiveString>,
/// The UUID of the captcha item.
pub captcha_uuid: Option<String>,
/// Your captcha answer.

@@ -99,7 +99,7 @@ pub struct SaveUserSettings {
/// Your display name, which can contain strange characters, and does not need to be unique.
pub display_name: Option<String>,
/// Your email.
pub email: Option<Sensitive<String>>,
pub email: Option<SensitiveString>,
/// Your bio / info, in markdown.
pub bio: Option<String>,
/// Your matrix user id. Ex: @my_user:matrix.org

@@ -140,9 +140,9 @@ pub struct SaveUserSettings {
#[cfg_attr(feature = "full", ts(export))]
/// Changes your account password.
pub struct ChangePassword {
pub new_password: Sensitive<String>,
pub new_password_verify: Sensitive<String>,
pub old_password: Sensitive<String>,
pub new_password: SensitiveString,
pub new_password_verify: SensitiveString,
pub old_password: SensitiveString,
}

#[skip_serializing_none]

@@ -152,7 +152,7 @@ pub struct ChangePassword {
/// A response for your login.
pub struct LoginResponse {
/// This is None in response to `Register` if email verification is enabled, or the server requires registration applications.
pub jwt: Option<Sensitive<String>>,
pub jwt: Option<SensitiveString>,
/// If registration applications are required, this will return true for a signup response.
pub registration_created: bool,
/// If email verifications are required, this will return true for a signup response.

@@ -340,7 +340,7 @@ pub struct CommentReplyResponse {
#[cfg_attr(feature = "full", ts(export))]
/// Delete your account.
pub struct DeleteAccount {
pub password: Sensitive<String>,
pub password: SensitiveString,
pub delete_content: bool,
}

@@ -349,7 +349,7 @@ pub struct DeleteAccount {
#[cfg_attr(feature = "full", ts(export))]
/// Reset your password via email.
pub struct PasswordReset {
pub email: Sensitive<String>,
pub email: SensitiveString,
}

#[derive(Debug, Serialize, Deserialize, Clone, Default, PartialEq, Eq, Hash)]

@@ -357,9 +357,9 @@ pub struct PasswordReset {
#[cfg_attr(feature = "full", ts(export))]
/// Change your password after receiving a reset request.
pub struct PasswordChangeAfterReset {
pub token: Sensitive<String>,
pub password: Sensitive<String>,
pub password_verify: Sensitive<String>,
pub token: SensitiveString,
pub password: SensitiveString,
pub password_verify: SensitiveString,
}

#[skip_serializing_none]

@@ -405,7 +405,7 @@ pub struct VerifyEmail {
#[cfg_attr(feature = "full", derive(TS))]
#[cfg_attr(feature = "full", ts(export))]
pub struct GenerateTotpSecretResponse {
pub totp_secret_url: Sensitive<String>,
pub totp_secret_url: SensitiveString,
}

#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq, Hash)]
@@ -1,116 +0,0 @@
use serde::{Deserialize, Serialize};
use std::{
borrow::Borrow,
ops::{Deref, DerefMut},
};
#[cfg(feature = "full")]
use ts_rs::TS;

#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Deserialize, Serialize, Default)]
#[serde(transparent)]
pub struct Sensitive<T>(T);

impl<T> Sensitive<T> {
pub fn new(item: T) -> Self {
Sensitive(item)
}
pub fn into_inner(self) -> T {
self.0
}
}

impl<T> std::fmt::Debug for Sensitive<T> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_struct("Sensitive").finish()
}
}

impl<T> AsRef<T> for Sensitive<T> {
fn as_ref(&self) -> &T {
&self.0
}
}

impl AsRef<str> for Sensitive<String> {
fn as_ref(&self) -> &str {
&self.0
}
}

impl AsRef<[u8]> for Sensitive<String> {
fn as_ref(&self) -> &[u8] {
self.0.as_ref()
}
}

impl AsRef<[u8]> for Sensitive<Vec<u8>> {
fn as_ref(&self) -> &[u8] {
self.0.as_ref()
}
}

impl<T> AsMut<T> for Sensitive<T> {
fn as_mut(&mut self) -> &mut T {
&mut self.0
}
}

impl AsMut<str> for Sensitive<String> {
fn as_mut(&mut self) -> &mut str {
&mut self.0
}
}

impl Deref for Sensitive<String> {
type Target = str;

fn deref(&self) -> &Self::Target {
&self.0
}
}

impl DerefMut for Sensitive<String> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}

impl<T> From<T> for Sensitive<T> {
fn from(t: T) -> Self {
Sensitive(t)
}
}

impl From<&str> for Sensitive<String> {
fn from(s: &str) -> Self {
Sensitive(s.into())
}
}

impl<T> Borrow<T> for Sensitive<T> {
fn borrow(&self) -> &T {
&self.0
}
}

impl Borrow<str> for Sensitive<String> {
fn borrow(&self) -> &str {
&self.0
}
}

#[cfg(feature = "full")]
impl TS for Sensitive<String> {
fn name() -> String {
"string".to_string()
}
fn name_with_type_args(_args: Vec<String>) -> String {
"string".to_string()
}
fn dependencies() -> Vec<ts_rs::Dependency> {
Vec::new()
}
fn transparent() -> bool {
true
}
}
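The generic Sensitive<T> wrapper deleted above is replaced throughout this commit by a concrete SensitiveString type from lemmy_db_schema::sensitive, which the diff uses but never shows. As a rough sketch only (an assumption pieced together from the call sites visible in this diff, not the actual implementation), a newtype supporting String::new().into(), secret_url.into(), SensitiveString::into_inner, previous_email.deref(), and a redacted Debug output could look like this:

use serde::{Deserialize, Serialize};
use std::ops::Deref;

// Hypothetical sketch of a SensitiveString newtype; the real type lives in
// lemmy_db_schema and is not part of this diff.
#[derive(Clone, PartialEq, Eq, Hash, Default, Serialize, Deserialize)]
#[serde(transparent)]
pub struct SensitiveString(String);

impl SensitiveString {
  // Used elsewhere in this diff as `self.private_key.clone().map(SensitiveString::into_inner)`.
  pub fn into_inner(self) -> String {
    self.0
  }
}

// Keeps secrets out of logs: Debug prints only the type name, never the value.
impl std::fmt::Debug for SensitiveString {
  fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
    f.debug_struct("SensitiveString").finish()
  }
}

// `String::new().into()` and `encode(...)?.into()` in this diff rely on From<String>.
impl From<String> for SensitiveString {
  fn from(s: String) -> Self {
    SensitiveString(s)
  }
}

// `previous_email.deref() != email` compares the inner value as a plain &str.
impl Deref for SensitiveString {
  type Target = str;

  fn deref(&self) -> &Self::Target {
    &self.0
  }
}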
@@ -23,7 +23,6 @@
"href": "https://lemmy.ml/pictrs/image/xl8W7FZfk9.jpg"
}
],
"commentsEnabled": true,
"sensitive": false,
"language": {
"identifier": "ko",

@@ -23,7 +23,6 @@
"href": "https://lemmy.ml/pictrs/image/xl8W7FZfk9.jpg"
}
],
"commentsEnabled": true,
"sensitive": false,
"published": "2021-10-29T15:10:51.557399Z",
"updated": "2021-10-29T15:11:35.976374Z"

@@ -15,7 +15,6 @@
"cc": [],
"mediaType": "text/html",
"attachment": [],
"commentsEnabled": true,
"sensitive": false,
"published": "2023-02-06T06:42:41.939437Z",
"language": {

@@ -36,7 +35,6 @@
"cc": [],
"mediaType": "text/html",
"attachment": [],
"commentsEnabled": true,
"sensitive": false,
"published": "2023-02-06T06:42:37.119567Z",
"language": {

@@ -22,7 +22,6 @@
],
"name": "another outbox test",
"mediaType": "text/html",
"commentsEnabled": true,
"sensitive": false,
"stickied": false,
"published": "2021-11-18T17:19:45.895163Z"

@@ -51,7 +50,6 @@
],
"name": "outbox test",
"mediaType": "text/html",
"commentsEnabled": true,
"sensitive": false,
"stickied": false,
"published": "2021-11-18T17:19:05.763109Z"

@@ -25,7 +25,6 @@
"url": "https://enterprise.lemmy.ml/pictrs/image/eOtYb9iEiB.png"
},
"sensitive": false,
"commentsEnabled": true,
"language": {
"identifier": "fr",
"name": "Français"
@ -26,6 +26,7 @@ use lemmy_db_schema::{
|
|||
source::{
|
||||
activity::ActivitySendTargets,
|
||||
community::Community,
|
||||
moderator::{ModLockPost, ModLockPostForm},
|
||||
person::Person,
|
||||
post::{Post, PostUpdateForm},
|
||||
},
|
||||
|
@ -60,12 +61,22 @@ impl ActivityHandler for LockPage {
|
|||
}
|
||||
|
||||
async fn receive(self, context: &Data<Self::DataType>) -> Result<(), Self::Error> {
|
||||
insert_received_activity(&self.id, context).await?;
|
||||
let locked = Some(true);
|
||||
let form = PostUpdateForm {
|
||||
locked: Some(true),
|
||||
locked,
|
||||
..Default::default()
|
||||
};
|
||||
let post = self.object.dereference(context).await?;
|
||||
Post::update(&mut context.pool(), post.id, &form).await?;
|
||||
|
||||
let form = ModLockPostForm {
|
||||
mod_person_id: self.actor.dereference(context).await?.id,
|
||||
post_id: post.id,
|
||||
locked,
|
||||
};
|
||||
ModLockPost::create(&mut context.pool(), &form).await?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
@ -94,12 +105,21 @@ impl ActivityHandler for UndoLockPage {
|
|||
|
||||
async fn receive(self, context: &Data<Self::DataType>) -> Result<(), Self::Error> {
|
||||
insert_received_activity(&self.id, context).await?;
|
||||
let locked = Some(false);
|
||||
let form = PostUpdateForm {
|
||||
locked: Some(false),
|
||||
locked,
|
||||
..Default::default()
|
||||
};
|
||||
let post = self.object.object.dereference(context).await?;
|
||||
Post::update(&mut context.pool(), post.id, &form).await?;
|
||||
|
||||
let form = ModLockPostForm {
|
||||
mod_person_id: self.actor.dereference(context).await?.id,
|
||||
post_id: post.id,
|
||||
locked,
|
||||
};
|
||||
ModLockPost::create(&mut context.pool(), &form).await?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
|
|
@ -4,7 +4,6 @@ use crate::{
|
|||
community::send_activity_in_community,
|
||||
generate_activity_id,
|
||||
verify_is_public,
|
||||
verify_mod_action,
|
||||
verify_person_in_community,
|
||||
},
|
||||
activity_lists::AnnouncableActivities,
|
||||
|
@ -78,14 +77,13 @@ impl CreateOrUpdatePage {
|
|||
|
||||
let create_or_update =
|
||||
CreateOrUpdatePage::new(post.into(), &person, &community, kind, &context).await?;
|
||||
let is_mod_action = create_or_update.object.is_mod_action(&context).await?;
|
||||
let activity = AnnouncableActivities::CreateOrUpdatePost(create_or_update);
|
||||
send_activity_in_community(
|
||||
activity,
|
||||
&person,
|
||||
&community,
|
||||
ActivitySendTargets::empty(),
|
||||
is_mod_action,
|
||||
false,
|
||||
&context,
|
||||
)
|
||||
.await?;
|
||||
|
@ -112,30 +110,8 @@ impl ActivityHandler for CreateOrUpdatePage {
|
|||
let community = self.community(context).await?;
|
||||
verify_person_in_community(&self.actor, &community, context).await?;
|
||||
check_community_deleted_or_removed(&community)?;
|
||||
|
||||
match self.kind {
|
||||
CreateOrUpdateType::Create => {
|
||||
verify_domains_match(self.actor.inner(), self.object.id.inner())?;
|
||||
verify_urls_match(self.actor.inner(), self.object.creator()?.inner())?;
|
||||
// Check that the post isnt locked, as that isnt possible for newly created posts.
|
||||
// However, when fetching a remote post we generate a new create activity with the current
|
||||
// locked value, so this check may fail. So only check if its a local community,
|
||||
// because then we will definitely receive all create and update activities separately.
|
||||
let is_locked = self.object.comments_enabled == Some(false);
|
||||
if community.local && is_locked {
|
||||
Err(LemmyErrorType::NewPostCannotBeLocked)?
|
||||
}
|
||||
}
|
||||
CreateOrUpdateType::Update => {
|
||||
let is_mod_action = self.object.is_mod_action(context).await?;
|
||||
if is_mod_action {
|
||||
verify_mod_action(&self.actor, &community, context).await?;
|
||||
} else {
|
||||
verify_domains_match(self.actor.inner(), self.object.id.inner())?;
|
||||
verify_urls_match(self.actor.inner(), self.object.creator()?.inner())?;
|
||||
}
|
||||
}
|
||||
}
|
||||
verify_domains_match(self.actor.inner(), self.object.id.inner())?;
|
||||
verify_urls_match(self.actor.inner(), self.object.creator()?.inner())?;
|
||||
ApubPost::verify(&self.object, self.actor.inner(), context).await?;
|
||||
Ok(())
|
||||
}
|
||||
|
|
|
@ -4,9 +4,10 @@ use crate::objects::{
|
|||
person::ApubPerson,
|
||||
post::ApubPost,
|
||||
};
|
||||
use activitypub_federation::{config::Data, fetch::object_id::ObjectId};
|
||||
use activitypub_federation::{config::Data, fetch::object_id::ObjectId, traits::Object};
|
||||
use actix_web::web::Json;
|
||||
use futures::{future::try_join_all, StreamExt};
|
||||
use itertools::Itertools;
|
||||
use lemmy_api_common::{context::LemmyContext, SuccessResponse};
|
||||
use lemmy_db_schema::{
|
||||
newtypes::DbUrl,
|
||||
|
@ -30,8 +31,11 @@ use lemmy_utils::{
|
|||
spawn_try_task,
|
||||
};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::future::Future;
|
||||
use tracing::info;
|
||||
|
||||
const PARALLELISM: usize = 10;
|
||||
|
||||
/// Backup of user data. This struct should never be changed so that the data can be used as a
|
||||
/// long-term backup in case the instance goes down unexpectedly. All fields are optional to allow
|
||||
/// importing partial backups.
|
||||
|
@ -40,7 +44,7 @@ use tracing::info;
|
|||
///
|
||||
/// Be careful with any changes to this struct, to avoid breaking changes which could prevent
|
||||
/// importing older backups.
|
||||
#[derive(Debug, Serialize, Deserialize, Clone)]
|
||||
#[derive(Debug, Serialize, Deserialize, Clone, Default)]
|
||||
pub struct UserSettingsBackup {
|
||||
pub display_name: Option<String>,
|
||||
pub bio: Option<String>,
|
||||
|
@ -167,141 +171,91 @@ pub async fn import_settings(
|
|||
}
|
||||
|
||||
spawn_try_task(async move {
|
||||
const PARALLELISM: usize = 10;
|
||||
let person_id = local_user_view.person.id;
|
||||
|
||||
// These tasks fetch objects from remote instances which might be down.
|
||||
// TODO: Would be nice if we could send a list of failed items with api response, but then
|
||||
// the request would likely timeout.
|
||||
let mut failed_items = vec![];
|
||||
|
||||
info!(
|
||||
"Starting settings backup for {}",
|
||||
"Starting settings import for {}",
|
||||
local_user_view.person.name
|
||||
);
|
||||
|
||||
futures::stream::iter(
|
||||
data
|
||||
.followed_communities
|
||||
.clone()
|
||||
.into_iter()
|
||||
// reset_request_count works like clone, and is necessary to avoid running into request limit
|
||||
.map(|f| (f, context.reset_request_count()))
|
||||
.map(|(followed, context)| async move {
|
||||
// need to reset outgoing request count to avoid running into limit
|
||||
let community = followed.dereference(&context).await?;
|
||||
let form = CommunityFollowerForm {
|
||||
person_id,
|
||||
community_id: community.id,
|
||||
pending: true,
|
||||
};
|
||||
CommunityFollower::follow(&mut context.pool(), &form).await?;
|
||||
LemmyResult::Ok(())
|
||||
}),
|
||||
let failed_followed_communities = fetch_and_import(
|
||||
data.followed_communities.clone(),
|
||||
&context,
|
||||
|(followed, context)| async move {
|
||||
let community = followed.dereference(&context).await?;
|
||||
let form = CommunityFollowerForm {
|
||||
person_id,
|
||||
community_id: community.id,
|
||||
pending: true,
|
||||
};
|
||||
CommunityFollower::follow(&mut context.pool(), &form).await?;
|
||||
LemmyResult::Ok(())
|
||||
},
|
||||
)
|
||||
.buffer_unordered(PARALLELISM)
|
||||
.collect::<Vec<_>>()
|
||||
.await
|
||||
.into_iter()
|
||||
.enumerate()
|
||||
.for_each(|(i, r)| {
|
||||
if let Err(e) = r {
|
||||
failed_items.push(data.followed_communities.get(i).map(|u| u.inner().clone()));
|
||||
info!("Failed to import followed community: {e}");
|
||||
}
|
||||
});
|
||||
|
||||
futures::stream::iter(
|
||||
data
|
||||
.saved_posts
|
||||
.clone()
|
||||
.into_iter()
|
||||
.map(|s| (s, context.reset_request_count()))
|
||||
.map(|(saved, context)| async move {
|
||||
let post = saved.dereference(&context).await?;
|
||||
let form = PostSavedForm {
|
||||
person_id,
|
||||
post_id: post.id,
|
||||
};
|
||||
PostSaved::save(&mut context.pool(), &form).await?;
|
||||
LemmyResult::Ok(())
|
||||
}),
|
||||
)
|
||||
.buffer_unordered(PARALLELISM)
|
||||
.collect::<Vec<_>>()
|
||||
.await
|
||||
.into_iter()
|
||||
.enumerate()
|
||||
.for_each(|(i, r)| {
|
||||
if let Err(e) = r {
|
||||
failed_items.push(data.followed_communities.get(i).map(|u| u.inner().clone()));
|
||||
info!("Failed to import saved post community: {e}");
|
||||
}
|
||||
});
|
||||
|
||||
futures::stream::iter(
|
||||
data
|
||||
.saved_comments
|
||||
.clone()
|
||||
.into_iter()
|
||||
.map(|s| (s, context.reset_request_count()))
|
||||
.map(|(saved, context)| async move {
|
||||
let comment = saved.dereference(&context).await?;
|
||||
let form = CommentSavedForm {
|
||||
person_id,
|
||||
comment_id: comment.id,
|
||||
};
|
||||
CommentSaved::save(&mut context.pool(), &form).await?;
|
||||
LemmyResult::Ok(())
|
||||
}),
|
||||
)
|
||||
.buffer_unordered(PARALLELISM)
|
||||
.collect::<Vec<_>>()
|
||||
.await
|
||||
.into_iter()
|
||||
.enumerate()
|
||||
.for_each(|(i, r)| {
|
||||
if let Err(e) = r {
|
||||
failed_items.push(data.followed_communities.get(i).map(|u| u.inner().clone()));
|
||||
info!("Failed to import saved comment community: {e}");
|
||||
}
|
||||
});
|
||||
|
||||
let failed_items: Vec<_> = failed_items.into_iter().flatten().collect();
|
||||
info!(
|
||||
"Finished settings backup for {}, failed items: {:#?}",
|
||||
local_user_view.person.name, failed_items
|
||||
);
|
||||
|
||||
// These tasks don't connect to any remote instances but only insert directly in the database.
|
||||
// That means the only error condition are db connection failures, so no extra error handling is
|
||||
// needed.
|
||||
try_join_all(data.blocked_communities.iter().map(|blocked| async {
|
||||
// dont fetch unknown blocked objects from home server
|
||||
let community = blocked.dereference_local(&context).await?;
|
||||
let form = CommunityBlockForm {
|
||||
person_id,
|
||||
community_id: community.id,
|
||||
};
|
||||
CommunityBlock::block(&mut context.pool(), &form).await?;
|
||||
LemmyResult::Ok(())
|
||||
}))
|
||||
.await?;
|
||||
|
||||
try_join_all(data.blocked_users.iter().map(|blocked| async {
|
||||
// dont fetch unknown blocked objects from home server
|
||||
let target = blocked.dereference_local(&context).await?;
|
||||
let form = PersonBlockForm {
|
||||
person_id,
|
||||
target_id: target.id,
|
||||
};
|
||||
PersonBlock::block(&mut context.pool(), &form).await?;
|
||||
LemmyResult::Ok(())
|
||||
}))
|
||||
let failed_saved_posts = fetch_and_import(
|
||||
data.saved_posts.clone(),
|
||||
&context,
|
||||
|(saved, context)| async move {
|
||||
let post = saved.dereference(&context).await?;
|
||||
let form = PostSavedForm {
|
||||
person_id,
|
||||
post_id: post.id,
|
||||
};
|
||||
PostSaved::save(&mut context.pool(), &form).await?;
|
||||
LemmyResult::Ok(())
|
||||
},
|
||||
)
|
||||
.await?;
|
||||
|
||||
let failed_saved_comments = fetch_and_import(
|
||||
data.saved_comments.clone(),
|
||||
&context,
|
||||
|(saved, context)| async move {
|
||||
let comment = saved.dereference(&context).await?;
|
||||
let form = CommentSavedForm {
|
||||
person_id,
|
||||
comment_id: comment.id,
|
||||
};
|
||||
CommentSaved::save(&mut context.pool(), &form).await?;
|
||||
LemmyResult::Ok(())
|
||||
},
|
||||
)
|
||||
.await?;
|
||||
|
||||
let failed_community_blocks = fetch_and_import(
|
||||
data.blocked_communities.clone(),
|
||||
&context,
|
||||
|(blocked, context)| async move {
|
||||
let community = blocked.dereference(&context).await?;
|
||||
let form = CommunityBlockForm {
|
||||
person_id,
|
||||
community_id: community.id,
|
||||
};
|
||||
CommunityBlock::block(&mut context.pool(), &form).await?;
|
||||
LemmyResult::Ok(())
|
||||
},
|
||||
)
|
||||
.await?;
|
||||
|
||||
let failed_user_blocks = fetch_and_import(
|
||||
data.blocked_users.clone(),
|
||||
&context,
|
||||
|(blocked, context)| async move {
|
||||
let context = context.reset_request_count();
|
||||
let target = blocked.dereference(&context).await?;
|
||||
let form = PersonBlockForm {
|
||||
person_id,
|
||||
target_id: target.id,
|
||||
};
|
||||
PersonBlock::block(&mut context.pool(), &form).await?;
|
||||
LemmyResult::Ok(())
|
||||
},
|
||||
)
|
||||
.await?;
|
||||
|
||||
try_join_all(data.blocked_instances.iter().map(|domain| async {
|
||||
// dont fetch unknown blocked objects from home server
|
||||
let instance = Instance::read_or_create(&mut context.pool(), domain.clone()).await?;
|
||||
let form = InstanceBlockForm {
|
||||
person_id,
|
||||
|
@ -312,17 +266,53 @@ pub async fn import_settings(
|
|||
}))
|
||||
.await?;
|
||||
|
||||
info!("Settings import completed for {}, the following items failed: {failed_followed_communities}, {failed_saved_posts}, {failed_saved_comments}, {failed_community_blocks}, {failed_user_blocks}",
|
||||
local_user_view.person.name);
|
||||
|
||||
Ok(())
|
||||
});
|
||||
|
||||
Ok(Json(Default::default()))
|
||||
}
|
||||
|
||||
async fn fetch_and_import<Kind, Fut>(
|
||||
objects: Vec<ObjectId<Kind>>,
|
||||
context: &Data<LemmyContext>,
|
||||
import_fn: impl FnMut((ObjectId<Kind>, Data<LemmyContext>)) -> Fut,
|
||||
) -> LemmyResult<String>
|
||||
where
|
||||
Kind: Object + Send + 'static,
|
||||
for<'de2> <Kind as Object>::Kind: Deserialize<'de2>,
|
||||
Fut: Future<Output = LemmyResult<()>>,
|
||||
{
|
||||
let mut failed_items = vec![];
|
||||
futures::stream::iter(
|
||||
objects
|
||||
.clone()
|
||||
.into_iter()
|
||||
// need to reset outgoing request count to avoid running into limit
|
||||
.map(|s| (s, context.reset_request_count()))
|
||||
.map(import_fn),
|
||||
)
|
||||
.buffer_unordered(PARALLELISM)
|
||||
.collect::<Vec<_>>()
|
||||
.await
|
||||
.into_iter()
|
||||
.enumerate()
|
||||
.for_each(|(i, r): (usize, LemmyResult<()>)| {
|
||||
if r.is_err() {
|
||||
if let Some(object) = objects.get(i) {
|
||||
failed_items.push(object.inner().clone());
|
||||
}
|
||||
}
|
||||
});
|
||||
Ok(failed_items.into_iter().join(","))
|
||||
}
|
||||
#[cfg(test)]
|
||||
#[allow(clippy::indexing_slicing)]
|
||||
mod tests {
|
||||
|
||||
use crate::api::user_settings_backup::{export_settings, import_settings};
|
||||
use crate::api::user_settings_backup::{export_settings, import_settings, UserSettingsBackup};
|
||||
use activitypub_federation::config::Data;
|
||||
use lemmy_api_common::context::LemmyContext;
|
||||
use lemmy_db_schema::{
|
||||
|
@ -420,6 +410,44 @@ mod tests {
|
|||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
#[serial]
|
||||
async fn test_settings_partial_import() -> LemmyResult<()> {
|
||||
let context = LemmyContext::init_test_context().await;
|
||||
|
||||
let export_user =
|
||||
create_user("hanna".to_string(), Some("my bio".to_string()), &context).await?;
|
||||
|
||||
let community_form = CommunityInsertForm::builder()
|
||||
.name("testcom".to_string())
|
||||
.title("testcom".to_string())
|
||||
.instance_id(export_user.person.instance_id)
|
||||
.build();
|
||||
let community = Community::create(&mut context.pool(), &community_form).await?;
|
||||
let follower_form = CommunityFollowerForm {
|
||||
community_id: community.id,
|
||||
person_id: export_user.person.id,
|
||||
pending: false,
|
||||
};
|
||||
CommunityFollower::follow(&mut context.pool(), &follower_form).await?;
|
||||
|
||||
let backup = export_settings(export_user.clone(), context.reset_request_count()).await?;
|
||||
|
||||
let import_user = create_user("charles".to_string(), None, &context).await?;
|
||||
|
||||
let backup2 = UserSettingsBackup {
|
||||
followed_communities: backup.followed_communities.clone(),
|
||||
..Default::default()
|
||||
};
|
||||
import_settings(
|
||||
actix_web::web::Json(backup2),
|
||||
import_user.clone(),
|
||||
context.reset_request_count(),
|
||||
)
|
||||
.await?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
#[serial]
|
||||
async fn disallow_large_backup() -> LemmyResult<()> {
|
||||
|
|
|
@ -29,7 +29,9 @@ pub(crate) mod mentions;
|
|||
pub mod objects;
|
||||
pub mod protocol;
|
||||
|
||||
pub const FEDERATION_HTTP_FETCH_LIMIT: u32 = 50;
|
||||
/// Maximum number of outgoing HTTP requests to fetch a single object. Needs to be high enough
|
||||
/// to fetch a new community with posts, moderators and featured posts.
|
||||
pub const FEDERATION_HTTP_FETCH_LIMIT: u32 = 100;
|
||||
|
||||
/// Only include a basic context to save space and bandwidth. The main context is hosted statically
|
||||
/// on join-lemmy.org. Include activitystreams explicitly for better compat, but this could
|
||||
|
|
|
@ -28,6 +28,7 @@ use lemmy_api_common::{
|
|||
},
|
||||
};
|
||||
use lemmy_db_schema::{
|
||||
sensitive::SensitiveString,
|
||||
source::{
|
||||
activity::ActorType,
|
||||
actor_language::CommunityLanguage,
|
||||
|
@ -213,7 +214,7 @@ impl Actor for ApubCommunity {
|
|||
}
|
||||
|
||||
fn private_key_pem(&self) -> Option<String> {
|
||||
self.private_key.clone()
|
||||
self.private_key.clone().map(SensitiveString::into_inner)
|
||||
}
|
||||
|
||||
fn inbox(&self) -> Url {
|
||||
|
|
|
@ -29,6 +29,7 @@ use lemmy_api_common::{
|
|||
};
|
||||
use lemmy_db_schema::{
|
||||
newtypes::InstanceId,
|
||||
sensitive::SensitiveString,
|
||||
source::{
|
||||
activity::ActorType,
|
||||
actor_language::SiteLanguage,
|
||||
|
@ -187,7 +188,7 @@ impl Actor for ApubSite {
|
|||
}
|
||||
|
||||
fn private_key_pem(&self) -> Option<String> {
|
||||
self.private_key.clone()
|
||||
self.private_key.clone().map(SensitiveString::into_inner)
|
||||
}
|
||||
|
||||
fn inbox(&self) -> Url {
|
||||
|
|
|
@ -30,6 +30,7 @@ use lemmy_api_common::{
|
|||
},
|
||||
};
|
||||
use lemmy_db_schema::{
|
||||
sensitive::SensitiveString,
|
||||
source::{
|
||||
activity::ActorType,
|
||||
local_site::LocalSite,
|
||||
|
@ -200,7 +201,7 @@ impl Actor for ApubPerson {
|
|||
}
|
||||
|
||||
fn private_key_pem(&self) -> Option<String> {
|
||||
self.private_key.clone()
|
||||
self.private_key.clone().map(SensitiveString::into_inner)
|
||||
}
|
||||
|
||||
fn inbox(&self) -> Url {
|
||||
|
|
|
@ -36,7 +36,6 @@ use lemmy_db_schema::{
|
|||
source::{
|
||||
community::Community,
|
||||
local_site::LocalSite,
|
||||
moderator::{ModLockPost, ModLockPostForm},
|
||||
person::Person,
|
||||
post::{Post, PostInsertForm, PostUpdateForm},
|
||||
},
|
||||
|
@ -147,7 +146,6 @@ impl Object for ApubPost {
|
|||
source: self.body.clone().map(Source::new),
|
||||
attachment,
|
||||
image: self.thumbnail_url.clone().map(ImageObject::new),
|
||||
comments_enabled: Some(!self.locked),
|
||||
sensitive: Some(self.nsfw),
|
||||
language,
|
||||
published: Some(self.published),
|
||||
|
@ -165,12 +163,8 @@ impl Object for ApubPost {
|
|||
expected_domain: &Url,
|
||||
context: &Data<Self::DataType>,
|
||||
) -> LemmyResult<()> {
|
||||
// We can't verify the domain in case of mod action, because the mod may be on a different
|
||||
// instance from the post author.
|
||||
if !page.is_mod_action(context).await? {
|
||||
verify_domains_match(page.id.inner(), expected_domain)?;
|
||||
verify_is_remote_object(&page.id, context)?;
|
||||
};
|
||||
verify_domains_match(page.id.inner(), expected_domain)?;
|
||||
verify_is_remote_object(&page.id, context)?;
|
||||
|
||||
let community = page.community(context).await?;
|
||||
check_apub_id_valid_with_strictness(page.id.inner(), community.local, context).await?;
|
||||
|
@ -218,62 +212,46 @@ impl Object for ApubPost {
|
|||
name = name.chars().take(MAX_TITLE_LENGTH).collect();
|
||||
}
|
||||
|
||||
// read existing, local post if any (for generating mod log)
|
||||
let old_post = page.id.dereference_local(context).await;
|
||||
|
||||
let first_attachment = page.attachment.first();
|
||||
let local_site = LocalSite::read(&mut context.pool()).await.ok();
|
||||
|
||||
let form = if !page.is_mod_action(context).await? {
|
||||
let url = if let Some(attachment) = first_attachment.cloned() {
|
||||
Some(attachment.url())
|
||||
} else if page.kind == PageType::Video {
|
||||
// we cant display videos directly, so insert a link to external video page
|
||||
Some(page.id.inner().clone())
|
||||
} else {
|
||||
None
|
||||
};
|
||||
check_url_scheme(&url)?;
|
||||
|
||||
let alt_text = first_attachment.cloned().and_then(Attachment::alt_text);
|
||||
|
||||
let url = proxy_image_link_opt_apub(url, context).await?;
|
||||
|
||||
let slur_regex = &local_site_opt_to_slur_regex(&local_site);
|
||||
let url_blocklist = get_url_blocklist(context).await?;
|
||||
|
||||
let body = read_from_string_or_source_opt(&page.content, &page.media_type, &page.source);
|
||||
let body = process_markdown_opt(&body, slur_regex, &url_blocklist, context).await?;
|
||||
let language_id =
|
||||
LanguageTag::to_language_id_single(page.language, &mut context.pool()).await?;
|
||||
|
||||
PostInsertForm::builder()
|
||||
.name(name)
|
||||
.url(url.map(Into::into))
|
||||
.body(body)
|
||||
.alt_text(alt_text)
|
||||
.creator_id(creator.id)
|
||||
.community_id(community.id)
|
||||
.locked(page.comments_enabled.map(|e| !e))
|
||||
.published(page.published.map(Into::into))
|
||||
.updated(page.updated.map(Into::into))
|
||||
.deleted(Some(false))
|
||||
.nsfw(page.sensitive)
|
||||
.ap_id(Some(page.id.clone().into()))
|
||||
.local(Some(false))
|
||||
.language_id(language_id)
|
||||
.build()
|
||||
let url = if let Some(attachment) = first_attachment.cloned() {
|
||||
Some(attachment.url())
|
||||
} else if page.kind == PageType::Video {
|
||||
// we cant display videos directly, so insert a link to external video page
|
||||
Some(page.id.inner().clone())
|
||||
} else {
|
||||
// if is mod action, only update locked/stickied fields, nothing else
|
||||
PostInsertForm::builder()
|
||||
.name(name)
|
||||
.creator_id(creator.id)
|
||||
.community_id(community.id)
|
||||
.ap_id(Some(page.id.clone().into()))
|
||||
.locked(page.comments_enabled.map(|e| !e))
|
||||
.updated(page.updated.map(Into::into))
|
||||
.build()
|
||||
None
|
||||
};
|
||||
check_url_scheme(&url)?;
|
||||
|
||||
let alt_text = first_attachment.cloned().and_then(Attachment::alt_text);
|
||||
|
||||
let url = proxy_image_link_opt_apub(url, context).await?;
|
||||
|
||||
let slur_regex = &local_site_opt_to_slur_regex(&local_site);
|
||||
let url_blocklist = get_url_blocklist(context).await?;
|
||||
|
||||
let body = read_from_string_or_source_opt(&page.content, &page.media_type, &page.source);
|
||||
let body = process_markdown_opt(&body, slur_regex, &url_blocklist, context).await?;
|
||||
let language_id =
|
||||
LanguageTag::to_language_id_single(page.language, &mut context.pool()).await?;
|
||||
|
||||
let form = PostInsertForm::builder()
|
||||
.name(name)
|
||||
.url(url.map(Into::into))
|
||||
.body(body)
|
||||
.alt_text(alt_text)
|
||||
.creator_id(creator.id)
|
||||
.community_id(community.id)
|
||||
.published(page.published.map(Into::into))
|
||||
.updated(page.updated.map(Into::into))
|
||||
.deleted(Some(false))
|
||||
.nsfw(page.sensitive)
|
||||
.ap_id(Some(page.id.clone().into()))
|
||||
.local(Some(false))
|
||||
.language_id(language_id)
|
||||
.build();
|
||||
|
||||
let timestamp = page.updated.or(page.published).unwrap_or_else(naive_now);
|
||||
let post = Post::insert_apub(&mut context.pool(), timestamp, &form).await?;
|
||||
|
@ -287,16 +265,6 @@ impl Object for ApubPost {
|
|||
context.reset_request_count(),
|
||||
);
|
||||
|
||||
// write mod log entry for lock
|
||||
if Page::is_locked_changed(&old_post, &page.comments_enabled) {
|
||||
let form = ModLockPostForm {
|
||||
mod_person_id: creator.id,
|
||||
post_id: post.id,
|
||||
locked: Some(post.locked),
|
||||
};
|
||||
ModLockPost::create(&mut context.pool(), &form).await?;
|
||||
}
|
||||
|
||||
Ok(post.into())
|
||||
}
|
||||
}
|
||||
|
|
|
@ -60,7 +60,6 @@ pub struct Page {
|
|||
#[serde(default)]
|
||||
pub(crate) attachment: Vec<Attachment>,
|
||||
pub(crate) image: Option<ImageObject>,
|
||||
pub(crate) comments_enabled: Option<bool>,
|
||||
pub(crate) sensitive: Option<bool>,
|
||||
pub(crate) published: Option<DateTime<Utc>>,
|
||||
pub(crate) updated: Option<DateTime<Utc>>,
|
||||
|
@ -156,28 +155,6 @@ pub enum HashtagType {
|
|||
}
|
||||
|
||||
impl Page {
|
||||
/// Only mods can change the post's locked status. So if it is changed from the default value,
|
||||
/// it is a mod action and needs to be verified as such.
|
||||
///
|
||||
/// Locked needs to be false on a newly created post (verified in [[CreatePost]].
|
||||
pub(crate) async fn is_mod_action(&self, context: &Data<LemmyContext>) -> LemmyResult<bool> {
|
||||
let old_post = self.id.clone().dereference_local(context).await;
|
||||
Ok(Page::is_locked_changed(&old_post, &self.comments_enabled))
|
||||
}
|
||||
|
||||
pub(crate) fn is_locked_changed<E>(
|
||||
old_post: &Result<ApubPost, E>,
|
||||
new_comments_enabled: &Option<bool>,
|
||||
) -> bool {
|
||||
if let Some(new_comments_enabled) = new_comments_enabled {
|
||||
if let Ok(old_post) = old_post {
|
||||
return new_comments_enabled != &!old_post.locked;
|
||||
}
|
||||
}
|
||||
|
||||
false
|
||||
}
|
||||
|
||||
pub(crate) fn creator(&self) -> LemmyResult<ObjectId<ApubPerson>> {
|
||||
match &self.attributed_to {
|
||||
AttributedTo::Lemmy(l) => Ok(l.clone()),
|
||||
|
|
|
@ -5,6 +5,12 @@
|
|||
-- (even if only other columns are updated) because triggers can run after the deletion of referenced rows and
|
||||
-- before the automatic deletion of the row that references it. This is not a problem for insert or delete.
|
||||
--
|
||||
-- After a row update begins, a concurrent update on the same row can't begin until the whole
|
||||
-- transaction that contains the first update is finished. To reduce this locking, statements in
|
||||
-- triggers should be ordered based on the likelihood of concurrent writers. For example, updating
|
||||
-- site_aggregates should be done last because the same row is updated for all local stuff. If
|
||||
-- it were not last, then the locking period for concurrent writers would extend to include the
|
||||
-- time consumed by statements that come after.
|
||||
--
|
||||
--
|
||||
-- Create triggers for both post and comments
|
||||
|
@ -38,16 +44,18 @@ BEGIN
|
|||
(thing_like).thing_id, coalesce(sum(count_diff) FILTER (WHERE (thing_like).score = 1), 0) AS upvotes, coalesce(sum(count_diff) FILTER (WHERE (thing_like).score != 1), 0) AS downvotes FROM select_old_and_new_rows AS old_and_new_rows GROUP BY (thing_like).thing_id) AS diff
|
||||
WHERE
|
||||
a.thing_id = diff.thing_id
|
||||
RETURNING
|
||||
r.creator_id_from_thing_aggregates (a.*) AS creator_id, diff.upvotes - diff.downvotes AS score)
|
||||
UPDATE
|
||||
person_aggregates AS a
|
||||
SET
|
||||
thing_score = a.thing_score + diff.score FROM (
|
||||
SELECT
|
||||
creator_id, sum(score) AS score FROM thing_diff GROUP BY creator_id) AS diff
|
||||
WHERE
|
||||
a.person_id = diff.creator_id;
|
||||
AND (diff.upvotes, diff.downvotes) != (0, 0)
|
||||
RETURNING
|
||||
r.creator_id_from_thing_aggregates (a.*) AS creator_id, diff.upvotes - diff.downvotes AS score)
|
||||
UPDATE
|
||||
person_aggregates AS a
|
||||
SET
|
||||
thing_score = a.thing_score + diff.score FROM (
|
||||
SELECT
|
||||
creator_id, sum(score) AS score FROM thing_diff GROUP BY creator_id) AS diff
|
||||
WHERE
|
||||
a.person_id = diff.creator_id
|
||||
AND diff.score != 0;
|
||||
RETURN NULL;
|
||||
END;
|
||||
$$);
|
||||
|
@ -62,6 +70,21 @@ CALL r.post_or_comment ('post');
|
|||
CALL r.post_or_comment ('comment');
|
||||
|
||||
-- Create triggers that update counts in parent aggregates
|
||||
CREATE FUNCTION r.parent_comment_ids (path ltree)
|
||||
RETURNS SETOF int
|
||||
LANGUAGE sql
|
||||
IMMUTABLE parallel safe
|
||||
BEGIN
|
||||
ATOMIC
|
||||
SELECT
|
||||
comment_id::int
|
||||
FROM
|
||||
string_to_table (ltree2text (path), '.') AS comment_id
|
||||
-- Skip first and last
|
||||
LIMIT (nlevel (path) - 2) OFFSET 1;
|
||||
|
||||
END;
|
||||
|
||||
CALL r.create_triggers ('comment', $$
|
||||
BEGIN
|
||||
UPDATE
|
||||
|
@ -76,60 +99,84 @@ BEGIN
|
|||
r.is_counted (comment)
|
||||
GROUP BY (comment).creator_id) AS diff
|
||||
WHERE
|
||||
a.person_id = diff.creator_id;
|
||||
a.person_id = diff.creator_id
|
||||
AND diff.comment_count != 0;
|
||||
|
||||
UPDATE
|
||||
site_aggregates AS a
|
||||
comment_aggregates AS a
|
||||
SET
|
||||
comments = a.comments + diff.comments
|
||||
child_count = a.child_count + diff.child_count
|
||||
FROM (
|
||||
SELECT
|
||||
coalesce(sum(count_diff), 0) AS comments
|
||||
FROM
|
||||
select_old_and_new_rows AS old_and_new_rows
|
||||
WHERE
|
||||
r.is_counted (comment)
|
||||
AND (comment).local) AS diff;
|
||||
parent_id,
|
||||
coalesce(sum(count_diff), 0) AS child_count
|
||||
FROM (
|
||||
-- For each inserted or deleted comment, this outputs 1 row for each parent comment.
|
||||
-- For example, this:
|
||||
--
|
||||
-- count_diff | (comment).path
|
||||
-- ------------+----------------
|
||||
-- 1 | 0.5.6.7
|
||||
-- 1 | 0.5.6.7.8
|
||||
--
|
||||
-- becomes this:
|
||||
--
|
||||
-- count_diff | parent_id
|
||||
-- ------------+-----------
|
||||
-- 1 | 5
|
||||
-- 1 | 6
|
||||
-- 1 | 5
|
||||
-- 1 | 6
|
||||
-- 1 | 7
|
||||
SELECT
|
||||
count_diff,
|
||||
parent_id
|
||||
FROM
|
||||
select_old_and_new_rows AS old_and_new_rows,
|
||||
LATERAL r.parent_comment_ids ((comment).path) AS parent_id) AS expanded_old_and_new_rows
|
||||
GROUP BY
|
||||
parent_id) AS diff
|
||||
WHERE
|
||||
a.comment_id = diff.parent_id
|
||||
AND diff.child_count != 0;
|
||||
|
||||
WITH post_diff AS (
UPDATE
post_aggregates AS a
SET
comments = a.comments + diff.comments,
newest_comment_time = GREATEST (a.newest_comment_time, (
SELECT
published
FROM select_new_rows AS new_comment
WHERE
a.post_id = new_comment.post_id ORDER BY published DESC LIMIT 1)),
newest_comment_time_necro = GREATEST (a.newest_comment_time_necro, (
SELECT
published
FROM select_new_rows AS new_comment
WHERE
a.post_id = new_comment.post_id
-- Ignore comments from the post's creator
AND a.creator_id != new_comment.creator_id
-- Ignore comments on old posts
AND a.published > (new_comment.published - '2 days'::interval)
ORDER BY published DESC LIMIT 1))
newest_comment_time = GREATEST (a.newest_comment_time, diff.newest_comment_time),
newest_comment_time_necro = GREATEST (a.newest_comment_time_necro, diff.newest_comment_time_necro)
FROM (
SELECT
(comment).post_id,
coalesce(sum(count_diff), 0) AS comments
post.id AS post_id,
coalesce(sum(count_diff), 0) AS comments,
-- Old rows are excluded using `count_diff = 1`
max((comment).published) FILTER (WHERE count_diff = 1) AS newest_comment_time,
max((comment).published) FILTER (WHERE count_diff = 1
-- Ignore comments from the post's creator
AND post.creator_id != (comment).creator_id
-- Ignore comments on old posts
AND post.published > ((comment).published - '2 days'::interval)) AS newest_comment_time_necro,
r.is_counted (post.*) AS include_in_community_aggregates
FROM
select_old_and_new_rows AS old_and_new_rows
LEFT JOIN post ON post.id = (comment).post_id
WHERE
r.is_counted (comment)
GROUP BY
(comment).post_id) AS diff
LEFT JOIN post ON post.id = diff.post_id
post.id) AS diff
WHERE
a.post_id = diff.post_id
AND (diff.comments,
GREATEST (a.newest_comment_time, diff.newest_comment_time),
GREATEST (a.newest_comment_time_necro, diff.newest_comment_time_necro)) != (0,
a.newest_comment_time,
a.newest_comment_time_necro)
RETURNING
a.community_id,
diff.comments,
r.is_counted (post.*) AS include_in_community_aggregates)
diff.include_in_community_aggregates)
UPDATE
community_aggregates AS a
SET

@ -145,7 +192,23 @@ FROM (
GROUP BY
community_id) AS diff
WHERE
a.community_id = diff.community_id;
a.community_id = diff.community_id
AND diff.comments != 0;

UPDATE
site_aggregates AS a
SET
comments = a.comments + diff.comments
FROM (
SELECT
coalesce(sum(count_diff), 0) AS comments
FROM
select_old_and_new_rows AS old_and_new_rows
WHERE
r.is_counted (comment)
AND (comment).local) AS diff
WHERE
diff.comments != 0;

RETURN NULL;

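The newest_comment_time_necro filter above is easy to misread inside the aggregate. As a rough Rust illustration of the rule, assuming chrono as a dependency (the helper name is made up for this sketch): a comment only bumps the necro timestamp when it is not by the post's creator and the post is less than two days old at the time of the comment.

use chrono::{DateTime, Duration, Utc};

// Hypothetical helper mirroring the SQL filter; names are illustrative only.
fn bumps_necro_timestamp(
    post_creator_id: i32,
    post_published: DateTime<Utc>,
    comment_creator_id: i32,
    comment_published: DateTime<Utc>,
) -> bool {
    // Ignore comments from the post's creator
    comment_creator_id != post_creator_id
        // Ignore comments on old posts: post.published > comment.published - 2 days
        && post_published > comment_published - Duration::days(2)
}

fn main() {
    let post_published = Utc::now() - Duration::days(10);
    let comment_published = Utc::now();
    // A comment on a ten-day-old post does not count as "necro" activity.
    assert!(!bumps_necro_timestamp(1, post_published, 2, comment_published));
}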
@ -167,20 +230,8 @@ BEGIN
r.is_counted (post)
GROUP BY (post).creator_id) AS diff
WHERE
a.person_id = diff.creator_id;

UPDATE
site_aggregates AS a
SET
posts = a.posts + diff.posts
FROM (
SELECT
coalesce(sum(count_diff), 0) AS posts
FROM
select_old_and_new_rows AS old_and_new_rows
WHERE
r.is_counted (post)
AND (post).local) AS diff;
a.person_id = diff.creator_id
AND diff.post_count != 0;

UPDATE
community_aggregates AS a

@ -197,7 +248,23 @@ FROM (
GROUP BY
(post).community_id) AS diff
WHERE
a.community_id = diff.community_id;
a.community_id = diff.community_id
AND diff.posts != 0;

UPDATE
site_aggregates AS a
SET
posts = a.posts + diff.posts
FROM (
SELECT
coalesce(sum(count_diff), 0) AS posts
FROM
select_old_and_new_rows AS old_and_new_rows
WHERE
r.is_counted (post)
AND (post).local) AS diff
WHERE
diff.posts != 0;

RETURN NULL;

@ -217,7 +284,9 @@ BEGIN
FROM select_old_and_new_rows AS old_and_new_rows
WHERE
r.is_counted (community)
AND (community).local) AS diff;
AND (community).local) AS diff
WHERE
diff.communities != 0;

RETURN NULL;

@ -235,7 +304,9 @@ BEGIN
SELECT
coalesce(sum(count_diff), 0) AS users
FROM select_old_and_new_rows AS old_and_new_rows
WHERE (person).local) AS diff;
WHERE (person).local) AS diff
WHERE
diff.users != 0;

RETURN NULL;

@ -270,7 +341,8 @@ BEGIN
GROUP BY
old_post.community_id) AS diff
WHERE
a.community_id = diff.community_id;
a.community_id = diff.community_id
AND diff.comments != 0;
RETURN NULL;
END;
$$;

@ -296,7 +368,8 @@ BEGIN
LEFT JOIN community ON community.id = (community_follower).community_id
LEFT JOIN person ON person.id = (community_follower).person_id GROUP BY (community_follower).community_id) AS diff
WHERE
a.community_id = diff.community_id;
a.community_id = diff.community_id
AND (diff.subscribers, diff.subscribers_local) != (0, 0);

RETURN NULL;

@ -474,3 +547,24 @@ CREATE TRIGGER delete_follow
FOR EACH ROW
EXECUTE FUNCTION r.delete_follow_before_person ();

-- Triggers that change values before insert or update
CREATE FUNCTION r.comment_change_values ()
RETURNS TRIGGER
LANGUAGE plpgsql
AS $$
DECLARE
id text = NEW.id::text;
BEGIN
-- Make `path` end with `id` if it doesn't already
IF NOT (NEW.path ~ ('*.' || id)::lquery) THEN
NEW.path = NEW.path || id;
END IF;
RETURN NEW;
END
$$;

CREATE TRIGGER change_values
BEFORE INSERT OR UPDATE ON comment
FOR EACH ROW
EXECUTE FUNCTION r.comment_change_values ();

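The comment_change_values trigger above makes sure a comment's ltree path always ends with the comment's own id. A rough, self-contained Rust rendering of that rule, for illustration only and not code from this commit:

fn normalize_path(path: &str, id: i64) -> String {
    // Mirror the trigger: append the id unless the path already ends with it.
    let id = id.to_string();
    if path.rsplit('.').next() == Some(id.as_str()) {
        path.to_string()
    } else {
        format!("{path}.{id}")
    }
}

fn main() {
    assert_eq!(normalize_path("0.5.6", 7), "0.5.6.7");
    assert_eq!(normalize_path("0.5.6.7", 7), "0.5.6.7");
}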
@ -15,12 +15,7 @@ use crate::{
utils::{functions::coalesce, get_conn, naive_now, DbPool, DELETED_REPLACEMENT_TEXT},
};
use chrono::{DateTime, Utc};
use diesel::{
dsl::{insert_into, sql_query},
result::Error,
ExpressionMethods,
QueryDsl,
};
use diesel::{dsl::insert_into, result::Error, ExpressionMethods, QueryDsl};
use diesel_async::RunQueryDsl;
use diesel_ltree::Ltree;
use url::Url;

@ -72,81 +67,23 @@ impl Comment {
parent_path: Option<&Ltree>,
) -> Result<Comment, Error> {
let conn = &mut get_conn(pool).await?;
let comment_form = (comment_form, parent_path.map(|p| comment::path.eq(p)));

conn
.build_transaction()
.run(|conn| {
Box::pin(async move {
// Insert, to get the id
let inserted_comment = if let Some(timestamp) = timestamp {
insert_into(comment::table)
.values(comment_form)
.on_conflict(comment::ap_id)
.filter_target(coalesce(comment::updated, comment::published).lt(timestamp))
.do_update()
.set(comment_form)
.get_result::<Self>(conn)
.await?
} else {
insert_into(comment::table)
.values(comment_form)
.get_result::<Self>(conn)
.await?
};

let comment_id = inserted_comment.id;

// You need to update the ltree column
let ltree = Ltree(if let Some(parent_path) = parent_path {
// The previous parent will already have 0 in it
// Append this comment id
format!("{}.{}", parent_path.0, comment_id)
} else {
// '0' is always the first path, append to that
format!("{}.{}", 0, comment_id)
});

let updated_comment = diesel::update(comment::table.find(comment_id))
.set(comment::path.eq(ltree))
.get_result::<Self>(conn)
.await?;

// Update the child count for the parent comment_aggregates
// You could do this with a trigger, but since you have to do this manually anyway,
// you can just have it here
if let Some(parent_path) = parent_path {
// You have to update counts for all parents, not just the immediate one
// TODO if the performance of this is terrible, it might be better to do this as part of a
// scheduled query... although the counts would often be wrong.
//
// The child_count query for reference:
// select c.id, c.path, count(c2.id) as child_count from comment c
// left join comment c2 on c2.path <@ c.path and c2.path != c.path
// group by c.id

let parent_id = parent_path.0.split('.').nth(1);

if let Some(parent_id) = parent_id {
let top_parent = format!("0.{}", parent_id);
let update_child_count_stmt = format!(
"
update comment_aggregates ca set child_count = c.child_count
from (
select c.id, c.path, count(c2.id) as child_count from comment c
join comment c2 on c2.path <@ c.path and c2.path != c.path
and c.path <@ '{top_parent}'
group by c.id
) as c
where ca.comment_id = c.id"
);

sql_query(update_child_count_stmt).execute(conn).await?;
}
}
Ok(updated_comment)
}) as _
})
.await
if let Some(timestamp) = timestamp {
insert_into(comment::table)
.values(comment_form)
.on_conflict(comment::ap_id)
.filter_target(coalesce(comment::updated, comment::published).lt(timestamp))
.do_update()
.set(comment_form)
.get_result::<Self>(conn)
.await
} else {
insert_into(comment::table)
.values(comment_form)
.get_result::<Self>(conn)
.await
}
}

pub async fn read_from_apub_id(

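The child_count bookkeeping that the removed block above did by hand (see the SQL embedded in the old code) is now left to the database triggers. A rough, self-contained Rust rendering of the same counting rule, purely for illustration:

fn child_counts(comments: &[(i64, String)]) -> Vec<(i64, usize)> {
    comments
        .iter()
        .map(|(id, path)| {
            // A descendant's ltree path starts with "<ancestor path>."
            let prefix = format!("{path}.");
            let count = comments
                .iter()
                .filter(|(_, other)| other.starts_with(prefix.as_str()))
                .count();
            (*id, count)
        })
        .collect()
}

fn main() {
    let comments = vec![
        (5, "0.5".to_string()),
        (6, "0.5.6".to_string()),
        (7, "0.5.6.7".to_string()),
    ];
    assert_eq!(child_counts(&comments), vec![(5, 2), (6, 1), (7, 0)]);
}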
@ -55,12 +55,17 @@ impl LocalUser {
pool: &mut DbPool<'_>,
local_user_id: LocalUserId,
form: &LocalUserUpdateForm,
) -> Result<LocalUser, Error> {
) -> Result<usize, Error> {
let conn = &mut get_conn(pool).await?;
diesel::update(local_user::table.find(local_user_id))
let res = diesel::update(local_user::table.find(local_user_id))
.set(form)
.get_result::<Self>(conn)
.await
.execute(conn)
.await;
// Diesel will throw an error if the query is all Nones (not updating anything), ignore this.
match res {
Err(Error::QueryBuilderError(_)) => Ok(0),
other => other,
}
}

pub async fn delete(pool: &mut DbPool<'_>, id: LocalUserId) -> Result<usize, Error> {

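The update above now returns the affected row count and ignores Diesel's QueryBuilderError, which is what Diesel returns when an all-None changeset leaves nothing to update. A minimal sketch of that pattern, assuming a diesel dependency; the helper name is illustrative, not from this commit:

use diesel::result::Error;

// Treat "nothing to update" as zero affected rows instead of an error.
fn ignore_empty_changeset(res: Result<usize, Error>) -> Result<usize, Error> {
    match res {
        Err(Error::QueryBuilderError(_)) => Ok(0),
        other => other,
    }
}

fn main() {
    // A successful update with one affected row passes through unchanged.
    assert_eq!(ignore_empty_changeset(Ok(1)).unwrap(), 1);
}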
@ -50,7 +50,7 @@ impl PasswordResetRequest {
) -> Result<PasswordResetRequest, Error> {
let form = PasswordResetRequestForm {
local_user_id: from_local_user_id,
token: token_,
token: token_.into(),
};

Self::create(pool, &form).await

@ -134,7 +134,7 @@ mod tests {
let expected_password_reset_request = PasswordResetRequest {
id: inserted_password_reset_request.id,
local_user_id: inserted_local_user.id,
token: token.to_string(),
token: token.to_string().into(),
published: inserted_password_reset_request.published,
};

@ -19,6 +19,7 @@ pub mod aggregates;
#[cfg(feature = "full")]
pub mod impls;
pub mod newtypes;
pub mod sensitive;
#[cfg(feature = "full")]
#[rustfmt::skip]
#[allow(clippy::wildcard_imports)]

57 crates/db_schema/src/sensitive.rs Normal file
@ -0,0 +1,57 @@
use serde::{Deserialize, Serialize};
use std::{fmt::Debug, ops::Deref};
#[cfg(feature = "full")]
use ts_rs::TS;

#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Deserialize, Serialize, Default)]
#[cfg_attr(feature = "full", derive(DieselNewType))]
#[serde(transparent)]
pub struct SensitiveString(String);

impl SensitiveString {
pub fn into_inner(self) -> String {
self.0
}
}

impl Debug for SensitiveString {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_struct("Sensitive").finish()
}
}

impl AsRef<[u8]> for SensitiveString {
fn as_ref(&self) -> &[u8] {
self.0.as_ref()
}
}

impl Deref for SensitiveString {
type Target = str;

fn deref(&self) -> &Self::Target {
&self.0
}
}

impl From<String> for SensitiveString {
fn from(t: String) -> Self {
SensitiveString(t)
}
}

#[cfg(feature = "full")]
impl TS for SensitiveString {
fn name() -> String {
"string".to_string()
}
fn name_with_type_args(_args: Vec<String>) -> String {
"string".to_string()
}
fn dependencies() -> Vec<ts_rs::Dependency> {
Vec::new()
}
fn transparent() -> bool {
true
}
}

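A minimal, runnable sketch of how the new type behaves, using a local stand-in with the same shape (the serde, diesel and ts_rs derives are dropped so the example needs no extra crates): Debug output is redacted while the value stays usable through Deref.

use std::{fmt, ops::Deref};

struct SensitiveString(String);

impl fmt::Debug for SensitiveString {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Redact the inner value so secrets never reach logs.
        f.debug_struct("Sensitive").finish()
    }
}

impl Deref for SensitiveString {
    type Target = str;
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

impl From<String> for SensitiveString {
    fn from(t: String) -> Self {
        SensitiveString(t)
    }
}

fn main() {
    let token: SensitiveString = "super-secret-jwt".to_string().into();
    // Debug output is redacted, but the value still works where &str is expected.
    assert_eq!(format!("{token:?}"), "Sensitive");
    assert_eq!(token.len(), "super-secret-jwt".len());
}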
@ -2,6 +2,7 @@
use crate::schema::{community, community_follower, community_moderator, community_person_ban};
use crate::{
newtypes::{CommunityId, DbUrl, InstanceId, PersonId},
sensitive::SensitiveString,
source::placeholder_apub_url,
CommunityVisibility,
};

@ -39,7 +40,7 @@ pub struct Community {
/// Whether the community is local.
pub local: bool,
#[serde(skip)]
pub private_key: Option<String>,
pub private_key: Option<SensitiveString>,
#[serde(skip)]
pub public_key: String,
#[serde(skip)]

@ -2,6 +2,7 @@
use crate::schema::local_user;
use crate::{
newtypes::{LocalUserId, PersonId},
sensitive::SensitiveString,
ListingType,
PostListingMode,
SortType,

@ -24,8 +25,8 @@ pub struct LocalUser {
/// The person_id for the local user.
pub person_id: PersonId,
#[serde(skip)]
pub password_encrypted: String,
pub email: Option<String>,
pub password_encrypted: SensitiveString,
pub email: Option<SensitiveString>,
/// Whether to show NSFW content.
pub show_nsfw: bool,
pub theme: String,

@ -47,7 +48,7 @@ pub struct LocalUser {
/// Whether their registration application has been accepted.
pub accepted_application: bool,
#[serde(skip)]
pub totp_2fa_secret: Option<String>,
pub totp_2fa_secret: Option<SensitiveString>,
/// Open links in a new tab.
pub open_links_in_new_tab: bool,
pub blur_nsfw: bool,

@ -1,6 +1,6 @@
use crate::newtypes::LocalUserId;
#[cfg(feature = "full")]
use crate::schema::login_token;
use crate::{newtypes::LocalUserId, sensitive::SensitiveString};
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use serde_with::skip_serializing_none;

@ -18,7 +18,7 @@ use ts_rs::TS;
pub struct LoginToken {
/// Jwt token for this login
#[serde(skip)]
pub token: String,
pub token: SensitiveString,
pub user_id: LocalUserId,
/// Time of login
pub published: DateTime<Utc>,

@ -31,7 +31,7 @@ pub struct LoginToken {
#[cfg_attr(feature = "full", derive(Insertable, AsChangeset))]
#[cfg_attr(feature = "full", diesel(table_name = login_token))]
pub struct LoginTokenCreateForm {
pub token: String,
pub token: SensitiveString,
pub user_id: LocalUserId,
pub ip: Option<String>,
pub user_agent: Option<String>,

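The #[serde(skip)] plus SensitiveString combination above keeps tokens out of API responses entirely. A small, self-contained sketch of that behaviour with local stand-in types (serde and serde_json are the only dependencies):

use serde::Serialize;

#[derive(Serialize)]
#[serde(transparent)]
struct SensitiveString(String); // stand-in with the same serde shape as the real type

#[derive(Serialize)]
struct LoginTokenExample {
    #[serde(skip)]
    token: SensitiveString,
    user_id: i32,
}

fn main() {
    let row = LoginTokenExample {
        token: SensitiveString("secret-jwt".to_string()),
        user_id: 42,
    };
    // The skipped token never appears in serialized API output.
    assert_eq!(serde_json::to_string(&row).unwrap(), r#"{"user_id":42}"#);
}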
@ -1,6 +1,6 @@
use crate::newtypes::LocalUserId;
#[cfg(feature = "full")]
use crate::schema::password_reset_request;
use crate::{newtypes::LocalUserId, sensitive::SensitiveString};
use chrono::{DateTime, Utc};

#[derive(PartialEq, Eq, Debug)]

@ -9,7 +9,7 @@ use chrono::{DateTime, Utc};
#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))]
pub struct PasswordResetRequest {
pub id: i32,
pub token: String,
pub token: SensitiveString,
pub published: DateTime<Utc>,
pub local_user_id: LocalUserId,
}

@ -18,5 +18,5 @@ pub struct PasswordResetRequest {
#[cfg_attr(feature = "full", diesel(table_name = password_reset_request))]
pub struct PasswordResetRequestForm {
pub local_user_id: LocalUserId,
pub token: String,
pub token: SensitiveString,
}

@ -2,6 +2,7 @@
use crate::schema::{person, person_follower};
use crate::{
newtypes::{DbUrl, InstanceId, PersonId},
sensitive::SensitiveString,
source::placeholder_apub_url,
};
use chrono::{DateTime, Utc};

@ -36,7 +37,7 @@ pub struct Person {
/// Whether the person is local to our site.
pub local: bool,
#[serde(skip)]
pub private_key: Option<String>,
pub private_key: Option<SensitiveString>,
#[serde(skip)]
pub public_key: String,
#[serde(skip)]

@ -1,5 +1,6 @@
#[cfg(feature = "full")]
use crate::schema::secret;
use crate::sensitive::SensitiveString;

#[derive(Clone)]
#[cfg_attr(feature = "full", derive(Queryable, Selectable, Identifiable))]

@ -7,5 +8,5 @@ use crate::schema::secret;
#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))]
pub struct Secret {
pub id: i32,
pub jwt_secret: String,
pub jwt_secret: SensitiveString,
}

@ -1,6 +1,9 @@
use crate::newtypes::{DbUrl, InstanceId, SiteId};
#[cfg(feature = "full")]
use crate::schema::site;
use crate::{
newtypes::{DbUrl, InstanceId, SiteId},
sensitive::SensitiveString,
};
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use serde_with::skip_serializing_none;

@ -35,7 +38,7 @@ pub struct Site {
/// The site inbox
pub inbox_url: DbUrl,
#[serde(skip)]
pub private_key: Option<String>,
pub private_key: Option<SensitiveString>,
// TODO: mark as `serde(skip)` in next major release as its not needed for api
pub public_key: String,
pub instance_id: InstanceId,

@ -950,9 +950,8 @@ mod tests {
show_bot_accounts: Some(false),
..Default::default()
};
let inserted_local_user =
LocalUser::update(pool, data.local_user_view.local_user.id, &local_user_form).await?;
data.local_user_view.local_user = inserted_local_user;
LocalUser::update(pool, data.local_user_view.local_user.id, &local_user_form).await?;
data.local_user_view.local_user.show_bot_accounts = false;

let read_post_listing = PostQuery {
community_id: Some(data.inserted_community.id),

@ -986,9 +985,8 @@ mod tests {
show_bot_accounts: Some(true),
..Default::default()
};
let inserted_local_user =
LocalUser::update(pool, data.local_user_view.local_user.id, &local_user_form).await?;
data.local_user_view.local_user = inserted_local_user;
LocalUser::update(pool, data.local_user_view.local_user.id, &local_user_form).await?;
data.local_user_view.local_user.show_bot_accounts = true;

let post_listings_with_bots = PostQuery {
community_id: Some(data.inserted_community.id),

@ -1110,9 +1108,8 @@ mod tests {
show_bot_accounts: Some(false),
..Default::default()
};
let inserted_local_user =
LocalUser::update(pool, data.local_user_view.local_user.id, &local_user_form).await?;
data.local_user_view.local_user = inserted_local_user;
LocalUser::update(pool, data.local_user_view.local_user.id, &local_user_form).await?;
data.local_user_view.local_user.show_bot_accounts = false;

let read_post_listing = PostQuery {
community_id: Some(data.inserted_community.id),

@ -1533,9 +1530,8 @@ mod tests {
show_read_posts: Some(false),
..Default::default()
};
let inserted_local_user =
LocalUser::update(pool, data.local_user_view.local_user.id, &local_user_form).await?;
data.local_user_view.local_user = inserted_local_user;
LocalUser::update(pool, data.local_user_view.local_user.id, &local_user_form).await?;
data.local_user_view.local_user.show_read_posts = false;

// Mark a post as read
PostRead::mark_as_read(

@ -43,21 +43,18 @@ async fn node_info(context: web::Data<LemmyContext>) -> Result<HttpResponse, Err
.map_err(|_| ErrorBadRequest(LemmyError::from(anyhow!("not_found"))))?
.ok_or(ErrorBadRequest(LemmyError::from(anyhow!("not_found"))))?;

let protocols = if site_view.local_site.federation_enabled {
Some(vec!["activitypub".to_string()])
} else {
None
};
// Since there are 3 registration options,
// we need to set open_registrations as true if RegistrationMode is not Closed.
let open_registrations = Some(site_view.local_site.registration_mode != RegistrationMode::Closed);
let json = NodeInfo {
version: Some("2.0".to_string()),
version: Some("2.1".to_string()),
software: Some(NodeInfoSoftware {
name: Some("lemmy".to_string()),
version: Some(VERSION.to_string()),
repository: Some("https://github.com/LemmyNet/lemmy".to_string()),
homepage: Some("https://join-lemmy.org/".to_string()),
}),
protocols,
protocols: Some(vec!["activitypub".to_string()]),
usage: Some(NodeInfoUsage {
users: Some(NodeInfoUsers {
total: Some(site_view.counts.users),

@ -68,6 +65,11 @@ async fn node_info(context: web::Data<LemmyContext>) -> Result<HttpResponse, Err
local_comments: Some(site_view.counts.comments),
}),
open_registrations,
services: Some(NodeInfoServices {
inbound: Some(vec![]),
outbound: Some(vec![]),
}),
metadata: Some(vec![]),
};

Ok(HttpResponse::Ok().json(json))

@ -84,6 +86,7 @@ struct NodeInfoWellKnownLinks {
pub href: Url,
}

/// Nodeinfo spec: http://nodeinfo.diaspora.software/docson/index.html#/ns/schema/2.1
#[derive(Serialize, Deserialize, Debug, Default)]
#[serde(rename_all = "camelCase", default)]
pub struct NodeInfo {

@ -92,6 +95,9 @@ pub struct NodeInfo {
pub protocols: Option<Vec<String>>,
pub usage: Option<NodeInfoUsage>,
pub open_registrations: Option<bool>,
/// These fields are required by the spec for no reason
pub services: Option<NodeInfoServices>,
pub metadata: Option<Vec<String>>,
}

#[derive(Serialize, Deserialize, Debug, Default)]

@ -99,6 +105,8 @@ pub struct NodeInfo {
pub struct NodeInfoSoftware {
pub name: Option<String>,
pub version: Option<String>,
pub repository: Option<String>,
pub homepage: Option<String>,
}

#[derive(Serialize, Deserialize, Debug, Default)]

@ -116,3 +124,10 @@ pub struct NodeInfoUsers {
pub active_halfyear: Option<i64>,
pub active_month: Option<i64>,
}

#[derive(Serialize, Deserialize, Debug, Default)]
#[serde(rename_all = "camelCase", default)]
pub struct NodeInfoServices {
pub inbound: Option<Vec<String>>,
pub outbound: Option<Vec<String>>,
}

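The open_registrations line above maps the three registration modes onto the single boolean that nodeinfo expects. A self-contained sketch of that mapping; RegistrationMode here is a local stand-in for the real enum:

#[derive(PartialEq)]
enum RegistrationMode {
    Closed,
    RequireApplication,
    Open,
}

// Any mode other than Closed counts as "open registrations" for nodeinfo.
fn open_registrations(mode: RegistrationMode) -> Option<bool> {
    Some(mode != RegistrationMode::Closed)
}

fn main() {
    assert_eq!(open_registrations(RegistrationMode::Closed), Some(false));
    assert_eq!(open_registrations(RegistrationMode::RequireApplication), Some(true));
    assert_eq!(open_registrations(RegistrationMode::Open), Some(true));
}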
@ -19,7 +19,7 @@ const ALLOWED_POST_URL_SCHEMES: [&str; 3] = ["http", "https", "magnet"];
const BODY_MAX_LENGTH: usize = 10000;
const POST_BODY_MAX_LENGTH: usize = 50000;
const BIO_MAX_LENGTH: usize = 300;
const ALT_TEXT_MAX_LENGTH: usize = 300;
const ALT_TEXT_MAX_LENGTH: usize = 1500;
const SITE_NAME_MAX_LENGTH: usize = 20;
const SITE_NAME_MIN_LENGTH: usize = 1;
const SITE_DESCRIPTION_MAX_LENGTH: usize = 150;

@ -1 +1 @@
Subproject commit a4681f70a4ddf077951ed2dcc8cf90bb243d4828
Subproject commit f0ab81deea347c433277a90ae752b10f68473719

3 migrations/2024-05-04-140749_separate_triggers/down.sql Normal file
@ -0,0 +1,3 @@
SELECT
1;

4 migrations/2024-05-04-140749_separate_triggers/up.sql Normal file
@ -0,0 +1,4 @@
-- This migration exists to trigger re-execution of replaceable_schema
SELECT
1;

@ -262,12 +262,22 @@ pub fn config(cfg: &mut web::ServiceConfig, rate_limit: &RateLimitCell) {
// User
.service(
// Account action, I don't like that it's in /user maybe /accounts
// Handle /user/register separately to add the register() rate limitter
// Handle /user/register separately to add the register() rate limiter
web::resource("/user/register")
.guard(guard::Post())
.wrap(rate_limit.register())
.route(web::post().to(register)),
)
// User
.service(
// Handle /user/login separately to add the register() rate limiter
// TODO: pretty annoying way to apply rate limits for register and login, we should
// group them under a common path so that rate limit is only applied once (eg under /account).
web::resource("/user/login")
.guard(guard::Post())
.wrap(rate_limit.register())
.route(web::post().to(login)),
)
.service(
// Handle captcha separately
web::resource("/user/get_captcha")

@ -306,7 +316,6 @@ pub fn config(cfg: &mut web::ServiceConfig, rate_limit: &RateLimitCell) {
.route("/banned", web::get().to(list_banned_users))
.route("/block", web::post().to(block_person))
// TODO Account actions. I don't like that they're in /user maybe /accounts
.route("/login", web::post().to(login))
.route("/logout", web::post().to(logout))
.route("/delete_account", web::post().to(delete_account))
.route("/password_reset", web::post().to(reset_password))

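The TODO above suggests grouping login and register under one path so the rate limit wraps both once. A rough actix-web sketch of that layout, not code from this commit; Logger stands in for the rate-limiting middleware so the example stays self-contained:

use actix_web::{guard, middleware::Logger, web, App, HttpResponse, HttpServer};

async fn login() -> HttpResponse {
    HttpResponse::Ok().finish()
}

async fn register() -> HttpResponse {
    HttpResponse::Ok().finish()
}

#[actix_web::main]
async fn main() -> std::io::Result<()> {
    HttpServer::new(|| {
        App::new().service(
            web::scope("/account")
                // A real deployment would wrap the rate-limiting middleware here;
                // Logger is only a placeholder for this sketch.
                .wrap(Logger::default())
                .service(
                    web::resource("/login")
                        .guard(guard::Post())
                        .route(web::post().to(login)),
                )
                .service(
                    web::resource("/register")
                        .guard(guard::Post())
                        .route(web::post().to(register)),
                ),
        )
    })
    .bind(("127.0.0.1", 8080))?
    .run()
    .await
}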