Merge branch 'main' into apub-remove-expires

Felix Ableitner 2024-03-26 16:36:21 +01:00
commit e84e25d8fa
104 changed files with 1881 additions and 1041 deletions

.github/CODEOWNERS (vendored)

@ -1,3 +1,3 @@
* @Nutomic @dessalines @phiresky @dullbananas
* @Nutomic @dessalines @phiresky @dullbananas @SleeplessOne1917
crates/apub/ @Nutomic
migrations/ @dessalines @phiresky @dullbananas


@ -6,21 +6,23 @@ variables:
- &install_pnpm "corepack enable pnpm"
- &slow_check_paths
- path:
# rust source code
- "crates/**"
- "src/**"
- "**/Cargo.toml"
- "Cargo.lock"
# database migrations
- "migrations/**"
# typescript tests
- "api_tests/**"
# config files and scripts used by ci
- ".woodpecker.yml"
- ".rustfmt.toml"
- "scripts/update_config_defaults.sh"
- "diesel.toml"
- ".gitmodules"
include: [
# rust source code
"crates/**",
"src/**",
"**/Cargo.toml",
"Cargo.lock",
# database migrations
"migrations/**",
# typescript tests
"api_tests/**",
# config files and scripts used by ci
".woodpecker.yml",
".rustfmt.toml",
"scripts/update_config_defaults.sh",
"diesel.toml",
".gitmodules",
]
# Broken for cron jobs currently, see
# https://github.com/woodpecker-ci/woodpecker/issues/1716
@ -198,7 +200,7 @@ steps:
- cat target/log/lemmy_*.out || true
- "# If you can't see all output, then use the download button"
when:
status: [failure]
- status: [failure]
publish_release_docker:
image: woodpeckerci/plugin-docker-buildx
@ -211,7 +213,7 @@ steps:
- RUST_RELEASE_MODE=release
tag: ${CI_COMMIT_TAG}
when:
event: tag
- event: tag
nightly_build:
image: woodpeckerci/plugin-docker-buildx
@ -224,7 +226,7 @@ steps:
- RUST_RELEASE_MODE=release
tag: dev
when:
event: cron
- event: cron
# using https://github.com/pksunkara/cargo-workspaces
publish_to_crates_io:
@ -237,7 +239,7 @@ steps:
- cargo workspaces publish --from-git --allow-dirty --no-verify --allow-branch "${CI_COMMIT_TAG}" --yes custom "${CI_COMMIT_TAG}"
secrets: [cargo_api_token]
when:
event: tag
- event: tag
notify_on_failure:
image: alpine:3
@ -245,7 +247,7 @@ steps:
- apk add curl
- "curl -d'Lemmy CI build failed: ${CI_PIPELINE_URL}' ntfy.sh/lemmy_drone_ci"
when:
status: [failure]
- status: [failure]
notify_on_tag_deploy:
image: alpine:3
@ -253,7 +255,7 @@ steps:
- apk add curl
- "curl -d'lemmy:${CI_COMMIT_TAG} deployed' ntfy.sh/lemmy_drone_ci"
when:
event: tag
- event: tag
services:
database:

Cargo.lock (generated)

File diff suppressed because it is too large.


@ -37,6 +37,8 @@ debug = 0
[features]
embed-pictrs = ["pict-rs"]
# This feature requires building with `tokio_unstable` flag, see documentation:
# https://docs.rs/tokio/latest/tokio/#unstable-features
console = [
"console-subscriber",
"opentelemetry",
@ -102,9 +104,9 @@ activitypub_federation = { version = "0.5.2", default-features = false, features
diesel = "2.1.4"
diesel_migrations = "2.1.0"
diesel-async = "0.4.1"
serde = { version = "1.0.195", features = ["derive"] }
serde_with = "3.5.1"
actix-web = { version = "4.4.1", default-features = false, features = [
serde = { version = "1.0.197", features = ["derive"] }
serde_with = "3.7.0"
actix-web = { version = "4.5.1", default-features = false, features = [
"macros",
"rustls",
"compress-brotli",
@ -113,39 +115,39 @@ actix-web = { version = "4.4.1", default-features = false, features = [
"cookies",
] }
tracing = "0.1.40"
tracing-actix-web = { version = "0.7.9", default-features = false }
tracing-actix-web = { version = "0.7.10", default-features = false }
tracing-error = "0.2.0"
tracing-log = "0.2.0"
tracing-subscriber = { version = "0.3.18", features = ["env-filter"] }
url = { version = "2.5.0", features = ["serde"] }
reqwest = { version = "0.11.23", features = ["json", "blocking", "gzip"] }
reqwest = { version = "0.11.26", features = ["json", "blocking", "gzip"] }
reqwest-middleware = "0.2.4"
reqwest-tracing = "0.4.7"
clokwerk = "0.4.0"
doku = { version = "0.21.1", features = ["url-2"] }
bcrypt = "0.15.0"
chrono = { version = "0.4.32", features = ["serde"], default-features = false }
serde_json = { version = "1.0.111", features = ["preserve_order"] }
chrono = { version = "0.4.35", features = ["serde"], default-features = false }
serde_json = { version = "1.0.114", features = ["preserve_order"] }
base64 = "0.21.7"
uuid = { version = "1.7.0", features = ["serde", "v4"] }
async-trait = "0.1.77"
captcha = "0.0.9"
anyhow = { version = "1.0.79", features = [
anyhow = { version = "1.0.81", features = [
"backtrace",
] } # backtrace is on by default on nightly, but not stable rust
diesel_ltree = "0.3.1"
typed-builder = "0.18.1"
serial_test = "2.0.0"
tokio = { version = "1.35.1", features = ["full"] }
tokio = { version = "1.36.0", features = ["full"] }
regex = "1.10.3"
once_cell = "1.19.0"
diesel-derive-newtype = "2.1.0"
diesel-derive-enum = { version = "2.1.0", features = ["postgres"] }
strum = "0.25.0"
strum_macros = "0.25.3"
itertools = "0.12.0"
itertools = "0.12.1"
futures = "0.3.30"
http = "0.2.11"
http = "0.2.12"
rosetta-i18n = "0.1.3"
opentelemetry = { version = "0.19.0", features = ["rt-tokio"] }
tracing-opentelemetry = { version = "0.19.0" }
@ -160,9 +162,9 @@ tokio-postgres = "0.7.10"
tokio-postgres-rustls = "0.10.0"
urlencoding = "2.1.3"
enum-map = "2.7"
moka = { version = "0.12.4", features = ["future"] }
moka = { version = "0.12.5", features = ["future"] }
i-love-jesus = { version = "0.1.0" }
clap = { version = "4.4.18", features = ["derive"] }
clap = { version = "4.5.2", features = ["derive"] }
pretty_assertions = "1.4.0"
[dependencies]
@ -193,7 +195,7 @@ tracing-opentelemetry = { workspace = true, optional = true }
opentelemetry = { workspace = true, optional = true }
console-subscriber = { version = "0.1.10", optional = true }
opentelemetry-otlp = { version = "0.12.0", optional = true }
pict-rs = { version = "0.5.1", optional = true }
pict-rs = { version = "0.5.9", optional = true }
tokio.workspace = true
actix-cors = "0.6.5"
futures-util = { workspace = true }


@ -20,16 +20,16 @@
},
"devDependencies": {
"@types/jest": "^29.5.12",
"@types/node": "^20.11.22",
"@typescript-eslint/eslint-plugin": "^7.1.0",
"@typescript-eslint/parser": "^7.1.0",
"@types/node": "^20.11.27",
"@typescript-eslint/eslint-plugin": "^7.2.0",
"@typescript-eslint/parser": "^7.2.0",
"download-file-sync": "^1.0.4",
"eslint": "^8.57.0",
"eslint-plugin-prettier": "^5.0.1",
"eslint-plugin-prettier": "^5.1.3",
"jest": "^29.5.0",
"lemmy-js-client": "0.19.4-alpha.6",
"lemmy-js-client": "0.19.4-alpha.8",
"prettier": "^3.2.5",
"ts-jest": "^29.1.0",
"typescript": "^5.3.3"
"typescript": "^5.4.2"
}
}


@ -9,14 +9,14 @@ devDependencies:
specifier: ^29.5.12
version: 29.5.12
'@types/node':
specifier: ^20.11.22
version: 20.11.22
specifier: ^20.11.27
version: 20.11.27
'@typescript-eslint/eslint-plugin':
specifier: ^7.1.0
version: 7.1.0(@typescript-eslint/parser@7.1.0)(eslint@8.57.0)(typescript@5.3.3)
specifier: ^7.2.0
version: 7.2.0(@typescript-eslint/parser@7.2.0)(eslint@8.57.0)(typescript@5.4.2)
'@typescript-eslint/parser':
specifier: ^7.1.0
version: 7.1.0(eslint@8.57.0)(typescript@5.3.3)
specifier: ^7.2.0
version: 7.2.0(eslint@8.57.0)(typescript@5.4.2)
download-file-sync:
specifier: ^1.0.4
version: 1.0.4
@ -24,23 +24,23 @@ devDependencies:
specifier: ^8.57.0
version: 8.57.0
eslint-plugin-prettier:
specifier: ^5.0.1
specifier: ^5.1.3
version: 5.1.3(eslint@8.57.0)(prettier@3.2.5)
jest:
specifier: ^29.5.0
version: 29.7.0(@types/node@20.11.22)
version: 29.7.0(@types/node@20.11.27)
lemmy-js-client:
specifier: 0.19.4-alpha.6
version: 0.19.4-alpha.6
specifier: 0.19.4-alpha.8
version: 0.19.4-alpha.8
prettier:
specifier: ^3.2.5
version: 3.2.5
ts-jest:
specifier: ^29.1.0
version: 29.1.2(@babel/core@7.23.9)(jest@29.7.0)(typescript@5.3.3)
version: 29.1.2(@babel/core@7.23.9)(jest@29.7.0)(typescript@5.4.2)
typescript:
specifier: ^5.3.3
version: 5.3.3
specifier: ^5.4.2
version: 5.4.2
packages:
@ -464,7 +464,7 @@ packages:
engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0}
dependencies:
'@jest/types': 29.6.3
'@types/node': 20.11.22
'@types/node': 20.11.27
chalk: 4.1.2
jest-message-util: 29.7.0
jest-util: 29.7.0
@ -485,14 +485,14 @@ packages:
'@jest/test-result': 29.7.0
'@jest/transform': 29.7.0
'@jest/types': 29.6.3
'@types/node': 20.11.22
'@types/node': 20.11.27
ansi-escapes: 4.3.2
chalk: 4.1.2
ci-info: 3.9.0
exit: 0.1.2
graceful-fs: 4.2.11
jest-changed-files: 29.7.0
jest-config: 29.7.0(@types/node@20.11.22)
jest-config: 29.7.0(@types/node@20.11.27)
jest-haste-map: 29.7.0
jest-message-util: 29.7.0
jest-regex-util: 29.6.3
@ -520,7 +520,7 @@ packages:
dependencies:
'@jest/fake-timers': 29.7.0
'@jest/types': 29.6.3
'@types/node': 20.11.22
'@types/node': 20.11.27
jest-mock: 29.7.0
dev: true
@ -547,7 +547,7 @@ packages:
dependencies:
'@jest/types': 29.6.3
'@sinonjs/fake-timers': 10.3.0
'@types/node': 20.11.22
'@types/node': 20.11.27
jest-message-util: 29.7.0
jest-mock: 29.7.0
jest-util: 29.7.0
@ -580,7 +580,7 @@ packages:
'@jest/transform': 29.7.0
'@jest/types': 29.6.3
'@jridgewell/trace-mapping': 0.3.22
'@types/node': 20.11.22
'@types/node': 20.11.27
chalk: 4.1.2
collect-v8-coverage: 1.0.2
exit: 0.1.2
@ -668,7 +668,7 @@ packages:
'@jest/schemas': 29.6.3
'@types/istanbul-lib-coverage': 2.0.6
'@types/istanbul-reports': 3.0.4
'@types/node': 20.11.22
'@types/node': 20.11.27
'@types/yargs': 17.0.32
chalk: 4.1.2
dev: true
@ -777,7 +777,7 @@ packages:
/@types/graceful-fs@4.1.9:
resolution: {integrity: sha512-olP3sd1qOEe5dXTSaFvQG+02VdRXcdytWLAZsAq1PecU8uqQAhkrnbli7DagjtXKW/Bl7YJbUsa8MPcuc8LHEQ==}
dependencies:
'@types/node': 20.11.22
'@types/node': 20.11.27
dev: true
/@types/istanbul-lib-coverage@2.0.6:
@ -807,8 +807,8 @@ packages:
resolution: {integrity: sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA==}
dev: true
/@types/node@20.11.22:
resolution: {integrity: sha512-/G+IxWxma6V3E+pqK1tSl2Fo1kl41pK1yeCyDsgkF9WlVAme4j5ISYM2zR11bgLFJGLN5sVK40T4RJNuiZbEjA==}
/@types/node@20.11.27:
resolution: {integrity: sha512-qyUZfMnCg1KEz57r7pzFtSGt49f6RPkPBis3Vo4PbS7roQEDn22hiHzl/Lo1q4i4hDEgBJmBF/NTNg2XR0HbFg==}
dependencies:
undici-types: 5.26.5
dev: true
@ -831,8 +831,8 @@ packages:
'@types/yargs-parser': 21.0.3
dev: true
/@typescript-eslint/eslint-plugin@7.1.0(@typescript-eslint/parser@7.1.0)(eslint@8.57.0)(typescript@5.3.3):
resolution: {integrity: sha512-j6vT/kCulhG5wBmGtstKeiVr1rdXE4nk+DT1k6trYkwlrvW9eOF5ZbgKnd/YR6PcM4uTEXa0h6Fcvf6X7Dxl0w==}
/@typescript-eslint/eslint-plugin@7.2.0(@typescript-eslint/parser@7.2.0)(eslint@8.57.0)(typescript@5.4.2):
resolution: {integrity: sha512-mdekAHOqS9UjlmyF/LSs6AIEvfceV749GFxoBAjwAv0nkevfKHWQFDMcBZWUiIC5ft6ePWivXoS36aKQ0Cy3sw==}
engines: {node: ^16.0.0 || >=18.0.0}
peerDependencies:
'@typescript-eslint/parser': ^7.0.0
@ -843,25 +843,25 @@ packages:
optional: true
dependencies:
'@eslint-community/regexpp': 4.10.0
'@typescript-eslint/parser': 7.1.0(eslint@8.57.0)(typescript@5.3.3)
'@typescript-eslint/scope-manager': 7.1.0
'@typescript-eslint/type-utils': 7.1.0(eslint@8.57.0)(typescript@5.3.3)
'@typescript-eslint/utils': 7.1.0(eslint@8.57.0)(typescript@5.3.3)
'@typescript-eslint/visitor-keys': 7.1.0
'@typescript-eslint/parser': 7.2.0(eslint@8.57.0)(typescript@5.4.2)
'@typescript-eslint/scope-manager': 7.2.0
'@typescript-eslint/type-utils': 7.2.0(eslint@8.57.0)(typescript@5.4.2)
'@typescript-eslint/utils': 7.2.0(eslint@8.57.0)(typescript@5.4.2)
'@typescript-eslint/visitor-keys': 7.2.0
debug: 4.3.4
eslint: 8.57.0
graphemer: 1.4.0
ignore: 5.3.1
natural-compare: 1.4.0
semver: 7.6.0
ts-api-utils: 1.2.1(typescript@5.3.3)
typescript: 5.3.3
ts-api-utils: 1.3.0(typescript@5.4.2)
typescript: 5.4.2
transitivePeerDependencies:
- supports-color
dev: true
/@typescript-eslint/parser@7.1.0(eslint@8.57.0)(typescript@5.3.3):
resolution: {integrity: sha512-V1EknKUubZ1gWFjiOZhDSNToOjs63/9O0puCgGS8aDOgpZY326fzFu15QAUjwaXzRZjf/qdsdBrckYdv9YxB8w==}
/@typescript-eslint/parser@7.2.0(eslint@8.57.0)(typescript@5.4.2):
resolution: {integrity: sha512-5FKsVcHTk6TafQKQbuIVkXq58Fnbkd2wDL4LB7AURN7RUOu1utVP+G8+6u3ZhEroW3DF6hyo3ZEXxgKgp4KeCg==}
engines: {node: ^16.0.0 || >=18.0.0}
peerDependencies:
eslint: ^8.56.0
@ -870,27 +870,27 @@ packages:
typescript:
optional: true
dependencies:
'@typescript-eslint/scope-manager': 7.1.0
'@typescript-eslint/types': 7.1.0
'@typescript-eslint/typescript-estree': 7.1.0(typescript@5.3.3)
'@typescript-eslint/visitor-keys': 7.1.0
'@typescript-eslint/scope-manager': 7.2.0
'@typescript-eslint/types': 7.2.0
'@typescript-eslint/typescript-estree': 7.2.0(typescript@5.4.2)
'@typescript-eslint/visitor-keys': 7.2.0
debug: 4.3.4
eslint: 8.57.0
typescript: 5.3.3
typescript: 5.4.2
transitivePeerDependencies:
- supports-color
dev: true
/@typescript-eslint/scope-manager@7.1.0:
resolution: {integrity: sha512-6TmN4OJiohHfoOdGZ3huuLhpiUgOGTpgXNUPJgeZOZR3DnIpdSgtt83RS35OYNNXxM4TScVlpVKC9jyQSETR1A==}
/@typescript-eslint/scope-manager@7.2.0:
resolution: {integrity: sha512-Qh976RbQM/fYtjx9hs4XkayYujB/aPwglw2choHmf3zBjB4qOywWSdt9+KLRdHubGcoSwBnXUH2sR3hkyaERRg==}
engines: {node: ^16.0.0 || >=18.0.0}
dependencies:
'@typescript-eslint/types': 7.1.0
'@typescript-eslint/visitor-keys': 7.1.0
'@typescript-eslint/types': 7.2.0
'@typescript-eslint/visitor-keys': 7.2.0
dev: true
/@typescript-eslint/type-utils@7.1.0(eslint@8.57.0)(typescript@5.3.3):
resolution: {integrity: sha512-UZIhv8G+5b5skkcuhgvxYWHjk7FW7/JP5lPASMEUoliAPwIH/rxoUSQPia2cuOj9AmDZmwUl1usKm85t5VUMew==}
/@typescript-eslint/type-utils@7.2.0(eslint@8.57.0)(typescript@5.4.2):
resolution: {integrity: sha512-xHi51adBHo9O9330J8GQYQwrKBqbIPJGZZVQTHHmy200hvkLZFWJIFtAG/7IYTWUyun6DE6w5InDReePJYJlJA==}
engines: {node: ^16.0.0 || >=18.0.0}
peerDependencies:
eslint: ^8.56.0
@ -899,23 +899,23 @@ packages:
typescript:
optional: true
dependencies:
'@typescript-eslint/typescript-estree': 7.1.0(typescript@5.3.3)
'@typescript-eslint/utils': 7.1.0(eslint@8.57.0)(typescript@5.3.3)
'@typescript-eslint/typescript-estree': 7.2.0(typescript@5.4.2)
'@typescript-eslint/utils': 7.2.0(eslint@8.57.0)(typescript@5.4.2)
debug: 4.3.4
eslint: 8.57.0
ts-api-utils: 1.2.1(typescript@5.3.3)
typescript: 5.3.3
ts-api-utils: 1.3.0(typescript@5.4.2)
typescript: 5.4.2
transitivePeerDependencies:
- supports-color
dev: true
/@typescript-eslint/types@7.1.0:
resolution: {integrity: sha512-qTWjWieJ1tRJkxgZYXx6WUYtWlBc48YRxgY2JN1aGeVpkhmnopq+SUC8UEVGNXIvWH7XyuTjwALfG6bFEgCkQA==}
/@typescript-eslint/types@7.2.0:
resolution: {integrity: sha512-XFtUHPI/abFhm4cbCDc5Ykc8npOKBSJePY3a3s+lwumt7XWJuzP5cZcfZ610MIPHjQjNsOLlYK8ASPaNG8UiyA==}
engines: {node: ^16.0.0 || >=18.0.0}
dev: true
/@typescript-eslint/typescript-estree@7.1.0(typescript@5.3.3):
resolution: {integrity: sha512-k7MyrbD6E463CBbSpcOnwa8oXRdHzH1WiVzOipK3L5KSML92ZKgUBrTlehdi7PEIMT8k0bQixHUGXggPAlKnOQ==}
/@typescript-eslint/typescript-estree@7.2.0(typescript@5.4.2):
resolution: {integrity: sha512-cyxS5WQQCoBwSakpMrvMXuMDEbhOo9bNHHrNcEWis6XHx6KF518tkF1wBvKIn/tpq5ZpUYK7Bdklu8qY0MsFIA==}
engines: {node: ^16.0.0 || >=18.0.0}
peerDependencies:
typescript: '*'
@ -923,21 +923,21 @@ packages:
typescript:
optional: true
dependencies:
'@typescript-eslint/types': 7.1.0
'@typescript-eslint/visitor-keys': 7.1.0
'@typescript-eslint/types': 7.2.0
'@typescript-eslint/visitor-keys': 7.2.0
debug: 4.3.4
globby: 11.1.0
is-glob: 4.0.3
minimatch: 9.0.3
semver: 7.6.0
ts-api-utils: 1.2.1(typescript@5.3.3)
typescript: 5.3.3
ts-api-utils: 1.3.0(typescript@5.4.2)
typescript: 5.4.2
transitivePeerDependencies:
- supports-color
dev: true
/@typescript-eslint/utils@7.1.0(eslint@8.57.0)(typescript@5.3.3):
resolution: {integrity: sha512-WUFba6PZC5OCGEmbweGpnNJytJiLG7ZvDBJJoUcX4qZYf1mGZ97mO2Mps6O2efxJcJdRNpqweCistDbZMwIVHw==}
/@typescript-eslint/utils@7.2.0(eslint@8.57.0)(typescript@5.4.2):
resolution: {integrity: sha512-YfHpnMAGb1Eekpm3XRK8hcMwGLGsnT6L+7b2XyRv6ouDuJU1tZir1GS2i0+VXRatMwSI1/UfcyPe53ADkU+IuA==}
engines: {node: ^16.0.0 || >=18.0.0}
peerDependencies:
eslint: ^8.56.0
@ -945,9 +945,9 @@ packages:
'@eslint-community/eslint-utils': 4.4.0(eslint@8.57.0)
'@types/json-schema': 7.0.15
'@types/semver': 7.5.8
'@typescript-eslint/scope-manager': 7.1.0
'@typescript-eslint/types': 7.1.0
'@typescript-eslint/typescript-estree': 7.1.0(typescript@5.3.3)
'@typescript-eslint/scope-manager': 7.2.0
'@typescript-eslint/types': 7.2.0
'@typescript-eslint/typescript-estree': 7.2.0(typescript@5.4.2)
eslint: 8.57.0
semver: 7.6.0
transitivePeerDependencies:
@ -955,11 +955,11 @@ packages:
- typescript
dev: true
/@typescript-eslint/visitor-keys@7.1.0:
resolution: {integrity: sha512-FhUqNWluiGNzlvnDZiXad4mZRhtghdoKW6e98GoEOYSu5cND+E39rG5KwJMUzeENwm1ztYBRqof8wMLP+wNPIA==}
/@typescript-eslint/visitor-keys@7.2.0:
resolution: {integrity: sha512-c6EIQRHhcpl6+tO8EMR+kjkkV+ugUNXOmeASA1rlzkd8EPIriavpWoiEz1HR/VLhbVIdhqnV6E7JZm00cBDx2A==}
engines: {node: ^16.0.0 || >=18.0.0}
dependencies:
'@typescript-eslint/types': 7.1.0
'@typescript-eslint/types': 7.2.0
eslint-visitor-keys: 3.4.3
dev: true
@ -1276,7 +1276,7 @@ packages:
resolution: {integrity: sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==}
dev: true
/create-jest@29.7.0(@types/node@20.11.22):
/create-jest@29.7.0(@types/node@20.11.27):
resolution: {integrity: sha512-Adz2bdH0Vq3F53KEMJOoftQFutWCukm6J24wbPWRO4k1kMY7gS7ds/uoJkNuV8wDCtWWnuwGcJwpWcih+zEW1Q==}
engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0}
hasBin: true
@ -1285,7 +1285,7 @@ packages:
chalk: 4.1.2
exit: 0.1.2
graceful-fs: 4.2.11
jest-config: 29.7.0(@types/node@20.11.22)
jest-config: 29.7.0(@types/node@20.11.27)
jest-util: 29.7.0
prompts: 2.4.2
transitivePeerDependencies:
@ -1939,7 +1939,7 @@ packages:
'@jest/expect': 29.7.0
'@jest/test-result': 29.7.0
'@jest/types': 29.6.3
'@types/node': 20.11.22
'@types/node': 20.11.27
chalk: 4.1.2
co: 4.6.0
dedent: 1.5.1
@ -1960,7 +1960,7 @@ packages:
- supports-color
dev: true
/jest-cli@29.7.0(@types/node@20.11.22):
/jest-cli@29.7.0(@types/node@20.11.27):
resolution: {integrity: sha512-OVVobw2IubN/GSYsxETi+gOe7Ka59EFMR/twOU3Jb2GnKKeMGJB5SGUUrEz3SFVmJASUdZUzy83sLNNQ2gZslg==}
engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0}
hasBin: true
@ -1974,10 +1974,10 @@ packages:
'@jest/test-result': 29.7.0
'@jest/types': 29.6.3
chalk: 4.1.2
create-jest: 29.7.0(@types/node@20.11.22)
create-jest: 29.7.0(@types/node@20.11.27)
exit: 0.1.2
import-local: 3.1.0
jest-config: 29.7.0(@types/node@20.11.22)
jest-config: 29.7.0(@types/node@20.11.27)
jest-util: 29.7.0
jest-validate: 29.7.0
yargs: 17.7.2
@ -1988,7 +1988,7 @@ packages:
- ts-node
dev: true
/jest-config@29.7.0(@types/node@20.11.22):
/jest-config@29.7.0(@types/node@20.11.27):
resolution: {integrity: sha512-uXbpfeQ7R6TZBqI3/TxCU4q4ttk3u0PJeC+E0zbfSoSjq6bJ7buBPxzQPL0ifrkY4DNu4JUdk0ImlBUYi840eQ==}
engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0}
peerDependencies:
@ -2003,7 +2003,7 @@ packages:
'@babel/core': 7.23.9
'@jest/test-sequencer': 29.7.0
'@jest/types': 29.6.3
'@types/node': 20.11.22
'@types/node': 20.11.27
babel-jest: 29.7.0(@babel/core@7.23.9)
chalk: 4.1.2
ci-info: 3.9.0
@ -2063,7 +2063,7 @@ packages:
'@jest/environment': 29.7.0
'@jest/fake-timers': 29.7.0
'@jest/types': 29.6.3
'@types/node': 20.11.22
'@types/node': 20.11.27
jest-mock: 29.7.0
jest-util: 29.7.0
dev: true
@ -2079,7 +2079,7 @@ packages:
dependencies:
'@jest/types': 29.6.3
'@types/graceful-fs': 4.1.9
'@types/node': 20.11.22
'@types/node': 20.11.27
anymatch: 3.1.3
fb-watchman: 2.0.2
graceful-fs: 4.2.11
@ -2130,7 +2130,7 @@ packages:
engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0}
dependencies:
'@jest/types': 29.6.3
'@types/node': 20.11.22
'@types/node': 20.11.27
jest-util: 29.7.0
dev: true
@ -2185,7 +2185,7 @@ packages:
'@jest/test-result': 29.7.0
'@jest/transform': 29.7.0
'@jest/types': 29.6.3
'@types/node': 20.11.22
'@types/node': 20.11.27
chalk: 4.1.2
emittery: 0.13.1
graceful-fs: 4.2.11
@ -2216,7 +2216,7 @@ packages:
'@jest/test-result': 29.7.0
'@jest/transform': 29.7.0
'@jest/types': 29.6.3
'@types/node': 20.11.22
'@types/node': 20.11.27
chalk: 4.1.2
cjs-module-lexer: 1.2.3
collect-v8-coverage: 1.0.2
@ -2268,7 +2268,7 @@ packages:
engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0}
dependencies:
'@jest/types': 29.6.3
'@types/node': 20.11.22
'@types/node': 20.11.27
chalk: 4.1.2
ci-info: 3.9.0
graceful-fs: 4.2.11
@ -2293,7 +2293,7 @@ packages:
dependencies:
'@jest/test-result': 29.7.0
'@jest/types': 29.6.3
'@types/node': 20.11.22
'@types/node': 20.11.27
ansi-escapes: 4.3.2
chalk: 4.1.2
emittery: 0.13.1
@ -2305,13 +2305,13 @@ packages:
resolution: {integrity: sha512-eIz2msL/EzL9UFTFFx7jBTkeZfku0yUAyZZZmJ93H2TYEiroIx2PQjEXcwYtYl8zXCxb+PAmA2hLIt/6ZEkPHw==}
engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0}
dependencies:
'@types/node': 20.11.22
'@types/node': 20.11.27
jest-util: 29.7.0
merge-stream: 2.0.0
supports-color: 8.1.1
dev: true
/jest@29.7.0(@types/node@20.11.22):
/jest@29.7.0(@types/node@20.11.27):
resolution: {integrity: sha512-NIy3oAFp9shda19hy4HK0HRTWKtPJmGdnvywu01nOqNC2vZg+Z+fvJDxpMQA88eb2I9EcafcdjYgsDthnYTvGw==}
engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0}
hasBin: true
@ -2324,7 +2324,7 @@ packages:
'@jest/core': 29.7.0
'@jest/types': 29.6.3
import-local: 3.1.0
jest-cli: 29.7.0(@types/node@20.11.22)
jest-cli: 29.7.0(@types/node@20.11.27)
transitivePeerDependencies:
- '@types/node'
- babel-plugin-macros
@ -2390,8 +2390,8 @@ packages:
engines: {node: '>=6'}
dev: true
/lemmy-js-client@0.19.4-alpha.6:
resolution: {integrity: sha512-x4htMlpoZ7hzrhrIk82aompVxbpu2ZDWtmWNGraM0+27nUCDf6gYxJH5nb5R/o39BQe5KSHq6zoBdliBwAY40w==}
/lemmy-js-client@0.19.4-alpha.8:
resolution: {integrity: sha512-8vjqUYVOhyUTcmG9FvPLjrWziVwNa2/Zi+kSflTrajJsK0V+5DclJ5dhdVMUQ4DEA70gb0OuNMDlipPG2FoS5A==}
dependencies:
cross-fetch: 4.0.0
form-data: 4.0.0
@ -2956,16 +2956,16 @@ packages:
resolution: {integrity: sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==}
dev: true
/ts-api-utils@1.2.1(typescript@5.3.3):
resolution: {integrity: sha512-RIYA36cJn2WiH9Hy77hdF9r7oEwxAtB/TS9/S4Qd90Ap4z5FSiin5zEiTL44OII1Y3IIlEvxwxFUVgrHSZ/UpA==}
/ts-api-utils@1.3.0(typescript@5.4.2):
resolution: {integrity: sha512-UQMIo7pb8WRomKR1/+MFVLTroIvDVtMX3K6OUir8ynLyzB8Jeriont2bTAtmNPa1ekAgN7YPDyf6V+ygrdU+eQ==}
engines: {node: '>=16'}
peerDependencies:
typescript: '>=4.2.0'
dependencies:
typescript: 5.3.3
typescript: 5.4.2
dev: true
/ts-jest@29.1.2(@babel/core@7.23.9)(jest@29.7.0)(typescript@5.3.3):
/ts-jest@29.1.2(@babel/core@7.23.9)(jest@29.7.0)(typescript@5.4.2):
resolution: {integrity: sha512-br6GJoH/WUX4pu7FbZXuWGKGNDuU7b8Uj77g/Sp7puZV6EXzuByl6JrECvm0MzVzSTkSHWTihsXt+5XYER5b+g==}
engines: {node: ^16.10.0 || ^18.0.0 || >=20.0.0}
hasBin: true
@ -2989,13 +2989,13 @@ packages:
'@babel/core': 7.23.9
bs-logger: 0.2.6
fast-json-stable-stringify: 2.1.0
jest: 29.7.0(@types/node@20.11.22)
jest: 29.7.0(@types/node@20.11.27)
jest-util: 29.7.0
json5: 2.2.3
lodash.memoize: 4.1.2
make-error: 1.3.6
semver: 7.5.4
typescript: 5.3.3
typescript: 5.4.2
yargs-parser: 21.1.1
dev: true
@ -3025,8 +3025,8 @@ packages:
engines: {node: '>=10'}
dev: true
/typescript@5.3.3:
resolution: {integrity: sha512-pXWcraxM0uxAS+tN0AG/BF2TyqmHO014Z070UsJ+pFvYuRSq8KH8DmWpnbXe0pEPDHXZV3FcAbJkijJ5oNEnWw==}
/typescript@5.4.2:
resolution: {integrity: sha512-+2/g0Fds1ERlP6JsakQQDXjZdZMM+rqpamFZJEKh4kwTIn3iDkgKtby0CeNd5ATNZ4Ry1ax15TMx0W2V+miizQ==}
engines: {node: '>=14.17'}
hasBin: true
dev: true


@ -18,6 +18,7 @@ import {
resolveBetaCommunity,
createComment,
deletePost,
delay,
removePost,
getPost,
unfollowRemotes,
@ -219,9 +220,10 @@ test("Sticky a post", async () => {
if (!gammaPost) {
throw "Missing gamma post";
}
let gammaTrySticky = await featurePost(gamma, true, gammaPost.post);
// This has been failing occasionally
await featurePost(gamma, true, gammaPost.post);
let betaPost3 = (await resolvePost(beta, postRes.post_view.post)).post;
expect(gammaTrySticky.post_view.post.featured_community).toBe(true);
// expect(gammaTrySticky.post_view.post.featured_community).toBe(true);
expect(betaPost3?.post.featured_community).toBe(false);
});
@ -710,3 +712,25 @@ test("Fetch post via redirect", async () => {
expect(gammaPost.post?.post.ap_id).toBe(alphaPost.post_view.post.ap_id);
await unfollowRemotes(alpha);
});
test("Block post that contains banned URL", async () => {
let editSiteForm: EditSite = {
blocked_urls: ["https://evil.com/"],
};
await epsilon.editSite(editSiteForm);
await delay(500);
if (!betaCommunity) {
throw "Missing beta community";
}
expect(
createPost(epsilon, betaCommunity.community.id, "https://evil.com"),
).rejects.toStrictEqual(Error("blocked_url"));
// Later tests need this to be empty
editSiteForm.blocked_urls = [];
await epsilon.editSite(editSiteForm);
});


@ -45,7 +45,7 @@ test("Create user", async () => {
if (!site.my_user) {
throw "Missing site user";
}
apShortname = `@${site.my_user.local_user_view.person.name}@lemmy-alpha:8541`;
apShortname = `${site.my_user.local_user_view.person.name}@lemmy-alpha:8541`;
});
test("Set some user settings, check that they are federated", async () => {
@ -68,7 +68,7 @@ test("Delete user", async () => {
let user = await registerUser(alpha, alphaUrl);
// make a local post and comment
let alphaCommunity = (await resolveCommunity(user, "!main@lemmy-alpha:8541"))
let alphaCommunity = (await resolveCommunity(user, "main@lemmy-alpha:8541"))
.community;
if (!alphaCommunity) {
throw "Missing alpha community";
@ -134,8 +134,28 @@ test("Create user with Arabic name", async () => {
if (!site.my_user) {
throw "Missing site user";
}
apShortname = `@${site.my_user.local_user_view.person.name}@lemmy-alpha:8541`;
apShortname = `${site.my_user.local_user_view.person.name}@lemmy-alpha:8541`;
let alphaPerson = (await resolvePerson(alpha, apShortname)).person;
expect(alphaPerson).toBeDefined();
});
test("Create user with accept-language", async () => {
let lemmy_http = new LemmyHttp(alphaUrl, {
// https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Accept-Language#syntax
headers: { "Accept-Language": "fr-CH, en;q=0.8, de;q=0.7, *;q=0.5" },
});
let user = await registerUser(lemmy_http, alphaUrl);
let site = await getSite(user);
expect(site.my_user).toBeDefined();
expect(site.my_user?.local_user_view.local_user.interface_language).toBe(
"fr",
);
let langs = site.all_languages
.filter(a => site.my_user?.discussion_languages.includes(a.id))
.map(l => l.code);
// should have languages from accept header, as well as "undetermined"
// which is automatically enabled by backend
expect(langs).toStrictEqual(["und", "de", "en", "fr"]);
});


@ -34,7 +34,7 @@ tracing = { workspace = true }
chrono = { workspace = true }
url = { workspace = true }
wav = "1.0.0"
sitemap-rs = "0.2.0"
sitemap-rs = "0.2.1"
totp-rs = { version = "5.5.1", features = ["gen_secret", "otpauth"] }
actix-web-httpauth = "0.8.1"


@ -259,9 +259,9 @@ pub async fn local_user_view_from_jwt(
}
#[cfg(test)]
#[allow(clippy::unwrap_used)]
#[allow(clippy::indexing_slicing)]
mod tests {
#![allow(clippy::unwrap_used)]
#![allow(clippy::indexing_slicing)]
use super::*;


@ -6,10 +6,7 @@ use lemmy_api_common::{
person::GenerateTotpSecretResponse,
sensitive::Sensitive,
};
use lemmy_db_schema::{
source::local_user::{LocalUser, LocalUserUpdateForm},
traits::Crud,
};
use lemmy_db_schema::source::local_user::{LocalUser, LocalUserUpdateForm};
use lemmy_db_views::structs::{LocalUserView, SiteView};
use lemmy_utils::error::{LemmyError, LemmyErrorType};


@ -3,6 +3,7 @@ use lemmy_api_common::{
context::LemmyContext,
person::SaveUserSettings,
utils::{
get_url_blocklist,
local_site_to_slur_regex,
process_markdown_opt,
proxy_image_link_opt_api,
@ -35,7 +36,10 @@ pub async fn save_user_settings(
let site_view = SiteView::read_local(&mut context.pool()).await?;
let slur_regex = local_site_to_slur_regex(&site_view.local_site);
let bio = diesel_option_overwrite(process_markdown_opt(&data.bio, &slur_regex, &context).await?);
let url_blocklist = get_url_blocklist(&context).await?;
let bio = diesel_option_overwrite(
process_markdown_opt(&data.bio, &slur_regex, &url_blocklist, &context).await?,
);
let avatar = proxy_image_link_opt_api(&data.avatar, &context).await?;
let banner = proxy_image_link_opt_api(&data.banner, &context).await?;


@ -4,10 +4,7 @@ use lemmy_api_common::{
context::LemmyContext,
person::{UpdateTotp, UpdateTotpResponse},
};
use lemmy_db_schema::{
source::local_user::{LocalUser, LocalUserUpdateForm},
traits::Crud,
};
use lemmy_db_schema::source::local_user::{LocalUser, LocalUserUpdateForm};
use lemmy_db_views::structs::LocalUserView;
use lemmy_utils::error::LemmyError;


@ -4,6 +4,7 @@ use lemmy_db_schema::{
source::{
actor_language::SiteLanguage,
language::Language,
local_site_url_blocklist::LocalSiteUrlBlocklist,
local_user::{LocalUser, LocalUserUpdateForm},
moderator::{ModAdd, ModAddForm},
tagline::Tagline,
@ -62,6 +63,7 @@ pub async fn leave_admin(
let taglines = Tagline::get_all(&mut context.pool(), site_view.local_site.id).await?;
let custom_emojis =
CustomEmojiView::get_all(&mut context.pool(), site_view.local_site.id).await?;
let blocked_urls = LocalSiteUrlBlocklist::get_all(&mut context.pool()).await?;
Ok(Json(GetSiteResponse {
site_view,
@ -72,5 +74,6 @@ pub async fn leave_admin(
discussion_languages,
taglines,
custom_emojis,
blocked_urls,
}))
}


@ -42,8 +42,8 @@ pub async fn get_sitemap(context: Data<LemmyContext>) -> LemmyResult<HttpRespons
}
#[cfg(test)]
#[allow(clippy::unwrap_used)]
pub(crate) mod tests {
#![allow(clippy::unwrap_used)]
use crate::sitemap::generate_urlset;
use chrono::{DateTime, NaiveDate, Utc};


@ -59,6 +59,8 @@ uuid = { workspace = true, optional = true }
tokio = { workspace = true, optional = true }
reqwest = { workspace = true, optional = true }
ts-rs = { workspace = true, optional = true }
moka.workspace = true
anyhow.workspace = true
once_cell = { workspace = true, optional = true }
actix-web = { workspace = true, optional = true }
enum-map = { workspace = true }


@ -72,9 +72,9 @@ impl Claims {
}
#[cfg(test)]
#[allow(clippy::unwrap_used)]
#[allow(clippy::indexing_slicing)]
mod tests {
#![allow(clippy::unwrap_used)]
#![allow(clippy::indexing_slicing)]
use crate::{claims::Claims, context::LemmyContext};
use actix_web::test::TestRequest;
@ -124,7 +124,9 @@ mod tests {
.password_encrypted("123456".to_string())
.build();
let inserted_local_user = LocalUser::create(pool, &local_user_form).await.unwrap();
let inserted_local_user = LocalUser::create(pool, &local_user_form, vec![])
.await
.unwrap();
let req = TestRequest::default().to_http_request();
let jwt = Claims::generate(inserted_local_user.id, req, &context)


@ -59,14 +59,8 @@ pub async fn fetch_link_metadata(
let opengraph_data = extract_opengraph_data(&html_bytes, url)
.map_err(|e| info!("{e}"))
.unwrap_or_default();
let thumbnail = extract_thumbnail_from_opengraph_data(
url,
&opengraph_data,
&content_type,
generate_thumbnail,
context,
)
.await;
let thumbnail =
extract_thumbnail_from_opengraph_data(url, &opengraph_data, generate_thumbnail, context).await;
Ok(LinkMetadata {
opengraph_data,
@ -158,23 +152,21 @@ fn extract_opengraph_data(html_bytes: &[u8], url: &Url) -> Result<OpenGraphData,
pub async fn extract_thumbnail_from_opengraph_data(
url: &Url,
opengraph_data: &OpenGraphData,
content_type: &Option<Mime>,
generate_thumbnail: bool,
context: &LemmyContext,
) -> Option<DbUrl> {
let is_image = content_type.as_ref().unwrap_or(&mime::TEXT_PLAIN).type_() == mime::IMAGE;
if generate_thumbnail && is_image {
if generate_thumbnail {
let image_url = opengraph_data
.image
.as_ref()
.map(lemmy_db_schema::newtypes::DbUrl::inner)
.map(DbUrl::inner)
.unwrap_or(url);
generate_pictrs_thumbnail(image_url, context)
.await
.ok()
.map(Into::into)
} else {
None
opengraph_data.image.clone()
}
}
@ -321,9 +313,9 @@ async fn is_image_content_type(client: &ClientWithMiddleware, url: &Url) -> Resu
}
#[cfg(test)]
#[allow(clippy::unwrap_used)]
#[allow(clippy::indexing_slicing)]
mod tests {
#![allow(clippy::unwrap_used)]
#![allow(clippy::indexing_slicing)]
use crate::{
context::LemmyContext,
@ -363,7 +355,7 @@ mod tests {
Some(mime::TEXT_HTML_UTF_8.to_string()),
sample_res.content_type
);
assert_eq!(None, sample_res.thumbnail);
assert!(sample_res.thumbnail.is_some());
}
// #[test]


@ -6,6 +6,7 @@ use lemmy_db_schema::{
federation_queue_state::FederationQueueState,
instance::Instance,
language::Language,
local_site_url_blocklist::LocalSiteUrlBlocklist,
tagline::Tagline,
},
ListingType,
@ -268,6 +269,8 @@ pub struct EditSite {
pub allowed_instances: Option<Vec<String>>,
/// A list of blocked instances.
pub blocked_instances: Option<Vec<String>>,
/// A list of blocked URLs
pub blocked_urls: Option<Vec<String>>,
/// A list of taglines shown at the top of the front page.
pub taglines: Option<Vec<String>>,
pub registration_mode: Option<RegistrationMode>,
@ -305,6 +308,7 @@ pub struct GetSiteResponse {
pub taglines: Vec<Tagline>,
/// A list of custom emojis your site supports.
pub custom_emojis: Vec<CustomEmojiView>,
pub blocked_urls: Vec<LocalSiteUrlBlocklist>,
}
#[skip_serializing_none]


@ -17,6 +17,7 @@ use lemmy_db_schema::{
instance_block::InstanceBlock,
local_site::LocalSite,
local_site_rate_limit::LocalSiteRateLimit,
local_site_url_blocklist::LocalSiteUrlBlocklist,
password_reset_request::PasswordResetRequest,
person::{Person, PersonUpdateForm},
person_block::PersonBlock,
@ -38,18 +39,24 @@ use lemmy_utils::{
rate_limit::{ActionType, BucketConfig},
settings::structs::{PictrsImageMode, Settings},
utils::{
markdown::markdown_rewrite_image_links,
markdown::{markdown_check_for_blocked_urls, markdown_rewrite_image_links},
slurs::{build_slur_regex, remove_slurs},
},
};
use regex::Regex;
use moka::future::Cache;
use once_cell::sync::Lazy;
use regex::{escape, Regex, RegexSet};
use rosetta_i18n::{Language, LanguageId};
use std::collections::HashSet;
use std::{collections::HashSet, time::Duration};
use tracing::warn;
use url::{ParseError, Url};
use urlencoding::encode;
pub static AUTH_COOKIE_NAME: &str = "jwt";
#[cfg(debug_assertions)]
static URL_BLOCKLIST_RECHECK_DELAY: Duration = Duration::from_millis(500);
#[cfg(not(debug_assertions))]
static URL_BLOCKLIST_RECHECK_DELAY: Duration = Duration::from_secs(60);
#[tracing::instrument(skip_all)]
pub async fn is_mod_or_admin(
@ -516,6 +523,47 @@ pub fn local_site_opt_to_sensitive(local_site: &Option<LocalSite>) -> bool {
.unwrap_or(false)
}
pub async fn get_url_blocklist(context: &LemmyContext) -> LemmyResult<RegexSet> {
static URL_BLOCKLIST: Lazy<Cache<(), RegexSet>> = Lazy::new(|| {
Cache::builder()
.max_capacity(1)
.time_to_live(URL_BLOCKLIST_RECHECK_DELAY)
.build()
});
Ok(
URL_BLOCKLIST
.try_get_with::<_, LemmyError>((), async {
let urls = LocalSiteUrlBlocklist::get_all(&mut context.pool()).await?;
let regexes = urls.iter().map(|url| {
let url = &url.url;
let parsed = Url::parse(url).expect("Couldn't parse URL.");
if url.ends_with('/') {
format!(
"({}://)?{}{}?",
parsed.scheme(),
escape(parsed.domain().expect("No domain.")),
escape(parsed.path())
)
} else {
format!(
"({}://)?{}{}",
parsed.scheme(),
escape(parsed.domain().expect("No domain.")),
escape(parsed.path())
)
}
});
let set = RegexSet::new(regexes)?;
Ok(set)
})
.await
.map_err(|e| anyhow::anyhow!("Failed to build URL blocklist due to `{}`", e))?,
)
}
pub async fn send_application_approved_email(
user: &LocalUserView,
settings: &Settings,
@ -867,9 +915,13 @@ fn limit_expire_time(expires: DateTime<Utc>) -> LemmyResult<Option<DateTime<Utc>
pub async fn process_markdown(
text: &str,
slur_regex: &Option<Regex>,
url_blocklist: &RegexSet,
context: &LemmyContext,
) -> LemmyResult<String> {
let text = remove_slurs(text, slur_regex);
markdown_check_for_blocked_urls(&text, url_blocklist)?;
if context.settings().pictrs_config()?.image_mode() == PictrsImageMode::ProxyAllImages {
let (text, links) = markdown_rewrite_image_links(text);
RemoteImage::create(&mut context.pool(), links).await?;
@ -882,10 +934,13 @@ pub async fn process_markdown(
pub async fn process_markdown_opt(
text: &Option<String>,
slur_regex: &Option<Regex>,
url_blocklist: &RegexSet,
context: &LemmyContext,
) -> LemmyResult<Option<String>> {
match text {
Some(t) => process_markdown(t, slur_regex, context).await.map(Some),
Some(t) => process_markdown(t, slur_regex, url_blocklist, context)
.await
.map(Some),
None => Ok(None),
}
}
@ -964,9 +1019,9 @@ pub async fn proxy_image_link_opt_apub(
}
#[cfg(test)]
#[allow(clippy::unwrap_used)]
#[allow(clippy::indexing_slicing)]
mod tests {
#![allow(clippy::unwrap_used)]
#![allow(clippy::indexing_slicing)]
use super::*;
use pretty_assertions::assert_eq;

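The new `get_url_blocklist` helper in this file turns each blocked URL into an optional-scheme pattern and compiles the set into a `RegexSet`, which `is_url_blocked` and `markdown_check_for_blocked_urls` can then match against candidate text. Below is a minimal standalone sketch of that matching behavior, assuming only the `regex` and `url` crates; the function name and example URLs are illustrative and not part of the commit, and the trailing-slash special case is omitted for brevity.

```rust
use regex::{escape, RegexSet};
use url::Url;

type BoxError = Box<dyn std::error::Error>;

/// Build "(scheme://)?domain/path" patterns from blocked URLs,
/// mirroring the pattern construction in the diff above.
fn build_blocklist(blocked: &[&str]) -> Result<RegexSet, BoxError> {
    let mut patterns = Vec::new();
    for raw in blocked {
        let parsed = Url::parse(raw)?;
        let domain = parsed.domain().ok_or("blocked URL has no domain")?;
        // "(scheme://)?" keeps the scheme optional so plain "evil.com/..."
        // mentions are caught as well.
        patterns.push(format!(
            "({}://)?{}{}",
            parsed.scheme(),
            escape(domain),
            escape(parsed.path())
        ));
    }
    Ok(RegexSet::new(patterns)?)
}

fn main() -> Result<(), BoxError> {
    let set = build_blocklist(&["https://evil.com/"])?;
    // RegexSet matching is unanchored, so any text containing a blocked
    // domain/path matches, with or without the scheme.
    assert!(set.is_match("https://evil.com/some-page"));
    assert!(set.is_match("see evil.com/ for details"));
    assert!(!set.is_match("https://example.com/"));
    Ok(())
}
```

In the commit itself the compiled set is cached with `moka` and refreshed after `URL_BLOCKLIST_RECHECK_DELAY`, so blocklist edits take effect after at most that interval rather than requiring a rebuild on every request.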

@ -10,6 +10,7 @@ use lemmy_api_common::{
check_post_deleted_or_removed,
generate_local_apub_endpoint,
get_post,
get_url_blocklist,
is_mod_or_admin,
local_site_to_slur_regex,
process_markdown,
@ -44,7 +45,8 @@ pub async fn create_comment(
let local_site = LocalSite::read(&mut context.pool()).await?;
let slur_regex = local_site_to_slur_regex(&local_site);
let content = process_markdown(&data.content, &slur_regex, &context).await?;
let url_blocklist = get_url_blocklist(&context).await?;
let content = process_markdown(&data.content, &slur_regex, &url_blocklist, &context).await?;
is_valid_body_field(&Some(content.clone()), false)?;
// Check for a community ban
@ -162,10 +164,15 @@ pub async fn create_comment(
)
.await?;
// If its a reply, mark the parent as read
// If we're responding to a comment where we're the recipient,
// (ie we're the grandparent, or the recipient of the parent comment_reply),
// then mark the parent as read.
// Then we don't have to do it manually after we respond to a comment.
if let Some(parent) = parent_opt {
let person_id = local_user_view.person.id;
let parent_id = parent.id;
let comment_reply = CommentReply::read_by_comment(&mut context.pool(), parent_id).await;
let comment_reply =
CommentReply::read_by_comment_and_person(&mut context.pool(), parent_id, person_id).await;
if let Ok(reply) = comment_reply {
CommentReply::update(
&mut context.pool(),
@ -177,7 +184,6 @@ pub async fn create_comment(
}
// If the parent has PersonMentions mark them as read too
let person_id = local_user_view.person.id;
let person_mention =
PersonMention::read_by_comment_and_person(&mut context.pool(), parent_id, person_id).await;
if let Ok(mention) = person_mention {


@ -5,7 +5,12 @@ use lemmy_api_common::{
comment::{CommentResponse, EditComment},
context::LemmyContext,
send_activity::{ActivityChannel, SendActivityData},
utils::{check_community_user_action, local_site_to_slur_regex, process_markdown_opt},
utils::{
check_community_user_action,
get_url_blocklist,
local_site_to_slur_regex,
process_markdown_opt,
},
};
use lemmy_db_schema::{
source::{
@ -54,7 +59,8 @@ pub async fn update_comment(
.await?;
let slur_regex = local_site_to_slur_regex(&local_site);
let content = process_markdown_opt(&data.content, &slur_regex, &context).await?;
let url_blocklist = get_url_blocklist(&context).await?;
let content = process_markdown_opt(&data.content, &slur_regex, &url_blocklist, &context).await?;
is_valid_body_field(&content, false)?;
let comment_id = data.comment_id;


@ -9,6 +9,7 @@ use lemmy_api_common::{
generate_inbox_url,
generate_local_apub_endpoint,
generate_shared_inbox_url,
get_url_blocklist,
is_admin,
local_site_to_slur_regex,
process_markdown_opt,
@ -53,9 +54,11 @@ pub async fn create_community(
}
let slur_regex = local_site_to_slur_regex(&local_site);
let url_blocklist = get_url_blocklist(&context).await?;
check_slurs(&data.name, &slur_regex)?;
check_slurs(&data.title, &slur_regex)?;
let description = process_markdown_opt(&data.description, &slur_regex, &context).await?;
let description =
process_markdown_opt(&data.description, &slur_regex, &url_blocklist, &context).await?;
let icon = proxy_image_link_api(&data.icon, &context).await?;
let banner = proxy_image_link_api(&data.banner, &context).await?;


@ -7,6 +7,7 @@ use lemmy_api_common::{
send_activity::{ActivityChannel, SendActivityData},
utils::{
check_community_mod_action,
get_url_blocklist,
local_site_to_slur_regex,
process_markdown_opt,
proxy_image_link_opt_api,
@ -36,8 +37,10 @@ pub async fn update_community(
let local_site = LocalSite::read(&mut context.pool()).await?;
let slur_regex = local_site_to_slur_regex(&local_site);
let url_blocklist = get_url_blocklist(&context).await?;
check_slurs_opt(&data.title, &slur_regex)?;
let description = process_markdown_opt(&data.description, &slur_regex, &context).await?;
let description =
process_markdown_opt(&data.description, &slur_regex, &url_blocklist, &context).await?;
is_valid_body_field(&data.description, false)?;
let description = diesel_option_overwrite(description);


@ -9,6 +9,7 @@ use lemmy_api_common::{
utils::{
check_community_user_action,
generate_local_apub_endpoint,
get_url_blocklist,
honeypot_check,
local_site_to_slur_regex,
mark_post_as_read,
@ -38,6 +39,7 @@ use lemmy_utils::{
validation::{
check_url_scheme,
clean_url_params,
is_url_blocked,
is_valid_alt_text_field,
is_valid_body_field,
is_valid_post_title,
@ -60,8 +62,9 @@ pub async fn create_post(
let slur_regex = local_site_to_slur_regex(&local_site);
check_slurs(&data.name, &slur_regex)?;
let url_blocklist = get_url_blocklist(&context).await?;
let body = process_markdown_opt(&data.body, &slur_regex, &context).await?;
let body = process_markdown_opt(&data.body, &slur_regex, &url_blocklist, &context).await?;
let data_url = data.url.as_ref();
let url = data_url.map(clean_url_params); // TODO no good way to handle a "clear"
let custom_thumbnail = data.custom_thumbnail.as_ref().map(clean_url_params);
@ -69,6 +72,7 @@ pub async fn create_post(
is_valid_post_title(&data.name)?;
is_valid_body_field(&body, true)?;
is_valid_alt_text_field(&data.alt_text)?;
is_url_blocked(&url, &url_blocklist)?;
check_url_scheme(&url)?;
check_url_scheme(&custom_thumbnail)?;


@ -8,6 +8,7 @@ use lemmy_api_common::{
send_activity::{ActivityChannel, SendActivityData},
utils::{
check_community_user_action,
get_url_blocklist,
local_site_to_slur_regex,
process_markdown_opt,
proxy_image_link_opt_apub,
@ -30,6 +31,7 @@ use lemmy_utils::{
validation::{
check_url_scheme,
clean_url_params,
is_url_blocked,
is_valid_alt_text_field,
is_valid_body_field,
is_valid_post_title,
@ -51,9 +53,11 @@ pub async fn update_post(
let url = data.url.as_ref().map(clean_url_params);
let custom_thumbnail = data.custom_thumbnail.as_ref().map(clean_url_params);
let url_blocklist = get_url_blocklist(&context).await?;
let slur_regex = local_site_to_slur_regex(&local_site);
check_slurs_opt(&data.name, &slur_regex)?;
let body = process_markdown_opt(&data.body, &slur_regex, &context).await?;
let body = process_markdown_opt(&data.body, &slur_regex, &url_blocklist, &context).await?;
if let Some(name) = &data.name {
is_valid_post_title(name)?;
@ -61,6 +65,7 @@ pub async fn update_post(
is_valid_body_field(&body, true)?;
is_valid_alt_text_field(&data.alt_text)?;
is_url_blocked(&url, &url_blocklist)?;
check_url_scheme(&url)?;
check_url_scheme(&custom_thumbnail)?;


@ -8,6 +8,7 @@ use lemmy_api_common::{
check_person_block,
generate_local_apub_endpoint,
get_interface_language,
get_url_blocklist,
local_site_to_slur_regex,
process_markdown,
send_email_to_user,
@ -36,7 +37,8 @@ pub async fn create_private_message(
let local_site = LocalSite::read(&mut context.pool()).await?;
let slur_regex = local_site_to_slur_regex(&local_site);
let content = process_markdown(&data.content, &slur_regex, &context).await?;
let url_blocklist = get_url_blocklist(&context).await?;
let content = process_markdown(&data.content, &slur_regex, &url_blocklist, &context).await?;
is_valid_body_field(&Some(content.clone()), false)?;
check_person_block(


@ -4,7 +4,7 @@ use lemmy_api_common::{
context::LemmyContext,
private_message::{EditPrivateMessage, PrivateMessageResponse},
send_activity::{ActivityChannel, SendActivityData},
utils::{local_site_to_slur_regex, process_markdown},
utils::{get_url_blocklist, local_site_to_slur_regex, process_markdown},
};
use lemmy_db_schema::{
source::{
@ -37,7 +37,8 @@ pub async fn update_private_message(
// Doing the update
let slur_regex = local_site_to_slur_regex(&local_site);
let content = process_markdown(&data.content, &slur_regex, &context).await?;
let url_blocklist = get_url_blocklist(&context).await?;
let content = process_markdown(&data.content, &slur_regex, &url_blocklist, &context).await?;
is_valid_body_field(&Some(content.clone()), false)?;
let private_message_id = data.private_message_id;


@ -6,6 +6,7 @@ use lemmy_api_common::{
site::{CreateSite, SiteResponse},
utils::{
generate_shared_inbox_url,
get_url_blocklist,
is_admin,
local_site_rate_limit_to_rate_limit_config,
local_site_to_slur_regex,
@ -58,7 +59,8 @@ pub async fn create_site(
let keypair = generate_actor_keypair()?;
let slur_regex = local_site_to_slur_regex(&local_site);
let sidebar = process_markdown_opt(&data.sidebar, &slur_regex, &context).await?;
let url_blocklist = get_url_blocklist(&context).await?;
let sidebar = process_markdown_opt(&data.sidebar, &slur_regex, &url_blocklist, &context).await?;
let icon = proxy_image_link_opt_api(&data.icon, &context).await?;
let banner = proxy_image_link_opt_api(&data.banner, &context).await?;
@ -187,9 +189,9 @@ fn validate_create_payload(local_site: &LocalSite, create_site: &CreateSite) ->
}
#[cfg(test)]
#[allow(clippy::unwrap_used)]
#[allow(clippy::indexing_slicing)]
mod tests {
#![allow(clippy::unwrap_used)]
#![allow(clippy::indexing_slicing)]
use crate::site::create::validate_create_payload;
use lemmy_api_common::site::CreateSite;


@ -41,9 +41,9 @@ pub fn application_question_check(
}
#[cfg(test)]
#[allow(clippy::unwrap_used)]
#[allow(clippy::indexing_slicing)]
mod tests {
#![allow(clippy::unwrap_used)]
#![allow(clippy::indexing_slicing)]
use crate::site::{application_question_check, site_default_post_listing_type_check};
use lemmy_db_schema::{ListingType, RegistrationMode};


@ -6,6 +6,7 @@ use lemmy_api_common::{
use lemmy_db_schema::source::{
actor_language::{LocalUserLanguage, SiteLanguage},
language::Language,
local_site_url_blocklist::LocalSiteUrlBlocklist,
tagline::Tagline,
};
use lemmy_db_views::structs::{CustomEmojiView, LocalUserView, SiteView};
@ -47,6 +48,7 @@ pub async fn get_site(
let taglines = Tagline::get_all(&mut context.pool(), site_view.local_site.id).await?;
let custom_emojis =
CustomEmojiView::get_all(&mut context.pool(), site_view.local_site.id).await?;
let blocked_urls = LocalSiteUrlBlocklist::get_all(&mut context.pool()).await?;
Ok(GetSiteResponse {
site_view,
admins,
@ -56,6 +58,7 @@ pub async fn get_site(
discussion_languages,
taglines,
custom_emojis,
blocked_urls,
})
})
.await


@ -4,6 +4,7 @@ use lemmy_api_common::{
context::LemmyContext,
site::{EditSite, SiteResponse},
utils::{
get_url_blocklist,
is_admin,
local_site_rate_limit_to_rate_limit_config,
local_site_to_slur_regex,
@ -18,6 +19,7 @@ use lemmy_db_schema::{
federation_blocklist::FederationBlockList,
local_site::{LocalSite, LocalSiteUpdateForm},
local_site_rate_limit::{LocalSiteRateLimit, LocalSiteRateLimitUpdateForm},
local_site_url_blocklist::LocalSiteUrlBlocklist,
local_user::LocalUser,
site::{Site, SiteUpdateForm},
tagline::Tagline,
@ -34,6 +36,7 @@ use lemmy_utils::{
validation::{
build_and_check_regex,
check_site_visibility_valid,
check_urls_are_valid,
is_valid_body_field,
site_description_length_check,
site_name_length_check,
@ -61,7 +64,8 @@ pub async fn update_site(
}
let slur_regex = local_site_to_slur_regex(&local_site);
let sidebar = process_markdown_opt(&data.sidebar, &slur_regex, &context).await?;
let url_blocklist = get_url_blocklist(&context).await?;
let sidebar = process_markdown_opt(&data.sidebar, &slur_regex, &url_blocklist, &context).await?;
let icon = proxy_image_link_opt_api(&data.icon, &context).await?;
let banner = proxy_image_link_opt_api(&data.banner, &context).await?;
@ -137,6 +141,11 @@ pub async fn update_site(
let blocked = data.blocked_instances.clone();
FederationBlockList::replace(&mut context.pool(), blocked).await?;
if let Some(url_blocklist) = data.blocked_urls.clone() {
let parsed_urls = check_urls_are_valid(&url_blocklist)?;
LocalSiteUrlBlocklist::replace(&mut context.pool(), parsed_urls).await?;
}
// TODO can't think of a better way to do this.
// If the server suddenly requires email verification, or required applications, no old users
// will be able to log in. It really only wants this to be a requirement for NEW signups.
@ -222,9 +231,9 @@ fn validate_update_payload(local_site: &LocalSite, edit_site: &EditSite) -> Lemm
}
#[cfg(test)]
#[allow(clippy::unwrap_used)]
#[allow(clippy::indexing_slicing)]
mod tests {
#![allow(clippy::unwrap_used)]
#![allow(clippy::indexing_slicing)]
use crate::site::update::validate_update_payload;
use lemmy_api_common::site::EditSite;
@ -578,6 +587,7 @@ mod tests {
captcha_difficulty: None,
allowed_instances: None,
blocked_instances: None,
blocked_urls: None,
taglines: None,
registration_mode: site_registration_mode,
reports_email_admins: None,


@ -20,6 +20,7 @@ use lemmy_db_schema::{
aggregates::structs::PersonAggregates,
source::{
captcha_answer::{CaptchaAnswer, CheckCaptchaAnswer},
language::Language,
local_user::{LocalUser, LocalUserInsertForm},
local_user_vote_display_mode::LocalUserVoteDisplayMode,
person::{Person, PersonInsertForm},
@ -36,6 +37,7 @@ use lemmy_utils::{
validation::is_valid_actor_name,
},
};
use std::collections::HashSet;
#[tracing::instrument(skip(context))]
pub async fn register(
@ -128,12 +130,15 @@ pub async fn register(
let accepted_application = Some(!require_registration_application);
// Get the user's preferred language using the Accept-Language header
let language_tag = req.headers().get("Accept-Language").and_then(|hdr| {
accept_language::parse(hdr.to_str().unwrap_or_default())
.first()
// Remove the optional region code
.map(|lang_str| lang_str.split('-').next().unwrap_or_default().to_string())
});
let language_tags: Vec<String> = req
.headers()
.get("Accept-Language")
.map(|hdr| accept_language::parse(hdr.to_str().unwrap_or_default()))
.iter()
.flatten()
// Remove the optional region code
.map(|lang_str| lang_str.split('-').next().unwrap_or_default().to_string())
.collect();
// Create the local user
let local_user_form = LocalUserInsertForm::builder()
@ -144,12 +149,23 @@ pub async fn register(
.accepted_application(accepted_application)
.default_listing_type(Some(local_site.default_post_listing_type))
.post_listing_mode(Some(local_site.default_post_listing_mode))
.interface_language(language_tag)
.interface_language(language_tags.first().cloned())
// If its the initial site setup, they are an admin
.admin(Some(!local_site.site_setup))
.build();
let inserted_local_user = LocalUser::create(&mut context.pool(), &local_user_form).await?;
let all_languages = Language::read_all(&mut context.pool()).await?;
// use hashset to avoid duplicates
let mut language_ids = HashSet::new();
for l in language_tags {
if let Some(found) = all_languages.iter().find(|all| all.code == l) {
language_ids.insert(found.id);
}
}
let language_ids = language_ids.into_iter().collect();
let inserted_local_user =
LocalUser::create(&mut context.pool(), &local_user_form, language_ids).await?;
if local_site.site_setup && require_registration_application {
// Create the registration application

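The registration change above now collects every tag from the `Accept-Language` header (instead of only the first) and matches the list against the known languages. A minimal sketch of the header-parsing step, assuming the `accept-language` crate the handler already uses; the helper name and example header are illustrative only:

```rust
/// Turn an Accept-Language header into plain language codes, highest
/// quality first, dropping the optional region part ("fr-CH" -> "fr").
fn accept_header_to_codes(header: &str) -> Vec<String> {
    accept_language::parse(header)
        .iter()
        .map(|tag| tag.split('-').next().unwrap_or_default().to_string())
        .collect()
}

fn main() {
    let codes = accept_header_to_codes("fr-CH, en;q=0.8, de;q=0.7");
    // The first (highest-quality) code becomes interface_language; the
    // whole list is matched against Language::read_all() to seed the
    // user's discussion languages, as in the register handler above.
    assert_eq!(codes, vec!["fr", "en", "de"]);
}
```

The API test earlier in this commit then expects the enabled discussion languages to also include "und" (undetermined), which the backend enables automatically.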

@ -8,6 +8,6 @@
"type": "Block",
"removeData": true,
"summary": "spam post",
"expires": "2021-11-01T12:23:50.151874Z",
"endTime": "2021-11-01T12:23:50.151874Z",
"id": "http://enterprise.lemmy.ml/activities/block/5d42fffb-0903-4625-86d4-0b39bb344fc2"
}


@ -11,7 +11,7 @@
"type": "Block",
"removeData": true,
"summary": "spam post",
"expires": "2021-11-01T12:23:50.151874Z",
"endTime": "2021-11-01T12:23:50.151874Z",
"id": "http://enterprise.lemmy.ml/activities/block/726f43ab-bd0e-4ab3-89c8-627e976f553c"
},
"cc": ["http://enterprise.lemmy.ml/c/main"],


@ -3,5 +3,6 @@
"to": ["https://www.w3.org/ns/activitystreams#Public"],
"object": "http://ds9.lemmy.ml/u/lemmy_alpha",
"type": "Delete",
"id": "http://ds9.lemmy.ml/activities/delete/f2abee48-c7bb-41d5-9e27-8775ff32db12"
"id": "http://ds9.lemmy.ml/activities/delete/f2abee48-c7bb-41d5-9e27-8775ff32db12",
"removeData": true
}


@ -2,7 +2,7 @@ use crate::{
activities::{
generate_activity_id,
verify_person_in_community,
voting::{vote_comment, vote_post},
voting::{undo_vote_comment, undo_vote_post, vote_comment, vote_post},
},
insert_received_activity,
objects::{community::ApubCommunity, person::ApubPerson},
@ -17,7 +17,6 @@ use activitypub_federation::{
fetch::object_id::ObjectId,
traits::{ActivityHandler, Actor},
};
use anyhow::anyhow;
use lemmy_api_common::{context::LemmyContext, utils::check_bot_account};
use lemmy_db_schema::source::local_site::LocalSite;
use lemmy_utils::error::LemmyError;
@ -58,15 +57,7 @@ impl ActivityHandler for Vote {
async fn verify(&self, context: &Data<LemmyContext>) -> Result<(), LemmyError> {
let community = self.community(context).await?;
verify_person_in_community(&self.actor, &community, context).await?;
let enable_downvotes = LocalSite::read(&mut context.pool())
.await
.map(|l| l.enable_downvotes)
.unwrap_or(true);
if self.kind == VoteType::Dislike && !enable_downvotes {
Err(anyhow!("Downvotes disabled").into())
} else {
Ok(())
}
Ok(())
}
#[tracing::instrument(skip_all)]
@ -77,9 +68,22 @@ impl ActivityHandler for Vote {
check_bot_account(&actor.0)?;
match object {
PostOrComment::Post(p) => vote_post(&self.kind, actor, &p, context).await,
PostOrComment::Comment(c) => vote_comment(&self.kind, actor, &c, context).await,
let enable_downvotes = LocalSite::read(&mut context.pool())
.await
.map(|l| l.enable_downvotes)
.unwrap_or(true);
if self.kind == VoteType::Dislike && !enable_downvotes {
// If this is a downvote but downvotes are ignored, only undo any existing vote
match object {
PostOrComment::Post(p) => undo_vote_post(actor, &p, context).await,
PostOrComment::Comment(c) => undo_vote_comment(actor, &c, context).await,
}
} else {
// Otherwise apply the vote normally
match object {
PostOrComment::Post(p) => vote_post(&self.kind, actor, &p, context).await,
PostOrComment::Comment(c) => vote_comment(&self.kind, actor, &c, context).await,
}
}
}
}


@ -123,8 +123,8 @@ impl InCommunity for AnnouncableActivities {
}
#[cfg(test)]
#[allow(clippy::indexing_slicing)]
mod tests {
#![allow(clippy::indexing_slicing)]
use crate::{
activity_lists::{GroupInboxActivities, PersonInboxActivities, SharedInboxActivities},


@ -1,7 +1,6 @@
use crate::fetcher::search::{
search_query_to_object_id,
search_query_to_object_id_local,
SearchableObjects,
use crate::fetcher::{
search::{search_query_to_object_id, search_query_to_object_id_local, SearchableObjects},
user_or_community::UserOrCommunity,
};
use activitypub_federation::config::Data;
use actix_web::web::{Json, Query};
@ -31,7 +30,7 @@ pub async fn resolve_object(
let res = if is_authenticated {
// user is fully authenticated; allow remote lookups as well.
search_query_to_object_id(&data.q, &context).await
search_query_to_object_id(data.q.clone(), &context).await
} else {
// user isn't authenticated only allow a local search.
search_query_to_object_id_local(&data.q, &context).await
@ -52,14 +51,6 @@ async fn convert_response(
let removed_or_deleted;
let mut res = ResolveObjectResponse::default();
match object {
Person(p) => {
removed_or_deleted = p.deleted;
res.person = Some(PersonView::read(pool, p.id).await?)
}
Community(c) => {
removed_or_deleted = c.deleted || c.removed;
res.community = Some(CommunityView::read(pool, c.id, user_id, false).await?)
}
Post(p) => {
removed_or_deleted = p.deleted || p.removed;
res.post = Some(PostView::read(pool, p.id, user_id, false).await?)
@ -68,6 +59,16 @@ async fn convert_response(
removed_or_deleted = c.deleted || c.removed;
res.comment = Some(CommentView::read(pool, c.id, user_id).await?)
}
PersonOrCommunity(p) => match *p {
UserOrCommunity::User(u) => {
removed_or_deleted = u.deleted;
res.person = Some(PersonView::read(pool, u.id).await?)
}
UserOrCommunity::Community(c) => {
removed_or_deleted = c.deleted || c.removed;
res.community = Some(CommunityView::read(pool, c.id, user_id, false).await?)
}
},
};
// if the object was deleted from database, dont return it
if removed_or_deleted {


@ -319,8 +319,8 @@ pub async fn import_settings(
}
#[cfg(test)]
#[allow(clippy::indexing_slicing)]
mod tests {
#![allow(clippy::indexing_slicing)]
use crate::api::user_settings_backup::{export_settings, import_settings};
use activitypub_federation::config::Data;
@ -361,7 +361,7 @@ mod tests {
.person_id(person.id)
.password_encrypted("pass".to_string())
.build();
let local_user = LocalUser::create(&mut context.pool(), &user_form).await?;
let local_user = LocalUser::create(&mut context.pool(), &user_form, vec![]).await?;
Ok(LocalUserView::read(&mut context.pool(), local_user.id).await?)
}

View file

@ -101,8 +101,8 @@ impl Collection for ApubCommunityModerators {
}
#[cfg(test)]
#[allow(clippy::indexing_slicing)]
mod tests {
#![allow(clippy::indexing_slicing)]
use super::*;
use crate::{

View file

@ -1,6 +1,7 @@
use crate::{
fetcher::user_or_community::{PersonOrGroup, UserOrCommunity},
objects::{comment::ApubComment, community::ApubCommunity, person::ApubPerson, post::ApubPost},
protocol::objects::{group::Group, note::Note, page::Page, person::Person},
protocol::objects::{note::Note, page::Page},
};
use activitypub_federation::{
config::Data,
@ -9,7 +10,7 @@ use activitypub_federation::{
};
use chrono::{DateTime, Utc};
use lemmy_api_common::context::LemmyContext;
use lemmy_utils::error::{LemmyError, LemmyErrorType};
use lemmy_utils::error::LemmyError;
use serde::Deserialize;
use url::Url;
@ -18,28 +19,22 @@ use url::Url;
/// which gets resolved to a URL.
#[tracing::instrument(skip_all)]
pub(crate) async fn search_query_to_object_id(
query: &str,
mut query: String,
context: &Data<LemmyContext>,
) -> Result<SearchableObjects, LemmyError> {
Ok(match Url::parse(query) {
Ok(match Url::parse(&query) {
Ok(url) => {
// it's already a URL, just go with it
ObjectId::from(url).dereference(context).await?
}
Err(_) => {
// not a URL, try to resolve via webfinger
let mut chars = query.chars();
let kind = chars.next();
let identifier = chars.as_str();
match kind {
Some('@') => SearchableObjects::Person(
webfinger_resolve_actor::<LemmyContext, ApubPerson>(identifier, context).await?,
),
Some('!') => SearchableObjects::Community(
webfinger_resolve_actor::<LemmyContext, ApubCommunity>(identifier, context).await?,
),
_ => return Err(LemmyErrorType::InvalidQuery)?,
if query.starts_with('!') || query.starts_with('@') {
query.remove(0);
}
SearchableObjects::PersonOrCommunity(Box::new(
webfinger_resolve_actor::<LemmyContext, UserOrCommunity>(&query, context).await?,
))
}
})
}
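
Editor's note: the webfinger branch now treats '!' and '@' prefixes identically; the prefix is stripped and a single UserOrCommunity lookup decides whether the handle belongs to a person or a community, instead of matching on the first character. A self-contained sketch of the query normalization only (SearchTarget and normalize_search_query are illustrative; only the url crate is assumed):

use url::Url;

enum SearchTarget {
  Direct(Url),       // already an ActivityPub object id, dereference it directly
  Webfinger(String), // bare handle, resolve via webfinger as user-or-community
}

fn normalize_search_query(mut query: String) -> SearchTarget {
  match Url::parse(&query) {
    Ok(url) => SearchTarget::Direct(url),
    Err(_) => {
      // A single leading '!' (community) or '@' (user) prefix is stripped;
      // the UserOrCommunity webfinger lookup decides which kind it is.
      if query.starts_with('!') || query.starts_with('@') {
        query.remove(0);
      }
      SearchTarget::Webfinger(query)
    }
  }
}

fn main() {
  match normalize_search_query("!asklemmy@lemmy.ml".into()) {
    SearchTarget::Webfinger(handle) => assert_eq!(handle, "asklemmy@lemmy.ml"),
    SearchTarget::Direct(_) => unreachable!(),
  }
  assert!(matches!(
    normalize_search_query("https://lemmy.ml/c/asklemmy".into()),
    SearchTarget::Direct(_)
  ));
}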
@ -59,19 +54,17 @@ pub(crate) async fn search_query_to_object_id_local(
/// The types of ActivityPub objects that can be fetched directly by searching for their ID.
#[derive(Debug)]
pub(crate) enum SearchableObjects {
Person(ApubPerson),
Community(ApubCommunity),
Post(ApubPost),
Comment(ApubComment),
PersonOrCommunity(Box<UserOrCommunity>),
}
#[derive(Deserialize)]
#[serde(untagged)]
pub(crate) enum SearchableKinds {
Group(Group),
Person(Person),
Page(Page),
Page(Box<Page>),
Note(Note),
PersonOrGroup(Box<PersonOrGroup>),
}
#[async_trait::async_trait]
@ -82,10 +75,9 @@ impl Object for SearchableObjects {
fn last_refreshed_at(&self) -> Option<DateTime<Utc>> {
match self {
SearchableObjects::Person(p) => p.last_refreshed_at(),
SearchableObjects::Community(c) => c.last_refreshed_at(),
SearchableObjects::Post(p) => p.last_refreshed_at(),
SearchableObjects::Comment(c) => c.last_refreshed_at(),
SearchableObjects::PersonOrCommunity(p) => p.last_refreshed_at(),
}
}
@ -99,13 +91,9 @@ impl Object for SearchableObjects {
object_id: Url,
context: &Data<Self::DataType>,
) -> Result<Option<Self>, LemmyError> {
let c = ApubCommunity::read_from_id(object_id.clone(), context).await?;
if let Some(c) = c {
return Ok(Some(SearchableObjects::Community(c)));
}
let p = ApubPerson::read_from_id(object_id.clone(), context).await?;
if let Some(p) = p {
return Ok(Some(SearchableObjects::Person(p)));
let uc = UserOrCommunity::read_from_id(object_id.clone(), context).await?;
if let Some(uc) = uc {
return Ok(Some(SearchableObjects::PersonOrCommunity(Box::new(uc))));
}
let p = ApubPost::read_from_id(object_id.clone(), context).await?;
if let Some(p) = p {
@ -121,10 +109,12 @@ impl Object for SearchableObjects {
#[tracing::instrument(skip_all)]
async fn delete(self, data: &Data<Self::DataType>) -> Result<(), LemmyError> {
match self {
SearchableObjects::Person(p) => p.delete(data).await,
SearchableObjects::Community(c) => c.delete(data).await,
SearchableObjects::Post(p) => p.delete(data).await,
SearchableObjects::Comment(c) => c.delete(data).await,
SearchableObjects::PersonOrCommunity(pc) => match *pc {
UserOrCommunity::User(p) => p.delete(data).await,
UserOrCommunity::Community(c) => c.delete(data).await,
},
}
}
@ -139,10 +129,12 @@ impl Object for SearchableObjects {
data: &Data<Self::DataType>,
) -> Result<(), LemmyError> {
match apub {
SearchableKinds::Group(a) => ApubCommunity::verify(a, expected_domain, data).await,
SearchableKinds::Person(a) => ApubPerson::verify(a, expected_domain, data).await,
SearchableKinds::Page(a) => ApubPost::verify(a, expected_domain, data).await,
SearchableKinds::Note(a) => ApubComment::verify(a, expected_domain, data).await,
SearchableKinds::PersonOrGroup(pg) => match pg.as_ref() {
PersonOrGroup::Person(a) => ApubPerson::verify(a, expected_domain, data).await,
PersonOrGroup::Group(a) => ApubCommunity::verify(a, expected_domain, data).await,
},
}
}
@ -151,10 +143,11 @@ impl Object for SearchableObjects {
use SearchableKinds as SAT;
use SearchableObjects as SO;
Ok(match apub {
SAT::Group(g) => SO::Community(ApubCommunity::from_json(g, context).await?),
SAT::Person(p) => SO::Person(ApubPerson::from_json(p, context).await?),
SAT::Page(p) => SO::Post(ApubPost::from_json(p, context).await?),
SAT::Page(p) => SO::Post(ApubPost::from_json(*p, context).await?),
SAT::Note(n) => SO::Comment(ApubComment::from_json(n, context).await?),
SAT::PersonOrGroup(pg) => {
SO::PersonOrCommunity(Box::new(UserOrCommunity::from_json(*pg, context).await?))
}
})
}
}

View file

@ -115,9 +115,9 @@ pub(crate) async fn get_apub_community_featured(
}
#[cfg(test)]
#[allow(clippy::unwrap_used)]
#[allow(clippy::indexing_slicing)]
pub(crate) mod tests {
#![allow(clippy::unwrap_used)]
#![allow(clippy::indexing_slicing)]
use super::*;
use crate::protocol::objects::{group::Group, tombstone::Tombstone};

View file

@ -18,7 +18,7 @@ use activitypub_federation::{
use chrono::{DateTime, Utc};
use lemmy_api_common::{
context::LemmyContext,
utils::{is_mod_or_admin, local_site_opt_to_slur_regex, process_markdown},
utils::{get_url_blocklist, is_mod_or_admin, local_site_opt_to_slur_regex, process_markdown},
};
use lemmy_db_schema::{
source::{
@ -165,7 +165,8 @@ impl Object for ApubComment {
let local_site = LocalSite::read(&mut context.pool()).await.ok();
let slur_regex = &local_site_opt_to_slur_regex(&local_site);
let content = process_markdown(&content, slur_regex, context).await?;
let url_blocklist = get_url_blocklist(context).await?;
let content = process_markdown(&content, slur_regex, &url_blocklist, context).await?;
let language_id =
LanguageTag::to_language_id_single(note.language, &mut context.pool()).await?;

View file

@ -21,6 +21,7 @@ use lemmy_api_common::{
generate_featured_url,
generate_moderators_url,
generate_outbox_url,
get_url_blocklist,
local_site_opt_to_slur_regex,
process_markdown_opt,
proxy_image_link_opt_apub,
@ -141,8 +142,10 @@ impl Object for ApubCommunity {
let local_site = LocalSite::read(&mut context.pool()).await.ok();
let slur_regex = &local_site_opt_to_slur_regex(&local_site);
let url_blocklist = get_url_blocklist(context).await?;
let description = read_from_string_or_source_opt(&group.summary, &None, &group.source);
let description = process_markdown_opt(&description, slur_regex, context).await?;
let description =
process_markdown_opt(&description, slur_regex, &url_blocklist, context).await?;
let icon = proxy_image_link_opt_apub(group.icon.map(|i| i.url), context).await?;
let banner = proxy_image_link_opt_apub(group.image.map(|i| i.url), context).await?;

View file

@ -19,7 +19,12 @@ use activitypub_federation::{
use chrono::{DateTime, Utc};
use lemmy_api_common::{
context::LemmyContext,
utils::{local_site_opt_to_slur_regex, process_markdown_opt, proxy_image_link_opt_apub},
utils::{
get_url_blocklist,
local_site_opt_to_slur_regex,
process_markdown_opt,
proxy_image_link_opt_apub,
},
};
use lemmy_db_schema::{
newtypes::InstanceId,
@ -138,8 +143,9 @@ impl Object for ApubSite {
let local_site = LocalSite::read(&mut context.pool()).await.ok();
let slur_regex = &local_site_opt_to_slur_regex(&local_site);
let url_blocklist = get_url_blocklist(context).await?;
let sidebar = read_from_string_or_source_opt(&apub.content, &None, &apub.source);
let sidebar = process_markdown_opt(&sidebar, slur_regex, context).await?;
let sidebar = process_markdown_opt(&sidebar, slur_regex, &url_blocklist, context).await?;
let icon = proxy_image_link_opt_apub(apub.icon.map(|i| i.url), context).await?;
let banner = proxy_image_link_opt_apub(apub.image.map(|i| i.url), context).await?;

View file

@ -22,6 +22,7 @@ use lemmy_api_common::{
context::LemmyContext,
utils::{
generate_outbox_url,
get_url_blocklist,
local_site_opt_to_slur_regex,
process_markdown_opt,
proxy_image_link_opt_apub,
@ -152,8 +153,9 @@ impl Object for ApubPerson {
let local_site = LocalSite::read(&mut context.pool()).await.ok();
let slur_regex = &local_site_opt_to_slur_regex(&local_site);
let url_blocklist = get_url_blocklist(context).await?;
let bio = read_from_string_or_source_opt(&person.summary, &None, &person.source);
let bio = process_markdown_opt(&bio, slur_regex, context).await?;
let bio = process_markdown_opt(&bio, slur_regex, &url_blocklist, context).await?;
let avatar = proxy_image_link_opt_apub(person.icon.map(|i| i.url), context).await?;
let banner = proxy_image_link_opt_apub(person.image.map(|i| i.url), context).await?;

View file

@ -26,6 +26,7 @@ use lemmy_api_common::{
context::LemmyContext,
request::fetch_link_metadata_opt,
utils::{
get_url_blocklist,
local_site_opt_to_sensitive,
local_site_opt_to_slur_regex,
process_markdown_opt,
@ -246,9 +247,10 @@ impl Object for ApubPost {
let thumbnail_url = proxy_image_link_opt_apub(thumbnail_url, context).await?;
let slur_regex = &local_site_opt_to_slur_regex(&local_site);
let url_blocklist = get_url_blocklist(context).await?;
let body = read_from_string_or_source_opt(&page.content, &page.media_type, &page.source);
let body = process_markdown_opt(&body, slur_regex, context).await?;
let body = process_markdown_opt(&body, slur_regex, &url_blocklist, context).await?;
let language_id =
LanguageTag::to_language_id_single(page.language, &mut context.pool()).await?;

View file

@ -14,7 +14,7 @@ use activitypub_federation::{
use chrono::{DateTime, Utc};
use lemmy_api_common::{
context::LemmyContext,
utils::{check_person_block, local_site_opt_to_slur_regex, process_markdown},
utils::{check_person_block, get_url_blocklist, local_site_opt_to_slur_regex, process_markdown},
};
use lemmy_db_schema::{
source::{
@ -127,8 +127,9 @@ impl Object for ApubPrivateMessage {
let local_site = LocalSite::read(&mut context.pool()).await.ok();
let slur_regex = &local_site_opt_to_slur_regex(&local_site);
let url_blocklist = get_url_blocklist(context).await?;
let content = read_from_string_or_source(&note.content, &None, &note.source);
let content = process_markdown(&content, slur_regex, context).await?;
let content = process_markdown(&content, slur_regex, &url_blocklist, context).await?;
let form = PrivateMessageInsertForm {
creator_id: creator.id,

View file

@ -33,9 +33,9 @@ impl CommentAggregates {
}
#[cfg(test)]
#[allow(clippy::unwrap_used)]
#[allow(clippy::indexing_slicing)]
mod tests {
#![allow(clippy::unwrap_used)]
#![allow(clippy::indexing_slicing)]
use crate::{
aggregates::comment_aggregates::CommentAggregates,

View file

@ -31,9 +31,9 @@ impl CommunityAggregates {
}
#[cfg(test)]
#[allow(clippy::unwrap_used)]
#[allow(clippy::indexing_slicing)]
mod tests {
#![allow(clippy::unwrap_used)]
#![allow(clippy::indexing_slicing)]
use crate::{
aggregates::community_aggregates::CommunityAggregates,

View file

@ -18,9 +18,9 @@ impl PersonAggregates {
}
#[cfg(test)]
#[allow(clippy::unwrap_used)]
#[allow(clippy::indexing_slicing)]
mod tests {
#![allow(clippy::unwrap_used)]
#![allow(clippy::indexing_slicing)]
use crate::{
aggregates::person_aggregates::PersonAggregates,

View file

@ -52,9 +52,9 @@ impl PostAggregates {
}
#[cfg(test)]
#[allow(clippy::unwrap_used)]
#[allow(clippy::indexing_slicing)]
mod tests {
#![allow(clippy::unwrap_used)]
#![allow(clippy::indexing_slicing)]
use crate::{
aggregates::post_aggregates::PostAggregates,

View file

@ -14,9 +14,9 @@ impl SiteAggregates {
}
#[cfg(test)]
#[allow(clippy::unwrap_used)]
#[allow(clippy::indexing_slicing)]
mod tests {
#![allow(clippy::unwrap_used)]
#![allow(clippy::indexing_slicing)]
use crate::{
aggregates::site_aggregates::SiteAggregates,

View file

@ -61,9 +61,9 @@ impl ReceivedActivity {
}
#[cfg(test)]
#[allow(clippy::unwrap_used)]
#[allow(clippy::indexing_slicing)]
mod tests {
#![allow(clippy::unwrap_used)]
#![allow(clippy::indexing_slicing)]
use super::*;
use crate::{source::activity::ActorType, utils::build_db_pool_for_tests};

View file

@ -385,9 +385,9 @@ async fn convert_read_languages(
}
#[cfg(test)]
#[allow(clippy::unwrap_used)]
#[allow(clippy::indexing_slicing)]
mod tests {
#![allow(clippy::unwrap_used)]
#![allow(clippy::indexing_slicing)]
use super::*;
use crate::{
@ -523,10 +523,6 @@ mod tests {
let pool = &mut pool.into();
let (site, instance) = create_test_site(pool).await;
let mut test_langs = test_langs1(pool).await;
SiteLanguage::update(pool, test_langs.clone(), &site)
.await
.unwrap();
let person_form = PersonInsertForm::builder()
.name("my test person".to_string())
@ -539,14 +535,13 @@ mod tests {
.password_encrypted("my_pw".to_string())
.build();
let local_user = LocalUser::create(pool, &local_user_form).await.unwrap();
let local_user = LocalUser::create(pool, &local_user_form, vec![])
.await
.unwrap();
let local_user_langs1 = LocalUserLanguage::read(pool, local_user.id).await.unwrap();
// new user should be initialized with site languages and undetermined
//test_langs.push(UNDETERMINED_ID);
//test_langs.sort();
test_langs.insert(0, UNDETERMINED_ID);
assert_eq!(test_langs, local_user_langs1);
// new user should be initialized with all languages
assert_eq!(0, local_user_langs1.len());
// update user languages
let test_langs2 = test_langs2(pool).await;
@ -655,7 +650,9 @@ mod tests {
.person_id(person.id)
.password_encrypted("my_pw".to_string())
.build();
let local_user = LocalUser::create(pool, &local_user_form).await.unwrap();
let local_user = LocalUser::create(pool, &local_user_form, vec![])
.await
.unwrap();
LocalUserLanguage::update(pool, test_langs2, local_user.id)
.await
.unwrap();

View file

@ -48,9 +48,9 @@ impl CaptchaAnswer {
}
#[cfg(test)]
#[allow(clippy::unwrap_used)]
#[allow(clippy::indexing_slicing)]
mod tests {
#![allow(clippy::unwrap_used)]
#![allow(clippy::indexing_slicing)]
use crate::{
source::captcha_answer::{CaptchaAnswer, CaptchaAnswerForm, CheckCaptchaAnswer},

View file

@ -241,9 +241,9 @@ impl Saveable for CommentSaved {
}
#[cfg(test)]
#[allow(clippy::unwrap_used)]
#[allow(clippy::indexing_slicing)]
mod tests {
#![allow(clippy::unwrap_used)]
#![allow(clippy::indexing_slicing)]
use crate::{
newtypes::LanguageId,

View file

@ -1,6 +1,6 @@
use crate::{
newtypes::{CommentId, CommentReplyId, PersonId},
schema::comment_reply::dsl::{comment_id, comment_reply, read, recipient_id},
schema::comment_reply,
source::comment_reply::{CommentReply, CommentReplyInsertForm, CommentReplyUpdateForm},
traits::Crud,
utils::{get_conn, DbPool},
@ -22,9 +22,9 @@ impl Crud for CommentReply {
// since the return here isn't utilized, we don't need to do an update
// but get_result doesn't return the existing row here
insert_into(comment_reply)
insert_into(comment_reply::table)
.values(comment_reply_form)
.on_conflict((recipient_id, comment_id))
.on_conflict((comment_reply::recipient_id, comment_reply::comment_id))
.do_update()
.set(comment_reply_form)
.get_result::<Self>(conn)
@ -37,7 +37,7 @@ impl Crud for CommentReply {
comment_reply_form: &Self::UpdateForm,
) -> Result<Self, Error> {
let conn = &mut get_conn(pool).await?;
diesel::update(comment_reply.find(comment_reply_id))
diesel::update(comment_reply::table.find(comment_reply_id))
.set(comment_reply_form)
.get_result::<Self>(conn)
.await
@ -51,11 +51,11 @@ impl CommentReply {
) -> Result<Vec<CommentReply>, Error> {
let conn = &mut get_conn(pool).await?;
diesel::update(
comment_reply
.filter(recipient_id.eq(for_recipient_id))
.filter(read.eq(false)),
comment_reply::table
.filter(comment_reply::recipient_id.eq(for_recipient_id))
.filter(comment_reply::read.eq(false)),
)
.set(read.eq(true))
.set(comment_reply::read.eq(true))
.get_results::<Self>(conn)
.await
}
@ -65,17 +65,30 @@ impl CommentReply {
for_comment_id: CommentId,
) -> Result<Self, Error> {
let conn = &mut get_conn(pool).await?;
comment_reply
.filter(comment_id.eq(for_comment_id))
comment_reply::table
.filter(comment_reply::comment_id.eq(for_comment_id))
.first::<Self>(conn)
.await
}
pub async fn read_by_comment_and_person(
pool: &mut DbPool<'_>,
for_comment_id: CommentId,
for_recipient_id: PersonId,
) -> Result<Self, Error> {
let conn = &mut get_conn(pool).await?;
comment_reply::table
.filter(comment_reply::comment_id.eq(for_comment_id))
.filter(comment_reply::recipient_id.eq(for_recipient_id))
.first::<Self>(conn)
.await
}
}
#[cfg(test)]
#[allow(clippy::unwrap_used)]
#[allow(clippy::indexing_slicing)]
mod tests {
#![allow(clippy::unwrap_used)]
#![allow(clippy::indexing_slicing)]
use crate::{
source::{

View file

@ -381,9 +381,9 @@ impl ApubActor for Community {
}
#[cfg(test)]
#[allow(clippy::unwrap_used)]
#[allow(clippy::indexing_slicing)]
mod tests {
#![allow(clippy::unwrap_used)]
#![allow(clippy::indexing_slicing)]
use crate::{
source::{

View file

@ -48,9 +48,9 @@ impl FederationAllowList {
}
}
#[cfg(test)]
#[allow(clippy::unwrap_used)]
#[allow(clippy::indexing_slicing)]
mod tests {
#![allow(clippy::unwrap_used)]
#![allow(clippy::indexing_slicing)]
use crate::{
source::{federation_allowlist::FederationAllowList, instance::Instance},

View file

@ -41,9 +41,9 @@ impl Language {
}
#[cfg(test)]
#[allow(clippy::unwrap_used)]
#[allow(clippy::indexing_slicing)]
mod tests {
#![allow(clippy::unwrap_used)]
#![allow(clippy::indexing_slicing)]
use crate::{source::language::Language, utils::build_db_pool_for_tests};
use pretty_assertions::assert_eq;

View file

@ -0,0 +1,49 @@
use crate::{
schema::local_site_url_blocklist,
source::local_site_url_blocklist::{LocalSiteUrlBlocklist, LocalSiteUrlBlocklistForm},
utils::{get_conn, DbPool},
};
use diesel::{dsl::insert_into, result::Error};
use diesel_async::{AsyncPgConnection, RunQueryDsl};
impl LocalSiteUrlBlocklist {
pub async fn replace(pool: &mut DbPool<'_>, url_blocklist: Vec<String>) -> Result<(), Error> {
let conn = &mut get_conn(pool).await?;
conn
.build_transaction()
.run(|conn| {
Box::pin(async move {
use crate::schema::local_site_url_blocklist::dsl::local_site_url_blocklist;
Self::clear(conn).await?;
let forms = url_blocklist
.into_iter()
.map(|url| LocalSiteUrlBlocklistForm { url, updated: None })
.collect::<Vec<_>>();
insert_into(local_site_url_blocklist)
.values(forms)
.execute(conn)
.await?;
Ok(())
}) as _
})
.await
}
async fn clear(conn: &mut AsyncPgConnection) -> Result<usize, Error> {
diesel::delete(local_site_url_blocklist::table)
.execute(conn)
.await
}
pub async fn get_all(pool: &mut DbPool<'_>) -> Result<Vec<Self>, Error> {
let conn = &mut get_conn(pool).await?;
local_site_url_blocklist::table
.get_results::<Self>(conn)
.await
}
}

View file

@ -1,12 +1,11 @@
use crate::{
newtypes::{DbUrl, LocalUserId, PersonId},
newtypes::{DbUrl, LanguageId, LocalUserId, PersonId},
schema::{local_user, person, registration_application},
source::{
actor_language::{LocalUserLanguage, SiteLanguage},
actor_language::LocalUserLanguage,
local_user::{LocalUser, LocalUserInsertForm, LocalUserUpdateForm},
local_user_vote_display_mode::{LocalUserVoteDisplayMode, LocalUserVoteDisplayModeInsertForm},
},
traits::Crud,
utils::{
functions::{coalesce, lower},
get_conn,
@ -25,6 +24,52 @@ use diesel::{
use diesel_async::RunQueryDsl;
impl LocalUser {
pub async fn create(
pool: &mut DbPool<'_>,
form: &LocalUserInsertForm,
languages: Vec<LanguageId>,
) -> Result<LocalUser, Error> {
let conn = &mut get_conn(pool).await?;
let mut form_with_encrypted_password = form.clone();
let password_hash =
hash(&form.password_encrypted, DEFAULT_COST).expect("Couldn't hash password");
form_with_encrypted_password.password_encrypted = password_hash;
let local_user_ = insert_into(local_user::table)
.values(form_with_encrypted_password)
.get_result::<Self>(conn)
.await?;
LocalUserLanguage::update(pool, languages, local_user_.id).await?;
// Create their vote_display_modes
let vote_display_mode_form = LocalUserVoteDisplayModeInsertForm::builder()
.local_user_id(local_user_.id)
.build();
LocalUserVoteDisplayMode::create(pool, &vote_display_mode_form).await?;
Ok(local_user_)
}
pub async fn update(
pool: &mut DbPool<'_>,
local_user_id: LocalUserId,
form: &LocalUserUpdateForm,
) -> Result<LocalUser, Error> {
let conn = &mut get_conn(pool).await?;
diesel::update(local_user::table.find(local_user_id))
.set(form)
.get_result::<Self>(conn)
.await
}
pub async fn delete(pool: &mut DbPool<'_>, id: LocalUserId) -> Result<usize, Error> {
let conn = &mut *get_conn(pool).await?;
diesel::delete(local_user::table.find(id))
.execute(conn)
.await
}
pub async fn update_password(
pool: &mut DbPool<'_>,
local_user_id: LocalUserId,
@ -183,52 +228,3 @@ pub struct UserBackupLists {
pub blocked_users: Vec<DbUrl>,
pub blocked_instances: Vec<String>,
}
#[async_trait]
impl Crud for LocalUser {
type InsertForm = LocalUserInsertForm;
type UpdateForm = LocalUserUpdateForm;
type IdType = LocalUserId;
async fn create(pool: &mut DbPool<'_>, form: &Self::InsertForm) -> Result<Self, Error> {
let conn = &mut get_conn(pool).await?;
let mut form_with_encrypted_password = form.clone();
let password_hash =
hash(&form.password_encrypted, DEFAULT_COST).expect("Couldn't hash password");
form_with_encrypted_password.password_encrypted = password_hash;
let local_user_ = insert_into(local_user::table)
.values(form_with_encrypted_password)
.get_result::<Self>(conn)
.await?;
let site_languages = SiteLanguage::read_local_raw(pool).await;
if let Ok(langs) = site_languages {
// if site exists, init user with site languages
LocalUserLanguage::update(pool, langs, local_user_.id).await?;
} else {
// otherwise, init with all languages (this only happens during tests and
// for first admin user, which is created before site)
LocalUserLanguage::update(pool, vec![], local_user_.id).await?;
}
// Create their vote_display_modes
let vote_display_mode_form = LocalUserVoteDisplayModeInsertForm::builder()
.local_user_id(local_user_.id)
.build();
LocalUserVoteDisplayMode::create(pool, &vote_display_mode_form).await?;
Ok(local_user_)
}
async fn update(
pool: &mut DbPool<'_>,
local_user_id: LocalUserId,
form: &Self::UpdateForm,
) -> Result<Self, Error> {
let conn = &mut get_conn(pool).await?;
diesel::update(local_user::table.find(local_user_id))
.set(form)
.get_result::<Self>(conn)
.await
}
}

View file

@ -17,6 +17,7 @@ pub mod instance_block;
pub mod language;
pub mod local_site;
pub mod local_site_rate_limit;
pub mod local_site_url_blocklist;
pub mod local_user;
pub mod local_user_vote_display_mode;
pub mod login_token;

View file

@ -465,9 +465,9 @@ impl Crud for AdminPurgeComment {
}
#[cfg(test)]
#[allow(clippy::unwrap_used)]
#[allow(clippy::indexing_slicing)]
mod tests {
#![allow(clippy::unwrap_used)]
#![allow(clippy::indexing_slicing)]
use crate::{
source::{

View file

@ -81,9 +81,9 @@ impl PasswordResetRequest {
}
#[cfg(test)]
#[allow(clippy::unwrap_used)]
#[allow(clippy::indexing_slicing)]
mod tests {
#![allow(clippy::unwrap_used)]
#![allow(clippy::indexing_slicing)]
use crate::{
source::{
@ -121,7 +121,9 @@ mod tests {
.password_encrypted("pass".to_string())
.build();
let inserted_local_user = LocalUser::create(pool, &new_local_user).await.unwrap();
let inserted_local_user = LocalUser::create(pool, &new_local_user, vec![])
.await
.unwrap();
let token = "nope";

View file

@ -212,9 +212,9 @@ impl PersonFollower {
}
#[cfg(test)]
#[allow(clippy::unwrap_used)]
#[allow(clippy::indexing_slicing)]
mod tests {
#![allow(clippy::unwrap_used)]
#![allow(clippy::indexing_slicing)]
use crate::{
source::{

View file

@ -1,6 +1,6 @@
use crate::{
newtypes::{CommentId, PersonId, PersonMentionId},
schema::person_mention::dsl::{comment_id, person_mention, read, recipient_id},
schema::person_mention,
source::person_mention::{PersonMention, PersonMentionInsertForm, PersonMentionUpdateForm},
traits::Crud,
utils::{get_conn, DbPool},
@ -21,9 +21,9 @@ impl Crud for PersonMention {
let conn = &mut get_conn(pool).await?;
// since the return here isn't utilized, we don't need to do an update
// but get_result doesn't return the existing row here
insert_into(person_mention)
insert_into(person_mention::table)
.values(person_mention_form)
.on_conflict((recipient_id, comment_id))
.on_conflict((person_mention::recipient_id, person_mention::comment_id))
.do_update()
.set(person_mention_form)
.get_result::<Self>(conn)
@ -36,7 +36,7 @@ impl Crud for PersonMention {
person_mention_form: &Self::UpdateForm,
) -> Result<Self, Error> {
let conn = &mut get_conn(pool).await?;
diesel::update(person_mention.find(person_mention_id))
diesel::update(person_mention::table.find(person_mention_id))
.set(person_mention_form)
.get_result::<Self>(conn)
.await
@ -50,11 +50,11 @@ impl PersonMention {
) -> Result<Vec<PersonMention>, Error> {
let conn = &mut get_conn(pool).await?;
diesel::update(
person_mention
.filter(recipient_id.eq(for_recipient_id))
.filter(read.eq(false)),
person_mention::table
.filter(person_mention::recipient_id.eq(for_recipient_id))
.filter(person_mention::read.eq(false)),
)
.set(read.eq(true))
.set(person_mention::read.eq(true))
.get_results::<Self>(conn)
.await
}
@ -65,18 +65,18 @@ impl PersonMention {
for_recipient_id: PersonId,
) -> Result<Self, Error> {
let conn = &mut get_conn(pool).await?;
person_mention
.filter(comment_id.eq(for_comment_id))
.filter(recipient_id.eq(for_recipient_id))
person_mention::table
.filter(person_mention::comment_id.eq(for_comment_id))
.filter(person_mention::recipient_id.eq(for_recipient_id))
.first::<Self>(conn)
.await
}
}
#[cfg(test)]
#[allow(clippy::unwrap_used)]
#[allow(clippy::indexing_slicing)]
mod tests {
#![allow(clippy::unwrap_used)]
#![allow(clippy::indexing_slicing)]
use crate::{
source::{

View file

@ -27,7 +27,7 @@ use crate::{
},
};
use ::url::Url;
use chrono::{Duration, Utc};
use chrono::Utc;
use diesel::{dsl::insert_into, result::Error, ExpressionMethods, QueryDsl, TextExpressionMethods};
use diesel_async::RunQueryDsl;
use std::collections::HashSet;
@ -104,7 +104,9 @@ impl Post {
.filter(post::local.eq(true))
.filter(post::deleted.eq(false))
.filter(post::removed.eq(false))
.filter(post::published.ge(Utc::now().naive_utc() - Duration::days(SITEMAP_DAYS)))
.filter(
post::published.ge(Utc::now().naive_utc() - SITEMAP_DAYS.expect("TimeDelta out of bounds")),
)
.order(post::published.desc())
.limit(SITEMAP_LIMIT)
.load::<(DbUrl, chrono::DateTime<Utc>)>(conn)
@ -360,9 +362,9 @@ impl PostHide {
}
#[cfg(test)]
#[allow(clippy::unwrap_used)]
#[allow(clippy::indexing_slicing)]
mod tests {
#![allow(clippy::unwrap_used)]
#![allow(clippy::indexing_slicing)]
use crate::{
source::{

View file

@ -80,9 +80,9 @@ impl Reportable for PostReport {
}
#[cfg(test)]
#[allow(clippy::unwrap_used)]
#[allow(clippy::indexing_slicing)]
mod tests {
#![allow(clippy::unwrap_used)]
#![allow(clippy::indexing_slicing)]
use super::*;
use crate::{

View file

@ -74,9 +74,9 @@ impl PrivateMessage {
}
#[cfg(test)]
#[allow(clippy::unwrap_used)]
#[allow(clippy::indexing_slicing)]
mod tests {
#![allow(clippy::unwrap_used)]
#![allow(clippy::indexing_slicing)]
use crate::{
source::{

View file

@ -409,6 +409,15 @@ diesel::table! {
}
}
diesel::table! {
local_site_url_blocklist (id) {
id -> Int4,
url -> Text,
published -> Timestamptz,
updated -> Nullable<Timestamptz>,
}
}
diesel::table! {
use diesel::sql_types::*;
use super::sql_types::SortTypeEnum;
@ -1052,6 +1061,7 @@ diesel::allow_tables_to_appear_in_same_query!(
local_image,
local_site,
local_site_rate_limit,
local_site_url_blocklist,
local_user,
local_user_language,
local_user_vote_display_mode,

View file

@ -0,0 +1,28 @@
#[cfg(feature = "full")]
use crate::schema::local_site_url_blocklist;
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use serde_with::skip_serializing_none;
#[cfg(feature = "full")]
use ts_rs::TS;
#[skip_serializing_none]
#[derive(Clone, PartialEq, Eq, Debug, Serialize, Deserialize)]
#[cfg_attr(feature = "full", derive(Queryable, Selectable, Identifiable, TS))]
#[cfg_attr(feature = "full", diesel(table_name = local_site_url_blocklist))]
#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))]
#[cfg_attr(feature = "full", ts(export))]
pub struct LocalSiteUrlBlocklist {
pub id: i32,
pub url: String,
pub published: DateTime<Utc>,
pub updated: Option<DateTime<Utc>>,
}
#[derive(Default, Clone)]
#[cfg_attr(feature = "full", derive(Insertable, AsChangeset))]
#[cfg_attr(feature = "full", diesel(table_name = local_site_url_blocklist))]
pub struct LocalSiteUrlBlocklistForm {
pub url: String,
pub updated: Option<DateTime<Utc>>,
}

View file

@ -22,6 +22,7 @@ pub mod instance_block;
pub mod language;
pub mod local_site;
pub mod local_site_rate_limit;
pub mod local_site_url_blocklist;
pub mod local_user;
pub mod local_user_vote_display_mode;
pub mod login_token;

View file

@ -6,7 +6,7 @@ use crate::{
SortType,
};
use anyhow::Context;
use chrono::{DateTime, Utc};
use chrono::{DateTime, TimeDelta, Utc};
use deadpool::Runtime;
use diesel::{
helper_types::AsExprOf,
@ -51,7 +51,7 @@ use url::Url;
const FETCH_LIMIT_DEFAULT: i64 = 10;
pub const FETCH_LIMIT_MAX: i64 = 50;
pub const SITEMAP_LIMIT: i64 = 50000;
pub const SITEMAP_DAYS: i64 = 31;
pub const SITEMAP_DAYS: Option<TimeDelta> = TimeDelta::try_days(31);
pub const RANK_DEFAULT: f64 = 0.0001;
pub type ActualDbPool = Pool<AsyncPgConnection>;
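
Editor's note: SITEMAP_DAYS switches from chrono::Duration::days(31), which panics on an out-of-range value, to the fallible TimeDelta constructor, so the constant now holds an Option that call sites unwrap with expect. A minimal runnable sketch of the same pattern (only chrono is assumed; the cutoff computation stands in for the real sitemap query):

use chrono::{TimeDelta, Utc};

// `TimeDelta::try_days` is a const fn returning None if the value would overflow.
const SITEMAP_DAYS: Option<TimeDelta> = TimeDelta::try_days(31);

fn main() {
  let cutoff = Utc::now().naive_utc() - SITEMAP_DAYS.expect("TimeDelta out of bounds");
  println!("sitemap includes posts published after {cutoff}");
}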
@ -536,9 +536,9 @@ impl<RF, LF> Queries<RF, LF> {
}
#[cfg(test)]
#[allow(clippy::unwrap_used)]
#[allow(clippy::indexing_slicing)]
mod tests {
#![allow(clippy::unwrap_used)]
#![allow(clippy::indexing_slicing)]
use super::*;
use pretty_assertions::assert_eq;

View file

@ -258,9 +258,9 @@ impl CommentReportQuery {
}
#[cfg(test)]
#[allow(clippy::unwrap_used)]
#[allow(clippy::indexing_slicing)]
mod tests {
#![allow(clippy::unwrap_used)]
#![allow(clippy::indexing_slicing)]
use crate::{
comment_report_view::{CommentReportQuery, CommentReportView},
@ -308,7 +308,9 @@ mod tests {
.person_id(inserted_timmy.id)
.password_encrypted("123".to_string())
.build();
let timmy_local_user = LocalUser::create(pool, &new_local_user).await.unwrap();
let timmy_local_user = LocalUser::create(pool, &new_local_user, vec![])
.await
.unwrap();
let timmy_view = LocalUserView {
local_user: timmy_local_user,
local_user_vote_display_mode: LocalUserVoteDisplayMode::default(),

View file

@ -53,6 +53,16 @@ fn queries<'a>() -> Queries<
),
);
let is_local_user_banned_from_community = |person_id| {
exists(
community_person_ban::table.filter(
community::id
.eq(community_person_ban::community_id)
.and(community_person_ban::person_id.eq(person_id)),
),
)
};
let is_saved = |person_id| {
comment_saved::table
.filter(
@ -113,6 +123,14 @@ fn queries<'a>() -> Queries<
);
let all_joins = move |query: comment::BoxedQuery<'a, Pg>, my_person_id: Option<PersonId>| {
let is_local_user_banned_from_community_selection: Box<
dyn BoxableExpression<_, Pg, SqlType = sql_types::Bool>,
> = if let Some(person_id) = my_person_id {
Box::new(is_local_user_banned_from_community(person_id))
} else {
Box::new(false.into_sql::<sql_types::Bool>())
};
let score_selection: Box<
dyn BoxableExpression<_, Pg, SqlType = sql_types::Nullable<sql_types::SmallInt>>,
> = if let Some(person_id) = my_person_id {
@ -156,6 +174,7 @@ fn queries<'a>() -> Queries<
community::all_columns,
comment_aggregates::all_columns,
is_creator_banned_from_community,
is_local_user_banned_from_community_selection,
creator_is_moderator,
creator_is_admin,
subscribed_type_selection,
@ -407,9 +426,9 @@ impl<'a> CommentQuery<'a> {
}
#[cfg(test)]
#[allow(clippy::unwrap_used)]
#[allow(clippy::indexing_slicing)]
mod tests {
#![allow(clippy::unwrap_used)]
#![allow(clippy::indexing_slicing)]
use crate::{
comment_view::{CommentQuery, CommentSortType, CommentView, DbPool},
@ -436,6 +455,8 @@ mod tests {
CommunityInsertForm,
CommunityModerator,
CommunityModeratorForm,
CommunityPersonBan,
CommunityPersonBanForm,
CommunityUpdateForm,
},
instance::Instance,
@ -446,11 +467,12 @@ mod tests {
person_block::{PersonBlock, PersonBlockForm},
post::{Post, PostInsertForm},
},
traits::{Blockable, Crud, Joinable, Likeable, Saveable},
traits::{Bannable, Blockable, Crud, Joinable, Likeable, Saveable},
utils::{build_db_pool_for_tests, RANK_DEFAULT},
CommunityVisibility,
SubscribedType,
};
use lemmy_utils::error::LemmyResult;
use pretty_assertions::assert_eq;
use serial_test::serial;
@ -481,7 +503,7 @@ mod tests {
.admin(Some(true))
.password_encrypted(String::new())
.build();
let inserted_timmy_local_user = LocalUser::create(pool, &timmy_local_user_form)
let inserted_timmy_local_user = LocalUser::create(pool, &timmy_local_user_form, vec![])
.await
.unwrap();
@ -633,12 +655,12 @@ mod tests {
#[tokio::test]
#[serial]
async fn test_crud() {
async fn test_crud() -> LemmyResult<()> {
let pool = &build_db_pool_for_tests().await;
let pool = &mut pool.into();
let data = init_data(pool).await;
let expected_comment_view_no_person = expected_comment_view(&data, pool).await;
let expected_comment_view_no_person = expected_comment_view(&data, pool).await?;
let mut expected_comment_view_with_person = expected_comment_view_no_person.clone();
expected_comment_view_with_person.my_vote = Some(1);
@ -649,8 +671,7 @@ mod tests {
..Default::default()
}
.list(pool)
.await
.unwrap();
.await?;
assert_eq!(
expected_comment_view_no_person,
@ -664,8 +685,7 @@ mod tests {
..Default::default()
}
.list(pool)
.await
.unwrap();
.await?;
assert_eq!(
expected_comment_view_with_person,
@ -680,8 +700,7 @@ mod tests {
data.inserted_comment_1.id,
Some(data.timmy_local_user_view.person.id),
)
.await
.unwrap();
.await?;
// Make sure the block set creator_blocked
assert!(read_comment_from_blocked_person.creator_blocked);
@ -692,8 +711,7 @@ mod tests {
..Default::default()
}
.list(pool)
.await
.unwrap();
.await?;
assert_eq!(
expected_comment_view_with_person,
@ -708,17 +726,16 @@ mod tests {
..Default::default()
}
.list(pool)
.await
.unwrap();
.await?;
assert!(read_disliked_comment_views.is_empty());
cleanup(data, pool).await;
cleanup(data, pool).await
}
#[tokio::test]
#[serial]
async fn test_comment_tree() {
async fn test_comment_tree() -> LemmyResult<()> {
let pool = &build_db_pool_for_tests().await;
let pool = &mut pool.into();
let data = init_data(pool).await;
@ -730,8 +747,7 @@ mod tests {
..Default::default()
}
.list(pool)
.await
.unwrap();
.await?;
let child_path = data.inserted_comment_1.path.clone();
let read_comment_views_child_path = CommentQuery {
@ -740,8 +756,7 @@ mod tests {
..Default::default()
}
.list(pool)
.await
.unwrap();
.await?;
// Make sure the comment parent-limited fetch is correct
assert_length!(6, read_comment_views_top_path);
@ -761,12 +776,11 @@ mod tests {
..Default::default()
}
.list(pool)
.await
.unwrap();
.await?;
// Make sure a depth-limited query only has the top comment
assert_eq!(
expected_comment_view(&data, pool).await,
expected_comment_view(&data, pool).await?,
read_comment_views_top_max_depth[0]
);
assert_length!(1, read_comment_views_top_max_depth);
@ -780,8 +794,7 @@ mod tests {
..Default::default()
}
.list(pool)
.await
.unwrap();
.await?;
// Make sure a depth-limited query, given child comment 1, has 3 comments
assert!(read_comment_views_parent_max_depth[2]
@ -790,12 +803,12 @@ mod tests {
.eq("Comment 3"));
assert_length!(3, read_comment_views_parent_max_depth);
cleanup(data, pool).await;
cleanup(data, pool).await
}
#[tokio::test]
#[serial]
async fn test_languages() {
async fn test_languages() -> LemmyResult<()> {
let pool = &build_db_pool_for_tests().await;
let pool = &mut pool.into();
let data = init_data(pool).await;
@ -807,29 +820,25 @@ mod tests {
..Default::default()
}
.list(pool)
.await
.unwrap();
.await?;
assert_length!(5, all_languages);
// change user lang to Finnish; should only show one comment in Finnish and one undetermined
let finnish_id = Language::read_id_from_code(pool, Some("fi"))
.await
.unwrap()
.await?
.unwrap();
LocalUserLanguage::update(
pool,
vec![finnish_id],
data.timmy_local_user_view.local_user.id,
)
.await
.unwrap();
.await?;
let finnish_comments = CommentQuery {
local_user: (Some(&data.timmy_local_user_view)),
..Default::default()
}
.list(pool)
.await
.unwrap();
.await?;
assert_length!(2, finnish_comments);
let finnish_comment = finnish_comments
.iter()
@ -846,23 +855,21 @@ mod tests {
vec![UNDETERMINED_ID],
data.timmy_local_user_view.local_user.id,
)
.await
.unwrap();
.await?;
let undetermined_comment = CommentQuery {
local_user: (Some(&data.timmy_local_user_view)),
..Default::default()
}
.list(pool)
.await
.unwrap();
.await?;
assert_length!(1, undetermined_comment);
cleanup(data, pool).await;
cleanup(data, pool).await
}
#[tokio::test]
#[serial]
async fn test_distinguished_first() {
async fn test_distinguished_first() -> LemmyResult<()> {
let pool = &build_db_pool_for_tests().await;
let pool = &mut pool.into();
let data = init_data(pool).await;
@ -871,26 +878,23 @@ mod tests {
distinguished: Some(true),
..Default::default()
};
Comment::update(pool, data.inserted_comment_2.id, &form)
.await
.unwrap();
Comment::update(pool, data.inserted_comment_2.id, &form).await?;
let comments = CommentQuery {
post_id: Some(data.inserted_comment_2.post_id),
..Default::default()
}
.list(pool)
.await
.unwrap();
.await?;
assert_eq!(comments[0].comment.id, data.inserted_comment_2.id);
assert!(comments[0].comment.distinguished);
cleanup(data, pool).await;
cleanup(data, pool).await
}
#[tokio::test]
#[serial]
async fn test_creator_is_moderator() {
async fn test_creator_is_moderator() -> LemmyResult<()> {
let pool = &build_db_pool_for_tests().await;
let pool = &mut pool.into();
let data = init_data(pool).await;
@ -902,7 +906,7 @@ mod tests {
community_id,
person_id,
};
CommunityModerator::join(pool, &form).await.unwrap();
CommunityModerator::join(pool, &form).await?;
// Make sure that they come back as a mod in the list
let comments = CommentQuery {
@ -910,19 +914,18 @@ mod tests {
..Default::default()
}
.list(pool)
.await
.unwrap();
.await?;
assert_eq!(comments[1].creator.name, "sara");
assert!(comments[1].creator_is_moderator);
assert!(!comments[0].creator_is_moderator);
cleanup(data, pool).await;
cleanup(data, pool).await
}
#[tokio::test]
#[serial]
async fn test_creator_is_admin() {
async fn test_creator_is_admin() -> LemmyResult<()> {
let pool = &build_db_pool_for_tests().await;
let pool = &mut pool.into();
let data = init_data(pool).await;
@ -932,8 +935,7 @@ mod tests {
..Default::default()
}
.list(pool)
.await
.unwrap();
.await?;
// Timmy is an admin; make sure that field is true
assert_eq!(comments[0].creator.name, "timmy");
@ -943,12 +945,12 @@ mod tests {
assert_eq!(comments[1].creator.name, "sara");
assert!(!comments[1].creator_is_admin);
cleanup(data, pool).await;
cleanup(data, pool).await
}
#[tokio::test]
#[serial]
async fn test_saved_order() {
async fn test_saved_order() -> LemmyResult<()> {
let pool = &build_db_pool_for_tests().await;
let pool = &mut pool.into();
let data = init_data(pool).await;
@ -958,17 +960,13 @@ mod tests {
person_id: data.timmy_local_user_view.person.id,
comment_id: data.inserted_comment_0.id,
};
CommentSaved::save(pool, &save_comment_0_form)
.await
.unwrap();
CommentSaved::save(pool, &save_comment_0_form).await?;
let save_comment_2_form = CommentSavedForm {
person_id: data.timmy_local_user_view.person.id,
comment_id: data.inserted_comment_2.id,
};
CommentSaved::save(pool, &save_comment_2_form)
.await
.unwrap();
CommentSaved::save(pool, &save_comment_2_form).await?;
// Fetch the saved comments
let comments = CommentQuery {
@ -977,8 +975,7 @@ mod tests {
..Default::default()
}
.list(pool)
.await
.unwrap();
.await?;
// There should only be two comments
assert_eq!(2, comments.len());
@ -989,47 +986,33 @@ mod tests {
// The second comment, should be the first one saved
assert_eq!(comments[1].comment.id, data.inserted_comment_0.id);
cleanup(data, pool).await;
cleanup(data, pool).await
}
async fn cleanup(data: Data, pool: &mut DbPool<'_>) {
async fn cleanup(data: Data, pool: &mut DbPool<'_>) -> LemmyResult<()> {
CommentLike::remove(
pool,
data.timmy_local_user_view.person.id,
data.inserted_comment_0.id,
)
.await
.unwrap();
Comment::delete(pool, data.inserted_comment_0.id)
.await
.unwrap();
Comment::delete(pool, data.inserted_comment_1.id)
.await
.unwrap();
Post::delete(pool, data.inserted_post.id).await.unwrap();
Community::delete(pool, data.inserted_community.id)
.await
.unwrap();
Person::delete(pool, data.timmy_local_user_view.person.id)
.await
.unwrap();
LocalUser::delete(pool, data.timmy_local_user_view.local_user.id)
.await
.unwrap();
Person::delete(pool, data.inserted_sara_person.id)
.await
.unwrap();
Instance::delete(pool, data.inserted_instance.id)
.await
.unwrap();
.await?;
Comment::delete(pool, data.inserted_comment_0.id).await?;
Comment::delete(pool, data.inserted_comment_1.id).await?;
Post::delete(pool, data.inserted_post.id).await?;
Community::delete(pool, data.inserted_community.id).await?;
Person::delete(pool, data.timmy_local_user_view.person.id).await?;
LocalUser::delete(pool, data.timmy_local_user_view.local_user.id).await?;
Person::delete(pool, data.inserted_sara_person.id).await?;
Instance::delete(pool, data.inserted_instance.id).await?;
Ok(())
}
async fn expected_comment_view(data: &Data, pool: &mut DbPool<'_>) -> CommentView {
let agg = CommentAggregates::read(pool, data.inserted_comment_0.id)
.await
.unwrap();
CommentView {
async fn expected_comment_view(data: &Data, pool: &mut DbPool<'_>) -> LemmyResult<CommentView> {
let agg = CommentAggregates::read(pool, data.inserted_comment_0.id).await?;
Ok(CommentView {
creator_banned_from_community: false,
banned_from_community: false,
creator_is_moderator: false,
creator_is_admin: true,
my_vote: None,
@ -1136,12 +1119,12 @@ mod tests {
hot_rank: RANK_DEFAULT,
controversy_rank: 0.0,
},
}
})
}
#[tokio::test]
#[serial]
async fn local_only_instance() {
async fn local_only_instance() -> LemmyResult<()> {
let pool = &build_db_pool_for_tests().await;
let pool = &mut pool.into();
let data = init_data(pool).await;
@ -1154,15 +1137,13 @@ mod tests {
..Default::default()
},
)
.await
.unwrap();
.await?;
let unauthenticated_query = CommentQuery {
..Default::default()
}
.list(pool)
.await
.unwrap();
.await?;
assert_eq!(0, unauthenticated_query.len());
let authenticated_query = CommentQuery {
@ -1170,8 +1151,7 @@ mod tests {
..Default::default()
}
.list(pool)
.await
.unwrap();
.await?;
assert_eq!(5, authenticated_query.len());
let unauthenticated_comment = CommentView::read(pool, data.inserted_comment_0.id, None).await;
@ -1185,6 +1165,67 @@ mod tests {
.await;
assert!(authenticated_comment.is_ok());
cleanup(data, pool).await;
cleanup(data, pool).await
}
#[tokio::test]
#[serial]
async fn comment_listing_local_user_banned_from_community() -> LemmyResult<()> {
let pool = &build_db_pool_for_tests().await;
let pool = &mut pool.into();
let data = init_data(pool).await;
// Test that the comment view shows if the local user is banned from the community
let banned_from_comm_person = PersonInsertForm::test_form(data.inserted_instance.id, "jill");
let inserted_banned_from_comm_person = Person::create(pool, &banned_from_comm_person).await?;
let inserted_banned_from_comm_local_user = LocalUser::create(
pool,
&LocalUserInsertForm::test_form(inserted_banned_from_comm_person.id),
vec![],
)
.await?;
CommunityPersonBan::ban(
pool,
&CommunityPersonBanForm {
community_id: data.inserted_community.id,
person_id: inserted_banned_from_comm_person.id,
expires: None,
},
)
.await?;
let comment_view = CommentView::read(
pool,
data.inserted_comment_0.id,
Some(inserted_banned_from_comm_local_user.person_id),
)
.await?;
assert!(comment_view.banned_from_community);
Person::delete(pool, inserted_banned_from_comm_person.id).await?;
cleanup(data, pool).await
}
#[tokio::test]
#[serial]
async fn comment_listing_local_user_not_banned_from_community() -> LemmyResult<()> {
let pool = &build_db_pool_for_tests().await;
let pool = &mut pool.into();
let data = init_data(pool).await;
let comment_view = CommentView::read(
pool,
data.inserted_comment_0.id,
Some(data.timmy_local_user_view.person.id),
)
.await?;
assert!(!comment_view.banned_from_community);
cleanup(data, pool).await
}
}

View file

@ -77,7 +77,7 @@ impl CustomEmojiView {
}
for emoji in &mut result {
if let Some(keywords) = hash.get_mut(&emoji.custom_emoji.id) {
emoji.keywords = keywords.clone();
emoji.keywords.clone_from(keywords);
}
}
result

View file

@ -283,9 +283,9 @@ impl PostReportQuery {
}
#[cfg(test)]
#[allow(clippy::unwrap_used)]
#[allow(clippy::indexing_slicing)]
mod tests {
#![allow(clippy::unwrap_used)]
#![allow(clippy::indexing_slicing)]
use crate::{
post_report_view::{PostReportQuery, PostReportView},
@ -330,7 +330,9 @@ mod tests {
.person_id(inserted_timmy.id)
.password_encrypted("123".to_string())
.build();
let timmy_local_user = LocalUser::create(pool, &new_local_user).await.unwrap();
let timmy_local_user = LocalUser::create(pool, &new_local_user, vec![])
.await
.unwrap();
let timmy_view = LocalUserView {
local_user: timmy_local_user,
local_user_vote_display_mode: LocalUserVoteDisplayMode::default(),

View file

@ -73,6 +73,17 @@ fn queries<'a>() -> Queries<
.and(community_person_ban::person_id.eq(post_aggregates::creator_id)),
),
);
let is_local_user_banned_from_community = |person_id| {
exists(
community_person_ban::table.filter(
post_aggregates::community_id
.eq(community_person_ban::community_id)
.and(community_person_ban::person_id.eq(person_id)),
),
)
};
let creator_is_moderator = exists(
community_moderator::table.filter(
post_aggregates::community_id
@ -143,6 +154,14 @@ fn queries<'a>() -> Queries<
let all_joins = move |query: post_aggregates::BoxedQuery<'a, Pg>,
my_person_id: Option<PersonId>| {
let is_local_user_banned_from_community_selection: Box<
dyn BoxableExpression<_, Pg, SqlType = sql_types::Bool>,
> = if let Some(person_id) = my_person_id {
Box::new(is_local_user_banned_from_community(person_id))
} else {
Box::new(false.into_sql::<sql_types::Bool>())
};
let is_saved_selection: Box<
dyn BoxableExpression<_, Pg, SqlType = sql_types::Nullable<sql_types::Timestamptz>>,
> = if let Some(person_id) = my_person_id {
@ -223,6 +242,7 @@ fn queries<'a>() -> Queries<
person::all_columns,
community::all_columns,
is_creator_banned_from_community,
is_local_user_banned_from_community_selection,
creator_is_moderator,
creator_is_admin,
post_aggregates::all_columns,
@ -742,6 +762,8 @@ mod tests {
CommunityInsertForm,
CommunityModerator,
CommunityModeratorForm,
CommunityPersonBan,
CommunityPersonBanForm,
CommunityUpdateForm,
},
community_block::{CommunityBlock, CommunityBlockForm},
@ -755,7 +777,7 @@ mod tests {
post::{Post, PostHide, PostInsertForm, PostLike, PostLikeForm, PostRead, PostUpdateForm},
site::Site,
},
traits::{Blockable, Crud, Joinable, Likeable},
traits::{Bannable, Blockable, Crud, Joinable, Likeable},
utils::{build_db_pool, build_db_pool_for_tests, DbPool, RANK_DEFAULT},
CommunityVisibility,
SortType,
@ -807,7 +829,7 @@ mod tests {
admin: Some(true),
..LocalUserInsertForm::test_form(inserted_person.id)
};
let inserted_local_user = LocalUser::create(pool, &local_user_form).await?;
let inserted_local_user = LocalUser::create(pool, &local_user_form, vec![]).await?;
let new_bot = PersonInsertForm {
bot_account: Some(true),
@ -833,6 +855,7 @@ mod tests {
let inserted_blocked_local_user = LocalUser::create(
pool,
&LocalUserInsertForm::test_form(inserted_blocked_person.id),
vec![],
)
.await?;
@ -1604,6 +1627,7 @@ mod tests {
last_refreshed_at: inserted_person.last_refreshed_at,
},
creator_banned_from_community: false,
banned_from_community: false,
creator_is_moderator: false,
creator_is_admin: true,
community: Community {
@ -1707,4 +1731,67 @@ mod tests {
cleanup(data, pool).await?;
Ok(())
}
#[tokio::test]
#[serial]
async fn post_listing_local_user_banned_from_community() -> LemmyResult<()> {
let pool = &build_db_pool().await?;
let pool = &mut pool.into();
let data = init_data(pool).await?;
// Test that the post view shows if the local user is banned from the community
let banned_from_comm_person = PersonInsertForm::test_form(data.inserted_instance.id, "jill");
let inserted_banned_from_comm_person = Person::create(pool, &banned_from_comm_person).await?;
let inserted_banned_from_comm_local_user = LocalUser::create(
pool,
&LocalUserInsertForm::test_form(inserted_banned_from_comm_person.id),
vec![],
)
.await?;
CommunityPersonBan::ban(
pool,
&CommunityPersonBanForm {
community_id: data.inserted_community.id,
person_id: inserted_banned_from_comm_person.id,
expires: None,
},
)
.await?;
let post_view = PostView::read(
pool,
data.inserted_post.id,
Some(inserted_banned_from_comm_local_user.person_id),
false,
)
.await?;
assert!(post_view.banned_from_community);
Person::delete(pool, inserted_banned_from_comm_person.id).await?;
cleanup(data, pool).await
}
#[tokio::test]
#[serial]
async fn post_listing_local_user_not_banned_from_community() -> LemmyResult<()> {
let pool = &build_db_pool().await?;
let pool = &mut pool.into();
let data = init_data(pool).await?;
let post_view = PostView::read(
pool,
data.inserted_post.id,
Some(data.local_user_view.person.id),
false,
)
.await?;
assert!(!post_view.banned_from_community);
cleanup(data, pool).await
}
}

View file

@ -106,9 +106,9 @@ impl PrivateMessageReportQuery {
}
#[cfg(test)]
#[allow(clippy::unwrap_used)]
#[allow(clippy::indexing_slicing)]
mod tests {
#![allow(clippy::unwrap_used)]
#![allow(clippy::indexing_slicing)]
use crate::private_message_report_view::PrivateMessageReportQuery;
use lemmy_db_schema::{

View file

@ -173,9 +173,9 @@ impl PrivateMessageQuery {
}
#[cfg(test)]
#[allow(clippy::unwrap_used)]
#[allow(clippy::indexing_slicing)]
mod tests {
#![allow(clippy::unwrap_used)]
#![allow(clippy::indexing_slicing)]
use crate::{private_message_view::PrivateMessageQuery, structs::PrivateMessageView};
use lemmy_db_schema::{

View file

@ -127,9 +127,9 @@ impl RegistrationApplicationQuery {
}
#[cfg(test)]
#[allow(clippy::unwrap_used)]
#[allow(clippy::indexing_slicing)]
mod tests {
#![allow(clippy::unwrap_used)]
#![allow(clippy::indexing_slicing)]
use crate::registration_application_view::{
RegistrationApplicationQuery,
@ -176,7 +176,7 @@ mod tests {
.admin(Some(true))
.build();
let _inserted_timmy_local_user = LocalUser::create(pool, &timmy_local_user_form)
let _inserted_timmy_local_user = LocalUser::create(pool, &timmy_local_user_form, vec![])
.await
.unwrap();
@ -193,7 +193,7 @@ mod tests {
.password_encrypted("nada".to_string())
.build();
let inserted_sara_local_user = LocalUser::create(pool, &sara_local_user_form)
let inserted_sara_local_user = LocalUser::create(pool, &sara_local_user_form, vec![])
.await
.unwrap();
@ -224,7 +224,7 @@ mod tests {
.password_encrypted("nada".to_string())
.build();
let inserted_jess_local_user = LocalUser::create(pool, &jess_local_user_form)
let inserted_jess_local_user = LocalUser::create(pool, &jess_local_user_form, vec![])
.await
.unwrap();

View file

@ -64,6 +64,7 @@ pub struct CommentView {
pub community: Community,
pub counts: CommentAggregates,
pub creator_banned_from_community: bool,
pub banned_from_community: bool,
pub creator_is_moderator: bool,
pub creator_is_admin: bool,
pub subscribed: SubscribedType,
@ -129,6 +130,7 @@ pub struct PostView {
pub creator: Person,
pub community: Community,
pub creator_banned_from_community: bool,
pub banned_from_community: bool,
pub creator_is_moderator: bool,
pub creator_is_admin: bool,
pub counts: PostAggregates,

View file

@ -50,9 +50,9 @@ impl VoteView {
}
#[cfg(test)]
#[allow(clippy::unwrap_used)]
#[allow(clippy::indexing_slicing)]
mod tests {
#![allow(clippy::unwrap_used)]
#![allow(clippy::indexing_slicing)]
use crate::structs::VoteView;
use lemmy_db_schema::{

View file

@ -250,9 +250,9 @@ impl<'a> CommunityQuery<'a> {
}
#[cfg(test)]
#[allow(clippy::unwrap_used)]
#[allow(clippy::indexing_slicing)]
mod tests {
#![allow(clippy::unwrap_used)]
#![allow(clippy::indexing_slicing)]
use crate::{community_view::CommunityQuery, structs::CommunityView};
use lemmy_db_schema::{
@ -296,7 +296,9 @@ mod tests {
.person_id(inserted_person.id)
.password_encrypted(String::new())
.build();
let local_user = LocalUser::create(pool, &local_user_form).await.unwrap();
let local_user = LocalUser::create(pool, &local_user_form, vec![])
.await
.unwrap();
let new_community = CommunityInsertForm::builder()
.name("test_community_3".to_string())

View file

@ -163,9 +163,9 @@ impl PersonQuery {
}
#[cfg(test)]
#[allow(clippy::unwrap_used)]
#[allow(clippy::indexing_slicing)]
mod tests {
#![allow(clippy::unwrap_used)]
#![allow(clippy::indexing_slicing)]
use super::*;
use diesel::NotFound;
@ -204,7 +204,7 @@ mod tests {
.person_id(alice.id)
.password_encrypted(String::new())
.build();
let alice_local_user = LocalUser::create(pool, &alice_local_user_form).await?;
let alice_local_user = LocalUser::create(pool, &alice_local_user_form, vec![]).await?;
let bob_form = PersonInsertForm::builder()
.name("bob".to_string())
@ -218,7 +218,7 @@ mod tests {
.person_id(bob.id)
.password_encrypted(String::new())
.build();
let bob_local_user = LocalUser::create(pool, &bob_local_user_form).await?;
let bob_local_user = LocalUser::create(pool, &bob_local_user_form, vec![]).await?;
Ok(Data {
alice,

View file

@ -46,17 +46,17 @@ static SAVE_STATE_EVERY_TIME: Duration = Duration::from_secs(60);
/// this delay limits the maximum time until the follow actually results in activities from that community id being sent to that inbox url.
/// This delay must not be too small, because DB load is currently fairly high: inboxes are stored for every person (there is no separate list of shared_inboxes), and every instance queue is fully separate.
/// (see https://github.com/LemmyNet/lemmy/issues/3958)
static FOLLOW_ADDITIONS_RECHECK_DELAY: Lazy<chrono::Duration> = Lazy::new(|| {
static FOLLOW_ADDITIONS_RECHECK_DELAY: Lazy<chrono::TimeDelta> = Lazy::new(|| {
if *LEMMY_TEST_FAST_FEDERATION {
chrono::Duration::seconds(1)
chrono::TimeDelta::try_seconds(1).expect("TimeDelta out of bounds")
} else {
chrono::Duration::minutes(2)
chrono::TimeDelta::try_minutes(2).expect("TimeDelta out of bounds")
}
});
/// The same as FOLLOW_ADDITIONS_RECHECK_DELAY, but triggering when the last person on an instance unfollows a specific remote community.
/// This is expected to happen pretty rarely and updating it in a timely manner is not too important.
static FOLLOW_REMOVALS_RECHECK_DELAY: Lazy<chrono::Duration> =
Lazy::new(|| chrono::Duration::hours(1));
static FOLLOW_REMOVALS_RECHECK_DELAY: Lazy<chrono::TimeDelta> =
Lazy::new(|| chrono::TimeDelta::try_hours(1).expect("TimeDelta out of bounds"));
pub(crate) struct InstanceWorker {
instance: Instance,
// load site lazily because if an instance is first seen due to being on allowlist,
@ -332,7 +332,8 @@ impl InstanceWorker {
instance_id: InstanceId,
last_fetch: DateTime<Utc>,
) -> Result<(HashMap<CommunityId, HashSet<Url>>, DateTime<Utc>)> {
let new_last_fetch = Utc::now() - chrono::Duration::seconds(10); // update to time before fetch to ensure overlap. subtract 10s to ensure overlap even if published date is not exact
let new_last_fetch =
Utc::now() - chrono::TimeDelta::try_seconds(10).expect("TimeDelta out of bounds"); // update to a time before the fetch to ensure overlap; subtract 10s so fetches overlap even if the published date is not exact
Ok((
CommunityFollowerView::get_instance_followed_community_inboxes(pool, instance_id, last_fetch)
.await?

View file

@ -74,11 +74,11 @@ uuid = { workspace = true, features = ["serde", "v4"], optional = true }
rosetta-i18n = { workspace = true, optional = true }
tokio = { workspace = true, optional = true }
urlencoding = { workspace = true, optional = true }
openssl = { version = "0.10.63", optional = true }
openssl = { version = "0.10.64", optional = true }
html2text = { version = "0.6.0", optional = true }
deser-hjson = { version = "2.2.4", optional = true }
smart-default = { version = "0.7.1", optional = true }
lettre = { version = "0.11.3", features = [
lettre = { version = "0.11.4", features = [
"tokio1",
"tokio1-native-tls",
], optional = true }

View file

@ -135,6 +135,7 @@ pub enum LemmyErrorType {
CouldntSetAllRegistrationsAccepted,
CouldntSetAllEmailVerified,
Banned,
BlockedUrl,
CouldntGetComments,
CouldntGetPosts,
InvalidUrl,

View file

@ -221,9 +221,9 @@ fn parse_ip(addr: &str) -> Option<IpAddr> {
}
#[cfg(test)]
#[allow(clippy::unwrap_used)]
#[allow(clippy::indexing_slicing)]
mod tests {
#![allow(clippy::unwrap_used)]
#![allow(clippy::indexing_slicing)]
#[test]
fn test_parse_ip() {

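A side note on the lint-attribute pattern repeated across the test modules in this commit: the clippy allows move from outer attributes on the mod item to inner attributes at the top of the module body, where they still apply to everything inside the module. A minimal sketch of the two forms (hypothetical module, not from the Lemmy source):

// Outer attributes, attached to the module item from the outside.
#[cfg(test)]
#[allow(clippy::unwrap_used)]
mod tests_outer {
  #[test]
  fn parses() {
    let n: i32 = "42".parse().unwrap();
    assert_eq!(n, 42);
  }
}

// Inner attributes, declared as the first items of the module body.
#[cfg(test)]
mod tests_inner {
  #![allow(clippy::unwrap_used)]

  #[test]
  fn parses() {
    let n: i32 = "42".parse().unwrap();
    assert_eq!(n, 42);
  }
}
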
View file

@@ -158,7 +158,7 @@ impl<K: Eq + Hash, C: MapLevel> MapLevel for Map<K, C> {
// Evaluated if `some_children_remaining` is false
let total_has_refill_in_future = || {
group.total.into_iter().all(|(action_type, bucket)| {
group.total.into_iter().any(|(action_type, bucket)| {
#[allow(clippy::indexing_slicing)]
let config = configs[action_type];
bucket.update(now, config).tokens != config.capacity
@@ -306,9 +306,9 @@ fn split_ipv6(ip: Ipv6Addr) -> ([u8; 6], u8, u8) {
}
#[cfg(test)]
#[allow(clippy::unwrap_used)]
#[allow(clippy::indexing_slicing)]
mod tests {
#![allow(clippy::unwrap_used)]
#![allow(clippy::indexing_slicing)]
use super::{ActionType, BucketConfig, InstantSecs, RateLimitState, RateLimitedGroup};
use pretty_assertions::assert_eq;
@@ -416,5 +416,23 @@ mod tests {
rate_limiter.remove_full_buckets(now);
assert!(rate_limiter.ipv4_buckets.is_empty());
assert!(rate_limiter.ipv6_buckets.is_empty());
// `remove full buckets` should not remove empty buckets
let ip = "1.1.1.1".parse().unwrap();
// empty the bucket with 2 requests
assert!(rate_limiter.check(ActionType::Post, ip, now));
assert!(rate_limiter.check(ActionType::Post, ip, now));
rate_limiter.remove_full_buckets(now);
assert!(!rate_limiter.ipv4_buckets.is_empty());
// `remove full buckets` should not remove partial buckets
now.secs += 2;
let ip = "1.1.1.1".parse().unwrap();
// Only make one request, so bucket still has 1 token
assert!(rate_limiter.check(ActionType::Post, ip, now));
rate_limiter.remove_full_buckets(now);
assert!(!rate_limiter.ipv4_buckets.is_empty());
}
}

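On the all to any change in total_has_refill_in_future above: the predicate decides whether a rate-limit group still holds state worth keeping, so it should be true as soon as at least one bucket is below capacity (a refill is still pending), not only when every bucket is. The new tests above exercise exactly that case. A self-contained sketch of the corrected predicate, with simplified types that are not the Lemmy ones:

#[derive(Clone, Copy)]
struct Bucket {
  tokens: u32,
}

#[derive(Clone, Copy)]
struct BucketConfig {
  capacity: u32,
}

// True if any bucket is not full, i.e. a refill is still pending for the group.
fn has_refill_in_future(buckets: &[(Bucket, BucketConfig)]) -> bool {
  buckets
    .iter()
    .any(|(bucket, config)| bucket.tokens != config.capacity)
}

fn main() {
  let full = (Bucket { tokens: 10 }, BucketConfig { capacity: 10 });
  let partial = (Bucket { tokens: 7 }, BucketConfig { capacity: 10 });
  // With `all`, the full bucket would wrongly hide the pending refill and the
  // partially drained group could be dropped, losing rate-limit state.
  assert!(has_refill_in_future(&[full, partial]));
  assert!(!has_refill_in_future(&[full]));
}
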
View file

@@ -1,6 +1,7 @@
use crate::settings::SETTINGS;
use crate::{error::LemmyResult, settings::SETTINGS, LemmyErrorType};
use markdown_it::{plugins::cmark::inline::image::Image, MarkdownIt};
use once_cell::sync::Lazy;
use regex::RegexSet;
use url::Url;
use urlencoding::encode;
@@ -98,10 +99,17 @@ pub fn markdown_rewrite_image_links(mut src: String) -> (String, Vec<Url>) {
(src, links)
}
pub fn markdown_check_for_blocked_urls(text: &str, blocklist: &RegexSet) -> LemmyResult<()> {
if blocklist.is_match(text) {
Err(LemmyErrorType::BlockedUrl)?
}
Ok(())
}
#[cfg(test)]
#[allow(clippy::unwrap_used)]
#[allow(clippy::indexing_slicing)]
mod tests {
#![allow(clippy::unwrap_used)]
#![allow(clippy::indexing_slicing)]
use super::*;
use pretty_assertions::assert_eq;
@@ -109,65 +117,65 @@ mod tests {
#[test]
fn test_basic_markdown() {
let tests: Vec<_> = vec![
(
"headings",
"# h1\n## h2\n### h3\n#### h4\n##### h5\n###### h6",
"<h1>h1</h1>\n<h2>h2</h2>\n<h3>h3</h3>\n<h4>h4</h4>\n<h5>h5</h5>\n<h6>h6</h6>\n"
),
(
"line breaks",
"First\rSecond",
"<p>First\nSecond</p>\n"),
(
"emphasis",
"__bold__ **bold** *italic* ***bold+italic***",
"<p><strong>bold</strong> <strong>bold</strong> <em>italic</em> <em><strong>bold+italic</strong></em></p>\n"
),
(
"blockquotes",
"> #### Hello\n > \n > - Hola\n > - 안영 \n>> Goodbye\n",
"<blockquote>\n<h4>Hello</h4>\n<ul>\n<li>Hola</li>\n<li>안영</li>\n</ul>\n<blockquote>\n<p>Goodbye</p>\n</blockquote>\n</blockquote>\n"
),
(
"lists (ordered, unordered)",
"1. pen\n2. apple\n3. apple pen\n- pen\n- pineapple\n- pineapple pen",
"<ol>\n<li>pen</li>\n<li>apple</li>\n<li>apple pen</li>\n</ol>\n<ul>\n<li>pen</li>\n<li>pineapple</li>\n<li>pineapple pen</li>\n</ul>\n"
),
(
"code and code blocks",
"this is my amazing `code snippet` and my amazing ```code block```",
"<p>this is my amazing <code>code snippet</code> and my amazing <code>code block</code></p>\n"
),
// Links with added nofollow attribute
(
"links",
"[Lemmy](https://join-lemmy.org/ \"Join Lemmy!\")",
"<p><a href=\"https://join-lemmy.org/\" rel=\"nofollow\" title=\"Join Lemmy!\">Lemmy</a></p>\n"
),
// Remote images with proxy
(
"images",
"![My linked image](https://example.com/image.png \"image alt text\")",
"<p><img src=\"https://example.com/image.png\" alt=\"My linked image\" title=\"image alt text\" /></p>\n"
),
// Local images without proxy
(
"images",
"![My linked image](https://lemmy-alpha/image.png \"image alt text\")",
"<p><img src=\"https://lemmy-alpha/image.png\" alt=\"My linked image\" title=\"image alt text\" /></p>\n"
),
// Ensure spoiler plugin is added
(
"basic spoiler",
"::: spoiler click to see more\nhow spicy!\n:::\n",
"<details><summary>click to see more</summary><p>how spicy!\n</p></details>\n"
),
(
"escape html special chars",
"<script>alert('xss');</script> hello &\"",
"<p>&lt;script&gt;alert(xss);&lt;/script&gt; hello &amp;&quot;</p>\n"
)
];
(
"headings",
"# h1\n## h2\n### h3\n#### h4\n##### h5\n###### h6",
"<h1>h1</h1>\n<h2>h2</h2>\n<h3>h3</h3>\n<h4>h4</h4>\n<h5>h5</h5>\n<h6>h6</h6>\n"
),
(
"line breaks",
"First\rSecond",
"<p>First\nSecond</p>\n"),
(
"emphasis",
"__bold__ **bold** *italic* ***bold+italic***",
"<p><strong>bold</strong> <strong>bold</strong> <em>italic</em> <em><strong>bold+italic</strong></em></p>\n"
),
(
"blockquotes",
"> #### Hello\n > \n > - Hola\n > - 안영 \n>> Goodbye\n",
"<blockquote>\n<h4>Hello</h4>\n<ul>\n<li>Hola</li>\n<li>안영</li>\n</ul>\n<blockquote>\n<p>Goodbye</p>\n</blockquote>\n</blockquote>\n"
),
(
"lists (ordered, unordered)",
"1. pen\n2. apple\n3. apple pen\n- pen\n- pineapple\n- pineapple pen",
"<ol>\n<li>pen</li>\n<li>apple</li>\n<li>apple pen</li>\n</ol>\n<ul>\n<li>pen</li>\n<li>pineapple</li>\n<li>pineapple pen</li>\n</ul>\n"
),
(
"code and code blocks",
"this is my amazing `code snippet` and my amazing ```code block```",
"<p>this is my amazing <code>code snippet</code> and my amazing <code>code block</code></p>\n"
),
// Links with added nofollow attribute
(
"links",
"[Lemmy](https://join-lemmy.org/ \"Join Lemmy!\")",
"<p><a href=\"https://join-lemmy.org/\" rel=\"nofollow\" title=\"Join Lemmy!\">Lemmy</a></p>\n"
),
// Remote images with proxy
(
"images",
"![My linked image](https://example.com/image.png \"image alt text\")",
"<p><img src=\"https://example.com/image.png\" alt=\"My linked image\" title=\"image alt text\" /></p>\n"
),
// Local images without proxy
(
"images",
"![My linked image](https://lemmy-alpha/image.png \"image alt text\")",
"<p><img src=\"https://lemmy-alpha/image.png\" alt=\"My linked image\" title=\"image alt text\" /></p>\n"
),
// Ensure spoiler plugin is added
(
"basic spoiler",
"::: spoiler click to see more\nhow spicy!\n:::\n",
"<details><summary>click to see more</summary><p>how spicy!\n</p></details>\n"
),
(
"escape html special chars",
"<script>alert('xss');</script> hello &\"",
"<p>&lt;script&gt;alert(xss);&lt;/script&gt; hello &amp;&quot;</p>\n"
)
];
tests.iter().for_each(|&(msg, input, expected)| {
let result = markdown_to_html(input);
@@ -184,46 +192,46 @@ mod tests {
fn test_markdown_proxy_images() {
let tests: Vec<_> =
vec![
(
"remote image proxied",
"![link](http://example.com/image.jpg)",
"![link](https://lemmy-alpha/api/v3/image_proxy?url=http%3A%2F%2Fexample.com%2Fimage.jpg)",
),
(
"local image unproxied",
"![link](http://lemmy-alpha/image.jpg)",
"![link](http://lemmy-alpha/image.jpg)",
),
(
"multiple image links",
"![link](http://example.com/image1.jpg) ![link](http://example.com/image2.jpg)",
"![link](https://lemmy-alpha/api/v3/image_proxy?url=http%3A%2F%2Fexample.com%2Fimage1.jpg) ![link](https://lemmy-alpha/api/v3/image_proxy?url=http%3A%2F%2Fexample.com%2Fimage2.jpg)",
),
(
"empty link handled",
"![image]()",
"![image]()"
),
(
"empty label handled",
"![](http://example.com/image.jpg)",
"![](https://lemmy-alpha/api/v3/image_proxy?url=http%3A%2F%2Fexample.com%2Fimage.jpg)"
),
(
"invalid image link removed",
"![image](http-not-a-link)",
"![image]()"
),
(
"label with nested markdown handled",
"![a *b* c](http://example.com/image.jpg)",
"![a *b* c](https://lemmy-alpha/api/v3/image_proxy?url=http%3A%2F%2Fexample.com%2Fimage.jpg)"
),
(
"custom emoji support",
r#"![party-blob](https://www.hexbear.net/pictrs/image/83405746-0620-4728-9358-5f51b040ffee.gif "emoji party-blob")"#,
r#"![party-blob](https://lemmy-alpha/api/v3/image_proxy?url=https%3A%2F%2Fwww.hexbear.net%2Fpictrs%2Fimage%2F83405746-0620-4728-9358-5f51b040ffee.gif "emoji party-blob")"#
)
(
"remote image proxied",
"![link](http://example.com/image.jpg)",
"![link](https://lemmy-alpha/api/v3/image_proxy?url=http%3A%2F%2Fexample.com%2Fimage.jpg)",
),
(
"local image unproxied",
"![link](http://lemmy-alpha/image.jpg)",
"![link](http://lemmy-alpha/image.jpg)",
),
(
"multiple image links",
"![link](http://example.com/image1.jpg) ![link](http://example.com/image2.jpg)",
"![link](https://lemmy-alpha/api/v3/image_proxy?url=http%3A%2F%2Fexample.com%2Fimage1.jpg) ![link](https://lemmy-alpha/api/v3/image_proxy?url=http%3A%2F%2Fexample.com%2Fimage2.jpg)",
),
(
"empty link handled",
"![image]()",
"![image]()"
),
(
"empty label handled",
"![](http://example.com/image.jpg)",
"![](https://lemmy-alpha/api/v3/image_proxy?url=http%3A%2F%2Fexample.com%2Fimage.jpg)"
),
(
"invalid image link removed",
"![image](http-not-a-link)",
"![image]()"
),
(
"label with nested markdown handled",
"![a *b* c](http://example.com/image.jpg)",
"![a *b* c](https://lemmy-alpha/api/v3/image_proxy?url=http%3A%2F%2Fexample.com%2Fimage.jpg)"
),
(
"custom emoji support",
r#"![party-blob](https://www.hexbear.net/pictrs/image/83405746-0620-4728-9358-5f51b040ffee.gif "emoji party-blob")"#,
r#"![party-blob](https://lemmy-alpha/api/v3/image_proxy?url=https%3A%2F%2Fwww.hexbear.net%2Fpictrs%2Fimage%2F83405746-0620-4728-9358-5f51b040ffee.gif "emoji party-blob")"#
)
];
tests.iter().for_each(|&(msg, input, expected)| {
@@ -237,6 +245,69 @@ mod tests {
});
}
#[test]
fn test_url_blocking() {
let set = RegexSet::new(vec![r"(https://)?example\.com/?"]).unwrap();
assert!(
markdown_check_for_blocked_urls(&String::from("[](https://example.com)"), &set).is_err()
);
assert!(markdown_check_for_blocked_urls(
&String::from("Go to https://example.com to get free Robux"),
&set
)
.is_err());
assert!(
markdown_check_for_blocked_urls(&String::from("[](https://example.blog)"), &set).is_ok()
);
assert!(markdown_check_for_blocked_urls(&String::from("example.com"), &set).is_err());
assert!(markdown_check_for_blocked_urls(
"Odio exercitationem culpa sed sunt
et. Sit et similique tempora deserunt doloremque. Cupiditate iusto
repellat et quis qui. Cum veritatis facere quasi repellendus sunt
eveniet nemo sint. Cumque sit unde est. https://example.com Alias
repellendus at quos.",
&set
)
.is_err());
let set = RegexSet::new(vec![r"(https://)?example\.com/spam\.jpg"]).unwrap();
assert!(markdown_check_for_blocked_urls(
&String::from("![](https://example.com/spam.jpg)"),
&set
)
.is_err());
let set = RegexSet::new(vec![
r"(https://)?quo\.example\.com/?",
r"(https://)?foo\.example\.com/?",
r"(https://)?bar\.example\.com/?",
])
.unwrap();
assert!(
markdown_check_for_blocked_urls(&String::from("https://baz.example.com"), &set).is_ok()
);
assert!(
markdown_check_for_blocked_urls(&String::from("https://bar.example.com"), &set).is_err()
);
let set = RegexSet::new(vec![r"(https://)?example\.com/banned_page"]).unwrap();
assert!(
markdown_check_for_blocked_urls(&String::from("https://example.com/page"), &set).is_ok()
);
let set = RegexSet::new(vec![r"(https://)?ex\.mple\.com/?"]).unwrap();
assert!(markdown_check_for_blocked_urls("example.com", &set).is_ok());
}
#[test]
fn test_sanitize_html() {
let sanitized = sanitize_html("<script>alert('xss');</script> hello &\"'");

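The new markdown_check_for_blocked_urls helper above matches the whole markdown text against a prebuilt RegexSet, as its tests show. How that set is built from the instance's blocklist is outside this hunk; a hypothetical sketch, assuming entries are stored as bare URLs and escaped before compilation (the helper name and exact pattern shape are assumptions, loosely mirroring the patterns used in the tests):

use regex::{escape, RegexSet};

// Hypothetical helper, not part of this commit.
fn build_url_blocklist(blocked: &[String]) -> Result<RegexSet, regex::Error> {
  let patterns: Vec<String> = blocked
    .iter()
    // Escape the stored URL so dots match literally, and make the scheme optional.
    .map(|url| format!(r"(https://)?{}", escape(url)))
    .collect();
  RegexSet::new(patterns)
}
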
View file

@@ -134,9 +134,9 @@ pub fn add(markdown_parser: &mut MarkdownIt) {
}
#[cfg(test)]
#[allow(clippy::unwrap_used)]
#[allow(clippy::indexing_slicing)]
mod tests {
#![allow(clippy::unwrap_used)]
#![allow(clippy::indexing_slicing)]
use crate::utils::markdown::spoiler_rule::add;
use markdown_it::MarkdownIt;

View file

@@ -34,9 +34,9 @@ pub fn scrape_text_for_mentions(text: &str) -> Vec<MentionData> {
}
#[cfg(test)]
#[allow(clippy::unwrap_used)]
#[allow(clippy::indexing_slicing)]
mod test {
#![allow(clippy::unwrap_used)]
#![allow(clippy::indexing_slicing)]
use crate::utils::mention::scrape_text_for_mentions;
use pretty_assertions::assert_eq;

View file

@@ -64,9 +64,9 @@ pub(crate) fn slurs_vec_to_str(slurs: &[&str]) -> String {
}
#[cfg(test)]
#[allow(clippy::unwrap_used)]
#[allow(clippy::indexing_slicing)]
mod test {
#![allow(clippy::unwrap_used)]
#![allow(clippy::indexing_slicing)]
use crate::utils::slurs::{remove_slurs, slur_check, slurs_vec_to_str};
use pretty_assertions::assert_eq;

View file

@@ -1,8 +1,8 @@
use crate::error::{LemmyErrorExt, LemmyErrorType, LemmyResult};
use itertools::Itertools;
use once_cell::sync::Lazy;
use regex::{Regex, RegexBuilder};
use url::Url;
use regex::{Regex, RegexBuilder, RegexSet};
use url::{ParseError, Url};
// From here: https://github.com/vector-im/element-android/blob/develop/matrix-sdk-android/src/main/java/org/matrix/android/sdk/api/MatrixPatterns.kt#L35
static VALID_MATRIX_ID_REGEX: Lazy<Regex> = Lazy::new(|| {
@@ -299,10 +299,37 @@ pub fn check_url_scheme(url: &Option<Url>) -> LemmyResult<()> {
}
}
pub fn is_url_blocked(url: &Option<Url>, blocklist: &RegexSet) -> LemmyResult<()> {
if let Some(url) = url {
if blocklist.is_match(url.as_str()) {
Err(LemmyErrorType::BlockedUrl)?
}
}
Ok(())
}
pub fn check_urls_are_valid(urls: &Vec<String>) -> LemmyResult<Vec<String>> {
let mut parsed_urls = vec![];
for url in urls {
let url = Url::parse(url).or_else(|e| {
if e == ParseError::RelativeUrlWithoutBase {
Url::parse(&format!("https://{url}"))
} else {
Err(e)
}
})?;
parsed_urls.push(url.to_string());
}
Ok(parsed_urls)
}
#[cfg(test)]
#[allow(clippy::unwrap_used)]
#[allow(clippy::indexing_slicing)]
mod tests {
#![allow(clippy::unwrap_used)]
#![allow(clippy::indexing_slicing)]
use crate::{
error::LemmyErrorType,
@@ -310,7 +337,9 @@ mod tests {
build_and_check_regex,
check_site_visibility_valid,
check_url_scheme,
check_urls_are_valid,
clean_url_params,
is_url_blocked,
is_valid_actor_name,
is_valid_bio_field,
is_valid_display_name,
@@ -550,4 +579,38 @@ mod tests {
let magnet_link="magnet:?xt=urn:btih:4b390af3891e323778959d5abfff4b726510f14c&dn=Ravel%20Complete%20Piano%20Sheet%20Music%20-%20Public%20Domain&tr=udp%3A%2F%2Fopen.tracker.cl%3A1337%2Fannounce";
assert!(check_url_scheme(&Some(Url::parse(magnet_link).unwrap())).is_ok());
}
#[test]
fn test_url_block() {
let set = regex::RegexSet::new(vec![
r"(https://)?example\.org/page/to/article",
r"(https://)?example\.net/?",
r"(https://)?example\.com/?",
])
.unwrap();
assert!(is_url_blocked(&Some(Url::parse("https://example.blog").unwrap()), &set).is_ok());
assert!(is_url_blocked(&Some(Url::parse("https://example.org").unwrap()), &set).is_ok());
assert!(is_url_blocked(&None, &set).is_ok());
assert!(is_url_blocked(&Some(Url::parse("https://example.com").unwrap()), &set).is_err());
}
#[test]
fn test_url_parsed() {
assert_eq!(
vec![String::from("https://example.com/")],
check_urls_are_valid(&vec![String::from("example.com")]).unwrap()
);
assert!(check_urls_are_valid(&vec![
String::from("example.com"),
String::from("https://example.blog")
])
.is_ok());
assert!(check_urls_are_valid(&vec![String::from("https://example .com"),]).is_err());
}
}

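A note on the parsing fallback in check_urls_are_valid above: Url::parse rejects bare domains like "example.com" with ParseError::RelativeUrlWithoutBase, so the function retries with an https:// prefix, while any other parse error is still returned. A minimal standalone sketch of that behaviour (hypothetical normalize function, same logic):

use url::{ParseError, Url};

fn normalize(url: &str) -> Result<String, ParseError> {
  let parsed = Url::parse(url).or_else(|e| {
    if e == ParseError::RelativeUrlWithoutBase {
      // Bare domains parse as relative URLs; retry with an explicit scheme.
      Url::parse(&format!("https://{url}"))
    } else {
      Err(e)
    }
  })?;
  Ok(parsed.to_string())
}

fn main() {
  // The url crate normalizes the empty path to a trailing slash.
  assert_eq!(normalize("example.com").unwrap(), "https://example.com/");
  // A space in the host is rejected outright, matching test_url_parsed above.
  assert!(normalize("https://example .com").is_err());
}
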
View file

@@ -19,6 +19,7 @@ FROM --platform=${BUILDPLATFORM} ${AMD_BUILDER_IMAGE} AS build-amd64
ARG CARGO_BUILD_FEATURES
ARG RUST_RELEASE_MODE
ARG RUSTFLAGS
WORKDIR /lemmy
@@ -48,6 +49,7 @@ FROM --platform=linux/amd64 ${ARM_BUILDER_IMAGE} AS build-arm64
ARG RUST_RELEASE_MODE
ARG CARGO_BUILD_FEATURES
ARG RUSTFLAGS
WORKDIR /home/lemmy/src
USER 10001:10001

View file

@@ -0,0 +1,3 @@
-- This file should undo anything in `up.sql`
DROP TABLE local_site_url_blocklist;

Some files were not shown because too many files have changed in this diff.