Merge pull request #1328 from LemmyNet/move_views_to_diesel

Move SQL views to diesel
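For context, Lemmy previously served this data through Postgres SQL views; this change moves that logic into diesel's query builder, split across the new lemmy_db_queries, lemmy_db_schema, lemmy_db_views, lemmy_db_views_actor, and lemmy_db_views_moderator crates visible in the diff below. A minimal, self-contained sketch of the pattern (hypothetical table, column, and struct names, assuming diesel 1.4 with the postgres feature; not code from this PR):

```rust
#[macro_use]
extern crate diesel;

use diesel::pg::PgConnection;
use diesel::prelude::*;
use diesel::result::QueryResult;

// Hypothetical, simplified tables standing in for the real schema.
table! {
    comment (id) {
        id -> Int4,
        creator_id -> Int4,
        content -> Text,
    }
}

table! {
    person (id) {
        id -> Int4,
        name -> Text,
    }
}

joinable!(comment -> person (creator_id));
allow_tables_to_appear_in_same_query!(comment, person);

#[derive(Queryable)]
pub struct Comment {
    pub id: i32,
    pub creator_id: i32,
    pub content: String,
}

#[derive(Queryable)]
pub struct Person {
    pub id: i32,
    pub name: String,
}

/// Rough equivalent of reading one row from an old `comment_view`-style SQL
/// view: join the base tables with diesel and load a typed tuple instead of
/// selecting from a database-side view.
pub fn comment_with_creator(
    conn: &PgConnection,
    comment_id: i32,
) -> QueryResult<(Comment, Person)> {
    comment::table
        .inner_join(person::table)
        .filter(comment::id.eq(comment_id))
        .first::<(Comment, Person)>(conn)
}
```

The upside of the pattern is that the "views" become ordinary, compile-time-checked Rust queries rather than SQL views managed through migrations.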
This commit is contained in:
Dessalines 2021-01-20 10:01:53 -05:00 committed by GitHub
commit 88284a999e
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
224 changed files with 13126 additions and 8421 deletions

153
.drone.yml Normal file

@ -0,0 +1,153 @@
---
kind: pipeline
name: amd64
platform:
os: linux
arch: amd64
steps:
- name: fetch git submodules
image: node:15-alpine3.12
commands:
- apk add git
- git submodule update --init --recursive --remote
- name: chown repo
image: ekidd/rust-musl-builder:1.47.0
user: root
commands:
- chown 1000:1000 . -R
- name: check formatting
image: rustdocker/rust:nightly
commands:
- /root/.cargo/bin/cargo fmt -- --check
- name: cargo clippy
image: ekidd/rust-musl-builder:1.47.0
commands:
- cargo clippy --workspace --tests --all-targets --all-features -- -D warnings -D deprecated -D clippy::perf -D clippy::complexity -D clippy::dbg_macro
- name: cargo test
image: ekidd/rust-musl-builder:1.47.0
environment:
LEMMY_DATABASE_URL: postgres://lemmy:password@database:5432/lemmy
RUST_BACKTRACE: 1
RUST_TEST_THREADS: 1
commands:
- sudo apt-get update
- sudo apt-get -y install --no-install-recommends espeak postgresql-client
- cargo test --workspace --no-fail-fast
- name: cargo build
image: ekidd/rust-musl-builder:1.47.0
commands:
- cargo build
- mv target/x86_64-unknown-linux-musl/debug/lemmy_server target/lemmy_server
- name: run federation tests
image: node:15-alpine3.12
environment:
LEMMY_DATABASE_URL: postgres://lemmy:password@database:5432
DO_WRITE_HOSTS_FILE: 1
commands:
- apk add bash curl postgresql-client
- bash api_tests/prepare-drone-federation-test.sh
- cd api_tests/
- yarn
- yarn api-test
- name: make release build and push to docker hub
image: plugins/docker
settings:
dockerfile: docker/prod/Dockerfile
username:
from_secret: docker_username
password:
from_secret: docker_password
repo: dessalines/lemmy
auto_tag: true
when:
ref:
- refs/tags/*
services:
- name: database
image: postgres:12-alpine
environment:
POSTGRES_USER: lemmy
POSTGRES_PASSWORD: password
---
kind: pipeline
name: arm64
platform:
os: linux
arch: arm64
steps:
- name: fetch git submodules
image: node:15-alpine3.12
commands:
- apk add git
- git submodule update --init --recursive --remote
- name: cargo test
image: rust:1.47-slim-buster
environment:
LEMMY_DATABASE_URL: postgres://lemmy:password@database:5432/lemmy
RUST_BACKTRACE: 1
RUST_TEST_THREADS: 1
commands:
- apt-get update
- apt-get -y install --no-install-recommends espeak postgresql-client libssl-dev pkg-config libpq-dev
- cargo test --workspace --no-fail-fast
- cargo build
# Using Debian here because there seems to be no official Alpine-based Rust docker image for ARM.
- name: cargo build
image: rust:1.47-slim-buster
commands:
- apt-get update
- apt-get -y install --no-install-recommends libssl-dev pkg-config libpq-dev
- cargo build
- mv target/debug/lemmy_server target/lemmy_server
- name: run federation tests
image: node:15-buster-slim
environment:
LEMMY_DATABASE_URL: postgres://lemmy:password@database:5432
DO_WRITE_HOSTS_FILE: 1
commands:
- mkdir -p /usr/share/man/man1 /usr/share/man/man7
- apt-get update
- apt-get -y install --no-install-recommends bash curl libssl-dev pkg-config libpq-dev postgresql-client libc6-dev
- bash api_tests/prepare-drone-federation-test.sh
- cd api_tests/
- yarn
- yarn api-test
- name: make release build and push to docker hub
image: plugins/docker
settings:
dockerfile: docker/prod/Dockerfile.arm
username:
from_secret: docker_username
password:
from_secret: docker_password
repo: dessalines/lemmy
auto_tag: true
auto_tag_suffix: arm64
when:
ref:
- refs/tags/*
services:
- name: database
image: postgres:12-alpine
environment:
POSTGRES_USER: lemmy
POSTGRES_PASSWORD: password

3
.gitignore vendored

@ -15,8 +15,7 @@ volumes
# local build files
target
env_setup.sh
query_testing/*.json
query_testing/*.json.old
query_testing/**/reports/*.json
# API tests
api_tests/node_modules

3
.gitmodules vendored

@ -1,3 +1,4 @@
[submodule "docs"]
path = docs
url = https://github.com/LemmyNet/lemmy-docs.git
url = https://github.com/LemmyNet/lemmy-docs
branch = main


@ -1,30 +0,0 @@
sudo: required
language: node_js
node_js:
- 14
services:
- docker
env:
matrix:
- DOCKER_COMPOSE_VERSION=1.25.5
global:
- secure: nzmFoTxPn7OT+qcTULezSCT6B44j/q8RxERBQSr1FVXaCcDrBr6q9ewhGy7BHWP74r4qbif4m9r3sNELZCoFYFP3JwLnrZfX/xUwU8p61eFD2PMOJAdOywDxb94SvooOSnjBmxNvRsuqf6Zmnw378mbsSVCi9Xbx9jpoV4Jq8zKgO0M8WIl/lj2dijD95WIMrHcorbzKS3+2zW3LkPiC2bnfDAUmUDfaCj1gh9FCvzZMtrSxu7kxAeFCkR16TJUciIcGgag8rLHfxwG0h2uEJJ+3/62qCWUdgnj171oTE4ZRi0hdvt2HOY5wjHfS2y1ZxWYgo31uws3pyoTNeQZi0o7Q9Xe/4JXYZXvDfuscSZ9RiuhAstCVswtXPJJVVJQ9cdl5eX1TI0bz8eVRvRy4p40OIBjKiobkmRjl8sXjFbpYAIvFr+TgSa/K/bxm3POfI0B8bIHI85zFxUMrWt5i2IJ0dWvDNHrz+CWWKn1vVFYbBNPgDDHtE0P3LWLEioWFf+ULycjW8DefWc+b63Lf9SSaEE7FnX2mc+BaHCgubCDkJy9Au4xP8zQlJjgZwOdTedw5jvmwz3fqMZBpHypVUXzZs7cRhMWtQ7TAoGb8TOqXNgPEVW+BARNXl0wAamTgjt9v20x0wkp+/SLJwMNY+zvwmzxzd5R9TPgDOqyIRTU=
- secure: ALZqC4OYV315P7EZyk+c/PLJdneeU7jMC30TTzMcX3hospIu7naWekZ+HUnziFDQKZxIHWKZsq1R52DWhsERLrPF3SVa+QiXu8vTTPrETBWnu9VgyFzgdEbUKRas1X3qerEAHcNBms1EAl2FOiQM1k5EDygrClv4KWgyzntEtKJbN2UCFKxtoBSdMZA6fcGtCwffcj8uIAIP2NhZixbU+smVgVbpMpe6QEuuEoVlVrfH8iXxb8Gi+qkd0YIYAHkjtTqQ/nHuAUhcuEE0mORTNGPv7CmTwpuQiGCCdtySZc7Qq8z1x2y7RLy0+RVxM0PR8UV6iy4ipyTgZ6wTF30ksLDxOI3GlRaKF3F6kLErOiEiEUOqa+zLgUM0OLGTn+KLATQDx74in5NcKjKUAnkuxdZyuDbifvQb5tqfrGdXd22pzVZbielRJRW59ig0Nr5cxEpRtoRkoFKNk7o3XlD6JmIBjKn1UHkZ4H/oLUKIXT2qOP2fIEzgLjfpSuGwhvJRz1KRP49HYVl7Gkd45/RdZ519W0gnMkIrEaod90iXSFNTgmJTGeH0Mv0jHameN47PIT3c49MOy5Hj0XCHUPfc6qqrdGnliS5hTnrFThCfn5ZuSZxVdgGLJUQvV+D+5KDqjFdGyNGVGoEg0YdrDtGXmpojbyQDJAT7ToL3yIBF7co=
before_install:
# Install docker-compose
- sudo rm /usr/local/bin/docker-compose
- curl -L https://github.com/docker/compose/releases/download/${DOCKER_COMPOSE_VERSION}/docker-compose-`uname
-s`-`uname -m` > docker-compose
- chmod +x docker-compose
- sudo mv docker-compose /usr/local/bin
# Change dir
- cd docker/travis
script:
- "./run-tests.bash"
deploy:
provider: script
script: bash docker_push.sh
on:
tags: true
notifications:
email: false


@ -1,35 +0,0 @@
# Code of Conduct
- We are committed to providing a friendly, safe and welcoming environment for all, regardless of level of experience, gender identity and expression, sexual orientation, disability, personal appearance, body size, race, ethnicity, age, religion, nationality, or other similar characteristic.
- Please avoid using overtly sexual aliases or other nicknames that might detract from a friendly, safe and welcoming environment for all.
- Please be kind and courteous. There's no need to be mean or rude.
- Respect that people have differences of opinion and that every design or implementation choice carries a trade-off and numerous costs. There is seldom a right answer.
- Please keep unstructured critique to a minimum. If you have solid ideas you want to experiment with, make a fork and see how it works.
- We will exclude you from interaction if you insult, demean or harass anyone. That is not welcome behavior. We interpret the term “harassment” as including the definition in the Citizen Code of Conduct; if you have any lack of clarity about what might be included in that concept, please read their definition. In particular, we don't tolerate behavior that excludes people in socially marginalized groups.
- Private harassment is also unacceptable. No matter who you are, if you feel you have been or are being harassed or made uncomfortable by a community member, please contact one of the channel ops or any of the Lemmy moderation team immediately. Whether you're a regular contributor or a newcomer, we care about making this community a safe place for you and we've got your back.
- Likewise any spamming, trolling, flaming, baiting or other attention-stealing behavior is not welcome.
[**Message the Moderation Team on Mastodon**](https://mastodon.social/@LemmyDev)
[**Email The Moderation Team**](mailto:contact@lemmy.ml)
## Moderation
These are the policies for upholding our community's standards of conduct. If you feel that a thread needs moderation, please contact the Lemmy moderation team.
1. Remarks that violate the Lemmy standards of conduct, including hateful, hurtful, oppressive, or exclusionary remarks, are not allowed. (Cursing is allowed, but never targeting another user, and never in a hateful manner.)
2. Remarks that moderators find inappropriate, whether listed in the code of conduct or not, are also not allowed.
3. Moderators will first respond to such remarks with a warning, at the same time the offending content will likely be removed whenever possible.
4. If the warning is unheeded, the user will be “kicked,” i.e., kicked out of the communication channel to cool off.
5. If the user comes back and continues to make trouble, they will be banned, i.e., indefinitely excluded.
6. Moderators may choose at their discretion to un-ban the user if it was a first offense and they offer the offended party a genuine apology.
7. If a moderator bans someone and you think it was unjustified, please take it up with that moderator, or with a different moderator, in private. Complaints about bans in-channel are not allowed.
8. Moderators are held to a higher standard than other community members. If a moderator creates an inappropriate situation, they should expect less leeway than others.
In the Lemmy community we strive to go the extra step to look out for each other. Don't just aim to be technically unimpeachable, try to be your best self. In particular, avoid flirting with offensive or sensitive issues, particularly if they're off-topic; this all too often leads to unnecessary fights, hurt feelings, and damaged trust; worse, it can drive people away from the community entirely.
And if someone takes issue with something you said or did, resist the urge to be defensive. Just stop doing what it was they complained about and apologize. Even if you feel you were misinterpreted or unfairly accused, chances are good there was something you could've communicated better; remember that it's your responsibility to make others comfortable. Everyone wants to get along and we are all here first and foremost because we want to talk about cool technology. You will find that people will be eager to assume good intent and forgive as long as you earn their trust.
The enforcement policies listed above apply to all official Lemmy venues; including git repositories under [github.com/LemmyNet/lemmy](https://github.com/LemmyNet/lemmy) and [yerbamate.ml/LemmyNet/lemmy](https://yerbamate.ml/LemmyNet/lemmy), the [Matrix channel](https://matrix.to/#/!BZVTUuEiNmRcbFeLeI:matrix.org?via=matrix.org&via=privacytools.io&via=permaweb.io); and all instances under lemmy.ml. For other projects adopting the Rust Code of Conduct, please contact the maintainers of those projects for enforcement. If you wish to use this code of conduct for your own project, consider explicitly mentioning your moderation policy or making a copy with your own moderation policy so as to avoid confusion.
Adapted from the [Rust Code of Conduct](https://www.rust-lang.org/policies/code-of-conduct), which is based on the [Node.js Policy on Trolling](http://blog.izs.me/post/30036893703/policy-on-trolling) as well as the [Contributor Covenant v1.3.0](https://www.contributor-covenant.org/version/1/3/0/).

401
Cargo.lock generated

@ -2,9 +2,9 @@
# It is not intended for manual editing.
[[package]]
name = "activitystreams"
version = "0.7.0-alpha.8"
version = "0.7.0-alpha.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5e9fedbe571e267d9b93d071bdc4493f944022c6cce717ebb27d352026fc81c4"
checksum = "b0bc65a417d0e6bb79922b4ddb40ae52c7eddb5fa87707c83e383c3013ae0c1e"
dependencies = [
"chrono",
"mime",
@ -44,7 +44,7 @@ dependencies = [
"parking_lot",
"pin-project 0.4.27",
"smallvec",
"tokio 0.2.23",
"tokio 0.2.24",
"tokio-util",
"trust-dns-proto",
"trust-dns-resolver",
@ -62,7 +62,7 @@ dependencies = [
"futures-sink",
"log",
"pin-project 0.4.27",
"tokio 0.2.23",
"tokio 0.2.24",
"tokio-util",
]
@ -145,8 +145,8 @@ dependencies = [
"log",
"mime",
"percent-encoding",
"pin-project 1.0.2",
"rand",
"pin-project 1.0.3",
"rand 0.7.3",
"regex",
"serde 1.0.118",
"serde_json",
@ -191,7 +191,7 @@ dependencies = [
"futures-channel",
"futures-util",
"smallvec",
"tokio 0.2.23",
"tokio 0.2.24",
]
[[package]]
@ -317,7 +317,7 @@ dependencies = [
"fxhash",
"log",
"mime",
"pin-project 1.0.2",
"pin-project 1.0.3",
"regex",
"rustls",
"serde 1.0.118",
@ -369,9 +369,9 @@ dependencies = [
[[package]]
name = "addr2line"
version = "0.14.0"
version = "0.14.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7c0929d69e78dd9bf5408269919fcbcaeb2e35e5d43e5815517cdc6a8e11a423"
checksum = "a55f82cfe485775d02112886f4169bde0c5894d75e79ead7eafe7e40a25e45f7"
dependencies = [
"gimli",
]
@ -399,9 +399,9 @@ dependencies = [
[[package]]
name = "anyhow"
version = "1.0.35"
version = "1.0.37"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2c0df63cb2955042487fad3aefd2c6e3ae7389ac5dc1beb28921de0b69f779d4"
checksum = "ee67c11feeac938fae061b232e38e0b6d94f97a9df10e6271319325ac4c56a86"
[[package]]
name = "arrayvec"
@ -464,7 +464,7 @@ dependencies = [
"log",
"mime",
"percent-encoding",
"rand",
"rand 0.7.3",
"rustls",
"serde 1.0.118",
"serde_json",
@ -495,11 +495,11 @@ dependencies = [
"chrono",
"log",
"num_cpus",
"rand",
"rand 0.7.3",
"serde 1.0.118",
"serde_json",
"thiserror",
"tokio 0.2.23",
"tokio 0.2.24",
"uuid",
]
@ -518,7 +518,7 @@ dependencies = [
"serde 1.0.118",
"serde_json",
"thiserror",
"tokio 0.2.23",
"tokio 0.2.24",
"uuid",
]
@ -562,7 +562,7 @@ checksum = "a4d0faafe9e089674fc3efdb311ff5253d445c79d85d1d28bd3ace76d45e7164"
dependencies = [
"base64 0.13.0",
"blowfish",
"getrandom 0.2.0",
"getrandom 0.2.1",
]
[[package]]
@ -708,7 +708,7 @@ dependencies = [
"hound",
"image",
"lodepng",
"rand",
"rand 0.7.3",
"serde_json",
]
@ -795,21 +795,11 @@ dependencies = [
"serde-hjson",
]
[[package]]
name = "console_error_panic_hook"
version = "0.1.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b8d976903543e0c48546a91908f21588a680a8c8f984df9a5d69feccb2b2a211"
dependencies = [
"cfg-if 0.1.10",
"wasm-bindgen",
]
[[package]]
name = "const_fn"
version = "0.4.3"
version = "0.4.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c478836e029dcef17fb47c89023448c64f781a046e0300e257ad8225ae59afab"
checksum = "28b9d6de7f49e22cf97ad17fc4036ece69300032f45f78f30b4a4482cdc3f4a6"
[[package]]
name = "cookie"
@ -1186,9 +1176,9 @@ checksum = "3dcaa9ae7725d12cdb85b3ad99a434db70b468c09ded17e012d86b5c1010f7a7"
[[package]]
name = "funty"
version = "1.0.1"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0ba62103ce691c2fd80fbae2213dfdda9ce60804973ac6b6e97de818ea7f52c8"
checksum = "fed34cd105917e91daa4da6b3728c47b068749d6a62c59811f06ed2ac71d9da7"
[[package]]
name = "futures"
@ -1278,7 +1268,7 @@ dependencies = [
"futures-sink",
"futures-task",
"memchr",
"pin-project 1.0.2",
"pin-project 1.0.3",
"pin-utils",
"proc-macro-hack",
"proc-macro-nested",
@ -1315,24 +1305,24 @@ dependencies = [
[[package]]
name = "getrandom"
version = "0.1.15"
version = "0.1.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fc587bc0ec293155d5bfa6b9891ec18a1e330c234f896ea47fbada4cadbe47e6"
checksum = "8fc3cb4d91f53b50155bdcfd23f6a4c39ae1969c2ae85982b135750cccaf5fce"
dependencies = [
"cfg-if 0.1.10",
"cfg-if 1.0.0",
"libc",
"wasi 0.9.0+wasi-snapshot-preview1",
]
[[package]]
name = "getrandom"
version = "0.2.0"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ee8025cf36f917e6a52cce185b7c7177689b838b7ec138364e50cc2277a56cf4"
checksum = "4060f4657be78b8e766215b02b18a2e862d83745545de804638e2b545e81aee6"
dependencies = [
"cfg-if 0.1.10",
"cfg-if 1.0.0",
"libc",
"wasi 0.9.0+wasi-snapshot-preview1",
"wasi 0.10.0+wasi-snapshot-preview1",
]
[[package]]
@ -1365,7 +1355,7 @@ dependencies = [
"http",
"indexmap",
"slab",
"tokio 0.2.23",
"tokio 0.2.24",
"tokio-util",
"tracing",
"tracing-futures",
@ -1379,9 +1369,9 @@ checksum = "d7afe4a420e3fe79967a00898cc1f4db7c8a49a9333a29f8a4bd76a253d5cd04"
[[package]]
name = "heck"
version = "0.3.1"
version = "0.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "20564e78d53d2bb135c343b3f47714a56af2061f1c928fdb541dc7b9fdd94205"
checksum = "87cbf45460356b7deeb5e3415b5563308c0a9b057c85e12b06ad551f98d0a6ac"
dependencies = [
"unicode-segmentation",
]
@ -1414,9 +1404,9 @@ checksum = "8a164bb2ceaeff4f42542bdb847c41517c78a60f5649671b2a07312b6e117549"
[[package]]
name = "http"
version = "0.2.1"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "28d569972648b2c512421b5f2a405ad6ac9666547189d0c5477a3f200f3e02f9"
checksum = "84129d298a6d57d246960ff8eb831ca4af3f96d29e2e28848dae275408658e26"
dependencies = [
"bytes 0.5.6",
"fnv",
@ -1476,7 +1466,7 @@ dependencies = [
"reqwest",
"sha2",
"thiserror",
"tokio 0.2.23",
"tokio 0.2.24",
]
[[package]]
@ -1513,9 +1503,9 @@ dependencies = [
"httparse",
"httpdate",
"itoa",
"pin-project 1.0.2",
"pin-project 1.0.3",
"socket2",
"tokio 0.2.23",
"tokio 0.2.24",
"tower-service",
"tracing",
"want",
@ -1530,7 +1520,7 @@ dependencies = [
"bytes 0.5.6",
"hyper",
"native-tls",
"tokio 0.2.23",
"tokio 0.2.24",
"tokio-tls",
]
@ -1590,9 +1580,9 @@ dependencies = [
[[package]]
name = "indexmap"
version = "1.6.0"
version = "1.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "55e2e4c765aa53a0424761bf9f41aa7a6ac1efa87238f59560640e27fca028f2"
checksum = "4fb1fa934250de4de8aef298d81c729a7d33d8c239daa3a7575e6b92bfc7313b"
dependencies = [
"autocfg",
"hashbrown",
@ -1645,9 +1635,9 @@ dependencies = [
[[package]]
name = "itoa"
version = "0.4.6"
version = "0.4.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dc6f3ad7b9d11a0c00842ff8de1b60ee58661048eb8049ed33c73594f359d7e6"
checksum = "dd25036021b0de88a0aff6b850051563c6516d0bf53f8638938edbb9de732736"
[[package]]
name = "jpeg-decoder"
@ -1727,14 +1717,17 @@ dependencies = [
"jsonwebtoken",
"lazy_static",
"lemmy_apub",
"lemmy_db",
"lemmy_rate_limit",
"lemmy_db_queries",
"lemmy_db_schema",
"lemmy_db_views",
"lemmy_db_views_actor",
"lemmy_db_views_moderator",
"lemmy_structs",
"lemmy_utils",
"lemmy_websocket",
"log",
"openssl",
"rand",
"rand 0.8.1",
"reqwest",
"serde 1.0.118",
"serde_json",
@ -1742,7 +1735,7 @@ dependencies = [
"strum",
"strum_macros",
"thiserror",
"tokio 0.3.5",
"tokio 0.3.6",
"url",
"uuid",
]
@ -1771,14 +1764,17 @@ dependencies = [
"http-signature-normalization-reqwest",
"itertools",
"lazy_static",
"lemmy_db",
"lemmy_db_queries",
"lemmy_db_schema",
"lemmy_db_views",
"lemmy_db_views_actor",
"lemmy_structs",
"lemmy_utils",
"lemmy_websocket",
"log",
"openssl",
"percent-encoding",
"rand",
"rand 0.8.1",
"reqwest",
"serde 1.0.118",
"serde_json",
@ -1786,19 +1782,21 @@ dependencies = [
"strum",
"strum_macros",
"thiserror",
"tokio 0.3.5",
"tokio 0.3.6",
"url",
"uuid",
]
[[package]]
name = "lemmy_db"
name = "lemmy_db_queries"
version = "0.1.0"
dependencies = [
"bcrypt",
"chrono",
"diesel",
"diesel_migrations",
"lazy_static",
"lemmy_db_schema",
"lemmy_utils",
"log",
"regex",
@ -1811,16 +1809,46 @@ dependencies = [
]
[[package]]
name = "lemmy_rate_limit"
name = "lemmy_db_schema"
version = "0.1.0"
dependencies = [
"actix-web",
"futures",
"lemmy_utils",
"chrono",
"diesel",
"log",
"strum",
"strum_macros",
"tokio 0.3.5",
"serde 1.0.118",
"serde_json",
"url",
]
[[package]]
name = "lemmy_db_views"
version = "0.1.0"
dependencies = [
"diesel",
"lemmy_db_queries",
"lemmy_db_schema",
"log",
"serde 1.0.118",
]
[[package]]
name = "lemmy_db_views_actor"
version = "0.1.0"
dependencies = [
"diesel",
"lemmy_db_queries",
"lemmy_db_schema",
"serde 1.0.118",
]
[[package]]
name = "lemmy_db_views_moderator"
version = "0.1.0"
dependencies = [
"diesel",
"lemmy_db_queries",
"lemmy_db_schema",
"serde 1.0.118",
]
[[package]]
@ -1844,8 +1872,11 @@ dependencies = [
"lazy_static",
"lemmy_api",
"lemmy_apub",
"lemmy_db",
"lemmy_rate_limit",
"lemmy_db_queries",
"lemmy_db_schema",
"lemmy_db_views",
"lemmy_db_views_actor",
"lemmy_db_views_moderator",
"lemmy_structs",
"lemmy_utils",
"lemmy_websocket",
@ -1857,7 +1888,7 @@ dependencies = [
"serde_json",
"sha2",
"strum",
"tokio 0.3.5",
"tokio 0.3.6",
"url",
]
@ -1868,7 +1899,11 @@ dependencies = [
"actix-web",
"chrono",
"diesel",
"lemmy_db",
"lemmy_db_queries",
"lemmy_db_schema",
"lemmy_db_views",
"lemmy_db_views_actor",
"lemmy_db_views_moderator",
"lemmy_utils",
"log",
"serde 1.0.118",
@ -1885,18 +1920,22 @@ dependencies = [
"chrono",
"comrak",
"config",
"futures",
"itertools",
"lazy_static",
"lettre",
"log",
"openssl",
"percent-encoding",
"rand",
"rand 0.8.1",
"regex",
"reqwest",
"serde 1.0.118",
"serde_json",
"strum",
"strum_macros",
"thiserror",
"tokio 0.3.6",
"url",
]
@ -1909,18 +1948,18 @@ dependencies = [
"background-jobs",
"chrono",
"diesel",
"lemmy_db",
"lemmy_rate_limit",
"lemmy_db_queries",
"lemmy_db_schema",
"lemmy_structs",
"lemmy_utils",
"log",
"rand",
"rand 0.8.1",
"reqwest",
"serde 1.0.118",
"serde_json",
"strum",
"strum_macros",
"tokio 0.3.5",
"tokio 0.3.6",
]
[[package]]
@ -1939,7 +1978,7 @@ dependencies = [
"once_cell",
"quoted_printable",
"r2d2",
"rand",
"rand 0.7.3",
"regex",
"serde 1.0.118",
"serde_json",
@ -2158,9 +2197,9 @@ dependencies = [
[[package]]
name = "native-tls"
version = "0.2.6"
version = "0.2.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6fcc7939b5edc4e4f86b1b4a04bb1498afaaf871b1a6691838ed06fcb48d3a3f"
checksum = "b8d96b2e1c8da3957d58100b09f102c6d9cfdfced01b7ec5a8974044bb09dbd4"
dependencies = [
"lazy_static",
"libc",
@ -2176,9 +2215,9 @@ dependencies = [
[[package]]
name = "net2"
version = "0.2.36"
version = "0.2.37"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d7cf75f38f16cb05ea017784dc6dbfd354f76c223dba37701734c4f5a9337d02"
checksum = "391630d12b68002ae1e25e8f974306474966550ad82dac6886fb8910c19568ae"
dependencies = [
"cfg-if 0.1.10",
"libc",
@ -2314,12 +2353,12 @@ checksum = "624a8340c38c1b80fd549087862da4ba43e08858af025b236e509b6649fc13d5"
[[package]]
name = "openssl"
version = "0.10.30"
version = "0.10.32"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8d575eff3665419f9b83678ff2815858ad9d11567e082f5ac1814baba4e2bcb4"
checksum = "038d43985d1ddca7a9900630d8cd031b56e4794eecc2e9ea39dd17aa04399a70"
dependencies = [
"bitflags",
"cfg-if 0.1.10",
"cfg-if 1.0.0",
"foreign-types",
"lazy_static",
"libc",
@ -2334,9 +2373,9 @@ checksum = "77af24da69f9d9341038eba93a073b1fdaaa1b788221b00a69bce9e762cb32de"
[[package]]
name = "openssl-sys"
version = "0.9.58"
version = "0.9.60"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a842db4709b604f0fe5d1170ae3565899be2ad3d9cbc72dedc789ac0511f78de"
checksum = "921fc71883267538946025deffb622905ecad223c28efbfdef9bb59a0175f3e6"
dependencies = [
"autocfg",
"cc",
@ -2358,9 +2397,9 @@ dependencies = [
[[package]]
name = "parking_lot_core"
version = "0.8.1"
version = "0.8.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d7c6d9b8427445284a09c55be860a15855ab580a417ccad9da88f5a06787ced0"
checksum = "9ccb628cad4f84851442432c60ad8e1f607e29752d0bf072cbd0baf28aa34272"
dependencies = [
"cfg-if 1.0.0",
"instant",
@ -2441,11 +2480,11 @@ dependencies = [
[[package]]
name = "pin-project"
version = "1.0.2"
version = "1.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9ccc2237c2c489783abd8c4c80e5450fc0e98644555b1364da68cc29aa151ca7"
checksum = "5a83804639aad6ba65345661744708855f9fbcb71176ea8d28d05aeb11d975e7"
dependencies = [
"pin-project-internal 1.0.2",
"pin-project-internal 1.0.3",
]
[[package]]
@ -2461,9 +2500,9 @@ dependencies = [
[[package]]
name = "pin-project-internal"
version = "1.0.2"
version = "1.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f8e8d2bf0b23038a4424865103a4df472855692821aab4e4f5c3312d461d9e5f"
checksum = "b7bcc46b8f73443d15bc1c5fecbb315718491fa9187fa483f0e359323cde8b3a"
dependencies = [
"proc-macro2",
"quote",
@ -2478,9 +2517,9 @@ checksum = "c917123afa01924fc84bb20c4c03f004d9c38e5127e3c039bbf7f4b9c76a2f6b"
[[package]]
name = "pin-project-lite"
version = "0.2.0"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6b063f57ec186e6140e2b8b6921e5f1bd89c7356dda5b33acc5401203ca6131c"
checksum = "e36743d754ccdf9954c2e352ce2d4b106e024c814f6499c2dadff80da9a442d8"
[[package]]
name = "pin-utils"
@ -2496,9 +2535,9 @@ checksum = "3831453b3449ceb48b6d9c7ad7c96d5ea673e9b470a1dc578c2ce6521230884c"
[[package]]
name = "png"
version = "0.16.7"
version = "0.16.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dfe7f9f1c730833200b134370e1d5098964231af8450bce9b78ee3ab5278b970"
checksum = "3c3287920cb847dee3de33d301c463fba14dda99db24214ddf93f83d3021f4c6"
dependencies = [
"bitflags",
"crc32fast",
@ -2560,9 +2599,9 @@ dependencies = [
[[package]]
name = "quote"
version = "1.0.7"
version = "1.0.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "aa563d17ecb180e500da1cfd2b028310ac758de548efdd203e18f283af693f37"
checksum = "991431c3519a3f36861882da93630ce66b52918dcf1b8e2fd66b397fc96f28df"
dependencies = [
"proc-macro2",
]
@ -2596,11 +2635,23 @@ version = "0.7.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6a6b1679d49b24bbfe0c803429aa1874472f50d9b363131f0e89fc356b544d03"
dependencies = [
"getrandom 0.1.15",
"getrandom 0.1.16",
"libc",
"rand_chacha",
"rand_core",
"rand_hc",
"rand_chacha 0.2.2",
"rand_core 0.5.1",
"rand_hc 0.2.0",
]
[[package]]
name = "rand"
version = "0.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c24fcd450d3fa2b592732565aa4f17a27a61c65ece4726353e000939b0edee34"
dependencies = [
"libc",
"rand_chacha 0.3.0",
"rand_core 0.6.1",
"rand_hc 0.3.0",
]
[[package]]
@ -2610,7 +2661,17 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f4c8ed856279c9737206bf725bf36935d8666ead7aa69b52be55af369d193402"
dependencies = [
"ppv-lite86",
"rand_core",
"rand_core 0.5.1",
]
[[package]]
name = "rand_chacha"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e12735cf05c9e10bf21534da50a147b924d555dc7a547c42e6bb2d5b6017ae0d"
dependencies = [
"ppv-lite86",
"rand_core 0.6.1",
]
[[package]]
@ -2619,7 +2680,16 @@ version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "90bde5296fc891b0cef12a6d03ddccc162ce7b2aff54160af9338f8d40df6d19"
dependencies = [
"getrandom 0.1.15",
"getrandom 0.1.16",
]
[[package]]
name = "rand_core"
version = "0.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c026d7df8b298d90ccbbc5190bd04d85e159eaf5576caeacf8741da93ccbd2e5"
dependencies = [
"getrandom 0.2.1",
]
[[package]]
@ -2628,7 +2698,16 @@ version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ca3129af7b92a17112d59ad498c6f81eaf463253766b90396d39ea7a39d6613c"
dependencies = [
"rand_core",
"rand_core 0.5.1",
]
[[package]]
name = "rand_hc"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3190ef7066a446f2e7f42e239d161e905420ccab01eb967c9eb27d21b2322a73"
dependencies = [
"rand_core 0.6.1",
]
[[package]]
@ -2691,9 +2770,9 @@ dependencies = [
[[package]]
name = "reqwest"
version = "0.10.9"
version = "0.10.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fb15d6255c792356a0f578d8a645c677904dc02e862bebe2ecc18e0c01b9a0ce"
checksum = "0718f81a8e14c4dbb3b34cf23dc6aaf9ab8a0dfec160c534b3dbca1aaa21f47c"
dependencies = [
"base64 0.13.0",
"bytes 0.5.6",
@ -2712,16 +2791,15 @@ dependencies = [
"mime_guess",
"native-tls",
"percent-encoding",
"pin-project-lite 0.2.0",
"pin-project-lite 0.2.1",
"serde 1.0.118",
"serde_json",
"serde_urlencoded",
"tokio 0.2.23",
"tokio 0.2.24",
"tokio-tls",
"url",
"wasm-bindgen",
"wasm-bindgen-futures",
"wasm-bindgen-test",
"web-sys",
"winreg 0.7.0",
]
@ -2823,12 +2901,6 @@ dependencies = [
"parking_lot",
]
[[package]]
name = "scoped-tls"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ea6a9290e3c9cf0f18145ef7ffa62d68ee0bf5fcd651017e586dc7fd5da448c2"
[[package]]
name = "scoped_threadpool"
version = "0.1.9"
@ -2930,9 +3002,9 @@ dependencies = [
[[package]]
name = "serde_json"
version = "1.0.60"
version = "1.0.61"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1500e84d27fe482ed1dc791a56eddc2f230046a040fa908c08bda1d9fb615779"
checksum = "4fceb2595057b6891a4ee808f70054bd2d12f0e97f1cbb78689b59f676df325a"
dependencies = [
"indexmap",
"itoa",
@ -3013,9 +3085,9 @@ checksum = "b6fa3938c99da4914afedd13bf3d79bcb6c277d1b2c398d23257a304d9e1b074"
[[package]]
name = "signal-hook-registry"
version = "1.2.2"
version = "1.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ce32ea0c6c56d5eacaeb814fbed9960547021d3edd010ded1425f180536b20ab"
checksum = "16f1d0fef1604ba8f7a073c7e701f213e056707210e9020af4528e0101ce11a6"
dependencies = [
"libc",
]
@ -3039,19 +3111,18 @@ checksum = "c111b5bd5695e56cffe5129854aa230b39c93a305372fdbb2668ca2394eea9f8"
[[package]]
name = "smallvec"
version = "1.5.1"
version = "1.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ae524f056d7d770e174287294f562e95044c68e88dec909a00d2094805db9d75"
checksum = "1a55ca5f3b68e41c979bf8c46a6f1da892ca4db8f94023ce0bd32407573b1ac0"
[[package]]
name = "socket2"
version = "0.3.17"
version = "0.3.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2c29947abdee2a218277abeca306f25789c938e500ea5a9d4b12a5a504466902"
checksum = "122e570113d28d773067fab24266b66753f6ea915758651696b6e35e49f88d6e"
dependencies = [
"cfg-if 1.0.0",
"libc",
"redox_syscall",
"winapi 0.3.9",
]
@ -3063,9 +3134,9 @@ checksum = "6e63cff320ae2c57904679ba7cb63280a3dc4613885beafb148ee7bf9aa9042d"
[[package]]
name = "standback"
version = "0.2.13"
version = "0.2.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cf906c8b8fc3f6ecd1046e01da1d8ddec83e48c8b08b84dcc02b585a6bedf5a8"
checksum = "c66a8cff4fa24853fdf6b51f75c6d7f8206d7c75cab4e467bcd7f25c2b1febe0"
dependencies = [
"version_check 0.9.2",
]
@ -3151,9 +3222,9 @@ dependencies = [
[[package]]
name = "syn"
version = "1.0.54"
version = "1.0.58"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9a2af957a63d6bd42255c359c93d9bfdb97076bd3b820897ce55ffbfbf107f44"
checksum = "cc60a3d73ea6594cd712d830cc1f0390fd71542d8c8cd24e70cc54cdfd5e05d5"
dependencies = [
"proc-macro2",
"quote",
@ -3174,7 +3245,7 @@ checksum = "7a6e24d9338a0a5be79593e2fa15a648add6138caa803e2d5bc782c371732ca9"
dependencies = [
"cfg-if 0.1.10",
"libc",
"rand",
"rand 0.7.3",
"redox_syscall",
"remove_dir_all",
"winapi 0.3.9",
@ -3191,18 +3262,18 @@ dependencies = [
[[package]]
name = "thiserror"
version = "1.0.22"
version = "1.0.23"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0e9ae34b84616eedaaf1e9dd6026dbe00dcafa92aa0c8077cb69df1fcfe5e53e"
checksum = "76cc616c6abf8c8928e2fdcc0dbfab37175edd8fb49a4641066ad1364fdab146"
dependencies = [
"thiserror-impl",
]
[[package]]
name = "thiserror-impl"
version = "1.0.22"
version = "1.0.23"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9ba20f23e85b10754cd195504aebf6a27e2e6cbe28c17778a0c930724628dd56"
checksum = "9be73a2caec27583d0046ef3796c3794f868a5bc813db689eed00c7631275cd1"
dependencies = [
"proc-macro2",
"quote",
@ -3229,9 +3300,9 @@ dependencies = [
[[package]]
name = "tiff"
version = "0.6.0"
version = "0.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "abeb4e3f32a8973722c0254189e6890358e72b1bf11becb287ee0b23c595a41d"
checksum = "9a53f4706d65497df0c4349241deddf35f84cee19c87ed86ea8ca590f4464437"
dependencies = [
"jpeg-decoder",
"miniz_oxide 0.4.3",
@ -3304,9 +3375,9 @@ checksum = "cda74da7e1a664f795bb1f8a87ec406fb89a02522cf6e50620d016add6dbbf5c"
[[package]]
name = "tokio"
version = "0.2.23"
version = "0.2.24"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a6d7ad61edd59bfcc7e80dababf0f4aed2e6d5e0ba1659356ae889752dfc12ff"
checksum = "099837d3464c16a808060bb3f02263b412f6fafcb5d01c533d309985fbeebe48"
dependencies = [
"bytes 0.5.6",
"fnv",
@ -3325,12 +3396,12 @@ dependencies = [
[[package]]
name = "tokio"
version = "0.3.5"
version = "0.3.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a12a3eb39ee2c231be64487f1fcbe726c8f2514876a55480a5ab8559fc374252"
checksum = "720ba21c25078711bf456d607987d95bce90f7c3bea5abe1db587862e7a1e87c"
dependencies = [
"autocfg",
"pin-project-lite 0.2.0",
"pin-project-lite 0.2.1",
]
[[package]]
@ -3341,7 +3412,7 @@ checksum = "e12831b255bcfa39dc0436b01e19fea231a37db570686c06ee72c423479f889a"
dependencies = [
"futures-core",
"rustls",
"tokio 0.2.23",
"tokio 0.2.24",
"webpki",
]
@ -3352,7 +3423,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9a70f4fcd7b3b24fb194f837560168208f669ca8cb70d0c4b862944452396343"
dependencies = [
"native-tls",
"tokio 0.2.23",
"tokio 0.2.24",
]
[[package]]
@ -3367,7 +3438,7 @@ dependencies = [
"futures-sink",
"log",
"pin-project-lite 0.1.11",
"tokio 0.2.23",
"tokio 0.2.24",
]
[[package]]
@ -3384,7 +3455,7 @@ checksum = "9f47026cdc4080c07e49b37087de021820269d996f581aac150ef9e5583eefe3"
dependencies = [
"cfg-if 1.0.0",
"log",
"pin-project-lite 0.2.0",
"pin-project-lite 0.2.1",
"tracing-core",
]
@ -3420,10 +3491,10 @@ dependencies = [
"idna",
"lazy_static",
"log",
"rand",
"rand 0.7.3",
"smallvec",
"thiserror",
"tokio 0.2.23",
"tokio 0.2.24",
"url",
]
@ -3443,7 +3514,7 @@ dependencies = [
"resolv-conf",
"smallvec",
"thiserror",
"tokio 0.2.23",
"tokio 0.2.24",
"trust-dns-proto",
]
@ -3557,7 +3628,7 @@ version = "0.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9fde2f6a4bea1d6e007c4ad38c6839fa71cbb63b6dbf5b595aa38dc9b1093c11"
dependencies = [
"rand",
"rand 0.7.3",
"serde 1.0.118",
]
@ -3595,9 +3666,9 @@ dependencies = [
[[package]]
name = "vcpkg"
version = "0.2.10"
version = "0.2.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6454029bf181f092ad1b853286f23e2c507d8e8194d01d92da4a55c274a5508c"
checksum = "b00bca6106a5e23f3eee943593759b7fcddb00554332e856d990c893966879fb"
[[package]]
name = "version_check"
@ -3701,30 +3772,6 @@ version = "0.2.69"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7e7811dd7f9398f14cc76efd356f98f03aa30419dea46aa810d71e819fc97158"
[[package]]
name = "wasm-bindgen-test"
version = "0.3.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0355fa0c1f9b792a09b6dcb6a8be24d51e71e6d74972f9eb4a44c4c004d24a25"
dependencies = [
"console_error_panic_hook",
"js-sys",
"scoped-tls",
"wasm-bindgen",
"wasm-bindgen-futures",
"wasm-bindgen-test-macro",
]
[[package]]
name = "wasm-bindgen-test-macro"
version = "0.3.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "27e07b46b98024c2ba2f9e83a10c2ef0515f057f2da299c1762a2017de80438b"
dependencies = [
"proc-macro2",
"quote",
]
[[package]]
name = "web-sys"
version = "0.3.46"


@ -3,17 +3,20 @@ name = "lemmy_server"
version = "0.0.1"
edition = "2018"
[profile.release]
lto = true
[profile.dev]
debug = 0
[workspace]
members = [
"lemmy_api",
"lemmy_apub",
"lemmy_utils",
"lemmy_db",
"lemmy_db_queries",
"lemmy_db_schema",
"lemmy_db_views",
"lemmy_db_views_actor",
"lemmy_db_views_actor",
"lemmy_structs",
"lemmy_rate_limit",
"lemmy_websocket",
]
@ -21,9 +24,12 @@ members = [
lemmy_api = { path = "./lemmy_api" }
lemmy_apub = { path = "./lemmy_apub" }
lemmy_utils = { path = "./lemmy_utils" }
lemmy_db = { path = "./lemmy_db" }
lemmy_db_schema = { path = "./lemmy_db_schema" }
lemmy_db_queries = { path = "lemmy_db_queries" }
lemmy_db_views = { path = "./lemmy_db_views" }
lemmy_db_views_moderator = { path = "./lemmy_db_views_moderator" }
lemmy_db_views_actor = { path = "lemmy_db_views_actor" }
lemmy_structs = { path = "./lemmy_structs" }
lemmy_rate_limit = { path = "./lemmy_rate_limit" }
lemmy_websocket = { path = "./lemmy_websocket" }
diesel = "1.4.5"
diesel_migrations = "1.4.0"
@ -40,12 +46,12 @@ strum = "0.20.0"
lazy_static = "1.4.0"
rss = "1.9.0"
url = { version = "2.2.0", features = ["serde"] }
openssl = "0.10.30"
openssl = "0.10.31"
http-signature-normalization-actix = { version = "0.4.1", default-features = false, features = ["sha-2"] }
tokio = "0.3.5"
tokio = "0.3.6"
sha2 = "0.9.2"
anyhow = "1.0.35"
reqwest = { version = "0.10.9", features = ["json"] }
anyhow = "1.0.36"
reqwest = { version = "0.10.10", features = ["json"] }
activitystreams = "0.7.0-alpha.8"
actix-rt = { version = "1.1.1", default-features = false }
serde_json = { version = "1.0.60", features = ["preserve_order"] }


@ -1,7 +1,7 @@
<div align="center">
![GitHub tag (latest SemVer)](https://img.shields.io/github/tag/LemmyNet/lemmy.svg)
[![Build Status](https://travis-ci.org/LemmyNet/lemmy.svg?branch=main)](https://travis-ci.org/LemmyNet/lemmy)
![Build Status](https://cloud.drone.io/api/badges/LemmyNet/lemmy/status.svg)
[![GitHub issues](https://img.shields.io/github/issues-raw/LemmyNet/lemmy.svg)](https://github.com/LemmyNet/lemmy/issues)
[![Docker Pulls](https://img.shields.io/docker/pulls/dessalines/lemmy.svg)](https://cloud.docker.com/repository/docker/dessalines/lemmy/)
[![Translation status](http://weblate.yerbamate.ml/widgets/lemmy/-/lemmy/svg-badge.svg)](http://weblate.yerbamate.ml/engage/lemmy/)
@ -27,6 +27,8 @@
<a href="https://github.com/LemmyNet/lemmy/issues">Request Feature</a>
·
<a href="https://github.com/LemmyNet/lemmy/blob/main/RELEASES.md">Releases</a>
·
<a href="https://lemmy.ml/docs/en/code_of_conduct.html">Code of Conduct</a>
</p>
</p>


@ -1 +1 @@
v0.8.10
0.9.0-rc.12

51
api_tests/.eslintrc.json Normal file

@ -0,0 +1,51 @@
{
"root": true,
"env": {
"browser": true
},
"plugins": [
"jane"
],
"extends": [
"plugin:jane/recommended",
"plugin:jane/typescript"
],
"parser": "@typescript-eslint/parser",
"parserOptions": {
"project": "./tsconfig.json",
"warnOnUnsupportedTypeScriptVersion": false
},
"rules": {
"@typescript-eslint/camelcase": 0,
"@typescript-eslint/member-delimiter-style": 0,
"@typescript-eslint/no-empty-interface": 0,
"@typescript-eslint/no-explicit-any": 0,
"@typescript-eslint/no-this-alias": 0,
"@typescript-eslint/no-unused-vars": 0,
"@typescript-eslint/no-use-before-define": 0,
"@typescript-eslint/no-useless-constructor": 0,
"arrow-body-style": 0,
"curly": 0,
"eol-last": 0,
"eqeqeq": 0,
"func-style": 0,
"import/no-duplicates": 0,
"max-statements": 0,
"max-params": 0,
"new-cap": 0,
"no-console": 0,
"no-duplicate-imports": 0,
"no-extra-parens": 0,
"no-return-assign": 0,
"no-throw-literal": 0,
"no-trailing-spaces": 0,
"no-unused-expressions": 0,
"no-useless-constructor": 0,
"no-useless-escape": 0,
"no-var": 0,
"prefer-const": 0,
"prefer-rest-params": 0,
"quote-props": 0,
"unicorn/filename-case": 0
}
}

4
api_tests/.prettierrc.js Normal file

@ -0,0 +1,4 @@
module.exports = Object.assign(require('eslint-plugin-jane/prettier-ts'), {
arrowParens: 'avoid',
semi: true,
});


@ -7,14 +7,19 @@
"author": "Dessalines",
"license": "AGPL-3.0",
"scripts": {
"lint": "tsc --noEmit && eslint --report-unused-disable-directives --ext .js,.ts,.tsx src",
"fix": "prettier --write src && eslint --fix src",
"api-test": "jest src/ -i --verbose"
},
"devDependencies": {
"@types/jest": "^26.0.14",
"jest": "^26.4.2",
"lemmy-js-client": "^1.0.14",
"@types/jest": "^26.0.19",
"jest": "^26.6.3",
"lemmy-js-client": "0.9.0-rc.12",
"node-fetch": "^2.6.1",
"ts-jest": "^26.4.1",
"typescript": "^4.0.3"
"ts-jest": "^26.4.4",
"prettier": "^2.1.2",
"eslint": "^7.10.0",
"eslint-plugin-jane": "^9.0.3",
"typescript": "^4.1.3"
}
}


@ -0,0 +1,90 @@
#!/bin/bash
set -e
export LEMMY_JWT_SECRET=changeme
export LEMMY_FEDERATION__ENABLED=true
export LEMMY_TLS_ENABLED=false
export LEMMY_SETUP__ADMIN_PASSWORD=lemmy
export LEMMY_RATE_LIMIT__POST=99999
export LEMMY_RATE_LIMIT__REGISTER=99999
export LEMMY_CAPTCHA__ENABLED=false
export LEMMY_TEST_SEND_SYNC=1
export RUST_BACKTRACE=1
for INSTANCE in lemmy_alpha lemmy_beta lemmy_gamma lemmy_delta lemmy_epsilon; do
psql "${LEMMY_DATABASE_URL}/lemmy" -c "DROP DATABASE IF EXISTS $INSTANCE"
psql "${LEMMY_DATABASE_URL}/lemmy" -c "CREATE DATABASE $INSTANCE"
done
if [ -z "$DO_WRITE_HOSTS_FILE" ]; then
if ! grep -q lemmy-alpha /etc/hosts; then
echo "Please add the following to your /etc/hosts file, then press enter:
127.0.0.1 lemmy-alpha
127.0.0.1 lemmy-beta
127.0.0.1 lemmy-gamma
127.0.0.1 lemmy-delta
127.0.0.1 lemmy-epsilon"
read -p ""
fi
else
for INSTANCE in lemmy-alpha lemmy-beta lemmy-gamma lemmy-delta lemmy-epsilon; do
echo "127.0.0.1 $INSTANCE" >> /etc/hosts
done
fi
killall lemmy_server || true
echo "start alpha"
LEMMY_HOSTNAME=lemmy-alpha:8541 \
LEMMY_PORT=8541 \
LEMMY_DATABASE_URL="${LEMMY_DATABASE_URL}/lemmy_alpha" \
LEMMY_FEDERATION__ALLOWED_INSTANCES=lemmy-beta,lemmy-gamma,lemmy-delta,lemmy-epsilon \
LEMMY_SETUP__ADMIN_USERNAME=lemmy_alpha \
LEMMY_SETUP__SITE_NAME=lemmy-alpha \
target/lemmy_server >/dev/null 2>&1 &
echo "start beta"
LEMMY_HOSTNAME=lemmy-beta:8551 \
LEMMY_PORT=8551 \
LEMMY_DATABASE_URL="${LEMMY_DATABASE_URL}/lemmy_beta" \
LEMMY_FEDERATION__ALLOWED_INSTANCES=lemmy-alpha,lemmy-gamma,lemmy-delta,lemmy-epsilon \
LEMMY_SETUP__ADMIN_USERNAME=lemmy_beta \
LEMMY_SETUP__SITE_NAME=lemmy-beta \
target/lemmy_server >/dev/null 2>&1 &
echo "start gamma"
LEMMY_HOSTNAME=lemmy-gamma:8561 \
LEMMY_PORT=8561 \
LEMMY_DATABASE_URL="${LEMMY_DATABASE_URL}/lemmy_gamma" \
LEMMY_FEDERATION__ALLOWED_INSTANCES=lemmy-alpha,lemmy-beta,lemmy-delta,lemmy-epsilon \
LEMMY_SETUP__ADMIN_USERNAME=lemmy_gamma \
LEMMY_SETUP__SITE_NAME=lemmy-gamma \
target/lemmy_server >/dev/null 2>&1 &
echo "start delta"
# An instance with only an allowlist for beta
LEMMY_HOSTNAME=lemmy-delta:8571 \
LEMMY_PORT=8571 \
LEMMY_DATABASE_URL="${LEMMY_DATABASE_URL}/lemmy_delta" \
LEMMY_FEDERATION__ALLOWED_INSTANCES=lemmy-beta \
LEMMY_SETUP__ADMIN_USERNAME=lemmy_delta \
LEMMY_SETUP__SITE_NAME=lemmy-delta \
target/lemmy_server >/dev/null 2>&1 &
echo "start epsilon"
# An instance that has a blocklist, with lemmy-alpha blocked
LEMMY_HOSTNAME=lemmy-epsilon:8581 \
LEMMY_PORT=8581 \
LEMMY_DATABASE_URL="${LEMMY_DATABASE_URL}/lemmy_epsilon" \
LEMMY_FEDERATION__BLOCKED_INSTANCES=lemmy-alpha \
LEMMY_SETUP__ADMIN_USERNAME=lemmy_epsilon \
LEMMY_SETUP__SITE_NAME=lemmy-epsilon \
target/lemmy_server >/dev/null 2>&1 &
echo "wait for all instances to start"
while [[ "$(curl -s -o /dev/null -w '%{http_code}' 'localhost:8541/api/v2/site')" != "200" ]]; do sleep 1; done
while [[ "$(curl -s -o /dev/null -w '%{http_code}' 'localhost:8551/api/v2/site')" != "200" ]]; do sleep 1; done
while [[ "$(curl -s -o /dev/null -w '%{http_code}' 'localhost:8561/api/v2/site')" != "200" ]]; do sleep 1; done
while [[ "$(curl -s -o /dev/null -w '%{http_code}' 'localhost:8571/api/v2/site')" != "200" ]]; do sleep 1; done
while [[ "$(curl -s -o /dev/null -w '%{http_code}' 'localhost:8581/api/v2/site')" != "200" ]]; do sleep 1; done


@ -0,0 +1,20 @@
#!/bin/bash
set -e
export LEMMY_DATABASE_URL=postgres://lemmy:password@localhost:5432
pushd ..
cargo +1.47.0 build
rm target/lemmy_server || true
cp target/debug/lemmy_server target/lemmy_server
./api_tests/prepare-drone-federation-test.sh
popd
yarn
yarn api-test || true
killall lemmy_server
for INSTANCE in lemmy_alpha lemmy_beta lemmy_gamma lemmy_delta lemmy_epsilon; do
psql "$LEMMY_DATABASE_URL" -c "DROP DATABASE $INSTANCE"
done


@ -11,7 +11,7 @@ import {
followBeta,
searchForBetaCommunity,
createComment,
updateComment,
editComment,
deleteComment,
removeComment,
getMentions,
@ -20,12 +20,8 @@ import {
createCommunity,
registerUser,
API,
delay,
longDelay,
} from './shared';
import {
Comment,
} from 'lemmy-js-client';
import { CommentView } from 'lemmy-js-client';
import { PostResponse } from 'lemmy-js-client';
@ -36,10 +32,9 @@ beforeAll(async () => {
await followBeta(alpha);
await followBeta(gamma);
let search = await searchForBetaCommunity(alpha);
await longDelay();
postRes = await createPost(
alpha,
search.communities.filter(c => c.local == false)[0].id
search.communities.find(c => c.community.local == false).community.id
);
});
@ -49,34 +44,34 @@ afterAll(async () => {
});
function assertCommentFederation(
commentOne: Comment,
commentTwo: Comment) {
expect(commentOne.ap_id).toBe(commentOne.ap_id);
expect(commentOne.content).toBe(commentTwo.content);
expect(commentOne.creator_name).toBe(commentTwo.creator_name);
expect(commentOne.community_actor_id).toBe(commentTwo.community_actor_id);
expect(commentOne.published).toBe(commentTwo.published);
expect(commentOne.updated).toBe(commentOne.updated);
expect(commentOne.deleted).toBe(commentOne.deleted);
expect(commentOne.removed).toBe(commentOne.removed);
commentOne: CommentView,
commentTwo: CommentView
) {
expect(commentOne.comment.ap_id).toBe(commentOne.comment.ap_id);
expect(commentOne.comment.content).toBe(commentTwo.comment.content);
expect(commentOne.creator.name).toBe(commentTwo.creator.name);
expect(commentOne.community.actor_id).toBe(commentTwo.community.actor_id);
expect(commentOne.comment.published).toBe(commentTwo.comment.published);
expect(commentOne.comment.updated).toBe(commentOne.comment.updated);
expect(commentOne.comment.deleted).toBe(commentOne.comment.deleted);
expect(commentOne.comment.removed).toBe(commentOne.comment.removed);
}
test('Create a comment', async () => {
let commentRes = await createComment(alpha, postRes.post.id);
expect(commentRes.comment.content).toBeDefined();
expect(commentRes.comment.community_local).toBe(false);
expect(commentRes.comment.creator_local).toBe(true);
expect(commentRes.comment.score).toBe(1);
await longDelay();
let commentRes = await createComment(alpha, postRes.post_view.post.id);
expect(commentRes.comment_view.comment.content).toBeDefined();
expect(commentRes.comment_view.community.local).toBe(false);
expect(commentRes.comment_view.creator.local).toBe(true);
expect(commentRes.comment_view.counts.score).toBe(1);
// Make sure that comment is liked on beta
let searchBeta = await searchComment(beta, commentRes.comment);
let searchBeta = await searchComment(beta, commentRes.comment_view.comment);
let betaComment = searchBeta.comments[0];
expect(betaComment).toBeDefined();
expect(betaComment.community_local).toBe(true);
expect(betaComment.creator_local).toBe(false);
expect(betaComment.score).toBe(1);
assertCommentFederation(betaComment, commentRes.comment);
expect(betaComment.community.local).toBe(true);
expect(betaComment.creator.local).toBe(false);
expect(betaComment.counts.score).toBe(1);
assertCommentFederation(betaComment, commentRes.comment_view);
});
test('Create a comment in a non-existent post', async () => {
@ -85,83 +80,90 @@ test('Create a comment in a non-existent post', async () => {
});
test('Update a comment', async () => {
let commentRes = await createComment(alpha, postRes.post.id);
let commentRes = await createComment(alpha, postRes.post_view.post.id);
// Federate the comment first
let searchBeta = await searchComment(beta, commentRes.comment);
assertCommentFederation(searchBeta.comments[0], commentRes.comment);
let searchBeta = await searchComment(beta, commentRes.comment_view.comment);
assertCommentFederation(searchBeta.comments[0], commentRes.comment_view);
await delay();
let updateCommentRes = await updateComment(alpha, commentRes.comment.id);
expect(updateCommentRes.comment.content).toBe(
let updateCommentRes = await editComment(
alpha,
commentRes.comment_view.comment.id
);
expect(updateCommentRes.comment_view.comment.content).toBe(
'A jest test federated comment update'
);
expect(updateCommentRes.comment.community_local).toBe(false);
expect(updateCommentRes.comment.creator_local).toBe(true);
await delay();
expect(updateCommentRes.comment_view.community.local).toBe(false);
expect(updateCommentRes.comment_view.creator.local).toBe(true);
// Make sure that post is updated on beta
let searchBetaUpdated = await searchComment(beta, commentRes.comment);
assertCommentFederation(searchBetaUpdated.comments[0], updateCommentRes.comment);
let searchBetaUpdated = await searchComment(
beta,
commentRes.comment_view.comment
);
assertCommentFederation(
searchBetaUpdated.comments[0],
updateCommentRes.comment_view
);
});
test('Delete a comment', async () => {
let commentRes = await createComment(alpha, postRes.post.id);
await delay();
let commentRes = await createComment(alpha, postRes.post_view.post.id);
let deleteCommentRes = await deleteComment(
alpha,
true,
commentRes.comment.id
commentRes.comment_view.comment.id
);
expect(deleteCommentRes.comment.deleted).toBe(true);
await delay();
expect(deleteCommentRes.comment_view.comment.deleted).toBe(true);
// Make sure that comment is undefined on beta
let searchBeta = await searchComment(beta, commentRes.comment);
let searchBeta = await searchComment(beta, commentRes.comment_view.comment);
let betaComment = searchBeta.comments[0];
expect(betaComment).toBeUndefined();
await delay();
let undeleteCommentRes = await deleteComment(
alpha,
false,
commentRes.comment.id
commentRes.comment_view.comment.id
);
expect(undeleteCommentRes.comment.deleted).toBe(false);
await delay();
expect(undeleteCommentRes.comment_view.comment.deleted).toBe(false);
// Make sure that comment is undeleted on beta
let searchBeta2 = await searchComment(beta, commentRes.comment);
let searchBeta2 = await searchComment(beta, commentRes.comment_view.comment);
let betaComment2 = searchBeta2.comments[0];
expect(betaComment2.deleted).toBe(false);
assertCommentFederation(searchBeta2.comments[0], undeleteCommentRes.comment);
expect(betaComment2.comment.deleted).toBe(false);
assertCommentFederation(
searchBeta2.comments[0],
undeleteCommentRes.comment_view
);
});
test('Remove a comment from admin and community on the same instance', async () => {
let commentRes = await createComment(alpha, postRes.post.id);
await delay();
let commentRes = await createComment(alpha, postRes.post_view.post.id);
// Get the id for beta
let betaCommentId = (await searchComment(beta, commentRes.comment))
.comments[0].id;
let betaCommentId = (
await searchComment(beta, commentRes.comment_view.comment)
).comments[0].comment.id;
// The beta admin removes it (the community lives on beta)
let removeCommentRes = await removeComment(beta, true, betaCommentId);
expect(removeCommentRes.comment.removed).toBe(true);
await longDelay();
expect(removeCommentRes.comment_view.comment.removed).toBe(true);
// Make sure that comment is removed on alpha (it gets pushed since an admin from beta removed it)
let refetchedPost = await getPost(alpha, postRes.post.id);
expect(refetchedPost.comments[0].removed).toBe(true);
let refetchedPost = await getPost(alpha, postRes.post_view.post.id);
expect(refetchedPost.comments[0].comment.removed).toBe(true);
let unremoveCommentRes = await removeComment(beta, false, betaCommentId);
expect(unremoveCommentRes.comment.removed).toBe(false);
await longDelay();
expect(unremoveCommentRes.comment_view.comment.removed).toBe(false);
// Make sure that comment is unremoved on beta
let refetchedPost2 = await getPost(alpha, postRes.post.id);
expect(refetchedPost2.comments[0].removed).toBe(false);
assertCommentFederation(refetchedPost2.comments[0], unremoveCommentRes.comment);
let refetchedPost2 = await getPost(alpha, postRes.post_view.post.id);
expect(refetchedPost2.comments[0].comment.removed).toBe(false);
assertCommentFederation(
refetchedPost2.comments[0],
unremoveCommentRes.comment_view
);
});
test('Remove a comment from admin and community on different instance', async () => {
@ -173,160 +175,155 @@ test('Remove a comment from admin and community on different instance', async ()
// New alpha user creates a community, post, and comment.
let newCommunity = await createCommunity(newAlphaApi);
await delay();
let newPost = await createPost(newAlphaApi, newCommunity.community.id);
await delay();
let commentRes = await createComment(newAlphaApi, newPost.post.id);
expect(commentRes.comment.content).toBeDefined();
await delay();
let newPost = await createPost(
newAlphaApi,
newCommunity.community_view.community.id
);
let commentRes = await createComment(newAlphaApi, newPost.post_view.post.id);
expect(commentRes.comment_view.comment.content).toBeDefined();
// Beta searches that to cache it, then removes it
let searchBeta = await searchComment(beta, commentRes.comment);
let searchBeta = await searchComment(beta, commentRes.comment_view.comment);
let betaComment = searchBeta.comments[0];
let removeCommentRes = await removeComment(beta, true, betaComment.id);
expect(removeCommentRes.comment.removed).toBe(true);
await delay();
let removeCommentRes = await removeComment(
beta,
true,
betaComment.comment.id
);
expect(removeCommentRes.comment_view.comment.removed).toBe(true);
// Make sure its not removed on alpha
let refetchedPost = await getPost(newAlphaApi, newPost.post.id);
expect(refetchedPost.comments[0].removed).toBe(false);
assertCommentFederation(refetchedPost.comments[0], commentRes.comment);
let refetchedPost = await getPost(newAlphaApi, newPost.post_view.post.id);
expect(refetchedPost.comments[0].comment.removed).toBe(false);
assertCommentFederation(refetchedPost.comments[0], commentRes.comment_view);
});
test('Unlike a comment', async () => {
let commentRes = await createComment(alpha, postRes.post.id);
await delay();
let unlike = await likeComment(alpha, 0, commentRes.comment);
expect(unlike.comment.score).toBe(0);
await delay();
let commentRes = await createComment(alpha, postRes.post_view.post.id);
let unlike = await likeComment(alpha, 0, commentRes.comment_view.comment);
expect(unlike.comment_view.counts.score).toBe(0);
// Make sure that post is unliked on beta
let searchBeta = await searchComment(beta, commentRes.comment);
let searchBeta = await searchComment(beta, commentRes.comment_view.comment);
let betaComment = searchBeta.comments[0];
expect(betaComment).toBeDefined();
expect(betaComment.community_local).toBe(true);
expect(betaComment.creator_local).toBe(false);
expect(betaComment.score).toBe(0);
expect(betaComment.community.local).toBe(true);
expect(betaComment.creator.local).toBe(false);
expect(betaComment.counts.score).toBe(0);
});
test('Federated comment like', async () => {
let commentRes = await createComment(alpha, postRes.post.id);
await longDelay();
let commentRes = await createComment(alpha, postRes.post_view.post.id);
// Find the comment on beta
let searchBeta = await searchComment(beta, commentRes.comment);
let searchBeta = await searchComment(beta, commentRes.comment_view.comment);
let betaComment = searchBeta.comments[0];
let like = await likeComment(beta, 1, betaComment);
expect(like.comment.score).toBe(2);
await longDelay();
let like = await likeComment(beta, 1, betaComment.comment);
expect(like.comment_view.counts.score).toBe(2);
// Get the post from alpha, check the likes
let post = await getPost(alpha, postRes.post.id);
expect(post.comments[0].score).toBe(2);
let post = await getPost(alpha, postRes.post_view.post.id);
expect(post.comments[0].counts.score).toBe(2);
});
test('Reply to a comment', async () => {
// Create a comment on alpha, find it on beta
let commentRes = await createComment(alpha, postRes.post_view.post.id);
let searchBeta = await searchComment(beta, commentRes.comment_view.comment);
let betaComment = searchBeta.comments[0];
// Reply from beta
let replyRes = await createComment(
beta,
betaComment.post.id,
betaComment.comment.id
);
expect(replyRes.comment_view.comment.content).toBeDefined();
expect(replyRes.comment_view.community.local).toBe(true);
expect(replyRes.comment_view.creator.local).toBe(true);
expect(replyRes.comment_view.comment.parent_id).toBe(betaComment.comment.id);
expect(replyRes.comment_view.counts.score).toBe(1);
// Make sure that comment is seen on alpha
// TODO not sure why, but a searchComment back to alpha, for the ap_id of beta's
// comment, isn't working.
// let searchAlpha = await searchComment(alpha, replyRes.comment);
let post = await getPost(alpha, postRes.post_view.post.id);
let alphaComment = post.comments[0];
expect(alphaComment.comment.content).toBeDefined();
expect(alphaComment.comment.parent_id).toBe(post.comments[1].comment.id);
expect(alphaComment.community.local).toBe(false);
expect(alphaComment.creator.local).toBe(false);
expect(alphaComment.counts.score).toBe(1);
assertCommentFederation(alphaComment, replyRes.comment_view);
});
test('Mention beta', async () => {
// Create a mention on alpha
let mentionContent = 'A test mention of @lemmy_beta@lemmy-beta:8551';
let commentRes = await createComment(alpha, postRes.post_view.post.id);
let mentionRes = await createComment(
alpha,
postRes.post_view.post.id,
commentRes.comment_view.comment.id,
mentionContent
);
expect(mentionRes.comment_view.comment.content).toBeDefined();
expect(mentionRes.comment_view.community.local).toBe(false);
expect(mentionRes.comment_view.creator.local).toBe(true);
expect(mentionRes.comment_view.counts.score).toBe(1);
let mentionsRes = await getMentions(beta);
expect(mentionsRes.mentions[0].comment.content).toBeDefined();
expect(mentionsRes.mentions[0].community.local).toBe(true);
expect(mentionsRes.mentions[0].creator.local).toBe(false);
expect(mentionsRes.mentions[0].counts.score).toBe(1);
});
test('Comment Search', async () => {
let commentRes = await createComment(alpha, postRes.post_view.post.id);
let searchBeta = await searchComment(beta, commentRes.comment_view.comment);
assertCommentFederation(searchBeta.comments[0], commentRes.comment_view);
});
test('A and G subscribe to B (center) A posts, G mentions B, it gets announced to A', async () => {
// Create a local post
let alphaPost = await createPost(alpha, 2);
expect(alphaPost.post_view.community.local).toBe(true);
// Make sure gamma sees it
let search = await searchPost(gamma, alphaPost.post_view.post);
let gammaPost = search.posts[0];
let commentContent =
'A jest test federated comment announce, lets mention @lemmy_beta@lemmy-beta:8551';
let commentRes = await createComment(
gamma,
gammaPost.post.id,
undefined,
commentContent
);
expect(commentRes.comment_view.comment.content).toBe(commentContent);
expect(commentRes.comment_view.community.local).toBe(false);
expect(commentRes.comment_view.creator.local).toBe(true);
expect(commentRes.comment_view.counts.score).toBe(1);
// Make sure alpha sees it
let alphaPost2 = await getPost(alpha, alphaPost.post_view.post.id);
expect(alphaPost2.comments[0].comment.content).toBe(commentContent);
expect(alphaPost2.comments[0].community.local).toBe(true);
expect(alphaPost2.comments[0].creator.local).toBe(false);
expect(alphaPost2.comments[0].counts.score).toBe(1);
assertCommentFederation(alphaPost2.comments[0], commentRes.comment_view);
// Make sure beta has mentions
let mentionsRes = await getMentions(beta);
expect(mentionsRes.mentions[0].comment.content).toBe(commentContent);
expect(mentionsRes.mentions[0].community.local).toBe(false);
expect(mentionsRes.mentions[0].creator.local).toBe(false);
// TODO this is failing because fetchInReplyTos aren't getting score
// expect(mentionsRes.mentions[0].score).toBe(1);
});
@ -335,60 +332,60 @@ test('Fetch in_reply_tos: A is unsubbed from B, B makes a post, and some embedde
// Unfollow all remote communities
let followed = await unfollowRemotes(alpha);
expect(
followed.communities.filter(c => c.community.local == false).length
).toBe(0);
// B creates a post, and two comments, should be invisible to A
let postRes = await createPost(beta, 2);
expect(postRes.post_view.post.name).toBeDefined();
let parentCommentContent = 'An invisible top level comment from beta';
let parentCommentRes = await createComment(
beta,
postRes.post_view.post.id,
undefined,
parentCommentContent
);
expect(parentCommentRes.comment_view.comment.content).toBe(
parentCommentContent
);
// B creates a comment, then a child one of that.
let childCommentContent = 'An invisible child comment from beta';
let childCommentRes = await createComment(
beta,
postRes.post_view.post.id,
parentCommentRes.comment_view.comment.id,
childCommentContent
);
expect(childCommentRes.comment_view.comment.content).toBe(
childCommentContent
);
// Follow beta again
let follow = await followBeta(alpha);
expect(follow.community_view.community.local).toBe(false);
expect(follow.community_view.community.name).toBe('main');
// An update to the child comment on beta, should push the post, parent, and child to alpha now
let updatedCommentContent = 'An update child comment from beta';
let updateRes = await editComment(
beta,
childCommentRes.comment_view.comment.id,
updatedCommentContent
);
expect(updateRes.comment_view.comment.content).toBe(updatedCommentContent);
// Get the post from alpha
let search = await searchPost(alpha, postRes.post_view.post);
let alphaPostB = search.posts[0];
let alphaPost = await getPost(alpha, alphaPostB.post.id);
expect(alphaPost.post_view.post.name).toBeDefined();
assertCommentFederation(alphaPost.comments[1], parentCommentRes.comment_view);
assertCommentFederation(alphaPost.comments[0], updateRes.comment_view);
expect(alphaPost.post_view.community.local).toBe(false);
expect(alphaPost.post_view.creator.local).toBe(false);
await unfollowRemotes(alpha);
});

View file

@ -3,155 +3,167 @@ import {
alpha,
beta,
setupLogins,
searchForCommunity,
createCommunity,
deleteCommunity,
removeCommunity,
getCommunity,
followCommunity,
} from './shared';
import { CommunityView } from 'lemmy-js-client';
beforeAll(async () => {
await setupLogins();
});
function assertCommunityFederation(
communityOne: CommunityView,
communityTwo: CommunityView
) {
expect(communityOne.community.actor_id).toBe(communityTwo.community.actor_id);
expect(communityOne.community.name).toBe(communityTwo.community.name);
expect(communityOne.community.title).toBe(communityTwo.community.title);
expect(communityOne.community.description).toBe(
communityTwo.community.description
);
expect(communityOne.community.icon).toBe(communityTwo.community.icon);
expect(communityOne.community.banner).toBe(communityTwo.community.banner);
expect(communityOne.community.published).toBe(
communityTwo.community.published
);
expect(communityOne.creator.actor_id).toBe(communityTwo.creator.actor_id);
expect(communityOne.community.nsfw).toBe(communityTwo.community.nsfw);
expect(communityOne.community.category_id).toBe(
communityTwo.community.category_id
);
expect(communityOne.community.removed).toBe(communityTwo.community.removed);
expect(communityOne.community.deleted).toBe(communityTwo.community.deleted);
}
test('Create community', async () => {
let communityRes = await createCommunity(alpha);
expect(communityRes.community_view.community.name).toBeDefined();
// A dupe check
let prevName = communityRes.community_view.community.name;
let communityRes2: any = await createCommunity(alpha, prevName);
expect(communityRes2['error']).toBe('community_already_exists');
// Cache the community on beta, make sure it has the other fields
let searchShort = `!${prevName}@lemmy-alpha:8541`;
let search = await searchForCommunity(beta, searchShort);
let communityOnBeta = search.communities[0];
assertCommunityFederation(communityOnBeta, communityRes.community_view);
});
test('Delete community', async () => {
let communityRes = await createCommunity(beta);
// Cache the community on Alpha
let searchShort = `!${communityRes.community_view.community.name}@lemmy-beta:8551`;
let search = await searchForCommunity(alpha, searchShort);
let communityOnAlpha = search.communities[0];
assertCommunityFederation(communityOnAlpha, communityRes.community_view);
// Follow the community from alpha
let follow = await followCommunity(
alpha,
true,
communityOnAlpha.community.id
);
// Make sure the follow response went through
expect(follow.community_view.community.local).toBe(false);
let deleteCommunityRes = await deleteCommunity(
beta,
true,
communityRes.community_view.community.id
);
expect(deleteCommunityRes.community_view.community.deleted).toBe(true);
// Make sure it got deleted on A
let communityOnAlphaDeleted = await getCommunity(
alpha,
communityOnAlpha.community.id
);
expect(communityOnAlphaDeleted.community_view.community.deleted).toBe(true);
// Undelete
let undeleteCommunityRes = await deleteCommunity(
beta,
false,
communityRes.community_view.community.id
);
expect(undeleteCommunityRes.community_view.community.deleted).toBe(false);
// Make sure it got undeleted on A
let communityOnAlphaUnDeleted = await getCommunity(
alpha,
communityOnAlpha.community.id
);
expect(communityOnAlphaUnDeleted.community_view.community.deleted).toBe(
false
);
});
test('Remove community', async () => {
let communityRes = await createCommunity(beta);
// Cache the community on Alpha
let searchShort = `!${communityRes.community_view.community.name}@lemmy-beta:8551`;
let search = await searchForCommunity(alpha, searchShort);
let communityOnAlpha = search.communities[0];
assertCommunityFederation(communityOnAlpha, communityRes.community_view);
// Follow the community from alpha
let follow = await followCommunity(
alpha,
true,
communityOnAlpha.community.id
);
// Make sure the follow response went through
expect(follow.community_view.community.local).toBe(false);
let removeCommunityRes = await removeCommunity(
beta,
true,
communityRes.community_view.community.id
);
expect(removeCommunityRes.community_view.community.removed).toBe(true);
// Make sure it got removed on A
let communityOnAlphaRemoved = await getCommunity(
alpha,
communityOnAlpha.community.id
);
expect(communityOnAlphaRemoved.community_view.community.removed).toBe(true);
// unremove
let unremoveCommunityRes = await removeCommunity(
beta,
false,
communityRes.community_view.community.id
);
expect(unremoveCommunityRes.community_view.community.removed).toBe(false);
// Make sure it got unremoved on A
let communityOnAlphaUnRemoved = await getCommunity(
alpha,
communityOnAlpha.community.id
);
expect(communityOnAlphaUnRemoved.community_view.community.removed).toBe(
false
);
});
test('Search for beta community', async () => {
let communityRes = await createCommunity(beta);
expect(communityRes.community_view.community.name).toBeDefined();
let searchShort = `!${communityRes.community_view.community.name}@lemmy-beta:8551`;
let search = await searchForCommunity(alpha, searchShort);
let communityOnAlpha = search.communities[0];
assertCommunityFederation(communityOnAlpha, communityRes.community_view);
});

View file

@ -6,8 +6,6 @@ import {
followCommunity,
checkFollowedCommunities,
unfollowRemotes,
} from './shared';
beforeAll(async () => {
@ -20,25 +18,26 @@ afterAll(async () => {
test('Follow federated community', async () => {
let search = await searchForBetaCommunity(alpha); // TODO sometimes this is returning null?
let follow = await followCommunity(
alpha,
true,
search.communities[0].community.id
);
// Make sure the follow response went through
expect(follow.community_view.community.local).toBe(false);
expect(follow.community_view.community.name).toBe('main');
// Check it from local
let followCheck = await checkFollowedCommunities(alpha);
let remoteCommunityId = followCheck.communities.find(
c => c.community.local == false
).community.id;
expect(remoteCommunityId).toBeDefined();
// Test an unfollow
let unfollow = await followCommunity(alpha, false, remoteCommunityId);
expect(unfollow.community_view.community.local).toBe(false);
// Make sure you are unsubbed locally
let unfollowCheck = await checkFollowedCommunities(alpha);

View file

@ -7,7 +7,7 @@ import {
epsilon,
setupLogins,
createPost,
editPost,
stickyPost,
lockPost,
searchPost,
@ -19,77 +19,72 @@ import {
removePost,
getPost,
unfollowRemotes,
searchForUser,
banUserFromSite,
searchPostLocal,
banUserFromCommunity,
} from './shared';
import { PostView, CommunityView } from 'lemmy-js-client';
let betaCommunity: CommunityView;
beforeAll(async () => {
await setupLogins();
let search = await searchForBetaCommunity(alpha);
betaCommunity = search.communities[0];
await unfollows();
});
afterAll(async () => {
await unfollows();
});
async function unfollows() {
await unfollowRemotes(alpha);
await unfollowRemotes(gamma);
await unfollowRemotes(delta);
await unfollowRemotes(epsilon);
}
function assertPostFederation(postOne: PostView, postTwo: PostView) {
expect(postOne.post.ap_id).toBe(postTwo.post.ap_id);
expect(postOne.post.name).toBe(postTwo.post.name);
expect(postOne.post.body).toBe(postTwo.post.body);
expect(postOne.post.url).toBe(postTwo.post.url);
expect(postOne.post.nsfw).toBe(postTwo.post.nsfw);
expect(postOne.post.embed_title).toBe(postTwo.post.embed_title);
expect(postOne.post.embed_description).toBe(postTwo.post.embed_description);
expect(postOne.post.embed_html).toBe(postTwo.post.embed_html);
expect(postOne.post.published).toBe(postTwo.post.published);
expect(postOne.community.actor_id).toBe(postTwo.community.actor_id);
expect(postOne.post.locked).toBe(postTwo.post.locked);
expect(postOne.post.removed).toBe(postTwo.post.removed);
expect(postOne.post.deleted).toBe(postTwo.post.deleted);
}
test('Create a post', async () => {
let postRes = await createPost(alpha, betaCommunity.community.id);
expect(postRes.post_view.post).toBeDefined();
expect(postRes.post_view.community.local).toBe(false);
expect(postRes.post_view.creator.local).toBe(true);
expect(postRes.post_view.counts.score).toBe(1);
// Make sure that post is liked on beta
let searchBeta = await searchPost(beta, postRes.post_view.post);
let betaPost = searchBeta.posts[0];
expect(betaPost).toBeDefined();
expect(betaPost.community.local).toBe(true);
expect(betaPost.creator.local).toBe(false);
expect(betaPost.counts.score).toBe(1);
assertPostFederation(betaPost, postRes.post_view);
// Delta only follows beta, so it should not see an alpha ap_id
let searchDelta = await searchPost(delta, postRes.post_view.post);
expect(searchDelta.posts[0]).toBeUndefined();
// Epsilon has alpha blocked, it should not see the alpha post
let searchEpsilon = await searchPost(epsilon, postRes.post_view.post);
expect(searchEpsilon.posts[0]).toBeUndefined();
});
@ -99,275 +94,234 @@ test('Create a post in a non-existent community', async () => {
});
test('Unlike a post', async () => {
let postRes = await createPost(alpha, betaCommunity.community.id);
let unlike = await likePost(alpha, 0, postRes.post_view.post);
expect(unlike.post_view.counts.score).toBe(0);
// Try to unlike it again, make sure it stays at 0
let unlike2 = await likePost(alpha, 0, postRes.post_view.post);
expect(unlike2.post_view.counts.score).toBe(0);
// Make sure that post is unliked on beta
let searchBeta = await searchPost(beta, postRes.post_view.post);
let betaPost = searchBeta.posts[0];
expect(betaPost).toBeDefined();
expect(betaPost.community.local).toBe(true);
expect(betaPost.creator.local).toBe(false);
expect(betaPost.counts.score).toBe(0);
assertPostFederation(betaPost, postRes.post_view);
});
test('Update a post', async () => {
let postRes = await createPost(alpha, betaCommunity.community.id);
let updatedName = 'A jest test federated post, updated';
let updatedPost = await editPost(alpha, postRes.post_view.post);
expect(updatedPost.post_view.post.name).toBe(updatedName);
expect(updatedPost.post_view.community.local).toBe(false);
expect(updatedPost.post_view.creator.local).toBe(true);
// Make sure that post is updated on beta
let searchBeta = await searchPost(beta, postRes.post_view.post);
let betaPost = searchBeta.posts[0];
expect(betaPost.community.local).toBe(true);
expect(betaPost.creator.local).toBe(false);
expect(betaPost.post.name).toBe(updatedName);
assertPostFederation(betaPost, updatedPost.post_view);
// Make sure lemmy beta cannot update the post
let updatedPostBeta = await editPost(beta, betaPost.post);
expect(updatedPostBeta).toStrictEqual({ error: 'no_post_edit_allowed' });
});
test('Sticky a post', async () => {
let postRes = await createPost(alpha, betaCommunity.community.id);
let stickiedPostRes = await stickyPost(alpha, true, postRes.post_view.post);
expect(stickiedPostRes.post_view.post.stickied).toBe(true);
// Make sure that post is stickied on beta
let searchBeta = await searchPost(beta, postRes.post_view.post);
let betaPost = searchBeta.posts[0];
expect(betaPost.community.local).toBe(true);
expect(betaPost.creator.local).toBe(false);
expect(betaPost.post.stickied).toBe(true);
// Unsticky a post
let unstickiedPost = await stickyPost(alpha, false, postRes.post_view.post);
expect(unstickiedPost.post_view.post.stickied).toBe(false);
// Make sure that post is unstickied on beta
let searchBeta2 = await searchPost(beta, postRes.post_view.post);
let betaPost2 = searchBeta2.posts[0];
expect(betaPost2.community.local).toBe(true);
expect(betaPost2.creator.local).toBe(false);
expect(betaPost2.post.stickied).toBe(false);
// Make sure that gamma cannot sticky the post on beta
let searchGamma = await searchPost(gamma, postRes.post_view.post);
let gammaPost = searchGamma.posts[0];
let gammaTrySticky = await stickyPost(gamma, true, gammaPost.post);
let searchBeta3 = await searchPost(beta, postRes.post_view.post);
let betaPost3 = searchBeta3.posts[0];
expect(gammaTrySticky.post_view.post.stickied).toBe(true);
expect(betaPost3.post.stickied).toBe(false);
});
test('Lock a post', async () => {
let postRes = await createPost(alpha, betaCommunity.community.id);
// Lock the post
let lockedPostRes = await lockPost(alpha, true, postRes.post_view.post);
expect(lockedPostRes.post_view.post.locked).toBe(true);
// Make sure that post is locked on beta
let searchBeta = await searchPostLocal(beta, postRes.post_view.post);
let betaPost1 = searchBeta.posts[0];
expect(betaPost1.post.locked).toBe(true);
// Try to make a new comment there, on alpha
let comment: any = await createComment(alpha, postRes.post_view.post.id);
expect(comment['error']).toBe('locked');
// Unlock a post
let unlockedPost = await lockPost(alpha, false, postRes.post_view.post);
expect(unlockedPost.post_view.post.locked).toBe(false);
// Make sure that post is unlocked on beta
let searchBeta2 = await searchPost(beta, postRes.post_view.post);
let betaPost2 = searchBeta2.posts[0];
expect(betaPost2.community.local).toBe(true);
expect(betaPost2.creator.local).toBe(false);
expect(betaPost2.post.locked).toBe(false);
// Try to create a new comment, on beta
let commentBeta = await createComment(beta, betaPost2.post.id);
expect(commentBeta).toBeDefined();
});
test('Delete a post', async () => {
let postRes = await createPost(alpha, betaCommunity.community.id);
expect(postRes.post_view.post).toBeDefined();
let deletedPost = await deletePost(alpha, true, postRes.post_view.post);
expect(deletedPost.post_view.post.deleted).toBe(true);
// Make sure lemmy beta sees post is deleted
let searchBeta = await searchPost(beta, postRes.post_view.post);
let betaPost = searchBeta.posts[0];
// This will be undefined because of the tombstone
expect(betaPost).toBeUndefined();
// Undelete
let undeletedPost = await deletePost(alpha, false, postRes.post_view.post);
expect(undeletedPost.post_view.post.deleted).toBe(false);
// Make sure lemmy beta sees post is undeleted
let searchBeta2 = await searchPost(beta, postRes.post_view.post);
let betaPost2 = searchBeta2.posts[0];
expect(betaPost2.post.deleted).toBe(false);
assertPostFederation(betaPost2, undeletedPost.post_view);
// Make sure lemmy beta cannot delete the post
let deletedPostBeta = await deletePost(beta, true, betaPost2.post);
expect(deletedPostBeta).toStrictEqual({ error: 'no_post_edit_allowed' });
});
test('Remove a post from admin and community on different instance', async () => {
let postRes = await createPost(alpha, betaCommunity.community.id);
let removedPost = await removePost(alpha, true, postRes.post_view.post);
expect(removedPost.post_view.post.removed).toBe(true);
// Make sure lemmy beta sees post is NOT removed
let searchBeta = await searchPost(beta, postRes.post_view.post);
let betaPost = searchBeta.posts[0];
expect(betaPost.post.removed).toBe(false);
// Undelete
let undeletedPost = await removePost(alpha, false, postRes.post_view.post);
expect(undeletedPost.post_view.post.removed).toBe(false);
// Make sure lemmy beta sees post is undeleted
let searchBeta2 = await searchPost(beta, postRes.post_view.post);
let betaPost2 = searchBeta2.posts[0];
expect(betaPost2.post.removed).toBe(false);
assertPostFederation(betaPost2, undeletedPost.post_view);
});
test('Remove a post from admin and community on same instance', async () => {
await followBeta(alpha);
let postRes = await createPost(alpha, betaCommunity.community.id);
expect(postRes.post_view.post).toBeDefined();
// Get the id for beta
let searchBeta = await searchPostLocal(beta, postRes.post_view.post);
let betaPost = searchBeta.posts[0];
expect(betaPost).toBeDefined();
// The beta admin removes it (the community lives on beta)
let removePostRes = await removePost(beta, true, betaPost.post);
expect(removePostRes.post_view.post.removed).toBe(true);
// Make sure lemmy alpha sees post is removed
let alphaPost = await getPost(alpha, postRes.post_view.post.id);
// expect(alphaPost.post_view.post.removed).toBe(true); // TODO this shouldn't be commented
// assertPostFederation(alphaPost.post_view, removePostRes.post_view);
// Undelete
let undeletedPost = await removePost(beta, false, betaPost.post);
expect(undeletedPost.post_view.post.removed).toBe(false);
// Make sure lemmy alpha sees post is undeleted
let alphaPost2 = await getPost(alpha, postRes.post_view.post.id);
expect(alphaPost2.post_view.post.removed).toBe(false);
assertPostFederation(alphaPost2.post_view, undeletedPost.post_view);
await unfollowRemotes(alpha);
});
test('Search for a post', async () => {
let postRes = await createPost(alpha, betaCommunity.community.id);
expect(postRes.post_view.post).toBeDefined();
let searchBeta = await searchPost(beta, postRes.post_view.post);
expect(searchBeta.posts[0].post.name).toBeDefined();
});
test('A and G subscribe to B (center) A posts, it gets announced to G', async () => {
let postRes = await createPost(alpha, betaCommunity.community.id);
expect(postRes.post_view.post).toBeDefined();
let search2 = await searchPost(gamma, postRes.post_view.post);
expect(search2.posts[0].post.name).toBeDefined();
});
test('Enforce site ban for federated user', async () => {
let alphaShortname = `@lemmy_alpha@lemmy-alpha:8541`;
let userSearch = await searchForUser(beta, alphaShortname);
let alphaUser = userSearch.users[0];
expect(alphaUser).toBeDefined();
// ban alpha from beta site
let banAlpha = await banUserFromSite(beta, alphaUser.user.id, true);
expect(banAlpha.banned).toBe(true);
// Alpha makes post on beta
let postRes = await createPost(alpha, betaCommunity.community.id);
expect(postRes.post_view.post).toBeDefined();
expect(postRes.post_view.community.local).toBe(false);
expect(postRes.post_view.creator.local).toBe(true);
expect(postRes.post_view.counts.score).toBe(1);
// Make sure that post doesn't make it to beta
let searchBeta = await searchPostLocal(beta, postRes.post_view.post);
let betaPost = searchBeta.posts[0];
expect(betaPost).toBeUndefined();
// Unban alpha
let unBanAlpha = await banUserFromSite(beta, alphaUser.user.id, false);
expect(unBanAlpha.banned).toBe(false);
});
@ -376,30 +330,30 @@ test('Enforce community ban for federated user', async () => {
let userSearch = await searchForUser(beta, alphaShortname);
let alphaUser = userSearch.users[0];
expect(alphaUser).toBeDefined();
// ban alpha from beta site
await banUserFromCommunity(beta, alphaUser.user.id, 2, false);
let banAlpha = await banUserFromCommunity(beta, alphaUser.user.id, 2, true);
expect(banAlpha.banned).toBe(true);
// Alpha makes post on beta
let postRes = await createPost(alpha, betaCommunity.community.id);
expect(postRes.post_view.post).toBeDefined();
expect(postRes.post_view.community.local).toBe(false);
expect(postRes.post_view.creator.local).toBe(true);
expect(postRes.post_view.counts.score).toBe(1);
// Make sure that post doesn't make it to beta community
let searchBeta = await searchPostLocal(beta, postRes.post_view.post);
let betaPost = searchBeta.posts[0];
expect(betaPost).toBeUndefined();
// Unban alpha
let unBanAlpha = await banUserFromCommunity(
beta,
alphaUser.user.id,
2,
false
);
expect(unBanAlpha.banned).toBe(false);
});

View file

@ -5,12 +5,10 @@ import {
setupLogins,
followBeta,
createPrivateMessage,
editPrivateMessage,
listPrivateMessages,
deletePrivateMessage,
unfollowRemotes,
} from './shared';
let recipient_id: number;
@ -18,8 +16,7 @@ let recipient_id: number;
beforeAll(async () => {
await setupLogins();
let follow = await followBeta(alpha);
recipient_id = follow.community_view.creator.id;
});
afterAll(async () => {
@ -28,55 +25,66 @@ afterAll(async () => {
test('Create a private message', async () => {
let pmRes = await createPrivateMessage(alpha, recipient_id);
expect(pmRes.private_message_view.private_message.content).toBeDefined();
expect(pmRes.private_message_view.private_message.local).toBe(true);
expect(pmRes.private_message_view.creator.local).toBe(true);
expect(pmRes.private_message_view.recipient.local).toBe(false);
let betaPms = await listPrivateMessages(beta);
expect(betaPms.private_messages[0].private_message.content).toBeDefined();
expect(betaPms.private_messages[0].private_message.local).toBe(false);
expect(betaPms.private_messages[0].creator.local).toBe(false);
expect(betaPms.private_messages[0].recipient.local).toBe(true);
});
test('Update a private message', async () => {
let updatedContent = 'A jest test federated private message edited';
let pmRes = await createPrivateMessage(alpha, recipient_id);
let pmUpdated = await editPrivateMessage(
alpha,
pmRes.private_message_view.private_message.id
);
expect(pmUpdated.private_message_view.private_message.content).toBe(
updatedContent
);
let betaPms = await listPrivateMessages(beta);
expect(betaPms.private_messages[0].private_message.content).toBe(
updatedContent
);
});
test('Delete a private message', async () => {
let pmRes = await createPrivateMessage(alpha, recipient_id);
let betaPms1 = await listPrivateMessages(beta);
let deletedPmRes = await deletePrivateMessage(
alpha,
true,
pmRes.private_message_view.private_message.id
);
expect(deletedPmRes.private_message_view.private_message.deleted).toBe(true);
// The GetPrivateMessages filters out deleted,
// even though they are in the actual database.
// no reason to show them
let betaPms2 = await listPrivateMessages(beta);
expect(betaPms2.private_messages.length).toBe(
betaPms1.private_messages.length - 1
);
// Undelete
let undeletedPmRes = await deletePrivateMessage(
alpha,
false,
pmRes.private_message_view.private_message.id
);
expect(undeletedPmRes.private_message_view.private_message.deleted).toBe(
false
);
let betaPms3 = await listPrivateMessages(beta);
expect(betaPms3.private_messages.length).toBe(
betaPms1.private_messages.length
);
});

View file

@ -1,52 +1,54 @@
import {
Login,
LoginResponse,
CreatePost,
EditPost,
CreateComment,
DeletePost,
RemovePost,
StickyPost,
LockPost,
PostResponse,
SearchResponse,
FollowCommunity,
CommunityResponse,
GetFollowedCommunitiesResponse,
GetPostResponse,
Register,
Comment,
EditComment,
DeleteComment,
RemoveComment,
Search,
CommentResponse,
GetCommunity,
CreateCommunity,
DeleteCommunity,
RemoveCommunity,
GetUserMentions,
CreateCommentLike,
CreatePostLike,
EditPrivateMessage,
DeletePrivateMessage,
GetFollowedCommunities,
GetPrivateMessages,
GetSite,
GetPost,
PrivateMessageResponse,
PrivateMessagesResponse,
GetUserMentionsResponse,
SaveUserSettings,
SortType,
ListingType,
GetSiteResponse,
SearchType,
LemmyHttp,
BanUserResponse,
BanUser,
BanFromCommunity,
BanFromCommunityResponse,
Post,
CreatePrivateMessage,
} from 'lemmy-js-client';
export interface API {
@ -55,27 +57,27 @@ export interface API {
}
export let alpha: API = {
client: new LemmyHttp('http://localhost:8541/api/v2'),
};
export let beta: API = {
client: new LemmyHttp('http://localhost:8551/api/v2'),
};
export let gamma: API = {
client: new LemmyHttp('http://localhost:8561/api/v2'),
};
export let delta: API = {
client: new LemmyHttp('http://localhost:8571/api/v2'),
};
export let epsilon: API = {
client: new LemmyHttp('http://localhost:8581/api/v2'),
};
export async function setupLogins() {
let formAlpha: Login = {
username_or_email: 'lemmy_alpha',
password: 'lemmy',
};
@ -127,7 +129,7 @@ export async function createPost(
let name = randomString(5);
let body = randomString(10);
let url = 'https://google.com/';
let form: CreatePost = {
name,
url,
body,
@ -138,11 +140,11 @@ export async function createPost(
return api.client.createPost(form);
}
export async function editPost(api: API, post: Post): Promise<PostResponse> {
let name = 'A jest test federated post, updated';
let form: EditPost = {
name,
post_id: post.id,
auth: api.auth,
nsfw: false,
};
@ -154,8 +156,8 @@ export async function deletePost(
deleted: boolean,
post: Post
): Promise<PostResponse> {
let form: DeletePost = {
post_id: post.id,
deleted: deleted,
auth: api.auth,
};
@ -167,8 +169,8 @@ export async function removePost(
removed: boolean,
post: Post
): Promise<PostResponse> {
let form: RemovePost = {
post_id: post.id,
removed,
auth: api.auth,
};
@ -180,8 +182,8 @@ export async function stickyPost(
stickied: boolean,
post: Post
): Promise<PostResponse> {
let form: StickyPost = {
post_id: post.id,
stickied,
auth: api.auth,
};
@ -193,8 +195,8 @@ export async function lockPost(
locked: boolean,
post: Post
): Promise<PostResponse> {
let form: LockPost = {
post_id: post.id,
locked,
auth: api.auth,
};
@ -205,7 +207,7 @@ export async function searchPost(
api: API,
post: Post
): Promise<SearchResponse> {
let form: Search = {
q: post.ap_id,
type_: SearchType.Posts,
sort: SortType.TopAll,
@ -217,7 +219,7 @@ export async function searchPostLocal(
api: API,
post: Post
): Promise<SearchResponse> {
let form: Search = {
q: post.name,
type_: SearchType.Posts,
sort: SortType.TopAll,
@ -229,7 +231,7 @@ export async function getPost(
api: API,
post_id: number
): Promise<GetPostResponse> {
let form: GetPost = {
id: post_id,
};
return api.client.getPost(form);
@ -239,7 +241,7 @@ export async function searchComment(
api: API,
comment: Comment
): Promise<SearchResponse> {
let form: Search = {
q: comment.ap_id,
type_: SearchType.Comments,
sort: SortType.TopAll,
@ -252,7 +254,7 @@ export async function searchForBetaCommunity(
): Promise<SearchResponse> {
// Make sure lemmy-beta/c/main is cached on lemmy_alpha
// Use short-hand search url
let form: Search = {
q: '!main@lemmy-beta:8551',
type_: SearchType.Communities,
sort: SortType.TopAll,
@ -262,10 +264,10 @@ export async function searchForBetaCommunity(
export async function searchForCommunity(
api: API,
q: string
): Promise<SearchResponse> {
// Use short-hand search url
let form: Search = {
q,
type_: SearchType.Communities,
sort: SortType.TopAll,
@ -279,7 +281,7 @@ export async function searchForUser(
): Promise<SearchResponse> {
// Make sure lemmy-beta/c/main is cached on lemmy_alpha
// Use short-hand search url
let form: Search = {
q: apShortname,
type_: SearchType.Users,
sort: SortType.TopAll,
@ -290,13 +292,14 @@ export async function searchForUser(
export async function banUserFromSite(
api: API,
user_id: number,
ban: boolean
): Promise<BanUserResponse> {
// Make sure lemmy-beta/c/main is cached on lemmy_alpha
// Use short-hand search url
let form: BanUser = {
user_id,
ban,
remove_data: false,
auth: api.auth,
};
return api.client.banUser(form);
@ -306,13 +309,14 @@ export async function banUserFromCommunity(
api: API,
user_id: number,
community_id: number,
ban: boolean
): Promise<BanFromCommunityResponse> {
// Make sure lemmy-beta/c/main is cached on lemmy_alpha
// Use short-hand search url
let form: BanFromCommunity = {
user_id,
community_id,
remove_data: false,
ban,
auth: api.auth,
};
@ -324,7 +328,7 @@ export async function followCommunity(
follow: boolean,
community_id: number
): Promise<CommunityResponse> {
let form: FollowCommunity = {
community_id,
follow,
auth: api.auth,
@ -335,7 +339,7 @@ export async function followCommunity(
export async function checkFollowedCommunities(
api: API
): Promise<GetFollowedCommunitiesResponse> {
let form: GetFollowedCommunities = {
auth: api.auth,
};
return api.client.getFollowedCommunities(form);
@ -346,7 +350,7 @@ export async function likePost(
score: number,
post: Post
): Promise<PostResponse> {
let form: CreatePostLike = {
post_id: post.id,
score: score,
auth: api.auth,
@ -361,7 +365,7 @@ export async function createComment(
parent_id?: number,
content = 'a jest test comment'
): Promise<CommentResponse> {
let form: CreateComment = {
content,
post_id,
parent_id,
@ -370,14 +374,14 @@ export async function createComment(
return api.client.createComment(form);
}
export async function editComment(
api: API,
comment_id: number,
content = 'A jest test federated comment update'
): Promise<CommentResponse> {
let form: EditComment = {
content,
comment_id,
auth: api.auth,
};
return api.client.editComment(form);
@ -386,10 +390,10 @@ export async function updateComment(
export async function deleteComment(
api: API,
deleted: boolean,
comment_id: number
): Promise<CommentResponse> {
let form: DeleteComment = {
comment_id,
deleted,
auth: api.auth,
};
@ -399,10 +403,10 @@ export async function deleteComment(
export async function removeComment(
api: API,
removed: boolean,
comment_id: number
): Promise<CommentResponse> {
let form: RemoveComment = {
comment_id,
removed,
auth: api.auth,
};
@ -410,7 +414,7 @@ export async function removeComment(
}
export async function getMentions(api: API): Promise<GetUserMentionsResponse> {
let form: GetUserMentions = {
sort: SortType.New,
unread_only: false,
auth: api.auth,
@ -423,7 +427,7 @@ export async function likeComment(
score: number,
comment: Comment
): Promise<CommentResponse> {
let form: CreateCommentLike = {
comment_id: comment.id,
score,
auth: api.auth,
@ -438,7 +442,7 @@ export async function createCommunity(
let description = 'a sample description';
let icon = 'https://image.flaticon.com/icons/png/512/35/35896.png';
let banner = 'https://image.flaticon.com/icons/png/512/35/35896.png';
let form: CreateCommunity = {
name: name_,
title: name_,
description,
@ -453,9 +457,9 @@ export async function createCommunity(
export async function getCommunity(
api: API,
id: number
): Promise<CommunityResponse> {
let form: GetCommunity = {
id,
};
return api.client.getCommunity(form);
@ -464,10 +468,10 @@ export async function getCommunity(
export async function deleteCommunity(
api: API,
deleted: boolean,
community_id: number
): Promise<CommunityResponse> {
let form: DeleteCommunity = {
community_id,
deleted,
auth: api.auth,
};
@ -477,10 +481,10 @@ export async function deleteCommunity(
export async function removeCommunity(
api: API,
removed: boolean,
community_id: number
): Promise<CommunityResponse> {
let form: RemoveCommunity = {
community_id,
removed,
auth: api.auth,
};
@ -492,7 +496,7 @@ export async function createPrivateMessage(
recipient_id: number
): Promise<PrivateMessageResponse> {
let content = 'A jest test federated private message';
let form: CreatePrivateMessage = {
content,
recipient_id,
auth: api.auth,
@ -500,14 +504,14 @@ export async function createPrivateMessage(
return api.client.createPrivateMessage(form);
}
export async function editPrivateMessage(
api: API,
private_message_id: number
): Promise<PrivateMessageResponse> {
let updatedContent = 'A jest test federated private message edited';
let form: EditPrivateMessage = {
content: updatedContent,
private_message_id,
auth: api.auth,
};
return api.client.editPrivateMessage(form);
@ -516,11 +520,11 @@ export async function updatePrivateMessage(
export async function deletePrivateMessage(
api: API,
deleted: boolean,
private_message_id: number
): Promise<PrivateMessageResponse> {
let form: DeletePrivateMessage = {
deleted,
private_message_id,
auth: api.auth,
};
return api.client.deletePrivateMessage(form);
@ -530,11 +534,10 @@ export async function registerUser(
api: API,
username: string = randomString(5)
): Promise<LoginResponse> {
let form: Register = {
username,
password: 'test',
password_verify: 'test',
show_nsfw: true,
};
return api.client.register(form);
@ -544,7 +547,7 @@ export async function saveUserSettingsBio(
api: API,
auth: string
): Promise<LoginResponse> {
let form: SaveUserSettings = {
show_nsfw: true,
theme: 'darkly',
default_sort_type: Object.keys(SortType).indexOf(SortType.Active),
@ -560,7 +563,7 @@ export async function saveUserSettingsBio(
export async function saveUserSettings(
api: API,
form: SaveUserSettings
): Promise<LoginResponse> {
return api.client.saveUserSettings(form);
}
@ -569,7 +572,7 @@ export async function getSite(
api: API,
auth: string
): Promise<GetSiteResponse> {
let form: GetSite = {
auth,
};
return api.client.getSite(form);
@ -578,7 +581,7 @@ export async function getSite(
export async function listPrivateMessages(
api: API
): Promise<PrivateMessagesResponse> {
let form: GetPrivateMessages = {
auth: api.auth,
unread_only: false,
limit: 999,
@ -592,31 +595,27 @@ export async function unfollowRemotes(
// Unfollow all remote communities
let followed = await checkFollowedCommunities(api);
let remoteFollowed = followed.communities.filter(
c => c.community.local == false
);
for (let cu of remoteFollowed) {
await followCommunity(api, false, cu.community.id);
}
let followed2 = await checkFollowedCommunities(api);
return followed2;
}
export async function followBeta(api: API): Promise<CommunityResponse> {
await unfollowRemotes(api);
// Cache it
let search = await searchForBetaCommunity(api);
let com = search.communities.find(c => c.community.local == false);
if (com) {
let follow = await followCommunity(api, true, com.community.id);
return follow;
}
}
export function delay(millis: number = 500) {
return new Promise(resolve => setTimeout(resolve, millis));
}
export function longDelay() {

View file

@ -4,28 +4,27 @@ import {
beta,
registerUser,
searchForUser,
saveUserSettings,
getSite,
} from './shared';
import {
UserViewSafe,
SaveUserSettings,
SortType,
ListingType,
} from 'lemmy-js-client';
let auth: string;
let apShortname: string;
function assertUserFederation(userOne: UserViewSafe, userTwo: UserViewSafe) {
expect(userOne.user.name).toBe(userTwo.user.name);
expect(userOne.user.preferred_username).toBe(userTwo.user.preferred_username);
expect(userOne.user.bio).toBe(userTwo.user.bio);
expect(userOne.user.actor_id).toBe(userTwo.user.actor_id);
expect(userOne.user.avatar).toBe(userTwo.user.avatar);
expect(userOne.user.banner).toBe(userTwo.user.banner);
expect(userOne.user.published).toBe(userTwo.user.published);
}
test('Create user', async () => {
@ -38,39 +37,27 @@ test('Create user', async () => {
apShortname = `@${site.my_user.name}@lemmy-alpha:8541`;
});
test('Save user settings, check changed bio from beta', async () => {
let bio = 'a changed bio';
let userRes = await saveUserSettingsBio(alpha, auth);
expect(userRes.jwt).toBeDefined();
let site = await getSite(alpha, auth);
expect(site.my_user.bio).toBe(bio);
let searchAlpha = await searchForUser(alpha, site.my_user.actor_id);
// Make sure beta sees this bio is changed
let searchBeta = await searchForUser(beta, apShortname);
assertUserFederation(searchAlpha.users[0], searchBeta.users[0]);
});
test('Set avatar and banner, check that they are federated', async () => {
test('Set some user settings, check that they are federated', async () => {
let avatar = 'https://image.flaticon.com/icons/png/512/35/35896.png';
let banner = 'https://image.flaticon.com/icons/png/512/36/35896.png';
let form: UserSettingsForm = {
let bio = 'a changed bio';
let form: SaveUserSettings = {
show_nsfw: false,
theme: "",
default_sort_type: 0,
default_listing_type: 0,
lang: "",
theme: '',
default_sort_type: Object.keys(SortType).indexOf(SortType.Hot),
default_listing_type: Object.keys(ListingType).indexOf(ListingType.All),
lang: '',
avatar,
banner,
preferred_username: "user321",
preferred_username: 'user321',
show_avatars: false,
send_notifications_to_email: false,
bio,
auth,
}
let settingsRes = await saveUserSettings(alpha, form);
};
await saveUserSettings(alpha, form);
let searchAlpha = await searchForUser(beta, apShortname);
let searchAlpha = await searchForUser(alpha, apShortname);
let userOnAlpha = searchAlpha.users[0];
let searchBeta = await searchForUser(beta, apShortname);
let userOnBeta = searchBeta.users[0];

16
api_tests/tsconfig.json Normal file
View file

@ -0,0 +1,16 @@
{
"compilerOptions": {
"declaration": true,
"declarationDir": "./dist",
"module": "CommonJS",
"noImplicitAny": true,
"lib": ["es2017", "es7", "es6", "dom"],
"outDir": "./dist",
"target": "ES5",
"moduleResolution": "Node"
},
"include": [
"src/**/*"
],
"exclude": ["node_modules", "dist"]
}

File diff suppressed because it is too large

View file

@ -1,7 +0,0 @@
#!/bin/sh
cargo update
cargo fmt
cargo check
cargo clippy
cargo outdated -R

View file

@ -46,9 +46,10 @@ RUN cp ./target/$CARGO_BUILD_TARGET/$RUSTRELEASEDIR/lemmy_server /app/lemmy_serv
# Build the docs
FROM $RUST_BUILDER_IMAGE as docs
WORKDIR /app
RUN cargo install mdbook --git https://github.com/Ruin0x11/mdBook.git \
--branch localization --rev d06249b --force
RUN cargo install mdbook --git https://github.com/Nutomic/mdBook.git \
--branch localization --rev 0982a82 --force
COPY --chown=rust:rust docs ./docs
RUN ls -la docs/
RUN mdbook build docs/
# The alpine runner

View file

@ -17,7 +17,7 @@ services:
- iframely
lemmy-ui:
image: dessalines/lemmy-ui:v0.8.10
image: dessalines/lemmy-ui:0.9.0-rc.12
ports:
- "1235:1234"
restart: always

View file

@ -17,8 +17,8 @@ RUN --mount=type=cache,target=/app/target \
FROM rust:1.47-buster as docs
WORKDIR /app
RUN cargo install mdbook --git https://github.com/Ruin0x11/mdBook.git \
--branch localization --rev d06249b --force
RUN cargo install mdbook --git https://github.com/Nutomic/mdBook.git \
--branch localization --rev 0982a82 --force
COPY docs ./docs
RUN mdbook build docs/

View file

@ -1 +1,4 @@
docker exec -it dev_lemmy_db_1 pg_dumpall -c -U rrr > dump_`date +%Y-%m-%d"_"%H_%M_%S`.sql
#!/bin/bash
pushd dev
docker-compose exec postgres pg_dumpall -c -U lemmy > dump_`date +%Y-%m-%d"_"%H_%M_%S`.sql
popd

View file

@ -29,7 +29,7 @@ services:
- ./volumes/pictrs_alpha:/mnt
lemmy-alpha-ui:
image: dessalines/lemmy-ui:v0.8.10
image: dessalines/lemmy-ui:0.9.0-rc.12
environment:
- LEMMY_INTERNAL_HOST=lemmy-alpha:8541
- LEMMY_EXTERNAL_HOST=localhost:8541
@ -52,6 +52,7 @@ services:
- LEMMY_RATE_LIMIT__POST=99999
- LEMMY_RATE_LIMIT__REGISTER=99999
- LEMMY_CAPTCHA__ENABLED=false
- LEMMY_TEST_SEND_SYNC=1
- RUST_BACKTRACE=1
- RUST_LOG=debug
depends_on:
@ -68,7 +69,7 @@ services:
- ./volumes/postgres_alpha:/var/lib/postgresql/data
lemmy-beta-ui:
image: dessalines/lemmy-ui:v0.8.10
image: dessalines/lemmy-ui:0.9.0-rc.12
environment:
- LEMMY_INTERNAL_HOST=lemmy-beta:8551
- LEMMY_EXTERNAL_HOST=localhost:8551
@ -91,6 +92,7 @@ services:
- LEMMY_RATE_LIMIT__POST=99999
- LEMMY_RATE_LIMIT__REGISTER=99999
- LEMMY_CAPTCHA__ENABLED=false
- LEMMY_TEST_SEND_SYNC=1
- RUST_BACKTRACE=1
- RUST_LOG=debug
depends_on:
@ -107,7 +109,7 @@ services:
- ./volumes/postgres_beta:/var/lib/postgresql/data
lemmy-gamma-ui:
image: dessalines/lemmy-ui:v0.8.10
image: dessalines/lemmy-ui:0.9.0-rc.12
environment:
- LEMMY_INTERNAL_HOST=lemmy-gamma:8561
- LEMMY_EXTERNAL_HOST=localhost:8561
@ -130,6 +132,7 @@ services:
- LEMMY_RATE_LIMIT__POST=99999
- LEMMY_RATE_LIMIT__REGISTER=99999
- LEMMY_CAPTCHA__ENABLED=false
- LEMMY_TEST_SEND_SYNC=1
- RUST_BACKTRACE=1
- RUST_LOG=debug
depends_on:
@ -147,7 +150,7 @@ services:
# An instance with only an allowlist for beta
lemmy-delta-ui:
image: dessalines/lemmy-ui:v0.8.10
image: dessalines/lemmy-ui:0.9.0-rc.12
environment:
- LEMMY_INTERNAL_HOST=lemmy-delta:8571
- LEMMY_EXTERNAL_HOST=localhost:8571
@ -170,6 +173,7 @@ services:
- LEMMY_RATE_LIMIT__POST=99999
- LEMMY_RATE_LIMIT__REGISTER=99999
- LEMMY_CAPTCHA__ENABLED=false
- LEMMY_TEST_SEND_SYNC=1
- RUST_BACKTRACE=1
- RUST_LOG=debug
depends_on:
@ -187,7 +191,7 @@ services:
# An instance who has a blocklist, with lemmy-alpha blocked
lemmy-epsilon-ui:
image: dessalines/lemmy-ui:v0.8.10
image: dessalines/lemmy-ui:0.9.0-rc.12
environment:
- LEMMY_INTERNAL_HOST=lemmy-epsilon:8581
- LEMMY_EXTERNAL_HOST=localhost:8581
@ -210,6 +214,7 @@ services:
- LEMMY_RATE_LIMIT__POST=99999
- LEMMY_RATE_LIMIT__REGISTER=99999
- LEMMY_CAPTCHA__ENABLED=false
- LEMMY_TEST_SEND_SYNC=1
- RUST_BACKTRACE=1
- RUST_LOG=debug
depends_on:

View file

@ -1,31 +0,0 @@
#!/bin/bash
set -e
# make sure there are no old containers or old data around
sudo docker-compose down
sudo rm -rf volumes
mkdir -p volumes/pictrs_{alpha,beta,gamma,delta,epsilon}
sudo chown -R 991:991 volumes/pictrs_{alpha,beta,gamma,delta,epsilon}
sudo docker build ../../ --file ../dev/Dockerfile --tag lemmy-federation:latest
sudo mkdir -p volumes/pictrs_alpha
sudo chown -R 991:991 volumes/pictrs_alpha
sudo docker-compose up -d
pushd ../../api_tests
echo "Waiting for Lemmy to start..."
while [[ "$(curl -s -o /dev/null -w '%{http_code}' 'localhost:8541/api/v1/site')" != "200" ]]; do sleep 1; done
while [[ "$(curl -s -o /dev/null -w '%{http_code}' 'localhost:8551/api/v1/site')" != "200" ]]; do sleep 1; done
while [[ "$(curl -s -o /dev/null -w '%{http_code}' 'localhost:8561/api/v1/site')" != "200" ]]; do sleep 1; done
while [[ "$(curl -s -o /dev/null -w '%{http_code}' 'localhost:8571/api/v1/site')" != "200" ]]; do sleep 1; done
while [[ "$(curl -s -o /dev/null -w '%{http_code}' 'localhost:8581/api/v1/site')" != "200" ]]; do sleep 1; done
yarn
yarn api-test || true
popd
sudo docker-compose down
sudo rm -r volumes

View file

@ -8,14 +8,4 @@ for Item in alpha beta gamma delta epsilon ; do
sudo chown -R 991:991 volumes/pictrs_$Item
done
sudo docker-compose up -d
echo "Waiting for Lemmy to start..."
while [[ "$(curl -s -o /dev/null -w '%{http_code}' 'localhost:8541/api/v1/site')" != "200" ]]; do sleep 1; done
while [[ "$(curl -s -o /dev/null -w '%{http_code}' 'localhost:8551/api/v1/site')" != "200" ]]; do sleep 1; done
while [[ "$(curl -s -o /dev/null -w '%{http_code}' 'localhost:8561/api/v1/site')" != "200" ]]; do sleep 1; done
while [[ "$(curl -s -o /dev/null -w '%{http_code}' 'localhost:8571/api/v1/site')" != "200" ]]; do sleep 1; done
while [[ "$(curl -s -o /dev/null -w '%{http_code}' 'localhost:8581/api/v1/site')" != "200" ]]; do sleep 1; done
echo "All instances started."
sudo docker-compose logs -f
sudo docker-compose up

View file

@ -46,8 +46,8 @@ RUN cp ./target/$CARGO_BUILD_TARGET/$RUSTRELEASEDIR/lemmy_server /app/lemmy_serv
# Build the docs
FROM $RUST_BUILDER_IMAGE as docs
WORKDIR /app
RUN cargo install mdbook --git https://github.com/Ruin0x11/mdBook.git \
--branch localization --rev d06249b --force
RUN cargo install mdbook --git https://github.com/Nutomic/mdBook.git \
--branch localization --rev 0982a82 --force
COPY --chown=rust:rust docs ./docs
RUN mdbook build docs/

View file

@ -0,0 +1,48 @@
ARG RUST_BUILDER_IMAGE=rust:1.47-slim-buster
# Build Lemmy
FROM $RUST_BUILDER_IMAGE as builder
# Install compilation dependencies
RUN apt-get update \
&& apt-get -y install --no-install-recommends libssl-dev pkg-config libpq-dev \
&& rm -rf /var/lib/apt/lists/*
WORKDIR /app
COPY ./ ./
RUN cargo build --release
# reduce binary size
RUN strip ./target/release/lemmy_server
RUN cp ./target/release/lemmy_server /app/lemmy_server
# Build the docs
FROM $RUST_BUILDER_IMAGE as docs
WORKDIR /app
RUN cargo install mdbook --git https://github.com/Nutomic/mdBook.git --branch localization --rev 0982a82 --force
COPY docs ./docs
RUN mdbook build docs/
# The Debian runner
FROM debian:buster-slim as lemmy
# Install libpq for postgres and espeak for captchas
RUN apt-get update \
&& apt-get -y install --no-install-recommends espeak postgresql-client libc6 libssl1.1 \
&& rm -rf /var/lib/apt/lists/*
RUN addgroup --gid 1000 lemmy
RUN adduser --no-create-home --shell /bin/sh --uid 1000 --gid 1000 lemmy
# Copy resources
COPY --chown=lemmy:lemmy config/defaults.hjson /config/defaults.hjson
COPY --chown=lemmy:lemmy --from=builder /app/lemmy_server /app/lemmy
COPY --chown=lemmy:lemmy --from=docs /app/docs/book/ /app/documentation/
RUN chown lemmy:lemmy /app/lemmy
USER lemmy
EXPOSE 8536
CMD ["/app/lemmy"]

View file

@ -20,21 +20,17 @@ cd docker/prod || exit
# Changing various references to the Lemmy version
sed -i "s/dessalines\/lemmy-ui:.*/dessalines\/lemmy-ui:$new_tag/" ../dev/docker-compose.yml
sed -i "s/dessalines\/lemmy-ui:.*/dessalines\/lemmy-ui:$new_tag/" ../federation/docker-compose.yml
sed -i "s/dessalines\/lemmy:.*/dessalines\/lemmy:$new_tag/" ../prod/docker-compose.yml
sed -i "s/dessalines\/lemmy-ui:.*/dessalines\/lemmy-ui:$new_tag/" ../prod/docker-compose.yml
sed -i "s/dessalines\/lemmy:v.*/dessalines\/lemmy:$new_tag/" ../travis/docker_push.sh
sed -i "s/dessalines\/lemmy:.*/dessalines\/lemmy:$new_tag/" ../prod/docker-compose.yml
git add ../dev/docker-compose.yml
git add ../federation/docker-compose.yml
git add ../prod/docker-compose.yml
git add ../travis/docker_push.sh
git add ../federation/docker-compose.yml
# The commit
git commit -m"Version $new_tag"
git tag $new_tag
# Now doing the building on travis, but leave this in for when you need to do an arm build
# export COMPOSE_DOCKER_CLI_BUILD=1
# export DOCKER_BUILDKIT=1

View file

@ -12,7 +12,7 @@ services:
restart: always
lemmy:
image: dessalines/lemmy:v0.8.10
image: dessalines/lemmy:0.9.0-rc.12
ports:
- "127.0.0.1:8536:8536"
restart: always
@ -26,7 +26,7 @@ services:
- iframely
lemmy-ui:
image: dessalines/lemmy-ui:v0.8.10
image: dessalines/lemmy-ui:0.9.0-rc.12
ports:
- "1235:1234"
restart: always

View file

@ -1,159 +0,0 @@
version: '3.3'
services:
lemmy-alpha:
image: dessalines/lemmy:travis
environment:
- LEMMY_HOSTNAME=lemmy-alpha:8541
- LEMMY_DATABASE_URL=postgres://lemmy:password@postgres_alpha:5432/lemmy
- LEMMY_JWT_SECRET=changeme
- LEMMY_FEDERATION__ENABLED=true
- LEMMY_TLS_ENABLED=false
- LEMMY_FEDERATION__ALLOWED_INSTANCES=lemmy-beta,lemmy-gamma,lemmy-delta,lemmy-epsilon
- LEMMY_PORT=8541
- LEMMY_SETUP__ADMIN_USERNAME=lemmy_alpha
- LEMMY_SETUP__ADMIN_PASSWORD=lemmy
- LEMMY_SETUP__SITE_NAME=lemmy-alpha
- LEMMY_RATE_LIMIT__POST=99999
- LEMMY_RATE_LIMIT__REGISTER=99999
- LEMMY_CAPTCHA__ENABLED=false
- RUST_BACKTRACE=1
- RUST_LOG=debug
depends_on:
- postgres_alpha
ports:
- "8541:8541"
postgres_alpha:
image: postgres:12-alpine
environment:
- POSTGRES_USER=lemmy
- POSTGRES_PASSWORD=password
- POSTGRES_DB=lemmy
volumes:
- ./volumes/postgres_alpha:/var/lib/postgresql/data
lemmy-beta:
image: dessalines/lemmy:travis
environment:
- LEMMY_HOSTNAME=lemmy-beta:8551
- LEMMY_DATABASE_URL=postgres://lemmy:password@postgres_beta:5432/lemmy
- LEMMY_JWT_SECRET=changeme
- LEMMY_FEDERATION__ENABLED=true
- LEMMY_TLS_ENABLED=false
- LEMMY_FEDERATION__ALLOWED_INSTANCES=lemmy-alpha,lemmy-gamma,lemmy-delta,lemmy-epsilon
- LEMMY_PORT=8551
- LEMMY_SETUP__ADMIN_USERNAME=lemmy_beta
- LEMMY_SETUP__ADMIN_PASSWORD=lemmy
- LEMMY_SETUP__SITE_NAME=lemmy-beta
- LEMMY_RATE_LIMIT__POST=99999
- LEMMY_RATE_LIMIT__REGISTER=99999
- LEMMY_CAPTCHA__ENABLED=false
- RUST_BACKTRACE=1
- RUST_LOG=debug
depends_on:
- postgres_beta
ports:
- "8551:8551"
postgres_beta:
image: postgres:12-alpine
environment:
- POSTGRES_USER=lemmy
- POSTGRES_PASSWORD=password
- POSTGRES_DB=lemmy
volumes:
- ./volumes/postgres_beta:/var/lib/postgresql/data
lemmy-gamma:
image: dessalines/lemmy:travis
environment:
- LEMMY_HOSTNAME=lemmy-gamma:8561
- LEMMY_DATABASE_URL=postgres://lemmy:password@postgres_gamma:5432/lemmy
- LEMMY_JWT_SECRET=changeme
- LEMMY_FEDERATION__ENABLED=true
- LEMMY_TLS_ENABLED=false
- LEMMY_FEDERATION__ALLOWED_INSTANCES=lemmy-alpha,lemmy-beta,lemmy-delta,lemmy-epsilon
- LEMMY_PORT=8561
- LEMMY_SETUP__ADMIN_USERNAME=lemmy_gamma
- LEMMY_SETUP__ADMIN_PASSWORD=lemmy
- LEMMY_SETUP__SITE_NAME=lemmy-gamma
- LEMMY_RATE_LIMIT__POST=99999
- LEMMY_RATE_LIMIT__REGISTER=99999
- LEMMY_CAPTCHA__ENABLED=false
- RUST_BACKTRACE=1
- RUST_LOG=debug
depends_on:
- postgres_gamma
ports:
- "8561:8561"
postgres_gamma:
image: postgres:12-alpine
environment:
- POSTGRES_USER=lemmy
- POSTGRES_PASSWORD=password
- POSTGRES_DB=lemmy
volumes:
- ./volumes/postgres_gamma:/var/lib/postgresql/data
# An instance with only an allowlist for beta
lemmy-delta:
image: dessalines/lemmy:travis
environment:
- LEMMY_HOSTNAME=lemmy-delta:8571
- LEMMY_DATABASE_URL=postgres://lemmy:password@postgres_delta:5432/lemmy
- LEMMY_JWT_SECRET=changeme
- LEMMY_FEDERATION__ENABLED=true
- LEMMY_TLS_ENABLED=false
- LEMMY_FEDERATION__ALLOWED_INSTANCES=lemmy-beta
- LEMMY_PORT=8571
- LEMMY_SETUP__ADMIN_USERNAME=lemmy_delta
- LEMMY_SETUP__ADMIN_PASSWORD=lemmy
- LEMMY_SETUP__SITE_NAME=lemmy-delta
- LEMMY_RATE_LIMIT__POST=99999
- LEMMY_RATE_LIMIT__REGISTER=99999
- LEMMY_CAPTCHA__ENABLED=false
- RUST_BACKTRACE=1
- RUST_LOG=debug
depends_on:
- postgres_delta
ports:
- "8571:8571"
postgres_delta:
image: postgres:12-alpine
environment:
- POSTGRES_USER=lemmy
- POSTGRES_PASSWORD=password
- POSTGRES_DB=lemmy
volumes:
- ./volumes/postgres_delta:/var/lib/postgresql/data
# An instance who has a blocklist, with lemmy-alpha blocked
lemmy-epsilon:
image: dessalines/lemmy:travis
environment:
- LEMMY_HOSTNAME=lemmy-epsilon:8581
- LEMMY_DATABASE_URL=postgres://lemmy:password@postgres_epsilon:5432/lemmy
- LEMMY_JWT_SECRET=changeme
- LEMMY_FEDERATION__ENABLED=true
- LEMMY_TLS_ENABLED=false
- LEMMY_FEDERATION__BLOCKED_INSTANCES=lemmy-alpha
- LEMMY_PORT=8581
- LEMMY_SETUP__ADMIN_USERNAME=lemmy_epsilon
- LEMMY_SETUP__ADMIN_PASSWORD=lemmy
- LEMMY_SETUP__SITE_NAME=lemmy-epsilon
- LEMMY_RATE_LIMIT__POST=99999
- LEMMY_RATE_LIMIT__REGISTER=99999
- LEMMY_CAPTCHA__ENABLED=false
- RUST_BACKTRACE=1
- RUST_LOG=debug
depends_on:
- postgres_epsilon
ports:
- "8581:8581"
postgres_epsilon:
image: postgres:12-alpine
environment:
- POSTGRES_USER=lemmy
- POSTGRES_PASSWORD=password
- POSTGRES_DB=lemmy
volumes:
- ./volumes/postgres_epsilon:/var/lib/postgresql/data

View file

@ -1,5 +0,0 @@
#!/bin/sh
echo "$DOCKER_PASSWORD" | docker login -u "$DOCKER_USERNAME" --password-stdin
docker tag dessalines/lemmy:travis \
dessalines/lemmy:v0.8.10
docker push dessalines/lemmy:v0.8.10

View file

@ -1,28 +0,0 @@
#!/bin/bash
set -e
# make sure there are no old containers or old data around
sudo docker-compose down
sudo rm -rf volumes
mkdir -p volumes/pictrs_{alpha,beta,gamma,delta,epsilon}
sudo chown -R 991:991 volumes/pictrs_{alpha,beta,gamma,delta,epsilon}
sudo docker build ../../ --file ../prod/Dockerfile --tag dessalines/lemmy:travis
sudo docker-compose up -d
pushd ../../api_tests
echo "Waiting for Lemmy to start..."
while [[ "$(curl -s -o /dev/null -w '%{http_code}' 'localhost:8541/api/v1/site')" != "200" ]]; do sleep 1; done
while [[ "$(curl -s -o /dev/null -w '%{http_code}' 'localhost:8551/api/v1/site')" != "200" ]]; do sleep 1; done
while [[ "$(curl -s -o /dev/null -w '%{http_code}' 'localhost:8561/api/v1/site')" != "200" ]]; do sleep 1; done
while [[ "$(curl -s -o /dev/null -w '%{http_code}' 'localhost:8571/api/v1/site')" != "200" ]]; do sleep 1; done
while [[ "$(curl -s -o /dev/null -w '%{http_code}' 'localhost:8581/api/v1/site')" != "200" ]]; do sleep 1; done
yarn
yarn api-test
popd
sudo docker-compose down
sudo rm -r volumes/

2
docs

@ -1 +1 @@
Subproject commit 93ede3dd623a40f408baf70d68dd868ea5163c53
Subproject commit cf3236bb620048897048027d8cdff34401ad85ee

View file

@ -11,9 +11,12 @@ path = "src/lib.rs"
[dependencies]
lemmy_apub = { path = "../lemmy_apub" }
lemmy_utils = { path = "../lemmy_utils" }
lemmy_db = { path = "../lemmy_db" }
lemmy_db_queries = { path = "../lemmy_db_queries" }
lemmy_db_schema = { path = "../lemmy_db_schema" }
lemmy_db_views = { path = "../lemmy_db_views" }
lemmy_db_views_moderator = { path = "../lemmy_db_views_moderator" }
lemmy_db_views_actor = { path = "../lemmy_db_views_actor" }
lemmy_structs = { path = "../lemmy_structs" }
lemmy_rate_limit = { path = "../lemmy_rate_limit" }
lemmy_websocket = { path = "../lemmy_websocket" }
diesel = "1.4.5"
bcrypt = "0.9.0"
@ -25,24 +28,24 @@ actix-web = { version = "3.3.2", default-features = false }
actix-rt = { version = "1.1.1", default-features = false }
awc = { version = "2.0.3", default-features = false }
log = "0.4.11"
rand = "0.7.3"
rand = "0.8.0"
strum = "0.20.0"
strum_macros = "0.20.1"
jsonwebtoken = "7.2.0"
lazy_static = "1.4.0"
url = { version = "2.2.0", features = ["serde"] }
openssl = "0.10.30"
http = "0.2.1"
openssl = "0.10.31"
http = "0.2.2"
http-signature-normalization-actix = { version = "0.4.1", default-features = false, features = ["sha-2"] }
base64 = "0.13.0"
tokio = "0.3.5"
tokio = "0.3.6"
futures = "0.3.8"
itertools = "0.9.0"
uuid = { version = "0.8.1", features = ["serde", "v4"] }
sha2 = "0.9.2"
async-trait = "0.1.42"
captcha = "0.0.8"
anyhow = "1.0.35"
anyhow = "1.0.36"
thiserror = "1.0.22"
background-jobs = "0.8.0"
reqwest = { version = "0.10.9", features = ["json"] }
reqwest = { version = "0.10.10", features = ["json"] }
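The dependency changes above reflect the crate split this commit introduces: the old monolithic lemmy_db is replaced by lemmy_db_queries (traits and query code), lemmy_db_schema (plain table structs), and the view crates lemmy_db_views, lemmy_db_views_actor and lemmy_db_views_moderator. A rough sketch of what a handler's imports look like under the new layout, assembled from use statements that appear later in this diff (exact item lists vary per file):

// Traits and query helpers now come from lemmy_db_queries ...
use lemmy_db_queries::{Crud, Likeable, ListingType, Saveable, SortType};
// ... the plain table structs from lemmy_db_schema ...
use lemmy_db_schema::source::{comment::*, moderator::*, post::*};
// ... and the diesel-backed aggregate views from the dedicated view crates.
use lemmy_db_views::comment_view::{CommentQueryBuilder, CommentView};
use lemmy_db_views_actor::community_view::CommunityView;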

View file

@ -1,5 +1,5 @@
use jsonwebtoken::{decode, encode, DecodingKey, EncodingKey, Header, TokenData, Validation};
use lemmy_db::user::User_;
use lemmy_db_schema::source::user::User_;
use lemmy_utils::settings::Settings;
use serde::{Deserialize, Serialize};

View file

@ -1,5 +1,6 @@
use crate::{
check_community_ban,
check_downvotes_enabled,
collect_moderated_communities,
get_post,
get_user_from_jwt,
@ -9,14 +10,8 @@ use crate::{
};
use actix_web::web::Data;
use lemmy_apub::{ApubLikeableType, ApubObjectType};
use lemmy_db::{
comment::*,
comment_report::*,
comment_view::*,
moderator::*,
post::*,
site_view::*,
user::*,
use lemmy_db_queries::{
source::comment::Comment_,
Crud,
Likeable,
ListingType,
@ -24,6 +19,11 @@ use lemmy_db::{
Saveable,
SortType,
};
use lemmy_db_schema::source::{comment::*, comment_report::*, moderator::*};
use lemmy_db_views::{
comment_report_view::{CommentReportQueryBuilder, CommentReportView},
comment_view::{CommentQueryBuilder, CommentView},
};
use lemmy_structs::{blocking, comment::*, send_local_notifs};
use lemmy_utils::{
apub::{make_apub_endpoint, EndpointType},
@ -53,6 +53,17 @@ impl Perform for CreateComment {
let content_slurs_removed = remove_slurs(&data.content.to_owned());
// Check for a community ban
let post_id = data.post_id;
let post = get_post(post_id, context.pool()).await?;
check_community_ban(user.id, post.community_id, context.pool()).await?;
// Check if post is locked, no new comments
if post.locked {
return Err(APIError::err("locked").into());
}
let comment_form = CommentForm {
content: content_slurs_removed,
parent_id: data.parent_id.to_owned(),
@ -67,17 +78,6 @@ impl Perform for CreateComment {
local: true,
};
// Check for a community ban
let post_id = data.post_id;
let post = get_post(post_id, context.pool()).await?;
check_community_ban(user.id, post.community_id, context.pool()).await?;
// Check if post is locked, no new comments
if post.locked {
return Err(APIError::err("locked").into());
}
// Create the comment
let comment_form2 = comment_form.clone();
let inserted_comment = match blocking(context.pool(), move |conn| {
@ -105,6 +105,7 @@ impl Perform for CreateComment {
updated_comment.send_create(&user, context).await?;
// Scan the comment for user mentions, add those rows
let post_id = post.id;
let mentions = scrape_text_for_mentions(&comment_form.content);
let recipient_ids = send_local_notifs(
mentions,
@ -119,7 +120,7 @@ impl Perform for CreateComment {
// You like your own comment by default
let like_form = CommentLikeForm {
comment_id: inserted_comment.id,
post_id: data.post_id,
post_id,
user_id: user.id,
score: 1,
};
@ -132,13 +133,27 @@ impl Perform for CreateComment {
updated_comment.send_like(&user, context).await?;
let user_id = user.id;
let comment_view = blocking(context.pool(), move |conn| {
let mut comment_view = blocking(context.pool(), move |conn| {
CommentView::read(&conn, inserted_comment.id, Some(user_id))
})
.await??;
// If its a comment to yourself, mark it as read
let comment_id = comment_view.comment.id;
if user.id == comment_view.get_recipient_id() {
match blocking(context.pool(), move |conn| {
Comment::update_read(conn, comment_id, true)
})
.await?
{
Ok(comment) => comment,
Err(_e) => return Err(APIError::err("couldnt_update_comment").into()),
};
comment_view.comment.read = true;
}
let mut res = CommentResponse {
comment: comment_view,
comment_view,
recipient_ids,
form_id: data.form_id.to_owned(),
};
@ -149,9 +164,7 @@ impl Perform for CreateComment {
websocket_id,
});
// strip out the recipient_ids, so that
// users don't get double notifs
res.recipient_ids = Vec::new();
res.recipient_ids = Vec::new(); // Necessary to avoid doubles
Ok(res)
}
@ -169,24 +182,24 @@ impl Perform for EditComment {
let data: &EditComment = &self;
let user = get_user_from_jwt(&data.auth, context.pool()).await?;
let edit_id = data.edit_id;
let comment_id = data.comment_id;
let orig_comment = blocking(context.pool(), move |conn| {
CommentView::read(&conn, edit_id, None)
CommentView::read(&conn, comment_id, None)
})
.await??;
check_community_ban(user.id, orig_comment.community_id, context.pool()).await?;
check_community_ban(user.id, orig_comment.community.id, context.pool()).await?;
// Verify that only the creator can edit
if user.id != orig_comment.creator_id {
if user.id != orig_comment.creator.id {
return Err(APIError::err("no_comment_edit_allowed").into());
}
// Do the update
let content_slurs_removed = remove_slurs(&data.content.to_owned());
let edit_id = data.edit_id;
let comment_id = data.comment_id;
let updated_comment = match blocking(context.pool(), move |conn| {
Comment::update_content(conn, edit_id, &content_slurs_removed)
Comment::update_content(conn, comment_id, &content_slurs_removed)
})
.await?
{
@ -198,30 +211,27 @@ impl Perform for EditComment {
updated_comment.send_update(&user, context).await?;
// Do the mentions / recipients
let post_id = orig_comment.post_id;
let post = get_post(post_id, context.pool()).await?;
let updated_comment_content = updated_comment.content.to_owned();
let mentions = scrape_text_for_mentions(&updated_comment_content);
let recipient_ids = send_local_notifs(
mentions,
updated_comment,
&user,
post,
orig_comment.post,
context.pool(),
false,
)
.await?;
let edit_id = data.edit_id;
let comment_id = data.comment_id;
let user_id = user.id;
let comment_view = blocking(context.pool(), move |conn| {
CommentView::read(conn, edit_id, Some(user_id))
CommentView::read(conn, comment_id, Some(user_id))
})
.await??;
let mut res = CommentResponse {
comment: comment_view,
let res = CommentResponse {
comment_view,
recipient_ids,
form_id: data.form_id.to_owned(),
};
@ -232,10 +242,6 @@ impl Perform for EditComment {
websocket_id,
});
// strip out the recipient_ids, so that
// users don't get double notifs
res.recipient_ids = Vec::new();
Ok(res)
}
}
@ -252,23 +258,23 @@ impl Perform for DeleteComment {
let data: &DeleteComment = &self;
let user = get_user_from_jwt(&data.auth, context.pool()).await?;
let edit_id = data.edit_id;
let comment_id = data.comment_id;
let orig_comment = blocking(context.pool(), move |conn| {
CommentView::read(&conn, edit_id, None)
CommentView::read(&conn, comment_id, None)
})
.await??;
check_community_ban(user.id, orig_comment.community_id, context.pool()).await?;
check_community_ban(user.id, orig_comment.community.id, context.pool()).await?;
// Verify that only the creator can delete
if user.id != orig_comment.creator_id {
if user.id != orig_comment.creator.id {
return Err(APIError::err("no_comment_edit_allowed").into());
}
// Do the delete
let deleted = data.deleted;
let updated_comment = match blocking(context.pool(), move |conn| {
Comment::update_deleted(conn, edit_id, deleted)
Comment::update_deleted(conn, comment_id, deleted)
})
.await?
{
@ -284,31 +290,30 @@ impl Perform for DeleteComment {
}
// Refetch it
let edit_id = data.edit_id;
let comment_id = data.comment_id;
let user_id = user.id;
let comment_view = blocking(context.pool(), move |conn| {
CommentView::read(conn, edit_id, Some(user_id))
CommentView::read(conn, comment_id, Some(user_id))
})
.await??;
// Build the recipients
let post_id = comment_view.post_id;
let post = get_post(post_id, context.pool()).await?;
let comment_view_2 = comment_view.clone();
let mentions = vec![];
let recipient_ids = send_local_notifs(
mentions,
updated_comment,
&user,
post,
comment_view_2.post,
context.pool(),
false,
)
.await?;
let mut res = CommentResponse {
comment: comment_view,
let res = CommentResponse {
comment_view,
recipient_ids,
form_id: None,
form_id: None, // TODO a comment delete might clear forms?
};
context.chat_server().do_send(SendComment {
@ -317,10 +322,6 @@ impl Perform for DeleteComment {
websocket_id,
});
// strip out the recipient_ids, so that
// users don't get double notifs
res.recipient_ids = Vec::new();
Ok(res)
}
}
@ -337,21 +338,21 @@ impl Perform for RemoveComment {
let data: &RemoveComment = &self;
let user = get_user_from_jwt(&data.auth, context.pool()).await?;
let edit_id = data.edit_id;
let comment_id = data.comment_id;
let orig_comment = blocking(context.pool(), move |conn| {
CommentView::read(&conn, edit_id, None)
CommentView::read(&conn, comment_id, None)
})
.await??;
check_community_ban(user.id, orig_comment.community_id, context.pool()).await?;
check_community_ban(user.id, orig_comment.community.id, context.pool()).await?;
// Verify that only a mod or admin can remove
is_mod_or_admin(context.pool(), user.id, orig_comment.community_id).await?;
is_mod_or_admin(context.pool(), user.id, orig_comment.community.id).await?;
// Do the remove
let removed = data.removed;
let updated_comment = match blocking(context.pool(), move |conn| {
Comment::update_removed(conn, edit_id, removed)
Comment::update_removed(conn, comment_id, removed)
})
.await?
{
@ -362,7 +363,7 @@ impl Perform for RemoveComment {
// Mod tables
let form = ModRemoveCommentForm {
mod_user_id: user.id,
comment_id: data.edit_id,
comment_id: data.comment_id,
removed: Some(removed),
reason: data.reason.to_owned(),
};
@ -379,31 +380,31 @@ impl Perform for RemoveComment {
}
// Refetch it
let edit_id = data.edit_id;
let comment_id = data.comment_id;
let user_id = user.id;
let comment_view = blocking(context.pool(), move |conn| {
CommentView::read(conn, edit_id, Some(user_id))
CommentView::read(conn, comment_id, Some(user_id))
})
.await??;
// Build the recipients
let post_id = comment_view.post_id;
let post = get_post(post_id, context.pool()).await?;
let comment_view_2 = comment_view.clone();
let mentions = vec![];
let recipient_ids = send_local_notifs(
mentions,
updated_comment,
&user,
post,
comment_view_2.post,
context.pool(),
false,
)
.await?;
let mut res = CommentResponse {
comment: comment_view,
let res = CommentResponse {
comment_view,
recipient_ids,
form_id: None,
form_id: None, // TODO maybe this might clear other forms
};
context.chat_server().do_send(SendComment {
@ -412,10 +413,6 @@ impl Perform for RemoveComment {
websocket_id,
});
// strip out the recipient_ids, so that
// users don't get double notifs
res.recipient_ids = Vec::new();
Ok(res)
}
}
@ -432,41 +429,23 @@ impl Perform for MarkCommentAsRead {
let data: &MarkCommentAsRead = &self;
let user = get_user_from_jwt(&data.auth, context.pool()).await?;
let edit_id = data.edit_id;
let comment_id = data.comment_id;
let orig_comment = blocking(context.pool(), move |conn| {
CommentView::read(&conn, edit_id, None)
CommentView::read(&conn, comment_id, None)
})
.await??;
check_community_ban(user.id, orig_comment.community_id, context.pool()).await?;
check_community_ban(user.id, orig_comment.community.id, context.pool()).await?;
// Verify that only the recipient can mark as read
// Needs to fetch the parent comment / post to get the recipient
let parent_id = orig_comment.parent_id;
match parent_id {
Some(pid) => {
let parent_comment = blocking(context.pool(), move |conn| {
CommentView::read(&conn, pid, None)
})
.await??;
if user.id != parent_comment.creator_id {
return Err(APIError::err("no_comment_edit_allowed").into());
}
}
None => {
let parent_post_id = orig_comment.post_id;
let parent_post =
blocking(context.pool(), move |conn| Post::read(conn, parent_post_id)).await??;
if user.id != parent_post.creator_id {
return Err(APIError::err("no_comment_edit_allowed").into());
}
}
if user.id != orig_comment.get_recipient_id() {
return Err(APIError::err("no_comment_edit_allowed").into());
}
// Do the mark as read
let read = data.read;
match blocking(context.pool(), move |conn| {
Comment::update_read(conn, edit_id, read)
Comment::update_read(conn, comment_id, read)
})
.await?
{
@ -475,15 +454,15 @@ impl Perform for MarkCommentAsRead {
};
// Refetch it
let edit_id = data.edit_id;
let comment_id = data.comment_id;
let user_id = user.id;
let comment_view = blocking(context.pool(), move |conn| {
CommentView::read(conn, edit_id, Some(user_id))
CommentView::read(conn, comment_id, Some(user_id))
})
.await??;
let res = CommentResponse {
comment: comment_view,
comment_view,
recipient_ids: Vec::new(),
form_id: None,
};
@ -529,7 +508,7 @@ impl Perform for SaveComment {
.await??;
Ok(CommentResponse {
comment: comment_view,
comment_view,
recipient_ids: Vec::new(),
form_id: None,
})
@ -551,12 +530,7 @@ impl Perform for CreateCommentLike {
let mut recipient_ids = Vec::new();
// Don't do a downvote if site has downvotes disabled
if data.score == -1 {
let site = blocking(context.pool(), move |conn| SiteView::read(conn)).await??;
if !site.enable_downvotes {
return Err(APIError::err("downvotes_disabled").into());
}
}
check_downvotes_enabled(data.score, context.pool()).await?;
let comment_id = data.comment_id;
let orig_comment = blocking(context.pool(), move |conn| {
@ -564,34 +538,14 @@ impl Perform for CreateCommentLike {
})
.await??;
let post_id = orig_comment.post_id;
let post = get_post(post_id, context.pool()).await?;
check_community_ban(user.id, post.community_id, context.pool()).await?;
check_community_ban(user.id, orig_comment.community.id, context.pool()).await?;
let comment_id = data.comment_id;
let comment = blocking(context.pool(), move |conn| Comment::read(conn, comment_id)).await??;
// Add to recipient ids
match comment.parent_id {
Some(parent_id) => {
let parent_comment =
blocking(context.pool(), move |conn| Comment::read(conn, parent_id)).await??;
if parent_comment.creator_id != user.id {
let parent_user = blocking(context.pool(), move |conn| {
User_::read(conn, parent_comment.creator_id)
})
.await??;
recipient_ids.push(parent_user.id);
}
}
None => {
recipient_ids.push(post.creator_id);
}
}
// Add parent user to recipients
recipient_ids.push(orig_comment.get_recipient_id());
let like_form = CommentLikeForm {
comment_id: data.comment_id,
post_id,
post_id: orig_comment.post.id,
user_id: user.id,
score: data.score,
};
@ -604,6 +558,7 @@ impl Perform for CreateCommentLike {
.await??;
// Only add the like if the score isnt 0
let comment = orig_comment.comment;
let do_add = like_form.score != 0 && (like_form.score == 1 || like_form.score == -1);
if do_add {
let like_form2 = like_form.clone();
@ -629,8 +584,8 @@ impl Perform for CreateCommentLike {
})
.await??;
let mut res = CommentResponse {
comment: liked_comment,
let res = CommentResponse {
comment_view: liked_comment,
recipient_ids,
form_id: None,
};
@ -641,10 +596,6 @@ impl Perform for CreateCommentLike {
websocket_id,
});
// strip out the recipient_ids, so that
// users don't get double notifs
res.recipient_ids = Vec::new();
Ok(res)
}
}
@ -673,8 +624,8 @@ impl Perform for GetComments {
CommentQueryBuilder::create(conn)
.listing_type(type_)
.sort(&sort)
.for_community_id(community_id)
.for_community_name(community_name)
.community_id(community_id)
.community_name(community_name)
.my_user_id(user_id)
.page(page)
.limit(limit)
@ -714,17 +665,17 @@ impl Perform for CreateCommentReport {
let user_id = user.id;
let comment_id = data.comment_id;
let comment = blocking(context.pool(), move |conn| {
let comment_view = blocking(context.pool(), move |conn| {
CommentView::read(&conn, comment_id, None)
})
.await??;
check_community_ban(user_id, comment.community_id, context.pool()).await?;
check_community_ban(user_id, comment_view.community.id, context.pool()).await?;
let report_form = CommentReportForm {
creator_id: user_id,
comment_id,
original_comment_text: comment.content,
original_comment_text: comment_view.comment.content,
reason: data.reason.to_owned(),
};
@ -749,7 +700,7 @@ impl Perform for CreateCommentReport {
context.chat_server().do_send(SendModRoomMessage {
op: UserOperation::CreateCommentReport,
response: report,
community_id: comment.community_id,
community_id: comment_view.community.id,
websocket_id,
});
@ -777,7 +728,7 @@ impl Perform for ResolveCommentReport {
.await??;
let user_id = user.id;
is_mod_or_admin(context.pool(), user_id, report.community_id).await?;
is_mod_or_admin(context.pool(), user_id, report.community.id).await?;
let resolved = data.resolved;
let resolve_fun = move |conn: &'_ _| {
@ -801,7 +752,7 @@ impl Perform for ResolveCommentReport {
context.chat_server().do_send(SendModRoomMessage {
op: UserOperation::ResolveCommentReport,
response: res.clone(),
community_id: report.community_id,
community_id: report.community.id,
websocket_id,
});
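For reference, the handlers above switch from flat view columns (orig_comment.community_id, orig_comment.creator_id) to nested view structs (orig_comment.community.id, orig_comment.creator.id), and the parent-comment/post-creator lookup that MarkCommentAsRead used to do inline is folded into CommentView::get_recipient_id(). A hedged sketch of the shape this implies; the recipient field name and the exact types are assumptions, while the other fields are the ones referenced in the hunks above:

// Illustrative only; the real definition lives in lemmy_db_views::comment_view.
pub struct CommentView {
  pub comment: Comment,         // used above as comment_view.comment.id / .content / .read
  pub creator: User_,           // used above as orig_comment.creator.id
  pub recipient: Option<User_>, // assumed: creator of the parent comment, when this is a reply
  pub post: Post,               // used above as orig_comment.post
  pub community: Community,     // used above as orig_comment.community.id
  // counts, my_vote, subscribed, etc. omitted
}

impl CommentView {
  // The user to notify: the parent comment's creator for a reply, otherwise the
  // post's creator. This is the same branch MarkCommentAsRead performed inline
  // before this commit.
  pub fn get_recipient_id(&self) -> i32 {
    match &self.recipient {
      Some(parent_creator) => parent_creator.id,
      None => self.post.creator_id,
    }
  }
}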

View file

@ -9,17 +9,13 @@ use crate::{
use actix_web::web::Data;
use anyhow::Context;
use lemmy_apub::ActorType;
use lemmy_db::{
comment::Comment,
comment_view::CommentQueryBuilder,
community::*,
community_view::*,
use lemmy_db_queries::{
diesel_option_overwrite,
moderator::*,
naive_now,
post::Post,
site::*,
user_view::*,
source::{
comment::Comment_,
community::{CommunityModerator_, Community_},
post::Post_,
},
ApubObject,
Bannable,
Crud,
@ -27,6 +23,17 @@ use lemmy_db::{
Joinable,
SortType,
};
use lemmy_db_schema::{
naive_now,
source::{comment::Comment, community::*, moderator::*, post::Post, site::*},
};
use lemmy_db_views::comment_view::CommentQueryBuilder;
use lemmy_db_views_actor::{
community_follower_view::CommunityFollowerView,
community_moderator_view::CommunityModeratorView,
community_view::{CommunityQueryBuilder, CommunityView},
user_view::UserViewSafe,
};
use lemmy_structs::{blocking, community::*};
use lemmy_utils::{
apub::{generate_actor_keypair, make_apub_endpoint, EndpointType},
@ -37,7 +44,7 @@ use lemmy_utils::{
LemmyError,
};
use lemmy_websocket::{
messages::{GetCommunityUsersOnline, JoinCommunityRoom, JoinModRoom, SendCommunityRoomMessage},
messages::{GetCommunityUsersOnline, SendCommunityRoomMessage},
LemmyContext,
UserOperation,
};
@ -56,20 +63,22 @@ impl Perform for GetCommunity {
let user = get_user_from_jwt_opt(&data.auth, context.pool()).await?;
let user_id = user.map(|u| u.id);
let name = data.name.to_owned().unwrap_or_else(|| "main".to_string());
let community = match data.id {
Some(id) => blocking(context.pool(), move |conn| Community::read(conn, id)).await??,
None => match blocking(context.pool(), move |conn| {
Community::read_from_name(conn, &name)
})
.await?
{
Ok(community) => community,
Err(_e) => return Err(APIError::err("couldnt_find_community").into()),
},
let community_id = match data.id {
Some(id) => id,
None => {
let name = data.name.to_owned().unwrap_or_else(|| "main".to_string());
match blocking(context.pool(), move |conn| {
Community::read_from_name(conn, &name)
})
.await?
{
Ok(community) => community,
Err(_e) => return Err(APIError::err("couldnt_find_community").into()),
}
.id
}
};
let community_id = community.id;
let community_view = match blocking(context.pool(), move |conn| {
CommunityView::read(conn, community_id, user_id)
})
@ -79,7 +88,6 @@ impl Perform for GetCommunity {
Err(_e) => return Err(APIError::err("couldnt_find_community").into()),
};
let community_id = community.id;
let moderators: Vec<CommunityModeratorView> = match blocking(context.pool(), move |conn| {
CommunityModeratorView::for_community(conn, community_id)
})
@ -96,7 +104,7 @@ impl Perform for GetCommunity {
.unwrap_or(1);
let res = GetCommunityResponse {
community: community_view,
community_view,
moderators,
online,
};
@ -176,6 +184,7 @@ impl Perform for CreateCommunity {
Err(_e) => return Err(APIError::err("community_already_exists").into()),
};
// The community creator becomes a moderator
let community_moderator_form = CommunityModeratorForm {
community_id: inserted_community.id,
user_id: user.id,
@ -186,6 +195,7 @@ impl Perform for CreateCommunity {
return Err(APIError::err("community_moderator_already_exists").into());
}
// Follow your own community
let community_follower_form = CommunityFollowerForm {
community_id: inserted_community.id,
user_id: user.id,
@ -203,9 +213,7 @@ impl Perform for CreateCommunity {
})
.await??;
Ok(CommunityResponse {
community: community_view,
})
Ok(CommunityResponse { community_view })
}
}
@ -225,19 +233,21 @@ impl Perform for EditCommunity {
check_slurs_opt(&data.description)?;
// Verify its a mod (only mods can edit it)
let edit_id = data.edit_id;
let community_id = data.community_id;
let mods: Vec<i32> = blocking(context.pool(), move |conn| {
CommunityModeratorView::for_community(conn, edit_id)
.map(|v| v.into_iter().map(|m| m.user_id).collect())
CommunityModeratorView::for_community(conn, community_id)
.map(|v| v.into_iter().map(|m| m.moderator.id).collect())
})
.await??;
if !mods.contains(&user.id) {
return Err(APIError::err("not_a_moderator").into());
}
let edit_id = data.edit_id;
let read_community =
blocking(context.pool(), move |conn| Community::read(conn, edit_id)).await??;
let community_id = data.community_id;
let read_community = blocking(context.pool(), move |conn| {
Community::read(conn, community_id)
})
.await??;
let icon = diesel_option_overwrite(&data.icon);
let banner = diesel_option_overwrite(&data.banner);
@ -265,9 +275,9 @@ impl Perform for EditCommunity {
published: None,
};
let edit_id = data.edit_id;
let community_id = data.community_id;
match blocking(context.pool(), move |conn| {
Community::update(conn, edit_id, &community_form)
Community::update(conn, community_id, &community_form)
})
.await?
{
@ -278,16 +288,14 @@ impl Perform for EditCommunity {
// TODO there needs to be some kind of an apub update
// process for communities and users
let edit_id = data.edit_id;
let community_id = data.community_id;
let user_id = user.id;
let community_view = blocking(context.pool(), move |conn| {
CommunityView::read(conn, edit_id, Some(user_id))
CommunityView::read(conn, community_id, Some(user_id))
})
.await??;
let res = CommunityResponse {
community: community_view,
};
let res = CommunityResponse { community_view };
send_community_websocket(&res, context, websocket_id, UserOperation::EditCommunity);
@ -308,18 +316,20 @@ impl Perform for DeleteCommunity {
let user = get_user_from_jwt(&data.auth, context.pool()).await?;
// Verify its the creator (only a creator can delete the community)
let edit_id = data.edit_id;
let read_community =
blocking(context.pool(), move |conn| Community::read(conn, edit_id)).await??;
let community_id = data.community_id;
let read_community = blocking(context.pool(), move |conn| {
Community::read(conn, community_id)
})
.await??;
if read_community.creator_id != user.id {
return Err(APIError::err("no_community_edit_allowed").into());
}
// Do the delete
let edit_id = data.edit_id;
let community_id = data.community_id;
let deleted = data.deleted;
let updated_community = match blocking(context.pool(), move |conn| {
Community::update_deleted(conn, edit_id, deleted)
Community::update_deleted(conn, community_id, deleted)
})
.await?
{
@ -334,16 +344,14 @@ impl Perform for DeleteCommunity {
updated_community.send_undo_delete(context).await?;
}
let edit_id = data.edit_id;
let community_id = data.community_id;
let user_id = user.id;
let community_view = blocking(context.pool(), move |conn| {
CommunityView::read(conn, edit_id, Some(user_id))
CommunityView::read(conn, community_id, Some(user_id))
})
.await??;
let res = CommunityResponse {
community: community_view,
};
let res = CommunityResponse { community_view };
send_community_websocket(&res, context, websocket_id, UserOperation::DeleteCommunity);
@ -367,10 +375,10 @@ impl Perform for RemoveCommunity {
is_admin(context.pool(), user.id).await?;
// Do the remove
let edit_id = data.edit_id;
let community_id = data.community_id;
let removed = data.removed;
let updated_community = match blocking(context.pool(), move |conn| {
Community::update_removed(conn, edit_id, removed)
Community::update_removed(conn, community_id, removed)
})
.await?
{
@ -385,7 +393,7 @@ impl Perform for RemoveCommunity {
};
let form = ModRemoveCommunityForm {
mod_user_id: user.id,
community_id: data.edit_id,
community_id: data.community_id,
removed: Some(removed),
reason: data.reason.to_owned(),
expires,
@ -402,16 +410,14 @@ impl Perform for RemoveCommunity {
updated_community.send_undo_remove(context).await?;
}
let edit_id = data.edit_id;
let community_id = data.community_id;
let user_id = user.id;
let community_view = blocking(context.pool(), move |conn| {
CommunityView::read(conn, edit_id, Some(user_id))
CommunityView::read(conn, community_id, Some(user_id))
})
.await??;
let res = CommunityResponse {
community: community_view,
};
let res = CommunityResponse { community_view };
send_community_websocket(&res, context, websocket_id, UserOperation::RemoveCommunity);
@ -448,8 +454,8 @@ impl Perform for ListCommunities {
let communities = blocking(context.pool(), move |conn| {
CommunityQueryBuilder::create(conn)
.sort(&sort)
.for_user(user_id)
.show_nsfw(show_nsfw)
.my_user_id(user_id)
.page(page)
.limit(limit)
.list()
@ -520,12 +526,10 @@ impl Perform for FollowCommunity {
// For now, just assume that remote follows are accepted.
// Otherwise, the subscribed will be null
if !community.local {
community_view.subscribed = Some(data.follow);
community_view.subscribed = data.follow;
}
Ok(CommunityResponse {
community: community_view,
})
Ok(CommunityResponse { community_view })
}
}
@ -592,28 +596,28 @@ impl Perform for BanFromCommunity {
}
// Remove/Restore their data if that's desired
if let Some(remove_data) = data.remove_data {
if data.remove_data {
// Posts
blocking(context.pool(), move |conn: &'_ _| {
Post::update_removed_for_creator(conn, banned_user_id, Some(community_id), remove_data)
Post::update_removed_for_creator(conn, banned_user_id, Some(community_id), true)
})
.await??;
// Comments
// Diesel doesn't allow updates with joins, so this has to be a loop
// TODO Diesel doesn't allow updates with joins, so this has to be a loop
let comments = blocking(context.pool(), move |conn| {
CommentQueryBuilder::create(conn)
.for_creator_id(banned_user_id)
.for_community_id(community_id)
.creator_id(banned_user_id)
.community_id(community_id)
.limit(std::i64::MAX)
.list()
})
.await??;
for comment in &comments {
let comment_id = comment.id;
for comment_view in &comments {
let comment_id = comment_view.comment.id;
blocking(context.pool(), move |conn: &'_ _| {
Comment::update_removed(conn, comment_id, remove_data)
Comment::update_removed(conn, comment_id, true)
})
.await??;
}
@ -641,12 +645,12 @@ impl Perform for BanFromCommunity {
let user_id = data.user_id;
let user_view = blocking(context.pool(), move |conn| {
UserView::get_user_secure(conn, user_id)
UserViewSafe::read(conn, user_id)
})
.await??;
let res = BanFromCommunityResponse {
user: user_view,
user_view,
banned: data.ban,
};
@ -749,17 +753,20 @@ impl Perform for TransferCommunity {
})
.await??;
let mut admins = blocking(context.pool(), move |conn| UserView::admins(conn)).await??;
let mut admins = blocking(context.pool(), move |conn| UserViewSafe::admins(conn)).await??;
// Making sure the creator, if an admin, is at the top
let creator_index = admins
.iter()
.position(|r| r.id == site_creator_id)
.position(|r| r.user.id == site_creator_id)
.context(location_info!())?;
let creator_user = admins.remove(creator_index);
admins.insert(0, creator_user);
// Make sure user is the creator, or an admin
if user.id != read_community.creator_id && !admins.iter().map(|a| a.id).any(|x| x == user.id) {
if user.id != read_community.creator_id
&& !admins.iter().map(|a| a.user.id).any(|x| x == user.id)
{
return Err(APIError::err("not_an_admin").into());
}
@ -778,7 +785,7 @@ impl Perform for TransferCommunity {
.await??;
let creator_index = community_mods
.iter()
.position(|r| r.user_id == data.user_id)
.position(|r| r.moderator.id == data.user_id)
.context(location_info!())?;
let creator_user = community_mods.remove(creator_index);
community_mods.insert(0, creator_user);
@ -792,8 +799,8 @@ impl Perform for TransferCommunity {
// TODO: this should probably be a bulk operation
for cmod in &community_mods {
let community_moderator_form = CommunityModeratorForm {
community_id: cmod.community_id,
user_id: cmod.user_id,
community_id: cmod.community.id,
user_id: cmod.moderator.id,
};
let join = move |conn: &'_ _| CommunityModerator::join(conn, &community_moderator_form);
@ -837,7 +844,7 @@ impl Perform for TransferCommunity {
// Return the jwt
Ok(GetCommunityResponse {
community: community_view,
community_view,
moderators,
online: 0,
})
@ -852,57 +859,12 @@ fn send_community_websocket(
) {
// Strip out the user id and subscribed when sending to others
let mut res_sent = res.clone();
res_sent.community.user_id = None;
res_sent.community.subscribed = None;
res_sent.community_view.subscribed = false;
context.chat_server().do_send(SendCommunityRoomMessage {
op,
response: res_sent,
community_id: res.community.id,
community_id: res.community_view.community.id,
websocket_id,
});
}
#[async_trait::async_trait(?Send)]
impl Perform for CommunityJoin {
type Response = CommunityJoinResponse;
async fn perform(
&self,
context: &Data<LemmyContext>,
websocket_id: Option<ConnectionId>,
) -> Result<CommunityJoinResponse, LemmyError> {
let data: &CommunityJoin = &self;
if let Some(ws_id) = websocket_id {
context.chat_server().do_send(JoinCommunityRoom {
community_id: data.community_id,
id: ws_id,
});
}
Ok(CommunityJoinResponse { joined: true })
}
}
#[async_trait::async_trait(?Send)]
impl Perform for ModJoin {
type Response = ModJoinResponse;
async fn perform(
&self,
context: &Data<LemmyContext>,
websocket_id: Option<ConnectionId>,
) -> Result<ModJoinResponse, LemmyError> {
let data: &ModJoin = &self;
if let Some(ws_id) = websocket_id {
context.chat_server().do_send(JoinModRoom {
community_id: data.community_id,
id: ws_id,
});
}
Ok(ModJoinResponse { joined: true })
}
}

View file

@ -1,14 +1,25 @@
use crate::claims::Claims;
use actix_web::{web, web::Data};
use lemmy_db::{
community::{Community, CommunityModerator},
community_view::CommunityUserBanView,
post::Post,
user::User_,
use lemmy_db_queries::{
source::{
community::{CommunityModerator_, Community_},
site::Site_,
user::UserSafeSettings_,
},
Crud,
DbPool,
};
use lemmy_structs::{blocking, comment::*, community::*, post::*, site::*, user::*};
use lemmy_db_schema::source::{
community::{Community, CommunityModerator},
post::Post,
site::Site,
user::{UserSafeSettings, User_},
};
use lemmy_db_views_actor::{
community_user_ban_view::CommunityUserBanView,
community_view::CommunityView,
};
use lemmy_structs::{blocking, comment::*, community::*, post::*, site::*, user::*, websocket::*};
use lemmy_utils::{settings::Settings, APIError, ConnectionId, LemmyError};
use lemmy_websocket::{serialize_websocket_message, LemmyContext, UserOperation};
use serde::Deserialize;
@ -22,6 +33,7 @@ pub mod post;
pub mod site;
pub mod user;
pub mod version;
pub mod websocket;
#[async_trait::async_trait(?Send)]
pub trait Perform {
@ -40,7 +52,7 @@ pub(crate) async fn is_mod_or_admin(
community_id: i32,
) -> Result<(), LemmyError> {
let is_mod_or_admin = blocking(pool, move |conn| {
Community::is_mod_or_admin(conn, user_id, community_id)
CommunityView::is_mod_or_admin(conn, user_id, community_id)
})
.await?;
if !is_mod_or_admin {
@ -87,6 +99,33 @@ pub(crate) async fn get_user_from_jwt_opt(
}
}
pub(crate) async fn get_user_safe_settings_from_jwt(
jwt: &str,
pool: &DbPool,
) -> Result<UserSafeSettings, LemmyError> {
let claims = match Claims::decode(&jwt) {
Ok(claims) => claims.claims,
Err(_e) => return Err(APIError::err("not_logged_in").into()),
};
let user_id = claims.id;
let user = blocking(pool, move |conn| UserSafeSettings::read(conn, user_id)).await??;
// Check for a site ban
if user.banned {
return Err(APIError::err("site_ban").into());
}
Ok(user)
}
pub(crate) async fn get_user_safe_settings_from_jwt_opt(
jwt: &Option<String>,
pool: &DbPool,
) -> Result<Option<UserSafeSettings>, LemmyError> {
match jwt {
Some(jwt) => Ok(Some(get_user_safe_settings_from_jwt(jwt, pool).await?)),
None => Ok(None),
}
}
pub(crate) async fn check_community_ban(
user_id: i32,
community_id: i32,
@ -100,6 +139,16 @@ pub(crate) async fn check_community_ban(
}
}
pub(crate) async fn check_downvotes_enabled(score: i16, pool: &DbPool) -> Result<(), LemmyError> {
if score == -1 {
let site = blocking(pool, move |conn| Site::read_simple(conn)).await??;
if !site.enable_downvotes {
return Err(APIError::err("downvotes_disabled").into());
}
}
Ok(())
}
/// Returns a list of communities that the user moderates
/// or if a community_id is supplied validates the user is a moderator
/// of that community and returns the community id in a vec
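The new helpers above (the UserSafeSettings JWT readers and check_downvotes_enabled) centralize checks that the old handlers inlined; compare the SiteView::read blocks removed from comment.rs and post.rs. A minimal, hypothetical call sequence showing how a vote handler is expected to combine them; the validate_vote wrapper itself is not part of this commit:

use crate::{check_community_ban, check_downvotes_enabled, get_user_from_jwt};
use lemmy_db_queries::DbPool;
use lemmy_utils::LemmyError;

async fn validate_vote(
  jwt: &str,
  score: i16,
  community_id: i32,
  pool: &DbPool,
) -> Result<(), LemmyError> {
  // Resolve the user from the JWT (this also rejects site-banned users).
  let user = get_user_from_jwt(jwt, pool).await?;
  // Reject a -1 score when the site has downvotes disabled.
  check_downvotes_enabled(score, pool).await?;
  // Reject users banned from this community.
  check_community_ban(user.id, community_id, pool).await?;
  Ok(())
}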

View file

@ -1,5 +1,6 @@
use crate::{
check_community_ban,
check_downvotes_enabled,
check_optional_url,
collect_moderated_communities,
get_user_from_jwt,
@ -9,15 +10,8 @@ use crate::{
};
use actix_web::web::Data;
use lemmy_apub::{ApubLikeableType, ApubObjectType};
use lemmy_db::{
comment_view::*,
community_view::*,
moderator::*,
naive_now,
post::*,
post_report::*,
post_view::*,
site_view::*,
use lemmy_db_queries::{
source::post::Post_,
Crud,
Likeable,
ListingType,
@ -25,6 +19,23 @@ use lemmy_db::{
Saveable,
SortType,
};
use lemmy_db_schema::{
naive_now,
source::{
moderator::*,
post::*,
post_report::{PostReport, PostReportForm},
},
};
use lemmy_db_views::{
comment_view::CommentQueryBuilder,
post_report_view::{PostReportQueryBuilder, PostReportView},
post_view::{PostQueryBuilder, PostView},
};
use lemmy_db_views_actor::{
community_moderator_view::CommunityModeratorView,
community_view::CommunityView,
};
use lemmy_structs::{blocking, post::*};
use lemmy_utils::{
apub::{make_apub_endpoint, EndpointType},
@ -35,7 +46,7 @@ use lemmy_utils::{
LemmyError,
};
use lemmy_websocket::{
messages::{GetPostUsersOnline, JoinPostRoom, SendModRoomMessage, SendPost, SendUserRoomMessage},
messages::{GetPostUsersOnline, SendModRoomMessage, SendPost, SendUserRoomMessage},
LemmyContext,
UserOperation,
};
@ -142,7 +153,7 @@ impl Perform for CreatePost {
Err(_e) => return Err(APIError::err("couldnt_find_post").into()),
};
let res = PostResponse { post: post_view };
let res = PostResponse { post_view };
context.chat_server().do_send(SendPost {
op: UserOperation::CreatePost,
@ -180,25 +191,29 @@ impl Perform for GetPost {
let id = data.id;
let comments = blocking(context.pool(), move |conn| {
CommentQueryBuilder::create(conn)
.for_post_id(id)
.my_user_id(user_id)
.post_id(id)
.limit(9999)
.list()
})
.await??;
let community_id = post_view.community_id;
let community = blocking(context.pool(), move |conn| {
CommunityView::read(conn, community_id, user_id)
})
.await??;
let community_id = post_view.community_id;
let community_id = post_view.community.id;
let moderators = blocking(context.pool(), move |conn| {
CommunityModeratorView::for_community(conn, community_id)
})
.await??;
// Necessary for the sidebar
let community_view = match blocking(context.pool(), move |conn| {
CommunityView::read(conn, community_id, user_id)
})
.await?
{
Ok(community) => community,
Err(_e) => return Err(APIError::err("couldnt_find_community").into()),
};
let online = context
.chat_server()
.send(GetPostUsersOnline { post_id: data.id })
@ -207,9 +222,9 @@ impl Perform for GetPost {
// Return the jwt
Ok(GetPostResponse {
post: post_view,
post_view,
community_view,
comments,
community,
moderators,
online,
})
@ -250,8 +265,8 @@ impl Perform for GetPosts {
.listing_type(&type_)
.sort(&sort)
.show_nsfw(show_nsfw)
.for_community_id(community_id)
.for_community_name(community_name)
.community_id(community_id)
.community_name(community_name)
.my_user_id(user_id)
.page(page)
.limit(limit)
@ -280,12 +295,7 @@ impl Perform for CreatePostLike {
let user = get_user_from_jwt(&data.auth, context.pool()).await?;
// Don't do a downvote if site has downvotes disabled
if data.score == -1 {
let site = blocking(context.pool(), move |conn| SiteView::read(conn)).await??;
if !site.enable_downvotes {
return Err(APIError::err("downvotes_disabled").into());
}
}
check_downvotes_enabled(data.score, context.pool()).await?;
// Check for a community ban
let post_id = data.post_id;
@ -335,7 +345,7 @@ impl Perform for CreatePostLike {
Err(_e) => return Err(APIError::err("couldnt_find_post").into()),
};
let res = PostResponse { post: post_view };
let res = PostResponse { post_view };
context.chat_server().do_send(SendPost {
op: UserOperation::CreatePostLike,
@ -366,8 +376,8 @@ impl Perform for EditPost {
return Err(APIError::err("invalid_post_title").into());
}
let edit_id = data.edit_id;
let orig_post = blocking(context.pool(), move |conn| Post::read(conn, edit_id)).await??;
let post_id = data.post_id;
let orig_post = blocking(context.pool(), move |conn| Post::read(conn, post_id)).await??;
check_community_ban(user.id, orig_post.community_id, context.pool()).await?;
@ -401,9 +411,9 @@ impl Perform for EditPost {
published: None,
};
let edit_id = data.edit_id;
let post_id = data.post_id;
let res = blocking(context.pool(), move |conn| {
Post::update(conn, edit_id, &post_form)
Post::update(conn, post_id, &post_form)
})
.await?;
let updated_post: Post = match res {
@ -422,13 +432,13 @@ impl Perform for EditPost {
// Send apub update
updated_post.send_update(&user, context).await?;
let edit_id = data.edit_id;
let post_id = data.post_id;
let post_view = blocking(context.pool(), move |conn| {
PostView::read(conn, edit_id, Some(user.id))
PostView::read(conn, post_id, Some(user.id))
})
.await??;
let res = PostResponse { post: post_view };
let res = PostResponse { post_view };
context.chat_server().do_send(SendPost {
op: UserOperation::EditPost,
@ -452,8 +462,8 @@ impl Perform for DeletePost {
let data: &DeletePost = &self;
let user = get_user_from_jwt(&data.auth, context.pool()).await?;
let edit_id = data.edit_id;
let orig_post = blocking(context.pool(), move |conn| Post::read(conn, edit_id)).await??;
let post_id = data.post_id;
let orig_post = blocking(context.pool(), move |conn| Post::read(conn, post_id)).await??;
check_community_ban(user.id, orig_post.community_id, context.pool()).await?;
@ -463,10 +473,10 @@ impl Perform for DeletePost {
}
// Update the post
let edit_id = data.edit_id;
let post_id = data.post_id;
let deleted = data.deleted;
let updated_post = blocking(context.pool(), move |conn| {
Post::update_deleted(conn, edit_id, deleted)
Post::update_deleted(conn, post_id, deleted)
})
.await??;
@ -478,13 +488,13 @@ impl Perform for DeletePost {
}
// Refetch the post
let edit_id = data.edit_id;
let post_id = data.post_id;
let post_view = blocking(context.pool(), move |conn| {
PostView::read(conn, edit_id, Some(user.id))
PostView::read(conn, post_id, Some(user.id))
})
.await??;
let res = PostResponse { post: post_view };
let res = PostResponse { post_view };
context.chat_server().do_send(SendPost {
op: UserOperation::DeletePost,
@ -508,8 +518,8 @@ impl Perform for RemovePost {
let data: &RemovePost = &self;
let user = get_user_from_jwt(&data.auth, context.pool()).await?;
let edit_id = data.edit_id;
let orig_post = blocking(context.pool(), move |conn| Post::read(conn, edit_id)).await??;
let post_id = data.post_id;
let orig_post = blocking(context.pool(), move |conn| Post::read(conn, post_id)).await??;
check_community_ban(user.id, orig_post.community_id, context.pool()).await?;
@ -517,17 +527,17 @@ impl Perform for RemovePost {
is_mod_or_admin(context.pool(), user.id, orig_post.community_id).await?;
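is_mod_or_admin guards the moderator-only actions in this file (remove, lock, sticky) but its body is outside this diff; one plausible shape, with the view method name assumed rather than taken from the commit:

pub async fn is_mod_or_admin(
  pool: &DbPool,
  user_id: i32,
  community_id: i32,
) -> Result<(), LemmyError> {
  // Assumed: pass if the user moderates the community or is a site admin.
  let is_mod_or_admin = blocking(pool, move |conn| {
    CommunityView::is_mod_or_admin(conn, user_id, community_id)
  })
  .await?;
  if !is_mod_or_admin {
    return Err(APIError::err("not_a_mod_or_admin").into());
  }
  Ok(())
}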
// Update the post
let edit_id = data.edit_id;
let post_id = data.post_id;
let removed = data.removed;
let updated_post = blocking(context.pool(), move |conn| {
Post::update_removed(conn, edit_id, removed)
Post::update_removed(conn, post_id, removed)
})
.await??;
// Mod tables
let form = ModRemovePostForm {
mod_user_id: user.id,
post_id: data.edit_id,
post_id: data.post_id,
removed: Some(removed),
reason: data.reason.to_owned(),
};
@ -544,14 +554,14 @@ impl Perform for RemovePost {
}
// Refetch the post
let edit_id = data.edit_id;
let post_id = data.post_id;
let user_id = user.id;
let post_view = blocking(context.pool(), move |conn| {
PostView::read(conn, edit_id, Some(user_id))
PostView::read(conn, post_id, Some(user_id))
})
.await??;
let res = PostResponse { post: post_view };
let res = PostResponse { post_view };
context.chat_server().do_send(SendPost {
op: UserOperation::RemovePost,
@ -575,8 +585,8 @@ impl Perform for LockPost {
let data: &LockPost = &self;
let user = get_user_from_jwt(&data.auth, context.pool()).await?;
let edit_id = data.edit_id;
let orig_post = blocking(context.pool(), move |conn| Post::read(conn, edit_id)).await??;
let post_id = data.post_id;
let orig_post = blocking(context.pool(), move |conn| Post::read(conn, post_id)).await??;
check_community_ban(user.id, orig_post.community_id, context.pool()).await?;
@ -584,17 +594,17 @@ impl Perform for LockPost {
is_mod_or_admin(context.pool(), user.id, orig_post.community_id).await?;
// Update the post
let edit_id = data.edit_id;
let post_id = data.post_id;
let locked = data.locked;
let updated_post = blocking(context.pool(), move |conn| {
Post::update_locked(conn, edit_id, locked)
Post::update_locked(conn, post_id, locked)
})
.await??;
// Mod tables
let form = ModLockPostForm {
mod_user_id: user.id,
post_id: data.edit_id,
post_id: data.post_id,
locked: Some(locked),
};
blocking(context.pool(), move |conn| ModLockPost::create(conn, &form)).await??;
@ -603,13 +613,13 @@ impl Perform for LockPost {
updated_post.send_update(&user, context).await?;
// Refetch the post
let edit_id = data.edit_id;
let post_id = data.post_id;
let post_view = blocking(context.pool(), move |conn| {
PostView::read(conn, edit_id, Some(user.id))
PostView::read(conn, post_id, Some(user.id))
})
.await??;
let res = PostResponse { post: post_view };
let res = PostResponse { post_view };
context.chat_server().do_send(SendPost {
op: UserOperation::LockPost,
@ -633,8 +643,8 @@ impl Perform for StickyPost {
let data: &StickyPost = &self;
let user = get_user_from_jwt(&data.auth, context.pool()).await?;
let edit_id = data.edit_id;
let orig_post = blocking(context.pool(), move |conn| Post::read(conn, edit_id)).await??;
let post_id = data.post_id;
let orig_post = blocking(context.pool(), move |conn| Post::read(conn, post_id)).await??;
check_community_ban(user.id, orig_post.community_id, context.pool()).await?;
@ -642,17 +652,17 @@ impl Perform for StickyPost {
is_mod_or_admin(context.pool(), user.id, orig_post.community_id).await?;
// Update the post
let edit_id = data.edit_id;
let post_id = data.post_id;
let stickied = data.stickied;
let updated_post = blocking(context.pool(), move |conn| {
Post::update_stickied(conn, edit_id, stickied)
Post::update_stickied(conn, post_id, stickied)
})
.await??;
// Mod tables
let form = ModStickyPostForm {
mod_user_id: user.id,
post_id: data.edit_id,
post_id: data.post_id,
stickied: Some(stickied),
};
blocking(context.pool(), move |conn| {
@ -665,13 +675,13 @@ impl Perform for StickyPost {
updated_post.send_update(&user, context).await?;
// Refetch the post
let edit_id = data.edit_id;
let post_id = data.post_id;
let post_view = blocking(context.pool(), move |conn| {
PostView::read(conn, edit_id, Some(user.id))
PostView::read(conn, post_id, Some(user.id))
})
.await??;
let res = PostResponse { post: post_view };
let res = PostResponse { post_view };
context.chat_server().do_send(SendPost {
op: UserOperation::StickyPost,
@ -719,29 +729,7 @@ impl Perform for SavePost {
})
.await??;
Ok(PostResponse { post: post_view })
}
}
#[async_trait::async_trait(?Send)]
impl Perform for PostJoin {
type Response = PostJoinResponse;
async fn perform(
&self,
context: &Data<LemmyContext>,
websocket_id: Option<ConnectionId>,
) -> Result<PostJoinResponse, LemmyError> {
let data: &PostJoin = &self;
if let Some(ws_id) = websocket_id {
context.chat_server().do_send(JoinPostRoom {
post_id: data.post_id,
id: ws_id,
});
}
Ok(PostJoinResponse { joined: true })
Ok(PostResponse { post_view })
}
}
@ -769,19 +757,19 @@ impl Perform for CreatePostReport {
let user_id = user.id;
let post_id = data.post_id;
let post = blocking(context.pool(), move |conn| {
let post_view = blocking(context.pool(), move |conn| {
PostView::read(&conn, post_id, None)
})
.await??;
check_community_ban(user_id, post.community_id, context.pool()).await?;
check_community_ban(user_id, post_view.community.id, context.pool()).await?;
let report_form = PostReportForm {
creator_id: user_id,
post_id,
original_post_name: post.name,
original_post_url: post.url,
original_post_body: post.body,
original_post_name: post_view.post.name,
original_post_url: post_view.post.url,
original_post_body: post_view.post.body,
reason: data.reason.to_owned(),
};
@ -806,7 +794,7 @@ impl Perform for CreatePostReport {
context.chat_server().do_send(SendModRoomMessage {
op: UserOperation::CreatePostReport,
response: report,
community_id: post.community_id,
community_id: post_view.community.id,
websocket_id,
});
@ -834,7 +822,7 @@ impl Perform for ResolvePostReport {
.await??;
let user_id = user.id;
is_mod_or_admin(context.pool(), user_id, report.community_id).await?;
is_mod_or_admin(context.pool(), user_id, report.community.id).await?;
let resolved = data.resolved;
let resolve_fun = move |conn: &'_ _| {
@ -857,7 +845,7 @@ impl Perform for ResolvePostReport {
context.chat_server().do_send(SendModRoomMessage {
op: UserOperation::ResolvePostReport,
response: res.clone(),
community_id: report.community_id,
community_id: report.community.id,
websocket_id,
});


@ -1,6 +1,8 @@
use crate::{
get_user_from_jwt,
get_user_from_jwt_opt,
get_user_safe_settings_from_jwt,
get_user_safe_settings_from_jwt_opt,
is_admin,
linked_instances,
version,
@ -8,23 +10,42 @@ use crate::{
};
use actix_web::web::Data;
use anyhow::Context;
use lemmy_apub::fetcher::search_by_apub_id;
use lemmy_db::{
category::*,
comment_view::*,
community_view::*,
use lemmy_apub::fetcher::search::search_by_apub_id;
use lemmy_db_queries::{
diesel_option_overwrite,
moderator::*,
moderator_views::*,
naive_now,
post_view::*,
site::*,
site_view::*,
user_view::*,
source::{category::Category_, site::Site_},
Crud,
SearchType,
SortType,
};
use lemmy_db_schema::{
naive_now,
source::{
category::Category,
moderator::*,
site::{Site, *},
},
};
use lemmy_db_views::{
comment_view::CommentQueryBuilder,
post_view::PostQueryBuilder,
site_view::SiteView,
};
use lemmy_db_views_actor::{
community_view::CommunityQueryBuilder,
user_view::{UserQueryBuilder, UserViewSafe},
};
use lemmy_db_views_moderator::{
mod_add_community_view::ModAddCommunityView,
mod_add_view::ModAddView,
mod_ban_from_community_view::ModBanFromCommunityView,
mod_ban_view::ModBanView,
mod_lock_post_view::ModLockPostView,
mod_remove_comment_view::ModRemoveCommentView,
mod_remove_community_view::ModRemoveCommunityView,
mod_remove_post_view::ModRemovePostView,
mod_sticky_post_view::ModStickyPostView,
};
use lemmy_structs::{blocking, site::*, user::Register};
use lemmy_utils::{
location_info,
@ -145,7 +166,7 @@ impl Perform for CreateSite {
) -> Result<SiteResponse, LemmyError> {
let data: &CreateSite = &self;
let read_site = move |conn: &'_ _| Site::read(conn, 1);
let read_site = move |conn: &'_ _| Site::read_simple(conn);
if blocking(context.pool(), read_site).await?.is_ok() {
return Err(APIError::err("site_already_exists").into());
};
@ -177,7 +198,7 @@ impl Perform for CreateSite {
let site_view = blocking(context.pool(), move |conn| SiteView::read(conn)).await??;
Ok(SiteResponse { site: site_view })
Ok(SiteResponse { site_view })
}
}
@ -198,7 +219,7 @@ impl Perform for EditSite {
// Make sure user is an admin
is_admin(context.pool(), user.id).await?;
let found_site = blocking(context.pool(), move |conn| Site::read(conn, 1)).await??;
let found_site = blocking(context.pool(), move |conn| Site::read_simple(conn)).await??;
let icon = diesel_option_overwrite(&data.icon);
let banner = diesel_option_overwrite(&data.banner);
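diesel_option_overwrite, imported from lemmy_db_queries above, is what turns these optional form fields into diesel update values. A rough sketch of its likely semantics (the body is assumed, not part of this diff): None leaves the column untouched, an empty string clears it, anything else overwrites it.

pub fn diesel_option_overwrite(opt: &Option<String>) -> Option<Option<String>> {
  match opt {
    // An empty string clears the column (sets it to NULL on update).
    Some(unwrapped) if unwrapped.is_empty() => Some(None),
    // Any other value overwrites the column.
    Some(unwrapped) => Some(Some(unwrapped.to_owned())),
    // No value supplied: leave the column as it is.
    None => None,
  }
}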
@ -222,7 +243,7 @@ impl Perform for EditSite {
let site_view = blocking(context.pool(), move |conn| SiteView::read(conn)).await??;
let res = SiteResponse { site: site_view };
let res = SiteResponse { site_view };
context.chat_server().do_send(SendAllMessage {
op: UserOperation::EditSite,
@ -245,55 +266,56 @@ impl Perform for GetSite {
) -> Result<GetSiteResponse, LemmyError> {
let data: &GetSite = &self;
// TODO refactor this a little
let res = blocking(context.pool(), move |conn| Site::read(conn, 1)).await?;
let site_view = if res.is_ok() {
Some(blocking(context.pool(), move |conn| SiteView::read(conn)).await??)
} else if let Some(setup) = Settings::get().setup.as_ref() {
let register = Register {
username: setup.admin_username.to_owned(),
email: setup.admin_email.to_owned(),
password: setup.admin_password.to_owned(),
password_verify: setup.admin_password.to_owned(),
admin: true,
show_nsfw: true,
captcha_uuid: None,
captcha_answer: None,
};
let login_response = register.perform(context, websocket_id).await?;
info!("Admin {} created", setup.admin_username);
let site_view = match blocking(context.pool(), move |conn| SiteView::read(conn)).await? {
Ok(site_view) => Some(site_view),
// If the site isn't created yet, check the setup
Err(_) => {
if let Some(setup) = Settings::get().setup.as_ref() {
let register = Register {
username: setup.admin_username.to_owned(),
email: setup.admin_email.to_owned(),
password: setup.admin_password.to_owned(),
password_verify: setup.admin_password.to_owned(),
show_nsfw: true,
captcha_uuid: None,
captcha_answer: None,
};
let login_response = register.perform(context, websocket_id).await?;
info!("Admin {} created", setup.admin_username);
let create_site = CreateSite {
name: setup.site_name.to_owned(),
description: None,
icon: None,
banner: None,
enable_downvotes: true,
open_registration: true,
enable_nsfw: true,
auth: login_response.jwt,
};
create_site.perform(context, websocket_id).await?;
info!("Site {} created", setup.site_name);
Some(blocking(context.pool(), move |conn| SiteView::read(conn)).await??)
} else {
None
let create_site = CreateSite {
name: setup.site_name.to_owned(),
description: None,
icon: None,
banner: None,
enable_downvotes: true,
open_registration: true,
enable_nsfw: true,
auth: login_response.jwt,
};
create_site.perform(context, websocket_id).await?;
info!("Site {} created", setup.site_name);
Some(blocking(context.pool(), move |conn| SiteView::read(conn)).await??)
} else {
None
}
}
};
let mut admins = blocking(context.pool(), move |conn| UserView::admins(conn)).await??;
let mut admins = blocking(context.pool(), move |conn| UserViewSafe::admins(conn)).await??;
// Make sure the site creator is the top admin
if let Some(site_view) = site_view.to_owned() {
let site_creator_id = site_view.creator_id;
let site_creator_id = site_view.creator.id;
// TODO investigate why this is sometimes coming back null
// Maybe user_.admin isn't being set to true?
if let Some(creator_index) = admins.iter().position(|r| r.id == site_creator_id) {
if let Some(creator_index) = admins.iter().position(|r| r.user.id == site_creator_id) {
let creator_user = admins.remove(creator_index);
admins.insert(0, creator_user);
}
}
let banned = blocking(context.pool(), move |conn| UserView::banned(conn)).await??;
let banned = blocking(context.pool(), move |conn| UserViewSafe::banned(conn)).await??;
let online = context
.chat_server()
@ -301,17 +323,10 @@ impl Perform for GetSite {
.await
.unwrap_or(1);
let my_user = get_user_from_jwt_opt(&data.auth, context.pool())
.await?
.map(|mut u| {
u.password_encrypted = "".to_string();
u.private_key = None;
u.public_key = None;
u
});
let my_user = get_user_safe_settings_from_jwt_opt(&data.auth, context.pool()).await?;
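The hand-rolled blanking of password_encrypted and the key fields is replaced by get_user_safe_settings_from_jwt_opt. A guess at its shape, assuming the usual _opt wrapper pattern and a UserSafeSettings view that never selects the sensitive columns (both are assumptions):

pub async fn get_user_safe_settings_from_jwt_opt(
  jwt: &Option<String>,
  pool: &DbPool,
) -> Result<Option<UserSafeSettings>, LemmyError> {
  // Assumed: delegate to the non-optional variant when a token is present.
  match jwt {
    Some(jwt) => Ok(Some(get_user_safe_settings_from_jwt(jwt, pool).await?)),
    None => Ok(None),
  }
}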
Ok(GetSiteResponse {
site: site_view,
site_view,
admins,
banned,
online,
@ -362,10 +377,10 @@ impl Perform for Search {
PostQueryBuilder::create(conn)
.sort(&sort)
.show_nsfw(true)
.for_community_id(community_id)
.for_community_name(community_name)
.search_term(q)
.community_id(community_id)
.community_name(community_name)
.my_user_id(user_id)
.search_term(q)
.page(page)
.limit(limit)
.list()
@ -411,10 +426,10 @@ impl Perform for Search {
PostQueryBuilder::create(conn)
.sort(&sort)
.show_nsfw(true)
.for_community_id(community_id)
.for_community_name(community_name)
.search_term(q)
.community_id(community_id)
.community_name(community_name)
.my_user_id(user_id)
.search_term(q)
.page(page)
.limit(limit)
.list()
@ -466,8 +481,8 @@ impl Perform for Search {
PostQueryBuilder::create(conn)
.sort(&sort)
.show_nsfw(true)
.for_community_id(community_id)
.for_community_name(community_name)
.community_id(community_id)
.community_name(community_name)
.url_search(q)
.page(page)
.limit(limit)
@ -498,16 +513,11 @@ impl Perform for TransferSite {
_websocket_id: Option<ConnectionId>,
) -> Result<GetSiteResponse, LemmyError> {
let data: &TransferSite = &self;
let mut user = get_user_from_jwt(&data.auth, context.pool()).await?;
let user = get_user_safe_settings_from_jwt(&data.auth, context.pool()).await?;
is_admin(context.pool(), user.id).await?;
// TODO add a User_::read_safe() for this.
user.password_encrypted = "".to_string();
user.private_key = None;
user.public_key = None;
let read_site = blocking(context.pool(), move |conn| Site::read(conn, 1)).await??;
let read_site = blocking(context.pool(), move |conn| Site::read_simple(conn)).await??;
// Make sure user is the creator
if read_site.creator_id != user.id {
@ -531,18 +541,18 @@ impl Perform for TransferSite {
let site_view = blocking(context.pool(), move |conn| SiteView::read(conn)).await??;
let mut admins = blocking(context.pool(), move |conn| UserView::admins(conn)).await??;
let mut admins = blocking(context.pool(), move |conn| UserViewSafe::admins(conn)).await??;
let creator_index = admins
.iter()
.position(|r| r.id == site_view.creator_id)
.position(|r| r.user.id == site_view.creator.id)
.context(location_info!())?;
let creator_user = admins.remove(creator_index);
admins.insert(0, creator_user);
let banned = blocking(context.pool(), move |conn| UserView::banned(conn)).await??;
let banned = blocking(context.pool(), move |conn| UserViewSafe::banned(conn)).await??;
Ok(GetSiteResponse {
site: Some(site_view),
site_view: Some(site_view),
admins,
banned,
online: 0,
@ -587,12 +597,8 @@ impl Perform for SaveSiteConfig {
let user = get_user_from_jwt(&data.auth, context.pool()).await?;
// Only let admins read this
let admins = blocking(context.pool(), move |conn| UserView::admins(conn)).await??;
let admin_ids: Vec<i32> = admins.into_iter().map(|m| m.id).collect();
if !admin_ids.contains(&user.id) {
return Err(APIError::err("not_an_admin").into());
}
let user_id = user.id;
is_admin(context.pool(), user_id).await?;
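SaveSiteConfig now delegates the permission check to is_admin instead of scanning the admin list inline. A sketch of what that helper plausibly does, reconstructed from the lines removed above (the real body may differ):

pub async fn is_admin(pool: &DbPool, user_id: i32) -> Result<(), LemmyError> {
  // Assumed: same effect as the removed inline admin-list check.
  let admins = blocking(pool, move |conn| UserViewSafe::admins(conn)).await??;
  if !admins.iter().any(|admin| admin.user.id == user_id) {
    return Err(APIError::err("not_an_admin").into());
  }
  Ok(())
}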
// Make sure docker doesn't have :ro at the end of the volume, so it's not a read-only filesystem
let config_hjson = match Settings::save_config_file(&data.config_hjson) {


@ -14,33 +14,51 @@ use bcrypt::verify;
use captcha::{gen, Difficulty};
use chrono::Duration;
use lemmy_apub::ApubObjectType;
use lemmy_db::{
comment::*,
comment_report::CommentReportView,
comment_view::*,
community::*,
community_view::*,
use lemmy_db_queries::{
diesel_option_overwrite,
moderator::*,
naive_now,
password_reset_request::*,
post::*,
post_report::PostReportView,
post_view::*,
private_message::*,
private_message_view::*,
site::*,
site_view::*,
user::*,
user_mention::*,
user_mention_view::*,
user_view::*,
source::{
comment::Comment_,
community::Community_,
password_reset_request::PasswordResetRequest_,
post::Post_,
private_message::PrivateMessage_,
site::Site_,
user::User,
user_mention::UserMention_,
},
Crud,
Followable,
Joinable,
ListingType,
SortType,
};
use lemmy_db_schema::{
naive_now,
source::{
comment::Comment,
community::*,
moderator::*,
password_reset_request::*,
post::Post,
private_message::*,
site::*,
user::*,
user_mention::*,
},
};
use lemmy_db_views::{
comment_report_view::CommentReportView,
comment_view::CommentQueryBuilder,
post_report_view::PostReportView,
post_view::PostQueryBuilder,
private_message_view::{PrivateMessageQueryBuilder, PrivateMessageView},
};
use lemmy_db_views_actor::{
community_follower_view::CommunityFollowerView,
community_moderator_view::CommunityModeratorView,
user_mention_view::{UserMentionQueryBuilder, UserMentionView},
user_view::UserViewSafe,
};
use lemmy_structs::{blocking, send_email_to_user, user::*};
use lemmy_utils::{
apub::{generate_actor_keypair, make_apub_endpoint, EndpointType},
@ -60,7 +78,7 @@ use lemmy_utils::{
LemmyError,
};
use lemmy_websocket::{
messages::{CaptchaItem, CheckCaptcha, JoinUserRoom, SendAllMessage, SendUserRoomMessage},
messages::{CaptchaItem, CheckCaptcha, SendAllMessage, SendUserRoomMessage},
LemmyContext,
UserOperation,
};
@ -113,8 +131,7 @@ impl Perform for Register {
let data: &Register = &self;
// Make sure site has open registration
if let Ok(site) = blocking(context.pool(), move |conn| SiteView::read(conn)).await? {
let site: SiteView = site;
if let Ok(site) = blocking(context.pool(), move |conn| Site::read_simple(conn)).await? {
if !site.open_registration {
return Err(APIError::err("registration_closed").into());
}
@ -130,8 +147,14 @@ impl Perform for Register {
return Err(APIError::err("passwords_dont_match").into());
}
// Check if no admins exist yet; `no_admins` is false as soon as any admin exists
let no_admins = blocking(context.pool(), move |conn| {
UserViewSafe::admins(conn).map(|a| a.is_empty())
})
.await??;
// If its not the admin, check the captcha
if !data.admin && Settings::get().captcha.enabled {
if !no_admins && Settings::get().captcha.enabled {
let check = context
.chat_server()
.send(CheckCaptcha {
@ -152,15 +175,6 @@ impl Perform for Register {
check_slurs(&data.username)?;
// Make sure there are no admins
let any_admins = blocking(context.pool(), move |conn| {
UserView::admins(conn).map(|a| a.is_empty())
})
.await??;
if data.admin && !any_admins {
return Err(APIError::err("admin_already_created").into());
}
let user_keypair = generate_actor_keypair()?;
if !is_valid_username(&data.username) {
return Err(APIError::err("invalid_username").into());
@ -177,7 +191,7 @@ impl Perform for Register {
preferred_username: None,
published: None,
updated: None,
admin: data.admin,
admin: no_admins,
banned: Some(false),
show_nsfw: data.show_nsfw,
theme: "browser".into(),
@ -263,7 +277,7 @@ impl Perform for Register {
};
// If it's an admin (the first registered user), add them as a mod and follower to main
if data.admin {
if no_admins {
let community_moderator_form = CommunityModeratorForm {
community_id: main_community.id,
user_id: inserted_user.id,
@ -341,9 +355,6 @@ impl Perform for SaveUserSettings {
let data: &SaveUserSettings = &self;
let user = get_user_from_jwt(&data.auth, context.pool()).await?;
let user_id = user.id;
let read_user = blocking(context.pool(), move |conn| User_::read(conn, user_id)).await??;
let avatar = diesel_option_overwrite(&data.avatar);
let banner = diesel_option_overwrite(&data.banner);
let email = diesel_option_overwrite(&data.email);
@ -367,6 +378,7 @@ impl Perform for SaveUserSettings {
}
}
let user_id = user.id;
let password_encrypted = match &data.new_password {
Some(new_password) => {
match &data.new_password_verify {
@ -379,8 +391,7 @@ impl Perform for SaveUserSettings {
// Check the old password
match &data.old_password {
Some(old_password) => {
let valid: bool =
verify(old_password, &read_user.password_encrypted).unwrap_or(false);
let valid: bool = verify(old_password, &user.password_encrypted).unwrap_or(false);
if !valid {
return Err(APIError::err("password_incorrect").into());
}
@ -397,33 +408,36 @@ impl Perform for SaveUserSettings {
None => return Err(APIError::err("passwords_dont_match").into()),
}
}
None => read_user.password_encrypted,
None => user.password_encrypted,
};
let default_listing_type = data.default_listing_type;
let default_sort_type = data.default_sort_type;
let user_form = UserForm {
name: read_user.name,
name: user.name,
email,
matrix_user_id,
avatar,
banner,
password_encrypted,
preferred_username,
published: Some(read_user.published),
published: Some(user.published),
updated: Some(naive_now()),
admin: read_user.admin,
banned: Some(read_user.banned),
admin: user.admin,
banned: Some(user.banned),
show_nsfw: data.show_nsfw,
theme: data.theme.to_owned(),
default_sort_type: data.default_sort_type,
default_listing_type: data.default_listing_type,
default_sort_type,
default_listing_type,
lang: data.lang.to_owned(),
show_avatars: data.show_avatars,
send_notifications_to_email: data.send_notifications_to_email,
actor_id: Some(read_user.actor_id),
actor_id: Some(user.actor_id),
bio,
local: read_user.local,
private_key: read_user.private_key,
public_key: read_user.public_key,
local: user.local,
private_key: user.private_key,
public_key: user.public_key,
last_refreshed_at: None,
};
@ -491,23 +505,13 @@ impl Perform for GetUserDetails {
};
let user_id = user.map(|u| u.id);
let user_fun = move |conn: &'_ _| {
match user_id {
// if there's a logged in user and it's the same id as the user whose details are being
// requested we need to use get_user_dangerous so it returns their email or other sensitive
// data hidden when viewing users other than yourself
Some(auth_user_id) => {
if user_details_id == auth_user_id {
UserView::get_user_dangerous(conn, auth_user_id)
} else {
UserView::get_user_secure(conn, user_details_id)
}
}
None => UserView::get_user_secure(conn, user_details_id),
}
};
let user_view = blocking(context.pool(), user_fun).await??;
// You don't need to return settings for the user, since this comes back with GetSite
// `my_user`
let user_view = blocking(context.pool(), move |conn| {
UserViewSafe::read(conn, user_details_id)
})
.await??;
let page = data.page;
let limit = data.limit;
@ -519,23 +523,23 @@ impl Perform for GetUserDetails {
.sort(&sort)
.show_nsfw(show_nsfw)
.saved_only(saved_only)
.for_community_id(community_id)
.community_id(community_id)
.my_user_id(user_id)
.page(page)
.limit(limit);
let mut comments_query = CommentQueryBuilder::create(conn)
.my_user_id(user_id)
.sort(&sort)
.saved_only(saved_only)
.my_user_id(user_id)
.page(page)
.limit(limit);
// If it's saved only, you don't care what creator it was
// Or, if it's not saved, then you only want it for that specific creator
if !saved_only {
posts_query = posts_query.for_creator_id(user_details_id);
comments_query = comments_query.for_creator_id(user_details_id);
posts_query = posts_query.creator_id(user_details_id);
comments_query = comments_query.creator_id(user_details_id);
}
let posts = posts_query.list()?;
@ -556,7 +560,7 @@ impl Perform for GetUserDetails {
// Return the response
Ok(GetUserDetailsResponse {
user: user_view,
user_view,
follows,
moderates,
comments,
@ -601,10 +605,10 @@ impl Perform for AddAdmin {
})
.await??;
let mut admins = blocking(context.pool(), move |conn| UserView::admins(conn)).await??;
let mut admins = blocking(context.pool(), move |conn| UserViewSafe::admins(conn)).await??;
let creator_index = admins
.iter()
.position(|r| r.id == site_creator_id)
.position(|r| r.user.id == site_creator_id)
.context(location_info!())?;
let creator_user = admins.remove(creator_index);
admins.insert(0, creator_user);
@ -644,22 +648,22 @@ impl Perform for BanUser {
}
// Remove their data if that's desired
if let Some(remove_data) = data.remove_data {
if data.remove_data {
// Posts
blocking(context.pool(), move |conn: &'_ _| {
Post::update_removed_for_creator(conn, banned_user_id, None, remove_data)
Post::update_removed_for_creator(conn, banned_user_id, None, true)
})
.await??;
// Communities
blocking(context.pool(), move |conn: &'_ _| {
Community::update_removed_for_creator(conn, banned_user_id, remove_data)
Community::update_removed_for_creator(conn, banned_user_id, true)
})
.await??;
// Comments
blocking(context.pool(), move |conn: &'_ _| {
Comment::update_removed_for_creator(conn, banned_user_id, remove_data)
Comment::update_removed_for_creator(conn, banned_user_id, true)
})
.await??;
}
@ -682,12 +686,12 @@ impl Perform for BanUser {
let user_id = data.user_id;
let user_view = blocking(context.pool(), move |conn| {
UserView::get_user_secure(conn, user_id)
UserViewSafe::read(conn, user_id)
})
.await??;
let res = BanUserResponse {
user: user_view,
user_view,
banned: data.ban,
};
@ -720,9 +724,11 @@ impl Perform for GetReplies {
let unread_only = data.unread_only;
let user_id = user.id;
let replies = blocking(context.pool(), move |conn| {
ReplyQueryBuilder::create(conn, user_id)
CommentQueryBuilder::create(conn)
.sort(&sort)
.unread_only(unread_only)
.recipient_id(user_id)
.my_user_id(user_id)
.page(page)
.limit(limit)
.list()
@ -752,7 +758,9 @@ impl Perform for GetUserMentions {
let unread_only = data.unread_only;
let user_id = user.id;
let mentions = blocking(context.pool(), move |conn| {
UserMentionQueryBuilder::create(conn, user_id)
UserMentionQueryBuilder::create(conn)
.recipient_id(user_id)
.my_user_id(user_id)
.sort(&sort)
.unread_only(unread_only)
.page(page)
@ -797,13 +805,11 @@ impl Perform for MarkUserMentionAsRead {
let user_mention_id = read_user_mention.id;
let user_id = user.id;
let user_mention_view = blocking(context.pool(), move |conn| {
UserMentionView::read(conn, user_mention_id, user_id)
UserMentionView::read(conn, user_mention_id, Some(user_id))
})
.await??;
Ok(UserMentionResponse {
mention: user_mention_view,
})
Ok(UserMentionResponse { user_mention_view })
}
}
@ -821,7 +827,9 @@ impl Perform for MarkAllAsRead {
let user_id = user.id;
let replies = blocking(context.pool(), move |conn| {
ReplyQueryBuilder::create(conn, user_id)
CommentQueryBuilder::create(conn)
.my_user_id(user_id)
.recipient_id(user_id)
.unread_only(true)
.page(1)
.limit(999)
@ -832,8 +840,8 @@ impl Perform for MarkAllAsRead {
// TODO: this should probably be a bulk operation
// Not easy to do as a bulk operation,
// because recipient_id isn't in the comment table
for reply in &replies {
let reply_id = reply.id;
for comment_view in &replies {
let reply_id = comment_view.comment.id;
let mark_as_read = move |conn: &'_ _| Comment::update_read(conn, reply_id, true);
if blocking(context.pool(), mark_as_read).await?.is_err() {
return Err(APIError::err("couldnt_update_comment").into());
@ -1062,7 +1070,9 @@ impl Perform for CreatePrivateMessage {
})
.await??;
let res = PrivateMessageResponse { message };
let res = PrivateMessageResponse {
private_message_view: message,
};
context.chat_server().do_send(SendUserRoomMessage {
op: UserOperation::CreatePrivateMessage,
@ -1088,9 +1098,9 @@ impl Perform for EditPrivateMessage {
let user = get_user_from_jwt(&data.auth, context.pool()).await?;
// Checking permissions
let edit_id = data.edit_id;
let private_message_id = data.private_message_id;
let orig_private_message = blocking(context.pool(), move |conn| {
PrivateMessage::read(conn, edit_id)
PrivateMessage::read(conn, private_message_id)
})
.await??;
if user.id != orig_private_message.creator_id {
@ -1099,9 +1109,9 @@ impl Perform for EditPrivateMessage {
// Doing the update
let content_slurs_removed = remove_slurs(&data.content);
let edit_id = data.edit_id;
let private_message_id = data.private_message_id;
let updated_private_message = match blocking(context.pool(), move |conn| {
PrivateMessage::update_content(conn, edit_id, &content_slurs_removed)
PrivateMessage::update_content(conn, private_message_id, &content_slurs_removed)
})
.await?
{
@ -1112,14 +1122,16 @@ impl Perform for EditPrivateMessage {
// Send the apub update
updated_private_message.send_update(&user, context).await?;
let edit_id = data.edit_id;
let private_message_id = data.private_message_id;
let message = blocking(context.pool(), move |conn| {
PrivateMessageView::read(conn, edit_id)
PrivateMessageView::read(conn, private_message_id)
})
.await??;
let recipient_id = message.recipient_id;
let recipient_id = message.recipient.id;
let res = PrivateMessageResponse { message };
let res = PrivateMessageResponse {
private_message_view: message,
};
context.chat_server().do_send(SendUserRoomMessage {
op: UserOperation::EditPrivateMessage,
@ -1145,9 +1157,9 @@ impl Perform for DeletePrivateMessage {
let user = get_user_from_jwt(&data.auth, context.pool()).await?;
// Checking permissions
let edit_id = data.edit_id;
let private_message_id = data.private_message_id;
let orig_private_message = blocking(context.pool(), move |conn| {
PrivateMessage::read(conn, edit_id)
PrivateMessage::read(conn, private_message_id)
})
.await??;
if user.id != orig_private_message.creator_id {
@ -1155,10 +1167,10 @@ impl Perform for DeletePrivateMessage {
}
// Doing the update
let edit_id = data.edit_id;
let private_message_id = data.private_message_id;
let deleted = data.deleted;
let updated_private_message = match blocking(context.pool(), move |conn| {
PrivateMessage::update_deleted(conn, edit_id, deleted)
PrivateMessage::update_deleted(conn, private_message_id, deleted)
})
.await?
{
@ -1175,14 +1187,16 @@ impl Perform for DeletePrivateMessage {
.await?;
}
let edit_id = data.edit_id;
let private_message_id = data.private_message_id;
let message = blocking(context.pool(), move |conn| {
PrivateMessageView::read(conn, edit_id)
PrivateMessageView::read(conn, private_message_id)
})
.await??;
let recipient_id = message.recipient_id;
let recipient_id = message.recipient.id;
let res = PrivateMessageResponse { message };
let res = PrivateMessageResponse {
private_message_view: message,
};
context.chat_server().do_send(SendUserRoomMessage {
op: UserOperation::DeletePrivateMessage,
@ -1208,9 +1222,9 @@ impl Perform for MarkPrivateMessageAsRead {
let user = get_user_from_jwt(&data.auth, context.pool()).await?;
// Checking permissions
let edit_id = data.edit_id;
let private_message_id = data.private_message_id;
let orig_private_message = blocking(context.pool(), move |conn| {
PrivateMessage::read(conn, edit_id)
PrivateMessage::read(conn, private_message_id)
})
.await??;
if user.id != orig_private_message.recipient_id {
@ -1218,10 +1232,10 @@ impl Perform for MarkPrivateMessageAsRead {
}
// Doing the update
let edit_id = data.edit_id;
let private_message_id = data.private_message_id;
let read = data.read;
match blocking(context.pool(), move |conn| {
PrivateMessage::update_read(conn, edit_id, read)
PrivateMessage::update_read(conn, private_message_id, read)
})
.await?
{
@ -1231,14 +1245,16 @@ impl Perform for MarkPrivateMessageAsRead {
// No need to send an apub update
let edit_id = data.edit_id;
let private_message_id = data.private_message_id;
let message = blocking(context.pool(), move |conn| {
PrivateMessageView::read(conn, edit_id)
PrivateMessageView::read(conn, private_message_id)
})
.await??;
let recipient_id = message.recipient_id;
let recipient_id = message.recipient.id;
let res = PrivateMessageResponse { message };
let res = PrivateMessageResponse {
private_message_view: message,
};
context.chat_server().do_send(SendUserRoomMessage {
op: UserOperation::MarkPrivateMessageAsRead,
@ -1276,30 +1292,9 @@ impl Perform for GetPrivateMessages {
})
.await??;
Ok(PrivateMessagesResponse { messages })
}
}
#[async_trait::async_trait(?Send)]
impl Perform for UserJoin {
type Response = UserJoinResponse;
async fn perform(
&self,
context: &Data<LemmyContext>,
websocket_id: Option<ConnectionId>,
) -> Result<UserJoinResponse, LemmyError> {
let data: &UserJoin = &self;
let user = get_user_from_jwt(&data.auth, context.pool()).await?;
if let Some(ws_id) = websocket_id {
context.chat_server().do_send(JoinUserRoom {
user_id: user.id,
id: ws_id,
});
}
Ok(UserJoinResponse { joined: true })
Ok(PrivateMessagesResponse {
private_messages: messages,
})
}
}


@ -1 +1 @@
pub const VERSION: &str = "v0.8.10";
pub const VERSION: &str = "0.9.0-rc.12";


@ -0,0 +1,97 @@
use crate::{get_user_from_jwt, Perform};
use actix_web::web::Data;
use lemmy_structs::websocket::*;
use lemmy_utils::{ConnectionId, LemmyError};
use lemmy_websocket::{
messages::{JoinCommunityRoom, JoinModRoom, JoinPostRoom, JoinUserRoom},
LemmyContext,
};
#[async_trait::async_trait(?Send)]
impl Perform for UserJoin {
type Response = UserJoinResponse;
async fn perform(
&self,
context: &Data<LemmyContext>,
websocket_id: Option<ConnectionId>,
) -> Result<UserJoinResponse, LemmyError> {
let data: &UserJoin = &self;
let user = get_user_from_jwt(&data.auth, context.pool()).await?;
if let Some(ws_id) = websocket_id {
context.chat_server().do_send(JoinUserRoom {
user_id: user.id,
id: ws_id,
});
}
Ok(UserJoinResponse { joined: true })
}
}
#[async_trait::async_trait(?Send)]
impl Perform for CommunityJoin {
type Response = CommunityJoinResponse;
async fn perform(
&self,
context: &Data<LemmyContext>,
websocket_id: Option<ConnectionId>,
) -> Result<CommunityJoinResponse, LemmyError> {
let data: &CommunityJoin = &self;
if let Some(ws_id) = websocket_id {
context.chat_server().do_send(JoinCommunityRoom {
community_id: data.community_id,
id: ws_id,
});
}
Ok(CommunityJoinResponse { joined: true })
}
}
#[async_trait::async_trait(?Send)]
impl Perform for ModJoin {
type Response = ModJoinResponse;
async fn perform(
&self,
context: &Data<LemmyContext>,
websocket_id: Option<ConnectionId>,
) -> Result<ModJoinResponse, LemmyError> {
let data: &ModJoin = &self;
if let Some(ws_id) = websocket_id {
context.chat_server().do_send(JoinModRoom {
community_id: data.community_id,
id: ws_id,
});
}
Ok(ModJoinResponse { joined: true })
}
}
#[async_trait::async_trait(?Send)]
impl Perform for PostJoin {
type Response = PostJoinResponse;
async fn perform(
&self,
context: &Data<LemmyContext>,
websocket_id: Option<ConnectionId>,
) -> Result<PostJoinResponse, LemmyError> {
let data: &PostJoin = &self;
if let Some(ws_id) = websocket_id {
context.chat_server().do_send(JoinPostRoom {
post_id: data.post_id,
id: ws_id,
});
}
Ok(PostJoinResponse { joined: true })
}
}


@ -10,7 +10,10 @@ path = "src/lib.rs"
[dependencies]
lemmy_utils = { path = "../lemmy_utils" }
lemmy_db = { path = "../lemmy_db" }
lemmy_db_queries = { path = "../lemmy_db_queries" }
lemmy_db_schema = { path = "../lemmy_db_schema" }
lemmy_db_views = { path = "../lemmy_db_views" }
lemmy_db_views_actor = { path = "../lemmy_db_views_actor" }
lemmy_structs = { path = "../lemmy_structs" }
lemmy_websocket = { path = "../lemmy_websocket" }
diesel = "1.4.5"
@ -25,25 +28,25 @@ actix-web = { version = "3.3.2", default-features = false }
actix-rt = { version = "1.1.1", default-features = false }
awc = { version = "2.0.3", default-features = false }
log = "0.4.11"
rand = "0.7.3"
rand = "0.8.0"
strum = "0.20.0"
strum_macros = "0.20.1"
lazy_static = "1.4.0"
url = { version = "2.2.0", features = ["serde"] }
percent-encoding = "2.1.0"
openssl = "0.10.30"
http = "0.2.1"
openssl = "0.10.31"
http = "0.2.2"
http-signature-normalization-actix = { version = "0.4.1", default-features = false, features = ["sha-2"] }
http-signature-normalization-reqwest = { version = "0.1.3", default-features = false, features = ["sha-2"] }
base64 = "0.13.0"
tokio = "0.3.5"
tokio = "0.3.6"
futures = "0.3.8"
itertools = "0.9.0"
uuid = { version = "0.8.1", features = ["serde", "v4"] }
sha2 = "0.9.2"
async-trait = "0.1.42"
anyhow = "1.0.35"
anyhow = "1.0.36"
thiserror = "1.0.22"
background-jobs = "0.8.0"
reqwest = { version = "0.10.9", features = ["json"] }
reqwest = { version = "0.10.10", features = ["json"] }
backtrace = "0.3.55"


@ -4,12 +4,12 @@ use activitystreams::{
base::ExtendsExt,
};
use anyhow::Context;
use lemmy_db::{
use lemmy_db_queries::{source::comment::Comment_, Crud, Likeable};
use lemmy_db_schema::source::{
comment::{Comment, CommentLike, CommentLikeForm},
comment_view::CommentView,
post::Post,
Likeable,
};
use lemmy_db_views::comment_view::CommentView;
use lemmy_structs::{blocking, comment::CommentResponse, send_local_notifs};
use lemmy_utils::{location_info, utils::scrape_text_for_mentions, LemmyError};
use lemmy_websocket::{messages::SendComment, LemmyContext, UserOperation};
@ -43,7 +43,7 @@ pub(crate) async fn receive_create_comment(
.await??;
let res = CommentResponse {
comment: comment_view,
comment_view,
recipient_ids,
form_id: None,
};
@ -83,7 +83,7 @@ pub(crate) async fn receive_update_comment(
.await??;
let res = CommentResponse {
comment: comment_view,
comment_view,
recipient_ids,
form_id: None,
};
@ -128,7 +128,7 @@ pub(crate) async fn receive_like_comment(
// TODO get those recipient actor ids from somewhere
let recipient_ids = vec![];
let res = CommentResponse {
comment: comment_view,
comment_view,
recipient_ids,
form_id: None,
};
@ -173,7 +173,7 @@ pub(crate) async fn receive_dislike_comment(
// TODO get those recipient actor ids from somewhere
let recipient_ids = vec![];
let res = CommentResponse {
comment: comment_view,
comment_view,
recipient_ids,
form_id: None,
};
@ -206,7 +206,7 @@ pub(crate) async fn receive_delete_comment(
// TODO get those recipient actor ids from somewhere
let recipient_ids = vec![];
let res = CommentResponse {
comment: comment_view,
comment_view,
recipient_ids,
form_id: None,
};
@ -239,7 +239,7 @@ pub(crate) async fn receive_remove_comment(
// TODO get those recipient actor ids from somewhere
let recipient_ids = vec![];
let res = CommentResponse {
comment: comment_view,
comment_view,
recipient_ids,
form_id: None,
};


@ -1,10 +1,8 @@
use crate::activities::receive::get_actor_as_user;
use activitystreams::activity::{Dislike, Like};
use lemmy_db::{
comment::{Comment, CommentLike},
comment_view::CommentView,
Likeable,
};
use lemmy_db_queries::{source::comment::Comment_, Likeable};
use lemmy_db_schema::source::comment::{Comment, CommentLike};
use lemmy_db_views::comment_view::CommentView;
use lemmy_structs::{blocking, comment::CommentResponse};
use lemmy_utils::LemmyError;
use lemmy_websocket::{messages::SendComment, LemmyContext, UserOperation};
@ -33,7 +31,7 @@ pub(crate) async fn receive_undo_like_comment(
// TODO get those recipient actor ids from somewhere
let recipient_ids = vec![];
let res = CommentResponse {
comment: comment_view,
comment_view,
recipient_ids,
form_id: None,
};
@ -71,7 +69,7 @@ pub(crate) async fn receive_undo_dislike_comment(
// TODO get those recipient actor ids from somewhere
let recipient_ids = vec![];
let res = CommentResponse {
comment: comment_view,
comment_view,
recipient_ids,
form_id: None,
};
@ -104,7 +102,7 @@ pub(crate) async fn receive_undo_delete_comment(
// TODO get those recipient actor ids from somewhere
let recipient_ids = vec![];
let res = CommentResponse {
comment: comment_view,
comment_view,
recipient_ids,
form_id: None,
};
@ -137,7 +135,7 @@ pub(crate) async fn receive_undo_remove_comment(
// TODO get those recipient actor ids from somewhere
let recipient_ids = vec![];
let res = CommentResponse {
comment: comment_view,
comment_view,
recipient_ids,
form_id: None,
};


@ -4,7 +4,9 @@ use activitystreams::{
base::{AnyBase, ExtendsExt},
};
use anyhow::Context;
use lemmy_db::{community::Community, community_view::CommunityView, ApubObject};
use lemmy_db_queries::{source::community::Community_, ApubObject};
use lemmy_db_schema::source::community::Community;
use lemmy_db_views_actor::community_view::CommunityView;
use lemmy_structs::{blocking, community::CommunityResponse};
use lemmy_utils::{location_info, LemmyError};
use lemmy_websocket::{messages::SendCommunityRoomMessage, LemmyContext, UserOperation};
@ -21,13 +23,13 @@ pub(crate) async fn receive_delete_community(
let community_id = deleted_community.id;
let res = CommunityResponse {
community: blocking(context.pool(), move |conn| {
community_view: blocking(context.pool(), move |conn| {
CommunityView::read(conn, community_id, None)
})
.await??,
};
let community_id = res.community.id;
let community_id = res.community_view.community.id;
context.chat_server().do_send(SendCommunityRoomMessage {
op: UserOperation::EditCommunity,
response: res,
@ -64,13 +66,13 @@ pub(crate) async fn receive_remove_community(
let community_id = removed_community.id;
let res = CommunityResponse {
community: blocking(context.pool(), move |conn| {
community_view: blocking(context.pool(), move |conn| {
CommunityView::read(conn, community_id, None)
})
.await??,
};
let community_id = res.community.id;
let community_id = res.community_view.community.id;
context.chat_server().do_send(SendCommunityRoomMessage {
op: UserOperation::EditCommunity,
response: res,
@ -100,13 +102,13 @@ pub(crate) async fn receive_undo_delete_community(
let community_id = deleted_community.id;
let res = CommunityResponse {
community: blocking(context.pool(), move |conn| {
community_view: blocking(context.pool(), move |conn| {
CommunityView::read(conn, community_id, None)
})
.await??,
};
let community_id = res.community.id;
let community_id = res.community_view.community.id;
context.chat_server().do_send(SendCommunityRoomMessage {
op: UserOperation::EditCommunity,
response: res,
@ -146,13 +148,13 @@ pub(crate) async fn receive_undo_remove_community(
let community_id = removed_community.id;
let res = CommunityResponse {
community: blocking(context.pool(), move |conn| {
community_view: blocking(context.pool(), move |conn| {
CommunityView::read(conn, community_id, None)
})
.await??,
};
let community_id = res.community.id;
let community_id = res.community_view.community.id;
context.chat_server().do_send(SendCommunityRoomMessage {
op: UserOperation::EditCommunity,


@ -1,11 +1,11 @@
use crate::fetcher::get_or_fetch_and_upsert_user;
use crate::fetcher::user::get_or_fetch_and_upsert_user;
use activitystreams::{
activity::{ActorAndObjectRef, ActorAndObjectRefExt},
base::{AsBase, BaseExt},
error::DomainError,
};
use anyhow::{anyhow, Context};
use lemmy_db::user::User_;
use lemmy_db_schema::source::user::User_;
use lemmy_utils::{location_info, LemmyError};
use lemmy_websocket::LemmyContext;
use log::debug;


@ -4,11 +4,9 @@ use activitystreams::{
prelude::*,
};
use anyhow::Context;
use lemmy_db::{
post::{Post, PostLike, PostLikeForm},
post_view::PostView,
Likeable,
};
use lemmy_db_queries::{source::post::Post_, Likeable};
use lemmy_db_schema::source::post::{Post, PostLike, PostLikeForm};
use lemmy_db_views::post_view::PostView;
use lemmy_structs::{blocking, post::PostResponse};
use lemmy_utils::{location_info, LemmyError};
use lemmy_websocket::{messages::SendPost, LemmyContext, UserOperation};
@ -31,7 +29,7 @@ pub(crate) async fn receive_create_post(
})
.await??;
let res = PostResponse { post: post_view };
let res = PostResponse { post_view };
context.chat_server().do_send(SendPost {
op: UserOperation::CreatePost,
@ -60,7 +58,7 @@ pub(crate) async fn receive_update_post(
})
.await??;
let res = PostResponse { post: post_view };
let res = PostResponse { post_view };
context.chat_server().do_send(SendPost {
op: UserOperation::EditPost,
@ -98,7 +96,7 @@ pub(crate) async fn receive_like_post(
})
.await??;
let res = PostResponse { post: post_view };
let res = PostResponse { post_view };
context.chat_server().do_send(SendPost {
op: UserOperation::CreatePostLike,
@ -136,7 +134,7 @@ pub(crate) async fn receive_dislike_post(
})
.await??;
let res = PostResponse { post: post_view };
let res = PostResponse { post_view };
context.chat_server().do_send(SendPost {
op: UserOperation::CreatePostLike,
@ -163,7 +161,7 @@ pub(crate) async fn receive_delete_post(
})
.await??;
let res = PostResponse { post: post_view };
let res = PostResponse { post_view };
context.chat_server().do_send(SendPost {
op: UserOperation::EditPost,
post: res,
@ -190,7 +188,7 @@ pub(crate) async fn receive_remove_post(
})
.await??;
let res = PostResponse { post: post_view };
let res = PostResponse { post_view };
context.chat_server().do_send(SendPost {
op: UserOperation::EditPost,
post: res,


@ -1,10 +1,8 @@
use crate::activities::receive::get_actor_as_user;
use activitystreams::activity::{Dislike, Like};
use lemmy_db::{
post::{Post, PostLike},
post_view::PostView,
Likeable,
};
use lemmy_db_queries::{source::post::Post_, Likeable};
use lemmy_db_schema::source::post::{Post, PostLike};
use lemmy_db_views::post_view::PostView;
use lemmy_structs::{blocking, post::PostResponse};
use lemmy_utils::LemmyError;
use lemmy_websocket::{messages::SendPost, LemmyContext, UserOperation};
@ -30,7 +28,7 @@ pub(crate) async fn receive_undo_like_post(
})
.await??;
let res = PostResponse { post: post_view };
let res = PostResponse { post_view };
context.chat_server().do_send(SendPost {
op: UserOperation::CreatePostLike,
@ -62,7 +60,7 @@ pub(crate) async fn receive_undo_dislike_post(
})
.await??;
let res = PostResponse { post: post_view };
let res = PostResponse { post_view };
context.chat_server().do_send(SendPost {
op: UserOperation::CreatePostLike,
@ -89,7 +87,7 @@ pub(crate) async fn receive_undo_delete_post(
})
.await??;
let res = PostResponse { post: post_view };
let res = PostResponse { post_view };
context.chat_server().do_send(SendPost {
op: UserOperation::EditPost,
post: res,
@ -115,7 +113,7 @@ pub(crate) async fn receive_undo_remove_post(
})
.await??;
let res = PostResponse { post: post_view };
let res = PostResponse { post_view };
context.chat_server().do_send(SendPost {
op: UserOperation::EditPost,


@ -1,7 +1,7 @@
use crate::{
activities::receive::verify_activity_domains_valid,
check_is_apub_id_valid,
fetcher::get_or_fetch_and_upsert_user,
fetcher::user::get_or_fetch_and_upsert_user,
inbox::get_activity_to_and_cc,
objects::FromApub,
NoteExt,
@ -13,7 +13,9 @@ use activitystreams::{
public,
};
use anyhow::{anyhow, Context};
use lemmy_db::{private_message::PrivateMessage, private_message_view::PrivateMessageView};
use lemmy_db_queries::source::private_message::PrivateMessage_;
use lemmy_db_schema::source::private_message::PrivateMessage;
use lemmy_db_views::private_message_view::PrivateMessageView;
use lemmy_structs::{blocking, user::PrivateMessageResponse};
use lemmy_utils::{location_info, LemmyError};
use lemmy_websocket::{messages::SendUserRoomMessage, LemmyContext, UserOperation};
@ -44,9 +46,11 @@ pub(crate) async fn receive_create_private_message(
})
.await??;
let res = PrivateMessageResponse { message };
let res = PrivateMessageResponse {
private_message_view: message,
};
let recipient_id = res.message.recipient_id;
let recipient_id = res.private_message_view.recipient.id;
context.chat_server().do_send(SendUserRoomMessage {
op: UserOperation::CreatePrivateMessage,
@ -82,9 +86,11 @@ pub(crate) async fn receive_update_private_message(
})
.await??;
let res = PrivateMessageResponse { message };
let res = PrivateMessageResponse {
private_message_view: message,
};
let recipient_id = res.message.recipient_id;
let recipient_id = res.private_message_view.recipient.id;
context.chat_server().do_send(SendUserRoomMessage {
op: UserOperation::EditPrivateMessage,
@ -114,8 +120,10 @@ pub(crate) async fn receive_delete_private_message(
})
.await??;
let res = PrivateMessageResponse { message };
let recipient_id = res.message.recipient_id;
let res = PrivateMessageResponse {
private_message_view: message,
};
let recipient_id = res.private_message_view.recipient.id;
context.chat_server().do_send(SendUserRoomMessage {
op: UserOperation::EditPrivateMessage,
response: res,
@ -149,8 +157,10 @@ pub(crate) async fn receive_undo_delete_private_message(
})
.await??;
let res = PrivateMessageResponse { message };
let recipient_id = res.message.recipient_id;
let res = PrivateMessageResponse {
private_message_view: message,
};
let recipient_id = res.private_message_view.recipient.id;
context.chat_server().do_send(SendUserRoomMessage {
op: UserOperation::EditPrivateMessage,
response: res,
@ -169,7 +179,7 @@ async fn check_private_message_activity_valid<T, Kind>(
where
T: AsBase<Kind> + AsObject<Kind> + ActorAndObjectRefExt,
{
let to_and_cc = get_activity_to_and_cc(activity)?;
let to_and_cc = get_activity_to_and_cc(activity);
if to_and_cc.len() != 1 {
return Err(anyhow!("Private message can only be addressed to one user").into());
}

View file

@ -2,7 +2,7 @@ use crate::{
activities::send::generate_activity_id,
activity_queue::{send_comment_mentions, send_to_community},
extensions::context::lemmy_context,
fetcher::get_or_fetch_and_upsert_user,
fetcher::user::get_or_fetch_and_upsert_user,
objects::ToApub,
ActorType,
ApubLikeableType,
@ -26,7 +26,8 @@ use activitystreams::{
};
use anyhow::anyhow;
use itertools::Itertools;
use lemmy_db::{comment::Comment, community::Community, post::Post, user::User_, Crud, DbPool};
use lemmy_db_queries::{Crud, DbPool};
use lemmy_db_schema::source::{comment::Comment, community::Community, post::Post, user::User_};
use lemmy_structs::{blocking, WebFingerResponse};
use lemmy_utils::{
request::{retry, RecvError},
@ -56,17 +57,14 @@ impl ApubObjectType for Comment {
})
.await??;
let mut maa = collect_non_local_mentions_and_addresses(&self.content, context).await?;
let mut ccs = vec![community.actor_id()?];
ccs.append(&mut maa.addressed_ccs);
ccs.push(get_comment_parent_creator_id(context.pool(), &self).await?);
let maa = collect_non_local_mentions(&self, &community, context).await?;
let mut create = Create::new(creator.actor_id.to_owned(), note.into_any_base()?);
create
.set_many_contexts(lemmy_context()?)
.set_id(generate_activity_id(CreateType::Create)?)
.set_to(public())
.set_many_ccs(ccs)
.set_many_ccs(maa.ccs.to_owned())
// Set the mention tags
.set_many_tags(maa.get_tags()?);
@ -89,17 +87,14 @@ impl ApubObjectType for Comment {
})
.await??;
let mut maa = collect_non_local_mentions_and_addresses(&self.content, context).await?;
let mut ccs = vec![community.actor_id()?];
ccs.append(&mut maa.addressed_ccs);
ccs.push(get_comment_parent_creator_id(context.pool(), &self).await?);
let maa = collect_non_local_mentions(&self, &community, context).await?;
let mut update = Update::new(creator.actor_id.to_owned(), note.into_any_base()?);
update
.set_many_contexts(lemmy_context()?)
.set_id(generate_activity_id(UpdateType::Update)?)
.set_to(public())
.set_many_ccs(ccs)
.set_many_ccs(maa.ccs.to_owned())
// Set the mention tags
.set_many_tags(maa.get_tags()?);
@ -294,7 +289,7 @@ impl ApubLikeableType for Comment {
}
struct MentionsAndAddresses {
addressed_ccs: Vec<Url>,
ccs: Vec<Url>,
inboxes: Vec<Url>,
tags: Vec<Mention>,
}
@ -312,23 +307,26 @@ impl MentionsAndAddresses {
/// This takes a comment, and builds a list of to_addresses, inboxes,
/// and mention tags, so they know where to be sent to.
/// Addresses are the users / addresses that go in the cc field.
async fn collect_non_local_mentions_and_addresses(
content: &str,
async fn collect_non_local_mentions(
comment: &Comment,
community: &Community,
context: &LemmyContext,
) -> Result<MentionsAndAddresses, LemmyError> {
let mut addressed_ccs = vec![];
let parent_creator = get_comment_parent_creator(context.pool(), comment).await?;
let mut addressed_ccs = vec![community.actor_id()?, parent_creator.actor_id()?];
// Note: don't include community inbox here, as we send to it separately with `send_to_community()`
let mut inboxes = vec![parent_creator.get_shared_inbox_url()?];
// Add the mention tag
let mut tags = Vec::new();
// Get the inboxes for any mentions
let mentions = scrape_text_for_mentions(&content)
// Get the user IDs for any mentions
let mentions = scrape_text_for_mentions(&comment.content)
.into_iter()
// Filter only the non-local ones
.filter(|m| !m.is_local())
.collect::<Vec<MentionData>>();
let mut mention_inboxes: Vec<Url> = Vec::new();
for mention in &mentions {
// TODO should it be fetching it every time?
if let Ok(actor_id) = fetch_webfinger_url(mention, context.client()).await {
@ -336,19 +334,18 @@ async fn collect_non_local_mentions_and_addresses(
addressed_ccs.push(actor_id.to_owned().to_string().parse()?);
let mention_user = get_or_fetch_and_upsert_user(&actor_id, context, &mut 0).await?;
let shared_inbox = mention_user.get_shared_inbox_url()?;
inboxes.push(mention_user.get_shared_inbox_url()?);
mention_inboxes.push(shared_inbox);
let mut mention_tag = Mention::new();
mention_tag.set_href(actor_id).set_name(mention.full_name());
tags.push(mention_tag);
}
}
let inboxes = mention_inboxes.into_iter().unique().collect();
let inboxes = inboxes.into_iter().unique().collect();
Ok(MentionsAndAddresses {
addressed_ccs,
ccs: addressed_ccs,
inboxes,
tags,
})
@ -356,10 +353,7 @@ async fn collect_non_local_mentions_and_addresses(
/// Returns the user this comment is responding to: for a top-level comment that is the
/// creator of the post, otherwise the creator of the parent comment.
async fn get_comment_parent_creator_id(
pool: &DbPool,
comment: &Comment,
) -> Result<Url, LemmyError> {
async fn get_comment_parent_creator(pool: &DbPool, comment: &Comment) -> Result<User_, LemmyError> {
let parent_creator_id = if let Some(parent_comment_id) = comment.parent_id {
let parent_comment =
blocking(pool, move |conn| Comment::read(conn, parent_comment_id)).await??;
@ -369,8 +363,7 @@ async fn get_comment_parent_creator_id(
let parent_post = blocking(pool, move |conn| Post::read(conn, parent_post_id)).await??;
parent_post.creator_id
};
let parent_creator = blocking(pool, move |conn| User_::read(conn, parent_creator_id)).await??;
Ok(parent_creator.actor_id()?)
Ok(blocking(pool, move |conn| User_::read(conn, parent_creator_id)).await??)
}
/// Turns a user id like `@name@example.com` into an apub ID, like `https://example.com/user/name`,


@ -3,7 +3,7 @@ use crate::{
activity_queue::{send_activity_single_dest, send_to_community_followers},
check_is_apub_id_valid,
extensions::context::lemmy_context,
fetcher::get_or_fetch_and_upsert_user,
fetcher::user::get_or_fetch_and_upsert_user,
ActorType,
};
use activitystreams::{
@ -23,7 +23,9 @@ use activitystreams::{
};
use anyhow::Context;
use itertools::Itertools;
use lemmy_db::{community::Community, community_view::CommunityFollowerView, DbPool};
use lemmy_db_queries::DbPool;
use lemmy_db_schema::source::community::Community;
use lemmy_db_views_actor::community_follower_view::CommunityFollowerView;
use lemmy_structs::blocking;
use lemmy_utils::{location_info, settings::Settings, LemmyError};
use lemmy_websocket::LemmyContext;
@ -179,9 +181,9 @@ impl ActorType for Community {
.await??;
let inboxes = inboxes
.into_iter()
.filter(|i| !i.user_local)
.filter(|i| !i.follower.local)
.map(|u| -> Result<Url, LemmyError> {
let url = Url::parse(&u.user_actor_id)?;
let url = Url::parse(&u.follower.actor_id)?;
let domain = url.domain().context(location_info!())?;
let port = if let Some(port) = url.port() {
format!(":{}", port)


@ -21,7 +21,8 @@ use activitystreams::{
prelude::*,
public,
};
use lemmy_db::{community::Community, post::Post, user::User_, Crud};
use lemmy_db_queries::Crud;
use lemmy_db_schema::source::{community::Community, post::Post, user::User_};
use lemmy_structs::blocking;
use lemmy_utils::LemmyError;
use lemmy_websocket::LemmyContext;


@ -16,7 +16,8 @@ use activitystreams::{
},
prelude::*,
};
use lemmy_db::{private_message::PrivateMessage, user::User_, Crud};
use lemmy_db_queries::Crud;
use lemmy_db_schema::source::{private_message::PrivateMessage, user::User_};
use lemmy_structs::blocking;
use lemmy_utils::LemmyError;
use lemmy_websocket::LemmyContext;


@ -13,12 +13,10 @@ use activitystreams::{
base::{AnyBase, BaseExt, ExtendsExt},
object::ObjectExt,
};
use lemmy_db::{
use lemmy_db_queries::{ApubObject, DbPool, Followable};
use lemmy_db_schema::source::{
community::{Community, CommunityFollower, CommunityFollowerForm},
user::User_,
ApubObject,
DbPool,
Followable,
};
use lemmy_structs::blocking;
use lemmy_utils::LemmyError;


@ -20,13 +20,14 @@ use background_jobs::{
WorkerConfig,
};
use itertools::Itertools;
use lemmy_db::{community::Community, user::User_, DbPool};
use lemmy_db_queries::DbPool;
use lemmy_db_schema::source::{community::Community, user::User_};
use lemmy_utils::{location_info, settings::Settings, LemmyError};
use lemmy_websocket::LemmyContext;
use log::{debug, warn};
use reqwest::Client;
use serde::{export::fmt::Debug, Deserialize, Serialize};
use std::{collections::BTreeMap, future::Future, pin::Pin};
use std::{collections::BTreeMap, env, future::Future, pin::Pin};
use url::Url;
/// Sends a local activity to a single remote actor.
@ -218,6 +219,13 @@ where
return Ok(());
}
// Don't send anything to ourselves
let hostname = Settings::get().get_hostname_without_port()?;
let inboxes: Vec<&Url> = inboxes
.iter()
.filter(|i| i.domain().unwrap() != hostname)
.collect();
let activity = activity.into_any_base()?;
let serialised_activity = serde_json::to_string(&activity)?;
@ -231,11 +239,15 @@ where
for i in inboxes {
let message = SendActivityTask {
activity: serialised_activity.to_owned(),
inbox: i,
inbox: i.to_owned(),
actor_id: actor.actor_id()?,
private_key: actor.private_key().context(location_info!())?,
};
activity_sender.queue::<SendActivityTask>(message)?;
if env::var("LEMMY_TEST_SEND_SYNC").is_ok() {
do_send(message, &Client::default()).await?;
} else {
activity_sender.queue::<SendActivityTask>(message)?;
}
}
Ok(())
@ -260,32 +272,34 @@ impl ActixJob for SendActivityTask {
const BACKOFF: Backoff = Backoff::Exponential(2);
fn run(self, state: Self::State) -> Self::Future {
Box::pin(async move {
let mut headers = BTreeMap::<String, String>::new();
headers.insert("Content-Type".into(), APUB_JSON_CONTENT_TYPE.to_string());
let result = sign_and_send(
&state.client,
headers,
&self.inbox,
self.activity.clone(),
&self.actor_id,
self.private_key.to_owned(),
)
.await;
if let Err(e) = result {
warn!("{}", e);
return Err(anyhow!(
"Failed to send activity {} to {}",
&self.activity,
self.inbox
));
}
Ok(())
})
Box::pin(async move { do_send(self, &state.client).await })
}
}
async fn do_send(task: SendActivityTask, client: &Client) -> Result<(), Error> {
let mut headers = BTreeMap::<String, String>::new();
headers.insert("Content-Type".into(), APUB_JSON_CONTENT_TYPE.to_string());
let result = sign_and_send(
client,
headers,
&task.inbox,
task.activity.clone(),
&task.actor_id,
task.private_key.to_owned(),
)
.await;
if let Err(e) = result {
warn!("{}", e);
return Err(anyhow!(
"Failed to send activity {} to {}",
&task.activity,
task.inbox
));
}
Ok(())
}
pub fn create_activity_queue() -> QueueHandle {
// Start the application server. This guards access to the jobs store
let queue_handle = create_server(Storage::new());
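Two behaviours land in this hunk: inboxes whose domain matches the local hostname are filtered out before anything is queued, and when the LEMMY_TEST_SEND_SYNC environment variable is set the activity is delivered synchronously via do_send instead of going through the background job queue. A minimal sketch of that hostname filter, separate from the queue code above and using a defensive domain check instead of the unwrap in the hunk (the helper name is illustrative):

use url::Url;

// Keep only inboxes whose domain differs from our own hostname.
fn remote_inboxes<'a>(inboxes: &'a [Url], local_hostname: &str) -> Vec<&'a Url> {
  inboxes
    .iter()
    .filter(|i| i.domain().map_or(false, |d| d != local_hostname))
    .collect()
}

fn main() {
  let inboxes = vec![
    Url::parse("https://other.example/inbox").unwrap(),
    Url::parse("https://local.example/inbox").unwrap(),
  ];
  // Only the inbox on other.example survives the filter.
  assert_eq!(remote_inboxes(&inboxes, "local.example").len(), 1);
}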


@ -1,7 +1,8 @@
use activitystreams::unparsed::UnparsedMutExt;
use activitystreams_ext::UnparsedExtension;
use diesel::PgConnection;
use lemmy_db::{category::Category, Crud};
use lemmy_db_queries::Crud;
use lemmy_db_schema::source::category::Category;
use lemmy_utils::LemmyError;
use serde::{Deserialize, Serialize};


@ -1,477 +0,0 @@
use crate::{
check_is_apub_id_valid,
objects::FromApub,
ActorType,
GroupExt,
NoteExt,
PageExt,
PersonExt,
APUB_JSON_CONTENT_TYPE,
};
use activitystreams::{base::BaseExt, collection::OrderedCollection, prelude::*};
use anyhow::{anyhow, Context};
use chrono::NaiveDateTime;
use diesel::result::Error::NotFound;
use lemmy_db::{
comment::Comment,
comment_view::CommentView,
community::{Community, CommunityModerator, CommunityModeratorForm},
community_view::CommunityView,
naive_now,
post::Post,
post_view::PostView,
user::User_,
user_view::UserView,
ApubObject,
Joinable,
SearchType,
};
use lemmy_structs::{blocking, site::SearchResponse};
use lemmy_utils::{
location_info,
request::{retry, RecvError},
settings::Settings,
LemmyError,
};
use lemmy_websocket::LemmyContext;
use log::debug;
use reqwest::Client;
use serde::Deserialize;
use std::{fmt::Debug, time::Duration};
use url::Url;
static ACTOR_REFETCH_INTERVAL_SECONDS: i64 = 24 * 60 * 60;
static ACTOR_REFETCH_INTERVAL_SECONDS_DEBUG: i64 = 10;
/// Maximum number of HTTP requests allowed to handle a single incoming activity (or a single object
/// fetch through the search).
///
/// Tests are passing with a value of 5, so 10 should be safe for production.
static MAX_REQUEST_NUMBER: i32 = 10;
/// Fetch any type of ActivityPub object, handling things like HTTP headers, deserialisation,
/// timeouts etc.
async fn fetch_remote_object<Response>(
client: &Client,
url: &Url,
recursion_counter: &mut i32,
) -> Result<Response, LemmyError>
where
Response: for<'de> Deserialize<'de>,
{
*recursion_counter += 1;
if *recursion_counter > MAX_REQUEST_NUMBER {
return Err(anyhow!("Maximum recursion depth reached").into());
}
check_is_apub_id_valid(&url)?;
let timeout = Duration::from_secs(60);
let json = retry(|| {
client
.get(url.as_str())
.header("Accept", APUB_JSON_CONTENT_TYPE)
.timeout(timeout)
.send()
})
.await?
.json()
.await
.map_err(|e| {
debug!("Receive error, {}", e);
RecvError(e.to_string())
})?;
Ok(json)
}
/// The types of ActivityPub objects that can be fetched directly by searching for their ID.
#[serde(untagged)]
#[derive(serde::Deserialize, Debug)]
enum SearchAcceptedObjects {
Person(Box<PersonExt>),
Group(Box<GroupExt>),
Page(Box<PageExt>),
Comment(Box<NoteExt>),
}
/// Attempt to parse the query as URL, and fetch an ActivityPub object from it.
///
/// Some working examples for use with the `docker/federation/` setup:
/// http://lemmy_alpha:8541/c/main, or !main@lemmy_alpha:8541
/// http://lemmy_beta:8551/u/lemmy_alpha, or @lemmy_beta@lemmy_beta:8551
/// http://lemmy_gamma:8561/post/3
/// http://lemmy_delta:8571/comment/2
pub async fn search_by_apub_id(
query: &str,
context: &LemmyContext,
) -> Result<SearchResponse, LemmyError> {
// Parse the shorthand query url
let query_url = if query.contains('@') {
debug!("Search for {}", query);
let split = query.split('@').collect::<Vec<&str>>();
// User type will look like ['', username, instance]
// Community will look like [!community, instance]
let (name, instance) = if split.len() == 3 {
(format!("/u/{}", split[1]), split[2])
} else if split.len() == 2 {
if split[0].contains('!') {
let split2 = split[0].split('!').collect::<Vec<&str>>();
(format!("/c/{}", split2[1]), split[1])
} else {
return Err(anyhow!("Invalid search query: {}", query).into());
}
} else {
return Err(anyhow!("Invalid search query: {}", query).into());
};
let url = format!(
"{}://{}{}",
Settings::get().get_protocol_string(),
instance,
name
);
Url::parse(&url)?
} else {
Url::parse(&query)?
};
let mut response = SearchResponse {
type_: SearchType::All.to_string(),
comments: vec![],
posts: vec![],
communities: vec![],
users: vec![],
};
let domain = query_url.domain().context("url has no domain")?;
let recursion_counter = &mut 0;
let response = match fetch_remote_object::<SearchAcceptedObjects>(
context.client(),
&query_url,
recursion_counter,
)
.await?
{
SearchAcceptedObjects::Person(p) => {
let user_uri = p.inner.id(domain)?.context("person has no id")?;
let user = get_or_fetch_and_upsert_user(&user_uri, context, recursion_counter).await?;
response.users = vec![
blocking(context.pool(), move |conn| {
UserView::get_user_secure(conn, user.id)
})
.await??,
];
response
}
SearchAcceptedObjects::Group(g) => {
let community_uri = g.inner.id(domain)?.context("group has no id")?;
let community =
get_or_fetch_and_upsert_community(community_uri, context, recursion_counter).await?;
response.communities = vec![
blocking(context.pool(), move |conn| {
CommunityView::read(conn, community.id, None)
})
.await??,
];
response
}
SearchAcceptedObjects::Page(p) => {
let p = Post::from_apub(&p, context, query_url, recursion_counter).await?;
response.posts =
vec![blocking(context.pool(), move |conn| PostView::read(conn, p.id, None)).await??];
response
}
SearchAcceptedObjects::Comment(c) => {
let c = Comment::from_apub(&c, context, query_url, recursion_counter).await?;
response.comments = vec![
blocking(context.pool(), move |conn| {
CommentView::read(conn, c.id, None)
})
.await??,
];
response
}
};
Ok(response)
}
/// Get a remote actor from its apub ID (either a user or a community). Thin wrapper around
/// `get_or_fetch_and_upsert_user()` and `get_or_fetch_and_upsert_community()`.
///
/// If it exists locally and `!should_refetch_actor()`, it is returned directly from the database.
/// Otherwise it is fetched from the remote instance, stored and returned.
pub(crate) async fn get_or_fetch_and_upsert_actor(
apub_id: &Url,
context: &LemmyContext,
recursion_counter: &mut i32,
) -> Result<Box<dyn ActorType>, LemmyError> {
let community = get_or_fetch_and_upsert_community(apub_id, context, recursion_counter).await;
let actor: Box<dyn ActorType> = match community {
Ok(c) => Box::new(c),
Err(_) => Box::new(get_or_fetch_and_upsert_user(apub_id, context, recursion_counter).await?),
};
Ok(actor)
}
/// Get a user from its apub ID.
///
/// If it exists locally and `!should_refetch_actor()`, it is returned directly from the database.
/// Otherwise it is fetched from the remote instance, stored and returned.
pub(crate) async fn get_or_fetch_and_upsert_user(
apub_id: &Url,
context: &LemmyContext,
recursion_counter: &mut i32,
) -> Result<User_, LemmyError> {
let apub_id_owned = apub_id.to_owned();
let user = blocking(context.pool(), move |conn| {
User_::read_from_apub_id(conn, apub_id_owned.as_ref())
})
.await?;
match user {
// If it's older than a day, re-fetch it
Ok(u) if !u.local && should_refetch_actor(u.last_refreshed_at) => {
debug!("Fetching and updating from remote user: {}", apub_id);
let person =
fetch_remote_object::<PersonExt>(context.client(), apub_id, recursion_counter).await;
// If fetching failed, return the existing data.
if person.is_err() {
return Ok(u);
}
let user = User_::from_apub(&person?, context, apub_id.to_owned(), recursion_counter).await?;
let user_id = user.id;
blocking(context.pool(), move |conn| {
User_::mark_as_updated(conn, user_id)
})
.await??;
Ok(user)
}
Ok(u) => Ok(u),
Err(NotFound {}) => {
debug!("Fetching and creating remote user: {}", apub_id);
let person =
fetch_remote_object::<PersonExt>(context.client(), apub_id, recursion_counter).await?;
let user = User_::from_apub(&person, context, apub_id.to_owned(), recursion_counter).await?;
Ok(user)
}
Err(e) => Err(e.into()),
}
}
/// Determines when a remote actor should be refetched from its instance. In release builds, this is
/// `ACTOR_REFETCH_INTERVAL_SECONDS` after the last refetch, in debug builds
/// `ACTOR_REFETCH_INTERVAL_SECONDS_DEBUG`.
///
/// TODO it won't pick up new avatars, summaries etc until a day after.
/// Actors need an "update" activity pushed to other servers to fix this.
fn should_refetch_actor(last_refreshed: NaiveDateTime) -> bool {
let update_interval = if cfg!(debug_assertions) {
// avoid infinite loop when fetching community outbox
chrono::Duration::seconds(ACTOR_REFETCH_INTERVAL_SECONDS_DEBUG)
} else {
chrono::Duration::seconds(ACTOR_REFETCH_INTERVAL_SECONDS)
};
last_refreshed.lt(&(naive_now() - update_interval))
}
/// Get a community from its apub ID.
///
/// If it exists locally and `!should_refetch_actor()`, it is returned directly from the database.
/// Otherwise it is fetched from the remote instance, stored and returned.
pub(crate) async fn get_or_fetch_and_upsert_community(
apub_id: &Url,
context: &LemmyContext,
recursion_counter: &mut i32,
) -> Result<Community, LemmyError> {
let apub_id_owned = apub_id.to_owned();
let community = blocking(context.pool(), move |conn| {
Community::read_from_apub_id(conn, apub_id_owned.as_str())
})
.await?;
match community {
Ok(c) if !c.local && should_refetch_actor(c.last_refreshed_at) => {
debug!("Fetching and updating from remote community: {}", apub_id);
fetch_remote_community(apub_id, context, Some(c), recursion_counter).await
}
Ok(c) => Ok(c),
Err(NotFound {}) => {
debug!("Fetching and creating remote community: {}", apub_id);
fetch_remote_community(apub_id, context, None, recursion_counter).await
}
Err(e) => Err(e.into()),
}
}
/// Request a community by apub ID from a remote instance, including moderators. If `old_community`
/// is set, this is an update for a community which is already known locally. If not, we don't know
/// the community yet and also pull the outbox, to get some initial posts.
async fn fetch_remote_community(
apub_id: &Url,
context: &LemmyContext,
old_community: Option<Community>,
recursion_counter: &mut i32,
) -> Result<Community, LemmyError> {
let group = fetch_remote_object::<GroupExt>(context.client(), apub_id, recursion_counter).await;
// If fetching failed, return the existing data.
if let Some(ref c) = old_community {
if group.is_err() {
return Ok(c.to_owned());
}
}
let group = group?;
let community =
Community::from_apub(&group, context, apub_id.to_owned(), recursion_counter).await?;
// Also add the community moderators
let attributed_to = group.inner.attributed_to().context(location_info!())?;
let creator_and_moderator_uris: Vec<&Url> = attributed_to
.as_many()
.context(location_info!())?
.iter()
.map(|a| a.as_xsd_any_uri().context(""))
.collect::<Result<Vec<&Url>, anyhow::Error>>()?;
let mut creator_and_moderators = Vec::new();
for uri in creator_and_moderator_uris {
let c_or_m = get_or_fetch_and_upsert_user(uri, context, recursion_counter).await?;
creator_and_moderators.push(c_or_m);
}
// TODO: need to make this work to update mods of existing communities
if old_community.is_none() {
let community_id = community.id;
blocking(context.pool(), move |conn| {
for mod_ in creator_and_moderators {
let community_moderator_form = CommunityModeratorForm {
community_id,
user_id: mod_.id,
};
CommunityModerator::join(conn, &community_moderator_form)?;
}
Ok(()) as Result<(), LemmyError>
})
.await??;
}
// fetch outbox (maybe make this conditional)
let outbox = fetch_remote_object::<OrderedCollection>(
context.client(),
&community.get_outbox_url()?,
recursion_counter,
)
.await?;
let outbox_items = outbox.items().context(location_info!())?.clone();
let mut outbox_items = outbox_items.many().context(location_info!())?;
if outbox_items.len() > 20 {
outbox_items = outbox_items[0..20].to_vec();
}
for o in outbox_items {
let page = PageExt::from_any_base(o)?.context(location_info!())?;
let page_id = page.id_unchecked().context(location_info!())?;
// The post creator may be from a blocked instance; if the check errors, skip the post
if check_is_apub_id_valid(page_id).is_err() {
continue;
}
Post::from_apub(&page, context, page_id.to_owned(), recursion_counter).await?;
// TODO: we need to send a websocket update here
}
Ok(community)
}
/// Gets a post by its apub ID. If it exists locally, it is returned directly. Otherwise it is
/// pulled from its apub ID, inserted and returned.
///
/// The parent community is also pulled if necessary. Comments are not pulled.
pub(crate) async fn get_or_fetch_and_insert_post(
post_ap_id: &Url,
context: &LemmyContext,
recursion_counter: &mut i32,
) -> Result<Post, LemmyError> {
let post_ap_id_owned = post_ap_id.to_owned();
let post = blocking(context.pool(), move |conn| {
Post::read_from_apub_id(conn, post_ap_id_owned.as_str())
})
.await?;
match post {
Ok(p) => Ok(p),
Err(NotFound {}) => {
debug!("Fetching and creating remote post: {}", post_ap_id);
let page =
fetch_remote_object::<PageExt>(context.client(), post_ap_id, recursion_counter).await?;
let post = Post::from_apub(&page, context, post_ap_id.to_owned(), recursion_counter).await?;
Ok(post)
}
Err(e) => Err(e.into()),
}
}
/// Gets a comment by its apub ID. If it exists locally, it is returned directly. Otherwise it is
/// pulled from its apub ID, inserted and returned.
///
/// The parent community, post and comment are also pulled if necessary.
pub(crate) async fn get_or_fetch_and_insert_comment(
comment_ap_id: &Url,
context: &LemmyContext,
recursion_counter: &mut i32,
) -> Result<Comment, LemmyError> {
let comment_ap_id_owned = comment_ap_id.to_owned();
let comment = blocking(context.pool(), move |conn| {
Comment::read_from_apub_id(conn, comment_ap_id_owned.as_str())
})
.await?;
match comment {
Ok(p) => Ok(p),
Err(NotFound {}) => {
debug!(
"Fetching and creating remote comment and its parents: {}",
comment_ap_id
);
let comment =
fetch_remote_object::<NoteExt>(context.client(), comment_ap_id, recursion_counter).await?;
let comment = Comment::from_apub(
&comment,
context,
comment_ap_id.to_owned(),
recursion_counter,
)
.await?;
let post_id = comment.post_id;
let post = blocking(context.pool(), move |conn| Post::read(conn, post_id)).await??;
if post.locked {
return Err(anyhow!("Post is locked").into());
}
Ok(comment)
}
Err(e) => Err(e.into()),
}
}


@ -0,0 +1,147 @@
use crate::{
check_is_apub_id_valid,
fetcher::{
fetch::fetch_remote_object,
get_or_fetch_and_upsert_user,
is_deleted,
should_refetch_actor,
},
objects::FromApub,
ActorType,
GroupExt,
PageExt,
};
use activitystreams::{
base::{BaseExt, ExtendsExt},
collection::{CollectionExt, OrderedCollection},
object::ObjectExt,
};
use anyhow::Context;
use diesel::result::Error::NotFound;
use lemmy_db_queries::{source::community::Community_, ApubObject, Joinable};
use lemmy_db_schema::source::{
community::{Community, CommunityModerator, CommunityModeratorForm},
post::Post,
};
use lemmy_structs::blocking;
use lemmy_utils::{location_info, LemmyError};
use lemmy_websocket::LemmyContext;
use log::debug;
use url::Url;
/// Get a community from its apub ID.
///
/// If it exists locally and `!should_refetch_actor()`, it is returned directly from the database.
/// Otherwise it is fetched from the remote instance, stored and returned.
pub(crate) async fn get_or_fetch_and_upsert_community(
apub_id: &Url,
context: &LemmyContext,
recursion_counter: &mut i32,
) -> Result<Community, LemmyError> {
let apub_id_owned = apub_id.to_owned();
let community = blocking(context.pool(), move |conn| {
Community::read_from_apub_id(conn, apub_id_owned.as_str())
})
.await?;
match community {
Ok(c) if !c.local && should_refetch_actor(c.last_refreshed_at) => {
debug!("Fetching and updating from remote community: {}", apub_id);
fetch_remote_community(apub_id, context, Some(c), recursion_counter).await
}
Ok(c) => Ok(c),
Err(NotFound {}) => {
debug!("Fetching and creating remote community: {}", apub_id);
fetch_remote_community(apub_id, context, None, recursion_counter).await
}
Err(e) => Err(e.into()),
}
}
/// Request a community by apub ID from a remote instance, including moderators. If `old_community`
/// is set, this is an update for a community which is already known locally. If not, we don't know
/// the community yet and also pull the outbox, to get some initial posts.
async fn fetch_remote_community(
apub_id: &Url,
context: &LemmyContext,
old_community: Option<Community>,
recursion_counter: &mut i32,
) -> Result<Community, LemmyError> {
let group = fetch_remote_object::<GroupExt>(context.client(), apub_id, recursion_counter).await;
if let Some(c) = old_community.to_owned() {
if is_deleted(&group) {
blocking(context.pool(), move |conn| {
Community::update_deleted(conn, c.id, true)
})
.await??;
} else if group.is_err() {
// If fetching failed, return the existing data.
return Ok(c);
}
}
let group = group?;
let community =
Community::from_apub(&group, context, apub_id.to_owned(), recursion_counter).await?;
// Also add the community moderators
let attributed_to = group.inner.attributed_to().context(location_info!())?;
let creator_and_moderator_uris: Vec<&Url> = attributed_to
.as_many()
.context(location_info!())?
.iter()
.map(|a| a.as_xsd_any_uri().context(""))
.collect::<Result<Vec<&Url>, anyhow::Error>>()?;
let mut creator_and_moderators = Vec::new();
for uri in creator_and_moderator_uris {
let c_or_m = get_or_fetch_and_upsert_user(uri, context, recursion_counter).await?;
creator_and_moderators.push(c_or_m);
}
// TODO: need to make this work to update mods of existing communities
if old_community.is_none() {
let community_id = community.id;
blocking(context.pool(), move |conn| {
for mod_ in creator_and_moderators {
let community_moderator_form = CommunityModeratorForm {
community_id,
user_id: mod_.id,
};
CommunityModerator::join(conn, &community_moderator_form)?;
}
Ok(()) as Result<(), LemmyError>
})
.await??;
}
// fetch outbox (maybe make this conditional)
let outbox = fetch_remote_object::<OrderedCollection>(
context.client(),
&community.get_outbox_url()?,
recursion_counter,
)
.await?;
let outbox_items = outbox.items().context(location_info!())?.clone();
let mut outbox_items = outbox_items.many().context(location_info!())?;
if outbox_items.len() > 20 {
outbox_items = outbox_items[0..20].to_vec();
}
for o in outbox_items {
let page = PageExt::from_any_base(o)?.context(location_info!())?;
let page_id = page.id_unchecked().context(location_info!())?;
// The post creator may be from a blocked instance; if the check errors, skip the post
if check_is_apub_id_valid(page_id).is_err() {
continue;
}
Post::from_apub(&page, context, page_id.to_owned(), recursion_counter).await?;
// TODO: we need to send a websocket update here
}
Ok(community)
}


@ -0,0 +1,82 @@
use crate::{check_is_apub_id_valid, APUB_JSON_CONTENT_TYPE};
use anyhow::anyhow;
use lemmy_utils::{request::retry, LemmyError};
use reqwest::{Client, StatusCode};
use serde::Deserialize;
use std::time::Duration;
use thiserror::Error;
use url::Url;
/// Maximum number of HTTP requests allowed to handle a single incoming activity (or a single object
/// fetch through the search).
///
/// Tests are passing with a value of 5, so 10 should be safe for production.
static MAX_REQUEST_NUMBER: i32 = 10;
#[derive(Debug, Error)]
pub(in crate::fetcher) struct FetchError {
pub inner: anyhow::Error,
pub status_code: Option<StatusCode>,
}
impl From<LemmyError> for FetchError {
fn from(t: LemmyError) -> Self {
FetchError {
inner: t.inner,
status_code: None,
}
}
}
impl From<reqwest::Error> for FetchError {
fn from(t: reqwest::Error) -> Self {
let status = t.status();
FetchError {
inner: t.into(),
status_code: status,
}
}
}
impl std::fmt::Display for FetchError {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
// Delegate to the wrapped error; formatting `self` here would recurse into this impl
std::fmt::Display::fmt(&self.inner, f)
}
}
/// Fetch any type of ActivityPub object, handling things like HTTP headers, deserialisation,
/// timeouts etc.
pub(in crate::fetcher) async fn fetch_remote_object<Response>(
client: &Client,
url: &Url,
recursion_counter: &mut i32,
) -> Result<Response, FetchError>
where
Response: for<'de> Deserialize<'de> + std::fmt::Debug,
{
*recursion_counter += 1;
if *recursion_counter > MAX_REQUEST_NUMBER {
return Err(LemmyError::from(anyhow!("Maximum recursion depth reached")).into());
}
check_is_apub_id_valid(&url)?;
let timeout = Duration::from_secs(60);
let res = retry(|| {
client
.get(url.as_str())
.header("Accept", APUB_JSON_CONTENT_TYPE)
.timeout(timeout)
.send()
})
.await?;
if res.status() == StatusCode::GONE {
return Err(FetchError {
inner: anyhow!("Remote object {} was deleted", url),
status_code: Some(res.status()),
});
}
Ok(res.json().await?)
}
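fetch_remote_object threads a mutable counter through every nested fetch, so a single incoming activity (or search) can never fan out into more than MAX_REQUEST_NUMBER outgoing requests. The guard is small enough to show on its own; this standalone sketch uses illustrative names rather than the crate's types:

const MAX_REQUEST_NUMBER: i32 = 10;

// Bump the shared counter and fail once the per-activity request budget is spent.
fn check_request_budget(recursion_counter: &mut i32) -> Result<(), String> {
  *recursion_counter += 1;
  if *recursion_counter > MAX_REQUEST_NUMBER {
    return Err("Maximum recursion depth reached".to_string());
  }
  Ok(())
}

fn main() {
  let recursion_counter = &mut 0;
  for _ in 0..MAX_REQUEST_NUMBER {
    assert!(check_request_budget(recursion_counter).is_ok());
  }
  // The eleventh request in the same chain is rejected.
  assert!(check_request_budget(recursion_counter).is_err());
}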


@ -0,0 +1,72 @@
pub(crate) mod community;
mod fetch;
pub(crate) mod objects;
pub mod search;
pub(crate) mod user;
use crate::{
fetcher::{
community::get_or_fetch_and_upsert_community,
fetch::FetchError,
user::get_or_fetch_and_upsert_user,
},
ActorType,
};
use chrono::NaiveDateTime;
use http::StatusCode;
use lemmy_db_schema::naive_now;
use lemmy_utils::LemmyError;
use lemmy_websocket::LemmyContext;
use serde::Deserialize;
use url::Url;
static ACTOR_REFETCH_INTERVAL_SECONDS: i64 = 24 * 60 * 60;
static ACTOR_REFETCH_INTERVAL_SECONDS_DEBUG: i64 = 10;
fn is_deleted<Response>(fetch_response: &Result<Response, FetchError>) -> bool
where
Response: for<'de> Deserialize<'de>,
{
if let Err(e) = fetch_response {
if let Some(status) = e.status_code {
if status == StatusCode::GONE {
return true;
}
}
}
false
}
/// Get a remote actor from its apub ID (either a user or a community). Thin wrapper around
/// `get_or_fetch_and_upsert_user()` and `get_or_fetch_and_upsert_community()`.
///
/// If it exists locally and `!should_refetch_actor()`, it is returned directly from the database.
/// Otherwise it is fetched from the remote instance, stored and returned.
pub(crate) async fn get_or_fetch_and_upsert_actor(
apub_id: &Url,
context: &LemmyContext,
recursion_counter: &mut i32,
) -> Result<Box<dyn ActorType>, LemmyError> {
let community = get_or_fetch_and_upsert_community(apub_id, context, recursion_counter).await;
let actor: Box<dyn ActorType> = match community {
Ok(c) => Box::new(c),
Err(_) => Box::new(get_or_fetch_and_upsert_user(apub_id, context, recursion_counter).await?),
};
Ok(actor)
}
/// Determines when a remote actor should be refetched from its instance. In release builds, this is
/// `ACTOR_REFETCH_INTERVAL_SECONDS` after the last refetch, in debug builds
/// `ACTOR_REFETCH_INTERVAL_SECONDS_DEBUG`.
///
/// TODO it won't pick up new avatars, summaries etc until a day after.
/// Actors need an "update" activity pushed to other servers to fix this.
fn should_refetch_actor(last_refreshed: NaiveDateTime) -> bool {
let update_interval = if cfg!(debug_assertions) {
// avoid infinite loop when fetching community outbox
chrono::Duration::seconds(ACTOR_REFETCH_INTERVAL_SECONDS_DEBUG)
} else {
chrono::Duration::seconds(ACTOR_REFETCH_INTERVAL_SECONDS)
};
last_refreshed.lt(&(naive_now() - update_interval))
}
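should_refetch_actor compares last_refreshed against an interval of one day in release builds and ten seconds in debug builds. A usage sketch of the same comparison, assuming only the chrono crate:

use chrono::{Duration, Utc};

// True when the stored timestamp is older than the allowed interval.
fn needs_refetch(last_refreshed: chrono::NaiveDateTime, interval: Duration) -> bool {
  last_refreshed < Utc::now().naive_utc() - interval
}

fn main() {
  let two_days_ago = Utc::now().naive_utc() - Duration::days(2);
  assert!(needs_refetch(two_days_ago, Duration::days(1)));

  let just_now = Utc::now().naive_utc();
  assert!(!needs_refetch(just_now, Duration::days(1)));
}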


@ -0,0 +1,83 @@
use crate::{fetcher::fetch::fetch_remote_object, objects::FromApub, NoteExt, PageExt};
use anyhow::anyhow;
use diesel::result::Error::NotFound;
use lemmy_db_queries::{ApubObject, Crud};
use lemmy_db_schema::source::{comment::Comment, post::Post};
use lemmy_structs::blocking;
use lemmy_utils::LemmyError;
use lemmy_websocket::LemmyContext;
use log::debug;
use url::Url;
/// Gets a post by its apub ID. If it exists locally, it is returned directly. Otherwise it is
/// pulled from its apub ID, inserted and returned.
///
/// The parent community is also pulled if necessary. Comments are not pulled.
pub(crate) async fn get_or_fetch_and_insert_post(
post_ap_id: &Url,
context: &LemmyContext,
recursion_counter: &mut i32,
) -> Result<Post, LemmyError> {
let post_ap_id_owned = post_ap_id.to_owned();
let post = blocking(context.pool(), move |conn| {
Post::read_from_apub_id(conn, post_ap_id_owned.as_str())
})
.await?;
match post {
Ok(p) => Ok(p),
Err(NotFound {}) => {
debug!("Fetching and creating remote post: {}", post_ap_id);
let page =
fetch_remote_object::<PageExt>(context.client(), post_ap_id, recursion_counter).await?;
let post = Post::from_apub(&page, context, post_ap_id.to_owned(), recursion_counter).await?;
Ok(post)
}
Err(e) => Err(e.into()),
}
}
/// Gets a comment by its apub ID. If it exists locally, it is returned directly. Otherwise it is
/// pulled from its apub ID, inserted and returned.
///
/// The parent community, post and comment are also pulled if necessary.
pub(crate) async fn get_or_fetch_and_insert_comment(
comment_ap_id: &Url,
context: &LemmyContext,
recursion_counter: &mut i32,
) -> Result<Comment, LemmyError> {
let comment_ap_id_owned = comment_ap_id.to_owned();
let comment = blocking(context.pool(), move |conn| {
Comment::read_from_apub_id(conn, comment_ap_id_owned.as_str())
})
.await?;
match comment {
Ok(p) => Ok(p),
Err(NotFound {}) => {
debug!(
"Fetching and creating remote comment and its parents: {}",
comment_ap_id
);
let comment =
fetch_remote_object::<NoteExt>(context.client(), comment_ap_id, recursion_counter).await?;
let comment = Comment::from_apub(
&comment,
context,
comment_ap_id.to_owned(),
recursion_counter,
)
.await?;
let post_id = comment.post_id;
let post = blocking(context.pool(), move |conn| Post::read(conn, post_id)).await??;
if post.locked {
return Err(anyhow!("Post is locked").into());
}
Ok(comment)
}
Err(e) => Err(e.into()),
}
}


@ -0,0 +1,206 @@
use crate::{
fetcher::{
fetch::fetch_remote_object,
get_or_fetch_and_upsert_community,
get_or_fetch_and_upsert_user,
is_deleted,
},
find_object_by_id,
objects::FromApub,
GroupExt,
NoteExt,
Object,
PageExt,
PersonExt,
};
use activitystreams::base::BaseExt;
use anyhow::{anyhow, Context};
use lemmy_db_queries::{
source::{
comment::Comment_,
community::Community_,
post::Post_,
private_message::PrivateMessage_,
user::User,
},
SearchType,
};
use lemmy_db_schema::source::{
comment::Comment,
community::Community,
post::Post,
private_message::PrivateMessage,
user::User_,
};
use lemmy_db_views::{comment_view::CommentView, post_view::PostView};
use lemmy_db_views_actor::{community_view::CommunityView, user_view::UserViewSafe};
use lemmy_structs::{blocking, site::SearchResponse};
use lemmy_utils::{settings::Settings, LemmyError};
use lemmy_websocket::LemmyContext;
use log::debug;
use url::Url;
/// The types of ActivityPub objects that can be fetched directly by searching for their ID.
#[serde(untagged)]
#[derive(serde::Deserialize, Debug)]
enum SearchAcceptedObjects {
Person(Box<PersonExt>),
Group(Box<GroupExt>),
Page(Box<PageExt>),
Comment(Box<NoteExt>),
}
/// Attempt to parse the query as URL, and fetch an ActivityPub object from it.
///
/// Some working examples for use with the `docker/federation/` setup:
/// http://lemmy_alpha:8541/c/main, or !main@lemmy_alpha:8541
/// http://lemmy_beta:8551/u/lemmy_alpha, or @lemmy_beta@lemmy_beta:8551
/// http://lemmy_gamma:8561/post/3
/// http://lemmy_delta:8571/comment/2
pub async fn search_by_apub_id(
query: &str,
context: &LemmyContext,
) -> Result<SearchResponse, LemmyError> {
// Parse the shorthand query url
let query_url = if query.contains('@') {
debug!("Search for {}", query);
let split = query.split('@').collect::<Vec<&str>>();
// User type will look like ['', username, instance]
// Community will look like [!community, instance]
let (name, instance) = if split.len() == 3 {
(format!("/u/{}", split[1]), split[2])
} else if split.len() == 2 {
if split[0].contains('!') {
let split2 = split[0].split('!').collect::<Vec<&str>>();
(format!("/c/{}", split2[1]), split[1])
} else {
return Err(anyhow!("Invalid search query: {}", query).into());
}
} else {
return Err(anyhow!("Invalid search query: {}", query).into());
};
let url = format!(
"{}://{}{}",
Settings::get().get_protocol_string(),
instance,
name
);
Url::parse(&url)?
} else {
Url::parse(&query)?
};
let recursion_counter = &mut 0;
let fetch_response =
fetch_remote_object::<SearchAcceptedObjects>(context.client(), &query_url, recursion_counter)
.await;
if is_deleted(&fetch_response) {
delete_object_locally(&query_url, context).await?;
}
// Necessary because we get a stack overflow using FetchError
let fet_res = fetch_response.map_err(|e| LemmyError::from(e.inner))?;
build_response(fet_res, query_url, recursion_counter, context).await
}
async fn build_response(
fetch_response: SearchAcceptedObjects,
query_url: Url,
recursion_counter: &mut i32,
context: &LemmyContext,
) -> Result<SearchResponse, LemmyError> {
let domain = query_url.domain().context("url has no domain")?;
let mut response = SearchResponse {
type_: SearchType::All.to_string(),
comments: vec![],
posts: vec![],
communities: vec![],
users: vec![],
};
match fetch_response {
SearchAcceptedObjects::Person(p) => {
let user_uri = p.inner.id(domain)?.context("person has no id")?;
let user = get_or_fetch_and_upsert_user(&user_uri, context, recursion_counter).await?;
response.users = vec![
blocking(context.pool(), move |conn| {
UserViewSafe::read(conn, user.id)
})
.await??,
];
}
SearchAcceptedObjects::Group(g) => {
let community_uri = g.inner.id(domain)?.context("group has no id")?;
let community =
get_or_fetch_and_upsert_community(community_uri, context, recursion_counter).await?;
response.communities = vec![
blocking(context.pool(), move |conn| {
CommunityView::read(conn, community.id, None)
})
.await??,
];
}
SearchAcceptedObjects::Page(p) => {
let p = Post::from_apub(&p, context, query_url, recursion_counter).await?;
response.posts =
vec![blocking(context.pool(), move |conn| PostView::read(conn, p.id, None)).await??];
}
SearchAcceptedObjects::Comment(c) => {
let c = Comment::from_apub(&c, context, query_url, recursion_counter).await?;
response.comments = vec![
blocking(context.pool(), move |conn| {
CommentView::read(conn, c.id, None)
})
.await??,
];
}
};
Ok(response)
}
async fn delete_object_locally(query_url: &Url, context: &LemmyContext) -> Result<(), LemmyError> {
let res = find_object_by_id(context, query_url.to_owned()).await?;
match res {
Object::Comment(c) => {
blocking(context.pool(), move |conn| {
Comment::update_deleted(conn, c.id, true)
})
.await??;
}
Object::Post(p) => {
blocking(context.pool(), move |conn| {
Post::update_deleted(conn, p.id, true)
})
.await??;
}
Object::User(u) => {
// TODO: implement update_deleted() for user, move it to ApubObject trait
blocking(context.pool(), move |conn| {
User_::delete_account(conn, u.id)
})
.await??;
}
Object::Community(c) => {
blocking(context.pool(), move |conn| {
Community::update_deleted(conn, c.id, true)
})
.await??;
}
Object::PrivateMessage(pm) => {
blocking(context.pool(), move |conn| {
PrivateMessage::update_deleted(conn, pm.id, true)
})
.await??;
}
}
Err(anyhow!("Object was deleted").into())
}
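search_by_apub_id accepts either a full URL or the shorthand forms from the doc comment (@user@host, !community@host) and rewrites the shorthand into a /u/ or /c/ path on the named instance before fetching. An illustrative, standalone version of just that rewriting step (not the project's code):

// Map "@user@host" to ("/u/user", "host") and "!community@host" to ("/c/community", "host").
fn parse_shorthand(query: &str) -> Option<(String, String)> {
  let split: Vec<&str> = query.split('@').collect();
  if split.len() == 3 && split[0].is_empty() {
    // ['', username, instance]
    Some((format!("/u/{}", split[1]), split[2].to_string()))
  } else if split.len() == 2 && split[0].starts_with('!') {
    // ['!community', instance]
    Some((format!("/c/{}", &split[0][1..]), split[1].to_string()))
  } else {
    None
  }
}

fn main() {
  assert_eq!(
    parse_shorthand("@lemmy_alpha@lemmy_beta:8551"),
    Some(("/u/lemmy_alpha".to_string(), "lemmy_beta:8551".to_string()))
  );
  assert_eq!(
    parse_shorthand("!main@lemmy_alpha:8541"),
    Some(("/c/main".to_string(), "lemmy_alpha:8541".to_string()))
  );
}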


@ -0,0 +1,71 @@
use crate::{
fetcher::{fetch::fetch_remote_object, is_deleted, should_refetch_actor},
objects::FromApub,
PersonExt,
};
use anyhow::anyhow;
use diesel::result::Error::NotFound;
use lemmy_db_queries::{source::user::User, ApubObject};
use lemmy_db_schema::source::user::User_;
use lemmy_structs::blocking;
use lemmy_utils::LemmyError;
use lemmy_websocket::LemmyContext;
use log::debug;
use url::Url;
/// Get a user from its apub ID.
///
/// If it exists locally and `!should_refetch_actor()`, it is returned directly from the database.
/// Otherwise it is fetched from the remote instance, stored and returned.
pub(crate) async fn get_or_fetch_and_upsert_user(
apub_id: &Url,
context: &LemmyContext,
recursion_counter: &mut i32,
) -> Result<User_, LemmyError> {
let apub_id_owned = apub_id.to_owned();
let user = blocking(context.pool(), move |conn| {
User_::read_from_apub_id(conn, apub_id_owned.as_ref())
})
.await?;
match user {
// If it's older than a day, re-fetch it
Ok(u) if !u.local && should_refetch_actor(u.last_refreshed_at) => {
debug!("Fetching and updating from remote user: {}", apub_id);
let person =
fetch_remote_object::<PersonExt>(context.client(), apub_id, recursion_counter).await;
if is_deleted(&person) {
// TODO: use User_::update_deleted() once implemented
blocking(context.pool(), move |conn| {
User_::delete_account(conn, u.id)
})
.await??;
return Err(anyhow!("User was deleted by remote instance").into());
} else if person.is_err() {
return Ok(u);
}
let user = User_::from_apub(&person?, context, apub_id.to_owned(), recursion_counter).await?;
let user_id = user.id;
blocking(context.pool(), move |conn| {
User_::mark_as_updated(conn, user_id)
})
.await??;
Ok(user)
}
Ok(u) => Ok(u),
Err(NotFound {}) => {
debug!("Fetching and creating remote user: {}", apub_id);
let person =
fetch_remote_object::<PersonExt>(context.client(), apub_id, recursion_counter).await?;
let user = User_::from_apub(&person, context, apub_id.to_owned(), recursion_counter).await?;
Ok(user)
}
Err(e) => Err(e.into()),
}
}


@ -4,7 +4,8 @@ use crate::{
};
use actix_web::{body::Body, web, web::Path, HttpResponse};
use diesel::result::Error::NotFound;
use lemmy_db::{comment::Comment, Crud};
use lemmy_db_queries::Crud;
use lemmy_db_schema::source::comment::Comment;
use lemmy_structs::blocking;
use lemmy_utils::LemmyError;
use lemmy_websocket::LemmyContext;


@ -9,7 +9,9 @@ use activitystreams::{
collection::{CollectionExt, OrderedCollection, UnorderedCollection},
};
use actix_web::{body::Body, web, HttpResponse};
use lemmy_db::{community::Community, community_view::CommunityFollowerView, post::Post};
use lemmy_db_queries::source::{community::Community_, post::Post_};
use lemmy_db_schema::source::{community::Community, post::Post};
use lemmy_db_views_actor::community_follower_view::CommunityFollowerView;
use lemmy_structs::blocking;
use lemmy_utils::LemmyError;
use lemmy_websocket::LemmyContext;


@ -1,11 +1,12 @@
use crate::APUB_JSON_CONTENT_TYPE;
use actix_web::{body::Body, web, HttpResponse};
use lemmy_db::activity::Activity;
use http::StatusCode;
use lemmy_db_queries::source::activity::Activity_;
use lemmy_db_schema::source::activity::Activity;
use lemmy_structs::blocking;
use lemmy_utils::{settings::Settings, LemmyError};
use lemmy_websocket::LemmyContext;
use serde::{Deserialize, Serialize};
use http::StatusCode;
pub mod comment;
pub mod community;


@ -4,7 +4,8 @@ use crate::{
};
use actix_web::{body::Body, web, HttpResponse};
use diesel::result::Error::NotFound;
use lemmy_db::post::Post;
use lemmy_db_queries::Crud;
use lemmy_db_schema::source::post::Post;
use lemmy_structs::blocking;
use lemmy_utils::LemmyError;
use lemmy_websocket::LemmyContext;


@ -1,6 +1,6 @@
use crate::{
extensions::context::lemmy_context,
http::create_apub_response,
http::{create_apub_response, create_apub_tombstone_response},
objects::ToApub,
ActorType,
};
@ -9,7 +9,8 @@ use activitystreams::{
collection::{CollectionExt, OrderedCollection},
};
use actix_web::{body::Body, web, HttpResponse};
use lemmy_db::user::User_;
use lemmy_db_queries::source::user::User;
use lemmy_db_schema::source::user::User_;
use lemmy_structs::blocking;
use lemmy_utils::LemmyError;
use lemmy_websocket::LemmyContext;
@ -27,12 +28,19 @@ pub async fn get_apub_user_http(
context: web::Data<LemmyContext>,
) -> Result<HttpResponse<Body>, LemmyError> {
let user_name = info.into_inner().user_name;
// TODO: this needs to be able to read deleted users, so that it can send tombstones
let user = blocking(context.pool(), move |conn| {
User_::find_by_email_or_username(conn, &user_name)
})
.await??;
let u = user.to_apub(context.pool()).await?;
Ok(create_apub_response(&u))
if !user.deleted {
let apub = user.to_apub(context.pool()).await?;
Ok(create_apub_response(&apub))
} else {
Ok(create_apub_tombstone_response(&user.to_tombstone()?))
}
}
pub async fn get_apub_user_outbox(


@ -26,14 +26,12 @@ use activitystreams::{
};
use actix_web::{web, HttpRequest, HttpResponse};
use anyhow::{anyhow, Context};
use lemmy_db::{
use lemmy_db_queries::{source::community::Community_, ApubObject, DbPool, Followable};
use lemmy_db_schema::source::{
community::{Community, CommunityFollower, CommunityFollowerForm},
community_view::CommunityUserBanView,
user::User_,
ApubObject,
DbPool,
Followable,
};
use lemmy_db_views_actor::community_user_ban_view::CommunityUserBanView;
use lemmy_structs::blocking;
use lemmy_utils::{location_info, LemmyError};
use lemmy_websocket::LemmyContext;
@ -82,7 +80,7 @@ pub async fn community_inbox(
Community::read_from_name(&conn, &path)
})
.await??;
let to_and_cc = get_activity_to_and_cc(&activity)?;
let to_and_cc = get_activity_to_and_cc(&activity);
if !to_and_cc.contains(&&community.actor_id()?) {
return Err(anyhow!("Activity delivered to wrong community").into());
}
@ -177,7 +175,7 @@ pub(crate) async fn community_receive_message(
.await?;
}
return Ok(HttpResponse::Ok().finish());
Ok(HttpResponse::Ok().finish())
}
/// Handle a follow request from a remote user, adding the user as follower and returning an


@ -12,7 +12,8 @@ use activitystreams::{
};
use actix_web::HttpRequest;
use anyhow::{anyhow, Context};
use lemmy_db::{activity::Activity, community::Community, user::User_, ApubObject, DbPool};
use lemmy_db_queries::{source::activity::Activity_, ApubObject, DbPool};
use lemmy_db_schema::source::{activity::Activity, community::Community, user::User_};
use lemmy_structs::blocking;
use lemmy_utils::{location_info, settings::Settings, LemmyError};
use lemmy_websocket::LemmyContext;
@ -50,7 +51,7 @@ pub(crate) async fn is_activity_already_known(
}
}
pub(crate) fn get_activity_to_and_cc<T, Kind>(activity: &T) -> Result<Vec<Url>, LemmyError>
pub(crate) fn get_activity_to_and_cc<T, Kind>(activity: &T) -> Vec<Url>
where
T: AsBase<Kind> + AsObject<Kind> + ActorAndObjectRefExt,
{
@ -75,14 +76,14 @@ where
.collect();
to_and_cc.append(&mut cc);
}
Ok(to_and_cc)
to_and_cc
}
pub(crate) fn is_addressed_to_public<T, Kind>(activity: &T) -> Result<(), LemmyError>
where
T: AsBase<Kind> + AsObject<Kind> + ActorAndObjectRefExt,
{
let to_and_cc = get_activity_to_and_cc(activity)?;
let to_and_cc = get_activity_to_and_cc(activity);
if to_and_cc.contains(&public()) {
Ok(())
} else {


@ -31,8 +31,10 @@ use crate::{
receive_unhandled_activity,
verify_activity_domains_valid,
},
fetcher::{get_or_fetch_and_insert_comment, get_or_fetch_and_insert_post},
fetcher::objects::{get_or_fetch_and_insert_comment, get_or_fetch_and_insert_post},
find_post_or_comment_by_id,
inbox::is_addressed_to_public,
PostOrComment,
};
use activitystreams::{
activity::{Create, Delete, Dislike, Like, Remove, Undo, Update},
@ -41,7 +43,8 @@ use activitystreams::{
};
use anyhow::Context;
use diesel::result::Error::NotFound;
use lemmy_db::{comment::Comment, post::Post, site::Site, ApubObject, Crud};
use lemmy_db_queries::Crud;
use lemmy_db_schema::source::site::Site;
use lemmy_structs::blocking;
use lemmy_utils::{location_info, LemmyError};
use lemmy_websocket::LemmyContext;
@ -316,39 +319,6 @@ pub(in crate::inbox) async fn receive_undo_dislike_for_community(
}
}
enum PostOrComment {
Comment(Comment),
Post(Post),
}
/// Tries to find a post or comment in the local database, without any network requests.
/// This is used to handle deletions and removals, because if we don't have the object, we can
/// simply ignore the activity.
async fn find_post_or_comment_by_id(
context: &LemmyContext,
apub_id: Url,
) -> Result<PostOrComment, LemmyError> {
let ap_id = apub_id.to_string();
let post = blocking(context.pool(), move |conn| {
Post::read_from_apub_id(conn, &ap_id)
})
.await?;
if let Ok(p) = post {
return Ok(PostOrComment::Post(p));
}
let ap_id = apub_id.to_string();
let comment = blocking(context.pool(), move |conn| {
Comment::read_from_apub_id(conn, &ap_id)
})
.await?;
if let Ok(c) = comment {
return Ok(PostOrComment::Comment(c));
}
return Err(NotFound.into());
}
async fn fetch_post_or_comment_by_id(
apub_id: &Url,
context: &LemmyContext,
@ -362,7 +332,7 @@ async fn fetch_post_or_comment_by_id(
return Ok(PostOrComment::Comment(comment));
}
return Err(NotFound.into());
Err(NotFound.into())
}
fn get_like_object_id<Activity>(like_or_dislike: &Activity) -> Result<Url, LemmyError>


@ -15,7 +15,8 @@ use crate::{
use activitystreams::{activity::ActorAndObject, prelude::*};
use actix_web::{web, HttpRequest, HttpResponse};
use anyhow::Context;
use lemmy_db::{community::Community, ApubObject, DbPool};
use lemmy_db_queries::{ApubObject, DbPool};
use lemmy_db_schema::source::community::Community;
use lemmy_structs::blocking;
use lemmy_utils::{location_info, LemmyError};
use lemmy_websocket::LemmyContext;
@ -66,7 +67,7 @@ pub async fn shared_inbox(
let activity_any_base = activity.clone().into_any_base()?;
let mut res: Option<HttpResponse> = None;
let to_and_cc = get_activity_to_and_cc(&activity)?;
let to_and_cc = get_activity_to_and_cc(&activity);
// Handle the community first, so that if the sender is banned by the community, it errors out.
// If we handled the user inbox first, the activity would be inserted into the database before the
// community could check for bans.


@ -17,7 +17,7 @@ use crate::{
verify_activity_domains_valid,
},
check_is_apub_id_valid,
fetcher::get_or_fetch_and_upsert_community,
fetcher::community::get_or_fetch_and_upsert_community,
inbox::{
assert_activity_not_local,
get_activity_id,
@ -48,12 +48,11 @@ use activitystreams::{
use actix_web::{web, HttpRequest, HttpResponse};
use anyhow::{anyhow, Context};
use diesel::NotFound;
use lemmy_db::{
use lemmy_db_queries::{source::user::User, ApubObject, Followable};
use lemmy_db_schema::source::{
community::{Community, CommunityFollower},
private_message::PrivateMessage,
user::User_,
ApubObject,
Followable,
};
use lemmy_structs::blocking;
use lemmy_utils::{location_info, LemmyError};
@ -102,7 +101,7 @@ pub async fn user_inbox(
User_::read_from_name(&conn, &username)
})
.await??;
let to_and_cc = get_activity_to_and_cc(&activity)?;
let to_and_cc = get_activity_to_and_cc(&activity);
// TODO: we should also accept activities that are sent to community followers
if !to_and_cc.contains(&&user.actor_id()?) {
return Err(anyhow!("Activity delivered to wrong user").into());
@ -173,7 +172,7 @@ async fn is_for_user_inbox(
context: &LemmyContext,
activity: &UserAcceptedActivities,
) -> Result<(), LemmyError> {
let to_and_cc = get_activity_to_and_cc(activity)?;
let to_and_cc = get_activity_to_and_cc(activity);
// Check if it is addressed directly to any local user
if is_addressed_to_local_user(&to_and_cc, context.pool()).await? {
return Ok(());
@ -394,5 +393,5 @@ async fn find_community_or_private_message_by_id(
return Ok(CommunityOrPrivateMessage::PrivateMessage(p));
}
return Err(NotFound.into());
Err(NotFound.into())
}


@ -22,7 +22,16 @@ use activitystreams::{
};
use activitystreams_ext::{Ext1, Ext2};
use anyhow::{anyhow, Context};
use lemmy_db::{activity::Activity, user::User_, DbPool};
use diesel::NotFound;
use lemmy_db_queries::{source::activity::Activity_, ApubObject, DbPool};
use lemmy_db_schema::source::{
activity::Activity,
comment::Comment,
community::Community,
post::Post,
private_message::PrivateMessage,
user::User_,
};
use lemmy_structs::blocking;
use lemmy_utils::{location_info, settings::Settings, LemmyError};
use lemmy_websocket::LemmyContext;
@ -52,13 +61,7 @@ pub static APUB_JSON_CONTENT_TYPE: &str = "application/activity+json";
fn check_is_apub_id_valid(apub_id: &Url) -> Result<(), LemmyError> {
let settings = Settings::get();
let domain = apub_id.domain().context(location_info!())?.to_string();
let local_instance = settings
.hostname
.split(':')
.collect::<Vec<&str>>()
.first()
.context(location_info!())?
.to_string();
let local_instance = settings.get_hostname_without_port()?;
if !settings.federation.enabled {
return if domain == local_instance {
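The hunk above replaces the inline hostname.split(':') handling with Settings::get_hostname_without_port(). Presumably that helper does the same thing the removed lines did; a standalone sketch of that behaviour, with an illustrative function name:

// Strip an optional :port suffix from a configured hostname.
fn hostname_without_port(hostname: &str) -> &str {
  hostname.split(':').next().unwrap_or(hostname)
}

fn main() {
  assert_eq!(hostname_without_port("lemmy.ml:8536"), "lemmy.ml");
  assert_eq!(hostname_without_port("lemmy.ml"), "lemmy.ml");
}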
@ -238,3 +241,85 @@ where
.await??;
Ok(())
}
pub(crate) enum PostOrComment {
Comment(Comment),
Post(Post),
}
/// Tries to find a post or comment in the local database, without any network requests.
/// This is used to handle deletions and removals, because if we don't have the object, we can
/// simply ignore the activity.
pub(crate) async fn find_post_or_comment_by_id(
context: &LemmyContext,
apub_id: Url,
) -> Result<PostOrComment, LemmyError> {
let ap_id = apub_id.to_string();
let post = blocking(context.pool(), move |conn| {
Post::read_from_apub_id(conn, &ap_id)
})
.await?;
if let Ok(p) = post {
return Ok(PostOrComment::Post(p));
}
let ap_id = apub_id.to_string();
let comment = blocking(context.pool(), move |conn| {
Comment::read_from_apub_id(conn, &ap_id)
})
.await?;
if let Ok(c) = comment {
return Ok(PostOrComment::Comment(c));
}
Err(NotFound.into())
}
pub(crate) enum Object {
Comment(Comment),
Post(Post),
Community(Community),
User(User_),
PrivateMessage(PrivateMessage),
}
pub(crate) async fn find_object_by_id(
context: &LemmyContext,
apub_id: Url,
) -> Result<Object, LemmyError> {
if let Ok(pc) = find_post_or_comment_by_id(context, apub_id.to_owned()).await {
return Ok(match pc {
PostOrComment::Post(p) => Object::Post(p),
PostOrComment::Comment(c) => Object::Comment(c),
});
}
let ap_id = apub_id.to_string();
let user = blocking(context.pool(), move |conn| {
User_::read_from_apub_id(conn, &ap_id)
})
.await?;
if let Ok(u) = user {
return Ok(Object::User(u));
}
let ap_id = apub_id.to_string();
let community = blocking(context.pool(), move |conn| {
Community::read_from_apub_id(conn, &ap_id)
})
.await?;
if let Ok(c) = community {
return Ok(Object::Community(c));
}
let ap_id = apub_id.to_string();
let private_message = blocking(context.pool(), move |conn| {
PrivateMessage::read_from_apub_id(conn, &ap_id)
})
.await?;
if let Ok(pm) = private_message {
return Ok(Object::PrivateMessage(pm));
}
Err(NotFound.into())
}
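find_object_by_id extends the post-or-comment lookup to every apub-addressable table, trying each in turn and returning the first row whose apub ID matches. The fall-through order is the essential part; a toy version with the database reads replaced by Option values (all names here are stand-ins, not the crate's types):

// Stand-ins for the per-table reads; the i32 payloads are illustrative row ids.
enum FoundObject { Post(i32), Comment(i32), User(i32), Community(i32), PrivateMessage(i32) }

fn find_object(
  post: Option<i32>,
  comment: Option<i32>,
  user: Option<i32>,
  community: Option<i32>,
  private_message: Option<i32>,
) -> Option<FoundObject> {
  // First hit wins, mirroring the early returns above.
  post.map(FoundObject::Post)
    .or(comment.map(FoundObject::Comment))
    .or(user.map(FoundObject::User))
    .or(community.map(FoundObject::Community))
    .or(private_message.map(FoundObject::PrivateMessage))
}

fn main() {
  // Only the community lookup found a row, so a Community is returned.
  let found = find_object(None, None, None, Some(7), None);
  assert!(matches!(found, Some(FoundObject::Community(7))));
}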


@ -1,15 +1,12 @@
use crate::{
extensions::context::lemmy_context,
fetcher::{
get_or_fetch_and_insert_comment,
get_or_fetch_and_insert_post,
get_or_fetch_and_upsert_user,
},
fetcher::objects::{get_or_fetch_and_insert_comment, get_or_fetch_and_insert_post},
objects::{
check_object_domain,
check_object_for_community_or_site_ban,
create_tombstone,
get_object_from_apub,
get_or_fetch_and_upsert_user,
get_source_markdown_value,
set_content_and_source,
FromApub,
@ -23,13 +20,12 @@ use activitystreams::{
prelude::*,
};
use anyhow::{anyhow, Context};
use lemmy_db::{
use lemmy_db_queries::{Crud, DbPool};
use lemmy_db_schema::source::{
comment::{Comment, CommentForm},
community::Community,
post::Post,
user::User_,
Crud,
DbPool,
};
use lemmy_structs::blocking;
use lemmy_utils::{
@ -116,7 +112,7 @@ impl FromApub for Comment {
Comment::delete(conn, comment.id)
})
.await??;
return Err(anyhow!("Post is locked").into());
Err(anyhow!("Post is locked").into())
} else {
Ok(comment)
}

View file

@ -1,6 +1,6 @@
use crate::{
extensions::{context::lemmy_context, group_extensions::GroupExtension},
fetcher::get_or_fetch_and_upsert_user,
fetcher::user::get_or_fetch_and_upsert_user,
objects::{
check_object_domain,
create_tombstone,
@ -22,12 +22,12 @@ use activitystreams::{
};
use activitystreams_ext::Ext2;
use anyhow::Context;
use lemmy_db::{
community::{Community, CommunityForm},
community_view::CommunityModeratorView,
use lemmy_db_queries::DbPool;
use lemmy_db_schema::{
naive_now,
DbPool,
source::community::{Community, CommunityForm},
};
use lemmy_db_views_actor::community_moderator_view::CommunityModeratorView;
use lemmy_structs::blocking;
use lemmy_utils::{
location_info,
@ -51,7 +51,10 @@ impl ToApub for Community {
CommunityModeratorView::for_community(&conn, id)
})
.await??;
let moderators: Vec<String> = moderators.into_iter().map(|m| m.user_actor_id).collect();
let moderators: Vec<String> = moderators
.into_iter()
.map(|m| m.moderator.actor_id)
.collect();
let mut group = ApObject::new(Group::new());
group

View file

@ -1,6 +1,6 @@
use crate::{
check_is_apub_id_valid,
fetcher::{get_or_fetch_and_upsert_community, get_or_fetch_and_upsert_user},
fetcher::{community::get_or_fetch_and_upsert_community, user::get_or_fetch_and_upsert_user},
inbox::community_inbox::check_community_or_site_ban,
};
use activitystreams::{
@ -11,7 +11,7 @@ use activitystreams::{
};
use anyhow::{anyhow, Context};
use chrono::NaiveDateTime;
use lemmy_db::{ApubObject, Crud, DbPool};
use lemmy_db_queries::{ApubObject, Crud, DbPool};
use lemmy_structs::blocking;
use lemmy_utils::{location_info, settings::Settings, utils::convert_datetime, LemmyError};
use lemmy_websocket::LemmyContext;


@ -1,6 +1,6 @@
use crate::{
extensions::{context::lemmy_context, page_extension::PageExtension},
fetcher::{get_or_fetch_and_upsert_community, get_or_fetch_and_upsert_user},
fetcher::{community::get_or_fetch_and_upsert_community, user::get_or_fetch_and_upsert_user},
objects::{
check_object_domain,
check_object_for_community_or_site_ban,
@ -20,12 +20,11 @@ use activitystreams::{
};
use activitystreams_ext::Ext1;
use anyhow::Context;
use lemmy_db::{
use lemmy_db_queries::{Crud, DbPool};
use lemmy_db_schema::source::{
community::Community,
post::{Post, PostForm},
user::User_,
Crud,
DbPool,
};
use lemmy_structs::blocking;
use lemmy_utils::{


@ -1,7 +1,7 @@
use crate::{
check_is_apub_id_valid,
extensions::context::lemmy_context,
fetcher::get_or_fetch_and_upsert_user,
fetcher::user::get_or_fetch_and_upsert_user,
objects::{
check_object_domain,
create_tombstone,
@ -19,11 +19,10 @@ use activitystreams::{
prelude::*,
};
use anyhow::Context;
use lemmy_db::{
use lemmy_db_queries::{Crud, DbPool};
use lemmy_db_schema::source::{
private_message::{PrivateMessage, PrivateMessageForm},
user::User_,
Crud,
DbPool,
};
use lemmy_structs::blocking;
use lemmy_utils::{location_info, utils::convert_datetime, LemmyError};

View file

@ -18,11 +18,10 @@ use activitystreams::{
};
use activitystreams_ext::Ext1;
use anyhow::Context;
use lemmy_db::{
use lemmy_db_queries::{ApubObject, DbPool};
use lemmy_db_schema::{
naive_now,
user::{UserForm, User_},
ApubObject,
DbPool,
source::user::{UserForm, User_},
};
use lemmy_structs::blocking;
use lemmy_utils::{

View file

@ -1,235 +0,0 @@
use diesel::{dsl::*, pg::Pg, result::Error, *};
use serde::{Deserialize, Serialize};
use crate::{
comment::Comment,
limit_and_offset,
naive_now,
schema::comment_report,
MaybeOptional,
Reportable,
};
table! {
comment_report_view (id) {
id -> Int4,
creator_id -> Int4,
comment_id -> Int4,
original_comment_text -> Text,
reason -> Text,
resolved -> Bool,
resolver_id -> Nullable<Int4>,
published -> Timestamp,
updated -> Nullable<Timestamp>,
post_id -> Int4,
current_comment_text -> Text,
community_id -> Int4,
creator_actor_id -> Text,
creator_name -> Varchar,
creator_preferred_username -> Nullable<Varchar>,
creator_avatar -> Nullable<Text>,
creator_local -> Bool,
comment_creator_id -> Int4,
comment_creator_actor_id -> Text,
comment_creator_name -> Varchar,
comment_creator_preferred_username -> Nullable<Varchar>,
comment_creator_avatar -> Nullable<Text>,
comment_creator_local -> Bool,
resolver_actor_id -> Nullable<Text>,
resolver_name -> Nullable<Varchar>,
resolver_preferred_username -> Nullable<Varchar>,
resolver_avatar -> Nullable<Text>,
resolver_local -> Nullable<Bool>,
}
}
#[derive(Identifiable, Queryable, Associations, PartialEq, Debug, Serialize)]
#[belongs_to(Comment)]
#[table_name = "comment_report"]
pub struct CommentReport {
pub id: i32,
pub creator_id: i32,
pub comment_id: i32,
pub original_comment_text: String,
pub reason: String,
pub resolved: bool,
pub resolver_id: Option<i32>,
pub published: chrono::NaiveDateTime,
pub updated: Option<chrono::NaiveDateTime>,
}
#[derive(Insertable, AsChangeset, Clone)]
#[table_name = "comment_report"]
pub struct CommentReportForm {
pub creator_id: i32,
pub comment_id: i32,
pub original_comment_text: String,
pub reason: String,
}
impl Reportable<CommentReportForm> for CommentReport {
/// creates a comment report and returns it
///
/// * `conn` - the postgres connection
/// * `comment_report_form` - the filled CommentReportForm to insert
fn report(conn: &PgConnection, comment_report_form: &CommentReportForm) -> Result<Self, Error> {
use crate::schema::comment_report::dsl::*;
insert_into(comment_report)
.values(comment_report_form)
.get_result::<Self>(conn)
}
/// resolve a comment report
///
/// * `conn` - the postgres connection
/// * `report_id` - the id of the report to resolve
/// * `by_resolver_id` - the id of the user resolving the report
fn resolve(conn: &PgConnection, report_id: i32, by_resolver_id: i32) -> Result<usize, Error> {
use crate::schema::comment_report::dsl::*;
update(comment_report.find(report_id))
.set((
resolved.eq(true),
resolver_id.eq(by_resolver_id),
updated.eq(naive_now()),
))
.execute(conn)
}
/// unresolve a comment report
///
/// * `conn` - the postgres connection
/// * `report_id` - the id of the report to unresolve
/// * `by_resolver_id` - the id of the user unresolving the report
fn unresolve(conn: &PgConnection, report_id: i32, by_resolver_id: i32) -> Result<usize, Error> {
use crate::schema::comment_report::dsl::*;
update(comment_report.find(report_id))
.set((
resolved.eq(false),
resolver_id.eq(by_resolver_id),
updated.eq(naive_now()),
))
.execute(conn)
}
}
#[derive(Queryable, Identifiable, PartialEq, Debug, Serialize, Deserialize, Clone)]
#[table_name = "comment_report_view"]
pub struct CommentReportView {
pub id: i32,
pub creator_id: i32,
pub comment_id: i32,
pub original_comment_text: String,
pub reason: String,
pub resolved: bool,
pub resolver_id: Option<i32>,
pub published: chrono::NaiveDateTime,
pub updated: Option<chrono::NaiveDateTime>,
pub post_id: i32,
pub current_comment_text: String,
pub community_id: i32,
pub creator_actor_id: String,
pub creator_name: String,
pub creator_preferred_username: Option<String>,
pub creator_avatar: Option<String>,
pub creator_local: bool,
pub comment_creator_id: i32,
pub comment_creator_actor_id: String,
pub comment_creator_name: String,
pub comment_creator_preferred_username: Option<String>,
pub comment_creator_avatar: Option<String>,
pub comment_creator_local: bool,
pub resolver_actor_id: Option<String>,
pub resolver_name: Option<String>,
pub resolver_preferred_username: Option<String>,
pub resolver_avatar: Option<String>,
pub resolver_local: Option<bool>,
}
pub struct CommentReportQueryBuilder<'a> {
conn: &'a PgConnection,
query: comment_report_view::BoxedQuery<'a, Pg>,
for_community_ids: Option<Vec<i32>>,
page: Option<i64>,
limit: Option<i64>,
resolved: Option<bool>,
}
impl CommentReportView {
/// returns the CommentReportView for the provided report_id
///
/// * `report_id` - the report id to obtain
pub fn read(conn: &PgConnection, report_id: i32) -> Result<Self, Error> {
use super::comment_report::comment_report_view::dsl::*;
comment_report_view.find(report_id).first::<Self>(conn)
}
/// returns the current unresolved comment report count for the supplied community ids
///
/// * `community_ids` - a Vec<i32> of community_ids to get a count for
pub fn get_report_count(conn: &PgConnection, community_ids: &[i32]) -> Result<i64, Error> {
use super::comment_report::comment_report_view::dsl::*;
comment_report_view
.filter(resolved.eq(false).and(community_id.eq_any(community_ids)))
.select(count(id))
.first::<i64>(conn)
}
}
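// A minimal usage sketch of get_report_count: the unresolved report total a moderator
// still has to handle across their communities. `conn` and `moderated_community_ids`
// are assumed to be valid values supplied by the caller.
fn example_unresolved_comment_report_count(
  conn: &PgConnection,
  moderated_community_ids: &[i32],
) -> Result<i64, Error> {
  CommentReportView::get_report_count(conn, moderated_community_ids)
}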
impl<'a> CommentReportQueryBuilder<'a> {
pub fn create(conn: &'a PgConnection) -> Self {
use super::comment_report::comment_report_view::dsl::*;
let query = comment_report_view.into_boxed();
CommentReportQueryBuilder {
conn,
query,
for_community_ids: None,
page: None,
limit: None,
resolved: Some(false),
}
}
pub fn community_ids<T: MaybeOptional<Vec<i32>>>(mut self, community_ids: T) -> Self {
self.for_community_ids = community_ids.get_optional();
self
}
pub fn page<T: MaybeOptional<i64>>(mut self, page: T) -> Self {
self.page = page.get_optional();
self
}
pub fn limit<T: MaybeOptional<i64>>(mut self, limit: T) -> Self {
self.limit = limit.get_optional();
self
}
pub fn resolved<T: MaybeOptional<bool>>(mut self, resolved: T) -> Self {
self.resolved = resolved.get_optional();
self
}
pub fn list(self) -> Result<Vec<CommentReportView>, Error> {
use super::comment_report::comment_report_view::dsl::*;
let mut query = self.query;
if let Some(comm_ids) = self.for_community_ids {
query = query.filter(community_id.eq_any(comm_ids));
}
if let Some(resolved_flag) = self.resolved {
query = query.filter(resolved.eq(resolved_flag));
}
let (limit, offset) = limit_and_offset(self.page, self.limit);
query
.order_by(published.asc())
.limit(limit)
.offset(offset)
.load::<CommentReportView>(self.conn)
}
}
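// A minimal usage sketch of the builder above: the first page of unresolved comment
// reports for a set of communities. `conn` and `community_ids` are assumed to be
// valid values supplied by the caller.
fn example_list_comment_reports(
  conn: &PgConnection,
  community_ids: Vec<i32>,
) -> Result<Vec<CommentReportView>, Error> {
  CommentReportQueryBuilder::create(conn)
    .community_ids(community_ids)
    .page(1)
    .limit(20)
    .list()
}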

View file

@ -1,719 +0,0 @@
// TODO, remove the cross join here, just join to user directly
use crate::{fuzzy_search, limit_and_offset, ListingType, MaybeOptional, SortType};
use diesel::{dsl::*, pg::Pg, result::Error, *};
use serde::{Deserialize, Serialize};
// The faked schema since diesel doesn't do views
table! {
comment_view (id) {
id -> Int4,
creator_id -> Int4,
post_id -> Int4,
post_name -> Varchar,
parent_id -> Nullable<Int4>,
content -> Text,
removed -> Bool,
read -> Bool,
published -> Timestamp,
updated -> Nullable<Timestamp>,
deleted -> Bool,
ap_id -> Text,
local -> Bool,
community_id -> Int4,
community_actor_id -> Text,
community_local -> Bool,
community_name -> Varchar,
community_icon -> Nullable<Text>,
banned -> Bool,
banned_from_community -> Bool,
creator_actor_id -> Text,
creator_local -> Bool,
creator_name -> Varchar,
creator_preferred_username -> Nullable<Varchar>,
creator_published -> Timestamp,
creator_avatar -> Nullable<Text>,
score -> BigInt,
upvotes -> BigInt,
downvotes -> BigInt,
hot_rank -> Int4,
hot_rank_active -> Int4,
user_id -> Nullable<Int4>,
my_vote -> Nullable<Int4>,
subscribed -> Nullable<Bool>,
saved -> Nullable<Bool>,
}
}
table! {
comment_fast_view (id) {
id -> Int4,
creator_id -> Int4,
post_id -> Int4,
post_name -> Varchar,
parent_id -> Nullable<Int4>,
content -> Text,
removed -> Bool,
read -> Bool,
published -> Timestamp,
updated -> Nullable<Timestamp>,
deleted -> Bool,
ap_id -> Text,
local -> Bool,
community_id -> Int4,
community_actor_id -> Text,
community_local -> Bool,
community_name -> Varchar,
community_icon -> Nullable<Text>,
banned -> Bool,
banned_from_community -> Bool,
creator_actor_id -> Text,
creator_local -> Bool,
creator_name -> Varchar,
creator_preferred_username -> Nullable<Varchar>,
creator_published -> Timestamp,
creator_avatar -> Nullable<Text>,
score -> BigInt,
upvotes -> BigInt,
downvotes -> BigInt,
hot_rank -> Int4,
hot_rank_active -> Int4,
user_id -> Nullable<Int4>,
my_vote -> Nullable<Int4>,
subscribed -> Nullable<Bool>,
saved -> Nullable<Bool>,
}
}
#[derive(Queryable, Identifiable, PartialEq, Debug, Serialize, QueryableByName, Clone)]
#[table_name = "comment_fast_view"]
pub struct CommentView {
pub id: i32,
pub creator_id: i32,
pub post_id: i32,
pub post_name: String,
pub parent_id: Option<i32>,
pub content: String,
pub removed: bool,
pub read: bool,
pub published: chrono::NaiveDateTime,
pub updated: Option<chrono::NaiveDateTime>,
pub deleted: bool,
pub ap_id: String,
pub local: bool,
pub community_id: i32,
pub community_actor_id: String,
pub community_local: bool,
pub community_name: String,
pub community_icon: Option<String>,
pub banned: bool,
pub banned_from_community: bool,
pub creator_actor_id: String,
pub creator_local: bool,
pub creator_name: String,
pub creator_preferred_username: Option<String>,
pub creator_published: chrono::NaiveDateTime,
pub creator_avatar: Option<String>,
pub score: i64,
pub upvotes: i64,
pub downvotes: i64,
pub hot_rank: i32,
pub hot_rank_active: i32,
pub user_id: Option<i32>,
pub my_vote: Option<i32>,
pub subscribed: Option<bool>,
pub saved: Option<bool>,
}
pub struct CommentQueryBuilder<'a> {
conn: &'a PgConnection,
query: super::comment_view::comment_fast_view::BoxedQuery<'a, Pg>,
listing_type: ListingType,
sort: &'a SortType,
for_community_id: Option<i32>,
for_community_name: Option<String>,
for_post_id: Option<i32>,
for_creator_id: Option<i32>,
search_term: Option<String>,
my_user_id: Option<i32>,
saved_only: bool,
page: Option<i64>,
limit: Option<i64>,
}
impl<'a> CommentQueryBuilder<'a> {
pub fn create(conn: &'a PgConnection) -> Self {
use super::comment_view::comment_fast_view::dsl::*;
let query = comment_fast_view.into_boxed();
CommentQueryBuilder {
conn,
query,
listing_type: ListingType::All,
sort: &SortType::New,
for_community_id: None,
for_community_name: None,
for_post_id: None,
for_creator_id: None,
search_term: None,
my_user_id: None,
saved_only: false,
page: None,
limit: None,
}
}
pub fn listing_type(mut self, listing_type: ListingType) -> Self {
self.listing_type = listing_type;
self
}
pub fn sort(mut self, sort: &'a SortType) -> Self {
self.sort = sort;
self
}
pub fn for_post_id<T: MaybeOptional<i32>>(mut self, for_post_id: T) -> Self {
self.for_post_id = for_post_id.get_optional();
self
}
pub fn for_creator_id<T: MaybeOptional<i32>>(mut self, for_creator_id: T) -> Self {
self.for_creator_id = for_creator_id.get_optional();
self
}
pub fn for_community_id<T: MaybeOptional<i32>>(mut self, for_community_id: T) -> Self {
self.for_community_id = for_community_id.get_optional();
self
}
pub fn for_community_name<T: MaybeOptional<String>>(mut self, for_community_name: T) -> Self {
self.for_community_name = for_community_name.get_optional();
self
}
pub fn search_term<T: MaybeOptional<String>>(mut self, search_term: T) -> Self {
self.search_term = search_term.get_optional();
self
}
pub fn my_user_id<T: MaybeOptional<i32>>(mut self, my_user_id: T) -> Self {
self.my_user_id = my_user_id.get_optional();
self
}
pub fn saved_only(mut self, saved_only: bool) -> Self {
self.saved_only = saved_only;
self
}
pub fn page<T: MaybeOptional<i64>>(mut self, page: T) -> Self {
self.page = page.get_optional();
self
}
pub fn limit<T: MaybeOptional<i64>>(mut self, limit: T) -> Self {
self.limit = limit.get_optional();
self
}
pub fn list(self) -> Result<Vec<CommentView>, Error> {
use super::comment_view::comment_fast_view::dsl::*;
let mut query = self.query;
// The view lets you pass a null user_id, if you're not logged in
if let Some(my_user_id) = self.my_user_id {
query = query.filter(user_id.eq(my_user_id));
} else {
query = query.filter(user_id.is_null());
}
if let Some(for_creator_id) = self.for_creator_id {
query = query.filter(creator_id.eq(for_creator_id));
};
if let Some(for_community_id) = self.for_community_id {
query = query.filter(community_id.eq(for_community_id));
}
if let Some(for_community_name) = self.for_community_name {
query = query
.filter(community_name.eq(for_community_name))
.filter(local.eq(true));
}
if let Some(for_post_id) = self.for_post_id {
query = query.filter(post_id.eq(for_post_id));
};
if let Some(search_term) = self.search_term {
query = query.filter(content.ilike(fuzzy_search(&search_term)));
};
query = match self.listing_type {
ListingType::Subscribed => query.filter(subscribed.eq(true)),
ListingType::Local => query.filter(community_local.eq(true)),
_ => query,
};
if self.saved_only {
query = query.filter(saved.eq(true));
}
query = match self.sort {
SortType::Hot => query
.order_by(hot_rank.desc())
.then_order_by(published.desc()),
SortType::Active => query
.order_by(hot_rank_active.desc())
.then_order_by(published.desc()),
SortType::New => query.order_by(published.desc()),
SortType::TopAll => query.order_by(score.desc()),
SortType::TopYear => query
.filter(published.gt(now - 1.years()))
.order_by(score.desc()),
SortType::TopMonth => query
.filter(published.gt(now - 1.months()))
.order_by(score.desc()),
SortType::TopWeek => query
.filter(published.gt(now - 1.weeks()))
.order_by(score.desc()),
SortType::TopDay => query
.filter(published.gt(now - 1.days()))
.order_by(score.desc()),
// _ => query.order_by(published.desc()),
};
let (limit, offset) = limit_and_offset(self.page, self.limit);
// Note: deleted and removed comments are filtered out on the front end
query
.limit(limit)
.offset(offset)
.load::<CommentView>(self.conn)
}
}
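// A minimal usage sketch of CommentQueryBuilder: the newest comments on a post as
// seen by a logged-in user. `conn`, `post_id` and `user_id` are assumed to be valid
// values supplied by the caller.
fn example_list_post_comments(
  conn: &PgConnection,
  post_id: i32,
  user_id: i32,
) -> Result<Vec<CommentView>, Error> {
  CommentQueryBuilder::create(conn)
    .for_post_id(post_id)
    .my_user_id(user_id)
    .sort(&SortType::New)
    .page(1)
    .limit(50)
    .list()
}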
impl CommentView {
pub fn read(
conn: &PgConnection,
from_comment_id: i32,
my_user_id: Option<i32>,
) -> Result<Self, Error> {
use super::comment_view::comment_fast_view::dsl::*;
let mut query = comment_fast_view.into_boxed();
// The view lets you pass a null user_id, if you're not logged in
if let Some(my_user_id) = my_user_id {
query = query.filter(user_id.eq(my_user_id));
} else {
query = query.filter(user_id.is_null());
}
query = query
.filter(id.eq(from_comment_id))
.order_by(published.desc());
query.first::<Self>(conn)
}
}
// The faked schema since diesel doesn't do views
table! {
reply_fast_view (id) {
id -> Int4,
creator_id -> Int4,
post_id -> Int4,
post_name -> Varchar,
parent_id -> Nullable<Int4>,
content -> Text,
removed -> Bool,
read -> Bool,
published -> Timestamp,
updated -> Nullable<Timestamp>,
deleted -> Bool,
ap_id -> Text,
local -> Bool,
community_id -> Int4,
community_actor_id -> Text,
community_local -> Bool,
community_name -> Varchar,
community_icon -> Nullable<Varchar>,
banned -> Bool,
banned_from_community -> Bool,
creator_actor_id -> Text,
creator_local -> Bool,
creator_name -> Varchar,
creator_preferred_username -> Nullable<Varchar>,
creator_avatar -> Nullable<Text>,
creator_published -> Timestamp,
score -> BigInt,
upvotes -> BigInt,
downvotes -> BigInt,
hot_rank -> Int4,
hot_rank_active -> Int4,
user_id -> Nullable<Int4>,
my_vote -> Nullable<Int4>,
subscribed -> Nullable<Bool>,
saved -> Nullable<Bool>,
recipient_id -> Int4,
}
}
#[derive(
Queryable, Identifiable, PartialEq, Debug, Serialize, Deserialize, QueryableByName, Clone,
)]
#[table_name = "reply_fast_view"]
pub struct ReplyView {
pub id: i32,
pub creator_id: i32,
pub post_id: i32,
pub post_name: String,
pub parent_id: Option<i32>,
pub content: String,
pub removed: bool,
pub read: bool,
pub published: chrono::NaiveDateTime,
pub updated: Option<chrono::NaiveDateTime>,
pub deleted: bool,
pub ap_id: String,
pub local: bool,
pub community_id: i32,
pub community_actor_id: String,
pub community_local: bool,
pub community_name: String,
pub community_icon: Option<String>,
pub banned: bool,
pub banned_from_community: bool,
pub creator_actor_id: String,
pub creator_local: bool,
pub creator_name: String,
pub creator_preferred_username: Option<String>,
pub creator_avatar: Option<String>,
pub creator_published: chrono::NaiveDateTime,
pub score: i64,
pub upvotes: i64,
pub downvotes: i64,
pub hot_rank: i32,
pub hot_rank_active: i32,
pub user_id: Option<i32>,
pub my_vote: Option<i32>,
pub subscribed: Option<bool>,
pub saved: Option<bool>,
pub recipient_id: i32,
}
pub struct ReplyQueryBuilder<'a> {
conn: &'a PgConnection,
query: super::comment_view::reply_fast_view::BoxedQuery<'a, Pg>,
for_user_id: i32,
sort: &'a SortType,
unread_only: bool,
page: Option<i64>,
limit: Option<i64>,
}
impl<'a> ReplyQueryBuilder<'a> {
pub fn create(conn: &'a PgConnection, for_user_id: i32) -> Self {
use super::comment_view::reply_fast_view::dsl::*;
let query = reply_fast_view.into_boxed();
ReplyQueryBuilder {
conn,
query,
for_user_id,
sort: &SortType::New,
unread_only: false,
page: None,
limit: None,
}
}
pub fn sort(mut self, sort: &'a SortType) -> Self {
self.sort = sort;
self
}
pub fn unread_only(mut self, unread_only: bool) -> Self {
self.unread_only = unread_only;
self
}
pub fn page<T: MaybeOptional<i64>>(mut self, page: T) -> Self {
self.page = page.get_optional();
self
}
pub fn limit<T: MaybeOptional<i64>>(mut self, limit: T) -> Self {
self.limit = limit.get_optional();
self
}
pub fn list(self) -> Result<Vec<ReplyView>, Error> {
use super::comment_view::reply_fast_view::dsl::*;
let mut query = self.query;
query = query
.filter(user_id.eq(self.for_user_id))
.filter(recipient_id.eq(self.for_user_id))
.filter(deleted.eq(false))
.filter(removed.eq(false));
if self.unread_only {
query = query.filter(read.eq(false));
}
query = match self.sort {
// SortType::Hot => query.order_by(hot_rank.desc()), // TODO why is this commented
SortType::New => query.order_by(published.desc()),
SortType::TopAll => query.order_by(score.desc()),
SortType::TopYear => query
.filter(published.gt(now - 1.years()))
.order_by(score.desc()),
SortType::TopMonth => query
.filter(published.gt(now - 1.months()))
.order_by(score.desc()),
SortType::TopWeek => query
.filter(published.gt(now - 1.weeks()))
.order_by(score.desc()),
SortType::TopDay => query
.filter(published.gt(now - 1.days()))
.order_by(score.desc()),
_ => query.order_by(published.desc()),
};
let (limit, offset) = limit_and_offset(self.page, self.limit);
query
.limit(limit)
.offset(offset)
.load::<ReplyView>(self.conn)
}
}
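// A minimal usage sketch of ReplyQueryBuilder: a user's unread replies, newest first.
// `conn` and `user_id` are assumed to be valid values supplied by the caller.
fn example_list_unread_replies(
  conn: &PgConnection,
  user_id: i32,
) -> Result<Vec<ReplyView>, Error> {
  ReplyQueryBuilder::create(conn, user_id)
    .unread_only(true)
    .sort(&SortType::New)
    .list()
}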
#[cfg(test)]
mod tests {
use crate::{
comment::*,
comment_view::*,
community::*,
post::*,
tests::establish_unpooled_connection,
user::*,
Crud,
Likeable,
*,
};
#[test]
fn test_crud() {
let conn = establish_unpooled_connection();
let new_user = UserForm {
name: "timmy".into(),
preferred_username: None,
password_encrypted: "nope".into(),
email: None,
matrix_user_id: None,
avatar: None,
banner: None,
admin: false,
banned: Some(false),
published: None,
updated: None,
show_nsfw: false,
theme: "browser".into(),
default_sort_type: SortType::Hot as i16,
default_listing_type: ListingType::Subscribed as i16,
lang: "browser".into(),
show_avatars: true,
send_notifications_to_email: false,
actor_id: None,
bio: None,
local: true,
private_key: None,
public_key: None,
last_refreshed_at: None,
};
let inserted_user = User_::create(&conn, &new_user).unwrap();
let new_community = CommunityForm {
name: "test community 5".to_string(),
title: "nada".to_owned(),
description: None,
category_id: 1,
creator_id: inserted_user.id,
removed: None,
deleted: None,
updated: None,
nsfw: false,
actor_id: None,
local: true,
private_key: None,
public_key: None,
last_refreshed_at: None,
published: None,
icon: None,
banner: None,
};
let inserted_community = Community::create(&conn, &new_community).unwrap();
let new_post = PostForm {
name: "A test post 2".into(),
creator_id: inserted_user.id,
url: None,
body: None,
community_id: inserted_community.id,
removed: None,
deleted: None,
locked: None,
stickied: None,
updated: None,
nsfw: false,
embed_title: None,
embed_description: None,
embed_html: None,
thumbnail_url: None,
ap_id: None,
local: true,
published: None,
};
let inserted_post = Post::create(&conn, &new_post).unwrap();
let comment_form = CommentForm {
content: "A test comment 32".into(),
creator_id: inserted_user.id,
post_id: inserted_post.id,
parent_id: None,
removed: None,
deleted: None,
read: None,
published: None,
updated: None,
ap_id: None,
local: true,
};
let inserted_comment = Comment::create(&conn, &comment_form).unwrap();
let comment_like_form = CommentLikeForm {
comment_id: inserted_comment.id,
post_id: inserted_post.id,
user_id: inserted_user.id,
score: 1,
};
let _inserted_comment_like = CommentLike::like(&conn, &comment_like_form).unwrap();
let expected_comment_view_no_user = CommentView {
id: inserted_comment.id,
content: "A test comment 32".into(),
creator_id: inserted_user.id,
post_id: inserted_post.id,
post_name: inserted_post.name.to_owned(),
community_id: inserted_community.id,
community_name: inserted_community.name.to_owned(),
community_icon: None,
parent_id: None,
removed: false,
deleted: false,
read: false,
banned: false,
banned_from_community: false,
published: inserted_comment.published,
updated: None,
creator_name: inserted_user.name.to_owned(),
creator_preferred_username: None,
creator_published: inserted_user.published,
creator_avatar: None,
score: 1,
downvotes: 0,
hot_rank: 0,
hot_rank_active: 0,
upvotes: 1,
user_id: None,
my_vote: None,
subscribed: None,
saved: None,
ap_id: inserted_comment.ap_id.to_owned(),
local: true,
community_actor_id: inserted_community.actor_id.to_owned(),
community_local: true,
creator_actor_id: inserted_user.actor_id.to_owned(),
creator_local: true,
};
let expected_comment_view_with_user = CommentView {
id: inserted_comment.id,
content: "A test comment 32".into(),
creator_id: inserted_user.id,
post_id: inserted_post.id,
post_name: inserted_post.name.to_owned(),
community_id: inserted_community.id,
community_name: inserted_community.name.to_owned(),
community_icon: None,
parent_id: None,
removed: false,
deleted: false,
read: false,
banned: false,
banned_from_community: false,
published: inserted_comment.published,
updated: None,
creator_name: inserted_user.name.to_owned(),
creator_preferred_username: None,
creator_published: inserted_user.published,
creator_avatar: None,
score: 1,
downvotes: 0,
hot_rank: 0,
hot_rank_active: 0,
upvotes: 1,
user_id: Some(inserted_user.id),
my_vote: Some(1),
subscribed: Some(false),
saved: Some(false),
ap_id: inserted_comment.ap_id.to_owned(),
local: true,
community_actor_id: inserted_community.actor_id.to_owned(),
community_local: true,
creator_actor_id: inserted_user.actor_id.to_owned(),
creator_local: true,
};
let mut read_comment_views_no_user = CommentQueryBuilder::create(&conn)
.for_post_id(inserted_post.id)
.list()
.unwrap();
read_comment_views_no_user[0].hot_rank = 0;
read_comment_views_no_user[0].hot_rank_active = 0;
let mut read_comment_views_with_user = CommentQueryBuilder::create(&conn)
.for_post_id(inserted_post.id)
.my_user_id(inserted_user.id)
.list()
.unwrap();
read_comment_views_with_user[0].hot_rank = 0;
read_comment_views_with_user[0].hot_rank_active = 0;
let like_removed = CommentLike::remove(&conn, inserted_user.id, inserted_comment.id).unwrap();
let num_deleted = Comment::delete(&conn, inserted_comment.id).unwrap();
Post::delete(&conn, inserted_post.id).unwrap();
Community::delete(&conn, inserted_community.id).unwrap();
User_::delete(&conn, inserted_user.id).unwrap();
assert_eq!(expected_comment_view_no_user, read_comment_views_no_user[0]);
assert_eq!(
expected_comment_view_with_user,
read_comment_views_with_user[0]
);
assert_eq!(1, num_deleted);
assert_eq!(1, like_removed);
}
}

View file

@ -1,398 +0,0 @@
use super::community_view::community_fast_view::BoxedQuery;
use crate::{fuzzy_search, limit_and_offset, MaybeOptional, SortType};
use diesel::{pg::Pg, result::Error, *};
use serde::{Deserialize, Serialize};
table! {
community_view (id) {
id -> Int4,
name -> Varchar,
title -> Varchar,
icon -> Nullable<Text>,
banner -> Nullable<Text>,
description -> Nullable<Text>,
category_id -> Int4,
creator_id -> Int4,
removed -> Bool,
published -> Timestamp,
updated -> Nullable<Timestamp>,
deleted -> Bool,
nsfw -> Bool,
actor_id -> Text,
local -> Bool,
last_refreshed_at -> Timestamp,
creator_actor_id -> Text,
creator_local -> Bool,
creator_name -> Varchar,
creator_preferred_username -> Nullable<Varchar>,
creator_avatar -> Nullable<Text>,
category_name -> Varchar,
number_of_subscribers -> BigInt,
number_of_posts -> BigInt,
number_of_comments -> BigInt,
hot_rank -> Int4,
user_id -> Nullable<Int4>,
subscribed -> Nullable<Bool>,
}
}
table! {
community_fast_view (id) {
id -> Int4,
name -> Varchar,
title -> Varchar,
icon -> Nullable<Text>,
banner -> Nullable<Text>,
description -> Nullable<Text>,
category_id -> Int4,
creator_id -> Int4,
removed -> Bool,
published -> Timestamp,
updated -> Nullable<Timestamp>,
deleted -> Bool,
nsfw -> Bool,
actor_id -> Text,
local -> Bool,
last_refreshed_at -> Timestamp,
creator_actor_id -> Text,
creator_local -> Bool,
creator_name -> Varchar,
creator_preferred_username -> Nullable<Varchar>,
creator_avatar -> Nullable<Text>,
category_name -> Varchar,
number_of_subscribers -> BigInt,
number_of_posts -> BigInt,
number_of_comments -> BigInt,
hot_rank -> Int4,
user_id -> Nullable<Int4>,
subscribed -> Nullable<Bool>,
}
}
table! {
community_moderator_view (id) {
id -> Int4,
community_id -> Int4,
user_id -> Int4,
published -> Timestamp,
user_actor_id -> Text,
user_local -> Bool,
user_name -> Varchar,
user_preferred_username -> Nullable<Varchar>,
avatar -> Nullable<Text>,
community_actor_id -> Text,
community_local -> Bool,
community_name -> Varchar,
community_icon -> Nullable<Text>,
}
}
table! {
community_follower_view (id) {
id -> Int4,
community_id -> Int4,
user_id -> Int4,
published -> Timestamp,
user_actor_id -> Text,
user_local -> Bool,
user_name -> Varchar,
user_preferred_username -> Nullable<Varchar>,
avatar -> Nullable<Text>,
community_actor_id -> Text,
community_local -> Bool,
community_name -> Varchar,
community_icon -> Nullable<Text>,
}
}
table! {
community_user_ban_view (id) {
id -> Int4,
community_id -> Int4,
user_id -> Int4,
published -> Timestamp,
user_actor_id -> Text,
user_local -> Bool,
user_name -> Varchar,
user_preferred_username -> Nullable<Varchar>,
avatar -> Nullable<Text>,
community_actor_id -> Text,
community_local -> Bool,
community_name -> Varchar,
community_icon -> Nullable<Text>,
}
}
#[derive(Queryable, Identifiable, PartialEq, Debug, Serialize, QueryableByName, Clone)]
#[table_name = "community_fast_view"]
pub struct CommunityView {
pub id: i32,
pub name: String,
pub title: String,
pub icon: Option<String>,
pub banner: Option<String>,
pub description: Option<String>,
pub category_id: i32,
pub creator_id: i32,
pub removed: bool,
pub published: chrono::NaiveDateTime,
pub updated: Option<chrono::NaiveDateTime>,
pub deleted: bool,
pub nsfw: bool,
pub actor_id: String,
pub local: bool,
pub last_refreshed_at: chrono::NaiveDateTime,
pub creator_actor_id: String,
pub creator_local: bool,
pub creator_name: String,
pub creator_preferred_username: Option<String>,
pub creator_avatar: Option<String>,
pub category_name: String,
pub number_of_subscribers: i64,
pub number_of_posts: i64,
pub number_of_comments: i64,
pub hot_rank: i32,
pub user_id: Option<i32>,
pub subscribed: Option<bool>,
}
pub struct CommunityQueryBuilder<'a> {
conn: &'a PgConnection,
query: BoxedQuery<'a, Pg>,
sort: &'a SortType,
from_user_id: Option<i32>,
show_nsfw: bool,
search_term: Option<String>,
page: Option<i64>,
limit: Option<i64>,
}
impl<'a> CommunityQueryBuilder<'a> {
pub fn create(conn: &'a PgConnection) -> Self {
use super::community_view::community_fast_view::dsl::*;
let query = community_fast_view.into_boxed();
CommunityQueryBuilder {
conn,
query,
sort: &SortType::Hot,
from_user_id: None,
show_nsfw: true,
search_term: None,
page: None,
limit: None,
}
}
pub fn sort(mut self, sort: &'a SortType) -> Self {
self.sort = sort;
self
}
pub fn for_user<T: MaybeOptional<i32>>(mut self, from_user_id: T) -> Self {
self.from_user_id = from_user_id.get_optional();
self
}
pub fn show_nsfw(mut self, show_nsfw: bool) -> Self {
self.show_nsfw = show_nsfw;
self
}
pub fn search_term<T: MaybeOptional<String>>(mut self, search_term: T) -> Self {
self.search_term = search_term.get_optional();
self
}
pub fn page<T: MaybeOptional<i64>>(mut self, page: T) -> Self {
self.page = page.get_optional();
self
}
pub fn limit<T: MaybeOptional<i64>>(mut self, limit: T) -> Self {
self.limit = limit.get_optional();
self
}
pub fn list(self) -> Result<Vec<CommunityView>, Error> {
use super::community_view::community_fast_view::dsl::*;
let mut query = self.query;
if let Some(search_term) = self.search_term {
let searcher = fuzzy_search(&search_term);
query = query
.filter(name.ilike(searcher.to_owned()))
.or_filter(title.ilike(searcher.to_owned()))
.or_filter(description.ilike(searcher));
};
// The view lets you pass a null user_id, if you're not logged in
match self.sort {
SortType::New => query = query.order_by(published.desc()).filter(user_id.is_null()),
SortType::TopAll => match self.from_user_id {
Some(from_user_id) => {
query = query
.filter(user_id.eq(from_user_id))
.order_by((subscribed.asc(), number_of_subscribers.desc()))
}
None => {
query = query
.order_by(number_of_subscribers.desc())
.filter(user_id.is_null())
}
},
// Covers all other sorts, including hot
_ => {
query = query
.order_by(hot_rank.desc())
.then_order_by(number_of_subscribers.desc())
.filter(user_id.is_null())
}
};
if !self.show_nsfw {
query = query.filter(nsfw.eq(false));
};
let (limit, offset) = limit_and_offset(self.page, self.limit);
query
.limit(limit)
.offset(offset)
.filter(removed.eq(false))
.filter(deleted.eq(false))
.load::<CommunityView>(self.conn)
}
}
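// A minimal usage sketch of CommunityQueryBuilder: a fuzzy community search that
// hides NSFW communities. `conn` and `search` are assumed to be valid values
// supplied by the caller.
fn example_search_communities(
  conn: &PgConnection,
  search: String,
) -> Result<Vec<CommunityView>, Error> {
  CommunityQueryBuilder::create(conn)
    .search_term(search)
    .show_nsfw(false)
    .sort(&SortType::Hot)
    .page(1)
    .limit(25)
    .list()
}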
impl CommunityView {
pub fn read(
conn: &PgConnection,
from_community_id: i32,
from_user_id: Option<i32>,
) -> Result<Self, Error> {
use super::community_view::community_fast_view::dsl::*;
let mut query = community_fast_view.into_boxed();
query = query.filter(id.eq(from_community_id));
// The view lets you pass a null user_id, if you're not logged in
if let Some(from_user_id) = from_user_id {
query = query.filter(user_id.eq(from_user_id));
} else {
query = query.filter(user_id.is_null());
};
query.first::<Self>(conn)
}
}
#[derive(
Queryable, Identifiable, PartialEq, Debug, Serialize, Deserialize, QueryableByName, Clone,
)]
#[table_name = "community_moderator_view"]
pub struct CommunityModeratorView {
pub id: i32,
pub community_id: i32,
pub user_id: i32,
pub published: chrono::NaiveDateTime,
pub user_actor_id: String,
pub user_local: bool,
pub user_name: String,
pub user_preferred_username: Option<String>,
pub avatar: Option<String>,
pub community_actor_id: String,
pub community_local: bool,
pub community_name: String,
pub community_icon: Option<String>,
}
impl CommunityModeratorView {
pub fn for_community(conn: &PgConnection, for_community_id: i32) -> Result<Vec<Self>, Error> {
use super::community_view::community_moderator_view::dsl::*;
community_moderator_view
.filter(community_id.eq(for_community_id))
.order_by(published)
.load::<Self>(conn)
}
pub fn for_user(conn: &PgConnection, for_user_id: i32) -> Result<Vec<Self>, Error> {
use super::community_view::community_moderator_view::dsl::*;
community_moderator_view
.filter(user_id.eq(for_user_id))
.order_by(published)
.load::<Self>(conn)
}
}
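// A minimal usage sketch: the moderator list of a community; the follower and ban
// views below expose the same shape. `conn` and `community_id` are assumed to be
// valid values supplied by the caller.
fn example_list_moderators(
  conn: &PgConnection,
  community_id: i32,
) -> Result<Vec<CommunityModeratorView>, Error> {
  CommunityModeratorView::for_community(conn, community_id)
}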
#[derive(
Queryable, Identifiable, PartialEq, Debug, Serialize, Deserialize, QueryableByName, Clone,
)]
#[table_name = "community_follower_view"]
pub struct CommunityFollowerView {
pub id: i32,
pub community_id: i32,
pub user_id: i32,
pub published: chrono::NaiveDateTime,
pub user_actor_id: String,
pub user_local: bool,
pub user_name: String,
pub user_preferred_username: Option<String>,
pub avatar: Option<String>,
pub community_actor_id: String,
pub community_local: bool,
pub community_name: String,
pub community_icon: Option<String>,
}
impl CommunityFollowerView {
pub fn for_community(conn: &PgConnection, from_community_id: i32) -> Result<Vec<Self>, Error> {
use super::community_view::community_follower_view::dsl::*;
community_follower_view
.filter(community_id.eq(from_community_id))
.load::<Self>(conn)
}
pub fn for_user(conn: &PgConnection, from_user_id: i32) -> Result<Vec<Self>, Error> {
use super::community_view::community_follower_view::dsl::*;
community_follower_view
.filter(user_id.eq(from_user_id))
.load::<Self>(conn)
}
}
#[derive(
Queryable, Identifiable, PartialEq, Debug, Serialize, Deserialize, QueryableByName, Clone,
)]
#[table_name = "community_user_ban_view"]
pub struct CommunityUserBanView {
pub id: i32,
pub community_id: i32,
pub user_id: i32,
pub published: chrono::NaiveDateTime,
pub user_actor_id: String,
pub user_local: bool,
pub user_name: String,
pub user_preferred_username: Option<String>,
pub avatar: Option<String>,
pub community_actor_id: String,
pub community_local: bool,
pub community_name: String,
pub community_icon: Option<String>,
}
impl CommunityUserBanView {
pub fn get(
conn: &PgConnection,
from_user_id: i32,
from_community_id: i32,
) -> Result<Self, Error> {
use super::community_view::community_user_ban_view::dsl::*;
community_user_ban_view
.filter(user_id.eq(from_user_id))
.filter(community_id.eq(from_community_id))
.first::<Self>(conn)
}
}

View file

@ -1,513 +0,0 @@
use crate::limit_and_offset;
use diesel::{result::Error, *};
use serde::Serialize;
table! {
mod_remove_post_view (id) {
id -> Int4,
mod_user_id -> Int4,
post_id -> Int4,
reason -> Nullable<Text>,
removed -> Nullable<Bool>,
when_ -> Timestamp,
mod_user_name -> Varchar,
post_name -> Varchar,
community_id -> Int4,
community_name -> Varchar,
}
}
#[derive(Queryable, Identifiable, PartialEq, Debug, Serialize, QueryableByName, Clone)]
#[table_name = "mod_remove_post_view"]
pub struct ModRemovePostView {
pub id: i32,
pub mod_user_id: i32,
pub post_id: i32,
pub reason: Option<String>,
pub removed: Option<bool>,
pub when_: chrono::NaiveDateTime,
pub mod_user_name: String,
pub post_name: String,
pub community_id: i32,
pub community_name: String,
}
impl ModRemovePostView {
pub fn list(
conn: &PgConnection,
from_community_id: Option<i32>,
from_mod_user_id: Option<i32>,
page: Option<i64>,
limit: Option<i64>,
) -> Result<Vec<Self>, Error> {
use super::moderator_views::mod_remove_post_view::dsl::*;
let mut query = mod_remove_post_view.into_boxed();
let (limit, offset) = limit_and_offset(page, limit);
if let Some(from_community_id) = from_community_id {
query = query.filter(community_id.eq(from_community_id));
};
if let Some(from_mod_user_id) = from_mod_user_id {
query = query.filter(mod_user_id.eq(from_mod_user_id));
};
query
.limit(limit)
.offset(offset)
.order_by(when_.desc())
.load::<Self>(conn)
}
}
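// A minimal usage sketch: the first page of post removals for one community; the
// other modlog views below expose the same `list` shape. `conn` and `community_id`
// are assumed to be valid values supplied by the caller.
fn example_modlog_removed_posts(
  conn: &PgConnection,
  community_id: i32,
) -> Result<Vec<ModRemovePostView>, Error> {
  ModRemovePostView::list(conn, Some(community_id), None, Some(1), Some(20))
}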
table! {
mod_lock_post_view (id) {
id -> Int4,
mod_user_id -> Int4,
post_id -> Int4,
locked -> Nullable<Bool>,
when_ -> Timestamp,
mod_user_name -> Varchar,
post_name -> Varchar,
community_id -> Int4,
community_name -> Varchar,
}
}
#[derive(Queryable, Identifiable, PartialEq, Debug, Serialize, QueryableByName, Clone)]
#[table_name = "mod_lock_post_view"]
pub struct ModLockPostView {
pub id: i32,
pub mod_user_id: i32,
pub post_id: i32,
pub locked: Option<bool>,
pub when_: chrono::NaiveDateTime,
pub mod_user_name: String,
pub post_name: String,
pub community_id: i32,
pub community_name: String,
}
impl ModLockPostView {
pub fn list(
conn: &PgConnection,
from_community_id: Option<i32>,
from_mod_user_id: Option<i32>,
page: Option<i64>,
limit: Option<i64>,
) -> Result<Vec<Self>, Error> {
use super::moderator_views::mod_lock_post_view::dsl::*;
let mut query = mod_lock_post_view.into_boxed();
let (limit, offset) = limit_and_offset(page, limit);
if let Some(from_community_id) = from_community_id {
query = query.filter(community_id.eq(from_community_id));
};
if let Some(from_mod_user_id) = from_mod_user_id {
query = query.filter(mod_user_id.eq(from_mod_user_id));
};
query
.limit(limit)
.offset(offset)
.order_by(when_.desc())
.load::<Self>(conn)
}
}
table! {
mod_sticky_post_view (id) {
id -> Int4,
mod_user_id -> Int4,
post_id -> Int4,
stickied -> Nullable<Bool>,
when_ -> Timestamp,
mod_user_name -> Varchar,
post_name -> Varchar,
community_id -> Int4,
community_name -> Varchar,
}
}
#[derive(Queryable, Identifiable, PartialEq, Debug, Serialize, QueryableByName, Clone)]
#[table_name = "mod_sticky_post_view"]
pub struct ModStickyPostView {
pub id: i32,
pub mod_user_id: i32,
pub post_id: i32,
pub stickied: Option<bool>,
pub when_: chrono::NaiveDateTime,
pub mod_user_name: String,
pub post_name: String,
pub community_id: i32,
pub community_name: String,
}
impl ModStickyPostView {
pub fn list(
conn: &PgConnection,
from_community_id: Option<i32>,
from_mod_user_id: Option<i32>,
page: Option<i64>,
limit: Option<i64>,
) -> Result<Vec<Self>, Error> {
use super::moderator_views::mod_sticky_post_view::dsl::*;
let mut query = mod_sticky_post_view.into_boxed();
let (limit, offset) = limit_and_offset(page, limit);
if let Some(from_community_id) = from_community_id {
query = query.filter(community_id.eq(from_community_id));
};
if let Some(from_mod_user_id) = from_mod_user_id {
query = query.filter(mod_user_id.eq(from_mod_user_id));
};
query
.limit(limit)
.offset(offset)
.order_by(when_.desc())
.load::<Self>(conn)
}
}
table! {
mod_remove_comment_view (id) {
id -> Int4,
mod_user_id -> Int4,
comment_id -> Int4,
reason -> Nullable<Text>,
removed -> Nullable<Bool>,
when_ -> Timestamp,
mod_user_name -> Varchar,
comment_user_id -> Int4,
comment_user_name -> Varchar,
comment_content -> Text,
post_id -> Int4,
post_name -> Varchar,
community_id -> Int4,
community_name -> Varchar,
}
}
#[derive(Queryable, Identifiable, PartialEq, Debug, Serialize, QueryableByName, Clone)]
#[table_name = "mod_remove_comment_view"]
pub struct ModRemoveCommentView {
pub id: i32,
pub mod_user_id: i32,
pub comment_id: i32,
pub reason: Option<String>,
pub removed: Option<bool>,
pub when_: chrono::NaiveDateTime,
pub mod_user_name: String,
pub comment_user_id: i32,
pub comment_user_name: String,
pub comment_content: String,
pub post_id: i32,
pub post_name: String,
pub community_id: i32,
pub community_name: String,
}
impl ModRemoveCommentView {
pub fn list(
conn: &PgConnection,
from_community_id: Option<i32>,
from_mod_user_id: Option<i32>,
page: Option<i64>,
limit: Option<i64>,
) -> Result<Vec<Self>, Error> {
use super::moderator_views::mod_remove_comment_view::dsl::*;
let mut query = mod_remove_comment_view.into_boxed();
let (limit, offset) = limit_and_offset(page, limit);
if let Some(from_community_id) = from_community_id {
query = query.filter(community_id.eq(from_community_id));
};
if let Some(from_mod_user_id) = from_mod_user_id {
query = query.filter(mod_user_id.eq(from_mod_user_id));
};
query
.limit(limit)
.offset(offset)
.order_by(when_.desc())
.load::<Self>(conn)
}
}
table! {
mod_remove_community_view (id) {
id -> Int4,
mod_user_id -> Int4,
community_id -> Int4,
reason -> Nullable<Text>,
removed -> Nullable<Bool>,
expires -> Nullable<Timestamp>,
when_ -> Timestamp,
mod_user_name -> Varchar,
community_name -> Varchar,
}
}
#[derive(Queryable, Identifiable, PartialEq, Debug, Serialize, QueryableByName, Clone)]
#[table_name = "mod_remove_community_view"]
pub struct ModRemoveCommunityView {
pub id: i32,
pub mod_user_id: i32,
pub community_id: i32,
pub reason: Option<String>,
pub removed: Option<bool>,
pub expires: Option<chrono::NaiveDateTime>,
pub when_: chrono::NaiveDateTime,
pub mod_user_name: String,
pub community_name: String,
}
impl ModRemoveCommunityView {
pub fn list(
conn: &PgConnection,
from_mod_user_id: Option<i32>,
page: Option<i64>,
limit: Option<i64>,
) -> Result<Vec<Self>, Error> {
use super::moderator_views::mod_remove_community_view::dsl::*;
let mut query = mod_remove_community_view.into_boxed();
let (limit, offset) = limit_and_offset(page, limit);
if let Some(from_mod_user_id) = from_mod_user_id {
query = query.filter(mod_user_id.eq(from_mod_user_id));
};
query
.limit(limit)
.offset(offset)
.order_by(when_.desc())
.load::<Self>(conn)
}
}
table! {
mod_ban_from_community_view (id) {
id -> Int4,
mod_user_id -> Int4,
other_user_id -> Int4,
community_id -> Int4,
reason -> Nullable<Text>,
banned -> Nullable<Bool>,
expires -> Nullable<Timestamp>,
when_ -> Timestamp,
mod_user_name -> Varchar,
other_user_name -> Varchar,
community_name -> Varchar,
}
}
#[derive(Queryable, Identifiable, PartialEq, Debug, Serialize, QueryableByName, Clone)]
#[table_name = "mod_ban_from_community_view"]
pub struct ModBanFromCommunityView {
pub id: i32,
pub mod_user_id: i32,
pub other_user_id: i32,
pub community_id: i32,
pub reason: Option<String>,
pub banned: Option<bool>,
pub expires: Option<chrono::NaiveDateTime>,
pub when_: chrono::NaiveDateTime,
pub mod_user_name: String,
pub other_user_name: String,
pub community_name: String,
}
impl ModBanFromCommunityView {
pub fn list(
conn: &PgConnection,
from_community_id: Option<i32>,
from_mod_user_id: Option<i32>,
page: Option<i64>,
limit: Option<i64>,
) -> Result<Vec<Self>, Error> {
use super::moderator_views::mod_ban_from_community_view::dsl::*;
let mut query = mod_ban_from_community_view.into_boxed();
let (limit, offset) = limit_and_offset(page, limit);
if let Some(from_community_id) = from_community_id {
query = query.filter(community_id.eq(from_community_id));
};
if let Some(from_mod_user_id) = from_mod_user_id {
query = query.filter(mod_user_id.eq(from_mod_user_id));
};
query
.limit(limit)
.offset(offset)
.order_by(when_.desc())
.load::<Self>(conn)
}
}
table! {
mod_ban_view (id) {
id -> Int4,
mod_user_id -> Int4,
other_user_id -> Int4,
reason -> Nullable<Text>,
banned -> Nullable<Bool>,
expires -> Nullable<Timestamp>,
when_ -> Timestamp,
mod_user_name -> Varchar,
other_user_name -> Varchar,
}
}
#[derive(Queryable, Identifiable, PartialEq, Debug, Serialize, QueryableByName, Clone)]
#[table_name = "mod_ban_view"]
pub struct ModBanView {
pub id: i32,
pub mod_user_id: i32,
pub other_user_id: i32,
pub reason: Option<String>,
pub banned: Option<bool>,
pub expires: Option<chrono::NaiveDateTime>,
pub when_: chrono::NaiveDateTime,
pub mod_user_name: String,
pub other_user_name: String,
}
impl ModBanView {
pub fn list(
conn: &PgConnection,
from_mod_user_id: Option<i32>,
page: Option<i64>,
limit: Option<i64>,
) -> Result<Vec<Self>, Error> {
use super::moderator_views::mod_ban_view::dsl::*;
let mut query = mod_ban_view.into_boxed();
let (limit, offset) = limit_and_offset(page, limit);
if let Some(from_mod_user_id) = from_mod_user_id {
query = query.filter(mod_user_id.eq(from_mod_user_id));
};
query
.limit(limit)
.offset(offset)
.order_by(when_.desc())
.load::<Self>(conn)
}
}
table! {
mod_add_community_view (id) {
id -> Int4,
mod_user_id -> Int4,
other_user_id -> Int4,
community_id -> Int4,
removed -> Nullable<Bool>,
when_ -> Timestamp,
mod_user_name -> Varchar,
other_user_name -> Varchar,
community_name -> Varchar,
}
}
#[derive(Queryable, Identifiable, PartialEq, Debug, Serialize, QueryableByName, Clone)]
#[table_name = "mod_add_community_view"]
pub struct ModAddCommunityView {
pub id: i32,
pub mod_user_id: i32,
pub other_user_id: i32,
pub community_id: i32,
pub removed: Option<bool>,
pub when_: chrono::NaiveDateTime,
pub mod_user_name: String,
pub other_user_name: String,
pub community_name: String,
}
impl ModAddCommunityView {
pub fn list(
conn: &PgConnection,
from_community_id: Option<i32>,
from_mod_user_id: Option<i32>,
page: Option<i64>,
limit: Option<i64>,
) -> Result<Vec<Self>, Error> {
use super::moderator_views::mod_add_community_view::dsl::*;
let mut query = mod_add_community_view.into_boxed();
let (limit, offset) = limit_and_offset(page, limit);
if let Some(from_community_id) = from_community_id {
query = query.filter(community_id.eq(from_community_id));
};
if let Some(from_mod_user_id) = from_mod_user_id {
query = query.filter(mod_user_id.eq(from_mod_user_id));
};
query
.limit(limit)
.offset(offset)
.order_by(when_.desc())
.load::<Self>(conn)
}
}
table! {
mod_add_view (id) {
id -> Int4,
mod_user_id -> Int4,
other_user_id -> Int4,
removed -> Nullable<Bool>,
when_ -> Timestamp,
mod_user_name -> Varchar,
other_user_name -> Varchar,
}
}
#[derive(Queryable, Identifiable, PartialEq, Debug, Serialize, QueryableByName, Clone)]
#[table_name = "mod_add_view"]
pub struct ModAddView {
pub id: i32,
pub mod_user_id: i32,
pub other_user_id: i32,
pub removed: Option<bool>,
pub when_: chrono::NaiveDateTime,
pub mod_user_name: String,
pub other_user_name: String,
}
impl ModAddView {
pub fn list(
conn: &PgConnection,
from_mod_user_id: Option<i32>,
page: Option<i64>,
limit: Option<i64>,
) -> Result<Vec<Self>, Error> {
use super::moderator_views::mod_add_view::dsl::*;
let mut query = mod_add_view.into_boxed();
let (limit, offset) = limit_and_offset(page, limit);
if let Some(from_mod_user_id) = from_mod_user_id {
query = query.filter(mod_user_id.eq(from_mod_user_id));
};
query
.limit(limit)
.offset(offset)
.order_by(when_.desc())
.load::<Self>(conn)
}
}

View file

@ -1,245 +0,0 @@
use diesel::{dsl::*, pg::Pg, result::Error, *};
use serde::{Deserialize, Serialize};
use crate::{
limit_and_offset,
naive_now,
post::Post,
schema::post_report,
MaybeOptional,
Reportable,
};
table! {
post_report_view (id) {
id -> Int4,
creator_id -> Int4,
post_id -> Int4,
original_post_name -> Varchar,
original_post_url -> Nullable<Text>,
original_post_body -> Nullable<Text>,
reason -> Text,
resolved -> Bool,
resolver_id -> Nullable<Int4>,
published -> Timestamp,
updated -> Nullable<Timestamp>,
current_post_name -> Varchar,
current_post_url -> Nullable<Text>,
current_post_body -> Nullable<Text>,
community_id -> Int4,
creator_actor_id -> Text,
creator_name -> Varchar,
creator_preferred_username -> Nullable<Varchar>,
creator_avatar -> Nullable<Text>,
creator_local -> Bool,
post_creator_id -> Int4,
post_creator_actor_id -> Text,
post_creator_name -> Varchar,
post_creator_preferred_username -> Nullable<Varchar>,
post_creator_avatar -> Nullable<Text>,
post_creator_local -> Bool,
resolver_actor_id -> Nullable<Text>,
resolver_name -> Nullable<Varchar>,
resolver_preferred_username -> Nullable<Varchar>,
resolver_avatar -> Nullable<Text>,
resolver_local -> Nullable<Bool>,
}
}
#[derive(Identifiable, Queryable, Associations, PartialEq, Serialize, Deserialize, Debug)]
#[belongs_to(Post)]
#[table_name = "post_report"]
pub struct PostReport {
pub id: i32,
pub creator_id: i32,
pub post_id: i32,
pub original_post_name: String,
pub original_post_url: Option<String>,
pub original_post_body: Option<String>,
pub reason: String,
pub resolved: bool,
pub resolver_id: Option<i32>,
pub published: chrono::NaiveDateTime,
pub updated: Option<chrono::NaiveDateTime>,
}
#[derive(Insertable, AsChangeset, Clone)]
#[table_name = "post_report"]
pub struct PostReportForm {
pub creator_id: i32,
pub post_id: i32,
pub original_post_name: String,
pub original_post_url: Option<String>,
pub original_post_body: Option<String>,
pub reason: String,
}
impl Reportable<PostReportForm> for PostReport {
/// creates a post report and returns it
///
/// * `conn` - the postgres connection
/// * `post_report_form` - the filled PostReportForm to insert
fn report(conn: &PgConnection, post_report_form: &PostReportForm) -> Result<Self, Error> {
use crate::schema::post_report::dsl::*;
insert_into(post_report)
.values(post_report_form)
.get_result::<Self>(conn)
}
/// resolve a post report
///
/// * `conn` - the postgres connection
/// * `report_id` - the id of the report to resolve
/// * `by_resolver_id` - the id of the user resolving the report
fn resolve(conn: &PgConnection, report_id: i32, by_resolver_id: i32) -> Result<usize, Error> {
use crate::schema::post_report::dsl::*;
update(post_report.find(report_id))
.set((
resolved.eq(true),
resolver_id.eq(by_resolver_id),
updated.eq(naive_now()),
))
.execute(conn)
}
/// unresolve a post report
///
/// * `conn` - the postgres connection
/// * `report_id` - the id of the report to unresolve
/// * `by_resolver_id` - the id of the user unresolving the report
fn unresolve(conn: &PgConnection, report_id: i32, by_resolver_id: i32) -> Result<usize, Error> {
use crate::schema::post_report::dsl::*;
update(post_report.find(report_id))
.set((
resolved.eq(false),
resolver_id.eq(by_resolver_id),
updated.eq(naive_now()),
))
.execute(conn)
}
}
#[derive(Queryable, Identifiable, PartialEq, Debug, Serialize, Deserialize, Clone)]
#[table_name = "post_report_view"]
pub struct PostReportView {
pub id: i32,
pub creator_id: i32,
pub post_id: i32,
pub original_post_name: String,
pub original_post_url: Option<String>,
pub original_post_body: Option<String>,
pub reason: String,
pub resolved: bool,
pub resolver_id: Option<i32>,
pub published: chrono::NaiveDateTime,
pub updated: Option<chrono::NaiveDateTime>,
pub current_post_name: String,
pub current_post_url: Option<String>,
pub current_post_body: Option<String>,
pub community_id: i32,
pub creator_actor_id: String,
pub creator_name: String,
pub creator_preferred_username: Option<String>,
pub creator_avatar: Option<String>,
pub creator_local: bool,
pub post_creator_id: i32,
pub post_creator_actor_id: String,
pub post_creator_name: String,
pub post_creator_preferred_username: Option<String>,
pub post_creator_avatar: Option<String>,
pub post_creator_local: bool,
pub resolver_actor_id: Option<String>,
pub resolver_name: Option<String>,
pub resolver_preferred_username: Option<String>,
pub resolver_avatar: Option<String>,
pub resolver_local: Option<bool>,
}
impl PostReportView {
/// returns the PostReportView for the provided report_id
///
/// * `report_id` - the report id to obtain
pub fn read(conn: &PgConnection, report_id: i32) -> Result<Self, Error> {
use super::post_report::post_report_view::dsl::*;
post_report_view.find(report_id).first::<Self>(conn)
}
/// returns the current unresolved post report count for the supplied community ids
///
/// * `community_ids` - a Vec<i32> of community_ids to get a count for
pub fn get_report_count(conn: &PgConnection, community_ids: &[i32]) -> Result<i64, Error> {
use super::post_report::post_report_view::dsl::*;
post_report_view
.filter(resolved.eq(false).and(community_id.eq_any(community_ids)))
.select(count(id))
.first::<i64>(conn)
}
}
pub struct PostReportQueryBuilder<'a> {
conn: &'a PgConnection,
query: post_report_view::BoxedQuery<'a, Pg>,
for_community_ids: Option<Vec<i32>>,
page: Option<i64>,
limit: Option<i64>,
resolved: Option<bool>,
}
impl<'a> PostReportQueryBuilder<'a> {
pub fn create(conn: &'a PgConnection) -> Self {
use super::post_report::post_report_view::dsl::*;
let query = post_report_view.into_boxed();
PostReportQueryBuilder {
conn,
query,
for_community_ids: None,
page: None,
limit: None,
resolved: Some(false),
}
}
pub fn community_ids<T: MaybeOptional<Vec<i32>>>(mut self, community_ids: T) -> Self {
self.for_community_ids = community_ids.get_optional();
self
}
pub fn page<T: MaybeOptional<i64>>(mut self, page: T) -> Self {
self.page = page.get_optional();
self
}
pub fn limit<T: MaybeOptional<i64>>(mut self, limit: T) -> Self {
self.limit = limit.get_optional();
self
}
pub fn resolved<T: MaybeOptional<bool>>(mut self, resolved: T) -> Self {
self.resolved = resolved.get_optional();
self
}
pub fn list(self) -> Result<Vec<PostReportView>, Error> {
use super::post_report::post_report_view::dsl::*;
let mut query = self.query;
if let Some(comm_ids) = self.for_community_ids {
query = query.filter(community_id.eq_any(comm_ids));
}
if let Some(resolved_flag) = self.resolved {
query = query.filter(resolved.eq(resolved_flag));
}
let (limit, offset) = limit_and_offset(self.page, self.limit);
query
.order_by(published.asc())
.limit(limit)
.offset(offset)
.load::<PostReportView>(self.conn)
}
}
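// A minimal usage sketch of the builder above: post reports for a set of communities,
// resolved and unresolved alike (passing None clears the default unresolved-only
// filter). `conn` and `community_ids` are assumed to be valid values supplied by the
// caller.
fn example_list_post_reports(
  conn: &PgConnection,
  community_ids: Vec<i32>,
) -> Result<Vec<PostReportView>, Error> {
  PostReportQueryBuilder::create(conn)
    .community_ids(community_ids)
    .resolved(None)
    .list()
}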

View file

@ -1,641 +0,0 @@
use super::post_view::post_fast_view::BoxedQuery;
use crate::{fuzzy_search, limit_and_offset, ListingType, MaybeOptional, SortType};
use diesel::{dsl::*, pg::Pg, result::Error, *};
use serde::Serialize;
// The faked schema since diesel doesn't do views
table! {
post_view (id) {
id -> Int4,
name -> Varchar,
url -> Nullable<Text>,
body -> Nullable<Text>,
creator_id -> Int4,
community_id -> Int4,
removed -> Bool,
locked -> Bool,
published -> Timestamp,
updated -> Nullable<Timestamp>,
deleted -> Bool,
nsfw -> Bool,
stickied -> Bool,
embed_title -> Nullable<Text>,
embed_description -> Nullable<Text>,
embed_html -> Nullable<Text>,
thumbnail_url -> Nullable<Text>,
ap_id -> Text,
local -> Bool,
creator_actor_id -> Text,
creator_local -> Bool,
creator_name -> Varchar,
creator_preferred_username -> Nullable<Varchar>,
creator_published -> Timestamp,
creator_avatar -> Nullable<Text>,
banned -> Bool,
banned_from_community -> Bool,
community_actor_id -> Text,
community_local -> Bool,
community_name -> Varchar,
community_icon -> Nullable<Text>,
community_removed -> Bool,
community_deleted -> Bool,
community_nsfw -> Bool,
number_of_comments -> BigInt,
score -> BigInt,
upvotes -> BigInt,
downvotes -> BigInt,
hot_rank -> Int4,
hot_rank_active -> Int4,
newest_activity_time -> Timestamp,
user_id -> Nullable<Int4>,
my_vote -> Nullable<Int4>,
subscribed -> Nullable<Bool>,
read -> Nullable<Bool>,
saved -> Nullable<Bool>,
}
}
table! {
post_fast_view (id) {
id -> Int4,
name -> Varchar,
url -> Nullable<Text>,
body -> Nullable<Text>,
creator_id -> Int4,
community_id -> Int4,
removed -> Bool,
locked -> Bool,
published -> Timestamp,
updated -> Nullable<Timestamp>,
deleted -> Bool,
nsfw -> Bool,
stickied -> Bool,
embed_title -> Nullable<Text>,
embed_description -> Nullable<Text>,
embed_html -> Nullable<Text>,
thumbnail_url -> Nullable<Text>,
ap_id -> Text,
local -> Bool,
creator_actor_id -> Text,
creator_local -> Bool,
creator_name -> Varchar,
creator_preferred_username -> Nullable<Varchar>,
creator_published -> Timestamp,
creator_avatar -> Nullable<Text>,
banned -> Bool,
banned_from_community -> Bool,
community_actor_id -> Text,
community_local -> Bool,
community_name -> Varchar,
community_icon -> Nullable<Text>,
community_removed -> Bool,
community_deleted -> Bool,
community_nsfw -> Bool,
number_of_comments -> BigInt,
score -> BigInt,
upvotes -> BigInt,
downvotes -> BigInt,
hot_rank -> Int4,
hot_rank_active -> Int4,
newest_activity_time -> Timestamp,
user_id -> Nullable<Int4>,
my_vote -> Nullable<Int4>,
subscribed -> Nullable<Bool>,
read -> Nullable<Bool>,
saved -> Nullable<Bool>,
}
}
#[derive(Queryable, Identifiable, PartialEq, Debug, Serialize, QueryableByName, Clone)]
#[table_name = "post_fast_view"]
pub struct PostView {
pub id: i32,
pub name: String,
pub url: Option<String>,
pub body: Option<String>,
pub creator_id: i32,
pub community_id: i32,
pub removed: bool,
pub locked: bool,
pub published: chrono::NaiveDateTime,
pub updated: Option<chrono::NaiveDateTime>,
pub deleted: bool,
pub nsfw: bool,
pub stickied: bool,
pub embed_title: Option<String>,
pub embed_description: Option<String>,
pub embed_html: Option<String>,
pub thumbnail_url: Option<String>,
pub ap_id: String,
pub local: bool,
pub creator_actor_id: String,
pub creator_local: bool,
pub creator_name: String,
pub creator_preferred_username: Option<String>,
pub creator_published: chrono::NaiveDateTime,
pub creator_avatar: Option<String>,
pub banned: bool,
pub banned_from_community: bool,
pub community_actor_id: String,
pub community_local: bool,
pub community_name: String,
pub community_icon: Option<String>,
pub community_removed: bool,
pub community_deleted: bool,
pub community_nsfw: bool,
pub number_of_comments: i64,
pub score: i64,
pub upvotes: i64,
pub downvotes: i64,
pub hot_rank: i32,
pub hot_rank_active: i32,
pub newest_activity_time: chrono::NaiveDateTime,
pub user_id: Option<i32>,
pub my_vote: Option<i32>,
pub subscribed: Option<bool>,
pub read: Option<bool>,
pub saved: Option<bool>,
}
pub struct PostQueryBuilder<'a> {
conn: &'a PgConnection,
query: BoxedQuery<'a, Pg>,
listing_type: &'a ListingType,
sort: &'a SortType,
my_user_id: Option<i32>,
for_creator_id: Option<i32>,
for_community_id: Option<i32>,
for_community_name: Option<String>,
search_term: Option<String>,
url_search: Option<String>,
show_nsfw: bool,
saved_only: bool,
unread_only: bool,
page: Option<i64>,
limit: Option<i64>,
}
impl<'a> PostQueryBuilder<'a> {
pub fn create(conn: &'a PgConnection) -> Self {
use super::post_view::post_fast_view::dsl::*;
let query = post_fast_view.into_boxed();
PostQueryBuilder {
conn,
query,
listing_type: &ListingType::All,
sort: &SortType::Hot,
my_user_id: None,
for_creator_id: None,
for_community_id: None,
for_community_name: None,
search_term: None,
url_search: None,
show_nsfw: true,
saved_only: false,
unread_only: false,
page: None,
limit: None,
}
}
pub fn listing_type(mut self, listing_type: &'a ListingType) -> Self {
self.listing_type = listing_type;
self
}
pub fn sort(mut self, sort: &'a SortType) -> Self {
self.sort = sort;
self
}
pub fn for_community_id<T: MaybeOptional<i32>>(mut self, for_community_id: T) -> Self {
self.for_community_id = for_community_id.get_optional();
self
}
pub fn for_community_name<T: MaybeOptional<String>>(mut self, for_community_name: T) -> Self {
self.for_community_name = for_community_name.get_optional();
self
}
pub fn for_creator_id<T: MaybeOptional<i32>>(mut self, for_creator_id: T) -> Self {
self.for_creator_id = for_creator_id.get_optional();
self
}
pub fn search_term<T: MaybeOptional<String>>(mut self, search_term: T) -> Self {
self.search_term = search_term.get_optional();
self
}
pub fn url_search<T: MaybeOptional<String>>(mut self, url_search: T) -> Self {
self.url_search = url_search.get_optional();
self
}
pub fn my_user_id<T: MaybeOptional<i32>>(mut self, my_user_id: T) -> Self {
self.my_user_id = my_user_id.get_optional();
self
}
pub fn show_nsfw(mut self, show_nsfw: bool) -> Self {
self.show_nsfw = show_nsfw;
self
}
pub fn saved_only(mut self, saved_only: bool) -> Self {
self.saved_only = saved_only;
self
}
pub fn page<T: MaybeOptional<i64>>(mut self, page: T) -> Self {
self.page = page.get_optional();
self
}
pub fn limit<T: MaybeOptional<i64>>(mut self, limit: T) -> Self {
self.limit = limit.get_optional();
self
}
pub fn list(self) -> Result<Vec<PostView>, Error> {
use super::post_view::post_fast_view::dsl::*;
let mut query = self.query;
query = match self.listing_type {
ListingType::Subscribed => query.filter(subscribed.eq(true)),
ListingType::Local => query.filter(community_local.eq(true)),
_ => query,
};
if let Some(for_community_id) = self.for_community_id {
query = query
.filter(community_id.eq(for_community_id))
.then_order_by(stickied.desc());
}
if let Some(for_community_name) = self.for_community_name {
query = query
.filter(community_name.eq(for_community_name))
.filter(community_local.eq(true))
.then_order_by(stickied.desc());
}
if let Some(url_search) = self.url_search {
query = query.filter(url.eq(url_search));
}
if let Some(search_term) = self.search_term {
let searcher = fuzzy_search(&search_term);
query = query.filter(name.ilike(searcher.to_owned()).or(body.ilike(searcher)));
}
query = match self.sort {
SortType::Active => query
.then_order_by(hot_rank_active.desc())
.then_order_by(published.desc()),
SortType::Hot => query
.then_order_by(hot_rank.desc())
.then_order_by(published.desc()),
SortType::New => query.then_order_by(published.desc()),
SortType::TopAll => query.then_order_by(score.desc()),
SortType::TopYear => query
.filter(published.gt(now - 1.years()))
.then_order_by(score.desc()),
SortType::TopMonth => query
.filter(published.gt(now - 1.months()))
.then_order_by(score.desc()),
SortType::TopWeek => query
.filter(published.gt(now - 1.weeks()))
.then_order_by(score.desc()),
SortType::TopDay => query
.filter(published.gt(now - 1.days()))
.then_order_by(score.desc()),
};
// The view lets you pass a null user_id, if you're not logged in
query = if let Some(my_user_id) = self.my_user_id {
query.filter(user_id.eq(my_user_id))
} else {
query.filter(user_id.is_null())
};
// If it's for a specific user, show the removed / deleted
if let Some(for_creator_id) = self.for_creator_id {
query = query.filter(creator_id.eq(for_creator_id));
} else {
query = query
.filter(removed.eq(false))
.filter(deleted.eq(false))
.filter(community_removed.eq(false))
.filter(community_deleted.eq(false));
}
if !self.show_nsfw {
query = query
.filter(nsfw.eq(false))
.filter(community_nsfw.eq(false));
};
// TODO these are wrong, because they'll only show saved for your logged-in user, not theirs
if self.saved_only {
query = query.filter(saved.eq(true));
};
if self.unread_only {
query = query.filter(read.eq(false));
};
let (limit, offset) = limit_and_offset(self.page, self.limit);
query = query
.limit(limit)
.offset(offset)
.filter(removed.eq(false))
.filter(deleted.eq(false))
.filter(community_removed.eq(false))
.filter(community_deleted.eq(false));
query.load::<PostView>(self.conn)
}
}
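// A minimal usage sketch of PostQueryBuilder: the hot front page of one community for
// a logged-in user. `conn`, `community_id` and `user_id` are assumed to be valid
// values supplied by the caller.
fn example_community_front_page(
  conn: &PgConnection,
  community_id: i32,
  user_id: i32,
) -> Result<Vec<PostView>, Error> {
  PostQueryBuilder::create(conn)
    .listing_type(&ListingType::Community)
    .sort(&SortType::Hot)
    .for_community_id(community_id)
    .my_user_id(user_id)
    .show_nsfw(false)
    .page(1)
    .limit(20)
    .list()
}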
impl PostView {
pub fn read(
conn: &PgConnection,
from_post_id: i32,
my_user_id: Option<i32>,
) -> Result<Self, Error> {
use super::post_view::post_fast_view::dsl::*;
use diesel::prelude::*;
let mut query = post_fast_view.into_boxed();
query = query.filter(id.eq(from_post_id));
if let Some(my_user_id) = my_user_id {
query = query.filter(user_id.eq(my_user_id));
} else {
query = query.filter(user_id.is_null());
};
query.first::<Self>(conn)
}
}
#[cfg(test)]
mod tests {
use crate::{
community::*,
post::*,
post_view::*,
tests::establish_unpooled_connection,
user::*,
Crud,
Likeable,
*,
};
#[test]
fn test_crud() {
let conn = establish_unpooled_connection();
let user_name = "tegan".to_string();
let community_name = "test_community_3".to_string();
let post_name = "test post 3".to_string();
let new_user = UserForm {
name: user_name.to_owned(),
preferred_username: None,
password_encrypted: "nope".into(),
email: None,
matrix_user_id: None,
avatar: None,
banner: None,
published: None,
updated: None,
admin: false,
banned: Some(false),
show_nsfw: false,
theme: "browser".into(),
default_sort_type: SortType::Hot as i16,
default_listing_type: ListingType::Subscribed as i16,
lang: "browser".into(),
show_avatars: true,
send_notifications_to_email: false,
actor_id: None,
bio: None,
local: true,
private_key: None,
public_key: None,
last_refreshed_at: None,
};
let inserted_user = User_::create(&conn, &new_user).unwrap();
let new_community = CommunityForm {
name: community_name.to_owned(),
title: "nada".to_owned(),
description: None,
creator_id: inserted_user.id,
category_id: 1,
removed: None,
deleted: None,
updated: None,
nsfw: false,
actor_id: None,
local: true,
private_key: None,
public_key: None,
last_refreshed_at: None,
published: None,
icon: None,
banner: None,
};
let inserted_community = Community::create(&conn, &new_community).unwrap();
let new_post = PostForm {
name: post_name.to_owned(),
url: None,
body: None,
creator_id: inserted_user.id,
community_id: inserted_community.id,
removed: None,
deleted: None,
locked: None,
stickied: None,
updated: None,
nsfw: false,
embed_title: None,
embed_description: None,
embed_html: None,
thumbnail_url: None,
ap_id: None,
local: true,
published: None,
};
let inserted_post = Post::create(&conn, &new_post).unwrap();
let post_like_form = PostLikeForm {
post_id: inserted_post.id,
user_id: inserted_user.id,
score: 1,
};
let inserted_post_like = PostLike::like(&conn, &post_like_form).unwrap();
let expected_post_like = PostLike {
id: inserted_post_like.id,
post_id: inserted_post.id,
user_id: inserted_user.id,
published: inserted_post_like.published,
score: 1,
};
let read_post_listings_with_user = PostQueryBuilder::create(&conn)
.listing_type(&ListingType::Community)
.sort(&SortType::New)
.for_community_id(inserted_community.id)
.my_user_id(inserted_user.id)
.list()
.unwrap();
let read_post_listings_no_user = PostQueryBuilder::create(&conn)
.listing_type(&ListingType::Community)
.sort(&SortType::New)
.for_community_id(inserted_community.id)
.list()
.unwrap();
let read_post_listing_no_user = PostView::read(&conn, inserted_post.id, None).unwrap();
let read_post_listing_with_user =
PostView::read(&conn, inserted_post.id, Some(inserted_user.id)).unwrap();
// The non-user version
let expected_post_listing_no_user = PostView {
user_id: None,
my_vote: None,
id: inserted_post.id,
name: post_name.to_owned(),
url: None,
body: None,
creator_id: inserted_user.id,
creator_name: user_name.to_owned(),
creator_preferred_username: None,
creator_published: inserted_user.published,
creator_avatar: None,
banned: false,
banned_from_community: false,
community_id: inserted_community.id,
removed: false,
deleted: false,
locked: false,
stickied: false,
community_name: community_name.to_owned(),
community_icon: None,
community_removed: false,
community_deleted: false,
community_nsfw: false,
number_of_comments: 0,
score: 1,
upvotes: 1,
downvotes: 0,
hot_rank: read_post_listing_no_user.hot_rank,
hot_rank_active: read_post_listing_no_user.hot_rank_active,
published: inserted_post.published,
newest_activity_time: inserted_post.published,
updated: None,
subscribed: None,
read: None,
saved: None,
nsfw: false,
embed_title: None,
embed_description: None,
embed_html: None,
thumbnail_url: None,
ap_id: inserted_post.ap_id.to_owned(),
local: true,
creator_actor_id: inserted_user.actor_id.to_owned(),
creator_local: true,
community_actor_id: inserted_community.actor_id.to_owned(),
community_local: true,
};
let expected_post_listing_with_user = PostView {
user_id: Some(inserted_user.id),
my_vote: Some(1),
id: inserted_post.id,
name: post_name,
url: None,
body: None,
removed: false,
deleted: false,
locked: false,
stickied: false,
creator_id: inserted_user.id,
creator_name: user_name,
creator_preferred_username: None,
creator_published: inserted_user.published,
creator_avatar: None,
banned: false,
banned_from_community: false,
community_id: inserted_community.id,
community_name,
community_icon: None,
community_removed: false,
community_deleted: false,
community_nsfw: false,
number_of_comments: 0,
score: 1,
upvotes: 1,
downvotes: 0,
hot_rank: read_post_listing_with_user.hot_rank,
hot_rank_active: read_post_listing_with_user.hot_rank_active,
published: inserted_post.published,
newest_activity_time: inserted_post.published,
updated: None,
subscribed: Some(false),
read: Some(false),
saved: Some(false),
nsfw: false,
embed_title: None,
embed_description: None,
embed_html: None,
thumbnail_url: None,
ap_id: inserted_post.ap_id.to_owned(),
local: true,
creator_actor_id: inserted_user.actor_id.to_owned(),
creator_local: true,
community_actor_id: inserted_community.actor_id.to_owned(),
community_local: true,
};
let like_removed = PostLike::remove(&conn, inserted_user.id, inserted_post.id).unwrap();
let num_deleted = Post::delete(&conn, inserted_post.id).unwrap();
Community::delete(&conn, inserted_community.id).unwrap();
User_::delete(&conn, inserted_user.id).unwrap();
// The with-user version
assert_eq!(
expected_post_listing_with_user,
read_post_listings_with_user[0]
);
assert_eq!(expected_post_listing_with_user, read_post_listing_with_user);
assert_eq!(1, read_post_listings_with_user.len());
// Without the user
assert_eq!(expected_post_listing_no_user, read_post_listings_no_user[0]);
assert_eq!(expected_post_listing_no_user, read_post_listing_no_user);
assert_eq!(1, read_post_listings_no_user.len());
// assert_eq!(expected_post, inserted_post);
// assert_eq!(expected_post, updated_post);
assert_eq!(expected_post_like, inserted_post_like);
assert_eq!(1, like_removed);
assert_eq!(1, num_deleted);
}
}

@@ -1,138 +0,0 @@
use crate::{limit_and_offset, MaybeOptional};
use diesel::{pg::Pg, result::Error, *};
use serde::Serialize;
// The faked schema, since diesel doesn't support views
table! {
private_message_view (id) {
id -> Int4,
creator_id -> Int4,
recipient_id -> Int4,
content -> Text,
deleted -> Bool,
read -> Bool,
published -> Timestamp,
updated -> Nullable<Timestamp>,
ap_id -> Text,
local -> Bool,
creator_name -> Varchar,
creator_preferred_username -> Nullable<Varchar>,
creator_avatar -> Nullable<Text>,
creator_actor_id -> Text,
creator_local -> Bool,
recipient_name -> Varchar,
recipient_preferred_username -> Nullable<Varchar>,
recipient_avatar -> Nullable<Text>,
recipient_actor_id -> Text,
recipient_local -> Bool,
}
}
#[derive(Queryable, Identifiable, PartialEq, Debug, Serialize, QueryableByName, Clone)]
#[table_name = "private_message_view"]
pub struct PrivateMessageView {
pub id: i32,
pub creator_id: i32,
pub recipient_id: i32,
pub content: String,
pub deleted: bool,
pub read: bool,
pub published: chrono::NaiveDateTime,
pub updated: Option<chrono::NaiveDateTime>,
pub ap_id: String,
pub local: bool,
pub creator_name: String,
pub creator_preferred_username: Option<String>,
pub creator_avatar: Option<String>,
pub creator_actor_id: String,
pub creator_local: bool,
pub recipient_name: String,
pub recipient_preferred_username: Option<String>,
pub recipient_avatar: Option<String>,
pub recipient_actor_id: String,
pub recipient_local: bool,
}
pub struct PrivateMessageQueryBuilder<'a> {
conn: &'a PgConnection,
query: super::private_message_view::private_message_view::BoxedQuery<'a, Pg>,
for_recipient_id: i32,
unread_only: bool,
page: Option<i64>,
limit: Option<i64>,
}
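// Illustrative usage (recipient_id and paging values are hypothetical):
// PrivateMessageQueryBuilder::create(&conn, recipient_id)
//   .unread_only(true)
//   .page(1)
//   .limit(20)
//   .list()?;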
impl<'a> PrivateMessageQueryBuilder<'a> {
pub fn create(conn: &'a PgConnection, for_recipient_id: i32) -> Self {
use super::private_message_view::private_message_view::dsl::*;
let query = private_message_view.into_boxed();
PrivateMessageQueryBuilder {
conn,
query,
for_recipient_id,
unread_only: false,
page: None,
limit: None,
}
}
pub fn unread_only(mut self, unread_only: bool) -> Self {
self.unread_only = unread_only;
self
}
pub fn page<T: MaybeOptional<i64>>(mut self, page: T) -> Self {
self.page = page.get_optional();
self
}
pub fn limit<T: MaybeOptional<i64>>(mut self, limit: T) -> Self {
self.limit = limit.get_optional();
self
}
pub fn list(self) -> Result<Vec<PrivateMessageView>, Error> {
use super::private_message_view::private_message_view::dsl::*;
let mut query = self.query.filter(deleted.eq(false));
// If it's unread-only, only return the messages addressed to me
if self.unread_only {
query = query
.filter(read.eq(false))
.filter(recipient_id.eq(self.for_recipient_id));
}
// Otherwise, the ALL view should show both sent and received messages
else {
query = query.filter(
recipient_id
.eq(self.for_recipient_id)
.or(creator_id.eq(self.for_recipient_id)),
)
}
let (limit, offset) = limit_and_offset(self.page, self.limit);
query
.limit(limit)
.offset(offset)
.order_by(published.desc())
.load::<PrivateMessageView>(self.conn)
}
}
impl PrivateMessageView {
pub fn read(conn: &PgConnection, from_private_message_id: i32) -> Result<Self, Error> {
use super::private_message_view::private_message_view::dsl::*;
let mut query = private_message_view.into_boxed();
query = query
.filter(id.eq(from_private_message_id))
.order_by(published.desc());
query.first::<Self>(conn)
}
}

@@ -1,61 +0,0 @@
use crate::{naive_now, schema::site, Crud};
use diesel::{dsl::*, result::Error, *};
#[derive(Queryable, Identifiable, PartialEq, Debug)]
#[table_name = "site"]
pub struct Site {
pub id: i32,
pub name: String,
pub description: Option<String>,
pub creator_id: i32,
pub published: chrono::NaiveDateTime,
pub updated: Option<chrono::NaiveDateTime>,
pub enable_downvotes: bool,
pub open_registration: bool,
pub enable_nsfw: bool,
pub icon: Option<String>,
pub banner: Option<String>,
}
#[derive(Insertable, AsChangeset)]
#[table_name = "site"]
pub struct SiteForm {
pub name: String,
pub description: Option<String>,
pub creator_id: i32,
pub updated: Option<chrono::NaiveDateTime>,
pub enable_downvotes: bool,
pub open_registration: bool,
pub enable_nsfw: bool,
// When you want to null out a column, you have to send Some(None), since sending None means you just don't want to update that column.
pub icon: Option<Option<String>>,
pub banner: Option<Option<String>>,
}
impl Crud<SiteForm> for Site {
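// There is only one site row, so the id argument is ignored and the first row is returned.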
fn read(conn: &PgConnection, _site_id: i32) -> Result<Self, Error> {
use crate::schema::site::dsl::*;
site.first::<Self>(conn)
}
fn create(conn: &PgConnection, new_site: &SiteForm) -> Result<Self, Error> {
use crate::schema::site::dsl::*;
insert_into(site).values(new_site).get_result::<Self>(conn)
}
fn update(conn: &PgConnection, site_id: i32, new_site: &SiteForm) -> Result<Self, Error> {
use crate::schema::site::dsl::*;
diesel::update(site.find(site_id))
.set(new_site)
.get_result::<Self>(conn)
}
}
impl Site {
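// Transfer site ownership to a new creator; the single site row is assumed to have id 1.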
pub fn transfer(conn: &PgConnection, new_creator_id: i32) -> Result<Self, Error> {
use crate::schema::site::dsl::*;
diesel::update(site.find(1))
.set((creator_id.eq(new_creator_id), updated.eq(naive_now())))
.get_result::<Self>(conn)
}
}

@@ -1,55 +0,0 @@
use diesel::{result::Error, *};
use serde::Serialize;
table! {
site_view (id) {
id -> Int4,
name -> Varchar,
description -> Nullable<Text>,
creator_id -> Int4,
published -> Timestamp,
updated -> Nullable<Timestamp>,
enable_downvotes -> Bool,
open_registration -> Bool,
enable_nsfw -> Bool,
icon -> Nullable<Text>,
banner -> Nullable<Text>,
creator_name -> Varchar,
creator_preferred_username -> Nullable<Varchar>,
creator_avatar -> Nullable<Text>,
number_of_users -> BigInt,
number_of_posts -> BigInt,
number_of_comments -> BigInt,
number_of_communities -> BigInt,
}
}
#[derive(Queryable, Identifiable, PartialEq, Debug, Serialize, QueryableByName, Clone)]
#[table_name = "site_view"]
pub struct SiteView {
pub id: i32,
pub name: String,
pub description: Option<String>,
pub creator_id: i32,
pub published: chrono::NaiveDateTime,
pub updated: Option<chrono::NaiveDateTime>,
pub enable_downvotes: bool,
pub open_registration: bool,
pub enable_nsfw: bool,
pub icon: Option<String>,
pub banner: Option<String>,
pub creator_name: String,
pub creator_preferred_username: Option<String>,
pub creator_avatar: Option<String>,
pub number_of_users: i64,
pub number_of_posts: i64,
pub number_of_comments: i64,
pub number_of_communities: i64,
}
impl SiteView {
pub fn read(conn: &PgConnection) -> Result<Self, Error> {
use super::site_view::site_view::dsl::*;
site_view.first::<Self>(conn)
}
}

@@ -1,289 +0,0 @@
use crate::{
is_email_regex,
naive_now,
schema::{user_, user_::dsl::*},
ApubObject,
Crud,
};
use bcrypt::{hash, DEFAULT_COST};
use diesel::{dsl::*, result::Error, *};
use lemmy_utils::settings::Settings;
use serde::Serialize;
#[derive(Clone, Queryable, Identifiable, PartialEq, Debug, Serialize)]
#[table_name = "user_"]
pub struct User_ {
pub id: i32,
pub name: String,
pub preferred_username: Option<String>,
pub password_encrypted: String,
pub email: Option<String>,
pub avatar: Option<String>,
pub admin: bool,
pub banned: bool,
pub published: chrono::NaiveDateTime,
pub updated: Option<chrono::NaiveDateTime>,
pub show_nsfw: bool,
pub theme: String,
pub default_sort_type: i16,
pub default_listing_type: i16,
pub lang: String,
pub show_avatars: bool,
pub send_notifications_to_email: bool,
pub matrix_user_id: Option<String>,
pub actor_id: String,
pub bio: Option<String>,
pub local: bool,
pub private_key: Option<String>,
pub public_key: Option<String>,
pub last_refreshed_at: chrono::NaiveDateTime,
pub banner: Option<String>,
pub deleted: bool,
}
#[derive(Insertable, AsChangeset, Clone)]
#[table_name = "user_"]
pub struct UserForm {
pub name: String,
pub preferred_username: Option<Option<String>>,
pub password_encrypted: String,
pub admin: bool,
pub banned: Option<bool>,
pub email: Option<Option<String>>,
pub avatar: Option<Option<String>>,
pub published: Option<chrono::NaiveDateTime>,
pub updated: Option<chrono::NaiveDateTime>,
pub show_nsfw: bool,
pub theme: String,
pub default_sort_type: i16,
pub default_listing_type: i16,
pub lang: String,
pub show_avatars: bool,
pub send_notifications_to_email: bool,
pub matrix_user_id: Option<Option<String>>,
pub actor_id: Option<String>,
pub bio: Option<Option<String>>,
pub local: bool,
pub private_key: Option<String>,
pub public_key: Option<String>,
pub last_refreshed_at: Option<chrono::NaiveDateTime>,
pub banner: Option<Option<String>>,
}
impl Crud<UserForm> for User_ {
fn read(conn: &PgConnection, user_id: i32) -> Result<Self, Error> {
user_
.filter(deleted.eq(false))
.find(user_id)
.first::<Self>(conn)
}
fn delete(conn: &PgConnection, user_id: i32) -> Result<usize, Error> {
diesel::delete(user_.find(user_id)).execute(conn)
}
fn create(conn: &PgConnection, form: &UserForm) -> Result<Self, Error> {
insert_into(user_).values(form).get_result::<Self>(conn)
}
fn update(conn: &PgConnection, user_id: i32, form: &UserForm) -> Result<Self, Error> {
diesel::update(user_.find(user_id))
.set(form)
.get_result::<Self>(conn)
}
}
impl ApubObject<UserForm> for User_ {
fn read_from_apub_id(conn: &PgConnection, object_id: &str) -> Result<Self, Error> {
use crate::schema::user_::dsl::*;
user_
.filter(deleted.eq(false))
.filter(actor_id.eq(object_id))
.first::<Self>(conn)
}
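// Insert the user, or update the existing row when one with the same actor_id already exists.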
fn upsert(conn: &PgConnection, user_form: &UserForm) -> Result<User_, Error> {
insert_into(user_)
.values(user_form)
.on_conflict(actor_id)
.do_update()
.set(user_form)
.get_result::<Self>(conn)
}
}
impl User_ {
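// Hashes the plaintext password with bcrypt before creating the user row.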
pub fn register(conn: &PgConnection, form: &UserForm) -> Result<Self, Error> {
let mut edited_user = form.clone();
let password_hash =
hash(&form.password_encrypted, DEFAULT_COST).expect("Couldn't hash password");
edited_user.password_encrypted = password_hash;
Self::create(&conn, &edited_user)
}
// TODO do more individual updates like these
pub fn update_password(
conn: &PgConnection,
user_id: i32,
new_password: &str,
) -> Result<Self, Error> {
let password_hash = hash(new_password, DEFAULT_COST).expect("Couldn't hash password");
diesel::update(user_.find(user_id))
.set((
password_encrypted.eq(password_hash),
updated.eq(naive_now()),
))
.get_result::<Self>(conn)
}
pub fn read_from_name(conn: &PgConnection, from_user_name: &str) -> Result<Self, Error> {
user_
.filter(local.eq(true))
.filter(deleted.eq(false))
.filter(name.eq(from_user_name))
.first::<Self>(conn)
}
pub fn add_admin(conn: &PgConnection, user_id: i32, added: bool) -> Result<Self, Error> {
diesel::update(user_.find(user_id))
.set(admin.eq(added))
.get_result::<Self>(conn)
}
pub fn ban_user(conn: &PgConnection, user_id: i32, ban: bool) -> Result<Self, Error> {
diesel::update(user_.find(user_id))
.set(banned.eq(ban))
.get_result::<Self>(conn)
}
pub fn find_by_email_or_username(
conn: &PgConnection,
username_or_email: &str,
) -> Result<Self, Error> {
if is_email_regex(username_or_email) {
Self::find_by_email(conn, username_or_email)
} else {
Self::find_by_username(conn, username_or_email)
}
}
pub fn find_by_username(conn: &PgConnection, username: &str) -> Result<User_, Error> {
user_
.filter(deleted.eq(false))
.filter(local.eq(true))
.filter(name.ilike(username))
.first::<User_>(conn)
}
pub fn find_by_email(conn: &PgConnection, from_email: &str) -> Result<User_, Error> {
user_
.filter(deleted.eq(false))
.filter(local.eq(true))
.filter(email.eq(from_email))
.first::<User_>(conn)
}
pub fn get_profile_url(&self, hostname: &str) -> String {
format!(
"{}://{}/u/{}",
Settings::get().get_protocol_string(),
hostname,
self.name
)
}
pub fn mark_as_updated(conn: &PgConnection, user_id: i32) -> Result<User_, Error> {
diesel::update(user_.find(user_id))
.set((last_refreshed_at.eq(naive_now()),))
.get_result::<Self>(conn)
}
pub fn delete_account(conn: &PgConnection, user_id: i32) -> Result<User_, Error> {
diesel::update(user_.find(user_id))
.set((
preferred_username.eq::<Option<String>>(None),
email.eq::<Option<String>>(None),
matrix_user_id.eq::<Option<String>>(None),
bio.eq::<Option<String>>(None),
deleted.eq(true),
updated.eq(naive_now()),
))
.get_result::<Self>(conn)
}
}
#[cfg(test)]
mod tests {
use crate::{tests::establish_unpooled_connection, user::*, ListingType, SortType};
#[test]
fn test_crud() {
let conn = establish_unpooled_connection();
let new_user = UserForm {
name: "thommy".into(),
preferred_username: None,
password_encrypted: "nope".into(),
email: None,
matrix_user_id: None,
avatar: None,
banner: None,
admin: false,
banned: Some(false),
published: None,
updated: None,
show_nsfw: false,
theme: "browser".into(),
default_sort_type: SortType::Hot as i16,
default_listing_type: ListingType::Subscribed as i16,
lang: "browser".into(),
show_avatars: true,
send_notifications_to_email: false,
actor_id: None,
bio: None,
local: true,
private_key: None,
public_key: None,
last_refreshed_at: None,
};
let inserted_user = User_::create(&conn, &new_user).unwrap();
let expected_user = User_ {
id: inserted_user.id,
name: "thommy".into(),
preferred_username: None,
password_encrypted: "nope".into(),
email: None,
matrix_user_id: None,
avatar: None,
banner: None,
admin: false,
banned: false,
published: inserted_user.published,
updated: None,
show_nsfw: false,
theme: "browser".into(),
default_sort_type: SortType::Hot as i16,
default_listing_type: ListingType::Subscribed as i16,
lang: "browser".into(),
show_avatars: true,
send_notifications_to_email: false,
actor_id: inserted_user.actor_id.to_owned(),
bio: None,
local: true,
private_key: None,
public_key: None,
last_refreshed_at: inserted_user.published,
deleted: false,
};
let read_user = User_::read(&conn, inserted_user.id).unwrap();
let updated_user = User_::update(&conn, inserted_user.id, &new_user).unwrap();
let num_deleted = User_::delete(&conn, inserted_user.id).unwrap();
assert_eq!(expected_user, read_user);
assert_eq!(expected_user, inserted_user);
assert_eq!(expected_user, updated_user);
assert_eq!(1, num_deleted);
}
}

@@ -1,231 +0,0 @@
use crate::{limit_and_offset, MaybeOptional, SortType};
use diesel::{dsl::*, pg::Pg, result::Error, *};
use serde::Serialize;
// The faked schema, since diesel doesn't support views
table! {
user_mention_view (id) {
id -> Int4,
user_mention_id -> Int4,
creator_id -> Int4,
creator_actor_id -> Text,
creator_local -> Bool,
post_id -> Int4,
post_name -> Varchar,
parent_id -> Nullable<Int4>,
content -> Text,
removed -> Bool,
read -> Bool,
published -> Timestamp,
updated -> Nullable<Timestamp>,
deleted -> Bool,
community_id -> Int4,
community_actor_id -> Text,
community_local -> Bool,
community_name -> Varchar,
community_icon -> Nullable<Text>,
banned -> Bool,
banned_from_community -> Bool,
creator_name -> Varchar,
creator_preferred_username -> Nullable<Varchar>,
creator_avatar -> Nullable<Text>,
score -> BigInt,
upvotes -> BigInt,
downvotes -> BigInt,
hot_rank -> Int4,
hot_rank_active -> Int4,
user_id -> Nullable<Int4>,
my_vote -> Nullable<Int4>,
saved -> Nullable<Bool>,
recipient_id -> Int4,
recipient_actor_id -> Text,
recipient_local -> Bool,
}
}
table! {
user_mention_fast_view (id) {
id -> Int4,
user_mention_id -> Int4,
creator_id -> Int4,
creator_actor_id -> Text,
creator_local -> Bool,
post_id -> Int4,
post_name -> Varchar,
parent_id -> Nullable<Int4>,
content -> Text,
removed -> Bool,
read -> Bool,
published -> Timestamp,
updated -> Nullable<Timestamp>,
deleted -> Bool,
community_id -> Int4,
community_actor_id -> Text,
community_local -> Bool,
community_name -> Varchar,
community_icon -> Nullable<Text>,
banned -> Bool,
banned_from_community -> Bool,
creator_name -> Varchar,
creator_preferred_username -> Nullable<Varchar>,
creator_avatar -> Nullable<Text>,
score -> BigInt,
upvotes -> BigInt,
downvotes -> BigInt,
hot_rank -> Int4,
hot_rank_active -> Int4,
user_id -> Nullable<Int4>,
my_vote -> Nullable<Int4>,
saved -> Nullable<Bool>,
recipient_id -> Int4,
recipient_actor_id -> Text,
recipient_local -> Bool,
}
}
#[derive(Queryable, Identifiable, PartialEq, Debug, Serialize, QueryableByName, Clone)]
#[table_name = "user_mention_fast_view"]
pub struct UserMentionView {
pub id: i32,
pub user_mention_id: i32,
pub creator_id: i32,
pub creator_actor_id: String,
pub creator_local: bool,
pub post_id: i32,
pub post_name: String,
pub parent_id: Option<i32>,
pub content: String,
pub removed: bool,
pub read: bool,
pub published: chrono::NaiveDateTime,
pub updated: Option<chrono::NaiveDateTime>,
pub deleted: bool,
pub community_id: i32,
pub community_actor_id: String,
pub community_local: bool,
pub community_name: String,
pub community_icon: Option<String>,
pub banned: bool,
pub banned_from_community: bool,
pub creator_name: String,
pub creator_preferred_username: Option<String>,
pub creator_avatar: Option<String>,
pub score: i64,
pub upvotes: i64,
pub downvotes: i64,
pub hot_rank: i32,
pub hot_rank_active: i32,
pub user_id: Option<i32>,
pub my_vote: Option<i32>,
pub saved: Option<bool>,
pub recipient_id: i32,
pub recipient_actor_id: String,
pub recipient_local: bool,
}
pub struct UserMentionQueryBuilder<'a> {
conn: &'a PgConnection,
query: super::user_mention_view::user_mention_fast_view::BoxedQuery<'a, Pg>,
for_user_id: i32,
sort: &'a SortType,
unread_only: bool,
page: Option<i64>,
limit: Option<i64>,
}
impl<'a> UserMentionQueryBuilder<'a> {
pub fn create(conn: &'a PgConnection, for_user_id: i32) -> Self {
use super::user_mention_view::user_mention_fast_view::dsl::*;
let query = user_mention_fast_view.into_boxed();
UserMentionQueryBuilder {
conn,
query,
for_user_id,
sort: &SortType::New,
unread_only: false,
page: None,
limit: None,
}
}
pub fn sort(mut self, sort: &'a SortType) -> Self {
self.sort = sort;
self
}
pub fn unread_only(mut self, unread_only: bool) -> Self {
self.unread_only = unread_only;
self
}
pub fn page<T: MaybeOptional<i64>>(mut self, page: T) -> Self {
self.page = page.get_optional();
self
}
pub fn limit<T: MaybeOptional<i64>>(mut self, limit: T) -> Self {
self.limit = limit.get_optional();
self
}
pub fn list(self) -> Result<Vec<UserMentionView>, Error> {
use super::user_mention_view::user_mention_fast_view::dsl::*;
let mut query = self.query;
if self.unread_only {
query = query.filter(read.eq(false));
}
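// Restrict to mentions addressed to this user; filtering on user_id selects the rows carrying this user's my_vote / saved state.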
query = query
.filter(user_id.eq(self.for_user_id))
.filter(recipient_id.eq(self.for_user_id));
query = match self.sort {
SortType::Hot => query
.order_by(hot_rank.desc())
.then_order_by(published.desc()),
SortType::Active => query
.order_by(hot_rank_active.desc())
.then_order_by(published.desc()),
SortType::New => query.order_by(published.desc()),
SortType::TopAll => query.order_by(score.desc()),
SortType::TopYear => query
.filter(published.gt(now - 1.years()))
.order_by(score.desc()),
SortType::TopMonth => query
.filter(published.gt(now - 1.months()))
.order_by(score.desc()),
SortType::TopWeek => query
.filter(published.gt(now - 1.weeks()))
.order_by(score.desc()),
SortType::TopDay => query
.filter(published.gt(now - 1.days()))
.order_by(score.desc()),
// _ => query.order_by(published.desc()),
};
let (limit, offset) = limit_and_offset(self.page, self.limit);
query
.limit(limit)
.offset(offset)
.load::<UserMentionView>(self.conn)
}
}
impl UserMentionView {
pub fn read(
conn: &PgConnection,
from_user_mention_id: i32,
from_recipient_id: i32,
) -> Result<Self, Error> {
use super::user_mention_view::user_mention_fast_view::dsl::*;
user_mention_fast_view
.filter(user_mention_id.eq(from_user_mention_id))
.filter(user_id.eq(from_recipient_id))
.first::<Self>(conn)
}
}

@@ -1,279 +0,0 @@
use super::user_view::user_fast::BoxedQuery;
use crate::{fuzzy_search, limit_and_offset, MaybeOptional, SortType};
use diesel::{dsl::*, pg::Pg, result::Error, *};
use serde::Serialize;
table! {
user_view (id) {
id -> Int4,
actor_id -> Text,
name -> Varchar,
preferred_username -> Nullable<Varchar>,
avatar -> Nullable<Text>,
banner -> Nullable<Text>,
email -> Nullable<Text>,
matrix_user_id -> Nullable<Text>,
bio -> Nullable<Text>,
local -> Bool,
admin -> Bool,
banned -> Bool,
show_avatars -> Bool,
send_notifications_to_email -> Bool,
published -> Timestamp,
number_of_posts -> BigInt,
post_score -> BigInt,
number_of_comments -> BigInt,
comment_score -> BigInt,
}
}
table! {
user_fast (id) {
id -> Int4,
actor_id -> Text,
name -> Varchar,
preferred_username -> Nullable<Varchar>,
avatar -> Nullable<Text>,
banner -> Nullable<Text>,
email -> Nullable<Text>,
matrix_user_id -> Nullable<Text>,
bio -> Nullable<Text>,
local -> Bool,
admin -> Bool,
banned -> Bool,
show_avatars -> Bool,
send_notifications_to_email -> Bool,
published -> Timestamp,
number_of_posts -> BigInt,
post_score -> BigInt,
number_of_comments -> BigInt,
comment_score -> BigInt,
}
}
#[derive(Queryable, Identifiable, PartialEq, Debug, Serialize, QueryableByName, Clone)]
#[table_name = "user_fast"]
pub struct UserView {
pub id: i32,
pub actor_id: String,
pub name: String,
pub preferred_username: Option<String>,
pub avatar: Option<String>,
pub banner: Option<String>,
pub email: Option<String>, // TODO this shouldn't be in this view
pub matrix_user_id: Option<String>,
pub bio: Option<String>,
pub local: bool,
pub admin: bool,
pub banned: bool,
pub show_avatars: bool, // TODO this is a setting, probably doesn't need to be here
pub send_notifications_to_email: bool, // TODO also never used
pub published: chrono::NaiveDateTime,
pub number_of_posts: i64,
pub post_score: i64,
pub number_of_comments: i64,
pub comment_score: i64,
}
pub struct UserQueryBuilder<'a> {
conn: &'a PgConnection,
query: BoxedQuery<'a, Pg>,
sort: &'a SortType,
page: Option<i64>,
limit: Option<i64>,
}
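// Illustrative usage (the search term is hypothetical):
// UserQueryBuilder::create(&conn)
//   .sort(&SortType::TopAll)
//   .search_term("admin".to_string())
//   .list()?;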
impl<'a> UserQueryBuilder<'a> {
pub fn create(conn: &'a PgConnection) -> Self {
use super::user_view::user_fast::dsl::*;
let query = user_fast.into_boxed();
UserQueryBuilder {
conn,
query,
sort: &SortType::Hot,
page: None,
limit: None,
}
}
pub fn sort(mut self, sort: &'a SortType) -> Self {
self.sort = sort;
self
}
pub fn search_term<T: MaybeOptional<String>>(mut self, search_term: T) -> Self {
use super::user_view::user_fast::dsl::*;
if let Some(search_term) = search_term.get_optional() {
self.query = self.query.filter(name.ilike(fuzzy_search(&search_term)));
}
self
}
pub fn page<T: MaybeOptional<i64>>(mut self, page: T) -> Self {
self.page = page.get_optional();
self
}
pub fn limit<T: MaybeOptional<i64>>(mut self, limit: T) -> Self {
self.limit = limit.get_optional();
self
}
pub fn list(self) -> Result<Vec<UserView>, Error> {
use super::user_view::user_fast::dsl::*;
use diesel::sql_types::{Nullable, Text};
let mut query = self.query;
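// Note: Hot and Active are equivalent for users; both order by comment score, then by recency.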
query = match self.sort {
SortType::Hot => query
.order_by(comment_score.desc())
.then_order_by(published.desc()),
SortType::Active => query
.order_by(comment_score.desc())
.then_order_by(published.desc()),
SortType::New => query.order_by(published.desc()),
SortType::TopAll => query.order_by(comment_score.desc()),
SortType::TopYear => query
.filter(published.gt(now - 1.years()))
.order_by(comment_score.desc()),
SortType::TopMonth => query
.filter(published.gt(now - 1.months()))
.order_by(comment_score.desc()),
SortType::TopWeek => query
.filter(published.gt(now - 1.weeks()))
.order_by(comment_score.desc()),
SortType::TopDay => query
.filter(published.gt(now - 1.days()))
.order_by(comment_score.desc()),
};
let (limit, offset) = limit_and_offset(self.page, self.limit);
query = query.limit(limit).offset(offset);
// The select is necessary here so emails are not returned; an empty string literal is selected in place of the email column.
query = query.select((
id,
actor_id,
name,
preferred_username,
avatar,
banner,
"".into_sql::<Nullable<Text>>(),
matrix_user_id,
bio,
local,
admin,
banned,
show_avatars,
send_notifications_to_email,
published,
number_of_posts,
post_score,
number_of_comments,
comment_score,
));
query.load::<UserView>(self.conn)
}
}
impl UserView {
pub fn admins(conn: &PgConnection) -> Result<Vec<Self>, Error> {
use super::user_view::user_fast::dsl::*;
use diesel::sql_types::{Nullable, Text};
user_fast
// The select is necessary here to not get back emails
.select((
id,
actor_id,
name,
preferred_username,
avatar,
banner,
"".into_sql::<Nullable<Text>>(),
matrix_user_id,
bio,
local,
admin,
banned,
show_avatars,
send_notifications_to_email,
published,
number_of_posts,
post_score,
number_of_comments,
comment_score,
))
.filter(admin.eq(true))
.order_by(published)
.load::<Self>(conn)
}
pub fn banned(conn: &PgConnection) -> Result<Vec<Self>, Error> {
use super::user_view::user_fast::dsl::*;
use diesel::sql_types::{Nullable, Text};
user_fast
.select((
id,
actor_id,
name,
preferred_username,
avatar,
banner,
"".into_sql::<Nullable<Text>>(),
matrix_user_id,
bio,
local,
admin,
banned,
show_avatars,
send_notifications_to_email,
published,
number_of_posts,
post_score,
number_of_comments,
comment_score,
))
.filter(banned.eq(true))
.load::<Self>(conn)
}
// WARNING!!! This method WILL return sensitive user information and should only be called
// if the user requesting these details is also the authenticated user.
// Please use get_user_secure to obtain user rows in most cases.
pub fn get_user_dangerous(conn: &PgConnection, user_id: i32) -> Result<Self, Error> {
use super::user_view::user_fast::dsl::*;
user_fast.find(user_id).first::<Self>(conn)
}
pub fn get_user_secure(conn: &PgConnection, user_id: i32) -> Result<Self, Error> {
use super::user_view::user_fast::dsl::*;
use diesel::sql_types::{Nullable, Text};
user_fast
.select((
id,
actor_id,
name,
preferred_username,
avatar,
banner,
"".into_sql::<Nullable<Text>>(),
matrix_user_id,
bio,
local,
admin,
banned,
show_avatars,
send_notifications_to_email,
published,
number_of_posts,
post_score,
number_of_comments,
comment_score,
))
.find(user_id)
.first::<Self>(conn)
}
}

Some files were not shown because too many files have changed in this diff.