Compare commits

..

No commits in common. "main" and "v0.10.0" have entirely different histories.

356 changed files with 13167 additions and 24079 deletions

View file

@ -1,7 +1,6 @@
# https://github.com/rust-lang/cargo/issues/5034#issuecomment-927105016
[target.'cfg(feature = "cargo-clippy")']
rustflags = [
"-Aclippy::derive_partial_eq_without_eq",
"-Aclippy::len_zero",
"-Aclippy::let_and_return",
"-Aclippy::map_entry",
@ -9,5 +8,4 @@ rustflags = [
"-Aclippy::redundant_field_names",
"-Aclippy::unused_unit",
"-Aclippy::enum_variant_names",
"-Aclippy::format_push_string",
]

View file

@ -1,21 +0,0 @@
# flyctl launch added from .gitignore
**/.env.local
**/config.yaml
target
# other things
docs/*
fedimovies-*
scripts/*
src/*
# flyctl launch added from .idea/.gitignore
# Default ignored files
.idea/shelf
.idea/workspace.xml
# Editor-based HTTP Client requests
.idea/httpRequests
# Datasource local storage ignored files
.idea/dataSources
.idea/dataSources.local.xml
fly.toml

View file

@ -13,9 +13,6 @@ indent_size = 2
[*.yaml]
indent_size = 2
[*.toml]
indent_size = 2
[*.md]
indent_size = 2
max_line_length = off

4
.gitignore vendored
View file

@ -1,9 +1,5 @@
.env.local
config.yaml
/secret/*
/files/*
!/files/.gitkeep
/build/*
!/build/.gitkeep
/target
fly.toml

View file

@ -1,63 +0,0 @@
matrix:
RUST: [stable]
pipeline:
check-formatting:
image: rust
when:
branch: [ main ]
path:
include:
- .woodpecker.yml
- src/**/*.rs
- fedimovies-cli/**/*.rs
- fedimovies-config/**/*.rs
- fedimovies-models/**/*.rs
- fedimovies-utils/**/*.rs
environment:
- CARGO_TERM_COLOR=always
- CARGO_REGISTRIES_CRATES_IO_PROTOCOL=sparse
commands:
- rustup default $RUST
- rustup component add rustfmt
- cargo fmt --all -- --check
check-style:
image: rust
when:
branch: [ main ]
path:
include:
- .woodpecker.yml
- src/**/*.rs
- fedimovies-cli/**/*.rs
- fedimovies-config/**/*.rs
- fedimovies-models/**/*.rs
- fedimovies-utils/**/*.rs
environment:
- CARGO_TERM_COLOR=always
- CARGO_REGISTRIES_CRATES_IO_PROTOCOL=sparse
commands:
- rustup default $RUST
- rustup component add clippy
- cargo clippy --all-targets --all-features -- -D warnings
run-tests:
image: rust
when:
branch: [ main ]
path:
include:
- .woodpecker.yml
- src/**/*.rs
- fedimovies-cli/**/*.rs
- fedimovies-config/**/*.rs
- fedimovies-models/**/*.rs
- fedimovies-utils/**/*.rs
environment:
- CARGO_TERM_COLOR=always
- CARGO_REGISTRIES_CRATES_IO_PROTOCOL=sparse
commands:
- rustup default $RUST
- cargo test --all -- --nocapture

View file

@ -1,388 +0,0 @@
# Changelog
All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
## [Unreleased]
## [1.22.0] - 2023-04-22
### Added
- Added support for content warnings.
- Support integrity proofs with `DataIntegrityProof` type.
- Add `federation.i2p_proxy_url` configuration parameter.
### Changed
- Ignore errors when importing activities from outbox.
- Make activity limit in outbox fetcher adjustable.
- Updated actix to latest version. MSRV changed to 1.57.
- Add replies and reposts to outbox collection.
### Fixed
- Make `/api/v1/accounts/{account_id}/follow` work with form-data.
- Make `onion_proxy_url` override `proxy_url` setting if request target is onion.
## [1.21.0] - 2023-04-12
### Added
- Added `create-user` command.
- Added `read-outbox` command.
### Changed
- Added emoji count check to profile data validator.
- Check mention and link counts when creating post.
- Re-fetch object if `attributedTo` value doesn't match `actor` of `Create` activity.
- Added actor validation to `Update(Note)` and `Undo(Follow)` handlers.
### Fixed
- Fixed database query error in `Create` activity handler.
## [1.20.0] - 2023-04-07
### Added
- Support calling `/api/v1/accounts/search` with `resolve` parameter.
- Created `/api/v1/accounts/aliases/all` API endpoint.
- Created API endpoint for adding aliases.
- Populate `alsoKnownAs` property on actor object with declared aliases.
- Support account migration from Mastodon.
- Created API endpoint for managing client configurations.
- Reject unsolicited public posts.
### Changed
- Increase maximum number of custom emojis per post to 50.
- Validate actor aliases before saving into database.
- Process incoming `Move()` activities in background.
- Allow custom emojis with `image/webp` media type.
- Increase object ID size limit to 2000 chars.
- Increase fetcher timeout to 15 seconds when processing search queries.
### Fixed
- Added missing `CHECK` constraints to database tables.
- Validate object ID length before saving post to database.
- Validate emoji name length before saving to database.
## [1.19.1] - 2023-03-31
### Changed
- Limit number of mentions and links in remote posts.
### Fixed
- Process queued background jobs before re-trying stalled.
- Remove activity from queue if handler times out.
- Order attachments by creation date when new post is created.
## [1.19.0] - 2023-03-30
### Added
- Added `prune-remote-emojis` command.
- Prune remote emojis in background.
- Added `limits.media.emoji_size_limit` configuration parameter.
- Added `federation.fetcher_timeout` and `federation.deliverer_timeout` configuration parameters.
### Changed
- Allow emoji names containing hyphens.
- Increased remote emoji size limit to 500 kB.
- Set fetcher timeout to 5 seconds when processing search queries.
### Fixed
- Fixed error in emoji update SQL query.
- Restart stalled background jobs.
- Order attachments by creation date.
- Don't reopen monero wallet on each subscription monitor run.
### Security
- Updated markdown parser to latest version.
## [1.18.0] - 2023-03-21
### Added
- Added `fep-e232` feature flag (disabled by default).
- Added `account_index` parameter to Monero configuration.
- Added `/api/v1/instance/peers` API endpoint.
- Added `federation.enabled` configuration parameter that can be used to disable federation.
### Changed
- Documented valid role names for `set-role` command.
- Granted `delete_any_post` and `delete_any_profile` permissions to admin role.
- Updated profile page URL template to match mitra-web.
### Fixed
- Make webclient-to-object redirects work for remote profiles and posts.
- Added webclient redirection rule for `/@username` routes.
- Don't allow migration if user doesn't have identity proofs.
## [1.17.0] - 2023-03-15
### Added
- Enabled audio and video uploads.
- Added `audio/ogg` and `audio/x-wav` to the list of supported media types.
### Changed
- Save latest ethereum block number to database instead of file.
- Removed hardcoded upload size limit.
### Deprecated
- Reading ethereum block number from `current_block` file.
### Removed
- Disabled post tokenization (can be re-enabled with `ethereum-extras` feature).
- Removed ability to switch from Ethereum devnet to another chain without resetting subscriptions.
### Fixed
- Allow `!` after hashtags and mentions.
- Ignore emojis with non-unique names in remote posts.
## [1.16.0] - 2023-03-08
### Added
- Allow to add notes to generated invite codes.
- Added `registration.default_role` configuration option.
- Save emojis attached to actor objects.
- Added `emojis` field to Mastodon API Account entity.
- Support audio attachments.
- Added CLI command for viewing unreachable actors.
- Implemented NodeInfo 2.1.
- Added `federation.onion_proxy_url` configuration parameter (enables proxy for requests to `.onion` domains).
### Changed
- Use .jpg extension for files with image/jpeg media type.
### Deprecated
- Deprecated `default_role_read_only_user` configuration option (replaced by `registration.default_role`).
## [1.15.0] - 2023-02-27
### Added
- Set fetcher timeout to 3 minutes.
- Set deliverer timeout to 30 seconds.
- Added `federation` parameter group to configuration.
- Add empty `spoiler_text` property to Mastodon API Status object.
- Added `error` and `error_description` fields to Mastodon API error responses.
- Store information about failed activity deliveries in database.
- Added `/api/v1/accounts/{account_id}/aliases` API endpoint.
### Changed
- Put activities generated by CLI commands in a queue instead of immediately sending them.
- Changed path of user's Atom feed to `/feeds/users/{username}`.
- Increase number of delivery attempts and increase intervals between them.
### Deprecated
- Deprecated `proxy_url` configuration parameter (replaced by `federation.proxy_url`).
- Deprecated Atom feeds at `/feeds/{username}`.
- Deprecated `message` field in Mastodon API error response.
### Fixed
- Prevent `delete-extraneous-posts` command from removing locally-linked posts.
- Make webfinger response compatible with GNU Social account lookup.
- Prefer `Group` actor when doing webfinger query on Lemmy server.
- Fetch missing profiles before doing follower migration.
- Follow FEP-e232 links when importing post.
## [1.14.0] - 2023-02-22
### Added
- Added `/api/v1/apps` endpoint.
- Added OAuth authorization page.
- Support `authorization_code` OAuth grant type.
- Documented `http_cors_allowlist` configuration parameter.
- Added `/api/v1/statuses/{status_id}/thread` API endpoint (replaces `/api/v1/statuses/{status_id}/context`).
- Accept webfinger requests where `resource` is instance actor ID.
- Added `proxy_set_header X-Forwarded-Proto $scheme;` directive to nginx config example.
- Add `Content-Security-Policy` and `X-Content-Type-Options` headers to all responses.
### Changed
- Allow `instance_uri` configuration value to contain URI scheme.
- Changed `/api/v1/statuses/{status_id}/context` response format to match Mastodon API.
- Changed status code of `/api/v1/statuses` response to 200 to match Mastodon API.
- Removed `add_header` directives for `Content-Security-Policy` and `X-Content-Type-Options` headers from nginx config example.
### Deprecated
- Deprecated protocol guessing on incoming requests (use `X-Forwarded-Proto` header).
### Fixed
- Fixed actor object JSON-LD validation errors.
- Fixed activity JSON-LD validation errors.
- Make media URLs in Mastodon API responses relative to current origin.
## [1.13.1] - 2023-02-09
### Fixed
- Fixed permission error on subscription settings update.
## [1.13.0] - 2023-02-06
### Added
- Replace post attachments and other related objects when processing `Update(Note)` activity.
- Append attachment URL to post content if attachment size exceeds limit.
- Added `/api/v1/custom_emojis` endpoint.
- Added `limits` parameter group to configuration.
- Made file size limit adjustable with `limits.media.file_size_limit` configuration option.
- Added `limits.posts.character_limit` configuration parameter (replaces `post_character_limit`).
- Implemented automatic pruning of remote posts and empty profiles (disabled by default).
### Changed
- Use proof suites with prefix `Mitra`.
- Added `https://w3id.org/security/data-integrity/v1` to JSON-LD context.
- Return `202 Accepted` when activity is accepted by inbox endpoint.
- Ignore forwarded `Like` activities.
- Set 10 minute timeout on background job that processes incoming activities.
- Use "warn" log level for delivery errors.
- Don't allow read-only users to manage subscriptions.
### Deprecated
- Deprecated `post_character_limit` configuration option.
### Fixed
- Change max body size in nginx example config to match app limit.
- Don't create invoice if recipient can't accept subscription payments.
- Ignore `Announce(Delete)` activities.
## [1.12.0] - 2023-01-26
### Added
- Added `approval_required` and `invites_enabled` flags to `/api/v1/instance` endpoint response.
- Added `registration.type` configuration option (replaces `registrations_open`).
- Implemented roles & permissions.
- Added "read-only user" role.
- Added configuration option for automatic assigning of "read-only user" role after registration.
- Added `set-role` command.
### Changed
- Don't retry activity if fetcher recursion limit has been reached.
### Deprecated
- `registrations_open` configuration option.
### Removed
- Dropped support for `blockchain` configuration parameter.
### Fixed
- Added missing `<link rel="self">` element to Atom feeds.
- Added missing `<link rel="alternate">` element to Atom feed entries.
## [1.11.0] - 2023-01-23
### Added
- Save sizes of media attachments and other files to database.
- Added `import-emoji` command.
- Added support for emoji shortcodes.
- Allowed custom emojis with `image/apng` media type.
### Changed
- Make `delete-emoji` command accept emoji name and hostname instead of ID.
- Replaced client-side tag URLs with collection IDs.
### Security
- Validate emoji name before saving.
## [1.10.0] - 2023-01-18
### Added
- Added `/api/v1/settings/move_followers` API endpoint (replaces `/api/v1/accounts/move_followers`).
- Added `/api/v1/settings/import_follows` API endpoint.
- Validation of Monero subscription payout address.
- Accept webfinger requests where `resource` is actor ID.
- Added support for `as:Public` and `Public` audience identifiers.
- Displaying custom emojis.
### Changed
- Save downloaded media as "unknown" if its media type is not supported.
- Use `mediaType` property value to determine file extension when saving downloaded media.
- Added `mediaType` property to images in actor object.
- Prevent `delete-extraneous-posts` command from deleting post if there's a recent reply or repost.
- Changed max actor image size to 5 MB.
### Removed
- `/api/v1/accounts/move_followers` API endpoint.
### Fixed
- Don't ignore `Delete(Person)` verification errors if database error subtype is not `NotFound`.
- Don't stop activity processing on invalid local mentions.
- Accept actor objects where `attachment` property value is not an array.
- Don't download HTML pages attached by GNU Social.
- Ignore `Like()` activity if local post doesn't exist.
- Fixed `.well-known` paths returning `400 Bad Request` errors.
## [1.9.0] - 2023-01-08
### Added
- Added `/api/v1/accounts/lookup` Mastodon API endpoint.
- Implemented activity delivery queue.
- Started to keep track of unreachable actors.
- Added `configuration` object to response of `/api/v1/instance` endpoint.
- Save media types of uploaded avatar and banner images.
- Support for `MitraJcsRsaSignature2022` and `MitraJcsEip191Signature2022` signature suites.
### Changed
- Updated installation instructions, default mitra config and recommended nginx config.
- Limited the number of requests made during the processing of a thread.
- Limited the number of media files that can be attached to a post.
### Deprecated
- Deprecated `post_character_limit` property in `/api/v1/instance` response.
- Avatar and banner uploads without media type via `/api/v1/accounts/update_credentials`.
- `JcsRsaSignature2022` and `JcsEip191Signature2022` signature suites.
### Removed
- Removed ability to upload non-images using `/api/v1/media` endpoint.
### Fixed
- Fixed post and profile page redirections.
- Fixed federation with GNU Social.

View file

@ -1,40 +0,0 @@
# Contributing
## General
Mitra is developed according to these principles:
- Resilience. The primary function of Mitra is delivery of messages from publisher to the audience. It should be able to perform this task even in adversarial conditions.
- Self-hosting. If some feature depends on other service (such as blockchain node), that service must be free / open source software and it must be able to run on affordable hardware. No dependencies on proprietary services allowed.
- Low system requirements. The default configuration should work smoothly on a low-end VPS.
- Privacy. In its default configuration, Mitra shouldn't require any personal info (other than username / public key) or collect usage statistics. It also shouldn't reveal more information about the user than necessary.
## Before you start
If you want to propose a change, please create an [issue](https://codeberg.org/silverpill/mitra/issues) first and explain what you want to do (unless it's something trivial).
## Code
Simplicity is more important than minor performance improvements.
Avoid advanced language features unless there's a good reason to use them. The code should be comprehensible even to a Rust beginner.
### MSRV
The MSRV must not be greater than the version of [rustc package](https://tracker.debian.org/pkg/rustc) in Debian testing.
### Dependencies
Try to minimize the number of dependencies.
Prefer libraries maintained by volunteers over those developed by for-profit companies.
### Code style
Run `cargo clippy` to check code automatically. Try to follow the existing style when adding new features.
### Commits
Commits should be atomic (the tests should pass) and not too big. Commit messages should be informative.
For any notable change there should be an entry in [CHANGELOG.md](./CHANGELOG.md).

2428
Cargo.lock generated

File diff suppressed because it is too large Load diff

View file

@ -1,85 +1,91 @@
[package]
name = "fedimovies"
version = "1.22.0"
description = "Movies reviews and ratings for the fediverse"
name = "mitra"
version = "0.10.0"
description = "Mitra backend"
license = "AGPL-3.0"
edition = "2021"
rust-version = "1.68"
edition = "2018"
rust-version = "1.54"
publish = false
default-run = "fedimovies"
[workspace]
members = [
".",
"fedimovies-cli",
"fedimovies-config",
"fedimovies-models",
"fedimovies-utils",
]
default-members = [
".",
"fedimovies-cli",
"fedimovies-config",
"fedimovies-models",
"fedimovies-utils",
]
default-run = "mitra"
[dependencies]
fedimovies-config = { path = "fedimovies-config" }
fedimovies-models = { path = "fedimovies-models" }
fedimovies-utils = { path = "fedimovies-utils" }
# Used to handle incoming HTTP requests
actix-cors = "0.6.4"
actix-files = "0.6.2"
actix-web = "4.3.1"
actix-web-httpauth = "0.8.0"
actix-cors = "0.6.1"
actix-files = "0.6.0"
actix-web = "4.0.1"
actix-web-httpauth = "0.6.0"
# Used for HTML sanitization
ammonia = "3.2.0"
# Used for catching errors
anyhow = "1.0.58"
# Used for working with RSA keys, HTTP signatures and file uploads
base64 = "0.13.0"
# Used for working with dates
chrono = { version = "0.4.23", default-features = false, features = ["std", "serde"] }
chrono = { version = "0.4.19", features = ["serde"] }
# Used to build admin CLI tool
clap = { version = "3.1.8", default-features = false, features = ["std", "derive"] }
# Used for pooling database connections
deadpool = "0.9.2"
deadpool-postgres = { version = "0.10.2", default-features = false }
# Used to read .env files
dotenv = "0.15.0"
# Used to work with hexadecimal strings
hex = { version = "0.4.3", features = ["serde"] }
# Used for logging
log = "0.4.14"
log = { version = "0.4.14", features = ["serde"] }
env_logger = { version = "0.9.0", default-features = false }
# Used to verify minisign signatures
ed25519-dalek = "1.0.1"
ed25519 = "1.5.3"
blake2 = "0.10.5"
# Used to guess media type of a file
mime_guess = "2.0.3"
mime-sniffer = "0.1.2"
# Used to determine the number of CPUs on the system
num_cpus = "1.13.0"
# Used for working with regular expressions
regex = "1.6.0"
regex = "1.5.4"
# Used to generate random numbers
rand = "0.8.4"
# Used for managing database migrations
refinery = { version = "0.8.4", features = ["tokio-postgres"] }
# Used for making async HTTP requests
reqwest = { version = "0.11.13", features = ["json", "multipart", "socks"] }
reqwest = { version = "0.11.10", features = ["json", "multipart"] }
# Used for working with RSA keys
rsa = "0.5.0"
pem = "1.0.2"
# Used for hashing passwords
rust-argon2 = "0.8.3"
# Used for working with ethereum keys
secp256k1 = { version = "0.21.3", features = ["rand", "rand-std"] }
# Used for serialization/deserialization
serde = { version = "1.0.136", features = ["derive"] }
serde_json = "1.0.89"
serde_json = "1.0"
# Used to parse config file
serde_yaml = "0.8.17"
# Used to calculate SHA2 hashes
sha2 = "0.9.5"
# Used to verify EIP-4361 signatures
siwe = "0.4.0"
siwe = "0.3.0"
# Used for creating error types
thiserror = "1.0.37"
thiserror = "1.0.24"
# Async runtime
tokio = { version = "=1.20.4", features = ["macros"] }
tokio = { version = "1.17.0", features = ["macros"] }
# Used for working with Postgresql database
tokio-postgres = { version = "0.7.5", features = ["with-chrono-0_4", "with-uuid-0_8", "with-serde_json-1"] }
postgres-types = { version = "0.2.2", features = ["derive", "with-chrono-0_4", "with-uuid-0_8", "with-serde_json-1"] }
postgres-protocol = "0.6.1"
# Used to construct PostgreSQL queries
postgres_query = { git = "https://github.com/nolanderc/rust-postgres-query", rev = "b4422051c8a31fbba4a35f88004c1cefb1878dd5" }
postgres_query_macro = { git = "https://github.com/nolanderc/rust-postgres-query", rev = "b4422051c8a31fbba4a35f88004c1cefb1878dd5" }
# Used to work with URLs
url = "2.2.2"
# Used to generate lexicographically sortable IDs
ulid = { version = "0.4.1", features = ["uuid"] }
# Used to work with UUIDs
uuid = { version = "1.1.2", features = ["serde", "v4"] }
uuid = { version = "0.8.2", features = ["serde", "v4"] }
# Used to query ethereum node
web3 = { version = "0.18.0", default-features = false, features = ["http", "http-tls", "signing"] }
[dev-dependencies]
fedimovies-config = { path = "fedimovies-config", features = ["test-utils"] }
fedimovies-models = { path = "fedimovies-models", features = ["test-utils"] }
fedimovies-utils = { path = "fedimovies-utils", features = ["test-utils"] }
serial_test = "0.7.0"
serial_test = "0.5.1"
[features]
production = ["fedimovies-config/production"]
production = []

View file

@ -13,10 +13,8 @@ The following activities are supported:
- Undo(Like)
- Announce(Note)
- Undo(Announce)
- Update(Note)
- Follow(Person)
- Update(Person)
- Move(Person)
- Delete(Person)
- Add(Person)
- Remove(Person)
@ -29,67 +27,6 @@ And these additional standards:
Activities are implemented in way that is compatible with Pleroma, Mastodon and other popular ActivityPub servers.
Supported FEPs:
- [FEP-f1d5: NodeInfo in Fediverse Software](https://codeberg.org/fediverse/fep/src/branch/main/feps/fep-f1d5.md)
- [FEP-e232: Object Links](https://codeberg.org/fediverse/fep/src/branch/main/feps/fep-e232.md)
- [FEP-8b32: Object Integrity Proofs](https://codeberg.org/fediverse/fep/src/branch/main/feps/fep-8b32.md)
## Object integrity proofs
All outgoing activities are signed with actor's key in accordance with [FEP-8b32](https://codeberg.org/fediverse/fep/src/branch/main/feps/fep-8b32.md) document.
Example:
```json
{
"@context": [
"https://www.w3.org/ns/activitystreams",
"https://w3id.org/security/data-integrity/v1"
],
"actor": "https://example.com/users/alice",
"cc": [],
"id": "https://example.com/objects/0185f5f8-10b5-1b69-f45e-25f06792f411",
"object": "https://example.net/users/bob/posts/141892712081205472",
"proof": {
"created": "2023-01-28T01:22:40.183273595Z",
"proofPurpose": "assertionMethod",
"proofValue": "z5djAdMSrV...",
"type": "MitraJcsRsaSignature2022",
"verificationMethod": "https://example.com/users/alice#main-key"
},
"to": [
"https://example.net/users/bob",
"https://www.w3.org/ns/activitystreams#Public"
],
"type":"Like"
}
```
### Supported proof suites
#### MitraJcsRsaSignature2022
Canonicalization algorithm: JCS
Hashing algorithm: SHA-256
Signature algorithm: RSASSA-PKCS1-v1_5
#### MitraJcsEip191Signature2022
Canonicalization algorithm: JCS
Hashing algorithm: KECCAK-256 (EIP-191)
Signature algorithm: ECDSA (EIP-191)
#### MitraJcsEd25519Signature2022
Canonicalization algorithm: JCS
Hashing algorithm: BLAKE2b-512
Signature algorithm: EdDSA
## Custom emojis
Custom emojis are implemented as described in Mastodon documentation: https://docs.joinmastodon.org/spec/activitypub/#emoji.
## Profile extensions
### Cryptocurrency addresses
@ -117,35 +54,6 @@ Identity proofs are represented as attachments of `IdentityProof` type:
}
```
Supported proof types:
- EIP-191 (Ethereum personal signatures)
- [Minisign](https://jedisct1.github.io/minisign/)
[FEP-c390](https://codeberg.org/fediverse/fep/src/branch/main/feps/fep-c390.md) identity proofs are not supported yet.
## Account migrations
After registering an account its owner can upload the list of followers and start the migration process. The server then sends `Move` activity to each follower:
```json
{
"@context": [
"https://www.w3.org/ns/activitystreams"
],
"actor": "https://server2.com/users/alice",
"id": "https://server2.com/activities/00000000-0000-0000-0000-000000000001",
"object": "https://server1.com/users/alice",
"target": "https://server2.com/users/alice",
"to": [
"https://example.com/users/bob"
],
"type": "Move"
}
```
Where `object` is an ID of old account and `target` is an ID of new account. Actors identified by `object` and `target` properties must have at least one identity key in common to be considered aliases. Upon receipt of such activity, actors that follow `object` should un-follow it and follow `target` instead.
## Subscription events
Local actor profiles have `subscribers` property which points to the collection of actor's paid subscribers.

192
README.md
View file

@ -1,39 +1,33 @@
# FediMovies
[![status-badge](https://ci.caric.io/api/badges/FediMovies/fedimovies/status.svg)](https://ci.caric.io/FediMovies/fedimovies)
# Mitra
Lively federated movies reviews platform.
Federated social network with smart contracts.
Built on [ActivityPub](https://www.w3.org/TR/activitypub/) protocol, self-hosted, lightweight. Part of the [Fediverse](https://en.wikipedia.org/wiki/Fediverse).
Features:
Unique features enabled by blockchain integration:
- Micro-blogging service (includes support for quote posts, custom emojis and more).
- Mastodon API.
- Account migrations (from one server to another). Identity can be detached from the server.
- Federation over Tor.
- [Sign-in with a wallet](https://eips.ethereum.org/EIPS/eip-4361). A combination of domain-based and key-based identity.
- Donations.
- Recurring payments. Subscribers-only posts.
- Token-gated registration (can be used to verify membership in some group or to stop bots).
- Converting posts into NFTs.
## Instances
Currently only Ethereum and other EVM-compatible blockchains are supported.
- [FediList](http://demo.fedilist.com/instance?software=fedimovies)
- [Fediverse Observer](https://fedimovies.fediverse.observer/list)
Smart contracts repo: https://codeberg.org/silverpill/mitra-contracts
Demo instance: https://nullpointer.social/ ([invite-only](https://nullpointer.social/about))
Frontend repo: https://codeberg.org/silverpill/mitra-web
## Code
Server: https://code.caric.io/reef/reef (this repo)
Web client:
Demo instance: https://public.mitra.social/ (invite-only)
Matrix chat: [#mitra:halogen.city](https://matrix.to/#/#mitra:halogen.city)
## Requirements
- Rust 1.57+ (when building from source)
- Rust 1.54+ (when building from source)
- PostgreSQL 12+
Optional:
- IPFS node (see [guide](./docs/ipfs.md))
- IPFS node (optional, see [guide](./docs/ipfs.md))
- Ethereum node (optional)
## Installation
@ -41,107 +35,89 @@ Optional:
Run:
```shell
```
cargo build --release --features production
```
This command will produce two binaries in `target/release` directory, `fedimovies` and `fedimoviesctl`.
This command will produce two binaries in `target/release` directory, `mitra` and `mitractl`.
Install PostgreSQL and create the database:
Install PostgreSQL and create the database.
```sql
CREATE USER fedimovies WITH PASSWORD 'fedimovies';
CREATE DATABASE fedimovies OWNER fedimovies;
Create configuration file by copying `contrib/mitra_config.yaml` and configure the instance. Default config file path is `/etc/mitra/config.yaml`, but it can be changed using `CONFIG_PATH` environment variable.
Start Mitra:
```
./mitra
```
Create configuration file by copying `contrib/fedimovies_config.yaml` and configure the instance. Default config file path is `/etc/fedimovies/config.yaml`, but it can be changed using `CONFIG_PATH` environment variable.
An HTTP server will be needed to handle HTTPS requests and serve the frontend. See the example of [nginx configuration file](./contrib/mitra.nginx).
Put any static files into the directory specified in configuration file. Building instructions for `fedimovies-web` frontend can be found at https://code.caric.io/FediMovies/fedimovies#project-setup.
Building instructions for `mitra-web` frontend can be found at https://codeberg.org/silverpill/mitra-web#project-setup.
Start Fedimovies:
```shell
./fedimovies
```
An HTTP server will be needed to handle HTTPS requests. See the example of [nginx configuration file](./contrib/fedimovies.nginx).
To run Fedimovies as a systemd service, check out the [systemd unit file example](./contrib/fedimovies.service).
To run Mitra as a systemd service, check out the [systemd unit file example](./contrib/mitra.service).
### Debian package
Download and install Fedimovies package:
Download and install Mitra package:
```shell
dpkg -i fedimovies.deb
```
dpkg -i mitra.deb
```
Install PostgreSQL and create the database:
Install PostgreSQL and create the database. Open configuration file `/etc/mitra/config.yaml` and configure the instance.
```sql
CREATE USER fedimovies WITH PASSWORD 'fedimovies';
CREATE DATABASE fedimovies OWNER fedimovies;
Start Mitra:
```
systemctl start mitra
```
Open configuration file `/etc/fedimovies/config.yaml` and configure the instance.
Start Fedimovies:
```shell
systemctl start fedimovies
```
An HTTP server will be needed to handle HTTPS requests. See the example of [nginx configuration file](./contrib/fedimovies.nginx).
### Tor federation
See [guide](./docs/onion.md).
An HTTP server will be needed to handle HTTPS requests and serve the frontend. See the example of [nginx configuration file](./contrib/mitra.nginx).
## Development
See [CONTRIBUTING.md](./CONTRIBUTING.md)
### Create database
### Start database server
```shell
docker-compose up -d
```
docker-compose up
```
Test connection:
```shell
psql -h localhost -p 55432 -U fedimovies fedimovies
```
psql -h localhost -p 55432 -U mitra mitra
```
### Run web service
Create config file, adjust settings if needed:
```shell
```
cp config.yaml.example config.yaml
```
Compile and run service:
```shell
```
cargo run
```
### Run CLI
```shell
cargo run --bin fedimoviesctl
```
cargo run --bin mitractl
```
### Run linter
```shell
```
cargo clippy
```
### Run tests
```shell
```
cargo test
```
@ -151,16 +127,80 @@ See [FEDERATION.md](./FEDERATION.md)
## Client API
Most methods are similar to Mastodon API, but Fedimovies is not fully compatible.
### Mastodon API
[OpenAPI spec](./docs/openapi.yaml)
Most methods are similar to Mastodon API, but Mitra is not fully compatible.
[OpenAPI spec](./docs/openapi.yaml) (incomplete)
## CLI
`fedimoviesctl` is a command-line tool for performing instance maintenance.
Commands must be run as the same user as the web service:
[Documentation](./docs/fedimoviesctl.md)
```
su mitra -c "mitractl generate-invite-code"
```
### Commands
Generate RSA private key:
```
mitractl generate-rsa-key
```
Generate invite code:
```
mitractl generate-invite-code
```
List generated invites:
```
mitractl list-invite-codes
```
Delete profile:
```
mitractl delete-profile -i 55a3005f-f293-4168-ab70-6ab09a879679
```
Delete post:
```
mitractl delete-post -i 55a3005f-f293-4168-ab70-6ab09a879679
```
Remove remote posts and media older than 30 days:
```
mitractl delete-extraneous-posts -d 30
```
Delete attachments that don't belong to any post:
```
mitractl delete-unused-attachments -d 5
```
Generate ethereum address:
```
mitractl generate-ethereum-address
```
Update blockchain synchronization starting block:
```shell
mitractl update-current-block -n 2000000
```
## License
[AGPL-3.0](./LICENSE)
## Support
Monero: 8Ahza5RM4JQgtdqvpcF1U628NN5Q87eryXQad3Fy581YWTZU8o3EMbtScuioQZSkyNNEEE1Lkj2cSbG4VnVYCW5L1N4os5p

View file

View file

@ -1,18 +1,26 @@
database_url: postgres://mitra:mitra@127.0.0.1:55432/mitra
storage_dir: files
web_client_dir: null
http_host: '127.0.0.1'
http_port: 8380
instance_uri: http://127.0.0.1:8380
instance_uri: 127.0.0.1:8380
instance_title: Mitra
instance_short_description: My instance
instance_description: My instance
registrations_open: true
registration:
type: open
blockchain:
# Parameters for hardhat local node
chain_id: eip155:31337
chain_info: null
contract_address: '0xDc64a140Aa3E981100a9becA4E685f962f0cF6C9'
contract_dir: contracts
api_url: 'http://127.0.0.1:8546'
explorer_url: null
signing_key: 'ac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80'
chain_sync_step: 100
chain_reorg_max_depth: 0
ipfs_api_url: 'http://127.0.0.1:5001'
ipfs_gateway_url: 'http://127.0.0.1:8001'

30
contracts/IERC165.json Normal file
View file

@ -0,0 +1,30 @@
{
"_format": "hh-sol-artifact-1",
"contractName": "IERC165",
"sourceName": "@openzeppelin/contracts/utils/introspection/IERC165.sol",
"abi": [
{
"inputs": [
{
"internalType": "bytes4",
"name": "interfaceId",
"type": "bytes4"
}
],
"name": "supportsInterface",
"outputs": [
{
"internalType": "bool",
"name": "",
"type": "bool"
}
],
"stateMutability": "view",
"type": "function"
}
],
"bytecode": "0x",
"deployedBytecode": "0x",
"linkReferences": {},
"deployedLinkReferences": {}
}

View file

@ -0,0 +1,233 @@
{
"_format": "hh-sol-artifact-1",
"contractName": "IERC20Metadata",
"sourceName": "@openzeppelin/contracts/token/ERC20/extensions/IERC20Metadata.sol",
"abi": [
{
"anonymous": false,
"inputs": [
{
"indexed": true,
"internalType": "address",
"name": "owner",
"type": "address"
},
{
"indexed": true,
"internalType": "address",
"name": "spender",
"type": "address"
},
{
"indexed": false,
"internalType": "uint256",
"name": "value",
"type": "uint256"
}
],
"name": "Approval",
"type": "event"
},
{
"anonymous": false,
"inputs": [
{
"indexed": true,
"internalType": "address",
"name": "from",
"type": "address"
},
{
"indexed": true,
"internalType": "address",
"name": "to",
"type": "address"
},
{
"indexed": false,
"internalType": "uint256",
"name": "value",
"type": "uint256"
}
],
"name": "Transfer",
"type": "event"
},
{
"inputs": [
{
"internalType": "address",
"name": "owner",
"type": "address"
},
{
"internalType": "address",
"name": "spender",
"type": "address"
}
],
"name": "allowance",
"outputs": [
{
"internalType": "uint256",
"name": "",
"type": "uint256"
}
],
"stateMutability": "view",
"type": "function"
},
{
"inputs": [
{
"internalType": "address",
"name": "spender",
"type": "address"
},
{
"internalType": "uint256",
"name": "amount",
"type": "uint256"
}
],
"name": "approve",
"outputs": [
{
"internalType": "bool",
"name": "",
"type": "bool"
}
],
"stateMutability": "nonpayable",
"type": "function"
},
{
"inputs": [
{
"internalType": "address",
"name": "account",
"type": "address"
}
],
"name": "balanceOf",
"outputs": [
{
"internalType": "uint256",
"name": "",
"type": "uint256"
}
],
"stateMutability": "view",
"type": "function"
},
{
"inputs": [],
"name": "decimals",
"outputs": [
{
"internalType": "uint8",
"name": "",
"type": "uint8"
}
],
"stateMutability": "view",
"type": "function"
},
{
"inputs": [],
"name": "name",
"outputs": [
{
"internalType": "string",
"name": "",
"type": "string"
}
],
"stateMutability": "view",
"type": "function"
},
{
"inputs": [],
"name": "symbol",
"outputs": [
{
"internalType": "string",
"name": "",
"type": "string"
}
],
"stateMutability": "view",
"type": "function"
},
{
"inputs": [],
"name": "totalSupply",
"outputs": [
{
"internalType": "uint256",
"name": "",
"type": "uint256"
}
],
"stateMutability": "view",
"type": "function"
},
{
"inputs": [
{
"internalType": "address",
"name": "recipient",
"type": "address"
},
{
"internalType": "uint256",
"name": "amount",
"type": "uint256"
}
],
"name": "transfer",
"outputs": [
{
"internalType": "bool",
"name": "",
"type": "bool"
}
],
"stateMutability": "nonpayable",
"type": "function"
},
{
"inputs": [
{
"internalType": "address",
"name": "sender",
"type": "address"
},
{
"internalType": "address",
"name": "recipient",
"type": "address"
},
{
"internalType": "uint256",
"name": "amount",
"type": "uint256"
}
],
"name": "transferFrom",
"outputs": [
{
"internalType": "bool",
"name": "",
"type": "bool"
}
],
"stateMutability": "nonpayable",
"type": "function"
}
],
"bytecode": "0x",
"deployedBytecode": "0x",
"linkReferences": {},
"deployedLinkReferences": {}
}

View file

@ -0,0 +1,341 @@
{
"_format": "hh-sol-artifact-1",
"contractName": "IERC721Metadata",
"sourceName": "@openzeppelin/contracts/token/ERC721/extensions/IERC721Metadata.sol",
"abi": [
{
"anonymous": false,
"inputs": [
{
"indexed": true,
"internalType": "address",
"name": "owner",
"type": "address"
},
{
"indexed": true,
"internalType": "address",
"name": "approved",
"type": "address"
},
{
"indexed": true,
"internalType": "uint256",
"name": "tokenId",
"type": "uint256"
}
],
"name": "Approval",
"type": "event"
},
{
"anonymous": false,
"inputs": [
{
"indexed": true,
"internalType": "address",
"name": "owner",
"type": "address"
},
{
"indexed": true,
"internalType": "address",
"name": "operator",
"type": "address"
},
{
"indexed": false,
"internalType": "bool",
"name": "approved",
"type": "bool"
}
],
"name": "ApprovalForAll",
"type": "event"
},
{
"anonymous": false,
"inputs": [
{
"indexed": true,
"internalType": "address",
"name": "from",
"type": "address"
},
{
"indexed": true,
"internalType": "address",
"name": "to",
"type": "address"
},
{
"indexed": true,
"internalType": "uint256",
"name": "tokenId",
"type": "uint256"
}
],
"name": "Transfer",
"type": "event"
},
{
"inputs": [
{
"internalType": "address",
"name": "to",
"type": "address"
},
{
"internalType": "uint256",
"name": "tokenId",
"type": "uint256"
}
],
"name": "approve",
"outputs": [],
"stateMutability": "nonpayable",
"type": "function"
},
{
"inputs": [
{
"internalType": "address",
"name": "owner",
"type": "address"
}
],
"name": "balanceOf",
"outputs": [
{
"internalType": "uint256",
"name": "balance",
"type": "uint256"
}
],
"stateMutability": "view",
"type": "function"
},
{
"inputs": [
{
"internalType": "uint256",
"name": "tokenId",
"type": "uint256"
}
],
"name": "getApproved",
"outputs": [
{
"internalType": "address",
"name": "operator",
"type": "address"
}
],
"stateMutability": "view",
"type": "function"
},
{
"inputs": [
{
"internalType": "address",
"name": "owner",
"type": "address"
},
{
"internalType": "address",
"name": "operator",
"type": "address"
}
],
"name": "isApprovedForAll",
"outputs": [
{
"internalType": "bool",
"name": "",
"type": "bool"
}
],
"stateMutability": "view",
"type": "function"
},
{
"inputs": [],
"name": "name",
"outputs": [
{
"internalType": "string",
"name": "",
"type": "string"
}
],
"stateMutability": "view",
"type": "function"
},
{
"inputs": [
{
"internalType": "uint256",
"name": "tokenId",
"type": "uint256"
}
],
"name": "ownerOf",
"outputs": [
{
"internalType": "address",
"name": "owner",
"type": "address"
}
],
"stateMutability": "view",
"type": "function"
},
{
"inputs": [
{
"internalType": "address",
"name": "from",
"type": "address"
},
{
"internalType": "address",
"name": "to",
"type": "address"
},
{
"internalType": "uint256",
"name": "tokenId",
"type": "uint256"
}
],
"name": "safeTransferFrom",
"outputs": [],
"stateMutability": "nonpayable",
"type": "function"
},
{
"inputs": [
{
"internalType": "address",
"name": "from",
"type": "address"
},
{
"internalType": "address",
"name": "to",
"type": "address"
},
{
"internalType": "uint256",
"name": "tokenId",
"type": "uint256"
},
{
"internalType": "bytes",
"name": "data",
"type": "bytes"
}
],
"name": "safeTransferFrom",
"outputs": [],
"stateMutability": "nonpayable",
"type": "function"
},
{
"inputs": [
{
"internalType": "address",
"name": "operator",
"type": "address"
},
{
"internalType": "bool",
"name": "_approved",
"type": "bool"
}
],
"name": "setApprovalForAll",
"outputs": [],
"stateMutability": "nonpayable",
"type": "function"
},
{
"inputs": [
{
"internalType": "bytes4",
"name": "interfaceId",
"type": "bytes4"
}
],
"name": "supportsInterface",
"outputs": [
{
"internalType": "bool",
"name": "",
"type": "bool"
}
],
"stateMutability": "view",
"type": "function"
},
{
"inputs": [],
"name": "symbol",
"outputs": [
{
"internalType": "string",
"name": "",
"type": "string"
}
],
"stateMutability": "view",
"type": "function"
},
{
"inputs": [
{
"internalType": "uint256",
"name": "tokenId",
"type": "uint256"
}
],
"name": "tokenURI",
"outputs": [
{
"internalType": "string",
"name": "",
"type": "string"
}
],
"stateMutability": "view",
"type": "function"
},
{
"inputs": [
{
"internalType": "address",
"name": "from",
"type": "address"
},
{
"internalType": "address",
"name": "to",
"type": "address"
},
{
"internalType": "uint256",
"name": "tokenId",
"type": "uint256"
}
],
"name": "transferFrom",
"outputs": [],
"stateMutability": "nonpayable",
"type": "function"
}
],
"bytecode": "0x",
"deployedBytecode": "0x",
"linkReferences": {},
"deployedLinkReferences": {}
}

30
contracts/IGate.json Normal file
View file

@ -0,0 +1,30 @@
{
"_format": "hh-sol-artifact-1",
"contractName": "IGate",
"sourceName": "contracts/interfaces/IGate.sol",
"abi": [
{
"inputs": [
{
"internalType": "address",
"name": "user",
"type": "address"
}
],
"name": "isAllowedUser",
"outputs": [
{
"internalType": "bool",
"name": "",
"type": "bool"
}
],
"stateMutability": "view",
"type": "function"
}
],
"bytecode": "0x",
"deployedBytecode": "0x",
"linkReferences": {},
"deployedLinkReferences": {}
}

57
contracts/IMinter.json Normal file
View file

@ -0,0 +1,57 @@
{
"_format": "hh-sol-artifact-1",
"contractName": "IMinter",
"sourceName": "contracts/interfaces/IMinter.sol",
"abi": [
{
"inputs": [],
"name": "collectible",
"outputs": [
{
"internalType": "contract IERC721Metadata",
"name": "",
"type": "address"
}
],
"stateMutability": "view",
"type": "function"
},
{
"inputs": [
{
"internalType": "address",
"name": "user",
"type": "address"
},
{
"internalType": "string",
"name": "tokenURI",
"type": "string"
},
{
"internalType": "uint8",
"name": "v",
"type": "uint8"
},
{
"internalType": "bytes32",
"name": "r",
"type": "bytes32"
},
{
"internalType": "bytes32",
"name": "s",
"type": "bytes32"
}
],
"name": "mint",
"outputs": [],
"stateMutability": "nonpayable",
"type": "function"
}
],
"bytecode": "0x",
"deployedBytecode": "0x",
"linkReferences": {},
"deployedLinkReferences": {}
}

View file

@ -0,0 +1,87 @@
{
"_format": "hh-sol-artifact-1",
"contractName": "ISubscription",
"sourceName": "contracts/interfaces/ISubscription.sol",
"abi": [
{
"anonymous": false,
"inputs": [
{
"indexed": true,
"internalType": "address",
"name": "sender",
"type": "address"
},
{
"indexed": true,
"internalType": "address",
"name": "recipient",
"type": "address"
},
{
"indexed": false,
"internalType": "uint256",
"name": "expires_at",
"type": "uint256"
}
],
"name": "UpdateSubscription",
"type": "event"
},
{
"inputs": [
{
"internalType": "address",
"name": "recipient",
"type": "address"
}
],
"name": "cancel",
"outputs": [],
"stateMutability": "nonpayable",
"type": "function"
},
{
"inputs": [
{
"internalType": "address",
"name": "recipient",
"type": "address"
},
{
"internalType": "uint256",
"name": "amount",
"type": "uint256"
}
],
"name": "send",
"outputs": [],
"stateMutability": "nonpayable",
"type": "function"
},
{
"inputs": [
{
"internalType": "address",
"name": "sender",
"type": "address"
}
],
"name": "withdrawReceived",
"outputs": [],
"stateMutability": "nonpayable",
"type": "function"
},
{
"inputs": [],
"name": "withdrawReceivedAll",
"outputs": [],
"stateMutability": "nonpayable",
"type": "function"
}
],
"bytecode": "0x",
"deployedBytecode": "0x",
"linkReferences": {},
"deployedLinkReferences": {}
}

View file

@ -0,0 +1,137 @@
{
"_format": "hh-sol-artifact-1",
"contractName": "ISubscriptionAdapter",
"sourceName": "contracts/interfaces/ISubscriptionAdapter.sol",
"abi": [
{
"inputs": [
{
"internalType": "address",
"name": "recipient",
"type": "address"
},
{
"internalType": "uint256",
"name": "price",
"type": "uint256"
},
{
"internalType": "uint8",
"name": "v",
"type": "uint8"
},
{
"internalType": "bytes32",
"name": "r",
"type": "bytes32"
},
{
"internalType": "bytes32",
"name": "s",
"type": "bytes32"
}
],
"name": "configureSubscription",
"outputs": [],
"stateMutability": "nonpayable",
"type": "function"
},
{
"inputs": [
{
"internalType": "address",
"name": "recipient",
"type": "address"
}
],
"name": "getSubscriptionPrice",
"outputs": [
{
"internalType": "uint256",
"name": "",
"type": "uint256"
}
],
"stateMutability": "view",
"type": "function"
},
{
"inputs": [
{
"internalType": "address",
"name": "sender",
"type": "address"
},
{
"internalType": "address",
"name": "recipient",
"type": "address"
}
],
"name": "getSubscriptionState",
"outputs": [
{
"internalType": "uint256",
"name": "",
"type": "uint256"
},
{
"internalType": "uint256",
"name": "",
"type": "uint256"
}
],
"stateMutability": "view",
"type": "function"
},
{
"inputs": [
{
"internalType": "address",
"name": "recipient",
"type": "address"
}
],
"name": "isSubscriptionConfigured",
"outputs": [
{
"internalType": "bool",
"name": "",
"type": "bool"
}
],
"stateMutability": "view",
"type": "function"
},
{
"inputs": [],
"name": "subscription",
"outputs": [
{
"internalType": "contract ISubscription",
"name": "",
"type": "address"
}
],
"stateMutability": "view",
"type": "function"
},
{
"inputs": [],
"name": "subscriptionToken",
"outputs": [
{
"internalType": "contract IERC20Metadata",
"name": "",
"type": "address"
}
],
"stateMutability": "view",
"type": "function"
}
],
"bytecode": "0x",
"deployedBytecode": "0x",
"linkReferences": {},
"deployedLinkReferences": {}
}

View file

@ -1,38 +0,0 @@
# Caddyfile for Mitra
# copy this file to /etc/caddy/Caddyfile
# Refer to the Caddy docs for more information:
# https://caddyserver.com/docs/caddyfile
# Global options block containing directive to reverse proxy first
{
order reverse_proxy first
}
# Replace `example.com` with your own domain
example.com {
encode gzip zstd
# Set this path to your mitra-web /dist directory
root * /usr/share/mitra/www
# Static file server. mitra-web /dist directory
file_server {
root /usr/share/mitra/www
}
# Reverse proxy directives for API endpoints
reverse_proxy /actor/* http://127.0.0.1:8383
reverse_proxy /api/* http://127.0.0.1:8383
reverse_proxy /contracts/* http://127.0.0.1:8383
reverse_proxy /feeds/* http://127.0.0.1:8383
reverse_proxy /media/* http://127.0.0.1:8383
reverse_proxy /nodeinfo/* http://127.0.0.1:8383
reverse_proxy /oauth/* http://127.0.0.1:8383
reverse_proxy /objects/* http://127.0.0.1:8383
reverse_proxy /users/* http://127.0.0.1:8383
reverse_proxy /.well-known/* http://127.0.0.1:8383
# Serve index.html for mitra-web frontend
try_files {path} /index.html
}

View file

@ -1,16 +0,0 @@
FROM ubuntu:23.04
RUN apt-get update && apt-get install -y \
curl \
wget \
ca-certificates \
&& rm -rf /var/lib/apt/lists/*
RUN mkdir -p /var/lib/data
COPY build/fedimovies /usr/local/bin
COPY build/fedimoviesctl /usr/local/bin
COPY secret/fedimovies.conf /etc/fedimovies.conf
COPY files /www/frontend/
CMD ["/usr/local/bin/fedimovies"]

View file

@ -1,54 +0,0 @@
server {
server_name example.tld;
listen 80;
listen [::]:80;
location / {
return 301 https://$server_name$request_uri;
}
}
server {
server_name example.tld;
listen 443 ssl http2;
listen [::]:443 ssl http2;
ssl_certificate /etc/letsencrypt/live/example.tld/fullchain.pem;
ssl_certificate_key /etc/letsencrypt/live/example.tld/privkey.pem;
ssl_trusted_certificate /etc/letsencrypt/live/example.tld/chain.pem;
ssl_session_timeout 1d;
ssl_session_cache shared:MozSSL:10m; # about 40000 sessions
ssl_session_tickets off;
ssl_protocols TLSv1.2 TLSv1.3;
ssl_ciphers "ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-SHA384:ECDHE-RSA-AES256-SHA384:!aNULL:!eNULL:!EXPORT:!DES:!MD5:!PSK:!RC4";
ssl_prefer_server_ciphers off;
ssl_stapling on;
ssl_stapling_verify on;
add_header Strict-Transport-Security "max-age=63072000" always;
# CSP header can't be added in location block
add_header Content-Security-Policy "default-src 'none'; connect-src 'self'; img-src 'self' data:; media-src 'self'; script-src 'self' 'unsafe-inline'; style-src 'self' 'nonce-oauth-authorization'; frame-ancestors 'none'; base-uri 'self'; form-action 'self'";
add_header X-Content-Type-Options "nosniff";
client_max_body_size 40M;
location / {
# Frontend
root /usr/share/mitra/www;
try_files $uri /index.html;
}
location ~ ^/(actor|api|collections|contracts|feeds|media|nodeinfo|oauth|objects|users|.well-known) {
# Backend
proxy_pass http://127.0.0.1:8383;
proxy_set_header Host $http_host;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header X-Forwarded-For $remote_addr;
}
}

View file

@ -10,7 +10,7 @@ server {
}
server {
server_name example.tld;
server_name example.tld;
listen 443 ssl http2;
listen [::]:443 ssl http2;
@ -30,14 +30,16 @@ server {
ssl_stapling on;
ssl_stapling_verify on;
add_header Strict-Transport-Security "max-age=63072000" always;
client_max_body_size 40M;
location / {
# Frontend
root /usr/share/mitra/www;
try_files $uri /index.html;
}
location ~ ^/(actor|api|contracts|feeds|media|nodeinfo|oauth|objects|users|.well-known) {
# Backend
proxy_pass http://127.0.0.1:8383;
proxy_set_header Host $http_host;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header X-Forwarded-For $remote_addr;
}
}

View file

@ -2,60 +2,43 @@
database_url: postgres://mitra:mitra@127.0.0.1:5432/mitra
storage_dir: /var/lib/mitra
# Path to web client static files
web_client_dir: /usr/share/mitra/www
http_host: '127.0.0.1'
http_port: 8383
# List of allowed origins for CORS (in addition to main)
#http_cors_allowlist: []
# Log level (debug, info, warn)
#log_level: info
# Domain name
instance_uri: https://example.tld
instance_uri: example.tld
instance_title: example
instance_short_description: my instance
# Long description can contain markdown syntax
instance_description: |
# My instance
Welcome!
instance_description: my instance
registration:
# Possible values: open, invite
type: invite
# Possible values: user, read_only_user
default_role: user
registrations_open: false
# EIP-4361 login message
#login_message: 'Do not sign this message on other sites!'
# Limits
#limits:
# media:
# file_size_limit: 20M
# posts:
# character_limit: 2000
# Data retention parameters
retention:
extraneous_posts: 50
empty_profiles: 150
# Federation parameters
#federation:
# enabled: true
# # Proxy for outgoing requests
# #proxy_url: 'socks5h://127.0.0.1:9050'
# # Proxy for outgoing requests to .onion targets
# #onion_proxy_url: 'socks5h://127.0.0.1:9050'
#post_character_limit: 2000
# List of blocked domains
#blocked_instances: []
# Blockchain integration
# Signing key for ethereum integration can be generated with `mitractl generate-ethereum-address`
#blockchain:
# chain_id: eip155:31337
# chain_info: null
# contract_address: '0xDc64a140Aa3E981100a9becA4E685f962f0cF6C9'
# contract_dir: /usr/share/mitra/contracts
# api_url: 'http://127.0.0.1:8545'
# explorer_url: null
# signing_key: null
# chain_sync_step: 1000
# chain_reorg_max_depth: 10
# IPFS integration
#ipfs_api_url: 'http://127.0.0.1:5001'
# IPFS gateway (for clients)

View file

@ -12,32 +12,6 @@ services:
- "55432:5432"
volumes:
- mitra_postgres:/var/lib/postgresql/data
# https://github.com/farcaster-project/containers
monerod:
profiles:
- monero
image: ghcr.io/farcaster-project/containers/monerod:0.18.1.2
environment:
NETWORK: regtest
OFFLINE: --offline
DIFFICULTY: 1
ports:
- "58081:18081"
monero-wallet-rpc:
profiles:
- monero
image: ghcr.io/farcaster-project/containers/monero-wallet-rpc:0.18.1.2
environment:
MONERO_DAEMON_ADDRESS: monerod:18081
WALLET_RPC_PORT: 18083
depends_on:
- "monerod"
ports:
- "58083:18083"
volumes:
- monero_wallets:/wallets
volumes:
mitra_postgres:
monero_wallets:

View file

@ -2,7 +2,7 @@
This guide explains how to run IPFS node in resource-constrained environment (such as cheap VPS or single-board computer).
The recommended IPFS implementation is [kubo](https://github.com/ipfs/kubo), version 0.18.1 or higher. Normally **kubo** requires at least 2 GB RAM, but after tweaking it can run on a machine with only 512 MB.
The recommended IPFS implementation is [go-ipfs](https://github.com/ipfs/go-ipfs), version 0.12 or higher. Normally go-ipfs requires at least 2 GB RAM, but after tweaking it can run on a machine with only 512 MB.
## Configuration profiles
@ -14,26 +14,24 @@ ipfs init --profile server
If you're running it on single-board computer, the recommended profile is `lowpower`.
Documentation on configuration profiles: https://github.com/ipfs/kubo/blob/master/docs/config.md#profiles
Documentation on configuration profiles: https://github.com/ipfs/go-ipfs/blob/master/docs/config.md#profiles
## Configuration options
- `Datastore.StorageMax`. Recommended value is `1G`.
- `Gateway.NoFetch`. Configures gateway to not fetch files from the network. Recommended value is `true`.
- `RelayService.Enabled`. Enables providing p2p relay service to other peers on the network. Recommended value is `false`.
- `Routing.Type`. Should be set to `dht` otherwise the node will not respond to requests from other peers.
- `Swarm.ConnMgr.LowWater`. Recommended value is `10`.
- `Swarm.ConnMgr.HighWater`. Recommended value is `20`.
- `Swarm.ConnMgr.GracePeriod`. Recommended value is `15s`.
- `Swarm.DisableBandwidthMetrics`. Disabling bandwidth metrics can slightly improve performance. Recommended value is `true`.
- `Swarm.RelayService.Enabled`. Enables providing p2p relay service to other peers on the network. Recommended value is `false`.
- `Swarm.ResourceMgr.Enabled`. Enables the libp2p Resource Manager. Recommended value is `true`.
- `Swarm.ResourceMgr.MaxMemory`. Recommended value is `150MB`.
Documentation: https://github.com/ipfs/kubo/blob/master/docs/config.md
Documentation: https://github.com/ipfs/go-ipfs/blob/master/docs/config.md
## Systemd service
When **kubo** starts, its memory usage is around 100 MB and then it slowly increases, often beyond the Resource Manager's limit. To keep memory usage within reasonable bounds the service needs to be restarted regularly.
When go-ipfs starts, its memory usage is around 100 MB and then it slowly increases. To keep memory usage within reasonable bounds the service needs to be restarted regularly.
This can be achieved by using systemd process supervison features:
@ -46,8 +44,8 @@ ExecStart=/usr/local/bin/ipfs daemon
User=ipfs
Group=ipfs
# Terminate service every 30 minutes and restart automatically
RuntimeMaxSec=1800
# Terminate service every 20 minutes and restart automatically
RuntimeMaxSec=1200
Restart=on-failure
# Specify the absolute limit on memory usage

View file

@ -1,123 +0,0 @@
# mitractl: a tool for instance administrators
Commands must be run as the same user as the web service:
```shell
su mitra -c "mitractl generate-invite-code"
```
---
Print help:
```shell
mitractl --help
```
Generate RSA private key:
```shell
mitractl generate-rsa-key
```
Generate invite code (note is optional):
```shell
mitractl generate-invite-code <note>
```
List generated invites:
```shell
mitractl list-invite-codes
```
Create user:
```shell
mitractl create-user <username> <password> <role-name>
```
Set or change password:
```shell
mitractl set-password <user-id> <password>
```
Change user's role (admin, user or read_only_user).
```shell
mitractl set-role <user-id> <role-name>
```
Delete profile:
```shell
mitractl delete-profile 55a3005f-f293-4168-ab70-6ab09a879679
```
Delete post:
```shell
mitractl delete-post 55a3005f-f293-4168-ab70-6ab09a879679
```
Delete custom emoji:
```shell
mitractl delete-emoji emoji_name example.org
```
Remove remote posts and media older than 30 days:
```shell
mitractl delete-extraneous-posts 30
```
Delete attachments that don't belong to any post:
```shell
mitractl delete-unused-attachments 5
```
Delete empty remote profiles:
```shell
mitractl delete-empty-profiles 100
```
Delete unused remote emojis:
```shell
mitractl prune-remote-emojis
```
Import custom emoji from another instance:
```shell
mitractl import-emoji emoji_name example.org
```
Generate ethereum address:
```shell
mitractl generate-ethereum-address
```
Update synchronization starting block of Ethereum blockchain:
```shell
mitractl update-current-block 2000000
```
Create Monero wallet:
```shell
mitractl create-monero-wallet "mitra-wallet" "passw0rd"
```
Check expired invoice:
```shell
mitractl check-expired-invoice 0184b062-d8d5-cbf1-a71b-6d1aafbae2ab
```

View file

@ -1,42 +0,0 @@
# Tor federation
## Tor-only instance
Install Tor.
Install Mitra. Uncomment or add the following block to Mitra configuration file:
```yaml
federation:
proxy_url: 'socks5h://127.0.0.1:9050'
```
Where `127.0.0.1:9050` is the address and the port where Tor proxy is listening.
Configure the onion service by adding these lines to `torrc` configuration file:
```
HiddenServiceDir /var/lib/tor/mitra/
HiddenServicePort 80 127.0.0.1:8383
```
Where `8383` should correspond to `http_port` setting in Mitra configuration file.
Restart the Tor service. Inside the `HiddenServiceDir` directory find the `hostname` file. This file contains the hostname of your onion service. Change the value of `instance_uri` parameter in Mitra configuration file to that hostname (it should end with `.onion`).
Start Mitra.
For more information about running onion services, visit https://community.torproject.org/onion-services/setup/
## Clearnet + Tor
Clearnet instances can federate with Tor-only instances.
Add the following block to Mitra configuration file:
```yaml
federation:
onion_proxy_url: 'socks5h://127.0.0.1:9050'
```
Where `127.0.0.1:9050` is the address and the port where Tor proxy is listening.

File diff suppressed because it is too large Load diff

View file

@ -1,27 +0,0 @@
[package]
name = "fedimovies-cli"
version = "1.22.0"
license = "AGPL-3.0"
edition = "2021"
rust-version = "1.68"
[[bin]]
name = "fedimoviesctl"
path = "src/main.rs"
[dependencies]
fedimovies-config = { path = "../fedimovies-config" }
fedimovies-models = { path = "../fedimovies-models" }
fedimovies-utils = { path = "../fedimovies-utils" }
fedimovies = { path = ".." }
# Used for catching errors
anyhow = "1.0.58"
# Used to build admin CLI tool
clap = { version = "3.2.18", default-features = false, features = ["std", "derive"] }
# Used for logging
log = "0.4.14"
# Async runtime
tokio = { version = "1.20.4", features = ["macros"] }
# Used to work with UUIDs
uuid = "1.1.2"

View file

@ -1,570 +0,0 @@
use anyhow::Error;
use clap::Parser;
use uuid::Uuid;
use fedimovies::activitypub::{
actors::helpers::update_remote_profile, builders::delete_note::prepare_delete_note,
builders::delete_person::prepare_delete_person, fetcher::fetchers::fetch_actor,
fetcher::helpers::import_from_outbox,
};
use fedimovies::admin::roles::{role_from_str, ALLOWED_ROLES};
use fedimovies::media::{remove_files, remove_media, MediaStorage};
use fedimovies::validators::{emojis::EMOJI_LOCAL_MAX_SIZE, users::validate_local_username};
use fedimovies_config::Config;
use fedimovies_models::{
attachments::queries::delete_unused_attachments,
cleanup::find_orphaned_files,
database::DatabaseClient,
emojis::helpers::get_emoji_by_name,
emojis::queries::{
create_emoji, delete_emoji, find_unused_remote_emojis, get_emoji_by_name_and_hostname,
},
oauth::queries::delete_oauth_tokens,
posts::queries::{delete_post, find_extraneous_posts, get_post_by_id},
profiles::queries::{
delete_profile, find_empty_profiles, find_unreachable, get_profile_by_id,
get_profile_by_remote_actor_id,
},
subscriptions::queries::reset_subscriptions,
users::queries::{
create_invite_code, create_user, get_invite_codes, get_user_by_id, set_user_password,
set_user_role,
},
users::types::UserCreateData,
};
use fedimovies_utils::{
crypto_rsa::{generate_rsa_key, serialize_private_key},
datetime::{days_before_now, get_min_datetime},
passwords::hash_password,
};
/// Admin CLI tool
#[derive(Parser)]
pub struct Opts {
    // The selected maintenance action; see `SubCommand` for the full list.
    #[clap(subcommand)]
    pub subcmd: SubCommand,
}
// Every maintenance operation supported by the admin CLI. Clap derives
// the kebab-case subcommand name from each variant (e.g. `generate-rsa-key`),
// and each variant wraps the struct holding that subcommand's arguments.
#[derive(Parser)]
pub enum SubCommand {
    GenerateRsaKey(GenerateRsaKey),
    GenerateEthereumAddress(GenerateEthereumAddress),
    GenerateInviteCode(GenerateInviteCode),
    ListInviteCodes(ListInviteCodes),
    CreateUser(CreateUser),
    SetPassword(SetPassword),
    SetRole(SetRole),
    RefetchActor(RefetchActor),
    ReadOutbox(ReadOutbox),
    DeleteProfile(DeleteProfile),
    DeletePost(DeletePost),
    DeleteEmoji(DeleteEmoji),
    DeleteExtraneousPosts(DeleteExtraneousPosts),
    DeleteUnusedAttachments(DeleteUnusedAttachments),
    DeleteOrphanedFiles(DeleteOrphanedFiles),
    DeleteEmptyProfiles(DeleteEmptyProfiles),
    PruneRemoteEmojis(PruneRemoteEmojis),
    ListUnreachableActors(ListUnreachableActors),
    ImportEmoji(ImportEmoji),
    UpdateCurrentBlock(UpdateCurrentBlock),
    ResetSubscriptions(ResetSubscriptions),
    CreateMoneroWallet(CreateMoneroWallet),
    CheckExpiredInvoice(CheckExpiredInvoice),
}
/// Generate RSA private key
#[derive(Parser)]
pub struct GenerateRsaKey;

impl GenerateRsaKey {
    /// Generates a new RSA private key and prints it in serialized (PEM)
    /// form to stdout.
    ///
    /// Panics with a descriptive message if key generation or serialization
    /// fails — both indicate a broken environment, which is acceptable to
    /// abort on in a CLI tool (previously these were bare `unwrap()`s, and
    /// the signature carried a redundant explicit `-> ()`).
    pub fn execute(&self) {
        let private_key = generate_rsa_key().expect("failed to generate RSA key");
        let private_key_str =
            serialize_private_key(&private_key).expect("failed to serialize RSA key");
        println!("{}", private_key_str);
    }
}
/// Generate ethereum address
#[derive(Parser)]
pub struct GenerateEthereumAddress;

impl GenerateEthereumAddress {
    /// Prints a placeholder value.
    ///
    /// NOTE(review): this only prints "dummy" — presumably the ethereum
    /// integration is stubbed out in this fork; confirm before relying on it.
    /// (Removed the redundant explicit `-> ()` return type.)
    pub fn execute(&self) {
        println!("dummy");
    }
}
/// Generate invite code
#[derive(Parser)]
pub struct GenerateInviteCode {
    // Optional free-form note stored alongside the code
    note: Option<String>,
}

impl GenerateInviteCode {
    /// Creates a single invite code (with an optional note) and prints it.
    pub async fn execute(&self, db_client: &impl DatabaseClient) -> Result<(), Error> {
        let note = self.note.as_deref();
        let code = create_invite_code(db_client, note).await?;
        println!("generated invite code: {}", code);
        Ok(())
    }
}
/// List invite codes
#[derive(Parser)]
pub struct ListInviteCodes;

impl ListInviteCodes {
    /// Prints every generated invite code, one per line, appending the
    /// stored note in parentheses when one exists.
    pub async fn execute(&self, db_client: &impl DatabaseClient) -> Result<(), Error> {
        let codes = get_invite_codes(db_client).await?;
        if codes.is_empty() {
            println!("no invite codes found");
            return Ok(());
        }
        for item in codes {
            match item.note {
                Some(note) => println!("{} ({})", item.code, note),
                None => println!("{}", item.code),
            }
        }
        Ok(())
    }
}
/// Create new user
#[derive(Parser)]
pub struct CreateUser {
    username: String,
    password: String,
    #[clap(value_parser = ALLOWED_ROLES)]
    role: String,
}

impl CreateUser {
    /// Validates the username, parses the role, generates the user's RSA
    /// signing key and inserts the new account into the database.
    pub async fn execute(&self, db_client: &mut impl DatabaseClient) -> Result<(), Error> {
        validate_local_username(&self.username)?;
        let role = role_from_str(&self.role)?;
        let password_hash = hash_password(&self.password)?;
        // Each account gets its own RSA key pair, stored in PEM form.
        let private_key_pem = serialize_private_key(&generate_rsa_key()?)?;
        let user_data = UserCreateData {
            username: self.username.clone(),
            password_hash: Some(password_hash),
            private_key_pem,
            wallet_address: None,
            invite_code: None,
            role,
        };
        create_user(db_client, user_data).await?;
        println!("user created");
        Ok(())
    }
}
/// Set password
#[derive(Parser)]
pub struct SetPassword {
    id: Uuid,
    password: String,
}

impl SetPassword {
    /// Replaces the user's password hash and revokes all of their sessions.
    pub async fn execute(&self, db_client: &impl DatabaseClient) -> Result<(), Error> {
        let hashed = hash_password(&self.password)?;
        set_user_password(db_client, &self.id, hashed).await?;
        // Revoke all sessions so tokens issued under the old password
        // can no longer be used.
        delete_oauth_tokens(db_client, &self.id).await?;
        println!("password updated");
        Ok(())
    }
}
/// Change user's role
#[derive(Parser)]
pub struct SetRole {
    id: Uuid,
    #[clap(value_parser = ALLOWED_ROLES)]
    role: String,
}

impl SetRole {
    /// Parses the role name (restricted to `ALLOWED_ROLES` by clap) and
    /// assigns it to the given user.
    pub async fn execute(&self, db_client: &impl DatabaseClient) -> Result<(), Error> {
        let parsed_role = role_from_str(&self.role)?;
        set_user_role(db_client, &self.id, parsed_role).await?;
        println!("role changed");
        Ok(())
    }
}
/// Re-fetch actor profile by actor ID
#[derive(Parser)]
pub struct RefetchActor {
    // ActivityPub actor ID (URL) of the remote profile to refresh
    id: String,
}

impl RefetchActor {
    /// Fetches the remote actor document again and updates the locally
    /// stored profile from it.
    pub async fn execute(
        &self,
        config: &Config,
        db_client: &mut impl DatabaseClient,
    ) -> Result<(), Error> {
        // Build the instance descriptor once instead of calling
        // `config.instance()` at each use site.
        let instance = config.instance();
        let profile = get_profile_by_remote_actor_id(db_client, &self.id).await?;
        let actor = fetch_actor(&instance, &self.id).await?;
        update_remote_profile(
            db_client,
            &instance,
            &MediaStorage::from(config),
            profile,
            actor,
        )
        .await?;
        println!("profile updated");
        Ok(())
    }
}
/// Pull activities from actor's outbox
#[derive(Parser)]
pub struct ReadOutbox {
    // Remote actor ID (URL)
    actor_id: String,
    // Maximum number of activities to import
    #[clap(long, default_value_t = 5)]
    limit: usize,
}
impl ReadOutbox {
    pub async fn execute(
        &self,
        config: &Config,
        db_client: &mut impl DatabaseClient,
    ) -> Result<(), Error> {
        import_from_outbox(config, db_client, &self.actor_id, self.limit).await?;
        Ok(())
    }
}
/// Delete profile
#[derive(Parser)]
pub struct DeleteProfile {
    id: Uuid,
}
impl DeleteProfile {
    // Removes a profile (local or remote). For local profiles a
    // Delete(Person) activity is prepared first and enqueued only after
    // the database deletion and media cleanup succeed.
    pub async fn execute(
        &self,
        config: &Config,
        db_client: &mut impl DatabaseClient,
    ) -> Result<(), Error> {
        let profile = get_profile_by_id(db_client, &self.id).await?;
        let maybe_delete_person = if profile.is_local() {
            let user = get_user_by_id(db_client, &profile.id).await?;
            Some(prepare_delete_person(db_client, &config.instance(), &user).await?)
        } else {
            None
        };
        let deletion_queue = delete_profile(db_client, &profile.id).await?;
        remove_media(config, deletion_queue).await;
        // Send Delete(Person) activities
        if let Some(activity) = maybe_delete_person {
            activity.enqueue(db_client).await?;
        }
        println!("profile deleted");
        Ok(())
    }
}
/// Delete post
#[derive(Parser)]
pub struct DeletePost {
    id: Uuid,
}
impl DeletePost {
    // Removes a post. If the author is local, a Delete(Note) activity is
    // prepared before deletion and enqueued once cleanup has succeeded.
    pub async fn execute(
        &self,
        config: &Config,
        db_client: &mut impl DatabaseClient,
    ) -> Result<(), Error> {
        let post = get_post_by_id(db_client, &self.id).await?;
        let maybe_delete_note = if post.author.is_local() {
            let author = get_user_by_id(db_client, &post.author.id).await?;
            Some(prepare_delete_note(db_client, &config.instance(), &author, &post).await?)
        } else {
            None
        };
        let deletion_queue = delete_post(db_client, &post.id).await?;
        remove_media(config, deletion_queue).await;
        // Send Delete(Note) activity
        if let Some(activity) = maybe_delete_note {
            activity.enqueue(db_client).await?;
        }
        println!("post deleted");
        Ok(())
    }
}
/// Delete custom emoji
#[derive(Parser)]
pub struct DeleteEmoji {
    // Name of the emoji (without colons)
    emoji_name: String,
    // Hostname for remote emojis; None selects a local emoji
    hostname: Option<String>,
}
impl DeleteEmoji {
    pub async fn execute(
        &self,
        config: &Config,
        db_client: &impl DatabaseClient,
    ) -> Result<(), Error> {
        let emoji =
            get_emoji_by_name(db_client, &self.emoji_name, self.hostname.as_deref()).await?;
        // delete_emoji returns the media files that became orphaned
        let deletion_queue = delete_emoji(db_client, &emoji.id).await?;
        remove_media(config, deletion_queue).await;
        println!("emoji deleted");
        Ok(())
    }
}
/// Delete old remote posts
#[derive(Parser)]
pub struct DeleteExtraneousPosts {
    days: u32,
}
impl DeleteExtraneousPosts {
    // Deletes remote posts that were last updated before the cutoff and
    // removes any media files that became orphaned, one post at a time.
    pub async fn execute(
        &self,
        config: &Config,
        db_client: &mut impl DatabaseClient,
    ) -> Result<(), Error> {
        let cutoff = days_before_now(self.days);
        for post_id in find_extraneous_posts(db_client, &cutoff).await? {
            let orphaned_media = delete_post(db_client, &post_id).await?;
            remove_media(config, orphaned_media).await;
            println!("post {} deleted", post_id);
        }
        Ok(())
    }
}
/// Delete attachments that don't belong to any post
#[derive(Parser)]
pub struct DeleteUnusedAttachments {
    // Only delete attachments created at least this many days ago
    days: u32,
}
impl DeleteUnusedAttachments {
    pub async fn execute(
        &self,
        config: &Config,
        db_client: &impl DatabaseClient,
    ) -> Result<(), Error> {
        let created_before = days_before_now(self.days);
        let deletion_queue = delete_unused_attachments(db_client, &created_before).await?;
        remove_media(config, deletion_queue).await;
        println!("unused attachments deleted");
        Ok(())
    }
}
/// Find and delete orphaned files
#[derive(Parser)]
pub struct DeleteOrphanedFiles;
impl DeleteOrphanedFiles {
    // Lists every file in the media directory, asks the database which of
    // them are not referenced by any record, and deletes the rest.
    pub async fn execute(
        &self,
        config: &Config,
        db_client: &impl DatabaseClient,
    ) -> Result<(), Error> {
        let media_dir = config.media_dir();
        let mut file_names = Vec::new();
        for entry in std::fs::read_dir(&media_dir)? {
            file_names.push(entry?.file_name().to_string_lossy().to_string());
        }
        println!("found {} files", file_names.len());
        let orphaned = find_orphaned_files(db_client, file_names).await?;
        if !orphaned.is_empty() {
            remove_files(orphaned, &media_dir);
            println!("orphaned files deleted");
        }
        Ok(())
    }
}
/// Delete empty remote profiles
#[derive(Parser)]
pub struct DeleteEmptyProfiles {
    days: u32,
}
impl DeleteEmptyProfiles {
    // Removes remote profiles considered empty that were last updated
    // before the cutoff, cleaning up their media after each deletion.
    pub async fn execute(
        &self,
        config: &Config,
        db_client: &mut impl DatabaseClient,
    ) -> Result<(), Error> {
        let cutoff = days_before_now(self.days);
        for profile_id in find_empty_profiles(db_client, &cutoff).await? {
            let profile = get_profile_by_id(db_client, &profile_id).await?;
            let orphaned_media = delete_profile(db_client, &profile.id).await?;
            remove_media(config, orphaned_media).await;
            println!("profile {} deleted", profile.acct);
        }
        Ok(())
    }
}
/// Delete unused remote emojis
#[derive(Parser)]
pub struct PruneRemoteEmojis;
impl PruneRemoteEmojis {
    // Deletes every remote emoji the database reports as unused and
    // removes the image files that became orphaned.
    pub async fn execute(
        &self,
        config: &Config,
        db_client: &mut impl DatabaseClient,
    ) -> Result<(), Error> {
        for emoji_id in find_unused_remote_emojis(db_client).await? {
            let orphaned_media = delete_emoji(db_client, &emoji_id).await?;
            remove_media(config, orphaned_media).await;
            println!("emoji {} deleted", emoji_id);
        }
        Ok(())
    }
}
/// List unreachable actors
#[derive(Parser)]
pub struct ListUnreachableActors {
    // Only list actors unreachable for at least this many days
    days: u32,
}
impl ListUnreachableActors {
    pub async fn execute(
        &self,
        _config: &Config,
        db_client: &impl DatabaseClient,
    ) -> Result<(), Error> {
        let unreachable_since = days_before_now(self.days);
        let profiles = find_unreachable(db_client, &unreachable_since).await?;
        // Fixed-width table header
        println!(
            "{0: <60} | {1: <35} | {2: <35}",
            "ID", "unreachable since", "updated at",
        );
        for profile in profiles {
            // NOTE(review): both unwraps assume find_unreachable only returns
            // remote profiles with unreachable_since set — confirm; a row
            // violating that invariant would panic this command.
            println!(
                "{0: <60} | {1: <35} | {2: <35}",
                profile.actor_id.unwrap(),
                profile.unreachable_since.unwrap().to_string(),
                profile.updated_at.to_string(),
            );
        }
        Ok(())
    }
}
/// Import custom emoji from another instance
#[derive(Parser)]
pub struct ImportEmoji {
    emoji_name: String,
    hostname: String,
}
impl ImportEmoji {
    // Copies a known remote emoji into the local collection, unless its
    // image exceeds the local size cap.
    pub async fn execute(
        &self,
        _config: &Config,
        db_client: &impl DatabaseClient,
    ) -> Result<(), Error> {
        let emoji =
            get_emoji_by_name_and_hostname(db_client, &self.emoji_name, &self.hostname).await?;
        if emoji.image.file_size > EMOJI_LOCAL_MAX_SIZE {
            println!("emoji is too big");
            return Ok(());
        }
        let added_at = get_min_datetime();
        create_emoji(
            db_client,
            &emoji.emoji_name,
            None,
            emoji.image,
            None,
            &added_at,
        )
        .await?;
        println!("added emoji to local collection");
        Ok(())
    }
}
/// Update blockchain synchronization starting block
#[derive(Parser)]
pub struct UpdateCurrentBlock {
    // Block number to resume synchronization from
    number: u64,
}
impl UpdateCurrentBlock {
    pub async fn execute(
        &self,
        _config: &Config,
        _db_client: &impl DatabaseClient,
    ) -> Result<(), Error> {
        // NOTE(review): this command is a no-op — it prints success without
        // touching config or database. Confirm whether blockchain support
        // was removed intentionally; if so, the command should be dropped
        // or should say so instead of claiming success.
        println!("current block updated");
        Ok(())
    }
}
/// Reset all subscriptions
/// (can be used during development or when switching between chains)
#[derive(Parser)]
pub struct ResetSubscriptions {
    // Pass when the Ethereum subscription contract was redeployed,
    // so chain-specific data is also cleared
    #[clap(long)]
    ethereum_contract_replaced: bool,
}
impl ResetSubscriptions {
    pub async fn execute(
        &self,
        _config: &Config,
        db_client: &mut impl DatabaseClient,
    ) -> Result<(), Error> {
        reset_subscriptions(db_client, self.ethereum_contract_replaced).await?;
        println!("subscriptions deleted");
        Ok(())
    }
}
/// Create Monero wallet
/// (can be used when monero-wallet-rpc runs with --wallet-dir option)
#[derive(Parser)]
pub struct CreateMoneroWallet {
    name: String,
    password: Option<String>,
}
impl CreateMoneroWallet {
    pub async fn execute(&self, _config: &Config) -> Result<(), Error> {
        // NOTE(review): no wallet is actually created — the body only prints
        // a success message. Confirm whether Monero support was removed
        // intentionally; a misleading success message should not be kept.
        println!("wallet created");
        Ok(())
    }
}
/// Check expired invoice
#[derive(Parser)]
pub struct CheckExpiredInvoice {
    // Invoice ID
    id: Uuid,
}
impl CheckExpiredInvoice {
    pub async fn execute(
        &self,
        _config: &Config,
        _db_client: &impl DatabaseClient,
    ) -> Result<(), Error> {
        // NOTE(review): stub — performs no check at all. Confirm whether
        // payment support was removed intentionally and drop the command
        // if it no longer has a purpose.
        Ok(())
    }
}

View file

@ -1,76 +0,0 @@
use clap::Parser;
use fedimovies::logger::configure_logger;
use fedimovies_config::parse_config;
use fedimovies_models::database::create_database_client;
use fedimovies_models::database::migrate::apply_migrations;
mod cli;
use cli::{Opts, SubCommand};
#[tokio::main]
async fn main() {
    let opts: Opts = Opts::parse();
    match opts.subcmd {
        // Key-generation commands need neither config nor database
        SubCommand::GenerateRsaKey(cmd) => cmd.execute(),
        SubCommand::GenerateEthereumAddress(cmd) => cmd.execute(),
        subcmd => {
            // Other commands require initialized app
            let (config, config_warnings) = parse_config();
            configure_logger(config.log_level);
            log::info!("config loaded from {}", config.config_path);
            for warning in config_warnings {
                log::warn!("{}", warning);
            }
            // unwrap: a malformed database_url is a fatal config error for a CLI
            let db_config = config.database_url.parse().unwrap();
            let db_client =
                &mut create_database_client(&db_config, config.tls_ca_file.as_deref()).await;
            apply_migrations(db_client).await;
            // Dispatch; each arm unwraps so a failed command aborts with its error
            match subcmd {
                SubCommand::GenerateInviteCode(cmd) => cmd.execute(db_client).await.unwrap(),
                SubCommand::ListInviteCodes(cmd) => cmd.execute(db_client).await.unwrap(),
                SubCommand::CreateUser(cmd) => cmd.execute(db_client).await.unwrap(),
                SubCommand::SetPassword(cmd) => cmd.execute(db_client).await.unwrap(),
                SubCommand::SetRole(cmd) => cmd.execute(db_client).await.unwrap(),
                SubCommand::RefetchActor(cmd) => cmd.execute(&config, db_client).await.unwrap(),
                SubCommand::ReadOutbox(cmd) => cmd.execute(&config, db_client).await.unwrap(),
                SubCommand::DeleteProfile(cmd) => cmd.execute(&config, db_client).await.unwrap(),
                SubCommand::DeletePost(cmd) => cmd.execute(&config, db_client).await.unwrap(),
                SubCommand::DeleteEmoji(cmd) => cmd.execute(&config, db_client).await.unwrap(),
                SubCommand::DeleteExtraneousPosts(cmd) => {
                    cmd.execute(&config, db_client).await.unwrap()
                }
                SubCommand::DeleteUnusedAttachments(cmd) => {
                    cmd.execute(&config, db_client).await.unwrap()
                }
                SubCommand::DeleteOrphanedFiles(cmd) => {
                    cmd.execute(&config, db_client).await.unwrap()
                }
                SubCommand::DeleteEmptyProfiles(cmd) => {
                    cmd.execute(&config, db_client).await.unwrap()
                }
                SubCommand::PruneRemoteEmojis(cmd) => {
                    cmd.execute(&config, db_client).await.unwrap()
                }
                SubCommand::ListUnreachableActors(cmd) => {
                    cmd.execute(&config, db_client).await.unwrap()
                }
                SubCommand::ImportEmoji(cmd) => cmd.execute(&config, db_client).await.unwrap(),
                SubCommand::UpdateCurrentBlock(cmd) => {
                    cmd.execute(&config, db_client).await.unwrap()
                }
                SubCommand::ResetSubscriptions(cmd) => {
                    cmd.execute(&config, db_client).await.unwrap()
                }
                SubCommand::CreateMoneroWallet(cmd) => cmd.execute(&config).await.unwrap(),
                SubCommand::CheckExpiredInvoice(cmd) => {
                    cmd.execute(&config, db_client).await.unwrap()
                }
                // Key-generation variants were handled by the outer match
                _ => unreachable!(),
            };
        }
    };
}

View file

@ -1,30 +0,0 @@
[package]
name = "fedimovies-config"
version = "1.22.0"
license = "AGPL-3.0"
edition = "2021"
rust-version = "1.68"
[dependencies]
fedimovies-utils = { path = "../fedimovies-utils" }
# Used to read .env files
dotenv = "0.15.0"
# Used for logging
log = { version = "0.4.14", features = ["serde"] }
# Used for working with regular expressions
regex = "1.6.0"
# Used for working with RSA keys
rsa = "0.5.0"
# Used for serialization/deserialization
serde = { version = "1.0.136", features = ["derive"] }
# Used to parse config file
serde_yaml = "0.8.17"
# Used for creating error types
thiserror = "1.0.37"
# Used to work with URLs
url = "2.2.2"
[features]
production = []
test-utils = []

View file

@ -1,221 +0,0 @@
use std::path::PathBuf;
use log::Level as LogLevel;
use rsa::RsaPrivateKey;
use serde::Deserialize;
use url::Url;
use fedimovies_utils::urls::normalize_url;
use super::environment::Environment;
use super::federation::FederationConfig;
use super::limits::Limits;
use super::registration::RegistrationConfig;
use super::retention::RetentionConfig;
use super::REEF_VERSION;
// Serde default: log at Info unless the config file overrides it
fn default_log_level() -> LogLevel {
    LogLevel::Info
}
// Serde default for the EIP-4361 login message
fn default_login_message() -> String {
    String::from("What?!")
}
/// Application configuration, deserialized from the YAML config file.
#[derive(Clone, Deserialize)]
pub struct Config {
    // Properties auto-populated from the environment
    #[serde(skip)]
    pub environment: Environment,
    #[serde(skip)]
    pub config_path: String,
    // Core settings
    pub database_url: String,
    /// Optional CA bundle for TLS connections to the database.
    #[serde(default)]
    pub tls_ca_file: Option<PathBuf>,
    /// Root directory for persistent files (media, instance key).
    pub storage_dir: PathBuf,
    /// Directory with pre-built web client assets, if any.
    pub web_client_dir: Option<PathBuf>,
    pub http_host: String,
    pub http_port: u32,
    #[serde(default)]
    pub http_cors_allowlist: Vec<String>,
    #[serde(default = "default_log_level")]
    pub log_level: LogLevel,
    // Domain name or <IP address>:<port>
    // URI scheme is optional
    instance_uri: String,
    pub instance_title: String,
    pub instance_short_description: String,
    pub instance_description: String,
    #[serde(default)]
    pub tmdb_api_key: Option<String>,
    #[serde(default)]
    pub movie_user_password: Option<String>,
    // Loaded from storage_dir by the config loader, never from YAML
    #[serde(skip)]
    pub(super) instance_rsa_key: Option<RsaPrivateKey>,
    pub(super) registrations_open: Option<bool>, // deprecated
    #[serde(default)]
    pub registration: RegistrationConfig,
    // EIP-4361 login message
    #[serde(default = "default_login_message")]
    pub login_message: String,
    pub(super) post_character_limit: Option<usize>, // deprecated
    #[serde(default)]
    pub limits: Limits,
    #[serde(default)]
    pub retention: RetentionConfig,
    pub(super) proxy_url: Option<String>, // deprecated, see federation.proxy_url
    #[serde(default)]
    pub federation: FederationConfig,
    #[serde(default)]
    pub blocked_instances: Vec<String>,
    // IPFS (both URLs must be set together; validated by the loader)
    pub ipfs_api_url: Option<String>,
    pub ipfs_gateway_url: Option<String>,
}
impl Config {
    /// Parses `instance_uri` into a normalized URL.
    pub(super) fn try_instance_url(&self) -> Result<Url, url::ParseError> {
        normalize_url(&self.instance_uri)
    }
    /// Builds the runtime `Instance` descriptor.
    ///
    /// Panics if the instance URI is invalid or the RSA key was not loaded;
    /// both are established by `parse_config` before this is called.
    pub fn instance(&self) -> Instance {
        Instance {
            _url: self.try_instance_url().unwrap(),
            actor_key: self.instance_rsa_key.clone().unwrap(),
            proxy_url: self.federation.proxy_url.clone(),
            onion_proxy_url: self.federation.onion_proxy_url.clone(),
            i2p_proxy_url: self.federation.i2p_proxy_url.clone(),
            // Private instance doesn't send activities and sign requests
            is_private: !self.federation.enabled,
            // || matches!(self.environment, Environment::Development),
            fetcher_timeout: self.federation.fetcher_timeout,
            deliverer_timeout: self.federation.deliverer_timeout,
        }
    }
    /// Origin form of the instance URL, e.g. "https://example.com".
    pub fn instance_url(&self) -> String {
        self.instance().url()
    }
    /// Directory where uploaded media files are stored.
    pub fn media_dir(&self) -> PathBuf {
        self.storage_dir.join("media")
    }
}
/// Runtime descriptor of this instance, derived from `Config`.
#[derive(Clone)]
pub struct Instance {
    _url: Url,
    // Instance actor
    pub actor_key: RsaPrivateKey,
    // Proxy for outgoing requests
    pub proxy_url: Option<String>,
    pub onion_proxy_url: Option<String>,
    pub i2p_proxy_url: Option<String>,
    // Private instance won't send signed HTTP requests
    pub is_private: bool,
    // Request timeouts in seconds
    pub fetcher_timeout: u64,
    pub deliverer_timeout: u64,
}
impl Instance {
    /// Origin of the instance URL, e.g. "https://example.com".
    pub fn url(&self) -> String {
        self._url.origin().ascii_serialization()
    }

    /// Host part of the instance URL.
    // NOTE: assumes the URL has a host; config validation is expected to
    // guarantee this before an Instance is constructed.
    pub fn hostname(&self) -> String {
        self._url.host_str().unwrap().to_string()
    }

    /// User-agent string for outgoing HTTP requests.
    pub fn agent(&self) -> String {
        format!("Reef {REEF_VERSION}; {}", self.url())
    }
}
#[cfg(feature = "test-utils")]
impl Instance {
pub fn for_test(url: &str) -> Self {
use fedimovies_utils::crypto_rsa::generate_weak_rsa_key;
Self {
_url: Url::parse(url).unwrap(),
actor_key: generate_weak_rsa_key().unwrap(),
proxy_url: None,
onion_proxy_url: None,
i2p_proxy_url: None,
is_private: true,
fetcher_timeout: 0,
deliverer_timeout: 0,
}
}
}
#[cfg(test)]
mod tests {
    use super::*;
    use fedimovies_utils::crypto_rsa::generate_weak_rsa_key;

    #[test]
    fn test_instance_url_https_dns() {
        let instance_url = Url::parse("https://example.com/").unwrap();
        let instance_rsa_key = generate_weak_rsa_key().unwrap();
        let instance = Instance {
            _url: instance_url,
            actor_key: instance_rsa_key,
            proxy_url: None,
            onion_proxy_url: None,
            i2p_proxy_url: None,
            is_private: true,
            fetcher_timeout: 0,
            deliverer_timeout: 0,
        };
        assert_eq!(instance.url(), "https://example.com");
        assert_eq!(instance.hostname(), "example.com");
        // BUG FIX: agent() renders "Reef {version}; {url}"; the assertion
        // previously expected the old "Mitra" branding and could never pass.
        assert_eq!(
            instance.agent(),
            format!("Reef {}; https://example.com", REEF_VERSION),
        );
    }

    #[test]
    fn test_instance_url_http_ipv4() {
        let instance_url = Url::parse("http://1.2.3.4:3777/").unwrap();
        let instance_rsa_key = generate_weak_rsa_key().unwrap();
        let instance = Instance {
            _url: instance_url,
            actor_key: instance_rsa_key,
            proxy_url: None,
            onion_proxy_url: None,
            i2p_proxy_url: None,
            is_private: true,
            fetcher_timeout: 0,
            deliverer_timeout: 0,
        };
        assert_eq!(instance.url(), "http://1.2.3.4:3777");
        assert_eq!(instance.hostname(), "1.2.3.4");
    }
}

View file

@ -1,33 +0,0 @@
use std::str::FromStr;
use super::ConfigError;
/// Deployment environment the application runs in.
#[derive(Clone, Debug)]
pub enum Environment {
    Development,
    Production,
}
impl Default for Environment {
    // The default follows the build profile: the "production" cargo
    // feature flips it, so Default cannot be derived here.
    #[cfg(feature = "production")]
    fn default() -> Self {
        Self::Production
    }
    #[cfg(not(feature = "production"))]
    fn default() -> Self {
        Self::Development
    }
}
impl FromStr for Environment {
    type Err = ConfigError;

    /// Parses "development" or "production"; any other value is an error.
    fn from_str(val: &str) -> Result<Self, Self::Err> {
        match val {
            "development" => Ok(Environment::Development),
            "production" => Ok(Environment::Production),
            _ => Err(ConfigError("invalid environment type")),
        }
    }
}

View file

@ -1,38 +0,0 @@
use serde::Deserialize;
// Serde default: federation is on unless disabled in the config file.
// Made `const fn` for consistency with the other defaults below.
const fn default_federation_enabled() -> bool {
    true
}
// Serde default: fetcher timeout in seconds
const fn default_fetcher_timeout() -> u64 {
    300
}
// Serde default: deliverer timeout in seconds
const fn default_deliverer_timeout() -> u64 {
    30
}
/// Federation section of the config file.
#[derive(Clone, Deserialize)]
pub struct FederationConfig {
    /// When false the instance is private: no signed requests, no delivery.
    #[serde(default = "default_federation_enabled")]
    pub enabled: bool,
    // Timeouts in seconds
    #[serde(default = "default_fetcher_timeout")]
    pub(super) fetcher_timeout: u64,
    #[serde(default = "default_deliverer_timeout")]
    pub(super) deliverer_timeout: u64,
    // Optional proxies for outgoing requests (clearnet / Tor / I2P)
    pub(super) proxy_url: Option<String>,
    pub(super) onion_proxy_url: Option<String>,
    pub(super) i2p_proxy_url: Option<String>,
}
impl Default for FederationConfig {
    // Mirrors the per-field serde defaults for configs that omit the
    // whole [federation] section.
    fn default() -> Self {
        Self {
            enabled: default_federation_enabled(),
            fetcher_timeout: default_fetcher_timeout(),
            deliverer_timeout: default_deliverer_timeout(),
            proxy_url: None,
            onion_proxy_url: None,
            i2p_proxy_url: None,
        }
    }
}

View file

@ -1,18 +0,0 @@
mod config;
mod environment;
mod federation;
mod limits;
mod loader;
mod registration;
mod retention;
pub use config::{Config, Instance};
pub use environment::Environment;
pub use loader::parse_config;
pub use registration::{DefaultRole, RegistrationType};
/// Crate version, reported in the user-agent string and APIs.
pub const REEF_VERSION: &str = env!("CARGO_PKG_VERSION");
/// Error raised for invalid configuration values; wraps a static message.
#[derive(thiserror::Error, Debug)]
#[error("{0}")]
pub struct ConfigError(&'static str);

View file

@ -1,106 +0,0 @@
use super::ConfigError;
use regex::Regex;
use serde::{de::Error as DeserializerError, Deserialize, Deserializer};
// Pattern for sizes such as "1234", "89kB", "12M" (case-insensitive;
// optional k/m/g unit, optional trailing "b")
const FILE_SIZE_RE: &str = r#"^(?i)(?P<size>\d+)(?P<unit>[kmg]?)b?$"#;
/// Parses a human-readable file size into a number of bytes.
/// Units use decimal multipliers (k = 10^3, m = 10^6, g = 10^9).
fn parse_file_size(value: &str) -> Result<usize, ConfigError> {
    // Recompiled on each call; acceptable since this only runs while
    // deserializing the config file.
    let file_size_re = Regex::new(FILE_SIZE_RE).expect("regexp should be valid");
    let caps = file_size_re
        .captures(value)
        .ok_or(ConfigError("invalid file size"))?;
    // &str parses directly; the previous .to_string() allocation was redundant
    let size: usize = caps["size"]
        .parse()
        .map_err(|_| ConfigError("invalid file size"))?;
    let unit = caps["unit"].to_lowercase();
    let multiplier = match unit.as_str() {
        "k" => usize::pow(10, 3),
        "m" => usize::pow(10, 6),
        "g" => usize::pow(10, 9),
        "" => 1,
        // Unreachable given the regex; kept as defense in depth
        _ => return Err(ConfigError("invalid file size unit")),
    };
    Ok(size * multiplier)
}
/// Serde adapter: deserializes a string field through `parse_file_size`.
fn deserialize_file_size<'de, D>(deserializer: D) -> Result<usize, D::Error>
where
    D: Deserializer<'de>,
{
    let file_size_str = String::deserialize(deserializer)?;
    let file_size = parse_file_size(&file_size_str).map_err(DeserializerError::custom)?;
    Ok(file_size)
}
// Serde default: 20 MB attachment size limit
const fn default_file_size_limit() -> usize {
    20_000_000
} // 20 MB
// Serde default: 500 kB emoji size limit
const fn default_emoji_size_limit() -> usize {
    500_000
} // 500 kB
/// Upload size limits; config accepts human-readable values like "20M".
#[derive(Clone, Deserialize)]
pub struct MediaLimits {
    #[serde(
        default = "default_file_size_limit",
        deserialize_with = "deserialize_file_size"
    )]
    pub file_size_limit: usize,
    #[serde(
        default = "default_emoji_size_limit",
        deserialize_with = "deserialize_file_size"
    )]
    pub emoji_size_limit: usize,
}
impl Default for MediaLimits {
    // Mirrors the per-field serde defaults
    fn default() -> Self {
        Self {
            file_size_limit: default_file_size_limit(),
            emoji_size_limit: default_emoji_size_limit(),
        }
    }
}
// Serde default: maximum post length in characters
const fn default_post_character_limit() -> usize {
    2000
}
/// Limits applied to posts.
#[derive(Clone, Deserialize)]
pub struct PostLimits {
    #[serde(default = "default_post_character_limit")]
    pub character_limit: usize,
}
impl Default for PostLimits {
    // Mirrors the per-field serde default
    fn default() -> Self {
        Self {
            character_limit: default_post_character_limit(),
        }
    }
}
/// Top-level "limits" section of the config file.
#[derive(Clone, Default, Deserialize)]
pub struct Limits {
    #[serde(default)]
    pub media: MediaLimits,
    #[serde(default)]
    pub posts: PostLimits,
}
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_parse_file_size() {
        let file_size = parse_file_size("1234").unwrap();
        assert_eq!(file_size, 1234);
        let file_size = parse_file_size("89kB").unwrap();
        assert_eq!(file_size, 89_000);
        let file_size = parse_file_size("12M").unwrap();
        assert_eq!(file_size, 12_000_000);
        // gigabyte unit and bare "b" suffix were previously untested
        let file_size = parse_file_size("1G").unwrap();
        assert_eq!(file_size, 1_000_000_000);
        let file_size = parse_file_size("5b").unwrap();
        assert_eq!(file_size, 5);
    }

    #[test]
    fn test_parse_file_size_invalid() {
        // error path had no coverage
        assert!(parse_file_size("").is_err());
        assert!(parse_file_size("12q").is_err());
        assert!(parse_file_size("-5k").is_err());
    }
}

View file

@ -1,134 +0,0 @@
use std::os::unix::fs::MetadataExt;
use std::path::Path;
use std::str::FromStr;
use rsa::RsaPrivateKey;
use fedimovies_utils::{
crypto_rsa::{deserialize_private_key, generate_rsa_key, serialize_private_key},
files::{set_file_permissions, write_file},
};
use super::config::Config;
use super::environment::Environment;
use super::registration::{DefaultRole, RegistrationType};
/// Settings read from environment variables (not from the YAML file).
struct EnvConfig {
    // Path to the YAML config file (CONFIG_PATH)
    config_path: String,
    // Optional override of the build-time default (ENVIRONMENT)
    environment: Option<Environment>,
}
// Default config location depends on the build profile
#[cfg(feature = "production")]
const DEFAULT_CONFIG_PATH: &str = "/etc/fedimovies/config.yaml";
#[cfg(not(feature = "production"))]
const DEFAULT_CONFIG_PATH: &str = "config.yaml";
/// Loads `.env.local` / `.env` files and reads the environment variables
/// that influence configuration loading.
fn parse_env() -> EnvConfig {
    // .env.local takes precedence; failures are fine (files are optional)
    dotenv::from_filename(".env.local").ok();
    dotenv::dotenv().ok();
    // unwrap_or_else: avoid allocating the default string when CONFIG_PATH is set
    let config_path =
        std::env::var("CONFIG_PATH").unwrap_or_else(|_| DEFAULT_CONFIG_PATH.to_string());
    let environment = std::env::var("ENVIRONMENT")
        .ok()
        .map(|val| Environment::from_str(&val).expect("invalid environment type"));
    EnvConfig {
        config_path,
        environment,
    }
}
// libc binding: effective UID of the current process (Unix only).
// SAFETY: geteuid never fails and takes no arguments.
extern "C" {
    fn geteuid() -> u32;
}
/// Panics unless `path` is owned by the user this process runs as.
/// Catches the common misconfiguration where the storage directory was
/// created by a different (e.g. root) account.
// `-> ()` removed: an empty return type is implicit (clippy::unused_unit).
fn check_directory_owner(path: &Path) {
    let metadata = std::fs::metadata(path).expect("can't read file metadata");
    let owner_uid = metadata.uid();
    // SAFETY: geteuid takes no arguments and cannot fail
    let current_uid = unsafe { geteuid() };
    if owner_uid != current_uid {
        panic!(
            "{} owner ({}) is different from the current user ({})",
            path.display(),
            owner_uid,
            current_uid,
        );
    };
}
/// Generates new instance RSA key or returns existing key
fn read_instance_rsa_key(storage_dir: &Path) -> RsaPrivateKey {
    let private_key_path = storage_dir.join("instance_rsa_key");
    if !private_key_path.exists() {
        // First run: create a key and persist it with owner-only permissions
        let private_key = generate_rsa_key().expect("failed to generate RSA key");
        let private_key_str =
            serialize_private_key(&private_key).expect("failed to serialize RSA key");
        write_file(private_key_str.as_bytes(), &private_key_path)
            .expect("failed to write instance RSA key");
        set_file_permissions(&private_key_path, 0o600)
            .expect("failed to set permissions on RSA key file");
        return private_key;
    }
    // Reuse the previously generated key
    let private_key_str =
        std::fs::read_to_string(&private_key_path).expect("failed to read instance RSA key");
    deserialize_private_key(&private_key_str).expect("failed to read instance RSA key")
}
/// Loads, validates and migrates the configuration.
///
/// Returns the config plus deprecation warnings to be logged by the caller.
/// Panics on any fatal problem (missing file, invalid YAML, bad storage
/// directory, invalid instance URI) — appropriate at startup.
pub fn parse_config() -> (Config, Vec<&'static str>) {
    let env = parse_env();
    let config_yaml =
        std::fs::read_to_string(&env.config_path).expect("failed to load config file");
    let mut config = serde_yaml::from_str::<Config>(&config_yaml).expect("invalid yaml data");
    let mut warnings = vec![];
    // Set parameters from environment
    config.config_path = env.config_path;
    if let Some(environment) = env.environment {
        // Overwrite default only if ENVIRONMENT variable is set
        config.environment = environment;
    };
    // Validate config
    if !config.storage_dir.exists() {
        panic!("storage directory does not exist");
    };
    check_directory_owner(&config.storage_dir);
    config.try_instance_url().expect("invalid instance URI");
    // IPFS needs both endpoints; having only one is a misconfiguration
    if config.ipfs_api_url.is_some() != config.ipfs_gateway_url.is_some() {
        panic!("both ipfs_api_url and ipfs_gateway_url must be set");
    };
    // Migrations: map deprecated settings onto their replacements,
    // collecting a warning for each one encountered
    if let Some(registrations_open) = config.registrations_open {
        // Change type if 'registrations_open' parameter is used
        warnings
            .push("'registrations_open' setting is deprecated, use 'registration.type' instead");
        if registrations_open {
            config.registration.registration_type = RegistrationType::Open;
        } else {
            config.registration.registration_type = RegistrationType::Invite;
        };
    };
    if let Some(read_only_user) = config.registration.default_role_read_only_user {
        warnings.push("'default_role_read_only_user' setting is deprecated, use 'registration.default_role' instead");
        if read_only_user {
            config.registration.default_role = DefaultRole::ReadOnlyUser;
        } else {
            config.registration.default_role = DefaultRole::NormalUser;
        };
    };
    if let Some(post_character_limit) = config.post_character_limit {
        warnings.push("'post_character_limit' setting is deprecated, use 'limits.posts.character_limit' instead");
        config.limits.posts.character_limit = post_character_limit;
    };
    if let Some(ref proxy_url) = config.proxy_url {
        warnings.push("'proxy_url' setting is deprecated, use 'federation.proxy_url' instead");
        config.federation.proxy_url = Some(proxy_url.to_string());
    };
    // Insert instance RSA key (generated on first run)
    config.instance_rsa_key = Some(read_instance_rsa_key(&config.storage_dir));
    (config, warnings)
}

View file

@ -1,66 +0,0 @@
use serde::{de::Error as DeserializerError, Deserialize, Deserializer};
/// How new accounts may be created on this instance.
// The manual `impl Default` was replaced with the derived form plus the
// `#[default]` variant attribute (stable since Rust 1.62; crate requires 1.68).
#[derive(Clone, Default, PartialEq)]
pub enum RegistrationType {
    /// Anyone can sign up.
    Open,
    /// Signing up requires an invite code (the default).
    #[default]
    Invite,
}
impl<'de> Deserialize<'de> for RegistrationType {
    // Manual impl so config values are the lowercase strings
    // "open" / "invite" rather than the variant names.
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        let registration_type_str = String::deserialize(deserializer)?;
        let registration_type = match registration_type_str.as_str() {
            "open" => Self::Open,
            "invite" => Self::Invite,
            _ => return Err(DeserializerError::custom("unknown registration type")),
        };
        Ok(registration_type)
    }
}
/// Role assigned to newly registered users.
// Derived Default with the `#[default]` variant attribute replaces the
// manual impl (stable since Rust 1.62; crate requires 1.68).
#[derive(Clone, Default)]
pub enum DefaultRole {
    /// Full account (the default).
    #[default]
    NormalUser,
    /// Account that cannot post.
    ReadOnlyUser,
}
impl<'de> Deserialize<'de> for DefaultRole {
    // Manual impl so config values are "user" / "read_only_user"
    // rather than the variant names.
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        let role_str = String::deserialize(deserializer)?;
        let role = match role_str.as_str() {
            "user" => Self::NormalUser,
            "read_only_user" => Self::ReadOnlyUser,
            _ => return Err(DeserializerError::custom("unknown role name")),
        };
        Ok(role)
    }
}
/// "registration" section of the config file.
#[derive(Clone, Default, Deserialize)]
pub struct RegistrationConfig {
    #[serde(rename = "type")]
    pub registration_type: RegistrationType,
    pub(super) default_role_read_only_user: Option<bool>, // deprecated
    #[serde(default)]
    pub default_role: DefaultRole,
}

View file

@ -1,17 +0,0 @@
use serde::Deserialize;
/// Retention periods for remote data; `None` means "keep forever".
// The hand-written Default impl (previously silenced with
// #[allow(clippy::derivable_impls)]) produced None for both fields,
// which is exactly what the derive generates — so derive it.
#[derive(Clone, Default, Deserialize)]
pub struct RetentionConfig {
    /// Days to keep extraneous remote posts before deletion.
    pub extraneous_posts: Option<u32>,
    /// Days to keep empty remote profiles before deletion.
    pub empty_profiles: Option<u32>,
}

View file

@ -1,46 +0,0 @@
[package]
name = "fedimovies-models"
version = "1.22.0"
license = "AGPL-3.0"
edition = "2021"
rust-version = "1.68"
[dependencies]
fedimovies-utils = { path = "../fedimovies-utils" }
# Used for working with dates
chrono = { version = "0.4.23", default-features = false, features = ["std", "serde"] }
# Used for pooling database connections
deadpool = "0.9.2"
deadpool-postgres = { version = "0.10.2", default-features = false }
# Used to work with hexadecimal strings
hex = { version = "0.4.3", features = ["serde"] }
# Used for logging
log = "0.4.14"
# Used for managing database migrations
refinery = { version = "0.8.4", features = ["tokio-postgres"] }
# Used for serialization/deserialization
serde = { version = "1.0.136", features = ["derive"] }
serde_json = "1.0.89"
# Used for creating error types
thiserror = "1.0.37"
# Async runtime
tokio = { version = "1.20.4", features = [] }
# Used for working with the PostgreSQL database (TLS support via OpenSSL)
openssl = { version = "0.10", features = ["vendored"] }
postgres-openssl = "0.5.0"
tokio-postgres = { version = "0.7.6", features = ["with-chrono-0_4", "with-uuid-1", "with-serde_json-1"] }
postgres-types = { version = "0.2.3", features = ["derive", "with-chrono-0_4", "with-uuid-1", "with-serde_json-1"] }
postgres-protocol = "0.6.4"
# Used to construct PostgreSQL queries
postgres_query = { git = "https://github.com/nolanderc/rust-postgres-query", rev = "b4422051c8a31fbba4a35f88004c1cefb1878dd5" }
postgres_query_macro = { git = "https://github.com/nolanderc/rust-postgres-query", rev = "b4422051c8a31fbba4a35f88004c1cefb1878dd5" }
# Used to work with UUIDs
uuid = { version = "1.1.2", features = ["serde", "v4"] }
[dev-dependencies]
fedimovies-utils = { path = "../fedimovies-utils", features = ["test-utils"] }
serial_test = "0.7.0"
[features]
test-utils = []

View file

@ -1,5 +0,0 @@
-- Directed links between posts; rows vanish when either post is deleted.
CREATE TABLE post_link (
    source_id UUID NOT NULL REFERENCES post (id) ON DELETE CASCADE,
    target_id UUID NOT NULL REFERENCES post (id) ON DELETE CASCADE,
    PRIMARY KEY (source_id, target_id)
);

View file

@ -1,9 +0,0 @@
-- Rewrite each profile's payment_options array, keeping only entries
-- whose payment_type is '1' (links); missing matches yield '[]'.
UPDATE actor_profile
SET payment_options = (
    -- remove all payment options except links
    SELECT COALESCE (
        jsonb_agg(opt) FILTER (WHERE opt ->> 'payment_type' = '1'),
        '[]'
    )
    FROM jsonb_array_elements(actor_profile.payment_options) AS opt
);

View file

@ -1,2 +0,0 @@
ALTER TABLE subscription ADD COLUMN chain_id VARCHAR(50) NOT NULL DEFAULT 'eip155:31337';
ALTER TABLE subscription ALTER COLUMN chain_id DROP DEFAULT;

View file

@ -1,10 +0,0 @@
CREATE TABLE invoice (
id UUID PRIMARY KEY,
sender_id UUID NOT NULL REFERENCES actor_profile (id) ON DELETE CASCADE,
recipient_id UUID NOT NULL REFERENCES user_account (id) ON DELETE CASCADE,
chain_id VARCHAR(50) NOT NULL,
payment_address VARCHAR(200) NOT NULL,
invoice_status SMALLINT NOT NULL DEFAULT 1,
created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT CURRENT_TIMESTAMP,
UNIQUE (chain_id, payment_address)
);

View file

@ -1 +0,0 @@
ALTER TABLE subscription ALTER COLUMN sender_address DROP NOT NULL;

View file

@ -1,5 +0,0 @@
ALTER TABLE actor_profile ADD COLUMN subscriber_count INTEGER NOT NULL CHECK (subscriber_count >= 0) DEFAULT 0;
UPDATE actor_profile SET subscriber_count = (
SELECT count(*) FROM relationship WHERE relationship.target_id = actor_profile.id
AND relationship.relationship_type = 3
);

View file

@ -1,2 +0,0 @@
ALTER TABLE invoice ADD COLUMN amount BIGINT NOT NULL DEFAULT 0 CHECK (amount >= 0);
ALTER TABLE invoice ALTER COLUMN amount DROP DEFAULT;

View file

@ -1,19 +0,0 @@
CREATE TABLE instance (
hostname VARCHAR(100) PRIMARY KEY
);
INSERT INTO instance
SELECT DISTINCT split_part(acct, '@', 2)
FROM actor_profile
WHERE acct <> username;
ALTER TABLE actor_profile
ADD COLUMN hostname VARCHAR(100) REFERENCES instance (hostname) ON DELETE RESTRICT;
UPDATE actor_profile
SET hostname = split_part(acct, '@', 2)
WHERE acct <> username;
ALTER TABLE actor_profile
ADD CONSTRAINT actor_profile_hostname_check CHECK ((hostname IS NULL) = (actor_json IS NULL));
ALTER TABLE actor_profile
DROP COLUMN acct;
ALTER TABLE actor_profile
ADD COLUMN acct VARCHAR(200) UNIQUE
GENERATED ALWAYS AS (CASE WHEN hostname IS NULL THEN username ELSE username || '@' || hostname END) STORED;

View file

@ -1,8 +0,0 @@
CREATE TABLE background_job (
id UUID PRIMARY KEY,
job_type SMALLINT NOT NULL,
job_data JSONB NOT NULL,
job_status SMALLINT NOT NULL DEFAULT 1,
scheduled_for TIMESTAMP WITH TIME ZONE NOT NULL,
updated_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT CURRENT_TIMESTAMP
);

View file

@ -1 +0,0 @@
ALTER TABLE follow_request ADD COLUMN activity_id VARCHAR(250) UNIQUE;

View file

@ -1 +0,0 @@
ALTER TABLE actor_profile ADD COLUMN unreachable_since TIMESTAMP WITH TIME ZONE;

View file

@ -1,10 +0,0 @@
ALTER TABLE actor_profile ADD COLUMN avatar JSONB;
ALTER TABLE actor_profile ADD COLUMN banner JSONB;
UPDATE actor_profile
SET avatar = json_build_object('file_name', avatar_file_name)
WHERE avatar_file_name IS NOT NULL;
UPDATE actor_profile
SET banner = json_build_object('file_name', banner_file_name)
WHERE banner_file_name IS NOT NULL;
ALTER TABLE actor_profile DROP COLUMN avatar_file_name;
ALTER TABLE actor_profile DROP COLUMN banner_file_name;

View file

@ -1,16 +0,0 @@
-- Custom emoji registry. Local emojis have NULL hostname and NULL object_id;
-- remote emojis have both (enforced by the CHECK constraint).
CREATE TABLE emoji (
    id UUID PRIMARY KEY,
    emoji_name VARCHAR(100) NOT NULL,
    hostname VARCHAR(100) REFERENCES instance (hostname) ON DELETE RESTRICT,
    image JSONB NOT NULL,
    object_id VARCHAR(250) UNIQUE,
    updated_at TIMESTAMP WITH TIME ZONE NOT NULL,
    UNIQUE (emoji_name, hostname),
    CHECK ((hostname IS NULL) = (object_id IS NULL))
);
-- Many-to-many link between posts and the emojis used in them.
CREATE TABLE post_emoji (
    post_id UUID NOT NULL REFERENCES post (id) ON DELETE CASCADE,
    emoji_id UUID NOT NULL REFERENCES emoji (id) ON DELETE CASCADE,
    PRIMARY KEY (post_id, emoji_id)
);

View file

@ -1 +0,0 @@
-- Nullable because pre-existing attachments have no recorded size.
ALTER TABLE media_attachment ADD COLUMN file_size INTEGER;

View file

@ -1,2 +0,0 @@
-- Add integer role column (decoded in Rust); backfill with 1, then drop the
-- default so new inserts must set the role explicitly.
ALTER TABLE user_account ADD COLUMN user_role SMALLINT NOT NULL DEFAULT 1;
ALTER TABLE user_account ALTER COLUMN user_role DROP DEFAULT;

View file

@ -1,10 +0,0 @@
-- Registered OAuth client applications (Mastodon-API style).
CREATE TABLE oauth_application (
    id INTEGER GENERATED BY DEFAULT AS IDENTITY PRIMARY KEY,
    app_name VARCHAR(100) NOT NULL,
    website VARCHAR(100),
    scopes VARCHAR(200) NOT NULL,
    redirect_uri VARCHAR(200) NOT NULL,
    client_id UUID UNIQUE NOT NULL,
    client_secret VARCHAR(100) NOT NULL,
    created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT CURRENT_TIMESTAMP
);

View file

@ -1,9 +0,0 @@
-- Authorization codes issued during the OAuth flow; rows cascade away with
-- their user or application.
CREATE TABLE oauth_authorization (
    id INTEGER GENERATED BY DEFAULT AS IDENTITY PRIMARY KEY,
    code VARCHAR(100) UNIQUE NOT NULL,
    user_id UUID NOT NULL REFERENCES user_account (id) ON DELETE CASCADE,
    application_id INTEGER NOT NULL REFERENCES oauth_application (id) ON DELETE CASCADE,
    scopes VARCHAR(200) NOT NULL,
    created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT CURRENT_TIMESTAMP,
    expires_at TIMESTAMP WITH TIME ZONE NOT NULL
);

View file

@ -1,2 +0,0 @@
-- Allow annotating invite codes; created_at is nullable for pre-existing codes.
ALTER TABLE user_invite_code ADD COLUMN note VARCHAR(200);
ALTER TABLE user_invite_code ADD COLUMN created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP;

View file

@ -1,5 +0,0 @@
-- Many-to-many link between profiles and the custom emojis they use.
CREATE TABLE profile_emoji (
    profile_id UUID NOT NULL REFERENCES actor_profile (id) ON DELETE CASCADE,
    emoji_id UUID NOT NULL REFERENCES emoji (id) ON DELETE CASCADE,
    PRIMARY KEY (profile_id, emoji_id)
);

View file

@ -1 +0,0 @@
-- Denormalized cache of a profile's emojis as a JSON array (default empty).
ALTER TABLE actor_profile ADD COLUMN emojis JSONB NOT NULL DEFAULT '[]';

View file

@ -1,4 +0,0 @@
-- Generic key/value store for internal instance-level settings.
CREATE TABLE internal_property (
    property_name VARCHAR(100) PRIMARY KEY,
    property_value JSONB NOT NULL
);

View file

@ -1,18 +0,0 @@
-- Replace string proof_type discriminators with integer codes:
-- 'ethereum-eip191-00' -> 1, 'MitraMinisignSignature2022A' -> 2.
UPDATE actor_profile
SET identity_proofs = replaced.identity_proofs
FROM (
    SELECT
        actor_profile.id,
        jsonb_agg(
            -- NOTE(review): the CASE has no ELSE, so a proof with any other
            -- proof_type would be aggregated as NULL — presumably only these
            -- two types existed at migration time; verify before reuse.
            CASE
                WHEN identity_proof ->> 'proof_type' = 'ethereum-eip191-00'
                THEN jsonb_set(identity_proof, '{proof_type}', '1')
                WHEN identity_proof ->> 'proof_type' = 'MitraMinisignSignature2022A'
                THEN jsonb_set(identity_proof, '{proof_type}', '2')
            END
        ) AS identity_proofs
    FROM actor_profile
    CROSS JOIN jsonb_array_elements(actor_profile.identity_proofs) AS identity_proof
    GROUP BY actor_profile.id
) AS replaced
WHERE actor_profile.id = replaced.id;

View file

@ -1,2 +0,0 @@
-- Backfill with FALSE, then drop the default so new rows set it explicitly.
ALTER TABLE actor_profile ADD COLUMN manually_approves_followers BOOLEAN NOT NULL DEFAULT FALSE;
ALTER TABLE actor_profile ALTER COLUMN manually_approves_followers DROP DEFAULT;

View file

@ -1 +0,0 @@
-- Account aliases (e.g. for moved accounts) stored as a JSON array.
ALTER TABLE actor_profile ADD COLUMN aliases JSONB NOT NULL DEFAULT '[]';

View file

@ -1,5 +0,0 @@
-- Forbid self-referential rows in all pairwise relationship tables.
ALTER TABLE relationship ADD CONSTRAINT relationship_source_id_target_id_check CHECK (source_id != target_id);
ALTER TABLE follow_request ADD CONSTRAINT follow_request_source_id_target_id_check CHECK (source_id != target_id);
ALTER TABLE post_link ADD CONSTRAINT post_link_source_id_target_id_check CHECK (source_id != target_id);
ALTER TABLE invoice ADD CONSTRAINT invoice_sender_id_recipient_id_check CHECK (sender_id != recipient_id);
ALTER TABLE subscription ADD CONSTRAINT subscription_sender_id_recipient_id_check CHECK (sender_id != recipient_id);

View file

@ -1,6 +0,0 @@
-- Widen URL/ID columns: some fediverse servers emit IDs longer than 250 chars.
ALTER TABLE actor_profile ALTER COLUMN actor_id TYPE VARCHAR(2000);
ALTER TABLE oauth_application ALTER COLUMN redirect_uri TYPE VARCHAR(2000);
ALTER TABLE follow_request ALTER COLUMN activity_id TYPE VARCHAR(2000);
ALTER TABLE post ALTER COLUMN object_id TYPE VARCHAR(2000);
ALTER TABLE post_reaction ALTER COLUMN activity_id TYPE VARCHAR(2000);
ALTER TABLE emoji ALTER COLUMN object_id TYPE VARCHAR(2000);

View file

@ -1 +0,0 @@
-- Per-user opaque client settings blob (default empty object).
ALTER TABLE user_account ADD COLUMN client_config JSONB NOT NULL DEFAULT '{}';

View file

@ -1,2 +0,0 @@
-- Sensitive-content flag; backfill FALSE, then require explicit values.
ALTER TABLE post ADD COLUMN is_sensitive BOOLEAN NOT NULL DEFAULT FALSE;
ALTER TABLE post ALTER COLUMN is_sensitive DROP DEFAULT;

View file

@ -1,132 +0,0 @@
use chrono::{DateTime, Utc};
use uuid::Uuid;

use fedimovies_utils::id::generate_ulid;

use crate::cleanup::{find_orphaned_files, find_orphaned_ipfs_objects, DeletionQueue};
use crate::database::{DatabaseClient, DatabaseError, DatabaseTypeError};

use super::types::DbMediaAttachment;
pub async fn create_attachment(
db_client: &impl DatabaseClient,
owner_id: &Uuid,
file_name: String,
file_size: usize,
media_type: Option<String>,
) -> Result<DbMediaAttachment, DatabaseError> {
let attachment_id = generate_ulid();
let file_size: i32 = file_size.try_into().expect("value should be within bounds");
let inserted_row = db_client
.query_one(
"
INSERT INTO media_attachment (
id,
owner_id,
file_name,
file_size,
media_type
)
VALUES ($1, $2, $3, $4, $5)
RETURNING media_attachment
",
&[
&attachment_id,
&owner_id,
&file_name,
&file_size,
&media_type,
],
)
.await?;
let db_attachment: DbMediaAttachment = inserted_row.try_get("media_attachment")?;
Ok(db_attachment)
}
/// Records the IPFS CID for an attachment.
///
/// Write-once: the `ipfs_cid IS NULL` guard means an attachment that is
/// already pinned is not updated, and in that case (or if the ID does not
/// exist) `DatabaseError::NotFound` is returned.
pub async fn set_attachment_ipfs_cid(
    db_client: &impl DatabaseClient,
    attachment_id: &Uuid,
    ipfs_cid: &str,
) -> Result<DbMediaAttachment, DatabaseError> {
    let maybe_row = db_client
        .query_opt(
            "
            UPDATE media_attachment
            SET ipfs_cid = $1
            WHERE id = $2 AND ipfs_cid IS NULL
            RETURNING media_attachment
            ",
            &[&ipfs_cid, &attachment_id],
        )
        .await?;
    let row = maybe_row.ok_or(DatabaseError::NotFound("attachment"))?;
    let db_attachment = row.try_get("media_attachment")?;
    Ok(db_attachment)
}
/// Deletes attachments that were never linked to a post and were created
/// before `created_before`.
///
/// Returns a `DeletionQueue` with the files and IPFS objects that are no
/// longer referenced by any other row and can be removed from storage.
pub async fn delete_unused_attachments(
    db_client: &impl DatabaseClient,
    created_before: &DateTime<Utc>,
) -> Result<DeletionQueue, DatabaseError> {
    let rows = db_client
        .query(
            "
            DELETE FROM media_attachment
            WHERE post_id IS NULL AND created_at < $1
            RETURNING file_name, ipfs_cid
            ",
            &[&created_before],
        )
        .await?;
    let mut files = vec![];
    let mut ipfs_objects = vec![];
    for row in rows {
        let file_name = row.try_get("file_name")?;
        files.push(file_name);
        // ipfs_cid is nullable; only collect CIDs that were actually set.
        if let Some(ipfs_cid) = row.try_get("ipfs_cid")? {
            ipfs_objects.push(ipfs_cid);
        };
    }
    // Keep only files/objects not referenced elsewhere (avatars, posts, ...).
    let orphaned_files = find_orphaned_files(db_client, files).await?;
    let orphaned_ipfs_objects = find_orphaned_ipfs_objects(db_client, ipfs_objects).await?;
    Ok(DeletionQueue {
        files: orphaned_files,
        ipfs_objects: orphaned_ipfs_objects,
    })
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::database::test_utils::create_test_database;
    use crate::profiles::{queries::create_profile, types::ProfileCreateData};
    use serial_test::serial;

    // Integration test: requires the Postgres instance from test_utils.
    // `#[serial]` prevents concurrent tests from recreating the shared
    // test database under each other.
    #[tokio::test]
    #[serial]
    async fn test_create_attachment() {
        let db_client = &mut create_test_database().await;
        let profile_data = ProfileCreateData {
            username: "test".to_string(),
            ..Default::default()
        };
        let profile = create_profile(db_client, profile_data).await.unwrap();
        let file_name = "test.jpg";
        let file_size = 10000;
        let media_type = "image/png";
        let attachment = create_attachment(
            db_client,
            &profile.id,
            file_name.to_string(),
            file_size,
            Some(media_type.to_string()),
        )
        .await
        .unwrap();
        assert_eq!(attachment.owner_id, profile.id);
        assert_eq!(attachment.file_name, file_name);
        assert_eq!(attachment.file_size.unwrap(), file_size as i32);
        assert_eq!(attachment.media_type.unwrap(), media_type);
        // New attachments must start unlinked from any post.
        assert!(attachment.post_id.is_none());
    }
}

View file

@ -1,139 +0,0 @@
use chrono::{DateTime, Utc};
use serde_json::Value;
use uuid::Uuid;
use super::types::{DbBackgroundJob, JobStatus, JobType};
use crate::database::{DatabaseClient, DatabaseError};
/// Adds a job to the persistent queue. The row is created with the default
/// status (queued) and becomes eligible for pickup once `scheduled_for`
/// has passed.
pub async fn enqueue_job(
    db_client: &impl DatabaseClient,
    job_type: &JobType,
    job_data: &Value,
    scheduled_for: &DateTime<Utc>,
) -> Result<(), DatabaseError> {
    let job_id = Uuid::new_v4();
    db_client
        .execute(
            "
            INSERT INTO background_job (
                id,
                job_type,
                job_data,
                scheduled_for
            )
            VALUES ($1, $2, $3, $4)
            ",
            &[&job_id, &job_type, &job_data, &scheduled_for],
        )
        .await?;
    Ok(())
}
/// Claims up to `batch_size` due jobs of the given type and marks them as
/// running in a single UPDATE.
///
/// A job is claimable if it is queued, or if it is already marked running
/// but its `updated_at` is older than `job_timeout` seconds — i.e. a
/// previous worker presumably died and the job is reclaimed. Returned jobs
/// stay in the table until `delete_job_from_queue` is called.
pub async fn get_job_batch(
    db_client: &impl DatabaseClient,
    job_type: &JobType,
    batch_size: u32,
    job_timeout: u32,
) -> Result<Vec<DbBackgroundJob>, DatabaseError> {
    // Intervals can't be passed as native parameters:
    // https://github.com/sfackler/rust-postgres/issues/60
    // So the timeout is sent as text ("<seconds>S") and cast to interval in SQL.
    let job_timeout_pg = format!("{}S", job_timeout); // interval
    let rows = db_client
        .query(
            "
            UPDATE background_job
            SET
                job_status = $1,
                updated_at = CURRENT_TIMESTAMP
            WHERE id IN (
                SELECT id
                FROM background_job
                WHERE
                    job_type = $2
                    AND scheduled_for < CURRENT_TIMESTAMP
                    AND (
                        -- queued
                        job_status = $3
                        -- running
                        OR job_status = $1
                            AND updated_at < CURRENT_TIMESTAMP - $5::text::interval
                    )
                ORDER BY
                    -- queued jobs first
                    job_status ASC,
                    scheduled_for ASC
                LIMIT $4
            )
            RETURNING background_job
            ",
            &[
                &JobStatus::Running,
                &job_type,
                &JobStatus::Queued,
                &i64::from(batch_size),
                &job_timeout_pg,
            ],
        )
        .await?;
    let jobs = rows
        .iter()
        .map(|row| row.try_get("background_job"))
        .collect::<Result<_, _>>()?;
    Ok(jobs)
}
/// Removes a finished job from the queue.
///
/// Returns `DatabaseError::NotFound` if no job with the given ID exists.
pub async fn delete_job_from_queue(
    db_client: &impl DatabaseClient,
    job_id: &Uuid,
) -> Result<(), DatabaseError> {
    let deleted_count = db_client
        .execute(
            "
            DELETE FROM background_job
            WHERE id = $1
            ",
            &[&job_id],
        )
        .await?;
    match deleted_count {
        0 => Err(DatabaseError::NotFound("background job")),
        _ => Ok(()),
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::database::test_utils::create_test_database;
    use serde_json::json;
    use serial_test::serial;

    // Integration test covering the full enqueue -> claim -> delete cycle.
    // Requires the Postgres instance from test_utils; `#[serial]` avoids
    // concurrent recreation of the shared test database.
    #[tokio::test]
    #[serial]
    async fn test_queue() {
        let db_client = &create_test_database().await;
        let job_type = JobType::IncomingActivity;
        let job_data = json!({
            "activity": {},
            "is_authenticated": true,
            "failure_count": 0,
        });
        let scheduled_for = Utc::now();
        enqueue_job(db_client, &job_type, &job_data, &scheduled_for)
            .await
            .unwrap();
        // First batch claims the job and flips it to Running.
        let batch_1 = get_job_batch(db_client, &job_type, 10, 3600).await.unwrap();
        assert_eq!(batch_1.len(), 1);
        let job = &batch_1[0];
        assert_eq!(job.job_type, job_type);
        assert_eq!(job.job_data, job_data);
        assert_eq!(job.job_status, JobStatus::Running);
        // A running job within its timeout must not be claimed again.
        let batch_2 = get_job_batch(db_client, &job_type, 10, 3600).await.unwrap();
        assert_eq!(batch_2.len(), 0);
        delete_job_from_queue(db_client, &job.id).await.unwrap();
        let batch_3 = get_job_batch(db_client, &job_type, 10, 3600).await.unwrap();
        assert_eq!(batch_3.len(), 0);
    }
}

View file

@ -1,82 +0,0 @@
use chrono::{DateTime, Utc};
use postgres_types::FromSql;
use serde_json::Value;
use uuid::Uuid;
use crate::database::{
int_enum::{int_enum_from_sql, int_enum_to_sql},
DatabaseTypeError,
};
/// Kind of background job, stored as a SMALLINT in `background_job.job_type`.
#[derive(Debug, PartialEq)]
pub enum JobType {
    IncomingActivity,
    OutgoingActivity,
}

impl From<&JobType> for i16 {
    /// Encode the job type as its database discriminant.
    /// These values are part of the on-disk format; never renumber them.
    fn from(job_type: &JobType) -> i16 {
        use JobType::*;
        match job_type {
            IncomingActivity => 1,
            OutgoingActivity => 2,
        }
    }
}
impl TryFrom<i16> for JobType {
    type Error = DatabaseTypeError;

    // Decode the integer stored in `background_job.job_type`.
    // An unknown discriminant means DB/code version skew and is an error.
    fn try_from(value: i16) -> Result<Self, Self::Error> {
        let job_type = match value {
            1 => Self::IncomingActivity,
            2 => Self::OutgoingActivity,
            _ => return Err(DatabaseTypeError),
        };
        Ok(job_type)
    }
}

// Generate postgres FromSql/ToSql impls backed by the i16 conversions above.
int_enum_from_sql!(JobType);
int_enum_to_sql!(JobType);
/// Lifecycle state of a background job, stored as a SMALLINT in
/// `background_job.job_status`.
#[derive(Debug, PartialEq)]
pub enum JobStatus {
    Queued,
    Running,
}

impl From<&JobStatus> for i16 {
    /// Encode the status as its database discriminant.
    /// These values are part of the on-disk format; never renumber them.
    fn from(status: &JobStatus) -> i16 {
        use JobStatus::*;
        match status {
            Queued => 1,
            Running => 2,
        }
    }
}
impl TryFrom<i16> for JobStatus {
    type Error = DatabaseTypeError;

    // Decode the integer stored in `background_job.job_status`.
    // An unknown discriminant means DB/code version skew and is an error.
    fn try_from(value: i16) -> Result<Self, Self::Error> {
        let job_status = match value {
            1 => Self::Queued,
            2 => Self::Running,
            _ => return Err(DatabaseTypeError),
        };
        Ok(job_status)
    }
}

// Generate postgres FromSql/ToSql impls backed by the i16 conversions above.
int_enum_from_sql!(JobStatus);
int_enum_to_sql!(JobStatus);
/// Row type for the `background_job` table (mapped via the composite
/// `background_job` Postgres type).
#[derive(FromSql)]
#[postgres(name = "background_job")]
pub struct DbBackgroundJob {
    pub id: Uuid,
    pub job_type: JobType,
    pub job_data: Value,
    pub job_status: JobStatus,
    // Earliest time a worker may pick this job up.
    pub scheduled_for: DateTime<Utc>,
    // Last status transition; used to detect timed-out running jobs.
    pub updated_at: DateTime<Utc>,
}

View file

@ -1,66 +0,0 @@
use crate::database::{DatabaseClient, DatabaseError};
/// Media files and IPFS objects that are no longer referenced by any
/// database row and may be removed from storage.
pub struct DeletionQueue {
    pub files: Vec<String>,
    pub ipfs_objects: Vec<String>,
}
/// Filters `files` down to those not referenced by any attachment,
/// profile avatar/banner, or emoji image — i.e. files that are safe to
/// delete from disk.
pub async fn find_orphaned_files(
    db_client: &impl DatabaseClient,
    files: Vec<String>,
) -> Result<Vec<String>, DatabaseError> {
    let rows = db_client
        .query(
            "
            SELECT DISTINCT fname
            FROM unnest($1::text[]) AS fname
            WHERE
                NOT EXISTS (
                    SELECT 1 FROM media_attachment WHERE file_name = fname
                )
                AND NOT EXISTS (
                    SELECT 1 FROM actor_profile
                    WHERE avatar ->> 'file_name' = fname
                        OR banner ->> 'file_name' = fname
                )
                AND NOT EXISTS (
                    SELECT 1 FROM emoji
                    WHERE image ->> 'file_name' = fname
                )
            ",
            &[&files],
        )
        .await?;
    let orphaned_files = rows
        .iter()
        .map(|row| row.try_get("fname"))
        .collect::<Result<_, _>>()?;
    Ok(orphaned_files)
}
/// Filters `ipfs_objects` down to CIDs not referenced by any attachment
/// or post — i.e. objects that are safe to unpin from IPFS.
pub async fn find_orphaned_ipfs_objects(
    db_client: &impl DatabaseClient,
    ipfs_objects: Vec<String>,
) -> Result<Vec<String>, DatabaseError> {
    let rows = db_client
        .query(
            "
            SELECT DISTINCT cid
            FROM unnest($1::text[]) AS cid
            WHERE
                NOT EXISTS (
                    SELECT 1 FROM media_attachment WHERE ipfs_cid = cid
                )
                AND NOT EXISTS (
                    SELECT 1 FROM post WHERE ipfs_cid = cid
                )
            ",
            &[&ipfs_objects],
        )
        .await?;
    let orphaned_ipfs_objects = rows
        .iter()
        .map(|row| row.try_get("cid"))
        .collect::<Result<_, _>>()?;
    Ok(orphaned_ipfs_objects)
}

View file

@ -1,113 +0,0 @@
use openssl::ssl::{SslConnector, SslMethod};
use postgres_openssl::MakeTlsConnector;
use std::path::Path;
use tokio_postgres::config::Config as DatabaseConfig;
use tokio_postgres::error::{Error as PgError, SqlState};
pub mod int_enum;
pub mod json_macro;
pub mod migrate;
pub mod query_macro;
#[cfg(feature = "test-utils")]
pub mod test_utils;
pub type DbPool = deadpool_postgres::Pool;
pub use tokio_postgres::GenericClient as DatabaseClient;
/// Raised when a value read from the database cannot be decoded into the
/// expected Rust type (e.g. an unknown integer enum discriminant).
#[derive(thiserror::Error, Debug)]
#[error("database type error")]
pub struct DatabaseTypeError;

/// Unified error type for all database operations in this crate.
#[derive(thiserror::Error, Debug)]
pub enum DatabaseError {
    #[error("database pool error")]
    DatabasePoolError(#[from] deadpool_postgres::PoolError),
    #[error("database query error")]
    DatabaseQueryError(#[from] postgres_query::Error),
    #[error("database client error")]
    DatabaseClientError(#[from] tokio_postgres::Error),
    #[error(transparent)]
    DatabaseTypeError(#[from] DatabaseTypeError),
    #[error("{0} not found")]
    NotFound(&'static str), // object type
    #[error("{0} already exists")]
    AlreadyExists(&'static str), // object type
}
/// Opens a single tokio-postgres connection (optionally over TLS when a CA
/// file is provided) and spawns the connection driver task in the background.
///
/// # Panics
/// Panics if the TLS builder, the CA file, or the connection itself fails —
/// this is intended for startup/test paths where failing fast is acceptable.
pub async fn create_database_client(
    db_config: &DatabaseConfig,
    ca_file_path: Option<&Path>,
) -> tokio_postgres::Client {
    // Return directly from each branch (the previous `let client = ...; client`
    // was a clippy::let_and_return).
    if let Some(ca_file_path) = ca_file_path {
        let mut builder = SslConnector::builder(SslMethod::tls()).unwrap();
        log::debug!("Using TLS CA file: {}", ca_file_path.display());
        builder.set_ca_file(ca_file_path).unwrap();
        let connector = MakeTlsConnector::new(builder.build());
        let (client, connection) = db_config.connect(connector).await.unwrap();
        // The connection object performs the actual I/O and must be polled
        // for the client to work; drive it on its own task.
        tokio::spawn(async move {
            if let Err(err) = connection.await {
                log::error!("connection with tls error: {}", err);
            };
        });
        client
    } else {
        let (client, connection) = db_config.connect(tokio_postgres::NoTls).await.unwrap();
        tokio::spawn(async move {
            if let Err(err) = connection.await {
                log::error!("connection error: {}", err);
            };
        });
        client
    }
}
/// Builds a deadpool connection pool of at most `pool_size` connections,
/// optionally using TLS when a CA file path is given.
///
/// Panics on an invalid database URL or unusable CA file.
pub fn create_pool(database_url: &str, ca_file_path: Option<&Path>, pool_size: usize) -> DbPool {
    let manager = if let Some(ca_file_path) = ca_file_path {
        let mut builder = SslConnector::builder(SslMethod::tls()).unwrap();
        log::info!("Using TLS CA file: {}", ca_file_path.display());
        builder.set_ca_file(ca_file_path).unwrap();
        let connector = MakeTlsConnector::new(builder.build());
        deadpool_postgres::Manager::new(
            database_url.parse().expect("invalid database URL"),
            connector,
        )
    } else {
        deadpool_postgres::Manager::new(
            database_url.parse().expect("invalid database URL"),
            tokio_postgres::NoTls,
        )
    };
    DbPool::builder(manager)
        .max_size(pool_size)
        .build()
        .unwrap()
}
/// Checks a client out of the pool.
///
/// Returns the deadpool wrapper object rather than a bare client:
/// https://github.com/bikeshedder/deadpool/issues/56
pub async fn get_database_client(
    db_pool: &DbPool,
) -> Result<deadpool_postgres::Client, DatabaseError> {
    Ok(db_pool.get().await?)
}
/// Builds a `map_err` closure that turns a Postgres unique-constraint
/// violation into `DatabaseError::AlreadyExists(object_type)`; every other
/// error is converted as usual.
pub fn catch_unique_violation(object_type: &'static str) -> impl Fn(PgError) -> DatabaseError {
    move |err| {
        let is_unique_violation = err.code() == Some(&SqlState::UNIQUE_VIOLATION);
        if is_unique_violation {
            DatabaseError::AlreadyExists(object_type)
        } else {
            err.into()
        }
    }
}

View file

@ -1,36 +0,0 @@
use super::create_database_client;
use super::migrate::apply_migrations;
use tokio_postgres::config::Config;
use tokio_postgres::Client;
const DEFAULT_CONNECTION_URL: &str = "postgres://fedimovies:fedimovies@127.0.0.1:55432/fedimovies";
/// Quote a string as a Postgres identifier by doubling embedded quotes.
/// (The previous code used Rust's `{:?}` Debug formatting, whose backslash
/// escaping is not valid SQL identifier quoting.)
fn quote_identifier(name: &str) -> String {
    format!("\"{}\"", name.replace('"', "\"\""))
}

/// Drops and recreates the test database named in `TEST_DATABASE_URL`
/// (or the default URL), applies all migrations, and returns a client
/// connected to the fresh database.
pub async fn create_test_database() -> Client {
    let connection_url =
        std::env::var("TEST_DATABASE_URL").unwrap_or(DEFAULT_CONNECTION_URL.to_string());
    let mut db_config: Config = connection_url
        .parse()
        .expect("invalid database connection URL");
    let db_name = db_config
        .get_dbname()
        .expect("database name not specified")
        .to_string();
    // Connect without selecting the target database: it cannot be dropped
    // while we are connected to it.
    db_config.dbname("");
    let db_client = create_database_client(&db_config, None).await;
    let drop_db_statement = format!("DROP DATABASE IF EXISTS {}", quote_identifier(&db_name));
    db_client.execute(&drop_db_statement, &[]).await.unwrap();
    let create_db_statement = format!(
        "CREATE DATABASE {} WITH OWNER={};",
        quote_identifier(&db_name),
        quote_identifier(db_config.get_user().unwrap()),
    );
    db_client.execute(&create_db_statement, &[]).await.unwrap();
    // Reconnect to the freshly created database and bring the schema up to date.
    db_config.dbname(&db_name);
    let mut db_client = create_database_client(&db_config, None).await;
    apply_migrations(&mut db_client).await;
    db_client
}

View file

@ -1,16 +0,0 @@
use crate::database::{DatabaseClient, DatabaseError};
use super::queries::{get_emoji_by_name_and_hostname, get_local_emoji_by_name};
use super::types::DbEmoji;
/// Looks up an emoji by name.
///
/// Remote emojis are scoped by instance hostname; passing `None` selects
/// a local emoji (which has no hostname).
pub async fn get_emoji_by_name(
    db_client: &impl DatabaseClient,
    emoji_name: &str,
    maybe_hostname: Option<&str>,
) -> Result<DbEmoji, DatabaseError> {
    match maybe_hostname {
        Some(hostname) => get_emoji_by_name_and_hostname(db_client, emoji_name, hostname).await,
        None => get_local_emoji_by_name(db_client, emoji_name).await,
    }
}

View file

@ -1,3 +0,0 @@
pub mod helpers;
pub mod queries;
pub mod types;

View file

@ -1,292 +0,0 @@
use chrono::{DateTime, Utc};
use uuid::Uuid;
use fedimovies_utils::id::generate_ulid;
use crate::cleanup::{find_orphaned_files, DeletionQueue};
use crate::database::{catch_unique_violation, DatabaseClient, DatabaseError};
use crate::instances::queries::create_instance;
use crate::profiles::queries::update_emoji_caches;
use super::types::{DbEmoji, EmojiImage};
/// Inserts a custom emoji. For remote emojis (`hostname` is `Some`) the
/// instance row is created first if missing, satisfying the foreign key.
///
/// Returns `DatabaseError::AlreadyExists("emoji")` when the (name, hostname)
/// pair or the object ID is already taken.
pub async fn create_emoji(
    db_client: &impl DatabaseClient,
    emoji_name: &str,
    hostname: Option<&str>,
    image: EmojiImage,
    object_id: Option<&str>,
    updated_at: &DateTime<Utc>,
) -> Result<DbEmoji, DatabaseError> {
    let emoji_id = generate_ulid();
    if let Some(hostname) = hostname {
        create_instance(db_client, hostname).await?;
    };
    let row = db_client
        .query_one(
            "
            INSERT INTO emoji (
                id,
                emoji_name,
                hostname,
                image,
                object_id,
                updated_at
            )
            VALUES ($1, $2, $3, $4, $5, $6)
            RETURNING emoji
            ",
            &[
                &emoji_id,
                &emoji_name,
                &hostname,
                &image,
                &object_id,
                &updated_at,
            ],
        )
        .await
        .map_err(catch_unique_violation("emoji"))?;
    let emoji = row.try_get("emoji")?;
    Ok(emoji)
}
/// Replaces an emoji's image and bumps its `updated_at`, then refreshes the
/// denormalized emoji caches on profiles that use it.
pub async fn update_emoji(
    db_client: &impl DatabaseClient,
    emoji_id: &Uuid,
    image: EmojiImage,
    updated_at: &DateTime<Utc>,
) -> Result<DbEmoji, DatabaseError> {
    let row = db_client
        .query_one(
            "
            UPDATE emoji
            SET
                image = $1,
                updated_at = $2
            WHERE id = $3
            RETURNING emoji
            ",
            &[&image, &updated_at, &emoji_id],
        )
        .await?;
    let emoji: DbEmoji = row.try_get("emoji")?;
    // Profiles cache emoji data in a JSONB column; keep it in sync.
    update_emoji_caches(db_client, &emoji.id).await?;
    Ok(emoji)
}
/// Fetches a local emoji (one with no hostname) by name.
pub async fn get_local_emoji_by_name(
    db_client: &impl DatabaseClient,
    emoji_name: &str,
) -> Result<DbEmoji, DatabaseError> {
    let maybe_row = db_client
        .query_opt(
            "
            SELECT emoji
            FROM emoji
            WHERE hostname IS NULL AND emoji_name = $1
            ",
            &[&emoji_name],
        )
        .await?;
    let row = maybe_row.ok_or(DatabaseError::NotFound("emoji"))?;
    let emoji = row.try_get("emoji")?;
    Ok(emoji)
}
/// Fetches local emojis matching any of the given names.
/// Names that don't exist are silently absent from the result.
pub async fn get_local_emojis_by_names(
    db_client: &impl DatabaseClient,
    names: &[String],
) -> Result<Vec<DbEmoji>, DatabaseError> {
    let rows = db_client
        .query(
            "
            SELECT emoji
            FROM emoji
            WHERE hostname IS NULL AND emoji_name = ANY($1)
            ",
            &[&names],
        )
        .await?;
    let emojis = rows
        .iter()
        .map(|row| row.try_get("emoji"))
        .collect::<Result<_, _>>()?;
    Ok(emojis)
}
/// Fetches all local (instance-owned) emojis.
pub async fn get_local_emojis(
    db_client: &impl DatabaseClient,
) -> Result<Vec<DbEmoji>, DatabaseError> {
    let rows = db_client
        .query(
            "
            SELECT emoji
            FROM emoji
            WHERE hostname IS NULL
            ",
            &[],
        )
        .await?;
    let emojis = rows
        .iter()
        .map(|row| row.try_get("emoji"))
        .collect::<Result<_, _>>()?;
    Ok(emojis)
}
/// Fetches a remote emoji identified by its name and the instance it
/// originates from.
pub async fn get_emoji_by_name_and_hostname(
    db_client: &impl DatabaseClient,
    emoji_name: &str,
    hostname: &str,
) -> Result<DbEmoji, DatabaseError> {
    let maybe_row = db_client
        .query_opt(
            "
            SELECT emoji
            FROM emoji WHERE emoji_name = $1 AND hostname = $2
            ",
            &[&emoji_name, &hostname],
        )
        .await?;
    let row = maybe_row.ok_or(DatabaseError::NotFound("emoji"))?;
    let emoji = row.try_get("emoji")?;
    Ok(emoji)
}
/// Fetches a remote emoji by its ActivityPub object ID.
pub async fn get_emoji_by_remote_object_id(
    db_client: &impl DatabaseClient,
    object_id: &str,
) -> Result<DbEmoji, DatabaseError> {
    let maybe_row = db_client
        .query_opt(
            "
            SELECT emoji
            FROM emoji WHERE object_id = $1
            ",
            &[&object_id],
        )
        .await?;
    let row = maybe_row.ok_or(DatabaseError::NotFound("emoji"))?;
    let emoji = row.try_get("emoji")?;
    Ok(emoji)
}
/// Deletes an emoji, refreshes profile emoji caches, and returns a
/// `DeletionQueue` containing the image file if nothing else references it.
pub async fn delete_emoji(
    db_client: &impl DatabaseClient,
    emoji_id: &Uuid,
) -> Result<DeletionQueue, DatabaseError> {
    let maybe_row = db_client
        .query_opt(
            "
            DELETE FROM emoji WHERE id = $1
            RETURNING emoji
            ",
            &[&emoji_id],
        )
        .await?;
    let row = maybe_row.ok_or(DatabaseError::NotFound("emoji"))?;
    let emoji: DbEmoji = row.try_get("emoji")?;
    // Drop the emoji from the denormalized caches on profiles.
    update_emoji_caches(db_client, &emoji.id).await?;
    // Only queue the image file for deletion if it is now unreferenced.
    let orphaned_files = find_orphaned_files(db_client, vec![emoji.image.file_name]).await?;
    Ok(DeletionQueue {
        files: orphaned_files,
        ipfs_objects: vec![],
    })
}
/// Returns IDs of remote emojis (those with an object ID) that are no
/// longer referenced by any post or profile, so they can be purged.
pub async fn find_unused_remote_emojis(
    db_client: &impl DatabaseClient,
) -> Result<Vec<Uuid>, DatabaseError> {
    let rows = db_client
        .query(
            "
            SELECT emoji.id
            FROM emoji
            WHERE
                emoji.object_id IS NOT NULL
                AND NOT EXISTS (
                    SELECT 1
                    FROM post_emoji
                    WHERE post_emoji.emoji_id = emoji.id
                )
                AND NOT EXISTS (
                    SELECT 1
                    FROM profile_emoji
                    WHERE profile_emoji.emoji_id = emoji.id
                )
            ",
            &[],
        )
        .await?;
    let ids: Vec<Uuid> = rows
        .iter()
        .map(|row| row.try_get("id"))
        .collect::<Result<_, _>>()?;
    Ok(ids)
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::database::test_utils::create_test_database;
    use serial_test::serial;

    // Integration tests: require the Postgres instance from test_utils.
    // `#[serial]` prevents concurrent recreation of the test database.
    #[tokio::test]
    #[serial]
    async fn test_create_emoji() {
        let db_client = &create_test_database().await;
        let emoji_name = "test";
        let hostname = "example.org";
        let image = EmojiImage {
            file_name: "test.png".to_string(),
            file_size: 10000,
            media_type: "image/png".to_string(),
        };
        let object_id = "https://example.org/emojis/test";
        let updated_at = Utc::now();
        let DbEmoji { id: emoji_id, .. } = create_emoji(
            db_client,
            emoji_name,
            Some(hostname),
            image,
            Some(object_id),
            &updated_at,
        )
        .await
        .unwrap();
        let emoji = get_emoji_by_remote_object_id(db_client, object_id)
            .await
            .unwrap();
        assert_eq!(emoji.id, emoji_id);
        assert_eq!(emoji.emoji_name, emoji_name);
        assert_eq!(emoji.hostname, Some(hostname.to_string()));
    }

    #[tokio::test]
    #[serial]
    async fn test_update_emoji() {
        let db_client = &create_test_database().await;
        let image = EmojiImage::default();
        let emoji = create_emoji(db_client, "test", None, image.clone(), None, &Utc::now())
            .await
            .unwrap();
        // The update must move updated_at forward.
        let updated_emoji = update_emoji(db_client, &emoji.id, image, &Utc::now())
            .await
            .unwrap();
        assert_ne!(updated_emoji.updated_at, emoji.updated_at);
    }

    #[tokio::test]
    #[serial]
    async fn test_delete_emoji() {
        let db_client = &create_test_database().await;
        let image = EmojiImage::default();
        let emoji = create_emoji(db_client, "test", None, image, None, &Utc::now())
            .await
            .unwrap();
        // The emoji's image file should be queued for deletion.
        let deletion_queue = delete_emoji(db_client, &emoji.id).await.unwrap();
        assert_eq!(deletion_queue.files.len(), 1);
        assert_eq!(deletion_queue.ipfs_objects.len(), 0);
    }
}

View file

@ -1,35 +0,0 @@
use chrono::{DateTime, Utc};
use postgres_types::FromSql;
use serde::{Deserialize, Serialize};
use uuid::Uuid;
use crate::database::json_macro::{json_from_sql, json_to_sql};
// Migration default: emoji records stored before the `file_size` field
// existed deserialize with this assumed size (250 kB).
fn default_emoji_file_size() -> usize {
    250_000
}
/// Image metadata for a custom emoji, stored as JSONB in `emoji.image`.
#[derive(Clone, Debug, Deserialize, Serialize)]
#[cfg_attr(feature = "test-utils", derive(Default))]
pub struct EmojiImage {
    pub file_name: String,
    // Old rows lack this field; deserialization falls back to the
    // migration default above.
    #[serde(default = "default_emoji_file_size")]
    pub file_size: usize,
    pub media_type: String,
}

// Generate postgres FromSql/ToSql impls that (de)serialize via JSON.
json_from_sql!(EmojiImage);
json_to_sql!(EmojiImage);
/// Row type for the `emoji` table. Local emojis have `hostname` and
/// `object_id` both `None`; remote emojis have both set.
#[derive(Clone, Deserialize, FromSql)]
#[cfg_attr(feature = "test-utils", derive(Default))]
#[postgres(name = "emoji")]
pub struct DbEmoji {
    pub id: Uuid,
    pub emoji_name: String,
    pub hostname: Option<String>,
    pub image: EmojiImage,
    pub object_id: Option<String>,
    pub updated_at: DateTime<Utc>,
}

View file

@ -1,41 +0,0 @@
use crate::database::{DatabaseClient, DatabaseError};
/// Registers a peer instance by hostname; idempotent thanks to
/// `ON CONFLICT DO NOTHING`.
pub async fn create_instance(
    db_client: &impl DatabaseClient,
    hostname: &str,
) -> Result<(), DatabaseError> {
    db_client
        .execute(
            "
            INSERT INTO instance VALUES ($1)
            ON CONFLICT DO NOTHING
            ",
            &[&hostname],
        )
        .await?;
    Ok(())
}
/// Returns the hostnames of all known peer instances.
pub async fn get_peers(db_client: &impl DatabaseClient) -> Result<Vec<String>, DatabaseError> {
    let rows = db_client
        .query(
            "
            SELECT instance.hostname FROM instance
            ",
            &[],
        )
        .await?;
    let peers = rows
        .iter()
        .map(|row| row.try_get("hostname"))
        .collect::<Result<_, _>>()?;
    Ok(peers)
}
/// Returns the number of known peer instances.
pub async fn get_peer_count(db_client: &impl DatabaseClient) -> Result<i64, DatabaseError> {
    let row = db_client
        .query_one("SELECT count(instance) FROM instance", &[])
        .await?;
    let count = row.try_get("count")?;
    Ok(count)
}

View file

@ -1,171 +0,0 @@
use uuid::Uuid;
use fedimovies_utils::{caip2::ChainId, id::generate_ulid};
use crate::database::{catch_unique_violation, DatabaseClient, DatabaseError};
use super::types::{DbChainId, DbInvoice, InvoiceStatus};
/// Creates a payment invoice between two profiles on the given chain.
/// The row starts in the default status (open).
///
/// Returns `DatabaseError::AlreadyExists("invoice")` on a unique-constraint
/// violation (e.g. duplicate payment address).
pub async fn create_invoice(
    db_client: &impl DatabaseClient,
    sender_id: &Uuid,
    recipient_id: &Uuid,
    chain_id: &ChainId,
    payment_address: &str,
    amount: i64,
) -> Result<DbInvoice, DatabaseError> {
    let invoice_id = generate_ulid();
    let row = db_client
        .query_one(
            "
            INSERT INTO invoice (
                id,
                sender_id,
                recipient_id,
                chain_id,
                payment_address,
                amount
            )
            VALUES ($1, $2, $3, $4, $5, $6)
            RETURNING invoice
            ",
            &[
                &invoice_id,
                &sender_id,
                &recipient_id,
                &DbChainId::new(chain_id),
                &payment_address,
                &amount,
            ],
        )
        .await
        .map_err(catch_unique_violation("invoice"))?;
    let invoice = row.try_get("invoice")?;
    Ok(invoice)
}
/// Fetches an invoice by its ID.
pub async fn get_invoice_by_id(
    db_client: &impl DatabaseClient,
    invoice_id: &Uuid,
) -> Result<DbInvoice, DatabaseError> {
    let maybe_row = db_client
        .query_opt(
            "
            SELECT invoice
            FROM invoice WHERE id = $1
            ",
            &[&invoice_id],
        )
        .await?;
    let row = maybe_row.ok_or(DatabaseError::NotFound("invoice"))?;
    let invoice = row.try_get("invoice")?;
    Ok(invoice)
}
/// Fetches an invoice by its chain and payment address (used when matching
/// an incoming payment to its invoice).
pub async fn get_invoice_by_address(
    db_client: &impl DatabaseClient,
    chain_id: &ChainId,
    payment_address: &str,
) -> Result<DbInvoice, DatabaseError> {
    let maybe_row = db_client
        .query_opt(
            "
            SELECT invoice
            FROM invoice WHERE chain_id = $1 AND payment_address = $2
            ",
            &[&DbChainId::new(chain_id), &payment_address],
        )
        .await?;
    let row = maybe_row.ok_or(DatabaseError::NotFound("invoice"))?;
    let invoice = row.try_get("invoice")?;
    Ok(invoice)
}
/// Fetches all invoices on the given chain that are in the given status.
pub async fn get_invoices_by_status(
    db_client: &impl DatabaseClient,
    chain_id: &ChainId,
    status: InvoiceStatus,
) -> Result<Vec<DbInvoice>, DatabaseError> {
    let rows = db_client
        .query(
            "
            SELECT invoice
            FROM invoice WHERE chain_id = $1 AND invoice_status = $2
            ",
            &[&DbChainId::new(chain_id), &status],
        )
        .await?;
    let invoices = rows
        .iter()
        .map(|row| row.try_get("invoice"))
        .collect::<Result<_, _>>()?;
    Ok(invoices)
}
/// Transitions an invoice to a new status.
///
/// Returns `DatabaseError::NotFound` if no invoice with the given ID exists.
pub async fn set_invoice_status(
    db_client: &impl DatabaseClient,
    invoice_id: &Uuid,
    status: InvoiceStatus,
) -> Result<(), DatabaseError> {
    let updated_count = db_client
        .execute(
            "
            UPDATE invoice SET invoice_status = $2
            WHERE id = $1
            ",
            &[&invoice_id, &status],
        )
        .await?;
    match updated_count {
        0 => Err(DatabaseError::NotFound("invoice")),
        _ => Ok(()),
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::database::test_utils::create_test_database;
    use crate::profiles::{queries::create_profile, types::ProfileCreateData};
    use crate::users::{queries::create_user, types::UserCreateData};
    use serial_test::serial;

    // Integration test: requires the Postgres instance from test_utils.
    // `#[serial]` prevents concurrent recreation of the test database.
    #[tokio::test]
    #[serial]
    async fn test_create_invoice() {
        let db_client = &mut create_test_database().await;
        let sender_data = ProfileCreateData {
            username: "sender".to_string(),
            ..Default::default()
        };
        let sender = create_profile(db_client, sender_data).await.unwrap();
        let recipient_data = UserCreateData {
            username: "recipient".to_string(),
            password_hash: Some("test".to_string()),
            ..Default::default()
        };
        let recipient = create_user(db_client, recipient_data).await.unwrap();
        let chain_id = ChainId {
            namespace: "monero".to_string(),
            reference: "mainnet".to_string(),
        };
        let payment_address = "8MxABajuo71BZya9";
        let amount = 100000000000109212;
        let invoice = create_invoice(
            db_client,
            &sender.id,
            &recipient.id,
            &chain_id,
            payment_address,
            amount,
        )
        .await
        .unwrap();
        assert_eq!(invoice.sender_id, sender.id);
        assert_eq!(invoice.recipient_id, recipient.id);
        assert_eq!(invoice.chain_id.into_inner(), chain_id);
        assert_eq!(invoice.payment_address, payment_address);
        assert_eq!(invoice.amount, amount);
        // Invoices must start in the Open state.
        assert_eq!(invoice.invoice_status, InvoiceStatus::Open);
    }
}

View file

@ -1,111 +0,0 @@
use chrono::{DateTime, Utc};
use postgres_protocol::types::{text_from_sql, text_to_sql};
use postgres_types::{accepts, private::BytesMut, to_sql_checked, FromSql, IsNull, ToSql, Type};
use uuid::Uuid;
use fedimovies_utils::caip2::ChainId;
use crate::database::{
int_enum::{int_enum_from_sql, int_enum_to_sql},
DatabaseTypeError,
};
/// Newtype wrapping `ChainId` (CAIP-2 identifier) so it can be stored in a
/// VARCHAR column via the FromSql/ToSql impls below.
#[derive(Debug)]
pub struct DbChainId(ChainId);

impl DbChainId {
    pub fn new(chain_id: &ChainId) -> Self {
        Self(chain_id.clone())
    }

    /// Borrow the wrapped chain ID.
    pub fn inner(&self) -> &ChainId {
        let Self(chain_id) = self;
        chain_id
    }

    /// Unwrap into the owned chain ID.
    pub fn into_inner(self) -> ChainId {
        let Self(chain_id) = self;
        chain_id
    }
}

// Allow comparing a DB value directly against a plain ChainId.
impl PartialEq<ChainId> for DbChainId {
    fn eq(&self, other: &ChainId) -> bool {
        self.inner() == other
    }
}
// Decode a chain ID from its textual column representation.
impl<'a> FromSql<'a> for DbChainId {
    fn from_sql(_: &Type, raw: &'a [u8]) -> Result<Self, Box<dyn std::error::Error + Sync + Send>> {
        let value_str = text_from_sql(raw)?;
        // Parsing validates the CAIP-2 format; invalid data is a decode error.
        let value: ChainId = value_str.parse()?;
        Ok(Self(value))
    }

    accepts!(VARCHAR);
}
// Encode a chain ID as text for storage in a VARCHAR/TEXT column.
impl ToSql for DbChainId {
    fn to_sql(
        &self,
        _: &Type,
        out: &mut BytesMut,
    ) -> Result<IsNull, Box<dyn std::error::Error + Sync + Send>> {
        let value_str = self.inner().to_string();
        text_to_sql(&value_str, out);
        Ok(IsNull::No)
    }

    accepts!(VARCHAR, TEXT);
    to_sql_checked!();
}
/// Lifecycle state of an invoice, stored as a SMALLINT in
/// `invoice.invoice_status`.
#[derive(Debug, PartialEq)]
pub enum InvoiceStatus {
    Open,
    Paid,
    Forwarded,
    Timeout,
}

impl From<&InvoiceStatus> for i16 {
    /// Encode the status as its database discriminant.
    /// These values are part of the on-disk format; never renumber them.
    fn from(status: &InvoiceStatus) -> i16 {
        use InvoiceStatus::*;
        match status {
            Open => 1,
            Paid => 2,
            Forwarded => 3,
            Timeout => 4,
        }
    }
}
impl TryFrom<i16> for InvoiceStatus {
    type Error = DatabaseTypeError;

    // Decode the integer stored in `invoice.invoice_status`.
    // An unknown discriminant means DB/code version skew and is an error.
    fn try_from(value: i16) -> Result<Self, Self::Error> {
        let invoice_status = match value {
            1 => Self::Open,
            2 => Self::Paid,
            3 => Self::Forwarded,
            4 => Self::Timeout,
            _ => return Err(DatabaseTypeError),
        };
        Ok(invoice_status)
    }
}

// Generate postgres FromSql/ToSql impls backed by the i16 conversions above.
int_enum_from_sql!(InvoiceStatus);
int_enum_to_sql!(InvoiceStatus);
/// Row type for the `invoice` table (mapped via the composite `invoice`
/// Postgres type).
#[derive(FromSql)]
#[postgres(name = "invoice")]
pub struct DbInvoice {
    pub id: Uuid,
    pub sender_id: Uuid,
    pub recipient_id: Uuid,
    pub chain_id: DbChainId,
    pub payment_address: String,
    pub amount: i64, // requested payment amount
    pub invoice_status: InvoiceStatus,
    pub created_at: DateTime<Utc>,
}

View file

@ -1,2 +0,0 @@
pub mod queries;
pub mod types;

View file

@ -1,265 +0,0 @@
use uuid::Uuid;
use crate::database::{DatabaseClient, DatabaseError};
use crate::posts::{
helpers::{add_related_posts, add_user_actions},
queries::{RELATED_ATTACHMENTS, RELATED_EMOJIS, RELATED_LINKS, RELATED_MENTIONS, RELATED_TAGS},
};
use super::types::{EventType, Notification};
/// Inserts a notification row.
/// `post_id` is `None` (stored as NULL) for events not tied to a post.
async fn create_notification(
    db_client: &impl DatabaseClient,
    sender_id: &Uuid,
    recipient_id: &Uuid,
    post_id: Option<&Uuid>,
    event_type: EventType,
) -> Result<(), DatabaseError> {
    db_client
        .execute(
            "
            INSERT INTO notification (
                sender_id,
                recipient_id,
                post_id,
                event_type
            )
            VALUES ($1, $2, $3, $4)
            ",
            &[&sender_id, &recipient_id, &post_id, &event_type],
        )
        .await?;
    Ok(())
}
/// Removes a single notification row by its integer primary key.
pub async fn delete_notification(
    db_client: &impl DatabaseClient,
    notification_id: i32,
) -> Result<(), DatabaseError> {
    let statement = "
        DELETE FROM notification
        WHERE id = $1
        ";
    db_client.execute(statement, &[&notification_id]).await?;
    Ok(())
}
/// Notifies `recipient_id` that `sender_id` started following them.
pub async fn create_follow_notification(
    db_client: &impl DatabaseClient,
    sender_id: &Uuid,
    recipient_id: &Uuid,
) -> Result<(), DatabaseError> {
    create_notification(db_client, sender_id, recipient_id, None, EventType::Follow).await
}

/// Notifies the author of a post that `sender_id` replied with `post_id`.
pub async fn create_reply_notification(
    db_client: &impl DatabaseClient,
    sender_id: &Uuid,
    recipient_id: &Uuid,
    post_id: &Uuid,
) -> Result<(), DatabaseError> {
    create_notification(
        db_client,
        sender_id,
        recipient_id,
        Some(post_id),
        EventType::Reply,
    )
    .await
}

/// Notifies the author of `post_id` about a reaction from `sender_id`.
pub async fn create_reaction_notification(
    db_client: &impl DatabaseClient,
    sender_id: &Uuid,
    recipient_id: &Uuid,
    post_id: &Uuid,
) -> Result<(), DatabaseError> {
    create_notification(
        db_client,
        sender_id,
        recipient_id,
        Some(post_id),
        EventType::Reaction,
    )
    .await
}

/// Notifies `recipient_id` that they were mentioned in `post_id`.
pub async fn create_mention_notification(
    db_client: &impl DatabaseClient,
    sender_id: &Uuid,
    recipient_id: &Uuid,
    post_id: &Uuid,
) -> Result<(), DatabaseError> {
    create_notification(
        db_client,
        sender_id,
        recipient_id,
        Some(post_id),
        EventType::Mention,
    )
    .await
}

/// Notifies the author of `post_id` that `sender_id` reposted it.
pub async fn create_repost_notification(
    db_client: &impl DatabaseClient,
    sender_id: &Uuid,
    recipient_id: &Uuid,
    post_id: &Uuid,
) -> Result<(), DatabaseError> {
    create_notification(
        db_client,
        sender_id,
        recipient_id,
        Some(post_id),
        EventType::Repost,
    )
    .await
}

/// Notifies `recipient_id` about a new subscription from `sender_id`.
pub async fn create_subscription_notification(
    db_client: &impl DatabaseClient,
    sender_id: &Uuid,
    recipient_id: &Uuid,
) -> Result<(), DatabaseError> {
    create_notification(
        db_client,
        sender_id,
        recipient_id,
        None,
        EventType::Subscription,
    )
    .await
}

/// Notifies `recipient_id` that a subscription from `sender_id` expired.
pub async fn create_subscription_expiration_notification(
    db_client: &impl DatabaseClient,
    sender_id: &Uuid,
    recipient_id: &Uuid,
) -> Result<(), DatabaseError> {
    create_notification(
        db_client,
        sender_id,
        recipient_id,
        None,
        EventType::SubscriptionExpiration,
    )
    .await
}

/// Notifies `recipient_id` that `sender_id` moved to another account.
pub async fn create_move_notification(
    db_client: &impl DatabaseClient,
    sender_id: &Uuid,
    recipient_id: &Uuid,
) -> Result<(), DatabaseError> {
    create_notification(db_client, sender_id, recipient_id, None, EventType::Move).await
}
/// Returns notifications for `recipient_id`, newest first.
///
/// `max_id` is an exclusive upper bound on the notification id (keyset
/// pagination); `limit` caps the page size. Rows join the sender profile
/// and, when the event references a post, the post with its author and
/// related entities (attachments, mentions, tags, links, emojis).
pub async fn get_notifications(
    db_client: &impl DatabaseClient,
    recipient_id: &Uuid,
    max_id: Option<i32>,
    limit: u16,
) -> Result<Vec<Notification>, DatabaseError> {
    let statement = format!(
        "
        SELECT
            notification, sender, post, post_author, recipient,
            {related_attachments},
            {related_mentions},
            {related_tags},
            {related_links},
            {related_emojis}
        FROM notification
        JOIN actor_profile AS sender
        ON notification.sender_id = sender.id
        LEFT JOIN post
        ON notification.post_id = post.id
        LEFT JOIN actor_profile AS post_author
        ON post.author_id = post_author.id
        LEFT JOIN actor_profile AS recipient
        ON notification.recipient_id = recipient.id
        WHERE
            recipient_id = $1
            AND ($2::integer IS NULL OR notification.id < $2)
        ORDER BY notification.id DESC
        LIMIT $3
        ",
        related_attachments = RELATED_ATTACHMENTS,
        related_mentions = RELATED_MENTIONS,
        related_tags = RELATED_TAGS,
        related_links = RELATED_LINKS,
        related_emojis = RELATED_EMOJIS,
    );
    let rows = db_client
        .query(&statement, &[&recipient_id, &max_id, &i64::from(limit)])
        .await?;
    let mut notifications: Vec<Notification> = rows
        .iter()
        .map(Notification::try_from)
        .collect::<Result<_, _>>()?;
    // Hydrate posts attached to notifications: first linked posts
    // (reposts/replies), then the recipient's own actions on them.
    add_related_posts(
        db_client,
        notifications
            .iter_mut()
            .filter_map(|item| item.post.as_mut())
            .collect(),
    )
    .await?;
    add_user_actions(
        db_client,
        recipient_id,
        notifications
            .iter_mut()
            .filter_map(|item| item.post.as_mut())
            .collect(),
    )
    .await?;
    Ok(notifications)
}
/// Returns the latest `limit` mention notifications across all recipients.
/// Unlike `get_notifications`, posts are returned without related-post or
/// user-action hydration.
pub async fn get_mention_notifications(
    db_client: &impl DatabaseClient,
    limit: u16,
) -> Result<Vec<Notification>, DatabaseError> {
    let statement = format!(
        "
        SELECT
            notification, sender, post, post_author, recipient,
            {related_attachments},
            {related_mentions},
            {related_tags},
            {related_links},
            {related_emojis}
        FROM notification
        JOIN actor_profile AS sender
        ON notification.sender_id = sender.id
        LEFT JOIN post
        ON notification.post_id = post.id
        LEFT JOIN actor_profile AS post_author
        ON post.author_id = post_author.id
        LEFT JOIN actor_profile AS recipient
        ON notification.recipient_id = recipient.id
        WHERE
            event_type = $1
        ORDER BY notification.id DESC
        LIMIT $2
        ",
        related_attachments = RELATED_ATTACHMENTS,
        related_mentions = RELATED_MENTIONS,
        related_tags = RELATED_TAGS,
        related_links = RELATED_LINKS,
        related_emojis = RELATED_EMOJIS,
    );
    let rows = db_client
        .query(&statement, &[&EventType::Mention, &i64::from(limit)])
        .await?;
    let notifications: Vec<Notification> = rows
        .iter()
        .map(Notification::try_from)
        .collect::<Result<_, _>>()?;
    Ok(notifications)
}

View file

@ -1,2 +0,0 @@
pub mod queries;
pub mod types;

View file

@ -1,272 +0,0 @@
use chrono::{DateTime, Utc};
use uuid::Uuid;
use crate::database::{catch_unique_violation, DatabaseClient, DatabaseError};
use crate::profiles::types::DbActorProfile;
use crate::users::types::{DbUser, User};
use super::types::{DbOauthApp, DbOauthAppData};
/// Registers a new OAuth application and returns the stored row.
/// A duplicate registration surfaces as a unique-violation error
/// tagged "oauth_application".
pub async fn create_oauth_app(
    db_client: &impl DatabaseClient,
    app_data: DbOauthAppData,
) -> Result<DbOauthApp, DatabaseError> {
    let row = db_client
        .query_one(
            "
            INSERT INTO oauth_application (
                app_name,
                website,
                scopes,
                redirect_uri,
                client_id,
                client_secret
            )
            VALUES ($1, $2, $3, $4, $5, $6)
            RETURNING oauth_application
            ",
            &[
                &app_data.app_name,
                &app_data.website,
                &app_data.scopes,
                &app_data.redirect_uri,
                &app_data.client_id,
                &app_data.client_secret,
            ],
        )
        .await
        .map_err(catch_unique_violation("oauth_application"))?;
    let app = row.try_get("oauth_application")?;
    Ok(app)
}

/// Looks up an OAuth application by its public client ID.
/// Returns `DatabaseError::NotFound` if no such application exists.
pub async fn get_oauth_app_by_client_id(
    db_client: &impl DatabaseClient,
    client_id: &Uuid,
) -> Result<DbOauthApp, DatabaseError> {
    let maybe_row = db_client
        .query_opt(
            "
            SELECT oauth_application
            FROM oauth_application
            WHERE client_id = $1
            ",
            &[&client_id],
        )
        .await?;
    let row = maybe_row.ok_or(DatabaseError::NotFound("oauth application"))?;
    let app = row.try_get("oauth_application")?;
    Ok(app)
}
/// Stores an authorization code issued to `user_id` for `application_id`,
/// together with its granted scopes and validity window.
pub async fn create_oauth_authorization(
    db_client: &impl DatabaseClient,
    authorization_code: &str,
    user_id: &Uuid,
    application_id: i32,
    scopes: &str,
    created_at: &DateTime<Utc>,
    expires_at: &DateTime<Utc>,
) -> Result<(), DatabaseError> {
    db_client
        .execute(
            "
            INSERT INTO oauth_authorization (
                code,
                user_id,
                application_id,
                scopes,
                created_at,
                expires_at
            )
            VALUES ($1, $2, $3, $4, $5, $6)
            ",
            &[
                &authorization_code,
                &user_id,
                &application_id,
                &scopes,
                &created_at,
                &expires_at,
            ],
        )
        .await?;
    Ok(())
}

/// Resolves an unexpired authorization code to its user.
/// Returns `DatabaseError::NotFound` for unknown or expired codes.
pub async fn get_user_by_authorization_code(
    db_client: &impl DatabaseClient,
    authorization_code: &str,
) -> Result<User, DatabaseError> {
    let maybe_row = db_client
        .query_opt(
            "
            SELECT user_account, actor_profile
            FROM oauth_authorization
            JOIN user_account ON oauth_authorization.user_id = user_account.id
            JOIN actor_profile ON user_account.id = actor_profile.id
            WHERE
                oauth_authorization.code = $1
                AND oauth_authorization.expires_at > CURRENT_TIMESTAMP
            ",
            &[&authorization_code],
        )
        .await?;
    let row = maybe_row.ok_or(DatabaseError::NotFound("authorization"))?;
    let db_user: DbUser = row.try_get("user_account")?;
    let db_profile: DbActorProfile = row.try_get("actor_profile")?;
    let user = User::new(db_user, db_profile);
    Ok(user)
}

/// Persists an access token for `owner_id` with its validity window.
pub async fn save_oauth_token(
    db_client: &impl DatabaseClient,
    owner_id: &Uuid,
    token: &str,
    created_at: &DateTime<Utc>,
    expires_at: &DateTime<Utc>,
) -> Result<(), DatabaseError> {
    db_client
        .execute(
            "
            INSERT INTO oauth_token (owner_id, token, created_at, expires_at)
            VALUES ($1, $2, $3, $4)
            ",
            &[&owner_id, &token, &created_at, &expires_at],
        )
        .await?;
    Ok(())
}
/// Deletes `token` on behalf of `current_user_id` (logout).
///
/// The row is locked with SELECT ... FOR UPDATE before deletion so the
/// ownership check and the delete happen atomically. A token owned by a
/// different user yields `NotFound` (indistinguishable from a missing
/// token, by design).
/// NOTE(review): a token that does not exist at all is treated as success
/// — confirm this idempotent behaviour is intended.
pub async fn delete_oauth_token(
    db_client: &mut impl DatabaseClient,
    current_user_id: &Uuid,
    token: &str,
) -> Result<(), DatabaseError> {
    let transaction = db_client.transaction().await?;
    let maybe_row = transaction
        .query_opt(
            "
            SELECT owner_id FROM oauth_token
            WHERE token = $1
            FOR UPDATE
            ",
            &[&token],
        )
        .await?;
    if let Some(row) = maybe_row {
        let owner_id: Uuid = row.try_get("owner_id")?;
        if owner_id != *current_user_id {
            // Return error if token is owned by a different user
            return Err(DatabaseError::NotFound("token"));
        } else {
            transaction
                .execute("DELETE FROM oauth_token WHERE token = $1", &[&token])
                .await?;
        };
    };
    transaction.commit().await?;
    Ok(())
}
/// Revokes every access token belonging to `owner_id`.
pub async fn delete_oauth_tokens(
    db_client: &impl DatabaseClient,
    owner_id: &Uuid,
) -> Result<(), DatabaseError> {
    let statement = "DELETE FROM oauth_token WHERE owner_id = $1";
    db_client.execute(statement, &[&owner_id]).await?;
    Ok(())
}
/// Resolves an unexpired access token to its user (request authentication).
/// Returns `DatabaseError::NotFound` for unknown or expired tokens.
pub async fn get_user_by_oauth_token(
    db_client: &impl DatabaseClient,
    access_token: &str,
) -> Result<User, DatabaseError> {
    let maybe_row = db_client
        .query_opt(
            "
            SELECT user_account, actor_profile
            FROM oauth_token
            JOIN user_account ON oauth_token.owner_id = user_account.id
            JOIN actor_profile ON user_account.id = actor_profile.id
            WHERE
                oauth_token.token = $1
                AND oauth_token.expires_at > CURRENT_TIMESTAMP
            ",
            &[&access_token],
        )
        .await?;
    let row = maybe_row.ok_or(DatabaseError::NotFound("user"))?;
    let db_user: DbUser = row.try_get("user_account")?;
    let db_profile: DbActorProfile = row.try_get("actor_profile")?;
    let user = User::new(db_user, db_profile);
    Ok(user)
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::database::test_utils::create_test_database;
    use crate::users::{queries::create_user, types::UserCreateData};
    use serial_test::serial;

    // Tests run serially because they share one test database.

    /// App registration round-trips the application name.
    #[tokio::test]
    #[serial]
    async fn test_create_oauth_app() {
        let db_client = &create_test_database().await;
        let db_app_data = DbOauthAppData {
            app_name: "My App".to_string(),
            ..Default::default()
        };
        let app = create_oauth_app(db_client, db_app_data).await.unwrap();
        assert_eq!(app.app_name, "My App");
    }

    /// Issuing an authorization code for a fresh user and app succeeds.
    #[tokio::test]
    #[serial]
    async fn test_create_oauth_authorization() {
        let db_client = &mut create_test_database().await;
        let user_data = UserCreateData {
            username: "test".to_string(),
            password_hash: Some("test".to_string()),
            ..Default::default()
        };
        let user = create_user(db_client, user_data).await.unwrap();
        let app_data = DbOauthAppData {
            app_name: "My App".to_string(),
            ..Default::default()
        };
        let app = create_oauth_app(db_client, app_data).await.unwrap();
        create_oauth_authorization(
            db_client,
            "code",
            &user.id,
            app.id,
            "read write",
            &Utc::now(),
            &Utc::now(),
        )
        .await
        .unwrap();
    }

    /// A token owner can delete their own token.
    #[tokio::test]
    #[serial]
    async fn test_delete_oauth_token() {
        let db_client = &mut create_test_database().await;
        let user_data = UserCreateData {
            username: "test".to_string(),
            password_hash: Some("test".to_string()),
            ..Default::default()
        };
        let user = create_user(db_client, user_data).await.unwrap();
        let token = "test-token";
        save_oauth_token(db_client, &user.id, token, &Utc::now(), &Utc::now())
            .await
            .unwrap();
        delete_oauth_token(db_client, &user.id, token)
            .await
            .unwrap();
    }
}

View file

@ -1,26 +0,0 @@
use chrono::{DateTime, Utc};
use postgres_types::FromSql;
use uuid::Uuid;
/// Row of the `oauth_application` table.
#[derive(FromSql)]
#[postgres(name = "oauth_application")]
pub struct DbOauthApp {
    pub id: i32,
    pub app_name: String,
    pub website: Option<String>,
    pub scopes: String,
    pub redirect_uri: String,
    pub client_id: Uuid,
    pub client_secret: String,
    pub created_at: DateTime<Utc>,
}

/// Input data for registering an OAuth application
/// (the database assigns `id` and `created_at`).
#[cfg_attr(test, derive(Default))]
pub struct DbOauthAppData {
    pub app_name: String,
    pub website: Option<String>,
    pub scopes: String,
    pub redirect_uri: String,
    pub client_id: Uuid,
    pub client_secret: String,
}

View file

@ -1,38 +0,0 @@
use crate::database::{DatabaseClient, DatabaseError};
use super::queries::{get_profile_by_remote_actor_id, search_profiles_by_did_only};
use super::types::DbActorProfile;
/// Resolves the actor IDs listed in `profile.aliases` to known remote
/// profiles. Aliases pointing at actors absent from the local database
/// are silently skipped.
pub async fn find_declared_aliases(
    db_client: &impl DatabaseClient,
    profile: &DbActorProfile,
) -> Result<Vec<DbActorProfile>, DatabaseError> {
    let mut results = vec![];
    for actor_id in profile.aliases.clone().into_actor_ids() {
        let alias = match get_profile_by_remote_actor_id(db_client, &actor_id).await {
            Ok(profile) => profile,
            // Ignore unknown profiles
            Err(DatabaseError::NotFound(_)) => continue,
            Err(other_error) => return Err(other_error),
        };
        results.push(alias);
    }
    Ok(results)
}
/// Finds other profiles sharing a DID from this profile's identity proofs.
/// The profile itself is excluded from the result; duplicates are possible
/// if one alias matches several proofs — TODO confirm callers dedupe.
pub async fn find_verified_aliases(
    db_client: &impl DatabaseClient,
    profile: &DbActorProfile,
) -> Result<Vec<DbActorProfile>, DatabaseError> {
    let mut results = vec![];
    for identity_proof in profile.identity_proofs.inner() {
        let aliases = search_profiles_by_did_only(db_client, &identity_proof.issuer).await?;
        for alias in aliases {
            if alias.id == profile.id {
                continue;
            };
            results.push(alias);
        }
    }
    Ok(results)
}

View file

@ -1,3 +0,0 @@
pub mod helpers;
pub mod queries;
pub mod types;

File diff suppressed because it is too large Load diff

View file

@ -1,586 +0,0 @@
use chrono::{DateTime, Duration, Utc};
use postgres_types::FromSql;
use serde::{
Deserialize, Deserializer, Serialize, Serializer, __private::ser::FlatMapSerializer,
de::Error as DeserializerError, ser::SerializeMap,
};
use uuid::Uuid;
use fedimovies_utils::{caip2::ChainId, did::Did};
use crate::database::{
json_macro::{json_from_sql, json_to_sql},
DatabaseTypeError,
};
use crate::emojis::types::DbEmoji;
/// Avatar or banner image metadata, stored as JSON in the profile row.
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct ProfileImage {
    pub file_name: String,
    // Optional for backward compatibility with rows saved before
    // these fields existed — TODO confirm.
    pub file_size: Option<usize>,
    pub media_type: Option<String>,
}

impl ProfileImage {
    /// Creates image metadata with a known file size.
    pub fn new(file_name: String, file_size: usize, media_type: Option<String>) -> Self {
        Self {
            file_name,
            file_size: Some(file_size),
            media_type,
        }
    }
}
json_from_sql!(ProfileImage);
json_to_sql!(ProfileImage);
/// Kind of cryptographic identity proof; serialized as a small integer.
#[derive(Clone, Debug)]
pub enum IdentityProofType {
    LegacyEip191IdentityProof,
    LegacyMinisignIdentityProof,
}

// Encodes the proof type as its integer tag.
impl From<&IdentityProofType> for i16 {
    fn from(proof_type: &IdentityProofType) -> i16 {
        match proof_type {
            IdentityProofType::LegacyEip191IdentityProof => 1,
            IdentityProofType::LegacyMinisignIdentityProof => 2,
        }
    }
}
impl TryFrom<i16> for IdentityProofType {
    type Error = DatabaseTypeError;

    /// Decodes the integer tag; unknown tags are a database type error.
    fn try_from(value: i16) -> Result<Self, Self::Error> {
        match value {
            1 => Ok(Self::LegacyEip191IdentityProof),
            2 => Ok(Self::LegacyMinisignIdentityProof),
            _ => Err(DatabaseTypeError),
        }
    }
}
// Serde round-trips the proof type through its i16 tag so the JSON
// matches the integer encoding used elsewhere.
impl<'de> Deserialize<'de> for IdentityProofType {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        i16::deserialize(deserializer)?
            .try_into()
            .map_err(DeserializerError::custom)
    }
}

impl Serialize for IdentityProofType {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        serializer.serialize_i16(self.into())
    }
}
/// A single identity proof: a DID issuer, a proof algorithm and its value.
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct IdentityProof {
    pub issuer: Did,
    pub proof_type: IdentityProofType,
    pub value: String,
}

/// Collection of identity proofs, stored as JSON in the profile row.
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct IdentityProofs(pub Vec<IdentityProof>);

impl IdentityProofs {
    /// Borrows the proofs as a slice.
    pub fn inner(&self) -> &[IdentityProof] {
        let Self(identity_proofs) = self;
        identity_proofs
    }

    /// Consumes the wrapper, returning the proofs.
    pub fn into_inner(self) -> Vec<IdentityProof> {
        let Self(identity_proofs) = self;
        identity_proofs
    }

    /// Returns true if identity proof list contains at least one proof
    /// created by a given DID.
    pub fn any(&self, issuer: &Did) -> bool {
        let Self(identity_proofs) = self;
        identity_proofs.iter().any(|proof| proof.issuer == *issuer)
    }
}
json_from_sql!(IdentityProofs);
json_to_sql!(IdentityProofs);
/// Discriminant of a payment option; serialized as a small integer tag.
#[derive(PartialEq)]
pub enum PaymentType {
    Link,
    EthereumSubscription,
    MoneroSubscription,
}

// Encodes the payment type as its integer tag.
impl From<&PaymentType> for i16 {
    fn from(payment_type: &PaymentType) -> i16 {
        match payment_type {
            PaymentType::Link => 1,
            PaymentType::EthereumSubscription => 2,
            PaymentType::MoneroSubscription => 3,
        }
    }
}
impl TryFrom<i16> for PaymentType {
    type Error = DatabaseTypeError;

    /// Decodes the integer tag; unknown tags are a database type error.
    fn try_from(value: i16) -> Result<Self, Self::Error> {
        match value {
            1 => Ok(Self::Link),
            2 => Ok(Self::EthereumSubscription),
            3 => Ok(Self::MoneroSubscription),
            _ => Err(DatabaseTypeError),
        }
    }
}
/// A named external payment link.
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct PaymentLink {
    pub name: String,
    pub href: String,
}

/// Ethereum subscription parameters.
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct EthereumSubscription {
    chain_id: ChainId,
}

/// Monero subscription parameters.
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct MoneroSubscription {
    pub chain_id: ChainId,
    pub price: u64, // piconeros per second
    pub payout_address: String,
}

/// One of the supported ways to pay a profile owner.
#[derive(Clone, Debug)]
pub enum PaymentOption {
    Link(PaymentLink),
    EthereumSubscription(EthereumSubscription),
    MoneroSubscription(MoneroSubscription),
}

impl PaymentOption {
    /// Convenience constructor for an Ethereum subscription option.
    pub fn ethereum_subscription(chain_id: ChainId) -> Self {
        Self::EthereumSubscription(EthereumSubscription { chain_id })
    }

    /// Returns the discriminant used for (de)serialization tagging.
    fn payment_type(&self) -> PaymentType {
        match self {
            Self::Link(_) => PaymentType::Link,
            Self::EthereumSubscription(_) => PaymentType::EthereumSubscription,
            Self::MoneroSubscription(_) => PaymentType::MoneroSubscription,
        }
    }
}
// Integer tags are not supported https://github.com/serde-rs/serde/issues/745
// Workaround: https://stackoverflow.com/a/65576570
// Deserializes into a generic JSON value first, reads the integer
// "payment_type" tag, then re-deserializes the same value as the
// matching variant payload.
impl<'de> Deserialize<'de> for PaymentOption {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        let value = serde_json::Value::deserialize(deserializer)?;
        let payment_type = value
            .get("payment_type")
            .and_then(serde_json::Value::as_u64)
            .and_then(|val| i16::try_from(val).ok())
            .and_then(|val| PaymentType::try_from(val).ok())
            .ok_or(DeserializerError::custom("invalid payment type"))?;
        let payment_option = match payment_type {
            PaymentType::Link => {
                let link = PaymentLink::deserialize(value).map_err(DeserializerError::custom)?;
                Self::Link(link)
            }
            PaymentType::EthereumSubscription => {
                let payment_info =
                    EthereumSubscription::deserialize(value).map_err(DeserializerError::custom)?;
                Self::EthereumSubscription(payment_info)
            }
            PaymentType::MoneroSubscription => {
                let payment_info =
                    MoneroSubscription::deserialize(value).map_err(DeserializerError::custom)?;
                Self::MoneroSubscription(payment_info)
            }
        };
        Ok(payment_option)
    }
}
// Serializes as a flat map: the integer "payment_type" tag plus the
// variant's own fields flattened alongside it (mirrors Deserialize above).
impl Serialize for PaymentOption {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        let mut map = serializer.serialize_map(None)?;
        let payment_type = self.payment_type();
        map.serialize_entry("payment_type", &i16::from(&payment_type))?;
        match self {
            Self::Link(link) => link.serialize(FlatMapSerializer(&mut map))?,
            Self::EthereumSubscription(payment_info) => {
                payment_info.serialize(FlatMapSerializer(&mut map))?
            }
            Self::MoneroSubscription(payment_info) => {
                payment_info.serialize(FlatMapSerializer(&mut map))?
            }
        };
        map.end()
    }
}
/// Collection of payment options, stored as JSON in the profile row.
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct PaymentOptions(pub Vec<PaymentOption>);

impl PaymentOptions {
    /// Borrows the options as a slice.
    pub fn inner(&self) -> &[PaymentOption] {
        let Self(payment_options) = self;
        payment_options
    }

    /// Consumes the wrapper, returning the options.
    pub fn into_inner(self) -> Vec<PaymentOption> {
        let Self(payment_options) = self;
        payment_options
    }

    /// Returns true if the profile has no payment options.
    pub fn is_empty(&self) -> bool {
        let Self(payment_options) = self;
        payment_options.is_empty()
    }

    /// Returns true if payment option list contains at least one option
    /// of the given type.
    pub fn any(&self, payment_type: PaymentType) -> bool {
        let Self(payment_options) = self;
        payment_options
            .iter()
            .any(|option| option.payment_type() == payment_type)
    }
}
json_from_sql!(PaymentOptions);
json_to_sql!(PaymentOptions);
/// A custom profile field: rendered HTML `value` plus its `value_source`.
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct ExtraField {
    pub name: String,
    pub value: String,
    pub value_source: Option<String>,
}

/// Collection of custom fields, stored as JSON in the profile row.
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct ExtraFields(pub Vec<ExtraField>);

impl ExtraFields {
    /// Consumes the wrapper, returning the fields.
    pub fn into_inner(self) -> Vec<ExtraField> {
        let Self(extra_fields) = self;
        extra_fields
    }
}
json_from_sql!(ExtraFields);
json_to_sql!(ExtraFields);

/// A declared alternative account, identified by its actor ID.
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct Alias {
    pub id: String,
}

/// Declared account aliases, stored as JSON in the profile row.
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct Aliases(Vec<Alias>);

impl Aliases {
    /// Wraps a list of actor IDs as aliases.
    pub fn new(actor_ids: Vec<String>) -> Self {
        // Not signed
        let aliases = actor_ids
            .into_iter()
            .map(|actor_id| Alias { id: actor_id })
            .collect();
        Self(aliases)
    }

    /// Consumes the wrapper, returning plain actor IDs.
    pub fn into_actor_ids(self) -> Vec<String> {
        let Self(aliases) = self;
        aliases.into_iter().map(|alias| alias.id).collect()
    }
}
json_from_sql!(Aliases);
json_to_sql!(Aliases);

/// Custom emojis referenced by the profile; read-only JSON column
/// (no `json_to_sql!` — written elsewhere, presumably via a join table).
#[derive(Clone, Deserialize)]
pub struct ProfileEmojis(Vec<DbEmoji>);

impl ProfileEmojis {
    /// Consumes the wrapper, returning the emoji rows.
    pub fn into_inner(self) -> Vec<DbEmoji> {
        let Self(emojis) = self;
        emojis
    }
}
json_from_sql!(ProfileEmojis);
/// Public key section of a stored ActivityPub actor document.
#[derive(Clone, Debug, Deserialize, Serialize)]
#[cfg_attr(feature = "test-utils", derive(Default))]
#[serde(rename_all = "camelCase")]
pub struct DbActorPublicKey {
    pub id: String,
    pub owner: String,
    pub public_key_pem: String,
}

/// Stored ActivityPub actor document (JSON column); camelCase on the wire.
#[derive(Clone, Debug, Deserialize, Serialize)]
#[cfg_attr(feature = "test-utils", derive(Default))]
#[serde(rename_all = "camelCase")]
pub struct DbActor {
    #[serde(rename = "type")]
    pub object_type: String,
    pub id: String,
    pub inbox: String,
    pub outbox: String,
    pub followers: Option<String>,
    pub subscribers: Option<String>,
    pub url: Option<String>,
    pub public_key: DbActorPublicKey,
}
json_from_sql!(DbActor);
json_to_sql!(DbActor);
/// Row of the `actor_profile` table. Covers both local accounts
/// (`actor_json` is NULL) and remote actors.
#[derive(Clone, FromSql)]
#[postgres(name = "actor_profile")]
pub struct DbActorProfile {
    pub id: Uuid,
    pub username: String,
    pub hostname: Option<String>, // None for local profiles
    pub display_name: Option<String>,
    pub bio: Option<String>,        // html
    pub bio_source: Option<String>, // plaintext or markdown
    pub avatar: Option<ProfileImage>,
    pub banner: Option<ProfileImage>,
    pub manually_approves_followers: bool,
    pub identity_proofs: IdentityProofs,
    pub payment_options: PaymentOptions,
    pub extra_fields: ExtraFields,
    pub aliases: Aliases,
    pub follower_count: i32,
    pub following_count: i32,
    pub subscriber_count: i32,
    pub post_count: i32,
    pub emojis: ProfileEmojis,
    pub actor_json: Option<DbActor>, // None for local profiles
    pub created_at: DateTime<Utc>,
    pub updated_at: DateTime<Utc>,
    pub unreachable_since: Option<DateTime<Utc>>,
    // auto-generated database fields
    pub acct: String,
    pub actor_id: Option<String>,
}

// Profile identifiers:
// id (local profile UUID): never changes
// acct (webfinger): must never change
// actor_id of remote actor: may change if acct remains the same
// actor RSA key: can be updated at any time by the instance admin
// identity proofs: TBD (likely will do "Trust on first use" (TOFU))
impl DbActorProfile {
    /// Verifies that a profile assumed to be remote carries the fields a
    /// remote profile must have (hostname and actor document).
    pub fn check_remote(&self) -> Result<(), DatabaseTypeError> {
        // Consistency checks
        match (&self.hostname, &self.actor_json) {
            (Some(_), Some(_)) => Ok(()),
            _ => Err(DatabaseTypeError),
        }
    }

    /// A profile is local when it has no ActivityPub actor document.
    pub fn is_local(&self) -> bool {
        self.actor_json.is_none()
    }

    /// Remote profiles last refreshed more than a day ago are stale;
    /// local profiles never are.
    pub fn possibly_outdated(&self) -> bool {
        !self.is_local() && self.updated_at < Utc::now() - Duration::days(1)
    }
}
#[cfg(feature = "test-utils")]
impl Default for DbActorProfile {
fn default() -> Self {
let now = Utc::now();
Self {
id: Uuid::new_v4(),
username: "".to_string(),
hostname: None,
acct: "".to_string(),
display_name: None,
bio: None,
bio_source: None,
avatar: None,
banner: None,
manually_approves_followers: false,
identity_proofs: IdentityProofs(vec![]),
payment_options: PaymentOptions(vec![]),
extra_fields: ExtraFields(vec![]),
aliases: Aliases(vec![]),
follower_count: 0,
following_count: 0,
subscriber_count: 0,
post_count: 0,
emojis: ProfileEmojis(vec![]),
actor_json: None,
actor_id: None,
created_at: now,
updated_at: now,
unreachable_since: None,
}
}
}
/// Input data for creating a profile (local or remote).
#[cfg_attr(feature = "test-utils", derive(Default))]
pub struct ProfileCreateData {
    pub username: String,
    pub hostname: Option<String>,
    pub display_name: Option<String>,
    pub bio: Option<String>,
    pub avatar: Option<ProfileImage>,
    pub banner: Option<ProfileImage>,
    pub manually_approves_followers: bool,
    pub identity_proofs: Vec<IdentityProof>,
    pub payment_options: Vec<PaymentOption>,
    pub extra_fields: Vec<ExtraField>,
    pub aliases: Vec<String>,
    pub emojis: Vec<Uuid>,
    pub actor_json: Option<DbActor>,
}

/// Input data for updating a profile; built from an existing profile
/// via the `From<&DbActorProfile>` impl below.
pub struct ProfileUpdateData {
    pub display_name: Option<String>,
    pub bio: Option<String>,
    pub bio_source: Option<String>,
    pub avatar: Option<ProfileImage>,
    pub banner: Option<ProfileImage>,
    pub manually_approves_followers: bool,
    pub identity_proofs: Vec<IdentityProof>,
    pub payment_options: Vec<PaymentOption>,
    pub extra_fields: Vec<ExtraField>,
    pub aliases: Vec<String>,
    pub emojis: Vec<Uuid>,
    pub actor_json: Option<DbActor>,
}
impl ProfileUpdateData {
    /// Adds new identity proof
    /// or replaces the existing one if it has the same issuer.
    // Note: explicit `-> ()` return types removed (clippy::unused_unit).
    pub fn add_identity_proof(&mut self, proof: IdentityProof) {
        self.identity_proofs
            .retain(|item| item.issuer != proof.issuer);
        self.identity_proofs.push(proof);
    }

    /// Adds new payment option
    /// or replaces the existing one if it has the same type.
    pub fn add_payment_option(&mut self, option: PaymentOption) {
        self.payment_options
            .retain(|item| item.payment_type() != option.payment_type());
        self.payment_options.push(option);
    }
}
// Seeds update data with the profile's current values; the caller then
// overwrites the fields being changed. The profile is cloned up front so
// the owned collections can be moved out of it.
impl From<&DbActorProfile> for ProfileUpdateData {
    fn from(profile: &DbActorProfile) -> Self {
        let profile = profile.clone();
        Self {
            display_name: profile.display_name,
            bio: profile.bio,
            bio_source: profile.bio_source,
            avatar: profile.avatar,
            banner: profile.banner,
            manually_approves_followers: profile.manually_approves_followers,
            identity_proofs: profile.identity_proofs.into_inner(),
            payment_options: profile.payment_options.into_inner(),
            extra_fields: profile.extra_fields.into_inner(),
            aliases: profile.aliases.into_actor_ids(),
            emojis: profile
                .emojis
                .into_inner()
                .into_iter()
                .map(|emoji| emoji.id)
                .collect(),
            actor_json: profile.actor_json,
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    /// Identity proofs round-trip through serde with integer proof_type tags.
    #[test]
    fn test_identity_proof_serialization() {
        let json_data = r#"{"issuer":"did:pkh:eip155:1:0xb9c5714089478a327f09197987f16f9e5d936e8a","proof_type":1,"value":"dbfe"}"#;
        let proof: IdentityProof = serde_json::from_str(json_data).unwrap();
        let did_pkh = match proof.issuer {
            Did::Pkh(ref did_pkh) => did_pkh,
            _ => panic!("unexpected did method"),
        };
        assert_eq!(
            did_pkh.address,
            "0xb9c5714089478a327f09197987f16f9e5d936e8a"
        );
        let serialized = serde_json::to_string(&proof).unwrap();
        assert_eq!(serialized, json_data);
    }

    /// Link options round-trip through the flat-map serialization.
    #[test]
    fn test_payment_option_link_serialization() {
        let json_data = r#"{"payment_type":1,"name":"test","href":"https://test.com"}"#;
        let payment_option: PaymentOption = serde_json::from_str(json_data).unwrap();
        let link = match payment_option {
            PaymentOption::Link(ref link) => link,
            _ => panic!("wrong option"),
        };
        assert_eq!(link.name, "test");
        assert_eq!(link.href, "https://test.com");
        let serialized = serde_json::to_string(&payment_option).unwrap();
        assert_eq!(serialized, json_data);
    }

    /// Unknown fields ("name":null) are ignored on input and dropped on output.
    #[test]
    fn test_payment_option_ethereum_subscription_serialization() {
        let json_data = r#"{"payment_type":2,"chain_id":"eip155:1","name":null}"#;
        let payment_option: PaymentOption = serde_json::from_str(json_data).unwrap();
        let payment_info = match payment_option {
            PaymentOption::EthereumSubscription(ref payment_info) => payment_info,
            _ => panic!("wrong option"),
        };
        assert_eq!(payment_info.chain_id, ChainId::ethereum_mainnet());
        let serialized = serde_json::to_string(&payment_option).unwrap();
        assert_eq!(serialized, r#"{"payment_type":2,"chain_id":"eip155:1"}"#);
    }

    /// Aliases preserve actor IDs through the wrapper round trip.
    #[test]
    fn test_alias() {
        let actor_id = "https://example.com/users/alice";
        let aliases = Aliases::new(vec![actor_id.to_string()]);
        let actor_ids = aliases.into_actor_ids();
        assert_eq!(actor_ids.len(), 1);
        assert_eq!(actor_ids[0], actor_id);
    }
}

View file

@ -1,72 +0,0 @@
use serde::{de::DeserializeOwned, Serialize};
use serde_json::Value as JsonValue;
use crate::database::{DatabaseClient, DatabaseError, DatabaseTypeError};
/// Upserts a JSON-serializable value under `name` in `internal_property`.
/// Serialization failure maps to `DatabaseTypeError`.
pub async fn set_internal_property(
    db_client: &impl DatabaseClient,
    name: &str,
    value: &impl Serialize,
) -> Result<(), DatabaseError> {
    let value_json = serde_json::to_value(value).map_err(|_| DatabaseTypeError)?;
    db_client
        .execute(
            "
            INSERT INTO internal_property (property_name, property_value)
            VALUES ($1, $2)
            ON CONFLICT (property_name) DO UPDATE
            SET property_value = $2
            ",
            &[&name, &value_json],
        )
        .await?;
    Ok(())
}

/// Reads the property `name`, deserializing its JSON value into `T`.
/// Returns `Ok(None)` when the property is absent; a value that does not
/// deserialize into `T` maps to `DatabaseTypeError`.
pub async fn get_internal_property<T: DeserializeOwned>(
    db_client: &impl DatabaseClient,
    name: &str,
) -> Result<Option<T>, DatabaseError> {
    let maybe_row = db_client
        .query_opt(
            "
            SELECT property_value
            FROM internal_property
            WHERE property_name = $1
            ",
            &[&name],
        )
        .await?;
    let maybe_value = match maybe_row {
        Some(row) => {
            let value_json: JsonValue = row.try_get("property_value")?;
            let value: T = serde_json::from_value(value_json).map_err(|_| DatabaseTypeError)?;
            Some(value)
        }
        None => None,
    };
    Ok(maybe_value)
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::database::test_utils::create_test_database;
    use serial_test::serial;

    /// A stored integer property reads back with the same value.
    #[tokio::test]
    #[serial]
    async fn test_set_internal_property() {
        let db_client = &create_test_database().await;
        let name = "myproperty";
        let value = 100;
        set_internal_property(db_client, name, &value)
            .await
            .unwrap();
        let db_value: u32 = get_internal_property(db_client, name)
            .await
            .unwrap()
            .unwrap_or_default();
        assert_eq!(db_value, value);
    }
}

View file

@ -1,2 +0,0 @@
pub mod queries;
pub mod types;

View file

@ -1,788 +0,0 @@
use uuid::Uuid;
use fedimovies_utils::id::generate_ulid;
use crate::database::{catch_unique_violation, DatabaseClient, DatabaseError};
use crate::notifications::queries::create_follow_notification;
use crate::profiles::{
queries::{update_follower_count, update_following_count, update_subscriber_count},
types::DbActorProfile,
};
use super::types::{
DbFollowRequest, DbRelationship, FollowRequestStatus, RelatedActorProfile, RelationshipType,
};
/// Returns all relationship rows between two profiles, in both directions.
/// A pending follow request from `source_id` to `target_id` (that direction
/// only) is appended as a pseudo-relationship of type `FollowRequest`.
pub async fn get_relationships(
    db_client: &impl DatabaseClient,
    source_id: &Uuid,
    target_id: &Uuid,
) -> Result<Vec<DbRelationship>, DatabaseError> {
    let rows = db_client
        .query(
            "
            SELECT source_id, target_id, relationship_type
            FROM relationship
            WHERE
                source_id = $1 AND target_id = $2
                OR
                source_id = $2 AND target_id = $1
            UNION ALL
            SELECT source_id, target_id, $4
            FROM follow_request
            WHERE
                source_id = $1 AND target_id = $2
                AND request_status = $3
            ",
            &[
                &source_id,
                &target_id,
                &FollowRequestStatus::Pending,
                &RelationshipType::FollowRequest,
            ],
        )
        .await?;
    let relationships = rows
        .iter()
        .map(DbRelationship::try_from)
        .collect::<Result<_, _>>()?;
    Ok(relationships)
}
/// Returns true if a relationship of the given type exists in the
/// direction `source_id` -> `target_id`.
pub async fn has_relationship(
    db_client: &impl DatabaseClient,
    source_id: &Uuid,
    target_id: &Uuid,
    relationship_type: RelationshipType,
) -> Result<bool, DatabaseError> {
    let maybe_row = db_client
        .query_opt(
            "
            SELECT 1
            FROM relationship
            WHERE
                source_id = $1 AND target_id = $2
                AND relationship_type = $3
            ",
            &[&source_id, &target_id, &relationship_type],
        )
        .await?;
    Ok(maybe_row.is_some())
}
/// Records a follow relationship in a single transaction: inserts the row,
/// updates both profiles' counters, and notifies local targets.
/// A duplicate follow surfaces as a unique-violation error ("relationship").
pub async fn follow(
    db_client: &mut impl DatabaseClient,
    source_id: &Uuid,
    target_id: &Uuid,
) -> Result<(), DatabaseError> {
    let transaction = db_client.transaction().await?;
    transaction
        .execute(
            "
            INSERT INTO relationship (source_id, target_id, relationship_type)
            VALUES ($1, $2, $3)
            ",
            &[&source_id, &target_id, &RelationshipType::Follow],
        )
        .await
        .map_err(catch_unique_violation("relationship"))?;
    let target_profile = update_follower_count(&transaction, target_id, 1).await?;
    update_following_count(&transaction, source_id, 1).await?;
    // Remote targets learn about the follow over federation instead.
    if target_profile.is_local() {
        create_follow_notification(&transaction, source_id, target_id).await?;
    };
    transaction.commit().await?;
    Ok(())
}
/// Removes a follow relationship (and any pending follow request) in one
/// transaction. Returns the id of the deleted follow request, if one
/// existed. Errors with `NotFound("relationship")` when neither a
/// relationship nor a follow request was present.
pub async fn unfollow(
    db_client: &mut impl DatabaseClient,
    source_id: &Uuid,
    target_id: &Uuid,
) -> Result<Option<Uuid>, DatabaseError> {
    let transaction = db_client.transaction().await?;
    let deleted_count = transaction
        .execute(
            "
            DELETE FROM relationship
            WHERE
                source_id = $1 AND target_id = $2
                AND relationship_type = $3
            ",
            &[&source_id, &target_id, &RelationshipType::Follow],
        )
        .await?;
    let relationship_deleted = deleted_count > 0;
    // Delete follow request (for remote follows)
    let follow_request_deleted =
        delete_follow_request_opt(&transaction, source_id, target_id).await?;
    if !relationship_deleted && follow_request_deleted.is_none() {
        return Err(DatabaseError::NotFound("relationship"));
    };
    if relationship_deleted {
        // Also reset repost and reply visibility settings
        show_reposts(&transaction, source_id, target_id).await?;
        show_replies(&transaction, source_id, target_id).await?;
        // Update counters only if relationship existed
        update_follower_count(&transaction, target_id, -1).await?;
        update_following_count(&transaction, source_id, -1).await?;
    };
    transaction.commit().await?;
    Ok(follow_request_deleted)
}
/// Follow remote actor: records a new pending follow request
/// from a local user to a remote profile.
/// Fails with a unique violation if a request already exists.
pub async fn create_follow_request(
    db_client: &impl DatabaseClient,
    source_id: &Uuid,
    target_id: &Uuid,
) -> Result<DbFollowRequest, DatabaseError> {
    let request_id = generate_ulid();
    let statement = "
        INSERT INTO follow_request (
            id, source_id, target_id, request_status
        )
        VALUES ($1, $2, $3, $4)
        RETURNING follow_request
        ";
    let row = db_client
        .query_one(
            statement,
            &[
                &request_id,
                &source_id,
                &target_id,
                &FollowRequestStatus::Pending,
            ],
        )
        .await
        .map_err(catch_unique_violation("follow request"))?;
    Ok(row.try_get("follow_request")?)
}
/// Save follow request from remote actor: upserts a follow request
/// keyed by the (source, target) pair, refreshing the activity id
/// when a request already exists.
///
/// NOTE(review): on conflict only `activity_id` is updated, so a
/// previously rejected request keeps its Rejected status — confirm
/// this is intended for repeated remote Follow activities.
pub async fn create_remote_follow_request_opt(
    db_client: &impl DatabaseClient,
    source_id: &Uuid,
    target_id: &Uuid,
    activity_id: &str,
) -> Result<DbFollowRequest, DatabaseError> {
    let request_id = generate_ulid();
    let statement = "
        INSERT INTO follow_request (
            id,
            source_id,
            target_id,
            activity_id,
            request_status
        )
        VALUES ($1, $2, $3, $4, $5)
        ON CONFLICT (source_id, target_id)
        DO UPDATE SET activity_id = $4
        RETURNING follow_request
        ";
    let row = db_client
        .query_one(
            statement,
            &[
                &request_id,
                &source_id,
                &target_id,
                &activity_id,
                &FollowRequestStatus::Pending,
            ],
        )
        .await?;
    Ok(row.try_get("follow_request")?)
}
/// Marks a follow request as accepted and creates the
/// corresponding follow relationship in the same transaction.
/// Returns `NotFound` when the request does not exist.
pub async fn follow_request_accepted(
    db_client: &mut impl DatabaseClient,
    request_id: &Uuid,
) -> Result<(), DatabaseError> {
    let mut transaction = db_client.transaction().await?;
    let row = transaction
        .query_opt(
            "
            UPDATE follow_request
            SET request_status = $1
            WHERE id = $2
            RETURNING source_id, target_id
            ",
            &[&FollowRequestStatus::Accepted, &request_id],
        )
        .await?
        .ok_or(DatabaseError::NotFound("follow request"))?;
    let source_id: Uuid = row.try_get("source_id")?;
    let target_id: Uuid = row.try_get("target_id")?;
    // Acceptance is what actually establishes the relationship
    follow(&mut transaction, &source_id, &target_id).await?;
    transaction.commit().await?;
    Ok(())
}
/// Marks a follow request as rejected.
/// Returns `NotFound` when the request does not exist.
pub async fn follow_request_rejected(
    db_client: &impl DatabaseClient,
    request_id: &Uuid,
) -> Result<(), DatabaseError> {
    let updated_count = db_client
        .execute(
            "
            UPDATE follow_request
            SET request_status = $1
            WHERE id = $2
            ",
            &[&FollowRequestStatus::Rejected, &request_id],
        )
        .await?;
    match updated_count {
        0 => Err(DatabaseError::NotFound("follow request")),
        _ => Ok(()),
    }
}
/// Deletes the follow request between two profiles, if present.
/// Returns the id of the deleted request, or `None` when no
/// request existed (no error is raised in that case).
async fn delete_follow_request_opt(
    db_client: &impl DatabaseClient,
    source_id: &Uuid,
    target_id: &Uuid,
) -> Result<Option<Uuid>, DatabaseError> {
    let maybe_row = db_client
        .query_opt(
            "
            DELETE FROM follow_request
            WHERE source_id = $1 AND target_id = $2
            RETURNING id
            ",
            &[&source_id, &target_id],
        )
        .await?;
    // transpose() turns Option<Result<Uuid, _>> into
    // Result<Option<Uuid>, _> so `?` can propagate the error
    let maybe_request_id = maybe_row
        .map(|row| row.try_get("id"))
        .transpose()?;
    Ok(maybe_request_id)
}
/// Fetches a follow request by its primary key.
/// Returns `NotFound` when no such request exists.
pub async fn get_follow_request_by_id(
    db_client: &impl DatabaseClient,
    request_id: &Uuid,
) -> Result<DbFollowRequest, DatabaseError> {
    let statement = "
        SELECT follow_request
        FROM follow_request
        WHERE id = $1
        ";
    let row = db_client
        .query_opt(statement, &[&request_id])
        .await?
        .ok_or(DatabaseError::NotFound("follow request"))?;
    Ok(row.try_get("follow_request")?)
}
/// Fetches a follow request by the id of the remote Follow activity
/// that created it. Returns `NotFound` when no such request exists.
pub async fn get_follow_request_by_activity_id(
    db_client: &impl DatabaseClient,
    activity_id: &str,
) -> Result<DbFollowRequest, DatabaseError> {
    let statement = "
        SELECT follow_request
        FROM follow_request
        WHERE activity_id = $1
        ";
    let row = db_client
        .query_opt(statement, &[&activity_id])
        .await?
        .ok_or(DatabaseError::NotFound("follow request"))?;
    Ok(row.try_get("follow_request")?)
}
/// Lists all profiles that follow `profile_id` (unpaginated).
pub async fn get_followers(
    db_client: &impl DatabaseClient,
    profile_id: &Uuid,
) -> Result<Vec<DbActorProfile>, DatabaseError> {
    let rows = db_client
        .query(
            "
            SELECT actor_profile
            FROM actor_profile
            JOIN relationship
            ON (actor_profile.id = relationship.source_id)
            WHERE
                relationship.target_id = $1
                AND relationship.relationship_type = $2
            ",
            &[&profile_id, &RelationshipType::Follow],
        )
        .await?;
    let mut profiles = Vec::with_capacity(rows.len());
    for row in rows.iter() {
        profiles.push(row.try_get("actor_profile")?);
    }
    Ok(profiles)
}
/// Returns one page of followers of `profile_id`, newest
/// relationships first. `max_relationship_id` is an exclusive
/// cursor; pass `None` for the first page.
pub async fn get_followers_paginated(
    db_client: &impl DatabaseClient,
    profile_id: &Uuid,
    max_relationship_id: Option<i32>,
    limit: u16,
) -> Result<Vec<RelatedActorProfile>, DatabaseError> {
    let limit = i64::from(limit);
    let rows = db_client
        .query(
            "
            SELECT relationship.id, actor_profile
            FROM actor_profile
            JOIN relationship
            ON (actor_profile.id = relationship.source_id)
            WHERE
                relationship.target_id = $1
                AND relationship.relationship_type = $2
                AND ($3::integer IS NULL OR relationship.id < $3)
            ORDER BY relationship.id DESC
            LIMIT $4
            ",
            &[
                &profile_id,
                &RelationshipType::Follow,
                &max_relationship_id,
                &limit,
            ],
        )
        .await?;
    let mut related_profiles = Vec::with_capacity(rows.len());
    for row in rows.iter() {
        related_profiles.push(RelatedActorProfile::try_from(row)?);
    }
    Ok(related_profiles)
}
/// Checks whether the actor identified by `actor_id` has at least
/// one follower recorded in the relationship table.
pub async fn has_local_followers(
    db_client: &impl DatabaseClient,
    actor_id: &str,
) -> Result<bool, DatabaseError> {
    let statement = "
        SELECT 1
        FROM relationship
        JOIN actor_profile ON (relationship.target_id = actor_profile.id)
        WHERE
            actor_profile.actor_id = $1
            AND relationship_type = $2
        LIMIT 1
        ";
    let maybe_row = db_client
        .query_opt(statement, &[&actor_id, &RelationshipType::Follow])
        .await?;
    Ok(maybe_row.is_some())
}
/// Lists all profiles that `profile_id` follows (unpaginated).
pub async fn get_following(
    db_client: &impl DatabaseClient,
    profile_id: &Uuid,
) -> Result<Vec<DbActorProfile>, DatabaseError> {
    let rows = db_client
        .query(
            "
            SELECT actor_profile
            FROM actor_profile
            JOIN relationship
            ON (actor_profile.id = relationship.target_id)
            WHERE
                relationship.source_id = $1
                AND relationship.relationship_type = $2
            ",
            &[&profile_id, &RelationshipType::Follow],
        )
        .await?;
    let mut profiles = Vec::with_capacity(rows.len());
    for row in rows.iter() {
        profiles.push(row.try_get("actor_profile")?);
    }
    Ok(profiles)
}
/// Returns one page of profiles that `profile_id` follows, newest
/// relationships first. `max_relationship_id` is an exclusive
/// cursor; pass `None` for the first page.
pub async fn get_following_paginated(
    db_client: &impl DatabaseClient,
    profile_id: &Uuid,
    max_relationship_id: Option<i32>,
    limit: u16,
) -> Result<Vec<RelatedActorProfile>, DatabaseError> {
    let limit = i64::from(limit);
    let rows = db_client
        .query(
            "
            SELECT relationship.id, actor_profile
            FROM actor_profile
            JOIN relationship
            ON (actor_profile.id = relationship.target_id)
            WHERE
                relationship.source_id = $1
                AND relationship.relationship_type = $2
                AND ($3::integer IS NULL OR relationship.id < $3)
            ORDER BY relationship.id DESC
            LIMIT $4
            ",
            &[
                &profile_id,
                &RelationshipType::Follow,
                &max_relationship_id,
                &limit,
            ],
        )
        .await?;
    let mut related_profiles = Vec::with_capacity(rows.len());
    for row in rows.iter() {
        related_profiles.push(RelatedActorProfile::try_from(row)?);
    }
    Ok(related_profiles)
}
/// Creates a subscription relationship from `source_id` to
/// `target_id` and increments the target's subscriber counter.
/// Fails if the relationship already exists.
pub async fn subscribe(
    db_client: &mut impl DatabaseClient,
    source_id: &Uuid,
    target_id: &Uuid,
) -> Result<(), DatabaseError> {
    let transaction = db_client.transaction().await?;
    let insert_result = transaction
        .execute(
            "
            INSERT INTO relationship (source_id, target_id, relationship_type)
            VALUES ($1, $2, $3)
            ",
            &[&source_id, &target_id, &RelationshipType::Subscription],
        )
        .await;
    insert_result.map_err(catch_unique_violation("relationship"))?;
    update_subscriber_count(&transaction, target_id, 1).await?;
    transaction.commit().await?;
    Ok(())
}
/// Creates a subscription relationship unless one already exists.
/// The subscriber counter is incremented only when a row was
/// actually inserted, keeping it consistent on repeated calls.
pub async fn subscribe_opt(
    db_client: &mut impl DatabaseClient,
    source_id: &Uuid,
    target_id: &Uuid,
) -> Result<(), DatabaseError> {
    let transaction = db_client.transaction().await?;
    let rows_inserted = transaction
        .execute(
            "
            INSERT INTO relationship (source_id, target_id, relationship_type)
            VALUES ($1, $2, $3)
            ON CONFLICT (source_id, target_id, relationship_type) DO NOTHING
            ",
            &[&source_id, &target_id, &RelationshipType::Subscription],
        )
        .await?;
    if rows_inserted > 0 {
        update_subscriber_count(&transaction, target_id, 1).await?;
    };
    transaction.commit().await?;
    Ok(())
}
/// Removes a subscription relationship and decrements the target's
/// subscriber counter. Returns `NotFound` when no subscription
/// relationship exists.
pub async fn unsubscribe(
    db_client: &mut impl DatabaseClient,
    source_id: &Uuid,
    target_id: &Uuid,
) -> Result<(), DatabaseError> {
    let transaction = db_client.transaction().await?;
    let deleted_count = transaction
        .execute(
            "
            DELETE FROM relationship
            WHERE
                source_id = $1 AND target_id = $2
                AND relationship_type = $3
            ",
            &[&source_id, &target_id, &RelationshipType::Subscription],
        )
        .await?;
    match deleted_count {
        0 => Err(DatabaseError::NotFound("relationship")),
        _ => {
            update_subscriber_count(&transaction, target_id, -1).await?;
            transaction.commit().await?;
            Ok(())
        }
    }
}
/// Lists all profiles subscribed to `profile_id`,
/// most recent subscriptions first.
pub async fn get_subscribers(
    db_client: &impl DatabaseClient,
    profile_id: &Uuid,
) -> Result<Vec<DbActorProfile>, DatabaseError> {
    let rows = db_client
        .query(
            "
            SELECT actor_profile
            FROM actor_profile
            JOIN relationship
            ON (actor_profile.id = relationship.source_id)
            WHERE
                relationship.target_id = $1
                AND relationship.relationship_type = $2
            ORDER BY relationship.id DESC
            ",
            &[&profile_id, &RelationshipType::Subscription],
        )
        .await?;
    let mut profiles = Vec::with_capacity(rows.len());
    for row in rows.iter() {
        profiles.push(row.try_get("actor_profile")?);
    }
    Ok(profiles)
}
pub async fn hide_reposts(
db_client: &impl DatabaseClient,
source_id: &Uuid,
target_id: &Uuid,
) -> Result<(), DatabaseError> {
db_client
.execute(
"
INSERT INTO relationship (source_id, target_id, relationship_type)
VALUES ($1, $2, $3)
ON CONFLICT (source_id, target_id, relationship_type) DO NOTHING
",
&[&source_id, &target_id, &RelationshipType::HideReposts],
)
.await?;
Ok(())
}
pub async fn show_reposts(
db_client: &impl DatabaseClient,
source_id: &Uuid,
target_id: &Uuid,
) -> Result<(), DatabaseError> {
// Does not return NotFound error
db_client
.execute(
"
DELETE FROM relationship
WHERE
source_id = $1 AND target_id = $2
AND relationship_type = $3
",
&[&source_id, &target_id, &RelationshipType::HideReposts],
)
.await?;
Ok(())
}
pub async fn hide_replies(
db_client: &impl DatabaseClient,
source_id: &Uuid,
target_id: &Uuid,
) -> Result<(), DatabaseError> {
db_client
.execute(
"
INSERT INTO relationship (source_id, target_id, relationship_type)
VALUES ($1, $2, $3)
ON CONFLICT (source_id, target_id, relationship_type) DO NOTHING
",
&[&source_id, &target_id, &RelationshipType::HideReplies],
)
.await?;
Ok(())
}
pub async fn show_replies(
db_client: &impl DatabaseClient,
source_id: &Uuid,
target_id: &Uuid,
) -> Result<(), DatabaseError> {
// Does not return NotFound error
db_client
.execute(
"
DELETE FROM relationship
WHERE
source_id = $1 AND target_id = $2
AND relationship_type = $3
",
&[&source_id, &target_id, &RelationshipType::HideReplies],
)
.await?;
Ok(())
}
pub async fn mute_posts(
db_client: &impl DatabaseClient,
source_id: &Uuid,
target_id: &Uuid,
) -> Result<(), DatabaseError> {
db_client
.execute(
"
INSERT INTO relationship (source_id, target_id, relationship_type)
VALUES ($1, $2, $3)
ON CONFLICT (source_id, target_id, relationship_type) DO NOTHING
",
&[&source_id, &target_id, &RelationshipType::Mute],
)
.await?;
Ok(())
}
pub async fn unmute_posts(
db_client: &impl DatabaseClient,
source_id: &Uuid,
target_id: &Uuid,
) -> Result<(), DatabaseError> {
// Does not return NotFound error
db_client
.execute(
"
DELETE FROM relationship
WHERE
source_id = $1 AND target_id = $2
AND relationship_type = $3
",
&[&source_id, &target_id, &RelationshipType::Mute],
)
.await?;
Ok(())
}
/// Checks whether `source_id` has muted posts from `target_id`.
pub async fn is_muted(
    db_client: &impl DatabaseClient,
    source_id: &Uuid,
    target_id: &Uuid,
) -> Result<bool, DatabaseError> {
    // query_opt suffices for an existence check and avoids
    // materializing a row vector; also matches the sibling
    // has_relationship() and fixes the clippy::len_zero pattern
    // (`rows.len() > 0`) of the previous implementation.
    let maybe_row = db_client
        .query_opt(
            "
            SELECT 1
            FROM relationship
            WHERE
                source_id = $1 AND target_id = $2
                AND relationship_type = $3
            ",
            &[&source_id, &target_id, &RelationshipType::Mute],
        )
        .await?;
    Ok(maybe_row.is_some())
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::database::{test_utils::create_test_database, DatabaseError};
    use crate::profiles::{
        queries::create_profile,
        types::{DbActor, ProfileCreateData},
    };
    use crate::users::{queries::create_user, types::UserCreateData};
    use serial_test::serial;
    // Full follow lifecycle of a local user following a remote actor:
    // request creation -> acceptance -> unfollow.
    #[tokio::test]
    #[serial]
    async fn test_follow_remote_profile() {
        let db_client = &mut create_test_database().await;
        // Local user who initiates the follow
        let source_data = UserCreateData {
            username: "test".to_string(),
            password_hash: Some("test".to_string()),
            ..Default::default()
        };
        let source = create_user(db_client, source_data).await.unwrap();
        // Remote profile being followed (has actor JSON)
        let target_actor_id = "https://example.org/users/1";
        let target_data = ProfileCreateData {
            username: "followed".to_string(),
            hostname: Some("example.org".to_string()),
            actor_json: Some(DbActor {
                id: target_actor_id.to_string(),
                ..Default::default()
            }),
            ..Default::default()
        };
        let target = create_profile(db_client, target_data).await.unwrap();
        // Create follow request
        let follow_request = create_follow_request(db_client, &source.id, &target.id)
            .await
            .unwrap();
        assert_eq!(follow_request.source_id, source.id);
        assert_eq!(follow_request.target_id, target.id);
        assert_eq!(follow_request.activity_id, None);
        assert_eq!(follow_request.request_status, FollowRequestStatus::Pending);
        // A pending request must not create a relationship yet
        let following = get_following(db_client, &source.id).await.unwrap();
        assert!(following.is_empty());
        // Accept follow request
        follow_request_accepted(db_client, &follow_request.id)
            .await
            .unwrap();
        let follow_request = get_follow_request_by_id(db_client, &follow_request.id)
            .await
            .unwrap();
        assert_eq!(follow_request.request_status, FollowRequestStatus::Accepted);
        // Acceptance creates the follow relationship
        let following = get_following(db_client, &source.id).await.unwrap();
        assert_eq!(following[0].id, target.id);
        let target_has_followers = has_local_followers(db_client, target_actor_id)
            .await
            .unwrap();
        assert!(target_has_followers);
        // Unfollow: returns the id of the deleted follow request
        let follow_request_id = unfollow(db_client, &source.id, &target.id)
            .await
            .unwrap()
            .unwrap();
        assert_eq!(follow_request_id, follow_request.id);
        // Both the request and the relationship must be gone
        let follow_request_result = get_follow_request_by_id(db_client, &follow_request_id).await;
        assert!(matches!(
            follow_request_result,
            Err(DatabaseError::NotFound("follow request")),
        ));
        let following = get_following(db_client, &source.id).await.unwrap();
        assert!(following.is_empty());
    }
    // Incoming follow from a remote actor: upsert of the request
    // keyed by (source, target), then acceptance.
    #[tokio::test]
    #[serial]
    async fn test_followed_by_remote_profile() {
        let db_client = &mut create_test_database().await;
        // Remote follower profile
        let source_data = ProfileCreateData {
            username: "follower".to_string(),
            hostname: Some("example.org".to_string()),
            actor_json: Some(DbActor::default()),
            ..Default::default()
        };
        let source = create_profile(db_client, source_data).await.unwrap();
        // Local user being followed
        let target_data = UserCreateData {
            username: "test".to_string(),
            password_hash: Some("test".to_string()),
            ..Default::default()
        };
        let target = create_user(db_client, target_data).await.unwrap();
        // Create follow request
        let activity_id = "https://example.org/objects/123";
        let _follow_request =
            create_remote_follow_request_opt(db_client, &source.id, &target.id, activity_id)
                .await
                .unwrap();
        // Repeat: the second call must upsert, not fail
        let follow_request =
            create_remote_follow_request_opt(db_client, &source.id, &target.id, activity_id)
                .await
                .unwrap();
        assert_eq!(follow_request.source_id, source.id);
        assert_eq!(follow_request.target_id, target.id);
        assert_eq!(follow_request.activity_id, Some(activity_id.to_string()));
        assert_eq!(follow_request.request_status, FollowRequestStatus::Pending);
        // Accept follow request
        follow_request_accepted(db_client, &follow_request.id)
            .await
            .unwrap();
        let follow_request = get_follow_request_by_id(db_client, &follow_request.id)
            .await
            .unwrap();
        assert_eq!(follow_request.request_status, FollowRequestStatus::Accepted);
    }
}

View file

@ -1,2 +0,0 @@
pub mod queries;
pub mod types;

View file

@ -1,254 +0,0 @@
use chrono::{DateTime, Utc};
use uuid::Uuid;
use fedimovies_utils::caip2::ChainId;
use crate::database::{catch_unique_violation, DatabaseClient, DatabaseError};
use crate::invoices::types::DbChainId;
use crate::profiles::types::PaymentType;
use crate::relationships::{
queries::{subscribe, subscribe_opt},
types::RelationshipType,
};
use super::types::{DbSubscription, Subscription};
/// Records a new paid subscription and creates the corresponding
/// subscription relationship in a single transaction.
///
/// # Panics
/// Panics unless `sender_address` is present exactly when
/// `chain_id` is an Ethereum chain.
pub async fn create_subscription(
    db_client: &mut impl DatabaseClient,
    sender_id: &Uuid,
    sender_address: Option<&str>,
    recipient_id: &Uuid,
    chain_id: &ChainId,
    expires_at: &DateTime<Utc>,
    updated_at: &DateTime<Utc>,
) -> Result<(), DatabaseError> {
    // Sender address accompanies Ethereum subscriptions only
    assert!(chain_id.is_ethereum() == sender_address.is_some());
    let mut transaction = db_client.transaction().await?;
    let statement = "
        INSERT INTO subscription (
            sender_id,
            sender_address,
            recipient_id,
            chain_id,
            expires_at,
            updated_at
        )
        VALUES ($1, $2, $3, $4, $5, $6)
        ";
    transaction
        .execute(
            statement,
            &[
                &sender_id,
                &sender_address,
                &recipient_id,
                &DbChainId::new(chain_id),
                &expires_at,
                &updated_at,
            ],
        )
        .await
        .map_err(catch_unique_violation("subscription"))?;
    // A subscription record implies the relationship row as well
    subscribe(&mut transaction, sender_id, recipient_id).await?;
    transaction.commit().await?;
    Ok(())
}
/// Updates the expiration of an existing subscription and, when the
/// new expiration is in the future, (re)creates the subscription
/// relationship. Returns `NotFound` when the subscription doesn't exist.
pub async fn update_subscription(
    db_client: &mut impl DatabaseClient,
    subscription_id: i32,
    expires_at: &DateTime<Utc>,
    updated_at: &DateTime<Utc>,
) -> Result<(), DatabaseError> {
    let mut transaction = db_client.transaction().await?;
    let row = transaction
        .query_opt(
            "
            UPDATE subscription
            SET
                expires_at = $2,
                updated_at = $3
            WHERE id = $1
            RETURNING sender_id, recipient_id
            ",
            &[&subscription_id, &expires_at, &updated_at],
        )
        .await?
        .ok_or(DatabaseError::NotFound("subscription"))?;
    let sender_id: Uuid = row.try_get("sender_id")?;
    let recipient_id: Uuid = row.try_get("recipient_id")?;
    // Only an unexpired subscription grants the relationship
    if *expires_at > Utc::now() {
        subscribe_opt(&mut transaction, &sender_id, &recipient_id).await?;
    };
    transaction.commit().await?;
    Ok(())
}
/// Fetches the subscription connecting a sender and a recipient.
/// Returns `NotFound` when no such subscription exists.
pub async fn get_subscription_by_participants(
    db_client: &impl DatabaseClient,
    sender_id: &Uuid,
    recipient_id: &Uuid,
) -> Result<DbSubscription, DatabaseError> {
    let statement = "
        SELECT subscription
        FROM subscription
        WHERE sender_id = $1 AND recipient_id = $2
        ";
    let row = db_client
        .query_opt(statement, &[sender_id, recipient_id])
        .await?
        .ok_or(DatabaseError::NotFound("subscription"))?;
    let subscription: DbSubscription = row.try_get("subscription")?;
    Ok(subscription)
}
/// Lists subscriptions that have expired but whose subscription
/// relationship still exists (i.e. they still need to be revoked).
pub async fn get_expired_subscriptions(
    db_client: &impl DatabaseClient,
) -> Result<Vec<DbSubscription>, DatabaseError> {
    let rows = db_client
        .query(
            "
            SELECT subscription
            FROM subscription
            JOIN relationship
            ON (
                relationship.source_id = subscription.sender_id
                AND relationship.target_id = subscription.recipient_id
                AND relationship.relationship_type = $1
            )
            WHERE subscription.expires_at <= CURRENT_TIMESTAMP
            ",
            &[&RelationshipType::Subscription],
        )
        .await?;
    let mut subscriptions = Vec::with_capacity(rows.len());
    for row in rows.iter() {
        subscriptions.push(row.try_get("subscription")?);
    }
    Ok(subscriptions)
}
/// Returns one page of subscriptions received by `recipient_id`
/// together with the sender profiles, newest first.
/// `max_subscription_id` is an exclusive cursor; `None` starts
/// from the newest subscription.
pub async fn get_incoming_subscriptions(
    db_client: &impl DatabaseClient,
    recipient_id: &Uuid,
    max_subscription_id: Option<i32>,
    limit: u16,
) -> Result<Vec<Subscription>, DatabaseError> {
    let limit = i64::from(limit);
    let rows = db_client
        .query(
            "
            SELECT subscription, actor_profile AS sender
            FROM actor_profile
            JOIN subscription
            ON (actor_profile.id = subscription.sender_id)
            WHERE
                subscription.recipient_id = $1
                AND ($2::integer IS NULL OR subscription.id < $2)
            ORDER BY subscription.id DESC
            LIMIT $3
            ",
            &[&recipient_id, &max_subscription_id, &limit],
        )
        .await?;
    let mut subscriptions = Vec::with_capacity(rows.len());
    for row in rows.iter() {
        subscriptions.push(Subscription::try_from(row)?);
    }
    Ok(subscriptions)
}
/// Wipes all subscription state in one transaction: subscription
/// relationships, denormalized subscriber counters and subscription
/// records. When `ethereum_contract_replaced` is true, Ethereum
/// subscription payment options are also removed from local profiles.
pub async fn reset_subscriptions(
    db_client: &mut impl DatabaseClient,
    ethereum_contract_replaced: bool,
) -> Result<(), DatabaseError> {
    let transaction = db_client.transaction().await?;
    if ethereum_contract_replaced {
        // Ethereum subscription configuration is stored in contract.
        // If contract is replaced, payment option needs to be deleted.
        // Only local profiles (actor_json IS NULL) are affected.
        transaction
            .execute(
                "
                UPDATE actor_profile
                SET payment_options = '[]'
                WHERE
                    actor_json IS NULL
                    AND
                    EXISTS (
                        SELECT 1
                        FROM jsonb_array_elements(payment_options) AS option
                        WHERE CAST(option ->> 'payment_type' AS SMALLINT) = $1
                    )
                ",
                &[&i16::from(&PaymentType::EthereumSubscription)],
            )
            .await?;
    };
    // Drop all subscription relationships...
    transaction
        .execute(
            "
            DELETE FROM relationship
            WHERE relationship_type = $1
            ",
            &[&RelationshipType::Subscription],
        )
        .await?;
    // ...reset the counters to match, then remove the records
    transaction
        .execute("UPDATE actor_profile SET subscriber_count = 0", &[])
        .await?;
    transaction.execute("DELETE FROM subscription", &[]).await?;
    transaction.commit().await?;
    Ok(())
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::database::test_utils::create_test_database;
    use crate::profiles::{queries::create_profile, types::ProfileCreateData};
    use crate::relationships::{queries::has_relationship, types::RelationshipType};
    use crate::users::{queries::create_user, types::UserCreateData};
    use serial_test::serial;
    // Creating a subscription must also create the corresponding
    // subscription relationship as a side effect.
    #[tokio::test]
    #[serial]
    async fn test_create_subscription() {
        let db_client = &mut create_test_database().await;
        // Sender profile that pays for the subscription
        let sender_data = ProfileCreateData {
            username: "sender".to_string(),
            ..Default::default()
        };
        let sender = create_profile(db_client, sender_data).await.unwrap();
        let sender_address = "0xb9c5714089478a327f09197987f16f9e5d936e8a";
        // Local user that receives the subscription
        let recipient_data = UserCreateData {
            username: "recipient".to_string(),
            password_hash: Some("test".to_string()),
            ..Default::default()
        };
        let recipient = create_user(db_client, recipient_data).await.unwrap();
        // Ethereum chain, so a sender address is required
        let chain_id = ChainId::ethereum_mainnet();
        let expires_at = Utc::now();
        let updated_at = Utc::now();
        create_subscription(
            db_client,
            &sender.id,
            Some(sender_address),
            &recipient.id,
            &chain_id,
            &expires_at,
            &updated_at,
        )
        .await
        .unwrap();
        // Verify the relationship row created by create_subscription
        let is_subscribed = has_relationship(
            db_client,
            &sender.id,
            &recipient.id,
            RelationshipType::Subscription,
        )
        .await
        .unwrap();
        assert!(is_subscribed);
    }
}

View file

@ -1,42 +0,0 @@
use chrono::{DateTime, Utc};
use postgres_types::FromSql;
use tokio_postgres::Row;
use uuid::Uuid;
use crate::database::DatabaseError;
use crate::invoices::types::DbChainId;
use crate::profiles::types::DbActorProfile;
/// Raw database representation of the `subscription` composite type.
#[derive(FromSql)]
#[postgres(name = "subscription")]
pub struct DbSubscription {
    pub id: i32,
    // Profile that pays for the subscription
    pub sender_id: Uuid,
    // Blockchain address of the sender; set for Ethereum
    // subscriptions (presumably None otherwise — see the
    // assertion in queries::create_subscription)
    pub sender_address: Option<String>,
    // Profile that receives the subscription
    pub recipient_id: Uuid,
    // Chain on which the subscription is paid (CAIP-2 id)
    pub chain_id: DbChainId,
    pub expires_at: DateTime<Utc>,
    pub updated_at: DateTime<Utc>,
}
/// View of a subscription with the sender's profile joined in;
/// built from query rows via `TryFrom<&Row>`.
pub struct Subscription {
    pub id: i32,
    // Full profile of the paying side
    pub sender: DbActorProfile,
    pub sender_address: Option<String>,
    pub expires_at: DateTime<Utc>,
}
impl TryFrom<&Row> for Subscription {
type Error = DatabaseError;
fn try_from(row: &Row) -> Result<Self, Self::Error> {
let db_subscription: DbSubscription = row.try_get("subscription")?;
let db_sender: DbActorProfile = row.try_get("sender")?;
Ok(Self {
id: db_subscription.id,
sender: db_sender,
sender_address: db_subscription.sender_address,
expires_at: db_subscription.expires_at,
})
}
}

View file

@ -1 +0,0 @@
pub mod queries;

Some files were not shown because too many files have changed in this diff Show more